Merge remote-tracking branch 'upstream/master'
diff --git a/.gitignore b/.gitignore
index 395a7ff..a242c60 100644
--- a/.gitignore
+++ b/.gitignore
@@ -24,6 +24,7 @@
 /git_bin
 /git-*_bin
 /svn_bin
+/external_bin
 /win_toolchain/vs2013_files
 /win_toolchain/.timestamps
 /win_toolchain/.vspro
@@ -45,3 +46,6 @@
 /tests/subversion_config/servers
 /tests/svn/
 /tests/svnrepo/
+
+# Ignore virtualenv created during bootstrapping.
+/ENV
diff --git a/.style.yapf b/.style.yapf
new file mode 100644
index 0000000..de0c6a7
--- /dev/null
+++ b/.style.yapf
@@ -0,0 +1,2 @@
+[style]
+based_on_style = chromium
diff --git a/OWNERS b/OWNERS
index 0343114..44c55ef 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,13 +1,18 @@
 set noparent
 agable@chromium.org
-bradnelson@google.com
-cmp@chromium.org
+bradnelson@chromium.org
 dpranke@chromium.org
+hinoka@chromium.org
 iannucci@chromium.org
 jochen@chromium.org
 maruel@chromium.org
-maruel@google.com
+nodir@chromium.org
 petermayo@chromium.org
+pgervais@chromium.org
 rogerta@chromium.org
 stip@chromium.org
-szager@chromium.org
+
+per-file commit_queue*=akuegel@chromium.org
+per-file commit_queue*=phajdan.jr@chromium.org
+per-file commit_queue*=sergiyb@chromium.org
+per-file commit_queue*=tandrii@chromium.org
diff --git a/PRESUBMIT.py b/PRESUBMIT.py
index 767eac6..41c56d3 100644
--- a/PRESUBMIT.py
+++ b/PRESUBMIT.py
@@ -18,10 +18,12 @@
   black_list = list(input_api.DEFAULT_BLACK_LIST) + [
       r'^cpplint\.py$',
       r'^cpplint_chromium\.py$',
+      r'^external_bin[\/\\].+',
       r'^python[0-9]*_bin[\/\\].+',
       r'^site-packages-py[0-9]\.[0-9][\/\\].+',
       r'^svn_bin[\/\\].+',
-      r'^testing_support[\/\\]_rietveld[\/\\].+']
+      r'^testing_support[\/\\]_rietveld[\/\\].+',
+      r'^bootstrap[\/\\].+']
   if os.path.exists('.gitignore'):
     with open('.gitignore') as fh:
       lines = [l.strip() for l in fh.readlines()]
diff --git a/apply_issue.py b/apply_issue.py
index 8bcb3b2..ba38987 100755
--- a/apply_issue.py
+++ b/apply_issue.py
@@ -18,6 +18,7 @@
 import breakpad  # pylint: disable=W0611
 
 import annotated_gclient
+import auth
 import checkout
 import fix_encoding
 import gclient_utils
@@ -56,13 +57,10 @@
       help='File containing the email address to access rietveld. '
            'If not specified, anonymous access will be used.')
   parser.add_option(
-      '-w', '--password',
-      help='Password for email addressed. Use - to read password from stdin. '
-           'if -k is provided, this is the private key file password.')
-  parser.add_option(
       '-k', '--private-key-file',
       help='Path to file containing a private key in p12 format for OAuth2 '
-           'authentication. Use -w to provide the decrypting password, if any.')
+           'authentication with "notasecret" password (as generated by Google '
+           'Cloud Console).')
   parser.add_option(
       '-i', '--issue', type='int', help='Rietveld issue number')
   parser.add_option(
@@ -92,13 +90,14 @@
                     help='Don\'t patch specified file(s).')
   parser.add_option('-d', '--ignore_deps', action='store_true',
                     help='Don\'t run gclient sync on DEPS changes.')
+
+  auth.add_auth_options(parser)
   options, args = parser.parse_args()
+  auth_config = auth.extract_auth_config_from_options(options)
 
   if options.whitelist and options.blacklist:
     parser.error('Cannot specify both --whitelist and --blacklist')
 
-  if options.password and options.private_key_file:
-    parser.error('-k and -w options are incompatible')
   if options.email and options.email_file:
     parser.error('-e and -E options are incompatible')
 
@@ -121,10 +120,6 @@
 
   options.revision_mapping = json.loads(options.revision_mapping)
 
-  if options.password == '-':
-    print('Reading password')
-    options.password = sys.stdin.readline().strip()
-
   # read email if needed
   if options.email_file:
     if not os.path.exists(options.email_file):
@@ -138,11 +133,11 @@
     # OAuth2 authentication
     obj = rietveld.JwtOAuth2Rietveld(options.server,
                                      options.email,
-                                     options.private_key_file,
-                                     private_key_password=options.password)
+                                     options.private_key_file)
     properties = obj.get_issue_properties(options.issue, False)
   else:
-    obj = rietveld.Rietveld(options.server, '', None)
+    # Passing None as auth_config disables authentication.
+    obj = rietveld.Rietveld(options.server, None)
     properties = None
     # Bad except clauses order (HTTPError is an ancestor class of
     # ClientLoginError)
@@ -156,77 +151,101 @@
         exit('FAIL: Login detected -- is issue private?')
       # TODO(maruel): A few 'Invalid username or password.' are printed first,
       # we should get rid of those.
-    except rietveld.upload.ClientLoginError, e:
+    except rietveld.upload.ClientLoginError as e:
       # Fine, we'll do proper authentication.
       pass
     if properties is None:
-      if options.email is not None:
-        obj = rietveld.Rietveld(options.server, options.email, options.password)
-        try:
-          properties = obj.get_issue_properties(options.issue, False)
-        except rietveld.upload.ClientLoginError, e:
-          if sys.stdout.closed:
-            print('Accessing the issue requires proper credentials.')
-            return 1
-      else:
-        print('Accessing the issue requires login.')
-        obj = rietveld.Rietveld(options.server, None, None)
-        try:
-          properties = obj.get_issue_properties(options.issue, False)
-        except rietveld.upload.ClientLoginError, e:
-          print('Accessing the issue requires proper credentials.')
-          return 1
+      obj = rietveld.Rietveld(options.server, auth_config, options.email)
+      try:
+        properties = obj.get_issue_properties(options.issue, False)
+      except rietveld.upload.ClientLoginError as e:
+        print('Accessing the issue requires proper credentials.')
+        return 1
 
   if not options.patchset:
     options.patchset = properties['patchsets'][-1]
     print('No patchset specified. Using patchset %d' % options.patchset)
 
-  print('Downloading the patch.')
-  try:
-    patchset = obj.get_patch(options.issue, options.patchset)
-  except urllib2.HTTPError, e:
-    print(
-        'Failed to fetch the patch for issue %d, patchset %d.\n'
-        'Try visiting %s/%d') % (
-            options.issue, options.patchset,
-            options.server, options.issue)
-    return 1
-  if options.whitelist:
-    patchset.patches = [patch for patch in patchset.patches
-                        if patch.filename in options.whitelist]
-  if options.blacklist:
-    patchset.patches = [patch for patch in patchset.patches
-                        if patch.filename not in options.blacklist]
-  for patch in patchset.patches:
-    print(patch)
-  full_dir = os.path.abspath(options.root_dir)
-  scm_type = scm.determine_scm(full_dir)
-  if scm_type == 'svn':
-    scm_obj = checkout.SvnCheckout(full_dir, None, None, None, None)
-  elif scm_type == 'git':
-    scm_obj = checkout.GitCheckout(full_dir, None, None, None, None)
-  elif scm_type == None:
-    scm_obj = checkout.RawCheckout(full_dir, None, None)
-  else:
-    parser.error('Couldn\'t determine the scm')
+  issues_patchsets_to_apply = [(options.issue, options.patchset)]
+  depends_on_info = obj.get_depends_on_patchset(options.issue, options.patchset)
+  while depends_on_info:
+    depends_on_issue = int(depends_on_info['issue'])
+    depends_on_patchset = int(depends_on_info['patchset'])
+    try:
+      depends_on_info = obj.get_depends_on_patchset(depends_on_issue,
+                                                    depends_on_patchset)
+      issues_patchsets_to_apply.insert(0, (depends_on_issue,
+                                           depends_on_patchset))
+    except urllib2.HTTPError:
+      print ('The patchset that was marked as a dependency no longer '
+             'exists: %s/%d/#ps%d' % (
+                 options.server, depends_on_issue, depends_on_patchset))
+      print 'Therefore it is likely that this patch will not apply cleanly.'
+      print
+      depends_on_info = None
 
-  # TODO(maruel): HACK, remove me.
-  # When run a build slave, make sure buildbot knows that the checkout was
-  # modified.
-  if options.root_dir == 'src' and getpass.getuser() == 'chrome-bot':
-    # See sourcedirIsPatched() in:
-    # http://src.chromium.org/viewvc/chrome/trunk/tools/build/scripts/slave/
-    #    chromium_commands.py?view=markup
-    open('.buildbot-patched', 'w').close()
+  num_issues_patchsets_to_apply = len(issues_patchsets_to_apply)
+  if num_issues_patchsets_to_apply > 1:
+    print
+    print 'apply_issue.py found %d dependent CLs.' % (
+        num_issues_patchsets_to_apply - 1)
+    print 'They will be applied in the following order:'
+    num = 1
+    for issue_to_apply, patchset_to_apply in issues_patchsets_to_apply:
+      print '  #%d %s/%d/#ps%d' % (
+          num, options.server, issue_to_apply, patchset_to_apply)
+      num += 1
+    print
 
-  print('\nApplying the patch.')
-  try:
-    scm_obj.apply_patch(patchset, verbose=True)
-  except checkout.PatchApplicationFailed, e:
-    print(str(e))
-    print('CWD=%s' % os.getcwd())
-    print('Checkout path=%s' % scm_obj.project_path)
-    return 1
+  for issue_to_apply, patchset_to_apply in issues_patchsets_to_apply:
+    issue_url = '%s/%d/#ps%d' % (options.server, issue_to_apply,
+                                 patchset_to_apply)
+    print('Downloading patch from %s' % issue_url)
+    try:
+      patchset = obj.get_patch(issue_to_apply, patchset_to_apply)
+    except urllib2.HTTPError as e:
+      print(
+          'Failed to fetch the patch for issue %d, patchset %d.\n'
+          'Try visiting %s/%d') % (
+              issue_to_apply, patchset_to_apply,
+              options.server, issue_to_apply)
+      return 1
+    if options.whitelist:
+      patchset.patches = [patch for patch in patchset.patches
+                          if patch.filename in options.whitelist]
+    if options.blacklist:
+      patchset.patches = [patch for patch in patchset.patches
+                          if patch.filename not in options.blacklist]
+    for patch in patchset.patches:
+      print(patch)
+    full_dir = os.path.abspath(options.root_dir)
+    scm_type = scm.determine_scm(full_dir)
+    if scm_type == 'svn':
+      scm_obj = checkout.SvnCheckout(full_dir, None, None, None, None)
+    elif scm_type == 'git':
+      scm_obj = checkout.GitCheckout(full_dir, None, None, None, None)
+    elif scm_type == None:
+      scm_obj = checkout.RawCheckout(full_dir, None, None)
+    else:
+      parser.error('Couldn\'t determine the scm')
+
+    # TODO(maruel): HACK, remove me.
+    # When run a build slave, make sure buildbot knows that the checkout was
+    # modified.
+    if options.root_dir == 'src' and getpass.getuser() == 'chrome-bot':
+      # See sourcedirIsPatched() in:
+      # http://src.chromium.org/viewvc/chrome/trunk/tools/build/scripts/slave/
+      #    chromium_commands.py?view=markup
+      open('.buildbot-patched', 'w').close()
+
+    print('\nApplying the patch from %s' % issue_url)
+    try:
+      scm_obj.apply_patch(patchset, verbose=True)
+    except checkout.PatchApplicationFailed as e:
+      print(str(e))
+      print('CWD=%s' % os.getcwd())
+      print('Checkout path=%s' % scm_obj.project_path)
+      return 1
 
   if ('DEPS' in map(os.path.basename, patchset.filenames)
       and not options.ignore_deps):
@@ -262,4 +281,8 @@
 
 if __name__ == "__main__":
   fix_encoding.fix_encoding()
-  sys.exit(main())
+  try:
+    sys.exit(main())
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/auth.py b/auth.py
new file mode 100644
index 0000000..6e0d2f3
--- /dev/null
+++ b/auth.py
@@ -0,0 +1,685 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Google OAuth2 related functions."""
+
+import BaseHTTPServer
+import collections
+import datetime
+import functools
+import hashlib
+import json
+import logging
+import optparse
+import os
+import socket
+import sys
+import threading
+import urllib
+import urlparse
+import webbrowser
+
+from third_party import httplib2
+from third_party.oauth2client import client
+from third_party.oauth2client import multistore_file
+
+
+# depot_tools/.
+DEPOT_TOOLS_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+# Google OAuth2 clients always have a secret, even if the client is an installed
+# application/utility such as this. Of course, in such cases the "secret" is
+# actually publicly known; security depends entirely on the secrecy of refresh
+# tokens, which effectively become bearer tokens. An attacker can impersonate
+# service's identity in OAuth2 flow. But that's generally fine as long as a list
+# of allowed redirect_uri's associated with client_id is limited to 'localhost'
+# or 'urn:ietf:wg:oauth:2.0:oob'. In that case attacker needs some process
+# running on user's machine to successfully complete the flow and grab refresh
+# token. When you have a malicious code running on your machine, you're screwed
+# anyway.
+# This particular set is managed by API Console project "chrome-infra-auth".
+OAUTH_CLIENT_ID = (
+    '446450136466-2hr92jrq8e6i4tnsa56b52vacp7t3936.apps.googleusercontent.com')
+OAUTH_CLIENT_SECRET = 'uBfbay2KCy9t4QveJ-dOqHtp'
+
+# List of space separated OAuth scopes for generated tokens. GAE apps usually
+# use userinfo.email scope for authentication.
+OAUTH_SCOPES = 'https://www.googleapis.com/auth/userinfo.email'
+
+# Additional OAuth scopes.
+ADDITIONAL_SCOPES = {
+  'code.google.com': 'https://www.googleapis.com/auth/projecthosting',
+}
+
+# Path to a file with cached OAuth2 credentials used by default relative to the
+# home dir (see _get_token_cache_path). It should be a safe location accessible
+# only to a current user: knowing content of this file is roughly equivalent to
+# knowing account password. Single file can hold multiple independent tokens
+# identified by token_cache_key (see Authenticator).
+OAUTH_TOKENS_CACHE = '.depot_tools_oauth2_tokens'
+
+
+# Authentication configuration extracted from command line options.
+# See doc string for 'make_auth_config' for meaning of fields.
+AuthConfig = collections.namedtuple('AuthConfig', [
+    'use_oauth2', # deprecated, will be always True
+    'save_cookies', # deprecated, will be removed
+    'use_local_webserver',
+    'webserver_port',
+    'refresh_token_json',
+])
+
+
+# OAuth access token with its expiration time (UTC datetime or None if unknown).
+AccessToken = collections.namedtuple('AccessToken', [
+    'token',
+    'expires_at',
+])
+
+
+# Refresh token passed via --auth-refresh-token-json.
+RefreshToken = collections.namedtuple('RefreshToken', [
+    'client_id',
+    'client_secret',
+    'refresh_token',
+])
+
+
+class AuthenticationError(Exception):
+  """Raised on errors related to authentication."""
+
+
+class LoginRequiredError(AuthenticationError):
+  """Interaction with the user is required to authenticate."""
+
+  def __init__(self, token_cache_key):
+    # HACK(vadimsh): It is assumed here that the token cache key is a hostname.
+    msg = (
+        'You are not logged in. Please login first by running:\n'
+        '  depot-tools-auth login %s' % token_cache_key)
+    super(LoginRequiredError, self).__init__(msg)
+
+
+def make_auth_config(
+    use_oauth2=None,
+    save_cookies=None,
+    use_local_webserver=None,
+    webserver_port=None,
+    refresh_token_json=None):
+  """Returns new instance of AuthConfig.
+
+  If some config option is None, it will be set to a reasonable default value.
+  This function also acts as an authoritative place for default values of
+  corresponding command line options.
+  """
+  default = lambda val, d: val if val is not None else d
+  return AuthConfig(
+      default(use_oauth2, True),
+      default(save_cookies, True),
+      default(use_local_webserver, not _is_headless()),
+      default(webserver_port, 8090),
+      default(refresh_token_json, ''))
+
+
+def add_auth_options(parser, default_config=None):
+  """Appends OAuth related options to OptionParser."""
+  default_config = default_config or make_auth_config()
+  parser.auth_group = optparse.OptionGroup(parser, 'Auth options')
+  parser.add_option_group(parser.auth_group)
+
+  # OAuth2 vs password switch.
+  auth_default = 'use OAuth2' if default_config.use_oauth2 else 'use password'
+  parser.auth_group.add_option(
+      '--oauth2',
+      action='store_true',
+      dest='use_oauth2',
+      default=default_config.use_oauth2,
+      help='Use OAuth 2.0 instead of a password. [default: %s]' % auth_default)
+  parser.auth_group.add_option(
+      '--no-oauth2',
+      action='store_false',
+      dest='use_oauth2',
+      default=default_config.use_oauth2,
+      help='Use password instead of OAuth 2.0. [default: %s]' % auth_default)
+
+  # Password related options, deprecated.
+  parser.auth_group.add_option(
+      '--no-cookies',
+      action='store_false',
+      dest='save_cookies',
+      default=default_config.save_cookies,
+      help='Do not save authentication cookies to local disk.')
+
+  # OAuth2 related options.
+  parser.auth_group.add_option(
+      '--auth-no-local-webserver',
+      action='store_false',
+      dest='use_local_webserver',
+      default=default_config.use_local_webserver,
+      help='Do not run a local web server when performing OAuth2 login flow.')
+  parser.auth_group.add_option(
+      '--auth-host-port',
+      type=int,
+      default=default_config.webserver_port,
+      help='Port a local web server should listen on. Used only if '
+          '--auth-no-local-webserver is not set. [default: %default]')
+  parser.auth_group.add_option(
+      '--auth-refresh-token-json',
+      default=default_config.refresh_token_json,
+      help='Path to a JSON file with role account refresh token to use.')
+
+
+def extract_auth_config_from_options(options):
+  """Given OptionParser parsed options, extracts AuthConfig from it.
+
+  OptionParser should be populated with auth options by 'add_auth_options'.
+  """
+  return make_auth_config(
+      use_oauth2=options.use_oauth2,
+      save_cookies=False if options.use_oauth2 else options.save_cookies,
+      use_local_webserver=options.use_local_webserver,
+      webserver_port=options.auth_host_port,
+      refresh_token_json=options.auth_refresh_token_json)
+
+
+def auth_config_to_command_options(auth_config):
+  """AuthConfig -> list of strings with command line options.
+
+  Omits options that are set to default values.
+  """
+  if not auth_config:
+    return []
+  defaults = make_auth_config()
+  opts = []
+  if auth_config.use_oauth2 != defaults.use_oauth2:
+    opts.append('--oauth2' if auth_config.use_oauth2 else '--no-oauth2')
+  if auth_config.save_cookies != auth_config.save_cookies:
+    if not auth_config.save_cookies:
+      opts.append('--no-cookies')
+  if auth_config.use_local_webserver != defaults.use_local_webserver:
+    if not auth_config.use_local_webserver:
+      opts.append('--auth-no-local-webserver')
+  if auth_config.webserver_port != defaults.webserver_port:
+    opts.extend(['--auth-host-port', str(auth_config.webserver_port)])
+  if auth_config.refresh_token_json != defaults.refresh_token_json:
+    opts.extend([
+        '--auth-refresh-token-json', str(auth_config.refresh_token_json)])
+  return opts
+
+
+def get_authenticator_for_host(hostname, config):
+  """Returns Authenticator instance to access given host.
+
+  Args:
+    hostname: a naked hostname or http(s)://<hostname>[/] URL. Used to derive
+        a cache key for token cache.
+    config: AuthConfig instance.
+
+  Returns:
+    Authenticator object.
+  """
+  hostname = hostname.lower().rstrip('/')
+  # Append some scheme, otherwise urlparse puts hostname into parsed.path.
+  if '://' not in hostname:
+    hostname = 'https://' + hostname
+  scopes = OAUTH_SCOPES
+  parsed = urlparse.urlparse(hostname)
+  if parsed.netloc in ADDITIONAL_SCOPES:
+    scopes = "%s %s" % (scopes, ADDITIONAL_SCOPES[parsed.netloc])
+
+  if parsed.path or parsed.params or parsed.query or parsed.fragment:
+    raise AuthenticationError(
+        'Expecting a hostname or root host URL, got %s instead' % hostname)
+  return Authenticator(parsed.netloc, config, scopes)
+
+
+class Authenticator(object):
+  """Object that knows how to refresh access tokens when needed.
+
+  Args:
+    token_cache_key: string key of a section of the token cache file to use
+        to keep the tokens. See hostname_to_token_cache_key.
+    config: AuthConfig object that holds authentication configuration.
+  """
+
+  def __init__(self, token_cache_key, config, scopes):
+    assert isinstance(config, AuthConfig)
+    assert config.use_oauth2
+    self._access_token = None
+    self._config = config
+    self._lock = threading.Lock()
+    self._token_cache_key = token_cache_key
+    self._external_token = None
+    self._scopes = scopes
+    if config.refresh_token_json:
+      self._external_token = _read_refresh_token_json(config.refresh_token_json)
+    logging.debug('Using auth config %r', config)
+
+  def login(self):
+    """Performs interactive login flow if necessary.
+
+    Raises:
+      AuthenticationError on error or if interrupted.
+    """
+    if self._external_token:
+      raise AuthenticationError(
+          'Can\'t run login flow when using --auth-refresh-token-json.')
+    return self.get_access_token(
+        force_refresh=True, allow_user_interaction=True)
+
+  def logout(self):
+    """Revokes the refresh token and deletes it from the cache.
+
+    Returns True if had some credentials cached.
+    """
+    with self._lock:
+      self._access_token = None
+      storage = self._get_storage()
+      credentials = storage.get()
+      had_creds = bool(credentials)
+      if credentials and credentials.refresh_token and credentials.revoke_uri:
+        try:
+          credentials.revoke(httplib2.Http())
+        except client.TokenRevokeError as e:
+          logging.warning('Failed to revoke refresh token: %s', e)
+      storage.delete()
+    return had_creds
+
+  def has_cached_credentials(self):
+    """Returns True if long term credentials (refresh token) are in cache.
+
+    Doesn't make network calls.
+
+    If returns False, get_access_token() later will ask for interactive login by
+    raising LoginRequiredError.
+
+    If returns True, most probably get_access_token() won't ask for interactive
+    login, though it is not guaranteed, since cached token can be already
+    revoked and there's no way to figure this out without actually trying to use
+    it.
+    """
+    with self._lock:
+      return bool(self._get_cached_credentials())
+
+  def get_access_token(self, force_refresh=False, allow_user_interaction=False):
+    """Returns AccessToken, refreshing it if necessary.
+
+    Args:
+      force_refresh: forcefully refresh access token even if it is not expired.
+      allow_user_interaction: True to enable blocking for user input if needed.
+
+    Raises:
+      AuthenticationError on error or if authentication flow was interrupted.
+      LoginRequiredError if user interaction is required, but
+          allow_user_interaction is False.
+    """
+    with self._lock:
+      if force_refresh:
+        logging.debug('Forcing access token refresh')
+        self._access_token = self._create_access_token(allow_user_interaction)
+        return self._access_token
+
+      # Load from on-disk cache on a first access.
+      if not self._access_token:
+        self._access_token = self._load_access_token()
+
+      # Refresh if expired or missing.
+      if not self._access_token or _needs_refresh(self._access_token):
+        # Maybe some other process already updated it, reload from the cache.
+        self._access_token = self._load_access_token()
+        # Nope, still expired, need to run the refresh flow.
+        if not self._access_token or _needs_refresh(self._access_token):
+          self._access_token = self._create_access_token(allow_user_interaction)
+
+      return self._access_token
+
+  def get_token_info(self):
+    """Returns a result of /oauth2/v2/tokeninfo call with token info."""
+    access_token = self.get_access_token()
+    resp, content = httplib2.Http().request(
+        uri='https://www.googleapis.com/oauth2/v2/tokeninfo?%s' % (
+            urllib.urlencode({'access_token': access_token.token})))
+    if resp.status == 200:
+      return json.loads(content)
+    raise AuthenticationError('Failed to fetch the token info: %r' % content)
+
+  def authorize(self, http):
+    """Monkey patches authentication logic of httplib2.Http instance.
+
+    The modified http.request method will add authentication headers to each
+    request and will refresh access_tokens when a 401 is received on a
+    request.
+
+    Args:
+       http: An instance of httplib2.Http.
+
+    Returns:
+       A modified instance of http that was passed in.
+    """
+    # Adapted from oauth2client.OAuth2Credentials.authorize.
+
+    request_orig = http.request
+
+    @functools.wraps(request_orig)
+    def new_request(
+        uri, method='GET', body=None, headers=None,
+        redirections=httplib2.DEFAULT_MAX_REDIRECTS,
+        connection_type=None):
+      headers = (headers or {}).copy()
+      headers['Authorization'] = 'Bearer %s' % self.get_access_token().token
+      resp, content = request_orig(
+          uri, method, body, headers, redirections, connection_type)
+      if resp.status in client.REFRESH_STATUS_CODES:
+        logging.info('Refreshing due to a %s', resp.status)
+        access_token = self.get_access_token(force_refresh=True)
+        headers['Authorization'] = 'Bearer %s' % access_token.token
+        return request_orig(
+            uri, method, body, headers, redirections, connection_type)
+      else:
+        return (resp, content)
+
+    http.request = new_request
+    return http
+
+  ## Private methods.
+
+  def _get_storage(self):
+    """Returns oauth2client.Storage with cached tokens."""
+    # Do not mix cache keys for different externally provided tokens.
+    if self._external_token:
+      token_hash = hashlib.sha1(self._external_token.refresh_token).hexdigest()
+      cache_key = '%s:refresh_tok:%s' % (self._token_cache_key, token_hash)
+    else:
+      cache_key = self._token_cache_key
+    path = _get_token_cache_path()
+    logging.debug('Using token storage %r (cache key %r)', path, cache_key)
+    return multistore_file.get_credential_storage_custom_string_key(
+        path, cache_key)
+
+  def _get_cached_credentials(self):
+    """Returns oauth2client.Credentials loaded from storage."""
+    storage = self._get_storage()
+    credentials = storage.get()
+
+    if not credentials:
+      logging.debug('No cached token')
+    else:
+      _log_credentials_info('cached token', credentials)
+
+    # Is using --auth-refresh-token-json?
+    if self._external_token:
+      # Cached credentials are valid and match external token -> use them. It is
+      # important to reuse credentials from the storage because they contain
+      # cached access token.
+      valid = (
+          credentials and not credentials.invalid and
+          credentials.refresh_token == self._external_token.refresh_token and
+          credentials.client_id == self._external_token.client_id and
+          credentials.client_secret == self._external_token.client_secret)
+      if valid:
+        logging.debug('Cached credentials match external refresh token')
+        return credentials
+      # Construct new credentials from externally provided refresh token,
+      # associate them with cache storage (so that access_token will be placed
+      # in the cache later too).
+      logging.debug('Putting external refresh token into the cache')
+      credentials = client.OAuth2Credentials(
+          access_token=None,
+          client_id=self._external_token.client_id,
+          client_secret=self._external_token.client_secret,
+          refresh_token=self._external_token.refresh_token,
+          token_expiry=None,
+          token_uri='https://accounts.google.com/o/oauth2/token',
+          user_agent=None,
+          revoke_uri=None)
+      credentials.set_store(storage)
+      storage.put(credentials)
+      return credentials
+
+    # Not using external refresh token -> return whatever is cached.
+    return credentials if (credentials and not credentials.invalid) else None
+
+  def _load_access_token(self):
+    """Returns cached AccessToken if it is not expired yet."""
+    logging.debug('Reloading access token from cache')
+    creds = self._get_cached_credentials()
+    if not creds or not creds.access_token or creds.access_token_expired:
+      logging.debug('Access token is missing or expired')
+      return None
+    return AccessToken(str(creds.access_token), creds.token_expiry)
+
+  def _create_access_token(self, allow_user_interaction=False):
+    """Mints and caches a new access token, launching OAuth2 dance if necessary.
+
+    Uses cached refresh token, if present. In that case user interaction is not
+    required and function will finish quietly. Otherwise it will launch 3-legged
+    OAuth2 flow, that needs user interaction.
+
+    Args:
+      allow_user_interaction: if True, allow interaction with the user (e.g.
+          reading standard input, or launching a browser).
+
+    Returns:
+      AccessToken.
+
+    Raises:
+      AuthenticationError on error or if authentication flow was interrupted.
+      LoginRequiredError if user interaction is required, but
+          allow_user_interaction is False.
+    """
+    logging.debug(
+        'Making new access token (allow_user_interaction=%r)',
+        allow_user_interaction)
+    credentials = self._get_cached_credentials()
+
+    # 3-legged flow with (perhaps cached) refresh token.
+    refreshed = False
+    if credentials and not credentials.invalid:
+      try:
+        logging.debug('Attempting to refresh access_token')
+        credentials.refresh(httplib2.Http())
+        _log_credentials_info('refreshed token', credentials)
+        refreshed = True
+      except client.Error as err:
+        logging.warning(
+            'OAuth error during access token refresh (%s). '
+            'Attempting a full authentication flow.', err)
+
+    # Refresh token is missing or invalid, go through the full flow.
+    if not refreshed:
+      # Can't refresh externally provided token.
+      if self._external_token:
+        raise AuthenticationError(
+            'Token provided via --auth-refresh-token-json is no longer valid.')
+      if not allow_user_interaction:
+        logging.debug('Requesting user to login')
+        raise LoginRequiredError(self._token_cache_key)
+      logging.debug('Launching OAuth browser flow')
+      credentials = _run_oauth_dance(self._config, self._scopes)
+      _log_credentials_info('new token', credentials)
+
+    logging.info(
+        'OAuth access_token refreshed. Expires in %s.',
+        credentials.token_expiry - datetime.datetime.utcnow())
+    storage = self._get_storage()
+    credentials.set_store(storage)
+    storage.put(credentials)
+    return AccessToken(str(credentials.access_token), credentials.token_expiry)
+
+
+## Private functions.
+
+
+def _get_token_cache_path():
+  # On non Win just use HOME.
+  if sys.platform != 'win32':
+    return os.path.join(os.path.expanduser('~'), OAUTH_TOKENS_CACHE)
+  # Prefer USERPROFILE over HOME, since HOME is overridden in
+  # git-..._bin/cmd/git.cmd to point to depot_tools. depot-tools-auth.py script
+  # (and all other scripts) doesn't use this override and thus uses another
+  # value for HOME. git.cmd doesn't touch USERPROFILE though, and usually
+  # USERPROFILE == HOME on Windows.
+  if 'USERPROFILE' in os.environ:
+    return os.path.join(os.environ['USERPROFILE'], OAUTH_TOKENS_CACHE)
+  return os.path.join(os.path.expanduser('~'), OAUTH_TOKENS_CACHE)
+
+
+def _is_headless():
+  """True if machine doesn't seem to have a display."""
+  return sys.platform == 'linux2' and not os.environ.get('DISPLAY')
+
+
+def _read_refresh_token_json(path):
+  """Returns RefreshToken by reading it from the JSON file."""
+  try:
+    with open(path, 'r') as f:
+      data = json.load(f)
+      return RefreshToken(
+          client_id=str(data.get('client_id', OAUTH_CLIENT_ID)),
+          client_secret=str(data.get('client_secret', OAUTH_CLIENT_SECRET)),
+          refresh_token=str(data['refresh_token']))
+  except (IOError, ValueError) as e:
+    raise AuthenticationError(
+        'Failed to read refresh token from %s: %s' % (path, e))
+  except KeyError as e:
+    raise AuthenticationError(
+        'Failed to read refresh token from %s: missing key %s' % (path, e))
+
+
+def _needs_refresh(access_token):
+  """True if AccessToken should be refreshed."""
+  if access_token.expires_at is not None:
+    # Allow 5 min of clock skew between client and backend.
+    now = datetime.datetime.utcnow() + datetime.timedelta(seconds=300)
+    return now >= access_token.expires_at
+  # Token without expiration time never expires.
+  return False
+
+
+def _log_credentials_info(title, credentials):
+  """Dumps (non sensitive) part of client.Credentials object to debug log."""
+  if credentials:
+    logging.debug('%s info: %r', title, {
+        'access_token_expired': credentials.access_token_expired,
+        'has_access_token': bool(credentials.access_token),
+        'invalid': credentials.invalid,
+        'utcnow': datetime.datetime.utcnow(),
+        'token_expiry': credentials.token_expiry,
+    })
+
+
+def _run_oauth_dance(config, scopes):
+  """Perform full 3-legged OAuth2 flow with the browser.
+
+  Returns:
+    oauth2client.Credentials.
+
+  Raises:
+    AuthenticationError on errors.
+  """
+  flow = client.OAuth2WebServerFlow(
+      OAUTH_CLIENT_ID,
+      OAUTH_CLIENT_SECRET,
+      scopes,
+      approval_prompt='force')
+
+  use_local_webserver = config.use_local_webserver
+  port = config.webserver_port
+  if config.use_local_webserver:
+    success = False
+    try:
+      httpd = _ClientRedirectServer(('localhost', port), _ClientRedirectHandler)
+    except socket.error:
+      pass
+    else:
+      success = True
+    use_local_webserver = success
+    if not success:
+      print(
+        'Failed to start a local webserver listening on port %d.\n'
+        'Please check your firewall settings and locally running programs that '
+        'may be blocking or using those ports.\n\n'
+        'Falling back to --auth-no-local-webserver and continuing with '
+        'authentication.\n' % port)
+
+  if use_local_webserver:
+    oauth_callback = 'http://localhost:%s/' % port
+  else:
+    oauth_callback = client.OOB_CALLBACK_URN
+  flow.redirect_uri = oauth_callback
+  authorize_url = flow.step1_get_authorize_url()
+
+  if use_local_webserver:
+    webbrowser.open(authorize_url, new=1, autoraise=True)
+    print(
+      'Your browser has been opened to visit:\n\n'
+      '    %s\n\n'
+      'If your browser is on a different machine then exit and re-run this '
+      'application with the command-line parameter\n\n'
+      '  --auth-no-local-webserver\n' % authorize_url)
+  else:
+    print(
+      'Go to the following link in your browser:\n\n'
+      '    %s\n' % authorize_url)
+
+  try:
+    code = None
+    if use_local_webserver:
+      httpd.handle_request()
+      if 'error' in httpd.query_params:
+        raise AuthenticationError(
+            'Authentication request was rejected: %s' %
+            httpd.query_params['error'])
+      if 'code' not in httpd.query_params:
+        raise AuthenticationError(
+            'Failed to find "code" in the query parameters of the redirect.\n'
+            'Try running with --auth-no-local-webserver.')
+      code = httpd.query_params['code']
+    else:
+      code = raw_input('Enter verification code: ').strip()
+  except KeyboardInterrupt:
+    raise AuthenticationError('Authentication was canceled.')
+
+  try:
+    return flow.step2_exchange(code)
+  except client.FlowExchangeError as e:
+    raise AuthenticationError('Authentication has failed: %s' % e)
+
+
+class _ClientRedirectServer(BaseHTTPServer.HTTPServer):
+  """A server to handle OAuth 2.0 redirects back to localhost.
+
+  Waits for a single request and parses the query parameters
+  into query_params and then stops serving.
+  """
+  query_params = {}
+
+
+class _ClientRedirectHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+  """A handler for OAuth 2.0 redirects back to localhost.
+
+  Waits for a single request and parses the query parameters
+  into the servers query_params and then stops serving.
+  """
+
+  def do_GET(self):
+    """Handle a GET request.
+
+    Parses the query parameters and prints a message
+    if the flow has completed. Note that we can't detect
+    if an error occurred.
+    """
+    self.send_response(200)
+    self.send_header('Content-type', 'text/html')
+    self.end_headers()
+    query = self.path.split('?', 1)[-1]
+    query = dict(urlparse.parse_qsl(query))
+    self.server.query_params = query
+    self.wfile.write('<html><head><title>Authentication Status</title></head>')
+    self.wfile.write('<body><p>The authentication flow has completed.</p>')
+    self.wfile.write('</body></html>')
+
+  def log_message(self, _format, *args):
+    """Do not log messages to stdout while running as command line program."""
diff --git a/bootstrap/.gitignore b/bootstrap/.gitignore
new file mode 100644
index 0000000..7603e80
--- /dev/null
+++ b/bootstrap/.gitignore
@@ -0,0 +1,2 @@
+BUILD_ENV
+wheelhouse
diff --git a/bootstrap/bootstrap.py b/bootstrap/bootstrap.py
new file mode 100755
index 0000000..eb596af
--- /dev/null
+++ b/bootstrap/bootstrap.py
@@ -0,0 +1,234 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import contextlib
+import glob
+import logging
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+from util import STORAGE_URL, OBJECT_URL, LOCAL_STORAGE_PATH, LOCAL_OBJECT_URL
+from util import read_deps, merge_deps, print_deps, platform_tag
+
+LOGGER = logging.getLogger(__name__)
+
+# /path/to/infra
+ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+
+PYTHON_BAT_WIN = '@%~dp0\\..\\Scripts\\python.exe %*'
+
+
+class NoWheelException(Exception):
+  def __init__(self, name, version, build, source_sha):
+    super(NoWheelException, self).__init__(
+        'No matching wheel found for (%s==%s (build %s_%s))' %
+        (name, version, build, source_sha))
+
+
+def check_pydistutils():
+  if os.path.exists(os.path.expanduser('~/.pydistutils.cfg')):
+    print >> sys.stderr, '\n'.join([
+      '',
+      '',
+      '=========== ERROR ===========',
+      'You have a ~/.pydistutils.cfg file, which interferes with the ',
+      'infra virtualenv environment. Please move it to the side and bootstrap ',
+      'again. Once infra has bootstrapped, you may move it back.',
+      '',
+      'Upstream bug: https://github.com/pypa/virtualenv/issues/88/',
+      ''
+    ])
+    sys.exit(1)
+
+
+def ls(prefix):
+  from pip._vendor import requests  # pylint: disable=E0611
+  data = requests.get(STORAGE_URL, params=dict(
+      prefix=prefix,
+      fields='items(name,md5Hash)'
+  )).json()
+  entries = data.get('items', [])
+  for entry in entries:
+    entry['md5Hash'] = entry['md5Hash'].decode('base64').encode('hex')
+    entry['local'] = False
+  # Also look in the local cache
+  entries.extend([
+    {'name': fname, 'md5Hash': None, 'local': True}
+    for fname in glob.glob(os.path.join(LOCAL_STORAGE_PATH,
+                                        prefix.split('/')[-1] + '*'))])
+  return entries
+
+
+def sha_for(deps_entry):
+  if 'rev' in deps_entry:
+    return deps_entry['rev']
+  else:
+    return deps_entry['gs'].split('.')[0]
+
+
+def get_links(deps):
+  import pip.wheel  # pylint: disable=E0611
+  plat_tag = platform_tag()
+
+  links = []
+
+  for name, dep in deps.iteritems():
+    version, source_sha = dep['version'] , sha_for(dep)
+    prefix = 'wheels/{}-{}-{}_{}'.format(name, version, dep['build'],
+                                         source_sha)
+    generic_link = None
+    binary_link = None
+    local_link = None
+
+    for entry in ls(prefix):
+      fname = entry['name'].split('/')[-1]
+      md5hash = entry['md5Hash']
+      wheel_info = pip.wheel.Wheel.wheel_file_re.match(fname)
+      if not wheel_info:
+        LOGGER.warn('Skipping invalid wheel: %r', fname)
+        continue
+
+      if pip.wheel.Wheel(fname).supported():
+        if entry['local']:
+          link = LOCAL_OBJECT_URL.format(entry['name'])
+          local_link = link
+          continue
+        else:
+          link = OBJECT_URL.format(entry['name'], md5hash)
+        if fname.endswith('none-any.whl'):
+          if generic_link:
+            LOGGER.error(
+              'Found more than one generic matching wheel for %r: %r',
+              prefix, dep)
+            continue
+          generic_link = link
+        elif plat_tag in fname:
+          if binary_link:
+            LOGGER.error(
+              'Found more than one binary matching wheel for %r: %r',
+              prefix, dep)
+            continue
+          binary_link = link
+
+    if not binary_link and not generic_link and not local_link:
+      raise NoWheelException(name, version, dep['build'], source_sha)
+
+    links.append(local_link or binary_link or generic_link)
+
+  return links
+
+
+@contextlib.contextmanager
+def html_index(links):
+  tf = tempfile.mktemp('.html')
+  try:
+    with open(tf, 'w') as f:
+      print >> f, '<html><body>'
+      for link in links:
+        print >> f, '<a href="%s">wat</a>' % link
+      print >> f, '</body></html>'
+    yield tf
+  finally:
+    os.unlink(tf)
+
+
+def install(deps):
+  bin_dir = 'Scripts' if sys.platform.startswith('win') else 'bin'
+  pip = os.path.join(sys.prefix, bin_dir, 'pip')
+
+  links = get_links(deps)
+  with html_index(links) as ipath:
+    requirements = []
+    # TODO(iannucci): Do this as a requirements.txt
+    for name, deps_entry in deps.iteritems():
+      if not deps_entry.get('implicit'):
+        requirements.append('%s==%s' % (name, deps_entry['version']))
+    subprocess.check_call(
+        [pip, 'install', '--no-index', '--download-cache',
+         os.path.join(ROOT, '.wheelcache'), '-f', ipath] + requirements)
+
+
+def activate_env(env, deps, quiet=False):
+  if hasattr(sys, 'real_prefix'):
+    LOGGER.error('Already activated environment!')
+    return
+
+  if not quiet:
+    print 'Activating environment: %r' % env
+  assert isinstance(deps, dict)
+
+  manifest_path = os.path.join(env, 'manifest.pyl')
+  cur_deps = read_deps(manifest_path)
+  if cur_deps != deps:
+    if not quiet:
+      print '  Removing old environment: %r' % cur_deps
+    shutil.rmtree(env, ignore_errors=True)
+    cur_deps = None
+
+  if cur_deps is None:
+    check_pydistutils()
+
+    if not quiet:
+      print '  Building new environment'
+    # Add in bundled virtualenv lib
+    sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'virtualenv'))
+    import virtualenv  # pylint: disable=F0401
+    virtualenv.create_environment(
+        env, search_dirs=virtualenv.file_search_dirs())
+
+  if not quiet:
+    print '  Activating environment'
+  # Ensure hermeticity during activation.
+  os.environ.pop('PYTHONPATH', None)
+  bin_dir = 'Scripts' if sys.platform.startswith('win') else 'bin'
+  activate_this = os.path.join(env, bin_dir, 'activate_this.py')
+  execfile(activate_this, dict(__file__=activate_this))
+
+  if cur_deps is None:
+    if not quiet:
+      print '  Installing deps'
+      print_deps(deps, indent=2, with_implicit=False)
+    install(deps)
+    virtualenv.make_environment_relocatable(env)
+    with open(manifest_path, 'wb') as f:
+      f.write(repr(deps) + '\n')
+
+  # Create bin\python.bat on Windows to unify path where Python is found.
+  if sys.platform.startswith('win'):
+    bin_path = os.path.join(env, 'bin')
+    if not os.path.isdir(bin_path):
+      os.makedirs(bin_path)
+    python_bat_path = os.path.join(bin_path, 'python.bat')
+    if not os.path.isfile(python_bat_path):
+      with open(python_bat_path, 'w') as python_bat_file:
+        python_bat_file.write(PYTHON_BAT_WIN)
+
+  if not quiet:
+    print 'Done creating environment'
+
+
+def main(args):
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--deps-file', '--deps_file', action='append',
+                      help='Path to deps.pyl file (may be used multiple times)')
+  parser.add_argument('-q', '--quiet', action='store_true', default=False,
+                      help='Suppress all output')
+  parser.add_argument('env_path',
+                      help='Path to place environment (default: %(default)s)',
+                      default='ENV')
+  opts = parser.parse_args(args)
+
+  deps = merge_deps(opts.deps_file)
+  activate_env(opts.env_path, deps, opts.quiet)
+
+
+if __name__ == '__main__':
+  logging.basicConfig()
+  LOGGER.setLevel(logging.DEBUG)
+  sys.exit(main(sys.argv[1:]))
diff --git a/bootstrap/deps.pyl b/bootstrap/deps.pyl
new file mode 100644
index 0000000..c6236d4
--- /dev/null
+++ b/bootstrap/deps.pyl
@@ -0,0 +1,15 @@
+#vim: ft=python:
+{
+  'wheel': {
+    'version': '0.24.0',
+    'build': '0',
+    'gs': 'c02262299489646af253067e8136c060a93572e3.tar.gz',
+  },
+
+  'protobuf': {
+    'version': '2.6.0',
+    'build': '0',
+    'repo': 'external/github.com/google/protobuf',
+    'rev': '629a556879cc84e0f52546f0484b65b72ce44fe8',
+  },
+}
diff --git a/bootstrap/util.py b/bootstrap/util.py
new file mode 100644
index 0000000..d64b142
--- /dev/null
+++ b/bootstrap/util.py
@@ -0,0 +1,87 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import ast
+import contextlib
+import os
+import platform
+import shutil
+import sys
+import tempfile
+
+
+ROOT = os.path.dirname(os.path.abspath(__file__))
+WHEELHOUSE = os.path.join(ROOT, 'wheelhouse')
+
+BUCKET = 'chrome-python-wheelhouse'
+STORAGE_URL = 'https://www.googleapis.com/storage/v1/b/{}/o'.format(BUCKET)
+OBJECT_URL = 'https://storage.googleapis.com/{}/{{}}#md5={{}}'.format(BUCKET)
+LOCAL_OBJECT_URL = 'file://{}'
+
+LOCAL_STORAGE_PATH = os.path.join(ROOT, 'wheelhouse_cache')
+
+SOURCE_URL = 'gs://{}/sources/{{}}'.format(BUCKET)
+WHEELS_URL = 'gs://{}/wheels/'.format(BUCKET)
+
+
+class DepsConflictException(Exception):
+  def __init__(self, name):
+    super(DepsConflictException, self).__init__(
+        'Package \'%s\' is defined twice in deps.pyl' % name)
+
+
+def platform_tag():
+  if sys.platform.startswith('linux'):
+    return '_{0}_{1}'.format(*platform.linux_distribution())
+  return ''
+
+
+def print_deps(deps, indent=1, with_implicit=True):
+  for dep, entry in deps.iteritems():
+    if not with_implicit and entry.get('implicit'):
+      continue
+    print '  ' * indent + '%s: %r' % (dep, entry)
+  print
+
+
+@contextlib.contextmanager
+def tempdir(*args, **kwargs):
+  tdir = None
+  try:
+    tdir = tempfile.mkdtemp(*args, **kwargs)
+    yield tdir
+  finally:
+    if tdir:
+      shutil.rmtree(tdir, ignore_errors=True)
+
+
+@contextlib.contextmanager
+def tempname(*args, **kwargs):
+  tmp = None
+  try:
+    tmp = tempfile.mktemp(*args, **kwargs)
+    yield tmp
+  finally:
+    if tmp:
+      try:
+        os.unlink(tmp)
+      except OSError:
+        pass
+
+
+def read_deps(path):
+  if os.path.exists(path):
+    with open(path, 'rb') as f:
+      return ast.literal_eval(f.read())
+
+
+def merge_deps(paths):
+  deps = {}
+  for path in paths:
+    d = read_deps(path)
+    for key in d:
+      if key in deps:
+        raise DepsConflictException(key)
+    deps.update(d)
+  return deps
diff --git a/bootstrap/virtualenv/.gitignore b/bootstrap/virtualenv/.gitignore
new file mode 100644
index 0000000..6a79b83
--- /dev/null
+++ b/bootstrap/virtualenv/.gitignore
@@ -0,0 +1,10 @@
+virtualenv.egg-info
+build
+dist
+docs/_build
+.DS_Store
+*.pyc
+mock-*.egg
+nose-*.egg
+.tox
+tests/test_activate_actual.output
diff --git a/bootstrap/virtualenv/.travis.yml b/bootstrap/virtualenv/.travis.yml
new file mode 100644
index 0000000..b0c6d1a
--- /dev/null
+++ b/bootstrap/virtualenv/.travis.yml
@@ -0,0 +1,28 @@
+language: python
+
+env:
+  - TOXENV=py26
+  - TOXENV=py27
+  - TOXENV=py32
+  - TOXENV=py33
+  - TOXENV=py34
+  - TOXENV=pypy
+  - TOXENV=pypy3
+  - TOXENV=docs
+
+install: pip install tox
+
+script: tox
+
+branches:
+  only:
+    - master
+    - develop
+    - 1.11.X
+
+notifications:
+  irc:
+    channels:
+      - "irc.freenode.org#pypa-dev"
+    use_notice: true
+    skip_join: true
diff --git a/bootstrap/virtualenv/AUTHORS.txt b/bootstrap/virtualenv/AUTHORS.txt
new file mode 100644
index 0000000..2724941
--- /dev/null
+++ b/bootstrap/virtualenv/AUTHORS.txt
@@ -0,0 +1,91 @@
+Author
+------
+
+Ian Bicking
+
+Maintainers
+-----------
+
+Brian Rosner
+Carl Meyer
+Jannis Leidel
+Paul Moore
+Paul Nasrat
+Marcus Smith
+
+Contributors
+------------
+
+Alex Grönholm
+Anatoly Techtonik
+Antonio Cuni
+Antonio Valentino
+Armin Ronacher
+Barry Warsaw
+Benjamin Root
+Bradley Ayers
+Branden Rolston
+Brandon Carl
+Brian Kearns
+Cap Petschulat
+CBWhiz
+Chris Adams
+Chris McDonough
+Christos Kontas
+Christian Hudon
+Christian Stefanescu
+Christopher Nilsson
+Cliff Xuan
+Curt Micol
+Damien Nozay
+Dan Sully
+Daniel Hahler
+Daniel Holth
+David Schoonover
+Denis Costa
+Doug Hellmann
+Doug Napoleone
+Douglas Creager
+Eduard-Cristian Stefan
+Erik M. Bray
+Ethan Jucovy
+Gabriel de Perthuis
+Gunnlaugur Thor Briem
+Graham Dennis
+Greg Haskins
+Jason Penney
+Jason R. Coombs
+Jeff Hammel
+Jeremy Orem
+Jason Penney
+Jason R. Coombs
+John Kleint
+Jonathan Griffin
+Jonathan Hitchcock
+Jorge Vargas
+Josh Bronson
+Kamil Kisiel
+Kyle Gibson
+Konstantin Zemlyak
+Kumar McMillan
+Lars Francke
+Marc Abramowitz
+Mika Laitio
+Mike Hommey
+Miki Tebeka
+Philip Jenvey
+Philippe Ombredanne
+Piotr Dobrogost
+Preston Holmes
+Ralf Schmitt
+Raul Leal
+Ronny Pfannschmidt
+Satrajit Ghosh
+Sergio de Carvalho
+Stefano Rivera
+Tarek Ziadé
+Thomas Aglassinger
+Vinay Sajip
+Vitaly Babiy
+Vladimir Rutsky
+Wang Xuerui
\ No newline at end of file
diff --git a/bootstrap/virtualenv/CONTRIBUTING.rst b/bootstrap/virtualenv/CONTRIBUTING.rst
new file mode 100644
index 0000000..924e7e2
--- /dev/null
+++ b/bootstrap/virtualenv/CONTRIBUTING.rst
@@ -0,0 +1,21 @@
+virtualenv
+==========
+
+See docs/index.rst for user documentation.
+
+Contributor notes
+-----------------
+
+* virtualenv is designed to work on python 2 and 3 with a single code base.
+  Use Python 3 print-function syntax, and always ``use sys.exc_info()[1]``
+  inside the ``except`` block to get at exception objects.
+
+* virtualenv uses git-flow_ to `coordinate development`_. The latest stable
+  version should exist on the *master* branch, and new work should be
+  integrated to *develop*.
+
+* All changes to files inside virtualenv_embedded should be integrated to
+  ``virtualenv.py`` with ``bin/rebuild-script.py``.
+
+.. _git-flow: https://github.com/nvie/gitflow
+.. _coordinate development: http://nvie.com/posts/a-successful-git-branching-model/
diff --git a/bootstrap/virtualenv/LICENSE.txt b/bootstrap/virtualenv/LICENSE.txt
new file mode 100644
index 0000000..7e00d5d
--- /dev/null
+++ b/bootstrap/virtualenv/LICENSE.txt
@@ -0,0 +1,22 @@
+Copyright (c) 2007 Ian Bicking and Contributors
+Copyright (c) 2009 Ian Bicking, The Open Planning Project
+Copyright (c) 2011-2014 The virtualenv developers
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/bootstrap/virtualenv/MANIFEST.in b/bootstrap/virtualenv/MANIFEST.in
new file mode 100644
index 0000000..62621c8
--- /dev/null
+++ b/bootstrap/virtualenv/MANIFEST.in
@@ -0,0 +1,11 @@
+recursive-include bin *
+recursive-include docs *
+recursive-include scripts *
+recursive-include virtualenv_support *.whl
+recursive-include virtualenv_embedded *
+recursive-exclude docs/_templates *
+recursive-exclude docs/_build *
+include virtualenv_support/__init__.py
+include *.py
+include AUTHORS.txt
+include LICENSE.txt
diff --git a/bootstrap/virtualenv/README.rst b/bootstrap/virtualenv/README.rst
new file mode 100644
index 0000000..5a7a545
--- /dev/null
+++ b/bootstrap/virtualenv/README.rst
@@ -0,0 +1,10 @@
+virtualenv
+==========
+
+.. image:: https://pypip.in/v/virtualenv/badge.png
+        :target: https://pypi.python.org/pypi/virtualenv
+
+.. image:: https://secure.travis-ci.org/pypa/virtualenv.png?branch=develop
+   :target: http://travis-ci.org/pypa/virtualenv
+
+For documentation, see https://virtualenv.pypa.io/
diff --git a/bootstrap/virtualenv/bin/rebuild-script.py b/bootstrap/virtualenv/bin/rebuild-script.py
new file mode 100755
index 0000000..44fb129
--- /dev/null
+++ b/bootstrap/virtualenv/bin/rebuild-script.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+"""
+Helper script to rebuild virtualenv.py from virtualenv_support
+"""
+
+import re
+import os
+import sys
+
+here = os.path.dirname(__file__)
+script = os.path.join(here, '..', 'virtualenv.py')
+
+file_regex = re.compile(
+    r'##file (.*?)\n([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*convert\("""(.*?)"""\)',
+    re.S)
+file_template = '##file %(filename)s\n%(varname)s = convert("""\n%(data)s""")'
+
+def rebuild():
+    f = open(script, 'rb')
+    content = f.read()
+    f.close()
+    parts = []
+    last_pos = 0
+    match = None
+    for match in file_regex.finditer(content):
+        parts.append(content[last_pos:match.start()])
+        last_pos = match.end()
+        filename = match.group(1)
+        varname = match.group(2)
+        data = match.group(3)
+        print('Found reference to file %s' % filename)
+        pathname = os.path.join(here, '..', 'virtualenv_embedded', filename)
+        f = open(pathname, 'rb')
+        c = f.read()
+        f.close()
+        new_data = c.encode('zlib').encode('base64')
+        if new_data == data:
+            print('  Reference up to date (%s bytes)' % len(c))
+            parts.append(match.group(0))
+            continue
+        print('  Content changed (%s bytes -> %s bytes)' % (
+            zipped_len(data), len(c)))
+        new_match = file_template % dict(
+            filename=filename,
+            varname=varname,
+            data=new_data)
+        parts.append(new_match)
+    parts.append(content[last_pos:])
+    new_content = ''.join(parts)
+    if new_content != content:
+        sys.stdout.write('Content updated; overwriting... ')
+        f = open(script, 'wb')
+        f.write(new_content)
+        f.close()
+        print('done.')
+    else:
+        print('No changes in content')
+    if match is None:
+        print('No variables were matched/found')
+
+def zipped_len(data):
+    if not data:
+        return 'no data'
+    try:
+        return len(data.decode('base64').decode('zlib'))
+    except:
+        return 'unknown'
+
+if __name__ == '__main__':
+    rebuild()
+    
diff --git a/bootstrap/virtualenv/docs/Makefile b/bootstrap/virtualenv/docs/Makefile
new file mode 100644
index 0000000..e4de9f8
--- /dev/null
+++ b/bootstrap/virtualenv/docs/Makefile
@@ -0,0 +1,130 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS    =
+SPHINXBUILD   = sphinx-build
+PAPER         =
+BUILDDIR      = _build
+
+# Internal variables.
+PAPEROPT_a4     = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest
+
+help:
+	@echo "Please use \`make <target>' where <target> is one of"
+	@echo "  html       to make standalone HTML files"
+	@echo "  dirhtml    to make HTML files named index.html in directories"
+	@echo "  singlehtml to make a single large HTML file"
+	@echo "  pickle     to make pickle files"
+	@echo "  json       to make JSON files"
+	@echo "  htmlhelp   to make HTML files and a HTML help project"
+	@echo "  qthelp     to make HTML files and a qthelp project"
+	@echo "  devhelp    to make HTML files and a Devhelp project"
+	@echo "  epub       to make an epub"
+	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
+	@echo "  text       to make text files"
+	@echo "  man        to make manual pages"
+	@echo "  changes    to make an overview of all changed/added/deprecated items"
+	@echo "  linkcheck  to check all external links for integrity"
+	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+	-rm -rf $(BUILDDIR)/*
+
+html:
+	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+	@echo
+	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+	@echo
+	@echo "Build finished; now you can process the pickle files."
+
+json:
+	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+	@echo
+	@echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+	@echo
+	@echo "Build finished; now you can run HTML Help Workshop with the" \
+	      ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+	@echo
+	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
+	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/django-compressor.qhcp"
+	@echo "To view the help file:"
+	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/django-compressor.qhc"
+
+devhelp:
+	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+	@echo
+	@echo "Build finished."
+	@echo "To view the help file:"
+	@echo "# mkdir -p $$HOME/.local/share/devhelp/django-compressor"
+	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/django-compressor"
+	@echo "# devhelp"
+
+epub:
+	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+	@echo
+	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo
+	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+	@echo "Run \`make' in that directory to run these through (pdf)latex" \
+	      "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo "Running LaTeX files through pdflatex..."
+	make -C $(BUILDDIR)/latex all-pdf
+	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+	@echo
+	@echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+	@echo
+	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+changes:
+	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+	@echo
+	@echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+	@echo
+	@echo "Link check complete; look for any errors in the above output " \
+	      "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+	@echo "Testing of doctests in the sources finished, look at the " \
+	      "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/bootstrap/virtualenv/docs/changes.rst b/bootstrap/virtualenv/docs/changes.rst
new file mode 100644
index 0000000..9d74fda
--- /dev/null
+++ b/bootstrap/virtualenv/docs/changes.rst
@@ -0,0 +1,747 @@
+Release History
+===============
+
+12.0 (2014-12-22)
+~~~~~~~~~~~~~~~~~
+
+* **PROCESS** Version numbers are now simply ``X.Y`` where the leading ``1``
+  has been dropped.
+* Split up documentation into structured pages
+* Now using pytest framework
+* Correct sys.path ordering for debian, issue #461
+* Correctly throws error on older Pythons, issue #619
+* Allow for empty $PATH, pull #601
+* Don't set prompt if $env:VIRTUAL_ENV_DISABLE_PROMPT is set for Powershell
+* Updated setuptools to 7.0
+
+1.11.6 (2014-05-16)
+~~~~~~~~~~~~~~~~~~~
+
+* Updated setuptools to 3.6
+* Updated pip to 1.5.6
+
+1.11.5 (2014-05-03)
+~~~~~~~~~~~~~~~~~~~
+
+* Updated setuptools to 3.4.4
+* Updated documentation to use https://virtualenv.pypa.io/
+* Updated pip to 1.5.5
+
+1.11.4 (2014-02-21)
+~~~~~~~~~~~~~~~~~~~
+
+* Updated pip to 1.5.4
+
+
+1.11.3 (2014-02-20)
+~~~~~~~~~~~~~~~~~~~
+
+* Updated setuptools to 2.2
+* Updated pip to 1.5.3
+
+
+1.11.2 (2014-01-26)
+~~~~~~~~~~~~~~~~~~~
+
+* Fixed easy_install installed virtualenvs by updated pip to 1.5.2
+
+1.11.1 (2014-01-20)
+~~~~~~~~~~~~~~~~~~~
+
+* Fixed an issue where pip and setuptools were not getting installed when using
+  the ``--system-site-packages`` flag.
+* Updated setuptools to fix an issue when installed with easy_install
+* Fixed an issue with Python 3.4 and sys.stdout encoding being set to ascii
+* Upgraded pip to v1.5.1
+* Upgraded setuptools to v2.1
+
+1.11 (2014-01-02)
+~~~~~~~~~~~~~~~~~
+
+* **BACKWARDS INCOMPATIBLE** Switched to using wheels for the bundled copies of
+  setuptools and pip. Using sdists is no longer supported - users supplying
+  their own versions of pip/setuptools will need to provide wheels.
+* **BACKWARDS INCOMPATIBLE** Modified the handling of ``--extra-search-dirs``.
+  This option now works like pip's ``--find-links`` option, in that it adds
+  extra directories to search for compatible wheels for pip and setuptools.
+  The actual wheel selected is chosen based on version and compatibility, using
+  the same algorithm as ``pip install setuptools``.
+* Fixed #495, --always-copy was failing (#PR 511)
+* Upgraded pip to v1.5
+* Upgraded setuptools to v1.4
+
+1.10.1 (2013-08-07)
+~~~~~~~~~~~~~~~~~~~
+
+* **New Signing Key** Release 1.10.1 is using a different key than normal with
+  fingerprint: 7C6B 7C5D 5E2B 6356 A926 F04F 6E3C BCE9 3372 DCFA
+* Upgraded pip to v1.4.1
+* Upgraded setuptools to v0.9.8
+
+
+1.10 (2013-07-23)
+~~~~~~~~~~~~~~~~~
+
+* **BACKWARDS INCOMPATIBLE** Dropped support for Python 2.5. The minimum
+  supported Python version is now Python 2.6.
+
+* **BACKWARDS INCOMPATIBLE** Using ``virtualenv.py`` as an isolated script
+  (i.e. without an associated ``virtualenv_support`` directory) is no longer
+  supported for security reasons and will fail with an error.
+
+  Along with this, ``--never-download`` is now always pinned to ``True``, and
+  is only being maintained in the short term for backward compatibility
+  (Pull #412).
+
+* **IMPORTANT** Switched to the new setuptools (v0.9.7) which has been merged
+  with Distribute_ again and works for Python 2 and 3 with one codebase.
+  The ``--distribute`` and ``--setuptools`` options are now no-op.
+
+* Updated to pip 1.4.
+
+* Added support for PyPy3k
+
+* Added the option to use a version number with the ``-p`` option to get the
+  system copy of that Python version (Windows only)
+
+* Removed embedded ``ez_setup.py``, ``distribute_setup.py`` and
+  ``distribute_from_egg.py`` files as part of switching to merged setuptools.
+
+* Fixed ``--relocatable`` to work better on Windows.
+
+* Fixed issue with readline on Windows.
+
+.. _Distribute: https://pypi.python.org/pypi/distribute
+
+1.9.1 (2013-03-08)
+~~~~~~~~~~~~~~~~~~
+
+* Updated to pip 1.3.1 that fixed a major backward incompatible change of
+  parsing URLs to externally hosted packages that got accidentally included
+  in pip 1.3.
+
+1.9 (2013-03-07)
+~~~~~~~~~~~~~~~~
+
+* Unset VIRTUAL_ENV environment variable in deactivate.bat (Pull #364)
+* Upgraded distribute to 0.6.34.
+* Added ``--no-setuptools`` and ``--no-pip`` options (Pull #336).
+* Fixed Issue #373. virtualenv-1.8.4 was failing in cygwin (Pull #382).
+* Fixed Issue #378. virtualenv is now "multiarch" aware on debian/ubuntu (Pull #379).
+* Fixed issue with readline module path on pypy and OSX (Pull #374).
+* Made 64bit detection compatible with Python 2.5 (Pull #393).
+
+
+1.8.4 (2012-11-25)
+~~~~~~~~~~~~~~~~~~
+
+* Updated distribute to 0.6.31. This fixes #359 (numpy install regression) on
+  UTF-8 platforms, and provides a workaround on other platforms:
+  ``PYTHONIOENCODING=utf8 pip install numpy``.
+
+* When installing virtualenv via curl, don't forget to filter out arguments
+  the distribute setup script won't understand. Fixes #358.
+
+* Added some more integration tests.
+
+* Removed the unsupported embedded setuptools egg for Python 2.4 to reduce
+  file size.
+
+1.8.3 (2012-11-21)
+~~~~~~~~~~~~~~~~~~
+
+* Fixed readline on OS X. Thanks minrk
+
+* Updated distribute to 0.6.30 (improves our error reporting, plus new
+  distribute features and fixes). Thanks Gabriel (g2p)
+
+* Added compatibility with multiarch Python (Python 3.3 for example). Added an
+  integration test. Thanks Gabriel (g2p)
+
+* Added ability to install distribute from a user-provided egg, rather than the
+  bundled sdist, for better speed. Thanks Paul Moore.
+
+* Make the creation of lib64 symlink smarter about already-existing symlink,
+  and more explicit about full paths. Fixes #334 and #330. Thanks Jeremy Orem.
+
+* Give lib64 site-dir preference over lib on 64-bit systems, to avoid wrong
+  32-bit compiles in the venv. Fixes #328. Thanks Damien Nozay.
+
+* Fix a bug with prompt-handling in ``activate.csh`` in non-interactive csh
+  shells. Fixes #332. Thanks Benjamin Root for report and patch.
+
+* Make it possible to create a virtualenv from within a Python
+  3.3. pyvenv. Thanks Chris McDonough for the report.
+
+* Add optional --setuptools option to be able to switch to it in case
+  distribute is the default (like in Debian).
+
+1.8.2 (2012-09-06)
+~~~~~~~~~~~~~~~~~~
+
+* Updated the included pip version to 1.2.1 to fix regressions introduced
+  there in 1.2.
+
+
+1.8.1 (2012-09-03)
+~~~~~~~~~~~~~~~~~~
+
+* Fixed distribute version used with `--never-download`. Thanks michr for
+  report and patch.
+
+* Fix creating Python 3.3 based virtualenvs by unsetting the
+  ``__PYVENV_LAUNCHER__`` environment variable in subprocesses.
+
+
+1.8 (2012-09-01)
+~~~~~~~~~~~~~~~~
+
+* **Dropped support for Python 2.4** The minimum supported Python version is
+  now Python 2.5.
+
+* Fix `--relocatable` on systems that use lib64. Fixes #78. Thanks Branden
+  Rolston.
+
+* Symlink some additional modules under Python 3. Fixes #194. Thanks Vinay
+  Sajip, Ian Clelland, and Stefan Holek for the report.
+
+* Fix ``--relocatable`` when a script uses ``__future__`` imports. Thanks
+  Branden Rolston.
+
+* Fix a bug in the config option parser that prevented setting negative
+  options with environment variables. Thanks Ralf Schmitt.
+
+* Allow setting ``--no-site-packages`` from the config file.
+
+* Use ``/usr/bin/multiarch-platform`` if available to figure out the include
+  directory. Thanks for the patch, Mika Laitio.
+
+* Fix ``install_name_tool`` replacement to work on Python 3.X.
+
+* Handle paths of users' site-packages on Mac OS X correctly when changing
+  the prefix.
+
+* Updated the embedded version of distribute to 0.6.28 and pip to 1.2.
+
+
+1.7.2 (2012-06-22)
+~~~~~~~~~~~~~~~~~~
+
+* Updated to distribute 0.6.27.
+
+* Fix activate.fish on OS X. Fixes #8. Thanks David Schoonover.
+
+* Create a virtualenv-x.x script with the Python version when installing, so
+  virtualenv for multiple Python versions can be installed to the same
+  script location. Thanks Miki Tebeka.
+
+* Restored ability to create a virtualenv with a path longer than 78
+  characters, without breaking creation of virtualenvs with non-ASCII paths.
+  Thanks, Bradley Ayers.
+
+* Added ability to create virtualenvs without having installed Apple's
+  developers tools (using an own implementation of ``install_name_tool``).
+  Thanks Mike Hommey.
+
+* Fixed PyPy and Jython support on Windows. Thanks Konstantin Zemlyak.
+
+* Added pydoc script to ease use. Thanks Marc Abramowitz. Fixes #149.
+
+* Fixed creating a bootstrap script on Python 3. Thanks Raul Leal. Fixes #280.
+
+* Fixed inconsistency when having set the ``PYTHONDONTWRITEBYTECODE`` env var
+  with the --distribute option or the ``VIRTUALENV_USE_DISTRIBUTE`` env var.
+  ``VIRTUALENV_USE_DISTRIBUTE`` is now considered again as a legacy alias.
+
+
+1.7.1.2 (2012-02-17)
+~~~~~~~~~~~~~~~~~~~~
+
+* Fixed minor issue in `--relocatable`. Thanks, Cap Petschulat.
+
+
+1.7.1.1 (2012-02-16)
+~~~~~~~~~~~~~~~~~~~~
+
+* Bumped the version string in ``virtualenv.py`` up, too.
+
+* Fixed rST rendering bug of long description.
+
+
+1.7.1 (2012-02-16)
+~~~~~~~~~~~~~~~~~~
+
+* Update embedded pip to version 1.1.
+
+* Fix `--relocatable` under Python 3. Thanks Doug Hellmann.
+
+* Added environ PATH modification to activate_this.py. Thanks Doug
+  Napoleone. Fixes #14.
+
+* Support creating virtualenvs directly from a Python build directory on
+  Windows. Thanks CBWhiz. Fixes #139.
+
+* Use non-recursive symlinks to fix things up for posix_local install
+  scheme. Thanks michr.
+
+* Made activate script available for use with msys and cygwin on Windows.
+  Thanks Greg Haskins, Cliff Xuan, Jonathan Griffin and Doug Napoleone.
+  Fixes #176.
+
+* Fixed creation of virtualenvs on Windows when Python is not installed for
+  all users. Thanks Anatoly Techtonik for report and patch and Doug
+  Napoleone for testing and confirmation. Fixes #87.
+
+* Fixed creation of virtualenvs using -p in installs where some modules
+  that ought to be in the standard library (e.g. `readline`) are actually
+  installed in `site-packages` next to `virtualenv.py`. Thanks Greg Haskins
+  for report and fix. Fixes #167.
+
+* Added activation script for Powershell (signed by Jannis Leidel). Many
+  thanks to Jason R. Coombs.
+
+
+1.7 (2011-11-30)
+~~~~~~~~~~~~~~~~
+
+* Gave user-provided ``--extra-search-dir`` priority over default dirs for
+  finding setuptools/distribute (it already had priority for finding pip).
+  Thanks Ethan Jucovy.
+
+* Updated embedded Distribute release to 0.6.24. Thanks Alex Gronholm.
+
+* Made ``--no-site-packages`` behavior the default behavior.  The
+  ``--no-site-packages`` flag is still permitted, but displays a warning when
+  used. Thanks Chris McDonough.
+
+* New flag: ``--system-site-packages``; this flag should be passed to get the
+  previous default global-site-package-including behavior back.
+
+* Added ability to set command options as environment variables and options
+  in a ``virtualenv.ini`` file.
+
+* Fixed various encoding related issues with paths. Thanks Gunnlaugur Thor Briem.
+
+* Made ``virtualenv.py`` script executable.
+
+
+1.6.4 (2011-07-21)
+~~~~~~~~~~~~~~~~~~
+
+* Restored ability to run on Python 2.4, too.
+
+
+1.6.3 (2011-07-16)
+~~~~~~~~~~~~~~~~~~
+
+* Restored ability to run on Python < 2.7.
+
+
+1.6.2 (2011-07-16)
+~~~~~~~~~~~~~~~~~~
+
+* Updated embedded distribute release to 0.6.19.
+
+* Updated embedded pip release to 1.0.2.
+
+* Fixed #141 - Be smarter about finding pkg_resources when using the
+  non-default Python interpreter (by using the ``-p`` option).
+
+* Fixed #112 - Fixed path in docs.
+
+* Fixed #109 - Corrected doctests of a Logger method.
+
+* Fixed #118 - Fixed creating virtualenvs on platforms that use the
+  "posix_local" install scheme, such as Ubuntu with Python 2.7.
+
+* Add missing library to Python 3 virtualenvs (``_dummy_thread``).
+
+
+1.6.1 (2011-04-30)
+~~~~~~~~~~~~~~~~~~
+
+* Start to use git-flow.
+
+* Added support for PyPy 1.5
+
+* Fixed #121 -- added sanity-checking of the -p argument. Thanks Paul Nasrat.
+
+* Added progress meter for pip installation as well as setuptools. Thanks Ethan
+  Jucovy.
+
+* Added --never-download and --search-dir options. Thanks Ethan Jucovy.
+
+
+1.6
+~~~
+
+* Added Python 3 support! Huge thanks to Vinay Sajip and Vitaly Babiy.
+
+* Fixed creation of virtualenvs on Mac OS X when standard library modules
+  (readline) are installed outside the standard library.
+
+* Updated bundled pip to 1.0.
+
+
+1.5.2
+~~~~~
+
+* Moved main repository to Github: https://github.com/pypa/virtualenv
+
+* Transferred primary maintenance from Ian to Jannis Leidel, Carl Meyer and Brian Rosner
+
+* Fixed a few more pypy related bugs.
+
+* Updated bundled pip to 0.8.2.
+
+* Handed project over to new team of maintainers.
+
+* Moved virtualenv to Github at https://github.com/pypa/virtualenv
+
+
+1.5.1
+~~~~~
+
+* Added ``_weakrefset`` requirement for Python 2.7.1.
+
+* Fixed Windows regression in 1.5
+
+
+1.5
+~~~
+
+* Include pip 0.8.1.
+
+* Add support for PyPy.
+
+* Uses a proper temporary dir when installing environment requirements.
+
+* Add ``--prompt`` option to be able to override the default prompt prefix.
+
+* Fix an issue with ``--relocatable`` on Windows.
+
+* Fix issue with installing the wrong version of distribute.
+
+* Add fish and csh activate scripts.
+
+
+1.4.9
+~~~~~
+
+* Include pip 0.7.2
+
+
+1.4.8
+~~~~~
+
+* Fix for Mac OS X Framework builds that use
+  ``--universal-archs=intel``
+
+* Fix ``activate_this.py`` on Windows.
+
+* Allow ``$PYTHONHOME`` to be set, so long as you use ``source
+  bin/activate`` it will get unset; if you leave it set and do not
+  activate the environment it will still break the environment.
+
+* Include pip 0.7.1
+
+
+1.4.7
+~~~~~
+
+* Include pip 0.7
+
+
+1.4.6
+~~~~~
+
+* Allow ``activate.sh`` to skip updating the prompt (by setting
+  ``$VIRTUAL_ENV_DISABLE_PROMPT``).
+
+
+1.4.5
+~~~~~
+
+* Include pip 0.6.3
+
+* Fix ``activate.bat`` and ``deactivate.bat`` under Windows when
+  ``PATH`` contained a parenthesis
+
+
+1.4.4
+~~~~~
+
+* Include pip 0.6.2 and Distribute 0.6.10
+
+* Create the ``virtualenv`` script even when Setuptools isn't
+  installed
+
+* Fix problem with ``virtualenv --relocate`` when ``bin/`` has
+  subdirectories (e.g., ``bin/.svn/``); from Alan Franzoni.
+
+* If you set ``$VIRTUALENV_DISTRIBUTE`` then virtualenv will use
+  Distribute by default (so you don't have to remember to use
+  ``--distribute``).
+
+
+1.4.3
+~~~~~
+
+* Include pip 0.6.1
+
+
+1.4.2
+~~~~~
+
+* Fix pip installation on Windows
+
+* Fix use of stand-alone ``virtualenv.py`` (and boot scripts)
+
+* Exclude ~/.local (user site-packages) from environments when using
+  ``--no-site-packages``
+
+
+1.4.1
+~~~~~
+
+* Include pip 0.6
+
+
+1.4
+~~~
+
+* Updated setuptools to 0.6c11
+
+* Added the --distribute option
+
+* Fixed packaging problem of support-files
+
+
+1.3.4
+~~~~~
+
+* Virtualenv now copies the actual embedded Python binary on
+  Mac OS X to fix a hang on Snow Leopard (10.6).
+
+* Fail more gracefully on Windows when ``win32api`` is not installed.
+
+* Fix site-packages taking precedence over Jython's ``__classpath__``
+  and also specially handle the new ``__pyclasspath__`` entry in
+  ``sys.path``.
+
+* Now copies Jython's ``registry`` file to the virtualenv if it exists.
+
+* Better find libraries when compiling extensions on Windows.
+
+* Create ``Scripts\pythonw.exe`` on Windows.
+
+* Added support for the Debian/Ubuntu
+  ``/usr/lib/pythonX.Y/dist-packages`` directory.
+
+* Set ``distutils.sysconfig.get_config_vars()['LIBDIR']`` (based on
+  ``sys.real_prefix``) which is reported to help building on Windows.
+
+* Make ``deactivate`` work on ksh
+
+* Fixes for ``--python``: make it work with ``--relocatable`` and the
+  symlink created to the exact Python version.
+
+
+1.3.3
+~~~~~
+
+* Use Windows newlines in ``activate.bat``, which has been reported to help
+  when using non-ASCII directory names.
+
+* Fixed compatibility with Jython 2.5b1.
+
+* Added a function ``virtualenv.install_python`` for more fine-grained
+  access to what ``virtualenv.create_environment`` does.
+
+* Fix `a problem <https://bugs.launchpad.net/virtualenv/+bug/241581>`_
+  with Windows and paths that contain spaces.
+
+* If ``/path/to/env/.pydistutils.cfg`` exists (or
+  ``/path/to/env/pydistutils.cfg`` on Windows systems) then ignore
+  ``~/.pydistutils.cfg`` and use that other file instead.
+
+* Fix `a problem
+  <https://bugs.launchpad.net/virtualenv/+bug/340050>`_ picking up
+  some ``.so`` libraries in ``/usr/local``.
+
+
+1.3.2
+~~~~~
+
+* Remove the ``[install] prefix = ...`` setting from the virtualenv
+  ``distutils.cfg`` -- this has been causing problems for a lot of
+  people, in rather obscure ways.
+
+* If you use a boot script it will attempt to import ``virtualenv``
+  and find a pre-downloaded Setuptools egg using that.
+
+* Added platform-specific paths, like ``/usr/lib/pythonX.Y/plat-linux2``
+
+
+1.3.1
+~~~~~
+
+* Real Python 2.6 compatibility.  Backported the Python 2.6 updates to
+  ``site.py``, including `user directories
+  <http://docs.python.org/dev/whatsnew/2.6.html#pep-370-per-user-site-packages-directory>`_
+  (this means older versions of Python will support user directories,
+  whether intended or not).
+
+* Always set ``[install] prefix`` in ``distutils.cfg`` -- previously
+  on some platforms where a system-wide ``distutils.cfg`` was present
+  with a ``prefix`` setting, packages would be installed globally
+  (usually in ``/usr/local/lib/pythonX.Y/site-packages``).
+
+* Sometimes Cygwin seems to leave ``.exe`` off ``sys.executable``; a
+  workaround is added.
+
+* Fix ``--python`` option.
+
+* Fixed handling of Jython environments that use a
+  jython-complete.jar.
+
+
+1.3
+~~~
+
+* Update to Setuptools 0.6c9
+* Added an option ``virtualenv --relocatable EXISTING_ENV``, which
+  will make an existing environment "relocatable" -- the paths will
+  not be absolute in scripts, ``.egg-info`` and ``.pth`` files.  This
+  may assist in building environments that can be moved and copied.
+  You have to run this *after* any new packages installed.
+* Added ``bin/activate_this.py``, a file you can use like
+  ``execfile("path_to/activate_this.py",
+  dict(__file__="path_to/activate_this.py"))`` -- this will activate
+  the environment in place, similar to what `the mod_wsgi example
+  does <http://code.google.com/p/modwsgi/wiki/VirtualEnvironments>`_.
+* For Mac framework builds of Python, the site-packages directory
+  ``/Library/Python/X.Y/site-packages`` is added to ``sys.path``, from
+  Andrea Rech.
+* Some platform-specific modules in Macs are added to the path now
+  (``plat-darwin/``, ``plat-mac/``, ``plat-mac/lib-scriptpackages``),
+  from Andrea Rech.
+* Fixed a small Bashism in the ``bin/activate`` shell script.
+* Added ``__future__`` to the list of required modules, for Python
+  2.3.  You'll still need to backport your own ``subprocess`` module.
+* Fixed the ``__classpath__`` entry in Jython's ``sys.path`` taking
+  precedence over virtualenv's libs.
+
+
+1.2
+~~~
+
+* Added a ``--python`` option to select the Python interpreter.
+* Add ``warnings`` to the modules copied over, for Python 2.6 support.
+* Add ``sets`` to the module copied over for Python 2.3 (though Python
+  2.3 still probably doesn't work).
+
+
+1.1.1
+~~~~~
+
+* Added support for Jython 2.5.
+
+
+1.1
+~~~
+
+* Added support for Python 2.6.
+* Fix a problem with missing ``DLLs/zlib.pyd`` on Windows.  Create
+  ``bin/python`` (or ``bin/python.exe``) even when you run virtualenv
+  with an interpreter named, e.g., ``python2.4``
+* Fix MacPorts Python
+* Added --unzip-setuptools option
+* Update to Setuptools 0.6c8
+* If the current directory is not writable, run ez_setup.py in ``/tmp``
+* Copy or symlink over the ``include`` directory so that packages will
+  more consistently compile.
+
+
+1.0
+~~~
+
+* Fix build on systems that use ``/usr/lib64``, distinct from
+  ``/usr/lib`` (specifically CentOS x64).
+* Fixed bug in ``--clear``.
+* Fixed typos in ``deactivate.bat``.
+* Preserve ``$PYTHONPATH`` when calling subprocesses.
+
+
+0.9.2
+~~~~~
+
+* Fix include dir copying on Windows (makes compiling possible).
+* Include the main ``lib-tk`` in the path.
+* Patch ``distutils.sysconfig``: ``get_python_inc`` and
+  ``get_python_lib`` to point to the global locations.
+* Install ``distutils.cfg`` before Setuptools, so that system
+  customizations of ``distutils.cfg`` won't affect the installation.
+* Add ``bin/pythonX.Y`` to the virtualenv (in addition to
+  ``bin/python``).
+* Fixed an issue with Mac Framework Python builds, and absolute paths
+  (from Ronald Oussoren).
+
+
+0.9.1
+~~~~~
+
+* Improve ability to create a virtualenv from inside a virtualenv.
+* Fix a little bug in ``bin/activate``.
+* Actually get ``distutils.cfg`` to work reliably.
+
+
+0.9
+~~~
+
+* Added ``lib-dynload`` and ``config`` to things that need to be
+  copied over in an environment.
+* Copy over or symlink the ``include`` directory, so that you can
+  build packages that need the C headers.
+* Include a ``distutils`` package, so you can locally update
+  ``distutils.cfg`` (in ``lib/pythonX.Y/distutils/distutils.cfg``).
+* Better avoid downloading Setuptools, and hitting PyPI on environment
+  creation.
+* Fix a problem creating a ``lib64/`` directory.
+* Should work on MacOSX Framework builds (the default Python
+  installations on Mac).  Thanks to Ronald Oussoren.
+
+
+0.8.4
+~~~~~
+
+* Windows installs would sometimes give errors about ``sys.prefix`` that
+  were inaccurate.
+* Slightly prettier output.
+
+
+0.8.3
+~~~~~
+
+* Added support for Windows.
+
+
+0.8.2
+~~~~~
+
+* Give a better warning if you are on an unsupported platform (Mac
+  Framework Pythons, and Windows).
+* Give error about running while inside a workingenv.
+* Give better error message about Python 2.3.
+
+
+0.8.1
+~~~~~
+
+Fixed packaging of the library.
+
+
+0.8
+~~~
+
+Initial release.  Everything is changed and new!
diff --git a/bootstrap/virtualenv/docs/conf.py b/bootstrap/virtualenv/docs/conf.py
new file mode 100644
index 0000000..1d89554
--- /dev/null
+++ b/bootstrap/virtualenv/docs/conf.py
@@ -0,0 +1,149 @@
+# -*- coding: utf-8 -*-
+#
+# Paste documentation build configuration file, created by
+# sphinx-quickstart on Tue Apr 22 22:08:49 2008.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# The contents of this file are pickled, so don't put values in the namespace
+# that aren't pickleable (module imports are okay, they're removed automatically).
+#
+# All configuration values have a default value; values that are commented out
+# serve to show the default value.
+
+import os
+import sys
+
+on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
+
+# If your extensions are in another directory, add it here.
+sys.path.insert(0, os.path.abspath(os.pardir))
+
+# General configuration
+# ---------------------
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ['sphinx.ext.autodoc']
+
+# Add any paths that contain templates here, relative to this directory.
+## FIXME: disabled for now because I haven't figured out how to use this:
+#templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General substitutions.
+project = 'virtualenv'
+copyright = '2007-2014, Ian Bicking, The Open Planning Project, PyPA'
+
+# The default replacements for |version| and |release|, also used in various
+# other places throughout the built documents.
+try:
+    from virtualenv import __version__
+    # The short X.Y version.
+    version = '.'.join(__version__.split('.')[:2])
+    # The full version, including alpha/beta/rc tags.
+    release = __version__
+except ImportError:
+    version = release = 'dev'
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+today_fmt = '%B %d, %Y'
+
+# List of documents that shouldn't be included in the build.
+unused_docs = []
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+
+# Options for HTML output
+# -----------------------
+
+# The style sheet to use for HTML and HTML Help pages. A file of that name
+# must exist either in Sphinx' static/ path, or in one of the custom paths
+# given in html_static_path.
+#html_style = 'default.css'
+
+html_theme = 'default'
+if not on_rtd:
+    try:
+        import sphinx_rtd_theme
+        html_theme = 'sphinx_rtd_theme'
+        html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
+    except ImportError:
+        pass
+
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+# html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Content template for the index page.
+#html_index = ''
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_use_modindex = True
+
+# If true, the reST sources are included in the HTML build as _sources/<name>.
+#html_copy_source = True
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'Pastedoc'
+
+
+# Options for LaTeX output
+# ------------------------
+
+# The paper size ('letter' or 'a4').
+#latex_paper_size = 'letter'
+
+# The font size ('10pt', '11pt' or '12pt').
+#latex_font_size = '10pt'
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, document class [howto/manual]).
+#latex_documents = []
+
+# Additional stuff for the LaTeX preamble.
+#latex_preamble = ''
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_use_modindex = True
diff --git a/bootstrap/virtualenv/docs/development.rst b/bootstrap/virtualenv/docs/development.rst
new file mode 100644
index 0000000..aba2785
--- /dev/null
+++ b/bootstrap/virtualenv/docs/development.rst
@@ -0,0 +1,61 @@
+Development
+===========
+
+Contributing
+------------
+
+Refer to the `pip development`_ documentation - it applies equally to
+virtualenv, except that virtualenv issues should be filed on the `virtualenv
+repo`_ at GitHub.
+
+Virtualenv's release schedule is tied to pip's -- each time there's a new pip
+release, there will be a new virtualenv release that bundles the new version of
+pip.
+
+Files in the `virtualenv_embedded/` subdirectory are embedded into
+`virtualenv.py` itself as base64-encoded strings (in order to support
+single-file use of `virtualenv.py` without installing it). If your patch
+changes any file in `virtualenv_embedded/`, run `bin/rebuild-script.py` to
+update the embedded version of that file in `virtualenv.py`; commit that and
+submit it as part of your patch / pull request.
+
+.. _pip development: http://www.pip-installer.org/en/latest/development.html
+.. _virtualenv repo: https://github.com/pypa/virtualenv/
+
+Running the tests
+-----------------
+
+Virtualenv's test suite is small and not yet at all comprehensive, but we aim
+to grow it.
+
+The easy way to run tests (handles test dependencies automatically)::
+
+    $ python setup.py test
+
+If you want to run only a selection of the tests, you'll need to run them
+directly with pytest instead. Create a virtualenv, and install required
+packages::
+
+    $ pip install pytest mock
+
+Run pytest::
+
+    $ pytest
+
+Or select just a single test file to run::
+
+    $ pytest tests/test_virtualenv
+
+Status and License
+------------------
+
+``virtualenv`` is a successor to `workingenv
+<http://cheeseshop.python.org/pypi/workingenv.py>`_, and an extension
+of `virtual-python
+<http://peak.telecommunity.com/DevCenter/EasyInstall#creating-a-virtual-python>`_.
+
+It was written by Ian Bicking, sponsored by the `Open Planning
+Project <http://openplans.org>`_ and is now maintained by a
+`group of developers <https://github.com/pypa/virtualenv/raw/master/AUTHORS.txt>`_.
+It is licensed under an
+`MIT-style permissive license <https://github.com/pypa/virtualenv/raw/master/LICENSE.txt>`_.
diff --git a/bootstrap/virtualenv/docs/index.rst b/bootstrap/virtualenv/docs/index.rst
new file mode 100644
index 0000000..04f7191
--- /dev/null
+++ b/bootstrap/virtualenv/docs/index.rst
@@ -0,0 +1,137 @@
+Virtualenv
+==========
+
+`Mailing list <http://groups.google.com/group/python-virtualenv>`_ |
+`Issues <https://github.com/pypa/virtualenv/issues>`_ |
+`Github <https://github.com/pypa/virtualenv>`_ |
+`PyPI <https://pypi.python.org/pypi/virtualenv/>`_ |
+User IRC: #pypa
+Dev IRC: #pypa-dev
+
+Introduction
+------------
+
+``virtualenv`` is a tool to create isolated Python environments.
+
+The basic problem being addressed is one of dependencies and versions,
+and indirectly permissions. Imagine you have an application that
+needs version 1 of LibFoo, but another application requires version
+2. How can you use both these applications?  If you install
+everything into ``/usr/lib/python2.7/site-packages`` (or whatever your
+platform's standard location is), it's easy to end up in a situation
+where you unintentionally upgrade an application that shouldn't be
+upgraded.
+
+Or more generally, what if you want to install an application *and
+leave it be*?  If an application works, any change in its libraries or
+the versions of those libraries can break the application.
+
+Also, what if you can't install packages into the global
+``site-packages`` directory?  For instance, on a shared host.
+
+In all these cases, ``virtualenv`` can help you. It creates an
+environment that has its own installation directories, that doesn't
+share libraries with other virtualenv environments (and optionally
+doesn't access the globally installed libraries either).
+
+.. comment: split here
+
+.. toctree::
+   :maxdepth: 2
+
+   installation
+   userguide
+   reference
+   development
+   changes
+
+.. warning::
+
+   Python bugfix releases 2.6.8, 2.7.3, 3.1.5 and 3.2.3 include a change that
+   will cause "import random" to fail with "cannot import name urandom" on any
+   virtualenv created on a Unix host with an earlier release of Python
+   2.6/2.7/3.1/3.2, if the underlying system Python is upgraded. This is due to
+   the fact that a virtualenv uses the system Python's standard library but
+   contains its own copy of the Python interpreter, so an upgrade to the system
+   Python results in a mismatch between the version of the Python interpreter
+   and the version of the standard library. It can be fixed by removing
+   ``$ENV/bin/python`` and re-running virtualenv on the same target directory
+   with the upgraded Python.
+
+Other Documentation and Links
+-----------------------------
+
+* `Blog announcement of virtualenv`__.
+
+  .. __: http://blog.ianbicking.org/2007/10/10/workingenv-is-dead-long-live-virtualenv/
+
+* James Gardner has written a tutorial on using `virtualenv with
+  Pylons
+  <http://wiki.pylonshq.com/display/pylonscookbook/Using+a+Virtualenv+Sandbox>`_.
+
+* Chris Perkins created a `showmedo video including virtualenv
+  <http://showmedo.com/videos/video?name=2910000&fromSeriesID=291>`_.
+
+* Doug Hellmann's `virtualenvwrapper`_ is a useful set of scripts to make
+  your workflow with many virtualenvs even easier. `His initial blog post on it`__.
+  He also wrote `an example of using virtualenv to try IPython`__.
+
+  .. _virtualenvwrapper: https://pypi.python.org/pypi/virtualenvwrapper/
+  .. __: http://www.doughellmann.com/articles/CompletelyDifferent-2008-05-virtualenvwrapper/index.html
+  .. __: http://www.doughellmann.com/articles/CompletelyDifferent-2008-02-ipython-and-virtualenv/index.html
+
+* `Pew`_ is another wrapper for virtualenv that makes use of a different
+  activation technique.
+
+  .. _Pew: https://pypi.python.org/pypi/pew/
+
+* `Using virtualenv with mod_wsgi
+  <http://code.google.com/p/modwsgi/wiki/VirtualEnvironments>`_.
+
+* `virtualenv commands
+  <https://github.com/thisismedium/virtualenv-commands>`_ for some more
+  workflow-related tools around virtualenv.
+
+* PyCon US 2011 talk: `Reverse-engineering Ian Bicking's brain: inside pip and virtualenv
+  <http://pyvideo.org/video/568/reverse-engineering-ian-bicking--39-s-brain--insi>`_.
+  By the end of the talk, you'll have a good idea exactly how pip
+  and virtualenv do their magic, and where to go looking in the source
+  for particular behaviors or bug fixes.
+
+Compare & Contrast with Alternatives
+------------------------------------
+
+There are several alternatives that create isolated environments:
+
+* ``workingenv`` (which I do not suggest you use anymore) is the
+  predecessor to this library. It used the main Python interpreter,
+  but relied on setting ``$PYTHONPATH`` to activate the environment.
+  This causes problems when running Python scripts that aren't part of
+  the environment (e.g., a globally installed ``hg`` or ``bzr``). It
+  also conflicted a lot with Setuptools.
+
+* `virtual-python
+  <http://peak.telecommunity.com/DevCenter/EasyInstall#creating-a-virtual-python>`_
+  is also a predecessor to this library. It uses only symlinks, so it
+  couldn't work on Windows. It also symlinks over the *entire*
+  standard library and global ``site-packages``. As a result, it
+  won't see new additions to the global ``site-packages``.
+
+  This script only symlinks a small portion of the standard library
+  into the environment, and so on Windows it is feasible to simply
+  copy these files over. Also, it creates a new/empty
+  ``site-packages`` and also adds the global ``site-packages`` to the
+  path, so updates are tracked separately. This script also installs
+  Setuptools automatically, saving a step and avoiding the need for
+  network access.
+
+* `zc.buildout <http://pypi.python.org/pypi/zc.buildout>`_ doesn't
+  create an isolated Python environment in the same style, but
+  achieves similar results through a declarative config file that sets
+  up scripts with very particular packages. As a declarative system,
+  it is somewhat easier to repeat and manage, but more difficult to
+  experiment with. ``zc.buildout`` includes the ability to setup
+  non-Python systems (e.g., a database server or an Apache instance).
+
+I *strongly* recommend anyone doing application development or
+deployment use one of these tools.
diff --git a/bootstrap/virtualenv/docs/installation.rst b/bootstrap/virtualenv/docs/installation.rst
new file mode 100644
index 0000000..3006d76
--- /dev/null
+++ b/bootstrap/virtualenv/docs/installation.rst
@@ -0,0 +1,58 @@
+Installation
+============
+
+.. warning::
+
+    We advise installing virtualenv-1.9 or greater. Prior to version 1.9, the
+    pip included in virtualenv did not download from PyPI over SSL.
+
+.. warning::
+
+    When using pip to install virtualenv, we advise using pip 1.3 or greater.
+    Prior to version 1.3, pip did not download from PyPI over SSL.
+
+.. warning::
+
+    We advise against using easy_install to install virtualenv when using
+    setuptools < 0.9.7, because easy_install didn't download from PyPI over SSL
+    and was broken in some subtle ways.
+
+To install globally with `pip` (if you have pip 1.3 or greater installed globally):
+
+::
+
+ $ [sudo] pip install virtualenv
+
+Or to get the latest unreleased dev version:
+
+::
+
+ $ [sudo] pip install https://github.com/pypa/virtualenv/tarball/develop
+
+
+To install version X.X globally from source:
+
+::
+
+ $ curl -O https://pypi.python.org/packages/source/v/virtualenv/virtualenv-X.X.tar.gz
+ $ tar xvfz virtualenv-X.X.tar.gz
+ $ cd virtualenv-X.X
+ $ [sudo] python setup.py install
+
+
+To *use* locally from source:
+
+::
+
+ $ curl -O https://pypi.python.org/packages/source/v/virtualenv/virtualenv-X.X.tar.gz
+ $ tar xvfz virtualenv-X.X.tar.gz
+ $ cd virtualenv-X.X
+ $ python virtualenv.py myVE
+
+.. note::
+
+    The ``virtualenv.py`` script is *not* supported if run without the
+    necessary pip/setuptools/virtualenv distributions available locally. All
+    of the installation methods above include a ``virtualenv_support``
+    directory alongside ``virtualenv.py`` which contains a complete set of
+    pip and setuptools distributions, and so are fully supported.
diff --git a/bootstrap/virtualenv/docs/make.bat b/bootstrap/virtualenv/docs/make.bat
new file mode 100644
index 0000000..aa5c189
--- /dev/null
+++ b/bootstrap/virtualenv/docs/make.bat
@@ -0,0 +1,170 @@
+@ECHO OFF
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+	set SPHINXBUILD=sphinx-build
+)
+set BUILDDIR=_build
+set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
+if NOT "%PAPER%" == "" (
+	set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
+)
+
+if "%1" == "" goto help
+
+if "%1" == "help" (
+	:help
+	echo.Please use `make ^<target^>` where ^<target^> is one of
+	echo.  html       to make standalone HTML files
+	echo.  dirhtml    to make HTML files named index.html in directories
+	echo.  singlehtml to make a single large HTML file
+	echo.  pickle     to make pickle files
+	echo.  json       to make JSON files
+	echo.  htmlhelp   to make HTML files and a HTML help project
+	echo.  qthelp     to make HTML files and a qthelp project
+	echo.  devhelp    to make HTML files and a Devhelp project
+	echo.  epub       to make an epub
+	echo.  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter
+	echo.  text       to make text files
+	echo.  man        to make manual pages
+	echo.  changes    to make an overview over all changed/added/deprecated items
+	echo.  linkcheck  to check all external links for integrity
+	echo.  doctest    to run all doctests embedded in the documentation if enabled
+	goto end
+)
+
+if "%1" == "clean" (
+	for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
+	del /q /s %BUILDDIR%\*
+	goto end
+)
+
+if "%1" == "html" (
+	%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The HTML pages are in %BUILDDIR%/html.
+	goto end
+)
+
+if "%1" == "dirhtml" (
+	%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
+	goto end
+)
+
+if "%1" == "singlehtml" (
+	%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
+	goto end
+)
+
+if "%1" == "pickle" (
+	%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; now you can process the pickle files.
+	goto end
+)
+
+if "%1" == "json" (
+	%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; now you can process the JSON files.
+	goto end
+)
+
+if "%1" == "htmlhelp" (
+	%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; now you can run HTML Help Workshop with the ^
+.hhp project file in %BUILDDIR%/htmlhelp.
+	goto end
+)
+
+if "%1" == "qthelp" (
+	%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; now you can run "qcollectiongenerator" with the ^
+.qhcp project file in %BUILDDIR%/qthelp, like this:
+	echo.^> qcollectiongenerator %BUILDDIR%\qthelp\django-compressor.qhcp
+	echo.To view the help file:
+	echo.^> assistant -collectionFile %BUILDDIR%\qthelp\django-compressor.ghc
+	goto end
+)
+
+if "%1" == "devhelp" (
+	%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished.
+	goto end
+)
+
+if "%1" == "epub" (
+	%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The epub file is in %BUILDDIR%/epub.
+	goto end
+)
+
+if "%1" == "latex" (
+	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
+	goto end
+)
+
+if "%1" == "text" (
+	%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The text files are in %BUILDDIR%/text.
+	goto end
+)
+
+if "%1" == "man" (
+	%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The manual pages are in %BUILDDIR%/man.
+	goto end
+)
+
+if "%1" == "changes" (
+	%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.The overview file is in %BUILDDIR%/changes.
+	goto end
+)
+
+if "%1" == "linkcheck" (
+	%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Link check complete; look for any errors in the above output ^
+or in %BUILDDIR%/linkcheck/output.txt.
+	goto end
+)
+
+if "%1" == "doctest" (
+	%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Testing of doctests in the sources finished, look at the ^
+results in %BUILDDIR%/doctest/output.txt.
+	goto end
+)
+
+:end
diff --git a/bootstrap/virtualenv/docs/reference.rst b/bootstrap/virtualenv/docs/reference.rst
new file mode 100644
index 0000000..ca801a0
--- /dev/null
+++ b/bootstrap/virtualenv/docs/reference.rst
@@ -0,0 +1,256 @@
+Reference Guide
+===============
+
+``virtualenv`` Command
+----------------------
+
+.. _usage:
+
+Usage
+~~~~~
+
+:command:`virtualenv [OPTIONS] ENV_DIR`
+
+    Where ``ENV_DIR`` is an absolute or relative path to a directory to create
+    the virtual environment in.
+
+.. _options:
+
+Options
+~~~~~~~
+
+.. program:: virtualenv
+
+.. option:: --version
+
+   show program's version number and exit
+
+.. option:: -h, --help
+
+   show this help message and exit
+
+.. option:: -v, --verbose
+
+   Increase verbosity.
+
+.. option:: -q, --quiet
+
+   Decrease verbosity.
+
+.. option:: -p PYTHON_EXE, --python=PYTHON_EXE
+
+   The Python interpreter to use, e.g.,
+   --python=python2.5 will use the python2.5 interpreter
+   to create the new environment.  The default is the
+   interpreter that virtualenv was installed with
+   (like ``/usr/bin/python``)
+
+.. option:: --clear
+
+   Clear out the non-root install and start from scratch.
+
+.. option:: --system-site-packages
+
+   Give the virtual environment access to the global
+   site-packages.
+
+.. option:: --always-copy
+
+   Always copy files rather than symlinking.
+
+.. option:: --relocatable
+
+   Make an EXISTING virtualenv environment relocatable.
+   This fixes up scripts and makes all .pth files relative.
+
+.. option:: --unzip-setuptools
+
+   Unzip Setuptools when installing it.
+
+.. option:: --no-setuptools
+
+   Do not install setuptools (or pip) in the new
+   virtualenv.
+
+.. option:: --no-pip
+
+   Do not install pip in the new virtualenv.
+
+.. option:: --extra-search-dir=DIR
+
+   Directory to look for setuptools/pip distributions in.
+   This option can be specified multiple times.
+
+.. option:: --prompt=PROMPT
+
+   Provides an alternative prompt prefix for this
+   environment.
+
+.. option:: --never-download
+
+   DEPRECATED. Retained only for backward compatibility.
+   This option has no effect. Virtualenv never downloads
+   pip or setuptools.
+
+.. option:: --no-site-packages
+
+   DEPRECATED. Retained only for backward compatibility.
+   Not having access to global site-packages is now the
+   default behavior.
+
+.. option:: --distribute
+.. option:: --setuptools
+
+   Legacy; now have no effect.  Before version 1.10 these could be used
+   to choose whether to install Distribute_ or Setuptools_ into the created
+   virtualenv. Distribute has now been merged into Setuptools, and the
+   latter is always installed.
+
+.. _Distribute: https://pypi.python.org/pypi/distribute
+.. _Setuptools: https://pypi.python.org/pypi/setuptools
+
+
+Configuration
+-------------
+
+Environment Variables
+~~~~~~~~~~~~~~~~~~~~~
+
+Each command line option is automatically used to look for environment
+variables with the name format ``VIRTUALENV_<UPPER_NAME>``. That means
+the name of the command line options are capitalized and have dashes
+(``'-'``) replaced with underscores (``'_'``).
+
+For example, to automatically use a custom Python binary instead of the
+one virtualenv is run with you can also set an environment variable::
+
+  $ export VIRTUALENV_PYTHON=/opt/python-3.3/bin/python
+  $ virtualenv ENV
+
+It's the same as passing the option to virtualenv directly::
+
+  $ virtualenv --python=/opt/python-3.3/bin/python ENV
+
+This also works for appending command line options, like ``--find-links``.
+Just leave an empty space between the passed values, e.g.::
+
+  $ export VIRTUALENV_EXTRA_SEARCH_DIR="/path/to/dists /path/to/other/dists"
+  $ virtualenv ENV
+
+is the same as calling::
+
+  $ virtualenv --extra-search-dir=/path/to/dists --extra-search-dir=/path/to/other/dists ENV
+
+.. envvar:: VIRTUAL_ENV_DISABLE_PROMPT
+
+   Any virtualenv created when this is set to a non-empty value will not have
+   its :ref:`activate` modify the shell prompt.
+
+
+Configuration File
+~~~~~~~~~~~~~~~~~~
+
+virtualenv also looks for a standard ini config file. On Unix and Mac OS X
+that's ``$HOME/.virtualenv/virtualenv.ini`` and on Windows, it's
+``%APPDATA%\virtualenv\virtualenv.ini``.
+
+The names of the settings are derived from the long command line option,
+e.g. the option :option:`--python <-p>` would look like this::
+
+  [virtualenv]
+  python = /opt/python-3.3/bin/python
+
+Appending options like :option:`--extra-search-dir` can be written on multiple
+lines::
+
+  [virtualenv]
+  extra-search-dir =
+      /path/to/dists
+      /path/to/other/dists
+
+Please have a look at the output of :option:`--help <-h>` for a full list
+of supported options.
+
+
+Extending Virtualenv
+--------------------
+
+
+Creating Your Own Bootstrap Scripts
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+While this creates an environment, it doesn't put anything into the
+environment. Developers may find it useful to distribute a script
+that sets up a particular environment, for example a script that
+installs a particular web application.
+
+To create a script like this, call
+:py:func:`virtualenv.create_bootstrap_script`, and write the
+result to your new bootstrapping script.
+
+.. py:function:: create_bootstrap_script(extra_text)
+
+   Creates a bootstrap script from ``extra_text``, which is like
+   this script but with extend_parser, adjust_options, and after_install hooks.
+
+This returns a string that (written to disk of course) can be used
+as a bootstrap script with your own customizations. The script
+will be the standard virtualenv.py script, with your extra text
+added (your extra text should be Python code).
+
+If you include these functions, they will be called:
+
+.. py:function:: extend_parser(optparse_parser)
+
+   You can add or remove options from the parser here.
+
+.. py:function:: adjust_options(options, args)
+
+   You can change options here, or change the args (if you accept
+   different kinds of arguments, be sure you modify ``args`` so it is
+   only ``[DEST_DIR]``).
+
+.. py:function:: after_install(options, home_dir)
+
+   After everything is installed, this function is called. This
+   is probably the function you are most likely to use. An
+   example would be::
+
+       def after_install(options, home_dir):
+           if sys.platform == 'win32':
+               bin = 'Scripts'
+           else:
+               bin = 'bin'
+           subprocess.call([join(home_dir, bin, 'easy_install'),
+                            'MyPackage'])
+           subprocess.call([join(home_dir, bin, 'my-package-script'),
+                            'setup', home_dir])
+
+   This example immediately installs a package, and runs a setup
+   script from that package.
+
+Bootstrap Example
+~~~~~~~~~~~~~~~~~
+
+Here's a more concrete example of how you could use this::
+
+    import virtualenv, textwrap
+    output = virtualenv.create_bootstrap_script(textwrap.dedent("""
+    import os, subprocess
+    def after_install(options, home_dir):
+        etc = join(home_dir, 'etc')
+        if not os.path.exists(etc):
+            os.makedirs(etc)
+        subprocess.call([join(home_dir, 'bin', 'easy_install'),
+                         'BlogApplication'])
+        subprocess.call([join(home_dir, 'bin', 'paster'),
+                         'make-config', 'BlogApplication',
+                         join(etc, 'blog.ini')])
+        subprocess.call([join(home_dir, 'bin', 'paster'),
+                         'setup-app', join(etc, 'blog.ini')])
+    """))
+    f = open('blog-bootstrap.py', 'w').write(output)
+
+Another example is available `here`__.
+
+.. __: https://github.com/socialplanning/fassembler/blob/master/fassembler/create-venv-script.py
diff --git a/bootstrap/virtualenv/docs/userguide.rst b/bootstrap/virtualenv/docs/userguide.rst
new file mode 100644
index 0000000..70d4af8
--- /dev/null
+++ b/bootstrap/virtualenv/docs/userguide.rst
@@ -0,0 +1,249 @@
+User Guide
+==========
+
+
+Usage
+-----
+
+Virtualenv has one basic command::
+
+    $ virtualenv ENV
+
+Where ``ENV`` is a directory to place the new virtual environment. It has
+a number of usual effects (modifiable by many :ref:`options`):
+
+ - :file:`ENV/lib/` and :file:`ENV/include/` are created, containing supporting
+   library files for a new virtualenv python. Packages installed in this
+   environment will live under :file:`ENV/lib/pythonX.X/site-packages/`.
+
+ - :file:`ENV/bin` is created, where executables live - noticeably a new
+   :command:`python`. Thus running a script with ``#! /path/to/ENV/bin/python``
+   would run that script under this virtualenv's python.
+
+ - The crucial packages pip_ and setuptools_ are installed, which allow other
+   packages to be easily installed to the environment. This associated pip
+   can be run from :file:`ENV/bin/pip`.
+
+The python in your new virtualenv is effectively isolated from the python that
+was used to create it.
+
+.. _pip: https://pypi.python.org/pypi/pip
+.. _setuptools: https://pypi.python.org/pypi/setuptools
+
+
+.. _activate:
+
+activate script
+~~~~~~~~~~~~~~~
+
+In a newly created virtualenv there will also be an :command:`activate` shell
+script. For Windows systems, activation scripts are provided for
+the Command Prompt and Powershell.
+
+On Posix systems, this resides in :file:`/ENV/bin/`, so you can run::
+
+    $ source bin/activate
+
+For some shells (e.g. the original Bourne Shell) you may need to use the
+:command:`.` command, when :command:`source` does not exist.
+
+This will change your ``$PATH`` so its first entry is the virtualenv's
+``bin/`` directory. (You have to use ``source`` because it changes your
+shell environment in-place.) This is all it does; it's purely a
+convenience. If you directly run a script or the python interpreter
+from the virtualenv's ``bin/`` directory (e.g. ``path/to/ENV/bin/pip``
+or ``/path/to/ENV/bin/python-script.py``) there's no need for
+activation.
+
+The ``activate`` script will also modify your shell prompt to indicate
+which environment is currently active. To disable this behaviour, see
+:envvar:`VIRTUAL_ENV_DISABLE_PROMPT`.
+
+To undo these changes to your path (and prompt), just run::
+
+    $ deactivate
+
+On Windows, the equivalent `activate` script is in the ``Scripts`` folder::
+
+    > \path\to\env\Scripts\activate
+
+And type ``deactivate`` to undo the changes.
+
+Based on your active shell (CMD.exe or Powershell.exe), Windows will use
+either activate.bat or activate.ps1 (as appropriate) to activate the
+virtual environment. If using Powershell, see the notes about code signing
+below.
+
+.. note::
+
+    If using Powershell, the ``activate`` script is subject to the
+    `execution policies`_ on the system. By default on Windows 7, the system's
+    execution policy is set to ``Restricted``, meaning no scripts like the
+    ``activate`` script are allowed to be executed. But that can't stop us
+    from changing that slightly to allow it to be executed.
+
+    In order to use the script, you can relax your system's execution
+    policy to ``AllSigned``, meaning all scripts on the system must be
+    digitally signed to be executed. Since the virtualenv activation
+    script is signed by one of the authors (Jannis Leidel) this level of
+    the execution policy suffices. As an administrator run::
+
+        PS C:\> Set-ExecutionPolicy AllSigned
+
+    Then you'll be asked to trust the signer, when executing the script.
+    You will be prompted with the following::
+
+        PS C:\> virtualenv .\foo
+        New python executable in C:\foo\Scripts\python.exe
+        Installing setuptools................done.
+        Installing pip...................done.
+        PS C:\> .\foo\scripts\activate
+
+        Do you want to run software from this untrusted publisher?
+        File C:\foo\scripts\activate.ps1 is published by E=jannis@leidel.info,
+        CN=Jannis Leidel, L=Berlin, S=Berlin, C=DE, Description=581796-Gh7xfJxkxQSIO4E0
+        and is not trusted on your system. Only run scripts from trusted publishers.
+        [V] Never run  [D] Do not run  [R] Run once  [A] Always run  [?] Help
+        (default is "D"):A
+        (foo) PS C:\>
+
+    If you select ``[A] Always Run``, the certificate will be added to the
+    Trusted Publishers of your user account, and will be trusted in this
+    user's context henceforth. If you select ``[R] Run Once``, the script will
+    be run, but you will be prompted on a subsequent invocation. Advanced users
+    can add the signer's certificate to the Trusted Publishers of the Computer
+    account to apply to all users (though this technique is out of scope of this
+    document).
+
+    Alternatively, you may relax the system execution policy to allow running
+    of local scripts without verifying the code signature using the following::
+
+        PS C:\> Set-ExecutionPolicy RemoteSigned
+
+    Since the ``activate.ps1`` script is generated locally for each virtualenv,
+    it is not considered a remote script and can then be executed.
+
+.. _`execution policies`: http://technet.microsoft.com/en-us/library/dd347641.aspx
+
+The :option:`--system-site-packages` Option
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If you build with ``virtualenv --system-site-packages ENV``, your virtual
+environment will inherit packages from ``/usr/lib/python2.7/site-packages``
+(or wherever your global site-packages directory is).
+
+This can be used if you have control over the global site-packages directory,
+and you want to depend on the packages there. If you want isolation from the
+global system, do not use this flag.
+
+Windows Notes
+~~~~~~~~~~~~~
+
+Some paths within the virtualenv are slightly different on Windows: scripts and
+executables on Windows go in ``ENV\Scripts\`` instead of ``ENV/bin/`` and
+libraries go in ``ENV\Lib\`` rather than ``ENV/lib/``.
+
+To create a virtualenv under a path with spaces in it on Windows, you'll need
+the `win32api <http://sourceforge.net/projects/pywin32/>`_ library installed.
+
+
+Using Virtualenv without ``bin/python``
+---------------------------------------
+
+Sometimes you can't or don't want to use the Python interpreter
+created by the virtualenv. For instance, in a `mod_python
+<http://www.modpython.org/>`_ or `mod_wsgi <http://www.modwsgi.org/>`_
+environment, there is only one interpreter.
+
+Luckily, it's easy. You must use the custom Python interpreter to
+*install* libraries. But to *use* libraries, you just have to be sure
+the path is correct. A script is available to correct the path. You
+can setup the environment like::
+
+    activate_this = '/path/to/env/bin/activate_this.py'
+    execfile(activate_this, dict(__file__=activate_this))
+
+This will change ``sys.path`` and even change ``sys.prefix``, but also allow
+you to use an existing interpreter. Items in your environment will show up
+first on ``sys.path``, before global items. However, global items will
+always be accessible (as if the :option:`--system-site-packages` flag had been
+used in creating the environment, whether it was or not). Also, this cannot undo
+the activation of other environments, or modules that have been imported.
+You shouldn't try to, for instance, activate an environment before a web
+request; you should activate *one* environment as early as possible, and not
+do it again in that process.
+
+Making Environments Relocatable
+-------------------------------
+
+**Note:** this option is somewhat experimental, and there are probably
+caveats that have not yet been identified.
+
+.. warning::
+
+    The ``--relocatable`` option currently has a number of issues,
+    and is not guaranteed to work in all circumstances. It is possible
+    that the option will be deprecated in a future version of ``virtualenv``.
+
+Normally environments are tied to a specific path. That means that
+you cannot move an environment around or copy it to another computer.
+You can fix up an environment to make it relocatable with the
+command::
+
+    $ virtualenv --relocatable ENV
+
+This will make some of the files created by setuptools use relative paths,
+and will change all the scripts to use ``activate_this.py`` instead of using
+the location of the Python interpreter to select the environment.
+
+**Note:** scripts which have been made relocatable will only work if
+the virtualenv is activated, specifically the python executable from
+the virtualenv must be the first one on the system PATH. Also note that
+the activate scripts are not currently made relocatable by
+``virtualenv --relocatable``.
+
+**Note:** you must run this after you've installed *any* packages into
+the environment. If you make an environment relocatable, then
+install a new package, you must run ``virtualenv --relocatable``
+again.
+
+Also, this **does not make your packages cross-platform**. You can
+move the directory around, but it can only be used on other similar
+computers. Some known environmental differences that can cause
+incompatibilities: a different version of Python, when one platform
+uses UCS2 for its internal unicode representation and another uses
+UCS4 (a compile-time option), obvious platform changes like Windows
+vs. Linux, or Intel vs. ARM, and if you have libraries that bind to C
+libraries on the system, if those C libraries are located somewhere
+different (either different versions, or a different filesystem
+layout).
+
+If you use this flag to create an environment, currently, the
+:option:`--system-site-packages` option will be implied.
+
+The :option:`--extra-search-dir` option
+---------------------------------------
+
+This option allows you to provide your own versions of setuptools and/or
+pip to use instead of the embedded versions that come with virtualenv.
+
+To use this feature, pass one or more ``--extra-search-dir`` options to
+virtualenv like this::
+
+    $ virtualenv --extra-search-dir=/path/to/distributions ENV
+
+The ``/path/to/distributions`` path should point to a directory that contains
+setuptools and/or pip wheels.
+
+virtualenv will look for wheels in the specified directories, but will use
+pip's standard algorithm for selecting the wheel to install, which looks for
+the latest compatible wheel.
+
+As well as the extra directories, the search order includes:
+
+#. The ``virtualenv_support`` directory relative to virtualenv.py
+#. The directory where virtualenv.py is located.
+#. The current directory.
+
+If no satisfactory local distributions are found, virtualenv will
+fail. Virtualenv will never download packages.
diff --git a/bootstrap/virtualenv/scripts/virtualenv b/bootstrap/virtualenv/scripts/virtualenv
new file mode 100644
index 0000000..c961dd7
--- /dev/null
+++ b/bootstrap/virtualenv/scripts/virtualenv
@@ -0,0 +1,3 @@
+#!/usr/bin/env python
+import virtualenv
+virtualenv.main()
diff --git a/bootstrap/virtualenv/setup.py b/bootstrap/virtualenv/setup.py
new file mode 100644
index 0000000..ce35314
--- /dev/null
+++ b/bootstrap/virtualenv/setup.py
@@ -0,0 +1,111 @@
+import os
+import re
+import shutil
+import sys
+
+if sys.version_info[:2] < (2, 6):
+    sys.exit('virtualenv requires Python 2.6 or higher.')
+
+try:
+    from setuptools import setup
+    from setuptools.command.test import test as TestCommand
+
+    class PyTest(TestCommand):
+        user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]
+
+        def initialize_options(self):
+            TestCommand.initialize_options(self)
+            self.pytest_args = None
+
+        def finalize_options(self):
+            TestCommand.finalize_options(self)
+            self.test_args = []
+            self.test_suite = True
+
+        def run_tests(self):
+            # import here, because outside the eggs aren't loaded
+            import pytest
+            errno = pytest.main(self.pytest_args)
+            sys.exit(errno)
+
+    setup_params = {
+        'entry_points': {
+            'console_scripts': [
+                'virtualenv=virtualenv:main',
+                'virtualenv-%s.%s=virtualenv:main' % sys.version_info[:2]
+            ],
+        },
+        'zip_safe': False,
+        'cmdclass': {'test': PyTest},
+        'tests_require': ['pytest', 'mock'],
+    }
+except ImportError:
+    from distutils.core import setup
+    if sys.platform == 'win32':
+        print('Note: without Setuptools installed you will '
+              'have to use "python -m virtualenv ENV"')
+        setup_params = {}
+    else:
+        script = 'scripts/virtualenv'
+        script_ver = script + '-%s.%s' % sys.version_info[:2]
+        shutil.copy(script, script_ver)
+        setup_params = {'scripts': [script, script_ver]}
+
+
+def read_file(*paths):
+    here = os.path.dirname(os.path.abspath(__file__))
+    with open(os.path.join(here, *paths)) as f:
+        return f.read()
+
+# Get long_description from index.rst:
+long_description = read_file('docs', 'index.rst')
+long_description = long_description.strip().split('split here', 1)[0]
+# Add release history
+long_description += "\n\n" + read_file('docs', 'changes.rst')
+
+
+def get_version():
+    version_file = read_file('virtualenv.py')
+    version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
+                              version_file, re.M)
+    if version_match:
+        return version_match.group(1)
+    raise RuntimeError("Unable to find version string.")
+
+
+# Hack to prevent stupid "TypeError: 'NoneType' object is not callable" error
+# on exit of `python setup.py test` in multiprocessing/util.py _exit_function
+# (see
+# http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
+try:
+    import multiprocessing  # noqa
+except ImportError:
+    pass
+
+setup(
+    name='virtualenv',
+    version=get_version(),
+    description="Virtual Python Environment builder",
+    long_description=long_description,
+    classifiers=[
+        'Development Status :: 5 - Production/Stable',
+        'Intended Audience :: Developers',
+        'License :: OSI Approved :: MIT License',
+        'Programming Language :: Python :: 2',
+        'Programming Language :: Python :: 2.6',
+        'Programming Language :: Python :: 2.7',
+        'Programming Language :: Python :: 3',
+        'Programming Language :: Python :: 3.1',
+        'Programming Language :: Python :: 3.2',
+    ],
+    keywords='setuptools deployment installation distutils',
+    author='Ian Bicking',
+    author_email='ianb@colorstudy.com',
+    maintainer='Jannis Leidel, Carl Meyer and Brian Rosner',
+    maintainer_email='python-virtualenv@groups.google.com',
+    url='https://virtualenv.pypa.io/',
+    license='MIT',
+    py_modules=['virtualenv'],
+    packages=['virtualenv_support'],
+    package_data={'virtualenv_support': ['*.whl']},
+    **setup_params)
diff --git a/bootstrap/virtualenv/tests/__init__.py b/bootstrap/virtualenv/tests/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/bootstrap/virtualenv/tests/__init__.py
diff --git a/bootstrap/virtualenv/tests/test_activate.sh b/bootstrap/virtualenv/tests/test_activate.sh
new file mode 100755
index 0000000..a2b79a5
--- /dev/null
+++ b/bootstrap/virtualenv/tests/test_activate.sh
@@ -0,0 +1,94 @@
+#!/bin/sh
+
+ROOT="$(dirname $0)/.."
+VIRTUALENV="${ROOT}/virtualenv.py"
+TESTENV="/tmp/test_virtualenv_activate.venv"
+
+rm -rf ${TESTENV}
+
+echo "$0: Creating virtualenv ${TESTENV}..." 1>&2
+
+${VIRTUALENV} ${TESTENV} | tee ${ROOT}/tests/test_activate_actual.output
+if ! diff ${ROOT}/tests/test_activate_expected.output ${ROOT}/tests/test_activate_actual.output; then
+    echo "$0: Failed to get expected output from ${VIRTUALENV}!" 1>&2
+    exit 1
+fi
+
+echo "$0: Created virtualenv ${TESTENV}." 1>&2
+
+echo "$0: Activating ${TESTENV}..." 1>&2
+. ${TESTENV}/bin/activate
+echo "$0: Activated ${TESTENV}." 1>&2
+
+echo "$0: Checking value of \$VIRTUAL_ENV..." 1>&2
+
+if [ "$VIRTUAL_ENV" != "${TESTENV}" ]; then
+    echo "$0: Expected \$VIRTUAL_ENV to be set to \"${TESTENV}\"; actual value: \"${VIRTUAL_ENV}\"!" 1>&2
+    exit 2
+fi
+
+echo "$0: \$VIRTUAL_ENV = \"${VIRTUAL_ENV}\" -- OK." 1>&2
+
+echo "$0: Checking output of \$(which python)..." 1>&2
+
+if [ "$(which python)" != "${TESTENV}/bin/python" ]; then
+    echo "$0: Expected \$(which python) to return \"${TESTENV}/bin/python\"; actual value: \"$(which python)\"!" 1>&2
+    exit 3
+fi
+
+echo "$0: Output of \$(which python) is OK." 1>&2
+
+echo "$0: Checking output of \$(which pip)..." 1>&2
+
+if [ "$(which pip)" != "${TESTENV}/bin/pip" ]; then
+    echo "$0: Expected \$(which pip) to return \"${TESTENV}/bin/pip\"; actual value: \"$(which pip)\"!" 1>&2
+    exit 4
+fi
+
+echo "$0: Output of \$(which pip) is OK." 1>&2
+
+echo "$0: Checking output of \$(which easy_install)..." 1>&2
+
+if [ "$(which easy_install)" != "${TESTENV}/bin/easy_install" ]; then
+    echo "$0: Expected \$(which easy_install) to return \"${TESTENV}/bin/easy_install\"; actual value: \"$(which easy_install)\"!" 1>&2
+    exit 5
+fi
+
+echo "$0: Output of \$(which easy_install) is OK." 1>&2
+
+echo "$0: Executing a simple Python program..." 1>&2
+
+TESTENV=${TESTENV} python <<__END__
+import os, sys
+
+expected_site_packages = os.path.join(os.environ['TESTENV'], 'lib','python%s' % sys.version[:3], 'site-packages')
+site_packages = os.path.join(os.environ['VIRTUAL_ENV'], 'lib', 'python%s' % sys.version[:3], 'site-packages')
+
+assert site_packages == expected_site_packages, 'site_packages did not have expected value; actual value: %r' % site_packages
+
+open(os.path.join(site_packages, 'pydoc_test.py'), 'w').write('"""This is pydoc_test.py"""\n')
+__END__
+
+if [ $? -ne 0 ]; then
+    echo "$0: Python script failed!" 1>&2
+    exit 6
+fi
+
+echo "$0: Execution of a simple Python program -- OK." 1>&2
+
+echo "$0: Testing pydoc..." 1>&2
+
+if ! PAGER=cat pydoc pydoc_test | grep 'This is pydoc_test.py' > /dev/null; then
+    echo "$0: pydoc test failed!" 1>&2
+    exit 7
+fi
+
+echo "$0: pydoc is OK." 1>&2
+
+echo "$0: Deactivating ${TESTENV}..." 1>&2
+deactivate
+echo "$0: Deactivated ${TESTENV}." 1>&2
+echo "$0: OK!" 1>&2
+
+rm -rf ${TESTENV}
+
diff --git a/bootstrap/virtualenv/tests/test_activate_expected.output b/bootstrap/virtualenv/tests/test_activate_expected.output
new file mode 100644
index 0000000..35bf7f7
--- /dev/null
+++ b/bootstrap/virtualenv/tests/test_activate_expected.output
@@ -0,0 +1,2 @@
+New python executable in /tmp/test_virtualenv_activate.venv/bin/python
+Installing setuptools, pip...done.
diff --git a/bootstrap/virtualenv/tests/test_virtualenv.py b/bootstrap/virtualenv/tests/test_virtualenv.py
new file mode 100644
index 0000000..10c1136
--- /dev/null
+++ b/bootstrap/virtualenv/tests/test_virtualenv.py
@@ -0,0 +1,139 @@
+import virtualenv
+import optparse
+import os
+import shutil
+import sys
+import tempfile
+from mock import patch, Mock
+
+
+def test_version():
+    """Should have a version string"""
+    assert virtualenv.virtualenv_version, "Should have version"
+
+
+@patch('os.path.exists')
+def test_resolve_interpreter_with_absolute_path(mock_exists):
+    """Should return absolute path if given and exists"""
+    mock_exists.return_value = True
+    virtualenv.is_executable = Mock(return_value=True)
+
+    exe = virtualenv.resolve_interpreter("/usr/bin/python42")
+
+    assert exe == "/usr/bin/python42", "Absolute path should return as is"
+    mock_exists.assert_called_with("/usr/bin/python42")
+    virtualenv.is_executable.assert_called_with("/usr/bin/python42")
+
+
+@patch('os.path.exists')
+def test_resolve_interpreter_with_nonexistent_interpreter(mock_exists):
+    """Should exit when given an absolute path that does not exist"""
+    mock_exists.return_value = False
+
+    try:
+        virtualenv.resolve_interpreter("/usr/bin/python42")
+        assert False, "Should raise exception"
+    except SystemExit:
+        pass
+
+    mock_exists.assert_called_with("/usr/bin/python42")
+
+
+@patch('os.path.exists')
+def test_resolve_interpreter_with_invalid_interpreter(mock_exists):
+    """Should exit when the absolute path exists but is not executable"""
+    mock_exists.return_value = True
+    virtualenv.is_executable = Mock(return_value=False)
+
+    try:
+        virtualenv.resolve_interpreter("/usr/bin/python42")
+        assert False, "Should raise exception"
+    except SystemExit:
+        pass
+
+    mock_exists.assert_called_with("/usr/bin/python42")
+    virtualenv.is_executable.assert_called_with("/usr/bin/python42")
+
+
+def test_activate_after_future_statements():
+    """Should insert activation line after last future statement"""
+    script = [
+        '#!/usr/bin/env python',
+        'from __future__ import with_statement',
+        'from __future__ import print_function',
+        'print("Hello, world!")'
+    ]
+    assert virtualenv.relative_script(script) == [
+        '#!/usr/bin/env python',
+        'from __future__ import with_statement',
+        'from __future__ import print_function',
+        '',
+        "import os; activate_this=os.path.join(os.path.dirname(os.path.realpath(__file__)), 'activate_this.py'); exec(compile(open(activate_this).read(), activate_this, 'exec'), dict(__file__=activate_this)); del os, activate_this",
+        '',
+        'print("Hello, world!")'
+    ]
+
+
+def test_cop_update_defaults_with_store_false():
+    """store_false options need inverted logic"""
+    class MyConfigOptionParser(virtualenv.ConfigOptionParser):
+        def __init__(self, *args, **kwargs):
+            self.config = virtualenv.ConfigParser.RawConfigParser()
+            self.files = []
+            optparse.OptionParser.__init__(self, *args, **kwargs)
+
+        def get_environ_vars(self, prefix='VIRTUALENV_'):
+            yield ("no_site_packages", "1")
+
+    cop = MyConfigOptionParser()
+    cop.add_option(
+        '--no-site-packages',
+        dest='system_site_packages',
+        action='store_false',
+        help="Don't give access to the global site-packages dir to the "
+             "virtual environment (default)")
+
+    defaults = {}
+    cop.update_defaults(defaults)
+    assert defaults == {'system_site_packages': 0}
+
+def test_install_python_bin():
+    """Should create the right python executables and links"""
+    tmp_virtualenv = tempfile.mkdtemp()
+    try:
+        home_dir, lib_dir, inc_dir, bin_dir = \
+                                virtualenv.path_locations(tmp_virtualenv)
+        virtualenv.install_python(home_dir, lib_dir, inc_dir, bin_dir, False,
+                                  False)
+
+        if virtualenv.is_win:
+            required_executables = [ 'python.exe', 'pythonw.exe']
+        else:
+            py_exe_no_version = 'python'
+            py_exe_version_major = 'python%s' % sys.version_info[0]
+            py_exe_version_major_minor = 'python%s.%s' % (
+                sys.version_info[0], sys.version_info[1])
+            required_executables = [ py_exe_no_version, py_exe_version_major,
+                                     py_exe_version_major_minor ]
+
+        for pth in required_executables:
+            assert os.path.exists(os.path.join(bin_dir, pth)), ("%s should "
+                            "exist in bin_dir" % pth)
+    finally:
+        shutil.rmtree(tmp_virtualenv)
+
+
+def test_always_copy_option():
+    """Should be no symlinks in directory tree"""
+    tmp_virtualenv = tempfile.mkdtemp()
+    ve_path = os.path.join(tmp_virtualenv, 'venv')
+    try:
+        virtualenv.create_environment(ve_path, symlink=False)
+
+        for root, dirs, files in os.walk(tmp_virtualenv):
+            for f in files + dirs:
+                full_name = os.path.join(root, f)
+                assert not os.path.islink(full_name), "%s should not be a" \
+                    " symlink (to %s)" % (full_name, os.readlink(full_name))
+    finally:
+        shutil.rmtree(tmp_virtualenv)
diff --git a/bootstrap/virtualenv/tests/tox.ini b/bootstrap/virtualenv/tests/tox.ini
new file mode 100644
index 0000000..da59e02
--- /dev/null
+++ b/bootstrap/virtualenv/tests/tox.ini
@@ -0,0 +1,12 @@
+# Tox (http://codespeak.net/~hpk/tox/) is a tool for running tests
+# in multiple virtualenvs. This configuration file will run the
+# test suite on all supported python versions. To use it, "pip install tox"
+# and then run "tox" from this directory.
+
+[tox]
+envlist = py25, py26, py27, py31, py32, pypy, jython
+setupdir = ..
+
+[testenv]
+commands = python setup.py test
+changedir = ..
diff --git a/bootstrap/virtualenv/tox.ini b/bootstrap/virtualenv/tox.ini
new file mode 100644
index 0000000..d2661ea
--- /dev/null
+++ b/bootstrap/virtualenv/tox.ini
@@ -0,0 +1,17 @@
+[tox]
+envlist =
+    py26,py27,py32,py33,py34,pypy,pypy3,docs
+
+[testenv]
+deps =
+    mock
+    pytest
+commands =
+    py.test []
+    python virtualenv.py {envtmpdir}/test-venv-01
+
+[testenv:docs]
+deps = sphinx
+basepython = python2.7
+commands =
+    sphinx-build -W -b html -d {envtmpdir}/doctrees docs docs/_build/html
diff --git a/bootstrap/virtualenv/virtualenv.py b/bootstrap/virtualenv/virtualenv.py
new file mode 100755
index 0000000..380a601
--- /dev/null
+++ b/bootstrap/virtualenv/virtualenv.py
@@ -0,0 +1,2367 @@
+#!/usr/bin/env python
+"""Create a "virtual" Python installation
+"""
+
+__version__ = "12.0"
+virtualenv_version = __version__  # legacy
+
+# NB: avoid placing additional imports here, before sys.path is fixed!
+
+import sys
+import os
+
+#
+# RATIONALE:
+# This script is both its own "host" and "guest".  If it's running in "guest
+# mode" (inside the virtualenv interpreter), it's essentially invoked via:
+#     /path/to/python /path/to/this/script.py
+#
+# Which, by the nature of Python, will put `/path/to/this` on the system path
+# as the first entry.  Now this can cause many subtle bugs, because the
+# rest of the script is now looking to import from the "host" Python version
+# first.  This has been especially troublesome when trying to create a Python
+# 3 "guest" env using a Python 2 "host", but even with minor Python
+# differences, there may be some bleeding between environments that doesn't
+# stand out as obviously.
+#
+# This removes the first entry from the system path, to avoid any accidental
+# usage of the "host" library directories.
+#
+if os.environ.get('VIRTUALENV_INTERPRETER_RUNNING'):
+    del sys.path[0]
+
+import base64
+import codecs
+import optparse
+import re
+import shutil
+import logging
+import tempfile
+import zlib
+import errno
+import glob
+import distutils.sysconfig
+from distutils.util import strtobool
+import struct
+import subprocess
+import tarfile
+
+if sys.version_info < (2, 6):
+    print('ERROR: %s' % sys.exc_info()[1])
+    print('ERROR: this script requires Python 2.6 or greater.')
+    sys.exit(101)
+
+try:
+    basestring
+except NameError:
+    basestring = str
+
+try:
+    import ConfigParser
+except ImportError:
+    import configparser as ConfigParser
+
+join = os.path.join
+py_version = 'python%s.%s' % (sys.version_info[0], sys.version_info[1])
+
+is_jython = sys.platform.startswith('java')
+is_pypy = hasattr(sys, 'pypy_version_info')
+is_win = (sys.platform == 'win32')
+is_cygwin = (sys.platform == 'cygwin')
+is_darwin = (sys.platform == 'darwin')
+abiflags = getattr(sys, 'abiflags', '')
+
+user_dir = os.path.expanduser('~')
+if is_win:
+    default_storage_dir = os.path.join(user_dir, 'virtualenv')
+else:
+    default_storage_dir = os.path.join(user_dir, '.virtualenv')
+default_config_file = os.path.join(default_storage_dir, 'virtualenv.ini')
+
+if is_pypy:
+    expected_exe = 'pypy'
+elif is_jython:
+    expected_exe = 'jython'
+else:
+    expected_exe = 'python'
+
+# Return a mapping of version -> Python executable
+# Only provided for Windows, where the information in the registry is used
+if not is_win:
+    def get_installed_pythons():
+        return {}
+else:
+    try:
+        import winreg
+    except ImportError:
+        import _winreg as winreg
+
+    def get_installed_pythons():
+        try:
+            python_core = winreg.CreateKey(winreg.HKEY_LOCAL_MACHINE,
+                    "Software\\Python\\PythonCore")
+        except WindowsError:
+            # No registered Python installations
+            return {}
+        i = 0
+        versions = []
+        while True:
+            try:
+                versions.append(winreg.EnumKey(python_core, i))
+                i = i + 1
+            except WindowsError:
+                break
+        exes = dict()
+        for ver in versions:
+            try:
+                path = winreg.QueryValue(python_core, "%s\\InstallPath" % ver)
+            except WindowsError:
+                continue
+            exes[ver] = join(path, "python.exe")
+
+        winreg.CloseKey(python_core)
+
+        # Add the major versions
+        # Sort the keys, then repeatedly update the major version entry
+        # Last executable (i.e., highest version) wins with this approach
+        for ver in sorted(exes):
+            exes[ver[0]] = exes[ver]
+
+        return exes
+
+REQUIRED_MODULES = ['os', 'posix', 'posixpath', 'nt', 'ntpath', 'genericpath',
+                    'fnmatch', 'locale', 'encodings', 'codecs',
+                    'stat', 'UserDict', 'readline', 'copy_reg', 'types',
+                    're', 'sre', 'sre_parse', 'sre_constants', 'sre_compile',
+                    'zlib']
+
+REQUIRED_FILES = ['lib-dynload', 'config']
+
+majver, minver = sys.version_info[:2]
+if majver == 2:
+    if minver >= 6:
+        REQUIRED_MODULES.extend(['warnings', 'linecache', '_abcoll', 'abc'])
+    if minver >= 7:
+        REQUIRED_MODULES.extend(['_weakrefset'])
+elif majver == 3:
+    # Some extra modules are needed for Python 3, but different ones
+    # for different versions.
+    REQUIRED_MODULES.extend(['_abcoll', 'warnings', 'linecache', 'abc', 'io',
+                             '_weakrefset', 'copyreg', 'tempfile', 'random',
+                             '__future__', 'collections', 'keyword', 'tarfile',
+                             'shutil', 'struct', 'copy', 'tokenize', 'token',
+                             'functools', 'heapq', 'bisect', 'weakref',
+                             'reprlib'])
+    if minver >= 2:
+        REQUIRED_FILES[-1] = 'config-%s' % majver
+    if minver >= 3:
+        import sysconfig
+        platdir = sysconfig.get_config_var('PLATDIR')
+        REQUIRED_FILES.append(platdir)
+        # The whole list of 3.3 modules is reproduced below - the current
+        # uncommented ones are required for 3.3 as of now, but more may be
+        # added as 3.3 development continues.
+        REQUIRED_MODULES.extend([
+            #"aifc",
+            #"antigravity",
+            #"argparse",
+            #"ast",
+            #"asynchat",
+            #"asyncore",
+            "base64",
+            #"bdb",
+            #"binhex",
+            #"bisect",
+            #"calendar",
+            #"cgi",
+            #"cgitb",
+            #"chunk",
+            #"cmd",
+            #"codeop",
+            #"code",
+            #"colorsys",
+            #"_compat_pickle",
+            #"compileall",
+            #"concurrent",
+            #"configparser",
+            #"contextlib",
+            #"cProfile",
+            #"crypt",
+            #"csv",
+            #"ctypes",
+            #"curses",
+            #"datetime",
+            #"dbm",
+            #"decimal",
+            #"difflib",
+            #"dis",
+            #"doctest",
+            #"dummy_threading",
+            "_dummy_thread",
+            #"email",
+            #"filecmp",
+            #"fileinput",
+            #"formatter",
+            #"fractions",
+            #"ftplib",
+            #"functools",
+            #"getopt",
+            #"getpass",
+            #"gettext",
+            #"glob",
+            #"gzip",
+            "hashlib",
+            #"heapq",
+            "hmac",
+            #"html",
+            #"http",
+            #"idlelib",
+            #"imaplib",
+            #"imghdr",
+            "imp",
+            "importlib",
+            #"inspect",
+            #"json",
+            #"lib2to3",
+            #"logging",
+            #"macpath",
+            #"macurl2path",
+            #"mailbox",
+            #"mailcap",
+            #"_markupbase",
+            #"mimetypes",
+            #"modulefinder",
+            #"multiprocessing",
+            #"netrc",
+            #"nntplib",
+            #"nturl2path",
+            #"numbers",
+            #"opcode",
+            #"optparse",
+            #"os2emxpath",
+            #"pdb",
+            #"pickle",
+            #"pickletools",
+            #"pipes",
+            #"pkgutil",
+            #"platform",
+            #"plat-linux2",
+            #"plistlib",
+            #"poplib",
+            #"pprint",
+            #"profile",
+            #"pstats",
+            #"pty",
+            #"pyclbr",
+            #"py_compile",
+            #"pydoc_data",
+            #"pydoc",
+            #"_pyio",
+            #"queue",
+            #"quopri",
+            #"reprlib",
+            "rlcompleter",
+            #"runpy",
+            #"sched",
+            #"shelve",
+            #"shlex",
+            #"smtpd",
+            #"smtplib",
+            #"sndhdr",
+            #"socket",
+            #"socketserver",
+            #"sqlite3",
+            #"ssl",
+            #"stringprep",
+            #"string",
+            #"_strptime",
+            #"subprocess",
+            #"sunau",
+            #"symbol",
+            #"symtable",
+            #"sysconfig",
+            #"tabnanny",
+            #"telnetlib",
+            #"test",
+            #"textwrap",
+            #"this",
+            #"_threading_local",
+            #"threading",
+            #"timeit",
+            #"tkinter",
+            #"tokenize",
+            #"token",
+            #"traceback",
+            #"trace",
+            #"tty",
+            #"turtledemo",
+            #"turtle",
+            #"unittest",
+            #"urllib",
+            #"uuid",
+            #"uu",
+            #"wave",
+            #"weakref",
+            #"webbrowser",
+            #"wsgiref",
+            #"xdrlib",
+            #"xml",
+            #"xmlrpc",
+            #"zipfile",
+        ])
+    if minver >= 4:
+        REQUIRED_MODULES.extend([
+            'operator',
+            '_collections_abc',
+            '_bootlocale',
+        ])
+
+if is_pypy:
+    # these are needed to correctly display the exceptions that may happen
+    # during the bootstrap
+    REQUIRED_MODULES.extend(['traceback', 'linecache'])
+
+class Logger(object):
+
+    """
+    Logging object for use in command-line script.  Allows ranges of
+    levels, to avoid some redundancy of displayed information.
+    """
+
+    DEBUG = logging.DEBUG
+    INFO = logging.INFO
+    NOTIFY = (logging.INFO+logging.WARN)/2
+    WARN = WARNING = logging.WARN
+    ERROR = logging.ERROR
+    FATAL = logging.FATAL
+
+    LEVELS = [DEBUG, INFO, NOTIFY, WARN, ERROR, FATAL]
+
+    def __init__(self, consumers):
+        self.consumers = consumers
+        self.indent = 0
+        self.in_progress = None
+        self.in_progress_hanging = False
+
+    def debug(self, msg, *args, **kw):
+        self.log(self.DEBUG, msg, *args, **kw)
+    def info(self, msg, *args, **kw):
+        self.log(self.INFO, msg, *args, **kw)
+    def notify(self, msg, *args, **kw):
+        self.log(self.NOTIFY, msg, *args, **kw)
+    def warn(self, msg, *args, **kw):
+        self.log(self.WARN, msg, *args, **kw)
+    def error(self, msg, *args, **kw):
+        self.log(self.ERROR, msg, *args, **kw)
+    def fatal(self, msg, *args, **kw):
+        self.log(self.FATAL, msg, *args, **kw)
+    def log(self, level, msg, *args, **kw):
+        if args:
+            if kw:
+                raise TypeError(
+                    "You may give positional or keyword arguments, not both")
+        args = args or kw
+        rendered = None
+        for consumer_level, consumer in self.consumers:
+            if self.level_matches(level, consumer_level):
+                if (self.in_progress_hanging
+                    and consumer in (sys.stdout, sys.stderr)):
+                    self.in_progress_hanging = False
+                    sys.stdout.write('\n')
+                    sys.stdout.flush()
+                if rendered is None:
+                    if args:
+                        rendered = msg % args
+                    else:
+                        rendered = msg
+                    rendered = ' '*self.indent + rendered
+                if hasattr(consumer, 'write'):
+                    consumer.write(rendered+'\n')
+                else:
+                    consumer(rendered)
+
+    def start_progress(self, msg):
+        assert not self.in_progress, (
+            "Tried to start_progress(%r) while in_progress %r"
+            % (msg, self.in_progress))
+        if self.level_matches(self.NOTIFY, self._stdout_level()):
+            sys.stdout.write(msg)
+            sys.stdout.flush()
+            self.in_progress_hanging = True
+        else:
+            self.in_progress_hanging = False
+        self.in_progress = msg
+
+    def end_progress(self, msg='done.'):
+        assert self.in_progress, (
+            "Tried to end_progress without start_progress")
+        if self.stdout_level_matches(self.NOTIFY):
+            if not self.in_progress_hanging:
+                # Some message has been printed out since start_progress
+                sys.stdout.write('...' + self.in_progress + msg + '\n')
+                sys.stdout.flush()
+            else:
+                sys.stdout.write(msg + '\n')
+                sys.stdout.flush()
+        self.in_progress = None
+        self.in_progress_hanging = False
+
+    def show_progress(self):
+        """If we are in a progress scope, and no log messages have been
+        shown, write out another '.'"""
+        if self.in_progress_hanging:
+            sys.stdout.write('.')
+            sys.stdout.flush()
+
+    def stdout_level_matches(self, level):
+        """Returns true if a message at this level will go to stdout"""
+        return self.level_matches(level, self._stdout_level())
+
+    def _stdout_level(self):
+        """Returns the level that stdout runs at"""
+        for level, consumer in self.consumers:
+            if consumer is sys.stdout:
+                return level
+        return self.FATAL
+
+    def level_matches(self, level, consumer_level):
+        """
+        >>> l = Logger([])
+        >>> l.level_matches(3, 4)
+        False
+        >>> l.level_matches(3, 2)
+        True
+        >>> l.level_matches(slice(None, 3), 3)
+        False
+        >>> l.level_matches(slice(None, 3), 2)
+        True
+        >>> l.level_matches(slice(1, 3), 1)
+        True
+        >>> l.level_matches(slice(2, 3), 1)
+        False
+        """
+        if isinstance(level, slice):
+            start, stop = level.start, level.stop
+            if start is not None and start > consumer_level:
+                return False
+            if stop is not None and stop <= consumer_level:
+                return False
+            return True
+        else:
+            return level >= consumer_level
+
+    #@classmethod
+    def level_for_integer(cls, level):
+        levels = cls.LEVELS
+        if level < 0:
+            return levels[0]
+        if level >= len(levels):
+            return levels[-1]
+        return levels[level]
+
+    level_for_integer = classmethod(level_for_integer)
+
+# create a silent logger just to prevent this from being undefined
+# will be overridden with the requested verbosity when main() is called.
+logger = Logger([(Logger.LEVELS[-1], sys.stdout)])
+
+def mkdir(path):
+    if not os.path.exists(path):
+        logger.info('Creating %s', path)
+        os.makedirs(path)
+    else:
+        logger.info('Directory %s already exists', path)
+
+def copyfileordir(src, dest, symlink=True):
+    if os.path.isdir(src):
+        shutil.copytree(src, dest, symlink)
+    else:
+        shutil.copy2(src, dest)
+
+def copyfile(src, dest, symlink=True):
+    if not os.path.exists(src):
+        # Some bad symlink in the src
+        logger.warn('Cannot find file %s (bad symlink)', src)
+        return
+    if os.path.exists(dest):
+        logger.debug('File %s already exists', dest)
+        return
+    if not os.path.exists(os.path.dirname(dest)):
+        logger.info('Creating parent directories for %s', os.path.dirname(dest))
+        os.makedirs(os.path.dirname(dest))
+    if not os.path.islink(src):
+        srcpath = os.path.abspath(src)
+    else:
+        srcpath = os.readlink(src)
+    if symlink and hasattr(os, 'symlink') and not is_win:
+        logger.info('Symlinking %s', dest)
+        try:
+            os.symlink(srcpath, dest)
+        except (OSError, NotImplementedError):
+            logger.info('Symlinking failed, copying to %s', dest)
+            copyfileordir(src, dest, symlink)
+    else:
+        logger.info('Copying to %s', dest)
+        copyfileordir(src, dest, symlink)
+
+def writefile(dest, content, overwrite=True):
+    if not os.path.exists(dest):
+        logger.info('Writing %s', dest)
+        f = open(dest, 'wb')
+        f.write(content.encode('utf-8'))
+        f.close()
+        return
+    else:
+        f = open(dest, 'rb')
+        c = f.read()
+        f.close()
+        if c != content.encode("utf-8"):
+            if not overwrite:
+                logger.notify('File %s exists with different content; not overwriting', dest)
+                return
+            logger.notify('Overwriting %s with new content', dest)
+            f = open(dest, 'wb')
+            f.write(content.encode('utf-8'))
+            f.close()
+        else:
+            logger.info('Content %s already in place', dest)
+
+def rmtree(dir):
+    if os.path.exists(dir):
+        logger.notify('Deleting tree %s', dir)
+        shutil.rmtree(dir)
+    else:
+        logger.info('Do not need to delete %s; already gone', dir)
+
+def make_exe(fn):
+    if hasattr(os, 'chmod'):
+        oldmode = os.stat(fn).st_mode & 0xFFF # 0o7777
+        newmode = (oldmode | 0x16D) & 0xFFF # 0o555, 0o7777
+        os.chmod(fn, newmode)
+        logger.info('Changed mode of %s to %s', fn, oct(newmode))
+
+def _find_file(filename, dirs):
+    for dir in reversed(dirs):
+        files = glob.glob(os.path.join(dir, filename))
+        if files and os.path.isfile(files[0]):
+            return True, files[0]
+    return False, filename
+
+def file_search_dirs():
+    """Return the list of existing directories to search for support files
+    (setuptools/pip wheels): the CWD, this script's directory, its
+    ``virtualenv_support`` subdirectory, and — when this script is not the
+    installed ``virtualenv`` module itself — the installed virtualenv's
+    support directory, if importable.
+    """
+    here = os.path.dirname(os.path.abspath(__file__))
+    dirs = ['.', here,
+            join(here, 'virtualenv_support')]
+    if os.path.splitext(os.path.dirname(__file__))[0] != 'virtualenv':
+        # Probably some boot script; just in case virtualenv is installed...
+        try:
+            import virtualenv
+        except ImportError:
+            pass
+        else:
+            dirs.append(os.path.join(os.path.dirname(virtualenv.__file__), 'virtualenv_support'))
+    # Drop entries that don't actually exist on disk.
+    return [d for d in dirs if os.path.isdir(d)]
+
+
+class UpdatingDefaultsHelpFormatter(optparse.IndentedHelpFormatter):
+    """
+    Custom help formatter for use in ConfigOptionParser that updates
+    the defaults before expanding them, allowing them to show up correctly
+    in the help listing
+    """
+    def expand_default(self, option):
+        # Refresh defaults from config files / environment so the
+        # "%default" placeholder in help text reflects the effective value.
+        if self.parser is not None:
+            self.parser.update_defaults(self.parser.defaults)
+        return optparse.IndentedHelpFormatter.expand_default(self, option)
+
+
+class ConfigOptionParser(optparse.OptionParser):
+    """
+    Custom option parser which updates its defaults by checking the
+    configuration files and environmental variables
+    """
+    def __init__(self, *args, **kwargs):
+        # Parse config files up front; their values are merged into the
+        # option defaults lazily via update_defaults()/get_default_values().
+        self.config = ConfigParser.RawConfigParser()
+        self.files = self.get_config_files()
+        self.config.read(self.files)
+        optparse.OptionParser.__init__(self, *args, **kwargs)
+
+    def get_config_files(self):
+        # VIRTUALENV_CONFIG_FILE overrides the default config location,
+        # but only when the file it points at actually exists.
+        config_file = os.environ.get('VIRTUALENV_CONFIG_FILE', False)
+        if config_file and os.path.exists(config_file):
+            return [config_file]
+        return [default_config_file]
+
+    def update_defaults(self, defaults):
+        """
+        Updates the given defaults with values from the config files and
+        the environ. Does a little special handling for certain types of
+        options (lists).
+        """
+        # Then go and look for the other sources of configuration:
+        config = {}
+        # 1. config files
+        config.update(dict(self.get_config_section('virtualenv')))
+        # 2. environmental variables
+        config.update(dict(self.get_environ_vars()))
+        # Then set the options with those values
+        for key, val in config.items():
+            # Normalize config/env keys ("extra_search_dir") to the long
+            # option form ("--extra-search-dir") for lookup.
+            key = key.replace('_', '-')
+            if not key.startswith('--'):
+                key = '--%s' % key  # only prefer long opts
+            option = self.get_option(key)
+            if option is not None:
+                # ignore empty values
+                if not val:
+                    continue
+                # handle multiline configs
+                if option.action == 'append':
+                    val = val.split()
+                else:
+                    option.nargs = 1
+                # Boolean-ish actions take a truthy string ("1"/"yes"/...)
+                # rather than a literal value.
+                if option.action == 'store_false':
+                    val = not strtobool(val)
+                elif option.action in ('store_true', 'count'):
+                    val = strtobool(val)
+                try:
+                    val = option.convert_value(key, val)
+                except optparse.OptionValueError:
+                    e = sys.exc_info()[1]
+                    print("An error occurred during configuration: %s" % e)
+                    sys.exit(3)
+                defaults[option.dest] = val
+        return defaults
+
+    def get_config_section(self, name):
+        """
+        Get a section of a configuration
+        """
+        if self.config.has_section(name):
+            return self.config.items(name)
+        return []
+
+    def get_environ_vars(self, prefix='VIRTUALENV_'):
+        """
+        Returns a generator with all environmental vars with prefix VIRTUALENV
+        """
+        for key, val in os.environ.items():
+            if key.startswith(prefix):
+                # Yield the un-prefixed, lower-cased name, e.g.
+                # VIRTUALENV_PYTHON -> "python".
+                yield (key.replace(prefix, '').lower(), val)
+
+    def get_default_values(self):
+        """
+        Overridding to make updating the defaults after instantiation of
+        the option parser possible, update_defaults() does the dirty work.
+        """
+        if not self.process_default_values:
+            # Old, pre-Optik 1.5 behaviour.
+            return optparse.Values(self.defaults)
+
+        defaults = self.update_defaults(self.defaults.copy())  # ours
+        for option in self._get_all_options():
+            default = defaults.get(option.dest)
+            # String defaults still need type conversion/validation.
+            if isinstance(default, basestring):
+                opt_str = option.get_opt_string()
+                defaults[option.dest] = option.check_value(opt_str, default)
+        return optparse.Values(defaults)
+
+
+def main():
+    """Command-line entry point: parse options, validate the single
+    DEST_DIR argument, and create (or relocate) the virtual environment.
+    """
+    parser = ConfigOptionParser(
+        version=virtualenv_version,
+        usage="%prog [OPTIONS] DEST_DIR",
+        formatter=UpdatingDefaultsHelpFormatter())
+
+    parser.add_option(
+        '-v', '--verbose',
+        action='count',
+        dest='verbose',
+        default=0,
+        help="Increase verbosity.")
+
+    parser.add_option(
+        '-q', '--quiet',
+        action='count',
+        dest='quiet',
+        default=0,
+        help='Decrease verbosity.')
+
+    parser.add_option(
+        '-p', '--python',
+        dest='python',
+        metavar='PYTHON_EXE',
+        help='The Python interpreter to use, e.g., --python=python2.5 will use the python2.5 '
+        'interpreter to create the new environment.  The default is the interpreter that '
+        'virtualenv was installed with (%s)' % sys.executable)
+
+    parser.add_option(
+        '--clear',
+        dest='clear',
+        action='store_true',
+        help="Clear out the non-root install and start from scratch.")
+
+    parser.set_defaults(system_site_packages=False)
+    parser.add_option(
+        '--no-site-packages',
+        dest='system_site_packages',
+        action='store_false',
+        help="DEPRECATED. Retained only for backward compatibility. "
+             "Not having access to global site-packages is now the default behavior.")
+
+    parser.add_option(
+        '--system-site-packages',
+        dest='system_site_packages',
+        action='store_true',
+        help="Give the virtual environment access to the global site-packages.")
+
+    parser.add_option(
+        '--always-copy',
+        dest='symlink',
+        action='store_false',
+        default=True,
+        help="Always copy files rather than symlinking.")
+
+    parser.add_option(
+        '--unzip-setuptools',
+        dest='unzip_setuptools',
+        action='store_true',
+        help="Unzip Setuptools when installing it.")
+
+    parser.add_option(
+        '--relocatable',
+        dest='relocatable',
+        action='store_true',
+        help='Make an EXISTING virtualenv environment relocatable. '
+             'This fixes up scripts and makes all .pth files relative.')
+
+    parser.add_option(
+        '--no-setuptools',
+        dest='no_setuptools',
+        action='store_true',
+        help='Do not install setuptools (or pip) in the new virtualenv.')
+
+    parser.add_option(
+        '--no-pip',
+        dest='no_pip',
+        action='store_true',
+        help='Do not install pip in the new virtualenv.')
+
+    default_search_dirs = file_search_dirs()
+    parser.add_option(
+        '--extra-search-dir',
+        dest="search_dirs",
+        action="append",
+        metavar='DIR',
+        default=default_search_dirs,
+        help="Directory to look for setuptools/pip distributions in. "
+              "This option can be used multiple times.")
+
+    parser.add_option(
+        '--never-download',
+        dest="never_download",
+        action="store_true",
+        default=True,
+        help="DEPRECATED. Retained only for backward compatibility. This option has no effect. "
+              "Virtualenv never downloads pip or setuptools.")
+
+    parser.add_option(
+        '--prompt',
+        dest='prompt',
+        help='Provides an alternative prompt prefix for this environment.')
+
+    parser.add_option(
+        '--setuptools',
+        dest='setuptools',
+        action='store_true',
+        help="DEPRECATED. Retained only for backward compatibility. This option has no effect.")
+
+    parser.add_option(
+        '--distribute',
+        dest='distribute',
+        action='store_true',
+        help="DEPRECATED. Retained only for backward compatibility. This option has no effect.")
+
+    # Extension hooks: bootstrap scripts built on top of this file may define
+    # extend_parser/adjust_options/after_install at module scope.
+    if 'extend_parser' in globals():
+        extend_parser(parser)
+
+    options, args = parser.parse_args()
+
+    global logger
+
+    if 'adjust_options' in globals():
+        adjust_options(options, args)
+
+    verbosity = options.verbose - options.quiet
+    logger = Logger([(Logger.level_for_integer(2 - verbosity), sys.stdout)])
+
+    # If a different interpreter was requested via -p, re-exec this script
+    # under it; VIRTUALENV_INTERPRETER_RUNNING guards against recursion.
+    if options.python and not os.environ.get('VIRTUALENV_INTERPRETER_RUNNING'):
+        env = os.environ.copy()
+        interpreter = resolve_interpreter(options.python)
+        if interpreter == sys.executable:
+            logger.warn('Already using interpreter %s' % interpreter)
+        else:
+            logger.notify('Running virtualenv with interpreter %s' % interpreter)
+            env['VIRTUALENV_INTERPRETER_RUNNING'] = 'true'
+            file = __file__
+            if file.endswith('.pyc'):
+                file = file[:-1]
+            popen = subprocess.Popen([interpreter, file] + sys.argv[1:], env=env)
+            raise SystemExit(popen.wait())
+
+    if not args:
+        print('You must provide a DEST_DIR')
+        parser.print_help()
+        sys.exit(2)
+    if len(args) > 1:
+        print('There must be only one argument: DEST_DIR (you gave %s)' % (
+            ' '.join(args)))
+        parser.print_help()
+        sys.exit(2)
+
+    home_dir = args[0]
+
+    if os.environ.get('WORKING_ENV'):
+        logger.fatal('ERROR: you cannot run virtualenv while in a workingenv')
+        logger.fatal('Please deactivate your workingenv, then re-run this script')
+        sys.exit(3)
+
+    # PYTHONHOME would override the new environment's prefix; drop it.
+    if 'PYTHONHOME' in os.environ:
+        logger.warn('PYTHONHOME is set.  You *must* activate the virtualenv before using it')
+        del os.environ['PYTHONHOME']
+
+    if options.relocatable:
+        make_environment_relocatable(home_dir)
+        return
+
+    if not options.never_download:
+        logger.warn('The --never-download option is for backward compatibility only.')
+        logger.warn('Setting it to false is no longer supported, and will be ignored.')
+
+    create_environment(home_dir,
+                       site_packages=options.system_site_packages,
+                       clear=options.clear,
+                       unzip_setuptools=options.unzip_setuptools,
+                       prompt=options.prompt,
+                       search_dirs=options.search_dirs,
+                       never_download=True,
+                       no_setuptools=options.no_setuptools,
+                       no_pip=options.no_pip,
+                       symlink=options.symlink)
+    if 'after_install' in globals():
+        after_install(options, home_dir)
+
+def call_subprocess(cmd, show_stdout=True,
+                    filter_stdout=None, cwd=None,
+                    raise_on_returncode=True, extra_env=None,
+                    remove_from_env=None):
+    """Run ``cmd`` (a list of arguments), logging its output.
+
+    When ``show_stdout`` is true, the child's stdout is inherited;
+    otherwise it is captured line by line, optionally classified by
+    ``filter_stdout`` (which may return a log level or a (level, line)
+    tuple), and logged. stderr is always merged into stdout.
+    ``extra_env``/``remove_from_env`` adjust a copy of os.environ.
+    Raises OSError on a nonzero exit when ``raise_on_returncode`` is true,
+    otherwise just warns.
+    """
+    # Build a human-readable, abbreviated command string for log messages.
+    cmd_parts = []
+    for part in cmd:
+        if len(part) > 45:
+            part = part[:20]+"..."+part[-20:]
+        if ' ' in part or '\n' in part or '"' in part or "'" in part:
+            part = '"%s"' % part.replace('"', '\\"')
+        if hasattr(part, 'decode'):
+            try:
+                part = part.decode(sys.getdefaultencoding())
+            except UnicodeDecodeError:
+                part = part.decode(sys.getfilesystemencoding())
+        cmd_parts.append(part)
+    cmd_desc = ' '.join(cmd_parts)
+    if show_stdout:
+        stdout = None
+    else:
+        stdout = subprocess.PIPE
+    logger.debug("Running command %s" % cmd_desc)
+    if extra_env or remove_from_env:
+        env = os.environ.copy()
+        if extra_env:
+            env.update(extra_env)
+        if remove_from_env:
+            for varname in remove_from_env:
+                env.pop(varname, None)
+    else:
+        env = None
+    try:
+        proc = subprocess.Popen(
+            cmd, stderr=subprocess.STDOUT, stdin=None, stdout=stdout,
+            cwd=cwd, env=env)
+    except Exception:
+        e = sys.exc_info()[1]
+        logger.fatal(
+            "Error %s while executing command %s" % (e, cmd_desc))
+        raise
+    all_output = []
+    if stdout is not None:
+        # Captured mode: stream the pipe line by line through the logger.
+        stdout = proc.stdout
+        encoding = sys.getdefaultencoding()
+        fs_encoding = sys.getfilesystemencoding()
+        while 1:
+            line = stdout.readline()
+            try:
+                line = line.decode(encoding)
+            except UnicodeDecodeError:
+                line = line.decode(fs_encoding)
+            if not line:
+                break
+            line = line.rstrip()
+            all_output.append(line)
+            if filter_stdout:
+                level = filter_stdout(line)
+                if isinstance(level, tuple):
+                    level, line = level
+                logger.log(level, line)
+                if not logger.stdout_level_matches(level):
+                    logger.show_progress()
+            else:
+                logger.info(line)
+    else:
+        proc.communicate()
+    proc.wait()
+    if proc.returncode:
+        if raise_on_returncode:
+            # Dump what we captured before failing, to aid debugging.
+            if all_output:
+                logger.notify('Complete output from command %s:' % cmd_desc)
+                logger.notify('\n'.join(all_output) + '\n----------------------------------------')
+            raise OSError(
+                "Command %s failed with error code %s"
+                % (cmd_desc, proc.returncode))
+        else:
+            logger.warn(
+                "Command %s had error code %s"
+                % (cmd_desc, proc.returncode))
+
+def filter_install_output(line):
+    """Log-level filter for install output: 'running ...' lines are INFO,
+    everything else is DEBUG."""
+    if line.strip().startswith('running'):
+        return Logger.INFO
+    return Logger.DEBUG
+
+def find_wheels(projects, search_dirs):
+    """Find wheels from which we can import PROJECTS.
+
+    Scan through SEARCH_DIRS for a wheel for each PROJECT in turn. Return
+    a list of the first wheel found for each PROJECT
+    """
+
+    wheels = []
+
+    # Look through SEARCH_DIRS for the first suitable wheel. Don't bother
+    # about version checking here, as this is simply to get something we can
+    # then use to install the correct version.
+    for project in projects:
+        for dirname in search_dirs:
+            # This relies on only having "universal" wheels available.
+            # The pattern could be tightened to require -py2.py3-none-any.whl.
+            files = glob.glob(os.path.join(dirname, project + '-*.whl'))
+            if files:
+                wheels.append(os.path.abspath(files[0]))
+                break
+        else:
+            # We're out of luck, so quit with a suitable error
+            # NOTE(review): logger.fatal only logs here — execution continues
+            # and the returned list will simply be missing this project.
+            logger.fatal('Cannot find a wheel for %s' % (project,))
+
+    return wheels
+
+def install_wheel(project_names, py_executable, search_dirs=None):
+    """Install ``project_names`` into the new environment using pip,
+    run via ``py_executable``.
+
+    The setuptools and pip wheels found in ``search_dirs`` are placed on
+    PYTHONPATH so pip is importable straight from its wheel, and pip is
+    configured (via PIP_* env vars) to install offline from those dirs.
+    """
+    if search_dirs is None:
+        search_dirs = file_search_dirs()
+
+    # Always locate both wheels — pip itself runs from these.
+    wheels = find_wheels(['setuptools', 'pip'], search_dirs)
+    pythonpath = os.pathsep.join(wheels)
+    findlinks = ' '.join(search_dirs)
+
+    cmd = [
+        py_executable, '-c',
+        'import sys, pip; sys.exit(pip.main(["install", "--ignore-installed"] + sys.argv[1:]))',
+    ] + project_names
+    logger.start_progress('Installing %s...' % (', '.join(project_names)))
+    logger.indent += 2
+    try:
+        call_subprocess(cmd, show_stdout=False,
+            extra_env = {
+                'PYTHONPATH': pythonpath,
+                'PIP_FIND_LINKS': findlinks,
+                'PIP_USE_WHEEL': '1',
+                'PIP_PRE': '1',
+                'PIP_NO_INDEX': '1'
+            }
+        )
+    finally:
+        logger.indent -= 2
+        logger.end_progress()
+
+def create_environment(home_dir, site_packages=False, clear=False,
+                       unzip_setuptools=False,
+                       prompt=None, search_dirs=None, never_download=False,
+                       no_setuptools=False, no_pip=False, symlink=True):
+    """
+    Creates a new environment in ``home_dir``.
+
+    If ``site_packages`` is true, then the global ``site-packages/``
+    directory will be on the path.
+
+    If ``clear`` is true (default False) then the environment will
+    first be cleared.
+
+    Unless ``no_setuptools`` is set, setuptools (and pip, unless
+    ``no_pip``) are installed from wheels found in ``search_dirs``.
+    """
+    home_dir, lib_dir, inc_dir, bin_dir = path_locations(home_dir)
+
+    py_executable = os.path.abspath(install_python(
+        home_dir, lib_dir, inc_dir, bin_dir,
+        site_packages=site_packages, clear=clear, symlink=symlink))
+
+    install_distutils(home_dir)
+
+    if not no_setuptools:
+        to_install = ['setuptools']
+        if not no_pip:
+            to_install.append('pip')
+        install_wheel(to_install, py_executable, search_dirs)
+
+    install_activate(home_dir, bin_dir, prompt)
+
+def is_executable_file(fpath):
+    """Return True if ``fpath`` is a regular file executable by this process."""
+    return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
+
+def path_locations(home_dir):
+    """Return the path locations for the environment (where libraries are,
+    where scripts go, etc)
+
+    Returns ``(home_dir, lib_dir, inc_dir, bin_dir)``; layout varies by
+    platform (Windows/Jython/PyPy/POSIX).
+    """
+    # XXX: We'd use distutils.sysconfig.get_python_inc/lib but its
+    # prefix arg is broken: http://bugs.python.org/issue3386
+    if is_win:
+        # Windows has lots of problems with executables with spaces in
+        # the name; this function will remove them (using the ~1
+        # format):
+        mkdir(home_dir)
+        if ' ' in home_dir:
+            import ctypes
+            GetShortPathName = ctypes.windll.kernel32.GetShortPathNameW
+            size = max(len(home_dir)+1, 256)
+            buf = ctypes.create_unicode_buffer(size)
+            try:
+                u = unicode
+            except NameError:
+                # Python 3: no `unicode` builtin; str is already unicode.
+                u = str
+            ret = GetShortPathName(u(home_dir), buf, size)
+            if not ret:
+                print('Error: the path "%s" has a space in it' % home_dir)
+                print('We could not determine the short pathname for it.')
+                print('Exiting.')
+                sys.exit(3)
+            home_dir = str(buf.value)
+        lib_dir = join(home_dir, 'Lib')
+        inc_dir = join(home_dir, 'Include')
+        bin_dir = join(home_dir, 'Scripts')
+    if is_jython:
+        lib_dir = join(home_dir, 'Lib')
+        inc_dir = join(home_dir, 'Include')
+        bin_dir = join(home_dir, 'bin')
+    elif is_pypy:
+        lib_dir = home_dir
+        inc_dir = join(home_dir, 'include')
+        bin_dir = join(home_dir, 'bin')
+    elif not is_win:
+        lib_dir = join(home_dir, 'lib', py_version)
+        multiarch_exec = '/usr/bin/multiarch-platform'
+        if is_executable_file(multiarch_exec):
+            # In Mageia (2) and Mandriva distros the include dir must be like:
+            # virtualenv/include/multiarch-x86_64-linux/python2.7
+            # instead of being virtualenv/include/python2.7
+            p = subprocess.Popen(multiarch_exec, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+            stdout, stderr = p.communicate()
+            # stdout.strip is needed to remove newline character
+            inc_dir = join(home_dir, 'include', stdout.strip(), py_version + abiflags)
+        else:
+            inc_dir = join(home_dir, 'include', py_version + abiflags)
+        bin_dir = join(home_dir, 'bin')
+    return home_dir, lib_dir, inc_dir, bin_dir
+
+
+def change_prefix(filename, dst_prefix):
+    """Rewrite ``filename`` so its installation prefix becomes ``dst_prefix``.
+
+    Tries sys.prefix, sys.real_prefix/sys.base_prefix, and (on OS X) a set
+    of known framework site-packages locations; asserts if the file is not
+    under any known prefix.
+    """
+    prefixes = [sys.prefix]
+
+    if is_darwin:
+        prefixes.extend((
+            os.path.join("/Library/Python", sys.version[:3], "site-packages"),
+            os.path.join(sys.prefix, "Extras", "lib", "python"),
+            os.path.join("~", "Library", "Python", sys.version[:3], "site-packages"),
+            # Python 2.6 no-frameworks
+            os.path.join("~", ".local", "lib","python", sys.version[:3], "site-packages"),
+            # System Python 2.7 on OSX Mountain Lion
+            os.path.join("~", "Library", "Python", sys.version[:3], "lib", "python", "site-packages")))
+
+    if hasattr(sys, 'real_prefix'):
+        prefixes.append(sys.real_prefix)
+    if hasattr(sys, 'base_prefix'):
+        prefixes.append(sys.base_prefix)
+    prefixes = list(map(os.path.expanduser, prefixes))
+    prefixes = list(map(os.path.abspath, prefixes))
+    # Check longer prefixes first so we don't split in the middle of a filename
+    prefixes = sorted(prefixes, key=len, reverse=True)
+    filename = os.path.abspath(filename)
+    for src_prefix in prefixes:
+        if filename.startswith(src_prefix):
+            _, relpath = filename.split(src_prefix, 1)
+            if src_prefix != os.sep: # sys.prefix == "/"
+                assert relpath[0] == os.sep
+                relpath = relpath[1:]
+            return join(dst_prefix, relpath)
+    assert False, "Filename %s does not start with any of these prefixes: %s" % \
+        (filename, prefixes)
+
+def copy_required_modules(dst_prefix, symlink):
+    """Copy (or symlink) each module in REQUIRED_MODULES into the new
+    environment at ``dst_prefix``, skipping built-ins and handling the
+    OS X custom readline special case.
+    """
+    import imp
+    for modname in REQUIRED_MODULES:
+        if modname in sys.builtin_module_names:
+            logger.info("Ignoring built-in bootstrap module: %s" % modname)
+            continue
+        try:
+            f, filename, _ = imp.find_module(modname)
+        except ImportError:
+            logger.info("Cannot import bootstrap module: %s" % modname)
+        else:
+            if f is not None:
+                f.close()
+            # special-case custom readline.so on OS X, but not for pypy:
+            if modname == 'readline' and sys.platform == 'darwin' and not (
+                    is_pypy or filename.endswith(join('lib-dynload', 'readline.so'))):
+                dst_filename = join(dst_prefix, 'lib', 'python%s' % sys.version[:3], 'readline.so')
+            elif modname == 'readline' and sys.platform == 'win32':
+                # special-case for Windows, where readline is not a
+                # standard module, though it may have been installed in
+                # site-packages by a third-party package
+                # NOTE(review): dst_filename is never assigned on this
+                # branch, so the copyfile() below would raise NameError if
+                # readline is actually found on win32 — confirm intended.
+                pass
+            else:
+                dst_filename = change_prefix(filename, dst_prefix)
+            copyfile(filename, dst_filename, symlink)
+            if filename.endswith('.pyc'):
+                # Also copy the matching .py source when it exists.
+                pyfile = filename[:-1]
+                if os.path.exists(pyfile):
+                    copyfile(pyfile, dst_filename[:-1], symlink)
+
+
+def subst_path(prefix_path, prefix, home_dir):
+    """Replace the leading ``prefix`` of ``prefix_path`` with ``home_dir``.
+
+    Returns None (after a warning) when ``prefix_path`` is not actually
+    under ``prefix``.
+    """
+    prefix_path = os.path.normpath(prefix_path)
+    prefix = os.path.normpath(prefix)
+    home_dir = os.path.normpath(home_dir)
+    if not prefix_path.startswith(prefix):
+        logger.warn('Path not in prefix %r %r', prefix_path, prefix)
+        return
+    return prefix_path.replace(prefix, home_dir, 1)
+
+
+def install_python(home_dir, lib_dir, inc_dir, bin_dir, site_packages, clear, symlink=True):
+    """Install just the base environment, no distutils patches etc"""
+    if sys.executable.startswith(bin_dir):
+        print('Please use the *system* python to run this script')
+        return
+
+    if clear:
+        rmtree(lib_dir)
+        ## FIXME: why not delete it?
+        ## Maybe it should delete everything with #!/path/to/venv/python in it
+        logger.notify('Not deleting %s', bin_dir)
+
+    if hasattr(sys, 'real_prefix'):
+        logger.notify('Using real prefix %r' % sys.real_prefix)
+        prefix = sys.real_prefix
+    elif hasattr(sys, 'base_prefix'):
+        logger.notify('Using base prefix %r' % sys.base_prefix)
+        prefix = sys.base_prefix
+    else:
+        prefix = sys.prefix
+    mkdir(lib_dir)
+    fix_lib64(lib_dir, symlink)
+    stdlib_dirs = [os.path.dirname(os.__file__)]
+    if is_win:
+        stdlib_dirs.append(join(os.path.dirname(stdlib_dirs[0]), 'DLLs'))
+    elif is_darwin:
+        stdlib_dirs.append(join(stdlib_dirs[0], 'site-packages'))
+    if hasattr(os, 'symlink'):
+        logger.info('Symlinking Python bootstrap modules')
+    else:
+        logger.info('Copying Python bootstrap modules')
+    logger.indent += 2
+    try:
+        # copy required files...
+        for stdlib_dir in stdlib_dirs:
+            if not os.path.isdir(stdlib_dir):
+                continue
+            for fn in os.listdir(stdlib_dir):
+                bn = os.path.splitext(fn)[0]
+                if fn != 'site-packages' and bn in REQUIRED_FILES:
+                    copyfile(join(stdlib_dir, fn), join(lib_dir, fn), symlink)
+        # ...and modules
+        copy_required_modules(home_dir, symlink)
+    finally:
+        logger.indent -= 2
+    mkdir(join(lib_dir, 'site-packages'))
+    import site
+    site_filename = site.__file__
+    if site_filename.endswith('.pyc'):
+        site_filename = site_filename[:-1]
+    elif site_filename.endswith('$py.class'):
+        site_filename = site_filename.replace('$py.class', '.py')
+    site_filename_dst = change_prefix(site_filename, home_dir)
+    site_dir = os.path.dirname(site_filename_dst)
+    writefile(site_filename_dst, SITE_PY)
+    writefile(join(site_dir, 'orig-prefix.txt'), prefix)
+    site_packages_filename = join(site_dir, 'no-global-site-packages.txt')
+    if not site_packages:
+        writefile(site_packages_filename, '')
+
+    if is_pypy or is_win:
+        stdinc_dir = join(prefix, 'include')
+    else:
+        stdinc_dir = join(prefix, 'include', py_version + abiflags)
+    if os.path.exists(stdinc_dir):
+        copyfile(stdinc_dir, inc_dir, symlink)
+    else:
+        logger.debug('No include dir %s' % stdinc_dir)
+
+    platinc_dir = distutils.sysconfig.get_python_inc(plat_specific=1)
+    if platinc_dir != stdinc_dir:
+        platinc_dest = distutils.sysconfig.get_python_inc(
+            plat_specific=1, prefix=home_dir)
+        if platinc_dir == platinc_dest:
+            # Do platinc_dest manually due to a CPython bug;
+            # not http://bugs.python.org/issue3386 but a close cousin
+            platinc_dest = subst_path(platinc_dir, prefix, home_dir)
+        if platinc_dest:
+            # PyPy's stdinc_dir and prefix are relative to the original binary
+            # (traversing virtualenvs), whereas the platinc_dir is relative to
+            # the inner virtualenv and ignores the prefix argument.
+            # This seems more evolved than designed.
+            copyfile(platinc_dir, platinc_dest, symlink)
+
+    # pypy never uses exec_prefix, just ignore it
+    if sys.exec_prefix != prefix and not is_pypy:
+        if is_win:
+            exec_dir = join(sys.exec_prefix, 'lib')
+        elif is_jython:
+            exec_dir = join(sys.exec_prefix, 'Lib')
+        else:
+            exec_dir = join(sys.exec_prefix, 'lib', py_version)
+        for fn in os.listdir(exec_dir):
+            copyfile(join(exec_dir, fn), join(lib_dir, fn), symlink)
+
+    if is_jython:
+        # Jython has either jython-dev.jar and javalib/ dir, or just
+        # jython.jar
+        for name in 'jython-dev.jar', 'javalib', 'jython.jar':
+            src = join(prefix, name)
+            if os.path.exists(src):
+                copyfile(src, join(home_dir, name), symlink)
+        # XXX: registry should always exist after Jython 2.5rc1
+        src = join(prefix, 'registry')
+        if os.path.exists(src):
+            copyfile(src, join(home_dir, 'registry'), symlink=False)
+        copyfile(join(prefix, 'cachedir'), join(home_dir, 'cachedir'),
+                 symlink=False)
+
+    mkdir(bin_dir)
+    py_executable = join(bin_dir, os.path.basename(sys.executable))
+    if 'Python.framework' in prefix:
+        # OS X framework builds cause validation to break
+        # https://github.com/pypa/virtualenv/issues/322
+        if os.environ.get('__PYVENV_LAUNCHER__'):
+            del os.environ["__PYVENV_LAUNCHER__"]
+        if re.search(r'/Python(?:-32|-64)*$', py_executable):
+            # The name of the python executable is not quite what
+            # we want, rename it.
+            py_executable = os.path.join(
+                    os.path.dirname(py_executable), 'python')
+
+    logger.notify('New %s executable in %s', expected_exe, py_executable)
+    pcbuild_dir = os.path.dirname(sys.executable)
+    pyd_pth = os.path.join(lib_dir, 'site-packages', 'virtualenv_builddir_pyd.pth')
+    if is_win and os.path.exists(os.path.join(pcbuild_dir, 'build.bat')):
+        logger.notify('Detected python running from build directory %s', pcbuild_dir)
+        logger.notify('Writing .pth file linking to build directory for *.pyd files')
+        writefile(pyd_pth, pcbuild_dir)
+    else:
+        pcbuild_dir = None
+        if os.path.exists(pyd_pth):
+            logger.info('Deleting %s (not Windows env or not build directory python)' % pyd_pth)
+            os.unlink(pyd_pth)
+
+    if sys.executable != py_executable:
+        ## FIXME: could I just hard link?
+        executable = sys.executable
+        shutil.copyfile(executable, py_executable)
+        make_exe(py_executable)
+        if is_win or is_cygwin:
+            pythonw = os.path.join(os.path.dirname(sys.executable), 'pythonw.exe')
+            if os.path.exists(pythonw):
+                logger.info('Also created pythonw.exe')
+                shutil.copyfile(pythonw, os.path.join(os.path.dirname(py_executable), 'pythonw.exe'))
+            python_d = os.path.join(os.path.dirname(sys.executable), 'python_d.exe')
+            python_d_dest = os.path.join(os.path.dirname(py_executable), 'python_d.exe')
+            if os.path.exists(python_d):
+                logger.info('Also created python_d.exe')
+                shutil.copyfile(python_d, python_d_dest)
+            elif os.path.exists(python_d_dest):
+                logger.info('Removed python_d.exe as it is no longer at the source')
+                os.unlink(python_d_dest)
+            # we need to copy the DLL to enforce that windows will load the correct one.
+            # may not exist if we are cygwin.
+            py_executable_dll = 'python%s%s.dll' % (
+                sys.version_info[0], sys.version_info[1])
+            py_executable_dll_d = 'python%s%s_d.dll' % (
+                sys.version_info[0], sys.version_info[1])
+            pythondll = os.path.join(os.path.dirname(sys.executable), py_executable_dll)
+            pythondll_d = os.path.join(os.path.dirname(sys.executable), py_executable_dll_d)
+            pythondll_d_dest = os.path.join(os.path.dirname(py_executable), py_executable_dll_d)
+            if os.path.exists(pythondll):
+                logger.info('Also created %s' % py_executable_dll)
+                shutil.copyfile(pythondll, os.path.join(os.path.dirname(py_executable), py_executable_dll))
+            if os.path.exists(pythondll_d):
+                logger.info('Also created %s' % py_executable_dll_d)
+                shutil.copyfile(pythondll_d, pythondll_d_dest)
+            elif os.path.exists(pythondll_d_dest):
+                logger.info('Removed %s as the source does not exist' % pythondll_d_dest)
+                os.unlink(pythondll_d_dest)
+        if is_pypy:
+            # make a symlink python --> pypy-c
+            python_executable = os.path.join(os.path.dirname(py_executable), 'python')
+            if sys.platform in ('win32', 'cygwin'):
+                python_executable += '.exe'
+            logger.info('Also created executable %s' % python_executable)
+            copyfile(py_executable, python_executable, symlink)
+
+            if is_win:
+                for name in ['libexpat.dll', 'libpypy.dll', 'libpypy-c.dll',
+                            'libeay32.dll', 'ssleay32.dll', 'sqlite3.dll',
+                            'tcl85.dll', 'tk85.dll']:
+                    src = join(prefix, name)
+                    if os.path.exists(src):
+                        copyfile(src, join(bin_dir, name), symlink)
+
+                for d in sys.path:
+                    if d.endswith('lib_pypy'):
+                        break
+                else:
+                    logger.fatal('Could not find lib_pypy in sys.path')
+                    raise SystemExit(3)
+                logger.info('Copying lib_pypy')
+                copyfile(d, os.path.join(home_dir, 'lib_pypy'), symlink)
+
+    if os.path.splitext(os.path.basename(py_executable))[0] != expected_exe:
+        secondary_exe = os.path.join(os.path.dirname(py_executable),
+                                     expected_exe)
+        py_executable_ext = os.path.splitext(py_executable)[1]
+        if py_executable_ext.lower() == '.exe':
+            # python2.4 gives an extension of '.4' :P
+            secondary_exe += py_executable_ext
+        if os.path.exists(secondary_exe):
+            logger.warn('Not overwriting existing %s script %s (you must use %s)'
+                        % (expected_exe, secondary_exe, py_executable))
+        else:
+            logger.notify('Also creating executable in %s' % secondary_exe)
+            shutil.copyfile(sys.executable, secondary_exe)
+            make_exe(secondary_exe)
+
+    if '.framework' in prefix:
+        if 'Python.framework' in prefix:
+            logger.debug('MacOSX Python framework detected')
+            # Make sure we use the embedded interpreter inside
+            # the framework, even if sys.executable points to
+            # the stub executable in ${sys.prefix}/bin
+            # See http://groups.google.com/group/python-virtualenv/
+            #                              browse_thread/thread/17cab2f85da75951
+            original_python = os.path.join(
+                prefix, 'Resources/Python.app/Contents/MacOS/Python')
+        if 'EPD' in prefix:
+            logger.debug('EPD framework detected')
+            original_python = os.path.join(prefix, 'bin/python')
+        shutil.copy(original_python, py_executable)
+
+        # Copy the framework's dylib into the virtual
+        # environment
+        virtual_lib = os.path.join(home_dir, '.Python')
+
+        if os.path.exists(virtual_lib):
+            os.unlink(virtual_lib)
+        copyfile(
+            os.path.join(prefix, 'Python'),
+            virtual_lib,
+            symlink)
+
+        # And then change the install_name of the copied python executable
+        try:
+            mach_o_change(py_executable,
+                          os.path.join(prefix, 'Python'),
+                          '@executable_path/../.Python')
+        except:
+            e = sys.exc_info()[1]
+            logger.warn("Could not call mach_o_change: %s. "
+                        "Trying to call install_name_tool instead." % e)
+            try:
+                call_subprocess(
+                    ["install_name_tool", "-change",
+                     os.path.join(prefix, 'Python'),
+                     '@executable_path/../.Python',
+                     py_executable])
+            except:
+                logger.fatal("Could not call install_name_tool -- you must "
+                             "have Apple's development tools installed")
+                raise
+
+    if not is_win:
+        # Ensure that 'python', 'pythonX' and 'pythonX.Y' all exist
+        py_exe_version_major = 'python%s' % sys.version_info[0]
+        py_exe_version_major_minor = 'python%s.%s' % (
+            sys.version_info[0], sys.version_info[1])
+        py_exe_no_version = 'python'
+        required_symlinks = [ py_exe_no_version, py_exe_version_major,
+                         py_exe_version_major_minor ]
+
+        py_executable_base = os.path.basename(py_executable)
+
+        if py_executable_base in required_symlinks:
+            # Don't try to symlink to yourself.
+            required_symlinks.remove(py_executable_base)
+
+        for pth in required_symlinks:
+            full_pth = join(bin_dir, pth)
+            if os.path.exists(full_pth):
+                os.unlink(full_pth)
+            if symlink:
+                os.symlink(py_executable_base, full_pth)
+            else:
+                copyfile(py_executable, full_pth, symlink)
+
+    if is_win and ' ' in py_executable:
+        # There's a bug with subprocess on Windows when using a first
+        # argument that has a space in it.  Instead we have to quote
+        # the value:
+        py_executable = '"%s"' % py_executable
+    # NOTE: keep this check as one line, cmd.exe doesn't cope with line breaks
+    cmd = [py_executable, '-c', 'import sys;out=sys.stdout;'
+        'getattr(out, "buffer", out).write(sys.prefix.encode("utf-8"))']
+    logger.info('Testing executable with %s %s "%s"' % tuple(cmd))
+    try:
+        proc = subprocess.Popen(cmd,
+                            stdout=subprocess.PIPE)
+        proc_stdout, proc_stderr = proc.communicate()
+    except OSError:
+        e = sys.exc_info()[1]
+        if e.errno == errno.EACCES:
+            logger.fatal('ERROR: The executable %s could not be run: %s' % (py_executable, e))
+            sys.exit(100)
+        else:
+            raise e
+
+    proc_stdout = proc_stdout.strip().decode("utf-8")
+    proc_stdout = os.path.normcase(os.path.abspath(proc_stdout))
+    norm_home_dir = os.path.normcase(os.path.abspath(home_dir))
+    if hasattr(norm_home_dir, 'decode'):
+        norm_home_dir = norm_home_dir.decode(sys.getfilesystemencoding())
+    if proc_stdout != norm_home_dir:
+        logger.fatal(
+            'ERROR: The executable %s is not functioning' % py_executable)
+        logger.fatal(
+            'ERROR: It thinks sys.prefix is %r (should be %r)'
+            % (proc_stdout, norm_home_dir))
+        logger.fatal(
+            'ERROR: virtualenv is not compatible with this system or executable')
+        if is_win:
+            logger.fatal(
+                'Note: some Windows users have reported this error when they '
+                'installed Python for "Only this user" or have multiple '
+                'versions of Python installed. Copying the appropriate '
+                'PythonXX.dll to the virtualenv Scripts/ directory may fix '
+                'this problem.')
+        sys.exit(100)
+    else:
+        logger.info('Got sys.prefix result: %r' % proc_stdout)
+
+    pydistutils = os.path.expanduser('~/.pydistutils.cfg')
+    if os.path.exists(pydistutils):
+        logger.notify('Please make sure you remove any previous custom paths from '
+                      'your %s file.' % pydistutils)
+    ## FIXME: really this should be calculated earlier
+
+    fix_local_scheme(home_dir, symlink)
+
+    if site_packages:
+        if os.path.exists(site_packages_filename):
+            logger.info('Deleting %s' % site_packages_filename)
+            os.unlink(site_packages_filename)
+
+    return py_executable
+
+
+def install_activate(home_dir, bin_dir, prompt=None):
+    """Write the shell activation scripts into ``bin_dir``.
+
+    Picks platform-appropriate templates (batch + PowerShell on Windows/
+    Jython-on-NT, sh/fish/csh elsewhere) plus ``activate_this.py``, and
+    substitutes the environment path, name, bin dir and optional prompt
+    into each one before writing it out.
+    """
+    home_dir = os.path.abspath(home_dir)
+    # NOTE: 'and' binds tighter than 'or', so this reads as
+    # is_win or (is_jython and os._name == 'nt').
+    if is_win or is_jython and os._name == 'nt':
+        files = {
+            'activate.bat': ACTIVATE_BAT,
+            'deactivate.bat': DEACTIVATE_BAT,
+            'activate.ps1': ACTIVATE_PS,
+        }
+
+        # MSYS needs paths of the form /c/path/to/file
+        drive, tail = os.path.splitdrive(home_dir.replace(os.sep, '/'))
+        home_dir_msys = (drive and "/%s%s" or "%s%s") % (drive[:1], tail)
+
+        # Run-time conditional enables (basic) Cygwin compatibility
+        home_dir_sh = ("""$(if [ "$OSTYPE" "==" "cygwin" ]; then cygpath -u '%s'; else echo '%s'; fi;)""" %
+                       (home_dir, home_dir_msys))
+        # The sh script gets its path substituted here (shell-quoted form);
+        # the generic __VIRTUAL_ENV__ replacement below must not touch it.
+        files['activate'] = ACTIVATE_SH.replace('__VIRTUAL_ENV__', home_dir_sh)
+
+    else:
+        files = {'activate': ACTIVATE_SH}
+
+        # supplying activate.fish in addition to, not instead of, the
+        # bash script support.
+        files['activate.fish'] = ACTIVATE_FISH
+
+        # same for csh/tcsh support...
+        files['activate.csh'] = ACTIVATE_CSH
+
+    files['activate_this.py'] = ACTIVATE_THIS
+    # Python 2: decode byte paths so template substitution works on unicode.
+    if hasattr(home_dir, 'decode'):
+        home_dir = home_dir.decode(sys.getfilesystemencoding())
+    vname = os.path.basename(home_dir)
+    for name, content in files.items():
+        content = content.replace('__VIRTUAL_PROMPT__', prompt or '')
+        content = content.replace('__VIRTUAL_WINPROMPT__', prompt or '(%s)' % vname)
+        content = content.replace('__VIRTUAL_ENV__', home_dir)
+        content = content.replace('__VIRTUAL_NAME__', vname)
+        content = content.replace('__BIN_NAME__', os.path.basename(bin_dir))
+        writefile(os.path.join(bin_dir, name), content)
+
+def install_distutils(home_dir):
+    distutils_path = change_prefix(distutils.__path__[0], home_dir)
+    mkdir(distutils_path)
+    ## FIXME: maybe this prefix setting should only be put in place if
+    ## there's a local distutils.cfg with a prefix setting?
+    home_dir = os.path.abspath(home_dir)
+    ## FIXME: this is breaking things, removing for now:
+    #distutils_cfg = DISTUTILS_CFG + "\n[install]\nprefix=%s\n" % home_dir
+    writefile(os.path.join(distutils_path, '__init__.py'), DISTUTILS_INIT)
+    writefile(os.path.join(distutils_path, 'distutils.cfg'), DISTUTILS_CFG, overwrite=False)
+
+def fix_local_scheme(home_dir, symlink=True):
+    """
+    Platforms that use the "posix_local" install scheme (like Ubuntu with
+    Python 2.7) need to be given an additional "local" location, sigh.
+    """
+    try:
+        import sysconfig
+    except ImportError:
+        pass
+    else:
+        if sysconfig._get_default_scheme() == 'posix_local':
+            local_path = os.path.join(home_dir, 'local')
+            if not os.path.exists(local_path):
+                os.mkdir(local_path)
+                for subdir_name in os.listdir(home_dir):
+                    if subdir_name == 'local':
+                        continue
+                    copyfile(os.path.abspath(os.path.join(home_dir, subdir_name)), \
+                                                            os.path.join(local_path, subdir_name), symlink)
+
+def fix_lib64(lib_dir, symlink=True):
+    """
+    Some platforms (particularly Gentoo on x64) put things in lib64/pythonX.Y
+    instead of lib/pythonX.Y.  If this is such a platform we'll just create a
+    symlink so lib64 points to lib
+    """
+    # Detect a lib64 layout by scanning the interpreter's build config vars.
+    if [p for p in distutils.sysconfig.get_config_vars().values()
+        if isinstance(p, basestring) and 'lib64' in p]:
+        # PyPy's library path scheme is not affected by this.
+        # Return early or we will die on the following assert.
+        if is_pypy:
+            logger.debug('PyPy detected, skipping lib64 symlinking')
+            return
+
+        logger.debug('This system uses lib64; symlinking lib64 to lib')
+
+        # Sanity-check the expected <top>/lib/pythonX.Y layout before
+        # touching the filesystem.
+        assert os.path.basename(lib_dir) == 'python%s' % sys.version[:3], (
+            "Unexpected python lib dir: %r" % lib_dir)
+        lib_parent = os.path.dirname(lib_dir)
+        top_level = os.path.dirname(lib_parent)
+        lib_dir = os.path.join(top_level, 'lib')
+        lib64_link = os.path.join(top_level, 'lib64')
+        assert os.path.basename(lib_parent) == 'lib', (
+            "Unexpected parent dir: %r" % lib_parent)
+        # lexists: an existing (possibly dangling) lib64 entry means there is
+        # nothing left to do.
+        if os.path.lexists(lib64_link):
+            return
+        if symlink:
+            # Relative symlink: <top>/lib64 -> lib
+            os.symlink('lib', lib64_link)
+        else:
+            # NOTE(review): copies the relative path 'lib' (resolved against
+            # the CWD) rather than lib_dir -- presumably only reached where
+            # symlinks are unavailable; confirm before relying on this branch.
+            copyfile('lib', lib64_link)
+
+def resolve_interpreter(exe):
+    """
+    If the executable given isn't an absolute path, search $PATH for the interpreter
+    """
+    # If the "executable" is a version number, get the installed executable for
+    # that version
+    python_versions = get_installed_pythons()
+    if exe in python_versions:
+        exe = python_versions[exe]
+
+    if os.path.abspath(exe) != exe:
+        paths = os.environ.get('PATH', '').split(os.pathsep)
+        for path in paths:
+            if os.path.exists(os.path.join(path, exe)):
+                exe = os.path.join(path, exe)
+                break
+    if not os.path.exists(exe):
+        logger.fatal('The executable %s (from --python=%s) does not exist' % (exe, exe))
+        raise SystemExit(3)
+    if not is_executable(exe):
+        logger.fatal('The executable %s (from --python=%s) is not executable' % (exe, exe))
+        raise SystemExit(3)
+    return exe
+
+def is_executable(exe):
+    """Checks a file is executable"""
+    return os.access(exe, os.X_OK)
+
+############################################################
+## Relocating the environment:
+
+def make_environment_relocatable(home_dir):
+    """
+    Makes the already-existing environment use relative paths, and takes out
+    the #!-based environment selection in scripts.
+    """
+    home_dir, lib_dir, inc_dir, bin_dir = path_locations(home_dir)
+    activate_this = os.path.join(bin_dir, 'activate_this.py')
+    if not os.path.exists(activate_this):
+        # Note: logs at fatal level but does not raise -- the fixups below
+        # are still attempted on whatever the environment does contain.
+        logger.fatal(
+            'The environment doesn\'t have a file %s -- please re-run virtualenv '
+            'on this environment to update it' % activate_this)
+    fixup_scripts(home_dir, bin_dir)
+    fixup_pth_and_egg_link(home_dir)
+    ## FIXME: need to fix up distutils.cfg
+
+# Scripts that legitimately keep an absolute #! line (or are not scripts at
+# all); fixup_scripts() leaves these untouched.
+OK_ABS_SCRIPTS = ['python', 'python%s' % sys.version[:3],
+                  'activate', 'activate.bat', 'activate_this.py',
+                  'activate.fish', 'activate.csh']
+
+def fixup_scripts(home_dir, bin_dir):
+    """Rewrite the #! line of each script in ``bin_dir`` to a relocatable one.
+
+    Scripts whose first line is not the expected absolute shebang (or that are
+    binary, empty, or listed in OK_ABS_SCRIPTS) are left alone.
+    """
+    if is_win:
+        new_shebang_args = (
+            '%s /c' % os.path.normcase(os.environ.get('COMSPEC', 'cmd.exe')),
+            '', '.exe')
+    else:
+        new_shebang_args = ('/usr/bin/env', sys.version[:3], '')
+
+    # This is what we expect at the top of scripts:
+    shebang = '#!%s' % os.path.normcase(os.path.join(
+        os.path.abspath(bin_dir), 'python%s' % new_shebang_args[2]))
+    # This is what we'll put:
+    new_shebang = '#!%s python%s%s' % new_shebang_args
+
+    for filename in os.listdir(bin_dir):
+        filename = os.path.join(bin_dir, filename)
+        if not os.path.isfile(filename):
+            # ignore subdirs, e.g. .svn ones.
+            continue
+        f = open(filename, 'rb')
+        try:
+            try:
+                lines = f.read().decode('utf-8').splitlines()
+            except UnicodeDecodeError:
+                # This is probably a binary program instead
+                # of a script, so just ignore it.
+                continue
+        finally:
+            f.close()
+        if not lines:
+            logger.warn('Script %s is an empty file' % filename)
+            continue
+
+        # Normalize case after the '#!' so the comparison works on
+        # case-insensitive filesystems.
+        old_shebang = lines[0].strip()
+        old_shebang = old_shebang[0:2] + os.path.normcase(old_shebang[2:])
+
+        if not old_shebang.startswith(shebang):
+            if os.path.basename(filename) in OK_ABS_SCRIPTS:
+                logger.debug('Cannot make script %s relative' % filename)
+            elif lines[0].strip() == new_shebang:
+                logger.info('Script %s has already been made relative' % filename)
+            else:
+                logger.warn('Script %s cannot be made relative (it\'s not a normal script that starts with %s)'
+                            % (filename, shebang))
+            continue
+        logger.notify('Making script %s relative' % filename)
+        # relative_script() also splices in the activate_this.py bootstrap.
+        script = relative_script([new_shebang] + lines[1:])
+        f = open(filename, 'wb')
+        f.write('\n'.join(script).encode('utf-8'))
+        f.close()
+
+def relative_script(lines):
+    "Return a script that'll work in a relocatable environment."
+    activate = "import os; activate_this=os.path.join(os.path.dirname(os.path.realpath(__file__)), 'activate_this.py'); exec(compile(open(activate_this).read(), activate_this, 'exec'), dict(__file__=activate_this)); del os, activate_this"
+    # Find the last future statement in the script. If we insert the activation
+    # line before a future statement, Python will raise a SyntaxError.
+    activate_at = None
+    for idx, line in reversed(list(enumerate(lines))):
+        if line.split()[:3] == ['from', '__future__', 'import']:
+            activate_at = idx + 1
+            break
+    if activate_at is None:
+        # Activate after the shebang.
+        activate_at = 1
+    return lines[:activate_at] + ['', activate, ''] + lines[activate_at:]
+
+def fixup_pth_and_egg_link(home_dir, sys_path=None):
+    """Makes .pth and .egg-link files use relative paths"""
+    home_dir = os.path.normcase(os.path.abspath(home_dir))
+    if sys_path is None:
+        sys_path = sys.path
+    for path in sys_path:
+        if not path:
+            path = '.'
+        if not os.path.isdir(path):
+            continue
+        path = os.path.normcase(os.path.abspath(path))
+        if not path.startswith(home_dir):
+            logger.debug('Skipping system (non-environment) directory %s' % path)
+            continue
+        for filename in os.listdir(path):
+            filename = os.path.join(path, filename)
+            if filename.endswith('.pth'):
+                if not os.access(filename, os.W_OK):
+                    logger.warn('Cannot write .pth file %s, skipping' % filename)
+                else:
+                    fixup_pth_file(filename)
+            if filename.endswith('.egg-link'):
+                if not os.access(filename, os.W_OK):
+                    logger.warn('Cannot write .egg-link file %s, skipping' % filename)
+                else:
+                    fixup_egg_link(filename)
+
+def fixup_pth_file(filename):
+    lines = []
+    prev_lines = []
+    f = open(filename)
+    prev_lines = f.readlines()
+    f.close()
+    for line in prev_lines:
+        line = line.strip()
+        if (not line or line.startswith('#') or line.startswith('import ')
+            or os.path.abspath(line) != line):
+            lines.append(line)
+        else:
+            new_value = make_relative_path(filename, line)
+            if line != new_value:
+                logger.debug('Rewriting path %s as %s (in %s)' % (line, new_value, filename))
+            lines.append(new_value)
+    if lines == prev_lines:
+        logger.info('No changes to .pth file %s' % filename)
+        return
+    logger.notify('Making paths in .pth file %s relative' % filename)
+    f = open(filename, 'w')
+    f.write('\n'.join(lines) + '\n')
+    f.close()
+
+def fixup_egg_link(filename):
+    f = open(filename)
+    link = f.readline().strip()
+    f.close()
+    if os.path.abspath(link) != link:
+        logger.debug('Link in %s already relative' % filename)
+        return
+    new_link = make_relative_path(filename, link)
+    logger.notify('Rewriting link %s in %s as %s' % (link, filename, new_link))
+    f = open(filename, 'w')
+    f.write(new_link)
+    f.close()
+
+def make_relative_path(source, dest, dest_is_directory=True):
+    """
+    Make a filename relative, where the filename is dest, and it is
+    being referred to from the filename source.
+
+        >>> make_relative_path('/usr/share/something/a-file.pth',
+        ...                    '/usr/share/another-place/src/Directory')
+        '../another-place/src/Directory'
+        >>> make_relative_path('/usr/share/something/a-file.pth',
+        ...                    '/home/user/src/Directory')
+        '../../../home/user/src/Directory'
+        >>> make_relative_path('/usr/share/a-file.pth', '/usr/share/')
+        './'
+    """
+    source = os.path.dirname(source)
+    if not dest_is_directory:
+        dest_filename = os.path.basename(dest)
+        dest = os.path.dirname(dest)
+    dest = os.path.normpath(os.path.abspath(dest))
+    source = os.path.normpath(os.path.abspath(source))
+    dest_parts = dest.strip(os.path.sep).split(os.path.sep)
+    source_parts = source.strip(os.path.sep).split(os.path.sep)
+    while dest_parts and source_parts and dest_parts[0] == source_parts[0]:
+        dest_parts.pop(0)
+        source_parts.pop(0)
+    full_parts = ['..']*len(source_parts) + dest_parts
+    if not dest_is_directory:
+        full_parts.append(dest_filename)
+    if not full_parts:
+        # Special case for the current directory (otherwise it'd be '')
+        return './'
+    return os.path.sep.join(full_parts)
+
+
+
+############################################################
+## Bootstrap script creation:
+
+def create_bootstrap_script(extra_text, python_version=''):
+    """
+    Creates a bootstrap script, which is like this script but with
+    extend_parser, adjust_options, and after_install hooks.
+
+    This returns a string that (written to disk of course) can be used
+    as a bootstrap script with your own customizations.  The script
+    will be the standard virtualenv.py script, with your extra text
+    added (your extra text should be Python code).
+
+    If you include these functions, they will be called:
+
+    ``extend_parser(optparse_parser)``:
+        You can add or remove options from the parser here.
+
+    ``adjust_options(options, args)``:
+        You can change options here, or change the args (if you accept
+        different kinds of arguments, be sure you modify ``args`` so it is
+        only ``[DEST_DIR]``).
+
+    ``after_install(options, home_dir)``:
+
+        After everything is installed, this function is called.  This
+        is probably the function you are most likely to use.  An
+        example would be::
+
+            def after_install(options, home_dir):
+                subprocess.call([join(home_dir, 'bin', 'easy_install'),
+                                 'MyPackage'])
+                subprocess.call([join(home_dir, 'bin', 'my-package-script'),
+                                 'setup', home_dir])
+
+        This example immediately installs a package, and runs a setup
+        script from that package.
+
+    If you provide something like ``python_version='2.5'`` then the
+    script will start with ``#!/usr/bin/env python2.5`` instead of
+    ``#!/usr/bin/env python``.  You can use this when the script must
+    be run with a particular Python version.
+    """
+    filename = __file__
+    # If we were loaded from a .pyc, read the matching .py source instead.
+    if filename.endswith('.pyc'):
+        filename = filename[:-1]
+    f = codecs.open(filename, 'r', encoding='utf-8')
+    content = f.read()
+    f.close()
+    py_exe = 'python%s' % python_version
+    content = (('#!/usr/bin/env %s\n' % py_exe)
+               + '## WARNING: This file is generated\n'
+               + content)
+    # The marker is spelled as two adjacent string literals so that this very
+    # occurrence is not itself replaced; only the standalone marker comment
+    # further down the file gets substituted with extra_text.
+    return content.replace('##EXT' 'END##', extra_text)
+
+##EXTEND##
+
+def convert(s):
+    b = base64.b64decode(s.encode('ascii'))
+    return zlib.decompress(b).decode('utf-8')
+
+##file site.py
+SITE_PY = convert("""
+eJzFPf1z2zaWv/OvwMqToZTKdOJ0e3tO3RsncVrfuYm3yc7m1vXoKAmyWFMkS5C2tTd3f/u9DwAE
++CHb2+6cphNLJPDw8PC+8PAeOhqNTopCZkuxyZd1KoWScblYiyKu1kqs8lJU66Rc7hdxWW3h6eIm
+vpZKVLlQWxVhqygInv/GT/BcfF4nyqAA3+K6yjdxlSziNN2KZFPkZSWXYlmXSXYtkiypkjhN/g4t
+8iwSz387BsFZJmDmaSJLcStLBXCVyFfiYlut80yM6wLn/DL6Y/xqMhVqUSZFBQ1KjTNQZB1XQSbl
+EtCElrUCUiaV3FeFXCSrZGEb3uV1uhRFGi+k+K//4qlR0zAMVL6Rd2tZSpEBMgBTAqwC8YCvSSkW
++VJGQryRixgH4OcNsQKGNsU1U0jGLBdpnl3DnDK5kErF5VaM53VFgAhlscwBpwQwqJI0De7y8kZN
+YElpPe7gkYiZPfzJMHvAPHH8LucAjh+z4C9Zcj9l2MA9CK5aM9uUcpXcixjBwk95Lxcz/WycrMQy
+Wa2ABlk1wSYBI6BEmswPClqOb/UKfXdAWFmujGEMiShzY35JPaLgrBJxqoBt6wJppAjzd3KexBlQ
+I7uF4QAikDToG2eZqMqOQ7MTOQAocR0rkJKNEuNNnGTArD/GC0L7r0m2zO/UhCgAq6XEL7Wq3PmP
+ewgArR0CTANcLLOadZYmNzLdTgCBz4B9KVWdVigQy6SUiyovE6kIAKC2FfIekJ6KuJSahMyZRm6n
+RH+iSZLhwqKAocDjSyTJKrmuS5IwsUqAc4Er3n/8Sbw7fXN28kHzmAHGMnu9AZwBCi20gxMMIA5q
+VR6kOQh0FJzjHxEvlyhk1zg+4NU0OHhwpYMxzL2I2n2cBQey68XVw8AcK1AmNFZA/f4bukzVGujz
+Pw+sdxCcDFGFJs7f7tY5yGQWb6RYx8xfyBnBtxrOd1FRrV8DNyiEUwGpFC4OIpggPCCJS7NxnklR
+AIulSSYnAVBoTm39VQRW+JBn+7TWLU4ACGWQwUvn2YRGzCRMtAvrNeoL03hLM9NNArvOm7wkxQH8
+ny1IF6VxdkM4KmIo/jaX10mWIULIC0G4F9LA6iYBTlxG4pxakV4wjUTI2otbokjUwEvIdMCT8j7e
+FKmcsviibt2tRmgwWQmz1ilzHLSsSL3SqjVT7eW9w+hLi+sIzWpdSgBezz2hW+X5VMxBZxM2Rbxh
+8arucuKcoEeeqBPyBLWEvvgdKHqiVL2R9iXyCmgWYqhgladpfgckOwoCIfawkTHKPnPCW3gH/wJc
+/DeV1WIdBM5IFrAGhcgPgUIgYBJkprlaI+Fxm2bltpJJMtYUebmUJQ31OGIfMOKPbIxzDT7klTZq
+PF1c5XyTVKiS5tpkJmzxsrBi/fia5w3TAMutiGamaUOnDU4vLdbxXBqXZC5XKAl6kV7bZYcxg54x
+yRZXYsNWBt4BWWTCFqRfsaDSWVWSnACAwcIXZ0lRp9RIIYOJGAbaFAR/E6NJz7WzBOzNZjlAhcTm
+ewH2B3D7O4jR3ToB+iwAAmgY1FKwfPOkKtFBaPRR4Bt905/HB049W2nbxEOu4iTVVj7OgjN6eFqW
+JL4LWWCvqSaGghlmFbp21xnQEcV8NBoFgXGHtsp8zVVQldsjYAVhxpnN5nWChm82Q1Ovf6iARxHO
+wF43287CAw1hOn0AKjldVmW+wdd2bp9AmcBY2CPYExekZSQ7yB4nvkbyuSq9ME3RdjvsLFAPBRc/
+nb4/+3L6SRyLy0alTdv67ArGPM1iYGuyCMBUrWEbXQYtUfElqPvEezDvxBRgz6g3ia+Mqxp4F1D/
+XNb0Gqax8F4Gpx9O3pyfzv7y6fSn2aezz6eAINgZGezRlNE81uAwqgiEA7hyqSJtX4NOD3rw5uST
+fRDMEjX75mtgN3gyvpYVMHE5hhlPRbiJ7xUwaDilphPEsdMALHg4mYjvxOHz568OCVqxLbYADMyu
+0xQfzrRFnyXZKg8n1PgXdumPWUlp/+3y6OsrcXwswl/i2zgMwIdqmjJL/Eji9HlbSOhawZ9xriZB
+sJQrEL0biQI6fk5+8YQ7wJJAy1zb6V/yJDPvmSvdIUh/jKkH4DCbLdJYKWw8m4VABOrQ84EOETvX
+KHVj6Fhs3a4TjQp+SgkLm2GXKf7Tg2I8p36IBqPodjGNQFw3i1hJbkXTh36zGeqs2WysBwRhJokB
+h4vVUChME9RZZQJ+LXEe6rC5ylP8ifBRC5AA4tYKtSQukt46RbdxWks1diYFRByPW2RERZso4kdw
+UcZgiZulm0za1DQ8A82AfGkOWrRsUQ4/e+DvgLoymzjc6PHei2mGmP477zQIB3A5Q1T3SrWgsHYU
+F6cX4tWLw310Z2DPubTU8ZqjhU6yWtqHK1gtIw+MMPcy8uLSZYV6Fp8e7Ya5iezKdFlhpZe4lJv8
+Vi4BW2RgZ5XFT/QGduYwj0UMqwh6nfwBVqHGb4xxH8qzB2lB3wGotyEoZv3N0u9xMEBmChQRb6yJ
+1HrXz6awKPPbBJ2N+Va/BFsJyhItpnFsAmfhPCZDkwgaArzgDCl1J0NQh2XNDivhjSDRXiwbxRoR
+uHPU1Ff09SbL77IZ74SPUemOJ5Z1UbA082KDZgn2xHuwQoBkDhu7hmgMBVx+gbK1D8jD9GG6QFna
+WwAgMPSKtmsOLLPVoynyrhGHRRiT14KEt5ToL9yaIWirZYjhQKK3kX1gtARCgslZBWdVg2YylDXT
+DAZ2SOJz3XnEW1AfQIuKEZjNsYbGjQz9Lo9AOYtzVyk5/dAif/nyhdlGrSm+gojNcdLoQqzIWEbF
+FgxrAjrBeGQcrSE2uAPnFsDUSrOm2P8k8oK9MVjPCy3b4AfA7q6qiqODg7u7u0hHF/Ly+kCtDv74
+p2+++dML1onLJfEPTMeRFh1qiw7oHXq00bfGAn1nVq7Fj0nmcyPBGkvyysgVRfy+r5NlLo72J1Z/
+Ihc3Zhr/Na4MKJCZGZSpDLQdNRg9U/vPoldqJJ6RdbZtxxP2S7RJtVbMt7rQo8rBEwC/ZZHXaKob
+TlDiK7BusENfynl9HdrBPRtpfsBUUU7Hlgf2X14hBj5nGL4ypniGWoLYAi2+Q/qfmG1i8o60hkDy
+oonq7J63/VrMEHf5eHm3vqYjNGaGiULuQInwmzxaAG3jruTgR7u2aPcc19Z8PENgLH1gmFc7lmMU
+HMIF12LqSp3D1ejxgjTdsWoGBeOqRlDQ4CTOmdoaHNnIEEGid2M2+7ywugXQqRU5NPEBswrQwh2n
+Y+3arOB4QsgDx+IlPZHgIh913r3gpa3TlAI6LR71qMKAvYVGO50DX44NgKkYlX8ZcUuzTfnYWhRe
+gx5gOceAkMFWHWbCN64PONob9bBTx+oP9WYa94HARRpzLOpR0AnlYx6hVCBNxdjvOcTilrjdwXZa
+HGIqs0wk0mpAuNrKo1eodhqmVZKh7nUWKVqkOXjFVisSIzXvfWeB9kH4uM+YaQnUZGjI4TQ6Jm/P
+E8BQt8Pw2XWNgQY3DoMYbRJF1g3JtIZ/wK2g+AYFo4CWBM2CeayU+RP7HWTOzld/GWAPS2hkCLfp
+kBvSsRgajnm/J5CMOhoDUpABCbvCSK4jq4MUOMxZIE+44bUclG6CESmQM8eCkJoB3Omlt8HBJxGe
+gJCEIuT7SslCfCVGsHxtUX2c7v5dudQEIcZOA3IVdPTi2I1sOFGN41aUw2doP75BZyVFDhw8B5fH
+DfS7bG6Y1gZdwFn3FbdFCjQyxWFGExfVK0MYN5j8h2OnRUMsM4hhKG8g70jHjDQJ7HJr0LDgBoy3
+5u2x9GM3YoF9x2GuDuXmHvZ/YZmoRa5Cipm0YxfuR3NFlzYW2/NkPoI/3gKMJlceJJnq+AVGWf6B
+QUIPetgH3ZsshkWWcXmXZCEpME2/Y39pOnhYUnpG7uATbacOYKIY8Tx4X4KA0NHnAYgTagLYlctQ
+abe/C3bnFEcWLncfeW7z5dGrqy5xp0MRHvvpX6rT+6qMFa5WyovGQoGr1TXgqHRhcnG21YeX+nAb
+twllrmAXKT5++iKQEBzXvYu3T5t6w/CIzYNz8j4GddBrD5KrNTtiF0AEtSIyykH4dI58PLJPndyO
+iT0ByJMYZseiGEiaT/4ROLsWCsbYX24zjKO1VQZ+4PU3X896IqMukt98PXpglBYx+sR+3PIE7cic
+VLBrtqWMU3I1nD4UVMwa1rFtignrc9r+aR676vE5NVo29t3fAj8GCobUJfgIL6YN2bpTxY/vTg3C
+03ZqB7DObtV89mgRYG+fz3+BHbLSQbXbOEnpXAEmv7+PytVs7jle0a89PEg7FYxDgr79l7p8AdwQ
+cjRh0p2OdsZOTMC5ZxdsPkWsuqjs6RyC5gjMywtwjz+HFU6ve+B7Bge/r7p8IiBvTqMeMmpbbIZ4
+wQclhz1K9gnzfvqMf9dZP27mw4L1/zHLF/+cST5hKgaaNh4+rH5iuXbXAHuEeRpwO3e4hd2h+axy
+ZZw7VklKPEfd9VzcUboCxVbxpAigLNnv64GDUqoPvd/WZclH16QCC1nu43HsVGCmlvH8ek3Mnjj4
+ICvExDZbUKzayevJ+4Qv1NFnO5Ow2Tf0c+c6NzErmd0mJfQFhTsOf/j442nYb0IwjgudHm9FHu83
+INwnMG6oiRM+pQ9T6Cld/nH10d66+AQ1GQEmIqzJ1iVsJxBs4gj9a/BARMg7sOVjdtyhL9ZycTOT
+lDqAbIpdnaD4W3yNmNiMAj//S8UrSmKDmSzSGmnFjjdmH67qbEHnI5UE/0qnCmPqECUEcPhvlcbX
+Ykydlxh60txI0anbuNTeZ1HmmJwq6mR5cJ0shfy1jlPc1svVCnDBwyv9KuLhKQIl3nFOAyctKrmo
+y6TaAglileuzP0p/cBrOtzzRsYckH/MwATEh4kh8wmnjeybc0pDLBAf8Ew+cJO67sYOTrBDRc3if
+5TMcdUY5vlNGqnsuT4+D9gg5ABgBUJj/aKIjd/4bSa/cA0Zac5eoqCU9UrqRhpycMYQynmCkg3/T
+T58RXd4awPJ6GMvr3Vhet7G87sXy2sfyejeWrkjgwtqglZGEvsBV+1ijN9/GjTnxMKfxYs3tMPcT
+czwBoijMBtvIFKdAe5EtPt8jIKS2nQNnetjkzyScVFrmHALXIJH78RBLb+ZN8rrTmbJxdGeeinFn
+h3KI/L4HUUSpYnPqzvK2jKs48uTiOs3nILYW3WkDYCra6UQcK81uZ3OO7rYs1ejiPz//8PEDNkdQ
+I5PeQN1wEdGw4FTGz+PyWnWlqdn8FcCO1NJPxKFuGuDeIyNrPMoe//OOMjyQccQdZSjkogAPgLK6
+bDM39ykMW891kpR+zkzOh03HYpRVo2ZSA0Q6ubh4d/L5ZEQhv9H/jlyBMbT1pcPFx7SwDbr+m9vc
+Uhz7gFDr2FZj/Nw5ebRuOOJhG2vAdjzf1oPDxxjs3jCBP8t/KqVgSYBQkQ7+PoVQj945/Kb9UIc+
+hhE7yX/uyRo7K/adI3uOi+KIft+xQ3sA/7AT9xgzIIB2ocZmZ9DslVtK35rXHRR1gD7S1/vNe832
+1qu9k/EpaifR4wA6lLXNht0/75yGjZ6S1ZvT788+nJ+9uTj5/IPjAqIr9/HTwaE4/fGLoPwQNGDs
+E8WYGlFhJhIYFrfQSSxz+K/GyM+yrjhIDL3enZ/rk5oNlrpg7jPanAiecxqThcZBM45C24c6/wgx
+SvUGyakponQdqjnC/dKG61lUrvOjqVRpjs5qrbdeulbM1JTRuXYE0geNXVIwCE4xg1eUxV6ZXWHJ
+J4C6zqoHKW2jbWJISkHBTrqAc/5lTle8QCl1hidNZ63oL0MX1/AqUkWawE7udWhlSXfD9JiGcfRD
+e8DNePVpQKc7jKwb8qwHsUCr9Trkuen+k4bRfq0Bw4bB3sG8M0npIZSBjcltIsRGfJITynv4apde
+r4GCBcODvgoX0TBdArOPYXMt1glsIIAn12B9cZ8AEFor4R8IHDnRAZljdkb4drPc/3OoCeK3/vnn
+nuZVme7/TRSwCxKcShT2ENNt/A42PpGMxOnH95OQkaPUXPHnGssDwCGhAKgj7ZS/xCfos7GS6Urn
+l/j6AF9oP4Fet7qXsih1937XOEQJeKbG5DU8U4Z+IaZ7WdhTnMqkBRorHyxmWEHopiGYz574tJZp
+qvPdz96dn4LviMUYKEF87nYKw3G8BI/QdfIdVzi2QOEBO7wukY1LdGEpyWIZec16g9YoctTby8uw
+60SB4W6vThS4jBPloj3GaTMsU04QISvDWphlZdZutUEKu22I4igzzBKzi5ISWH2eAF6mpzFviWCv
+hKUeJgLPp8hJVpmMxTRZgB4FlQsKdQpCgsTFekbivDzjGHheKlMGBQ+LbZlcrys83YDOEZVgYPMf
+T76cn32gsoTDV43X3cOcU9oJTDmJ5BhTBDHaAV/ctD/kqtmsj2f1K4SB2gf+tF9xdsoxD9Dpx4FF
+/NN+xXVox85OkGcACqou2uKBGwCnW5/cNLLAuNp9MH7cFMAGMx8MxSKx7EUnerjz63KibdkyJRT3
+MS+fcICzKmxKmu7spqS1P3qOqwLPuZbj/kbwtk+2zGcOXW86b4aS39xPRwqxJBYw6rb2xzDZYZ2m
+ejoOsw1xC21rtY39OXNipU67RYaiDEQcu50nLpP1K2HdnDnQS6PuABPfanSNJPaq8tHP2Uh7GB4m
+ltidfYrpSGUsZAQwkiF17U8NPhRaBFAglP07diR3Onl+6M3RsQYPz1HrLrCNP4Ai1Lm4VOORl8CJ
+8OVXdhz5FaGFevRIhI6nkskst3li+Llbo1f50p9jrwxQEBPFroyzazlmWFMD8yuf2AMhWNK2Hqkv
+k6s+wyLOwDm9H+Dwrlz0H5wY1FqM0Gl3I7dtdeSTBxv0loLsJJgPvozvQPcXdTXmlRw4h+6tpRuG
++jBEzD6Epvr0fRxiOObXcGB9GsC91NCw0MP7deDsktfGOLLWPraqmkL7QnuwixK2ZpWiYxmnONH4
+otYLaAzucWPyR/apThSyv3vqxJyYkAXKg7sgvbmNdINWOGHE5UpcOZpQOnxTTaPfLeWtTMFogJEd
+Y7XDL7baYRLZcEpvHthvxu5ie7Htx43eNJgdmXIMRIAKMXoDPbsQanDAFf5Z70Ti7Iac47d/PZuK
+tx9+gn/fyI9gQbHmcSr+BqOLt3kJ20ou2qXbFLCAo+L9Yl4rLIwkaHRCwRdPoLd24ZEXT0N0ZYlf
+UmIVpMBk2nLDt50AijxBKmRv3ANTLwG/TUFXywk1DmLfWoz0S6TBcI0L1oUc6JbRutqkaCac4Eiz
+iJej87O3px8+nUbVPTK2+Tlygid+HhZORx8Nl3gMNhX2yaLGJ1eOv/yDTIsed1nvNU29DO41RQjb
+kcLuL/kmjdjuKeISAwai2C7zRYQtgdO5RK+6A/954mwrH7TvnnFFWOOJPjxrnHh8DNQQP7f1zwga
+Uh89J+pJCMVzrBXjx9Go3wJPBUW04c/zm7ulGxDXRT80wTamzazHfnerAtdMZw3PchLhdWyXwdSB
+pkmsNvOFWx/4MRP6IhRQbnS8IVdxnVZCZrCVor093UgBCt4t6WMJYVZhK0Z1bhSdSe/irXJyj2Il
+RjjqiIrq8RyGAoWw9f4xvmEzgLWGouYSaIBOiNK2KXe6qnqxZgnmnRBRryff4C7JXrnJL5rCPChv
+jBeN/wrzRG+RMbqWlZ4/PxhPLl82CQ4UjF54Bb2LAoydyyZ7oDGL58+fj8S/Pez0MCpRmuc34I0B
+7F5n5ZxeDxhsPTm7Wl2H3ryJgB8Xa3kJD64oaG6f1xlFJHd0pQWR9q+BEeLahJYZTfuWOeZYXcnn
+y9yCz6m0wfhLltB1RxhRkqhs9a1RGG0y0kQsCYohjNUiSUKOTsB6bPMaa/Ewuqj5Rd4DxycIZopv
+8WCMd9hrdCwpb9Zyj0XnWIwI8IhSyng0KmamajTAc3ax1WjOzrKkaspIXrhnpvoKgMreYqT5SsR3
+KBlmHi1iOGWdHqs2jnW+k0W9jUq+uHTjjK1Z8uuHcAfWBknLVyuDKTw0i7TIZbkw5hRXLFkklQPG
+tEM43JkubyLrEwU9KI1AvZNVWFqJtm//YNfFxfQjHR/vm5F01lBlL8TimFCctfIKo6gZn6JPlpCW
+b82XCYzygaLZ2hPwxhJ/0LFUrCHw7u1wyxnrTN/HwWkbzSUdAIfugLIK0rKjpyOci8csfGbagVs0
+8EM7c8LtNimrOk5n+tqHGfppM3uervG0ZXA7CzyttwK+fQ6O777O2AfHwSTXID0x49ZUZByLlY5M
+RG5lmV+EVeTo5R2yrwQ+BVJmOTP10CZ2dGnZ1Raa6gRHR8UjqK9M8dKAQ26qZjoFJy7mU0pvMuUO
+A86zn29JV1eI78T41VQctnY+i2KLNzkBss+Woe+KUTeYihMMMHNs34shvjsW45dT8ccd0KOBAY4O
+3RHa+9gWhEEgr66eTMY0mRPZwr4U9of76hxG0PSM4+SqTf4umb4lKv1ri0pcIagTlV+2E5VbYw/u
+WzsfH8lwA4pjlcjl/jOFJNRIN7p5mMEJPyyg37M5Wrp2vKmoocK5OWxG7ho96GhE4zbbQUxRulZf
+XL+LuoYNp71zwKTJtFIV7S1zmMao0WsRFQDM+o7S8Bve7QLvNSlc/2zwiFUXAViwPREEXenJB2ZN
+w0ZQH3QEn6QBHmAUEeJhaqMoXMl6goiEdA8OMdFXrUNsh+N/d+bhEoOho9AOlt98vQtPVzB7izp6
+FnR3pYUnsra8ollu8+kPzHmM0tf1NwmMA6URHXBWzVWV5GYeYfYy30GT2yzmDV4GSSfTaBJT6bpN
+vJXmW7/Qj6HYASWTwVqAJ1Wv8CD5lu62PFGU9IZX1Hx9+HJqKoMZkJ7Aq+jVV/oKSOpmLj/wfeyp
+3rvBS93vMPoXB1hS+b3tq85uhqZ13LoLyh8spOjZJJpZOjSG6eE6kGbNYoF3JjbEZN/aXgDyHryd
+Ofg55vLTHBw22JBGfei6GqOR3iHVNiDAD5uMIcl5VNdGkSLSu4RtSHnuUpxPFgXdq9+CYAgBOX8d
+8xt0BeviyIbYjE3Bk8+xm82Jn+qmt+6M7Qka2+om3DV97r9r7rpFYGdukhk6c/frS10a6L7DVrSP
+Bhze0IR4VIlEo/H7jYlrB6Y6h6Y/Qq8/SH63E850wKw8BMZk7GC8n9hTY2/M/iZeuN8xIWyfL2R2
+y4l7nY3WtDs2o83xj/EUOPkFn9sbBiijaak5kPdLdMPejHNkZ/L6Ws1ivN1xRptsyufq7J7Mtu09
+Xc4nY7U1uy28tAhAGG7Smbducj0wBuhKvmWa06Gc22kEDU1Jw04WskqWbBL01g7ARRwxpf4mEM9p
+xKNUYqBb1WVRwm54pO8i5jydvtTmBqgJ4G1idWNQNz2m+mpaUqyUHGZKkDlO20ryASKwEe+YhtnM
+vgNeedFcs5BMLTPIrN7IMq6aK4b8jIAENl3NCFR0jovrhOcaqWxxiYtYYnnDQQoDZPb7V7Cx9DbV
+O+5VmFht93h2oh465PuUKxscY2S4OLm31wu611ot6Wpr1zu0zRqus1cqwTKYu/JIR+pYGb/V93fx
+HbMcyUf/0uEfkHe38tLPQrfqjL1bi4bzzFUI3Qub8MYAMs599zB2OKB742JrA2zH9/WFZZSOhznQ
+2FJR++S9CqcZbdJEkDBh9IEIkl8U8MQIkgf/kREkfWsmGBqNj9YDvWUCD4SaWD24V1A2jAB9ZkAk
+PMBuXWBoTOXYTbovcpXcj+yF0qwrnUo+Yx6QI7t3kxEIvmpSuRnK3lVwuyJIvnTR4+/PP745OSda
+zC5O3v7HyfeUlIXHJS1b9egQW5bvM7X3vfRvN9ymE2n6Bm+w7bkhlmuYNITO+04OQg+E/nq1vgVt
+KzL39VCHTt1PtxMgvnvaLahDKrsXcscv0zUmbvpMK0870E85qdb8cjITzCNzUsfi0JzEmffN4YmW
+0U5seWjhnPTWrjrR/qq+BXQg7j2xSda0Anhmgvxlj0xMxYwNzLOD0v7ffFBmOFYbmht0QAoX0rnJ
+kS5xZFCV//8TKUHZxbi3Y0dxau/mpnZ8PKTspfN49ruQkSGIV+436s7PFfalTAeoEASs8PQ9hYyI
+0X/6QNWmHzxT4nKfCov3Udlc2V+4Ztq5/WuCSQaVve9LcYISH7NC41WduokDtk+nAzl9dBqVr5xK
+FtB8B0DnRjwVsDf6S6wQ51sRwsZRu2SYHEt01Jf1Ocij3XSwN7R6IfaHyk7dskshXg43XLYqO3WP
+Q+6hHuihalPc51hgzNIcqicV3xFkPs4UdMGX53zgGbre9sPX28uXR/ZwAfkdXzuKhLLJRo5hv3Sy
+MXdeKul0J2Ypp5Suh3s1JySsW1w5UNknGNrbdEpSBvY/Js+BIY289/0hM9PDu3p/1MbUst4RTEmM
+n6kJTcsp4tG42yeT7nQbtdUFwgVJjwDSUYEAC8F0dKOTILrlLO/xC70bnNd0Ha97whQ6UkHJYj5H
+cA/j+zX4tbtTIfGjujOKpj83aHOgXnIQbvYduNXEC4UMm4T21Bs+GHABuCa7v//LR/TvpjHa7oe7
+/Grb6lVvHSD7spj5iplBLRKZxxEYGdCbY9LWWC5hBB2voWno6DJUMzfkC3T8KJsWL9umDQY5szPt
+AVijEPwfucjncQ==
+""")
+
+##file activate.sh
+ACTIVATE_SH = convert("""
+eJytVVFvokAQfudXTLEPtTlLeo9tvMSmJpq02hSvl7u2wRUG2QR2DSxSe7n/frOACEVNLlceRHa+
+nfl25pvZDswCnoDPQ4QoTRQsENIEPci4CsBMZBq7CAsuLOYqvmYKTTj3YxnBgiXBudGBjUzBZUJI
+BXEqgCvweIyuCjeG4eF2F5x14bcB9KQiQQWrjSddI1/oQIx6SYYeoFjzWIoIhYI1izlbhJjkKO7D
+M/QEmKfO9O7WeRo/zr4P7pyHwWxkwitcgwpQ5Ej96OX+PmiFwLeVjFUOrNYKaq1Nud3nR2n8nI2m
+k9H0friPTGVsUdptaxGrTEfpNVFEskxpXtUkkCkl1UNF9cgLBkx48J4EXyALuBtAwNYIjF5kcmUU
+abMKmMq1ULoiRbgsDEkTSsKSGFCJ6Z8vY/2xYiSacmtyAfCDdCNTVZoVF8vSTQOoEwSnOrngBkws
+MYGMBMg8/bMBLSYKS7pYEXP0PqT+ZmBT0Xuy+Pplj5yn4aM9nk72JD8/Wi+Gr98sD9eWSMOwkapD
+BbUv91XSvmyVkICt2tmXR4tWmrcUCsjWOpw87YidEC8i0gdTSOFhouJUNxR+4NYBG0MftoCTD9F7
+2rTtxG3oPwY1b2HncYwhrlmj6Wq924xtGDWqfdNxap+OYxplEurnMVo9RWks+rH8qKEtx7kZT5zJ
+4H7oOFclrN6uFe+d+nW2aIUsSgs/42EIPuOhXq+jEo3S6tX6w2ilNkDnIpHCWdEQhFgwj9pkk7FN
+l/y5eQvRSIQ5+TrL05lewxWpt/Lbhes5cJF3mLET1MGhcKCF+40tNWnUulxrpojwDo2sObdje3Bz
+N3QeHqf3D7OjEXMVV8LN3ZlvuzoWHqiUcNKHtwNd0IbvPGKYYM31nPKCgkUILw3KL+Y8l7aO1ArS
+Ad37nIU0fCj5NE5gQCuC5sOSu+UdI2NeXg/lFkQIlFpdWVaWZRfvqGiirC9o6liJ9FXGYrSY9mI1
+D/Ncozgn13vJvsznr7DnkJWXsyMH7e42ljdJ+aqNDF1bFnKWFLdj31xtaJYK6EXFgqmV/ymD/ROG
++n8O9H8f5vsGOWXsL1+1k3g=
+""")
+
+##file activate.fish
+ACTIVATE_FISH = convert("""
+eJydVW2P2jgQ/s6vmAZQoVpA9/WkqqJaTou0u6x2uZVOVWWZZEKsS+yc7UDpr+84bziQbauLxEvs
+eXnsZ56ZIWwTYSAWKUJWGAs7hMJgBEdhEwiMKnSIsBNywUMrDtziPBYmCeBDrFUG7v8HmCTW5n8u
+Fu7NJJim81Bl08EQTqqAkEupLOhCgrAQCY2hTU+DQVxIiqgkRNiEBphFEKy+kd1BaFvwFOUBuIxA
+oy20BKtAKp3xFMo0QNtCK5mhtMEA6BmSpUELKo38TThwLfguRVNaiRgs0llnEoIR29zfstf18/bv
+5T17Wm7vAiiN3ONCzfbfwC3DtWXXDqHfAGX0q6z/bO82j3ebh1VwnbrduwTQbvwcRtesAfMGor/W
+L3fs6Xnz8LRlm9fV8/P61sM0LDNwCZjl9gSpCokJRzpryGQ5t8kNGFUt51QjOZGu0Mj35FlYlXEr
+yC09EVOp4lEXfF84Lz1qbhBsgl59vDedXI3rTV03xipduSgt9kLytI3XmBp3aV6MPoMQGNUU62T6
+uQdeefTy1Hfj10zVHg2pq8fXDoHBiOv94csfXwN49xECqWREy7pwukKfvxdMY2j23vXDPuuxxeE+
+JOdCOhxCE3N44B1ZeSLuZh8Mmkr2wEPAmPfKWHA2uxIRjEopdbQYjDz3BWOf14/scfmwoki1eQvX
+ExBdF60Mqh+Y/QcX4uiH4Amwzx79KOVFtbL63sXJbtcvy8/3q5rupmO5CnE91wBviQAhjUUegYpL
+vVEbpLt2/W+PklRgq5Ku6mp+rpMhhCo/lXthQTxJ2ysO4Ka0ad97S7VT/n6YXus6fzk3fLnBZW5C
+KDC6gSO62QDqgFqLCCtPmjegjnLeAdArtSE8VYGbAJ/aLb+vnQutFhk768E9uRbSxhCMzdgEveYw
+IZ5ZqFKl6+kz7UR4U+buqQZXu9SIujrAfD7f0FXpozB4Q0gwp31H9mVTZGGC4b871/wm7lvyDLu1
+FUyvTj/yvD66k3UPTs08x1AQQaGziOl0S1qRkPG9COtBTSTWM9NzQ4R64B+Px/l3tDzCgxv5C6Ni
+e+QaF9xFWrxx0V/G5uvYQOdiZzvYpQUVQSIsTr1TTghI33GnPbTA7/GCqcE3oE3GZurq4HeQXQD6
+32XS1ITj/qLjN72ob0hc5C9bzw8MhfmL
+""")
+
+##file activate.csh
+ACTIVATE_CSH = convert("""
+eJx9VG1P2zAQ/u5fcYQKNgTNPtN1WxlIQ4KCUEGaxuQ6yYVYSuzKdhqVX7+zk3bpy5YPUXL3PPfc
+ne98DLNCWshliVDV1kGCUFvMoJGugMjq2qQIiVSxSJ1cCofD1BYRnOVGV0CfZ0N2DD91DalQSjsw
+tQLpIJMGU1euvPe7QeJlkKzgWixlhnAt4aoUVsLnLBiy5NtbJWQ5THX1ZciYKKWwkOFaE04dUm6D
+r/zh7pq/3D7Nnid3/HEy+wFHY/gEJydg0aFaQrBFgz1c5DG1IhTs+UZgsBC2GMFBlaeH+8dZXwcW
+VPvCjXdlAvCfQsE7al0+07XjZvrSCUevR5dnkVeKlFYZmUztG4BdzL2u9KyLVabTU0bdfg7a0hgs
+cSmUg6UwUiQl2iHrcbcVGNvPCiLOe7+cRwG13z9qRGgx2z6DHjfm/Op2yqeT+xvOLzs0PTKHDz2V
+tkckFHoQfQRXoGJAj9el0FyJCmEMhzgMS4sB7KPOE2ExoLcSieYwDvR+cP8cg11gKkVJc2wRcm1g
+QhYFlXiTaTfO2ki0fQoiFM4tLuO4aZrhOzqR4dIPcWx17hphMBY+Srwh7RTyN83XOWkcSPh1Pg/k
+TXX/jbJTbMtUmcxZ+/bbqOsy82suFQg/BhdSOTRhMNBHlUarCpU7JzBhmkKmRejKOQzayQe6MWoa
+n1wqWmuh6LZAaHxcdeqIlVLhIBJdO9/kbl0It2oEXQj+eGjJOuvOIR/YGRqvFhttUB2XTvLXYN2H
+37CBdbW2W7j2r2+VsCn0doVWcFG1/4y1VwBjfwAyoZhD
+""")
+
+##file activate.bat
+ACTIVATE_BAT = convert("""
+eJx9UdEKgjAUfW6wfxjiIH+hEDKUFHSKLCMI7kNOEkIf9P9pTJ3OLJ/03HPPPed4Es9XS9qqwqgT
+PbGKKOdXL4aAFS7A4gvAwgijuiKlqOpGlATS2NeMLE+TjJM9RkQ+SmqAXLrBo1LLIeLdiWlD6jZt
+r7VNubWkndkXaxg5GO3UaOOKS6drO3luDDiO5my3iA0YAKGzPRV1ack8cOdhysI0CYzIPzjSiH5X
+0QcvC8Lfaj0emsVKYF2rhL5L3fCkVjV76kShi59NHwDniAHzkgDgqBcwOgTMx+gDQQqXCw==
+""")
+
+##file deactivate.bat
+DEACTIVATE_BAT = convert("""
+eJxzSE3OyFfIT0vj4ipOLVEI8wwKCXX0iXf1C7Pl4spMU0hJTcvMS01RiPf3cYmHyQYE+fsGhCho
+cCkAAUibEkTEVhWLMlUlLk6QGixStlyaeCyJDPHw9/Pw93VFsQguim4ZXAJoIUw5DhX47XUM8UCx
+EchHtwsohN1bILUgw61c/Vy4AJYPYm4=
+""")
+
+##file activate.ps1
+ACTIVATE_PS = convert("""
+eJylWdmO41hyfW+g/0FTU7C7IXeJIqmtB/3AnZRIStxF2kaBm7gv4ipyMF/mB3+Sf8GXVGVl1tLT
+43ECSqR4b5wbETeWE8z/+a///vNCDaN6cYtSf5G1dbNw/IVXNIu6aCvX9xa3qsgWl0IJ/7IYinbh
+2nkOVqs2X0TNjz/8eeFFle826fBhQRaLBkD9uviw+LCy3Sbq7Mb/UNbrH3+YNtLcVaB+Xbipb+eL
+tly0eVsD/M6u6g8//vC+dquobH5VWU75eMFUdvHb4n02RHlXuHYTFfmHbHCLLLNz70NpN+GrBI4p
+1EeSk4FAXaZR88u0vPip8usi7fznt3fvP+OuPnx49/Pil4td+XnzigIAPoqYQH2J8v4z+C+8b98m
+Q25t7k76LIK0cOz0V89/MXXx0+Lf6z5q3PA/F+/FIif9uqnaadFf/PzXSXYBfqIb2NeApecJwPzI
+dlL/149nnvyoc7KqYfzTAT8v/voUmX7e+3n364tffl/oVaDyswKY/7J18e6bve8Wv9RuUfqfLHmK
+/u139Hwx+9ePRep97KKqae30YwmCo2y+0vTz1k+rv7159B3pb1SOGj97Pe8/flfkC1Vn/7xYR4n6
+lypNEGDDV5f7lcjil3S+4++p881Wv6qKyn5GQg1yJwcp4BZ5E+Wt/z1P/umbiHir4J8Xip/eFt6n
+9T/9gU9eY+7zUX97Jlmb136ziKrKT/3OzpvP8VX/+MObSP0lL3LvVZlJ9v1b8357jXyw8rXxYPXN
+11n4UzJ8G8S/vUbuJ6RPj999DbtS5kys//JusXwrNLnvT99cFlBNwXCe+niRz8JF/ezNr9Pze+H6
+18W7d5PPvozW7+387Zto/v4pL8BvbxTzvIW9KCv/Fj0WzVQb/YXbVlPZWTz3/9vCaRtQbPN/Bb+j
+2rUrDxTVD68gfQXu/ZewAFX53U/vf/rD2P3558W7+W79Po1y/xXoX/6RFHyNIoVjgAG4H0RTcAe5
+3bSVv3DSwk2mZYHjFB8zj6fC4sLOFTHJJQrwzFYJgso0ApOoBzFiRzzQKjIQCCbQMIFJGCKqGUyS
+8AkjiF2wTwmMEbcEUvq8Nj+X0f4YcCQmYRiOY7eRbAJDqzm1chOoNstbJ8oTBhZQ2NcfgaB6QjLp
+U4+SWFjQGCZpyqby8V4JkPGs9eH1BscXIrTG24QxXLIgCLYNsIlxSYLA6SjAeg7HAg4/kpiIB8k9
+TCLm0EM4gKIxEj8IUj2dQeqSxEwYVH88qiRlCLjEYGuNIkJB1BA5dHOZdGAoUFk54WOqEojkuf4Q
+Ig3WY+96TDlKLicMC04h0+gDCdYHj0kz2xBDj9ECDU5zJ0tba6RKgXBneewhBG/xJ5m5FX+WSzsn
+wnHvKhcOciw9NunZ0BUF0n0IJAcJMdcLqgQb0zP19dl8t9PzmMBjkuIF7KkvHgqEovUPOsY0PBB1
+HCtUUhch83qEJPjQcNQDsgj0cRqx2ZbnnlrlUjE1EX2wFJyyDa/0GLrmKDEFepdWlsbmVU45Wiwt
+eFM6mfs4kxg8yc4YmKDy67dniLV5FUeO5AKNPZaOQQ++gh+dXE7dbJ1aTDr7S4WPd8sQoQkDyODg
+XnEu/voeKRAXZxB/e2xaJ4LTFLPYEJ15Ltb87I45l+P6OGFA5F5Ix8A4ORV6M1NH1uMuZMnmFtLi
+VpYed+gSq9JDBoHc05J4OhKetrk1p0LYiKipxLMe3tYS7c5V7O1KcPU8BJGdLfcswhoFCSGQqJ8f
+ThyQKy5EWFtHVuNhvTnkeTc8JMpN5li3buURh0+3ZGuzdwM55kon+8urbintjdQJf9U1D0ah+hNh
+i1XNu4fSKbTC5AikGEaj0CYM1dpuli7EoqUt7929f1plxGGNZnixFSFP2qzhlZMonu2bB9OWSqYx
+VuHKWNGJI8kqUhMTRtk0vJ5ycZ60JlodlmN3D9XiEj/cG2lSt+WV3OtMgt1Tf4/Z+1BaCus740kx
+Nvj78+jMd9tq537Xz/mNFyiHb0HdwHytJ3uQUzKkYhK7wjGtx3oKX43YeYoJVtqDSrCnQFzMemCS
+2bPSvP+M4yZFi/iZhAjL4UOeMfa7Ex8HKBqw4umOCPh+imOP6yVTwG2MplB+wtg97olEtykNZ6wg
+FJBNXSTJ3g0CCTEEMdUjjcaBDjhJ9fyINXgQVHhA0bjk9lhhhhOGzcqQSxYdj3iIN2xGEOODx4qj
+Q2xikJudC1ujCVOtiRwhga5nPdhe1gSa649bLJ0wCuLMcEYIeSy25YcDQHJb95nfowv3rQnin0fE
+zIXFkM/EwSGxvCCMgEPNcDp/wph1gMEa8Xd1qAWOwWZ/KhjlqzgisBpDDDXz9Cmov46GYBKHC4zZ
+84HJnXoTxyWNBbXV4LK/r+OEwSN45zBp7Cub3gIYIvYlxon5BzDgtPUYfXAMPbENGrI+YVGSeTQ5
+i8NMB5UCcC+YRGIBhgs0xhAGwSgYwywpbu4vpCSTdEKrsy8osXMUnHQYenQHbOBofLCNNTg3CRRj
+A1nXY2MZcjnXI+oQ2Zk+561H4CqoW61tbPKv65Y7fqc3TDUF9CA3F3gM0e0JQ0TPADJFJXVzphpr
+2FzwAY8apGCju1QGOiUVO5KV6/hKbtgVN6hRVwpRYtu+/OC6w2bCcGzZQ8NCc4WejNEjFxOIgR3o
+QqR1ZK0IaUxZ9nbL7GWJIjxBARUhAMnYrq/S0tVOjzlOSYRqeIZxaSaOBX5HSR3MFekOXVdUPbjX
+nru61fDwI8HRYPUS7a6Inzq9JLjokU6P6OzT4UCH+Nha+JrU4VqEo4rRHQJhVuulAnvFhYz5NWFT
+aS/bKxW6J3e46y4PLagGrCDKcq5B9EmP+s1QMCaxHNeM7deGEV3WPn3CeKjndlygdPyoIcNaL3dd
+bdqPs47frcZ3aNWQ2Tk+rjFR01Ul4XnQQB6CSKA+cZusD0CP3F2Ph0e78baybgioepG12luSpFXi
+bHbI6rGLDsGEodMObDG7uyxfCeU+1OiyXYk8fnGu0SpbpRoEuWdSUlNi5bd9nBxYqZGrq7Qa7zV+
+VLazLcelzzP9+n6+xUtWx9OVJZW3gk92XGGkstTJ/LreFVFF2feLpXGGuQqq6/1QbWPyhJXIXIMs
+7ySVlzMYqoPmnmrobbeauMIxrCr3sM+qs5HpwmmFt7SM3aRNQWpCrmeAXY28EJ9uc966urGKBL9H
+18MtDE5OX97GDOHxam11y5LCAzcwtkUu8wqWI1dWgHyxGZdY8mC3lXzbzncLZ2bIUxTD2yW7l9eY
+gBUo7uj02ZI3ydUViL7oAVFag37JsjYG8o4Csc5R7SeONGF8yZP+7xxi9scnHvHPcogJ44VH/LMc
+Yu6Vn3jEzCFw9Eqq1ENQAW8aqbUwSiAqi+nZ+OkZJKpBL66Bj8z+ATqb/8qDIJUeNRTwrI0YrVmb
+9FArKVEbCWUNSi8ipfVv+STgkpSsUhcBg541eeKLoBpLGaiHTNoK0r4nn3tZqrcIULtq20Df+FVQ
+Sa0MnWxTugMuzD410sQygF4qdntbswiJMqjs014Irz/tm+pd5oygJ0fcdNbMg165Pqi7EkYGAXcB
+dwxioCDA3+BY9+JjuOmJu/xyX2GJtaKSQcOZxyqFzTaa6/ot21sez0BtKjirROKRm2zuai02L0N+
+ULaX8H5P6VwsGPbYOY7sAy5FHBROMrMzFVPYhFHZ7M3ZCZa2hsT4jGow6TGtG8Nje9405uMUjdF4
+PtKQjw6yZOmPUmO8LjFWS4aPCfE011N+l3EdYq09O3iQJ9a01B3KXiMF1WmtZ+l1gmyJ/ibAHZil
+vQzdOl6g9PoSJ4TM4ghTnTndEVMOmsSSu+SCVlGCOLQRaw9oLzamSWP62VuxPZ77mZYdfTRGuNBi
+KyhZL32S2YckO/tU7y4Bf+QKKibQSKCTDWPUwWaE8yCBeL5FjpbQuAlb53mGX1jptLeRotREbx96
+gnicYz0496dYauCjpTCA4VA0cdLJewzRmZeTwuXWD0talJsSF9J1Pe72nkaHSpULgNeK1+o+9yi0
+YpYwXZyvaZatK2eL0U0ZY6ekZkFPdC8JTF4Yo1ytawNfepqUKEhwznp6HO6+2l7L2R9Q3N49JMIe
+Z+ax1mVaWussz98QbNTRPo1xu4W33LJpd9H14dd66ype7UktfEDi3oUTccJ4nODjwBKFxS7lYWiq
+XoHu/b7ZVcK5TbRD0F/2GShg2ywwUl07k4LLqhofKxFBNd1grWY+Zt/cPtacBpV9ys2z1moMLrT3
+W0Elrjtt5y/dvDQYtObYS97pqj0eqmwvD3jCPRqamGthLiF0XkgB6IdHLBBwDGPiIDh7oPaRmTrN
+tYA/yQKFxRiok+jM6ciJq/ZgiOi5+W4DEmufPEubeSuYJaM3/JHEevM08yJAXUQwb9LS2+8FOfds
+FfOe3Bel6EDSjIEIKs4o9tyt67L1ylQlzhe0Q+7ue/bJnWMcD3q6wDSIQi8ThnRM65aqLWesi/ZM
+xhHmQvfKBbWcC194IPjbBLYR9JTPITbzwRcu+OSFHDHNSYCLt29sAHO6Gf0h/2UO9Xwvhrjhczyx
+Ygz6CqP4IwxQj5694Q1Pe2IR+KF/yy+5PvCL/vgwv5mPp9n4kx7fnY/nmV++410qF/ZVCMyv5nAP
+pkeOSce53yJ6ahF4aMJi52by1HcCj9mDT5i+7TF6RoPaLL+cN1hXem2DmX/mdIbeeqwQOLD5lKO/
+6FM4x77w6D5wMx3g0IAfa2D/pgY9a7bFQbinLDPz5dZi9ATIrd0cB5xfC0BfCCZO7TKP0jQ2Meih
+nRXhkA3smTAnDN9IW2vA++lsgNuZ2QP0UhqyjUPrDmgfWP2bWWiKA+YiEK7xou8cY0+d3/bk0oHR
+QLrq4KzDYF/ljQDmNhBHtkVNuoDey6TTeaD3SHO/Bf4d3IwGdqQp6FuhmwFbmbQBssDXVKDBYOpk
+Jy7wxOaSRwr0rDmGbsFdCM+7XU/84JPu3D/gW7QXgzlvbjixn99/8CpWFUQWHFEz/RyXvzNXTTOd
+OXLNNFc957Jn/YikNzEpUdRNxXcC6b76ccTwMGoKj5X7c7TvHFgc3Tf4892+5A+iR+D8OaaE6ACe
+gdgHcyCoPm/xiDCWP+OZRjpzfj5/2u0i4qQfmIEOsTV9Hw6jZ3Agnh6hiwjDtGYxWvt5TiWEuabN
+77YCyRXwO8P8wdzG/8489KwfFBZWI6Vvx76gmlOc03JI1HEfXYZEL4sNFQ3+bqf7e2hdSWQknwKF
+ICJjGyDs3fdmnnxubKXebpQYLjPgEt9GTzKkUgTvOoQa1J7N3nv4sR6uvYFLhkXZ+pbCoU3K9bfq
+gF7W82tNutRRZExad+k4GYYsCfmEbvizS4jsRr3fdzqjEthpEwm7pmN7OgVzRbrktjrFw1lc0vM8
+V7dyTJ71qlsd7v3KhmHzeJB35pqEOk2pEe5uPeCToNkmedmxcKbIj+MZzjFSsvCmimaMQB1uJJKa
++hoWUi7aEFLvIxKxJavqpggXBIk2hr0608dIgnfG5ZEprqmH0b0YSy6jVXTCuIB+WER4d5BPVy9Q
+M4taX0RIlDYxQ2CjBuq78AAcHQf5qoKP8BXHnDnd/+ed5fS+csL4g3eWqECaL+8suy9r8hx7c+4L
+EegEWdqAWN1w1NezP34xsxLkvRRI0DRzKOg0U+BKfQY128YlYsbwSczEg2LqKxRmcgiwHdhc9MQJ
+IwKQHlgBejWeMGDYYxTOQUiJOmIjJbzIzHH6lAMP+y/fR0v1g4wx4St8fcqTt3gz5wc+xXFZZ3qI
+JpXI5iJk7xmNL2tYsDpcqu0375Snd5EKsIvg8u5szTOyZ4v06Ny2TZXRpHUSinh4IFp8Eoi7GINJ
+02lPJnS/9jSxolJwp2slPMIEbjleWw3eec4XaetyEnSSqTPRZ9fVA0cPXMqzrPYQQyrRux3LaAh1
+wujbgcObg1nt4iiJ5IMbc/WNPc280I2T4nTkdwG8H6iS5xO2WfsFsruBwf2QkgZlb6w7om2G65Lr
+r2Gl4dk63F8rCEHoUJ3fW+pU2Srjlmcbp+JXY3DMifEI22HcHAvT7zzXiMTr7VbUR5a2lZtJkk4k
+1heZZFdru8ucCWMTr3Z4eNnjLm7LW7rcN7QjMpxrsCzjxndeyFUX7deIs3PQkgyH8k6luI0uUyLr
+va47TBjM4JmNHFzGPcP6BV6cYgQy8VQYZe5GmzZHMxyBYhGiUdekZQ/qwyxC3WGylQGdUpSf9ZCP
+a7qPdJd31fPRC0TOgzupO7nLuBGr2A02yuUQwt2KQG31sW8Gd9tQiHq+hPDt4OzJuY4pS8XRsepY
+tsd7dVEfJFmc15IYqwHverrpWyS1rFZibDPW1hUUb+85CGUzSBSTK8hpvee/ZxonW51TUXekMy3L
+uy25tMTg4mqbSLQQJ+skiQu2toIfBFYrOWql+EQipgfT15P1aq6FDK3xgSjIGWde0BPftYchDTdM
+i4QdudHFkN0u6fSKiT09QLv2mtSblt5nNzBR6UReePNs+khE4rHcXuoK21igUKHl1c3MXMgPu7y8
+rKQDxR6N/rffXv+lROXet/9Q+l9I4D1U
+""")
+
+##file distutils-init.py
+DISTUTILS_INIT = convert("""
+eJytV1uL4zYUfvevOE0ottuMW9q3gVDa3aUMXXbLMlDKMBiNrSTqOJKRlMxkf33PkXyRbGe7Dw2E
+UXTu37lpxLFV2oIyifAncxmOL0xLIfcG+gv80x9VW6maw7o/CANSWWBwFtqeWMPlGY6qPjV8A0bB
+C4eKSTgZ5LRgFeyErMEeOBhbN+Ipgeizhjtnhkn7DdyjuNLPoCS0l/ayQTG0djwZC08cLXozeMss
+aG5EzQ0IScpnWtHSTXuxByV/QCmxE7y+eS0uxWeoheaVVfqSJHiU7Mhhi6gULbOHorshkrEnKxpT
+0n3A8Y8SMpuwZx6aoix3ouFlmW8gHRSkeSJ2g7hU+kiHLDaQw3bmRDaTGfTnty7gPm0FHbIBg9U9
+oh1kZzAFLaue2R6htPCtAda2nGlDSUJ4PZBgCJBGVcwKTAMz/vJiLD+Oin5Z5QlvDPdulC6EsiyE
+NFzb7McNTKJzbJqzphx92VKRFY1idenzmq3K0emRcbWBD0ryqc4NZGmKOOOX9Pz5x+/l27tP797c
+f/z0d+4NruGNai8uAM0bfsYaw8itFk8ny41jsfpyO+BWlpqfhcG4yxLdi/0tQqoT4a8Vby382mt8
+p7XSo7aWGdPBc+b6utaBmCQ7rQKQoWtAuthQCiold2KfJIPTT8xwg9blPumc+YDZC/wYGdAyHpJk
+vUbHbHWAp5No6pK/WhhLEWrFjUwtPEv1Agf8YmnsuXUQYkeZoHm8ogP16gt2uHoxcEMdf2C6pmbw
+hUMsWGhanboh4IzzmsIpWs134jVPqD/c74bZHdY69UKKSn/+KfVhxLgUlToemayLMYQOqfEC61bh
+cbhwaqoGUzIyZRFHPmau5juaWqwRn3mpWmoEA5nhzS5gog/5jbcFQqOZvmBasZtwYlG93k5GEiyw
+buHhMWLjDarEGpMGB2LFs5nIJkhp/nUmZneFaRth++lieJtHepIvKgx6PJqIlD9X2j6pG1i9x3pZ
+5bHuCPFiirGHeO7McvoXkz786GaKVzC9DSpnOxJdc4xm6NSVq7lNEnKdVlnpu9BNYoKX2Iq3wvgh
+gGEUM66kK6j4NiyoneuPLSwaCWDxczgaolEWpiMyDVDb7dNuLAbriL8ig8mmeju31oNvQdpnvEPC
+1vAXbWacGRVrGt/uXN/gU0CDDwgooKRrHfTBb1/s9lYZ8ZqOBU0yLvpuP6+K9hLFsvIjeNhBi0KL
+MlOuWRn3FRwx5oHXjl0YImUx0+gLzjGchrgzca026ETmYJzPD+IpuKzNi8AFn048Thd63OdD86M6
+84zE8yQm0VqXdbbgvub2pKVnS76icBGdeTHHXTKspUmr4NYo/furFLKiMdQzFjHJNcdAnMhltBJK
+0/IKX3DVFqvPJ2dLE7bDBkH0l/PJ29074+F0CsGYOxsb7U3myTUncYfXqnLLfa6sJybX4g+hmcjO
+kMRBfA1JellfRRKJcyRpxdS4rIl6FdmQCWjo/o9Qz7yKffoP4JHjOvABcRn4CZIT2RH4jnxmfpVG
+qgLaAvQBNfuO6X0/Ux02nb4FKx3vgP+XnkX0QW9pLy/NsXgdN24dD3LxO2Nwil7Zlc1dqtP3d7/h
+kzp1/+7hGBuY4pk0XD/0Ao/oTe/XGrfyM773aB7iUhgkpy+dwAMalxMP0DrBcsVw/6p25+/hobP9
+GBknrWExDhLJ1bwt1NcCNblaFbMKCyvmX0PeRaQ=
+""")
+
+##file distutils.cfg
+DISTUTILS_CFG = convert("""
+eJxNj00KwkAMhfc9xYNuxe4Ft57AjYiUtDO1wXSmNJnK3N5pdSEEAu8nH6lxHVlRhtDHMPATA4uH
+xJ4EFmGbvfJiicSHFRzUSISMY6hq3GLCRLnIvSTnEefN0FIjw5tF0Hkk9Q5dRunBsVoyFi24aaLg
+9FDOlL0FPGluf4QjcInLlxd6f6rqkgPu/5nHLg0cXCscXoozRrP51DRT3j9QNl99AP53T2Q=
+""")
+
+##file activate_this.py
+ACTIVATE_THIS = convert("""
+eJyNU01v2zAMvetXEB4K21jnDOstQA4dMGCHbeihlyEIDMWmE62yJEiKE//7kXKdpEWLzYBt8evx
+kRSzLPs6wiEoswM8YdMpjUXcq1Dz6RZa1cSiTkJdr86GsoTRHuCotBayiWqQEYGtMCgfD1KjGYBe
+5a3p0cRKiEe2NtLAFikftnDco0ko/SFEVgEZ8aRCZDIPY9xbA8pE9M4jfW/B2CjiHq9zbJVZuOQq
+siwTIvpxKYCembPAU4Muwi/Z4zfvrZ/MXipKeB8C+qisSZYiWfjJfs+0/MFMdWn1hJcO5U7G/SLa
+xVx8zU6VG/PXLXvfsyyzUqjeWR8hjGE+2iCE1W1tQ82hsCJN9dzKaoexyB/uH79TnjwvxcW0ntSb
+yZ8jq1Z5Q1UXsyy3gf9nbjTEj7NzQMfCJa/YSmrQ+2D/BqfiOi6sclrGzvoeVivIj8rcfcmnIQRF
+7XCyeZI7DFe5/lhlCs5PRf5QW66VXT/NrlQ46oD/D6InkOmi3IQcbhKxAX2g4a+Xd5s3UtCtG2py
+m8eg6WYWqR6SL5OjKMGfSrYt/6kxxQtOpeAgj1LXBNmpE2ElmCSIy5H0zFd8gJ924HWijWhb2hRC
+6wNEm1QdDZtuSZcEprIUBo/XRNcbQe1OUbQ/r3hPTaPJJDNtFLu8KHV5XoNr3Eo6h6YtOKw8e8yw
+VF5PnJ+ts3a9/Mz38RpG/AUSzYUW
+""")
+
+MH_MAGIC = 0xfeedface
+MH_CIGAM = 0xcefaedfe
+MH_MAGIC_64 = 0xfeedfacf
+MH_CIGAM_64 = 0xcffaedfe
+FAT_MAGIC = 0xcafebabe
+BIG_ENDIAN = '>'
+LITTLE_ENDIAN = '<'
+LC_LOAD_DYLIB = 0xc
+maxint = majver == 3 and getattr(sys, 'maxsize') or getattr(sys, 'maxint')
+
+
+class fileview(object):
+    """
+    A proxy for file-like objects that exposes a given view of a file.
+    Modified from macholib.
+    """
+
+    def __init__(self, fileobj, start=0, size=maxint):
+        if isinstance(fileobj, fileview):
+            self._fileobj = fileobj._fileobj
+        else:
+            self._fileobj = fileobj
+        self._start = start
+        self._end = start + size
+        self._pos = 0
+
+    def __repr__(self):
+        return '<fileview [%d, %d] %r>' % (
+            self._start, self._end, self._fileobj)
+
+    def tell(self):
+        return self._pos
+
+    def _checkwindow(self, seekto, op):
+        if not (self._start <= seekto <= self._end):
+            raise IOError("%s to offset %d is outside window [%d, %d]" % (
+                op, seekto, self._start, self._end))
+
+    def seek(self, offset, whence=0):
+        seekto = offset
+        if whence == os.SEEK_SET:
+            seekto += self._start
+        elif whence == os.SEEK_CUR:
+            seekto += self._start + self._pos
+        elif whence == os.SEEK_END:
+            seekto += self._end
+        else:
+            raise IOError("Invalid whence argument to seek: %r" % (whence,))
+        self._checkwindow(seekto, 'seek')
+        self._fileobj.seek(seekto)
+        self._pos = seekto - self._start
+
+    def write(self, bytes):
+        here = self._start + self._pos
+        self._checkwindow(here, 'write')
+        self._checkwindow(here + len(bytes), 'write')
+        self._fileobj.seek(here, os.SEEK_SET)
+        self._fileobj.write(bytes)
+        self._pos += len(bytes)
+
+    def read(self, size=maxint):
+        assert size >= 0
+        here = self._start + self._pos
+        self._checkwindow(here, 'read')
+        size = min(size, self._end - here)
+        self._fileobj.seek(here, os.SEEK_SET)
+        bytes = self._fileobj.read(size)
+        self._pos += len(bytes)
+        return bytes
+
+
+def read_data(file, endian, num=1):
+    """
+    Read a given number of 32-bits unsigned integers from the given file
+    with the given endianness.
+    """
+    res = struct.unpack(endian + 'L' * num, file.read(num * 4))
+    if len(res) == 1:
+        return res[0]
+    return res
+
+
+def mach_o_change(path, what, value):
+    """
+    Replace a given name (what) in any LC_LOAD_DYLIB command found in
+    the given binary with a new name (value), provided it's shorter.
+    """
+
+    def do_macho(file, bits, endian):
+        # Read Mach-O header (the magic number is assumed read by the caller)
+        cputype, cpusubtype, filetype, ncmds, sizeofcmds, flags = read_data(file, endian, 6)
+        # 64-bits header has one more field.
+        if bits == 64:
+            read_data(file, endian)
+        # The header is followed by ncmds commands
+        for n in range(ncmds):
+            where = file.tell()
+            # Read command header
+            cmd, cmdsize = read_data(file, endian, 2)
+            if cmd == LC_LOAD_DYLIB:
+                # The first data field in LC_LOAD_DYLIB commands is the
+                # offset of the name, starting from the beginning of the
+                # command.
+                name_offset = read_data(file, endian)
+                file.seek(where + name_offset, os.SEEK_SET)
+                # Read the NUL terminated string
+                load = file.read(cmdsize - name_offset).decode()
+                load = load[:load.index('\0')]
+                # If the string is what is being replaced, overwrite it.
+                if load == what:
+                    file.seek(where + name_offset, os.SEEK_SET)
+                    file.write(value.encode() + '\0'.encode())
+            # Seek to the next command
+            file.seek(where + cmdsize, os.SEEK_SET)
+
+    def do_file(file, offset=0, size=maxint):
+        file = fileview(file, offset, size)
+        # Read magic number
+        magic = read_data(file, BIG_ENDIAN)
+        if magic == FAT_MAGIC:
+            # Fat binaries contain nfat_arch Mach-O binaries
+            nfat_arch = read_data(file, BIG_ENDIAN)
+            for n in range(nfat_arch):
+                # Read arch header
+                cputype, cpusubtype, offset, size, align = read_data(file, BIG_ENDIAN, 5)
+                do_file(file, offset, size)
+        elif magic == MH_MAGIC:
+            do_macho(file, 32, BIG_ENDIAN)
+        elif magic == MH_CIGAM:
+            do_macho(file, 32, LITTLE_ENDIAN)
+        elif magic == MH_MAGIC_64:
+            do_macho(file, 64, BIG_ENDIAN)
+        elif magic == MH_CIGAM_64:
+            do_macho(file, 64, LITTLE_ENDIAN)
+
+    assert(len(what) >= len(value))
+    do_file(open(path, 'r+b'))
+
+
+if __name__ == '__main__':
+    main()
+
+## TODO:
+## Copy python.exe.manifest
+## Monkeypatch distutils.sysconfig
diff --git a/bootstrap/virtualenv/virtualenv_embedded/activate.bat b/bootstrap/virtualenv/virtualenv_embedded/activate.bat
new file mode 100644
index 0000000..4c2003e
--- /dev/null
+++ b/bootstrap/virtualenv/virtualenv_embedded/activate.bat
@@ -0,0 +1,26 @@
+@echo off

+set "VIRTUAL_ENV=__VIRTUAL_ENV__"

+

+if defined _OLD_VIRTUAL_PROMPT (

+    set "PROMPT=%_OLD_VIRTUAL_PROMPT%"

+) else (

+    if not defined PROMPT (

+        set "PROMPT=$P$G"

+    )

+	set "_OLD_VIRTUAL_PROMPT=%PROMPT%"	

+)

+set "PROMPT=__VIRTUAL_WINPROMPT__ %PROMPT%"

+

+if not defined _OLD_VIRTUAL_PYTHONHOME (

+    set "_OLD_VIRTUAL_PYTHONHOME=%PYTHONHOME%"

+)

+set PYTHONHOME=

+

+if defined _OLD_VIRTUAL_PATH (

+    set "PATH=%_OLD_VIRTUAL_PATH%"

+) else (

+    set "_OLD_VIRTUAL_PATH=%PATH%"

+)

+set "PATH=%VIRTUAL_ENV%\__BIN_NAME__;%PATH%"

+

+:END

diff --git a/bootstrap/virtualenv/virtualenv_embedded/activate.csh b/bootstrap/virtualenv/virtualenv_embedded/activate.csh
new file mode 100644
index 0000000..9db7744
--- /dev/null
+++ b/bootstrap/virtualenv/virtualenv_embedded/activate.csh
@@ -0,0 +1,42 @@
+# This file must be used with "source bin/activate.csh" *from csh*.
+# You cannot run it directly.
+# Created by Davide Di Blasi <davidedb@gmail.com>.
+
+alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate && unalias pydoc'
+
+# Unset irrelevant variables.
+deactivate nondestructive
+
+setenv VIRTUAL_ENV "__VIRTUAL_ENV__"
+
+set _OLD_VIRTUAL_PATH="$PATH"
+setenv PATH "$VIRTUAL_ENV/__BIN_NAME__:$PATH"
+
+
+
+if ("__VIRTUAL_PROMPT__" != "") then
+    set env_name = "__VIRTUAL_PROMPT__"
+else
+    if (`basename "$VIRTUAL_ENV"` == "__") then
+        # special case for Aspen magic directories
+        # see http://www.zetadev.com/software/aspen/
+        set env_name = `basename \`dirname "$VIRTUAL_ENV"\``
+    else
+        set env_name = `basename "$VIRTUAL_ENV"`
+    endif
+endif
+
+# Could be in a non-interactive environment,
+# in which case, $prompt is undefined and we wouldn't
+# care about the prompt anyway.
+if ( $?prompt ) then
+    set _OLD_VIRTUAL_PROMPT="$prompt"
+    set prompt = "[$env_name] $prompt"
+endif
+
+unset env_name
+
+alias pydoc python -m pydoc
+
+rehash
+
diff --git a/bootstrap/virtualenv/virtualenv_embedded/activate.fish b/bootstrap/virtualenv/virtualenv_embedded/activate.fish
new file mode 100644
index 0000000..eaa241d
--- /dev/null
+++ b/bootstrap/virtualenv/virtualenv_embedded/activate.fish
@@ -0,0 +1,74 @@
+# This file must be used with "source bin/activate.fish" *from fish* (http://fishshell.com)
+# you cannot run it directly
+
+function deactivate  -d "Exit virtualenv and return to normal shell environment"
+    # reset old environment variables
+    if test -n "$_OLD_VIRTUAL_PATH" 
+        set -gx PATH $_OLD_VIRTUAL_PATH
+        set -e _OLD_VIRTUAL_PATH
+    end
+    if test -n "$_OLD_VIRTUAL_PYTHONHOME"
+        set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
+        set -e _OLD_VIRTUAL_PYTHONHOME
+    end
+    
+    if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
+        # set an empty local fish_function_path, so fish_prompt doesn't automatically reload
+        set -l fish_function_path
+        # erase the virtualenv's fish_prompt function, and restore the original
+        functions -e fish_prompt
+        functions -c _old_fish_prompt fish_prompt
+        functions -e _old_fish_prompt
+        set -e _OLD_FISH_PROMPT_OVERRIDE
+    end
+    
+    set -e VIRTUAL_ENV
+    if test "$argv[1]" != "nondestructive"
+        # Self destruct!
+        functions -e deactivate
+    end
+end
+
+# unset irrelevant variables
+deactivate nondestructive
+
+set -gx VIRTUAL_ENV "__VIRTUAL_ENV__"
+
+set -gx _OLD_VIRTUAL_PATH $PATH
+set -gx PATH "$VIRTUAL_ENV/__BIN_NAME__" $PATH
+
+# unset PYTHONHOME if set
+if set -q PYTHONHOME
+    set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
+    set -e PYTHONHOME
+end
+
+if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
+    # fish uses a function instead of an env var to generate the prompt.
+    
+    # copy the current fish_prompt function as the function _old_fish_prompt
+    functions -c fish_prompt _old_fish_prompt
+    
+    # with the original prompt function copied, we can override with our own.
+    function fish_prompt
+        # Prompt override?
+        if test -n "__VIRTUAL_PROMPT__"
+            printf "%s%s" "__VIRTUAL_PROMPT__" (set_color normal)
+            _old_fish_prompt
+            return
+        end
+        # ...Otherwise, prepend env
+        set -l _checkbase (basename "$VIRTUAL_ENV")
+        if test $_checkbase = "__"
+            # special case for Aspen magic directories
+            # see http://www.zetadev.com/software/aspen/
+            printf "%s[%s]%s " (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal) 
+            _old_fish_prompt
+        else
+            printf "%s(%s)%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal)
+            _old_fish_prompt
+        end
+    end 
+    
+    set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
+end
diff --git a/bootstrap/virtualenv/virtualenv_embedded/activate.ps1 b/bootstrap/virtualenv/virtualenv_embedded/activate.ps1
new file mode 100644
index 0000000..0f4adf1
--- /dev/null
+++ b/bootstrap/virtualenv/virtualenv_embedded/activate.ps1
@@ -0,0 +1,150 @@
+# This file must be dot sourced from PoSh; you cannot run it

+# directly. Do this: . ./activate.ps1

+

+# FIXME: clean up unused vars.

+$script:THIS_PATH = $myinvocation.mycommand.path

+$script:BASE_DIR = split-path (resolve-path "$THIS_PATH/..") -Parent

+$script:DIR_NAME = split-path $BASE_DIR -Leaf

+

+function global:deactivate ( [switch] $NonDestructive ){

+

+    if ( test-path variable:_OLD_VIRTUAL_PATH ) {

+        $env:PATH = $variable:_OLD_VIRTUAL_PATH

+        remove-variable "_OLD_VIRTUAL_PATH" -scope global

+    }

+

+    if ( test-path function:_old_virtual_prompt ) {

+        $function:prompt = $function:_old_virtual_prompt

+        remove-item function:\_old_virtual_prompt

+    }

+

+    if ($env:VIRTUAL_ENV) {

+        $old_env = split-path $env:VIRTUAL_ENV -leaf

+        remove-item env:VIRTUAL_ENV -erroraction silentlycontinue

+    }

+

+    if ( !$NonDestructive ) {

+        # Self destruct!

+        remove-item function:deactivate

+    }

+}

+

+# unset irrelevant variables

+deactivate -nondestructive

+

+$VIRTUAL_ENV = $BASE_DIR

+$env:VIRTUAL_ENV = $VIRTUAL_ENV

+

+$global:_OLD_VIRTUAL_PATH = $env:PATH

+$env:PATH = "$env:VIRTUAL_ENV/Scripts;" + $env:PATH

+if (! $env:VIRTUAL_ENV_DISABLE_PROMPT) {

+    function global:_old_virtual_prompt { "" }

+    $function:_old_virtual_prompt = $function:prompt

+    function global:prompt {

+        # Add a prefix to the current prompt, but don't discard it.

+        write-host "($(split-path $env:VIRTUAL_ENV -leaf)) " -nonewline

+        & $function:_old_virtual_prompt

+    }

+}

+

+# SIG # Begin signature block

+# MIISeAYJKoZIhvcNAQcCoIISaTCCEmUCAQExCzAJBgUrDgMCGgUAMGkGCisGAQQB

+# gjcCAQSgWzBZMDQGCisGAQQBgjcCAR4wJgIDAQAABBAfzDtgWUsITrck0sYpfvNR

+# AgEAAgEAAgEAAgEAAgEAMCEwCQYFKw4DAhoFAAQUS5reBwSg3zOUwhXf2jPChZzf

+# yPmggg6tMIIGcDCCBFigAwIBAgIBJDANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQG

+# EwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERp

+# Z2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2Vy

+# dGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDcxMDI0MjIwMTQ2WhcNMTcxMDI0MjIw

+# MTQ2WjCBjDELMAkGA1UEBhMCSUwxFjAUBgNVBAoTDVN0YXJ0Q29tIEx0ZC4xKzAp

+# BgNVBAsTIlNlY3VyZSBEaWdpdGFsIENlcnRpZmljYXRlIFNpZ25pbmcxODA2BgNV

+# BAMTL1N0YXJ0Q29tIENsYXNzIDIgUHJpbWFyeSBJbnRlcm1lZGlhdGUgT2JqZWN0

+# IENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAyiOLIjUemqAbPJ1J

+# 0D8MlzgWKbr4fYlbRVjvhHDtfhFN6RQxq0PjTQxRgWzwFQNKJCdU5ftKoM5N4YSj

+# Id6ZNavcSa6/McVnhDAQm+8H3HWoD030NVOxbjgD/Ih3HaV3/z9159nnvyxQEckR

+# ZfpJB2Kfk6aHqW3JnSvRe+XVZSufDVCe/vtxGSEwKCaNrsLc9pboUoYIC3oyzWoU

+# TZ65+c0H4paR8c8eK/mC914mBo6N0dQ512/bkSdaeY9YaQpGtW/h/W/FkbQRT3sC

+# pttLVlIjnkuY4r9+zvqhToPjxcfDYEf+XD8VGkAqle8Aa8hQ+M1qGdQjAye8OzbV

+# uUOw7wIDAQABo4IB6TCCAeUwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC

+# AQYwHQYDVR0OBBYEFNBOD0CZbLhLGW87KLjg44gHNKq3MB8GA1UdIwQYMBaAFE4L

+# 7xqkQFulF2mHMMo0aEPQQa7yMD0GCCsGAQUFBwEBBDEwLzAtBggrBgEFBQcwAoYh

+# aHR0cDovL3d3dy5zdGFydHNzbC5jb20vc2ZzY2EuY3J0MFsGA1UdHwRUMFIwJ6Al

+# oCOGIWh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL3Nmc2NhLmNybDAnoCWgI4YhaHR0

+# cDovL2NybC5zdGFydHNzbC5jb20vc2ZzY2EuY3JsMIGABgNVHSAEeTB3MHUGCysG

+# AQQBgbU3AQIBMGYwLgYIKwYBBQUHAgEWImh0dHA6Ly93d3cuc3RhcnRzc2wuY29t

+# L3BvbGljeS5wZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cuc3RhcnRzc2wuY29t

+# L2ludGVybWVkaWF0ZS5wZGYwEQYJYIZIAYb4QgEBBAQDAgABMFAGCWCGSAGG+EIB

+# DQRDFkFTdGFydENvbSBDbGFzcyAyIFByaW1hcnkgSW50ZXJtZWRpYXRlIE9iamVj

+# dCBTaWduaW5nIENlcnRpZmljYXRlczANBgkqhkiG9w0BAQUFAAOCAgEAcnMLA3Va

+# N4OIE9l4QT5OEtZy5PByBit3oHiqQpgVEQo7DHRsjXD5H/IyTivpMikaaeRxIv95

+# baRd4hoUcMwDj4JIjC3WA9FoNFV31SMljEZa66G8RQECdMSSufgfDYu1XQ+cUKxh

+# D3EtLGGcFGjjML7EQv2Iol741rEsycXwIXcryxeiMbU2TPi7X3elbwQMc4JFlJ4B

+# y9FhBzuZB1DV2sN2irGVbC3G/1+S2doPDjL1CaElwRa/T0qkq2vvPxUgryAoCppU

+# FKViw5yoGYC+z1GaesWWiP1eFKAL0wI7IgSvLzU3y1Vp7vsYaxOVBqZtebFTWRHt

+# XjCsFrrQBngt0d33QbQRI5mwgzEp7XJ9xu5d6RVWM4TPRUsd+DDZpBHm9mszvi9g

+# VFb2ZG7qRRXCSqys4+u/NLBPbXi/m/lU00cODQTlC/euwjk9HQtRrXQ/zqsBJS6U

+# J+eLGw1qOfj+HVBl/ZQpfoLk7IoWlRQvRL1s7oirEaqPZUIWY/grXq9r6jDKAp3L

+# ZdKQpPOnnogtqlU4f7/kLjEJhrrc98mrOWmVMK/BuFRAfQ5oDUMnVmCzAzLMjKfG

+# cVW/iMew41yfhgKbwpfzm3LBr1Zv+pEBgcgW6onRLSAn3XHM0eNtz+AkxH6rRf6B

+# 2mYhLEEGLapH8R1AMAo4BbVFOZR5kXcMCwowggg1MIIHHaADAgECAgIEuDANBgkq

+# hkiG9w0BAQUFADCBjDELMAkGA1UEBhMCSUwxFjAUBgNVBAoTDVN0YXJ0Q29tIEx0

+# ZC4xKzApBgNVBAsTIlNlY3VyZSBEaWdpdGFsIENlcnRpZmljYXRlIFNpZ25pbmcx

+# ODA2BgNVBAMTL1N0YXJ0Q29tIENsYXNzIDIgUHJpbWFyeSBJbnRlcm1lZGlhdGUg

+# T2JqZWN0IENBMB4XDTExMTIwMzE1MzQxOVoXDTEzMTIwMzE0NTgwN1owgYwxIDAe

+# BgNVBA0TFzU4MTc5Ni1HaDd4Zkp4a3hRU0lPNEUwMQswCQYDVQQGEwJERTEPMA0G

+# A1UECBMGQmVybGluMQ8wDQYDVQQHEwZCZXJsaW4xFjAUBgNVBAMTDUphbm5pcyBM

+# ZWlkZWwxITAfBgkqhkiG9w0BCQEWEmphbm5pc0BsZWlkZWwuaW5mbzCCAiIwDQYJ

+# KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMcPeABYdN7nPq/AkZ/EkyUBGx/l2Yui

+# Lfm8ZdLG0ulMb/kQL3fRY7sUjYPyn9S6PhqqlFnNoGHJvbbReCdUC9SIQYmOEjEA

+# raHfb7MZU10NjO4U2DdGucj2zuO5tYxKizizOJF0e4yRQZVxpUGdvkW/+GLjCNK5

+# L7mIv3Z1dagxDKHYZT74HXiS4VFUwHF1k36CwfM2vsetdm46bdgSwV+BCMmZICYT

+# IJAS9UQHD7kP4rik3bFWjUx08NtYYFAVOd/HwBnemUmJe4j3IhZHr0k1+eDG8hDH

+# KVvPgLJIoEjC4iMFk5GWsg5z2ngk0LLu3JZMtckHsnnmBPHQK8a3opUNd8hdMNJx

+# gOwKjQt2JZSGUdIEFCKVDqj0FmdnDMPfwy+FNRtpBMl1sz78dUFhSrnM0D8NXrqa

+# 4rG+2FoOXlmm1rb6AFtpjAKksHRpYcPk2DPGWp/1sWB+dUQkS3gOmwFzyqeTuXpT

+# 0juqd3iAxOGx1VRFQ1VHLLf3AzV4wljBau26I+tu7iXxesVucSdsdQu293jwc2kN

+# xK2JyHCoZH+RyytrwS0qw8t7rMOukU9gwP8mn3X6mgWlVUODMcHTULjSiCEtvyZ/

+# aafcwjUbt4ReEcnmuZtWIha86MTCX7U7e+cnpWG4sIHPnvVTaz9rm8RyBkIxtFCB

+# nQ3FnoQgyxeJAgMBAAGjggOdMIIDmTAJBgNVHRMEAjAAMA4GA1UdDwEB/wQEAwIH

+# gDAuBgNVHSUBAf8EJDAiBggrBgEFBQcDAwYKKwYBBAGCNwIBFQYKKwYBBAGCNwoD

+# DTAdBgNVHQ4EFgQUWyCgrIWo8Ifvvm1/YTQIeMU9nc8wHwYDVR0jBBgwFoAU0E4P

+# QJlsuEsZbzsouODjiAc0qrcwggIhBgNVHSAEggIYMIICFDCCAhAGCysGAQQBgbU3

+# AQICMIIB/zAuBggrBgEFBQcCARYiaHR0cDovL3d3dy5zdGFydHNzbC5jb20vcG9s

+# aWN5LnBkZjA0BggrBgEFBQcCARYoaHR0cDovL3d3dy5zdGFydHNzbC5jb20vaW50

+# ZXJtZWRpYXRlLnBkZjCB9wYIKwYBBQUHAgIwgeowJxYgU3RhcnRDb20gQ2VydGlm

+# aWNhdGlvbiBBdXRob3JpdHkwAwIBARqBvlRoaXMgY2VydGlmaWNhdGUgd2FzIGlz

+# c3VlZCBhY2NvcmRpbmcgdG8gdGhlIENsYXNzIDIgVmFsaWRhdGlvbiByZXF1aXJl

+# bWVudHMgb2YgdGhlIFN0YXJ0Q29tIENBIHBvbGljeSwgcmVsaWFuY2Ugb25seSBm

+# b3IgdGhlIGludGVuZGVkIHB1cnBvc2UgaW4gY29tcGxpYW5jZSBvZiB0aGUgcmVs

+# eWluZyBwYXJ0eSBvYmxpZ2F0aW9ucy4wgZwGCCsGAQUFBwICMIGPMCcWIFN0YXJ0

+# Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MAMCAQIaZExpYWJpbGl0eSBhbmQg

+# d2FycmFudGllcyBhcmUgbGltaXRlZCEgU2VlIHNlY3Rpb24gIkxlZ2FsIGFuZCBM

+# aW1pdGF0aW9ucyIgb2YgdGhlIFN0YXJ0Q29tIENBIHBvbGljeS4wNgYDVR0fBC8w

+# LTAroCmgJ4YlaHR0cDovL2NybC5zdGFydHNzbC5jb20vY3J0YzItY3JsLmNybDCB

+# iQYIKwYBBQUHAQEEfTB7MDcGCCsGAQUFBzABhitodHRwOi8vb2NzcC5zdGFydHNz

+# bC5jb20vc3ViL2NsYXNzMi9jb2RlL2NhMEAGCCsGAQUFBzAChjRodHRwOi8vYWlh

+# LnN0YXJ0c3NsLmNvbS9jZXJ0cy9zdWIuY2xhc3MyLmNvZGUuY2EuY3J0MCMGA1Ud

+# EgQcMBqGGGh0dHA6Ly93d3cuc3RhcnRzc2wuY29tLzANBgkqhkiG9w0BAQUFAAOC

+# AQEAhrzEV6zwoEtKjnFRhCsjwiPykVpo5Eiye77Ve801rQDiRKgSCCiW6g3HqedL

+# OtaSs65Sj2pm3Viea4KR0TECLcbCTgsdaHqw2x1yXwWBQWZEaV6EB05lIwfr94P1

+# SFpV43zkuc+bbmA3+CRK45LOcCNH5Tqq7VGTCAK5iM7tvHwFlbQRl+I6VEL2mjpF

+# NsuRjDOVrv/9qw/a22YJ9R7Y1D0vUSs3IqZx2KMUaYDP7H2mSRxJO2nADQZBtriF

+# gTyfD3lYV12MlIi5CQwe3QC6DrrfSMP33i5Wa/OFJiQ27WPxmScYVhiqozpImFT4

+# PU9goiBv9RKXdgTmZE1PN0NQ5jGCAzUwggMxAgEBMIGTMIGMMQswCQYDVQQGEwJJ

+# TDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0

+# YWwgQ2VydGlmaWNhdGUgU2lnbmluZzE4MDYGA1UEAxMvU3RhcnRDb20gQ2xhc3Mg

+# MiBQcmltYXJ5IEludGVybWVkaWF0ZSBPYmplY3QgQ0ECAgS4MAkGBSsOAwIaBQCg

+# eDAYBgorBgEEAYI3AgEMMQowCKACgAChAoAAMBkGCSqGSIb3DQEJAzEMBgorBgEE

+# AYI3AgEEMBwGCisGAQQBgjcCAQsxDjAMBgorBgEEAYI3AgEVMCMGCSqGSIb3DQEJ

+# BDEWBBRVGw0FDSiaIi38dWteRUAg/9Pr6DANBgkqhkiG9w0BAQEFAASCAgCInvOZ

+# FdaNFzbf6trmFDZKMojyx3UjKMCqNjHVBbuKY0qXwFC/ElYDV1ShJ2CBZbdurydO

+# OQ6cIQ0KREOCwmX/xB49IlLHHUxNhEkVv7HGU3EKAFf9IBt9Yr7jikiR9cjIsfHK

+# 4cjkoKJL7g28yEpLLkHt1eo37f1Ga9lDWEa5Zq3U5yX+IwXhrUBm1h8Xr033FhTR

+# VEpuSz6LHtbrL/zgJnCzJ2ahjtJoYevdcWiNXffosJHFaSfYDDbiNsPRDH/1avmb

+# 5j/7BhP8BcBaR6Fp8tFbNGIcWHHGcjqLMnTc4w13b7b4pDhypqElBa4+lCmwdvv9

+# GydYtRgPz8GHeoBoKj30YBlMzRIfFYaIFGIC4Ai3UEXkuH9TxYohVbGm/W0Kl4Lb

+# RJ1FwiVcLcTOJdgNId2vQvKc+jtNrjcg5SP9h2v/C4aTx8tyc6tE3TOPh2f9b8DL

+# S+SbVArJpuJqrPTxDDoO1QNjTgLcdVYeZDE+r/NjaGZ6cMSd8db3EaG3ijD/0bud

+# SItbm/OlNVbQOFRR76D+ZNgPcU5iNZ3bmvQQIg6aSB9MHUpIE/SeCkNl9YeVk1/1

+# GFULgNMRmIYP4KLvu9ylh5Gu3hvD5VNhH6+FlXANwFy07uXks5uF8mfZVxVCnodG

+# xkNCx+6PsrA5Z7WP4pXcmYnMn97npP/Q9EHJWw==

+# SIG # End signature block

diff --git a/bootstrap/virtualenv/virtualenv_embedded/activate.sh b/bootstrap/virtualenv/virtualenv_embedded/activate.sh
new file mode 100644
index 0000000..e50c782
--- /dev/null
+++ b/bootstrap/virtualenv/virtualenv_embedded/activate.sh
@@ -0,0 +1,80 @@
+# This file must be used with "source bin/activate" *from bash*
+# you cannot run it directly
+
+deactivate () {
+    unset pydoc
+
+    # reset old environment variables
+    if [ -n "$_OLD_VIRTUAL_PATH" ] ; then
+        PATH="$_OLD_VIRTUAL_PATH"
+        export PATH
+        unset _OLD_VIRTUAL_PATH
+    fi
+    if [ -n "$_OLD_VIRTUAL_PYTHONHOME" ] ; then
+        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
+        export PYTHONHOME
+        unset _OLD_VIRTUAL_PYTHONHOME
+    fi
+
+    # This should detect bash and zsh, which have a hash command that must
+    # be called to get it to forget past commands.  Without forgetting
+    # past commands the $PATH changes we made may not be respected
+    if [ -n "$BASH" -o -n "$ZSH_VERSION" ] ; then
+        hash -r 2>/dev/null
+    fi
+
+    if [ -n "$_OLD_VIRTUAL_PS1" ] ; then
+        PS1="$_OLD_VIRTUAL_PS1"
+        export PS1
+        unset _OLD_VIRTUAL_PS1
+    fi
+
+    unset VIRTUAL_ENV
+    if [ ! "$1" = "nondestructive" ] ; then
+    # Self destruct!
+        unset -f deactivate
+    fi
+}
+
+# unset irrelevant variables
+deactivate nondestructive
+
+VIRTUAL_ENV="__VIRTUAL_ENV__"
+export VIRTUAL_ENV
+
+_OLD_VIRTUAL_PATH="$PATH"
+PATH="$VIRTUAL_ENV/__BIN_NAME__:$PATH"
+export PATH
+
+# unset PYTHONHOME if set
+# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
+# could use `if (set -u; : $PYTHONHOME) ;` in bash
+if [ -n "$PYTHONHOME" ] ; then
+    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
+    unset PYTHONHOME
+fi
+
+if [ -z "$VIRTUAL_ENV_DISABLE_PROMPT" ] ; then
+    _OLD_VIRTUAL_PS1="$PS1"
+    if [ "x__VIRTUAL_PROMPT__" != x ] ; then
+        PS1="__VIRTUAL_PROMPT__$PS1"
+    else
+    if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then
+        # special case for Aspen magic directories
+        # see http://www.zetadev.com/software/aspen/
+        PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1"
+    else
+        PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1"
+    fi
+    fi
+    export PS1
+fi
+
+alias pydoc="python -m pydoc"
+
+# This should detect bash and zsh, which have a hash command that must
+# be called to get it to forget past commands.  Without forgetting
+# past commands the $PATH changes we made may not be respected
+if [ -n "$BASH" -o -n "$ZSH_VERSION" ] ; then
+    hash -r 2>/dev/null
+fi
diff --git a/bootstrap/virtualenv/virtualenv_embedded/activate_this.py b/bootstrap/virtualenv/virtualenv_embedded/activate_this.py
new file mode 100644
index 0000000..f18193b
--- /dev/null
+++ b/bootstrap/virtualenv/virtualenv_embedded/activate_this.py
@@ -0,0 +1,34 @@
+"""By using execfile(this_file, dict(__file__=this_file)) you will
+activate this virtualenv environment.
+
+This can be used when you must use an existing Python interpreter, not
+the virtualenv bin/python
+"""
+
+try:
+    __file__
+except NameError:
+    raise AssertionError(
+        "You must run this like execfile('path/to/activate_this.py', dict(__file__='path/to/activate_this.py'))")
+import sys
+import os
+
+old_os_path = os.environ.get('PATH', '')
+os.environ['PATH'] = os.path.dirname(os.path.abspath(__file__)) + os.pathsep + old_os_path
+base = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+if sys.platform == 'win32':
+    site_packages = os.path.join(base, 'Lib', 'site-packages')
+else:
+    site_packages = os.path.join(base, 'lib', 'python%s' % sys.version[:3], 'site-packages')
+prev_sys_path = list(sys.path)
+import site
+site.addsitedir(site_packages)
+sys.real_prefix = sys.prefix
+sys.prefix = base
+# Move the added items to the front of the path:
+new_sys_path = []
+for item in list(sys.path):
+    if item not in prev_sys_path:
+        new_sys_path.append(item)
+        sys.path.remove(item)
+sys.path[:0] = new_sys_path
diff --git a/bootstrap/virtualenv/virtualenv_embedded/deactivate.bat b/bootstrap/virtualenv/virtualenv_embedded/deactivate.bat
new file mode 100644
index 0000000..fd4db26
--- /dev/null
+++ b/bootstrap/virtualenv/virtualenv_embedded/deactivate.bat
@@ -0,0 +1,20 @@
+@echo off
+
+set VIRTUAL_ENV=
+
+if defined _OLD_VIRTUAL_PROMPT (
+    set "PROMPT=%_OLD_VIRTUAL_PROMPT%"
+	set _OLD_VIRTUAL_PROMPT=
+)
+
+if defined _OLD_VIRTUAL_PYTHONHOME (
+    set "PYTHONHOME=%_OLD_VIRTUAL_PYTHONHOME%"
+    set _OLD_VIRTUAL_PYTHONHOME=
+)
+
+if defined _OLD_VIRTUAL_PATH (
+    set "PATH=%_OLD_VIRTUAL_PATH%"
+	set _OLD_VIRTUAL_PATH=
+)
+
+:END
diff --git a/bootstrap/virtualenv/virtualenv_embedded/distutils-init.py b/bootstrap/virtualenv/virtualenv_embedded/distutils-init.py
new file mode 100644
index 0000000..29fc1da
--- /dev/null
+++ b/bootstrap/virtualenv/virtualenv_embedded/distutils-init.py
@@ -0,0 +1,101 @@
+import os
+import sys
+import warnings 
+import imp
+import opcode # opcode is not a virtualenv module, so we can use it to find the stdlib
+              # Important! To work on pypy, this must be a module that resides in the
+              # lib-python/modified-x.y.z directory
+
+dirname = os.path.dirname
+
+distutils_path = os.path.join(os.path.dirname(opcode.__file__), 'distutils')
+if os.path.normpath(distutils_path) == os.path.dirname(os.path.normpath(__file__)):
+    warnings.warn(
+        "The virtualenv distutils package at %s appears to be in the same location as the system distutils?")
+else:
+    __path__.insert(0, distutils_path)
+    real_distutils = imp.load_module("_virtualenv_distutils", None, distutils_path, ('', '', imp.PKG_DIRECTORY))
+    # Copy the relevant attributes
+    try:
+        __revision__ = real_distutils.__revision__
+    except AttributeError:
+        pass
+    __version__ = real_distutils.__version__
+
+from distutils import dist, sysconfig
+
+try:
+    basestring
+except NameError:
+    basestring = str
+
+## patch build_ext (distutils doesn't know how to get the libs directory
+## path on windows - it hardcodes the paths around the patched sys.prefix)
+
+if sys.platform == 'win32':
+    from distutils.command.build_ext import build_ext as old_build_ext
+    class build_ext(old_build_ext):
+        def finalize_options (self):
+            if self.library_dirs is None:
+                self.library_dirs = []
+            elif isinstance(self.library_dirs, basestring):
+                self.library_dirs = self.library_dirs.split(os.pathsep)
+            
+            self.library_dirs.insert(0, os.path.join(sys.real_prefix, "Libs"))
+            old_build_ext.finalize_options(self)
+            
+    from distutils.command import build_ext as build_ext_module 
+    build_ext_module.build_ext = build_ext
+
+## distutils.dist patches:
+
+old_find_config_files = dist.Distribution.find_config_files
+def find_config_files(self):
+    found = old_find_config_files(self)
+    system_distutils = os.path.join(distutils_path, 'distutils.cfg')
+    #if os.path.exists(system_distutils):
+    #    found.insert(0, system_distutils)
+        # What to call the per-user config file
+    if os.name == 'posix':
+        user_filename = ".pydistutils.cfg"
+    else:
+        user_filename = "pydistutils.cfg"
+    user_filename = os.path.join(sys.prefix, user_filename)
+    if os.path.isfile(user_filename):
+        for item in list(found):
+            if item.endswith('pydistutils.cfg'):
+                found.remove(item)
+        found.append(user_filename)
+    return found
+dist.Distribution.find_config_files = find_config_files
+
+## distutils.sysconfig patches:
+
+old_get_python_inc = sysconfig.get_python_inc
+def sysconfig_get_python_inc(plat_specific=0, prefix=None):
+    if prefix is None:
+        prefix = sys.real_prefix
+    return old_get_python_inc(plat_specific, prefix)
+sysconfig_get_python_inc.__doc__ = old_get_python_inc.__doc__
+sysconfig.get_python_inc = sysconfig_get_python_inc
+
+old_get_python_lib = sysconfig.get_python_lib
+def sysconfig_get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
+    if standard_lib and prefix is None:
+        prefix = sys.real_prefix
+    return old_get_python_lib(plat_specific, standard_lib, prefix)
+sysconfig_get_python_lib.__doc__ = old_get_python_lib.__doc__
+sysconfig.get_python_lib = sysconfig_get_python_lib
+
+old_get_config_vars = sysconfig.get_config_vars
+def sysconfig_get_config_vars(*args):
+    real_vars = old_get_config_vars(*args)
+    if sys.platform == 'win32':
+        lib_dir = os.path.join(sys.real_prefix, "libs")
+        if isinstance(real_vars, dict) and 'LIBDIR' not in real_vars:
+            real_vars['LIBDIR'] = lib_dir # asked for all
+        elif isinstance(real_vars, list) and 'LIBDIR' in args:
+            real_vars = real_vars + [lib_dir] # asked for list
+    return real_vars
+sysconfig_get_config_vars.__doc__ = old_get_config_vars.__doc__
+sysconfig.get_config_vars = sysconfig_get_config_vars
diff --git a/bootstrap/virtualenv/virtualenv_embedded/distutils.cfg b/bootstrap/virtualenv/virtualenv_embedded/distutils.cfg
new file mode 100644
index 0000000..1af230e
--- /dev/null
+++ b/bootstrap/virtualenv/virtualenv_embedded/distutils.cfg
@@ -0,0 +1,6 @@
+# This is a config file local to this virtualenv installation
+# You may include options that will be used by all distutils commands,
+# and by easy_install.  For instance:
+#
+#   [easy_install]
+#   find_links = http://mylocalsite
diff --git a/bootstrap/virtualenv/virtualenv_embedded/site.py b/bootstrap/virtualenv/virtualenv_embedded/site.py
new file mode 100644
index 0000000..7969769
--- /dev/null
+++ b/bootstrap/virtualenv/virtualenv_embedded/site.py
@@ -0,0 +1,758 @@
+"""Append module search paths for third-party packages to sys.path.
+
+****************************************************************
+* This module is automatically imported during initialization. *
+****************************************************************
+
+In earlier versions of Python (up to 1.5a3), scripts or modules that
+needed to use site-specific modules would place ``import site''
+somewhere near the top of their code.  Because of the automatic
+import, this is no longer necessary (but code that does it still
+works).
+
+This will append site-specific paths to the module search path.  On
+Unix, it starts with sys.prefix and sys.exec_prefix (if different) and
+appends lib/python<version>/site-packages as well as lib/site-python.
+It also supports the Debian convention of
+lib/python<version>/dist-packages.  On other platforms (mainly Mac and
+Windows), it uses just sys.prefix (and sys.exec_prefix, if different,
+but this is unlikely).  The resulting directories, if they exist, are
+appended to sys.path, and also inspected for path configuration files.
+
+FOR DEBIAN, this sys.path is augmented with directories in /usr/local.
+Local addons go into /usr/local/lib/python<version>/site-packages
+(resp. /usr/local/lib/site-python), Debian addons install into
+/usr/{lib,share}/python<version>/dist-packages.
+
+A path configuration file is a file whose name has the form
+<package>.pth; its contents are additional directories (one per line)
+to be added to sys.path.  Non-existing directories (or
+non-directories) are never added to sys.path; no directory is added to
+sys.path more than once.  Blank lines and lines beginning with
+'#' are skipped. Lines starting with 'import' are executed.
+
+For example, suppose sys.prefix and sys.exec_prefix are set to
+/usr/local and there is a directory /usr/local/lib/python2.X/site-packages
+with three subdirectories, foo, bar and spam, and two path
+configuration files, foo.pth and bar.pth.  Assume foo.pth contains the
+following:
+
+  # foo package configuration
+  foo
+  bar
+  bletch
+
+and bar.pth contains:
+
+  # bar package configuration
+  bar
+
+Then the following directories are added to sys.path, in this order:
+
+  /usr/local/lib/python2.X/site-packages/bar
+  /usr/local/lib/python2.X/site-packages/foo
+
+Note that bletch is omitted because it doesn't exist; bar precedes foo
+because bar.pth comes alphabetically before foo.pth; and spam is
+omitted because it is not mentioned in either path configuration file.
+
+After these path manipulations, an attempt is made to import a module
+named sitecustomize, which can perform arbitrary additional
+site-specific customizations.  If this import fails with an
+ImportError exception, it is silently ignored.
+
+"""
+
+import sys
+import os
+try:
+    import __builtin__ as builtins
+except ImportError:
+    import builtins
+try:
+    set
+except NameError:
+    from sets import Set as set
+
+# Prefixes for site-packages; add additional prefixes like /usr/local here
+PREFIXES = [sys.prefix, sys.exec_prefix]
+# Enable per user site-packages directory
+# set it to False to disable the feature or True to force the feature
+ENABLE_USER_SITE = None
+# for distutils.commands.install
+USER_SITE = None
+USER_BASE = None
+
+_is_64bit = (getattr(sys, 'maxsize', None) or getattr(sys, 'maxint')) > 2**32
+_is_pypy = hasattr(sys, 'pypy_version_info')
+_is_jython = sys.platform[:4] == 'java'
+if _is_jython:
+    ModuleType = type(os)
+
+def makepath(*paths):
+    dir = os.path.join(*paths)
+    if _is_jython and (dir == '__classpath__' or
+                       dir.startswith('__pyclasspath__')):
+        return dir, dir
+    dir = os.path.abspath(dir)
+    return dir, os.path.normcase(dir)
+
+def abs__file__():
+    """Set all module' __file__ attribute to an absolute path"""
+    for m in sys.modules.values():
+        if ((_is_jython and not isinstance(m, ModuleType)) or
+            hasattr(m, '__loader__')):
+            # only modules need the abspath in Jython. and don't mess
+            # with a PEP 302-supplied __file__
+            continue
+        f = getattr(m, '__file__', None)
+        if f is None:
+            continue
+        m.__file__ = os.path.abspath(f)
+
+def removeduppaths():
+    """ Remove duplicate entries from sys.path along with making them
+    absolute"""
+    # This ensures that the initial path provided by the interpreter contains
+    # only absolute pathnames, even if we're running from the build directory.
+    L = []
+    known_paths = set()
+    for dir in sys.path:
+        # Filter out duplicate paths (on case-insensitive file systems also
+        # if they only differ in case); turn relative paths into absolute
+        # paths.
+        dir, dircase = makepath(dir)
+        if not dircase in known_paths:
+            L.append(dir)
+            known_paths.add(dircase)
+    sys.path[:] = L
+    return known_paths
+
+# XXX This should not be part of site.py, since it is needed even when
+# using the -S option for Python.  See http://www.python.org/sf/586680
+def addbuilddir():
+    """Append ./build/lib.<platform> in case we're running in the build dir
+    (especially for Guido :-)"""
+    from distutils.util import get_platform
+    s = "build/lib.%s-%.3s" % (get_platform(), sys.version)
+    if hasattr(sys, 'gettotalrefcount'):
+        s += '-pydebug'
+    s = os.path.join(os.path.dirname(sys.path[-1]), s)
+    sys.path.append(s)
+
+def _init_pathinfo():
+    """Return a set containing all existing directory entries from sys.path"""
+    d = set()
+    for dir in sys.path:
+        try:
+            if os.path.isdir(dir):
+                dir, dircase = makepath(dir)
+                d.add(dircase)
+        except TypeError:
+            continue
+    return d
+
+def addpackage(sitedir, name, known_paths):
+    """Add a new path to known_paths by combining sitedir and 'name' or execute
+    sitedir if it starts with 'import'"""
+    if known_paths is None:
+        _init_pathinfo()
+        reset = 1
+    else:
+        reset = 0
+    fullname = os.path.join(sitedir, name)
+    try:
+        f = open(fullname, "rU")
+    except IOError:
+        return
+    try:
+        for line in f:
+            if line.startswith("#"):
+                continue
+            if line.startswith("import"):
+                exec(line)
+                continue
+            line = line.rstrip()
+            dir, dircase = makepath(sitedir, line)
+            if not dircase in known_paths and os.path.exists(dir):
+                sys.path.append(dir)
+                known_paths.add(dircase)
+    finally:
+        f.close()
+    if reset:
+        known_paths = None
+    return known_paths
+
+def addsitedir(sitedir, known_paths=None):
+    """Add 'sitedir' argument to sys.path if missing and handle .pth files in
+    'sitedir'"""
+    if known_paths is None:
+        known_paths = _init_pathinfo()
+        reset = 1
+    else:
+        reset = 0
+    sitedir, sitedircase = makepath(sitedir)
+    if not sitedircase in known_paths:
+        sys.path.append(sitedir)        # Add path component
+    try:
+        names = os.listdir(sitedir)
+    except os.error:
+        return
+    names.sort()
+    for name in names:
+        if name.endswith(os.extsep + "pth"):
+            addpackage(sitedir, name, known_paths)
+    if reset:
+        known_paths = None
+    return known_paths
+
+def addsitepackages(known_paths, sys_prefix=sys.prefix, exec_prefix=sys.exec_prefix):
+    """Add site-packages (and possibly site-python) to sys.path"""
+    prefixes = [os.path.join(sys_prefix, "local"), sys_prefix]
+    if exec_prefix != sys_prefix:
+        prefixes.append(os.path.join(exec_prefix, "local"))
+
+    for prefix in prefixes:
+        if prefix:
+            if sys.platform in ('os2emx', 'riscos') or _is_jython:
+                sitedirs = [os.path.join(prefix, "Lib", "site-packages")]
+            elif _is_pypy:
+                sitedirs = [os.path.join(prefix, 'site-packages')]
+            elif sys.platform == 'darwin' and prefix == sys_prefix:
+
+                if prefix.startswith("/System/Library/Frameworks/"): # Apple's Python
+
+                    sitedirs = [os.path.join("/Library/Python", sys.version[:3], "site-packages"),
+                                os.path.join(prefix, "Extras", "lib", "python")]
+
+                else: # any other Python distros on OSX work this way
+                    sitedirs = [os.path.join(prefix, "lib",
+                                             "python" + sys.version[:3], "site-packages")]
+
+            elif os.sep == '/':
+                sitedirs = [os.path.join(prefix,
+                                         "lib",
+                                         "python" + sys.version[:3],
+                                         "site-packages"),
+                            os.path.join(prefix, "lib", "site-python"),
+                            os.path.join(prefix, "python" + sys.version[:3], "lib-dynload")]
+                lib64_dir = os.path.join(prefix, "lib64", "python" + sys.version[:3], "site-packages")
+                if (os.path.exists(lib64_dir) and
+                    os.path.realpath(lib64_dir) not in [os.path.realpath(p) for p in sitedirs]):
+                    if _is_64bit:
+                        sitedirs.insert(0, lib64_dir)
+                    else:
+                        sitedirs.append(lib64_dir)
+                try:
+                    # sys.getobjects only available in --with-pydebug build
+                    sys.getobjects
+                    sitedirs.insert(0, os.path.join(sitedirs[0], 'debug'))
+                except AttributeError:
+                    pass
+                # Debian-specific dist-packages directories:
+                sitedirs.append(os.path.join(prefix, "local/lib",
+                                             "python" + sys.version[:3],
+                                             "dist-packages"))
+                if sys.version[0] == '2':
+                    sitedirs.append(os.path.join(prefix, "lib",
+                                                 "python" + sys.version[:3],
+                                                 "dist-packages"))
+                else:
+                    sitedirs.append(os.path.join(prefix, "lib",
+                                                 "python" + sys.version[0],
+                                                 "dist-packages"))
+                sitedirs.append(os.path.join(prefix, "lib", "dist-python"))
+            else:
+                sitedirs = [prefix, os.path.join(prefix, "lib", "site-packages")]
+            if sys.platform == 'darwin':
+                # for framework builds *only* we add the standard Apple
+                # locations. Currently only per-user, but /Library and
+                # /Network/Library could be added too
+                if 'Python.framework' in prefix:
+                    home = os.environ.get('HOME')
+                    if home:
+                        sitedirs.append(
+                            os.path.join(home,
+                                         'Library',
+                                         'Python',
+                                         sys.version[:3],
+                                         'site-packages'))
+            for sitedir in sitedirs:
+                if os.path.isdir(sitedir):
+                    addsitedir(sitedir, known_paths)
+    return None
+
+def check_enableusersite():
+    """Check if user site directory is safe for inclusion
+
+    The function tests for the command line flag (including environment var),
+    process uid/gid equal to effective uid/gid.
+
+    None: Disabled for security reasons
+    False: Disabled by user (command line option)
+    True: Safe and enabled
+    """
+    if hasattr(sys, 'flags') and getattr(sys.flags, 'no_user_site', False):
+        return False
+
+    if hasattr(os, "getuid") and hasattr(os, "geteuid"):
+        # check process uid == effective uid
+        if os.geteuid() != os.getuid():
+            return None
+    if hasattr(os, "getgid") and hasattr(os, "getegid"):
+        # check process gid == effective gid
+        if os.getegid() != os.getgid():
+            return None
+
+    return True
+
+def addusersitepackages(known_paths):
+    """Add a per user site-package to sys.path
+
+    Each user has its own python directory with site-packages in the
+    home directory.
+
+    USER_BASE is the root directory for all Python versions
+
+    USER_SITE is the user specific site-packages directory
+
+    USER_SITE/.. can be used for data.
+    """
+    global USER_BASE, USER_SITE, ENABLE_USER_SITE
+    env_base = os.environ.get("PYTHONUSERBASE", None)
+
+    def joinuser(*args):
+        return os.path.expanduser(os.path.join(*args))
+
+    #if sys.platform in ('os2emx', 'riscos'):
+    #    # Don't know what to put here
+    #    USER_BASE = ''
+    #    USER_SITE = ''
+    if os.name == "nt":
+        base = os.environ.get("APPDATA") or "~"
+        if env_base:
+            USER_BASE = env_base
+        else:
+            USER_BASE = joinuser(base, "Python")
+        USER_SITE = os.path.join(USER_BASE,
+                                 "Python" + sys.version[0] + sys.version[2],
+                                 "site-packages")
+    else:
+        if env_base:
+            USER_BASE = env_base
+        else:
+            USER_BASE = joinuser("~", ".local")
+        USER_SITE = os.path.join(USER_BASE, "lib",
+                                 "python" + sys.version[:3],
+                                 "site-packages")
+
+    if ENABLE_USER_SITE and os.path.isdir(USER_SITE):
+        addsitedir(USER_SITE, known_paths)
+    if ENABLE_USER_SITE:
+        for dist_libdir in ("lib", "local/lib"):
+            user_site = os.path.join(USER_BASE, dist_libdir,
+                                     "python" + sys.version[:3],
+                                     "dist-packages")
+            if os.path.isdir(user_site):
+                addsitedir(user_site, known_paths)
+    return known_paths
+
+
+
+def setBEGINLIBPATH():
+    """The OS/2 EMX port has optional extension modules that do double duty
+    as DLLs (and must use the .DLL file extension) for other extensions.
+    The library search path needs to be amended so these will be found
+    during module import.  Use BEGINLIBPATH so that these are at the start
+    of the library search path.
+
+    """
+    dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload")
+    libpath = os.environ['BEGINLIBPATH'].split(';')
+    if libpath[-1]:
+        libpath.append(dllpath)
+    else:
+        libpath[-1] = dllpath
+    os.environ['BEGINLIBPATH'] = ';'.join(libpath)
+
+
+def setquit():
+    """Define new built-ins 'quit' and 'exit'.
+    These are simply strings that display a hint on how to exit.
+
+    """
+    if os.sep == ':':
+        eof = 'Cmd-Q'
+    elif os.sep == '\\':
+        eof = 'Ctrl-Z plus Return'
+    else:
+        eof = 'Ctrl-D (i.e. EOF)'
+
+    class Quitter(object):
+        def __init__(self, name):
+            self.name = name
+        def __repr__(self):
+            return 'Use %s() or %s to exit' % (self.name, eof)
+        def __call__(self, code=None):
+            # Shells like IDLE catch the SystemExit, but listen when their
+            # stdin wrapper is closed.
+            try:
+                sys.stdin.close()
+            except:
+                pass
+            raise SystemExit(code)
+    builtins.quit = Quitter('quit')
+    builtins.exit = Quitter('exit')
+
+
+class _Printer(object):
+    """interactive prompt objects for printing the license text, a list of
+    contributors and the copyright notice."""
+
+    MAXLINES = 23
+
+    def __init__(self, name, data, files=(), dirs=()):
+        self.__name = name
+        self.__data = data
+        self.__files = files
+        self.__dirs = dirs
+        self.__lines = None
+
+    def __setup(self):
+        if self.__lines:
+            return
+        data = None
+        for dir in self.__dirs:
+            for filename in self.__files:
+                filename = os.path.join(dir, filename)
+                try:
+                    fp = open(filename, "rU")
+                    data = fp.read()
+                    fp.close()
+                    break
+                except IOError:
+                    pass
+            if data:
+                break
+        if not data:
+            data = self.__data
+        self.__lines = data.split('\n')
+        self.__linecnt = len(self.__lines)
+
+    def __repr__(self):
+        self.__setup()
+        if len(self.__lines) <= self.MAXLINES:
+            return "\n".join(self.__lines)
+        else:
+            return "Type %s() to see the full %s text" % ((self.__name,)*2)
+
+    def __call__(self):
+        self.__setup()
+        prompt = 'Hit Return for more, or q (and Return) to quit: '
+        lineno = 0
+        while 1:
+            try:
+                for i in range(lineno, lineno + self.MAXLINES):
+                    print(self.__lines[i])
+            except IndexError:
+                break
+            else:
+                lineno += self.MAXLINES
+                key = None
+                while key is None:
+                    try:
+                        key = raw_input(prompt)
+                    except NameError:
+                        key = input(prompt)
+                    if key not in ('', 'q'):
+                        key = None
+                if key == 'q':
+                    break
+
+def setcopyright():
+    """Set 'copyright' and 'credits' in __builtin__"""
+    builtins.copyright = _Printer("copyright", sys.copyright)
+    if _is_jython:
+        builtins.credits = _Printer(
+            "credits",
+            "Jython is maintained by the Jython developers (www.jython.org).")
+    elif _is_pypy:
+        builtins.credits = _Printer(
+            "credits",
+            "PyPy is maintained by the PyPy developers: http://pypy.org/")
+    else:
+        builtins.credits = _Printer("credits", """\
+    Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands
+    for supporting Python development.  See www.python.org for more information.""")
+    here = os.path.dirname(os.__file__)
+    builtins.license = _Printer(
+        "license", "See http://www.python.org/%.3s/license.html" % sys.version,
+        ["LICENSE.txt", "LICENSE"],
+        [os.path.join(here, os.pardir), here, os.curdir])
+
+
+class _Helper(object):
+    """Define the built-in 'help'.
+    This is a wrapper around pydoc.help (with a twist).
+
+    """
+
+    def __repr__(self):
+        return "Type help() for interactive help, " \
+               "or help(object) for help about object."
+    def __call__(self, *args, **kwds):
+        import pydoc
+        return pydoc.help(*args, **kwds)
+
+def sethelper():
+    builtins.help = _Helper()
+
+def aliasmbcs():
+    """On Windows, some default encodings are not provided by Python,
+    while they are always available as "mbcs" in each locale. Make
+    them usable by aliasing to "mbcs" in such a case."""
+    if sys.platform == 'win32':
+        import locale, codecs
+        enc = locale.getdefaultlocale()[1]
+        if enc.startswith('cp'):            # "cp***" ?
+            try:
+                codecs.lookup(enc)
+            except LookupError:
+                import encodings
+                encodings._cache[enc] = encodings._unknown
+                encodings.aliases.aliases[enc] = 'mbcs'
+
+def setencoding():
+    """Set the string encoding used by the Unicode implementation.  The
+    default is 'ascii', but if you're willing to experiment, you can
+    change this."""
+    encoding = "ascii" # Default value set by _PyUnicode_Init()
+    if 0:
+        # Enable to support locale aware default string encodings.
+        import locale
+        loc = locale.getdefaultlocale()
+        if loc[1]:
+            encoding = loc[1]
+    if 0:
+        # Enable to switch off string to Unicode coercion and implicit
+        # Unicode to string conversion.
+        encoding = "undefined"
+    if encoding != "ascii":
+        # On Non-Unicode builds this will raise an AttributeError...
+        sys.setdefaultencoding(encoding) # Needs Python Unicode build !
+
+
+def execsitecustomize():
+    """Run custom site specific code, if available."""
+    try:
+        import sitecustomize
+    except ImportError:
+        pass
+
+def virtual_install_main_packages():
+    f = open(os.path.join(os.path.dirname(__file__), 'orig-prefix.txt'))
+    sys.real_prefix = f.read().strip()
+    f.close()
+    pos = 2
+    hardcoded_relative_dirs = []
+    if sys.path[0] == '':
+        pos += 1
+    if _is_jython:
+        paths = [os.path.join(sys.real_prefix, 'Lib')]
+    elif _is_pypy:
+        if sys.version_info > (3, 2):
+            cpyver = '%d' % sys.version_info[0]
+        elif sys.pypy_version_info >= (1, 5):
+            cpyver = '%d.%d' % sys.version_info[:2]
+        else:
+            cpyver = '%d.%d.%d' % sys.version_info[:3]
+        paths = [os.path.join(sys.real_prefix, 'lib_pypy'),
+                 os.path.join(sys.real_prefix, 'lib-python', cpyver)]
+        if sys.pypy_version_info < (1, 9):
+            paths.insert(1, os.path.join(sys.real_prefix,
+                                         'lib-python', 'modified-%s' % cpyver))
+        hardcoded_relative_dirs = paths[:] # for the special 'darwin' case below
+        #
+        # This is hardcoded in the Python executable, but relative to sys.prefix:
+        for path in paths[:]:
+            plat_path = os.path.join(path, 'plat-%s' % sys.platform)
+            if os.path.exists(plat_path):
+                paths.append(plat_path)
+    elif sys.platform == 'win32':
+        paths = [os.path.join(sys.real_prefix, 'Lib'), os.path.join(sys.real_prefix, 'DLLs')]
+    else:
+        paths = [os.path.join(sys.real_prefix, 'lib', 'python'+sys.version[:3])]
+        hardcoded_relative_dirs = paths[:] # for the special 'darwin' case below
+        lib64_path = os.path.join(sys.real_prefix, 'lib64', 'python'+sys.version[:3])
+        if os.path.exists(lib64_path):
+            if _is_64bit:
+                paths.insert(0, lib64_path)
+            else:
+                paths.append(lib64_path)
+        # This is hardcoded in the Python executable, but relative to
+        # sys.prefix.  Debian change: we need to add the multiarch triplet
+        # here, which is where the real stuff lives.  As per PEP 421, in
+        # Python 3.3+, this lives in sys.implementation, while in Python 2.7
+        # it lives in sys.
+        try:
+            arch = getattr(sys, 'implementation', sys)._multiarch
+        except AttributeError:
+            # This is a non-multiarch aware Python.  Fallback to the old way.
+            arch = sys.platform
+        plat_path = os.path.join(sys.real_prefix, 'lib',
+                                 'python'+sys.version[:3],
+                                 'plat-%s' % arch)
+        if os.path.exists(plat_path):
+            paths.append(plat_path)
+    # This is hardcoded in the Python executable, but
+    # relative to sys.prefix, so we have to fix up:
+    for path in list(paths):
+        tk_dir = os.path.join(path, 'lib-tk')
+        if os.path.exists(tk_dir):
+            paths.append(tk_dir)
+
+    # These are hardcoded in the Apple's Python executable,
+    # but relative to sys.prefix, so we have to fix them up:
+    if sys.platform == 'darwin':
+        hardcoded_paths = [os.path.join(relative_dir, module)
+                           for relative_dir in hardcoded_relative_dirs
+                           for module in ('plat-darwin', 'plat-mac', 'plat-mac/lib-scriptpackages')]
+
+        for path in hardcoded_paths:
+            if os.path.exists(path):
+                paths.append(path)
+
+    sys.path.extend(paths)
+
+def force_global_eggs_after_local_site_packages():
+    """
+    Force easy_installed eggs in the global environment to get placed
+    in sys.path after all packages inside the virtualenv.  This
+    maintains the "least surprise" result that packages in the
+    virtualenv always mask global packages, never the other way
+    around.
+
+    """
+    egginsert = getattr(sys, '__egginsert', 0)
+    for i, path in enumerate(sys.path):
+        if i > egginsert and path.startswith(sys.prefix):
+            egginsert = i
+    sys.__egginsert = egginsert + 1
+
+def virtual_addsitepackages(known_paths):
+    force_global_eggs_after_local_site_packages()
+    return addsitepackages(known_paths, sys_prefix=sys.real_prefix)
+
+def fixclasspath():
+    """Adjust the special classpath sys.path entries for Jython. These
+    entries should follow the base virtualenv lib directories.
+    """
+    paths = []
+    classpaths = []
+    for path in sys.path:
+        if path == '__classpath__' or path.startswith('__pyclasspath__'):
+            classpaths.append(path)
+        else:
+            paths.append(path)
+    sys.path = paths
+    sys.path.extend(classpaths)
+
+def execusercustomize():
+    """Run custom user specific code, if available."""
+    try:
+        import usercustomize
+    except ImportError:
+        pass
+
+
+def main():
+    global ENABLE_USER_SITE
+    virtual_install_main_packages()
+    abs__file__()
+    paths_in_sys = removeduppaths()
+    if (os.name == "posix" and sys.path and
+        os.path.basename(sys.path[-1]) == "Modules"):
+        addbuilddir()
+    if _is_jython:
+        fixclasspath()
+    GLOBAL_SITE_PACKAGES = not os.path.exists(os.path.join(os.path.dirname(__file__), 'no-global-site-packages.txt'))
+    if not GLOBAL_SITE_PACKAGES:
+        ENABLE_USER_SITE = False
+    if ENABLE_USER_SITE is None:
+        ENABLE_USER_SITE = check_enableusersite()
+    paths_in_sys = addsitepackages(paths_in_sys)
+    paths_in_sys = addusersitepackages(paths_in_sys)
+    if GLOBAL_SITE_PACKAGES:
+        paths_in_sys = virtual_addsitepackages(paths_in_sys)
+    if sys.platform == 'os2emx':
+        setBEGINLIBPATH()
+    setquit()
+    setcopyright()
+    sethelper()
+    aliasmbcs()
+    setencoding()
+    execsitecustomize()
+    if ENABLE_USER_SITE:
+        execusercustomize()
+    # Remove sys.setdefaultencoding() so that users cannot change the
+    # encoding after initialization.  The test for presence is needed when
+    # this module is run as a script, because this code is executed twice.
+    if hasattr(sys, "setdefaultencoding"):
+        del sys.setdefaultencoding
+
+main()
+
+def _script():
+    help = """\
+    %s [--user-base] [--user-site]
+
+    Without arguments print some useful information
+    With arguments print the value of USER_BASE and/or USER_SITE separated
+    by '%s'.
+
+    Exit codes with --user-base or --user-site:
+      0 - user site directory is enabled
+      1 - user site directory is disabled by user
+      2 - user site directory is disabled by super user
+          or for security reasons
+     >2 - unknown error
+    """
+    args = sys.argv[1:]
+    if not args:
+        print("sys.path = [")
+        for dir in sys.path:
+            print("    %r," % (dir,))
+        print("]")
+        def exists(path):
+            if os.path.isdir(path):
+                return "exists"
+            else:
+                return "doesn't exist"
+        print("USER_BASE: %r (%s)" % (USER_BASE, exists(USER_BASE)))
+        print("USER_SITE: %r (%s)" % (USER_SITE, exists(USER_SITE)))
+        print("ENABLE_USER_SITE: %r" %  ENABLE_USER_SITE)
+        sys.exit(0)
+
+    buffer = []
+    if '--user-base' in args:
+        buffer.append(USER_BASE)
+    if '--user-site' in args:
+        buffer.append(USER_SITE)
+
+    if buffer:
+        print(os.pathsep.join(buffer))
+        if ENABLE_USER_SITE:
+            sys.exit(0)
+        elif ENABLE_USER_SITE is False:
+            sys.exit(1)
+        elif ENABLE_USER_SITE is None:
+            sys.exit(2)
+        else:
+            sys.exit(3)
+    else:
+        import textwrap
+        print(textwrap.dedent(help % (sys.argv[0], os.pathsep)))
+        sys.exit(10)
+
+if __name__ == '__main__':
+    _script()
diff --git a/bootstrap/virtualenv/virtualenv_support/__init__.py b/bootstrap/virtualenv/virtualenv_support/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/bootstrap/virtualenv/virtualenv_support/__init__.py
diff --git a/bootstrap/virtualenv/virtualenv_support/pip-6.0-py2.py3-none-any.whl b/bootstrap/virtualenv/virtualenv_support/pip-6.0-py2.py3-none-any.whl
new file mode 100644
index 0000000..34c8ff1
--- /dev/null
+++ b/bootstrap/virtualenv/virtualenv_support/pip-6.0-py2.py3-none-any.whl
Binary files differ
diff --git a/bootstrap/virtualenv/virtualenv_support/setuptools-8.2.1-py2.py3-none-any.whl b/bootstrap/virtualenv/virtualenv_support/setuptools-8.2.1-py2.py3-none-any.whl
new file mode 100644
index 0000000..fa3a6a5
--- /dev/null
+++ b/bootstrap/virtualenv/virtualenv_support/setuptools-8.2.1-py2.py3-none-any.whl
Binary files differ
diff --git a/buildbucket.py b/buildbucket.py
new file mode 100755
index 0000000..f00cc8f
--- /dev/null
+++ b/buildbucket.py
@@ -0,0 +1,119 @@
+#!/usr/bin/env python
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tool for interacting with Buildbucket.
+
+Usage:
+  $ depot-tools-auth login https://cr-buildbucket.appspot.com
+  $ buildbucket.py \
+    put \
+    --bucket master.tryserver.chromium.linux \
+    --builder my-builder \
+
+  Puts a build into buildbucket for my-builder on tryserver.chromium.linux.
+"""
+
+import argparse
+import json
+import urlparse
+import os
+import sys
+
+from third_party import httplib2
+
+import auth
+
+
+BUILDBUCKET_URL = 'https://cr-buildbucket.appspot.com'
+PUT_BUILD_URL = urlparse.urljoin(
+  BUILDBUCKET_URL,
+  '_ah/api/buildbucket/v1/builds',
+)
+
+
+def main(argv):
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+    '-v',
+    '--verbose',
+    action='store_true',
+  )
+  subparsers = parser.add_subparsers(dest='command')
+  put_parser = subparsers.add_parser('put')
+  put_parser.add_argument(
+    '-b',
+    '--bucket',
+    help=(
+      'The bucket to schedule the build on. Typically the master name, e.g.'
+      ' master.tryserver.chromium.linux.'
+    ),
+    required=True,
+  )
+  put_parser.add_argument(
+    '-c',
+    '--changes',
+    help='A file to load a JSON list of changes dicts from.',
+  )
+  put_parser.add_argument(
+    '-n',
+    '--builder-name',
+    help='The builder to schedule the build on.',
+    required=True,
+  )
+  put_parser.add_argument(
+    '-p',
+    '--properties',
+    help='A file to load a JSON dict of properties from.',
+  )
+  args = parser.parse_args()
+  # TODO(smut): When more commands are implemented, refactor this.
+  assert args.command == 'put'
+
+  changes = []
+  if args.changes:
+    try:
+      with open(args.changes) as fp:
+        changes.extend(json.load(fp))
+    except (TypeError, ValueError):
+      sys.stderr.write('%s contained invalid JSON list.\n' % args.changes)
+      raise
+
+  properties = {}
+  if args.properties:
+    try:
+      with open(args.properties) as fp:
+        properties.update(json.load(fp))
+    except (TypeError, ValueError):
+      sys.stderr.write('%s contained invalid JSON dict.\n' % args.properties)
+      raise
+
+  authenticator = auth.get_authenticator_for_host(
+    BUILDBUCKET_URL,
+    auth.make_auth_config(use_oauth2=True),
+  )
+  http = authenticator.authorize(httplib2.Http())
+  http.force_exception_to_status_code = True
+  response, content = http.request(
+    PUT_BUILD_URL,
+    'PUT',
+    body=json.dumps({
+      'bucket': args.bucket,
+      'parameters_json': json.dumps({
+        'builder_name': args.builder_name,
+        'changes': changes,
+        'properties': properties,
+      }),
+    }),
+    headers={'Content-Type': 'application/json'},
+  )
+
+  if args.verbose:
+    print content
+
+  return response.status != 200
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/checkout.py b/checkout.py
index 5dbb375..8d5ccce 100644
--- a/checkout.py
+++ b/checkout.py
@@ -676,7 +676,7 @@
           else:
             # No need to do anything special with p.is_new or if not
             # p.diff_hunks. git apply manages all that already.
-            cmd = ['apply', '--index', '-p%s' % p.patchlevel]
+            cmd = ['apply', '--index', '-3', '-p%s' % p.patchlevel]
             if verbose:
               cmd.append('--verbose')
             stdout.append(self._check_output_git(cmd, stdin=p.get(True)))
diff --git a/chrome-update.py b/chrome-update.py
index 4f5731c..4fb11e5 100755
--- a/chrome-update.py
+++ b/chrome-update.py
@@ -60,7 +60,7 @@
   return subprocess.call(cmd, cwd=chrome_root, shell=IS_WIN)
 
 
-def Main(args):
+def main(args):
   if len(args) < 3:
     print('Usage: chrome-update.py <path> [options]')
     print('See options from compile.py at')
@@ -84,4 +84,8 @@
 
 
 if __name__ == "__main__":
-  sys.exit(Main(sys.argv))
+  try:
+    sys.exit(main(sys.argv))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/cit b/cit
new file mode 100755
index 0000000..410341f
--- /dev/null
+++ b/cit
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+base_dir=$(dirname "$0")
+
+PYTHONDONTWRITEBYTECODE=1 exec python "$base_dir/cit.py" "$@"
diff --git a/cit.bat b/cit.bat
new file mode 100755
index 0000000..a1da0dd
--- /dev/null
+++ b/cit.bat
@@ -0,0 +1,11 @@
+@echo off
+:: Copyright (c) 2015 The Chromium Authors. All rights reserved.
+:: Use of this source code is governed by a BSD-style license that can be
+:: found in the LICENSE file.
+setlocal
+
+:: This is required with cygwin only.
+PATH=%~dp0;%PATH%
+
+:: Defer control.
+%~dp0python "%~dp0\cit.py" %*
diff --git a/cit.py b/cit.py
new file mode 100755
index 0000000..5210cee
--- /dev/null
+++ b/cit.py
@@ -0,0 +1,104 @@
+#!/usr/bin/env python
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""Wrapper for updating and calling infra.git tools.
+
+This tool does two things:
+* Maintains an infra.git checkout pinned at "deployed" in the home dir
+* Acts as an alias to infra.tools.*
+"""
+
+# TODO(hinoka): Use cipd/glyco instead of git/gclient.
+
+import sys
+import os
+import subprocess
+import re
+
+
+SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
+GCLIENT = os.path.join(SCRIPT_DIR, 'gclient.py')
+TARGET_DIR = os.path.expanduser('~/.chrome-infra')
+INFRA_DIR = os.path.join(TARGET_DIR, 'infra')
+
+
+def get_git_rev(target, branch):
+  return subprocess.check_output(
+      ['git', 'log', '--format=%B', '-n1', branch], cwd=target)
+
+
+def need_to_update():
+  """Checks to see if we need to update the ~/.chrome-infra/infra checkout."""
+  try:
+    cmd = [sys.executable, GCLIENT, 'revinfo']
+    subprocess.check_call(
+        cmd, cwd=os.path.join(TARGET_DIR), stdout=subprocess.PIPE)
+  except subprocess.CalledProcessError:
+    return True  # Gclient failed, definitely need to update.
+  except OSError:
+    return True  # Gclient failed, definitely need to update.
+
+  local_rev = get_git_rev(INFRA_DIR, 'HEAD')
+
+  subprocess.check_call(
+      ['git', 'fetch', 'origin'], cwd=INFRA_DIR,
+      stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+  origin_rev = get_git_rev(INFRA_DIR, 'origin/deployed')
+  return origin_rev != local_rev
+
+
+def ensure_infra():
+  """Ensures that infra.git is present in ~/.chrome-infra."""
+  print 'Fetching infra into %s, may take a couple of minutes...' % TARGET_DIR
+  if not os.path.isdir(TARGET_DIR):
+    os.mkdir(TARGET_DIR)
+  if not os.path.exists(os.path.join(TARGET_DIR, '.gclient')):
+    subprocess.check_call(
+        [sys.executable, os.path.join(SCRIPT_DIR, 'fetch.py'), 'infra'],
+        cwd=TARGET_DIR,
+        stdout=subprocess.PIPE)
+  subprocess.check_call(
+      [sys.executable, GCLIENT, 'sync', '--revision', 'origin/deployed'],
+      cwd=TARGET_DIR,
+      stdout=subprocess.PIPE)
+
+
+def get_available_tools():
+  tools = []
+  starting = os.path.join(TARGET_DIR, 'infra', 'infra', 'tools')
+  for root, _, files in os.walk(starting):
+    if '__main__.py' in files:
+      tools.append(root[len(starting)+1:].replace(os.path.sep, '.'))
+  return tools
+
+
+def run(args):
+  if args:
+    tool_name = args[0]
+    cmd = [
+        sys.executable, os.path.join(TARGET_DIR, 'infra', 'run.py'),
+        'infra.tools.%s' % tool_name]
+    cmd.extend(args[1:])
+    return subprocess.call(cmd)
+
+  tools = get_available_tools()
+  print """usage: cit.py <name of tool> [args for tool]
+
+  Wrapper for maintaining and calling tools in "infra.git/run.py infra.tools.*"
+
+  Available tools are:
+  """
+  for tool in tools:
+    print '  * %s' % tool
+
+
+def main():
+  if need_to_update():
+    ensure_infra()
+  return run(sys.argv[1:])
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/clang_format.py b/clang_format.py
index 8320e6d..5bfeb1a 100755
--- a/clang_format.py
+++ b/clang_format.py
@@ -62,8 +62,12 @@
   if any(match in args for match in help_syntax):
     print '\nDepot tools redirects you to the clang-format at:\n    %s\n' % tool
 
-  return subprocess.call([tool] + sys.argv[1:])
+  return subprocess.call([tool] + args)
 
 
 if __name__ == '__main__':
-  sys.exit(main(sys.argv))
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/commit_queue b/commit_queue
new file mode 100755
index 0000000..a634371
--- /dev/null
+++ b/commit_queue
@@ -0,0 +1,12 @@
+#!/usr/bin/env bash
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+base_dir=$(dirname "$0")
+
+if [[ "#grep#fetch#cleanup#diff#" != *"#$1#"* ]]; then
+  "$base_dir"/update_depot_tools "$@"
+fi
+
+PYTHONDONTWRITEBYTECODE=1 exec "$base_dir/ENV/bin/python" "$base_dir/commit_queue.py" "$@"
diff --git a/commit_queue.bat b/commit_queue.bat
new file mode 100755
index 0000000..76018a6
--- /dev/null
+++ b/commit_queue.bat
@@ -0,0 +1,14 @@
+@echo off

+:: Copyright 2015 The Chromium Authors. All rights reserved.

+:: Use of this source code is governed by a BSD-style license that can be

+:: found in the LICENSE file.

+setlocal

+

+:: This is required with cygwin only.

+PATH=%~dp0;%PATH%

+

+:: Synchronize the root directory before deferring control back to gclient.py.

+call "%~dp0\update_depot_tools.bat" %*

+

+:: Defer control.

+%~dp0\ENV\bin\python "%~dp0\commit_queue.py" %*

diff --git a/commit_queue.py b/commit_queue.py
index 7639065..25ddb38 100755
--- a/commit_queue.py
+++ b/commit_queue.py
@@ -9,6 +9,7 @@
 __version__ = '0.1'
 
 import functools
+import json
 import logging
 import optparse
 import os
@@ -17,9 +18,16 @@
 
 import breakpad  # pylint: disable=W0611
 
+import auth
 import fix_encoding
 import rietveld
 
+THIRD_PARTY_DIR = os.path.join(os.path.dirname(__file__), 'third_party')
+sys.path.insert(0, THIRD_PARTY_DIR)
+
+from cq_client import cq_pb2
+from cq_client import validate_config
+from protobuf26 import text_format
 
 def usage(more):
   def hook(fn):
@@ -36,9 +44,10 @@
 
     def new_parse_args(args=None, values=None):
       options, args = old_parse_args(args, values)
+      auth_config = auth.extract_auth_config_from_options(options)
       if not options.issue:
         parser.error('Require --issue')
-      obj = rietveld.Rietveld(options.server, options.user, None)
+      obj = rietveld.Rietveld(options.server, auth_config, options.user)
       return options, args, obj
 
     parser.parse_args = new_parse_args
@@ -59,6 +68,7 @@
         metavar='S',
         default='http://codereview.chromium.org',
         help='Rietveld server, default: %default')
+    auth.add_auth_options(parser)
 
     # Call the original function with the modified parser.
     return fn(parser, args, *extra_args, **kwargs)
@@ -99,6 +109,60 @@
   return set_commit(obj, options.issue, '0')
 
 
+def CMDbuilders(parser, args):
+  """Prints json-formatted list of builders given a path to cq.cfg file.
+
+  The output is a dictionary in the following format:
+    {
+      'master_name': [
+        'builder_name',
+        'another_builder'
+      ],
+      'another_master': [
+        'third_builder'
+      ]
+    }
+  """
+  _, args = parser.parse_args(args)
+  if len(args) != 1:
+    parser.error('Expected a single path to CQ config. Got: %s' %
+                 ' '.join(args))
+
+  with open(args[0]) as config_file:
+    cq_config = config_file.read()
+
+  config = cq_pb2.Config()
+  text_format.Merge(cq_config, config)
+  masters = {}
+  if config.HasField('verifiers') and config.verifiers.HasField('try_job'):
+    for bucket in config.verifiers.try_job.buckets:
+      masters.setdefault(bucket.name, [])
+      for builder in bucket.builders:
+        if not builder.HasField('experiment_percentage'):
+          masters[bucket.name].append(builder.name)
+  print json.dumps(masters)
+
+CMDbuilders.func_usage_more = '<path-to-cq-config>'
+
+
+def CMDvalidate(parser, args):
+  """Validates a CQ config.
+
+  Takes a single argument - path to the CQ config to be validated. Returns 0 on
+  valid config, non-zero on invalid config. Errors and warnings are printed to
+  screen.
+  """
+  _, args = parser.parse_args(args)
+  if len(args) != 1:
+    parser.error('Expected a single path to CQ config. Got: %s' %
+                 ' '.join(args))
+
+  with open(args[0]) as config_file:
+    cq_config = config_file.read()
+  return 0 if validate_config.IsValid(cq_config) else 1
+
+CMDvalidate.func_usage_more = '<path-to-cq-config>'
+
 ###############################################################################
 ## Boilerplate code
 
@@ -181,4 +245,8 @@
 
 if __name__ == "__main__":
   fix_encoding.fix_encoding()
-  sys.exit(main())
+  try:
+    sys.exit(main())
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/cpplint.py b/cpplint.py
index 3011345..ccc25d4 100755
--- a/cpplint.py
+++ b/cpplint.py
@@ -175,71 +175,77 @@
 # If you add a new error message with a new category, add it to the list
 # here!  cpplint_unittest.py should tell you if you forget to do this.
 _ERROR_CATEGORIES = [
-  'build/class',
-  'build/c++11',
-  'build/deprecated',
-  'build/endif_comment',
-  'build/explicit_make_pair',
-  'build/forward_decl',
-  'build/header_guard',
-  'build/include',
-  'build/include_alpha',
-  'build/include_order',
-  'build/include_what_you_use',
-  'build/namespaces',
-  'build/printf_format',
-  'build/storage_class',
-  'legal/copyright',
-  'readability/alt_tokens',
-  'readability/braces',
-  'readability/casting',
-  'readability/check',
-  'readability/constructors',
-  'readability/fn_size',
-  'readability/function',
-  'readability/inheritance',
-  'readability/multiline_comment',
-  'readability/multiline_string',
-  'readability/namespace',
-  'readability/nolint',
-  'readability/nul',
-  'readability/streams',
-  'readability/todo',
-  'readability/utf8',
-  'runtime/arrays',
-  'runtime/casting',
-  'runtime/explicit',
-  'runtime/int',
-  'runtime/init',
-  'runtime/invalid_increment',
-  'runtime/member_string_references',
-  'runtime/memset',
-  'runtime/indentation_namespace',
-  'runtime/operator',
-  'runtime/printf',
-  'runtime/printf_format',
-  'runtime/references',
-  'runtime/string',
-  'runtime/threadsafe_fn',
-  'runtime/vlog',
-  'whitespace/blank_line',
-  'whitespace/braces',
-  'whitespace/comma',
-  'whitespace/comments',
-  'whitespace/empty_conditional_body',
-  'whitespace/empty_loop_body',
-  'whitespace/end_of_line',
-  'whitespace/ending_newline',
-  'whitespace/forcolon',
-  'whitespace/indent',
-  'whitespace/line_length',
-  'whitespace/newline',
-  'whitespace/operators',
-  'whitespace/parens',
-  'whitespace/semicolon',
-  'whitespace/tab',
-  'whitespace/todo'
-  ]
+    'build/class',
+    'build/c++11',
+    'build/deprecated',
+    'build/endif_comment',
+    'build/explicit_make_pair',
+    'build/forward_decl',
+    'build/header_guard',
+    'build/include',
+    'build/include_alpha',
+    'build/include_order',
+    'build/include_what_you_use',
+    'build/namespaces',
+    'build/printf_format',
+    'build/storage_class',
+    'legal/copyright',
+    'readability/alt_tokens',
+    'readability/braces',
+    'readability/casting',
+    'readability/check',
+    'readability/constructors',
+    'readability/fn_size',
+    'readability/function',
+    'readability/inheritance',
+    'readability/multiline_comment',
+    'readability/multiline_string',
+    'readability/namespace',
+    'readability/nolint',
+    'readability/nul',
+    'readability/strings',
+    'readability/todo',
+    'readability/utf8',
+    'runtime/arrays',
+    'runtime/casting',
+    'runtime/explicit',
+    'runtime/int',
+    'runtime/init',
+    'runtime/invalid_increment',
+    'runtime/member_string_references',
+    'runtime/memset',
+    'runtime/indentation_namespace',
+    'runtime/operator',
+    'runtime/printf',
+    'runtime/printf_format',
+    'runtime/references',
+    'runtime/string',
+    'runtime/threadsafe_fn',
+    'runtime/vlog',
+    'whitespace/blank_line',
+    'whitespace/braces',
+    'whitespace/comma',
+    'whitespace/comments',
+    'whitespace/empty_conditional_body',
+    'whitespace/empty_loop_body',
+    'whitespace/end_of_line',
+    'whitespace/ending_newline',
+    'whitespace/forcolon',
+    'whitespace/indent',
+    'whitespace/line_length',
+    'whitespace/newline',
+    'whitespace/operators',
+    'whitespace/parens',
+    'whitespace/semicolon',
+    'whitespace/tab',
+    'whitespace/todo',
+    ]
+
+# These error categories are no longer enforced by cpplint, but for backwards-
+# compatibility they may still appear in NOLINT comments.
+_LEGACY_ERROR_CATEGORIES = [
+    'readability/streams',
+    ]
 
 # The default state of the category filter. This is overridden by the --filter=
 # flag. By default all errors are on, so only add here categories that should be
@@ -522,7 +528,7 @@
         category = category[1:-1]
         if category in _ERROR_CATEGORIES:
           _error_suppressions.setdefault(category, set()).add(suppressed_line)
-        else:
+        elif category not in _LEGACY_ERROR_CATEGORIES:
           error(filename, linenum, 'readability/nolint', 5,
                 'Unknown NOLINT error category: %s' % category)
 
@@ -690,7 +696,7 @@
     # If previous line was a blank line, assume that the headers are
     # intentionally sorted the way they are.
     if (self._last_header > header_path and
-        not Match(r'^\s*$', clean_lines.elided[linenum - 1])):
+        Match(r'^\s*#\s*include\b', clean_lines.elided[linenum - 1])):
       return False
     return True
 
@@ -1246,7 +1252,7 @@
   # Having // dummy comments makes the lines non-empty, so we will not get
   # unnecessary blank line warnings later in the code.
   for i in range(begin, end):
-    lines[i] = '// dummy'
+    lines[i] = '/**/'
 
 
 def RemoveMultiLineComments(filename, lines, error):
@@ -1282,12 +1288,14 @@
 
 
 class CleansedLines(object):
-  """Holds 3 copies of all lines with different preprocessing applied to them.
+  """Holds 4 copies of all lines with different preprocessing applied to them.
 
-  1) elided member contains lines without strings and comments,
-  2) lines member contains lines without comments, and
+  1) elided member contains lines without strings and comments.
+  2) lines member contains lines without comments.
   3) raw_lines member contains all the lines without processing.
-  All these three members are of <type 'list'>, and of the same length.
+  4) lines_without_raw_strings member is same as raw_lines, but with C++11 raw
+     strings removed.
+  All these members are of <type 'list'>, and of the same length.
   """
 
   def __init__(self, lines):
@@ -1656,15 +1664,17 @@
   # flymake.
   filename = re.sub(r'_flymake\.h$', '.h', filename)
   filename = re.sub(r'/\.flymake/([^/]*)$', r'/\1', filename)
-
+  # Replace 'c++' with 'cpp'.
+  filename = filename.replace('C++', 'cpp').replace('c++', 'cpp')
+  
   fileinfo = FileInfo(filename)
   file_path_from_root = fileinfo.RepositoryName()
   if _root:
     file_path_from_root = re.sub('^' + _root + os.sep, '', file_path_from_root)
-  return re.sub(r'[-./\s]', '_', file_path_from_root).upper() + '_'
+  return re.sub(r'[^a-zA-Z0-9]', '_', file_path_from_root).upper() + '_'
 
 
-def CheckForHeaderGuard(filename, lines, error):
+def CheckForHeaderGuard(filename, clean_lines, error):
   """Checks that the file contains a header guard.
 
   Logs an error if no #ifndef header guard is present.  For other
@@ -1672,7 +1682,7 @@
 
   Args:
     filename: The name of the C++ header file.
-    lines: An array of strings, each representing a line of the file.
+    clean_lines: A CleansedLines instance containing the file.
     error: The function to call with any errors found.
   """
 
@@ -1682,18 +1692,19 @@
   # Because this is silencing a warning for a nonexistent line, we
   # only support the very specific NOLINT(build/header_guard) syntax,
   # and not the general NOLINT or NOLINT(*) syntax.
-  for i in lines:
+  raw_lines = clean_lines.lines_without_raw_strings
+  for i in raw_lines:
     if Search(r'//\s*NOLINT\(build/header_guard\)', i):
       return
 
   cppvar = GetHeaderGuardCPPVariable(filename)
 
-  ifndef = None
+  ifndef = ''
   ifndef_linenum = 0
-  define = None
-  endif = None
+  define = ''
+  endif = ''
   endif_linenum = 0
-  for linenum, line in enumerate(lines):
+  for linenum, line in enumerate(raw_lines):
     linesplit = line.split()
     if len(linesplit) >= 2:
       # find the first occurrence of #ifndef and #define, save arg
@@ -1708,18 +1719,12 @@
       endif = line
       endif_linenum = linenum
 
-  if not ifndef:
+  if not ifndef or not define or ifndef != define:
     error(filename, 0, 'build/header_guard', 5,
           'No #ifndef header guard found, suggested CPP variable is: %s' %
           cppvar)
     return
 
-  if not define:
-    error(filename, 0, 'build/header_guard', 5,
-          'No #define header guard found, suggested CPP variable is: %s' %
-          cppvar)
-    return
-
   # The guard should be PATH_FILE_H_, but we also allow PATH_FILE_H__
   # for backward compatibility.
   if ifndef != cppvar:
@@ -1727,26 +1732,69 @@
     if ifndef != cppvar + '_':
       error_level = 5
 
-    ParseNolintSuppressions(filename, lines[ifndef_linenum], ifndef_linenum,
+    ParseNolintSuppressions(filename, raw_lines[ifndef_linenum], ifndef_linenum,
                             error)
     error(filename, ifndef_linenum, 'build/header_guard', error_level,
           '#ifndef header guard has wrong style, please use: %s' % cppvar)
 
-  if define != ifndef:
-    error(filename, 0, 'build/header_guard', 5,
-          '#ifndef and #define don\'t match, suggested CPP variable is: %s' %
-          cppvar)
+  # Check for "//" comments on endif line.
+  ParseNolintSuppressions(filename, raw_lines[endif_linenum], endif_linenum,
+                          error)
+  match = Match(r'#endif\s*//\s*' + cppvar + r'(_)?\b', endif)
+  if match:
+    if match.group(1) == '_':
+      # Issue low severity warning for deprecated double trailing underscore
+      error(filename, endif_linenum, 'build/header_guard', 0,
+            '#endif line should be "#endif  // %s"' % cppvar)
     return
 
-  if endif != ('#endif  // %s' % cppvar):
-    error_level = 0
-    if endif != ('#endif  // %s' % (cppvar + '_')):
-      error_level = 5
+  # Didn't find the corresponding "//" comment.  If this file does not
+  # contain any "//" comments at all, it could be that the compiler
+  # only wants "/**/" comments, look for those instead.
+  no_single_line_comments = True
+  for i in xrange(1, len(raw_lines) - 1):
+    line = raw_lines[i]
+    if Match(r'^(?:(?:\'(?:\.|[^\'])*\')|(?:"(?:\.|[^"])*")|[^\'"])*//', line):
+      no_single_line_comments = False
+      break
 
-    ParseNolintSuppressions(filename, lines[endif_linenum], endif_linenum,
-                            error)
-    error(filename, endif_linenum, 'build/header_guard', error_level,
-          '#endif line should be "#endif  // %s"' % cppvar)
+  if no_single_line_comments:
+    match = Match(r'#endif\s*/\*\s*' + cppvar + r'(_)?\s*\*/', endif)
+    if match:
+      if match.group(1) == '_':
+        # Low severity warning for double trailing underscore
+        error(filename, endif_linenum, 'build/header_guard', 0,
+              '#endif line should be "#endif  /* %s */"' % cppvar)
+      return
+
+  # Didn't find anything
+  error(filename, endif_linenum, 'build/header_guard', 5,
+        '#endif line should be "#endif  // %s"' % cppvar)
+
+
+def CheckHeaderFileIncluded(filename, include_state, error):
+  """Logs an error if a .cc file does not include its header."""
+
+  # Do not check test files
+  if filename.endswith('_test.cc') or filename.endswith('_unittest.cc'):
+    return
+
+  fileinfo = FileInfo(filename)
+  headerfile = filename[0:len(filename) - 2] + 'h'
+  if not os.path.exists(headerfile):
+    return
+  headername = FileInfo(headerfile).RepositoryName()
+  first_include = 0
+  for section_list in include_state.include_list:
+    for f in section_list:
+      if headername in f[0] or f[0] in headername:
+        return
+      if not first_include:
+        first_include = f[1]
+
+  error(filename, first_include, 'build/include', 5,
+        '%s should include its header file %s' % (fileinfo.RepositoryName(),
+                                                  headername))
 
 
 def CheckForBadCharacters(filename, lines, error):
@@ -2042,6 +2090,23 @@
       self.is_derived = True
 
   def CheckEnd(self, filename, clean_lines, linenum, error):
+    # If there is a DISALLOW macro, it should appear near the end of
+    # the class.
+    seen_last_thing_in_class = False
+    for i in xrange(linenum - 1, self.starting_linenum, -1):
+      match = Search(
+          r'\b(DISALLOW_COPY_AND_ASSIGN|DISALLOW_IMPLICIT_CONSTRUCTORS)\(' +
+          self.name + r'\)',
+          clean_lines.elided[i])
+      if match:
+        if seen_last_thing_in_class:
+          error(filename, i, 'readability/constructors', 3,
+                match.group(1) + ' should be the last thing in the class')
+        break
+
+      if not Match(r'^\s*$', clean_lines.elided[i]):
+        seen_last_thing_in_class = True
+
     # Check that closing brace is aligned with beginning of the class.
     # Only do this if the closing brace is indented by only whitespaces.
     # This means we will not check single-line class definitions.
@@ -2722,7 +2787,8 @@
             'Extra space after (')
     if (Search(r'\w\s+\(', fncall) and
         not Search(r'#\s*define|typedef|using\s+\w+\s*=', fncall) and
-        not Search(r'\w\s+\((\w+::)*\*\w+\)\(', fncall)):
+        not Search(r'\w\s+\((\w+::)*\*\w+\)\(', fncall) and
+        not Search(r'\bcase\s+\(', fncall)):
       # TODO(unknown): Space after an operator function seem to be a common
       # error, silence those for now by restricting them to highest verbosity.
       if Search(r'\boperator_*\b', line):
@@ -2892,11 +2958,14 @@
                 'TODO(my_username) should be followed by a space')
 
       # If the comment contains an alphanumeric character, there
-      # should be a space somewhere between it and the //.
-      if Match(r'//[^ ]*\w', comment):
+      # should be a space somewhere between it and the // unless
+      # it's a /// or //! Doxygen comment.
+      if (Match(r'//[^ ]*\w', comment) and
+          not Match(r'(///|//\!)(\s+|$)', comment)):
         error(filename, linenum, 'whitespace/comments', 4,
               'Should have a space between // and comment')
 
+
 def CheckAccess(filename, clean_lines, linenum, nesting_state, error):
   """Checks for improper use of DISALLOW* macros.
 
@@ -3083,7 +3152,12 @@
   # Otherwise not.  Note we only check for non-spaces on *both* sides;
   # sometimes people put non-spaces on one side when aligning ='s among
   # many lines (not that this is behavior that I approve of...)
-  if Search(r'[\w.]=[\w.]', line) and not Search(r'\b(if|while) ', line):
+  if ((Search(r'[\w.]=', line) or
+       Search(r'=[\w.]', line))
+      and not Search(r'\b(if|while|for) ', line)
+      # Operators taken from [lex.operators] in C++11 standard.
+      and not Search(r'(>=|<=|==|!=|&=|\^=|\|=|\+=|\*=|\/=|\%=)', line)
+      and not Search(r'operator=', line)):
     error(filename, linenum, 'whitespace/operators', 4,
           'Missing spaces around =')
 
@@ -3135,9 +3209,8 @@
   #
   # We also allow operators following an opening parenthesis, since
   # those tend to be macros that deal with operators.
-  match = Search(r'(operator|\S)(?:L|UL|ULL|l|ul|ull)?<<([^\s,=])', line)
-  if (match and match.group(1) != '(' and
-      not (match.group(1).isdigit() and match.group(2).isdigit()) and
+  match = Search(r'(operator|[^\s(<])(?:L|UL|ULL|l|ul|ull)?<<([^\s,=<])', line)
+  if (match and not (match.group(1).isdigit() and match.group(2).isdigit()) and
       not (match.group(1) == 'operator' and match.group(2) == ';')):
     error(filename, linenum, 'whitespace/operators', 3,
           'Missing spaces around <<')
@@ -3255,7 +3328,7 @@
   # an initializer list, for instance), you should have spaces before your
   # braces. And since you should never have braces at the beginning of a line,
   # this is an easy test.
-  match = Match(r'^(.*[^ ({]){', line)
+  match = Match(r'^(.*[^ ({>]){', line)
   if match:
     # Try a bit harder to check for brace initialization.  This
     # happens in one of the following forms:
@@ -3355,13 +3428,14 @@
   return False
 
 
-def IsRValueType(clean_lines, nesting_state, linenum, column):
+def IsRValueType(typenames, clean_lines, nesting_state, linenum, column):
   """Check if the token ending on (linenum, column) is a type.
 
   Assumes that text to the right of the column is "&&" or a function
   name.
 
   Args:
+    typenames: set of type names from template-argument-list.
     clean_lines: A CleansedLines instance containing the file.
     nesting_state: A NestingState instance which maintains information about
                    the current stack of nested blocks being parsed.
@@ -3385,7 +3459,7 @@
   if Match(r'&&\s*(?:[>,]|\.\.\.)', suffix):
     return True
 
-  # Check for simple type and end of templates:
+  # Check for known types and end of templates:
   #   int&& variable
   #   vector<int>&& variable
   #
@@ -3393,9 +3467,10 @@
   # recognize pointer and reference types:
   #   int* Function()
   #   int& Function()
-  if match.group(2) in ['char', 'char16_t', 'char32_t', 'wchar_t', 'bool',
-                        'short', 'int', 'long', 'signed', 'unsigned',
-                        'float', 'double', 'void', 'auto', '>', '*', '&']:
+  if (match.group(2) in typenames or
+      match.group(2) in ['char', 'char16_t', 'char32_t', 'wchar_t', 'bool',
+                         'short', 'int', 'long', 'signed', 'unsigned',
+                         'float', 'double', 'void', 'auto', '>', '*', '&']):
     return True
 
   # If we see a close parenthesis, look for decltype on the other side.
@@ -3528,7 +3603,7 @@
 
     # Something else.  Check that tokens to the left look like
     #   return_type function_name
-    match_func = Match(r'^(.*)\s+\w(?:\w|::)*(?:<[^<>]*>)?\s*$',
+    match_func = Match(r'^(.*\S.*)\s+\w(?:\w|::)*(?:<[^<>]*>)?\s*$',
                        match_symbol.group(1))
     if match_func:
       # Check for constructors, which don't have return types.
@@ -3538,7 +3613,7 @@
       if (implicit_constructor and
           implicit_constructor.group(1) == implicit_constructor.group(2)):
         return True
-      return IsRValueType(clean_lines, nesting_state, linenum,
+      return IsRValueType(typenames, clean_lines, nesting_state, linenum,
                           len(match_func.group(1)))
 
     # Nothing before the function name.  If this is inside a block scope,
@@ -3576,12 +3651,13 @@
   return Match(r'\s*=\s*(?:delete|default)\b', close_line[close_paren:])
 
 
-def IsRValueAllowed(clean_lines, linenum):
+def IsRValueAllowed(clean_lines, linenum, typenames):
   """Check if RValue reference is allowed on a particular line.
 
   Args:
     clean_lines: A CleansedLines instance containing the file.
     linenum: The number of the line to check.
+    typenames: set of type names from template-argument-list.
   Returns:
     True if line is within the region where RValue references are allowed.
   """
@@ -3602,7 +3678,7 @@
     return IsDeletedOrDefault(clean_lines, linenum)
 
   # Allow constructors
-  match = Match(r'\s*([\w<>]+)\s*::\s*([\w<>]+)\s*\(', line)
+  match = Match(r'\s*(?:[\w<>]+::)*([\w<>]+)\s*::\s*([\w<>]+)\s*\(', line)
   if match and match.group(1) == match.group(2):
     return IsDeletedOrDefault(clean_lines, linenum)
   if Search(r'\b(?:explicit|inline)\s+[\w<>]+\s*\(', line):
@@ -3615,7 +3691,86 @@
     if Match(r'^\s*$', previous_line) or Search(r'[{}:;]\s*$', previous_line):
       return IsDeletedOrDefault(clean_lines, linenum)
 
-  return False
+  # Reject types not mentioned in template-argument-list
+  while line:
+    match = Match(r'^.*?(\w+)\s*&&(.*)$', line)
+    if not match:
+      break
+    if match.group(1) not in typenames:
+      return False
+    line = match.group(2)
+
+  # All RValue types that were in template-argument-list should have
+  # been removed by now.  Those were allowed, assuming that they will
+  # be forwarded.
+  #
+  # If there are no remaining RValue types left (i.e. types that were
+  # not found in template-argument-list), flag those as not allowed.
+  return line.find('&&') < 0
+
+
+def GetTemplateArgs(clean_lines, linenum):
+  """Find list of template arguments associated with this function declaration.
+
+  Args:
+    clean_lines: A CleansedLines instance containing the file.
+    linenum: Line number containing the start of the function declaration,
+             usually one line after the end of the template-argument-list.
+  Returns:
+    Set of type names, or empty set if this does not appear to have
+    any template parameters.
+  """
+  # Find start of function
+  func_line = linenum
+  while func_line > 0:
+    line = clean_lines.elided[func_line]
+    if Match(r'^\s*$', line):
+      return set()
+    if line.find('(') >= 0:
+      break
+    func_line -= 1
+  if func_line == 0:
+    return set()
+
+  # Collapse template-argument-list into a single string
+  argument_list = ''
+  match = Match(r'^(\s*template\s*)<', clean_lines.elided[func_line])
+  if match:
+    # template-argument-list on the same line as function name
+    start_col = len(match.group(1))
+    _, end_line, end_col = CloseExpression(clean_lines, func_line, start_col)
+    if end_col > -1 and end_line == func_line:
+      start_col += 1  # Skip the opening bracket
+      argument_list = clean_lines.elided[func_line][start_col:end_col]
+
+  elif func_line > 1:
+    # template-argument-list one line before function name
+    match = Match(r'^(.*)>\s*$', clean_lines.elided[func_line - 1])
+    if match:
+      end_col = len(match.group(1))
+      _, start_line, start_col = ReverseCloseExpression(
+          clean_lines, func_line - 1, end_col)
+      if start_col > -1:
+        start_col += 1  # Skip the opening bracket
+        while start_line < func_line - 1:
+          argument_list += clean_lines.elided[start_line][start_col:]
+          start_col = 0
+          start_line += 1
+        argument_list += clean_lines.elided[func_line - 1][start_col:end_col]
+
+  if not argument_list:
+    return set()
+
+  # Extract type names
+  typenames = set()
+  while True:
+    match = Match(r'^[,\s]*(?:typename|class)(?:\.\.\.)?\s+(\w+)(.*)$',
+                  argument_list)
+    if not match:
+      break
+    typenames.add(match.group(1))
+    argument_list = match.group(2)
+  return typenames
 
 
 def CheckRValueReference(filename, clean_lines, linenum, nesting_state, error):
@@ -3643,9 +3798,10 @@
   # Either poorly formed && or an rvalue reference, check the context
   # to get a more accurate error message.  Mostly we want to determine
   # if what's to the left of "&&" is a type or not.
+  typenames = GetTemplateArgs(clean_lines, linenum)
   and_pos = len(match.group(1))
-  if IsRValueType(clean_lines, nesting_state, linenum, and_pos):
-    if not IsRValueAllowed(clean_lines, linenum):
+  if IsRValueType(typenames, clean_lines, nesting_state, linenum, and_pos):
+    if not IsRValueAllowed(clean_lines, linenum, typenames):
       error(filename, linenum, 'build/c++11', 3,
             'RValue references are an unapproved C++ feature.')
   else:
@@ -3926,8 +4082,10 @@
     # semicolons, while the downside for getting the blacklist wrong
     # would result in compile errors.
     #
-    # In addition to macros, we also don't want to warn on compound
-    # literals and lambdas.
+    # In addition to macros, we also don't want to warn on
+    #  - Compound literals
+    #  - Lambdas
+    #  - alignas specifier with anonymous structs:
     closing_brace_pos = match.group(1).rfind(')')
     opening_parenthesis = ReverseCloseExpression(
         clean_lines, linenum, closing_brace_pos)
@@ -3941,6 +4099,7 @@
                'EXCLUSIVE_LOCKS_REQUIRED', 'SHARED_LOCKS_REQUIRED',
                'LOCKS_EXCLUDED', 'INTERFACE_DEF')) or
           (func and not Search(r'\boperator\s*\[\s*\]', func.group(1))) or
+          Search(r'\b(?:struct|union)\s+alignas\s*$', line_prefix) or
           Search(r'\s+=\s*$', line_prefix)):
         match = None
     if (match and
@@ -4484,6 +4643,10 @@
       error(filename, linenum, 'build/include', 4,
             '"%s" already included at %s:%s' %
             (include, filename, duplicate_line))
+    elif (include.endswith('.cc') and
+          os.path.dirname(fileinfo.RepositoryName()) != os.path.dirname(include)):
+      error(filename, linenum, 'build/include', 4,
+            'Do not include .cc files from other packages')
     elif not _THIRD_PARTY_HEADERS_PATTERN.match(include):
       include_state.include_list[-1].append((include, linenum))
 
@@ -4511,20 +4674,6 @@
               'Include "%s" not in alphabetical order' % include)
       include_state.SetLastHeader(canonical_include)
 
-  # Look for any of the stream classes that are part of standard C++.
-  match = _RE_PATTERN_INCLUDE.match(line)
-  if match:
-    include = match.group(2)
-    if Match(r'(f|ind|io|i|o|parse|pf|stdio|str|)?stream$', include):
-      # Many unit tests use cout, so we exempt them.
-      if not _IsTestFilename(filename):
-        # Suggest a different header for ostream
-        if include == 'ostream':
-          error(filename, linenum, 'readability/streams', 3,
-                'For logging, include "base/logging.h" instead of <ostream>.')
-        else:
-          error(filename, linenum, 'readability/streams', 3,
-                'Streams are highly discouraged.')
 
 
 def _GetTextInside(text, start_pattern):
@@ -4755,25 +4904,6 @@
             'Do not use variable-length arrays.  Use an appropriately named '
             "('k' followed by CamelCase) compile-time constant for the size.")
 
-  # If DISALLOW_COPY_AND_ASSIGN DISALLOW_IMPLICIT_CONSTRUCTORS is present,
-  # then it should be the last thing in the class declaration.
-  match = Match(
-      (r'\s*'
-       r'(DISALLOW_(COPY_AND_ASSIGN|IMPLICIT_CONSTRUCTORS))'
-       r'\(.*\);$'),
-      line)
-  if match and linenum + 1 < clean_lines.NumLines():
-    next_line = clean_lines.elided[linenum + 1]
-    # We allow some, but not all, declarations of variables to be present
-    # in the statement that defines the class.  The [\w\*,\s]* fragment of
-    # the regular expression below allows users to declare instances of
-    # the class or pointers to instances, but not less common types such
-    # as function pointers or arrays.  It's a tradeoff between allowing
-    # reasonable code and avoiding trying to parse more C++ using regexps.
-    if not Search(r'^\s*}[\w\*,\s]*;', next_line):
-      error(filename, linenum, 'readability/constructors', 3,
-            match.group(1) + ' should be the last thing in the class')
-
   # Check for use of unnamed namespaces in header files.  Registration
   # macros are typically OK, so we allow use of "namespace {" on lines
   # that end with backslashes.
@@ -4889,6 +5019,22 @@
   return False
 
 
+def IsOutOfLineMethodDefinition(clean_lines, linenum):
+  """Check if current line contains an out-of-line method definition.
+
+  Args:
+    clean_lines: A CleansedLines instance containing the file.
+    linenum: The number of the line to check.
+  Returns:
+    True if current line contains an out-of-line method definition.
+  """
+  # Scan back a few lines for start of current function
+  for i in xrange(linenum, max(-1, linenum - 10), -1):
+    if Match(r'^([^()]*\w+)\(', clean_lines.elided[i]):
+      return Match(r'^[^()]*\w+::\w+\(', clean_lines.elided[i]) is not None
+  return False
+
+
 def IsInitializerList(clean_lines, linenum):
   """Check if current line is inside constructor initializer list.
 
@@ -4957,6 +5103,11 @@
   if IsDerivedFunction(clean_lines, linenum):
     return
 
+  # Don't warn on out-of-line method definitions, as we would warn on the
+  # in-line declaration, if it isn't marked with 'override'.
+  if IsOutOfLineMethodDefinition(clean_lines, linenum):
+    return
+
   # Long type names may be broken across multiple lines, usually in one
   # of these forms:
   #   LongType
@@ -5152,9 +5303,9 @@
   # This is not a cast:
   #   reference_type&(int* function_param);
   match = Search(
-      r'(?:[^\w]&\(([^)]+)\)[\w(])|'
+      r'(?:[^\w]&\(([^)*][^)]*)\)[\w(])|'
       r'(?:[^\w]&(static|dynamic|down|reinterpret)_cast\b)', line)
-  if match and match.group(1) != '*':
+  if match:
     # Try a better error message when the & is bound to something
     # dereferenced by the casted pointer, as opposed to the casted
     # pointer itself.
@@ -5235,6 +5386,7 @@
   #   ExceptionMember(int) throw (...);
   #   ExceptionMember(int) throw (...) {
   #   PureVirtual(int) = 0;
+  #   [](int) -> bool {
   #
   # These are functions of some sort, where the compiler would be fine
   # if they had named parameters, but people often omit those
@@ -5246,7 +5398,7 @@
   #   <TemplateArgument(int)>;
   #   <(FunctionPointerTemplateArgument)(int)>;
   remainder = line[match.end(0):]
-  if Match(r'^\s*(?:;|const\b|throw\b|final\b|override\b|[=>{),])',
+  if Match(r'^\s*(?:;|const\b|throw\b|final\b|override\b|[=>{),]|->)',
            remainder):
     # Looks like an unnamed parameter.
 
@@ -5335,6 +5487,7 @@
     ('<set>', ('set', 'multiset',)),
     ('<stack>', ('stack',)),
     ('<string>', ('char_traits', 'basic_string',)),
+    ('<tuple>', ('tuple',)),
     ('<utility>', ('pair',)),
     ('<vector>', ('vector',)),
 
@@ -5602,9 +5755,21 @@
   """
   # Look for "virtual" on current line.
   line = clean_lines.elided[linenum]
-  virtual = Match(r'^(.*\bvirtual\b)', line)
+  virtual = Match(r'^(.*)(\bvirtual\b)(.*)$', line)
   if not virtual: return
 
+  # Ignore "virtual" keywords that are near access-specifiers.  These
+  # are only used in class base-specifier and do not apply to member
+  # functions.
+  if (Search(r'\b(public|protected|private)\s+$', virtual.group(1)) or
+      Match(r'^\s+(public|protected|private)\b', virtual.group(3))):
+    return
+
+  # Ignore the "virtual" keyword from virtual base classes.  Usually
+  # there is a colon on the same line in these cases (virtual base
+  if Match(r'^.*[^:]:[^:].*$', line): return
+
   # Look for the next opening parenthesis.  This is the start of the
   # parameter list (possibly on the next line shortly after virtual).
   # TODO(unknown): doesn't work if there are virtual functions with
@@ -5612,7 +5777,7 @@
   # that this is rare.
   end_col = -1
   end_line = -1
-  start_col = len(virtual.group(1))
+  start_col = len(virtual.group(2))
   for start_line in xrange(linenum, min(linenum + 3, clean_lines.NumLines())):
     line = clean_lines.elided[start_line][start_col:]
     parameter_list = Match(r'^([^(]*)\(', line)
@@ -5652,9 +5817,21 @@
     linenum: The number of the line to check.
     error: The function to call with any errors found.
   """
-  # Check that at most one of "override" or "final" is present, not both
+  # Look for closing parenthesis nearby.  We need one to confirm where
+  # the declarator ends and where the virt-specifier starts to avoid
+  # false positives.
   line = clean_lines.elided[linenum]
-  if Search(r'\boverride\b', line) and Search(r'\bfinal\b', line):
+  declarator_end = line.rfind(')')
+  if declarator_end >= 0:
+    fragment = line[declarator_end:]
+  else:
+    if linenum > 1 and clean_lines.elided[linenum - 1].rfind(')') >= 0:
+      fragment = line
+    else:
+      return
+
+  # Check that at most one of "override" or "final" is present, not both
+  if Search(r'\boverride\b', fragment) and Search(r'\bfinal\b', fragment):
     error(filename, linenum, 'readability/inheritance', 4,
           ('"override" is redundant since function is '
            'already declared as "final"'))
@@ -5809,9 +5986,6 @@
       # type_traits
       'alignment_of',
       'aligned_union',
-
-      # utility
-      'forward',
       ):
     if Search(r'\bstd::%s\b' % top_name, line):
       error(filename, linenum, 'build/c++11', 5,
@@ -5846,11 +6020,12 @@
 
   CheckForCopyright(filename, lines, error)
 
-  if file_extension == 'h':
-    CheckForHeaderGuard(filename, lines, error)
-
   RemoveMultiLineComments(filename, lines, error)
   clean_lines = CleansedLines(lines)
+
+  if file_extension == 'h':
+    CheckForHeaderGuard(filename, clean_lines, error)
+
   for line in xrange(clean_lines.NumLines()):
     ProcessLine(filename, file_extension, clean_lines, line,
                 include_state, function_state, nesting_state, error,
@@ -5859,6 +6034,10 @@
   nesting_state.CheckCompletedBlocks(filename, error)
 
   CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error)
+
+  # Check that the .cc file has included its header if it exists.
+  if file_extension == 'cc':
+    CheckHeaderFileIncluded(filename, include_state, error)
 
   # We check here rather than inside ProcessLine so that we see raw
   # lines rather than "cleaned" lines.
diff --git a/dart_format.py b/dart_format.py
new file mode 100755
index 0000000..ee07efe
--- /dev/null
+++ b/dart_format.py
@@ -0,0 +1,58 @@
+#!/usr/bin/python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Redirects to the version of dartfmt checked into a gclient repo.
+
+dartfmt binaries are pulled down during gclient sync in the mojo repo.
+
+This tool is named dart_format.py instead of dartfmt to parallel
+clang_format.py, which is in this same repository."""
+
+import os
+import subprocess
+import sys
+
+import gclient_utils
+
+class NotFoundError(Exception):
+  """A file could not be found."""
+  def __init__(self, e):
+    Exception.__init__(self,
+        'Problem while looking for dartfmt in Chromium source tree:\n'
+        '  %s' % e)
+
+
+def FindDartFmtToolInChromiumTree():
+  """Return a path to the dartfmt executable, or die trying."""
+  primary_solution_path = gclient_utils.GetPrimarySolutionPath()
+  if not primary_solution_path:
+    raise NotFoundError(
+        'Could not find checkout in any parent of the current path.')
+
+  dartfmt_path = os.path.join(primary_solution_path, 'third_party', 'dart-sdk',
+                              'dart-sdk', 'bin', 'dartfmt')
+  if not os.path.exists(dartfmt_path):
+    raise NotFoundError('File does not exist: %s' % dartfmt_path)
+  return dartfmt_path
+
+
+def main(args):
+  try:
+    tool = FindDartFmtToolInChromiumTree()
+  except NotFoundError, e:
+    print >> sys.stderr, e
+    sys.exit(1)
+
+  # Add some visibility to --help showing where the tool lives, since this
+  # redirection can be a little opaque.
+  help_syntax = ('-h', '--help', '-help', '-help-list', '--help-list')
+  if any(match in args for match in help_syntax):
+    print '\nDepot tools redirects you to the dartfmt at:\n    %s\n' % tool
+
+  return subprocess.call([tool] + sys.argv[1:])
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/depot-tools-auth b/depot-tools-auth
new file mode 100755
index 0000000..9233c92
--- /dev/null
+++ b/depot-tools-auth
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+base_dir=$(dirname "$0")
+
+PYTHONDONTWRITEBYTECODE=1 exec python "$base_dir/depot-tools-auth.py" "$@"
diff --git a/depot-tools-auth.bat b/depot-tools-auth.bat
new file mode 100644
index 0000000..fe13f93
--- /dev/null
+++ b/depot-tools-auth.bat
@@ -0,0 +1,11 @@
+@echo off

+:: Copyright 2015 The Chromium Authors. All rights reserved.

+:: Use of this source code is governed by a BSD-style license that can be

+:: found in the LICENSE file.

+setlocal

+

+:: This is required with cygwin only.

+PATH=%~dp0;%PATH%

+

+:: Defer control.

+%~dp0python "%~dp0\depot-tools-auth.py" %*

diff --git a/depot-tools-auth.py b/depot-tools-auth.py
new file mode 100755
index 0000000..3ebc239
--- /dev/null
+++ b/depot-tools-auth.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Manages cached OAuth2 tokens used by other depot_tools scripts.
+
+Usage:
+  depot-tools-auth login codereview.chromium.org
+  depot-tools-auth info codereview.chromium.org
+  depot-tools-auth logout codereview.chromium.org
+"""
+
+import logging
+import optparse
+import sys
+
+from third_party import colorama
+
+import auth
+import subcommand
+
+__version__ = '1.0'
+
+
+@subcommand.usage('<hostname>')
+def CMDlogin(parser, args):
+  """Performs interactive login and caches authentication token."""
+  # Forcefully relogin, revoking previous token.
+  hostname, authenticator = parser.parse_args(args)
+  authenticator.logout()
+  authenticator.login()
+  print_token_info(hostname, authenticator)
+  return 0
+
+
+@subcommand.usage('<hostname>')
+def CMDlogout(parser, args):
+  """Revokes cached authentication token and removes it from disk."""
+  _, authenticator = parser.parse_args(args)
+  done = authenticator.logout()
+  print 'Done.' if done else 'Already logged out.'
+  return 0
+
+
+@subcommand.usage('<hostname>')
+def CMDinfo(parser, args):
+  """Shows email associated with a cached authentication token."""
+  # If no token is cached, AuthenticationError will be caught in 'main'.
+  hostname, authenticator = parser.parse_args(args)
+  print_token_info(hostname, authenticator)
+  return 0
+
+
+def print_token_info(hostname, authenticator):
+  token_info = authenticator.get_token_info()
+  print 'Logged in to %s as %s.' % (hostname, token_info['email'])
+  print ''
+  print 'To login with a different email run:'
+  print '  depot-tools-auth login %s' % hostname
+  print 'To logout and purge the authentication token run:'
+  print '  depot-tools-auth logout %s' % hostname
+
+
+class OptionParser(optparse.OptionParser):
+  def __init__(self, *args, **kwargs):
+    optparse.OptionParser.__init__(
+        self, *args, prog='depot-tools-auth', version=__version__, **kwargs)
+    self.add_option(
+        '-v', '--verbose', action='count', default=0,
+        help='Use 2 times for more debugging info')
+    auth.add_auth_options(self, auth.make_auth_config(use_oauth2=True))
+
+  def parse_args(self, args=None, values=None):
+    """Parses options and returns (hostname, auth.Authenticator object)."""
+    options, args = optparse.OptionParser.parse_args(self, args, values)
+    levels = [logging.WARNING, logging.INFO, logging.DEBUG]
+    logging.basicConfig(level=levels[min(options.verbose, len(levels) - 1)])
+    auth_config = auth.extract_auth_config_from_options(options)
+    if len(args) != 1:
+      self.error('Expecting single argument (hostname).')
+    if not auth_config.use_oauth2:
+      self.error('This command is only usable with OAuth2 authentication')
+    return args[0], auth.get_authenticator_for_host(args[0], auth_config)
+
+
+def main(argv):
+  dispatcher = subcommand.CommandDispatcher(__name__)
+  try:
+    return dispatcher.execute(OptionParser(), argv)
+  except auth.AuthenticationError as e:
+    print >> sys.stderr, e
+    return 1
+
+
+if __name__ == '__main__':
+  colorama.init()
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/download_from_google_storage.py b/download_from_google_storage.py
index 8370515..44cc3a7 100755
--- a/download_from_google_storage.py
+++ b/download_from_google_storage.py
@@ -20,8 +20,7 @@
 
 
 GSUTIL_DEFAULT_PATH = os.path.join(
-    os.path.dirname(os.path.abspath(__file__)),
-    'third_party', 'gsutil', 'gsutil')
+    os.path.dirname(os.path.abspath(__file__)), 'gsutil.py')
 # Maps sys.platform to what we actually want to call them.
 PLATFORM_MAPPING = {
     'cygwin': 'win',
@@ -55,13 +54,13 @@
 class Gsutil(object):
   """Call gsutil with some predefined settings.  This is a convenience object,
   and is also immutable."""
-  def __init__(self, path, boto_path, timeout=None, bypass_prodaccess=False):
+  def __init__(self, path, boto_path=None, timeout=None, version='4.7'):
     if not os.path.exists(path):
       raise FileNotFoundError('GSUtil not found in %s' % path)
     self.path = path
     self.timeout = timeout
     self.boto_path = boto_path
-    self.bypass_prodaccess = bypass_prodaccess
+    self.version = version
 
   def get_sub_env(self):
     env = os.environ.copy()
@@ -71,25 +70,16 @@
     elif self.boto_path:
       env['AWS_CREDENTIAL_FILE'] = self.boto_path
       env['BOTO_CONFIG'] = self.boto_path
-    else:
-      custompath = env.get('AWS_CREDENTIAL_FILE', '~/.boto') + '.depot_tools'
-      custompath = os.path.expanduser(custompath)
-      if os.path.exists(custompath):
-        env['AWS_CREDENTIAL_FILE'] = custompath
 
     return env
 
   def call(self, *args):
-    cmd = [sys.executable, self.path]
-    if self.bypass_prodaccess:
-      cmd.append('--bypass_prodaccess')
+    cmd = [sys.executable, self.path, '--force-version', self.version]
     cmd.extend(args)
     return subprocess2.call(cmd, env=self.get_sub_env(), timeout=self.timeout)
 
   def check_call(self, *args):
-    cmd = [sys.executable, self.path]
-    if self.bypass_prodaccess:
-      cmd.append('--bypass_prodaccess')
+    cmd = [sys.executable, self.path, '--force-version', self.version]
     cmd.extend(args)
     ((out, err), code) = subprocess2.communicate(
         cmd,
@@ -105,28 +95,11 @@
     if ('You are attempting to access protected data with '
           'no configured credentials.' in err):
       return (403, out, err)
-    if 'No such object' in err:
+    if 'matched no objects' in err:
       return (404, out, err)
     return (code, out, err)
 
 
-def check_bucket_permissions(bucket, gsutil):
-  if not bucket:
-    print >> sys.stderr, 'Missing bucket %s.'
-    return (None, 1)
-  base_url = 'gs://%s' % bucket
-
-  code, _, ls_err = gsutil.check_call('ls', base_url)
-  if code != 0:
-    print >> sys.stderr, ls_err
-  if code == 403:
-    print >> sys.stderr, 'Got error 403 while authenticating to %s.' % base_url
-    print >> sys.stderr, 'Try running "download_from_google_storage --config".'
-  elif code == 404:
-    print >> sys.stderr, '%s not found.' % base_url
-  return (base_url, code)
-
-
 def check_platform(target):
   """Checks if any parent directory of target matches (win|mac|linux)."""
   assert os.path.isabs(target)
@@ -163,8 +136,7 @@
     with open(input_filename, 'rb') as f:
       sha1_match = re.match('^([A-Za-z0-9]{40})$', f.read(1024).rstrip())
       if sha1_match:
-        work_queue.put(
-            (sha1_match.groups(1)[0], input_filename.replace('.sha1', '')))
+        work_queue.put((sha1_match.groups(1)[0], output))
         return 1
     if not ignore_errors:
       raise InvalidFileError('No sha1 sum found in %s.' % input_filename)
@@ -229,19 +201,41 @@
         continue
     # Check if file exists.
     file_url = '%s/%s' % (base_url, input_sha1_sum)
-    if gsutil.check_call('ls', file_url)[0] != 0:
-      out_q.put('%d> File %s for %s does not exist, skipping.' % (
-          thread_num, file_url, output_filename))
-      ret_codes.put((1, 'File %s for %s does not exist.' % (
-          file_url, output_filename)))
+    (code, _, err) = gsutil.check_call('ls', file_url)
+    if code != 0:
+      if code == 404:
+        out_q.put('%d> File %s for %s does not exist, skipping.' % (
+            thread_num, file_url, output_filename))
+        ret_codes.put((1, 'File %s for %s does not exist.' % (
+            file_url, output_filename)))
+      else:
+        # Other error, probably auth related (bad ~/.boto, etc).
+        out_q.put('%d> Failed to fetch file %s for %s, skipping. [Err: %s]' % (
+            thread_num, file_url, output_filename, err))
+        ret_codes.put((1, 'Failed to fetch file %s for %s. [Err: %s]' % (
+            file_url, output_filename, err)))
       continue
     # Fetch the file.
-    out_q.put('%d> Downloading %s...' % (
-        thread_num, output_filename))
-    code, _, err = gsutil.check_call('cp', '-q', file_url, output_filename)
+    out_q.put('%d> Downloading %s...' % (thread_num, output_filename))
+    try:
+      os.remove(output_filename)  # Delete the file if it exists already.
+    except OSError:
+      if os.path.exists(output_filename):
+        out_q.put('%d> Warning: deleting %s failed.' % (
+            thread_num, output_filename))
+    code, _, err = gsutil.check_call('cp', file_url, output_filename)
     if code != 0:
       out_q.put('%d> %s' % (thread_num, err))
       ret_codes.put((code, err))
+      continue
+
+    remote_sha1 = get_sha1(output_filename)
+    if remote_sha1 != input_sha1_sum:
+      msg = ('%d> ERROR remote sha1 (%s) does not match expected sha1 (%s).' %
+             (thread_num, remote_sha1, input_sha1_sum))
+      out_q.put(msg)
+      ret_codes.put((20, msg))
+      continue
 
     # Set executable bit.
     if sys.platform == 'cygwin':
@@ -254,15 +248,11 @@
     elif sys.platform != 'win32':
       # On non-Windows platforms, key off of the custom header
       # "x-goog-meta-executable".
-      #
-      # TODO(hinoka): It is supposedly faster to use "gsutil stat" but that
-      # doesn't appear to be supported by the gsutil currently in our tree. When
-      # we update, this code should use that instead of "gsutil ls -L".
-      code, out, _ = gsutil.check_call('ls', '-L', file_url)
+      code, out, _ = gsutil.check_call('stat', file_url)
       if code != 0:
         out_q.put('%d> %s' % (thread_num, err))
         ret_codes.put((code, err))
-      elif re.search('x-goog-meta-executable:', out):
+      elif re.search(r'executable:\s*1', out):
         st = os.stat(output_filename)
         os.chmod(output_filename, st.st_mode | stat.S_IEXEC)
 
@@ -394,21 +384,34 @@
 
   # Set the boto file to /dev/null if we don't need auth.
   if options.no_auth:
-    options.boto = os.devnull
+    if (set(('http_proxy', 'https_proxy')).intersection(
+        env.lower() for env in os.environ) and
+        'NO_AUTH_BOTO_CONFIG' not in os.environ):
+      print >> sys.stderr, ('NOTICE: You have PROXY values set in your '
+                            'environment, but gsutil in depot_tools does not '
+                            '(yet) obey them.')
+      print >> sys.stderr, ('Also, --no_auth prevents the normal BOTO_CONFIG '
+                            'environment variable from being used.')
+      print >> sys.stderr, ('To use a proxy in this situation, please supply '
+                            'those settings in a .boto file pointed to by '
+                            'the NO_AUTH_BOTO_CONFIG environment var.')
+    options.boto = os.environ.get('NO_AUTH_BOTO_CONFIG', os.devnull)
 
   # Make sure gsutil exists where we expect it to.
   if os.path.exists(GSUTIL_DEFAULT_PATH):
     gsutil = Gsutil(GSUTIL_DEFAULT_PATH,
-                    boto_path=options.boto,
-                    bypass_prodaccess=options.no_auth)
+                    boto_path=options.boto)
   else:
     parser.error('gsutil not found in %s, bad depot_tools checkout?' %
                  GSUTIL_DEFAULT_PATH)
 
   # Passing in -g/--config will run our copy of GSUtil, then quit.
   if options.config:
-    return gsutil.call('config', '-r', '-o',
-                       os.path.expanduser('~/.boto.depot_tools'))
+    print '===Note from depot_tools==='
+    print 'If you do not have a project ID, enter "0" when asked for one.'
+    print '===End note from depot_tools==='
+    print
+    return gsutil.call('config')
 
   if not args:
     parser.error('Missing target.')
@@ -452,10 +455,7 @@
       parser.error('Output file %s exists and --no_resume is specified.'
                    % options.output)
 
-  # Check we have a valid bucket with valid permissions.
-  base_url, code = check_bucket_permissions(options.bucket, gsutil)
-  if code:
-    return code
+  base_url = 'gs://%s' % options.bucket
 
   return download_from_google_storage(
       input_filename, base_url, gsutil, options.num_threads, options.directory,
diff --git a/drover.py b/drover.py
index ec8620c..8702530 100755
--- a/drover.py
+++ b/drover.py
@@ -642,4 +642,8 @@
 
 
 if __name__ == "__main__":
-  sys.exit(main())
+  try:
+    sys.exit(main())
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/fetch b/fetch
index 37e8e79..bea6718 100755
--- a/fetch
+++ b/fetch
@@ -5,8 +5,4 @@
 
 base_dir=$(dirname "$0")
 
-if [[ "#grep#fetch#cleanup#diff#" != *"#$1#"* ]]; then
-  "$base_dir"/update_depot_tools
-fi
-
 PYTHONDONTWRITEBYTECODE=1 exec python "$base_dir/fetch.py" "$@"
diff --git a/fetch.py b/fetch.py
index f5640cc..35e05d3 100755
--- a/fetch.py
+++ b/fetch.py
@@ -62,8 +62,8 @@
   def run(self, cmd, **kwargs):
     print 'Running: %s' % (' '.join(pipes.quote(x) for x in cmd))
     if self.options.dry_run:
-      return 0
-    return subprocess.check_call(cmd, **kwargs)
+      return ''
+    return subprocess.check_output(cmd, **kwargs)
 
 
 class GclientCheckout(Checkout):
@@ -75,6 +75,15 @@
       cmd_prefix = ('gclient',)
     return self.run(cmd_prefix + cmd, **kwargs)
 
+  def exists(self):
+    try:
+      gclient_root = self.run_gclient('root').strip()
+      return (os.path.exists(os.path.join(gclient_root, '.gclient')) or
+              os.path.exists(os.path.join(os.getcwd(), self.root)))
+    except subprocess.CalledProcessError:
+      pass
+    return os.path.exists(os.path.join(os.getcwd(), self.root))
+
 
 class GitCheckout(Checkout):
 
@@ -101,17 +110,28 @@
   def __init__(self, options, spec, root):
     super(GclientGitCheckout, self).__init__(options, spec, root)
     assert 'solutions' in self.spec
-    keys = ['solutions', 'target_os', 'target_os_only']
-    gclient_spec = '\n'.join('%s = %s' % (key, self.spec[key])
-                             for key in keys if key in self.spec)
-    self.spec['gclient_spec'] = gclient_spec
 
-  def exists(self):
-    return os.path.exists(os.path.join(os.getcwd(), self.root))
+  def _format_spec(self):
+    def _format_literal(lit):
+      if isinstance(lit, basestring):
+        return '"%s"' % lit
+      if isinstance(lit, list):
+        return '[%s]' % ', '.join(_format_literal(i) for i in lit)
+      return '%r' % lit
+    soln_strings = []
+    for soln in self.spec['solutions']:
+      soln_string= '\n'.join('    "%s": %s,' % (key, _format_literal(value))
+                             for key, value in soln.iteritems())
+      soln_strings.append('  {\n%s\n  },' % soln_string)
+    gclient_spec = 'solutions = [\n%s\n]\n' % '\n'.join(soln_strings)
+    extra_keys = ['target_os', 'target_os_only']
+    gclient_spec += ''.join('%s = %s\n' % (key, _format_literal(self.spec[key]))
+                             for key in extra_keys if key in self.spec)
+    return gclient_spec
 
   def init(self):
     # Configure and do the gclient checkout.
-    self.run_gclient('config', '--spec', self.spec['gclient_spec'])
+    self.run_gclient('config', '--spec', self._format_spec())
     sync_cmd = ['sync']
     if self.options.nohooks:
       sync_cmd.append('--nohooks')
@@ -139,20 +159,18 @@
 
   def __init__(self, options, spec, root):
     super(GclientGitSvnCheckout, self).__init__(options, spec, root)
-    assert 'svn_url' in self.spec
-    assert 'svn_branch' in self.spec
-    assert 'svn_ref' in self.spec
 
   def init(self):
     # Ensure we are authenticated with subversion for all submodules.
     git_svn_dirs = json.loads(self.spec.get('submodule_git_svn_spec', '{}'))
     git_svn_dirs.update({self.root: self.spec})
     for _, svn_spec in git_svn_dirs.iteritems():
-      try:
-        self.run_svn('ls', '--non-interactive', svn_spec['svn_url'])
-      except subprocess.CalledProcessError:
-        print 'Please run `svn ls %s`' % svn_spec['svn_url']
-        return 1
+      if svn_spec.get('svn_url'):
+        try:
+          self.run_svn('ls', '--non-interactive', svn_spec['svn_url'])
+        except subprocess.CalledProcessError:
+          print 'Please run `svn ls %s`' % svn_spec['svn_url']
+          return 1
 
     super(GclientGitSvnCheckout, self).init()
 
@@ -164,12 +182,17 @@
       wd = os.path.join(self.base, real_path)
       if self.options.dry_run:
         print 'cd %s' % wd
-      prefix = svn_spec.get('svn_prefix', 'origin/')
-      self.run_git('svn', 'init', '--prefix=' + prefix, '-T',
-                   svn_spec['svn_branch'], svn_spec['svn_url'], cwd=wd)
-      self.run_git('config', '--replace', 'svn-remote.svn.fetch',
-                   svn_spec['svn_branch'] + ':refs/remotes/' + prefix +
-                   svn_spec['svn_ref'], cwd=wd)
+      if svn_spec.get('auto'):
+        self.run_git('auto-svn', cwd=wd)
+        continue
+      self.run_git('svn', 'init', svn_spec['svn_url'], cwd=wd)
+      self.run_git('config', '--unset-all', 'svn-remote.svn.fetch', cwd=wd)
+      for svn_branch, git_ref in svn_spec.get('git_svn_fetch', {}).items():
+        self.run_git('config', '--add', 'svn-remote.svn.fetch',
+                     '%s:%s' % (svn_branch, git_ref), cwd=wd)
+      for svn_branch, git_ref in svn_spec.get('git_svn_branches', {}).items():
+        self.run_git('config', '--add', 'svn-remote.svn.branches',
+                     '%s:%s' % (svn_branch, git_ref), cwd=wd)
       self.run_git('svn', 'fetch', cwd=wd)
 
 
@@ -298,8 +321,9 @@
   except KeyError:
     return 1
   if checkout.exists():
-    print 'You appear to already have a checkout. "fetch" is used only'
-    print 'to get new checkouts. Use "gclient sync" to update the checkout.'
+    print 'Your current directory appears to already contain, or be part of, '
+    print 'a checkout. "fetch" is used only to get new checkouts. Use '
+    print '"gclient sync" to update existing checkouts.'
     print
     print 'Fetch also does not yet deal with partial checkouts, so if fetch'
     print 'failed, delete the checkout and start over (crbug.com/230691).'
@@ -314,4 +338,8 @@
 
 
 if __name__ == '__main__':
-  sys.exit(main())
+  try:
+    sys.exit(main())
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/fix_encoding.py b/fix_encoding.py
index 61bd742..5da9135 100644
--- a/fix_encoding.py
+++ b/fix_encoding.py
@@ -229,7 +229,7 @@
         remaining -= n.value
         if not remaining:
           break
-        text = text[n.value:]
+        text = text[int(n.value):]
     except Exception, e:
       complain('%s.write: %r' % (self.name, e))
       raise
diff --git a/gcl.py b/gcl.py
index a32b950..bbc4432 100755
--- a/gcl.py
+++ b/gcl.py
@@ -23,6 +23,7 @@
 import breakpad  # pylint: disable=W0611
 
 
+import auth
 import fix_encoding
 import gclient_utils
 import git_cl
@@ -46,6 +47,9 @@
 # we store information about changelists.
 REPOSITORY_ROOT = ""
 
+# Replacement for project name.
+SWITCH_TO_GIT = "SWITCH_TO_GIT_ALREADY"
+
 # Filename where we store repository specific information for gcl.
 CODEREVIEW_SETTINGS_FILE = "codereview.settings"
 CODEREVIEW_SETTINGS_FILE_NOT_FOUND = (
@@ -351,7 +355,10 @@
     if not self._rpc_server:
       if not self.rietveld:
         ErrorExit(CODEREVIEW_SETTINGS_FILE_NOT_FOUND)
-      self._rpc_server = rietveld.CachingRietveld(self.rietveld, None, None)
+      # TODO(vadimsh): gcl.py should be deleted soon. Do not bother much about
+      # authentication options and always use defaults.
+      self._rpc_server = rietveld.CachingRietveld(
+          self.rietveld, auth.make_auth_config())
     return self._rpc_server
 
   def CloseIssue(self):
@@ -875,6 +882,10 @@
       # Uploading a new patchset.
       upload_arg.append("--issue=%d" % change_info.issue)
 
+      project = GetCodeReviewSetting("PROJECT")
+      if project:
+        upload_arg.append("--project=%s" % SWITCH_TO_GIT)
+
       if not any(i.startswith('--title') or i.startswith('-t') for i in args):
         upload_arg.append('--title= ')
     else:
@@ -915,7 +926,7 @@
 
       project = GetCodeReviewSetting("PROJECT")
       if project:
-        upload_arg.append("--project=%s" % project)
+        upload_arg.append("--project=%s" % SWITCH_TO_GIT)
 
     # If we have a lot of files with long paths, then we won't be able to fit
     # the command to "svn diff".  Instead, we generate the diff manually for
@@ -1465,6 +1476,10 @@
         '\nYour python version %s is unsupported, please upgrade.\n' %
         sys.version.split(' ', 1)[0])
     return 2
+
+  sys.stderr.write('Warning: gcl is going away soon. Get off subversion!\n')
+  sys.stderr.write('See http://crbug.com/475321 for more details.\n')
+
   if not argv:
     argv = ['help']
   command = Command(argv[0])
@@ -1511,4 +1526,8 @@
 
 if __name__ == "__main__":
   fix_encoding.fix_encoding()
-  sys.exit(main(sys.argv[1:]))
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/gclient b/gclient
index 7c57dfd..798b2d8 100755
--- a/gclient
+++ b/gclient
@@ -6,7 +6,7 @@
 base_dir=$(dirname "$0")
 
 if [[ "#grep#fetch#cleanup#diff#" != *"#$1#"* ]]; then
-  "$base_dir"/update_depot_tools
+  "$base_dir"/update_depot_tools "$@"
 fi
 
 PYTHONDONTWRITEBYTECODE=1 exec python "$base_dir/gclient.py" "$@"
diff --git a/gclient.bat b/gclient.bat
index 42984c0..d73e641 100755
--- a/gclient.bat
+++ b/gclient.bat
@@ -8,7 +8,7 @@
 PATH=%~dp0;%PATH%

 

 :: Synchronize the root directory before deferring control back to gclient.py.

-call "%~dp0\update_depot_tools.bat"

+call "%~dp0\update_depot_tools.bat" %*

 

 :: Defer control.

 %~dp0python "%~dp0\gclient.py" %*

diff --git a/gclient.py b/gclient.py
index 3007487..8de7e52 100755
--- a/gclient.py
+++ b/gclient.py
@@ -342,6 +342,11 @@
     # A cache of the files affected by the current operation, necessary for
     # hooks.
     self._file_list = []
+    # List of host names from which dependencies are allowed.
+    # Default is an empty set, meaning unspecified in DEPS file, and hence all
+    # hosts will be allowed. Non-empty set means whitelist of hosts.
+    # allowed_hosts var is scoped to its DEPS file, and so it isn't recursive.
+    self._allowed_hosts = frozenset()
     # If it is not set to True, the dependency wasn't processed for its child
     # dependency, i.e. its DEPS wasn't read.
     self._deps_parsed = False
@@ -687,6 +692,18 @@
           rel_deps.add(os.path.normpath(os.path.join(self.name, d)))
         self.recursedeps = rel_deps
 
+    if 'allowed_hosts' in local_scope:
+      try:
+        self._allowed_hosts = frozenset(local_scope.get('allowed_hosts'))
+      except TypeError:  # raised if non-iterable
+        pass
+      if not self._allowed_hosts:
+        logging.warning("allowed_hosts is specified but empty %s",
+                        self._allowed_hosts)
+        raise gclient_utils.Error(
+            'ParseDepsFile(%s): allowed_hosts must be absent '
+            'or a non-empty iterable' % self.name)
+
     # Convert the deps into real Dependency.
     deps_to_add = []
     for name, url in deps.iteritems():
@@ -756,6 +773,24 @@
               print('Using parent\'s revision date %s since we are in a '
                     'different repository.' % options.revision)
 
+  def findDepsFromNotAllowedHosts(self):
+    """Returns a list of dependencies from not allowed hosts.
+
+    If allowed_hosts is not set, allows all hosts and returns empty list.
+    """
+    if not self._allowed_hosts:
+      return []
+    bad_deps = []
+    for dep in self._dependencies:
+      # Don't enforce this for custom_deps.
+      if dep.name in self._custom_deps:
+        continue
+      if isinstance(dep.url, basestring):
+        parsed_url = urlparse.urlparse(dep.url)
+        if parsed_url.netloc and parsed_url.netloc not in self._allowed_hosts:
+          bad_deps.append(dep)
+    return bad_deps
+
   # Arguments number differs from overridden method
   # pylint: disable=W0221
   def run(self, revision_overrides, command, args, work_queue, options):
@@ -770,6 +805,7 @@
     run_scm = command not in ('runhooks', 'recurse', None)
     parsed_url = self.LateOverride(self.url)
     file_list = [] if not options.nohooks else None
+    revision_override = revision_overrides.pop(self.name, None)
     if run_scm and parsed_url:
       if isinstance(parsed_url, self.FileImpl):
         # Special support for single-file checkout.
@@ -785,7 +821,7 @@
       else:
         # Create a shallow copy to mutate revision.
         options = copy.copy(options)
-        options.revision = revision_overrides.pop(self.name, None)
+        options.revision = revision_override
         self.maybeGetParentRevision(
             command, options, parsed_url, self.parent)
         self._used_revision = options.revision
@@ -1053,6 +1089,11 @@
 
   @property
   @gclient_utils.lockedmethod
+  def allowed_hosts(self):
+    return self._allowed_hosts
+
+  @property
+  @gclient_utils.lockedmethod
   def file_list(self):
     return tuple(self._file_list)
 
@@ -1077,7 +1118,8 @@
     out = []
     for i in ('name', 'url', 'parsed_url', 'safesync_url', 'custom_deps',
               'custom_vars', 'deps_hooks', 'file_list', 'should_process',
-              'processed', 'hooks_ran', 'deps_parsed', 'requirements'):
+              'processed', 'hooks_ran', 'deps_parsed', 'requirements',
+              'allowed_hosts'):
       # First try the native property if it exists.
       if hasattr(self, '_' + i):
         value = getattr(self, '_' + i, False)
@@ -1460,7 +1502,7 @@
     revision_overrides = {}
     # It's unnecessary to check for revision overrides for 'recurse'.
     # Save a few seconds by not calling _EnforceRevisions() in that case.
-    if command not in ('diff', 'recurse', 'runhooks', 'status'):
+    if command not in ('diff', 'recurse', 'runhooks', 'status', 'revert'):
       self._CheckConfig()
       revision_overrides = self._EnforceRevisions()
     pm = None
@@ -1500,23 +1542,18 @@
         # Fix path separator on Windows.
         entry_fixed = entry.replace('/', os.path.sep)
         e_dir = os.path.join(self.root_dir, entry_fixed)
-
-        def _IsParentOfAny(parent, path_list):
-          parent_plus_slash = parent + '/'
-          return any(
-              path[:len(parent_plus_slash)] == parent_plus_slash
-              for path in path_list)
-
         # Use entry and not entry_fixed there.
         if (entry not in entries and
             (not any(path.startswith(entry + '/') for path in entries)) and
             os.path.exists(e_dir)):
+          # The entry has been removed from DEPS.
           scm = gclient_scm.CreateSCM(
               prev_url, self.root_dir, entry_fixed, self.outbuf)
 
           # Check to see if this directory is now part of a higher-up checkout.
           # The directory might be part of a git OR svn checkout.
           scm_root = None
+          scm_class = None
           for scm_class in (gclient_scm.scm.GIT, gclient_scm.scm.SVN):
             try:
               scm_root = scm_class.GetCheckoutRoot(scm.checkout_path)
@@ -1529,9 +1566,45 @@
                             'determine whether it is part of a higher-level '
                             'checkout, so not removing.' % entry)
             continue
+
+          # This is to handle the case of third_party/WebKit migrating from
+          # being a DEPS entry to being part of the main project.
+          # If the subproject is a Git project, we need to remove its .git
+          # folder. Otherwise git operations on that folder will have different
+          # effects depending on the current working directory.
+          if scm_class == gclient_scm.scm.GIT and (
+              os.path.abspath(scm_root) == os.path.abspath(e_dir)):
+            e_par_dir = os.path.join(e_dir, os.pardir)
+            if scm_class.IsInsideWorkTree(e_par_dir):
+              par_scm_root = scm_class.GetCheckoutRoot(e_par_dir)
+              # rel_e_dir : relative path of entry w.r.t. its parent repo.
+              rel_e_dir = os.path.relpath(e_dir, par_scm_root)
+              if scm_class.IsDirectoryVersioned(par_scm_root, rel_e_dir):
+                save_dir = scm.GetGitBackupDirPath()
+                # Remove any eventual stale backup dir for the same project.
+                if os.path.exists(save_dir):
+                  gclient_utils.rmtree(save_dir)
+                os.rename(os.path.join(e_dir, '.git'), save_dir)
+                # When switching between the two states (entry/ is a subproject
+                # -> entry/ is part of the outer project), it is very likely
+                # that some files are changed in the checkout, unless we are
+                # jumping *exactly* across the commit which changed just DEPS.
+                # In such case we want to cleanup any eventual stale files
+                # (coming from the old subproject) in order to end up with a
+                # clean checkout.
+                scm_class.CleanupDir(par_scm_root, rel_e_dir)
+                assert not os.path.exists(os.path.join(e_dir, '.git'))
+                print(('\nWARNING: \'%s\' has been moved from DEPS to a higher '
+                       'level checkout. The git folder containing all the local'
+                       ' branches has been saved to %s.\n'
+                       'If you don\'t care about its state you can safely '
+                       'remove that folder to free up space.') %
+                      (entry, save_dir))
+                continue
+
           if scm_root in full_entries:
-            logging.info('%s is part of a higher level checkout, not '
-                         'removing.', scm.GetCheckoutRoot())
+            logging.info('%s is part of a higher level checkout, not removing',
+                         scm.GetCheckoutRoot())
             continue
 
           file_list = []
@@ -1751,6 +1824,16 @@
                   'git', 'grep', '--null', '--color=Always'] + args)
 
 
+def CMDroot(parser, args):
+  """Outputs the solution root (or current dir if there isn't one)."""
+  (options, args) = parser.parse_args(args)
+  client = GClient.LoadCurrentConfig(options)
+  if client:
+    print(os.path.abspath(client.root_dir))
+  else:
+    print(os.path.abspath('.'))
+
+
 @subcommand.usage('[url] [safesync url]')
 def CMDconfig(parser, args):
   """Creates a .gclient file in the current directory.
@@ -1930,6 +2013,9 @@
   parser.add_option('-M', '--merge', action='store_true',
                     help='merge upstream changes instead of trying to '
                          'fast-forward or rebase')
+  parser.add_option('-A', '--auto_rebase', action='store_true',
+                    help='Automatically rebase repositories against local '
+                         'checkout during update (git only).')
   parser.add_option('--deps', dest='deps_os', metavar='OS_LIST',
                     help='override deps for the specified (comma-separated) '
                          'platform(s); \'all\' will process all deps_os '
@@ -1948,6 +2034,9 @@
   parser.add_option('--shallow', action='store_true',
                     help='GIT ONLY - Do a shallow clone into the cache dir. '
                          'Requires Git 1.9+')
+  parser.add_option('--no_bootstrap', '--no-bootstrap',
+                    action='store_true',
+                    help='Don\'t bootstrap from Google Storage.')
   parser.add_option('--ignore_locks', action='store_true',
                     help='GIT ONLY - Ignore cache locks.')
   (options, args) = parser.parse_args(args)
@@ -2086,6 +2175,27 @@
   return 0
 
 
+def CMDverify(parser, args):
+  """Verifies the DEPS file deps are only from allowed_hosts."""
+  (options, args) = parser.parse_args(args)
+  client = GClient.LoadCurrentConfig(options)
+  if not client:
+    raise gclient_utils.Error('client not configured; see \'gclient config\'')
+  client.RunOnDeps(None, [])
+  # Look at each first-level dependency of this gclient only.
+  for dep in client.dependencies:
+    bad_deps = dep.findDepsFromNotAllowedHosts()
+    if not bad_deps:
+      continue
+    print "There are deps from not allowed hosts in file %s" % dep.deps_file
+    for bad_dep in bad_deps:
+      print "\t%s at %s" % (bad_dep.name, bad_dep.url)
+    print "allowed_hosts:", ', '.join(dep.allowed_hosts)
+    sys.stdout.flush()
+    raise gclient_utils.Error(
+        'dependencies from disallowed hosts; check your DEPS file.')
+  return 0
+
 class OptionParser(optparse.OptionParser):
   gclientfile_default = os.environ.get('GCLIENT_FILE', '.gclient')
 
@@ -2169,7 +2279,7 @@
   sys.stdout = gclient_utils.MakeFileAnnotated(sys.stdout)
 
 
-def Main(argv):
+def main(argv):
   """Doesn't parse the arguments here, just find the right subcommand to
   execute."""
   if sys.hexversion < 0x02060000:
@@ -2195,9 +2305,14 @@
     return 1
   finally:
     gclient_utils.PrintWarnings()
+  return 0
 
 
 if '__main__' == __name__:
-  sys.exit(Main(sys.argv[1:]))
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
 
 # vim: ts=2:sw=2:tw=80:et:
diff --git a/gclient_scm.py b/gclient_scm.py
index a465ec4..473a1bd 100644
--- a/gclient_scm.py
+++ b/gclient_scm.py
@@ -27,10 +27,9 @@
 THIS_FILE_PATH = os.path.abspath(__file__)
 
 GSUTIL_DEFAULT_PATH = os.path.join(
-    os.path.dirname(os.path.abspath(__file__)),
-    'third_party', 'gsutil', 'gsutil')
+    os.path.dirname(os.path.abspath(__file__)), 'gsutil.py')
 
-CHROMIUM_SRC_URL = 'https://chromium.googlesource.com/chromium/src.git'
+
 class DiffFiltererWrapper(object):
   """Simple base class which tracks which file is being diffed and
   replaces instances of its file name in the original and
@@ -97,6 +96,10 @@
     elif (url.startswith('http://') or url.startswith('https://') or
           url.startswith('svn://') or url.startswith('svn+ssh://')):
       return 'svn'
+    elif url.startswith('file://'):
+      if url.endswith('.git'):
+        return 'git'
+      return 'svn'
   return None
 
 
@@ -294,7 +297,7 @@
     gclient_utils.CheckCallAndFilter(
         ['git', 'diff', merge_base],
         cwd=self.checkout_path,
-        filter_fn=GitDiffFilterer(self.relpath).Filter, print_func=self.Print)
+        filter_fn=GitDiffFilterer(self.relpath, print_func=self.Print).Filter)
 
   def _FetchAndReset(self, revision, file_list, options):
     """Equivalent to git fetch; git reset."""
@@ -315,10 +318,10 @@
       return
     for f in os.listdir(hook_dir):
       if not f.endswith('.sample') and not f.endswith('.disabled'):
-        dest_name = os.path.join(hook_dir, f + '.disabled')
-        if os.path.exists(dest_name):
-          os.remove(dest_name)
-        os.rename(os.path.join(hook_dir, f), dest_name)
+        disabled_hook_path = os.path.join(hook_dir, f + '.disabled')
+        if os.path.exists(disabled_hook_path):
+          os.remove(disabled_hook_path)
+        os.rename(os.path.join(hook_dir, f), disabled_hook_path)
 
   def update(self, options, args, file_list):
     """Runs git to update or transparently checkout the working copy.
@@ -388,6 +391,20 @@
     if mirror:
       url = mirror.mirror_path
 
+    # If we are going to introduce a new project, there is a possibility that
+    # we are syncing back to a state where the project was originally a
+    # sub-project rolled by DEPS (realistic case: crossing the Blink merge point
+    # syncing backwards, when Blink was a DEPS entry and not part of src.git).
+    # In such case, we might have a backup of the former .git folder, which can
+    # be used to avoid re-fetching the entire repo again (useful for bisects).
+    backup_dir = self.GetGitBackupDirPath()
+    target_dir = os.path.join(self.checkout_path, '.git')
+    if os.path.exists(backup_dir) and not os.path.exists(target_dir):
+      gclient_utils.safe_makedirs(self.checkout_path)
+      os.rename(backup_dir, target_dir)
+      # Reset to a clean state
+      self._Run(['reset', '--hard', 'HEAD'], options)
+
     if (not os.path.exists(self.checkout_path) or
         (os.path.isdir(self.checkout_path) and
          not os.path.exists(os.path.join(self.checkout_path, '.git')))):
@@ -435,6 +452,11 @@
         self._CheckClean(rev_str)
       # Switch over to the new upstream
       self._Run(['remote', 'set-url', self.remote, url], options)
+      if mirror:
+        with open(os.path.join(
+            self.checkout_path, '.git', 'objects', 'info', 'alternates'),
+            'w') as fh:
+          fh.write(os.path.join(url, 'objects'))
       self._FetchAndReset(revision, file_list, options)
       return_early = True
 
@@ -591,15 +613,16 @@
             self.Print('_____ %s%s' % (self.relpath, rev_str), timestamp=False)
             printed_path = True
           while True:
-            try:
-              action = self._AskForData(
-                  'Cannot %s, attempt to rebase? '
-                  '(y)es / (q)uit / (s)kip : ' %
-                      ('merge' if options.merge else 'fast-forward merge'),
-                  options)
-            except ValueError:
-              raise gclient_utils.Error('Invalid Character')
-            if re.match(r'yes|y', action, re.I):
+            if not options.auto_rebase:
+              try:
+                action = self._AskForData(
+                    'Cannot %s, attempt to rebase? '
+                    '(y)es / (q)uit / (s)kip : ' %
+                        ('merge' if options.merge else 'fast-forward merge'),
+                    options)
+              except ValueError:
+                raise gclient_utils.Error('Invalid Character')
+            if options.auto_rebase or re.match(r'yes|y', action, re.I):
               self._AttemptRebase(upstream_branch, files, options,
                                   printed_path=printed_path, merge=False)
               printed_path = True
@@ -798,6 +821,12 @@
     base_url = self.url
     return base_url[:base_url.rfind('/')] + url
 
+  def GetGitBackupDirPath(self):
+    """Returns the path where the .git folder for the current project can be
+    staged/restored. Use case: subproject moved from DEPS <-> outer project."""
+    return os.path.join(self._root_dir,
+                        'old_' + self.relpath.replace(os.sep, '_')) + '.git'
+
   def _GetMirror(self, url, options):
     """Get a git_cache.Mirror object for the argument url."""
     if not git_cache.Mirror.GetCachePath():
@@ -806,12 +835,6 @@
         'print_func': self.filter,
         'refs': []
     }
-    # TODO(hinoka): This currently just fails because lkcr/lkgr are branches
-    #               not tags. This also adds 20 seconds to every bot_update
-    #               run, so I'm commenting this out until lkcr/lkgr become
-    #               tags.  (2014/4/24)
-    # if url == CHROMIUM_SRC_URL or url + '.git' == CHROMIUM_SRC_URL:
-    #  mirror_kwargs['refs'].extend(['refs/tags/lkgr', 'refs/tags/lkcr'])
     if hasattr(options, 'with_branch_heads') and options.with_branch_heads:
       mirror_kwargs['refs'].append('refs/branch-heads/*')
     if hasattr(options, 'with_tags') and options.with_tags:
@@ -829,7 +852,9 @@
         depth = 10000
     else:
       depth = None
-    mirror.populate(verbose=options.verbose, bootstrap=True, depth=depth,
+    mirror.populate(verbose=options.verbose,
+                    bootstrap=not getattr(options, 'no_bootstrap', False),
+                    depth=depth,
                     ignore_lock=getattr(options, 'ignore_locks', False))
     mirror.unlock()
 
@@ -1127,21 +1152,37 @@
 
   def _Run(self, args, options, show_header=True, **kwargs):
     # Disable 'unused options' warning | pylint: disable=W0613
-    cwd = kwargs.setdefault('cwd', self.checkout_path)
+    kwargs.setdefault('cwd', self.checkout_path)
     kwargs.setdefault('stdout', self.out_fh)
     kwargs['filter_fn'] = self.filter
     kwargs.setdefault('print_stdout', False)
     env = scm.GIT.ApplyEnvVars(kwargs)
     cmd = ['git'] + args
     if show_header:
-      header = "running '%s' in '%s'" % (' '.join(cmd), cwd)
-      self.filter(header)
-    return gclient_utils.CheckCallAndFilter(cmd, env=env, **kwargs)
+      gclient_utils.CheckCallAndFilterAndHeader(cmd, env=env, **kwargs)
+    else:
+      gclient_utils.CheckCallAndFilter(cmd, env=env, **kwargs)
 
 
 class SVNWrapper(SCMWrapper):
   """ Wrapper for SVN """
   name = 'svn'
+  _PRINTED_DEPRECATION = False
+
+  _MESSAGE = (
+    'Oh hai! You are using subversion. Chrome infra is eager to get rid of',
+    'svn support so please switch to git.',
+    'Tracking bug: http://crbug.com/475320',
+    'If you are a project owner, you may request git migration assistance at: ',
+    '  https://code.google.com/p/chromium/issues/entry?template=Infra-Git')
+
+  def __init__(self, *args, **kwargs):
+    super(SVNWrapper, self).__init__(*args, **kwargs)
+    suppress_deprecated_notice = os.environ.get(
+        'SUPPRESS_DEPRECATED_SVN_NOTICE', False)
+    if not SVNWrapper._PRINTED_DEPRECATION and not suppress_deprecated_notice:
+      SVNWrapper._PRINTED_DEPRECATION = True
+      sys.stderr.write('\n'.join(self._MESSAGE) + '\n')
 
   @staticmethod
   def BinaryExists():
@@ -1186,7 +1227,7 @@
         ['svn', 'diff', '-x', '--ignore-eol-style'] + args,
         cwd=self.checkout_path,
         print_stdout=False,
-        filter_fn=SvnDiffFilterer(self.relpath).Filter, print_func=self.Print)
+        filter_fn=SvnDiffFilterer(self.relpath, print_func=self.Print).Filter)
 
   def update(self, options, args, file_list):
     """Runs svn to update or transparently checkout the working copy.
diff --git a/gclient_utils.py b/gclient_utils.py
index a21e65a..21c44c3 100644
--- a/gclient_utils.py
+++ b/gclient_utils.py
@@ -166,6 +166,13 @@
       time.sleep(0.1)
 
 
+def rm_file_or_tree(path):
+  if os.path.isfile(path):
+    os.remove(path)
+  else:
+    rmtree(path)
+
+
 def rmtree(path):
   """shutil.rmtree() on steroids.
 
@@ -655,15 +662,8 @@
   raise Error('Unknown platform: ' + sys.platform)
 
 
-def GetBuildtoolsPath():
-  """Returns the full path to the buildtools directory.
-  This is based on the root of the checkout containing the current directory."""
-
-  # Overriding the build tools path by environment is highly unsupported and may
-  # break without warning.  Do not rely on this for anything important.
-  override = os.environ.get('CHROMIUM_BUILDTOOLS_PATH')
-  if override is not None:
-    return override
+def GetPrimarySolutionPath():
+  """Returns the full path to the primary solution. (gclient_root + src)"""
 
   gclient_root = FindGclientRoot(os.getcwd())
   if not gclient_root:
@@ -679,18 +679,37 @@
       pass
     top_dir = top_dir[0]
     if os.path.exists(os.path.join(top_dir, 'buildtools')):
-      return os.path.join(top_dir, 'buildtools')
+      return top_dir
     return None
 
   # Some projects' top directory is not named 'src'.
   source_dir_name = GetGClientPrimarySolutionName(gclient_root) or 'src'
-  return os.path.join(gclient_root, source_dir_name, 'buildtools')
+  return os.path.join(gclient_root, source_dir_name)
+
+
+def GetBuildtoolsPath():
+  """Returns the full path to the buildtools directory.
+  This is based on the root of the checkout containing the current directory."""
+
+  # Overriding the build tools path by environment is highly unsupported and may
+  # break without warning.  Do not rely on this for anything important.
+  override = os.environ.get('CHROMIUM_BUILDTOOLS_PATH')
+  if override is not None:
+    return override
+
+  primary_solution = GetPrimarySolutionPath()
+  if not primary_solution:
+    return None
+  buildtools_path = os.path.join(primary_solution, 'buildtools')
+  if not os.path.exists(buildtools_path):
+    # Buildtools may be in the gclient root.
+    gclient_root = FindGclientRoot(os.getcwd())
+    buildtools_path = os.path.join(gclient_root, 'buildtools')
+  return buildtools_path
 
 
 def GetBuildtoolsPlatformBinaryPath():
   """Returns the full path to the binary directory for the current platform."""
-  # Mac and Windows just have one directory, Linux has two according to whether
-  # it's 32 or 64 bits.
   buildtools_path = GetBuildtoolsPath()
   if not buildtools_path:
     return None
@@ -700,10 +719,7 @@
   elif sys.platform == 'darwin':
     subdir = 'mac'
   elif sys.platform.startswith('linux'):
-    if sys.maxsize > 2**32:
       subdir = 'linux64'
-    else:
-      subdir = 'linux32'
   else:
     raise Error('Unknown platform: ' + sys.platform)
   return os.path.join(buildtools_path, subdir)
@@ -1130,15 +1146,33 @@
 def NumLocalCpus():
   """Returns the number of processors.
 
-  Python on OSX 10.6 raises a NotImplementedError exception.
+  multiprocessing.cpu_count() is permitted to raise NotImplementedError, and
+  is known to do this on some Windows systems and OSX 10.6. If we can't get the
+  CPU count, we will fall back to '1'.
   """
+  # Surround the entire thing in try/except; no failure here should stop gclient
+  # from working.
   try:
-    import multiprocessing
-    return multiprocessing.cpu_count()
-  except:  # pylint: disable=W0702
-    # Mac OS 10.6 only
-    # pylint: disable=E1101
-    return int(os.sysconf('SC_NPROCESSORS_ONLN'))
+    # Use multiprocessing to get CPU count. This may raise
+    # NotImplementedError.
+    try:
+      import multiprocessing
+      return multiprocessing.cpu_count()
+    except NotImplementedError:  # pylint: disable=W0702
+      # (UNIX) Query 'os.sysconf'.
+      # pylint: disable=E1101
+      if hasattr(os, 'sysconf') and 'SC_NPROCESSORS_ONLN' in os.sysconf_names:
+        return int(os.sysconf('SC_NPROCESSORS_ONLN'))
+
+      # (Windows) Query 'NUMBER_OF_PROCESSORS' environment variable.
+      if 'NUMBER_OF_PROCESSORS' in os.environ:
+        return int(os.environ['NUMBER_OF_PROCESSORS'])
+  except Exception as e:
+    logging.exception("Exception raised while probing CPU count: %s", e)
+
+  logging.debug('Failed to get CPU count. Defaulting to 1.')
+  return 1
+
 
 def DefaultDeltaBaseCacheLimit():
   """Return a reasonable default for the git config core.deltaBaseCacheLimit.
@@ -1152,6 +1186,7 @@
   else:
     return '512m'
 
+
 def DefaultIndexPackConfig(url=''):
   """Return reasonable default values for configuring git-index-pack.
 
@@ -1162,3 +1197,21 @@
   if url in THREADED_INDEX_PACK_BLACKLIST:
     result.extend(['-c', 'pack.threads=1'])
   return result
+
+
+def FindExecutable(executable):
+  """This mimics the "which" utility."""
+  path_folders = os.environ.get('PATH').split(os.pathsep)
+
+  for path_folder in path_folders:
+    target = os.path.join(path_folder, executable)
+    # Just incase we have some ~/blah paths.
+    target = os.path.abspath(os.path.expanduser(target))
+    if os.path.isfile(target) and os.access(target, os.X_OK):
+      return target
+    if sys.platform.startswith('win'):
+      for suffix in ('.bat', '.cmd', '.exe'):
+        alt_target = target + suffix
+        if os.path.isfile(alt_target) and os.access(alt_target, os.X_OK):
+          return alt_target
+  return None
diff --git a/git-auto-svn b/git-auto-svn
new file mode 100755
index 0000000..6a07962
--- /dev/null
+++ b/git-auto-svn
@@ -0,0 +1,6 @@
+#!/usr/bin/env bash
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+. $(type -P python_git_runner.sh)
diff --git a/git-cache b/git-cache
index 95eef4b..6a07962 100755
--- a/git-cache
+++ b/git-cache
@@ -3,6 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_cache.py - a git-command for managing local caches of remote repositories.
-
 . $(type -P python_git_runner.sh)
diff --git a/git-cherry-pick-upload b/git-cherry-pick-upload
index 4ab9b63..6a07962 100755
--- a/git-cherry-pick-upload
+++ b/git-cherry-pick-upload
@@ -3,6 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_cherry_pick_upload.py -- Upload a cherry pick CL to rietveld.
-
 . $(type -P python_git_runner.sh)
diff --git a/git-cl b/git-cl
index e2082a4..12b19e3 100755
--- a/git-cl
+++ b/git-cl
@@ -3,7 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_cl.py -- a git-command for integrating reviews on Rietveld
-# Copyright (C) 2008 Evan Martin <martine@danga.com>
-
 . $(type -P python_git_runner.sh)
diff --git a/git-crrev-parse b/git-crrev-parse
new file mode 100755
index 0000000..77318fe
--- /dev/null
+++ b/git-crrev-parse
@@ -0,0 +1,53 @@
+#!/usr/bin/env bash
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+# This git extension converts a chromium commit number to its git commit hash.
+# It accepts the following input formats:
+# 
+#   $ git crrev-parse Cr-Commit-Position: refs/heads/master@{#311769}
+#   $ git crrev-parse '    Cr-Commit-Position: refs/heads/master@{#311769}'
+#   $ git crrev-parse 'Cr-Commit-Position: refs/heads/master@{#311769}'
+#   $ git crrev-parse refs/heads/master@{#311769}
+#   
+# It also works for branches (assuming you have branches in your local
+# checkout):
+#   
+#   $ git crrev-parse refs/branch-heads/2278@{#2}
+#   
+# If you don't specify a branch, refs/heads/master is assumed:
+#   
+#   $ git crrev-parse @{#311769}
+#   $ git crrev-parse 311769
+
+# Developer note: this script makes heavy use of prefix/suffix/pattern
+# substitution for bash variables.  Refer to the "Parameter Expansion"
+# section of the man page for bash.
+
+while [ -n "$1" ]; do
+  if [[ "$1" = "Cr-Commit-Position:" ]] && [[ "$2" =~ .*@\{#[0-9][0-9]*\} ]]; then
+    commit_pos="$2"
+    shift
+  else
+    commit_pos="${1#*Cr-Commit-Position: }"
+  fi
+  ref="${commit_pos%@\{#*\}}"
+  if [ "$ref" = "$commit_pos" -o -z "$ref" ]; then
+    ref="refs/heads/master"
+  fi
+  remote_ref="${ref/refs\/heads/refs\/remotes\/origin}"
+  remote_ref="${remote_ref/refs\/branch-heads/refs\/remotes\/branch-heads}"
+  num="${commit_pos#*@\{\#}"
+  num="${num%\}}"
+  
+  if [ -z "$ref" -o -z "$num" ]; then
+    git rev-parse "$1"
+  else
+    grep_str="Cr-Commit-Position: $ref@{#$num}"
+    git rev-list -n 1 --grep="$grep_str" "$remote_ref"
+  fi
+
+  shift
+done
diff --git a/git-footers b/git-footers
index 635cd8f..6a07962 100755
--- a/git-footers
+++ b/git-footers
@@ -3,6 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_footers.py -- Extract the conventional footers associated with a commit.
-
 . $(type -P python_git_runner.sh)
diff --git a/git-freeze b/git-freeze
index 0187c2e..5e485bb 100755
--- a/git-freeze
+++ b/git-freeze
@@ -3,9 +3,6 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_freezer.py freeze -- a git-command to suspend all existing working
-# directory modifications. This can be reversed with the 'git thaw' command.
-
 SCRIPT=git_freezer.py
 set -- freeze "$@"
 . $(type -P python_git_runner.sh)
diff --git a/git-map b/git-map
index 3e651c7..03e8418 100755
--- a/git-map
+++ b/git-map
@@ -3,7 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_map.py -- a git-command for presenting a graphical view of the git
-# history.
-
 . $(type -P python_git_runner.sh) | less -R
diff --git a/git-map-branches b/git-map-branches
index b7da014..6a07962 100755
--- a/git-map-branches
+++ b/git-map-branches
@@ -3,7 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_map_branches.py -- a git-command for presenting a graphical view of git
-# branches in the current repo, and their relationships to each other.
-
 . $(type -P python_git_runner.sh)
diff --git a/git-mark-merge-base b/git-mark-merge-base
index 502d04c..6a07962 100755
--- a/git-mark-merge-base
+++ b/git-mark-merge-base
@@ -3,6 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_mark_merge_base.py -- Manually set the merge base for the current branch.
-
 . $(type -P python_git_runner.sh)
diff --git a/git-nav-downstream b/git-nav-downstream
index 7f8a677..6a07962 100755
--- a/git-nav-downstream
+++ b/git-nav-downstream
@@ -3,6 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_nav_downstream.py -- a git-command to navigate to a downstream branch.
-
 . $(type -P python_git_runner.sh)
diff --git a/git-nav-upstream b/git-nav-upstream
index bec3eba..f16dc2f 100755
--- a/git-nav-upstream
+++ b/git-nav-upstream
@@ -3,6 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# a git-command to navigate to the upstream branch.
-
 git checkout '@{u}'
diff --git a/git-new-branch b/git-new-branch
index fb56ee8..6a07962 100755
--- a/git-new-branch
+++ b/git-new-branch
@@ -3,7 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_new_branch.py -- Create a new branch which tracks the default upstream
-# (origin/master).
-
 . $(type -P python_git_runner.sh)
diff --git a/git-number b/git-number
index e1d1298..a5037a4 100755
--- a/git-number
+++ b/git-number
@@ -3,7 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_number.py - a git-command for calculating and displaying the generation
-# number of a commit.
-
 . $(type -P python_git_runner.sh)
diff --git a/git-rebase-update b/git-rebase-update
index 60c16c3..6a07962 100755
--- a/git-rebase-update
+++ b/git-rebase-update
@@ -3,7 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_rebase_update.py -- Update remote sources, and use rebase to update all
-# branches in this repo.
-
 . $(type -P python_git_runner.sh)
diff --git a/git-rename-branch b/git-rename-branch
index 8c18884..6a07962 100755
--- a/git-rename-branch
+++ b/git-rename-branch
@@ -3,7 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_rename_branch.py -- Rename the current branch, correctly updating the
-# upstream branch of all the downstream branches.
-
 . $(type -P python_git_runner.sh)
diff --git a/git-reparent-branch b/git-reparent-branch
index 1fdb1ae..6a07962 100755
--- a/git-reparent-branch
+++ b/git-reparent-branch
@@ -3,8 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_reparent_branch.py -- Change the parent (upstream) branch of the current
-# branch. Afterwards, run a `git rebase-update` cycle to ensure that all
-# branches correctly reflect their parentage.
-
 . $(type -P python_git_runner.sh)
diff --git a/git-retry b/git-retry
index 2c03942..beb0976 100755
--- a/git-retry
+++ b/git-retry
@@ -3,9 +3,6 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_freezer.py freeze -- a git-command to suspend all existing working
-# directory modifications. This can be reversed with the 'git thaw' command.
-
 SCRIPT=git_retry.py
 set -- retry "$@"
 . $(type -P python_git_runner.sh)
diff --git a/git-squash-branch b/git-squash-branch
index 0fb1832..6a07962 100755
--- a/git-squash-branch
+++ b/git-squash-branch
@@ -3,6 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_squash_branch.py -- Collapses the current branch to a single commit.
-
 . $(type -P python_git_runner.sh)
diff --git a/git_auto_svn.py b/git_auto_svn.py
new file mode 100755
index 0000000..88d970b
--- /dev/null
+++ b/git_auto_svn.py
@@ -0,0 +1,104 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Performs all git-svn setup steps necessary for 'git svn dcommit' to work.
+
+Assumes that trunk of the svn remote maps to master of the git remote.
+
+Example:
+git clone https://chromium.googlesource.com/chromium/tools/depot_tools
+cd depot_tools
+git auto-svn
+"""
+
+import argparse
+import os
+import sys
+import urlparse
+
+import subprocess2
+
+from git_common import run as run_git
+from git_common import run_stream_with_retcode as run_git_stream_with_retcode
+from git_common import set_config, root, ROOT
+from git_footers import get_footer_svn_id
+
+
+SVN_EXE = ROOT+'\\svn.bat' if sys.platform.startswith('win') else 'svn'
+
+
+def run_svn(*cmd, **kwargs):
+  """Runs an svn command.
+
+  Returns (stdout, stderr) as a pair of strings.
+
+  Raises subprocess2.CalledProcessError on nonzero return code.
+  """
+  kwargs.setdefault('stdin', subprocess2.PIPE)
+  kwargs.setdefault('stdout', subprocess2.PIPE)
+  kwargs.setdefault('stderr', subprocess2.PIPE)
+
+  cmd = (SVN_EXE,) + cmd
+  proc = subprocess2.Popen(cmd, **kwargs)
+  ret, err = proc.communicate()
+  retcode = proc.wait()
+  if retcode != 0:
+    raise subprocess2.CalledProcessError(retcode, cmd, os.getcwd(), ret, err)
+
+  return ret, err
+
+
+def main(argv):
+  # No command line flags. Just use the parser to prevent people from trying
+  # to pass flags that don't do anything, and to provide 'usage'.
+  parser = argparse.ArgumentParser(
+      description='Automatically set up git-svn for a repo mirrored from svn.')
+  parser.parse_args(argv)
+
+  upstream = root()
+  svn_id = get_footer_svn_id(upstream)
+  assert svn_id, 'No valid git-svn-id footer found on %s.' % upstream
+  print 'Found git-svn-id footer %s on %s' % (svn_id, upstream)
+
+  parsed_svn = urlparse.urlparse(svn_id)
+  path_components = parsed_svn.path.split('/')
+  svn_repo = None
+  svn_path = None
+  for i in xrange(len(path_components)):
+    try:
+      maybe_repo = '%s://%s%s' % (
+          parsed_svn.scheme, parsed_svn.netloc, '/'.join(path_components[:i+1]))
+      print 'Checking ', maybe_repo
+      run_svn('info', maybe_repo)
+      svn_repo = maybe_repo
+      svn_path = '/'.join(path_components[i+1:])
+      break
+    except subprocess2.CalledProcessError, e:
+      if 'E170001' in str(e):
+        print 'Authentication failed:'
+        print e
+        print ('Try running "svn ls %s" with the password'
+               ' from https://chromium-access.appspot.com' % maybe_repo)
+        print
+      continue
+  assert svn_repo is not None, 'Unable to find svn repo for %s' % svn_id
+  print 'Found upstream svn repo %s and path %s' % (svn_repo, svn_path)
+
+  set_config('svn-remote.svn.url', svn_repo)
+  set_config('svn-remote.svn.fetch',
+             '%s:refs/remotes/%s' % (svn_path, upstream))
+  print 'Configured metadata, running "git svn fetch". This may take some time.'
+  with run_git_stream_with_retcode('svn', 'fetch') as stdout:
+    for line in stdout.xreadlines():
+      print line.strip()
+  return 0
+
+
+if __name__ == '__main__':
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_cache.py b/git_cache.py
index eac2093..e80923c 100755
--- a/git_cache.py
+++ b/git_cache.py
@@ -142,8 +142,7 @@
 
   git_exe = 'git.bat' if sys.platform.startswith('win') else 'git'
   gsutil_exe = os.path.join(
-    os.path.dirname(os.path.abspath(__file__)),
-    'third_party', 'gsutil', 'gsutil')
+    os.path.dirname(os.path.abspath(__file__)), 'gsutil.py')
   cachepath_lock = threading.Lock()
 
   def __init__(self, url, refs=None, print_func=None):
@@ -151,7 +150,14 @@
     self.refs = refs or []
     self.basedir = self.UrlToCacheDir(url)
     self.mirror_path = os.path.join(self.GetCachePath(), self.basedir)
-    self.print = print_func or print
+    if print_func:
+      self.print = self.print_without_file
+      self.print_func = print_func
+    else:
+      self.print = print
+
+  def print_without_file(self, message, **kwargs):
+    self.print_func(message)
 
   @property
   def bootstrap_bucket(self):
@@ -179,24 +185,6 @@
     netpath = re.sub(r'\b-\b', '/', os.path.basename(path)).replace('--', '-')
     return 'https://%s' % netpath
 
-  @staticmethod
-  def FindExecutable(executable):
-    """This mimics the "which" utility."""
-    path_folders = os.environ.get('PATH').split(os.pathsep)
-
-    for path_folder in path_folders:
-      target = os.path.join(path_folder, executable)
-      # Just incase we have some ~/blah paths.
-      target = os.path.abspath(os.path.expanduser(target))
-      if os.path.isfile(target) and os.access(target, os.X_OK):
-        return target
-      if sys.platform.startswith('win'):
-        for suffix in ('.bat', '.cmd', '.exe'):
-          alt_target = target + suffix
-          if os.path.isfile(alt_target) and os.access(alt_target, os.X_OK):
-            return alt_target
-    return None
-
   @classmethod
   def SetCachePath(cls, cachepath):
     with cls.cachepath_lock:
@@ -267,16 +255,17 @@
     """
 
     python_fallback = False
-    if sys.platform.startswith('win') and not self.FindExecutable('7z'):
+    if (sys.platform.startswith('win') and
+        not gclient_utils.FindExecutable('7z')):
       python_fallback = True
     elif sys.platform.startswith('darwin'):
       # The OSX version of unzip doesn't support zip64.
       python_fallback = True
-    elif not self.FindExecutable('unzip'):
+    elif not gclient_utils.FindExecutable('unzip'):
       python_fallback = True
 
     gs_folder = 'gs://%s/%s' % (self.bootstrap_bucket, self.basedir)
-    gsutil = Gsutil(self.gsutil_exe, boto_path=None, bypass_prodaccess=True)
+    gsutil = Gsutil(self.gsutil_exe, boto_path=None)
     # Get the most recent version of the zipfile.
     _, ls_out, _ = gsutil.check_call('ls', gs_folder)
     ls_out_sorted = sorted(ls_out.splitlines())
@@ -313,7 +302,7 @@
           retcode = 0
     finally:
       # Clean up the downloaded zipfile.
-      gclient_utils.rmtree(tempdir)
+      gclient_utils.rm_file_or_tree(tempdir)
 
     if retcode:
       self.print(
@@ -487,7 +476,7 @@
                      if os.path.isdir(os.path.join(cachepath, path))])
     for dirent in dirlist:
       if dirent.startswith('_cache_tmp') or dirent.startswith('tmp'):
-        gclient_utils.rmtree(os.path.join(cachepath, dirent))
+        gclient_utils.rm_file_or_tree(os.path.join(cachepath, dirent))
       elif (dirent.endswith('.lock') and
           os.path.isfile(os.path.join(cachepath, dirent))):
         repo_dirs.add(os.path.join(cachepath, dirent[:-5]))
@@ -691,4 +680,8 @@
 
 
 if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_cherry_pick_upload.py b/git_cherry_pick_upload.py
index 2a048fa..3090364 100755
--- a/git_cherry_pick_upload.py
+++ b/git_cherry_pick_upload.py
@@ -5,23 +5,26 @@
 
 """Upload a cherry pick CL to rietveld."""
 
-import argparse
 import md5
+import optparse
 import subprocess2
 import sys
 
+import auth
+
 from git_cl import Changelist
 from git_common import config, run
 from third_party.upload import EncodeMultipartFormData, GitVCS
 from rietveld import Rietveld
 
 
-def cherry_pick(target_branch, commit):
+def cherry_pick(target_branch, commit, auth_config):
   """Attempt to upload a cherry pick CL to rietveld.
 
   Args:
     target_branch: The branch to cherry pick onto.
     commit: The git hash of the commit to cherry pick.
+    auth_config: auth.AuthConfig object with authentication configuration.
   """
   author = config('user.email')
 
@@ -48,7 +51,7 @@
           run('diff', parent, commit))),
   ])
 
-  rietveld = Rietveld(config('rietveld.server'), author, None)
+  rietveld = Rietveld(config('rietveld.server'), auth_config, author)
   # pylint: disable=W0212
   output = rietveld._send(
     '/upload',
@@ -124,21 +127,29 @@
 
 
 def main():
-  parser = argparse.ArgumentParser()
-  parser.add_argument(
+  parser = optparse.OptionParser(
+      usage='usage: %prog --branch <branch> <commit>')
+  parser.add_option(
       '--branch',
       '-b',
       help='The upstream branch to cherry pick to.',
-      metavar='<branch>',
-      required=True,
-  )
-  parser.add_argument(
-      'commit',
-      help='SHA to cherry pick.',
-      metavar='<commit>',
-  )
-  args = parser.parse_args()
-  cherry_pick(args.branch, args.commit)
+      metavar='<branch>')
+  auth.add_auth_options(parser)
+  options, args = parser.parse_args()
+  auth_config = auth.extract_auth_config_from_options(options)
+
+  if not options.branch:
+    parser.error('--branch is required')
+  if len(args) != 1:
+    parser.error('Expecting single argument <commit>')
+
+  cherry_pick(options.branch, args[0], auth_config)
+  return 0
+
 
 if __name__ == '__main__':
-  sys.exit(main())
+  try:
+    sys.exit(main())
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_cl.py b/git_cl.py
index 5d39cab..8b474a1 100755
--- a/git_cl.py
+++ b/git_cl.py
@@ -8,7 +8,11 @@
 """A git-command for integrating reviews on Rietveld."""
 
 from distutils.version import LooseVersion
+from multiprocessing.pool import ThreadPool
+import base64
+import collections
 import glob
+import httplib
 import json
 import logging
 import optparse
@@ -17,25 +21,31 @@
 import re
 import stat
 import sys
+import tempfile
 import textwrap
-import threading
+import time
+import traceback
 import urllib2
 import urlparse
 import webbrowser
+import zlib
 
 try:
   import readline  # pylint: disable=F0401,W0611
 except ImportError:
   pass
 
-
 from third_party import colorama
+from third_party import httplib2
 from third_party import upload
+import auth
 import breakpad  # pylint: disable=W0611
 import clang_format
+import dart_format
 import fix_encoding
 import gclient_utils
 import git_common
+from git_footers import get_footer_svn_id
 import owners
 import owners_finder
 import presubmit_support
@@ -52,6 +62,13 @@
 DESCRIPTION_BACKUP_FILE = '~/.git_cl_description_backup'
 GIT_INSTRUCTIONS_URL = 'http://code.google.com/p/chromium/wiki/UsingGit'
 CHANGE_ID = 'Change-Id:'
+REFS_THAT_ALIAS_TO_OTHER_REFS = {
+    'refs/remotes/origin/lkgr': 'refs/remotes/origin/master',
+    'refs/remotes/origin/lkcr': 'refs/remotes/origin/master',
+}
+
+# Buildbucket-related constants
+BUILDBUCKET_HOST = 'cr-buildbucket.appspot.com'
 
 # Valid extensions for files we want to lint.
 DEFAULT_LINT_REGEX = r"(.*\.cpp|.*\.cc|.*\.h)"
@@ -111,6 +128,11 @@
     return 1, ''
 
 
+def RunGitSilent(args):
+  """Returns stdout, suppresses stderr and ignores the return code."""
+  return RunGitWithCode(args, suppress_stderr=True)[1]
+
+
 def IsGitVersionAtLeast(min_version):
   prefix = 'git version '
   version = RunGit(['--version']).strip()
@@ -118,6 +140,13 @@
       LooseVersion(version[len(prefix):]) >= LooseVersion(min_version))
 
 
+def BranchExists(branch):
+  """Return True if specified branch exists."""
+  code, _ = RunGitWithCode(['rev-parse', '--verify', branch],
+                           suppress_stderr=True)
+  return not code
+
+
 def ask_for_data(prompt):
   try:
     return raw_input(prompt)
@@ -188,18 +217,116 @@
   parser.parse_args = Parse
 
 
-def is_dirty_git_tree(cmd):
-  # Make sure index is up-to-date before running diff-index.
-  RunGit(['update-index', '--refresh', '-q'], error_ok=True)
-  dirty = RunGit(['diff-index', '--name-status', 'HEAD'])
-  if dirty:
-    print 'Cannot %s with a dirty tree. You must commit locally first.' % cmd
-    print 'Uncommitted files: (git diff-index --name-status HEAD)'
-    print dirty[:4096]
-    if len(dirty) > 4096:
-      print '... (run "git diff-index --name-status HEAD" to see full output).'
-    return True
-  return False
+def _prefix_master(master):
+  """Convert user-specified master name to full master name.
+
+  Buildbucket uses full master name(master.tryserver.chromium.linux) as bucket
+  name, while the developers always use shortened master name
+  (tryserver.chromium.linux) by stripping off the prefix 'master.'. This
+  function does the conversion for buildbucket migration.
+  """
+  prefix = 'master.'
+  if master.startswith(prefix):
+    return master
+  return '%s%s' % (prefix, master)
+
+
+def trigger_try_jobs(auth_config, changelist, options, masters, category,
+                     override_properties=None):
+  rietveld_url = settings.GetDefaultServerUrl()
+  rietveld_host = urlparse.urlparse(rietveld_url).hostname
+  authenticator = auth.get_authenticator_for_host(rietveld_host, auth_config)
+  http = authenticator.authorize(httplib2.Http())
+  http.force_exception_to_status_code = True
+  issue_props = changelist.GetIssueProperties()
+  issue = changelist.GetIssue()
+  patchset = changelist.GetMostRecentPatchset()
+
+  buildbucket_put_url = (
+      'https://{hostname}/_ah/api/buildbucket/v1/builds/batch'.format(
+          hostname=BUILDBUCKET_HOST))
+  buildset = 'patch/rietveld/{hostname}/{issue}/{patch}'.format(
+      hostname=rietveld_host,
+      issue=issue,
+      patch=patchset)
+
+  batch_req_body = {'builds': []}
+  print_text = []
+  print_text.append('Tried jobs on:')
+  for master, builders_and_tests in sorted(masters.iteritems()):
+    print_text.append('Master: %s' % master)
+    bucket = _prefix_master(master)
+    for builder, tests in sorted(builders_and_tests.iteritems()):
+      print_text.append('  %s: %s' % (builder, tests))
+      parameters = {
+          'builder_name': builder,
+          'changes': [
+              {'author': {'email': issue_props['owner_email']}},
+          ],
+          'properties': {
+              'category': category,
+              'issue': issue,
+              'master': master,
+              'patch_project': issue_props['project'],
+              'patch_storage': 'rietveld',
+              'patchset': patchset,
+              'reason': options.name,
+              'revision': options.revision,
+              'rietveld': rietveld_url,
+              'testfilter': tests,
+          },
+      }
+      if override_properties:
+        parameters['properties'].update(override_properties)
+      if options.clobber:
+        parameters['properties']['clobber'] = True
+      batch_req_body['builds'].append(
+          {
+              'bucket': bucket,
+              'parameters_json': json.dumps(parameters),
+              'tags': ['builder:%s' % builder,
+                       'buildset:%s' % buildset,
+                       'master:%s' % master,
+                       'user_agent:git_cl_try']
+          }
+      )
+
+  for try_count in xrange(3):
+    response, content = http.request(
+        buildbucket_put_url,
+        'PUT',
+        body=json.dumps(batch_req_body),
+        headers={'Content-Type': 'application/json'},
+    )
+    content_json = None
+    try:
+      content_json = json.loads(content)
+    except ValueError:
+      pass
+
+    # Buildbucket could return an error even if status==200.
+    if content_json and content_json.get('error'):
+      msg = 'Error in response. Code: %d. Reason: %s. Message: %s.' % (
+          content_json['error'].get('code', ''),
+          content_json['error'].get('reason', ''),
+          content_json['error'].get('message', ''))
+      raise BuildbucketResponseException(msg)
+
+    if response.status == 200:
+      if not content_json:
+        raise BuildbucketResponseException(
+            'Buildbucket returns invalid json content: %s.\n'
+            'Please file bugs at crbug.com, label "Infra-BuildBucket".' %
+            content)
+      break
+    if response.status < 500 or try_count >= 2:
+      raise httplib2.HttpLib2Error(content)
+
+    # status >= 500 means transient failures.
+    logging.debug('Transient errors when triggering tryjobs. Will retry.')
+    time.sleep(0.5 + 1.5*try_count)
+
+  print '\n'.join(print_text)
 
 
 def MatchSvnGlob(url, base_url, glob_spec, allow_wildcards):
@@ -269,6 +396,10 @@
       stdout=stdout, env=env)
 
 
+class BuildbucketResponseException(Exception):
+  pass
+
+
 class Settings(object):
   def __init__(self):
     self.default_server = None
@@ -283,6 +414,7 @@
     self.is_gerrit_autodetect_branch = None
     self.git_editor = None
     self.project = None
+    self.force_https_commit_url = None
     self.pending_ref_prefix = None
 
   def LazyUpdateIfNeeded(self):
@@ -423,6 +555,16 @@
   def GetBugPrefix(self):
     return self._GetRietveldConfig('bug-prefix', error_ok=True)
 
+  def GetIsSkipDependencyUpload(self, branch_name):
+    """Returns true if specified branch should skip dep uploads."""
+    return self._GetBranchConfig(branch_name, 'skip-deps-uploads',
+                                 error_ok=True)
+
+  def GetRunPostUploadHook(self):
+    run_post_upload_hook = self._GetRietveldConfig(
+        'run-post-upload-hook', error_ok=True)
+    return run_post_upload_hook == "True"
+
   def GetDefaultCCList(self):
     return self._GetRietveldConfig('cc', error_ok=True)
 
@@ -461,6 +603,12 @@
       self.project = self._GetRietveldConfig('project', error_ok=True)
     return self.project
 
+  def GetForceHttpsCommitUrl(self):
+    if not self.force_https_commit_url:
+      self.force_https_commit_url = self._GetRietveldConfig(
+          'force-https-commit-url', error_ok=True)
+    return self.force_https_commit_url
+
   def GetPendingRefPrefix(self):
     if not self.pending_ref_prefix:
       self.pending_ref_prefix = self._GetRietveldConfig(
@@ -470,6 +618,9 @@
   def _GetRietveldConfig(self, param, **kwargs):
     return self._GetConfig('rietveld.' + param, **kwargs)
 
+  def _GetBranchConfig(self, branch_name, param, **kwargs):
+    return self._GetConfig('branch.' + branch_name + '.' + param, **kwargs)
+
   def _GetConfig(self, param, **kwargs):
     self.LazyUpdateIfNeeded()
     return RunGit(['config', param], **kwargs).strip()
@@ -481,7 +632,7 @@
 
 
 class Changelist(object):
-  def __init__(self, branchref=None, issue=None):
+  def __init__(self, branchref=None, issue=None, auth_config=None):
     # Poke settings so we get the "configure your server" message if necessary.
     global settings
     if not settings:
@@ -501,11 +652,16 @@
     self.description = None
     self.lookedup_patchset = False
     self.patchset = None
-    self._rpc_server = None
     self.cc = None
     self.watchers = ()
-    self._remote = None
+    self._auth_config = auth_config
     self._props = None
+    self._remote = None
+    self._rpc_server = None
+
+  @property
+  def auth_config(self):
+    return self._auth_config
 
   def GetCCList(self):
     """Return the users cc'd on this CL.
@@ -533,7 +689,11 @@
   def GetBranch(self):
     """Returns the short branch name, e.g. 'master'."""
     if not self.branch:
-      self.branchref = RunGit(['symbolic-ref', 'HEAD']).strip()
+      branchref = RunGit(['symbolic-ref', 'HEAD'],
+                         stderr=subprocess2.VOID, error_ok=True).strip()
+      if not branchref:
+        return None
+      self.branchref = branchref
       self.branch = ShortBranchName(self.branchref)
     return self.branch
 
@@ -583,8 +743,12 @@
     return remote, upstream_branch
 
   def GetCommonAncestorWithUpstream(self):
+    upstream_branch = self.GetUpstreamBranch()
+    if not BranchExists(upstream_branch):
+      DieWithError('The upstream for the current branch (%s) does not exist '
+                   'anymore.\nPlease fix it and try again.' % self.GetBranch())
     return git_common.get_or_create_merge_base(self.GetBranch(),
-                                               self.GetUpstreamBranch())
+                                               upstream_branch)
 
   def GetUpstreamBranch(self):
     if self.upstream_branch is None:
@@ -636,6 +800,15 @@
   def GitSanityChecks(self, upstream_git_obj):
     """Checks git repo status and ensures diff is from local commits."""
 
+    if upstream_git_obj is None:
+      if self.GetBranch() is None:
+        print >> sys.stderr, (
+            'ERROR: unable to determine current branch (detached HEAD?)')
+      else:
+        print >> sys.stderr, (
+            'ERROR: no upstream branch')
+      return False
+
     # Verify the commit we're diffing against is in our current branch.
     upstream_sha = RunGit(['rev-parse', '--verify', upstream_git_obj]).strip()
     common_ancestor = RunGit(['merge-base', upstream_sha, 'HEAD']).strip()
@@ -676,6 +849,19 @@
     return RunGit(['config', 'branch.%s.base-url' % self.GetBranch()],
                   error_ok=True).strip()
 
+  def GetGitSvnRemoteUrl(self):
+    """Return the configured git-svn remote URL parsed from git svn info.
+
+    Returns None if it is not set.
+    """
+    # URL is dependent on the current directory.
+    data = RunGit(['svn', 'info'], cwd=settings.GetRoot())
+    if data:
+      keys = dict(line.split(': ', 1) for line in data.splitlines()
+                  if ': ' in line)
+      return keys.get('URL', None)
+    return None
+
   def GetRemoteUrl(self):
     """Return the configured remote URL, e.g. 'git://example.org/foo.git/'.
 
@@ -704,8 +890,10 @@
       # If we're on a branch then get the server potentially associated
       # with that branch.
       if self.GetIssue():
-        self.rietveld_server = gclient_utils.UpgradeToHttps(RunGit(
-            ['config', self._RietveldServer()], error_ok=True).strip())
+        rietveld_server_config = self._RietveldServer()
+        if rietveld_server_config:
+          self.rietveld_server = gclient_utils.UpgradeToHttps(RunGit(
+              ['config', rietveld_server_config], error_ok=True).strip())
       if not self.rietveld_server:
         self.rietveld_server = settings.GetDefaultServerUrl()
     return self.rietveld_server
@@ -787,6 +975,9 @@
   def GetApprovingReviewers(self):
     return get_approving_reviewers(self.GetIssueProperties())
 
+  def AddComment(self, message):
+    return self.RpcServer().add_comment(self.GetIssue(), message)
+
   def SetIssue(self, issue):
     """Set this branch's issue.  If issue=0, clears the issue."""
     if issue:
@@ -937,7 +1128,8 @@
     """
     if not self._rpc_server:
       self._rpc_server = rietveld.CachingRietveld(
-          self.GetRietveldServer(), None, None)
+          self.GetRietveldServer(),
+          self._auth_config or auth.make_auth_config())
     return self._rpc_server
 
   def _IssueSetting(self):
@@ -950,7 +1142,10 @@
 
   def _RietveldServer(self):
     """Returns the git setting that stores this change's rietveld server."""
-    return 'branch.%s.rietveldserver' % self.GetBranch()
+    branch = self.GetBranch()
+    if branch:
+      return 'branch.%s.rietveldserver' % branch
+    return None
 
 
 def GetCodereviewSettingsInteractively():
@@ -987,6 +1182,8 @@
               'tree-status-url', False)
   SetProperty(settings.GetViewVCUrl(), 'ViewVC URL', 'viewvc-url', True)
   SetProperty(settings.GetBugPrefix(), 'Bug Prefix', 'bug-prefix', False)
+  SetProperty(settings.GetRunPostUploadHook(), 'Run Post Upload Hook',
+              'run-post-upload-hook', False)
 
   # TODO: configure a default branch to diff against, rather than this
   # svn-based hackery.
@@ -1164,9 +1361,13 @@
   SetProperty('viewvc-url', 'VIEW_VC', unset_error_ok=True)
   SetProperty('bug-prefix', 'BUG_PREFIX', unset_error_ok=True)
   SetProperty('cpplint-regex', 'LINT_REGEX', unset_error_ok=True)
+  SetProperty('force-https-commit-url', 'FORCE_HTTPS_COMMIT_URL',
+              unset_error_ok=True)
   SetProperty('cpplint-ignore-regex', 'LINT_IGNORE_REGEX', unset_error_ok=True)
   SetProperty('project', 'PROJECT', unset_error_ok=True)
   SetProperty('pending-ref-prefix', 'PENDING_REF_PREFIX', unset_error_ok=True)
+  SetProperty('run-post-upload-hook', 'RUN_POST_UPLOAD_HOOK',
+              unset_error_ok=True)
 
   if 'GERRIT_HOST' in keyvals:
     RunGit(['config', 'gerrit.host', keyvals['GERRIT_HOST']])
@@ -1290,6 +1491,151 @@
     'error': Fore.WHITE,
   }.get(status, Fore.WHITE)
 
+def fetch_cl_status(branch, auth_config=None):
+  """Fetches information for an issue and returns (branch, issue, status)."""
+  cl = Changelist(branchref=branch, auth_config=auth_config)
+  url = cl.GetIssueURL()
+  status = cl.GetStatus()
+
+  if url and (not status or status == 'error'):
+    # The issue probably doesn't exist anymore.
+    url += ' (broken)'
+
+  return (branch, url, status)
+
+def get_cl_statuses(
+    branches, fine_grained, max_processes=None, auth_config=None):
+  """Returns a blocking iterable of (branch, issue, color) for given branches.
+
+  If fine_grained is true, this will fetch CL statuses from the server.
+  Otherwise, simply indicate if there's a matching url for the given branches.
+
+  If max_processes is specified, it is used as the maximum number of processes
+  to spawn to fetch CL status from the server. Otherwise 1 process per branch is
+  spawned.
+  """
+  # Silence upload.py otherwise it becomes unwieldy.
+  upload.verbosity = 0
+
+  if fine_grained:
+    # Process one branch synchronously to work through authentication, then
+    # spawn processes to process all the other branches in parallel.
+    if branches:
+      fetch = lambda branch: fetch_cl_status(branch, auth_config=auth_config)
+      yield fetch(branches[0])
+
+      branches_to_fetch = branches[1:]
+      pool = ThreadPool(
+          min(max_processes, len(branches_to_fetch))
+              if max_processes is not None
+              else len(branches_to_fetch))
+      for x in pool.imap_unordered(fetch, branches_to_fetch):
+        yield x
+  else:
+    # Do not use GetApprovingReviewers(), since it requires an HTTP request.
+    for b in branches:
+      cl = Changelist(branchref=b, auth_config=auth_config)
+      url = cl.GetIssueURL()
+      yield (b, url, 'waiting' if url else 'error')
+
+
+def upload_branch_deps(cl, args):
+  """Uploads CLs of local branches that are dependents of the current branch.
+
+  If the local branch dependency tree looks like:
+  test1 -> test2.1 -> test3.1
+                   -> test3.2
+        -> test2.2 -> test3.3
+
+  and you run "git cl upload --dependencies" from test1 then "git cl upload" is
+  run on the dependent branches in this order:
+  test2.1, test3.1, test3.2, test2.2, test3.3
+
+  Note: This function does not rebase your local dependent branches. Use it when
+        you make a change to the parent branch that will not conflict with its
+        dependent branches, and you would like their dependencies updated in
+        Rietveld.
+  """
+  if git_common.is_dirty_git_tree('upload-branch-deps'):
+    return 1
+
+  root_branch = cl.GetBranch()
+  if root_branch is None:
+    DieWithError('Can\'t find dependent branches from detached HEAD state. '
+                 'Get on a branch!')
+  if not cl.GetIssue() or not cl.GetPatchset():
+    DieWithError('Current branch does not have an uploaded CL. We cannot set '
+                 'patchset dependencies without an uploaded CL.')
+
+  branches = RunGit(['for-each-ref',
+                     '--format=%(refname:short) %(upstream:short)',
+                     'refs/heads'])
+  if not branches:
+    print('No local branches found.')
+    return 0
+
+  # Create a dictionary of all local branches to the branches that are dependent
+  # on it.
+  tracked_to_dependents = collections.defaultdict(list)
+  for b in branches.splitlines():
+    tokens = b.split()
+    if len(tokens) == 2:
+      branch_name, tracked = tokens
+      tracked_to_dependents[tracked].append(branch_name)
+
+  print
+  print 'The dependent local branches of %s are:' % root_branch
+  dependents = []
+  def traverse_dependents_preorder(branch, padding=''):
+    dependents_to_process = tracked_to_dependents.get(branch, [])
+    padding += '  '
+    for dependent in dependents_to_process:
+      print '%s%s' % (padding, dependent)
+      dependents.append(dependent)
+      traverse_dependents_preorder(dependent, padding)
+  traverse_dependents_preorder(root_branch)
+  print
+
+  if not dependents:
+    print 'There are no dependent local branches for %s' % root_branch
+    return 0
+
+  print ('This command will checkout all dependent branches and run '
+         '"git cl upload".')
+  ask_for_data('[Press enter to continue or ctrl-C to quit]')
+
+  # Add a default patchset title to all upload calls.
+  args.extend(['-t', 'Updated patchset dependency'])
+  # Record all dependents that failed to upload.
+  failures = {}
+  # Go through all dependents, checkout the branch and upload.
+  try:
+    for dependent_branch in dependents:
+      print
+      print '--------------------------------------'
+      print 'Running "git cl upload" from %s:' % dependent_branch
+      RunGit(['checkout', '-q', dependent_branch])
+      print
+      try:
+        if CMDupload(OptionParser(), args) != 0:
+          print 'Upload failed for %s!' % dependent_branch
+          failures[dependent_branch] = 1
+      except:  # pylint: disable=W0702
+        failures[dependent_branch] = 1
+      print
+  finally:
+    # Swap back to the original root branch.
+    RunGit(['checkout', '-q', root_branch])
+
+  print
+  print 'Upload complete for dependent branches!'
+  for dependent_branch in dependents:
+    upload_status = 'failed' if failures.get(dependent_branch) else 'succeeded'
+    print '  %s : %s' % (dependent_branch, upload_status)
+  print
+
+  return 0
+
 
 @subcommand.hidden
 def CMDstatus(parser, args):
@@ -1309,12 +1655,18 @@
                     help='print only specific field (desc|id|patch|url)')
   parser.add_option('-f', '--fast', action='store_true',
                     help='Do not retrieve review status')
-  (options, args) = parser.parse_args(args)
+  parser.add_option(
+      '-j', '--maxjobs', action='store', type=int,
+      help='The maximum number of jobs to use when retrieving review status')
+
+  auth.add_auth_options(parser)
+  options, args = parser.parse_args(args)
   if args:
     parser.error('Unsupported args: %s' % args)
+  auth_config = auth.extract_auth_config_from_options(options)
 
   if options.field:
-    cl = Changelist()
+    cl = Changelist(auth_config=auth_config)
     if options.field.startswith('desc'):
       print cl.GetDescription()
     elif options.field == 'id':
@@ -1336,67 +1688,41 @@
     print('No local branch found.')
     return 0
 
-  changes = (Changelist(branchref=b) for b in branches.splitlines())
+  changes = (
+      Changelist(branchref=b, auth_config=auth_config)
+      for b in branches.splitlines())
   branches = [c.GetBranch() for c in changes]
   alignment = max(5, max(len(b) for b in branches))
   print 'Branches associated with reviews:'
-  # Adhoc thread pool to request data concurrently.
-  output = Queue.Queue()
+  output = get_cl_statuses(branches,
+                           fine_grained=not options.fast,
+                           max_processes=options.maxjobs,
+                           auth_config=auth_config)
 
-  # Silence upload.py otherwise it becomes unweldly.
-  upload.verbosity = 0
-
-  if not options.fast:
-    def fetch(b):
-      """Fetches information for an issue and returns (branch, issue, color)."""
-      c = Changelist(branchref=b)
-      i = c.GetIssueURL()
-      status = c.GetStatus()
-      color = color_for_status(status)
-
-      if i and (not status or status == 'error'):
-        # The issue probably doesn't exist anymore.
-        i += ' (broken)'
-
-      output.put((b, i, color))
-
-    # Process one branch synchronously to work through authentication, then
-    # spawn threads to process all the other branches in parallel.
-    if branches:
-      fetch(branches[0])
-    threads = [
-      threading.Thread(target=fetch, args=(b,)) for b in branches[1:]]
-    for t in threads:
-      t.daemon = True
-      t.start()
-  else:
-    # Do not use GetApprovingReviewers(), since it requires an HTTP request.
-    for b in branches:
-      c = Changelist(branchref=b)
-      url = c.GetIssueURL()
-      output.put((b, url, Fore.BLUE if url else Fore.WHITE))
-
-  tmp = {}
+  branch_statuses = {}
   alignment = max(5, max(len(ShortBranchName(b)) for b in branches))
   for branch in sorted(branches):
-    while branch not in tmp:
-      b, i, color = output.get()
-      tmp[b] = (i, color)
-    issue, color = tmp.pop(branch)
+    while branch not in branch_statuses:
+      b, i, status = output.next()
+      branch_statuses[b] = (i, status)
+    issue_url, status = branch_statuses.pop(branch)
+    color = color_for_status(status)
     reset = Fore.RESET
     if not sys.stdout.isatty():
       color = ''
       reset = ''
-    print '  %*s : %s%s%s' % (
-          alignment, ShortBranchName(branch), color, issue, reset)
+    status_str = '(%s)' % status if status else ''
+    print '  %*s : %s%s %s%s' % (
+          alignment, ShortBranchName(branch), color, issue_url, status_str,
+          reset)
 
-  cl = Changelist()
+  cl = Changelist(auth_config=auth_config)
   print
   print 'Current branch:',
-  if not cl.GetIssue():
-    print 'no issue assigned.'
-    return 0
   print cl.GetBranch()
+  if not cl.GetIssue():
+    print 'No issue assigned.'
+    return 0
   print 'Issue number: %s (%s)' % (cl.GetIssue(), cl.GetIssueURL())
   if not options.fast:
     print 'Issue description:'
@@ -1427,56 +1753,101 @@
 
   Pass issue number 0 to clear the current issue.
   """
-  _, args = parser.parse_args(args)
+  parser.add_option('-r', '--reverse', action='store_true',
+                    help='Lookup the branch(es) for the specified issues. If '
+                         'no issues are specified, all branches with mapped '
+                         'issues will be listed.')
+  options, args = parser.parse_args(args)
 
-  cl = Changelist()
-  if len(args) > 0:
-    try:
-      issue = int(args[0])
-    except ValueError:
-      DieWithError('Pass a number to set the issue or none to list it.\n'
-          'Maybe you want to run git cl status?')
-    cl.SetIssue(issue)
-  print 'Issue number: %s (%s)' % (cl.GetIssue(), cl.GetIssueURL())
+  if options.reverse:
+    branches = RunGit(['for-each-ref', 'refs/heads',
+                       '--format=%(refname:short)']).splitlines()
+
+    # Reverse issue lookup.
+    issue_branch_map = {}
+    for branch in branches:
+      cl = Changelist(branchref=branch)
+      issue_branch_map.setdefault(cl.GetIssue(), []).append(branch)
+    if not args:
+      args = sorted(issue_branch_map.iterkeys())
+    for issue in args:
+      if not issue:
+        continue
+      print 'Branch for issue number %s: %s' % (
+          issue, ', '.join(issue_branch_map.get(int(issue)) or ('None',)))
+  else:
+    cl = Changelist()
+    if len(args) > 0:
+      try:
+        issue = int(args[0])
+      except ValueError:
+        DieWithError('Pass a number to set the issue or none to list it.\n'
+            'Maybe you want to run git cl status?')
+      cl.SetIssue(issue)
+    print 'Issue number: %s (%s)' % (cl.GetIssue(), cl.GetIssueURL())
   return 0
 
 
 @subcommand.hidden
 def CMDcomments(parser, args):
-  """Shows review comments of the current changelist."""
-  (_, args) = parser.parse_args(args)
-  if args:
-    parser.error('Unsupported argument: %s' % args)
+  """Shows or posts review comments for any changelist."""
+  parser.add_option('-a', '--add-comment', dest='comment',
+                    help='comment to add to an issue')
+  parser.add_option('-i', dest='issue',
+                    help="review issue id (defaults to current issue)")
+  auth.add_auth_options(parser)
+  options, args = parser.parse_args(args)
+  auth_config = auth.extract_auth_config_from_options(options)
 
-  cl = Changelist()
-  if cl.GetIssue():
-    data = cl.GetIssueProperties()
-    for message in sorted(data['messages'], key=lambda x: x['date']):
-      if message['disapproval']:
-        color = Fore.RED
-      elif message['approval']:
-        color = Fore.GREEN
-      elif message['sender'] == data['owner_email']:
-        color = Fore.MAGENTA
-      else:
-        color = Fore.BLUE
-      print '\n%s%s  %s%s' % (
-          color, message['date'].split('.', 1)[0], message['sender'],
-          Fore.RESET)
-      if message['text'].strip():
-        print '\n'.join('  ' + l for l in message['text'].splitlines())
+  issue = None
+  if options.issue:
+    try:
+      issue = int(options.issue)
+    except ValueError:
+      DieWithError('A review issue id is expected to be a number')
+
+  cl = Changelist(issue=issue, auth_config=auth_config)
+
+  if options.comment:
+    cl.AddComment(options.comment)
+    return 0
+
+  data = cl.GetIssueProperties()
+  for message in sorted(data.get('messages', []), key=lambda x: x['date']):
+    if message['disapproval']:
+      color = Fore.RED
+    elif message['approval']:
+      color = Fore.GREEN
+    elif message['sender'] == data['owner_email']:
+      color = Fore.MAGENTA
+    else:
+      color = Fore.BLUE
+    print '\n%s%s  %s%s' % (
+        color, message['date'].split('.', 1)[0], message['sender'],
+        Fore.RESET)
+    if message['text'].strip():
+      print '\n'.join('  ' + l for l in message['text'].splitlines())
   return 0
 
 
 @subcommand.hidden
 def CMDdescription(parser, args):
   """Brings up the editor for the current CL's description."""
-  cl = Changelist()
+  parser.add_option('-d', '--display', action='store_true',
+                    help='Display the description instead of opening an editor')
+  auth.add_auth_options(parser)
+  options, _ = parser.parse_args(args)
+  auth_config = auth.extract_auth_config_from_options(options)
+  cl = Changelist(auth_config=auth_config)
   if not cl.GetIssue():
     DieWithError('This branch has no associated changelist.')
   description = ChangeDescription(cl.GetDescription())
+  if options.display:
+    print description.description
+    return 0
   description.prompt()
-  cl.UpdateDescription(description.description)
+  if cl.GetDescription() != description.description:
+    cl.UpdateDescription(description.description)
   return 0
 
 
@@ -1498,7 +1869,9 @@
   """Runs cpplint on the current changelist."""
   parser.add_option('--filter', action='append', metavar='-x,+y',
                     help='Comma-separated list of cpplint\'s category-filters')
-  (options, args) = parser.parse_args(args)
+  auth.add_auth_options(parser)
+  options, args = parser.parse_args(args)
+  auth_config = auth.extract_auth_config_from_options(options)
 
   # Access to a protected member _XX of a client class
   # pylint: disable=W0212
@@ -1514,7 +1887,7 @@
   previous_cwd = os.getcwd()
   os.chdir(settings.GetRoot())
   try:
-    cl = Changelist()
+    cl = Changelist(auth_config=auth_config)
     change = cl.GetChange(cl.GetCommonAncestorWithUpstream(), None)
     files = [f.LocalPath() for f in change.AffectedFiles()]
     if not files:
@@ -1553,13 +1926,15 @@
                     help='Run commit hook instead of the upload hook')
   parser.add_option('-f', '--force', action='store_true',
                     help='Run checks even if tree is dirty')
-  (options, args) = parser.parse_args(args)
+  auth.add_auth_options(parser)
+  options, args = parser.parse_args(args)
+  auth_config = auth.extract_auth_config_from_options(options)
 
-  if not options.force and is_dirty_git_tree('presubmit'):
+  if not options.force and git_common.is_dirty_git_tree('presubmit'):
     print 'use --force to check even if tree is dirty.'
     return 1
 
-  cl = Changelist()
+  cl = Changelist(auth_config=auth_config)
   if args:
     base_branch = args[0]
   else:
@@ -1610,15 +1985,72 @@
   if not change_desc.description:
     print "Description is empty; aborting."
     return 1
-  if CHANGE_ID not in change_desc.description:
-    AddChangeIdToCommitMessage(options, args)
 
-  commits = RunGit(['rev-list', '%s/%s..' % (remote, branch)]).splitlines()
+  if options.squash:
+    # Try to get the message from a previous upload.
+    shadow_branch = 'refs/heads/git_cl_uploads/' + cl.GetBranch()
+    message = RunGitSilent(['show', '--format=%s\n\n%b', '-s', shadow_branch])
+    if not message:
+      if not options.force:
+        change_desc.prompt()
+
+      if CHANGE_ID not in change_desc.description:
+        # Run the commit-msg hook without modifying the head commit by writing
+        # the commit message to a temporary file and running the hook over it,
+        # then reading the file back in.
+        commit_msg_hook = os.path.join(settings.GetRoot(), '.git', 'hooks',
+                                       'commit-msg')
+        file_handle, msg_file = tempfile.mkstemp(text=True,
+                                                 prefix='commit_msg')
+        try:
+          try:
+            with os.fdopen(file_handle, 'w') as fileobj:
+              fileobj.write(change_desc.description)
+          finally:
+            os.close(file_handle)
+            RunCommand([commit_msg_hook, msg_file])
+            change_desc.set_description(gclient_utils.FileRead(msg_file))
+        finally:
+          os.remove(msg_file)
+
+      if not change_desc.description:
+        print "Description is empty; aborting."
+        return 1
+
+      message = change_desc.description
+
+    remote, upstream_branch = cl.FetchUpstreamTuple(cl.GetBranch())
+    if remote is '.':
+      # If our upstream branch is local, we base our squashed commit on its
+      # squashed version.
+      parent = ('refs/heads/git_cl_uploads/' +
+                scm.GIT.ShortBranchName(upstream_branch))
+
+      # Verify that the upstream branch has been uploaded too, otherwise Gerrit
+      # will create additional CLs when uploading.
+      if (RunGitSilent(['rev-parse', upstream_branch + ':']) !=
+          RunGitSilent(['rev-parse', parent + ':'])):
+        print 'Upload upstream branch ' + upstream_branch + ' first.'
+        return 1
+    else:
+      parent = cl.GetCommonAncestorWithUpstream()
+
+    tree = RunGit(['rev-parse', 'HEAD:']).strip()
+    ref_to_push = RunGit(['commit-tree', tree, '-p', parent,
+                          '-m', message]).strip()
+  else:
+    if CHANGE_ID not in change_desc.description:
+      AddChangeIdToCommitMessage(options, args)
+    ref_to_push = 'HEAD'
+    parent = '%s/%s' % (gerrit_remote, branch)
+
+  commits = RunGitSilent(['rev-list', '%s..%s' % (parent,
+                                                  ref_to_push)]).splitlines()
   if len(commits) > 1:
     print('WARNING: This will upload %d commits. Run the following command '
           'to see which commits will be uploaded: ' % len(commits))
-    print('git log %s/%s..' % (remote, branch))
-    print('You can also use `git squash-branch` to squash these into a single'
+    print('git log %s..%s' % (parent, ref_to_push))
+    print('You can also use `git squash-branch` to squash these into a single '
           'commit.')
     if ask_for_data('About to upload; continue (y/N)? ').lower() != 'y':
       return 0
@@ -1641,16 +2073,77 @@
   if receive_options:
     git_command.append('--receive-pack=git receive-pack %s' %
                        ' '.join(receive_options))
-  git_command += [remote, 'HEAD:refs/for/' + branch]
+  git_command += [gerrit_remote, ref_to_push + ':refs/for/' + branch]
   RunGit(git_command)
+
+  if options.squash:
+    head = RunGit(['rev-parse', 'HEAD']).strip()
+    RunGit(['update-ref', '-m', 'Uploaded ' + head, shadow_branch, ref_to_push])
+
   # TODO(ukai): parse Change-Id: and set issue number?
   return 0
 
 
+def GetTargetRef(remote, remote_branch, target_branch, pending_prefix):
+  """Computes the remote branch ref to use for the CL.
+
+  Args:
+    remote (str): The git remote for the CL.
+    remote_branch (str): The git remote branch for the CL.
+    target_branch (str): The target branch specified by the user.
+    pending_prefix (str): The pending prefix from the settings.
+  """
+  if not (remote and remote_branch):
+    return None
+
+  if target_branch:
+    # Canonicalize branch references to the equivalent local full symbolic
+    # refs, which are then translated into the remote full symbolic refs
+    # below.
+    if '/' not in target_branch:
+      remote_branch = 'refs/remotes/%s/%s' % (remote, target_branch)
+    else:
+      prefix_replacements = (
+        ('^((refs/)?remotes/)?branch-heads/', 'refs/remotes/branch-heads/'),
+        ('^((refs/)?remotes/)?%s/' % remote,  'refs/remotes/%s/' % remote),
+        ('^(refs/)?heads/',                   'refs/remotes/%s/' % remote),
+      )
+      match = None
+      for regex, replacement in prefix_replacements:
+        match = re.search(regex, target_branch)
+        if match:
+          remote_branch = target_branch.replace(match.group(0), replacement)
+          break
+      if not match:
+        # This is a branch path but not one we recognize; use as-is.
+        remote_branch = target_branch
+  elif remote_branch in REFS_THAT_ALIAS_TO_OTHER_REFS:
+    # Handle the refs that need to land in different refs.
+    remote_branch = REFS_THAT_ALIAS_TO_OTHER_REFS[remote_branch]
+
+  # Create the true path to the remote branch.
+  # Does the following translation:
+  # * refs/remotes/origin/refs/diff/test -> refs/diff/test
+  # * refs/remotes/origin/master -> refs/heads/master
+  # * refs/remotes/branch-heads/test -> refs/branch-heads/test
+  if remote_branch.startswith('refs/remotes/%s/refs/' % remote):
+    remote_branch = remote_branch.replace('refs/remotes/%s/' % remote, '')
+  elif remote_branch.startswith('refs/remotes/%s/' % remote):
+    remote_branch = remote_branch.replace('refs/remotes/%s/' % remote,
+                                          'refs/heads/')
+  elif remote_branch.startswith('refs/remotes/branch-heads'):
+    remote_branch = remote_branch.replace('refs/remotes/', 'refs/')
+  # If a pending prefix exists then replace refs/ with it.
+  if pending_prefix:
+    remote_branch = remote_branch.replace('refs/', pending_prefix)
+  return remote_branch
+
+
 def RietveldUpload(options, args, cl, change):
   """upload the patch to rietveld."""
   upload_args = ['--assume_yes']  # Don't ask about untracked files.
   upload_args.extend(['--server', cl.GetRietveldServer()])
+  upload_args.extend(auth.auth_config_to_command_options(cl.auth_config))
   if options.emulate_svn_auto_props:
     upload_args.append('--emulate_svn_auto_props')
 
@@ -1717,23 +2210,54 @@
   remote_url = cl.GetGitBaseUrlFromConfig()
   if not remote_url:
     if settings.GetIsGitSvn():
-      # URL is dependent on the current directory.
-      data = RunGit(['svn', 'info'], cwd=settings.GetRoot())
-      if data:
-        keys = dict(line.split(': ', 1) for line in data.splitlines()
-                    if ': ' in line)
-        remote_url = keys.get('URL', None)
+      remote_url = cl.GetGitSvnRemoteUrl()
     else:
       if cl.GetRemoteUrl() and '/' in cl.GetUpstreamBranch():
         remote_url = (cl.GetRemoteUrl() + '@'
                       + cl.GetUpstreamBranch().split('/')[-1])
   if remote_url:
     upload_args.extend(['--base_url', remote_url])
+    remote, remote_branch = cl.GetRemoteBranch()
+    target_ref = GetTargetRef(remote, remote_branch, options.target_branch,
+                              settings.GetPendingRefPrefix())
+    if target_ref:
+      upload_args.extend(['--target_ref', target_ref])
+
+    # Look for dependent patchsets. See crbug.com/480453 for more details.
+    remote, upstream_branch = cl.FetchUpstreamTuple(cl.GetBranch())
+    upstream_branch = ShortBranchName(upstream_branch)
+    if remote is '.':
+      # A local branch is being tracked.
+      local_branch = ShortBranchName(upstream_branch)
+      if settings.GetIsSkipDependencyUpload(local_branch):
+        print
+        print ('Skipping dependency patchset upload because git config '
+               'branch.%s.skip-deps-uploads is set to True.' % local_branch)
+        print
+      else:
+        auth_config = auth.extract_auth_config_from_options(options)
+        branch_cl = Changelist(branchref=local_branch, auth_config=auth_config)
+        branch_cl_issue_url = branch_cl.GetIssueURL()
+        branch_cl_issue = branch_cl.GetIssue()
+        branch_cl_patchset = branch_cl.GetPatchset()
+        if branch_cl_issue_url and branch_cl_issue and branch_cl_patchset:
+          upload_args.extend(
+              ['--depends_on_patchset', '%s:%s' % (
+                   branch_cl_issue, branch_cl_patchset)])
+          print
+          print ('The current branch (%s) is tracking a local branch (%s) with '
+                 'an associated CL.') % (cl.GetBranch(), local_branch)
+          print 'Adding %s/#ps%s as a dependency patchset.' % (
+              branch_cl_issue_url, branch_cl_patchset)
+          print
 
   project = settings.GetProject()
   if project:
     upload_args.extend(['--project', project])
 
+  if options.cq_dry_run:
+    upload_args.extend(['--cq_dry_run'])
+
   try:
     upload_args = ['upload'] + upload_args + args
     logging.info('upload.RealMain(%s)', upload_args)
@@ -1776,7 +2300,14 @@
 
 @subcommand.usage('[args to "git diff"]')
 def CMDupload(parser, args):
-  """Uploads the current changelist to codereview."""
+  """Uploads the current changelist to codereview.
+
+  Can skip dependency patchset uploads for a branch by running:
+    git config branch.branch_name.skip-deps-uploads True
+  To unset run:
+    git config --unset branch.branch_name.skip-deps-uploads
+  Can also set the above globally by using the --global flag.
+  """
   parser.add_option('--bypass-hooks', action='store_true', dest='bypass_hooks',
                     help='bypass upload presubmit hook')
   parser.add_option('--bypass-watchlists', action='store_true',
@@ -1805,34 +2336,55 @@
                     help='set the review private (rietveld only)')
   parser.add_option('--target_branch',
                     '--target-branch',
-                    help='When uploading to gerrit, remote branch to '
-                         'use for CL.  Default: master')
+                    metavar='TARGET',
+                    help='Apply CL to remote ref TARGET.  ' +
+                         'Default: remote branch head, or master')
+  parser.add_option('--squash', action='store_true',
+                    help='Squash multiple commits into one (Gerrit only)')
   parser.add_option('--email', default=None,
                     help='email address to use to connect to Rietveld')
   parser.add_option('--tbr-owners', dest='tbr_owners', action='store_true',
                     help='add a set of OWNERS to TBR')
+  parser.add_option('--cq-dry-run', dest='cq_dry_run', action='store_true',
+                    help='Send the patchset to do a CQ dry run right after '
+                         'upload.')
+  parser.add_option('--dependencies', action='store_true',
+                    help='Uploads CLs of all the local branches that depend on '
+                         'the current branch')
 
+  orig_args = args
   add_git_similarity(parser)
+  auth.add_auth_options(parser)
   (options, args) = parser.parse_args(args)
+  auth_config = auth.extract_auth_config_from_options(options)
 
-  if options.target_branch and not settings.GetIsGerrit():
-    parser.error('Use --target_branch for non gerrit repository.')
-
-  if is_dirty_git_tree('upload'):
+  if git_common.is_dirty_git_tree('upload'):
     return 1
 
   options.reviewers = cleanup_list(options.reviewers)
   options.cc = cleanup_list(options.cc)
 
-  cl = Changelist()
+  cl = Changelist(auth_config=auth_config)
   if args:
     # TODO(ukai): is it ok for gerrit case?
     base_branch = args[0]
   else:
+    if cl.GetBranch() is None:
+      DieWithError('Can\'t upload from detached HEAD state. Get on a branch!')
+
     # Default to diffing against common ancestor of upstream branch
     base_branch = cl.GetCommonAncestorWithUpstream()
     args = [base_branch, 'HEAD']
 
+  # Make sure authenticated to Rietveld before running expensive hooks. It is
+  # a fast, best efforts check. Rietveld still can reject the authentication
+  # during the actual upload.
+  if not settings.GetIsGerrit() and auth_config.use_oauth2:
+    authenticator = auth.get_authenticator_for_host(
+        cl.GetRietveldServer(), auth_config)
+    if not authenticator.has_cached_credentials():
+      raise auth.LoginRequiredError(cl.GetRietveldServer())
+
   # Apply watchlists on upload.
   change = cl.GetChange(base_branch, None)
   watchlist = watchlists.Watchlists(change.RepositoryRoot())
@@ -1876,7 +2428,25 @@
   if not ret:
     git_set_branch_value('last-upload-hash',
                          RunGit(['rev-parse', 'HEAD']).strip())
+    # Run post upload hooks, if specified.
+    if settings.GetRunPostUploadHook():
+      presubmit_support.DoPostUploadExecuter(
+          change,
+          cl,
+          settings.GetRoot(),
+          options.verbose,
+          sys.stdout)
 
+    # Upload all dependencies if specified.
+    if options.dependencies:
+      print
+      print '--dependencies has been specified.'
+      print 'All dependent local branches will be re-uploaded.'
+      print
+      # Remove the dependencies flag from args so that we do not end up in a
+      # loop.
+      orig_args.remove('--dependencies')
+      upload_branch_deps(cl, orig_args)
   return ret
 
 
@@ -1907,8 +2477,11 @@
                          "description and used as author for git). Should be " +
                          "formatted as 'First Last <email@example.com>'")
   add_git_similarity(parser)
+  auth.add_auth_options(parser)
   (options, args) = parser.parse_args(args)
-  cl = Changelist()
+  auth_config = auth.extract_auth_config_from_options(options)
+
+  cl = Changelist(auth_config=auth_config)
 
   current = cl.GetBranch()
   remote, upstream_branch = cl.FetchUpstreamTuple(cl.GetBranch())
@@ -1937,7 +2510,7 @@
   base_branch = args[0]
   base_has_submodules = IsSubmoduleMergeCommit(base_branch)
 
-  if is_dirty_git_tree(cmd):
+  if git_common.is_dirty_git_tree(cmd):
     return 1
 
   # This rev-list syntax means "show all commits not in my branch that
@@ -2022,7 +2595,11 @@
 
   commit_desc = ChangeDescription(change_desc.description)
   if cl.GetIssue():
-    commit_desc.append_footer('Review URL: %s' % cl.GetIssueURL())
+    # Xcode won't linkify this URL unless there is a non-whitespace character
+    # after it. Add a period on a new line to circumvent this. Also add a space
+    # before the period to make sure that Gitiles continues to correctly resolve
+    # the URL.
+    commit_desc.append_footer('Review URL: %s .' % cl.GetIssueURL())
   if options.contributor:
     commit_desc.append_footer('Patch from %s.' % options.contributor)
 
@@ -2097,9 +2674,19 @@
         revision = RunGit(['rev-parse', 'HEAD']).strip()
     else:
       # dcommit the merge branch.
-      _, output = RunGitWithCode(['svn', 'dcommit',
-                                  '-C%s' % options.similarity,
-                                  '--no-rebase', '--rmdir'])
+      cmd_args = [
+        'svn', 'dcommit',
+        '-C%s' % options.similarity,
+        '--no-rebase', '--rmdir',
+      ]
+      if settings.GetForceHttpsCommitUrl():
+        # Allow forcing https commit URLs for some projects that don't allow
+        # committing to http URLs (like Google Code).
+        remote_url = cl.GetGitSvnRemoteUrl()
+        if urlparse.urlparse(remote_url).scheme == 'http':
+          remote_url = remote_url.replace('http://', 'https://')
+        cmd_args.append('--commit-url=%s' % remote_url)
+      _, output = RunGitWithCode(cmd_args)
       if 'Committed r' in output:
         revision = re.match(
           '.*?\nCommitted r(\\d+)', output, re.DOTALL).group(1)
@@ -2141,7 +2728,7 @@
     props = cl.GetIssueProperties()
     patch_num = len(props['patchsets'])
     comment = "Committed patchset #%d (id:%d)%s manually as %s" % (
-        patch_num, props['patchsets'][-1], to_pending, revision[:7])
+        patch_num, props['patchsets'][-1], to_pending, revision)
     if options.bypass_hooks:
       comment += ' (tree was closed).' if GetTreeStatus() == 'closed' else '.'
     else:
@@ -2260,13 +2847,20 @@
 def CMDdcommit(parser, args):
   """Commits the current changelist via git-svn."""
   if not settings.GetIsGitSvn():
-    message = """This doesn't appear to be an SVN repository.
-If your project has a git mirror with an upstream SVN master, you probably need
-to run 'git svn init', see your project's git mirror documentation.
-If your project has a true writeable upstream repository, you probably want
-to run 'git cl land' instead.
-Choose wisely, if you get this wrong, your commit might appear to succeed but
-will instead be silently ignored."""
+    if get_footer_svn_id():
+      # If it looks like previous commits were mirrored with git-svn.
+      message = """This repository appears to be a git-svn mirror, but no
+upstream SVN master is set. You probably need to run 'git auto-svn' once."""
+    else:
+      message = """This doesn't appear to be an SVN repository.
+If your project has a true, writeable git repository, you probably want to run
+'git cl land' instead.
+If your project has a git mirror of an upstream SVN master, you probably need
+to run 'git svn init'.
+
+Using the wrong command might cause your commit to appear to succeed, and the
+review to be closed, without actually landing upstream. If you choose to
+proceed, please verify that the commit lands upstream as expected."""
     print(message)
     ask_for_data('[Press enter to dcommit or ctrl-C to quit]')
   return SendUpstream(parser, args, 'dcommit')
@@ -2276,9 +2870,10 @@
 @subcommand.usage('[upstream branch to apply against]')
 def CMDland(parser, args):
   """Commits the current changelist via git."""
-  if settings.GetIsGitSvn():
+  if settings.GetIsGitSvn() or get_footer_svn_id():
     print('This appears to be an SVN repository.')
     print('Are you sure you didn\'t mean \'git cl dcommit\'?')
+    print('(Ignore if this is the first commit after migrating from svn->git)')
     ask_for_data('[Press enter to push or ctrl-C to quit]')
   return SendUpstream(parser, args, 'land')
 
@@ -2299,12 +2894,19 @@
                         'attempting a 3-way merge')
   parser.add_option('-n', '--no-commit', action='store_true', dest='nocommit',
                     help="don't commit after patch applies")
+  auth.add_auth_options(parser)
   (options, args) = parser.parse_args(args)
+  auth_config = auth.extract_auth_config_from_options(options)
+
   if len(args) != 1:
     parser.print_help()
     return 1
   issue_arg = args[0]
 
+  # We don't want uncommitted changes mixed up with the patch.
+  if git_common.is_dirty_git_tree('patch'):
+    return 1
+
   # TODO(maruel): Use apply_issue.py
   # TODO(ukai): use gerrit-cherry-pick for gerrit repository?
 
@@ -2316,25 +2918,32 @@
             Changelist().GetUpstreamBranch()])
 
   return PatchIssue(issue_arg, options.reject, options.nocommit,
-                    options.directory)
+                    options.directory, auth_config)
 
 
-def PatchIssue(issue_arg, reject, nocommit, directory):
+def PatchIssue(issue_arg, reject, nocommit, directory, auth_config):
+  # PatchIssue should never be called with a dirty tree.  It is up to the
+  # caller to check this, but just in case we assert here since the
+  # consequences of the caller not checking this could be dire.
+  assert(not git_common.is_dirty_git_tree('apply'))
+
   if type(issue_arg) is int or issue_arg.isdigit():
     # Input is an issue id.  Figure out the URL.
     issue = int(issue_arg)
-    cl = Changelist(issue=issue)
+    cl = Changelist(issue=issue, auth_config=auth_config)
     patchset = cl.GetMostRecentPatchset()
     patch_data = cl.GetPatchSetDiff(issue, patchset)
   else:
     # Assume it's a URL to the patch. Default to https.
     issue_url = gclient_utils.UpgradeToHttps(issue_arg)
-    match = re.match(r'.*?/issue(\d+)_(\d+).diff', issue_url)
+    match = re.match(r'(.*?)/download/issue(\d+)_(\d+).diff', issue_url)
     if not match:
       DieWithError('Must pass an issue ID or full URL for '
           '\'Download raw patch set\'')
-    issue = int(match.group(1))
-    patchset = int(match.group(2))
+    issue = int(match.group(2))
+    cl = Changelist(issue=issue, auth_config=auth_config)
+    cl.rietveld_server = match.group(1)
+    patchset = int(match.group(3))
     patch_data = urllib2.urlopen(issue_arg).read()
 
   # Switch up to the top-level directory, if necessary, in preparation for
@@ -2368,12 +2977,16 @@
     subprocess2.check_call(cmd, env=GetNoGitPagerEnv(),
                            stdin=patch_data, stdout=subprocess2.VOID)
   except subprocess2.CalledProcessError:
-    DieWithError('Failed to apply the patch')
+    print 'Failed to apply the patch'
+    return 1
 
   # If we had an issue, commit the current state and register the issue.
   if not nocommit:
-    RunGit(['commit', '-m', 'patch from issue %s' % issue])
-    cl = Changelist()
+    RunGit(['commit', '-m', (cl.GetDescription() + '\n\n' +
+                             'patch from issue %(i)s at patchset '
+                             '%(p)s (http://crrev.com/%(i)s#ps%(p)s)'
+                             % {'i': issue, 'p': patchset})])
+    cl = Changelist(auth_config=auth_config)
     cl.SetIssue(issue)
     cl.SetPatchset(patchset)
     print "Committed patch locally."
@@ -2474,10 +3087,9 @@
       "-b", "--bot", action="append",
       help=("IMPORTANT: specify ONE builder per --bot flag. Use it multiple "
             "times to specify multiple builders. ex: "
-            "'-b win_rel:ui_tests,webkit_unit_tests -b win_layout'. See "
+            "'-b win_rel -b win_layout'. See "
             "the try server waterfall for the builders name and the tests "
-            "available. Can also be used to specify gtest_filter, e.g. "
-            "-b win_rel:base_unittests:ValuesTest.*Value"))
+            "available."))
   group.add_option(
       "-m", "--master", default='',
       help=("Specify a try master where to run the tries."))
@@ -2495,19 +3107,19 @@
       help="Override which project to use. Projects are defined "
            "server-side to define what default bot set to use")
   group.add_option(
-      "-t", "--testfilter", action="append", default=[],
-      help=("Apply a testfilter to all the selected builders. Unless the "
-            "builders configurations are similar, use multiple "
-            "--bot <builder>:<test> arguments."))
-  group.add_option(
       "-n", "--name", help="Try job name; default to current branch name")
+  group.add_option(
+      "--use-buildbucket", action="store_true", default=False,
+      help="Use buildbucket to trigger try jobs.")
   parser.add_option_group(group)
+  auth.add_auth_options(parser)
   options, args = parser.parse_args(args)
+  auth_config = auth.extract_auth_config_from_options(options)
 
   if args:
     parser.error('Unknown arguments: %s' % args)
 
-  cl = Changelist()
+  cl = Changelist(auth_config=auth_config)
   if not cl.GetIssue():
     parser.error('Need to upload first')
 
@@ -2529,7 +3141,7 @@
                    ', e.g. "-m tryserver.chromium.linux".' % err_msg)
 
   def GetMasterMap():
-    # Process --bot and --testfilter.
+    # Process --bot.
     if not options.bot:
       change = cl.GetChange(cl.GetCommonAncestorWithUpstream(), None)
 
@@ -2565,8 +3177,7 @@
 
     for bot in old_style:
       if ':' in bot:
-        builder, tests = bot.split(':', 1)
-        builders_and_tests.setdefault(builder, []).extend(tests.split(','))
+        parser.error('Specifying testfilter is no longer supported')
       elif ',' in bot:
         parser.error('Specify one bot per --bot flag')
       else:
@@ -2582,12 +3193,6 @@
 
   masters = GetMasterMap()
 
-  if options.testfilter:
-    forced_tests = sum((t.split(',') for t in options.testfilter), [])
-    masters = dict((master, dict(
-        (b, forced_tests) for b, t in slaves.iteritems()
-        if t != ['compile'])) for master, slaves in masters.iteritems())
-
   for builders in masters.itervalues():
     if any('triggered' in b for b in builders):
       print >> sys.stderr, (
@@ -2603,23 +3208,35 @@
         '\nWARNING Mismatch between local config and server. Did a previous '
         'upload fail?\ngit-cl try always uses latest patchset from rietveld. '
         'Continuing using\npatchset %s.\n' % patchset)
-  try:
-    cl.RpcServer().trigger_distributed_try_jobs(
-        cl.GetIssue(), patchset, options.name, options.clobber,
-        options.revision, masters)
-  except urllib2.HTTPError, e:
-    if e.code == 404:
-      print('404 from rietveld; '
-            'did you mean to use "git try" instead of "git cl try"?')
+  if options.use_buildbucket:
+    try:
+      trigger_try_jobs(auth_config, cl, options, masters, 'git_cl_try')
+    except BuildbucketResponseException as ex:
+      print 'ERROR: %s' % ex
       return 1
-  print('Tried jobs on:')
+    except Exception as e:
+      stacktrace = (''.join(traceback.format_stack()) + traceback.format_exc())
+      print 'ERROR: Exception when trying to trigger tryjobs: %s\n%s' % (
+          e, stacktrace)
+      return 1
+  else:
+    try:
+      cl.RpcServer().trigger_distributed_try_jobs(
+          cl.GetIssue(), patchset, options.name, options.clobber,
+          options.revision, masters)
+    except urllib2.HTTPError as e:
+      if e.code == 404:
+        print('404 from rietveld; '
+              'did you mean to use "git try" instead of "git cl try"?')
+        return 1
+    print('Tried jobs on:')
 
-  for (master, builders) in masters.iteritems():
-    if master:
-      print 'Master: %s' % master
-    length = max(len(builder) for builder in builders)
-    for builder in sorted(builders):
-      print '  %*s: %s' % (length, builder, ','.join(builders[builder]))
+    for (master, builders) in sorted(masters.iteritems()):
+      if master:
+        print 'Master: %s' % master
+      length = max(len(builder) for builder in builders)
+      for builder in sorted(builders):
+        print '  %*s: %s' % (length, builder, ','.join(builders[builder]))
   return 0
 
 
@@ -2664,10 +3281,12 @@
 @subcommand.hidden
 def CMDset_commit(parser, args):
   """Sets the commit bit to trigger the Commit Queue."""
-  _, args = parser.parse_args(args)
+  auth.add_auth_options(parser)
+  options, args = parser.parse_args(args)
+  auth_config = auth.extract_auth_config_from_options(options)
   if args:
     parser.error('Unrecognized args: %s' % ' '.join(args))
-  cl = Changelist()
+  cl = Changelist(auth_config=auth_config)
   props = cl.GetIssueProperties()
   if props.get('private'):
     parser.error('Cannot set commit on private issue')
@@ -2678,10 +3297,12 @@
 @subcommand.hidden
 def CMDset_close(parser, args):
   """Closes the issue."""
-  _, args = parser.parse_args(args)
+  auth.add_auth_options(parser)
+  options, args = parser.parse_args(args)
+  auth_config = auth.extract_auth_config_from_options(options)
   if args:
     parser.error('Unrecognized args: %s' % ' '.join(args))
-  cl = Changelist()
+  cl = Changelist(auth_config=auth_config)
   # Ensure there actually is an issue to close.
   cl.GetDescription()
   cl.CloseIssue()
@@ -2690,8 +3311,22 @@
 
 @subcommand.hidden
 def CMDdiff(parser, args):
-  """shows differences between local tree and last upload."""
-  cl = Changelist()
+  """Shows differences between local tree and last upload."""
+  auth.add_auth_options(parser)
+  options, args = parser.parse_args(args)
+  auth_config = auth.extract_auth_config_from_options(options)
+  if args:
+    parser.error('Unrecognized args: %s' % ' '.join(args))
+
+  # Uncommitted (staged and unstaged) changes will be destroyed by
+  # "git reset --hard" if there are merging conflicts in PatchIssue().
+  # Staged changes would be committed along with the patch from last
+  # upload, hence counted toward the "last upload" side in the final
+  # diff output, and this is not what we want.
+  if git_common.is_dirty_git_tree('diff'):
+    return 1
+
+  cl = Changelist(auth_config=auth_config)
   issue = cl.GetIssue()
   branch = cl.GetBranch()
   if not issue:
@@ -2703,13 +3338,14 @@
   RunGit(['checkout', '-q', '-b', TMP_BRANCH, base_branch])
   try:
     # Patch in the latest changes from rietveld.
-    rtn = PatchIssue(issue, False, False, None)
+    rtn = PatchIssue(issue, False, False, None, auth_config)
     if rtn != 0:
+      RunGit(['reset', '--hard'])
       return rtn
 
-    # Switch back to starting brand and diff against the temporary
+    # Switch back to starting branch and diff against the temporary
     # branch containing the latest rietveld patch.
-    subprocess2.check_call(['git', 'diff', TMP_BRANCH, branch])
+    subprocess2.check_call(['git', 'diff', TMP_BRANCH, branch, '--'])
   finally:
     RunGit(['checkout', '-q', branch])
     RunGit(['branch', '-D', TMP_BRANCH])
@@ -2718,16 +3354,18 @@
 
 
 def CMDowners(parser, args):
-  """interactively find the owners for reviewing"""
+  """Interactively find the owners for reviewing."""
   parser.add_option(
       '--no-color',
       action='store_true',
       help='Use this option to disable color output')
+  auth.add_auth_options(parser)
   options, args = parser.parse_args(args)
+  auth_config = auth.extract_auth_config_from_options(options)
 
   author = RunGit(['config', 'user.email']).strip() or None
 
-  cl = Changelist()
+  cl = Changelist(auth_config=auth_config)
 
   if args:
     if len(args) > 1:
@@ -2746,14 +3384,36 @@
       disable_color=options.no_color).run()
 
 
+def BuildGitDiffCmd(diff_type, upstream_commit, args, extensions):
+  """Generates a diff command."""
+  # Generate diff for the current branch's changes.
+  diff_cmd = ['diff', '--no-ext-diff', '--no-prefix', diff_type,
+              upstream_commit, '--' ]
+
+  if args:
+    for arg in args:
+      if os.path.isdir(arg):
+        diff_cmd.extend(os.path.join(arg, '*' + ext) for ext in extensions)
+      elif os.path.isfile(arg):
+        diff_cmd.append(arg)
+      else:
+        DieWithError('Argument "%s" is not a file or a directory' % arg)
+  else:
+    diff_cmd.extend('*' + ext for ext in extensions)
+
+  return diff_cmd
+
+
 @subcommand.usage('[files or directories to diff]')
 def CMDformat(parser, args):
-  """Runs clang-format on the diff."""
-  CLANG_EXTS = ['.cc', '.cpp', '.h', '.mm', '.proto']
+  """Runs auto-formatting tools (clang-format etc.) on the diff."""
+  CLANG_EXTS = ['.cc', '.cpp', '.h', '.mm', '.proto', '.java']
   parser.add_option('--full', action='store_true',
                     help='Reformat the full content of all touched files')
   parser.add_option('--dry-run', action='store_true',
                     help='Don\'t modify any file on disk.')
+  parser.add_option('--python', action='store_true',
+                    help='Format python code with yapf (experimental).')
   parser.add_option('--diff', action='store_true',
                     help='Print diff to stdout rather than modifying files.')
   opts, args = parser.parse_args(args)
@@ -2764,15 +3424,6 @@
   if rel_base_path:
     os.chdir(rel_base_path)
 
-  # Generate diff for the current branch's changes.
-  diff_cmd = ['diff', '--no-ext-diff', '--no-prefix']
-  if opts.full:
-    # Only list the names of modified files.
-    diff_cmd.append('--name-only')
-  else:
-    # Only generate context-less patches.
-    diff_cmd.append('-U0')
-
   # Grab the merge-base commit, i.e. the upstream commit of the current
   # branch when it was created or the last time it was rebased. This is
   # to cover the case where the user may have called "git fetch origin",
@@ -2788,20 +3439,14 @@
     DieWithError('Could not find base commit for this branch. '
                  'Are you in detached state?')
 
-  diff_cmd.append(upstream_commit)
-
-  # Handle source file filtering.
-  diff_cmd.append('--')
-  if args:
-    for arg in args:
-      if os.path.isdir(arg):
-        diff_cmd += [os.path.join(arg, '*' + ext) for ext in CLANG_EXTS]
-      elif os.path.isfile(arg):
-        diff_cmd.append(arg)
-      else:
-        DieWithError('Argument "%s" is not a file or a directory' % arg)
+  if opts.full:
+    # Only list the names of modified files.
+    diff_type = '--name-only'
   else:
-    diff_cmd += ['*' + ext for ext in CLANG_EXTS]
+    # Only generate context-less patches.
+    diff_type = '-U0'
+
+  diff_cmd = BuildGitDiffCmd(diff_type, upstream_commit, args, CLANG_EXTS)
   diff_output = RunGit(diff_cmd)
 
   top_dir = os.path.normpath(
@@ -2813,18 +3458,20 @@
   except clang_format.NotFoundError, e:
     DieWithError(e)
 
+  # Set to 2 to signal to CheckPatchFormatted() that this patch isn't
+  # formatted. This is used to block during the presubmit.
+  return_value = 0
+
   if opts.full:
     # diff_output is a list of files to send to clang-format.
     files = diff_output.splitlines()
-    if not files:
-      print "Nothing to format."
-      return 0
-    cmd = [clang_format_tool]
-    if not opts.dry_run and not opts.diff:
-      cmd.append('-i')
-    stdout = RunCommand(cmd + files, cwd=top_dir)
-    if opts.diff:
-      sys.stdout.write(stdout)
+    if files:
+      cmd = [clang_format_tool]
+      if not opts.dry_run and not opts.diff:
+        cmd.append('-i')
+      stdout = RunCommand(cmd + files, cwd=top_dir)
+      if opts.diff:
+        sys.stdout.write(stdout)
   else:
     env = os.environ.copy()
     env['PATH'] = str(os.path.dirname(clang_format_tool))
@@ -2843,9 +3490,52 @@
     if opts.diff:
       sys.stdout.write(stdout)
     if opts.dry_run and len(stdout) > 0:
-      return 2
+      return_value = 2
 
-  return 0
+  # Similar code to above, but using yapf on .py files rather than clang-format
+  # on C/C++ files
+  if opts.python:
+    diff_cmd = BuildGitDiffCmd(diff_type, upstream_commit, args, ['.py'])
+    diff_output = RunGit(diff_cmd)
+    yapf_tool = gclient_utils.FindExecutable('yapf')
+    if yapf_tool is None:
+      DieWithError('yapf not found in PATH')
+
+    if opts.full:
+      files = diff_output.splitlines()
+      if files:
+        cmd = [yapf_tool]
+        if not opts.dry_run and not opts.diff:
+          cmd.append('-i')
+        stdout = RunCommand(cmd + files, cwd=top_dir)
+        if opts.diff:
+          sys.stdout.write(stdout)
+    else:
+      # TODO(sbc): yapf --lines mode still has some issues.
+      # https://github.com/google/yapf/issues/154
+      DieWithError('--python currently only works with --full')
+
+  # Build a diff command that only operates on dart files. dart's formatter
+  # does not have the nice property of only operating on modified chunks, so
+  # hard code full.
+  dart_diff_cmd = BuildGitDiffCmd('--name-only', upstream_commit,
+                                  args, ['.dart'])
+  dart_diff_output = RunGit(dart_diff_cmd)
+  if dart_diff_output:
+    try:
+      command = [dart_format.FindDartFmtToolInChromiumTree()]
+      if not opts.dry_run and not opts.diff:
+        command.append('-w')
+      command.extend(dart_diff_output.splitlines())
+
+      stdout = RunCommand(command, cwd=top_dir)
+      if opts.dry_run and stdout:
+        return_value = 2
+    except dart_format.NotFoundError:
+      print ('Unable to check dart code formatting. Dart SDK is not in ' +
+             'this checkout.')
+
+  return return_value
 
 
 def CMDlol(parser, args):
@@ -2903,12 +3593,15 @@
   dispatcher = subcommand.CommandDispatcher(__name__)
   try:
     return dispatcher.execute(OptionParser(), argv)
+  except auth.AuthenticationError as e:
+    DieWithError(str(e))
   except urllib2.HTTPError, e:
     if e.code != 500:
       raise
     DieWithError(
         ('AppEngine is misbehaving and returned HTTP %d, again. Keep faith '
           'and retry or visit go/isgaeup.\n%s') % (e.code, str(e)))
+  return 0
 
 
 if __name__ == '__main__':
@@ -2916,4 +3609,8 @@
   # unit testing.
   fix_encoding.fix_encoding()
   colorama.init()
-  sys.exit(main(sys.argv[1:]))
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_common.py b/git_common.py
index 2e268da..2b7d258 100644
--- a/git_common.py
+++ b/git_common.py
@@ -81,19 +81,21 @@
     # crbug.com/187444
     r'RPC failed; result=\d+, HTTP code = \d+',
 
-    # crbug.com/315421
-    r'The requested URL returned error: 500 while accessing',
-
     # crbug.com/388876
     r'Connection timed out',
+
+    # crbug.com/430343
+    # TODO(dnj): Resync with Chromite.
+    r'The requested URL returned error: 5\d+',
 )
 
 GIT_TRANSIENT_ERRORS_RE = re.compile('|'.join(GIT_TRANSIENT_ERRORS),
                                      re.IGNORECASE)
 
-# First version where the for-each-ref command's format string supported the
-# upstream:track token.
-MIN_UPSTREAM_TRACK_GIT_VERSION = (1, 9)
+# git's for-each-ref command first supported the upstream:track token in its
+# format string in version 1.9.0, but some usages were broken until 2.3.0.
+# See git commit b6160d95 for more information.
+MIN_UPSTREAM_TRACK_GIT_VERSION = (2, 3)
 
 class BadCommitRefException(Exception):
   def __init__(self, refs):
@@ -292,7 +294,7 @@
 
 
 def branches(*args):
-  NO_BRANCH = ('* (no branch', '* (detached from ')
+  NO_BRANCH = ('* (no branch', '* (detached', '* (HEAD detached')
 
   key = 'depot-tools.branch-limit'
   limit = 20
@@ -319,15 +321,6 @@
     yield line.split()[-1]
 
 
-def run_with_retcode(*cmd, **kwargs):
-  """Run a command but only return the status code."""
-  try:
-    run(*cmd, **kwargs)
-    return 0
-  except subprocess2.CalledProcessError as cpe:
-    return cpe.returncode
-
-
 def config(option, default=None):
   try:
     return run('config', '--get', option) or default
@@ -407,7 +400,7 @@
   base = branch_config(branch, 'base')
   base_upstream = branch_config(branch, 'base-upstream')
   parent = parent or upstream(branch)
-  if not parent:
+  if parent is None or branch is None:
     return None
   actual_merge_base = run('merge-base', parent, branch)
 
@@ -506,7 +499,7 @@
     raise BadCommitRefException(commitrefs)
 
 
-RebaseRet = collections.namedtuple('RebaseRet', 'success message')
+RebaseRet = collections.namedtuple('RebaseRet', 'success stdout stderr')
 
 
 def rebase(parent, start, branch, abort=False):
@@ -530,11 +523,11 @@
     if TEST_MODE:
       args.insert(0, '--committer-date-is-author-date')
     run('rebase', *args)
-    return RebaseRet(True, '')
+    return RebaseRet(True, '', '')
   except subprocess2.CalledProcessError as cpe:
     if abort:
-      run('rebase', '--abort')
-    return RebaseRet(False, cpe.stdout)
+      run_with_retcode('rebase', '--abort')  # ignore failure
+    return RebaseRet(False, cpe.stdout, cpe.stderr)
 
 
 def remove_merge_base(branch):
@@ -551,6 +544,15 @@
   return run_with_stderr(*cmd, **kwargs)[0]
 
 
+def run_with_retcode(*cmd, **kwargs):
+  """Run a command but only return the status code."""
+  try:
+    run(*cmd, **kwargs)
+    return 0
+  except subprocess2.CalledProcessError as cpe:
+    return cpe.returncode
+
+
 def run_stream(*cmd, **kwargs):
   """Runs a git command. Returns stdout as a PIPE (file-like object).
 
@@ -564,6 +566,28 @@
   return proc.stdout
 
 
+@contextlib.contextmanager
+def run_stream_with_retcode(*cmd, **kwargs):
+  """Runs a git command as context manager yielding stdout as a PIPE.
+
+  stderr is dropped to avoid races if the process outputs to both stdout and
+  stderr.
+
+  Raises subprocess2.CalledProcessError on nonzero return code.
+  """
+  kwargs.setdefault('stderr', subprocess2.VOID)
+  kwargs.setdefault('stdout', subprocess2.PIPE)
+  cmd = (GIT_EXE, '-c', 'color.ui=never') + cmd
+  try:
+    proc = subprocess2.Popen(cmd, **kwargs)
+    yield proc.stdout
+  finally:
+    retcode = proc.wait()
+    if retcode != 0:
+      raise subprocess2.CalledProcessError(retcode, cmd, os.getcwd(),
+                                           None, None)
+
+
 def run_with_stderr(*cmd, **kwargs):
   """Runs a git command.
 
@@ -600,6 +624,25 @@
 def set_config(option, value, scope='local'):
   run('config', '--' + scope, option, value)
 
+
+def get_dirty_files():
+  # Make sure index is up-to-date before running diff-index.
+  run_with_retcode('update-index', '--refresh', '-q')
+  return run('diff-index', '--name-status', 'HEAD')
+
+
+def is_dirty_git_tree(cmd):
+  dirty = get_dirty_files()
+  if dirty:
+    print 'Cannot %s with a dirty tree. You must commit locally first.' % cmd
+    print 'Uncommitted files: (git diff-index --name-status HEAD)'
+    print dirty[:4096]
+    if len(dirty) > 4096: # pragma: no cover
+      print '... (run "git diff-index --name-status HEAD" to see full output).'
+    return True
+  return False
+
+
 def squash_current_branch(header=None, merge_base=None):
   header = header or 'git squash commit.'
   merge_base = merge_base or get_or_create_merge_base(current_branch())
@@ -608,7 +651,14 @@
     log_msg += '\n'
   log_msg += run('log', '--reverse', '--format=%H%n%B', '%s..HEAD' % merge_base)
   run('reset', '--soft', merge_base)
-  run('commit', '-a', '-F', '-', indata=log_msg)
+
+  if not get_dirty_files():
+    # Sometimes the squash can result in the same tree, meaning that there is
+    # nothing to commit at this point.
+    print 'Nothing to commit; squashed branch is empty'
+    return False
+  run('commit', '--no-verify', '-a', '-F', '-', indata=log_msg)
+  return True
 
 
 def tags(*args):
@@ -723,6 +773,7 @@
   except subprocess2.CalledProcessError:
     return None
 
+
 def get_git_version():
   """Returns a tuple that contains the numeric components of the current git
   version."""
diff --git a/git_footers.py b/git_footers.py
index 6e8136b..3e3ea82 100755
--- a/git_footers.py
+++ b/git_footers.py
@@ -11,10 +11,12 @@
 
 import git_common as git
 
+
 FOOTER_PATTERN = re.compile(r'^\s*([\w-]+): (.*)$')
 CHROME_COMMIT_POSITION_PATTERN = re.compile(r'^([\w/-]+)@{#(\d+)}$')
 GIT_SVN_ID_PATTERN = re.compile('^([^\s@]+)@(\d+)')
 
+
 def normalize_name(header):
   return '-'.join([ word.title() for word in header.strip().split('-') ])
 
@@ -46,6 +48,20 @@
   return footer_map
 
 
+def get_footer_svn_id(branch=None):
+  if not branch:
+    branch = git.root()
+  svn_id = None
+  message = git.run('log', '-1', '--format=%B', branch)
+  footers = parse_footers(message)
+  git_svn_id = get_unique(footers, 'git-svn-id')
+  if git_svn_id:
+    match = GIT_SVN_ID_PATTERN.match(git_svn_id)
+    if match:
+      svn_id = match.group(1)
+  return svn_id
+
+
 def get_unique(footers, key):
   key = normalize_name(key)
   values = footers[key]
@@ -79,6 +95,14 @@
   if svn_commit:
     match = GIT_SVN_ID_PATTERN.match(svn_commit)
     assert match, 'Invalid git-svn-id value: %s' % svn_commit
+    # V8 has different semantics than Chromium.
+    if re.match(r'.*https?://v8\.googlecode\.com/svn/trunk',
+                match.group(1)):
+      return ('refs/heads/candidates', match.group(2))
+    if re.match(r'.*https?://v8\.googlecode\.com/svn/branches/bleeding_edge',
+                match.group(1)):
+      return ('refs/heads/master', match.group(2))
+
     # Assume that any trunk svn revision will match the commit-position
     # semantics.
     if re.match('.*/trunk.*$', match.group(1)):
@@ -129,7 +153,12 @@
     for k in footers.keys():
       for v in footers[k]:
         print '%s: %s' % (k, v)
+  return 0
 
 
 if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_freezer.py b/git_freezer.py
index 7a23be2..91a4ec0 100755
--- a/git_freezer.py
+++ b/git_freezer.py
@@ -22,12 +22,17 @@
   return thaw()
 
 
-def main():
+def main(args):
   dispatcher = subcommand.CommandDispatcher(__name__)
-  ret = dispatcher.execute(optparse.OptionParser(), sys.argv[1:])
+  ret = dispatcher.execute(optparse.OptionParser(), args)
   if ret:
     print ret
+  return 0
 
 
 if __name__ == '__main__':
-  main()
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_map.py b/git_map.py
index 65814b9..99c8b05 100755
--- a/git_map.py
+++ b/git_map.py
@@ -37,13 +37,13 @@
 # Git emits combined color
 BRIGHT_RED = '\x1b[1;31m'
 
-def main():
+def main(argv):
   map_extra = config_list('depot_tools.map_extra')
   fmt = '%C(red bold)%h%x09%Creset%C(green)%d%Creset %C(yellow)%ad%Creset ~ %s'
   log_proc = subprocess2.Popen(
     [GIT_EXE, 'log', '--graph', '--branches', '--tags', root(),
      '--color=always', '--date=short', ('--pretty=format:' + fmt)
-    ] + map_extra + sys.argv[1:],
+    ] + map_extra + argv,
     stdout=subprocess2.PIPE,
     shell=False)
 
@@ -110,5 +110,8 @@
 
 
 if __name__ == '__main__':
-  sys.exit(main())
-
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_map_branches.py b/git_map_branches.py
index 350fed3..cd43900 100755
--- a/git_map_branches.py
+++ b/git_map_branches.py
@@ -19,6 +19,7 @@
     * Note that multiple branches may be Cyan, if they are all on the same
       commit, and you have that commit checked out.
   * Green - a local branch
+  * Blue - a 'branch-heads' branch
   * Magenta - a tag
   * Magenta '{NO UPSTREAM}' - If you have local branches which do not track any
     upstream, then you will see this.
@@ -27,12 +28,14 @@
 import argparse
 import collections
 import sys
+import subprocess2
 
 from third_party import colorama
 from third_party.colorama import Fore, Style
 
 from git_common import current_branch, upstream, tags, get_branches_info
 from git_common import get_git_version, MIN_UPSTREAM_TRACK_GIT_VERSION, hash_one
+from git_common import run
 
 DEFAULT_SEPARATOR = ' ' * 4
 
@@ -107,6 +110,8 @@
 
   def __init__(self):
     self.verbosity = 0
+    self.maxjobs = 0
+    self.show_subject = False
     self.output = OutputManager()
     self.__gone_branches = set()
     self.__branches_info = None
@@ -114,10 +119,25 @@
     self.__current_branch = None
     self.__current_hash = None
     self.__tag_set = None
+    self.__status_info = {}
 
   def start(self):
     self.__branches_info = get_branches_info(
         include_tracking_status=self.verbosity >= 1)
+    if self.verbosity >= 2:
+      # Avoid heavy import unless necessary.
+      from git_cl import get_cl_statuses, color_for_status
+
+      status_info = get_cl_statuses(self.__branches_info.keys(),
+                                    fine_grained=self.verbosity > 2,
+                                    max_processes=self.maxjobs)
+
+      for _ in xrange(len(self.__branches_info)):
+        # This is a blocking get which waits for the remote CL status to be
+        # retrieved.
+        (branch, url, status) = status_info.next()
+        self.__status_info[branch] = (url, color_for_status(status))
+
     roots = set()
 
     # A map of parents to a list of their children.
@@ -126,7 +146,7 @@
         continue
 
       parent = branch_info.upstream
-      if parent and not self.__branches_info[parent]:
+      if not self.__branches_info[parent]:
         branch_upstream = upstream(branch)
         # If git can't find the upstream, mark the upstream as gone.
         if branch_upstream:
@@ -156,6 +176,8 @@
   def __color_for_branch(self, branch, branch_hash):
     if branch.startswith('origin'):
       color = Fore.RED
+    elif branch.startswith('branch-heads'):
+      color = Fore.BLUE
     elif self.__is_invalid_parent(branch) or branch in self.__tag_set:
       color = Fore.MAGENTA
     elif self.__current_hash.startswith(branch_hash):
@@ -163,7 +185,7 @@
     else:
       color = Fore.GREEN
 
-    if self.__current_hash.startswith(branch_hash):
+    if branch_hash and self.__current_hash.startswith(branch_hash):
       color += Style.BRIGHT
     else:
       color += Style.NORMAL
@@ -177,7 +199,10 @@
     if branch_info:
       branch_hash = branch_info.hash
     else:
-      branch_hash = hash_one(branch, short=True)
+      try:
+        branch_hash = hash_one(branch, short=True)
+      except subprocess2.CalledProcessError:
+        branch_hash = None
 
     line = OutputLine()
 
@@ -231,10 +256,13 @@
 
     # The Rietveld issue associated with the branch.
     if self.verbosity >= 2:
-      import git_cl  # avoid heavy import cost unless we need it
       none_text = '' if self.__is_invalid_parent(branch) else 'None'
-      url = git_cl.Changelist(branchref=branch).GetIssueURL()
-      line.append(url or none_text, color=Fore.BLUE if url else Fore.WHITE)
+      (url, color) = self.__status_info[branch]
+      line.append(url or none_text, color=color)
+
+    # The subject of the most recent commit on the branch.
+    if self.show_subject:
+      line.append(run('log', '-n1', '--format=%s', branch))
 
     self.output.append(line)
 
@@ -257,14 +285,26 @@
                       help='Display branch hash and Rietveld URL')
   parser.add_argument('--no-color', action='store_true', dest='nocolor',
                       help='Turn off colors.')
+  parser.add_argument(
+      '-j', '--maxjobs', action='store', type=int,
+      help='The number of jobs to use when retrieving review status')
+  parser.add_argument('--show-subject', action='store_true',
+                      dest='show_subject', help='Show the commit subject.')
 
-  opts = parser.parse_args(argv[1:])
+  opts = parser.parse_args(argv)
 
   mapper = BranchMapper()
   mapper.verbosity = opts.v
   mapper.output.nocolor = opts.nocolor
+  mapper.maxjobs = opts.maxjobs
+  mapper.show_subject = opts.show_subject
   mapper.start()
   print mapper.output.as_formatted_string()
+  return 0
 
 if __name__ == '__main__':
-  sys.exit(main(sys.argv))
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_mark_merge_base.py b/git_mark_merge_base.py
index 673e2b4..214b3df 100755
--- a/git_mark_merge_base.py
+++ b/git_mark_merge_base.py
@@ -39,7 +39,7 @@
     try:
       remove_merge_base(cur)
     except CalledProcessError:
-      print "No merge base currently exists for %s." % cur
+      print 'No merge base currently exists for %s.' % cur
     return 0
 
   if opts.merge_base:
@@ -60,9 +60,12 @@
     print "Invalid merge_base %s" % opts.merge_base
 
   print "merge_base(%s): %s" % (cur, actual)
-
   return ret
 
 
 if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_nav_downstream.py b/git_nav_downstream.py
index ed3110b..6ea085b 100755
--- a/git_nav_downstream.py
+++ b/git_nav_downstream.py
@@ -34,7 +34,8 @@
     cur = hash_one(cur)
   downstreams = [b for b in branches() if upfn(b) == cur]
   if not downstreams:
-    return "No downstream branches"
+    print "No downstream branches"
+    return 1
   elif len(downstreams) == 1:
     run('checkout', downstreams[0], stdout=sys.stdout, stderr=sys.stderr)
   else:
@@ -55,10 +56,12 @@
         run('checkout', downstreams[int(r)], stdout=sys.stdout,
             stderr=sys.stderr)
         break
+  return 0
 
 
 if __name__ == '__main__':
   try:
     sys.exit(main(sys.argv[1:]))
   except KeyboardInterrupt:
-    pass
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_new_branch.py b/git_new_branch.py
index 18ce018..03b0fcc 100755
--- a/git_new_branch.py
+++ b/git_new_branch.py
@@ -48,7 +48,12 @@
     sys.stderr.write(cpe.stderr)
     return 1
   sys.stderr.write('Switched to branch %s.\n' % opts.branch_name)
+  return 0
 
 
 if __name__ == '__main__':  # pragma: no cover
-  sys.exit(main(sys.argv[1:]))
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_number.py b/git_number.py
index 6997f51..1867b97 100755
--- a/git_number.py
+++ b/git_number.py
@@ -259,25 +259,26 @@
                   "use the 'Cr-Commit-Position' value in the commit's message.")
     return 1
 
+  if opts.reset:
+    clear_caches(on_disk=True)
+    return
+
   try:
-    if opts.reset:
-      clear_caches(on_disk=True)
-      return
+    targets = git.parse_commitrefs(*(args or ['HEAD']))
+  except git.BadCommitRefException as e:
+    parser.error(e)
 
-    try:
-      targets = git.parse_commitrefs(*(args or ['HEAD']))
-    except git.BadCommitRefException as e:
-      parser.error(e)
+  load_generation_numbers(targets)
+  if not opts.no_cache:
+    finalize(targets)
 
-    load_generation_numbers(targets)
-    if not opts.no_cache:
-      finalize(targets)
-
-    print '\n'.join(map(str, map(get_num, targets)))
-    return 0
-  except KeyboardInterrupt:
-    return 1
+  print '\n'.join(map(str, map(get_num, targets)))
+  return 0
 
 
 if __name__ == '__main__':  # pragma: no cover
-  sys.exit(main())
+  try:
+    sys.exit(main())
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_rebase_update.py b/git_rebase_update.py
index 09eaffa..f98b8b2 100755
--- a/git_rebase_update.py
+++ b/git_rebase_update.py
@@ -12,28 +12,35 @@
 import logging
 import sys
 import textwrap
+import os
 
+from fnmatch import fnmatch
 from pprint import pformat
 
 import git_common as git
 
 
 STARTING_BRANCH_KEY = 'depot-tools.rebase-update.starting-branch'
+STARTING_WORKDIR_KEY = 'depot-tools.rebase-update.starting-workdir'
 
 
-def find_return_branch():
-  """Finds the branch which we should return to after rebase-update completes.
+def find_return_branch_workdir():
+  """Finds the branch and working directory which we should return to after
+  rebase-update completes.
 
-  This value may persist across multiple invocations of rebase-update, if
+  These values may persist across multiple invocations of rebase-update, if
   rebase-update runs into a conflict mid-way.
   """
   return_branch = git.config(STARTING_BRANCH_KEY)
+  workdir = git.config(STARTING_WORKDIR_KEY)
   if not return_branch:
+    workdir = os.getcwd()
+    git.set_config(STARTING_WORKDIR_KEY, workdir)
     return_branch = git.current_branch()
     if return_branch != 'HEAD':
       git.set_config(STARTING_BRANCH_KEY, return_branch)
 
-  return return_branch
+  return return_branch, workdir
 
 
 def fetch_remotes(branch_tree):
@@ -41,15 +48,23 @@
   fetch_tags = False
   remotes = set()
   tag_set = git.tags()
+  fetchspec_map = {}
+  all_fetchspec_configs = git.run(
+      'config', '--get-regexp', r'^remote\..*\.fetch').strip()
+  for fetchspec_config in all_fetchspec_configs.splitlines():
+    key, _, fetchspec = fetchspec_config.partition(' ')
+    dest_spec = fetchspec.partition(':')[2]
+    remote_name = key.split('.')[1]
+    fetchspec_map[dest_spec] = remote_name
   for parent in branch_tree.itervalues():
     if parent in tag_set:
       fetch_tags = True
     else:
       full_ref = git.run('rev-parse', '--symbolic-full-name', parent)
-      if full_ref.startswith('refs/remotes'):
-        parts = full_ref.split('/')
-        remote_name = parts[2]
-        remotes.add(remote_name)
+      for dest_spec, remote_name in fetchspec_map.iteritems():
+        if fnmatch(full_ref, dest_spec):
+          remotes.add(remote_name)
+          break
 
   fetch_args = []
   if fetch_tags:
@@ -121,7 +136,8 @@
   if git.hash_one(parent) != start_hash:
     # Try a plain rebase first
     print 'Rebasing:', branch
-    if not git.rebase(parent, start_hash, branch, abort=True).success:
+    rebase_ret = git.rebase(parent, start_hash, branch, abort=True)
+    if not rebase_ret.success:
       # TODO(iannucci): Find collapsible branches in a smarter way?
       print "Failed! Attempting to squash", branch, "...",
       squash_branch = branch+"_squash_attempt"
@@ -138,25 +154,36 @@
         git.squash_current_branch(merge_base=start_hash)
         git.rebase(parent, start_hash, branch)
       else:
-        # rebase and leave in mid-rebase state.
-        git.rebase(parent, start_hash, branch)
         print "Failed!"
         print
-        print "Here's what git-rebase had to say:"
-        print squash_ret.message
-        print
-        print textwrap.dedent(
-        """
-        Squashing failed. You probably have a real merge conflict.
 
-        Your working copy is in mid-rebase. Either:
-         * completely resolve like a normal git-rebase; OR
-         * abort the rebase and mark this branch as dormant:
-               git config branch.%s.dormant true
+        # rebase and leave in mid-rebase state.
+        # This second rebase attempt should always fail in the same
+        # way that the first one does.  If it magically succeeds then
+        # something very strange has happened.
+        second_rebase_ret = git.rebase(parent, start_hash, branch)
+        if second_rebase_ret.success: # pragma: no cover
+          print "Second rebase succeeded unexpectedly!"
+          print "Please see: http://crbug.com/425696"
+          print "First rebase failed with:"
+          print rebase_ret.stderr
+        else:
+          print "Here's what git-rebase (squashed) had to say:"
+          print
+          print squash_ret.stdout
+          print squash_ret.stderr
+          print textwrap.dedent(
+          """\
+          Squashing failed. You probably have a real merge conflict.
 
-        And then run `git rebase-update` again to resume.
-        """ % branch)
-        return False
+          Your working copy is in mid-rebase. Either:
+           * completely resolve like a normal git-rebase; OR
+           * abort the rebase and mark this branch as dormant:
+                 git config branch.%s.dormant true
+
+          And then run `git rebase-update` again to resume.
+          """ % branch)
+          return False
   else:
     print '%s up-to-date' % branch
 
@@ -166,7 +193,7 @@
   return True
 
 
-def main(args=()):
+def main(args=None):
   parser = argparse.ArgumentParser()
   parser.add_argument('--verbose', '-v', action='store_true')
   parser.add_argument('--no_fetch', '--no-fetch', '-n',
@@ -193,7 +220,8 @@
     )
     return 1
 
-  return_branch = find_return_branch()
+  return_branch, return_workdir = find_return_branch_workdir()
+  os.chdir(git.run('rev-parse', '--show-toplevel'))
 
   if git.current_branch() == 'HEAD':
     if git.run('status', '--porcelain'):
@@ -243,10 +271,17 @@
           % (return_branch, root_branch)
         )
       git.run('checkout', root_branch)
+    if return_workdir:
+      os.chdir(return_workdir)
     git.set_config(STARTING_BRANCH_KEY, '')
+    git.set_config(STARTING_WORKDIR_KEY, '')
 
   return retcode
 
 
 if __name__ == '__main__':  # pragma: no cover
-  sys.exit(main(sys.argv[1:]))
+  try:
+    sys.exit(main())
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_rename_branch.py b/git_rename_branch.py
index cefa012..c0ac42e 100755
--- a/git_rename_branch.py
+++ b/git_rename_branch.py
@@ -44,7 +44,12 @@
   except subprocess2.CalledProcessError as cpe:
     sys.stderr.write(cpe.stderr)
     return 1
+  return 0
 
 
 if __name__ == '__main__':  # pragma: no cover
-  sys.exit(main(sys.argv[1:]))
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_reparent_branch.py b/git_reparent_branch.py
index fe79d3c..f24f52f 100755
--- a/git_reparent_branch.py
+++ b/git_reparent_branch.py
@@ -73,4 +73,8 @@
 
 
 if __name__ == '__main__':  # pragma: no cover
-  sys.exit(main(sys.argv[1:]))
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_retry.py b/git_retry.py
index b40e6d2..d6dee65 100755
--- a/git_retry.py
+++ b/git_retry.py
@@ -153,4 +153,8 @@
 if __name__ == '__main__':
   logging.basicConfig()
   logging.getLogger().setLevel(logging.WARNING)
-  sys.exit(main(sys.argv[2:]))
+  try:
+    sys.exit(main(sys.argv[2:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_squash_branch.py b/git_squash_branch.py
index 0e02539..33366d2 100755
--- a/git_squash_branch.py
+++ b/git_squash_branch.py
@@ -6,7 +6,7 @@
 import argparse
 import sys
 
-from git_common import squash_current_branch
+import git_common
 
 def main(args):
   parser = argparse.ArgumentParser()
@@ -14,7 +14,15 @@
       '-m', '--message', metavar='<msg>', default='git squash commit.',
       help='Use the given <msg> as the first line of the commit message.')
   opts = parser.parse_args(args)
-  squash_current_branch(opts.message)
+  if git_common.is_dirty_git_tree('squash-branch'):
+    return 1
+  git_common.squash_current_branch(opts.message)
+  return 0
+
 
 if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_try.py b/git_try.py
index 45a5217..f8f6d30 100755
--- a/git_try.py
+++ b/git_try.py
@@ -41,8 +41,7 @@
     return None
 
 
-if __name__ == '__main__':
-  args = sys.argv[1:]
+def main(args):
   patchset = GetRietveldPatchsetNumber()
   if patchset:
     args.extend([
@@ -68,3 +67,12 @@
   except third_party.upload.ClientLoginError, e:
     print('Got an exception while trying to log in to Rietveld.')
     print(str(e))
+  return 0
+
+
+if __name__ == '__main__':
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_upstream_diff.py b/git_upstream_diff.py
index 9d9db95..3e38c78 100755
--- a/git_upstream_diff.py
+++ b/git_upstream_diff.py
@@ -38,8 +38,12 @@
 
   cmd += extra_args
 
-  subprocess2.check_call(cmd)
+  return subprocess2.check_call(cmd)
 
 
 if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/gn.py b/gn.py
index 325e685..32c2fa0 100755
--- a/gn.py
+++ b/gn.py
@@ -30,8 +30,12 @@
     print >> sys.stderr, 'gn.py: Could not find gn executable at: %s' % gn_path
     return 2
   else:
-    return subprocess.call([gn_path] + sys.argv[1:])
+    return subprocess.call([gn_path] + args[1:])
 
 
 if __name__ == '__main__':
-  sys.exit(main(sys.argv))
+  try:
+    sys.exit(main(sys.argv))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/gsutil.py b/gsutil.py
new file mode 100755
index 0000000..53589a2
--- /dev/null
+++ b/gsutil.py
@@ -0,0 +1,129 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Run a pinned gsutil."""
+
+
+import argparse
+import base64
+import hashlib
+import json
+import os
+import shutil
+import subprocess
+import sys
+import urllib2
+import zipfile
+
+
+GSUTIL_URL = 'https://storage.googleapis.com/pub/'
+API_URL = 'https://www.googleapis.com/storage/v1/b/pub/o/'
+
+THIS_DIR = os.path.dirname(os.path.abspath(__file__))
+DEFAULT_BIN_DIR = os.path.join(THIS_DIR, 'external_bin', 'gsutil')
+DEFAULT_FALLBACK_GSUTIL = os.path.join(
+    THIS_DIR, 'third_party', 'gsutil', 'gsutil')
+
+
+class InvalidGsutilError(Exception):
+  pass
+
+
+def download_gsutil(version, target_dir):
+  """Downloads gsutil into the target_dir."""
+  filename = 'gsutil_%s.zip' % version
+  target_filename = os.path.join(target_dir, filename)
+
+  # Check if the target exists already.
+  if os.path.exists(target_filename):
+    md5_calc = hashlib.md5()
+    with open(target_filename, 'rb') as f:
+      while True:
+        buf = f.read(4096)
+        if not buf:
+          break
+        md5_calc.update(buf)
+    local_md5 = md5_calc.hexdigest()
+
+    metadata_url = '%s%s' % (API_URL, filename)
+    metadata = json.load(urllib2.urlopen(metadata_url))
+    remote_md5 = base64.b64decode(metadata['md5Hash']).encode('hex')
+
+    if local_md5 == remote_md5:
+      return target_filename
+    os.remove(target_filename)
+
+  # Do the download.
+  url = '%s%s' % (GSUTIL_URL, filename)
+  u = urllib2.urlopen(url)
+  with open(target_filename, 'wb') as f:
+    while True:
+      buf = u.read(4096)
+      if not buf:
+        break
+      f.write(buf)
+  return target_filename
+
+
+def check_gsutil(gsutil_bin):
+  """Run gsutil version and make sure it runs."""
+  return subprocess.call(
+      [sys.executable, gsutil_bin, 'version'],
+      stdout=subprocess.PIPE, stderr=subprocess.STDOUT) == 0
+
+def ensure_gsutil(version, target):
+  bin_dir = os.path.join(target, 'gsutil_%s' % version)
+  gsutil_bin = os.path.join(bin_dir, 'gsutil', 'gsutil')
+  if os.path.isfile(gsutil_bin) and check_gsutil(gsutil_bin):
+    # Everything is awesome! we're all done here.
+    return gsutil_bin
+
+  if os.path.isdir(bin_dir):
+    # Clean up if we're redownloading a corrupted gsutil.
+    shutil.rmtree(bin_dir)
+  cache_dir = os.path.join(target, '.cache_dir')
+  if not os.path.isdir(cache_dir):
+    os.makedirs(cache_dir)
+  target_zip_filename = download_gsutil(version, cache_dir)
+  with zipfile.ZipFile(target_zip_filename, 'r') as target_zip:
+    target_zip.extractall(bin_dir)
+
+  # Final check that the gsutil bin is okay.  This should never fail.
+  if not check_gsutil(gsutil_bin):
+    raise InvalidGsutilError()
+
+  return gsutil_bin
+
+
+def run_gsutil(force_version, fallback, target, args):
+  if force_version:
+    gsutil_bin = ensure_gsutil(force_version, target)
+  else:
+    gsutil_bin = fallback
+  cmd = [sys.executable, gsutil_bin] + args
+  return subprocess.call(cmd)
+
+
+def parse_args():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--force-version', default='4.13')
+  parser.add_argument('--fallback', default=DEFAULT_FALLBACK_GSUTIL)
+  parser.add_argument('--target', default=DEFAULT_BIN_DIR)
+  parser.add_argument('args', nargs=argparse.REMAINDER)
+
+  args, extras = parser.parse_known_args()
+  if args.args and args.args[0] == '--':
+    args.args.pop(0)
+  if extras:
+    args.args = extras + args.args
+  return args.force_version, args.fallback, args.target, args.args
+
+
+def main():
+  force_version, fallback, target, args = parse_args()
+  return run_gsutil(force_version, fallback, target, args)
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/infra/README.md b/infra/README.md
new file mode 100644
index 0000000..5788e8a
--- /dev/null
+++ b/infra/README.md
@@ -0,0 +1 @@
+This directory contains infra-specific files.
diff --git a/infra/config/OWNERS b/infra/config/OWNERS
new file mode 100644
index 0000000..2aa95ea
--- /dev/null
+++ b/infra/config/OWNERS
@@ -0,0 +1,5 @@
+set noparent
+akuegel@chromium.org
+phajdan.jr@chromium.org
+sergiyb@chromium.org
+tandrii@chromium.org
diff --git a/infra/config/README.md b/infra/config/README.md
new file mode 100644
index 0000000..c036d61
--- /dev/null
+++ b/infra/config/README.md
@@ -0,0 +1 @@
+This directory contains configuration files for infra services.
diff --git a/infra/config/cq.cfg b/infra/config/cq.cfg
new file mode 100644
index 0000000..3469cdc
--- /dev/null
+++ b/infra/config/cq.cfg
@@ -0,0 +1,38 @@
+# Commit Queue configuration file. The documentation of the format can be found
+# at http://luci-config.appspot.com/schemas/projects/refs:cq.cfg.
+
+version: 1
+cq_name: "depot_tools"
+cq_status_url: "https://chromium-cq-status.appspot.com"
+svn_repo_url: "svn://svn.chromium.org/chrome/trunk/tools/depot_tools"
+
+rietveld {
+  url: "https://codereview.chromium.org"
+  project_bases: "^svn\\:\\/\\/svn\\.chromium\\.org\\/chrome/trunk/tools/depot_tools(|/.*)$"
+  project_bases: "^svn\\:\\/\\/chrome\\-svn\\/chrome/trunk/tools/depot_tools(|/.*)$"
+  project_bases: "^svn\\:\\/\\/chrome\\-svn\\.corp\\/chrome/trunk/tools/depot_tools(|/.*)$"
+  project_bases: "^svn\\:\\/\\/chrome\\-svn\\.corp\\.google\\.com\\/chrome/trunk/tools/depot_tools(|/.*)$"
+  project_bases: "^http\\:\\/\\/src\\.chromium\\.org\\/svn/trunk/tools/depot_tools(|/.*)$"
+  project_bases: "^https\\:\\/\\/src\\.chromium\\.org\\/svn/trunk/tools/depot_tools(|/.*)$"
+  project_bases: "^http\\:\\/\\/src\\.chromium\\.org\\/chrome/trunk/tools/depot_tools(|/.*)$"
+  project_bases: "^https\\:\\/\\/src\\.chromium\\.org\\/chrome/trunk/tools/depot_tools(|/.*)$"
+  project_bases: "^https?\\:\\/\\/git\\.chromium\\.org\\/git\\/chromium\\/tools\\/depot_tools(?:\\.git)?\\@[a-zA-Z0-9\\-_\\.]+$"
+  project_bases: "^https?\\:\\/\\/git\\.chromium\\.org\\/chromium\\/tools\\/depot_tools(?:\\.git)?\\@[a-zA-Z0-9\\-_\\.]+$"
+  project_bases: "^https?\\:\\/\\/chromium\\.googlesource\\.com\\/chromium\\/tools\\/depot_tools(?:\\.git)?\\@[a-zA-Z0-9\\-_\\.]+$"
+  project_bases: "^https?\\:\\/\\/chromium\\.googlesource\\.com\\/a\\/chromium\\/tools\\/depot_tools(?:\\.git)?\\@[a-zA-Z0-9\\-_\\.]+$"
+}
+
+verifiers {
+  reviewer_lgtm {
+    committer_list: "chromium"
+  }
+
+  try_job {
+    buckets {
+      name: "tryserver.chromium.linux"
+      builders {
+        name: "depot_tools_presubmit"
+      }
+    }
+  }
+}
diff --git a/man/html/depot_tools_tutorial.html b/man/html/depot_tools_tutorial.html
index ffe62c1..2c59b80 100644
--- a/man/html/depot_tools_tutorial.html
+++ b/man/html/depot_tools_tutorial.html
@@ -873,7 +873,7 @@
 <div class="paragraph"><p>Clone the <em>depot_tools</em> repository:</p></div>
 <div class="listingblock">
 <div class="content">
-<pre><code><strong><span class="white">$ git clone https://chromium.googlesource.com/chromium/tools/depot_tools</span></strong></code></pre>
+<pre><code><strong><span class="white">$ git clone https://chromium.googlesource.com/chromium/tools/depot_tools.git</span></strong></code></pre>
 </div></div>
 <div class="paragraph"><p>Add <em>depot_tools</em> to the <em>end</em> of your PATH (you will probably want to put this
 in your <code>~/.bashrc</code> or <code>~/.zshrc</code>). Assuming you cloned <em>depot_tools</em> to
@@ -965,8 +965,8 @@
 commands:</p></div>
 <div class="listingblock">
 <div class="content">
-<pre><code><strong><span class="white">$ git config --global user.name &#8220;John Doe&#8221;</span></strong>
-<strong><span class="white">$ git config --global user.email &#8220;jdoe@email.com&#8221;</span></strong>
+<pre><code><strong><span class="white">$ git config --global user.name "John Doe"</span></strong>
+<strong><span class="white">$ git config --global user.email "jdoe@email.com"</span></strong>
 <strong><span class="white">$ git config --global core.autocrlf false</span></strong>
 <strong><span class="white">$ git config --global core.filemode false</span></strong>
 <strong><span class="white">$</span></strong> # and for fun!
@@ -1238,9 +1238,9 @@
 </code></pre></div></div><p><div class="paragraph"> Let's fix something!</p></div><div class="listingblock"><div class="content"><pre><code><span style="font-weight: bold; color: #ffffff">$ git new-branch fix_typo</span>
 <span style="font-weight: bold; color: #ffffff">$ echo -e '/Banana\ns/Banana/Kuun\nwq' | ed build/whitespace_file.txt</span>
 1503
-1501
 It was a Domo-Banana.
 It was a Domo-Kuun.
+1501
 <span style="font-weight: bold; color: #ffffff">$ git commit -am 'Fix terrible typo.'</span>
 [fix_typo 615ffa7] Fix terrible typo.
  1 file changed, 1 insertion(+), 1 deletion(-)
@@ -1290,9 +1290,9 @@
   (use "git push" to publish your local commits)
 <span style="font-weight: bold; color: #ffffff">$ echo -e '/Kuun\ns/Kuun/Kun\nwq' | ed build/whitespace_file.txt</span>
 1501
-1500
 It was a Domo-Kuun.
 It was a Domo-Kun.
+1500
 <span style="font-weight: bold; color: #ffffff">$ git upstream-diff --wordwise</span>
 <span style="font-weight: bold">diff --git a/build/whitespace_file.txt b/build/whitespace_file.txt</span>
 <span style="font-weight: bold">index 3eba355..57cdcee 100644</span>
@@ -1342,14 +1342,14 @@
 
 HEAD is now at beec6f4... Make ReflectorImpl use mailboxes
 <span style="font-weight: bold; color: #ffffff">$ git nav-downstream</span>
-Please select a downstream branch
-  0. chap2
-  1. fix_typo
-Selection (0-1)[0]: 0
 Previous HEAD position was beec6f4... Make ReflectorImpl use mailboxes
 Switched to branch 'chap2'
 Your branch is ahead of 'origin/master' by 1 commit.
   (use "git push" to publish your local commits)
+Please select a downstream branch
+  0. chap2
+  1. fix_typo
+Selection (0-1)[0]: 0
 <span style="font-weight: bold; color: #ffffff">$ git map-branches</span>
 <span style="color: #e42e16"></span><span style="color: #e42e16">origin/master
 </span><span style="color: #33d6e5"></span><span style="font-weight: bold; color: #33d6e5">  chap2 *
@@ -1581,7 +1581,7 @@
 <div id="footnotes"><hr /></div>
 <div id="footer">
 <div id="footer-text">
-Last updated 2014-05-09 17:43:43 PDT
+Last updated 2015-01-13 15:27:56 PST
 </div>
 </div>
 </body>
diff --git a/man/html/git-auto-svn.html b/man/html/git-auto-svn.html
new file mode 100644
index 0000000..24445d0
--- /dev/null
+++ b/man/html/git-auto-svn.html
@@ -0,0 +1,837 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN"
+    "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en">
+<head>
+<meta http-equiv="Content-Type" content="application/xhtml+xml; charset=UTF-8" />
+<meta name="generator" content="AsciiDoc 8.6.9" />
+<title>git-auto-svn(1)</title>
+<style type="text/css">
+/* Shared CSS for AsciiDoc xhtml11 and html5 backends */
+
+/* Default font. */
+body {
+  font-family: Georgia,serif;
+}
+
+/* Title font. */
+h1, h2, h3, h4, h5, h6,
+div.title, caption.title,
+thead, p.table.header,
+#toctitle,
+#author, #revnumber, #revdate, #revremark,
+#footer {
+  font-family: Arial,Helvetica,sans-serif;
+}
+
+body {
+  margin: 1em 5% 1em 5%;
+}
+
+a {
+  color: blue;
+  text-decoration: underline;
+}
+a:visited {
+  color: fuchsia;
+}
+
+em {
+  font-style: italic;
+  color: navy;
+}
+
+strong {
+  font-weight: bold;
+  color: #083194;
+}
+
+h1, h2, h3, h4, h5, h6 {
+  color: #527bbd;
+  margin-top: 1.2em;
+  margin-bottom: 0.5em;
+  line-height: 1.3;
+}
+
+h1, h2, h3 {
+  border-bottom: 2px solid silver;
+}
+h2 {
+  padding-top: 0.5em;
+}
+h3 {
+  float: left;
+}
+h3 + * {
+  clear: left;
+}
+h5 {
+  font-size: 1.0em;
+}
+
+div.sectionbody {
+  margin-left: 0;
+}
+
+hr {
+  border: 1px solid silver;
+}
+
+p {
+  margin-top: 0.5em;
+  margin-bottom: 0.5em;
+}
+
+ul, ol, li > p {
+  margin-top: 0;
+}
+ul > li     { color: #aaa; }
+ul > li > * { color: black; }
+
+.monospaced, code, pre {
+  font-family: "Courier New", Courier, monospace;
+  font-size: inherit;
+  color: navy;
+  padding: 0;
+  margin: 0;
+}
+pre {
+  white-space: pre-wrap;
+}
+
+#author {
+  color: #527bbd;
+  font-weight: bold;
+  font-size: 1.1em;
+}
+#email {
+}
+#revnumber, #revdate, #revremark {
+}
+
+#footer {
+  font-size: small;
+  border-top: 2px solid silver;
+  padding-top: 0.5em;
+  margin-top: 4.0em;
+}
+#footer-text {
+  float: left;
+  padding-bottom: 0.5em;
+}
+#footer-badges {
+  float: right;
+  padding-bottom: 0.5em;
+}
+
+#preamble {
+  margin-top: 1.5em;
+  margin-bottom: 1.5em;
+}
+div.imageblock, div.exampleblock, div.verseblock,
+div.quoteblock, div.literalblock, div.listingblock, div.sidebarblock,
+div.admonitionblock {
+  margin-top: 1.0em;
+  margin-bottom: 1.5em;
+}
+div.admonitionblock {
+  margin-top: 2.0em;
+  margin-bottom: 2.0em;
+  margin-right: 10%;
+  color: #606060;
+}
+
+div.content { /* Block element content. */
+  padding: 0;
+}
+
+/* Block element titles. */
+div.title, caption.title {
+  color: #527bbd;
+  font-weight: bold;
+  text-align: left;
+  margin-top: 1.0em;
+  margin-bottom: 0.5em;
+}
+div.title + * {
+  margin-top: 0;
+}
+
+td div.title:first-child {
+  margin-top: 0.0em;
+}
+div.content div.title:first-child {
+  margin-top: 0.0em;
+}
+div.content + div.title {
+  margin-top: 0.0em;
+}
+
+div.sidebarblock > div.content {
+  background: #ffffee;
+  border: 1px solid #dddddd;
+  border-left: 4px solid #f0f0f0;
+  padding: 0.5em;
+}
+
+div.listingblock > div.content {
+  border: 1px solid #dddddd;
+  border-left: 5px solid #f0f0f0;
+  background: #f8f8f8;
+  padding: 0.5em;
+}
+
+div.quoteblock, div.verseblock {
+  padding-left: 1.0em;
+  margin-left: 1.0em;
+  margin-right: 10%;
+  border-left: 5px solid #f0f0f0;
+  color: #888;
+}
+
+div.quoteblock > div.attribution {
+  padding-top: 0.5em;
+  text-align: right;
+}
+
+div.verseblock > pre.content {
+  font-family: inherit;
+  font-size: inherit;
+}
+div.verseblock > div.attribution {
+  padding-top: 0.75em;
+  text-align: left;
+}
+/* DEPRECATED: Pre version 8.2.7 verse style literal block. */
+div.verseblock + div.attribution {
+  text-align: left;
+}
+
+div.admonitionblock .icon {
+  vertical-align: top;
+  font-size: 1.1em;
+  font-weight: bold;
+  text-decoration: underline;
+  color: #527bbd;
+  padding-right: 0.5em;
+}
+div.admonitionblock td.content {
+  padding-left: 0.5em;
+  border-left: 3px solid #dddddd;
+}
+
+div.exampleblock > div.content {
+  border-left: 3px solid #dddddd;
+  padding-left: 0.5em;
+}
+
+div.imageblock div.content { padding-left: 0; }
+span.image img { border-style: none; vertical-align: text-bottom; }
+a.image:visited { color: white; }
+
+dl {
+  margin-top: 0.8em;
+  margin-bottom: 0.8em;
+}
+dt {
+  margin-top: 0.5em;
+  margin-bottom: 0;
+  font-style: normal;
+  color: navy;
+}
+dd > *:first-child {
+  margin-top: 0.1em;
+}
+
+ul, ol {
+    list-style-position: outside;
+}
+ol.arabic {
+  list-style-type: decimal;
+}
+ol.loweralpha {
+  list-style-type: lower-alpha;
+}
+ol.upperalpha {
+  list-style-type: upper-alpha;
+}
+ol.lowerroman {
+  list-style-type: lower-roman;
+}
+ol.upperroman {
+  list-style-type: upper-roman;
+}
+
+div.compact ul, div.compact ol,
+div.compact p, div.compact p,
+div.compact div, div.compact div {
+  margin-top: 0.1em;
+  margin-bottom: 0.1em;
+}
+
+tfoot {
+  font-weight: bold;
+}
+td > div.verse {
+  white-space: pre;
+}
+
+div.hdlist {
+  margin-top: 0.8em;
+  margin-bottom: 0.8em;
+}
+div.hdlist tr {
+  padding-bottom: 15px;
+}
+dt.hdlist1.strong, td.hdlist1.strong {
+  font-weight: bold;
+}
+td.hdlist1 {
+  vertical-align: top;
+  font-style: normal;
+  padding-right: 0.8em;
+  color: navy;
+}
+td.hdlist2 {
+  vertical-align: top;
+}
+div.hdlist.compact tr {
+  margin: 0;
+  padding-bottom: 0;
+}
+
+.comment {
+  background: yellow;
+}
+
+.footnote, .footnoteref {
+  font-size: 0.8em;
+}
+
+span.footnote, span.footnoteref {
+  vertical-align: super;
+}
+
+#footnotes {
+  margin: 20px 0 20px 0;
+  padding: 7px 0 0 0;
+}
+
+#footnotes div.footnote {
+  margin: 0 0 5px 0;
+}
+
+#footnotes hr {
+  border: none;
+  border-top: 1px solid silver;
+  height: 1px;
+  text-align: left;
+  margin-left: 0;
+  width: 20%;
+  min-width: 100px;
+}
+
+div.colist td {
+  padding-right: 0.5em;
+  padding-bottom: 0.3em;
+  vertical-align: top;
+}
+div.colist td img {
+  margin-top: 0.3em;
+}
+
+@media print {
+  #footer-badges { display: none; }
+}
+
+#toc {
+  margin-bottom: 2.5em;
+}
+
+#toctitle {
+  color: #527bbd;
+  font-size: 1.1em;
+  font-weight: bold;
+  margin-top: 1.0em;
+  margin-bottom: 0.1em;
+}
+
+div.toclevel0, div.toclevel1, div.toclevel2, div.toclevel3, div.toclevel4 {
+  margin-top: 0;
+  margin-bottom: 0;
+}
+div.toclevel2 {
+  margin-left: 2em;
+  font-size: 0.9em;
+}
+div.toclevel3 {
+  margin-left: 4em;
+  font-size: 0.9em;
+}
+div.toclevel4 {
+  margin-left: 6em;
+  font-size: 0.9em;
+}
+
+span.aqua { color: aqua; }
+span.black { color: black; }
+span.blue { color: blue; }
+span.fuchsia { color: fuchsia; }
+span.gray { color: gray; }
+span.green { color: green; }
+span.lime { color: lime; }
+span.maroon { color: maroon; }
+span.navy { color: navy; }
+span.olive { color: olive; }
+span.purple { color: purple; }
+span.red { color: red; }
+span.silver { color: silver; }
+span.teal { color: teal; }
+span.white { color: white; }
+span.yellow { color: yellow; }
+
+span.aqua-background { background: aqua; }
+span.black-background { background: black; }
+span.blue-background { background: blue; }
+span.fuchsia-background { background: fuchsia; }
+span.gray-background { background: gray; }
+span.green-background { background: green; }
+span.lime-background { background: lime; }
+span.maroon-background { background: maroon; }
+span.navy-background { background: navy; }
+span.olive-background { background: olive; }
+span.purple-background { background: purple; }
+span.red-background { background: red; }
+span.silver-background { background: silver; }
+span.teal-background { background: teal; }
+span.white-background { background: white; }
+span.yellow-background { background: yellow; }
+
+span.big { font-size: 2em; }
+span.small { font-size: 0.6em; }
+
+span.underline { text-decoration: underline; }
+span.overline { text-decoration: overline; }
+span.line-through { text-decoration: line-through; }
+
+div.unbreakable { page-break-inside: avoid; }
+
+
+/*
+ * xhtml11 specific
+ *
+ * */
+
+div.tableblock {
+  margin-top: 1.0em;
+  margin-bottom: 1.5em;
+}
+div.tableblock > table {
+  border: 3px solid #527bbd;
+}
+thead, p.table.header {
+  font-weight: bold;
+  color: #527bbd;
+}
+p.table {
+  margin-top: 0;
+}
+/* Because the table frame attribute is overriden by CSS in most browsers. */
+div.tableblock > table[frame="void"] {
+  border-style: none;
+}
+div.tableblock > table[frame="hsides"] {
+  border-left-style: none;
+  border-right-style: none;
+}
+div.tableblock > table[frame="vsides"] {
+  border-top-style: none;
+  border-bottom-style: none;
+}
+
+
+/*
+ * html5 specific
+ *
+ * */
+
+table.tableblock {
+  margin-top: 1.0em;
+  margin-bottom: 1.5em;
+}
+thead, p.tableblock.header {
+  font-weight: bold;
+  color: #527bbd;
+}
+p.tableblock {
+  margin-top: 0;
+}
+table.tableblock {
+  border-width: 3px;
+  border-spacing: 0px;
+  border-style: solid;
+  border-color: #527bbd;
+  border-collapse: collapse;
+}
+th.tableblock, td.tableblock {
+  border-width: 1px;
+  padding: 4px;
+  border-style: solid;
+  border-color: #527bbd;
+}
+
+table.tableblock.frame-topbot {
+  border-left-style: hidden;
+  border-right-style: hidden;
+}
+table.tableblock.frame-sides {
+  border-top-style: hidden;
+  border-bottom-style: hidden;
+}
+table.tableblock.frame-none {
+  border-style: hidden;
+}
+
+th.tableblock.halign-left, td.tableblock.halign-left {
+  text-align: left;
+}
+th.tableblock.halign-center, td.tableblock.halign-center {
+  text-align: center;
+}
+th.tableblock.halign-right, td.tableblock.halign-right {
+  text-align: right;
+}
+
+th.tableblock.valign-top, td.tableblock.valign-top {
+  vertical-align: top;
+}
+th.tableblock.valign-middle, td.tableblock.valign-middle {
+  vertical-align: middle;
+}
+th.tableblock.valign-bottom, td.tableblock.valign-bottom {
+  vertical-align: bottom;
+}
+
+
+/*
+ * manpage specific
+ *
+ * */
+
+body.manpage h1 {
+  padding-top: 0.5em;
+  padding-bottom: 0.5em;
+  border-top: 2px solid silver;
+  border-bottom: 2px solid silver;
+}
+body.manpage h2 {
+  border-style: none;
+}
+body.manpage div.sectionbody {
+  margin-left: 3em;
+}
+
+@media print {
+  body.manpage div#toc { display: none; }
+}
+
+
+div.listingblock > div.content {
+  background: rgb(28, 28, 28);
+}
+
+div.listingblock > div > pre > code {
+  color: rgb(187, 187, 187);
+}
+</style>
+<script type="text/javascript">
+/*<![CDATA[*/
+var asciidoc = {  // Namespace.
+
+/////////////////////////////////////////////////////////////////////
+// Table Of Contents generator
+/////////////////////////////////////////////////////////////////////
+
+/* Author: Mihai Bazon, September 2002
+ * http://students.infoiasi.ro/~mishoo
+ *
+ * Table Of Content generator
+ * Version: 0.4
+ *
+ * Feel free to use this script under the terms of the GNU General Public
+ * License, as long as you do not remove or alter this notice.
+ */
+
+ /* modified by Troy D. Hanson, September 2006. License: GPL */
+ /* modified by Stuart Rackham, 2006, 2009. License: GPL */
+
+// toclevels = 1..4.
+toc: function (toclevels) {
+
+  function getText(el) {
+    var text = "";
+    for (var i = el.firstChild; i != null; i = i.nextSibling) {
+      if (i.nodeType == 3 /* Node.TEXT_NODE */) // IE doesn't speak constants.
+        text += i.data;
+      else if (i.firstChild != null)
+        text += getText(i);
+    }
+    return text;
+  }
+
+  function TocEntry(el, text, toclevel) {
+    this.element = el;
+    this.text = text;
+    this.toclevel = toclevel;
+  }
+
+  function tocEntries(el, toclevels) {
+    var result = new Array;
+    var re = new RegExp('[hH]([1-'+(toclevels+1)+'])');
+    // Function that scans the DOM tree for header elements (the DOM2
+    // nodeIterator API would be a better technique but not supported by all
+    // browsers).
+    var iterate = function (el) {
+      for (var i = el.firstChild; i != null; i = i.nextSibling) {
+        if (i.nodeType == 1 /* Node.ELEMENT_NODE */) {
+          var mo = re.exec(i.tagName);
+          if (mo && (i.getAttribute("class") || i.getAttribute("className")) != "float") {
+            result[result.length] = new TocEntry(i, getText(i), mo[1]-1);
+          }
+          iterate(i);
+        }
+      }
+    }
+    iterate(el);
+    return result;
+  }
+
+  var toc = document.getElementById("toc");
+  if (!toc) {
+    return;
+  }
+
+  // Delete existing TOC entries in case we're reloading the TOC.
+  var tocEntriesToRemove = [];
+  var i;
+  for (i = 0; i < toc.childNodes.length; i++) {
+    var entry = toc.childNodes[i];
+    if (entry.nodeName.toLowerCase() == 'div'
+     && entry.getAttribute("class")
+     && entry.getAttribute("class").match(/^toclevel/))
+      tocEntriesToRemove.push(entry);
+  }
+  for (i = 0; i < tocEntriesToRemove.length; i++) {
+    toc.removeChild(tocEntriesToRemove[i]);
+  }
+
+  // Rebuild TOC entries.
+  var entries = tocEntries(document.getElementById("content"), toclevels);
+  for (var i = 0; i < entries.length; ++i) {
+    var entry = entries[i];
+    if (entry.element.id == "")
+      entry.element.id = "_toc_" + i;
+    var a = document.createElement("a");
+    a.href = "#" + entry.element.id;
+    a.appendChild(document.createTextNode(entry.text));
+    var div = document.createElement("div");
+    div.appendChild(a);
+    div.className = "toclevel" + entry.toclevel;
+    toc.appendChild(div);
+  }
+  if (entries.length == 0)
+    toc.parentNode.removeChild(toc);
+},
+
+
+/////////////////////////////////////////////////////////////////////
+// Footnotes generator
+/////////////////////////////////////////////////////////////////////
+
+/* Based on footnote generation code from:
+ * http://www.brandspankingnew.net/archive/2005/07/format_footnote.html
+ */
+
+footnotes: function () {
+  // Delete existing footnote entries in case we're reloading the footnodes.
+  var i;
+  var noteholder = document.getElementById("footnotes");
+  if (!noteholder) {
+    return;
+  }
+  var entriesToRemove = [];
+  for (i = 0; i < noteholder.childNodes.length; i++) {
+    var entry = noteholder.childNodes[i];
+    if (entry.nodeName.toLowerCase() == 'div' && entry.getAttribute("class") == "footnote")
+      entriesToRemove.push(entry);
+  }
+  for (i = 0; i < entriesToRemove.length; i++) {
+    noteholder.removeChild(entriesToRemove[i]);
+  }
+
+  // Rebuild footnote entries.
+  var cont = document.getElementById("content");
+  var spans = cont.getElementsByTagName("span");
+  var refs = {};
+  var n = 0;
+  for (i=0; i<spans.length; i++) {
+    if (spans[i].className == "footnote") {
+      n++;
+      var note = spans[i].getAttribute("data-note");
+      if (!note) {
+        // Use [\s\S] in place of . so multi-line matches work.
+        // Because JavaScript has no s (dotall) regex flag.
+        note = spans[i].innerHTML.match(/\s*\[([\s\S]*)]\s*/)[1];
+        spans[i].innerHTML =
+          "[<a id='_footnoteref_" + n + "' href='#_footnote_" + n +
+          "' title='View footnote' class='footnote'>" + n + "</a>]";
+        spans[i].setAttribute("data-note", note);
+      }
+      noteholder.innerHTML +=
+        "<div class='footnote' id='_footnote_" + n + "'>" +
+        "<a href='#_footnoteref_" + n + "' title='Return to text'>" +
+        n + "</a>. " + note + "</div>";
+      var id =spans[i].getAttribute("id");
+      if (id != null) refs["#"+id] = n;
+    }
+  }
+  if (n == 0)
+    noteholder.parentNode.removeChild(noteholder);
+  else {
+    // Process footnoterefs.
+    for (i=0; i<spans.length; i++) {
+      if (spans[i].className == "footnoteref") {
+        var href = spans[i].getElementsByTagName("a")[0].getAttribute("href");
+        href = href.match(/#.*/)[0];  // Because IE return full URL.
+        n = refs[href];
+        spans[i].innerHTML =
+          "[<a href='#_footnote_" + n +
+          "' title='View footnote' class='footnote'>" + n + "</a>]";
+      }
+    }
+  }
+},
+
+install: function(toclevels) {
+  var timerId;
+
+  function reinstall() {
+    asciidoc.footnotes();
+    if (toclevels) {
+      asciidoc.toc(toclevels);
+    }
+  }
+
+  function reinstallAndRemoveTimer() {
+    clearInterval(timerId);
+    reinstall();
+  }
+
+  timerId = setInterval(reinstall, 500);
+  if (document.addEventListener)
+    document.addEventListener("DOMContentLoaded", reinstallAndRemoveTimer, false);
+  else
+    window.onload = reinstallAndRemoveTimer;
+}
+
+}
+asciidoc.install();
+/*]]>*/
+</script>
+</head>
+<body class="manpage">
+<div id="header">
+<h1>
+git-auto-svn(1) Manual Page
+</h1>
+<h2>NAME</h2>
+<div class="sectionbody">
+<p>git-auto-svn -
+   Automatically set up git-svn metadata for a repo mirrored from SVN.
+</p>
+</div>
+</div>
+<div id="content">
+<div class="sect1">
+<h2 id="_synopsis">SYNOPSIS</h2>
+<div class="sectionbody">
+<div class="verseblock">
+<pre class="content"><em>git auto-svn</em></pre>
+<div class="attribution">
+</div></div>
+</div>
+</div>
+<div class="sect1">
+<h2 id="_description">DESCRIPTION</h2>
+<div class="sectionbody">
+<div class="paragraph"><p><code>git auto-svn</code> automatically sets up git-svn metadata and runs git-svn fetch for
+repos that are homed in SVN but mirrored to Git (such as depot_tools itself).</p></div>
+<div class="paragraph"><p>It determines the metadata to use by inspecting the <code>git-svn-id</code> footer of the
+HEAD of the remote upstream ref (by default, <code>origin/master</code>). <code>git-svn-id</code>
+footers look like this:</p></div>
+<div class="literalblock">
+<div class="content">
+<pre><code>git-svn-id: svn://some.host.org/repo/path/to/a/sub/folder@123456 0039d316-1c4b-4281-b951-d872f2087c98</code></pre>
+</div></div>
+<div class="paragraph"><p><code>git auto-svn</code> extracts the repository url
+(svn://some.host.org/repo/path/to/a/sub/folder) from the <code>git-svn-id</code>, and
+splits it into the root repository (svn://some.host.org/repo) and the path
+within that repository (/path/to/a/sub/folder).</p></div>
+<div class="paragraph"><p>It then sets up the following stanza in .git/config:</p></div>
+<div class="literalblock">
+<div class="content">
+<pre><code>[svn-remote "svn"]
+    url = svn://some.host.org/repo
+    fetch = path/to/a/sub/folder:refs/remotes/origin/master</code></pre>
+</div></div>
+<div class="paragraph"><p>Finally, it runs <code>git svn fetch</code> to pull in the data from the svn remote.</p></div>
+</div>
+</div>
+<div class="sect1">
+<h2 id="_configuration_variables">CONFIGURATION VARIABLES</h2>
+<div class="sectionbody">
+<div class="sect2">
+<h3 id="_svn_remote_svn_url">svn-remote.svn.url</h3>
+<div class="paragraph"><p>This is the url of the root of the remote svn repository.</p></div>
+</div>
+<div class="sect2">
+<h3 id="_svn_remote_svn_fetch">svn-remote.svn.fetch</h3>
+<div class="paragraph"><p>This looks like a git refspec, but maps a subdirectory of the svn repository
+to a single ref in the git remote.</p></div>
+</div>
+</div>
+</div>
+<div class="sect1">
+<h2 id="_example">EXAMPLE</h2>
+<div class="sectionbody">
+<div class="literalblock">
+<div class="content">
+<pre><code>git clone https://chromium.googlesource.com/chromium/tools/depot_tools
+cd depot_tools
+git auto-svn</code></pre>
+</div></div>
+<div class="paragraph"><p>This results in the following stanza in <code>depot_tools/.git/config</code>:</p></div>
+<div class="literalblock">
+<div class="content">
+<pre><code>[svn-remote "svn"]
+    url = svn://svn.chromium.org/chrome
+    fetch = trunk/tools/depot_tools:refs/remotes/origin/master</code></pre>
+</div></div>
+</div>
+</div>
+<div class="sect1">
+<h2 id="_chromium_depot_tools">CHROMIUM DEPOT_TOOLS</h2>
+<div class="sectionbody">
+<div class="paragraph"><p>Part of the chromium <a href="depot_tools.html">depot_tools(7)</a> suite. These tools are meant to
+assist with the development of chromium and related projects. Download the tools
+from <a href="https://chromium.googlesource.com/chromium/tools/depot_tools.git">here</a>.</p></div>
+</div>
+</div>
+</div>
+<div id="footnotes"><hr /></div>
+<div id="footer">
+<div id="footer-text">
+Last updated 2014-09-30 16:15:22 CEST
+</div>
+</div>
+</body>
+</html>
diff --git a/man/html/git-map-branches.html b/man/html/git-map-branches.html
index e3d1d33..5ac03db 100644
--- a/man/html/git-map-branches.html
+++ b/man/html/git-map-branches.html
@@ -790,6 +790,11 @@
 </li>
 <li>
 <p>
+<em>branch-heads</em> branches are <span class="blue">blue</span>.
+</p>
+</li>
+<li>
+<p>
 <code>{NO UPSTREAM}</code> is a special placeholder in <span class="fuchsia">magenta</span>.
 </p>
 <div class="ulist"><ul>
@@ -820,13 +825,13 @@
 assuming that the <code>frozen_changes</code> branch was currently checked out, running
 <em>git map-branches</em> would result in an output like:</p></div>
 <div class="paragraph"><p></p></div><div class="listingblock"><div class="content"><pre><code><span style="font-weight: bold; color: #ffffff">$ git map-branches</span>
-<span style="color: #e42e16"></span><span style="color: #e42e16">origin/master
+<span style="color: #d338d3"></span><span style="color: #d338d3">{NO_UPSTREAM}
+</span><span style="color: #19c518"></span><span style="color: #19c518">  no_upstream
+</span><span style="color: #e42e16"></span><span style="color: #e42e16">origin/master
 </span><span style="color: #19c518"></span><span style="color: #19c518">  cool_feature
 </span><span style="color: #19c518"></span><span style="color: #19c518">    subfeature
 </span><span style="color: #19c518"></span><span style="color: #19c518">  fixit
-</span><span style="color: #33d6e5"></span><span style="font-weight: bold; color: #33d6e5">    frozen_branch *
-</span><span style="font-weight: bold; color: #d338d3"></span><span style="color: #d338d3">{NO UPSTREAM}
-</span><span style="color: #19c518"></span><span style="color: #19c518">  no_upstream</span>
+</span><span style="color: #33d6e5"></span><span style="font-weight: bold; color: #33d6e5">    frozen_branch *</span>
 </code></pre></div></div><p><div class="paragraph"></p></div>
 </div>
 </div>
@@ -860,7 +865,7 @@
 <div id="footnotes"><hr /></div>
 <div id="footer">
 <div id="footer-text">
-Last updated 2014-04-10 14:23:11 PDT
+Last updated 2014-09-23 13:01:42 EST
 </div>
 </div>
 </body>
diff --git a/man/man1/git-auto-svn.1 b/man/man1/git-auto-svn.1
new file mode 100644
index 0000000..84e238b
--- /dev/null
+++ b/man/man1/git-auto-svn.1
@@ -0,0 +1,113 @@
+'\" t
+.\"     Title: git-auto-svn
+.\"    Author: [FIXME: author] [see http://docbook.sf.net/el/author]
+.\" Generator: DocBook XSL Stylesheets v1.78.1 <http://docbook.sf.net/>
+.\"      Date: 09/30/2014
+.\"    Manual: Chromium depot_tools Manual
+.\"    Source: depot_tools 2bbacdc
+.\"  Language: English
+.\"
+.TH "GIT\-AUTO\-SVN" "1" "09/30/2014" "depot_tools 2bbacdc" "Chromium depot_tools Manual"
+.\" -----------------------------------------------------------------
+.\" * Define some portability stuff
+.\" -----------------------------------------------------------------
+.\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+.\" http://bugs.debian.org/507673
+.\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html
+.\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+.ie \n(.g .ds Aq \(aq
+.el       .ds Aq '
+.\" -----------------------------------------------------------------
+.\" * set default formatting
+.\" -----------------------------------------------------------------
+.\" disable hyphenation
+.nh
+.\" disable justification (adjust text to left margin only)
+.ad l
+.\" -----------------------------------------------------------------
+.\" * MAIN CONTENT STARTS HERE *
+.\" -----------------------------------------------------------------
+.SH "NAME"
+git-auto-svn \- Automatically set up git\-svn metadata for a repo mirrored from SVN\&.
+.SH "SYNOPSIS"
+.sp
+.nf
+\fIgit auto\-svn\fR
+.fi
+.sp
+.SH "DESCRIPTION"
+.sp
+git auto\-svn automatically sets up git\-svn metadata and runs git\-svn fetch for repos that are homed in SVN but mirrored to Git (such as depot_tools itself)\&.
+.sp
+It determines the metadata to use by inspecting the git\-svn\-id footer of the HEAD of the remote upstream ref (by default, origin/master)\&. git\-svn\-id footers look like this:
+.sp
+.if n \{\
+.RS 4
+.\}
+.nf
+git\-svn\-id: svn://some\&.host\&.org/repo/path/to/a/sub/folder@123456 0039d316\-1c4b\-4281\-b951\-d872f2087c98
+.fi
+.if n \{\
+.RE
+.\}
+.sp
+git auto\-svn extracts the repository url (svn://some\&.host\&.org/repo/path/to/a/sub/folder) from the git\-svn\-id, and splits it into the root repository (svn://some\&.host\&.org/repo) and the path within that repository (/path/to/a/sub/folder)\&.
+.sp
+It then sets up the following stanza in \&.git/config:
+.sp
+.if n \{\
+.RS 4
+.\}
+.nf
+[svn\-remote "svn"]
+    url = svn://some\&.host\&.org/repo
+    fetch = path/to/a/sub/folder:refs/remotes/origin/master
+.fi
+.if n \{\
+.RE
+.\}
+.sp
+Finally, it runs git svn fetch to pull in the data from the svn remote\&.
+.SH "CONFIGURATION VARIABLES"
+.SS "svn\-remote\&.svn\&.url"
+.sp
+This is the url of the root of the remote svn repository\&.
+.SS "svn\-remote\&.svn\&.fetch"
+.sp
+This looks like a git refspec, but maps a subdirectory of the svn repository to a single ref in the git remote\&.
+.SH "EXAMPLE"
+.sp
+.if n \{\
+.RS 4
+.\}
+.nf
+git clone https://chromium\&.googlesource\&.com/chromium/tools/depot_tools
+cd depot_tools
+git auto\-svn
+.fi
+.if n \{\
+.RE
+.\}
+.sp
+This results in the following stanza in depot_tools/\&.git/config:
+.sp
+.if n \{\
+.RS 4
+.\}
+.nf
+[svn\-remote "svn"]
+    url = svn://svn\&.chromium\&.org/chrome
+    fetch = trunk/tools/depot_tools:refs/remotes/origin/master
+.fi
+.if n \{\
+.RE
+.\}
+.SH "CHROMIUM DEPOT_TOOLS"
+.sp
+Part of the chromium \fBdepot_tools\fR(7) suite\&. These tools are meant to assist with the development of chromium and related projects\&. Download the tools from \m[blue]\fBhere\fR\m[]\&\s-2\u[1]\d\s+2\&.
+.SH "NOTES"
+.IP " 1." 4
+here
+.RS 4
+\%https://chromium.googlesource.com/chromium/tools/depot_tools.git
+.RE
diff --git a/man/man1/git-map-branches.1 b/man/man1/git-map-branches.1
index 8578253..552a495 100644
--- a/man/man1/git-map-branches.1
+++ b/man/man1/git-map-branches.1
@@ -1,13 +1,13 @@
 '\" t
 .\"     Title: git-map-branches
 .\"    Author: [FIXME: author] [see http://docbook.sf.net/el/author]
-.\" Generator: DocBook XSL Stylesheets v1.78.1 <http://docbook.sf.net/>
-.\"      Date: 04/10/2014
+.\" Generator: DocBook XSL Stylesheets v1.76.1 <http://docbook.sf.net/>
+.\"      Date: 09/23/2014
 .\"    Manual: Chromium depot_tools Manual
-.\"    Source: depot_tools 68b1017
+.\"    Source: depot_tools 28bf2be
 .\"  Language: English
 .\"
-.TH "GIT\-MAP\-BRANCHES" "1" "04/10/2014" "depot_tools 68b1017" "Chromium depot_tools Manual"
+.TH "GIT\-MAP\-BRANCHES" "1" "09/23/2014" "depot_tools 28bf2be" "Chromium depot_tools Manual"
 .\" -----------------------------------------------------------------
 .\" * Define some portability stuff
 .\" -----------------------------------------------------------------
@@ -95,6 +95,21 @@
 .sp -1
 .IP \(bu 2.3
 .\}
+
+\fIbranch\-heads\fR
+branches are
+blue\&.
+.RE
+.sp
+.RS 4
+.ie n \{\
+\h'-04'\(bu\h'+03'\c
+.\}
+.el \{\
+.sp -1
+.IP \(bu 2.3
+.\}
+
 {NO UPSTREAM}
 is a special placeholder in
 magenta\&.
@@ -137,13 +152,13 @@
 .\}
 .nf
 \fB$ git map\-branches\fR
+{NO_UPSTREAM}
+  no_upstream
 origin/master
   cool_feature
     subfeature
   fixit
-\fB    frozen_branch *
-\fR{NO UPSTREAM}
-  no_upstream
+\fB    frozen_branch *\fR
 .fi
 .if n \{\
 .RE
diff --git a/man/man7/depot_tools_tutorial.7 b/man/man7/depot_tools_tutorial.7
index 576370c..8aa6560 100644
--- a/man/man7/depot_tools_tutorial.7
+++ b/man/man7/depot_tools_tutorial.7
@@ -1,13 +1,13 @@
 '\" t
 .\"     Title: depot_tools_tutorial
 .\"    Author: [FIXME: author] [see http://docbook.sf.net/el/author]
-.\" Generator: DocBook XSL Stylesheets v1.76.1 <http://docbook.sf.net/>
-.\"      Date: 05/09/2014
+.\" Generator: DocBook XSL Stylesheets v1.78.1 <http://docbook.sf.net/>
+.\"      Date: 01/13/2015
 .\"    Manual: Chromium depot_tools Manual
-.\"    Source: depot_tools 54dac93
+.\"    Source: depot_tools 379a889
 .\"  Language: English
 .\"
-.TH "DEPOT_TOOLS_TUTORIAL" "7" "05/09/2014" "depot_tools 54dac93" "Chromium depot_tools Manual"
+.TH "DEPOT_TOOLS_TUTORIAL" "7" "01/13/2015" "depot_tools 379a889" "Chromium depot_tools Manual"
 .\" -----------------------------------------------------------------
 .\" * Define some portability stuff
 .\" -----------------------------------------------------------------
@@ -41,7 +41,6 @@
 .sp -1
 .IP \(bu 2.3
 .\}
-
 Setting up
 .RE
 .sp
@@ -53,7 +52,6 @@
 .sp -1
 .IP \(bu 2.3
 .\}
-
 Getting the code
 .RE
 .sp
@@ -65,7 +63,6 @@
 .sp -1
 .IP \(bu 2.3
 .\}
-
 TL;DR
 .RE
 .sp
@@ -77,7 +74,6 @@
 .sp -1
 .IP \(bu 2.3
 .\}
-
 Creating / Uploading a CL
 .RE
 .sp
@@ -89,7 +85,6 @@
 .sp -1
 .IP \(bu 2.3
 .\}
-
 Updating the code
 .RE
 .sp
@@ -101,7 +96,6 @@
 .sp -1
 .IP \(bu 2.3
 .\}
-
 Managing multiple CLs
 .RE
 .sp
@@ -113,7 +107,6 @@
 .sp -1
 .IP \(bu 2.3
 .\}
-
 Managing dependent CLs
 .RE
 .sp
@@ -125,7 +118,6 @@
 .sp -1
 .IP \(bu 2.3
 .\}
-
 Example Walkthrough
 .RE
 .sp
@@ -158,7 +150,6 @@
 .sp -1
 .IP \(bu 2.3
 .\}
-
 \m[blue]\fBThink like (a) Git\fR\m[]\&\s-2\u[1]\d\s+2
 \- A lighthearted overview of git\&. If you\(cqre sorta\-familiar with git, but not
 \fIcomfortable\fR
@@ -173,7 +164,6 @@
 .sp -1
 .IP \(bu 2.3
 .\}
-
 \m[blue]\fBGit Immersion Tutorial\fR\m[]\&\s-2\u[2]\d\s+2
 \- An in\-depth git tutorial\&.
 .RE
@@ -186,7 +176,6 @@
 .sp -1
 .IP \(bu 2.3
 .\}
-
 \m[blue]\fBpcottle\(cqs Visual Git Branching\fR\m[]\&\s-2\u[3]\d\s+2
 \- An excellent interactive/graphical demo on how git handles commits, branches, and shows the operations git performs on them\&.
 .RE
@@ -199,7 +188,6 @@
 .sp -1
 .IP \(bu 2.3
 .\}
-
 \m[blue]\fBPro Git book\fR\m[]\&\s-2\u[4]\d\s+2
 \- \(lqThe\(rq book for learning git from basics to advanced concepts\&. A bit dry, but very through\&.
 .RE
@@ -227,7 +215,7 @@
 .RS 4
 .\}
 .nf
-\fB$ git clone https://chromium\&.googlesource\&.com/chromium/tools/depot_tools\fR
+\fB$ git clone https://chromium\&.googlesource\&.com/chromium/tools/depot_tools\&.git\fR
 .fi
 .if n \{\
 .RE
@@ -254,8 +242,8 @@
 .RS 4
 .\}
 .nf
-\fB$ git config \-\-global user\&.name \(lqJohn Doe\(rq\fR
-\fB$ git config \-\-global user\&.email \(lqjdoe@email\&.com\(rq\fR
+\fB$ git config \-\-global user\&.name "John Doe"\fR
+\fB$ git config \-\-global user\&.email "jdoe@email\&.com"\fR
 \fB$ git config \-\-global core\&.autocrlf false\fR
 \fB$ git config \-\-global core\&.filemode false\fR
 \fB$\fR # and for fun!
@@ -485,6 +473,7 @@
 .RE
 .\}
 .sp
+
 .PP
 \fBgit-map\fR(1)
 .RS 4
@@ -578,9 +567,9 @@
 \fB$ git new\-branch fix_typo\fR
 \fB$ echo \-e \*(Aq/Banana\ens/Banana/Kuun\enwq\*(Aq | ed build/whitespace_file\&.txt\fR
 1503
-1501
 It was a Domo\-Banana\&.
 It was a Domo\-Kuun\&.
+1501
 \fB$ git commit \-am \*(AqFix terrible typo\&.\*(Aq\fR
 [fix_typo 615ffa7] Fix terrible typo\&.
  1 file changed, 1 insertion(+), 1 deletion(\-)
@@ -652,9 +641,9 @@
   (use "git push" to publish your local commits)
 \fB$ echo \-e \*(Aq/Kuun\ens/Kuun/Kun\enwq\*(Aq | ed build/whitespace_file\&.txt\fR
 1501
-1500
 It was a Domo\-Kuun\&.
 It was a Domo\-Kun\&.
+1500
 \fB$ git upstream\-diff \-\-wordwise\fR
 \fBdiff \-\-git a/build/whitespace_file\&.txt b/build/whitespace_file\&.txt\fR
 \fBindex 3eba355\&.\&.57cdcee 100644\fR
@@ -726,14 +715,14 @@
 
 HEAD is now at beec6f4\&.\&.\&. Make ReflectorImpl use mailboxes
 \fB$ git nav\-downstream\fR
-Please select a downstream branch
-  0\&. chap2
-  1\&. fix_typo
-Selection (0\-1)[0]: 0
 Previous HEAD position was beec6f4\&.\&.\&. Make ReflectorImpl use mailboxes
 Switched to branch \*(Aqchap2\*(Aq
 Your branch is ahead of \*(Aqorigin/master\*(Aq by 1 commit\&.
   (use "git push" to publish your local commits)
+Please select a downstream branch
+  0\&. chap2
+  1\&. fix_typo
+Selection (0\-1)[0]: 0
 \fB$ git map\-branches\fR
 origin/master
 \fB  chap2 *
diff --git a/man/src/_git-auto-svn_desc.helper.txt b/man/src/_git-auto-svn_desc.helper.txt
new file mode 100644
index 0000000..eabe443
--- /dev/null
+++ b/man/src/_git-auto-svn_desc.helper.txt
@@ -0,0 +1 @@
+Automatically set up git-svn metadata for a repo mirrored from SVN.
diff --git a/man/src/depot_tools_tutorial.txt b/man/src/depot_tools_tutorial.txt
index 64ad485..312886a 100644
--- a/man/src/depot_tools_tutorial.txt
+++ b/man/src/depot_tools_tutorial.txt
@@ -71,7 +71,7 @@
 
 [subs="quotes"]
 ----
-[white]**$ git clone https://chromium.googlesource.com/chromium/tools/depot_tools**
+[white]**$ git clone https://chromium.googlesource.com/chromium/tools/depot_tools.git**
 ----
 
 Add 'depot_tools' to the 'end' of your PATH (you will probably want to put this
@@ -136,8 +136,8 @@
 
 [subs="quotes,attributes"]
 ----
-[white]**$ git config --global user.name ``John Doe''**
-[white]**$ git config --global user.email ``jdoe@email.com''**
+[white]**$ git config --global user.name "John Doe"**
+[white]**$ git config --global user.email "jdoe@email.com"**
 [white]**$ git config --global core.autocrlf false**
 [white]**$ git config --global core.filemode false**
 [white]**$** # and for fun!
diff --git a/man/src/git-auto-svn.txt b/man/src/git-auto-svn.txt
new file mode 100644
index 0000000..9aa780d
--- /dev/null
+++ b/man/src/git-auto-svn.txt
@@ -0,0 +1,69 @@
+git-auto-svn(1)
+===============
+
+NAME
+----
+git-auto-svn -
+include::_git-auto-svn_desc.helper.txt[]
+
+SYNOPSIS
+--------
+[verse]
+'git auto-svn'
+
+DESCRIPTION
+-----------
+
+`git auto-svn` automatically sets up git-svn metadata and runs git-svn fetch for
+repos that are homed in SVN but mirrored to Git (such as depot_tools itself).
+
+It determines the metadata to use by inspecting the `git-svn-id` footer of the
+HEAD of the remote upstream ref (by default, `origin/master`). `git-svn-id`
+footers look like this:
+
+  git-svn-id: svn://some.host.org/repo/path/to/a/sub/folder@123456 0039d316-1c4b-4281-b951-d872f2087c98
+
+`git auto-svn` extracts the repository url
+(svn://some.host.org/repo/path/to/a/sub/folder) from the `git-svn-id`, and
+splits it into the root repository (svn://some.host.org/repo) and the path
+within that repository (/path/to/a/sub/folder).
+
+It then sets up the following stanza in .git/config:
+
+  [svn-remote "svn"]
+      url = svn://some.host.org/repo
+      fetch = path/to/a/sub/folder:refs/remotes/origin/master
+
+Finally, it runs `git svn fetch` to pull in the data from the svn remote.
+
+CONFIGURATION VARIABLES
+-----------------------
+
+svn-remote.svn.url
+~~~~~~~~~~~~~~~~~~
+
+This is the url of the root of the remote svn repository.
+
+svn-remote.svn.fetch
+~~~~~~~~~~~~~~~~~~~~
+
+This looks like a git refspec, but maps a subdirectory of the svn repository
+to a single ref in the git remote.
+
+EXAMPLE
+-------
+
+  git clone https://chromium.googlesource.com/chromium/tools/depot_tools
+  cd depot_tools
+  git auto-svn
+
+This results in the following stanza in `depot_tools/.git/config`:
+
+  [svn-remote "svn"]
+      url = svn://svn.chromium.org/chrome
+      fetch = trunk/tools/depot_tools:refs/remotes/origin/master
+
+
+include::_footer.txt[]
+
+// vim: ft=asciidoc:
diff --git a/man/src/git-map-branches.txt b/man/src/git-map-branches.txt
index a925e9c..1bae3fb 100644
--- a/man/src/git-map-branches.txt
+++ b/man/src/git-map-branches.txt
@@ -21,6 +21,7 @@
    (`*`) after the name.
 * Local branches are [green]#green#.
 * Remote branches are [red]#red# (usually, the root of all other branches).
+* 'branch-heads' branches are [blue]#blue#.
 * `{NO UPSTREAM}` is a special placeholder in [fuchsia]#magenta#.
 ** Branches which have this as their parent are usually misconfigured, and
    should be assigned a parent by checking out the branch and running git branch
diff --git a/man/src/make_docs.sh b/man/src/make_docs.sh
index 73d76e2..d7af87d 100755
--- a/man/src/make_docs.sh
+++ b/man/src/make_docs.sh
@@ -23,6 +23,7 @@
 }
 
 ensure_in_path xmlto
+ensure_in_path hg
 
 DFLT_CATALOG_PATH="/usr/local/etc/xml/catalog"
 if [[ ! $XML_CATALOG_FILES && -f "$DFLT_CATALOG_PATH" ]]
diff --git a/my_activity.py b/my_activity.py
index a64f3c8..e7af09e 100755
--- a/my_activity.py
+++ b/my_activity.py
@@ -13,6 +13,9 @@
   - my_activity.py -b 4/5/12 -e 6/7/12 for stats between 4/5/12 and 6/7/12.
 """
 
+# TODO(vadimsh): This script knows too much about ClientLogin and cookies. It
+# will stop to work on ~20 Apr 2015.
+
 # These services typically only provide a created time and a last modified time
 # for each item for general queries. This is not enough to determine if there
 # was activity in a given time period. So, we first query for all things created
@@ -33,10 +36,15 @@
 import urllib
 import urllib2
 
+import auth
+import fix_encoding
 import gerrit_util
 import rietveld
 from third_party import upload
 
+import auth
+from third_party import httplib2
+
 try:
   from dateutil.relativedelta import relativedelta # pylint: disable=F0401
 except ImportError:
@@ -76,6 +84,13 @@
     'requires_auth': False,
     'email_domain': 'chromium.org',
   },
+  {
+    'url': 'webrtc-codereview.appspot.com',
+    'shorturl': 'go/rtcrev',
+    'supports_owner_modified_query': True,
+    'requires_auth': False,
+    'email_domain': 'webrtc.org',
+  },
 ]
 
 gerrit_instances = [
@@ -87,18 +102,14 @@
     'url': 'chrome-internal-review.googlesource.com',
     'shorturl': 'crosreview.com/i',
   },
-  {
-    'host': 'gerrit.chromium.org',
-    'port': 29418,
-  },
-  {
-    'host': 'gerrit-int.chromium.org',
-    'port': 29419,
-  },
 ]
 
 google_code_projects = [
   {
+    'name': 'brillo',
+    'shorturl': 'brbug.com',
+  },
+  {
     'name': 'chromium',
     'shorturl': 'crbug.com',
   },
@@ -120,36 +131,6 @@
   },
 ]
 
-# Uses ClientLogin to authenticate the user for Google Code issue trackers.
-def get_auth_token(email):
-  # KeyringCreds will use the system keyring on the first try, and prompt for
-  # a password on the next ones.
-  creds = upload.KeyringCreds('code.google.com', 'code.google.com', email)
-  for _ in xrange(3):
-    email, password = creds.GetUserCredentials()
-    url = 'https://www.google.com/accounts/ClientLogin'
-    data = urllib.urlencode({
-        'Email': email,
-        'Passwd': password,
-        'service': 'code',
-        'source': 'chrome-my-activity',
-        'accountType': 'GOOGLE',
-    })
-    req = urllib2.Request(url, data=data, headers={'Accept': 'text/plain'})
-    try:
-      response = urllib2.urlopen(req)
-      response_body = response.read()
-      response_dict = dict(x.split('=')
-                           for x in response_body.split('\n') if x)
-      return response_dict['Auth']
-    except urllib2.HTTPError, e:
-      print e
-
-  print 'Unable to authenticate to code.google.com.'
-  print 'Some issues may be missing.'
-  return None
-
-
 def username(email):
   """Keeps the username of an email address."""
   return email and email.split('@', 1)[0]
@@ -218,31 +199,12 @@
   # Check the codereview cookie jar to determine which Rietveld instances to
   # authenticate to.
   def check_cookies(self):
-    cookie_file = os.path.expanduser('~/.codereview_upload_cookies')
-    if not os.path.exists(cookie_file):
-      print 'No Rietveld cookie file found.'
-      cookie_jar = []
-    else:
-      cookie_jar = cookielib.MozillaCookieJar(cookie_file)
-      try:
-        cookie_jar.load()
-        print 'Found cookie file: %s' % cookie_file
-      except (cookielib.LoadError, IOError):
-        print 'Error loading Rietveld cookie file: %s' % cookie_file
-        cookie_jar = []
-
     filtered_instances = []
 
     def has_cookie(instance):
-      for cookie in cookie_jar:
-        if cookie.name == 'SACSID' and cookie.domain == instance['url']:
-          return True
-      if self.options.auth:
-        return get_yes_or_no('No cookie found for %s. Authorize for this '
-                             'instance? (may require application-specific '
-                             'password)' % instance['url'])
-      filtered_instances.append(instance)
-      return False
+      auth_config = auth.extract_auth_config_from_options(self.options)
+      a = auth.get_authenticator_for_host(instance['url'], auth_config)
+      return a.has_cached_credentials()
 
     for instance in rietveld_instances:
       instance['auth'] = has_cookie(instance)
@@ -260,7 +222,8 @@
 
 
     email = None if instance['auth'] else ''
-    remote = rietveld.Rietveld('https://' + instance['url'], email, None)
+    auth_config = auth.extract_auth_config_from_options(self.options)
+    remote = rietveld.Rietveld('https://' + instance['url'], auth_config, email)
 
     # See def search() in rietveld.py to see all the filters you can use.
     query_modified_after = None
@@ -448,108 +411,52 @@
       })
     return ret
 
-  def google_code_issue_search(self, instance):
-    time_format = '%Y-%m-%dT%T'
-    # See http://code.google.com/p/support/wiki/IssueTrackerAPI
-    # q=<owner>@chromium.org does a full text search for <owner>@chromium.org.
-    # This will accept the issue if owner is the owner or in the cc list. Might
-    # have some false positives, though.
+  def project_hosting_issue_search(self, instance):
+    auth_config = auth.extract_auth_config_from_options(self.options)
+    authenticator = auth.get_authenticator_for_host(
+        "code.google.com", auth_config)
+    http = authenticator.authorize(httplib2.Http())
+    url = "https://www.googleapis.com/projecthosting/v2/projects/%s/issues" % (
+       instance["name"])
+    epoch = datetime.utcfromtimestamp(0)
+    user_str = '%s@chromium.org' % self.user
 
-    # Don't filter normally on modified_before because it can filter out things
-    # that were modified in the time period and then modified again after it.
-    gcode_url = ('https://code.google.com/feeds/issues/p/%s/issues/full' %
-                 instance['name'])
-
-    gcode_data = urllib.urlencode({
-        'alt': 'json',
-        'max-results': '100000',
-        'q': '%s' % self.user,
-        'published-max': self.modified_before.strftime(time_format),
-        'updated-min': self.modified_after.strftime(time_format),
+    query_data = urllib.urlencode({
+      'maxResults': 10000,
+      'q': user_str,
+      'publishedMax': '%d' % (self.modified_before - epoch).total_seconds(),
+      'updatedMin': '%d' % (self.modified_after - epoch).total_seconds(),
     })
-
-    opener = urllib2.build_opener()
-    if self.google_code_auth_token:
-      opener.addheaders = [('Authorization', 'GoogleLogin auth=%s' %
-                            self.google_code_auth_token)]
-    gcode_json = None
-    try:
-      gcode_get = opener.open(gcode_url + '?' + gcode_data)
-      gcode_json = json.load(gcode_get)
-      gcode_get.close()
-    except urllib2.HTTPError, _:
-      print 'Unable to access ' + instance['name'] + ' issue tracker.'
-
-    if not gcode_json or 'entry' not in gcode_json['feed']:
+    url = url + '?' + query_data
+    _, body = http.request(url)
+    content = json.loads(body)
+    if not content:
+      print "Unable to parse %s response from projecthosting." % (
+          instance["name"])
       return []
 
-    issues = gcode_json['feed']['entry']
-    issues = map(partial(self.process_google_code_issue, instance), issues)
-    issues = filter(self.filter_issue, issues)
-    issues = sorted(issues, key=lambda i: i['modified'], reverse=True)
+    issues = []
+    if 'items' in content:
+      items = content['items']
+      for item in items:
+        issue = {
+          "header": item["title"],
+          "created": item["published"],
+          "modified": item["updated"],
+          "author": item["author"]["name"],
+          "url": "https://code.google.com/p/%s/issues/detail?id=%s" % (
+              instance["name"], item["id"]),
+          "comments": []
+        }
+        if 'owner' in item:
+          issue['owner'] = item['owner']['name']
+        else:
+          issue['owner'] = 'None'
+        if issue['owner'] == user_str or issue['author'] == user_str:
+          issues.append(issue)
+
     return issues
 
-  def process_google_code_issue(self, project, issue):
-    ret = {}
-    ret['created'] = datetime_from_google_code(issue['published']['$t'])
-    ret['modified'] = datetime_from_google_code(issue['updated']['$t'])
-
-    ret['owner'] = ''
-    if 'issues$owner' in issue:
-      ret['owner'] = issue['issues$owner']['issues$username']['$t']
-    ret['author'] = issue['author'][0]['name']['$t']
-
-    if 'shorturl' in project:
-      issue_id = issue['id']['$t']
-      issue_id = issue_id[issue_id.rfind('/') + 1:]
-      ret['url'] = 'http://%s/%d' % (project['shorturl'], int(issue_id))
-    else:
-      issue_url = issue['link'][1]
-      if issue_url['rel'] != 'alternate':
-        raise RuntimeError
-      ret['url'] = issue_url['href']
-    ret['header'] = issue['title']['$t']
-
-    ret['replies'] = self.get_google_code_issue_replies(issue)
-    return ret
-
-  def get_google_code_issue_replies(self, issue):
-    """Get all the comments on the issue."""
-    replies_url = issue['link'][0]
-    if replies_url['rel'] != 'replies':
-      raise RuntimeError
-
-    replies_data = urllib.urlencode({
-        'alt': 'json',
-        'fields': 'entry(published,author,content)',
-    })
-
-    opener = urllib2.build_opener()
-    opener.addheaders = [('Authorization', 'GoogleLogin auth=%s' %
-                          self.google_code_auth_token)]
-    try:
-      replies_get = opener.open(replies_url['href'] + '?' + replies_data)
-    except urllib2.HTTPError, _:
-      return []
-
-    replies_json = json.load(replies_get)
-    replies_get.close()
-    return self.process_google_code_issue_replies(replies_json)
-
-  @staticmethod
-  def process_google_code_issue_replies(replies):
-    if 'entry' not in replies['feed']:
-      return []
-
-    ret = []
-    for entry in replies['feed']['entry']:
-      e = {}
-      e['created'] = datetime_from_google_code(entry['published']['$t'])
-      e['content'] = entry['content']['$t']
-      e['author'] = entry['author'][0]['name']['$t']
-      ret.append(e)
-    return ret
-
   def print_heading(self, heading):
     print
     print self.options.output_format_heading.format(heading=heading)
@@ -599,7 +506,7 @@
       values = dict(required_values.items() + optional_values.items())
     else:
       values = required_values
-    print output_format.format(**values)
+    print output_format.format(**values).encode(sys.getdefaultencoding())
 
 
   def filter_issue(self, issue, should_filter_by_user=True):
@@ -635,10 +542,6 @@
     # required.
     pass
 
-  def auth_for_issues(self):
-    self.google_code_auth_token = (
-        get_auth_token(self.options.local_user + '@chromium.org'))
-
   def get_changes(self):
     for instance in rietveld_instances:
       self.changes += self.rietveld_search(instance, owner=self.user)
@@ -669,7 +572,7 @@
 
   def get_issues(self):
     for project in google_code_projects:
-      self.issues += self.google_code_issue_search(project)
+      self.issues += self.project_hosting_issue_search(project)
 
   def print_issues(self):
     if self.issues:
@@ -694,10 +597,10 @@
       help='Filter on user, default=%default')
   parser.add_option(
       '-b', '--begin', metavar='<date>',
-      help='Filter issues created after the date')
+      help='Filter issues created after the date (mm/dd/yy)')
   parser.add_option(
       '-e', '--end', metavar='<date>',
-      help='Filter issues created before the date')
+      help='Filter issues created before the date (mm/dd/yy)')
   quarter_begin, quarter_end = get_quarter_of(datetime.today() -
                                               relativedelta(months=2))
   parser.add_option(
@@ -709,10 +612,10 @@
       help='Use this year\'s dates')
   parser.add_option(
       '-w', '--week_of', metavar='<date>',
-      help='Show issues for week of the date')
+      help='Show issues for week of the date (mm/dd/yy)')
   parser.add_option(
-      '-W', '--last_week', action='store_true',
-      help='Show last week\'s issues')
+      '-W', '--last_week', action='count',
+      help='Show last week\'s issues. Use more times for more weeks.')
   parser.add_option(
       '-a', '--auth',
       action='store_true',
@@ -773,6 +676,7 @@
       help='Use markdown-friendly output (overrides --output-format '
            'and --output-format-heading)')
   parser.add_option_group(output_format_group)
+  auth.add_auth_options(parser)
 
   # Remove description formatting
   parser.format_description = (
@@ -795,7 +699,8 @@
     elif options.week_of:
       begin, end = (get_week_of(datetime.strptime(options.week_of, '%m/%d/%y')))
     elif options.last_week:
-      begin, end = (get_week_of(datetime.today() - timedelta(days=7)))
+      begin, end = (get_week_of(datetime.today() -
+                                timedelta(days=1 + 7 * options.last_week)))
     else:
       begin, end = (get_week_of(datetime.today() - timedelta(days=1)))
   else:
@@ -826,17 +731,18 @@
     my_activity.auth_for_changes()
   if options.reviews:
     my_activity.auth_for_reviews()
-  if options.issues:
-    my_activity.auth_for_issues()
 
   print 'Looking up activity.....'
 
-  if options.changes:
-    my_activity.get_changes()
-  if options.reviews:
-    my_activity.get_reviews()
-  if options.issues:
-    my_activity.get_issues()
+  try:
+    if options.changes:
+      my_activity.get_changes()
+    if options.reviews:
+      my_activity.get_reviews()
+    if options.issues:
+      my_activity.get_issues()
+  except auth.AuthenticationError as e:
+    print "auth.AuthenticationError: %s" % e
 
   print '\n\n\n'
 
@@ -847,4 +753,11 @@
 
 
 if __name__ == '__main__':
-  sys.exit(main())
+  # Fix encoding to support non-ascii issue titles.
+  fix_encoding.fix_encoding()
+
+  try:
+    sys.exit(main())
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/my_reviews.py b/my_reviews.py
index 7d81ebc..a26aa02 100755
--- a/my_reviews.py
+++ b/my_reviews.py
@@ -14,6 +14,7 @@
 import os
 import sys
 
+import auth
 import rietveld
 
 
@@ -117,7 +118,7 @@
           self.not_requested * 100. / self.actually_reviewed)
     assert bool(first_day) == bool(last_day)
     if first_day and last_day:
-      assert first_day < last_day
+      assert first_day <= last_day
       self.days = (to_datetime(last_day) - to_datetime(first_day)).days + 1
       assert self.days > 0
 
@@ -214,9 +215,10 @@
       ', '.join(sorted(issue['reviewers'])))
 
 
-def print_reviews(reviewer, created_after, created_before, instance_url):
+def print_reviews(
+    reviewer, created_after, created_before, instance_url, auth_config):
   """Prints issues |reviewer| received and potentially reviewed."""
-  remote = rietveld.Rietveld(instance_url, None, None)
+  remote = rietveld.Rietveld(instance_url, auth_config)
 
   # The stats we gather. Feel free to send me a CL to get more stats.
   stats = Stats()
@@ -268,8 +270,9 @@
       to_time(stats.median_latency))
 
 
-def print_count(reviewer, created_after, created_before, instance_url):
-  remote = rietveld.Rietveld(instance_url, None, None)
+def print_count(
+    reviewer, created_after, created_before, instance_url, auth_config):
+  remote = rietveld.Rietveld(instance_url, auth_config)
   print len(list(remote.search(
       reviewer=reviewer,
       created_after=created_after,
@@ -306,13 +309,18 @@
   rietveld.upload.verbosity = 0
   today = datetime.date.today()
   begin, end = get_previous_quarter(today)
-  parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
+  default_email = os.environ.get('EMAIL_ADDRESS')
+  if not default_email:
+    user = os.environ.get('USER')
+    if user:
+      default_email = user + '@chromium.org'
+
+  parser = optparse.OptionParser(description=__doc__)
   parser.add_option(
       '--count', action='store_true',
       help='Just count instead of printing individual issues')
   parser.add_option(
-      '-r', '--reviewer', metavar='<email>',
-      default=os.environ.get('EMAIL_ADDRESS'),
+      '-r', '--reviewer', metavar='<email>', default=default_email,
       help='Filter on issue reviewer, default=%default')
   parser.add_option(
       '-b', '--begin', metavar='<date>',
@@ -322,26 +330,31 @@
       help='Filter issues created before the date')
   parser.add_option(
       '-Q', '--last_quarter', action='store_true',
-      help='Use last quarter\'s dates, e.g. %s to %s' % (
-        begin, end))
+      help='Use last quarter\'s dates, e.g. %s to %s' % (begin, end))
   parser.add_option(
       '-i', '--instance_url', metavar='<host>',
       default='http://codereview.chromium.org',
       help='Host to use, default is %default')
+  auth.add_auth_options(parser)
   # Remove description formatting
   parser.format_description = (
       lambda _: parser.description)  # pylint: disable=E1101
   options, args = parser.parse_args()
+  auth_config = auth.extract_auth_config_from_options(options)
   if args:
     parser.error('Args unsupported')
-  if not options.reviewer:
-    parser.error('$EMAIL_ADDRESS is not set, please use -r')
+  if options.reviewer is None:
+    parser.error('$EMAIL_ADDRESS and $USER are not set, please use -r')
+
   print >> sys.stderr, 'Searching for reviews by %s' % options.reviewer
   if options.last_quarter:
     options.begin = begin
     options.end = end
     print >> sys.stderr, 'Using range %s to %s' % (
         options.begin, options.end)
+  else:
+    if options.begin is None or options.end is None:
+      parser.error('Please specify either --last_quarter or --begin and --end')
 
   # Validate dates.
   try:
@@ -355,15 +368,21 @@
         options.reviewer,
         options.begin,
         options.end,
-        options.instance_url)
+        options.instance_url,
+        auth_config)
   else:
     print_reviews(
         options.reviewer,
         options.begin,
         options.end,
-        options.instance_url)
+        options.instance_url,
+        auth_config)
   return 0
 
 
 if __name__ == '__main__':
-  sys.exit(main())
+  try:
+    sys.exit(main())
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/ninja b/ninja
index 3812b33..a337045 100755
--- a/ninja
+++ b/ninja
@@ -12,8 +12,8 @@
 No prebuilt ninja binary was found for this system.
 Try building your own binary by doing:
   cd ~
-  git clone https://github.com/martine/ninja.git -b v1.5.1
-  ./ninja/bootstrap.py
+  git clone https://github.com/martine/ninja.git -b v1.6.0
+  cd ninja && ./configure.py --bootstrap
 Then add ~/ninja/ to your PATH.
 EOF
 }
diff --git a/ninja-linux32 b/ninja-linux32
index 6a8ccf1..5880fae 100755
--- a/ninja-linux32
+++ b/ninja-linux32
Binary files differ
diff --git a/ninja-mac b/ninja-mac
index 79a1593..64fcacc 100755
--- a/ninja-mac
+++ b/ninja-mac
Binary files differ
diff --git a/owners.py b/owners.py
index 30646ff..563675a 100644
--- a/owners.py
+++ b/owners.py
@@ -18,6 +18,7 @@
           | comment
 
 directive := "set noparent"
+          |  "file:" glob
           |  email_address
           |  "*"
 
@@ -45,6 +46,11 @@
 for the glob, and only the "per-file" owners are used for files matching that
 glob.
 
+If the "file:" directive is used, the referred to OWNERS file will be parsed and
+considered when determining the valid set of OWNERS. If the filename starts with
+"//" it is relative to the root of the repository, otherwise it is relative to
+the current file
+
 Examples for all of these combinations can be found in tests/owners_unittest.py.
 """
 
@@ -118,6 +124,9 @@
     # (This is implicitly true for the root directory).
     self.stop_looking = set([''])
 
+    # Set of files which have already been read.
+    self.read_files = set()
+
   def reviewers_for(self, files, author):
     """Returns a suggested set of reviewers that will cover the files.
 
@@ -189,16 +198,23 @@
     for f in files:
       dirpath = self.os_path.dirname(f)
       while not dirpath in self.owners_for:
-        self._read_owners_in_dir(dirpath)
+        self._read_owners(self.os_path.join(dirpath, 'OWNERS'))
         if self._stop_looking(dirpath):
           break
         dirpath = self.os_path.dirname(dirpath)
 
-  def _read_owners_in_dir(self, dirpath):
-    owners_path = self.os_path.join(self.root, dirpath, 'OWNERS')
+  def _read_owners(self, path):
+    owners_path = self.os_path.join(self.root, path)
     if not self.os_path.exists(owners_path):
       return
+
+    if owners_path in self.read_files:
+      return
+
+    self.read_files.add(owners_path)
+
     comment = []
+    dirpath = self.os_path.dirname(path)
     in_comment = False
     lineno = 0
     for line in self.fopen(owners_path):
@@ -244,6 +260,23 @@
                  line_type, owners_path, lineno, comment):
     if directive == 'set noparent':
       self.stop_looking.add(path)
+    elif directive.startswith('file:'):
+      owners_file = self._resolve_include(directive[5:], owners_path)
+      if not owners_file:
+        raise SyntaxErrorInOwnersFile(owners_path, lineno,
+            ('%s does not refer to an existing file.' % directive[5:]))
+
+      self._read_owners(owners_file)
+
+      dirpath = self.os_path.dirname(owners_file)
+      for key in self.owned_by:
+        if not dirpath in self.owned_by[key]:
+          continue
+        self.owned_by[key].add(path)
+
+      if dirpath in self.owners_for:
+        self.owners_for.setdefault(path, set()).update(self.owners_for[dirpath])
+
     elif self.email_regexp.match(directive) or directive == EVERYONE:
       self.comments.setdefault(directive, {})
       self.comments[directive][path] = comment
@@ -251,9 +284,23 @@
       self.owners_for.setdefault(path, set()).add(directive)
     else:
       raise SyntaxErrorInOwnersFile(owners_path, lineno,
-          ('%s is not a "set" directive, "*", '
+          ('%s is not a "set" directive, file include, "*", '
            'or an email address: "%s"' % (line_type, directive)))
 
+  def _resolve_include(self, path, start):
+    if path.startswith('//'):
+      include_path = path[2:]
+    else:
+      assert start.startswith(self.root)
+      start = self.os_path.dirname(start[len(self.root):])
+      include_path = self.os_path.join(start, path)
+
+    owners_path = self.os_path.join(self.root, include_path)
+    if not self.os_path.exists(owners_path):
+      return None
+
+    return include_path
+
   def _covering_set_of_owners_for(self, files, author):
     dirs_remaining = set(self._enclosing_dir_with_owners(f) for f in files)
     all_possible_owners = self.all_possible_owners(dirs_remaining, author)
diff --git a/patch.py b/patch.py
index 9b65ce1..1bc608c 100644
--- a/patch.py
+++ b/patch.py
@@ -50,14 +50,19 @@
   def _process_filename(filename):
     filename = filename.replace('\\', '/')
     # Blacklist a few characters for simplicity.
-    for i in ('%', '$', '..', '\'', '"'):
+    for i in ('$', '..', '\'', '"', '<', '>', ':', '|', '?', '*'):
       if i in filename:
         raise UnsupportedPatchFormat(
             filename, 'Can\'t use \'%s\' in filename.' % i)
-    for i in ('/', 'CON', 'COM'):
-      if filename.startswith(i):
-        raise UnsupportedPatchFormat(
-            filename, 'Filename can\'t start with \'%s\'.' % i)
+    if filename.startswith('/'):
+      raise UnsupportedPatchFormat(
+          filename, 'Filename can\'t start with \'/\'.')
+    if filename == 'CON':
+      raise UnsupportedPatchFormat(
+          filename, 'Filename can\'t be \'CON\'.')
+    if re.match('COM\d', filename):
+      raise UnsupportedPatchFormat(
+          filename, 'Filename can\'t be \'%s\'.' % filename)
     return filename
 
   def set_relpath(self, relpath):
diff --git a/presubmit_canned_checks.py b/presubmit_canned_checks.py
index 011b982..5ed8656 100644
--- a/presubmit_canned_checks.py
+++ b/presubmit_canned_checks.py
@@ -7,6 +7,27 @@
 import os as _os
 _HERE = _os.path.dirname(_os.path.abspath(__file__))
 
+# Justifications for each filter:
+#
+# - build/include       : Too many; fix in the future.
+# - build/include_order : Not happening; #ifdefed includes.
+# - build/namespace     : I'm surprised by how often we violate this rule.
+# - readability/casting : Mistakes a whole bunch of function pointer.
+# - runtime/int         : Can be fixed long term; volume of errors too high
+# - runtime/virtual     : Broken now, but can be fixed in the future?
+# - whitespace/braces   : We have a lot of explicit scoping in chrome code.
+# - readability/inheritance : Temporary, while the OVERRIDE and FINAL fixup
+#                             is in progress.
+DEFAULT_LINT_FILTERS = [
+  '-build/include',
+  '-build/include_order',
+  '-build/namespace',
+  '-readability/casting',
+  '-runtime/int',
+  '-runtime/virtual',
+  '-whitespace/braces',
+  '-readability/inheritance'
+]
 
 ### Description checks
 
@@ -90,7 +111,8 @@
   return []
 
 
-def CheckChangeLintsClean(input_api, output_api, source_file_filter=None):
+def CheckChangeLintsClean(input_api, output_api, source_file_filter=None,
+                          lint_filters=None, verbose_level=None):
   """Checks that all '.cc' and '.h' files pass cpplint.py."""
   _RE_IS_TEST = input_api.re.compile(r'.*tests?.(cc|h)$')
   result = []
@@ -100,18 +122,8 @@
   # pylint: disable=W0212
   cpplint._cpplint_state.ResetErrorCounts()
 
-  # Justifications for each filter:
-  #
-  # - build/include       : Too many; fix in the future.
-  # - build/include_order : Not happening; #ifdefed includes.
-  # - build/namespace     : I'm surprised by how often we violate this rule.
-  # - readability/casting : Mistakes a whole bunch of function pointer.
-  # - runtime/int         : Can be fixed long term; volume of errors too high
-  # - runtime/virtual     : Broken now, but can be fixed in the future?
-  # - whitespace/braces   : We have a lot of explicit scoping in chrome code.
-  cpplint._SetFilters('-build/include,-build/include_order,-build/namespace,'
-                      '-readability/casting,-runtime/int,-runtime/virtual,'
-                      '-whitespace/braces')
+  lint_filters = lint_filters or DEFAULT_LINT_FILTERS
+  cpplint._SetFilters(','.join(lint_filters))
 
   # We currently are more strict with normal code than unit tests; 4 and 5 are
   # the verbosity level that would normally be passed to cpplint.py through
@@ -124,7 +136,8 @@
     else:
       level = 4
 
-    cpplint.ProcessFile(file_name, level)
+    verbose_level = verbose_level or level
+    cpplint.ProcessFile(file_name, verbose_level)
 
   if cpplint._cpplint_state.error_count > 0:
     if input_api.is_committing:
@@ -332,11 +345,14 @@
   OBJC_FILE_EXTS = ('h', 'm', 'mm')
   OBJC_EXCEPTIONS = ('#define', '#endif', '#if', '#import', '#include',
                      '#pragma')
+  PY_FILE_EXTS = ('py')
+  PY_EXCEPTIONS = ('import', 'from')
 
   LANGUAGE_EXCEPTIONS = [
     (CPP_FILE_EXTS, CPP_EXCEPTIONS),
     (JAVA_FILE_EXTS, JAVA_EXCEPTIONS),
     (OBJC_FILE_EXTS, OBJC_EXCEPTIONS),
+    (PY_FILE_EXTS, PY_EXCEPTIONS),
   ]
 
   def no_long_lines(file_extension, line):
@@ -355,15 +371,22 @@
     if line_len <= file_maxlen:
       return True
 
-    if line_len > extra_maxlen:
-      return False
-
+    # Allow long URLs of any length.
     if any((url in line) for url in ('file://', 'http://', 'https://')):
       return True
 
+    if line_len > extra_maxlen:
+      return False
+
     if 'url(' in line and file_extension == 'css':
       return True
 
+    if '<include' in line and file_extension in ('css', 'html', 'js'):
+      return True
+
+    if 'pylint: disable=line-too-long' in line and file_extension == 'py':
+      return True
+
     return input_api.re.match(
         r'.*[A-Za-z][A-Za-z_0-9]{%d,}.*' % long_symbol, line)
 
@@ -697,7 +720,7 @@
 
 
 def GetPylint(input_api, output_api, white_list=None, black_list=None,
-              disabled_warnings=None, extra_paths_list=None):
+              disabled_warnings=None, extra_paths_list=None, pylintrc=None):
   """Run pylint on python files.
 
   The default white_list enforces looking only at *.py files.
@@ -732,7 +755,11 @@
     input_api.logging.info('Skipping pylint: no matching changes.')
     return []
 
-  extra_args = ['--rcfile=%s' % input_api.os_path.join(_HERE, 'pylintrc')]
+  if pylintrc is not None:
+    pylintrc = input_api.os_path.join(input_api.PresubmitLocalPath(), pylintrc)
+  else:
+    pylintrc = input_api.os_path.join(_HERE, 'pylintrc')
+  extra_args = ['--rcfile=%s' % pylintrc]
   if disabled_warnings:
     extra_args.extend(['-d', ','.join(disabled_warnings)])
 
@@ -750,21 +777,30 @@
   env['PYTHONPATH'] = input_api.os_path.pathsep.join(
       extra_paths_list + sys.path).encode('utf8')
 
-  def GetPylintCmd(files):
+  def GetPylintCmd(flist, extra, parallel):
     # Windows needs help running python files so we explicitly specify
     # the interpreter to use. It also has limitations on the size of
     # the command-line, so we pass arguments via a pipe.
-    if len(files) == 1:
-      description = files[0]
+    cmd = [input_api.python_executable,
+           input_api.os_path.join(_HERE, 'third_party', 'pylint.py'),
+           '--args-on-stdin']
+    if len(flist) == 1:
+      description = flist[0]
     else:
-      description = '%s files' % len(files)
+      description = '%s files' % len(flist)
+
+    args = extra_args[:]
+    if extra:
+      args.extend(extra)
+      description += ' using %s' % (extra,)
+    if parallel:
+      args.append('--jobs=%s' % input_api.cpu_count)
+      description += ' on %d cores' % input_api.cpu_count
 
     return input_api.Command(
         name='Pylint (%s)' % description,
-        cmd=[input_api.python_executable,
-             input_api.os_path.join(_HERE, 'third_party', 'pylint.py'),
-             '--args-on-stdin'],
-        kwargs={'env': env, 'stdin': '\n'.join(files + extra_args)},
+        cmd=cmd,
+        kwargs={'env': env, 'stdin': '\n'.join(args + flist)},
         message=error_type)
 
   # Always run pylint and pass it all the py files at once.
@@ -776,9 +812,20 @@
   # a quick local edit to diagnose pylint issues more
   # easily.
   if True:
-    return [GetPylintCmd(files)]
+    # pylint's cycle detection doesn't work in parallel, so spawn a second,
+    # single-threaded job for just that check.
+
+    # Some PRESUBMITs explicitly mention cycle detection.
+    if not any('R0401' in a or 'cyclic-import' in a for a in extra_args):
+      return [
+        GetPylintCmd(files, ["--disable=cyclic-import"], True),
+        GetPylintCmd(files, ["--disable=all", "--enable=cyclic-import"], False)
+      ]
+    else:
+      return [ GetPylintCmd(files, [], True) ]
+
   else:
-    return map(lambda x: GetPylintCmd([x]), files)
+    return map(lambda x: GetPylintCmd([x], [], 1), files)
 
 
 def RunPylint(input_api, *args, **kwargs):
@@ -832,13 +879,16 @@
   return []
 
 
-def CheckOwners(input_api, output_api, source_file_filter=None,
-                author_counts_as_owner=True):
+def CheckOwners(input_api, output_api, source_file_filter=None):
   if input_api.is_committing:
     if input_api.tbr:
       return [output_api.PresubmitNotifyResult(
           '--tbr was specified, skipping OWNERS check')]
-    if not input_api.change.issue:
+    if input_api.change.issue:
+      if _GetRietveldIssueProps(input_api, None).get('cq_dry_run', False):
+        return [output_api.PresubmitNotifyResult(
+            'This is a CQ dry run, skipping OWNERS check')]
+    else:
       return [output_api.PresubmitError("OWNERS check failed: this change has "
           "no Rietveld issue number, so we can't check it for approvals.")]
     needed = 'LGTM from an OWNER'
@@ -858,7 +908,7 @@
 
   owner_email = owner_email or input_api.change.author_email
 
-  if author_counts_as_owner and owner_email:
+  if owner_email:
     reviewers_plus_owner = set([owner_email]).union(reviewers)
     missing_files = owners_db.files_not_covered_by(affected_files,
         reviewers_plus_owner)
@@ -933,7 +983,8 @@
 def _CheckConstNSObject(input_api, output_api, source_file_filter):
   """Checks to make sure no objective-c files have |const NSSomeClass*|."""
   pattern = input_api.re.compile(
-      r'const\s+NS(?!(Point|Range|Rect|Size)\s*\*)\w*\s*\*')
+    r'(?<!reinterpret_cast<)'
+    r'const\s+NS(?!(Point|Range|Rect|Size)\s*\*)\w*\s*\*')
 
   def objective_c_filter(f):
     return (source_file_filter(f) and
@@ -957,26 +1008,11 @@
 
 
 def CheckSingletonInHeaders(input_api, output_api, source_file_filter=None):
-  """Checks to make sure no header files have |Singleton<|."""
-  pattern = input_api.re.compile(r'(?<!class\s)Singleton\s*<')
-  files = []
-  for f in input_api.AffectedSourceFiles(source_file_filter):
-    if (f.LocalPath().endswith('.h') or f.LocalPath().endswith('.hxx') or
-        f.LocalPath().endswith('.hpp') or f.LocalPath().endswith('.inl')):
-      contents = input_api.ReadFile(f)
-      for line in contents.splitlines(False):
-        if (not input_api.re.match(r'//', line) and # Strip C++ comment.
-            pattern.search(line)):
-          files.append(f)
-          break
-
-  if files:
-    return [ output_api.PresubmitError(
-        'Found Singleton<T> in the following header files.\n' +
-        'Please move them to an appropriate source file so that the ' +
-        'template gets instantiated in a single compilation unit.',
-        files) ]
-  return []
+  """Deprecated, must be removed."""
+  return [
+    output_api.PresubmitNotifyResult(
+        'CheckSingletonInHeaders is deprecated, please remove it.')
+  ]
 
 
 def PanProjectChecks(input_api, output_api,
@@ -1064,9 +1100,6 @@
   snapshot("checking nsobjects")
   results.extend(_CheckConstNSObject(
       input_api, output_api, source_file_filter=sources))
-  snapshot("checking singletons")
-  results.extend(CheckSingletonInHeaders(
-      input_api, output_api, source_file_filter=sources))
 
   # The following checks are only done on commit, since the commit bot will
   # auto-fix most of these.
@@ -1100,8 +1133,32 @@
   cmd = ['cl', 'format', '--dry-run', input_api.PresubmitLocalPath()]
   code, _ = git_cl.RunGitWithCode(cmd, suppress_stderr=True)
   if code == 2:
+    short_path = input_api.basename(input_api.PresubmitLocalPath())
+    full_path = input_api.os_path.relpath(input_api.PresubmitLocalPath(),
+                                          input_api.change.RepositoryRoot())
     return [output_api.PresubmitPromptWarning(
-      'Your patch is not formatted, please run git cl format.')]
+      'The %s directory requires source formatting. '
+      'Please run git cl format %s' %
+      (short_path, full_path))]
   # As this is just a warning, ignore all other errors if the user
   # happens to have a broken clang-format, doesn't use git, etc etc.
   return []
+
+
+def CheckGNFormatted(input_api, output_api):
+  import gn
+  affected_files = input_api.AffectedFiles(
+      include_deletes=False,
+      file_filter=lambda x: x.LocalPath().endswith('.gn') or
+                            x.LocalPath().endswith('.gni'))
+  warnings = []
+  for f in affected_files:
+    cmd = ['gn', 'format', '--dry-run', f.AbsoluteLocalPath()]
+    rc = gn.main(cmd)
+    if rc == 2:
+      warnings.append(output_api.PresubmitPromptWarning(
+          '%s requires formatting. Please run `gn format --in-place %s`.' % (
+              f.AbsoluteLocalPath(), f.LocalPath())))
+  # It's just a warning, so ignore other types of failures assuming they'll be
+  # caught elsewhere.
+  return warnings
diff --git a/presubmit_support.py b/presubmit_support.py
index e929e31..523bb79 100755
--- a/presubmit_support.py
+++ b/presubmit_support.py
@@ -39,6 +39,7 @@
 from warnings import warn
 
 # Local imports.
+import auth
 import fix_encoding
 import gclient_utils
 import owners
@@ -292,6 +293,7 @@
     self.os_listdir = os.listdir
     self.os_walk = os.walk
     self.os_path = os.path
+    self.os_stat = os.stat
     self.pickle = pickle
     self.marshal = marshal
     self.re = re
@@ -309,6 +311,14 @@
     # InputApi.platform is the platform you're currently running on.
     self.platform = sys.platform
 
+    self.cpu_count = multiprocessing.cpu_count()
+
+    # this is done here because in RunTests, the current working directory has
+    # changed, which causes Pool() to explode fantastically when run on windows
+    # (because it tries to load the __main__ module, which imports lots of
+    # things relative to the current working directory).
+    self._run_tests_pool = multiprocessing.Pool(self.cpu_count)
+
     # The local path of the currently-being-processed presubmit script.
     self._current_presubmit_path = os.path.dirname(presubmit_path)
 
@@ -488,11 +498,8 @@
         if self.verbose:
           t.info = _PresubmitNotifyResult
     if len(tests) > 1 and parallel:
-      pool = multiprocessing.Pool()
       # async recipe works around multiprocessing bug handling Ctrl-C
-      msgs.extend(pool.map_async(CallCommand, tests).get(99999))
-      pool.close()
-      pool.join()
+      msgs.extend(self._run_tests_pool.map_async(CallCommand, tests).get(99999))
     else:
       msgs.extend(map(CallCommand, tests))
     return [m for m in msgs if m]
@@ -1142,6 +1149,37 @@
     return get_preferred_try_masters(project, change)
 
 
+class GetPostUploadExecuter(object):
+  @staticmethod
+  def ExecPresubmitScript(script_text, presubmit_path, cl, change):
+    """Executes PostUploadHook() from a single presubmit script.
+
+    Args:
+      script_text: The text of the presubmit script.
+      presubmit_path: Project script to run.
+      cl: The Changelist object.
+      change: The Change object.
+
+    Return:
+      A list of results objects.
+    """
+    context = {}
+    try:
+      exec script_text in context
+    except Exception, e:
+      raise PresubmitFailure('"%s" had an exception.\n%s'
+                             % (presubmit_path, e))
+
+    function_name = 'PostUploadHook'
+    if function_name not in context:
+      return {}
+    post_upload_hook = context[function_name]
+    if not len(inspect.getargspec(post_upload_hook)[0]) == 3:
+      raise PresubmitFailure(
+          'Expected function "PostUploadHook" to take three arguments.')
+    return post_upload_hook(cl, change, OutputApi(False))
+
+
 def DoGetTrySlaves(change,
                    changed_files,
                    repository_root,
@@ -1262,6 +1300,49 @@
   return results
 
 
+def DoPostUploadExecuter(change,
+                         cl,
+                         repository_root,
+                         verbose,
+                         output_stream):
+  """Execute the post upload hook.
+
+  Args:
+    change: The Change object.
+    cl: The Changelist object.
+    repository_root: The repository root.
+    verbose: Prints debug info.
+    output_stream: A stream to write debug output to.
+  """
+  presubmit_files = ListRelevantPresubmitFiles(
+      change.LocalPaths(), repository_root)
+  if not presubmit_files and verbose:
+    output_stream.write("Warning, no PRESUBMIT.py found.\n")
+  results = []
+  executer = GetPostUploadExecuter()
+  # The root presubmit file should be executed after the ones in subdirectories.
+  # i.e. the specific post upload hooks should run before the general ones.
+  # Thus, reverse the order provided by ListRelevantPresubmitFiles.
+  presubmit_files.reverse()
+
+  for filename in presubmit_files:
+    filename = os.path.abspath(filename)
+    if verbose:
+      output_stream.write("Running %s\n" % filename)
+    # Accept CRLF presubmit script.
+    presubmit_script = gclient_utils.FileRead(filename, 'rU')
+    results.extend(executer.ExecPresubmitScript(
+        presubmit_script, filename, cl, change))
+  output_stream.write('\n')
+  if results:
+    output_stream.write('** Post Upload Hook Messages **\n')
+  for result in results:
+    result.handle(output_stream)
+    output_stream.write('\n')
+
+  return results
+
+
 class PresubmitExecuter(object):
   def __init__(self, change, committing, rietveld_obj, verbose):
     """
@@ -1530,7 +1611,7 @@
     return cmd_data.info('%s (%4.2fs)' % (cmd_data.name, duration))
 
 
-def Main(argv):
+def main(argv=None):
   parser = optparse.OptionParser(usage="%prog [options] <files...>",
                                  version="%prog " + str(__version__))
   parser.add_option("-c", "--commit", action="store_true", default=False,
@@ -1562,7 +1643,6 @@
                     "to skip multiple canned checks.")
   parser.add_option("--rietveld_url", help=optparse.SUPPRESS_HELP)
   parser.add_option("--rietveld_email", help=optparse.SUPPRESS_HELP)
-  parser.add_option("--rietveld_password", help=optparse.SUPPRESS_HELP)
   parser.add_option("--rietveld_fetch", action='store_true', default=False,
                     help=optparse.SUPPRESS_HELP)
   # These are for OAuth2 authentication for bots. See also apply_issue.py
@@ -1571,7 +1651,9 @@
 
   parser.add_option("--trybot-json",
                     help="Output trybot information to the file specified.")
+  auth.add_auth_options(parser)
   options, args = parser.parse_args(argv)
+  auth_config = auth.extract_auth_config_from_options(options)
 
   if options.verbose >= 2:
     logging.basicConfig(level=logging.DEBUG)
@@ -1583,9 +1665,6 @@
   if options.rietveld_email and options.rietveld_email_file:
     parser.error("Only one of --rietveld_email or --rietveld_email_file "
                  "can be passed to this program.")
-  if options.rietveld_private_key_file and options.rietveld_password:
-    parser.error("Only one of --rietveld_private_key_file or "
-                 "--rietveld_password can be passed to this program.")
 
   if options.rietveld_email_file:
     with open(options.rietveld_email_file, "rb") as f:
@@ -1607,8 +1686,8 @@
     else:
       rietveld_obj = rietveld.CachingRietveld(
         options.rietveld_url,
-        options.rietveld_email,
-        options.rietveld_password)
+        auth_config,
+        options.rietveld_email)
     if options.rietveld_fetch:
       assert options.issue
       props = rietveld_obj.get_issue_properties(options.issue, False)
@@ -1674,4 +1753,8 @@
 
 if __name__ == '__main__':
   fix_encoding.fix_encoding()
-  sys.exit(Main(None))
+  try:
+    sys.exit(main())
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/pylint.py b/pylint.py
index 9caa3a3..f24ab17 100755
--- a/pylint.py
+++ b/pylint.py
@@ -19,4 +19,8 @@
 # another rcfile is to be used, passing --rcfile a second time on the command-
 # line will work fine.
 command = [sys.executable, _PYLINT, '--rcfile=%s' % _RC_FILE] + sys.argv[1:]
-sys.exit(subprocess.call(command))
+try:
+  sys.exit(subprocess.call(command))
+except KeyboardInterrupt:
+  sys.stderr.write('interrupted\n')
+  sys.exit(1)
diff --git a/pylintrc b/pylintrc
index b76c81d..77b0063 100644
--- a/pylintrc
+++ b/pylintrc
@@ -61,7 +61,36 @@
 # W0603: Using the global statement
 # W0703: Catch "Exception"
 # W1201: Specify string format arguments as logging function parameters
-disable=C0103,C0111,C0302,I0010,I0011,R0801,R0901,R0902,R0903,R0904,R0911,R0912,R0913,R0914,R0915,R0921,R0922,W0122,W0141,W0142,W0402,W0404,W0511,W0603,W0703,W1201
+#
+# These should get enabled, but the codebase has too many violations currently.
+# bad-continuation
+# anomalous-backslash-in-string
+# bad-context-manager
+# bad-indentation
+# bad-str-strip-call
+# bad-whitespace
+# cell-var-from-loop
+# deprecated-lambda
+# eval-used
+# function-redefined
+# import-error
+# locally-enabled
+# missing-final-newline
+# no-init
+# no-name-in-module
+# no-self-use
+# not-callable
+# old-style-class
+# protected-access
+# superfluous-parens
+# super-on-old-class
+# too-many-function-args
+# trailing-whitespace
+# unnecessary-semicolon
+# unpacking-non-sequence
+# unused-import
+# useless-else-on-loop
+disable=C0103,C0111,C0302,I0010,I0011,R0801,R0901,R0902,R0903,R0904,R0911,R0912,R0913,R0914,R0915,R0921,R0922,W0122,W0141,W0142,W0402,W0404,W0511,W0603,W0703,W1201,bad-continuation,anomalous-backslash-in-string,bad-context-manager,bad-indentation,bad-str-strip-call,bad-whitespace,cell-var-from-loop,deprecated-lambda,eval-used,function-redefined,import-error,locally-enabled,missing-final-newline,no-init,no-name-in-module,no-self-use,not-callable,old-style-class,protected-access,superfluous-parens,super-on-old-class,too-many-function-args,trailing-whitespace,unnecessary-semicolon,unpacking-non-sequence,unused-import,useless-else-on-loop
 
 
 [REPORTS]
@@ -70,9 +99,6 @@
 # (visual studio) and html
 output-format=text
 
-# Include message's id in output
-include-ids=yes
-
 # Put messages in a separate file for each module / package specified on the
 # command line instead of printing them on stdout. Reports (if any) will be
 # written in a file name "pylint_global.[txt|html]".
diff --git a/recipes/android.py b/recipes/android.py
index efc78a3..24739a2 100644
--- a/recipes/android.py
+++ b/recipes/android.py
@@ -21,6 +21,10 @@
       },
     }
 
+  @staticmethod
+  def expected_root(_props):
+    return ''
+
 
 def main(argv=None):
   return Android().handle_args(argv)
diff --git a/recipes/blink.py b/recipes/blink.py
index 2e3aa9a..99c20ed 100644
--- a/recipes/blink.py
+++ b/recipes/blink.py
@@ -15,23 +15,41 @@
 
   @staticmethod
   def fetch_spec(props):
-    submodule_spec = {
-      'third_party/WebKit': {
-        'svn_url': 'svn://svn.chromium.org/blink/trunk',
-        'svn_branch': 'trunk',
-        'svn_ref': 'master',
-      }
+    chromium_url = 'https://chromium.googlesource.com/chromium/src.git'
+    chromium_solution = {'name': 'src',
+                         'url': chromium_url,
+                         'deps_file': 'DEPS',
+                         'managed': False,
+                         'custom_deps': {
+                             'src/third_party/WebKit': None,
+                         },
     }
+    blink_url = 'https://chromium.googlesource.com/chromium/blink.git'
+    blink_solution = {'name': 'src/third_party/WebKit',
+                      'url': blink_url,
+                      'deps_file': '.DEPS.git',
+                      'managed': False,
+                      'custom_deps': {},
+    }
+    spec = {
+      'solutions': [chromium_solution, blink_solution],
+      'auto': True,
+    }
+    if props.get('target_os'):
+      spec['target_os'] = props['target_os'].split(',')
+    if props.get('target_os_only'):
+      spec['target_os_only'] = props['target_os_only']
+    toolchain_hook = [sys.executable, 'src/build/confirm_toolchain.py']
+    spec['fetch_hooks'] = [toolchain_hook]
     return {
-      'alias': {
-        'recipe': 'chromium',
-        'props': [
-          '--webkit_revision=ToT',
-          '--submodule_git_svn_spec=' + json.dumps(submodule_spec),
-        ],
-      },
+      'type': 'gclient_git_svn',
+      'gclient_git_svn_spec': spec,
     }
 
+  @staticmethod
+  def expected_root(_props):
+    return 'src/third_party/WebKit'
+
 
 def main(argv=None):
   return Blink().handle_args(argv)
diff --git a/recipes/chromium.py b/recipes/chromium.py
index 7ebf984..6416ee0 100644
--- a/recipes/chromium.py
+++ b/recipes/chromium.py
@@ -31,11 +31,9 @@
       spec['target_os'] = props['target_os'].split(',')
     if props.get('target_os_only'):
       spec['target_os_only'] = props['target_os_only']
-    checkout_type = 'gclient_git'
-    spec_type = '%s_spec' % checkout_type
     return {
-      'type': checkout_type,
-      spec_type: spec,
+      'type': 'gclient_git',
+      'gclient_git_spec': spec,
     }
 
   @staticmethod
diff --git a/recipes/dart.py b/recipes/dart.py
new file mode 100644
index 0000000..2b4d78c
--- /dev/null
+++ b/recipes/dart.py
@@ -0,0 +1,46 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+import recipe_util  # pylint: disable=F0401
+
+
+# This class doesn't need an __init__ method, so we disable the warning
+# pylint: disable=W0232
+class Dart(recipe_util.Recipe):
+  """Basic Recipe class for Dart."""
+
+  @staticmethod
+  def fetch_spec(props):
+    url = 'https://github.com/dart-lang/sdk.git'
+    solution = {
+      'name'   :'sdk',
+      'url'    : url,
+      'deps_file': 'DEPS',
+      'managed'   : False,
+      'custom_deps': {},
+      'safesync_url': '',
+    }
+    spec = {
+      'solutions': [solution],
+    }
+    if props.get('target_os'):
+      spec['target_os'] = props['target_os'].split(',')
+    return {
+      'type': 'gclient_git',
+      'gclient_git_spec': spec,
+    }
+
+  @staticmethod
+  def expected_root(_props):
+    return 'sdk'
+
+
+def main(argv=None):
+  return Dart().handle_args(argv)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/recipes/dartium.py b/recipes/dartium.py
new file mode 100644
index 0000000..d026533
--- /dev/null
+++ b/recipes/dartium.py
@@ -0,0 +1,46 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+import recipe_util  # pylint: disable=F0401
+
+
+# This class doesn't need an __init__ method, so we disable the warning
+# pylint: disable=W0232
+class Dart(recipe_util.Recipe):
+  """Basic Recipe class for Dart."""
+
+  @staticmethod
+  def fetch_spec(props):
+    url = 'https://github.com/dart-lang/sdk.git'
+    solution = {
+      'name'   :'src/dart',
+      'url'    : url,
+      'deps_file': 'tools/deps/dartium.deps/DEPS',
+      'managed'   : False,
+      'custom_deps': {},
+      'safesync_url': '',
+    }
+    spec = {
+      'solutions': [solution],
+    }
+    if props.get('target_os'):
+      spec['target_os'] = props['target_os'].split(',')
+    return {
+      'type': 'gclient_git',
+      'gclient_git_spec': spec,
+    }
+
+  @staticmethod
+  def expected_root(_props):
+    return 'src'
+
+
+def main(argv=None):
+  return Dart().handle_args(argv)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/recipes/depot_tools.py b/recipes/depot_tools.py
index 1c9f7ee..7c3b91c 100644
--- a/recipes/depot_tools.py
+++ b/recipes/depot_tools.py
@@ -15,25 +15,16 @@
   @staticmethod
   def fetch_spec(props):
     url = 'https://chromium.googlesource.com/chromium/tools/depot_tools.git'
-    solution = { 'name'   :'src',
-                 'url'    : url,
-                 'deps_file': '.DEPS.git',
-                 'managed'   : False,
-                 'custom_deps': {},
-                 'safesync_url': '',
+    solution = {
+        'name'        : 'depot_tools',
+        'url'         : url,
+        'deps_file'   : 'DEPS',
+        'managed'     : False,
     }
     spec = {
       'solutions': [solution],
-      'svn_url': 'svn://svn.chromium.org/chrome',
-      'svn_branch': 'trunk/tools/depot_tools',
-      'svn_ref': 'master',
+      'auto': True,
     }
-    if props.get('submodule_git_svn_spec'):
-      spec['submodule_git_svn_spec'] = props['submodule_git_svn_spec']
-    if props.get('target_os'):
-      spec['target_os'] = props['target_os'].split(',')
-    if props.get('target_os_only'):
-      spec['target_os_only'] = props['target_os_only']
     checkout_type = 'gclient_git_svn'
     if props.get('nosvn'):
       checkout_type = 'gclient_git'
@@ -45,7 +36,7 @@
 
   @staticmethod
   def expected_root(_props):
-    return 'src'
+    return 'depot_tools'
 
 
 def main(argv=None):
diff --git a/recipes/infra.py b/recipes/infra.py
index 3c65b44..f9698a6 100644
--- a/recipes/infra.py
+++ b/recipes/infra.py
@@ -14,24 +14,18 @@
 
   @staticmethod
   def fetch_spec(_props):
-    solution = lambda name, path_infix = None: {
-      'name'     : name,
-      'url'      : 'https://chromium.googlesource.com/infra/%s%s.git' % (
-        path_infix + '/' if path_infix else '', name
-      ),
-      'deps_file': '.DEPS.git',
-      'managed'  : False,
-    }
-    spec = {
-        'solutions': [
-          solution('infra'),
-          solution('expect_tests', 'testing'),
-          solution('testing_support', 'testing'),
-        ],
-    }
     return {
-        'type': 'gclient_git',
-        'gclient_git_spec': spec,
+      'type': 'gclient_git',
+      'gclient_git_spec': {
+        'solutions': [
+          {
+            'name'     : 'infra',
+            'url'      : 'https://chromium.googlesource.com/infra/infra.git',
+            'deps_file': '.DEPS.git',
+            'managed'  : False,
+          }
+        ],
+      },
     }
 
   @staticmethod
diff --git a/recipes/infra_internal.py b/recipes/infra_internal.py
new file mode 100644
index 0000000..ad6afb9
--- /dev/null
+++ b/recipes/infra_internal.py
@@ -0,0 +1,46 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+import recipe_util  # pylint: disable=F0401
+
+
+# This class doesn't need an __init__ method, so we disable the warning
+# pylint: disable=W0232
+class InfraInternal(recipe_util.Recipe):
+  """Basic Recipe class for the whole set of Infrastructure repositories."""
+
+  @staticmethod
+  def fetch_spec(_props):
+    def url(host, repo):
+      return 'https://%s.googlesource.com/%s.git' % (host, repo)
+
+    spec = {
+      'solutions': [
+        {
+          'name': 'infra_internal',
+          'url': url('chrome-internal', 'infra/infra_internal'),
+          'deps_file': '.DEPS.git',
+          'managed': False
+        },
+      ],
+    }
+    return {
+        'type': 'gclient_git',
+        'gclient_git_spec': spec,
+    }
+
+  @staticmethod
+  def expected_root(_props):
+    return 'infra_internal'
+
+
+def main(argv=None):
+  return InfraInternal().handle_args(argv)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
+
diff --git a/recipes/ios.py b/recipes/ios.py
index e7ee08b..7c4eee0 100644
--- a/recipes/ios.py
+++ b/recipes/ios.py
@@ -21,6 +21,10 @@
       },
     }
 
+  @staticmethod
+  def expected_root(_props):
+    return ''
+
 
 def main(argv=None):
   return IOS().handle_args(argv)
diff --git a/recipes/mojo.py b/recipes/mojo.py
new file mode 100644
index 0000000..f7316ac
--- /dev/null
+++ b/recipes/mojo.py
@@ -0,0 +1,46 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+import recipe_util  # pylint: disable=F0401
+
+
+# This class doesn't need an __init__ method, so we disable the warning
+# pylint: disable=W0232
+class Mojo(recipe_util.Recipe):
+  """Basic Recipe class for Mojo."""
+
+  @staticmethod
+  def fetch_spec(props):
+    url = 'https://github.com/domokit/mojo.git'
+    solution = {
+      'name'   :'src',
+      'url'    : url,
+      'deps_file': 'DEPS',
+      'managed'   : False,
+      'custom_deps': {},
+      'safesync_url': '',
+    }
+    spec = {
+      'solutions': [solution],
+    }
+    if props.get('target_os'):
+      spec['target_os'] = props['target_os'].split(',')
+    return {
+      'type': 'gclient_git',
+      'gclient_git_spec': spec,
+    }
+
+  @staticmethod
+  def expected_root(_props):
+    return 'src'
+
+
+def main(argv=None):
+  return Mojo().handle_args(argv)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/recipes/nacl.py b/recipes/nacl.py
index 3e3595f..59c824c 100644
--- a/recipes/nacl.py
+++ b/recipes/nacl.py
@@ -16,32 +16,24 @@
   def fetch_spec(props):
     url = ('https://chromium.googlesource.com/native_client/'
            'src/native_client.git')
-    solution = { 'name'   :'native_client',
-                 'url'    : url,
-                 'deps_file': '.DEPS.git',
-                 'managed'   : False,
-                 'custom_deps': {},
-                 'safesync_url': '',
+    solution = {
+        'name'        : 'native_client',
+        'url'         : url,
+        'deps_file'   : 'DEPS',
+        'managed'     : False,
+        'custom_deps' : {},
+        'safesync_url': '',
     }
     spec = {
       'solutions': [solution],
-      'svn_url': 'svn://svn.chromium.org/native_client',
-      'svn_branch': 'trunk/src/native_client',
-      'svn_ref': 'master',
     }
-    if props.get('submodule_git_svn_spec'):
-      spec['submodule_git_svn_spec'] = props['submodule_git_svn_spec']
     if props.get('target_os'):
       spec['target_os'] = props['target_os'].split(',')
     if props.get('target_os_only'):
       spec['target_os_only'] = props['target_os_only']
-    checkout_type = 'gclient_git_svn'
-    if props.get('nosvn'):
-      checkout_type = 'gclient_git'
-    spec_type = '%s_spec' % checkout_type
     return {
-      'type': checkout_type,
-      spec_type: spec,
+      'type': 'gclient_git',
+      'gclient_git_spec': spec,
     }
 
   @staticmethod
diff --git a/recipes/naclports.py b/recipes/naclports.py
index 3e68741..10a0b28 100644
--- a/recipes/naclports.py
+++ b/recipes/naclports.py
@@ -15,12 +15,13 @@
   @staticmethod
   def fetch_spec(props):
     url = 'https://chromium.googlesource.com/external/naclports.git'
-    solution = { 'name'   :'src',
-                 'url'    : url,
-                 'deps_file': 'DEPS',
-                 'managed'   : False,
-                 'custom_deps': {},
-                 'safesync_url': '',
+    solution = {
+        'name'        : 'src',
+        'url'         : url,
+        'deps_file'   : 'DEPS',
+        'managed'     : False,
+        'custom_deps' : {},
+        'safesync_url': '',
     }
     spec = {
       'solutions': [solution],
diff --git a/recipes/pdfium.py b/recipes/pdfium.py
new file mode 100644
index 0000000..35bf7b8
--- /dev/null
+++ b/recipes/pdfium.py
@@ -0,0 +1,44 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+import recipe_util  # pylint: disable=F0401
+
+
+# This class doesn't need an __init__ method, so we disable the warning
+# pylint: disable=W0232
+class PDFium(recipe_util.Recipe):
+  """Basic Recipe class for PDFium."""
+
+  @staticmethod
+  def fetch_spec(props):
+    url = 'https://pdfium.googlesource.com/pdfium.git'
+    solution = {
+        'name'        : 'pdfium',
+        'url'         : url,
+        'deps_file'   : 'DEPS',
+        'managed'     : False,
+        'custom_deps' : {},
+        'safesync_url': '',
+    }
+    spec = {
+      'solutions': [solution],
+    }
+    return {
+      'type': 'gclient_git',
+      'gclient_git_spec': spec,
+    }
+
+  @staticmethod
+  def expected_root(_props):
+    return 'pdfium'
+
+
+def main(argv=None):
+  return PDFium().handle_args(argv)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/recipes/skia.py b/recipes/skia.py
new file mode 100644
index 0000000..171157c
--- /dev/null
+++ b/recipes/skia.py
@@ -0,0 +1,41 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+import recipe_util  # pylint: disable=F0401
+
+
+# This class doesn't need an __init__ method, so we disable the warning
+# pylint: disable=W0232
+class Skia(recipe_util.Recipe):
+  """Basic Recipe class for the Skia repository."""
+
+  @staticmethod
+  def fetch_spec(_props):
+    solution = {
+      'name'     : 'skia',
+      'url'      : 'https://skia.googlesource.com/skia.git',
+      'deps_file': 'DEPS',
+      'managed'  : False,
+    }
+    spec = {
+      'solutions': [solution]
+    }
+    return {
+        'type': 'gclient_git',
+        'gclient_git_spec': spec,
+    }
+
+  @staticmethod
+  def expected_root(_props):
+    return 'skia'
+
+
+def main(argv=None):
+  return Skia().handle_args(argv)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/recipes/skia_buildbot.py b/recipes/skia_buildbot.py
new file mode 100644
index 0000000..42b6c8d
--- /dev/null
+++ b/recipes/skia_buildbot.py
@@ -0,0 +1,41 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+import recipe_util  # pylint: disable=F0401
+
+
+# This class doesn't need an __init__ method, so we disable the warning
+# pylint: disable=W0232
+class SkiaBuildbot(recipe_util.Recipe):
+  """Basic Recipe class for the Skia Buildbot repository."""
+
+  @staticmethod
+  def fetch_spec(_props):
+    solution = {
+      'name'     : 'buildbot',
+      'url'      : 'https://skia.googlesource.com/buildbot.git',
+      'deps_file': 'DEPS',
+      'managed'  : False,
+    }
+    spec = {
+      'solutions': [solution]
+    }
+    return {
+        'type': 'gclient_git',
+        'gclient_git_spec': spec,
+    }
+
+  @staticmethod
+  def expected_root(_props):
+    return 'buildbot'
+
+
+def main(argv=None):
+  return SkiaBuildbot().handle_args(argv)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/recipes/v8.py b/recipes/v8.py
index b243867..b2270d0 100644
--- a/recipes/v8.py
+++ b/recipes/v8.py
@@ -14,28 +14,22 @@
 
   @staticmethod
   def fetch_spec(props):
-    ref = 'bleeding_edge'
-    url = 'https://chromium.googlesource.com/external/v8.git@%s' % ref
-    solution = { 'name'   :'v8',
-                 'url'    : url,
-                 'deps_file': '.DEPS.git',
-                 'managed'   : False,
-                 'custom_deps': {},
-                 'safesync_url': '',
+    url = 'https://chromium.googlesource.com/v8/v8.git'
+    solution = {
+        'name'        : 'v8',
+        'url'         : url,
+        'deps_file'   : 'DEPS',
+        'managed'     : False,
+        'custom_deps' : {},
+        'safesync_url': '',
     }
     spec = {
       'solutions': [solution],
-      'svn_url': 'https://v8.googlecode.com/svn',
-      'svn_branch': 'branches/bleeding_edge',
-      'svn_ref': 'bleeding_edge',
+      'with_branch_heads': True,
     }
-    checkout_type = 'gclient_git_svn'
-    if props.get('nosvn'):
-      checkout_type = 'gclient_git'
-    spec_type = '%s_spec' % checkout_type
     return {
-      'type': checkout_type,
-      spec_type: spec,
+      'type': 'gclient_git',
+      'gclient_git_spec': spec,
     }
 
   @staticmethod
diff --git a/recipes/webrtc.py b/recipes/webrtc.py
new file mode 100644
index 0000000..e409d2f
--- /dev/null
+++ b/recipes/webrtc.py
@@ -0,0 +1,50 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+import recipe_util  # pylint: disable=F0401
+
+
+# This class doesn't need an __init__ method, so we disable the warning
+# pylint: disable=W0232
+class WebRTC(recipe_util.Recipe):
+  """Basic Recipe class for WebRTC."""
+
+  @staticmethod
+  def fetch_spec(props):
+    url = 'https://chromium.googlesource.com/external/webrtc.git'
+    spec = {
+      'solutions': [
+        {
+          'name': 'src',
+          'url': url,
+          'deps_file': 'DEPS',
+          'managed': False,
+          'custom_deps': {},
+          'safesync_url': '',
+        },
+      ],
+      'with_branch_heads': True,
+    }
+
+    if props.get('target_os'):
+      spec['target_os'] = props['target_os'].split(',')
+
+    return {
+      'type': 'gclient_git',
+      'gclient_git_spec': spec,
+    }
+
+  @staticmethod
+  def expected_root(_props):
+    return 'src'
+
+
+def main(argv=None):
+  return WebRTC().handle_args(argv)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/recipes/webrtc_android.py b/recipes/webrtc_android.py
new file mode 100644
index 0000000..6d6018a
--- /dev/null
+++ b/recipes/webrtc_android.py
@@ -0,0 +1,34 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+import recipe_util  # pylint: disable=F0401
+
+
+# This class doesn't need an __init__ method, so we disable the warning
+# pylint: disable=W0232
+class WebRTCAndroid(recipe_util.Recipe):
+  """Basic Recipe alias for Android -> WebRTC."""
+
+  @staticmethod
+  def fetch_spec(props):
+    return {
+      'alias': {
+        'recipe': 'webrtc',
+        'props': ['--target_os=android,unix'],
+      },
+    }
+
+  @staticmethod
+  def expected_root(_props):
+    return 'src'
+
+
+def main(argv=None):
+  return WebRTCAndroid().handle_args(argv)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/recipes/webrtc_ios.py b/recipes/webrtc_ios.py
new file mode 100644
index 0000000..550ad8d
--- /dev/null
+++ b/recipes/webrtc_ios.py
@@ -0,0 +1,34 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+import recipe_util  # pylint: disable=F0401
+
+
+# This class doesn't need an __init__ method, so we disable the warning
+# pylint: disable=W0232
+class WebRTCIOS(recipe_util.Recipe):
+  """Basic Recipe alias for iOS -> WebRTC."""
+
+  @staticmethod
+  def fetch_spec(props):
+    return {
+      'alias': {
+        'recipe': 'webrtc',
+        'props': ['--target_os=ios,mac'],
+      },
+    }
+
+  @staticmethod
+  def expected_root(_props):
+    return 'src'
+
+
+def main(argv=None):
+  return WebRTCIOS().handle_args(argv)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/rietveld.py b/rietveld.py
index 0bf3d74..ec5d781 100644
--- a/rietveld.py
+++ b/rietveld.py
@@ -37,33 +37,16 @@
 
 class Rietveld(object):
   """Accesses rietveld."""
-  def __init__(self, url, email, password, extra_headers=None):
+  def __init__(
+      self, url, auth_config, email=None, extra_headers=None, maxtries=None):
     self.url = url.rstrip('/')
-    # Email and password are accessed by commit queue, keep them.
-    self.email = email
-    self.password = password
-    # TODO(maruel): It's not awesome but maybe necessary to retrieve the value.
-    # It happens when the presubmit check is ran out of process, the cookie
-    # needed to be recreated from the credentials. Instead, it should pass the
-    # email and the cookie.
-    if email and password:
-      get_creds = lambda: (email, password)
-      self.rpc_server = upload.HttpRpcServer(
-            self.url,
-            get_creds,
-            extra_headers=extra_headers or {})
-    else:
-      if email == '':
-        # If email is given as an empty string, then assume we want to make
-        # requests that do not need authentication.  Bypass authentication by
-        # setting the auth_function to None.
-        self.rpc_server = upload.HttpRpcServer(url, None)
-      else:
-        self.rpc_server = upload.GetRpcServer(url, email)
+    self.rpc_server = upload.GetRpcServer(self.url, auth_config, email)
 
     self._xsrf_token = None
     self._xsrf_token_time = None
 
+    self._maxtries = maxtries or 40
+
   def xsrf_token(self):
     if (not self._xsrf_token_time or
         (time.time() - self._xsrf_token_time) > 30*60):
@@ -101,6 +84,20 @@
     data['description'] = '\n'.join(data['description'].strip().splitlines())
     return data
 
+  def get_depends_on_patchset(self, issue, patchset):
+    """Returns the patchset this patchset depends on if it exists."""
+    url = '/%d/patchset/%d/get_depends_on_patchset' % (issue, patchset)
+    resp = None
+    try:
+      resp = json.loads(self.get(url))
+    except (urllib2.HTTPError, ValueError):
+      # The get_depends_on_patchset endpoint does not exist on this Rietveld
+      # instance yet. Ignore the error and proceed.
+      # TODO(rmistry): Make this an error when all Rietveld instances have
+      # this endpoint.
+      pass
+    return resp
+
   def get_patchset_properties(self, issue, patchset):
     """Returns the patchset properties."""
     url = '/api/%d/%d' % (issue, patchset)
@@ -337,11 +334,12 @@
 
   def trigger_try_jobs(
       self, issue, patchset, reason, clobber, revision, builders_and_tests,
-      master=None):
+      master=None, category='cq'):
     """Requests new try jobs.
 
     |builders_and_tests| is a map of builders: [tests] to run.
     |master| is the name of the try master the builders belong to.
+    |category| is used to distinguish regular jobs and experimental jobs.
 
     Returns the keys of the new TryJobResult entites.
     """
@@ -350,6 +348,7 @@
       ('clobber', 'True' if clobber else 'False'),
       ('builders', json.dumps(builders_and_tests)),
       ('xsrf_token', self.xsrf_token()),
+      ('category', category),
     ]
     if revision:
       params.append(('revision', revision))
@@ -361,15 +360,17 @@
     return self.post('/%d/try/%d' % (issue, patchset), params)
 
   def trigger_distributed_try_jobs(
-      self, issue, patchset, reason, clobber, revision, masters):
+      self, issue, patchset, reason, clobber, revision, masters,
+      category='cq'):
     """Requests new try jobs.
 
     |masters| is a map of masters: map of builders: [tests] to run.
+    |category| is used to distinguish regular jobs and experimental jobs.
     """
     for (master, builders_and_tests) in masters.iteritems():
       self.trigger_try_jobs(
           issue, patchset, reason, clobber, revision, builders_and_tests,
-          master)
+          master, category)
 
   def get_pending_try_jobs(self, cursor=None, limit=100):
     """Retrieves the try job requests in pending state.
@@ -409,8 +410,7 @@
         old_error_exit(msg)
       upload.ErrorExit = trap_http_500
 
-      maxtries = 40
-      for retry in xrange(maxtries):
+      for retry in xrange(self._maxtries):
         try:
           logging.debug('%s' % request_path)
           result = self.rpc_server.Send(request_path, **kwargs)
@@ -418,7 +418,7 @@
           # How nice.
           return result
         except urllib2.HTTPError, e:
-          if retry >= (maxtries - 1):
+          if retry >= (self._maxtries - 1):
             raise
           flake_codes = [500, 502, 503]
           if retry_on_404:
@@ -426,14 +426,14 @@
           if e.code not in flake_codes:
             raise
         except urllib2.URLError, e:
-          if retry >= (maxtries - 1):
+          if retry >= (self._maxtries - 1):
             raise
           if (not 'Name or service not known' in e.reason and
               not 'EOF occurred in violation of protocol' in e.reason):
             # Usually internal GAE flakiness.
             raise
         except ssl.SSLError, e:
-          if retry >= (maxtries - 1):
+          if retry >= (self._maxtries - 1):
             raise
           if not 'timed out' in str(e):
             raise
@@ -571,17 +571,16 @@
                client_email,
                client_private_key_file,
                private_key_password=None,
-               extra_headers=None):
-
-    # These attributes are accessed by commit queue. Keep them.
-    self.email = client_email
-    self.private_key_file = client_private_key_file
+               extra_headers=None,
+               maxtries=None):
 
     if private_key_password is None:  # '' means 'empty password'
       private_key_password = 'notasecret'
 
     self.url = url.rstrip('/')
-    bot_url = self.url + '/bots'
+    bot_url = self.url
+    if self.url.endswith('googleplex.com'):
+      bot_url = self.url + '/bots'
 
     with open(client_private_key_file, 'rb') as f:
       client_private_key = f.read()
@@ -594,6 +593,8 @@
     self._xsrf_token = None
     self._xsrf_token_time = None
 
+    self._maxtries = maxtries or 40
+
 
 class CachingRietveld(Rietveld):
   """Caches the common queries.
@@ -670,14 +671,6 @@
   def url(self):
     return self._rietveld.url
 
-  @property
-  def email(self):
-    return self._rietveld.email
-
-  @property
-  def password(self):
-    return self._rietveld.password
-
   def get_pending_issues(self):
     pending_issues = self._rietveld.get_pending_issues()
 
@@ -698,6 +691,9 @@
   def get_patchset_properties(self, issue, patchset):
     return self._rietveld.get_patchset_properties(issue, patchset)
 
+  def get_depends_on_patchset(self, issue, patchset):
+    return self._rietveld.get_depends_on_patchset(issue, patchset)
+
   def get_patch(self, issue, patchset):
     return self._rietveld.get_patch(issue, patchset)
 
@@ -719,11 +715,12 @@
 
   def trigger_try_jobs(  # pylint:disable=R0201
       self, issue, patchset, reason, clobber, revision, builders_and_tests,
-      master=None):
+      master=None, category='cq'):
     logging.info('ReadOnlyRietveld: triggering try jobs %r for issue %d' %
         (builders_and_tests, issue))
 
   def trigger_distributed_try_jobs(  # pylint:disable=R0201
-      self, issue, patchset, reason, clobber, revision, masters):
+      self, issue, patchset, reason, clobber, revision, masters,
+      category='cq'):
     logging.info('ReadOnlyRietveld: triggering try jobs %r for issue %d' %
         (masters, issue))
diff --git a/roll-dep-svn b/roll-dep-svn
new file mode 100755
index 0000000..1d65c7b
--- /dev/null
+++ b/roll-dep-svn
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+base_dir=$(dirname "$0")
+
+PYTHONDONTWRITEBYTECODE=1 exec python "$base_dir/roll_dep_svn.py" "$@"
diff --git a/roll-dep-svn.bat b/roll-dep-svn.bat
new file mode 100755
index 0000000..01e8287
--- /dev/null
+++ b/roll-dep-svn.bat
@@ -0,0 +1,10 @@
+@echo off

+:: Copyright (c) 2012 The Chromium Authors. All rights reserved.

+:: Use of this source code is governed by a BSD-style license that can be

+:: found in the LICENSE file.

+

+:: This is required with cygwin only.

+PATH=%~dp0;%PATH%

+

+:: Defer control.

+%~dp0python "%~dp0\roll_dep_svn.py" %*

diff --git a/roll_dep.py b/roll_dep.py
index f071fcc..c3c80b1 100755
--- a/roll_dep.py
+++ b/roll_dep.py
@@ -1,356 +1,162 @@
 #!/usr/bin/env python
-# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Copyright 2015 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-"""This scripts takes the path to a dep and an svn revision, and updates the
-parent repo's DEPS file with the corresponding git revision.  Sample invocation:
+"""Rolls DEPS controlled dependency.
 
-[chromium/src]$ roll-dep third_party/WebKit 12345
-
-After the script completes, the DEPS file will be dirty with the new revision.
-The user can then:
-
-$ git add DEPS
-$ git commit
+Works only with git checkout and git dependencies.  Currently this
+script will always roll to the tip of to origin/master.
 """
 
-import ast
+import argparse
 import os
 import re
+import subprocess
 import sys
 
-from itertools import izip
-from subprocess import check_output, Popen, PIPE
-from textwrap import dedent
+NEED_SHELL = sys.platform.startswith('win')
+
+GITILES_REGEX = r'https?://[^/]*\.googlesource\.com/'
 
 
-SHA1_RE = re.compile('^[a-fA-F0-9]{40}$')
-GIT_SVN_ID_RE = re.compile('^git-svn-id: .*@([0-9]+) .*$')
-ROLL_DESCRIPTION_STR = (
-'''Roll %(dep_path)s %(before_rev)s:%(after_rev)s%(svn_range)s
-
-Summary of changes available at:
-%(revlog_url)s
-''')
+class Error(Exception):
+  pass
 
 
-def shorten_dep_path(dep):
-  """Shorten the given dep path if necessary."""
-  while len(dep) > 31:
-    dep = '.../' + dep.lstrip('./').partition('/')[2]
-  return dep
+def check_output(*args, **kwargs):
+  """subprocess.check_output() passing shell=True on Windows for git."""
+  kwargs.setdefault('shell', NEED_SHELL)
+  return subprocess.check_output(*args, **kwargs)
 
 
-def posix_path(path):
-  """Convert a possibly-Windows path to a posix-style path."""
-  (_, path) = os.path.splitdrive(path)
-  return path.replace(os.sep, '/')
+def check_call(*args, **kwargs):
+  """subprocess.check_call() passing shell=True on Windows for git."""
+  kwargs.setdefault('shell', NEED_SHELL)
+  subprocess.check_call(*args, **kwargs)
 
 
-def platform_path(path):
-  """Convert a path to the native path format of the host OS."""
-  return path.replace('/', os.sep)
+def is_pristine(root, merge_base='origin/master'):
+  """Returns True if a git checkout is pristine."""
+  cmd = ['git', 'diff', '--ignore-submodules', merge_base]
+  return not (check_output(cmd, cwd=root).strip() or
+              check_output(cmd + ['--cached'], cwd=root).strip())
 
 
-def find_gclient_root():
-  """Find the directory containing the .gclient file."""
-  cwd = posix_path(os.getcwd())
-  result = ''
-  for _ in xrange(len(cwd.split('/'))):
-    if os.path.exists(os.path.join(result, '.gclient')):
-      return result
-    result = os.path.join(result, os.pardir)
-  assert False, 'Could not find root of your gclient checkout.'
-
-
-def get_solution(gclient_root, dep_path):
-  """Find the solution in .gclient containing the dep being rolled."""
-  dep_path = os.path.relpath(dep_path, gclient_root)
-  cwd = os.getcwd().rstrip(os.sep) + os.sep
-  gclient_root = os.path.realpath(gclient_root)
-  gclient_path = os.path.join(gclient_root, '.gclient')
-  gclient_locals = {}
-  execfile(gclient_path, {}, gclient_locals)
-  for soln in gclient_locals['solutions']:
-    soln_relpath = platform_path(soln['name'].rstrip('/')) + os.sep
-    if (dep_path.startswith(soln_relpath) or
-        cwd.startswith(os.path.join(gclient_root, soln_relpath))):
-      return soln
-  assert False, 'Could not determine the parent project for %s' % dep_path
-
-
-def verify_git_revision(dep_path, revision):
-  """Verify that a git revision exists in a repository."""
-  p = Popen(['git', 'rev-list', '-n', '1', revision],
-            cwd=dep_path, stdout=PIPE, stderr=PIPE)
-  result = p.communicate()[0].strip()
-  if p.returncode != 0 or not SHA1_RE.match(result):
-    result = None
-  return result
-
-
-def get_svn_revision(dep_path, git_revision):
-  """Given a git revision, return the corresponding svn revision."""
-  p = Popen(['git', 'log', '-n', '1', '--pretty=format:%B', git_revision],
-            stdout=PIPE, cwd=dep_path)
-  (log, _) = p.communicate()
-  assert p.returncode == 0, 'git log %s failed.' % git_revision
-  for line in reversed(log.splitlines()):
-    m = GIT_SVN_ID_RE.match(line.strip())
-    if m:
-      return m.group(1)
-  return None
-
-
-def convert_svn_revision(dep_path, revision):
-  """Find the git revision corresponding to an svn revision."""
-  err_msg = 'Unknown error'
-  revision = int(revision)
-  with open(os.devnull, 'w') as devnull:
-    for ref in ('HEAD', 'origin/master'):
-      try:
-        log_p = Popen(['git', 'log', ref],
-                      cwd=dep_path, stdout=PIPE, stderr=devnull)
-        grep_p = Popen(['grep', '-e', '^commit ', '-e', '^ *git-svn-id: '],
-                       stdin=log_p.stdout, stdout=PIPE, stderr=devnull)
-        git_rev = None
-        prev_svn_rev = None
-        for line in grep_p.stdout:
-          if line.startswith('commit '):
-            git_rev = line.split()[1]
-            continue
-          try:
-            svn_rev = int(line.split()[1].partition('@')[2])
-          except (IndexError, ValueError):
-            print >> sys.stderr, (
-                'WARNING: Could not parse svn revision out of "%s"' % line)
-            continue
-          if svn_rev == revision:
-            return git_rev
-          if svn_rev > revision:
-            prev_svn_rev = svn_rev
-            continue
-          if prev_svn_rev:
-            err_msg = 'git history skips from revision %d to revision %d.' % (
-                svn_rev, prev_svn_rev)
-          else:
-            err_msg = (
-                'latest available revision is %d; you may need to '
-                '"git fetch origin" to get the latest commits.' % svn_rev)
-      finally:
-        log_p.terminate()
-        grep_p.terminate()
-  raise RuntimeError('No match for revision %d; %s' % (revision, err_msg))
-
-
-def get_git_revision(dep_path, revision):
-  """Convert the revision argument passed to the script to a git revision."""
-  svn_revision = None
-  if revision.startswith('r'):
-    git_revision = convert_svn_revision(dep_path, revision[1:])
-    svn_revision = revision[1:]
-  elif re.search('[a-fA-F]', revision):
-    git_revision = verify_git_revision(dep_path, revision)
-    svn_revision = get_svn_revision(dep_path, git_revision)
-  elif len(revision) > 6:
-    git_revision = verify_git_revision(dep_path, revision)
-    if git_revision:
-      svn_revision = get_svn_revision(dep_path, git_revision)
-    else:
-      git_revision = convert_svn_revision(dep_path, revision)
-      svn_revision = revision
-  else:
-    try:
-      git_revision = convert_svn_revision(dep_path, revision)
-      svn_revision = revision
-    except RuntimeError:
-      git_revision = verify_git_revision(dep_path, revision)
-      if not git_revision:
-        raise
-      svn_revision = get_svn_revision(dep_path, git_revision)
-  return git_revision, svn_revision
-
-
-def ast_err_msg(node):
-  return 'ERROR: Undexpected DEPS file AST structure at line %d column %d' % (
-      node.lineno, node.col_offset)
-
-
-def find_deps_section(deps_ast, section):
-  """Find a top-level section of the DEPS file in the AST."""
+def roll(root, deps_dir, key, reviewers, bug):
+  deps = os.path.join(root, 'DEPS')
   try:
-    result = [n.value for n in deps_ast.body if
-              n.__class__ is ast.Assign and
-              n.targets[0].__class__ is ast.Name and
-              n.targets[0].id == section][0]
-    return result
-  except IndexError:
-    return None
+    with open(deps, 'rb') as f:
+      deps_content = f.read()
+  except (IOError, OSError):
+    raise Error('Ensure the script is run in the directory '
+                'containing DEPS file.')
+
+  if not is_pristine(root):
+    raise Error('Ensure %s is clean first.' % root)
+
+  full_dir = os.path.normpath(os.path.join(os.path.dirname(root), deps_dir))
+  if not os.path.isdir(full_dir):
+    raise Error('Directory not found: %s' % deps_dir)
+  head = check_output(['git', 'rev-parse', 'HEAD'], cwd=full_dir).strip()
+
+  if not head in deps_content:
+    print('Warning: %s is not checked out at the expected revision in DEPS' %
+          deps_dir)
+    if key is None:
+      print("Warning: no key specified.  Using '%s'." % deps_dir)
+      key = deps_dir
+
+    # This happens if the user checked out a branch in the dependency themselves.
+    # Fall back to reading the DEPS to figure out the original commit.
+    for i in deps_content.splitlines():
+      m = re.match(r'\s+"' + key + '": "([a-z0-9]{40})",', i)
+      if m:
+        head = m.group(1)
+        break
+    else:
+      raise Error('Expected to find commit %s for %s in DEPS' % (head, key))
+
+  print('Found old revision %s' % head)
+
+  check_call(['git', 'fetch', 'origin'], cwd=full_dir)
+  master = check_output(
+      ['git', 'rev-parse', 'origin/master'], cwd=full_dir).strip()
+  print('Found new revision %s' % master)
+
+  if master == head:
+    raise Error('No revision to roll!')
+
+  commit_range = '%s..%s' % (head[:9], master[:9])
+  upstream_url = check_output(
+      ['git', 'config', 'remote.origin.url'], cwd=full_dir).strip()
+
+  log_url = None
+  if re.match(GITILES_REGEX, upstream_url):
+    log_url = '%s/+log/%s..%s' % (upstream_url, head, master)
+
+  msg_args = {
+      'deps_dir': deps_dir,
+      'commit_range': commit_range,
+      'log': '%s\n\n' % log_url if log_url else '',
+      'reviewer': 'R=%s\n' % ','.join(reviewers) if reviewers else '',
+      'bug': 'BUG=%s\n' % bug if bug else '',
+  }
+  msg = (
+      'Roll %(deps_dir)s %(commit_range)s\n'
+      '\n'
+      '%(log)s'
+      '%(reviewer)s'
+      '%(bug)s' % msg_args)
+
+  print('Commit message:')
+  print('\n'.join('    ' + i for i in msg.splitlines()))
+  deps_content = deps_content.replace(head, master)
+  with open(deps, 'wb') as f:
+    f.write(deps_content)
+  check_call(['git', 'add', 'DEPS'], cwd=root)
+  check_call(['git', 'commit', '-m', msg], cwd=root)
+  print('')
+  if not reviewers:
+    print('You forgot to pass -r, make sure to insert a R=foo@example.com line')
+    print('to the commit description before emailing.')
+    print('')
+  print('Run:')
+  print('  git cl upload --send-mail')
 
 
-def find_dict_index(dict_node, key):
-  """Given a key, find the index of the corresponding dict entry."""
-  assert dict_node.__class__ is ast.Dict, ast_err_msg(dict_node)
-  indices = [i for i, n in enumerate(dict_node.keys) if
-             n.__class__ is ast.Str and n.s == key]
-  assert len(indices) < 2, (
-      'Found redundant dict entries for key "%s"' % key)
-  return indices[0] if indices else None
+def main():
+  parser = argparse.ArgumentParser(description=__doc__)
+  parser.add_argument('-r', '--reviewer',
+      help='To specify multiple reviewers, use comma separated list, e.g. '
+           '-r joe,jane,john. Defaults to @chromium.org')
+  parser.add_argument('-b', '--bug')
+  parser.add_argument('dep_path', help='path to dependency')
+  parser.add_argument('key', nargs='?',
+      help='regexp for dependency in DEPS file')
+  args = parser.parse_args()
 
+  reviewers = None
+  if args.reviewer:
+    reviewers = args.reviewer.split(',')
+    for i, r in enumerate(reviewers):
+      if not '@' in r:
+        reviewers[i] = r + '@chromium.org'
 
-def update_node(deps_lines, deps_ast, node, git_revision):
-  """Update an AST node with the new git revision."""
-  if node.__class__ is ast.Str:
-    return update_string(deps_lines, node, git_revision)
-  elif node.__class__ is ast.BinOp:
-    return update_binop(deps_lines, deps_ast, node, git_revision)
-  elif node.__class__ is ast.Call:
-    return update_call(deps_lines, deps_ast, node, git_revision)
-  else:
-    assert False, ast_err_msg(node)
+  try:
+    roll(
+        os.getcwd(),
+        args.dep_path,
+        args.key,
+        reviewers=reviewers,
+        bug=args.bug)
 
-
-def update_string(deps_lines, string_node, git_revision):
-  """Update a string node in the AST with the new git revision."""
-  line_idx = string_node.lineno - 1
-  start_idx = string_node.col_offset - 1
-  line = deps_lines[line_idx]
-  (prefix, sep, old_rev) = string_node.s.partition('@')
-  if sep:
-    start_idx = line.find(prefix + sep, start_idx) + len(prefix + sep)
-    tail_idx = start_idx + len(old_rev)
-  else:
-    start_idx = line.find(prefix, start_idx)
-    tail_idx = start_idx + len(prefix)
-    old_rev = prefix
-  deps_lines[line_idx] = line[:start_idx] + git_revision + line[tail_idx:]
-  return line_idx
-
-
-def update_binop(deps_lines, deps_ast, binop_node, git_revision):
-  """Update a binary operation node in the AST with the new git revision."""
-  # Since the revision part is always last, assume that it's the right-hand
-  # operand that needs to be updated.
-  return update_node(deps_lines, deps_ast, binop_node.right, git_revision)
-
-
-def update_call(deps_lines, deps_ast, call_node, git_revision):
-  """Update a function call node in the AST with the new git revision."""
-  # The only call we know how to handle is Var()
-  assert call_node.func.id == 'Var', ast_err_msg(call_node)
-  assert call_node.args and call_node.args[0].__class__ is ast.Str, (
-      ast_err_msg(call_node))
-  return update_var(deps_lines, deps_ast, call_node.args[0].s, git_revision)
-
-
-def update_var(deps_lines, deps_ast, var_name, git_revision):
-  """Update an entry in the vars section of the DEPS file with the new
-  git revision."""
-  vars_node = find_deps_section(deps_ast, 'vars')
-  assert vars_node, 'Could not find "vars" section of DEPS file.'
-  var_idx = find_dict_index(vars_node, var_name)
-  assert var_idx is not None, (
-      'Could not find definition of "%s" var in DEPS file.' % var_name)
-  val_node = vars_node.values[var_idx]
-  return update_node(deps_lines, deps_ast, val_node, git_revision)
-
-
-def short_rev(rev, dep_path):
-  return check_output(['git', 'rev-parse', '--short', rev],
-                      cwd=dep_path).rstrip()
-
-
-def generate_commit_message(deps_section, dep_path, dep_name, new_rev):
-  (url, _, old_rev) = deps_section[dep_name].partition('@')
-  if url.endswith('.git'):
-    url = url[:-4]
-  old_rev_short = short_rev(old_rev, dep_path)
-  new_rev_short = short_rev(new_rev, dep_path)
-  url += '/+log/%s..%s' % (old_rev_short, new_rev_short)
-  old_svn_rev = get_svn_revision(dep_path, old_rev)
-  new_svn_rev = get_svn_revision(dep_path, new_rev)
-  svn_range_str = ''
-  if old_svn_rev and new_svn_rev:
-    svn_range_str = ' (svn %s:%s)' % (old_svn_rev, new_svn_rev)
-  return dedent(ROLL_DESCRIPTION_STR % {
-    'dep_path': shorten_dep_path(dep_name),
-    'before_rev': old_rev_short,
-    'after_rev': new_rev_short,
-    'svn_range': svn_range_str,
-    'revlog_url': url,
-  })
-
-def update_deps_entry(deps_lines, deps_ast, value_node, new_rev, comment):
-  line_idx = update_node(deps_lines, deps_ast, value_node, new_rev)
-  (content, _, _) = deps_lines[line_idx].partition('#')
-  if comment:
-    deps_lines[line_idx] = '%s # %s' % (content.rstrip(), comment)
-  else:
-    deps_lines[line_idx] = content.rstrip()
-
-def update_deps(soln_path, dep_path, dep_name, new_rev, comment):
-  """Update the DEPS file with the new git revision."""
-  commit_msg = ''
-  deps_file = os.path.join(soln_path, 'DEPS')
-  with open(deps_file) as fh:
-    deps_content = fh.read()
-  deps_locals = {}
-  def _Var(key):
-    return deps_locals['vars'][key]
-  deps_locals['Var'] = _Var
-  exec deps_content in {}, deps_locals
-  deps_lines = deps_content.splitlines()
-  deps_ast = ast.parse(deps_content, deps_file)
-  deps_node = find_deps_section(deps_ast, 'deps')
-  assert deps_node, 'Could not find "deps" section of DEPS file'
-  dep_idx = find_dict_index(deps_node, dep_name)
-  if dep_idx is not None:
-    value_node = deps_node.values[dep_idx]
-    update_deps_entry(deps_lines, deps_ast, value_node, new_rev, comment)
-    commit_msg = generate_commit_message(deps_locals['deps'], dep_path,
-                                         dep_name, new_rev)
-  deps_os_node = find_deps_section(deps_ast, 'deps_os')
-  if deps_os_node:
-    for (os_name, os_node) in izip(deps_os_node.keys, deps_os_node.values):
-      dep_idx = find_dict_index(os_node, dep_name)
-      if dep_idx is not None:
-        value_node = os_node.values[dep_idx]
-        if value_node.__class__ is ast.Name and value_node.id == 'None':
-          pass
-        else:
-          update_deps_entry(deps_lines, deps_ast, value_node, new_rev, comment)
-          commit_msg = generate_commit_message(
-              deps_locals['deps_os'][os_name], dep_path, dep_name, new_rev)
-  if commit_msg:
-    print 'Pinning %s' % dep_name
-    print 'to revision %s' % new_rev
-    print 'in %s' % deps_file
-    with open(deps_file, 'w') as fh:
-      for line in deps_lines:
-        print >> fh, line
-    with open(os.path.join(soln_path, '.git', 'MERGE_MSG'), 'a') as fh:
-      fh.write(commit_msg)
-  else:
-    print 'Could not find an entry in %s to update.' % deps_file
-  return 0 if commit_msg else 1
-
-
-def main(argv):
-  if len(argv) != 2 :
-    print >> sys.stderr, 'Usage: roll_dep.py <dep path> <svn revision>'
+  except Error as e:
+    sys.stderr.write('error: %s\n' % e)
     return 1
-  (dep_path, revision) = argv[0:2]
-  dep_path = platform_path(dep_path)
-  assert os.path.isdir(dep_path), 'No such directory: %s' % dep_path
-  gclient_root = find_gclient_root()
-  soln = get_solution(gclient_root, dep_path)
-  soln_path = os.path.relpath(os.path.join(gclient_root, soln['name']))
-  dep_name = posix_path(os.path.relpath(dep_path, gclient_root))
-  (git_rev, svn_rev) = get_git_revision(dep_path, revision)
-  comment = ('from svn revision %s' % svn_rev) if svn_rev else None
-  assert git_rev, 'Could not find git revision matching %s.' % revision
-  return update_deps(soln_path, dep_path, dep_name, git_rev, comment)
+
+  return 0
+
 
 if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
+  sys.exit(main())
diff --git a/roll_dep_svn.py b/roll_dep_svn.py
new file mode 100755
index 0000000..d7af5eb
--- /dev/null
+++ b/roll_dep_svn.py
@@ -0,0 +1,417 @@
+#!/usr/bin/env python
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Rolls a git-svn dependency.
+
+It takes the path to a dep and a git commit hash or svn revision, and updates
+the parent repo's DEPS file with the corresponding git commit hash.
+
+Sample invocation:
+
+[chromium/src]$ roll-dep-svn third_party/WebKit 12345
+
+After the script completes, the DEPS file will be dirty with the new revision.
+The user can then:
+
+$ git add DEPS
+$ git commit
+"""
+
+import ast
+import optparse
+import os
+import re
+import sys
+
+from itertools import izip
+from subprocess import check_output, Popen, PIPE
+from textwrap import dedent
+
+
+SHA1_RE = re.compile('^[a-fA-F0-9]{40}$')
+GIT_SVN_ID_RE = re.compile('^git-svn-id: .*@([0-9]+) .*$')
+ROLL_DESCRIPTION_STR = (
+'''Roll %(dep_path)s %(before_rev)s:%(after_rev)s%(svn_range)s
+
+Summary of changes available at:
+%(revlog_url)s
+''')
+
+
+def shorten_dep_path(dep):
+  """Shorten the given dep path if necessary."""
+  while len(dep) > 31:
+    dep = '.../' + dep.lstrip('./').partition('/')[2]
+  return dep
+
+
+def posix_path(path):
+  """Convert a possibly-Windows path to a posix-style path."""
+  (_, path) = os.path.splitdrive(path)
+  return path.replace(os.sep, '/')
+
+
+def platform_path(path):
+  """Convert a path to the native path format of the host OS."""
+  return path.replace('/', os.sep)
+
+
+def find_gclient_root():
+  """Find the directory containing the .gclient file."""
+  cwd = posix_path(os.getcwd())
+  result = ''
+  for _ in xrange(len(cwd.split('/'))):
+    if os.path.exists(os.path.join(result, '.gclient')):
+      return result
+    result = os.path.join(result, os.pardir)
+  assert False, 'Could not find root of your gclient checkout.'
+
+
+def get_solution(gclient_root, dep_path):
+  """Find the solution in .gclient containing the dep being rolled."""
+  dep_path = os.path.relpath(dep_path, gclient_root)
+  cwd = os.getcwd().rstrip(os.sep) + os.sep
+  gclient_root = os.path.realpath(gclient_root)
+  gclient_path = os.path.join(gclient_root, '.gclient')
+  gclient_locals = {}
+  execfile(gclient_path, {}, gclient_locals)
+  for soln in gclient_locals['solutions']:
+    soln_relpath = platform_path(soln['name'].rstrip('/')) + os.sep
+    if (dep_path.startswith(soln_relpath) or
+        cwd.startswith(os.path.join(gclient_root, soln_relpath))):
+      return soln
+  assert False, 'Could not determine the parent project for %s' % dep_path
+
+
+def is_git_hash(revision):
+  """Determines if a given revision is a git hash."""
+  return SHA1_RE.match(revision)
+
+
+def verify_git_revision(dep_path, revision):
+  """Verify that a git revision exists in a repository."""
+  p = Popen(['git', 'rev-list', '-n', '1', revision],
+            cwd=dep_path, stdout=PIPE, stderr=PIPE)
+  result = p.communicate()[0].strip()
+  if p.returncode != 0 or not is_git_hash(result):
+    result = None
+  return result
+
+
+def get_svn_revision(dep_path, git_revision):
+  """Given a git revision, return the corresponding svn revision."""
+  p = Popen(['git', 'log', '-n', '1', '--pretty=format:%B', git_revision],
+            stdout=PIPE, cwd=dep_path)
+  (log, _) = p.communicate()
+  assert p.returncode == 0, 'git log %s failed.' % git_revision
+  for line in reversed(log.splitlines()):
+    m = GIT_SVN_ID_RE.match(line.strip())
+    if m:
+      return m.group(1)
+  return None
+
+
+def convert_svn_revision(dep_path, revision):
+  """Find the git revision corresponding to an svn revision."""
+  err_msg = 'Unknown error'
+  revision = int(revision)
+  latest_svn_rev = None
+  with open(os.devnull, 'w') as devnull:
+    for ref in ('HEAD', 'origin/master'):
+      try:
+        log_p = Popen(['git', 'log', ref],
+                      cwd=dep_path, stdout=PIPE, stderr=devnull)
+        grep_p = Popen(['grep', '-e', '^commit ', '-e', '^ *git-svn-id: '],
+                       stdin=log_p.stdout, stdout=PIPE, stderr=devnull)
+        git_rev = None
+        prev_svn_rev = None
+        for line in grep_p.stdout:
+          if line.startswith('commit '):
+            git_rev = line.split()[1]
+            continue
+          try:
+            svn_rev = int(line.split()[1].partition('@')[2])
+          except (IndexError, ValueError):
+            print >> sys.stderr, (
+                'WARNING: Could not parse svn revision out of "%s"' % line)
+            continue
+          if not latest_svn_rev or int(svn_rev) > int(latest_svn_rev):
+            latest_svn_rev = svn_rev
+          if svn_rev == revision:
+            return git_rev
+          if svn_rev > revision:
+            prev_svn_rev = svn_rev
+            continue
+          if prev_svn_rev:
+            err_msg = 'git history skips from revision %d to revision %d.' % (
+                svn_rev, prev_svn_rev)
+          else:
+            err_msg = (
+                'latest available revision is %d; you may need to '
+                '"git fetch origin" to get the latest commits.' %
+                latest_svn_rev)
+      finally:
+        log_p.terminate()
+        grep_p.terminate()
+  raise RuntimeError('No match for revision %d; %s' % (revision, err_msg))
+
+
+def get_git_revision(dep_path, revision):
+  """Convert the revision argument passed to the script to a git revision."""
+  svn_revision = None
+  if revision.startswith('r'):
+    git_revision = convert_svn_revision(dep_path, revision[1:])
+    svn_revision = revision[1:]
+  elif re.search('[a-fA-F]', revision):
+    git_revision = verify_git_revision(dep_path, revision)
+    if not git_revision:
+      raise RuntimeError('Please \'git fetch origin\' in %s' % dep_path)
+    svn_revision = get_svn_revision(dep_path, git_revision)
+  elif len(revision) > 6:
+    git_revision = verify_git_revision(dep_path, revision)
+    if git_revision:
+      svn_revision = get_svn_revision(dep_path, git_revision)
+    else:
+      git_revision = convert_svn_revision(dep_path, revision)
+      svn_revision = revision
+  else:
+    try:
+      git_revision = convert_svn_revision(dep_path, revision)
+      svn_revision = revision
+    except RuntimeError:
+      git_revision = verify_git_revision(dep_path, revision)
+      if not git_revision:
+        raise
+      svn_revision = get_svn_revision(dep_path, git_revision)
+  return git_revision, svn_revision
+
+
+def ast_err_msg(node):
+  return 'ERROR: Unexpected DEPS file AST structure at line %d column %d' % (
+      node.lineno, node.col_offset)
+
+
+def find_deps_section(deps_ast, section):
+  """Find a top-level section of the DEPS file in the AST."""
+  try:
+    result = [n.value for n in deps_ast.body if
+              n.__class__ is ast.Assign and
+              n.targets[0].__class__ is ast.Name and
+              n.targets[0].id == section][0]
+    return result
+  except IndexError:
+    return None
+
+
+def find_dict_index(dict_node, key):
+  """Given a key, find the index of the corresponding dict entry."""
+  assert dict_node.__class__ is ast.Dict, ast_err_msg(dict_node)
+  indices = [i for i, n in enumerate(dict_node.keys) if
+             n.__class__ is ast.Str and n.s == key]
+  assert len(indices) < 2, (
+      'Found redundant dict entries for key "%s"' % key)
+  return indices[0] if indices else None
+
+
+def update_node(deps_lines, deps_ast, node, git_revision):
+  """Update an AST node with the new git revision."""
+  if node.__class__ is ast.Str:
+    return update_string(deps_lines, node, git_revision)
+  elif node.__class__ is ast.BinOp:
+    return update_binop(deps_lines, deps_ast, node, git_revision)
+  elif node.__class__ is ast.Call:
+    return update_call(deps_lines, deps_ast, node, git_revision)
+  else:
+    assert False, ast_err_msg(node)
+
+
+def update_string(deps_lines, string_node, git_revision):
+  """Update a string node in the AST with the new git revision."""
+  line_idx = string_node.lineno - 1
+  start_idx = string_node.col_offset - 1
+  line = deps_lines[line_idx]
+  (prefix, sep, old_rev) = string_node.s.partition('@')
+  if sep:
+    start_idx = line.find(prefix + sep, start_idx) + len(prefix + sep)
+    tail_idx = start_idx + len(old_rev)
+  else:
+    start_idx = line.find(prefix, start_idx)
+    tail_idx = start_idx + len(prefix)
+    old_rev = prefix
+  deps_lines[line_idx] = line[:start_idx] + git_revision + line[tail_idx:]
+  return line_idx
+
+
+def update_binop(deps_lines, deps_ast, binop_node, git_revision):
+  """Update a binary operation node in the AST with the new git revision."""
+  # Since the revision part is always last, assume that it's the right-hand
+  # operand that needs to be updated.
+  return update_node(deps_lines, deps_ast, binop_node.right, git_revision)
+
+
+def update_call(deps_lines, deps_ast, call_node, git_revision):
+  """Update a function call node in the AST with the new git revision."""
+  # The only call we know how to handle is Var()
+  assert call_node.func.id == 'Var', ast_err_msg(call_node)
+  assert call_node.args and call_node.args[0].__class__ is ast.Str, (
+      ast_err_msg(call_node))
+  return update_var(deps_lines, deps_ast, call_node.args[0].s, git_revision)
+
+
+def update_var(deps_lines, deps_ast, var_name, git_revision):
+  """Update an entry in the vars section of the DEPS file with the new
+  git revision."""
+  vars_node = find_deps_section(deps_ast, 'vars')
+  assert vars_node, 'Could not find "vars" section of DEPS file.'
+  var_idx = find_dict_index(vars_node, var_name)
+  assert var_idx is not None, (
+      'Could not find definition of "%s" var in DEPS file.' % var_name)
+  val_node = vars_node.values[var_idx]
+  return update_node(deps_lines, deps_ast, val_node, git_revision)
+
+
+def short_rev(rev, dep_path):
+  return check_output(['git', 'rev-parse', '--short', rev],
+                      cwd=dep_path).rstrip()
+
+
+def generate_commit_message(deps_section, dep_path, dep_name, new_rev):
+  (url, _, old_rev) = deps_section[dep_name].partition('@')
+  if url.endswith('.git'):
+    url = url[:-4]
+  old_rev_short = short_rev(old_rev, dep_path)
+  new_rev_short = short_rev(new_rev, dep_path)
+  url += '/+log/%s..%s' % (old_rev_short, new_rev_short)
+  try:
+    old_svn_rev = get_svn_revision(dep_path, old_rev)
+    new_svn_rev = get_svn_revision(dep_path, new_rev)
+  except Exception:
+    # Ignore failures that might arise from the repo not being checked out.
+    old_svn_rev = new_svn_rev = None
+  svn_range_str = ''
+  if old_svn_rev and new_svn_rev:
+    svn_range_str = ' (svn %s:%s)' % (old_svn_rev, new_svn_rev)
+  return dedent(ROLL_DESCRIPTION_STR % {
+    'dep_path': shorten_dep_path(dep_name),
+    'before_rev': old_rev_short,
+    'after_rev': new_rev_short,
+    'svn_range': svn_range_str,
+    'revlog_url': url,
+  })
+
+
+def update_deps_entry(deps_lines, deps_ast, value_node, new_rev, comment):
+  line_idx = update_node(deps_lines, deps_ast, value_node, new_rev)
+  (content, _, _) = deps_lines[line_idx].partition('#')
+  if comment:
+    deps_lines[line_idx] = '%s # %s' % (content.rstrip(), comment)
+  else:
+    deps_lines[line_idx] = content.rstrip()
+
+
+def update_deps(deps_file, dep_path, dep_name, new_rev, comment):
+  """Update the DEPS file with the new git revision."""
+  commit_msg = ''
+  with open(deps_file) as fh:
+    deps_content = fh.read()
+  deps_locals = {}
+  def _Var(key):
+    return deps_locals['vars'][key]
+  deps_locals['Var'] = _Var
+  exec deps_content in {}, deps_locals
+  deps_lines = deps_content.splitlines()
+  deps_ast = ast.parse(deps_content, deps_file)
+  deps_node = find_deps_section(deps_ast, 'deps')
+  assert deps_node, 'Could not find "deps" section of DEPS file'
+  dep_idx = find_dict_index(deps_node, dep_name)
+  if dep_idx is not None:
+    value_node = deps_node.values[dep_idx]
+    update_deps_entry(deps_lines, deps_ast, value_node, new_rev, comment)
+    commit_msg = generate_commit_message(deps_locals['deps'], dep_path,
+                                         dep_name, new_rev)
+  deps_os_node = find_deps_section(deps_ast, 'deps_os')
+  if deps_os_node:
+    for (os_name, os_node) in izip(deps_os_node.keys, deps_os_node.values):
+      dep_idx = find_dict_index(os_node, dep_name)
+      if dep_idx is not None:
+        value_node = os_node.values[dep_idx]
+        if value_node.__class__ is ast.Name and value_node.id == 'None':
+          pass
+        else:
+          update_deps_entry(deps_lines, deps_ast, value_node, new_rev, comment)
+          commit_msg = generate_commit_message(
+              deps_locals['deps_os'][os_name.s], dep_path, dep_name, new_rev)
+  if not commit_msg:
+    print 'Could not find an entry in %s to update.' % deps_file
+    return 1
+
+  print 'Pinning %s' % dep_name
+  print 'to revision %s' % new_rev
+  print 'in %s' % deps_file
+  with open(deps_file, 'w') as fh:
+    for line in deps_lines:
+      print >> fh, line
+  deps_file_dir = os.path.normpath(os.path.dirname(deps_file))
+  deps_file_root = Popen(
+      ['git', 'rev-parse', '--show-toplevel'],
+      cwd=deps_file_dir, stdout=PIPE).communicate()[0].strip()
+  with open(os.path.join(deps_file_root, '.git', 'MERGE_MSG'), 'w') as fh:
+    fh.write(commit_msg)
+  return 0
+
+
+def main(argv):
+  usage = 'Usage: roll-dep-svn [options] <dep path> <rev> [ <DEPS file> ]'
+  parser = optparse.OptionParser(usage=usage, description=__doc__)
+  parser.add_option('--no-verify-revision',
+                    help='Don\'t verify the revision passed in. This '
+                         'also skips adding an svn revision comment '
+                         'for git dependencies and requires the passed '
+                         'revision to be a git hash.',
+                    default=False, action='store_true')
+  options, args = parser.parse_args(argv)
+  if len(args) not in (2, 3):
+    parser.error('Expected either 2 or 3 positional parameters.')
+  arg_dep_path, revision = args[:2]
+  gclient_root = find_gclient_root()
+  dep_path = platform_path(arg_dep_path)
+  if not os.path.exists(dep_path):
+    dep_path = os.path.join(gclient_root, dep_path)
+  if not options.no_verify_revision:
+    # Only require the path to exist if the revision should be verified. A path
+    # to e.g. os deps might not be checked out.
+    if not os.path.isdir(dep_path):
+      print >> sys.stderr, 'No such directory: %s' % arg_dep_path
+      return 1
+  if len(args) > 2:
+    deps_file = args[2]
+  else:
+    soln = get_solution(gclient_root, dep_path)
+    soln_path = os.path.relpath(os.path.join(gclient_root, soln['name']))
+    deps_file = os.path.join(soln_path, 'DEPS')
+  dep_name = posix_path(os.path.relpath(dep_path, gclient_root))
+  if options.no_verify_revision:
+    if not is_git_hash(revision):
+      print >> sys.stderr, (
+          'The passed revision %s must be a git hash when skipping revision '
+          'verification.' % revision)
+      return 1
+    git_rev = revision
+    comment = None
+  else:
+    git_rev, svn_rev = get_git_revision(dep_path, revision)
+    comment = ('from svn revision %s' % svn_rev) if svn_rev else None
+    if not git_rev:
+      print >> sys.stderr, 'Could not find git revision matching %s.' % revision
+      return 1
+  return update_deps(deps_file, dep_path, dep_name, git_rev, comment)
+
+
+if __name__ == '__main__':
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/scm.py b/scm.py
index 9bc96bc..94b925e 100644
--- a/scm.py
+++ b/scm.py
@@ -443,6 +443,16 @@
       return False
 
   @staticmethod
+  def IsDirectoryVersioned(cwd, relative_dir):
+    """Checks whether the given |relative_dir| is part of cwd's repo."""
+    return bool(GIT.Capture(['ls-tree', 'HEAD', relative_dir], cwd=cwd))
+
+  @staticmethod
+  def CleanupDir(cwd, relative_dir):
+    """Cleans up untracked files inside |relative_dir|."""
+    return bool(GIT.Capture(['clean', '-df', relative_dir], cwd=cwd))
+
+  @staticmethod
   def GetGitSvnHeadRev(cwd):
     """Gets the most recently pulled git-svn revision."""
     try:
diff --git a/subcommand.py b/subcommand.py
index f0edc4b..9262a4c 100644
--- a/subcommand.py
+++ b/subcommand.py
@@ -37,6 +37,7 @@
     will result in oldname not being documented but supported and redirecting to
     newcmd. Make it a real function that calls the old function if you want it
     to be documented.
+  - CMDfoo_bar will be command 'foo-bar'.
 """
 
 import difflib
@@ -92,6 +93,11 @@
   return sys.modules.get('colorama') or sys.modules.get('third_party.colorama')
 
 
+def _function_to_name(name):
+  """Returns the name of a CMD function."""
+  return name[3:].replace('_', '-')
+
+
 class CommandDispatcher(object):
   def __init__(self, module):
     """module is the name of the main python module where to look for commands.
@@ -113,32 +119,32 @@
 
     Automatically adds 'help' if not already defined.
 
+    Normalizes '_' in the commands to '-'.
+
     A command can be effectively disabled by defining a global variable to None,
     e.g.:
       CMDhelp = None
     """
     cmds = dict(
-        (fn[3:], getattr(self.module, fn))
-        for fn in dir(self.module) if fn.startswith('CMD'))
+        (_function_to_name(name), getattr(self.module, name))
+        for name in dir(self.module) if name.startswith('CMD'))
     cmds.setdefault('help', CMDhelp)
     return cmds
 
-  def find_nearest_command(self, name):
-    """Retrieves the function to handle a command.
+  def find_nearest_command(self, name_asked):
+    """Retrieves the function to handle a command as supplied by the user.
 
-    It automatically tries to guess the intended command by handling typos or
-    incomplete names.
+    It automatically tries to guess the _intended command_ by handling typos
+    and/or incomplete names.
     """
-    # Implicitly replace foo-bar to foo_bar since foo-bar is not a valid python
-    # symbol but it's faster to type.
-    name = name.replace('-', '_')
     commands = self.enumerate_commands()
-    if name in commands:
-      return commands[name]
+    name_to_dash = name_asked.replace('_', '-')
+    if name_to_dash in commands:
+      return commands[name_to_dash]
 
     # An exact match was not found. Try to be smart and look if there's
     # something similar.
-    commands_with_prefix = [c for c in commands if c.startswith(name)]
+    commands_with_prefix = [c for c in commands if c.startswith(name_asked)]
     if len(commands_with_prefix) == 1:
       return commands[commands_with_prefix[0]]
 
@@ -147,7 +153,7 @@
       return difflib.SequenceMatcher(a=a, b=b).ratio()
 
     hamming_commands = sorted(
-        ((close_enough(c, name), c) for c in commands),
+        ((close_enough(c, name_asked), c) for c in commands),
         reverse=True)
     if (hamming_commands[0][0] - hamming_commands[1][0]) < 0.3:
       # Too ambiguous.
@@ -167,8 +173,8 @@
         (name, handler) for name, handler in commands.iteritems()
         if not getattr(handler, 'hidden', None))
     docs = sorted(
-        (name, self._create_command_summary(name, handler))
-        for name, handler in commands.iteritems())
+        (cmd_name, self._create_command_summary(cmd_name, handler))
+        for cmd_name, handler in commands.iteritems())
     # Skip commands without a docstring.
     docs = [i for i in docs if i[1]]
     # Then calculate maximum length for alignment:
@@ -183,14 +189,14 @@
     return (
         'Commands are:\n' +
         ''.join(
-            '  %s%-*s%s %s\n' % (green, length, name, reset, doc)
-            for name, doc in docs))
+            '  %s%-*s%s %s\n' % (green, length, cmd_name, reset, doc)
+            for cmd_name, doc in docs))
 
   def _add_command_usage(self, parser, command):
     """Modifies an OptionParser object with the function's documentation."""
-    name = command.__name__[3:]
-    if name == 'help':
-      name = '<command>'
+    cmd_name = _function_to_name(command.__name__)
+    if cmd_name == 'help':
+      cmd_name = '<command>'
       # Use the module's docstring as the description for the 'help' command if
       # available.
       parser.description = (self.module.__doc__ or '').rstrip()
@@ -214,14 +220,15 @@
         parser.epilog = '\n' + parser.epilog.strip() + '\n'
 
     more = getattr(command, 'usage_more', '')
-    parser.set_usage(
-        'usage: %%prog %s [options]%s' % (name, '' if not more else ' ' + more))
+    extra = '' if not more else ' ' + more
+    parser.set_usage('usage: %%prog %s [options]%s' % (cmd_name, extra))
 
   @staticmethod
-  def _create_command_summary(name, command):
-    """Creates a oneline summary from the command's docstring."""
-    if name != command.__name__[3:]:
-      # Skip aliases.
+  def _create_command_summary(cmd_name, command):
+    """Creates a one-line summary from the command's docstring."""
+    if cmd_name != _function_to_name(command.__name__):
+      # Skip aliases. For example using at module level:
+      # CMDfoo = CMDbar
       return ''
     doc = command.__doc__ or ''
     line = doc.split('\n', 1)[0].rstrip('.')
diff --git a/subprocess2.py b/subprocess2.py
index 9f547a6..21e3487 100644
--- a/subprocess2.py
+++ b/subprocess2.py
@@ -34,13 +34,13 @@
 class CalledProcessError(subprocess.CalledProcessError):
   """Augment the standard exception with more data."""
   def __init__(self, returncode, cmd, cwd, stdout, stderr):
-    super(CalledProcessError, self).__init__(returncode, cmd)
-    self.stdout = stdout
+    super(CalledProcessError, self).__init__(returncode, cmd, output=stdout)
+    self.stdout = self.output  # for backward compatibility.
     self.stderr = stderr
     self.cwd = cwd
 
   def __str__(self):
-    out = 'Command %s returned non-zero exit status %s' % (
+    out = 'Command %r returned non-zero exit status %s' % (
         ' '.join(self.cmd), self.returncode)
     if self.cwd:
       out += ' in ' + self.cwd
diff --git a/testing_support/coverage_utils.py b/testing_support/coverage_utils.py
index f049938..8b91230 100644
--- a/testing_support/coverage_utils.py
+++ b/testing_support/coverage_utils.py
@@ -21,7 +21,7 @@
   %s""") % (version, msg)
   sys.exit(1)
 
-def covered_main(includes, require_native=None):
+def covered_main(includes, require_native=None, required_percentage=100.0):
   """Equivalent of unittest.main(), except that it gathers coverage data, and
   asserts if the test is not at 100% coverage.
 
@@ -63,8 +63,8 @@
     retcode = e.code or retcode
 
   COVERAGE.stop()
-  if COVERAGE.report() != 100.0:
-    print 'FATAL: not at 100% coverage.'
+  if COVERAGE.report() < required_percentage:
+    print 'FATAL: not at required %f%% coverage.' % required_percentage
     retcode = 2
 
   return retcode
diff --git a/testing_support/fake_repos.py b/testing_support/fake_repos.py
index 25e3a9b..92483fa 100755
--- a/testing_support/fake_repos.py
+++ b/testing_support/fake_repos.py
@@ -59,36 +59,6 @@
   return diff
 
 
-def commit_svn(repo, usr, pwd):
-  """Commits the changes and returns the new revision number."""
-  to_add = []
-  to_remove = []
-  for status, filepath in scm.SVN.CaptureStatus(None, repo):
-    if status[0] == '?':
-      to_add.append(filepath)
-    elif status[0] == '!':
-      to_remove.append(filepath)
-  if to_add:
-    subprocess2.check_output(
-        ['svn', 'add', '--no-auto-props', '-q'] + to_add, cwd=repo)
-  if to_remove:
-    subprocess2.check_output(['svn', 'remove', '-q'] + to_remove, cwd=repo)
-
-  out = subprocess2.check_output(
-      ['svn', 'commit', repo, '-m', 'foo', '--non-interactive',
-        '--no-auth-cache',
-        '--username', usr, '--password', pwd],
-      cwd=repo)
-  match = re.search(r'(\d+)', out)
-  if not match:
-    raise Exception('Commit failed', out)
-  rev = match.group(1)
-  status = subprocess2.check_output(['svn', 'status'], cwd=repo)
-  assert len(status) == 0, status
-  logging.debug('At revision %s' % rev)
-  return rev
-
-
 def commit_git(repo):
   """Commits the changes and returns the new hash."""
   subprocess2.check_call(['git', 'add', '-A', '-f'], cwd=repo)
@@ -162,14 +132,14 @@
 
 
 class FakeReposBase(object):
-  """Generate both svn and git repositories to test gclient functionality.
+  """Generate git repositories to test gclient functionality.
 
   Many DEPS functionalities need to be tested: Var, File, From, deps_os, hooks,
   use_relative_paths.
 
-  And types of dependencies: Relative urls, Full urls, both svn and git.
+  And types of dependencies: Relative urls, Full urls, git.
 
-  populateSvn() and populateGit() need to be implemented by the subclass.
+  populateGit() needs to be implemented by the subclass.
   """
   # Hostname
   NB_GIT_REPOS = 1
@@ -181,25 +151,16 @@
   def __init__(self, host=None):
     self.trial = trial_dir.TrialDir('repos')
     self.host = host or '127.0.0.1'
-    # Format is [ None, tree, tree, ...]
-    # i.e. revisions are 1-based.
-    self.svn_revs = [None]
     # Format is { repo: [ None, (hash, tree), (hash, tree), ... ], ... }
     # so reference looks like self.git_hashes[repo][rev][0] for hash and
     # self.git_hashes[repo][rev][1] for it's tree snapshot.
-    # For consistency with self.svn_revs, it is 1-based too.
+    # It is 1-based too.
     self.git_hashes = {}
-    self.svnserve = None
     self.gitdaemon = None
     self.git_pid_file = None
     self.git_root = None
-    self.svn_checkout = None
-    self.svn_repo = None
     self.git_dirty = False
-    self.svn_dirty = False
-    self.svn_port = None
     self.git_port = None
-    self.svn_base = None
     self.git_base = None
 
   @property
@@ -214,50 +175,23 @@
         # self.root_dir is not set before this call.
         self.trial.set_up()
         self.git_root = join(self.root_dir, 'git')
-        self.svn_checkout = join(self.root_dir, 'svn_checkout')
-        self.svn_repo = join(self.root_dir, 'svn')
       finally:
         # Registers cleanup.
         atexit.register(self.tear_down)
 
   def cleanup_dirt(self):
     """For each dirty repository, destroy it."""
-    if self.svn_dirty:
-      if not self.tear_down_svn():
-        logging.error('Using both leaking checkout and svn dirty checkout')
     if self.git_dirty:
       if not self.tear_down_git():
         logging.error('Using both leaking checkout and git dirty checkout')
 
   def tear_down(self):
     """Kills the servers and delete the directories."""
-    self.tear_down_svn()
     self.tear_down_git()
     # This deletes the directories.
     self.trial.tear_down()
     self.trial = None
 
-  def tear_down_svn(self):
-    if self.svnserve:
-      logging.debug('Killing svnserve pid %s' % self.svnserve.pid)
-      try:
-        self.svnserve.kill()
-      except OSError as e:
-        if e.errno != errno.ESRCH:   # no such process
-          raise
-      wait_for_port_to_free(self.host, self.svn_port)
-      self.svnserve = None
-      self.svn_port = None
-      self.svn_base = None
-      if not self.trial.SHOULD_LEAK:
-        logging.debug('Removing %s' % self.svn_repo)
-        gclient_utils.rmtree(self.svn_repo)
-        logging.debug('Removing %s' % self.svn_checkout)
-        gclient_utils.rmtree(self.svn_checkout)
-      else:
-        return False
-    return True
-
   def tear_down_git(self):
     if self.gitdaemon:
       logging.debug('Killing git-daemon pid %s' % self.gitdaemon.pid)
@@ -300,60 +234,6 @@
       else:
         write(join(root, k), v)
 
-  def set_up_svn(self):
-    """Creates subversion repositories and start the servers."""
-    self.set_up()
-    if self.svnserve:
-      return True
-    try:
-      subprocess2.check_call(['svnadmin', 'create', self.svn_repo])
-    except (OSError, subprocess2.CalledProcessError):
-      return False
-    write(join(self.svn_repo, 'conf', 'svnserve.conf'),
-        '[general]\n'
-        'anon-access = read\n'
-        'auth-access = write\n'
-        'password-db = passwd\n')
-    text = '[users]\n'
-    text += ''.join('%s = %s\n' % (usr, pwd) for usr, pwd in self.USERS)
-    write(join(self.svn_repo, 'conf', 'passwd'), text)
-
-    # Necessary to be able to change revision properties
-    revprop_hook_filename = join(self.svn_repo, 'hooks', 'pre-revprop-change')
-    if sys.platform == 'win32':
-      # TODO(kustermann): Test on Windows one day.
-      write("%s.bat" % revprop_hook_filename, "")
-    else:
-      write(revprop_hook_filename,
-          '#!/bin/sh\n'
-          'exit 0\n')
-      os.chmod(revprop_hook_filename, 0755)
-
-    # Mac 10.6 ships with a buggy subversion build and we need this line
-    # to work around the bug.
-    write(join(self.svn_repo, 'db', 'fsfs.conf'),
-        '[rep-sharing]\n'
-        'enable-rep-sharing = false\n')
-
-    # Start the daemon.
-    self.svn_port = find_free_port(self.host, 10000)
-    logging.debug('Using port %d' % self.svn_port)
-    cmd = ['svnserve', '-d', '--foreground', '-r', self.root_dir,
-        '--listen-port=%d' % self.svn_port]
-    if self.host == '127.0.0.1':
-      cmd.append('--listen-host=' + self.host)
-    self.check_port_is_free(self.svn_port)
-    self.svnserve = subprocess2.Popen(
-        cmd,
-        cwd=self.svn_repo,
-        stdout=subprocess2.PIPE,
-        stderr=subprocess2.PIPE)
-    wait_for_port_to_bind(self.host, self.svn_port, self.svnserve)
-    self.svn_base = 'svn://%s:%d/svn/' % (self.host, self.svn_port)
-    self.populateSvn()
-    self.svn_dirty = False
-    return True
-
   def set_up_git(self):
     """Creates git repositories and start the servers."""
     self.set_up()
@@ -390,24 +270,6 @@
     self.git_dirty = False
     return True
 
-  def _commit_svn(self, tree):
-    self._genTree(self.svn_checkout, tree)
-    commit_svn(self.svn_checkout, self.USERS[0][0], self.USERS[0][1])
-    if self.svn_revs and self.svn_revs[-1]:
-      new_tree = self.svn_revs[-1].copy()
-      new_tree.update(tree)
-    else:
-      new_tree = tree.copy()
-    self.svn_revs.append(new_tree)
-
-  def _set_svn_commit_date(self, revision, date):
-    subprocess2.check_output(
-        ['svn', 'propset', 'svn:date', '--revprop', '-r', revision, date,
-         self.svn_base,
-         '--username', self.USERS[0][0],
-         '--password', self.USERS[0][1],
-         '--non-interactive'])
-
   def _commit_git(self, repo, tree):
     repo_root = join(self.git_root, repo)
     self._genTree(repo_root, tree)
@@ -430,133 +292,14 @@
     finally:
       sock.close()
 
-  def populateSvn(self):
-    raise NotImplementedError()
-
   def populateGit(self):
     raise NotImplementedError()
 
 
 class FakeRepos(FakeReposBase):
-  """Implements populateSvn() and populateGit()."""
+  """Implements populateGit()."""
   NB_GIT_REPOS = 5
 
-  def populateSvn(self):
-    """Creates a few revisions of changes including DEPS files."""
-    # Repos
-    subprocess2.check_call(
-        ['svn', 'checkout', self.svn_base, self.svn_checkout,
-         '-q', '--non-interactive', '--no-auth-cache',
-         '--username', self.USERS[0][0], '--password', self.USERS[0][1]])
-    assert os.path.isdir(join(self.svn_checkout, '.svn'))
-    def file_system(rev, DEPS, DEPS_ALT=None):
-      fs = {
-        'origin': 'svn@%(rev)d\n',
-        'trunk/origin': 'svn/trunk@%(rev)d\n',
-        'trunk/src/origin': 'svn/trunk/src@%(rev)d\n',
-        'trunk/src/third_party/origin': 'svn/trunk/src/third_party@%(rev)d\n',
-        'trunk/other/origin': 'src/trunk/other@%(rev)d\n',
-        'trunk/third_party/origin': 'svn/trunk/third_party@%(rev)d\n',
-        'trunk/third_party/foo/origin': 'svn/trunk/third_party/foo@%(rev)d\n',
-        'trunk/third_party/prout/origin': 'svn/trunk/third_party/foo@%(rev)d\n',
-      }
-      for k in fs.iterkeys():
-        fs[k] = fs[k] % { 'rev': rev }
-      fs['trunk/src/DEPS'] = DEPS
-      if DEPS_ALT:
-        fs['trunk/src/DEPS.alt'] = DEPS_ALT
-      return fs
-
-    # Testing:
-    # - dependency disapear
-    # - dependency renamed
-    # - versioned and unversioned reference
-    # - relative and full reference
-    # - deps_os
-    # - var
-    # - hooks
-    # - From
-    # - File
-    # TODO(maruel):
-    # - $matching_files
-    # - use_relative_paths
-    DEPS = """
-vars = {
-  'DummyVariable': 'third_party',
-}
-deps = {
-  'src/other': '%(svn_base)strunk/other@1',
-  'src/third_party/fpp': '/trunk/' + Var('DummyVariable') + '/foo',
-}
-deps_os = {
-  'mac': {
-    'src/third_party/prout': '/trunk/third_party/prout',
-  },
-}""" % { 'svn_base': self.svn_base }
-
-    DEPS_ALT = """
-deps = {
-  'src/other2': '%(svn_base)strunk/other@2'
-}
-""" % { 'svn_base': self.svn_base }
-
-    fs = file_system(1, DEPS, DEPS_ALT)
-    self._commit_svn(fs)
-
-    fs = file_system(2, """
-deps = {
-  'src/other': '%(svn_base)strunk/other',
-  # Load another DEPS and load a dependency from it. That's an example of
-  # WebKit's chromium checkout flow. Verify it works out of order.
-  'src/third_party/foo': From('src/file/other', 'foo/bar'),
-  'src/file/other': File('%(svn_base)strunk/other/DEPS'),
-}
-# I think this is wrong to have the hooks run from the base of the gclient
-# checkout. It's maybe a bit too late to change that behavior.
-hooks = [
-  {
-    'pattern': '.',
-    'action': ['python', '-c',
-               'open(\\'src/svn_hooked1\\', \\'w\\').write(\\'svn_hooked1\\')'],
-  },
-  {
-    # Should not be run.
-    'pattern': 'nonexistent',
-    'action': ['python', '-c',
-               'open(\\'src/svn_hooked2\\', \\'w\\').write(\\'svn_hooked2\\')'],
-  },
-]
-""" % { 'svn_base': self.svn_base })
-    fs['trunk/other/DEPS'] = """
-deps = {
-  'foo/bar': '/trunk/third_party/foo@1',
-  # Only the requested deps should be processed.
-  'invalid': '/does_not_exist',
-}
-"""
-    # WebKit abuses this.
-    fs['trunk/webkit/.gclient'] = """
-solutions = [
-  {
-    'name': './',
-    'url': None,
-  },
-]
-"""
-    fs['trunk/webkit/DEPS'] = """
-deps = {
-  'foo/bar': '%(svn_base)strunk/third_party/foo@1'
-}
-
-hooks = [
-  {
-    'pattern': '.*',
-    'action': ['echo', 'foo'],
-  },
-]
-""" % { 'svn_base': self.svn_base }
-    self._commit_svn(fs)
-
   def populateGit(self):
     # Testing:
     # - dependency disappear
@@ -707,62 +450,11 @@
     })
 
 
-class FakeRepoTransitive(FakeReposBase):
-  """Implements populateSvn()"""
-
-  def populateSvn(self):
-    """Creates a few revisions of changes including a DEPS file."""
-    # Repos
-    subprocess2.check_call(
-        ['svn', 'checkout', self.svn_base, self.svn_checkout,
-         '-q', '--non-interactive', '--no-auth-cache',
-         '--username', self.USERS[0][0], '--password', self.USERS[0][1]])
-    assert os.path.isdir(join(self.svn_checkout, '.svn'))
-
-    def file_system(rev):
-      DEPS = """deps = {
-                'src/different_repo': '%(svn_base)strunk/third_party',
-                'src/different_repo_fixed': '%(svn_base)strunk/third_party@1',
-                'src/same_repo': '/trunk/third_party',
-                'src/same_repo_fixed': '/trunk/third_party@1',
-             }""" % { 'svn_base': self.svn_base }
-      return {
-        'trunk/src/DEPS': DEPS,
-        'trunk/src/origin': 'svn/trunk/src@%(rev)d' % { 'rev': rev },
-        'trunk/third_party/origin':
-            'svn/trunk/third_party@%(rev)d' % { 'rev': rev },
-      }
-
-    # We make three commits. We use always the same DEPS contents but
-    # - 'trunk/src/origin' contains 'svn/trunk/src/origin@rX'
-    # - 'trunk/third_party/origin' contains 'svn/trunk/third_party/origin@rX'
-    # where 'X' is the revision number.
-    # So the 'origin' files will change in every commit.
-    self._commit_svn(file_system(1))
-    self._commit_svn(file_system(2))
-    self._commit_svn(file_system(3))
-    # We rewrite the timestamps so we can test that '--transitive' will take the
-    # parent timestamp on different repositories and the parent revision
-    # otherwise.
-    self._set_svn_commit_date('1', '2011-10-01T03:00:00.000000Z')
-    self._set_svn_commit_date('2', '2011-10-09T03:00:00.000000Z')
-    self._set_svn_commit_date('3', '2011-10-02T03:00:00.000000Z')
-
-  def populateGit(self):
-    pass
-
-
 class FakeRepoSkiaDEPS(FakeReposBase):
   """Simulates the Skia DEPS transition in Chrome."""
 
   NB_GIT_REPOS = 5
 
-  DEPS_svn_pre = """deps = {
-  'src/third_party/skia/gyp': '%(svn_base)sskia/gyp',
-  'src/third_party/skia/include': '%(svn_base)sskia/include',
-  'src/third_party/skia/src': '%(svn_base)sskia/src',
-}"""
-
   DEPS_git_pre = """deps = {
   'src/third_party/skia/gyp': '%(git_base)srepo_3',
   'src/third_party/skia/include': '%(git_base)srepo_4',
@@ -773,32 +465,6 @@
   'src/third_party/skia': '%(git_base)srepo_1',
 }"""
 
-  def populateSvn(self):
-    """Create revisions which simulate the Skia DEPS transition in Chrome."""
-    subprocess2.check_call(
-        ['svn', 'checkout', self.svn_base, self.svn_checkout,
-         '-q', '--non-interactive', '--no-auth-cache',
-         '--username', self.USERS[0][0], '--password', self.USERS[0][1]])
-    assert os.path.isdir(join(self.svn_checkout, '.svn'))
-
-    # Skia repo.
-    self._commit_svn({
-        'skia/skia_base_file': 'root-level file.',
-        'skia/gyp/gyp_file': 'file in the gyp directory',
-        'skia/include/include_file': 'file in the include directory',
-        'skia/src/src_file': 'file in the src directory',
-    })
-
-    # Chrome repo.
-    self._commit_svn({
-        'trunk/src/DEPS': self.DEPS_svn_pre % {'svn_base': self.svn_base},
-        'trunk/src/myfile': 'svn/trunk/src@1'
-    })
-    self._commit_svn({
-        'trunk/src/DEPS': self.DEPS_post % {'git_base': self.git_base},
-        'trunk/src/myfile': 'svn/trunk/src@2'
-    })
-
   def populateGit(self):
     # Skia repo.
     self._commit_git('repo_1', {
@@ -820,14 +486,48 @@
     # Chrome repo.
     self._commit_git('repo_2', {
         'DEPS': self.DEPS_git_pre % {'git_base': self.git_base},
-        'myfile': 'svn/trunk/src@1'
+        'myfile': 'src/trunk/src@1'
     })
     self._commit_git('repo_2', {
         'DEPS': self.DEPS_post % {'git_base': self.git_base},
-        'myfile': 'svn/trunk/src@2'
+        'myfile': 'src/trunk/src@2'
     })
 
 
+class FakeRepoBlinkDEPS(FakeReposBase):
+  """Simulates the Blink DEPS transition in Chrome."""
+
+  NB_GIT_REPOS = 2
+  DEPS_pre = 'deps = {"src/third_party/WebKit": "%(git_base)srepo_2",}'
+  DEPS_post = 'deps = {}'
+
+  def populateGit(self):
+    # Blink repo.
+    self._commit_git('repo_2', {
+        'OWNERS': 'OWNERS-pre',
+        'Source/exists_always': '_ignored_',
+        'Source/exists_before_but_not_after': '_ignored_',
+    })
+
+    # Chrome repo.
+    self._commit_git('repo_1', {
+        'DEPS': self.DEPS_pre % {'git_base': self.git_base},
+        'myfile': 'myfile@1',
+        '.gitignore': '/third_party/WebKit',
+    })
+    self._commit_git('repo_1', {
+        'DEPS': self.DEPS_post % {'git_base': self.git_base},
+        'myfile': 'myfile@2',
+        '.gitignore': '',
+        'third_party/WebKit/OWNERS': 'OWNERS-post',
+        'third_party/WebKit/Source/exists_always': '_ignored_',
+        'third_party/WebKit/Source/exists_after_but_not_before': '_ignored',
+    })
+
+  def populateSvn(self):
+    raise NotImplementedError()
+
+
 class FakeReposTestBase(trial_dir.TestCase):
   """This is vaguely inspired by twisted."""
   # Static FakeRepos instances. Lazy loaded.
@@ -846,11 +546,6 @@
     # self.FAKE_REPOS is kept across tests.
 
   @property
-  def svn_base(self):
-    """Shortcut."""
-    return self.FAKE_REPOS.svn_base
-
-  @property
   def git_base(self):
     """Shortcut."""
     return self.FAKE_REPOS.git_base
@@ -883,23 +578,7 @@
       logging.debug('Actual %s\n%s' % (tree_root, pprint.pformat(actual)))
       logging.debug('Expected\n%s' % pprint.pformat(tree))
       logging.debug('Diff\n%s' % pprint.pformat(diff))
-      self.assertEquals(diff, [])
-
-  def mangle_svn_tree(self, *args):
-    """Creates a 'virtual directory snapshot' to compare with the actual result
-    on disk."""
-    result = {}
-    for item, new_root in args:
-      old_root, rev = item.split('@', 1)
-      tree = self.FAKE_REPOS.svn_revs[int(rev)]
-      for k, v in tree.iteritems():
-        if not k.startswith(old_root):
-          continue
-        item = k[len(old_root) + 1:]
-        if item.startswith('.'):
-          continue
-        result[join(new_root, item).replace(os.sep, '/')] = v
-    return result
+    self.assertEquals(diff, {})
 
   def mangle_git_tree(self, *args):
     """Creates a 'virtual directory snapshot' to compare with the actual result
@@ -925,7 +604,6 @@
   fake = FakeRepos()
   print 'Using %s' % fake.root_dir
   try:
-    fake.set_up_svn()
     fake.set_up_git()
     print('Fake setup, press enter to quit or Ctrl-C to keep the checkouts.')
     sys.stdin.readline()
diff --git a/testing_support/filesystem_mock.py b/testing_support/filesystem_mock.py
index 07e1834..b45ea8e 100644
--- a/testing_support/filesystem_mock.py
+++ b/testing_support/filesystem_mock.py
@@ -38,6 +38,11 @@
       return path[:-1]
     return path
 
+  def basename(self, path):
+    if self.sep not in path:
+      return ''
+    return self._split(path)[-1] or self.sep
+
   def dirname(self, path):
     if self.sep not in path:
       return ''
diff --git a/testing_support/get_appengine.py b/testing_support/get_appengine.py
new file mode 100755
index 0000000..98820ff
--- /dev/null
+++ b/testing_support/get_appengine.py
@@ -0,0 +1,139 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This script is copied from
+https://chromium.googlesource.com/infra/infra.git/+/master/bootstrap
+"""
+
+import datetime
+import logging
+import optparse
+import os
+import re
+import shutil
+import sys
+import time
+import tempfile
+import urllib2
+import zipfile
+
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+def get_gae_sdk_version(gae_path):
+  """Returns the installed GAE SDK version or None."""
+  version_path = os.path.join(gae_path, 'VERSION')
+  if os.path.isfile(version_path):
+    values = dict(
+        map(lambda x: x.strip(), l.split(':'))
+        for l in open(version_path) if ':' in l)
+    if 'release' in values:
+      return values['release'].strip('"')
+
+
+def get_latest_gae_sdk_url(name):
+  """Returns the url to get the latest GAE SDK and its version."""
+  url = 'https://cloud.google.com/appengine/downloads.html'
+  logging.debug('%s', url)
+  content = urllib2.urlopen(url).read()
+  regexp = (
+      r'(https\:\/\/storage.googleapis.com\/appengine-sdks\/featured\/'
+      + re.escape(name) + r'[0-9\.]+?\.zip)')
+  m = re.search(regexp, content)
+  url = m.group(1)
+  # Calculate the version from the url.
+  new_version = re.search(re.escape(name) + r'(.+?).zip', url).group(1)
+  # Upgrade to https
+  return url.replace('http://', 'https://'), new_version
+
+
+def extract_zip(z, root_path):
+  """Extracts files in a zipfile but keep the executable bits."""
+  count = 0
+  for f in z.infolist():
+    perm = (f.external_attr >> 16L) & 0777
+    mtime = time.mktime(datetime.datetime(*f.date_time).timetuple())
+    filepath = os.path.join(root_path, f.filename)
+    logging.debug('Extracting %s', f.filename)
+    if f.filename.endswith('/'):
+      os.mkdir(filepath, perm)
+    else:
+      z.extract(f, root_path)
+      os.chmod(filepath, perm)
+      count += 1
+    os.utime(filepath, (mtime, mtime))
+  print('Extracted %d files' % count)
+
+
+def install_latest_gae_sdk(root_path, fetch_go, dry_run):
+  if fetch_go:
+    rootdir = 'go_appengine'
+    if sys.platform == 'darwin':
+      name = 'go_appengine_sdk_darwin_amd64-'
+    else:
+      # Add other platforms as needed.
+      name = 'go_appengine_sdk_linux_amd64-'
+  else:
+    rootdir = 'google_appengine'
+    name = 'google_appengine_'
+
+  # The zip file already contains 'google_appengine' (for python) or
+  # 'go_appengine' (for go) in its path so it's a bit
+  # awkward to unzip otherwise. Hard code the path in for now.
+  gae_path = os.path.join(root_path, rootdir)
+  print('Looking up path %s' % gae_path)
+  version = get_gae_sdk_version(gae_path)
+  if version:
+    print('Found installed version %s' % version)
+  else:
+    print('Didn\'t find an SDK')
+
+  url, new_version = get_latest_gae_sdk_url(name)
+  print('New version is %s' % new_version)
+  if version == new_version:
+    return 0
+
+  if os.path.isdir(gae_path):
+    print('Removing previous version')
+    if not dry_run:
+      shutil.rmtree(gae_path)
+
+  print('Fetching %s' % url)
+  if not dry_run:
+    u = urllib2.urlopen(url)
+    with tempfile.NamedTemporaryFile() as f:
+      while True:
+        chunk = u.read(2 ** 20)
+        if not chunk:
+          break
+        f.write(chunk)
+      # Assuming we're extracting there. In fact, we have no idea.
+      print('Extracting into %s' % gae_path)
+      z = zipfile.ZipFile(f, 'r')
+      try:
+        extract_zip(z, root_path)
+      finally:
+        z.close()
+  return 0
+
+
+def main():
+  parser = optparse.OptionParser(prog='python -m %s' % __package__)
+  parser.add_option('-v', '--verbose', action='store_true')
+  parser.add_option(
+      '-g', '--go', action='store_true', help='Defaults to python SDK')
+  parser.add_option(
+      '-d', '--dest', default=os.path.dirname(BASE_DIR), help='Output')
+  parser.add_option('--dry-run', action='store_true', help='Do not download')
+  options, args = parser.parse_args()
+  if args:
+    parser.error('Unsupported args: %s' % ' '.join(args))
+  logging.basicConfig(level=logging.DEBUG if options.verbose else logging.ERROR)
+  return install_latest_gae_sdk(
+      os.path.abspath(options.dest), options.go, options.dry_run)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/testing_support/git_test_utils.py b/testing_support/git_test_utils.py
index 10e54f5..fbe7c73 100644
--- a/testing_support/git_test_utils.py
+++ b/testing_support/git_test_utils.py
@@ -349,7 +349,6 @@
         env['GIT_%s' % singleton] = str(val)
     return env
 
-
   def git(self, *args, **kwargs):
     """Runs a git command specified by |args| in this repo."""
     assert self.repo_path is not None
diff --git a/testing_support/local_rietveld.py b/testing_support/local_rietveld.py
index d394913..83d232f 100755
--- a/testing_support/local_rietveld.py
+++ b/testing_support/local_rietveld.py
@@ -58,48 +58,55 @@
     # TODO(maruel): This should be in /tmp but that would mean having to fetch
     # everytime. This test is already annoyingly slow.
     self.rietveld = os.path.join(self.base_dir, '_rietveld')
+    self.rietveld_app = os.path.join(
+        self.rietveld, 'appengine', 'chromium_rietveld')
     self.test_server = None
     self.port = None
     self.tempdir = None
-
-    # Find the GAE SDK
-    previous_dir = ''
-    self.sdk_path = ''
-    base_dir = self.base_dir
-    while base_dir != previous_dir:
-      previous_dir = base_dir
-      self.sdk_path = os.path.join(base_dir, 'google_appengine')
-      if not os.path.isfile(os.path.join(self.sdk_path, 'VERSION')):
-        base_dir = os.path.dirname(base_dir)
-    self.dev_app = os.path.join(self.sdk_path, 'dev_appserver.py')
+    self.dev_app = None
 
   def install_prerequisites(self):
-    # First, verify the Google AppEngine SDK is available.
-    if not os.path.isfile(self.dev_app):
-      raise Failure(
-          'Install google_appengine sdk in %s or higher up' % self.base_dir)
+    # First, install the Google AppEngine SDK.
+    cmd = [os.path.join(self.base_dir, 'get_appengine.py'),
+           '--dest=%s' % self.base_dir]
+    try:
+      subprocess2.check_call(cmd)
+    except (OSError, subprocess2.CalledProcessError), e:
+      raise Failure('Failed to run %s\n%s' % (cmd, e))
+    sdk_path = os.path.join(self.base_dir, 'google_appengine')
+    self.dev_app = os.path.join(sdk_path, 'dev_appserver.py')
 
-    if os.path.isdir(os.path.join(self.rietveld, '.svn')):
-      # Left over from subversion. Delete it.
+    if os.path.isdir(os.path.join(self.rietveld, '.hg')):
+      # Left over from mercurial. Delete it.
+      print('Deleting deprecated mercurial rietveld files...')
       shutil.rmtree(self.rietveld)
 
     # Second, checkout rietveld if not available.
-    rev = '9349cab9a3bb'
     if not os.path.isdir(self.rietveld):
       print('Checking out rietveld...')
       try:
+        subprocess2.check_call(['git', 'init', self.rietveld])
         subprocess2.check_call(
-            [ 'hg', 'clone', '-q', '-u', rev, '-r', rev,
-              'https://code.google.com/p/rietveld/', self.rietveld])
+            ['git', 'remote', 'add', '-f', 'origin',
+             'https://chromium.googlesource.com/infra/infra.git'],
+            cwd=self.rietveld)
+        subprocess2.check_call(
+            ['git', 'config', 'core.sparseCheckout', 'true'],
+            cwd=self.rietveld)
+        with file(os.path.join(self.rietveld, '.git/info/sparse-checkout'),
+                  'w') as sparse_file:
+          sparse_file.write('appengine/chromium_rietveld')
+        subprocess2.check_call(
+            ['git', 'pull', 'origin', 'master'],
+            cwd=self.rietveld)
       except (OSError, subprocess2.CalledProcessError), e:
-        raise Failure(
-            'Failed to checkout rietveld. Do you have mercurial installed?\n'
-            '%s' % e)
+        raise Failure('Failed to clone rietveld. \n%s' % e)
     else:
       print('Syncing rietveld...')
       try:
         subprocess2.check_call(
-            ['hg', 'co', '-q', '-C', rev], cwd=self.rietveld)
+            ['git', 'pull', 'origin', 'master'],
+            cwd=self.rietveld)
       except (OSError, subprocess2.CalledProcessError), e:
         raise Failure('Failed to sync rietveld\n%s' % e)
 
@@ -117,7 +124,7 @@
     cmd = [
         sys.executable,
         self.dev_app,
-        '.',
+        './app.yaml',  # Explicitly specify file to avoid bringing up backends.
         '--port', str(self.port),
         '--admin_port', str(admin_port),
         '--storage', self.tempdir,
@@ -133,7 +140,7 @@
       cmd.extend(('-a', '0.0.0.0'))
     logging.info(' '.join(cmd))
     self.test_server = subprocess2.Popen(
-        cmd, stdout=stdout, stderr=stderr, cwd=self.rietveld)
+        cmd, stdout=stdout, stderr=stderr, cwd=self.rietveld_app)
     # Loop until port 127.0.0.1:port opens or the process dies.
     while not test_port(self.port):
       self.test_server.poll()
diff --git a/testing_support/super_mox.py b/testing_support/super_mox.py
index 3322719..36abab4 100644
--- a/testing_support/super_mox.py
+++ b/testing_support/super_mox.py
@@ -55,11 +55,6 @@
     return (self._RANDOM_CHOICE((self._OS_SEP, '')) +
             self._DirElts(max_elt_count, max_elt_length))
 
-  def SvnUrl(self, max_elt_count=4, max_elt_length=8):
-    return ('svn://random_host:port/a' +
-            self._DirElts(max_elt_count, max_elt_length
-                ).replace(self._OS_SEP, '/'))
-
   def RootDir(self, max_elt_count=4, max_elt_length=8):
     return self._OS_SEP + self._DirElts(max_elt_count, max_elt_length)
 
diff --git a/tests/abandon.sh b/tests/abandon.sh
index da4cf7f..314f556 100755
--- a/tests/abandon.sh
+++ b/tests/abandon.sh
@@ -24,7 +24,7 @@
   git add test; git commit -q -m "branch work"
   export GIT_EDITOR=$(which true)
   test_expect_success "upload succeeds" \
-    "$GIT_CL upload -m test master  | grep -q 'Issue created'"
+    "$GIT_CL upload --no-oauth2 -m test master  | grep -q 'Issue created'"
 
   # Switch back to master, delete the branch.
   git checkout master
diff --git a/tests/basic.sh b/tests/basic.sh
index 85dedae..0984ae0 100755
--- a/tests/basic.sh
+++ b/tests/basic.sh
@@ -21,18 +21,18 @@
   git add test; git commit -q -m "branch work"
 
   test_expect_success "git-cl upload wants a server" \
-    "$GIT_CL upload 2>&1 | grep -q 'You must configure'"
+    "$GIT_CL upload --no-oauth2 2>&1 | grep -q 'You must configure'"
 
   git config rietveld.server localhost:10000
 
   test_expect_success "git-cl status has no issue" \
-    "$GIT_CL_STATUS | grep -q 'no issue'"
+    "$GIT_CL_STATUS | grep -q 'No issue assigned'"
 
   # Prevent the editor from coming up when you upload.
   export GIT_EDITOR=$(which true)
 
   test_expect_success "upload succeeds (needs a server running on localhost)" \
-    "$GIT_CL upload -m test master | grep -q 'Issue created'"
+    "$GIT_CL upload --no-oauth2 -m test master | grep -q 'Issue created'"
 
   test_expect_success "git-cl status now knows the issue" \
     "$GIT_CL_STATUS | grep -q 'Issue number'"
@@ -46,7 +46,7 @@
        $URL/edit
 
   test_expect_success "git-cl dcommits ok" \
-    "$GIT_CL dcommit -f"
+    "$GIT_CL dcommit -f --no-oauth2"
 
   git checkout -q master
   git svn -q rebase >/dev/null 2>&1
diff --git a/tests/checkout_test.py b/tests/checkout_test.py
index d07f15a..de0d7e6 100755
--- a/tests/checkout_test.py
+++ b/tests/checkout_test.py
@@ -34,16 +34,6 @@
 class FakeRepos(fake_repos.FakeReposBase):
   TEST_GIT_REPO = 'repo_1'
 
-  def populateSvn(self):
-    """Creates a few revisions of changes files."""
-    subprocess2.check_call(
-        ['svn', 'checkout', self.svn_base, self.svn_checkout, '-q',
-         '--non-interactive', '--no-auth-cache',
-         '--username', self.USERS[0][0], '--password', self.USERS[0][1]])
-    assert os.path.isdir(os.path.join(self.svn_checkout, '.svn'))
-    self._commit_svn(self._svn_tree_1())
-    self._commit_svn(self._svn_tree_2())
-
   def populateGit(self):
     """Creates a few revisions of changes files."""
     self._commit_git(self.TEST_GIT_REPO, self._git_tree())
@@ -96,52 +86,6 @@
       '#endif\n')
     return fs
 
-  @staticmethod
-  def _svn_tree_1():
-    fs = {}
-    fs['trunk/origin'] = 'svn@1'
-    fs['trunk/codereview.settings'] = (
-        '# Test data\n'
-        'bar: pouet\n')
-    fs['trunk/chrome/file.cc'] = (
-        'a\n'
-        'bb\n'
-        'ccc\n'
-        'dd\n'
-        'e\n'
-        'ff\n'
-        'ggg\n'
-        'hh\n'
-        'i\n'
-        'jj\n'
-        'kkk\n'
-        'll\n'
-        'm\n'
-        'nn\n'
-        'ooo\n'
-        'pp\n'
-        'q\n')
-    return fs
-
-  @classmethod
-  def _svn_tree_2(cls):
-    fs = cls._svn_tree_1()
-    fs['trunk/origin'] = 'svn@2\n'
-    fs['trunk/extra'] = 'dummy\n'
-    fs['trunk/bin_file'] = '\x00'
-    fs['trunk/chromeos/views/DOMui_menu_widget.h'] = (
-      '// Copyright (c) 2010\n'
-      '// Use of this source code\n'
-      '// found in the LICENSE file.\n'
-      '\n'
-      '#ifndef DOM\n'
-      '#define DOM\n'
-      '#pragma once\n'
-      '\n'
-      '#include <string>\n'
-      '#endif\n')
-    return fs
-
 
 # pylint: disable=R0201
 class BaseTest(fake_repos.FakeReposTestBase):
@@ -150,9 +94,6 @@
   is_read_only = False
 
   def setUp(self):
-    # Need to enforce subversion_config first.
-    checkout.SvnMixIn.svn_config_dir = os.path.join(
-        ROOT_DIR, 'subversion_config')
     super(BaseTest, self).setUp()
     self._old_call = subprocess2.call
     def redirect_call(args, **kwargs):
@@ -240,107 +181,6 @@
     self.assertTree(tree, root)
 
 
-class SvnBaseTest(BaseTest):
-  def setUp(self):
-    super(SvnBaseTest, self).setUp()
-    self.enabled = self.FAKE_REPOS.set_up_svn()
-    self.assertTrue(self.enabled)
-    self.svn_trunk = 'trunk'
-    self.svn_url = self.svn_base + self.svn_trunk
-    self.previous_log = self._log()
-
-  def _log(self):
-    # Don't use the local checkout in case of caching incorrency.
-    out = subprocess2.check_output(
-        ['svn', 'log', self.svn_url,
-         '--non-interactive', '--no-auth-cache',
-         '--username', self.usr, '--password', self.pwd,
-         '--with-all-revprops', '--xml',
-         '--limit', '1'])
-    logentry = ElementTree.XML(out).find('logentry')
-    if logentry == None:
-      return {'revision': 0}
-    data = {
-        'revision': int(logentry.attrib['revision']),
-    }
-    def set_item(name):
-      item = logentry.find(name)
-      if item != None:
-        data[name] = item.text
-    set_item('author')
-    set_item('msg')
-    revprops = logentry.find('revprops')
-    if revprops != None:
-      data['revprops'] = []
-      for prop in revprops.getiterator('property'):
-        data['revprops'].append((prop.attrib['name'], prop.text))
-    return data
-
-  def _check_base(self, co, root, expected):
-    read_only = isinstance(co, checkout.ReadOnlyCheckout)
-    self.assertEquals(not read_only, bool(expected))
-    self.assertEquals(read_only, self.is_read_only)
-    if not read_only:
-      self.FAKE_REPOS.svn_dirty = True
-
-    self.assertEquals(root, co.project_path)
-    svn_rev = co.prepare(None)
-    self.assertEquals(int, type(svn_rev))
-    self.assertEquals(self.previous_log['revision'], svn_rev)
-    self.assertEquals('pouet', co.get_settings('bar'))
-    self.assertTree(self.get_trunk(False), root)
-    patches = self.get_patches()
-    co.apply_patch(patches)
-    self.assertEquals(
-        ['bin_file', 'chrome/file.cc', 'new_dir/subdir/new_file', 'extra'],
-        patches.filenames)
-
-    # Verify that the patch is applied even for read only checkout.
-    self.assertTree(self.get_trunk(True), root)
-    fake_author = self.FAKE_REPOS.USERS[1][0]
-    revision = co.commit(u'msg', fake_author)
-    # Nothing changed.
-    self.assertTree(self.get_trunk(True), root)
-
-    if read_only:
-      self.assertEquals('FAKE', revision)
-      self.assertEquals(self.previous_log['revision'], co.prepare(None))
-      # Changes should be reverted now.
-      self.assertTree(self.get_trunk(False), root)
-      expected = self.previous_log
-    else:
-      self.assertEquals(self.previous_log['revision'] + 1, revision)
-      self.assertEquals(self.previous_log['revision'] + 1, co.prepare(None))
-      self.assertTree(self.get_trunk(True), root)
-      expected = expected.copy()
-      expected['msg'] = 'msg'
-      expected['revision'] = self.previous_log['revision'] + 1
-      expected.setdefault('author', fake_author)
-
-    actual = self._log()
-    self.assertEquals(expected, actual)
-
-  def _test_prepare(self, co):
-    self.assertEquals(1, co.prepare(1))
-
-  def get_trunk(self, modified):
-    tree = {}
-    subroot = 'trunk/'
-    for k, v in self.FAKE_REPOS.svn_revs[-1].iteritems():
-      if k.startswith(subroot):
-        f = k[len(subroot):]
-        assert f not in tree
-        tree[f] = v
-
-    if modified:
-      content_lines = tree['chrome/file.cc'].splitlines(True)
-      tree['chrome/file.cc'] = ''.join(
-          content_lines[0:5] + ['FOO!\n'] + content_lines[5:])
-      del tree['extra']
-      tree['new_dir/subdir/new_file'] = 'A new file\nshould exist.\n'
-    return tree
-
-
 class GitBaseTest(BaseTest):
   def setUp(self):
     super(GitBaseTest, self).setUp()
@@ -454,11 +294,12 @@
     root = os.path.join(self.root_dir, self.name)
     self._check_base(self._get_co(None), root, None)
 
+  @unittest.skip('flaky')
   def testException(self):
     self._check_exception(
         self._get_co(None),
-        'While running git apply --index -p1;\n  fatal: corrupt patch at line '
-        '12\n')
+        'While running git apply --index -3 -p1;\n  fatal: corrupt patch at '
+        'line 12\n')
 
   def testProcess(self):
     self._test_process(self._get_co)
@@ -480,250 +321,6 @@
     self.assertEquals(expected, out)
 
 
-class SvnCheckout(SvnBaseTest):
-  def _get_co(self, post_processors):
-    self.assertNotEqual(False, post_processors)
-    return checkout.SvnCheckout(
-        self.root_dir, self.name, self.usr, self.pwd, self.svn_url,
-        post_processors)
-
-  def testAll(self):
-    expected = {
-        'author': self.FAKE_REPOS.USERS[0][0],
-        'revprops': [('realauthor', self.FAKE_REPOS.USERS[1][0])]
-    }
-    root = os.path.join(self.root_dir, self.name)
-    self._check_base(self._get_co(None), root, expected)
-
-  def testException(self):
-    self._check_exception(
-        self._get_co(None),
-        'While running patch -p1 --forward --force --no-backup-if-mismatch;\n'
-        '  patching file chrome/file.cc\n'
-        '  Hunk #1 FAILED at 3.\n'
-        '  1 out of 1 hunk FAILED -- saving rejects to file '
-        'chrome/file.cc.rej\n')
-
-  def testSvnProps(self):
-    co = self._get_co(None)
-    co.prepare(None)
-    try:
-      # svn:ignore can only be applied to directories.
-      svn_props = [('svn:ignore', 'foo')]
-      co.apply_patch(
-          [patch.FilePatchDiff('chrome/file.cc', RAW.PATCH, svn_props)])
-      self.fail()
-    except checkout.PatchApplicationFailed, e:
-      self.assertEquals(e.filename, 'chrome/file.cc')
-      self.assertEquals(
-          e.status,
-          'While running svn propset svn:ignore foo chrome/file.cc '
-          '--non-interactive;\n'
-          '  patching file chrome/file.cc\n'
-          '  svn: Cannot set \'svn:ignore\' on a file (\'chrome/file.cc\')\n')
-    co.prepare(None)
-    svn_props = [('svn:eol-style', 'LF'), ('foo', 'bar')]
-    co.apply_patch(
-        [patch.FilePatchDiff('chrome/file.cc', RAW.PATCH, svn_props)])
-    filepath = os.path.join(self.root_dir, self.name, 'chrome/file.cc')
-    # Manually verify the properties.
-    props = subprocess2.check_output(
-        ['svn', 'proplist', filepath],
-        cwd=self.root_dir).splitlines()[1:]
-    props = sorted(p.strip() for p in props)
-    expected_props = dict(svn_props)
-    self.assertEquals(sorted(expected_props.iterkeys()), props)
-    for k, v in expected_props.iteritems():
-      value = subprocess2.check_output(
-        ['svn', 'propget', '--strict', k, filepath],
-        cwd=self.root_dir).strip()
-      self.assertEquals(v, value)
-
-  def testWithRevPropsSupport(self):
-    # Add the hook that will commit in a way that removes the race condition.
-    hook = os.path.join(self.FAKE_REPOS.svn_repo, 'hooks', 'pre-commit')
-    shutil.copyfile(os.path.join(ROOT_DIR, 'sample_pre_commit_hook'), hook)
-    os.chmod(hook, 0755)
-    expected = {
-        'revprops': [('commit-bot', 'user1@example.com')],
-    }
-    root = os.path.join(self.root_dir, self.name)
-    self._check_base(self._get_co(None), root, expected)
-
-  def testWithRevPropsSupportNotCommitBot(self):
-    # Add the hook that will commit in a way that removes the race condition.
-    hook = os.path.join(self.FAKE_REPOS.svn_repo, 'hooks', 'pre-commit')
-    shutil.copyfile(os.path.join(ROOT_DIR, 'sample_pre_commit_hook'), hook)
-    os.chmod(hook, 0755)
-    co = checkout.SvnCheckout(
-        self.root_dir, self.name,
-        self.FAKE_REPOS.USERS[1][0], self.FAKE_REPOS.USERS[1][1],
-        self.svn_url)
-    root = os.path.join(self.root_dir, self.name)
-    expected = {
-        'author': self.FAKE_REPOS.USERS[1][0],
-    }
-    self._check_base(co, root, expected)
-
-  def testAutoProps(self):
-    co = self._get_co(None)
-    co.svn_config = checkout.SvnConfig(
-        os.path.join(ROOT_DIR, 'subversion_config'))
-    co.prepare(None)
-    patches = self.get_patches()
-    co.apply_patch(patches)
-    self.assertEquals(
-        ['bin_file', 'chrome/file.cc', 'new_dir/subdir/new_file', 'extra'],
-        patches.filenames)
-    # *.txt = svn:eol-style=LF in subversion_config/config.
-    out = subprocess2.check_output(
-        ['svn', 'pget', 'svn:eol-style', 'chrome/file.cc'],
-        cwd=co.project_path)
-    self.assertEquals('LF\n', out)
-
-  def testProcess(self):
-    self._test_process(self._get_co)
-
-  def testPrepare(self):
-    self._test_prepare(self._get_co(None))
-
-  def testMove(self):
-    co = self._get_co(None)
-    self._check_move(co)
-    out = subprocess2.check_output(
-        ['svn', 'status'], cwd=co.project_path)
-    out = sorted(out.splitlines())
-    expected = sorted(
-      [
-        'A  +    chromeos/views/webui_menu_widget.h',
-        'D       chromeos/views/DOMui_menu_widget.h',
-      ])
-    self.assertEquals(expected, out)
-    # Make sure ancestry is what is expected;
-    env = os.environ.copy()
-    env['LANGUAGE'] = 'en_US.UTF-8'
-    out = subprocess2.check_output(
-        ['svn', 'info', 'chromeos/views/webui_menu_widget.h'],
-        cwd=co.project_path,
-        env=env)
-    values = dict(l.split(': ', 1) for l in out.splitlines() if l)
-    expected = {
-      'Checksum': '65837bb3da662c8fa88a4a50940ea7c6',
-      'Copied From Rev': '2',
-      'Copied From URL':
-          '%strunk/chromeos/views/DOMui_menu_widget.h' % self.svn_base,
-      'Name': 'webui_menu_widget.h',
-      'Node Kind': 'file',
-      'Path': 'chromeos/views/webui_menu_widget.h',
-      'Repository Root': '%s' % self.svn_base.rstrip('/'),
-      'Revision': '2',
-      'Schedule': 'add',
-      'URL': '%strunk/chromeos/views/webui_menu_widget.h' % self.svn_base,
-    }
-    self.assertEquals(expected, values)
-
-
-class RawCheckout(SvnBaseTest):
-  def setUp(self):
-    super(RawCheckout, self).setUp()
-    # Use a svn checkout as the base.
-    self.base_co = checkout.SvnCheckout(
-        self.root_dir, self.name, None, None, self.svn_url)
-    self.base_co.prepare(None)
-
-  def _get_co(self, post_processors):
-    self.assertNotEqual(False, post_processors)
-    return checkout.RawCheckout(self.root_dir, self.name, post_processors)
-
-  def testAll(self):
-    # Can't use self._check_base() since it's too different.
-    root = os.path.join(self.root_dir, self.name)
-    co = self._get_co(None)
-
-    # A copy of BaseTest._check_base()
-    self.assertEquals(root, co.project_path)
-    self.assertEquals(None, co.prepare(None))
-    self.assertEquals('pouet', co.get_settings('bar'))
-    self.assertTree(self.get_trunk(False), root)
-    patches = self.get_patches()
-    co.apply_patch(patches)
-    self.assertEquals(
-        ['bin_file', 'chrome/file.cc', 'new_dir/subdir/new_file', 'extra'],
-        patches.filenames)
-
-    # Verify that the patch is applied even for read only checkout.
-    self.assertTree(self.get_trunk(True), root)
-    try:
-      co.commit(u'msg', self.FAKE_REPOS.USERS[1][0])
-      self.fail()
-    except NotImplementedError:
-      pass
-    self.assertTree(self.get_trunk(True), root)
-    # Verify that prepare() is a no-op.
-    self.assertEquals(None, co.prepare(None))
-    self.assertTree(self.get_trunk(True), root)
-
-  def testException(self):
-    self._check_exception(
-        self._get_co(None),
-        'While running patch -u --binary -p1;\n'
-        '  patching file chrome/file.cc\n'
-        '  Hunk #1 FAILED at 3.\n'
-        '  1 out of 1 hunk FAILED -- saving rejects to file '
-        'chrome/file.cc.rej\n')
-
-  def testProcess(self):
-    self._test_process(self._get_co)
-
-  def testPrepare(self):
-    # RawCheckout doesn't support prepare() but emulate it.
-    co = self._get_co(None)
-    revs = [1]
-    def prepare(asked):
-      self.assertEquals(1, asked)
-      return revs.pop(0)
-    co.prepare = prepare
-    self._test_prepare(co)
-    self.assertEquals([], revs)
-
-  def testMove(self):
-    self._check_move(self._get_co(None))
-
-
-class ReadOnlyCheckout(SvnBaseTest):
-  # Use SvnCheckout as the backed since it support read-only checkouts too.
-  is_read_only = True
-
-  def _get_co(self, post_processors):
-    self.assertNotEqual(False, post_processors)
-    return checkout.ReadOnlyCheckout(
-        checkout.SvnCheckout(
-            self.root_dir, self.name, None, None, self.svn_url, None),
-        post_processors)
-
-  def testAll(self):
-    root = os.path.join(self.root_dir, self.name)
-    self._check_base(self._get_co(None), root, None)
-
-  def testException(self):
-    self._check_exception(
-        self._get_co(None),
-        'While running patch -p1 --forward --force --no-backup-if-mismatch;\n'
-        '  patching file chrome/file.cc\n'
-        '  Hunk #1 FAILED at 3.\n'
-        '  1 out of 1 hunk FAILED -- saving rejects to file '
-        'chrome/file.cc.rej\n')
-
-  def testProcess(self):
-    self._test_process(self._get_co)
-
-  def testPrepare(self):
-    self._test_prepare(self._get_co(None))
-
-  def testMove(self):
-    self._check_move(self._get_co(None))
-
-
 if __name__ == '__main__':
   if '-v' in sys.argv:
     DEBUGGING = True
diff --git a/tests/download_from_google_storage_unittests.py b/tests/download_from_google_storage_unittests.py
index abdca72..f87c6a7 100755
--- a/tests/download_from_google_storage_unittests.py
+++ b/tests/download_from_google_storage_unittests.py
@@ -23,7 +23,7 @@
 # ../third_party/gsutil/gsutil
 GSUTIL_DEFAULT_PATH = os.path.join(
     os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
-    'third_party', 'gsutil', 'gsutil')
+    'gsutil.py')
 TEST_DIR = os.path.dirname(os.path.abspath(__file__))
 
 
@@ -36,8 +36,8 @@
     self.history = []
     self.lock = threading.Lock()
 
-  def add_expected(self, return_code, out, err):
-    self.expected.append((return_code, out, err))
+  def add_expected(self, return_code, out, err, fn=None):
+    self.expected.append((return_code, out, err, fn))
 
   def append_history(self, method, args):
     self.history.append((method, args))
@@ -46,7 +46,10 @@
     with self.lock:
       self.append_history('call', args)
       if self.expected:
-        return self.expected.pop(0)[0]
+        code, _out, _err, fn = self.expected.pop(0)
+        if fn:
+          fn()
+        return code
       else:
         return 0
 
@@ -54,7 +57,10 @@
     with self.lock:
       self.append_history('check_call', args)
       if self.expected:
-        return self.expected.pop(0)
+        code, out, err, fn = self.expected.pop(0)
+        if fn:
+          fn()
+        return code, out, err
       else:
         return (0, '', '')
 
@@ -169,12 +175,11 @@
         ('check_call',
             ('ls', input_filename)),
         ('check_call',
-            ('cp', '-q', input_filename, output_filename))]
+            ('cp', input_filename, output_filename))]
     if sys.platform != 'win32':
       expected_calls.append(
           ('check_call',
-           ('ls',
-            '-L',
+           ('stat',
             'gs://sometesturl/7871c8e24da15bad8b0be2c36edc9dc77e37727f')))
     expected_output = [
         '0> Downloading %s...' % output_filename]
@@ -210,7 +215,7 @@
         0, self.queue, False, self.base_url, self.gsutil,
         stdout_queue, self.ret_codes, True)
     expected_output = [
-        '0> File %s for %s does not exist, skipping.' % (
+        '0> Failed to fetch file %s for %s, skipping. [Err: ]' % (
             input_filename, output_filename),
     ]
     expected_calls = [
@@ -218,7 +223,7 @@
             ('ls', input_filename))
     ]
     expected_ret_codes = [
-        (1, 'File %s for %s does not exist.' % (
+        (1, 'Failed to fetch file %s for %s. [Err: ]' % (
             input_filename, output_filename))
     ]
     self.assertEqual(list(stdout_queue.queue), expected_output)
@@ -248,17 +253,42 @@
         ('check_call',
             ('ls', input_filename)),
         ('check_call',
-            ('cp', '-q', input_filename, output_filename))
+            ('cp', input_filename, output_filename))
     ]
     if sys.platform != 'win32':
       expected_calls.append(
           ('check_call',
-           ('ls',
-            '-L',
+           ('stat',
             'gs://sometesturl/7871c8e24da15bad8b0be2c36edc9dc77e37727f')))
     self.assertEqual(self.gsutil.history, expected_calls)
     self.assertEqual(code, 101)
 
+  def test_corrupt_download(self):
+    q = Queue.Queue()
+    out_q = Queue.Queue()
+    ret_codes = Queue.Queue()
+    tmp_dir = tempfile.mkdtemp()
+    sha1_hash = '7871c8e24da15bad8b0be2c36edc9dc77e37727f'
+    output_filename = os.path.join(tmp_dir, 'lorem_ipsum.txt')
+    q.put(('7871c8e24da15bad8b0be2c36edc9dc77e37727f', output_filename))
+    q.put((None, None))
+    def _write_bad_file():
+      with open(output_filename, 'w') as f:
+        f.write('foobar')
+    self.gsutil.add_expected(0, '', '')
+    self.gsutil.add_expected(0, '', '', _write_bad_file)
+    download_from_google_storage._downloader_worker_thread(
+        1, q, True, self.base_url, self.gsutil, out_q, ret_codes, True)
+    self.assertTrue(q.empty())
+    msg = ('1> ERROR remote sha1 (%s) does not match expected sha1 (%s).' %
+           ('8843d7f92416211de9ebb963ff4ce28125932878', sha1_hash))
+    self.assertEquals(out_q.get(), '1> Downloading %s...' % output_filename)
+    self.assertEquals(out_q.get(), msg)
+    self.assertEquals(ret_codes.get(), (20, msg))
+    self.assertTrue(out_q.empty())
+    self.assertTrue(ret_codes.empty())
+
+
   def test_download_directory_no_recursive_non_force(self):
     sha1_hash = '7871c8e24da15bad8b0be2c36edc9dc77e37727f'
     input_filename = '%s/%s' % (self.base_url, sha1_hash)
@@ -280,12 +310,11 @@
         ('check_call',
             ('ls', input_filename)),
         ('check_call',
-            ('cp', '-q', input_filename, output_filename))]
+            ('cp', input_filename, output_filename))]
     if sys.platform != 'win32':
       expected_calls.append(
           ('check_call',
-           ('ls',
-            '-L',
+           ('stat',
             'gs://sometesturl/7871c8e24da15bad8b0be2c36edc9dc77e37727f')))
     self.assertEqual(self.gsutil.history, expected_calls)
     self.assertEqual(code, 0)
diff --git a/tests/gcl_unittest.py b/tests/gcl_unittest.py
deleted file mode 100755
index b378744..0000000
--- a/tests/gcl_unittest.py
+++ /dev/null
@@ -1,622 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Unit tests for gcl.py."""
-
-# pylint: disable=E1103,E1101,E1120
-
-import os
-import sys
-
-sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
-
-from testing_support.super_mox import mox, SuperMoxTestBase
-
-import gcl
-import presubmit_support
-
-
-class GclTestsBase(SuperMoxTestBase):
-  """Setups and tear downs the mocks but doesn't test anything as-is."""
-  def setUp(self):
-    SuperMoxTestBase.setUp(self)
-    self.fake_root_dir = self.RootDir()
-    self.mox.StubOutWithMock(gcl, 'RunShell')
-    self.mox.StubOutWithMock(gcl.SVN, '_CaptureInfo')
-    self.mox.StubOutWithMock(gcl.SVN, 'GetCheckoutRoot')
-    self.mox.StubOutWithMock(gcl, 'tempfile')
-    self.mox.StubOutWithMock(gcl.upload, 'RealMain')
-    self.mox.StubOutWithMock(gcl.gclient_utils, 'FileRead')
-    self.mox.StubOutWithMock(gcl.gclient_utils, 'FileWrite')
-    gcl.REPOSITORY_ROOT = None
-    self.old_review_settings = gcl.CODEREVIEW_SETTINGS
-    self.assertEquals(gcl.CODEREVIEW_SETTINGS, {})
-
-  def tearDown(self):
-    gcl.CODEREVIEW_SETTINGS = self.old_review_settings
-
-  def fakeChange(self, files=None):  # pylint: disable=R0201
-    if files == None:
-      files = [('A', 'aa'), ('M', 'bb')]
-
-    change_info = self.mox.CreateMock(gcl.ChangeInfo)
-    change_info.name = 'naame'
-    change_info.issue = 1
-    change_info.patchset = 0
-    change_info.description = 'deescription'
-    change_info.files = files
-    change_info.GetFiles = lambda : change_info.files
-    change_info.GetIssueDescription = lambda : change_info.description
-    change_info.GetFileNames = lambda : [f[1] for f in change_info.files]
-    change_info.GetLocalRoot = lambda : 'proout'
-    change_info.patch = None
-    change_info.rietveld = 'https://my_server'
-    change_info._closed = False
-    change_info._deleted = False
-    change_info._comments_added = []
-
-    class RpcServer(object):
-      # pylint: disable=R0201,W0613
-      def get_issue_properties(self, issue, messages):
-        return { 'patchsets': [1337] }
-    change_info.RpcServer = RpcServer
-
-    def AddComment(comment):
-      # pylint: disable=W0212
-      change_info._comments_added.append(comment)
-    change_info.AddComment = AddComment
-
-    def Delete():
-      change_info._deleted = True
-    change_info.Delete = Delete
-
-    def CloseIssue():
-      change_info._closed = True
-    change_info.CloseIssue = CloseIssue
-
-    return change_info
-
-
-class GclUnittest(GclTestsBase):
-  """General gcl.py tests."""
-  def tearDown(self):
-    gcl.CODEREVIEW_SETTINGS = {}
-
-  def testMembersChanged(self):
-    self.mox.ReplayAll()
-    members = [
-        'CODEREVIEW_SETTINGS', 'CODEREVIEW_SETTINGS_FILE',
-        'CODEREVIEW_SETTINGS_FILE_NOT_FOUND',
-        'CMDchange', 'CMDchanges', 'CMDcommit', 'CMDdelete', 'CMDdeleteempties',
-        'CMDdescription', 'CMDdiff', 'CMDhelp', 'CMDlint', 'CMDnothave',
-        'CMDopened', 'CMDpassthru', 'CMDpresubmit', 'CMDrename', 'CMDsettings',
-        'CMDstatus', 'CMDtry', 'CMDupload',
-        'ChangeInfo', 'Command', 'DEFAULT_LINT_IGNORE_REGEX',
-        'DEFAULT_LINT_REGEX', 'CheckHomeForFile', 'DoPresubmitChecks',
-        'ErrorExit', 'FILES_CACHE', 'FilterFlag', 'GenUsage',
-        'GenerateChangeName', 'GenerateDiff', 'GetCLs', 'GetCacheDir',
-        'GetCachedFile', 'GetChangelistInfoFile', 'GetChangesDir',
-        'GetCodeReviewSetting', 'GetFilesNotInCL', 'GetInfoDir',
-        'GetModifiedFiles', 'GetRepositoryRoot', 'GetTreeStatus', 'ListFiles',
-        'LoadChangelistInfoForMultiple', 'MISSING_TEST_MSG',
-        'OptionallyDoPresubmitChecks', 'REPOSITORY_ROOT',
-        'RunShell', 'RunShellWithReturnCode', 'SVN',
-        'TryChange', 'UnknownFiles', 'Warn',
-        'attrs', 'breakpad', 'defer_attributes', 'fix_encoding',
-        'gclient_utils', 'git_cl', 'json', 'main', 'need_change',
-        'need_change_and_args', 'no_args', 'optparse', 'os',
-        'presubmit_support', 'random', 're', 'rietveld',
-        'ssl', 'string', 'subprocess2', 'sys', 'tempfile', 'time',
-        'upload', 'urllib2',
-    ]
-    # If this test fails, you should add the relevant test.
-    self.compareMembers(gcl, members)
-
-  def testIsSVNMoved(self):
-    # TODO(maruel): TEST ME
-    pass
-
-  def testGetSVNFileProperty(self):
-    # TODO(maruel): TEST ME
-    pass
-
-  def testUnknownFiles(self):
-    # TODO(maruel): TEST ME
-    pass
-
-  def testCheckHomeForFile(self):
-    # TODO(maruel): TEST ME
-    pass
-
-  def testDefaultSettings(self):
-    self.assertEquals({}, gcl.CODEREVIEW_SETTINGS)
-
-  def testGetCodeReviewSettingOk(self):
-    self.mox.StubOutWithMock(gcl, 'GetCachedFile')
-    gcl.GetCachedFile(gcl.CODEREVIEW_SETTINGS_FILE).AndReturn(
-        'foo:bar\n'
-        '# comment\n'
-        ' c : d \n\r'
-        'e: f')
-    self.mox.ReplayAll()
-    self.assertEquals('bar', gcl.GetCodeReviewSetting('foo'))
-    self.assertEquals('d', gcl.GetCodeReviewSetting('c'))
-    self.assertEquals('f', gcl.GetCodeReviewSetting('e'))
-    self.assertEquals('', gcl.GetCodeReviewSetting('other'))
-    self.assertEquals(
-        {'foo': 'bar', 'c': 'd', 'e': 'f', '__just_initialized': None},
-        gcl.CODEREVIEW_SETTINGS)
-
-  def testGetCodeReviewSettingFail(self):
-    self.mox.StubOutWithMock(gcl, 'GetCachedFile')
-    gcl.GetCachedFile(gcl.CODEREVIEW_SETTINGS_FILE).AndReturn(
-        'aaa\n'
-        ' c : d \n\r'
-        'e: f')
-    self.mox.ReplayAll()
-    try:
-      gcl.GetCodeReviewSetting('c')
-      self.fail()
-    except gcl.gclient_utils.Error:
-      pass
-    self.assertEquals({}, gcl.CODEREVIEW_SETTINGS)
-
-  def testGetRepositoryRootNone(self):
-    gcl.os.getcwd().AndReturn(self.fake_root_dir)
-    gcl.SVN.GetCheckoutRoot(self.fake_root_dir).AndReturn(None)
-    self.mox.ReplayAll()
-    self.assertRaises(gcl.gclient_utils.Error, gcl.GetRepositoryRoot)
-
-  def testGetRepositoryRootGood(self):
-    root_path = gcl.os.path.join('bleh', 'prout', 'pouet')
-    gcl.os.getcwd().AndReturn(root_path)
-    gcl.SVN.GetCheckoutRoot(root_path).AndReturn(root_path + '.~')
-    self.mox.ReplayAll()
-    self.assertEquals(gcl.GetRepositoryRoot(), root_path + '.~')
-
-  def testHelp(self):
-    gcl.sys.stdout.write = lambda x: None
-    self.mox.ReplayAll()
-    gcl.CMDhelp([])
-
-
-class ChangeInfoUnittest(GclTestsBase):
-  def setUp(self):
-    GclTestsBase.setUp(self)
-    self.mox.StubOutWithMock(gcl, 'GetChangelistInfoFile')
-    self.mox.StubOutWithMock(gcl, 'GetRepositoryRoot')
-
-  def testChangeInfoMembers(self):
-    self.mox.ReplayAll()
-    members = [
-      'AddComment', 'CloseIssue', 'Delete', 'Exists', 'GetFiles',
-      'GetApprovingReviewers', 'GetFileNames', 'GetIssueDescription',
-      'GetLocalRoot', 'Load',
-      'MissingTests', 'NeedsUpload', 'PrimeLint', 'RpcServer', 'Save',
-      'SendToRietveld',
-      'SEPARATOR',
-      'UpdateDescriptionFromIssue', 'UpdateRietveldDescription',
-      'append_footer',
-      'description', 'force_description', 'get_reviewers', 'issue', 'name',
-      'needs_upload', 'patch', 'patchset', 'rietveld', 'update_reviewers',
-    ]
-    # If this test fails, you should add the relevant test.
-    self.compareMembers(
-        gcl.ChangeInfo('', 0, 0, '', None, self.fake_root_dir, 'foo', False),
-        members)
-
-  def testChangeInfoBase(self):
-    files = [('M', 'foo'), ('A', 'bar')]
-    self.mox.ReplayAll()
-    o = gcl.ChangeInfo(
-        'name2',
-        '42',
-        '53',
-        'description2',
-        files,
-        self.fake_root_dir,
-        'foo',
-        False)
-    self.assertEquals(o.name, 'name2')
-    self.assertEquals(o.issue, 42)
-    self.assertEquals(o.patchset, 53)
-    self.assertEquals(o.description, 'description2')
-    self.assertEquals(o.patch, None)
-    self.assertEquals(o.GetFileNames(), ['foo', 'bar'])
-    self.assertEquals(o.GetFiles(), files)
-    self.assertEquals(o.GetLocalRoot(), self.fake_root_dir)
-
-  def testLoadWithIssue(self):
-    self.mox.StubOutWithMock(gcl, 'GetCodeReviewSetting')
-    description = ["This is some description.", "force an extra separator."]
-    gcl.GetChangelistInfoFile('bleh').AndReturn('bleeeh')
-    gcl.os.path.exists('bleeeh').AndReturn(True)
-    gcl.gclient_utils.FileRead('bleeeh').AndReturn(
-      gcl.ChangeInfo.SEPARATOR.join(["42, 53", "G      b.cc"] + description))
-    gcl.GetCodeReviewSetting('CODE_REVIEW_SERVER').AndReturn('foo')
-    # Does an upgrade.
-    gcl.GetChangelistInfoFile('bleh').AndReturn('bleeeh')
-    gcl.gclient_utils.FileWrite('bleeeh', mox.IgnoreArg())
-    self.mox.ReplayAll()
-
-    change_info = gcl.ChangeInfo.Load('bleh', self.fake_root_dir, True, False)
-    self.assertEquals(change_info.name, 'bleh')
-    self.assertEquals(change_info.issue, 42)
-    self.assertEquals(change_info.patchset, 53)
-    self.assertEquals(change_info.description,
-                      gcl.ChangeInfo.SEPARATOR.join(description))
-    self.assertEquals(change_info.GetFiles(), [('G      ', 'b.cc')])
-
-  def testLoadEmpty(self):
-    self.mox.StubOutWithMock(gcl, 'GetCodeReviewSetting')
-    gcl.GetChangelistInfoFile('bleh').AndReturn('bleeeh')
-    gcl.os.path.exists('bleeeh').AndReturn(True)
-    gcl.gclient_utils.FileRead('bleeeh').AndReturn(
-        gcl.ChangeInfo.SEPARATOR.join(["", "", ""]))
-    gcl.GetCodeReviewSetting('CODE_REVIEW_SERVER').AndReturn('foo')
-    # Does an upgrade.
-    gcl.GetChangelistInfoFile('bleh').AndReturn('bleeeh')
-    gcl.gclient_utils.FileWrite('bleeeh', mox.IgnoreArg())
-    self.mox.ReplayAll()
-
-    change_info = gcl.ChangeInfo.Load('bleh', self.fake_root_dir, True, False)
-    self.assertEquals(change_info.name, 'bleh')
-    self.assertEquals(change_info.issue, 0)
-    self.assertEquals(change_info.patchset, 0)
-    self.assertEquals(change_info.description, "")
-    self.assertEquals(change_info.GetFiles(), [])
-
-  def testSaveEmpty(self):
-    gcl.GetChangelistInfoFile('').AndReturn('foo')
-    values = {
-        'description': '', 'patchset': 2, 'issue': 1,
-        'files': [], 'needs_upload': False, 'rietveld': 'https://foo'}
-    gcl.gclient_utils.FileWrite(
-        'foo', gcl.json.dumps(values, sort_keys=True, indent=2))
-    self.mox.ReplayAll()
-
-    change_info = gcl.ChangeInfo(
-        '', 1, 2, '', None, self.fake_root_dir, 'foo', False)
-    change_info.Save()
-
-  def testSaveDirty(self):
-    gcl.GetChangelistInfoFile('n').AndReturn('foo')
-    values = {
-        'description': 'des', 'patchset': 0, 'issue': 0,
-        'files': [], 'needs_upload': True, 'rietveld': 'https://foo'}
-    gcl.gclient_utils.FileWrite(
-        'foo', gcl.json.dumps(values, sort_keys=True, indent=2))
-    self.mox.ReplayAll()
-
-    change_info = gcl.ChangeInfo('n', 0, 0, 'des', None, self.fake_root_dir,
-                                 'foo', needs_upload=True)
-    change_info.Save()
-
-
-class CMDuploadUnittest(GclTestsBase):
-  def setUp(self):
-    GclTestsBase.setUp(self)
-    self.mox.StubOutWithMock(gcl, 'CheckHomeForFile')
-    self.mox.StubOutWithMock(gcl, 'DoPresubmitChecks')
-    self.mox.StubOutWithMock(gcl, 'GenerateDiff')
-    self.mox.StubOutWithMock(gcl, 'GetCodeReviewSetting')
-    self.mox.StubOutWithMock(gcl, 'GetRepositoryRoot')
-    self.mox.StubOutWithMock(gcl.ChangeInfo, 'SendToRietveld')
-    self.mox.StubOutWithMock(gcl, 'TryChange')
-    self.mox.StubOutWithMock(gcl.ChangeInfo, 'Load')
-
-  def testNew(self):
-    change_info = self.mox.CreateMock(gcl.ChangeInfo)
-    change_info.name = 'naame'
-    change_info.issue = 1
-    change_info.patchset = 0
-    change_info.description = 'deescription\n\nR=foo@bar.com',
-    change_info.files = [('A', 'aa'), ('M', 'bb')]
-    change_info.patch = None
-    change_info.rietveld = 'https://my_server'
-    files = [item[1] for item in change_info.files]
-    output = presubmit_support.PresubmitOutput()
-    gcl.DoPresubmitChecks(change_info, False, True).AndReturn(output)
-    #gcl.GetCodeReviewSetting('CODE_REVIEW_SERVER').AndReturn('my_server')
-    gcl.os.getcwd().AndReturn('somewhere')
-    change_info.GetFiles().AndReturn(change_info.files)
-    gcl.os.chdir('proout')
-    change_info.get_reviewers().AndReturn('foo@bar.com')
-    change_info.GetFileNames().AndReturn(files)
-    gcl.GenerateDiff(files)
-    gcl.upload.RealMain(['upload.py', '-y', '--server=https://my_server',
-                         '-r', 'georges@example.com',
-                         '--issue=1', '--title= '],
-                         change_info.patch).AndReturn(("1",
-                                                                    "2"))
-    change_info.GetLocalRoot().AndReturn('proout')
-    change_info.Save()
-    change_info.PrimeLint()
-    gcl.os.chdir('somewhere')
-    gcl.sys.stdout.write("*** Upload does not submit a try; use gcl try to"
-                         " submit a try. ***")
-    gcl.sys.stdout.write("\n")
-    gcl.GetRepositoryRoot().AndReturn(self.fake_root_dir)
-    gcl.ChangeInfo.Load('naame', self.fake_root_dir, True, True
-        ).AndReturn(change_info)
-    self.mox.ReplayAll()
-
-    gcl.CMDupload(['naame', '-r', 'georges@example.com'])
-    self.checkstdout('*** Upload does not submit a try; use gcl try to submit '
-        'a try. ***\n'
-        '*** Upload does not submit a try; use gcl try to submit a try. ***\n')
-
-  def testServerOverride(self):
-    change_info = gcl.ChangeInfo(
-        'naame',
-        0,
-        0,
-        'deescription',
-        [('A', 'aa'), ('M', 'bb')],
-        self.fake_root_dir,
-        'my_server',
-        False)
-    self.mox.StubOutWithMock(change_info, 'Save')
-    change_info.Save()
-    output = presubmit_support.PresubmitOutput()
-    gcl.DoPresubmitChecks(change_info, False, True).AndReturn(output)
-    gcl.tempfile.mkstemp(text=True).AndReturn((42, 'descfile'))
-    gcl.os.write(42, change_info.description)
-    gcl.os.close(42)
-    gcl.GetCodeReviewSetting('CC_LIST')
-    gcl.GetCodeReviewSetting('PRIVATE')
-    gcl.GetCodeReviewSetting('PROJECT')
-    gcl.os.getcwd().AndReturn('somewhere')
-    gcl.os.chdir(change_info.GetLocalRoot())
-    gcl.GenerateDiff(change_info.GetFileNames())
-    gcl.upload.RealMain(
-        [ 'upload.py', '-y', '--server=https://my_server', '--server=a',
-          '--file=descfile'],
-        change_info.patch).AndReturn(("1", "2"))
-    gcl.os.remove('descfile')
-    change_info.SendToRietveld("/lint/issue%s_%s" % ('1', '2'), timeout=60)
-    gcl.os.chdir('somewhere')
-    gcl.sys.stdout.write("*** Upload does not submit a try; use gcl try to"
-                         " submit a try. ***")
-    gcl.sys.stdout.write("\n")
-    gcl.GetRepositoryRoot().AndReturn(self.fake_root_dir)
-    gcl.ChangeInfo.Load('naame', self.fake_root_dir, True, True
-        ).AndReturn(change_info)
-    self.mox.ReplayAll()
-
-    gcl.CMDupload(['naame', '--server=a', '--no_watchlists'])
-    self.checkstdout('*** Upload does not submit a try; use gcl try to submit '
-        'a try. ***\n'
-        '*** Upload does not submit a try; use gcl try to submit a try. ***\n')
-
-  def testNormal(self):
-    change_info = gcl.ChangeInfo(
-        'naame',
-        0,
-        0,
-        'deescription',
-        [('A', 'aa'), ('M', 'bb')],
-        self.fake_root_dir,
-        'my_server',
-        False)
-    self.mox.StubOutWithMock(change_info, 'Save')
-    change_info.Save()
-    output = presubmit_support.PresubmitOutput()
-    gcl.DoPresubmitChecks(change_info, False, True).AndReturn(output)
-    gcl.tempfile.mkstemp(text=True).AndReturn((42, 'descfile'))
-    gcl.os.write(42, change_info.description)
-    gcl.os.close(42)
-    gcl.GetCodeReviewSetting('CC_LIST')
-    gcl.GetCodeReviewSetting('PRIVATE')
-    gcl.GetCodeReviewSetting('PROJECT')
-    gcl.os.getcwd().AndReturn('somewhere')
-    gcl.os.chdir(change_info.GetLocalRoot())
-    gcl.GenerateDiff(change_info.GetFileNames())
-    gcl.upload.RealMain(
-        ['upload.py', '-y', '--server=https://my_server', "--file=descfile" ],
-        change_info.patch).AndReturn(("1", "2"))
-    gcl.os.remove('descfile')
-    change_info.SendToRietveld("/lint/issue%s_%s" % ('1', '2'), timeout=60)
-    gcl.os.chdir('somewhere')
-    gcl.sys.stdout.write("*** Upload does not submit a try; use gcl try to"
-                         " submit a try. ***")
-    gcl.sys.stdout.write("\n")
-    gcl.GetRepositoryRoot().AndReturn(self.fake_root_dir)
-    gcl.ChangeInfo.Load('naame', self.fake_root_dir, True, True
-        ).AndReturn(change_info)
-    self.mox.ReplayAll()
-
-    gcl.CMDupload(['naame', '--no_watchlists'])
-    self.assertEquals(change_info.issue, 1)
-    self.assertEquals(change_info.patchset, 2)
-    self.checkstdout('*** Upload does not submit a try; use gcl try to submit '
-        'a try. ***\n'
-        '*** Upload does not submit a try; use gcl try to submit a try. ***\n')
-
-  def testNoServer(self):
-    self.mox.StubOutWithMock(gcl.sys, 'stderr')
-    gcl.sys.stderr.write(
-        'Don\'t use the -s flag, fix codereview.settings instead')
-    gcl.sys.stderr.write('\n')
-    gcl.GetRepositoryRoot().AndReturn(self.fake_root_dir)
-    gcl.ChangeInfo.Load('naame', self.fake_root_dir, True, True
-        ).AndReturn(1)
-    self.mox.ReplayAll()
-
-    try:
-      gcl.CMDupload(['naame', '-s', 'foo'])
-      self.fail()
-    except SystemExit:
-      pass
-
-  def testReviewersInDescription(self):
-    change_info = self.mox.CreateMock(gcl.ChangeInfo)
-    change_info.name = 'naame'
-    change_info.issue = 1
-    change_info.patchset = 0
-    change_info.description = 'deescription\n\nR=georges@example.com',
-    change_info.files = [('A', 'aa'), ('M', 'bb')]
-    change_info.patch = None
-    change_info.rietveld = 'https://my_server'
-    files = [item[1] for item in change_info.files]
-    output = presubmit_support.PresubmitOutput()
-    gcl.DoPresubmitChecks(change_info, False, True).AndReturn(output)
-    #gcl.GetCodeReviewSetting('CODE_REVIEW_SERVER').AndReturn('my_server')
-    gcl.os.getcwd().AndReturn('somewhere')
-    change_info.GetFiles().AndReturn(change_info.files)
-    change_info.get_reviewers().AndReturn(['georges@example.com'])
-    change_info.GetFileNames().AndReturn(files)
-    change_info.GetLocalRoot().AndReturn('proout')
-    gcl.os.chdir('proout')
-    gcl.GenerateDiff(files)
-    gcl.upload.RealMain(['upload.py', '-y', '--server=https://my_server',
-                         '--reviewers=georges@example.com',
-                         '--issue=1', '--title= '],
-                         change_info.patch).AndReturn(("1", "2"))
-    change_info.Save()
-    change_info.PrimeLint()
-    gcl.os.chdir('somewhere')
-    gcl.sys.stdout.write("*** Upload does not submit a try; use gcl try to"
-                         " submit a try. ***")
-    gcl.sys.stdout.write("\n")
-    gcl.GetRepositoryRoot().AndReturn(self.fake_root_dir)
-    gcl.ChangeInfo.Load('naame', self.fake_root_dir, True, True
-        ).AndReturn(change_info)
-    self.mox.ReplayAll()
-
-    gcl.CMDupload(['naame'])
-    self.checkstdout('*** Upload does not submit a try; use gcl try to submit '
-        'a try. ***\n'
-        '*** Upload does not submit a try; use gcl try to submit a try. ***\n')
-
-  def testSuggestReviewers(self):
-    change_info = self.fakeChange()
-    output = presubmit_support.PresubmitOutput()
-    output.reviewers = ['foo@example.com', 'bar@example.com']
-    gcl.DoPresubmitChecks(change_info, False, True).AndReturn(output)
-    #gcl.GetCodeReviewSetting('CODE_REVIEW_SERVER').AndReturn('my_server')
-    gcl.os.getcwd().AndReturn('somewhere')
-    gcl.os.chdir('proout')
-    gcl.GenerateDiff(change_info.GetFileNames())
-    gcl.upload.RealMain(['upload.py', '-y', '--server=https://my_server',
-                         '--reviewers=foo@example.com,bar@example.com',
-                         '--issue=1', '--title= '],
-                         change_info.patch).AndReturn(("1", "2"))
-    change_info.get_reviewers().AndReturn(['foo@example.com,bar@example.com'])
-    change_info.Save()
-    change_info.PrimeLint()
-    gcl.os.chdir('somewhere')
-    gcl.sys.stdout.write("*** Upload does not submit a try; use gcl try to"
-                         " submit a try. ***")
-    gcl.sys.stdout.write("\n")
-    gcl.GetRepositoryRoot().AndReturn(self.fake_root_dir)
-    gcl.ChangeInfo.Load('naame', self.fake_root_dir, True, True
-        ).AndReturn(change_info)
-    self.mox.ReplayAll()
-
-    gcl.CMDupload(['naame'])
-    self.checkstdout('*** Upload does not submit a try; use gcl try to submit '
-        'a try. ***\n'
-        '*** Upload does not submit a try; use gcl try to submit a try. ***\n')
-
-
-class CMDCommitUnittest(GclTestsBase):
-  def mockLoad(self, files=None):
-    self.mox.StubOutWithMock(gcl, 'GetRepositoryRoot')
-    self.mox.StubOutWithMock(gcl.ChangeInfo, 'Load')
-    gcl.GetRepositoryRoot().AndReturn(self.fake_root_dir)
-    change_info = self.fakeChange(files)
-    gcl.ChangeInfo.Load('naame', self.fake_root_dir, True, True
-        ).AndReturn(change_info)
-    return change_info
-
-  def mockPresubmit(self, change_info, fail):
-    self.mox.StubOutWithMock(gcl, 'OptionallyDoPresubmitChecks')
-    output = presubmit_support.PresubmitOutput()
-    if fail:
-      output.fail()
-    gcl.OptionallyDoPresubmitChecks(change_info, True, []).AndReturn(output)
-
-  def mockCommit(self, change_info, commit_message, shell_output):
-    gcl.tempfile.mkstemp(text=True).AndReturn((42, 'commit'))
-    gcl.os.write(42, commit_message)
-    gcl.os.close(42)
-    gcl.tempfile.mkstemp(text=True).AndReturn((43, 'files'))
-    gcl.os.write(43, '\n'.join(change_info.GetFileNames()))
-    gcl.os.close(43)
-
-    gcl.RunShell(['svn', 'commit', '--file=commit', '--targets=files'],
-        True).AndReturn(shell_output)
-    if 'Committed' in shell_output:
-      self.mox.StubOutWithMock(gcl, 'GetCodeReviewSetting')
-      gcl.GetCodeReviewSetting('VIEW_VC').AndReturn('http://view/')
-
-    gcl.os.remove('commit')
-    gcl.os.remove('files')
-
-  def testPresubmitEmpty(self):
-    self.mockLoad(files=[])
-    self.mox.ReplayAll()
-
-    retval = gcl.CMDcommit(['naame'])
-
-    self.assertEquals(retval, 1)
-
-  def testPresubmitFails(self):
-    change_info = self.mockLoad()
-    self.mockPresubmit(change_info, fail=True)
-    self.mox.ReplayAll()
-
-    retval = gcl.CMDcommit(['naame'])
-
-    self.assertEquals(retval, 1)
-
-  def testPresubmitSucceeds(self):
-    change_info = self.mockLoad()
-    self.mockPresubmit(change_info, fail=False)
-    self.mockCommit(
-        change_info, 'deescription\n\nReview URL: https://my_server/1', '')
-    change_info.UpdateDescriptionFromIssue()
-    change_info.GetApprovingReviewers().AndReturn(['a@c'])
-    change_info.update_reviewers(['a@c'])
-    self.mox.ReplayAll()
-
-    retval = gcl.CMDcommit(['naame'])
-
-    self.assertEquals(retval, 0)
-    self.assertEquals(change_info.description, 'deescription')
-    # pylint: disable=W0212
-    self.assertFalse(change_info._deleted)
-    self.assertFalse(change_info._closed)
-
-  def testPresubmitSucceedsWithCommittedMessage(self):
-    change_info = self.mockLoad()
-    self.mockPresubmit(change_info, fail=False)
-    self.mockCommit(
-        change_info,
-        'deescription\n\nReview URL: https://my_server/1',
-        '\nCommitted revision 12345')
-    change_info.UpdateDescriptionFromIssue()
-    change_info.GetApprovingReviewers().AndReturn(['a@c'])
-    change_info.update_reviewers(['a@c'])
-    change_info.append_footer('Committed: http://view/12345')
-    self.mox.ReplayAll()
-
-    retval = gcl.CMDcommit(['naame'])
-    self.assertEquals(retval, 0)
-    # This is because append_footer is mocked.
-    self.assertEquals(change_info.description, 'deescription')
-    # pylint: disable=W0212
-    self.assertTrue(change_info._deleted)
-    self.assertTrue(change_info._closed)
-    self.assertEqual(
-        change_info._comments_added,
-        ["Committed patchset #1 (id:1337) manually as r12345 (presubmit "
-         "successful)."])
-
-
-if __name__ == '__main__':
-  import unittest
-  unittest.main()
diff --git a/tests/gclient_scm_test.py b/tests/gclient_scm_test.py
index acdfca2..d3fdc68 100755
--- a/tests/gclient_scm_test.py
+++ b/tests/gclient_scm_test.py
@@ -115,695 +115,12 @@
     SuperMoxTestBase.tearDown(self)
 
 
-class SVNWrapperTestCase(BaseTestCase):
-  class OptionsObject(object):
-    def __init__(self, verbose=False, revision=None, force=False):
-      self.verbose = verbose
-      self.revision = revision
-      self.manually_grab_svn_rev = True
-      self.deps_os = None
-      self.force = force
-      self.reset = False
-      self.nohooks = False
-      # TODO(maruel): Test --jobs > 1.
-      self.jobs = 1
-      self.delete_unversioned_trees = False
-
-  def checkstdout(self, expected):
-    value = sys.stdout.getvalue()
-    sys.stdout.close()
-    # pylint: disable=E1101
-    self.assertEquals(expected, strip_timestamps(value))
-
-  def Options(self, *args, **kwargs):
-    return self.OptionsObject(*args, **kwargs)
-
-  def setUp(self):
-    BaseTestCase.setUp(self)
-    self.url = self.SvnUrl()
-
-  def testUnsupportedSCM(self):
-    args = ['gopher://foo', self.root_dir, self.relpath]
-    exception_msg = 'No SCM found for url gopher://foo'
-    self.assertRaisesError(exception_msg, self._scm_wrapper, *args)
-
-  def testSVNFullUrlForRelativeUrl(self):
-    self.url = 'svn://a/b/c/d'
-
-    self.mox.ReplayAll()
-    scm = self._scm_wrapper(url=self.url, root_dir=self.root_dir,
-                            relpath=self.relpath)
-    self.assertEqual(scm.FullUrlForRelativeUrl('/crap'), 'svn://a/b/crap')
-
-  def testGITFullUrlForRelativeUrl(self):
-    self.url = 'git://a/b/c/d'
-
-    self.mox.ReplayAll()
-    scm = self._scm_wrapper(url=self.url, root_dir=self.root_dir,
-                            relpath=self.relpath)
-    self.assertEqual(scm.FullUrlForRelativeUrl('/crap'), 'git://a/b/c/crap')
-
-  def testGITFakeHttpUrl(self):
-    self.url = 'git+http://foo'
-
-    self.mox.ReplayAll()
-    scm = self._scm_wrapper(url=self.url, root_dir=self.root_dir,
-                            relpath=self.relpath)
-    self.assertEqual(scm.url, 'http://foo')
-
-  def testGITFakeHttpsUrl(self):
-    self.url = 'git+https://foo'
-
-    self.mox.ReplayAll()
-    scm = self._scm_wrapper(url=self.url, root_dir=self.root_dir,
-                            relpath=self.relpath)
-    self.assertEqual(scm.url, 'https://foo')
-
-  def testRunCommandException(self):
-    options = self.Options(verbose=False)
-    gclient_scm.os.path.exists(join(self.base_path, '.hg')).AndReturn(False)
-
-    self.mox.ReplayAll()
-    scm = self._scm_wrapper(url=self.url, root_dir=self.root_dir,
-                            relpath=self.relpath)
-    exception = "Unsupported argument(s): %s" % ','.join(self.args)
-    self.assertRaisesError(exception, scm.RunCommand,
-                           'update', options, self.args)
-
-  def testRunCommandUnknown(self):
-    # TODO(maruel): if ever used.
-    pass
-
-  def testRevertMissing(self):
-    options = self.Options(verbose=True)
-    gclient_scm.os.path.isdir(self.base_path).AndReturn(False)
-    gclient_scm.os.path.exists(self.base_path).AndReturn(False)
-    gclient_scm.scm.SVN.Capture(['--version', '--quiet'], None
-        ).AndReturn('1.5.1')
-    # It'll to a checkout instead.
-    gclient_scm.os.path.exists(join(self.base_path, '.hg')).AndReturn(False)
-    # Checkout.
-    gclient_scm.os.path.exists(self.base_path).AndReturn(False)
-    parent = gclient_scm.os.path.dirname(self.base_path)
-    gclient_scm.os.path.exists(parent).AndReturn(False)
-    gclient_scm.os.makedirs(parent)
-    gclient_scm.os.path.exists(parent).AndReturn(True)
-    files_list = self.mox.CreateMockAnything()
-    gclient_scm.scm.SVN.RunAndGetFileList(
-        options.verbose,
-        ['checkout', self.url, self.base_path, '--force', '--ignore-externals'],
-        cwd=self.root_dir,
-        file_list=files_list)
-
-    gclient_scm.scm.SVN._CaptureInfo([], self.base_path+'/.'
-        ).AndReturn({'Revision': 100})
-
-    self.mox.ReplayAll()
-    scm = self._scm_wrapper(url=self.url, root_dir=self.root_dir,
-                            relpath=self.relpath)
-    scm.revert(options, self.args, files_list)
-    self.checkstdout(
-        ('_____ %s is missing, synching instead\n' % self.relpath))
-
-  def testRevertNoDotSvn(self):
-    options = self.Options(verbose=True, force=True)
-    gclient_scm.os.path.isdir(self.base_path).AndReturn(True)
-    gclient_scm.os.path.isdir(join(self.base_path, '.svn')).AndReturn(False)
-    gclient_scm.os.path.isdir(join(self.base_path, '.git')).AndReturn(False)
-    gclient_scm.os.path.isdir(join(self.base_path, '.hg')).AndReturn(False)
-    # Checkout.
-    gclient_scm.os.path.exists(join(self.base_path, '.hg')).AndReturn(False)
-    gclient_scm.os.path.exists(self.base_path).AndReturn(False)
-    parent = gclient_scm.os.path.dirname(self.base_path)
-    gclient_scm.os.path.exists(parent).AndReturn(False)
-    gclient_scm.os.makedirs(parent)
-    gclient_scm.os.path.exists(parent).AndReturn(True)
-    files_list = self.mox.CreateMockAnything()
-    gclient_scm.scm.SVN.Capture(['--version', '--quiet'], None
-        ).AndReturn('1.6')
-    gclient_scm.scm.SVN.RunAndGetFileList(
-        options.verbose,
-        ['checkout', self.url, self.base_path, '--force', '--ignore-externals'],
-        cwd=self.root_dir,
-        file_list=files_list)
-    gclient_scm.gclient_utils.rmtree(self.base_path)
-    gclient_scm.scm.SVN._CaptureInfo([], self.base_path+'/.'
-        ).AndReturn({'Revision': 100})
-    self.mox.ReplayAll()
-    scm = self._scm_wrapper(url=self.url, root_dir=self.root_dir,
-                            relpath=self.relpath)
-    scm.revert(options, self.args, files_list)
-    self.checkstdout(
-        '\n_____ %s is not a valid svn checkout, synching instead\n' %
-        self.relpath)
-
-  def testRevertNone(self):
-    options = self.Options(verbose=True)
-    gclient_scm.os.path.isdir(self.base_path).AndReturn(True)
-    gclient_scm.os.path.isdir(join(self.base_path, '.svn')).AndReturn(True)
-    gclient_scm.scm.SVN.CaptureStatus(
-        None, self.base_path, no_ignore=False).AndReturn([])
-    gclient_scm.os.path.isdir(self.base_path).AndReturn(True)
-    gclient_scm.scm.SVN.RunAndGetFileList(
-        options.verbose,
-        ['update', '--revision', 'BASE', '--ignore-externals'],
-        cwd=self.base_path,
-        file_list=mox.IgnoreArg())
-
-    self.mox.ReplayAll()
-    scm = self._scm_wrapper(url=self.url, root_dir=self.root_dir,
-                            relpath=self.relpath)
-    file_list = []
-    scm.revert(options, self.args, file_list)
-
-  def testRevertDirectory(self):
-    options = self.Options(verbose=True)
-    gclient_scm.os.path.isdir(self.base_path).AndReturn(True)
-    gclient_scm.os.path.isdir(join(self.base_path, '.svn')).AndReturn(True)
-    items = [
-      ('~      ', 'a'),
-    ]
-    gclient_scm.scm.SVN.CaptureStatus(
-        None, self.base_path, no_ignore=False).AndReturn(items)
-    file_path = join(self.base_path, 'a')
-    gclient_scm.os.path.exists(file_path).AndReturn(True)
-    gclient_scm.os.path.isfile(file_path).AndReturn(False)
-    gclient_scm.os.path.islink(file_path).AndReturn(False)
-    gclient_scm.os.path.isdir(file_path).AndReturn(True)
-    gclient_scm.gclient_utils.rmtree(file_path)
-    gclient_scm.os.path.isdir(self.base_path).AndReturn(True)
-    gclient_scm.scm.SVN.RunAndGetFileList(
-        options.verbose,
-        ['update', '--revision', 'BASE', '--ignore-externals'],
-        cwd=self.base_path,
-        file_list=mox.IgnoreArg())
-
-    self.mox.ReplayAll()
-    scm = self._scm_wrapper(url=self.url, root_dir=self.root_dir,
-                            relpath=self.relpath)
-    file_list2 = []
-    scm.revert(options, self.args, file_list2)
-    self.checkstdout(('%s\n' % file_path))
-
-  def testRevertDot(self):
-    self.mox.StubOutWithMock(gclient_scm.SVNWrapper, 'update')
-    options = self.Options(verbose=True)
-    gclient_scm.os.path.isdir(self.base_path).AndReturn(True)
-    gclient_scm.os.path.isdir(join(self.base_path, '.svn')).AndReturn(True)
-    items = [
-      ('~      ', '.'),
-    ]
-    gclient_scm.scm.SVN.CaptureStatus(
-        None, self.base_path, no_ignore=False).AndReturn(items)
-    # gclient_utils.rmtree() doesn't work on path ending with '.', like 'foo/.'.
-    file_path = self.base_path
-    gclient_scm.os.path.exists(file_path).AndReturn(True)
-    gclient_scm.os.path.isfile(file_path).AndReturn(False)
-    gclient_scm.os.path.islink(file_path).AndReturn(False)
-    gclient_scm.os.path.isdir(file_path).AndReturn(True)
-    gclient_scm.gclient_utils.rmtree(file_path)
-    # pylint: disable=E1120
-    gclient_scm.os.path.isdir(self.base_path).AndReturn(False)
-    gclient_scm.SVNWrapper.update(options, [], ['.'])
-
-    self.mox.ReplayAll()
-    scm = self._scm_wrapper(url=self.url, root_dir=self.root_dir,
-                            relpath=self.relpath)
-    file_list2 = []
-    scm.revert(options, self.args, file_list2)
-    self.checkstdout(('%s\n' % os.path.join(file_path, '.')))
-
-  def testStatus(self):
-    options = self.Options(verbose=True)
-    gclient_scm.os.path.isdir(self.base_path).AndReturn(True)
-    gclient_scm.scm.SVN.RunAndGetFileList(
-        options.verbose,
-        ['status'] + self.args + ['--ignore-externals'],
-        cwd=self.base_path,
-        file_list=[]).AndReturn(None)
-
-    self.mox.ReplayAll()
-    scm = self._scm_wrapper(url=self.url, root_dir=self.root_dir,
-                            relpath=self.relpath)
-    file_list = []
-    self.assertEqual(scm.status(options, self.args, file_list), None)
-
-  # TODO(maruel):  TEST REVISIONS!!!
-  # TODO(maruel):  TEST RELOCATE!!!
-  def testUpdateCheckout(self):
-    options = self.Options(verbose=True)
-    file_info = gclient_scm.gclient_utils.PrintableObject()
-    file_info.root = 'blah'
-    file_info.url = self.url
-    file_info.uuid = 'ABC'
-    file_info.revision = 42
-    gclient_scm.os.path.exists(join(self.base_path, '.hg')).AndReturn(False)
-    # Checkout.
-    gclient_scm.os.path.exists(self.base_path).AndReturn(False)
-    parent = gclient_scm.os.path.dirname(self.base_path)
-    gclient_scm.os.path.exists(parent).AndReturn(False)
-    gclient_scm.os.makedirs(parent)
-    gclient_scm.os.path.exists(parent).AndReturn(True)
-    files_list = self.mox.CreateMockAnything()
-    gclient_scm.scm.SVN.Capture(['--version', '--quiet'], None
-        ).AndReturn('1.5.1')
-    gclient_scm.scm.SVN.RunAndGetFileList(
-        options.verbose,
-        ['checkout', self.url, self.base_path, '--force', '--ignore-externals'],
-        cwd=self.root_dir,
-        file_list=files_list)
-    gclient_scm.scm.SVN._CaptureInfo([], self.base_path+'/.'
-        ).AndReturn({'Revision': 100})
-    self.mox.ReplayAll()
-    scm = self._scm_wrapper(url=self.url, root_dir=self.root_dir,
-                            relpath=self.relpath)
-    scm.update(options, (), files_list)
-
-  def testUpdateUpdate(self):
-    options = self.Options(verbose=True)
-    options.force = True
-    options.nohooks = False
-    file_info = {
-      'Repository Root': 'blah',
-      'URL': self.url,
-      'UUID': 'ABC',
-      'Revision': 42,
-    }
-    gclient_scm.os.path.exists(join(self.base_path, '.hg')).AndReturn(False)
-    self.mox.StubOutWithMock(gclient_scm.scm.GIT, 'IsGitSvn', True)
-    gclient_scm.scm.GIT.IsGitSvn(self.base_path).AndReturn(False)
-    gclient_scm.os.path.exists(self.base_path).AndReturn(True)
-
-    # Checkout or update.
-    dotted_path = join(self.base_path, '.')
-    gclient_scm.scm.SVN._CaptureInfo([], dotted_path).AndReturn(file_info)
-
-    # Verify no locked files.
-    gclient_scm.scm.SVN.CaptureStatus(None, dotted_path).AndReturn([])
-
-    # Cheat a bit here.
-    gclient_scm.scm.SVN._CaptureInfo([file_info['URL']], None
-        ).AndReturn(file_info)
-
-    # _AddAdditionalUpdateFlags()
-    gclient_scm.scm.SVN.Capture(['--version', '--quiet'], None
-        ).AndReturn('1.5.1')
-
-    gclient_scm.scm.SVN._CaptureInfo([], self.base_path+'/.'
-        ).AndReturn({'Revision': 100})
-
-    additional_args = []
-    if options.manually_grab_svn_rev:
-      additional_args = ['--revision', str(file_info['Revision'])]
-    additional_args.extend(['--force', '--ignore-externals'])
-    files_list = []
-    gclient_scm.scm.SVN.RunAndGetFileList(
-        options.verbose,
-        ['update', self.base_path] + additional_args,
-        cwd=self.root_dir, file_list=files_list)
-
-    self.mox.ReplayAll()
-    scm = self._scm_wrapper(url=self.url, root_dir=self.root_dir,
-                            relpath=self.relpath)
-    scm.update(options, (), files_list)
-
-  def testUpdateReset(self):
-    options = self.Options(verbose=True)
-    options.reset = True
-    file_info = {
-      'Repository Root': 'blah',
-      'URL': self.url,
-      'UUID': 'ABC',
-      'Revision': 42,
-    }
-    gclient_scm.os.path.exists(join(self.base_path, '.hg')).AndReturn(False)
-    self.mox.StubOutWithMock(gclient_scm.scm.GIT, 'IsGitSvn', True)
-    gclient_scm.scm.GIT.IsGitSvn(self.base_path).AndReturn(False)
-    gclient_scm.os.path.exists(self.base_path).AndReturn(True)
-
-    # Checkout or update.
-    dotted_path = join(self.base_path, '.')
-    gclient_scm.scm.SVN._CaptureInfo([], dotted_path).AndReturn(file_info)
-
-    # Create an untracked file and directory.
-    gclient_scm.scm.SVN.CaptureStatus(None, dotted_path
-        ).AndReturn([['?  ', 'dir'], ['?  ', 'file']])
-
-    gclient_scm.scm.SVN._CaptureInfo([file_info['URL']], None
-        ).AndReturn(file_info)
-
-    gclient_scm.scm.SVN._CaptureInfo([], self.base_path+'/.'
-        ).AndReturn({'Revision': 100})
-
-    self.mox.ReplayAll()
-    files_list = []
-    scm = self._scm_wrapper(url=self.url, root_dir=self.root_dir,
-                            relpath=self.relpath)
-    scm.update(options, (), files_list)
-    self.checkstdout('_____ %s at 42\n' % self.relpath)
-
-  def testUpdateResetDeleteUnversionedTrees(self):
-    options = self.Options(verbose=True)
-    options.reset = True
-    options.delete_unversioned_trees = True
-
-    file_info = {
-      'Repository Root': 'blah',
-      'URL': self.url,
-      'UUID': 'ABC',
-      'Revision': 42,
-    }
-    gclient_scm.os.path.exists(join(self.base_path, '.hg')).AndReturn(False)
-    self.mox.StubOutWithMock(gclient_scm.scm.GIT, 'IsGitSvn', True)
-    gclient_scm.scm.GIT.IsGitSvn(self.base_path).AndReturn(False)
-    gclient_scm.os.path.exists(self.base_path).AndReturn(True)
-
-    # Checkout or update.
-    dotted_path = join(self.base_path, '.')
-    gclient_scm.scm.SVN._CaptureInfo([], dotted_path).AndReturn(file_info)
-
-    # Create an untracked file and directory.
-    gclient_scm.scm.SVN.CaptureStatus(None, dotted_path
-        ).AndReturn([['?  ', 'dir'], ['?  ', 'file']])
-
-    gclient_scm.scm.SVN._CaptureInfo([file_info['URL']], None
-        ).AndReturn(file_info)
-
-    # Confirm that the untracked file is removed.
-    gclient_scm.scm.SVN.CaptureStatus(None, self.base_path
-        ).AndReturn([['?  ', 'dir'], ['?  ', 'file']])
-    gclient_scm.os.path.isdir(join(self.base_path, 'dir')).AndReturn(True)
-    gclient_scm.os.path.isdir(join(self.base_path, 'file')).AndReturn(False)
-    gclient_scm.os.path.islink(join(self.base_path, 'dir')).AndReturn(False)
-    gclient_scm.gclient_utils.rmtree(join(self.base_path, 'dir'))
-
-    gclient_scm.scm.SVN._CaptureInfo([], self.base_path+'/.'
-        ).AndReturn({'Revision': 100})
-
-    self.mox.ReplayAll()
-    scm = self._scm_wrapper(url=self.url, root_dir=self.root_dir,
-                            relpath=self.relpath)
-    files_list = []
-    scm.update(options, (), files_list)
-    self.checkstdout(
-      ('_____ %s at 42\n'
-       '_____ removing unversioned directory dir\n') % self.relpath)
-
-  def testUpdateSingleCheckout(self):
-    options = self.Options(verbose=True)
-    file_info = {
-      'URL': self.url,
-      'Revision': 42,
-    }
-
-    # Checks to make sure that we support svn co --depth.
-    gclient_scm.scm.SVN.current_version = None
-    gclient_scm.scm.SVN.Capture(['--version', '--quiet'], None
-        ).AndReturn('1.5.1')
-    gclient_scm.os.path.exists(join(self.base_path, '.svn')).AndReturn(False)
-    gclient_scm.os.path.exists(join(self.base_path, 'DEPS')).AndReturn(False)
-
-    # Verify no locked files.
-    dotted_path = join(self.base_path, '.')
-    gclient_scm.scm.SVN.CaptureStatus(None, dotted_path).AndReturn([])
-
-    # When checking out a single file, we issue an svn checkout and svn update.
-    files_list = self.mox.CreateMockAnything()
-    gclient_scm.gclient_utils.CheckCallAndFilterAndHeader(
-        ['svn', 'checkout', '--depth', 'empty', self.url, self.base_path],
-        always=True,
-        cwd=self.root_dir)
-    gclient_scm.scm.SVN.RunAndGetFileList(
-        options.verbose,
-        ['update', 'DEPS', '--ignore-externals'],
-        cwd=self.base_path,
-        file_list=files_list)
-
-    # Now we fall back on scm.update().
-    gclient_scm.os.path.exists(join(self.base_path, '.hg')).AndReturn(False)
-    self.mox.StubOutWithMock(gclient_scm.scm.GIT, 'IsGitSvn', True)
-    gclient_scm.scm.GIT.IsGitSvn(self.base_path).AndReturn(False)
-    gclient_scm.os.path.exists(self.base_path).AndReturn(True)
-    gclient_scm.scm.SVN._CaptureInfo([], dotted_path).AndReturn(file_info)
-    gclient_scm.scm.SVN._CaptureInfo([file_info['URL']], None
-        ).AndReturn(file_info)
-
-    gclient_scm.scm.SVN._CaptureInfo([], self.base_path+'/.'
-        ).AndReturn({'Revision': 100})
-
-    self.mox.ReplayAll()
-    scm = self._scm_wrapper(url=self.url, root_dir=self.root_dir,
-                            relpath=self.relpath)
-    scm.updatesingle(options, ['DEPS'], files_list)
-    self.checkstdout('_____ %s at 42\n' % self.relpath)
-
-  def testUpdateSingleCheckoutSVN14(self):
-    options = self.Options(verbose=True)
-
-    # Checks to make sure that we support svn co --depth.
-    gclient_scm.scm.SVN.current_version = None
-    gclient_scm.scm.SVN.Capture(['--version', '--quiet'], None
-        ).AndReturn('1.4.4')
-    gclient_scm.os.path.exists(self.base_path).AndReturn(True)
-
-    # When checking out a single file with svn 1.4, we use svn export
-    files_list = self.mox.CreateMockAnything()
-    gclient_scm.gclient_utils.CheckCallAndFilterAndHeader(
-        ['svn', 'export', join(self.url, 'DEPS'), join(self.base_path, 'DEPS')],
-        always=True, cwd=self.root_dir)
-
-    self.mox.ReplayAll()
-    scm = self._scm_wrapper(url=self.url, root_dir=self.root_dir,
-                            relpath=self.relpath)
-    scm.updatesingle(options, ['DEPS'], files_list)
-
-  def testUpdateSingleCheckoutSVNUpgrade(self):
-    options = self.Options(verbose=True)
-    file_info = {
-      'URL': self.url,
-      'Revision': 42,
-    }
-
-    # Checks to make sure that we support svn co --depth.
-    gclient_scm.scm.SVN.current_version = None
-    gclient_scm.scm.SVN.Capture(['--version', '--quiet'], None
-        ).AndReturn('1.5.1')
-    gclient_scm.os.path.exists(join(self.base_path, '.svn')).AndReturn(False)
-    # If DEPS already exists, assume we're upgrading from svn1.4, so delete
-    # the old DEPS file.
-    gclient_scm.os.path.exists(join(self.base_path, 'DEPS')).AndReturn(True)
-    gclient_scm.os.remove(join(self.base_path, 'DEPS'))
-
-    # Verify no locked files.
-    gclient_scm.scm.SVN.CaptureStatus(
-        None, join(self.base_path, '.')).AndReturn([])
-
-    # When checking out a single file, we issue an svn checkout and svn update.
-    files_list = self.mox.CreateMockAnything()
-    gclient_scm.gclient_utils.CheckCallAndFilterAndHeader(
-        ['svn', 'checkout', '--depth', 'empty', self.url, self.base_path],
-        always=True,
-        cwd=self.root_dir)
-    gclient_scm.scm.SVN.RunAndGetFileList(
-        options.verbose,
-        ['update', 'DEPS', '--ignore-externals'],
-        cwd=self.base_path,
-        file_list=files_list)
-
-    # Now we fall back on scm.update().
-    gclient_scm.os.path.exists(join(self.base_path, '.hg')).AndReturn(False)
-    self.mox.StubOutWithMock(gclient_scm.scm.GIT, 'IsGitSvn', True)
-    gclient_scm.scm.GIT.IsGitSvn(self.base_path).AndReturn(False)
-    gclient_scm.os.path.exists(self.base_path).AndReturn(True)
-    gclient_scm.scm.SVN._CaptureInfo(
-        [], join(self.base_path, ".")).AndReturn(file_info)
-    gclient_scm.scm.SVN._CaptureInfo([file_info['URL']], None
-        ).AndReturn(file_info)
-
-    gclient_scm.scm.SVN._CaptureInfo([], self.base_path+'/.'
-        ).AndReturn({'Revision': 100})
-
-    self.mox.ReplayAll()
-    scm = self._scm_wrapper(url=self.url, root_dir=self.root_dir,
-                            relpath=self.relpath)
-    scm.updatesingle(options, ['DEPS'], files_list)
-    self.checkstdout(
-        ('_____ %s at 42\n' % self.relpath))
-
-  def testUpdateSingleUpdate(self):
-    options = self.Options(verbose=True)
-    file_info = {
-      'URL': self.url,
-      'Revision': 42,
-    }
-    # Checks to make sure that we support svn co --depth.
-    gclient_scm.scm.SVN.current_version = None
-    gclient_scm.scm.SVN.Capture(['--version', '--quiet'], None
-        ).AndReturn('1.5.1')
-    gclient_scm.os.path.exists(join(self.base_path, '.svn')).AndReturn(True)
-
-    # Verify no locked files.
-    gclient_scm.scm.SVN.CaptureStatus(None, join(self.base_path, '.')
-        ).AndReturn([])
-
-    # Now we fall back on scm.update().
-    files_list = self.mox.CreateMockAnything()
-    gclient_scm.os.path.exists(join(self.base_path, '.hg')).AndReturn(False)
-    self.mox.StubOutWithMock(gclient_scm.scm.GIT, 'IsGitSvn', True)
-    gclient_scm.scm.GIT.IsGitSvn(self.base_path).AndReturn(False)
-    gclient_scm.os.path.exists(self.base_path).AndReturn(True)
-    gclient_scm.scm.SVN._CaptureInfo(
-        [], join(self.base_path, '.')).AndReturn(file_info)
-    gclient_scm.scm.SVN._CaptureInfo([file_info['URL']], None
-        ).AndReturn(file_info)
-
-    gclient_scm.scm.SVN._CaptureInfo([], self.base_path+'/.'
-        ).AndReturn({'Revision': 100})
-
-    self.mox.ReplayAll()
-    scm = self._scm_wrapper(url=self.url, root_dir=self.root_dir,
-                            relpath=self.relpath)
-    scm.updatesingle(options, ['DEPS'], files_list)
-    self.checkstdout('_____ %s at 42\n' % self.relpath)
-
-  def testUpdateGit(self):
-    options = self.Options(verbose=True)
-    file_path = gclient_scm.os.path.join(self.root_dir, self.relpath, '.hg')
-    gclient_scm.os.path.exists(file_path).AndReturn(False)
-    gclient_scm.os.path.exists(self.base_path).AndReturn(True)
-    self.mox.StubOutWithMock(gclient_scm.scm.GIT, 'IsGitSvn', True)
-    gclient_scm.scm.GIT.IsGitSvn(self.base_path).AndReturn(False)
-    error = gclient_scm.subprocess2.CalledProcessError(
-        1, 'cmd', '/cwd', 'stdout', 'stderr')
-    gclient_scm.scm.SVN._CaptureInfo([], self.base_path+'/.').AndRaise(error)
-
-    bad_scm_path = os.path.join(self.root_dir, '_bad_scm',
-                                os.path.dirname(self.relpath))
-    gclient_scm.os.makedirs(bad_scm_path)
-    dest_path = os.path.join(bad_scm_path,
-                             os.path.basename(self.relpath) + 'ABCD')
-    self.mox.StubOutWithMock(gclient_scm.tempfile, 'mkdtemp', True)
-    gclient_scm.tempfile.mkdtemp(
-        prefix=os.path.basename(self.relpath),
-        dir=os.path.join(self.root_dir, '_bad_scm',
-                         os.path.dirname(self.relpath))).AndReturn(dest_path)
-    self.mox.StubOutWithMock(gclient_scm.shutil, 'move', True)
-    gclient_scm.shutil.move(self.base_path, dest_path)
-    gclient_scm.os.path.exists(self.root_dir).AndReturn(True)
-    gclient_scm.scm.SVN.Capture(['--version', '--quiet'], None
-        ).AndReturn('1.5.1')
-    gclient_scm.scm.SVN.RunAndGetFileList(
-        options.verbose,
-        ['checkout', self.url, self.base_path, '--force', '--ignore-externals'],
-        cwd=self.root_dir,
-        file_list=[])
-
-    gclient_scm.scm.SVN._CaptureInfo([], self.base_path+'/.'
-        ).AndReturn({'Revision': 100})
-
-    self.mox.ReplayAll()
-    scm = self._scm_wrapper(url=self.url, root_dir=self.root_dir,
-                            relpath=self.relpath)
-    scm.update(options, None, [])
-    self.checkstdout('_____ Conflicting directory found in %s. Moving to %s.\n'
-                     % (self.base_path, dest_path))
-
-  def testUpdateGitForce(self):
-    options = self.Options(verbose=True, force=True)
-    old_environ = dict(gclient_scm.os.environ)
-    gclient_scm.os.environ['CHROME_HEADLESS'] = '1'
-    try:
-      file_path = gclient_scm.os.path.join(self.root_dir, self.relpath, '.hg')
-      gclient_scm.os.path.exists(file_path).AndReturn(False)
-      gclient_scm.os.path.exists(self.base_path).AndReturn(True)
-      self.mox.StubOutWithMock(gclient_scm.scm.GIT, 'IsGitSvn', True)
-      gclient_scm.scm.GIT.IsGitSvn(self.base_path).AndReturn(False)
-      error = gclient_scm.subprocess2.CalledProcessError(
-          1, 'cmd', '/cwd', 'stdout', 'stderr')
-      gclient_scm.scm.SVN._CaptureInfo([], self.base_path+'/.').AndRaise(error)
-      gclient_scm.gclient_utils.rmtree(self.base_path)
-      gclient_scm.os.path.exists(self.root_dir).AndReturn(True)
-      gclient_scm.scm.SVN.Capture(['--version', '--quiet'], None
-          ).AndReturn('1.5.1')
-      gclient_scm.scm.SVN.RunAndGetFileList(
-          options.verbose,
-          ['checkout', self.url, self.base_path, '--force',
-           '--ignore-externals'],
-          cwd=self.root_dir,
-          file_list=[])
-
-      gclient_scm.scm.SVN._CaptureInfo([], self.base_path+'/.'
-          ).AndReturn({'Revision': 100})
-
-      self.mox.ReplayAll()
-      scm = self._scm_wrapper(url=self.url, root_dir=self.root_dir,
-                              relpath=self.relpath)
-      file_list = []
-      scm.update(options, None, file_list)
-      self.checkstdout('_____ Conflicting directory found in %s. Removing.\n'
-                       % self.base_path)
-    finally:
-      gclient_scm.os.environ = old_environ
-
-  def testUpdateGitSvn(self):
-    options = self.Options(verbose=True)
-    file_path = gclient_scm.os.path.join(self.root_dir, self.relpath, '.hg')
-    gclient_scm.os.path.exists(file_path).AndReturn(False)
-    gclient_scm.os.path.exists(self.base_path).AndReturn(True)
-    self.mox.StubOutWithMock(gclient_scm.scm.GIT, 'IsGitSvn', True)
-    gclient_scm.scm.GIT.IsGitSvn(self.base_path).AndReturn(True)
-    self.mox.StubOutWithMock(gclient_scm.scm.GIT, 'Capture', True)
-    gclient_scm.scm.GIT.Capture(['config', '--local', '--get',
-                                 'svn-remote.svn.url'],
-                                cwd=self.base_path).AndReturn(self.url)
-
-    self.mox.ReplayAll()
-    scm = self._scm_wrapper(url=self.url, root_dir=self.root_dir,
-                            relpath=self.relpath)
-    file_list = []
-    scm.update(options, [], file_list)
-    self.checkstdout(
-        ('\n_____ %s looks like a git-svn checkout. Skipping.\n' % self.relpath)
-        )
-
-  def testUpdateHg(self):
-    options = self.Options(verbose=True)
-    gclient_scm.os.path.exists(join(self.base_path, '.hg')).AndReturn(True)
-
-    self.mox.ReplayAll()
-    scm = self._scm_wrapper(url=self.url, root_dir=self.root_dir,
-                            relpath=self.relpath)
-    file_list = []
-    scm.update(options, self.args, file_list)
-    self.checkstdout(
-        ('________ found .hg directory; skipping %s\n' % self.relpath))
-
-  def testGetUsableRevSVN(self):
-    # pylint: disable=E1101
-    options = self.Options(verbose=True)
-
-    # Mock SVN revision validity checking.
-    self.mox.StubOutWithMock(
-        gclient_scm.scm.SVN, 'IsValidRevision', True)
-    gclient_scm.scm.SVN.IsValidRevision(url='%s@%s' % (self.url, 1)
-        ).AndReturn(True)
-    gclient_scm.scm.SVN.IsValidRevision(url='%s@%s' % (self.url, 'fake')
-        ).AndReturn(False)
-
-    self.mox.ReplayAll()
-
-    svn_scm = self._scm_wrapper(url=self.url, root_dir=self.root_dir)
-    # With an SVN checkout, 1 an example of a valid usable rev.
-    self.assertEquals(svn_scm.GetUsableRev(1, options), 1)
-    # With an SVN checkout, a fake or unknown rev should raise an excpetion.
-    self.assertRaises(gclient_scm.gclient_utils.Error,
-                      svn_scm.GetUsableRev, 'fake', options)
-
 class BaseGitWrapperTestCase(GCBaseTestCase, StdoutCheck, TestCaseUtils,
                              unittest.TestCase):
   """This class doesn't use pymox."""
   class OptionsObject(object):
     def __init__(self, verbose=False, revision=None):
+      self.auto_rebase = False
       self.verbose = verbose
       self.revision = revision
       self.manually_grab_svn_rev = True
@@ -1038,7 +355,7 @@
     scm.status(options, self.args, file_list)
     self.assertEquals(file_list, [file_path])
     self.checkstdout(
-        ('running \'git diff --name-status '
+        ('\n________ running \'git diff --name-status '
          '069c602044c5388d2d15c3f875b057c852003458\' in \'%s\'\nM\ta\n') %
             join(self.root_dir, '.'))
 
@@ -1058,7 +375,7 @@
     expected_file_list = [join(self.base_path, x) for x in ['a', 'b']]
     self.assertEquals(sorted(file_list), expected_file_list)
     self.checkstdout(
-        ('running \'git diff --name-status '
+        ('\n________ running \'git diff --name-status '
          '069c602044c5388d2d15c3f875b057c852003458\' in \'%s\'\nM\ta\nM\tb\n') %
             join(self.root_dir, '.'))
 
@@ -1245,6 +562,8 @@
     self.root_dir = '/tmp' if sys.platform != 'win32' else 't:\\tmp'
     self.relpath = 'fake'
     self.base_path = os.path.join(self.root_dir, self.relpath)
+    self.backup_base_path = os.path.join(self.root_dir,
+                                         'old_%s.git' % self.relpath)
 
   def tearDown(self):
     BaseTestCase.tearDown(self)
@@ -1354,6 +673,7 @@
 
     gclient_scm.os.path.isdir(
         os.path.join(self.base_path, '.git', 'hooks')).AndReturn(False)
+    gclient_scm.os.path.exists(self.backup_base_path).AndReturn(False)
     gclient_scm.os.path.exists(self.base_path).AndReturn(True)
     gclient_scm.os.path.isdir(self.base_path).AndReturn(True)
     gclient_scm.os.path.exists(os.path.join(self.base_path, '.git')
@@ -1384,6 +704,7 @@
 
     gclient_scm.os.path.isdir(
         os.path.join(self.base_path, '.git', 'hooks')).AndReturn(False)
+    gclient_scm.os.path.exists(self.backup_base_path).AndReturn(False)
     gclient_scm.os.path.exists(self.base_path).AndReturn(True)
     gclient_scm.os.path.isdir(self.base_path).AndReturn(True)
     gclient_scm.os.path.exists(os.path.join(self.base_path, '.git')
diff --git a/tests/gclient_smoketest.py b/tests/gclient_smoketest.py
index eb2275e..50e7fd7 100755
--- a/tests/gclient_smoketest.py
+++ b/tests/gclient_smoketest.py
@@ -20,20 +20,17 @@
 ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
 sys.path.insert(0, ROOT_DIR)
 
-from testing_support.fake_repos import join, write
-from testing_support.fake_repos import FakeReposTestBase, FakeRepoTransitive, \
-                                       FakeRepoSkiaDEPS
-
 import gclient_utils
 import scm as gclient_scm
-
 import subprocess2
+from testing_support import fake_repos
+from testing_support.fake_repos import join, write
 
 GCLIENT_PATH = os.path.join(ROOT_DIR, 'gclient')
 COVERAGE = False
 
 
-class GClientSmokeBase(FakeReposTestBase):
+class GClientSmokeBase(fake_repos.FakeReposTestBase):
   def setUp(self):
     super(GClientSmokeBase, self).setUp()
     # Make sure it doesn't try to auto update when testing!
@@ -301,544 +298,6 @@
     self.checkBlock(res[0], [('running', deps), ('running', src)])
 
 
-class GClientSmokeSVN(GClientSmokeBase):
-  def setUp(self):
-    super(GClientSmokeSVN, self).setUp()
-    self.enabled = self.FAKE_REPOS.set_up_svn()
-
-  def testSync(self):
-    # TODO(maruel): safesync.
-    if not self.enabled:
-      return
-    self.gclient(['config', self.svn_base + 'trunk/src/'])
-    # Test unversioned checkout.
-    self.parseGclient(['sync', '--deps', 'mac', '--jobs', '1'],
-        ['running', 'running',
-          # This is due to the way svn update is called for a
-          # single file when File() is used in a DEPS file.
-          ('running', os.path.join(self.root_dir, 'src', 'file', 'other')),
-          'running', 'running', 'running', 'running'])
-    tree = self.mangle_svn_tree(
-        ('trunk/src@2', 'src'),
-        ('trunk/third_party/foo@1', 'src/third_party/foo'),
-        ('trunk/other@2', 'src/other'))
-    tree['src/file/other/DEPS'] = (
-        self.FAKE_REPOS.svn_revs[2]['trunk/other/DEPS'])
-    tree['src/svn_hooked1'] = 'svn_hooked1'
-    self.assertTree(tree)
-
-    # Manually remove svn_hooked1 before synching to make sure it's not
-    # recreated.
-    os.remove(join(self.root_dir, 'src', 'svn_hooked1'))
-
-    # Test incremental versioned sync: sync backward.
-    self.parseGclient(
-        ['sync', '--revision', 'src@1', '--deps', 'mac',
-          '--delete_unversioned_trees', '--jobs', '1'],
-        ['running', 'running', 'running', 'running', 'deleting'])
-    tree = self.mangle_svn_tree(
-        ('trunk/src@1', 'src'),
-        ('trunk/third_party/foo@2', 'src/third_party/fpp'),
-        ('trunk/other@1', 'src/other'),
-        ('trunk/third_party/foo@2', 'src/third_party/prout'))
-    tree['src/file/other/DEPS'] = (
-        self.FAKE_REPOS.svn_revs[2]['trunk/other/DEPS'])
-    self.assertTree(tree)
-    # Test incremental sync: delete-unversioned_trees isn't there.
-    self.parseGclient(['sync', '--deps', 'mac', '--jobs', '1'],
-                      ['running', 'running', 'running', 'running', 'running'])
-    tree = self.mangle_svn_tree(
-        ('trunk/src@2', 'src'),
-        ('trunk/third_party/foo@2', 'src/third_party/fpp'),
-        ('trunk/third_party/foo@1', 'src/third_party/foo'),
-        ('trunk/other@2', 'src/other'),
-        ('trunk/third_party/foo@2', 'src/third_party/prout'))
-    tree['src/file/other/DEPS'] = (
-        self.FAKE_REPOS.svn_revs[2]['trunk/other/DEPS'])
-    tree['src/svn_hooked1'] = 'svn_hooked1'
-    self.assertTree(tree)
-
-  def testSyncIgnoredSolutionName(self):
-    """TODO(maruel): This will become an error soon."""
-    if not self.enabled:
-      return
-    self.gclient(['config', self.svn_base + 'trunk/src/'])
-    results = self.gclient(
-        ['sync', '--deps', 'mac', '-r', 'invalid@1', '--jobs', '1'])
-    self.checkBlock(results[0], [
-        'running', 'running',
-        # This is due to the way svn update is called for a single file when
-        # File() is used in a DEPS file.
-        ('running', os.path.join(self.root_dir, 'src', 'file', 'other')),
-        'running', 'running', 'running', 'running'])
-    self.checkString('Please fix your script, having invalid --revision flags '
-        'will soon considered an error.\n', results[1])
-    self.assertEquals(0, results[2])
-    tree = self.mangle_svn_tree(
-        ('trunk/src@2', 'src'),
-        ('trunk/third_party/foo@1', 'src/third_party/foo'),
-        ('trunk/other@2', 'src/other'))
-    tree['src/file/other/DEPS'] = (
-        self.FAKE_REPOS.svn_revs[2]['trunk/other/DEPS'])
-    tree['src/svn_hooked1'] = 'svn_hooked1'
-    self.assertTree(tree)
-
-  def testSyncNoSolutionName(self):
-    # When no solution name is provided, gclient uses the first solution listed.
-    if not self.enabled:
-      return
-    self.gclient(['config', self.svn_base + 'trunk/src/'])
-    self.parseGclient(['sync', '--deps', 'mac', '-r', '1', '--jobs', '1'],
-                      ['running', 'running', 'running', 'running'])
-    tree = self.mangle_svn_tree(
-        ('trunk/src@1', 'src'),
-        ('trunk/third_party/foo@2', 'src/third_party/fpp'),
-        ('trunk/other@1', 'src/other'),
-        ('trunk/third_party/foo@2', 'src/third_party/prout'))
-    self.assertTree(tree)
-
-  def testSyncJobs(self):
-    if not self.enabled:
-      return
-    # TODO(maruel): safesync.
-    self.gclient(['config', self.svn_base + 'trunk/src/'])
-    # Test unversioned checkout.
-    # Use --jobs 1 otherwise the order is not deterministic.
-    self.parseGclient(
-        ['sync', '--deps', 'mac', '--jobs', '1'],
-        [
-          'running',
-          'running',
-          # This is due to the way svn update is called for a
-          # single file when File() is used in a DEPS file.
-          ('running', os.path.join(self.root_dir, 'src', 'file', 'other')),
-          'running',
-          'running',
-          'running',
-          'running',
-        ],
-        untangle=True)
-    tree = self.mangle_svn_tree(
-        ('trunk/src@2', 'src'),
-        ('trunk/third_party/foo@1', 'src/third_party/foo'),
-        ('trunk/other@2', 'src/other'))
-    tree['src/file/other/DEPS'] = (
-        self.FAKE_REPOS.svn_revs[2]['trunk/other/DEPS'])
-    tree['src/svn_hooked1'] = 'svn_hooked1'
-    self.assertTree(tree)
-
-    # Manually remove svn_hooked1 before synching to make sure it's not
-    # recreated.
-    os.remove(join(self.root_dir, 'src', 'svn_hooked1'))
-
-    # Test incremental versioned sync: sync backward.
-    self.parseGclient(
-        ['sync', '--revision', 'src@1', '--deps', 'mac',
-          '--delete_unversioned_trees', '--jobs', '8'],
-        ['running', 'running', 'running', 'running', 'deleting'],
-        untangle=True)
-    tree = self.mangle_svn_tree(
-        ('trunk/src@1', 'src'),
-        ('trunk/third_party/foo@2', 'src/third_party/fpp'),
-        ('trunk/other@1', 'src/other'),
-        ('trunk/third_party/foo@2', 'src/third_party/prout'))
-    tree['src/file/other/DEPS'] = (
-        self.FAKE_REPOS.svn_revs[2]['trunk/other/DEPS'])
-    self.assertTree(tree)
-    # Test incremental sync: delete-unversioned_trees isn't there.
-    self.parseGclient(['sync', '--deps', 'mac', '--jobs', '8'],
-                      ['running', 'running', 'running', 'running', 'running'],
-                      untangle=True)
-    tree = self.mangle_svn_tree(
-        ('trunk/src@2', 'src'),
-        ('trunk/third_party/foo@2', 'src/third_party/fpp'),
-        ('trunk/third_party/foo@1', 'src/third_party/foo'),
-        ('trunk/other@2', 'src/other'),
-        ('trunk/third_party/foo@2', 'src/third_party/prout'))
-    tree['src/file/other/DEPS'] = (
-        self.FAKE_REPOS.svn_revs[2]['trunk/other/DEPS'])
-    tree['src/svn_hooked1'] = 'svn_hooked1'
-    self.assertTree(tree)
-
-  def testSyncCustomDeps(self):
-    if not self.enabled:
-      return
-    out = (
-        'solutions = [\n'
-        '  { "name"        : "src",\n'
-        '    "url"         : "%(base)s/src",\n'
-        '    "custom_deps" : {\n'
-        # Remove 2 deps, change 1, add 1.
-        '      "src/other": None,\n'
-        '      "src/third_party/foo": \'%(base)s/third_party/prout\',\n'
-        '      "src/file/other": None,\n'
-        '      "new_deps": "/trunk/src/third_party",\n'
-        '    },\n'
-        '    "safesync_url": "",\n'
-        '  },\n'
-        ']\n\n' %
-      { 'base': self.svn_base + 'trunk' })
-    fileobj = open(os.path.join(self.root_dir, '.gclient'), 'w')
-    fileobj.write(out)
-    fileobj.close()
-    self.parseGclient(
-        ['sync', '--deps', 'mac', '--jobs', '1'],
-        ['running', 'running', 'running', 'running'],
-        untangle=True)
-    tree = self.mangle_svn_tree(
-        ('trunk/src@2', 'src'),
-        ('trunk/third_party/prout@2', 'src/third_party/foo'),
-        ('trunk/src/third_party@2', 'new_deps'))
-    tree['src/svn_hooked1'] = 'svn_hooked1'
-    self.assertTree(tree)
-
-  def testSyncCustomDepsNoDeps(self):
-    if not self.enabled:
-      return
-    out = (
-        'solutions = [\n'
-        # This directory has no DEPS file.
-        '  { "name"        : "src/third_party",\n'
-        '    "url"         : "%(base)s/src/third_party",\n'
-        '    "custom_deps" : {\n'
-        # Add 1.
-        '      "src/other": \'/trunk/other\',\n'
-        '    },\n'
-        '    "safesync_url": "",\n'
-        '  },\n'
-        ']\n\n' %
-      { 'base': self.svn_base + 'trunk' })
-    fileobj = open(os.path.join(self.root_dir, '.gclient'), 'w')
-    fileobj.write(out)
-    fileobj.close()
-    self.parseGclient(
-        ['sync', '--deps', 'mac', '--jobs', '1'],
-        ['running', 'running'],
-        untangle=True)
-    tree = self.mangle_svn_tree(
-        ('trunk/src/third_party@2', 'src/third_party'),
-        ('trunk/other@2', 'src/other'))
-    self.assertTree(tree)
-
-  def testRevertAndStatus(self):
-    if not self.enabled:
-      return
-    self.gclient(['config', self.svn_base + 'trunk/src/'])
-    # Tested in testSync.
-    self.gclient(['sync', '--deps', 'mac'])
-    write(join(self.root_dir, 'src', 'other', 'hi'), 'Hey!')
-
-    out = self.parseGclient(['status', '--deps', 'mac', '--jobs', '1'],
-                            [['running', join(self.root_dir, 'src')],
-                             ['running', join(self.root_dir, 'src', 'other')]])
-    out = self.svnBlockCleanup(out)
-    self.checkString('file', out[0][1])
-    self.checkString('other', out[0][2])
-    self.checkString('svn_hooked1', out[0][3])
-    self.checkString(join('third_party', 'foo'), out[0][4])
-    self.checkString('hi', out[1][1])
-    self.assertEquals(5, len(out[0]))
-    self.assertEquals(2, len(out[1]))
-
-    # Revert implies --force implies running hooks without looking at pattern
-    # matching.
-    results = self.gclient(['revert', '--deps', 'mac', '--jobs', '1'])
-    out = self.splitBlock(results[0])
-    # src, src/other is missing, src/other, src/third_party/foo is missing,
-    # src/third_party/foo, 2 svn hooks, 3 related to File().
-    self.assertEquals( 8, len(out))
-    self.checkString('', results[1])
-    self.assertEquals(0, results[2])
-    tree = self.mangle_svn_tree(
-        ('trunk/src@2', 'src'),
-        ('trunk/third_party/foo@1', 'src/third_party/foo'),
-        ('trunk/other@2', 'src/other'))
-    tree['src/file/other/DEPS'] = (
-        self.FAKE_REPOS.svn_revs[2]['trunk/other/DEPS'])
-    tree['src/svn_hooked1'] = 'svn_hooked1'
-    tree['src/svn_hooked2'] = 'svn_hooked2'
-    self.assertTree(tree)
-
-    out = self.parseGclient(['status', '--deps', 'mac', '--jobs', '1'],
-                            [['running', join(self.root_dir, 'src')]])
-    out = self.svnBlockCleanup(out)
-    self.checkString('file', out[0][1])
-    self.checkString('other', out[0][2])
-    self.checkString('svn_hooked1', out[0][3])
-    self.checkString('svn_hooked2', out[0][4])
-    self.checkString(join('third_party', 'foo'), out[0][5])
-    self.assertEquals(6, len(out[0]))
-    self.assertEquals(1, len(out))
-
-  def testRevertAndStatusDepsOs(self):
-    if not self.enabled:
-      return
-    self.gclient(['config', self.svn_base + 'trunk/src/'])
-    # Tested in testSync.
-    self.gclient(['sync', '--deps', 'mac', '--revision', 'src@1'])
-    write(join(self.root_dir, 'src', 'other', 'hi'), 'Hey!')
-
-    # Without --verbose, gclient won't output the directories without
-    # modification.
-    out = self.parseGclient(['status', '--deps', 'mac', '--jobs', '1'],
-                            [['running', join(self.root_dir, 'src')],
-                             ['running', join(self.root_dir, 'src', 'other')]])
-    out = self.svnBlockCleanup(out)
-    self.checkString('other', out[0][1])
-    self.checkString(join('third_party', 'fpp'), out[0][2])
-    self.checkString(join('third_party', 'prout'), out[0][3])
-    self.checkString('hi', out[1][1])
-    self.assertEquals(4, len(out[0]))
-    self.assertEquals(2, len(out[1]))
-
-    # So verify it works with --verbose.
-    out = self.parseGclient(
-        ['status', '--deps', 'mac', '--verbose', '--jobs', '1'],
-        [['running', join(self.root_dir, 'src')],
-          ['running', join(self.root_dir, 'src', 'other')],
-          ['running', join(self.root_dir, 'src', 'third_party', 'fpp')],
-          ['running', join(self.root_dir, 'src', 'third_party', 'prout')]])
-    out = self.svnBlockCleanup(out)
-    self.checkString('other', out[0][5])
-    self.checkString(join('third_party', 'fpp'), out[0][7])
-    self.checkString(join('third_party', 'prout'), out[0][8])
-    self.checkString('hi', out[1][5])
-    self.assertEquals(9, len(out[0]))
-    self.assertEquals(7, len(out[1]))
-    self.assertEquals(6, len(out[2]))
-    self.assertEquals(6, len(out[3]))
-    self.assertEquals(4, len(out))
-
-    # Revert implies --force implies running hooks without looking at pattern
-    # matching.
-    # TODO(maruel): In general, gclient revert output is wrong. It should output
-    # the file list after some ___ running 'svn status'
-    results = self.gclient(['revert', '--deps', 'mac', '--jobs', '1'])
-    out = self.splitBlock(results[0])
-    self.assertEquals(4, len(out))
-    self.checkString('', results[1])
-    self.assertEquals(0, results[2])
-    tree = self.mangle_svn_tree(
-        ('trunk/src@1', 'src'),
-        ('trunk/third_party/foo@2', 'src/third_party/fpp'),
-        ('trunk/other@1', 'src/other'),
-        ('trunk/third_party/prout@2', 'src/third_party/prout'))
-    self.assertTree(tree)
-
-    out = self.parseGclient(['status', '--deps', 'mac', '--jobs', '1'],
-                            [['running', join(self.root_dir, 'src')]])
-    out = self.svnBlockCleanup(out)
-    self.checkString('other', out[0][1])
-    self.checkString(join('third_party', 'fpp'), out[0][2])
-    self.checkString(join('third_party', 'prout'), out[0][3])
-    self.assertEquals(4, len(out[0]))
-
-  def testRunHooks(self):
-    if not self.enabled:
-      return
-    self.gclient(['config', self.svn_base + 'trunk/src/'])
-    self.gclient(['sync', '--deps', 'mac'])
-    out = self.parseGclient(['runhooks', '--deps', 'mac'],
-                            ['running', 'running'])
-    self.checkString(1, len(out[0]))
-    self.checkString(1, len(out[1]))
-
-  def testRunHooksDepsOs(self):
-    if not self.enabled:
-      return
-    self.gclient(['config', self.svn_base + 'trunk/src/'])
-    self.gclient(['sync', '--deps', 'mac', '--revision', 'src@1'])
-    out = self.parseGclient(['runhooks', '--deps', 'mac'], [])
-    self.assertEquals([], out)
-
-  def testRevInfo(self):
-    if not self.enabled:
-      return
-    self.gclient(['config', self.svn_base + 'trunk/src/'])
-    self.gclient(['sync', '--deps', 'mac'])
-    results = self.gclient(['revinfo', '--deps', 'mac'])
-    out = ('src: %(base)s/src\n'
-           'src/file/other: File("%(base)s/other/DEPS")\n'
-           'src/other: %(base)s/other\n'
-           'src/third_party/foo: %(base)s/third_party/foo@1\n' %
-          { 'base': self.svn_base + 'trunk' })
-    self.check((out, '', 0), results)
-    results = self.gclient(['revinfo', '--deps', 'mac', '--actual'])
-    out = ('src: %(base)s/src@2\n'
-           'src/file/other: %(base)s/other/DEPS@2\n'
-           'src/other: %(base)s/other@2\n'
-           'src/third_party/foo: %(base)s/third_party/foo@1\n' %
-          { 'base': self.svn_base + 'trunk' })
-    self.check((out, '', 0), results)
-    results = self.gclient(['revinfo', '--deps', 'mac', '--snapshot'])
-    out = ('# Snapshot generated with gclient revinfo --snapshot\n'
-           'solutions = [\n'
-           '  { "name"        : "src",\n'
-           '    "url"         : "%(base)s/src",\n'
-           '    "deps_file"   : "DEPS",\n'
-           '    "managed"     : True,\n'
-           '    "custom_deps" : {\n'
-           '      "foo/bar": None,\n'
-           '      "invalid": None,\n'
-           '      "src/file/other": \'%(base)s/other/DEPS@2\',\n'
-           '      "src/other": \'%(base)s/other@2\',\n'
-           '      "src/third_party/foo": '
-               '\'%(base)s/third_party/foo@1\',\n'
-           '    },\n'
-           '    "safesync_url": "",\n'
-           '  },\n'
-           ']\n\n' %
-          { 'base': self.svn_base + 'trunk' })
-    self.check((out, '', 0), results)
-
-  def testRevInfoAltDeps(self):
-    if not self.enabled:
-      return
-    self.gclient(['config', self.svn_base + 'trunk/src/', '--deps-file',
-                  'DEPS.alt'])
-    self.gclient(['sync'])
-    results = self.gclient(['revinfo', '--snapshot'])
-    out = ('# Snapshot generated with gclient revinfo --snapshot\n'
-           'solutions = [\n'
-           '  { "name"        : "src",\n'
-           '    "url"         : "%(base)s/src",\n'
-           '    "deps_file"   : "DEPS.alt",\n'
-           '    "managed"     : True,\n'
-           '    "custom_deps" : {\n'
-           '      "foo/bar": None,\n'
-           '      "invalid": None,\n'
-           '      "src/other2": \'%(base)s/other@2\',\n'
-           '    },\n'
-           '    "safesync_url": "",\n'
-           '  },\n'
-           ']\n\n' %
-          { 'base': self.svn_base + 'trunk' })
-    self.check((out, '', 0), results)
-
-
-  def testWrongDirectory(self):
-    # Check that we're not using a .gclient configuration which only talks
-    # about a subdirectory src when we're in a different subdirectory src-other.
-    if not self.enabled:
-      return
-    self.gclient(['config', self.svn_base + 'trunk/src/'])
-    self.gclient(['sync'])
-    other_src = join(self.root_dir, 'src-other')
-    os.mkdir(other_src)
-    res = ('', 'Error: client not configured; see \'gclient config\'\n', 1)
-    self.check(res, self.gclient(['status'], other_src))
-
-  def testCorrectDirectory(self):
-    # Check that when we're in the subdirectory src, the .gclient configuration
-    # is used.
-    if not self.enabled:
-      return
-    self.gclient(['config', self.svn_base + 'trunk/src/'])
-    self.gclient(['sync'])
-    src = join(self.root_dir, 'src')
-    res = self.gclient(['status', '--jobs', '1'], src)
-    self.checkBlock(res[0], [('running', src)])
-
-  def testInitialCheckoutNotYetDone(self):
-    # Check that gclient can be executed when the initial checkout hasn't been
-    # done yet.
-    if not self.enabled:
-      return
-    self.gclient(['config', self.svn_base + 'trunk/src/'])
-    self.parseGclient(
-        ['sync', '--jobs', '1'],
-        ['running', 'running',
-         # This is due to the way svn update is called for a
-         # single file when File() is used in a DEPS file.
-         ('running', os.path.join(self.root_dir, 'src', 'file', 'other')),
-         'running', 'running', 'running', 'running'])
-
-  def testInitialCheckoutFailed(self):
-    # Check that gclient can be executed from an arbitrary sub directory if the
-    # initial checkout has failed.
-    if not self.enabled:
-      return
-    self.gclient(['config', self.svn_base + 'trunk/src/'])
-    self.gclient(['sync'])
-    # Cripple the checkout.
-    os.remove(join(self.root_dir, '.gclient_entries'))
-    src = join(self.root_dir, 'src')
-    res = self.gclient(['sync', '--jobs', '1'], src)
-    self.checkBlock(res[0],
-                    ['running', 'running', 'running'])
-
-  def testUnversionedRepository(self):
-    # Check that gclient automatically deletes crippled SVN repositories.
-    if not self.enabled:
-      return
-    self.gclient(['config', self.svn_base + 'trunk/src/'])
-    cmd = ['sync', '--jobs', '1', '--delete_unversioned_trees', '--reset']
-    self.assertEquals(0, self.gclient(cmd)[-1])
-    third_party = join(self.root_dir, 'src', 'third_party')
-    subprocess2.check_call(['svn', 'propset', '-q', 'svn:ignore', 'foo', '.'],
-                           cwd=third_party)
-
-    # Cripple src/third_party/foo and make sure gclient still succeeds.
-    gclient_utils.rmtree(join(third_party, 'foo', '.svn'))
-    self.assertEquals(0, self.gclient(cmd)[-1])
-
-
-class GClientSmokeSVNTransitive(GClientSmokeBase):
-  FAKE_REPOS_CLASS = FakeRepoTransitive
-
-  def setUp(self):
-    super(GClientSmokeSVNTransitive, self).setUp()
-    self.enabled = self.FAKE_REPOS.set_up_svn()
-
-  def testSyncTransitive(self):
-    if not self.enabled:
-      return
-
-    self.gclient(['config', self.svn_base + 'trunk/src/'])
-
-    def test_case(parent, timestamp, fixed, output):
-      # We check out revision 'parent' and expect the following:
-      #  - src/ is checked out at r'parent'
-      #  - src/same_repo is checked out at r'parent' (due to --transitive)
-      #  - src/same_repo_fixed is checked out at r'fixed'
-      #  - src/different_repo is checked out at r'timestamp'
-      #    (due to --transitive)
-      #  - src/different_repo_fixed is checked out at r'fixed'
-
-      revisions = self.FAKE_REPOS.svn_revs
-      self.parseGclient(
-          ['sync', '--transitive', '--revision', 'src@%d' % parent,
-           '--jobs', '1'], output)
-      self.assertTree({
-        'src/origin': revisions[parent]['trunk/src/origin'],
-        'src/DEPS': revisions[parent]['trunk/src/DEPS'],
-        'src/same_repo/origin': revisions[parent]['trunk/third_party/origin'],
-        'src/same_repo_fixed/origin':
-            revisions[fixed]['trunk/third_party/origin'],
-        'src/different_repo/origin':
-            revisions[timestamp]['trunk/third_party/origin'],
-        'src/different_repo_fixed/origin':
-            revisions[fixed]['trunk/third_party/origin'],
-      })
-
-    # Here are the test cases for checking out 'trunk/src' at r1, r2 and r3
-    # r1: Everything is normal
-    test_case(parent=1, timestamp=1, fixed=1,
-              output=['running', 'running', 'running', 'running', 'running'])
-    # r2: Svn will scan from r1 upwards until it finds a revision matching the
-    # given timestamp or it takes the next smallest one (which is r2 in this
-    # case).
-    test_case(parent=2, timestamp=2, fixed=1,
-              output=['running', 'running', 'running'])
-    # r3: Svn will scan from r1 upwards until it finds a revision matching the
-    # given timestamp or it takes the next smallest one. Since
-    # timestamp(r3) < timestamp(r2) svn will checkout r1.
-    # This happens often on http://googlecode.com but is unexpected to happen
-    # with svnserve (unless you manually change 'svn:date')
-    test_case(parent=3, timestamp=1, fixed=1,
-              output=['running', 'running', 'running'])
-
-
 class GClientSmokeGIT(GClientSmokeBase):
   def setUp(self):
     super(GClientSmokeGIT, self).setUp()
@@ -1058,8 +517,8 @@
         ('running', self.root_dir),                 # pre-deps hook
         ('running', self.root_dir),                 # pre-deps hook (fails)
     ]
-    expected_stderr = ('Error: Command /usr/bin/python -c import sys; '
-                       'sys.exit(1) returned non-zero exit status 1 in %s\n'
+    expected_stderr = ("Error: Command '/usr/bin/python -c import sys; "
+                       "sys.exit(1)' returned non-zero exit status 1 in %s\n"
                        % self.root_dir)
     stdout, stderr, retcode = self.gclient(['sync', '--deps', 'mac', '--jobs=1',
                                             '--revision',
@@ -1220,247 +679,15 @@
     self.assertTrue(os.path.exists(join(repo2_root, 'last_file')))
 
 
-class GClientSmokeBoth(GClientSmokeBase):
-  def setUp(self):
-    super(GClientSmokeBoth, self).setUp()
-    self.enabled = self.FAKE_REPOS.set_up_svn() and self.FAKE_REPOS.set_up_git()
-
-  def testMultiSolutions(self):
-    if not self.enabled:
-      return
-    self.gclient(['config', '--spec',
-        'solutions=['
-        '{"name": "src",'
-        ' "url": "' + self.svn_base + 'trunk/src/"},'
-        '{"name": "src-git",'
-        '"url": "' + self.git_base + 'repo_1"}]'])
-    self.parseGclient(['sync', '--deps', 'mac', '--jobs', '1'],
-        ['running', 'running',
-         # This is due to the way svn update is called for a single
-         # file when File() is used in a DEPS file.
-         ('running', self.root_dir + '/src/file/other'),
-         'running', 'running', 'running', 'running',
-         'running', 'running'])
-    tree = self.mangle_git_tree(('repo_1@2', 'src-git'),
-                                ('repo_2@1', 'src/repo2'),
-                                ('repo_3@2', 'src/repo2/repo_renamed'))
-    tree.update(self.mangle_svn_tree(
-        ('trunk/src@2', 'src'),
-        ('trunk/third_party/foo@1', 'src/third_party/foo'),
-        ('trunk/other@2', 'src/other')))
-    tree['src/file/other/DEPS'] = (
-        self.FAKE_REPOS.svn_revs[2]['trunk/other/DEPS'])
-    tree['src/git_hooked1'] = 'git_hooked1'
-    tree['src/git_hooked2'] = 'git_hooked2'
-    tree['src/svn_hooked1'] = 'svn_hooked1'
-    self.assertTree(tree)
-
-  def testMultiSolutionsJobs(self):
-    if not self.enabled:
-      return
-    self.gclient(['config', '--spec',
-        'solutions=['
-        '{"name": "src",'
-        ' "url": "' + self.svn_base + 'trunk/src/"},'
-        '{"name": "src-git",'
-        '"url": "' + self.git_base + 'repo_1"}]'])
-    # There is no guarantee that the ordering will be consistent.
-    (stdout, stderr, returncode) = self.gclient(
-        ['sync', '--deps', 'mac', '--jobs', '8'])
-    stdout = self.untangle(stdout)
-    self.checkString('', stderr)
-    self.assertEquals(0, returncode)
-    results = self.splitBlock(stdout)
-    self.assertEquals(9, len(results))
-    tree = self.mangle_git_tree(('repo_1@2', 'src-git'),
-                                ('repo_2@1', 'src/repo2'),
-                                ('repo_3@2', 'src/repo2/repo_renamed'))
-    tree.update(self.mangle_svn_tree(
-        ('trunk/src@2', 'src'),
-        ('trunk/third_party/foo@1', 'src/third_party/foo'),
-        ('trunk/other@2', 'src/other')))
-    tree['src/file/other/DEPS'] = (
-        self.FAKE_REPOS.svn_revs[2]['trunk/other/DEPS'])
-    tree['src/git_hooked1'] = 'git_hooked1'
-    tree['src/git_hooked2'] = 'git_hooked2'
-    tree['src/svn_hooked1'] = 'svn_hooked1'
-    self.assertTree(tree)
-
-  def testMultiSolutionsMultiRev(self):
-    if not self.enabled:
-      return
-    self.gclient(['config', '--spec',
-        'solutions=['
-        '{"name": "src",'
-        ' "url": "' + self.svn_base + 'trunk/src/"},'
-        '{"name": "src-git",'
-        '"url": "' + self.git_base + 'repo_1"}]'])
-    self.parseGclient(
-        ['sync', '--deps', 'mac', '--jobs', '1', '--revision', '1',
-         '-r', 'src-git@' + self.githash('repo_1', 1)],
-        ['running', 'running', 'running', 'running'],
-        expected_stderr=
-            'You must specify the full solution name like --revision src@1\n'
-            'when you have multiple solutions setup in your .gclient file.\n'
-            'Other solutions present are: src-git.\n')
-    tree = self.mangle_git_tree(('repo_1@1', 'src-git'),
-                                ('repo_2@2', 'src/repo2'),
-                                ('repo_3@1', 'src/repo2/repo3'),
-                                ('repo_4@2', 'src/repo4'))
-    tree.update(self.mangle_svn_tree(
-        ('trunk/src@1', 'src'),
-        ('trunk/third_party/foo@2', 'src/third_party/fpp'),
-        ('trunk/other@1', 'src/other'),
-        ('trunk/third_party/foo@2', 'src/third_party/prout')))
-    self.assertTree(tree)
-
-  def testRevInfo(self):
-    if not self.enabled:
-      return
-    self.gclient(['config', '--spec',
-        'solutions=['
-        '{"name": "src",'
-        ' "url": "' + self.svn_base + 'trunk/src/"},'
-        '{"name": "src-git",'
-        '"url": "' + self.git_base + 'repo_1"}]'])
-    self.gclient(['sync', '--deps', 'mac'])
-    results = self.gclient(['revinfo', '--deps', 'mac'])
-    out = ('src: %(svn_base)s/src/\n'
-           'src-git: %(git_base)srepo_1\n'
-           'src/file/other: File("%(svn_base)s/other/DEPS")\n'
-           'src/other: %(svn_base)s/other\n'
-           'src/repo2: %(git_base)srepo_2@%(hash2)s\n'
-           'src/repo2/repo_renamed: %(git_base)srepo_3\n'
-           'src/third_party/foo: %(svn_base)s/third_party/foo@1\n') % {
-               'svn_base': self.svn_base + 'trunk',
-               'git_base': self.git_base,
-               'hash2': self.githash('repo_2', 1)[:7],
-          }
-    self.check((out, '', 0), results)
-    results = self.gclient(['revinfo', '--deps', 'mac', '--actual'])
-    out = ('src: %(svn_base)s/src/@2\n'
-           'src-git: %(git_base)srepo_1@%(hash1)s\n'
-           'src/file/other: %(svn_base)s/other/DEPS@2\n'
-           'src/other: %(svn_base)s/other@2\n'
-           'src/repo2: %(git_base)srepo_2@%(hash2)s\n'
-           'src/repo2/repo_renamed: %(git_base)srepo_3@%(hash3)s\n'
-           'src/third_party/foo: %(svn_base)s/third_party/foo@1\n') % {
-               'svn_base': self.svn_base + 'trunk',
-               'git_base': self.git_base,
-               'hash1': self.githash('repo_1', 2),
-               'hash2': self.githash('repo_2', 1),
-               'hash3': self.githash('repo_3', 2),
-          }
-    self.check((out, '', 0), results)
-
-  def testRecurse(self):
-    if not self.enabled:
-      return
-    self.gclient(['config', '--spec',
-        'solutions=['
-        '{"name": "src",'
-        ' "url": "' + self.svn_base + 'trunk/src/"},'
-        '{"name": "src-git",'
-        '"url": "' + self.git_base + 'repo_1"}]'])
-    self.gclient(['sync', '--deps', 'mac'])
-    results = self.gclient(['recurse', '-j1', 'sh', '-c',
-                            'echo $GCLIENT_SCM,$GCLIENT_URL,`pwd`'])
-
-    entries = [tuple(line.split(','))
-               for line in results[0].strip().split('\n')]
-    logging.debug(entries)
-
-    bases = {'svn': self.svn_base, 'git': self.git_base}
-    expected_source = [
-        ('svn', 'trunk/src/', 'src'),
-        ('git', 'repo_1', 'src-git'),
-        ('svn', 'trunk/other', 'src/other'),
-        ('git', 'repo_2@' + self.githash('repo_2', 1)[:7], 'src/repo2'),
-        ('git', 'repo_3', 'src/repo2/repo_renamed'),
-        ('svn', 'trunk/third_party/foo@1', 'src/third_party/foo'),
-      ]
-    expected = [(scm, bases[scm] + url, os.path.join(self.root_dir, path))
-                for (scm, url, path) in expected_source]
-
-    self.assertEquals(sorted(entries), sorted(expected))
-
-
 class SkiaDEPSTransitionSmokeTest(GClientSmokeBase):
   """Simulate the behavior of bisect bots as they transition across the Skia
   DEPS change."""
 
-  FAKE_REPOS_CLASS = FakeRepoSkiaDEPS
+  FAKE_REPOS_CLASS = fake_repos.FakeRepoSkiaDEPS
 
   def setUp(self):
     super(SkiaDEPSTransitionSmokeTest, self).setUp()
-    self.enabled = self.FAKE_REPOS.set_up_git() and self.FAKE_REPOS.set_up_svn()
-
-  def testSkiaDEPSChangeSVN(self):
-    if not self.enabled:
-      return
-
-    # Create an initial checkout:
-    # - Single checkout at the root.
-    # - Multiple checkouts in a shared subdirectory.
-    self.gclient(['config', '--spec',
-        'solutions=['
-        '{"name": "src",'
-        ' "url": "' + self.svn_base + 'trunk/src/",'
-        '}]'])
-
-    checkout_path = os.path.join(self.root_dir, 'src')
-    skia = os.path.join(checkout_path, 'third_party', 'skia')
-    skia_gyp = os.path.join(skia, 'gyp')
-    skia_include = os.path.join(skia, 'include')
-    skia_src = os.path.join(skia, 'src')
-
-    gyp_svn_url = self.svn_base + 'skia/gyp'
-    include_svn_url = self.svn_base + 'skia/include'
-    src_svn_url = self.svn_base + 'skia/src'
-    skia_git_url = self.git_base + 'repo_1'
-
-    # Initial sync. Verify that we get the expected checkout.
-    res = self.gclient(['sync', '--deps', 'mac', '--revision', 'src@2'])
-    self.assertEqual(res[2], 0, 'Initial sync failed.')
-    self.assertEqual(gclient_scm.SVN.CaptureLocalInfo([], skia_gyp)['URL'],
-                     gyp_svn_url)
-    self.assertEqual(gclient_scm.SVN.CaptureLocalInfo([], skia_include)['URL'],
-                     include_svn_url)
-    self.assertEqual(gclient_scm.SVN.CaptureLocalInfo([], skia_src)['URL'],
-                     src_svn_url)
-
-    # Verify that the sync succeeds. Verify that we have the  expected merged
-    # checkout.
-    res = self.gclient(['sync', '--deps', 'mac', '--revision', 'src@3'])
-    self.assertEqual(res[2], 0, 'DEPS change sync failed.')
-    self.assertEqual(gclient_scm.GIT.Capture(['config', 'remote.origin.url'],
-                                             skia), skia_git_url)
-
-    # Sync again. Verify that we still have the expected merged checkout.
-    res = self.gclient(['sync', '--deps', 'mac', '--revision', 'src@3'])
-    self.assertEqual(res[2], 0, 'Subsequent sync failed.')
-    self.assertEqual(gclient_scm.GIT.Capture(['config', 'remote.origin.url'],
-                                             skia), skia_git_url)
-
-    # Sync back to the original DEPS. Verify that we get the original structure.
-    res = self.gclient(['sync', '--deps', 'mac', '--revision', 'src@2'])
-    self.assertEqual(res[2], 0, 'Reverse sync failed.')
-    self.assertEqual(gclient_scm.SVN.CaptureLocalInfo([], skia_gyp)['URL'],
-                     gyp_svn_url)
-    self.assertEqual(gclient_scm.SVN.CaptureLocalInfo([], skia_include)['URL'],
-                     include_svn_url)
-    self.assertEqual(gclient_scm.SVN.CaptureLocalInfo([], skia_src)['URL'],
-                     src_svn_url)
-
-    # Sync again. Verify that we still have the original structure.
-    res = self.gclient(['sync', '--deps', 'mac', '--revision', 'src@2'])
-    self.assertEqual(res[2], 0, 'Subsequent sync #2 failed.')
-    self.assertEqual(gclient_scm.SVN.CaptureLocalInfo([], skia_gyp)['URL'],
-                     gyp_svn_url)
-    self.assertEqual(gclient_scm.SVN.CaptureLocalInfo([], skia_include)['URL'],
-                     include_svn_url)
-    self.assertEqual(gclient_scm.SVN.CaptureLocalInfo([], skia_src)['URL'],
-                     src_svn_url)
+    self.enabled = self.FAKE_REPOS.set_up_git()
 
   def testSkiaDEPSChangeGit(self):
     if not self.enabled:
@@ -1538,98 +765,139 @@
                                              skia_src), src_git_url)
 
 
-class GClientSmokeFromCheckout(GClientSmokeBase):
-  # WebKit abuses this. It has a .gclient and a DEPS from a checkout.
+class BlinkDEPSTransitionSmokeTest(GClientSmokeBase):
+  """Simulate the behavior of bisect bots as they transition across the Blink
+  DEPS change."""
+
+  FAKE_REPOS_CLASS = fake_repos.FakeRepoBlinkDEPS
+
   def setUp(self):
-    super(GClientSmokeFromCheckout, self).setUp()
-    self.enabled = self.FAKE_REPOS.set_up_svn()
-    os.rmdir(self.root_dir)
-    if self.enabled:
-      usr, pwd = self.FAKE_REPOS.USERS[0]
-      subprocess2.check_call(
-          ['svn', 'checkout', self.svn_base + '/trunk/webkit',
-           self.root_dir, '-q',
-           '--non-interactive', '--no-auth-cache',
-           '--username', usr, '--password', pwd])
+    super(BlinkDEPSTransitionSmokeTest, self).setUp()
+    self.enabled = self.FAKE_REPOS.set_up_git()
+    self.checkout_path = os.path.join(self.root_dir, 'src')
+    self.blink = os.path.join(self.checkout_path, 'third_party', 'WebKit')
+    self.blink_git_url = self.FAKE_REPOS.git_base + 'repo_2'
+    self.pre_merge_sha = self.githash('repo_1', 1)
+    self.post_merge_sha = self.githash('repo_1', 2)
 
-  def testSync(self):
+  def CheckStatusPreMergePoint(self):
+    self.assertEqual(gclient_scm.GIT.Capture(['config', 'remote.origin.url'],
+                                             self.blink), self.blink_git_url)
+    self.assertTrue(os.path.exists(join(self.blink, '.git')))
+    self.assertTrue(os.path.exists(join(self.blink, 'OWNERS')))
+    with open(join(self.blink, 'OWNERS')) as f:
+      owners_content = f.read()
+      self.assertEqual('OWNERS-pre', owners_content, 'OWNERS not updated')
+    self.assertTrue(os.path.exists(join(self.blink, 'Source', 'exists_always')))
+    self.assertTrue(os.path.exists(
+        join(self.blink, 'Source', 'exists_before_but_not_after')))
+    self.assertFalse(os.path.exists(
+        join(self.blink, 'Source', 'exists_after_but_not_before')))
+
+  def CheckStatusPostMergePoint(self):
+    # Check that the contents still exist
+    self.assertTrue(os.path.exists(join(self.blink, 'OWNERS')))
+    with open(join(self.blink, 'OWNERS')) as f:
+      owners_content = f.read()
+      self.assertEqual('OWNERS-post', owners_content, 'OWNERS not updated')
+    self.assertTrue(os.path.exists(join(self.blink, 'Source', 'exists_always')))
+    # Check that files removed between the branch points are actually deleted.
+    self.assertTrue(os.path.exists(
+        join(self.blink, 'Source', 'exists_after_but_not_before')))
+    self.assertFalse(os.path.exists(
+        join(self.blink, 'Source', 'exists_before_but_not_after')))
+    # But not the .git folder
+    self.assertFalse(os.path.exists(join(self.blink, '.git')))
+
+  @unittest.skip('flaky')
+  def testBlinkDEPSChangeUsingGclient(self):
+    """Checks that {src,blink} repos are consistent when syncing going back and
+    forth using gclient sync src@revision."""
     if not self.enabled:
       return
-    self.parseGclient(['sync', '--deps', 'mac', '--jobs', '1'],
-        ['running', 'running'])
-    tree = self.mangle_svn_tree(
-        ('trunk/webkit@2', ''),
-        ('trunk/third_party/foo@1', 'foo/bar'))
-    self.assertTree(tree)
 
-  def testRevertAndStatus(self):
+    self.gclient(['config', '--spec',
+        'solutions=['
+        '{"name": "src",'
+        ' "url": "' + self.git_base + 'repo_1",'
+        '}]'])
+
+    # Go back and forth two times.
+    for _ in xrange(2):
+      res = self.gclient(['sync', '--jobs', '1',
+                          '--revision', 'src@%s' % self.pre_merge_sha])
+      self.assertEqual(res[2], 0, 'DEPS change sync failed.')
+      self.CheckStatusPreMergePoint()
+
+      res = self.gclient(['sync', '--jobs', '1',
+                          '--revision', 'src@%s' % self.post_merge_sha])
+      self.assertEqual(res[2], 0, 'DEPS change sync failed.')
+      self.CheckStatusPostMergePoint()
+
+
+  @unittest.skip('flaky')
+  def testBlinkDEPSChangeUsingGit(self):
+    """Like testBlinkDEPSChangeUsingGclient, but moves the main project using
+    git directly rather than gclient sync."""
     if not self.enabled:
       return
-    self.gclient(['sync'])
 
-    # TODO(maruel): This is incorrect.
-    out = self.parseGclient(['status', '--deps', 'mac', '--jobs', '1'], [])
+    self.gclient(['config', '--spec',
+        'solutions=['
+        '{"name": "src",'
+        ' "url": "' + self.git_base + 'repo_1",'
+        ' "managed": False,'
+        '}]'])
 
-    # Revert implies --force implies running hooks without looking at pattern
-    # matching.
-    results = self.gclient(['revert', '--deps', 'mac', '--jobs', '1'])
-    out = self.splitBlock(results[0])
-    self.assertEquals(2, len(out))
-    self.checkString(2, len(out[0]))
-    self.checkString(2, len(out[1]))
-    self.checkString('foo', out[1][1])
-    self.checkString('', results[1])
-    self.assertEquals(0, results[2])
-    tree = self.mangle_svn_tree(
-        ('trunk/webkit@2', ''),
-        ('trunk/third_party/foo@1', 'foo/bar'))
-    self.assertTree(tree)
+    # Perform an initial sync to bootstrap the repo.
+    res = self.gclient(['sync', '--jobs', '1'])
+    self.assertEqual(res[2], 0, 'Initial gclient sync failed.')
 
-    # TODO(maruel): This is incorrect.
-    out = self.parseGclient(['status', '--deps', 'mac'], [])
+    # Go back and forth two times.
+    for _ in xrange(2):
+      subprocess2.check_call(['git', 'checkout', '-q', self.pre_merge_sha],
+                             cwd=self.checkout_path)
+      res = self.gclient(['sync', '--jobs', '1'])
+      self.assertEqual(res[2], 0, 'gclient sync failed.')
+      self.CheckStatusPreMergePoint()
 
-  def testRunHooks(self):
+      subprocess2.check_call(['git', 'checkout', '-q', self.post_merge_sha],
+                             cwd=self.checkout_path)
+      res = self.gclient(['sync', '--jobs', '1'])
+      self.assertEqual(res[2], 0, 'DEPS change sync failed.')
+      self.CheckStatusPostMergePoint()
+
+
+  @unittest.skip('flaky')
+  def testBlinkLocalBranchesArePreserved(self):
+    """Checks that the state of local git branches is effectively preserved
+    when going back and forth."""
     if not self.enabled:
       return
-    # Hooks aren't really tested for now since there is no hook defined.
-    self.gclient(['sync', '--deps', 'mac'])
-    out = self.parseGclient(['runhooks', '--deps', 'mac'], ['running'])
-    self.assertEquals(1, len(out))
-    self.assertEquals(2, len(out[0]))
-    self.assertEquals(3, len(out[0][0]))
-    self.checkString('foo', out[0][1])
-    tree = self.mangle_svn_tree(
-        ('trunk/webkit@2', ''),
-        ('trunk/third_party/foo@1', 'foo/bar'))
-    self.assertTree(tree)
 
-  def testRevInfo(self):
-    if not self.enabled:
-      return
-    self.gclient(['sync', '--deps', 'mac'])
-    results = self.gclient(['revinfo', '--deps', 'mac'])
-    expected = (
-        './: None\nfoo/bar: %strunk/third_party/foo@1\n' % self.svn_base,
-        '', 0)
-    self.check(expected, results)
-    # TODO(maruel): To be added after the refactor.
-    #results = self.gclient(['revinfo', '--snapshot'])
-    #expected = (
-    #    './: None\nfoo/bar: %strunk/third_party/foo@1\n' % self.svn_base,
-    #    '', 0)
-    #self.check(expected, results)
+    self.gclient(['config', '--spec',
+        'solutions=['
+        '{"name": "src",'
+        ' "url": "' + self.git_base + 'repo_1",'
+        '}]'])
 
-  def testRest(self):
-    if not self.enabled:
-      return
-    self.gclient(['sync'])
-    # TODO(maruel): This is incorrect, it should run on ./ too.
-    self.parseGclient(
-        ['cleanup', '--deps', 'mac', '--verbose', '--jobs', '1'],
-        [('running', join(self.root_dir, 'foo', 'bar'))])
-    self.parseGclient(
-        ['diff', '--deps', 'mac', '--verbose', '--jobs', '1'],
-        [('running', join(self.root_dir, 'foo', 'bar'))])
+    # Initialize to pre-merge point.
+    self.gclient(['sync', '--revision', 'src@%s' % self.pre_merge_sha])
+    self.CheckStatusPreMergePoint()
+
+    # Create a branch named "foo".
+    subprocess2.check_call(['git', 'checkout', '-qB', 'foo'],
+                           cwd=self.blink)
+
+    # Cross the pre-merge point.
+    self.gclient(['sync', '--revision', 'src@%s' % self.post_merge_sha])
+    self.CheckStatusPostMergePoint()
+
+    # Go backwards and check that we still have the foo branch.
+    self.gclient(['sync', '--revision', 'src@%s' % self.pre_merge_sha])
+    self.CheckStatusPreMergePoint()
+    subprocess2.check_call(
+        ['git', 'show-ref', '-q', '--verify', 'refs/heads/foo'], cwd=self.blink)
 
 
 if __name__ == '__main__':
diff --git a/tests/gclient_test.py b/tests/gclient_test.py
index c246efc..0374ed0 100755
--- a/tests/gclient_test.py
+++ b/tests/gclient_test.py
@@ -907,6 +907,124 @@
         ],
         self._get_processed())
 
+  def testDepsFromNotAllowedHostsUnspecified(self):
+    """Verifies gclient works fine with DEPS without allowed_hosts."""
+    write(
+        '.gclient',
+        'solutions = [\n'
+        '  { "name": "foo", "url": "svn://example.com/foo",\n'
+        '    "deps_file" : ".DEPS.git",\n'
+        '  },\n'
+          ']')
+    write(
+        os.path.join('foo', 'DEPS'),
+        'deps = {\n'
+        '  "bar": "/bar",\n'
+        '}')
+    options, _ = gclient.OptionParser().parse_args([])
+    obj = gclient.GClient.LoadCurrentConfig(options)
+    obj.RunOnDeps('None', [])
+    dep = obj.dependencies[0]
+    self.assertEquals([], dep.findDepsFromNotAllowedHosts())
+    self.assertEquals(frozenset(), dep.allowed_hosts)
+    self._get_processed()
+
+  def testDepsFromNotAllowedHostsOK(self):
+    """Verifies gclient works fine with DEPS with proper allowed_hosts."""
+    write(
+        '.gclient',
+        'solutions = [\n'
+        '  { "name": "foo", "url": "svn://example.com/foo",\n'
+        '    "deps_file" : ".DEPS.git",\n'
+        '  },\n'
+          ']')
+    write(
+        os.path.join('foo', '.DEPS.git'),
+        'allowed_hosts = ["example.com"]\n'
+        'deps = {\n'
+        '  "bar": "svn://example.com/bar",\n'
+        '}')
+    options, _ = gclient.OptionParser().parse_args([])
+    obj = gclient.GClient.LoadCurrentConfig(options)
+    obj.RunOnDeps('None', [])
+    dep = obj.dependencies[0]
+    self.assertEquals([], dep.findDepsFromNotAllowedHosts())
+    self.assertEquals(frozenset(['example.com']), dep.allowed_hosts)
+    self._get_processed()
+
+  def testDepsFromNotAllowedHostsBad(self):
+    """Verifies gclient detects DEPS deps from hosts not in allowed_hosts."""
+    write(
+        '.gclient',
+        'solutions = [\n'
+        '  { "name": "foo", "url": "svn://example.com/foo",\n'
+        '    "deps_file" : ".DEPS.git",\n'
+        '  },\n'
+          ']')
+    write(
+        os.path.join('foo', '.DEPS.git'),
+        'allowed_hosts = ["other.com"]\n'
+        'deps = {\n'
+        '  "bar": "svn://example.com/bar",\n'
+        '}')
+    options, _ = gclient.OptionParser().parse_args([])
+    obj = gclient.GClient.LoadCurrentConfig(options)
+    obj.RunOnDeps('None', [])
+    dep = obj.dependencies[0]
+    self.assertEquals(frozenset(['other.com']), dep.allowed_hosts)
+    self.assertEquals([dep.dependencies[0]], dep.findDepsFromNotAllowedHosts())
+    self._get_processed()
+
+  def testDepsParseFailureWithEmptyAllowedHosts(self):
+    """Verifies gclient fails with defined but empty allowed_hosts."""
+    write(
+        '.gclient',
+        'solutions = [\n'
+        '  { "name": "foo", "url": "svn://example.com/foo",\n'
+        '    "deps_file" : ".DEPS.git",\n'
+        '  },\n'
+          ']')
+    write(
+        os.path.join('foo', 'DEPS'),
+        'allowed_hosts = []\n'
+        'deps = {\n'
+        '  "bar": "/bar",\n'
+        '}')
+    options, _ = gclient.OptionParser().parse_args([])
+    obj = gclient.GClient.LoadCurrentConfig(options)
+    try:
+      obj.RunOnDeps('None', [])
+      self.fail()
+    except gclient_utils.Error, e:
+      self.assertIn('allowed_hosts must be', str(e))
+    finally:
+      self._get_processed()
+
+  def testDepsParseFailureWithNonIterableAllowedHosts(self):
+    """Verifies gclient fails with defined but non-iterable allowed_hosts."""
+    write(
+        '.gclient',
+        'solutions = [\n'
+        '  { "name": "foo", "url": "svn://example.com/foo",\n'
+        '    "deps_file" : ".DEPS.git",\n'
+        '  },\n'
+          ']')
+    write(
+        os.path.join('foo', 'DEPS'),
+        'allowed_hosts = None\n'
+        'deps = {\n'
+        '  "bar": "/bar",\n'
+        '}')
+    options, _ = gclient.OptionParser().parse_args([])
+    obj = gclient.GClient.LoadCurrentConfig(options)
+    try:
+      obj.RunOnDeps('None', [])
+      self.fail()
+    except gclient_utils.Error, e:
+      self.assertIn('allowed_hosts must be', str(e))
+    finally:
+      self._get_processed()
+
 
 if __name__ == '__main__':
   sys.stdout = gclient_utils.MakeFileAutoFlush(sys.stdout)
diff --git a/tests/git_cl_test.py b/tests/git_cl_test.py
index 572d51f..220e591 100755
--- a/tests/git_cl_test.py
+++ b/tests/git_cl_test.py
@@ -66,6 +66,13 @@
             "GERRIT_PORT: 29418\n")
 
 
+class AuthenticatorMock(object):
+  def __init__(self, *_args):
+    pass
+  def has_cached_credentials(self):
+    return True
+
+
 class TestGitCl(TestCase):
   def setUp(self):
     super(TestGitCl, self).setUp()
@@ -75,10 +82,11 @@
     self.mock(subprocess2, 'check_call', self._mocked_call)
     self.mock(subprocess2, 'check_output', self._mocked_call)
     self.mock(subprocess2, 'communicate', self._mocked_call)
-    self.mock(subprocess2, 'Popen', self._mocked_call)
+    self.mock(git_common, 'is_dirty_git_tree', lambda x: False)
     self.mock(git_common, 'get_or_create_merge_base',
               lambda *a: (
                   self._mocked_call(['get_or_create_merge_base']+list(a))))
+    self.mock(git_cl, 'BranchExists', lambda _: True)
     self.mock(git_cl, 'FindCodereviewSettingsFile', lambda: '')
     self.mock(git_cl, 'ask_for_data', self._mocked_call)
     self.mock(git_cl.breakpad, 'post', self._mocked_call)
@@ -88,19 +96,26 @@
     self.mock(git_cl.rietveld, 'CachingRietveld', RietveldMock)
     self.mock(git_cl.upload, 'RealMain', self.fail)
     self.mock(git_cl.watchlists, 'Watchlists', WatchlistsMock)
+    self.mock(git_cl.auth, 'get_authenticator_for_host', AuthenticatorMock)
     # It's important to reset settings to not have inter-tests interference.
     git_cl.settings = None
 
   def tearDown(self):
-    if not self.has_failed():
-      self.assertEquals([], self.calls)
-    super(TestGitCl, self).tearDown()
+    try:
+      if not self.has_failed():
+        self.assertEquals([], self.calls)
+    finally:
+      super(TestGitCl, self).tearDown()
 
   def _mocked_call(self, *args, **_kwargs):
     self.assertTrue(
         self.calls,
         '@%d  Expected: <Missing>   Actual: %r' % (self._calls_done, args))
-    expected_args, result = self.calls.pop(0)
+    top = self.calls.pop(0)
+    if len(top) > 2 and top[2]:
+      raise top[2]
+    expected_args, result = top
+
     # Also logs otherwise it could get caught in a try/finally and be hard to
     # diagnose.
     if expected_args != args:
@@ -156,20 +171,19 @@
       similarity_call,
       ((['git', 'symbolic-ref', 'HEAD'],), 'master'),
       find_copies_call,
-      ((['git', 'update-index', '--refresh', '-q'],), ''),
-      ((['git', 'diff-index', '--name-status', 'HEAD'],), ''),
       ((['git', 'symbolic-ref', 'HEAD'],), 'master'),
       ((['git', 'config', 'branch.master.merge'],), 'master'),
       ((['git', 'config', 'branch.master.remote'],), 'origin'),
       ((['get_or_create_merge_base', 'master', 'master'],),
        'fake_ancestor_sha'),
+      ((['git', 'config', 'gerrit.host'],), ''),
+      ((['git', 'config', 'branch.master.rietveldissue'],), ''),
       ] + cls._git_sanity_checks('fake_ancestor_sha', 'master') + [
       ((['git', 'rev-parse', '--show-cdup'],), ''),
       ((['git', 'rev-parse', 'HEAD'],), '12345'),
       ((['git', 'diff', '--name-status', '--no-renames', '-r',
          'fake_ancestor_sha...', '.'],),
         'M\t.gitignore\n'),
-      ((['git', 'config', 'branch.master.rietveldissue'],), ''),
       ((['git', 'config', 'branch.master.rietveldpatchset'],),
        ''),
       ((['git', 'log', '--pretty=format:%s%n%n%b',
@@ -177,7 +191,6 @@
        'foo'),
       ((['git', 'config', 'user.email'],), 'me@example.com'),
       stat_call,
-      ((['git', 'config', 'gerrit.host'],), ''),
       ((['git', 'log', '--pretty=format:%s\n\n%b',
          'fake_ancestor_sha..HEAD'],),
        'desc\n'),
@@ -221,6 +234,7 @@
         ((['git', 'symbolic-ref', 'HEAD'],), 'hash'),
         ((['git',
            'config', 'branch.hash.last-upload-hash', 'hash'],), ''),
+        ((['git', 'config', 'rietveld.run-post-upload-hook'],), ''),
     ]
 
   @staticmethod
@@ -279,8 +293,6 @@
       ((['git', 'rev-list', '--merges',
          '--grep=^SVN changes up to revision [0-9]*$',
          'refs/remotes/origin/master^!'],), ''),
-      ((['git', 'update-index', '--refresh', '-q'],), ''),
-      ((['git', 'diff-index', '--name-status', 'HEAD'],), ''),
       ((['git', 'rev-list', '^refs/heads/working',
          'refs/remotes/origin/master'],),
          ''),
@@ -348,8 +360,9 @@
       ((['git', 'reset', '--soft', 'fake_ancestor_sha'],), ''),
       ((['git', 'commit', '-m',
          'Issue: 12345\n\nR=john@chromium.org\n\n'
-         'Review URL: https://codereview.example.com/12345'],),
+         'Review URL: https://codereview.example.com/12345 .'],),
        ''),
+      ((['git', 'config', 'rietveld.force-https-commit-url'],), ''),
       ((['git',
          'svn', 'dcommit', '-C50', '--no-rebase', '--rmdir'],),
        (('', None), 0)),
@@ -542,13 +555,12 @@
         ((['git', 'symbolic-ref', 'HEAD'],), 'master'),
         ((['git', 'config', '--int', '--get',
           'branch.master.git-find-copies'],), ''),
-        ((['git', 'update-index', '--refresh', '-q'],), ''),
-        ((['git', 'diff-index', '--name-status', 'HEAD'],), ''),
         ((['git', 'symbolic-ref', 'HEAD'],), 'master'),
         ((['git', 'config', 'branch.master.merge'],), 'master'),
         ((['git', 'config', 'branch.master.remote'],), 'origin'),
         ((['get_or_create_merge_base', 'master', 'master'],),
          'fake_ancestor_sha'),
+        ((['git', 'config', 'gerrit.host'],), 'gerrit.example.com'),
         ] + cls._git_sanity_checks('fake_ancestor_sha', 'master') + [
         ((['git', 'rev-parse', '--show-cdup'],), ''),
         ((['git', 'rev-parse', 'HEAD'],), '12345'),
@@ -570,10 +582,9 @@
         ]
 
   @staticmethod
-  def _gerrit_upload_calls(description, reviewers):
+  def _gerrit_upload_calls(description, reviewers, squash,
+                           expected_upstream_ref='origin/refs/heads/master'):
     calls = [
-        ((['git', 'config', 'gerrit.host'],),
-         'gerrit.example.com'),
         ((['git', 'log', '--pretty=format:%s\n\n%b',
            'fake_ancestor_sha..HEAD'],),
          description)
@@ -589,8 +600,30 @@
              'fake_ancestor_sha..HEAD'],),
            description)
           ]
+    if squash:
+      ref_to_push = 'abcdef0123456789'
+      calls += [
+          ((['git', 'show', '--format=%s\n\n%b', '-s',
+            'refs/heads/git_cl_uploads/master'],),
+           (description, 0)),
+          ((['git', 'config', 'branch.master.merge'],),
+           'refs/heads/master'),
+          ((['git', 'config', 'branch.master.remote'],),
+           'origin'),
+          ((['get_or_create_merge_base', 'master', 'master'],),
+           'origin/master'),
+          ((['git', 'rev-parse', 'HEAD:'],),
+           '0123456789abcdef'),
+          ((['git', 'commit-tree', '0123456789abcdef', '-p',
+             'origin/master', '-m', 'd'],),
+           ref_to_push),
+          ]
+    else:
+      ref_to_push = 'HEAD'
+
     calls += [
-        ((['git', 'rev-list', 'origin/master..'],), ''),
+        ((['git', 'rev-list',
+            expected_upstream_ref + '..' + ref_to_push],), ''),
         ((['git', 'config', 'rietveld.cc'],), '')
         ]
     receive_pack = '--receive-pack=git receive-pack '
@@ -602,19 +635,32 @@
     receive_pack += ''
     calls += [
         ((['git',
-           'push', receive_pack, 'origin', 'HEAD:refs/for/master'],),
+           'push', receive_pack, 'origin',
+           ref_to_push + ':refs/for/refs/heads/master'],),
          '')
         ]
+    if squash:
+      calls += [
+          ((['git', 'rev-parse', 'HEAD'],), 'abcdef0123456789'),
+          ((['git', 'update-ref', '-m', 'Uploaded abcdef0123456789',
+            'refs/heads/git_cl_uploads/master', 'abcdef0123456789'],),
+           '')
+          ]
+
     return calls
 
   def _run_gerrit_upload_test(
       self,
       upload_args,
       description,
-      reviewers):
+      reviewers,
+      squash=False,
+      expected_upstream_ref='origin/refs/heads/master'):
     """Generic gerrit upload test framework."""
     self.calls = self._gerrit_base_calls()
-    self.calls += self._gerrit_upload_calls(description, reviewers)
+    self.calls += self._gerrit_upload_calls(
+        description, reviewers, squash,
+        expected_upstream_ref=expected_upstream_ref)
     git_cl.main(['upload'] + upload_args)
 
   def test_gerrit_upload_without_change_id(self):
@@ -642,6 +688,61 @@
         'Change-Id:123456789\n',
         ['reviewer@example.com', 'another@example.com'])
 
+  def test_gerrit_upload_squash(self):
+    self._run_gerrit_upload_test(
+        ['--squash'],
+        'desc\n\nBUG=\nChange-Id:123456789\n',
+        [],
+        squash=True,
+        expected_upstream_ref='origin/master')
+
+  def test_upload_branch_deps(self):
+    def mock_run_git(*args, **_kwargs):
+      if args[0] == ['for-each-ref',
+                       '--format=%(refname:short) %(upstream:short)',
+                       'refs/heads']:
+        # Create a local branch dependency tree that looks like this:
+        # test1 -> test2 -> test3   -> test4 -> test5
+        #                -> test3.1
+        # test6 -> test0
+        branch_deps = [
+            'test2 test1',    # test1 -> test2
+            'test3 test2',    # test2 -> test3
+            'test3.1 test2',  # test2 -> test3.1
+            'test4 test3',    # test3 -> test4
+            'test5 test4',    # test4 -> test5
+            'test6 test0',    # test0 -> test6
+            'test7',          # test7
+        ]
+        return '\n'.join(branch_deps)
+    self.mock(git_cl, 'RunGit', mock_run_git)
+
+    class RecordCalls:
+      times_called = 0
+    record_calls = RecordCalls()
+    def mock_CMDupload(*args, **_kwargs):
+      record_calls.times_called += 1
+      return 0
+    self.mock(git_cl, 'CMDupload', mock_CMDupload)
+
+    self.calls = [
+        (('[Press enter to continue or ctrl-C to quit]',), ''),
+      ]
+
+    class MockChangelist():
+      def __init__(self):
+        pass
+      def GetBranch(self):
+        return 'test1'
+      def GetIssue(self):
+        return '123'
+      def GetPatchset(self):
+        return '1001'
+
+    ret = git_cl.upload_branch_deps(MockChangelist(), [])
+    # CMDupload should have been called 5 times because of 5 dependent branches.
+    self.assertEquals(5, record_calls.times_called)
+    self.assertEquals(0, ret)
 
   def test_config_gerrit_download_hook(self):
     self.mock(git_cl, 'FindCodereviewSettingsFile', CodereviewSettingsFileMock)
@@ -687,11 +788,15 @@
         ((['git', 'config', '--unset-all',
            'rietveld.cpplint-regex'],), ''),
         ((['git', 'config', '--unset-all',
+           'rietveld.force-https-commit-url'],), ''),
+        ((['git', 'config', '--unset-all',
            'rietveld.cpplint-ignore-regex'],), ''),
         ((['git', 'config', '--unset-all',
            'rietveld.project'],), ''),
         ((['git', 'config', '--unset-all',
            'rietveld.pending-ref-prefix'],), ''),
+        ((['git', 'config', '--unset-all',
+           'rietveld.run-post-upload-hook'],), ''),
         ((['git', 'config', 'gerrit.host',
            'gerrit.chromium.org'],), ''),
         # DownloadHooks(False)
@@ -719,6 +824,8 @@
         # DownloadHooks(True)
         ((['git', 'config', 'rietveld.bug-prefix'],), ''),
         (('Bug Prefix:',), ''),
+        ((['git', 'config', 'rietveld.run-post-upload-hook'],), ''),
+        (('Run Post Upload Hook:',), ''),
         ((commit_msg_path, os.X_OK,), True),
         ]
     git_cl.main(['config'])
@@ -753,6 +860,104 @@
       actual.append(obj.description)
     self.assertEqual(expected, actual)
 
+  def test_get_target_ref(self):
+    # Check remote or remote branch not present.
+    self.assertEqual(None, git_cl.GetTargetRef('origin', None, 'master', None))
+    self.assertEqual(None, git_cl.GetTargetRef(None,
+                                               'refs/remotes/origin/master',
+                                               'master', None))
+
+    # Check default target refs for branches.
+    self.assertEqual('refs/heads/master',
+                     git_cl.GetTargetRef('origin', 'refs/remotes/origin/master',
+                                         None, None))
+    self.assertEqual('refs/heads/master',
+                     git_cl.GetTargetRef('origin', 'refs/remotes/origin/lkgr',
+                                         None, None))
+    self.assertEqual('refs/heads/master',
+                     git_cl.GetTargetRef('origin', 'refs/remotes/origin/lkcr',
+                                         None, None))
+    self.assertEqual('refs/branch-heads/123',
+                     git_cl.GetTargetRef('origin',
+                                         'refs/remotes/branch-heads/123',
+                                         None, None))
+    self.assertEqual('refs/diff/test',
+                     git_cl.GetTargetRef('origin',
+                                         'refs/remotes/origin/refs/diff/test',
+                                         None, None))
+    self.assertEqual('refs/heads/chrome/m42',
+                     git_cl.GetTargetRef('origin',
+                                         'refs/remotes/origin/chrome/m42',
+                                         None, None))
+
+    # Check target refs for user-specified target branch.
+    for branch in ('branch-heads/123', 'remotes/branch-heads/123',
+                   'refs/remotes/branch-heads/123'):
+      self.assertEqual('refs/branch-heads/123',
+                       git_cl.GetTargetRef('origin',
+                                           'refs/remotes/origin/master',
+                                           branch, None))
+    for branch in ('origin/master', 'remotes/origin/master',
+                   'refs/remotes/origin/master'):
+      self.assertEqual('refs/heads/master',
+                       git_cl.GetTargetRef('origin',
+                                           'refs/remotes/branch-heads/123',
+                                           branch, None))
+    for branch in ('master', 'heads/master', 'refs/heads/master'):
+      self.assertEqual('refs/heads/master',
+                       git_cl.GetTargetRef('origin',
+                                           'refs/remotes/branch-heads/123',
+                                           branch, None))
+
+    # Check target refs for pending prefix.
+    self.assertEqual('prefix/heads/master',
+                     git_cl.GetTargetRef('origin', 'refs/remotes/origin/master',
+                                         None, 'prefix/'))
+
+  def test_patch_when_dirty(self):
+    # Patch when local tree is dirty
+    self.mock(git_common, 'is_dirty_git_tree', lambda x: True)
+    self.assertNotEqual(git_cl.main(['patch', '123456']), 0)
+
+  def test_diff_when_dirty(self):
+    # Do 'git cl diff' when local tree is dirty
+    self.mock(git_common, 'is_dirty_git_tree', lambda x: True)
+    self.assertNotEqual(git_cl.main(['diff']), 0)
+
+  def _patch_common(self):
+    self.mock(git_cl.Changelist, 'GetMostRecentPatchset', lambda x: '60001')
+    self.mock(git_cl.Changelist, 'GetPatchSetDiff', lambda *args: None)
+    self.mock(git_cl.Changelist, 'GetDescription', lambda *args: 'Description')
+    self.mock(git_cl.Changelist, 'SetIssue', lambda *args: None)
+    self.mock(git_cl.Changelist, 'SetPatchset', lambda *args: None)
+    self.mock(git_cl, 'IsGitVersionAtLeast', lambda *args: True)
+
+    self.calls = [
+      ((['git', 'config', 'rietveld.autoupdate'],), ''),
+      ((['git', 'config', 'rietveld.server'],), 'codereview.example.com'),
+      ((['git', 'rev-parse', '--show-cdup'],), ''),
+      ((['sed', '-e', 's|^--- a/|--- |; s|^+++ b/|+++ |'],), ''),
+    ]
+
+  def test_patch_successful(self):
+    self._patch_common()
+    self.calls += [
+      ((['git', 'apply', '--index', '-p0', '--3way'],), ''),
+      ((['git', 'commit', '-m',
+         'Description\n\n' +
+         'patch from issue 123456 at patchset 60001 ' +
+         '(http://crrev.com/123456#ps60001)'],), ''),
+    ]
+    self.assertEqual(git_cl.main(['patch', '123456']), 0)
+
+  def test_patch_conflict(self):
+    self._patch_common()
+    self.calls += [
+      ((['git', 'apply', '--index', '-p0', '--3way'],), '',
+       subprocess2.CalledProcessError(1, '', '', '', '')),
+    ]
+    self.assertNotEqual(git_cl.main(['patch', '123456']), 0)
+
 if __name__ == '__main__':
   git_cl.logging.basicConfig(
       level=git_cl.logging.DEBUG if '-v' in sys.argv else git_cl.logging.ERROR)
diff --git a/tests/git_common_test.py b/tests/git_common_test.py
index 29b9a68..e7ec3b4 100755
--- a/tests/git_common_test.py
+++ b/tests/git_common_test.py
@@ -221,6 +221,24 @@
 
     self.repo.run(testfn)
 
+  def testStreamWithRetcode(self):
+    items = set(self.repo.commit_map.itervalues())
+
+    def testfn():
+      with self.gc.run_stream_with_retcode('log', '--format=%H') as stdout:
+        for line in stdout.xreadlines():
+          line = line.strip()
+          self.assertIn(line, items)
+          items.remove(line)
+
+    self.repo.run(testfn)
+
+  def testStreamWithRetcodeException(self):
+    import subprocess2
+    with self.assertRaises(subprocess2.CalledProcessError):
+      with self.gc.run_stream_with_retcode('checkout', 'unknown-branch'):
+        pass
+
   def testCurrentBranch(self):
     def cur_branch_out_of_git():
       os.chdir('..')
@@ -231,6 +249,7 @@
     self.assertEqual(self.repo.run(self.gc.current_branch), 'branch_D')
 
   def testBranches(self):
+    # This check fails with git 2.4 (see crbug.com/487172)
     self.assertEqual(self.repo.run(set, self.gc.branches()),
                      {'master', 'branch_D', 'root_A'})
 
@@ -410,7 +429,7 @@
         'parent_gone': (
             self.repo.run(self.gc.hash_one, 'parent_gone', short=True),
             'to_delete',
-            1 if supports_track else None,
+            None,
             None
         ),
         'to_delete': None
@@ -460,6 +479,7 @@
     self.repo.git('config', 'depot-tools.branch-limit', '100')
 
     # should not raise
+    # This check fails with git 2.4 (see crbug.com/487172)
     self.assertEqual(38, len(self.repo.run(list, self.gc.branches())))
 
   def testMergeBase(self):
@@ -532,6 +552,7 @@
 
   def testGetBranchTree(self):
     skipped, tree = self.repo.run(self.gc.get_branch_tree)
+    # This check fails with git 2.4 (see crbug.com/487172)
     self.assertEqual(skipped, {'master', 'root_X', 'branch_DOG', 'root_CAT'})
     self.assertEqual(tree, {
       'branch_G': 'root_A',
@@ -560,10 +581,17 @@
       ('root_A', 'root_X'),
     ])
 
+  def testIsGitTreeDirty(self):
+    self.assertEquals(False, self.repo.run(self.gc.is_dirty_git_tree, 'foo'))
+    self.repo.open('test.file', 'w').write('test data')
+    self.repo.git('add', 'test.file')
+    self.assertEquals(True, self.repo.run(self.gc.is_dirty_git_tree, 'foo'))
+
   def testSquashBranch(self):
     self.repo.git('checkout', 'branch_K')
 
-    self.repo.run(self.gc.squash_current_branch, 'cool message')
+    self.assertEquals(True, self.repo.run(self.gc.squash_current_branch,
+                                          'cool message'))
 
     lines = ['cool message', '']
     for l in 'HIJK':
@@ -579,6 +607,14 @@
       'K'
     )
 
+  def testSquashBranchEmpty(self):
+    self.repo.git('checkout', 'branch_K')
+    self.repo.git('checkout', 'branch_G', '.')
+    self.repo.git('commit', '-m', 'revert all changes no branch')
+    # Should return False since the squash would result in an empty commit
+    stdout = self.repo.capture_stdio(self.gc.squash_current_branch)[0]
+    self.assertEquals(stdout, 'Nothing to commit; squashed branch is empty\n')
+
   def testRebase(self):
     self.assertSchema("""
     A B C D E F G
@@ -709,5 +745,4 @@
 
 if __name__ == '__main__':
   sys.exit(coverage_utils.covered_main(
-    os.path.join(DEPOT_TOOLS_ROOT, 'git_common.py')
-  ))
+    os.path.join(DEPOT_TOOLS_ROOT, 'git_common.py')))
diff --git a/tests/git_number_test.py b/tests/git_number_test.py
index 729599f..9751e80 100755
--- a/tests/git_number_test.py
+++ b/tests/git_number_test.py
@@ -83,4 +83,4 @@
   sys.exit(coverage_utils.covered_main(
     os.path.join(DEPOT_TOOLS_ROOT, 'git_number.py'),
     '3.7'
-  ))
\ No newline at end of file
+  ))
diff --git a/tests/git_rebase_update_test.py b/tests/git_rebase_update_test.py
index 84e111b..ea12d16 100755
--- a/tests/git_rebase_update_test.py
+++ b/tests/git_rebase_update_test.py
@@ -203,6 +203,7 @@
 
     self.repo.run(self.mv.main, ['special_K', 'cool_branch'])
     branches = self.repo.run(set, self.gc.branches())
+    # This check fails with git 2.4 (see crbug.com/487172)
     self.assertEqual(branches, {'cool_branch', 'master', 'sub_K', 'root_A',
                                 'branch_L', 'old_branch', 'foobar'})
 
diff --git a/tests/gsutil_test.py b/tests/gsutil_test.py
new file mode 100755
index 0000000..76570dd
--- /dev/null
+++ b/tests/gsutil_test.py
@@ -0,0 +1,171 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test gsutil.py."""
+
+
+import __builtin__
+import base64
+import hashlib
+import json
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+import unittest
+import urllib2
+import zipfile
+
+
+# Add depot_tools to path
+THIS_DIR = os.path.dirname(os.path.abspath(__file__))
+DEPOT_TOOLS_DIR = os.path.dirname(THIS_DIR)
+sys.path.append(DEPOT_TOOLS_DIR)
+
+import gsutil
+
+
+class TestError(Exception):
+  pass
+
+
+class Buffer(object):
+  def __init__(self, data=None):
+    self.data = data or ''
+
+  def write(self, buf):
+    self.data += buf
+
+  def read(self, amount=None):
+    if not amount:
+      amount = len(self.data)
+    result = self.data[:amount]
+    self.data = self.data[amount:]
+    return result
+
+
+class FakeCall(object):
+  def __init__(self):
+    self.expectations = []
+
+  def add_expectation(self, *args, **kwargs):
+    returns = kwargs.pop('_returns', None)
+    self.expectations.append((args, kwargs, returns))
+
+  def __call__(self, *args, **kwargs):
+    if not self.expectations:
+      raise TestError('Got unexpected\n%s\n%s' % (args, kwargs))
+    exp_args, exp_kwargs, exp_returns = self.expectations.pop(0)
+    if args != exp_args or kwargs != exp_kwargs:
+      message = 'Expected:\n  args: %s\n  kwargs: %s\n' % (exp_args, exp_kwargs)
+      message += 'Got:\n  args: %s\n  kwargs: %s\n' % (args, kwargs)
+      raise TestError(message)
+    return exp_returns
+
+
+class GsutilUnitTests(unittest.TestCase):
+  def setUp(self):
+    self.fake = FakeCall()
+    self.tempdir = tempfile.mkdtemp()
+    self.old_urlopen = getattr(urllib2, 'urlopen')
+    self.old_call = getattr(subprocess, 'call')
+    setattr(urllib2, 'urlopen', self.fake)
+    setattr(subprocess, 'call', self.fake)
+
+  def tearDown(self):
+    self.assertEqual(self.fake.expectations, [])
+    shutil.rmtree(self.tempdir)
+    setattr(urllib2, 'urlopen', self.old_urlopen)
+    setattr(subprocess, 'call', self.old_call)
+
+  def test_download_gsutil(self):
+    version = '4.2'
+    filename = 'gsutil_%s.zip' % version
+    full_filename = os.path.join(self.tempdir, filename)
+    fake_file = 'This is gsutil.zip'
+    fake_file2 = 'This is other gsutil.zip'
+    url = '%s%s' % (gsutil.GSUTIL_URL, filename)
+    self.fake.add_expectation(url, _returns=Buffer(fake_file))
+
+    self.assertEquals(
+        gsutil.download_gsutil(version, self.tempdir), full_filename)
+    with open(full_filename, 'r') as f:
+      self.assertEquals(fake_file, f.read())
+
+    metadata_url = gsutil.API_URL + filename
+    md5_calc = hashlib.md5()
+    md5_calc.update(fake_file)
+    b64_md5 = base64.b64encode(md5_calc.hexdigest())
+    self.fake.add_expectation(metadata_url, _returns=Buffer(json.dumps({
+        'md5Hash': b64_md5
+    })))
+    self.assertEquals(
+        gsutil.download_gsutil(version, self.tempdir), full_filename)
+    with open(full_filename, 'r') as f:
+      self.assertEquals(fake_file, f.read())
+    self.assertEquals(self.fake.expectations, [])
+
+    self.fake.add_expectation(metadata_url, _returns=Buffer(json.dumps({
+        'md5Hash': base64.b64encode('aaaaaaa')  # Bad MD5
+    })))
+    self.fake.add_expectation(url, _returns=Buffer(fake_file2))
+    self.assertEquals(
+        gsutil.download_gsutil(version, self.tempdir), full_filename)
+    with open(full_filename, 'r') as f:
+      self.assertEquals(fake_file2, f.read())
+    self.assertEquals(self.fake.expectations, [])
+
+  def test_ensure_gsutil_full(self):
+    version = '4.2'
+    gsutil_dir = os.path.join(self.tempdir, 'gsutil_%s' % version, 'gsutil')
+    gsutil_bin = os.path.join(gsutil_dir, 'gsutil')
+    os.makedirs(gsutil_dir)
+
+    self.fake.add_expectation(
+        [sys.executable, gsutil_bin, 'version'], stdout=subprocess.PIPE,
+        stderr=subprocess.STDOUT, _returns=1)
+
+    with open(gsutil_bin, 'w') as f:
+      f.write('Foobar')
+    zip_filename = 'gsutil_%s.zip' % version
+    url = '%s%s' % (gsutil.GSUTIL_URL, zip_filename)
+    _, tempzip = tempfile.mkstemp()
+    fake_gsutil = 'Fake gsutil'
+    with zipfile.ZipFile(tempzip, 'w') as zf:
+      zf.writestr('gsutil/gsutil', fake_gsutil)
+    with open(tempzip, 'rb') as f:
+      self.fake.add_expectation(url, _returns=Buffer(f.read()))
+    self.fake.add_expectation(
+        [sys.executable, gsutil_bin, 'version'], stdout=subprocess.PIPE,
+        stderr=subprocess.STDOUT, _returns=1)
+
+    # This should delete the old bin and rewrite it with 'Fake gsutil'
+    self.assertRaises(
+        gsutil.InvalidGsutilError, gsutil.ensure_gsutil, version, self.tempdir)
+    self.assertTrue(os.path.isdir(os.path.join(self.tempdir, '.cache_dir')))
+    self.assertTrue(os.path.exists(gsutil_bin))
+    with open(gsutil_bin, 'r') as f:
+      self.assertEquals(f.read(), fake_gsutil)
+    self.assertEquals(self.fake.expectations, [])
+
+  def test_ensure_gsutil_short(self):
+    version = '4.2'
+    gsutil_dir = os.path.join(self.tempdir, 'gsutil_%s' % version, 'gsutil')
+    gsutil_bin = os.path.join(gsutil_dir, 'gsutil')
+    os.makedirs(gsutil_dir)
+
+    # Mock out call().
+    self.fake.add_expectation(
+        [sys.executable, gsutil_bin, 'version'],
+        stdout=subprocess.PIPE, stderr=subprocess.STDOUT, _returns=0)
+
+    with open(gsutil_bin, 'w') as f:
+      f.write('Foobar')
+    self.assertEquals(
+        gsutil.ensure_gsutil(version, self.tempdir), gsutil_bin)
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/tests/owners.sh b/tests/owners.sh
index 853fcf7..afe1810 100755
--- a/tests/owners.sh
+++ b/tests/owners.sh
@@ -30,16 +30,16 @@
   git add OWNERS PRESUBMIT.py ; git commit -q -m "add OWNERS"
 
   test_expect_success "upload succeeds (needs a server running on localhost)" \
-    "$GIT_CL upload -m test master | grep -q 'Issue created'"
+    "$GIT_CL upload --no-oauth2 -m test master | grep -q 'Issue created'"
 
   test_expect_success "git-cl status has a suggested reviewer" \
     "$GIT_CL_STATUS | grep -q 'R=ben@chromium.org'"
 
   test_expect_failure "git-cl dcommit fails w/ missing LGTM" \
-    "$GIT_CL dcommit -f"
+    "$GIT_CL dcommit -f --no-oauth2"
 
   test_expect_success "git-cl dcommit --tbr succeeds" \
-    "$GIT_CL dcommit --tbr -f | grep -q -- '--tbr was specified'"
+    "$GIT_CL dcommit --tbr -f --no-oauth2 | grep -q -- '--tbr was specified'"
 )
 SUCCESS=$?
 
diff --git a/tests/owners_unittest.py b/tests/owners_unittest.py
index f205aed..5cb1774 100755
--- a/tests/owners_unittest.py
+++ b/tests/owners_unittest.py
@@ -30,6 +30,8 @@
     s += '# %s\n' % kwargs.get('comment')
   if kwargs.get('noparent'):
     s += 'set noparent\n'
+  if kwargs.get('file'):
+    s += 'file:%s\n' % kwargs.get('file')
   s += '\n'.join(kwargs.get('lines', [])) + '\n'
   return s + '\n'.join(email_addresses) + '\n'
 
@@ -54,6 +56,11 @@
     '/content/baz/froboz.h': '',
     '/content/baz/ugly.cc': '',
     '/content/baz/ugly.h': '',
+    '/content/garply/OWNERS': owners_file(file='test/OWNERS'),
+    '/content/garply/foo.cc': '',
+    '/content/garply/test/OWNERS': owners_file(peter),
+    '/content/qux/OWNERS': owners_file(peter, file='//content/baz/OWNERS'),
+    '/content/qux/foo.cc': '',
     '/content/views/OWNERS': owners_file(ben, john, owners.EVERYONE,
                                          noparent=True),
     '/content/views/pie.h': '',
@@ -209,6 +216,50 @@
     self.assertRaises(owners.SyntaxErrorInOwnersFile,
         self.db().files_not_covered_by, ['DEPS'], [brett])
 
+  def test_file_include_absolute_path(self):
+    self.assert_files_not_covered_by(['content/qux/foo.cc'], [brett], [])
+    self.assert_files_not_covered_by(['content/qux/bar.cc'], [peter], [])
+    self.assert_files_not_covered_by(['content/qux/baz.cc'],
+                                     [tom], ['content/qux/baz.cc'])
+
+  def test_file_include_relative_path(self):
+    self.assert_files_not_covered_by(['content/garply/foo.cc'], [peter], [])
+    self.assert_files_not_covered_by(['content/garply/bar.cc'], [darin], [])
+    self.assert_files_not_covered_by(['content/garply/baz.cc'],
+                                     [tom], ['content/garply/baz.cc'])
+
+  def test_file_include_per_file_absolute_path(self):
+    self.files['/content/qux/OWNERS'] = owners_file(peter,
+        lines=['per-file foo.*=file://content/baz/OWNERS'])
+
+    self.assert_files_not_covered_by(['content/qux/foo.cc'], [brett], [])
+    self.assert_files_not_covered_by(['content/qux/baz.cc'],
+                                     [brett], ['content/qux/baz.cc'])
+
+  def test_file_include_per_file_relative_path(self):
+    self.files['/content/garply/OWNERS'] = owners_file(brett,
+        lines=['per-file foo.*=file:test/OWNERS'])
+
+    self.assert_files_not_covered_by(['content/garply/foo.cc'], [peter], [])
+    self.assert_files_not_covered_by(['content/garply/baz.cc'],
+                                     [peter], ['content/garply/baz.cc'])
+
+  def test_file_include_recursive(self):
+    self.files['/content/baz/OWNERS'] = owners_file(file='//chrome/gpu/OWNERS')
+    self.assert_files_not_covered_by(['content/qux/foo.cc'], [ken], [])
+
+  def test_file_include_recursive_loop(self):
+    self.files['/content/baz/OWNERS'] = owners_file(brett,
+        file='//content/qux/OWNERS')
+    self.test_file_include_absolute_path()
+
+  def test_file_include_different_filename(self):
+    self.files['/owners/garply'] = owners_file(peter)
+    self.files['/content/garply/OWNERS'] = owners_file(john,
+        lines=['per-file foo.*=file://owners/garply'])
+
+    self.assert_files_not_covered_by(['content/garply/foo.cc'], [peter], [])
+
   def assert_syntax_error(self, owners_file_contents):
     db = self.db()
     self.files['/foo/OWNERS'] = owners_file_contents
@@ -228,6 +279,12 @@
   def test_syntax_error__bad_email(self):
     self.assert_syntax_error('ben\n')
 
+  def test_syntax_error__invalid_absolute_file(self):
+    self.assert_syntax_error('file://foo/bar/baz\n')
+
+  def test_syntax_error__invalid_relative_file(self):
+    self.assert_syntax_error('file:foo/bar/baz\n')
+
 
 class ReviewersForTest(_BaseTestCase):
   def assert_reviewers_for(self, files, potential_suggested_reviewers,
@@ -322,6 +379,33 @@
     self.assert_reviewers_for(['chrome/gpu/gpu_channel.h'],
                               [[ben], [brett]], author=ken)
 
+  def test_reviewers_file_includes__absolute(self):
+    self.assert_reviewers_for(['content/qux/foo.cc'],
+                              [[peter], [brett], [john], [darin]])
+
+  def test_reviewers_file_includes__relative(self):
+    self.assert_reviewers_for(['content/garply/foo.cc'],
+                              [[peter], [john], [darin]])
+
+  def test_reviewers_file_includes__per_file(self):
+    self.files['/content/garply/OWNERS'] = owners_file(brett,
+        lines=['per-file foo.*=file:test/OWNERS'])
+
+    self.assert_reviewers_for(['content/garply/foo.cc'],
+                              [[brett], [peter]])
+    self.assert_reviewers_for(['content/garply/bar.cc'],
+                              [[brett]])
+
+  def test_reviewers_file_includes__per_file_noparent(self):
+    self.files['/content/garply/OWNERS'] = owners_file(brett,
+        lines=['per-file foo.*=set noparent',
+               'per-file foo.*=file:test/OWNERS'])
+
+    self.assert_reviewers_for(['content/garply/foo.cc'],
+                              [[peter]])
+    self.assert_reviewers_for(['content/garply/bar.cc'],
+                              [[brett]])
+
 
 class LowestCostOwnersTest(_BaseTestCase):
   # Keep the data in the test_lowest_cost_owner* methods as consistent with
diff --git a/tests/patch.sh b/tests/patch.sh
index 22f6c21..4306c02 100755
--- a/tests/patch.sh
+++ b/tests/patch.sh
@@ -24,7 +24,7 @@
   export GIT_EDITOR=$(which true)
 
   test_expect_success "upload succeeds (needs a server running on localhost)" \
-    "$GIT_CL upload -m test master | grep -q 'Issue created'"
+    "$GIT_CL upload --no-oauth2 -m test master | grep -q 'Issue created'"
 
   test_expect_success "git-cl status now knows the issue" \
     "$GIT_CL_STATUS | grep -q 'Issue number'"
@@ -33,7 +33,7 @@
 
   git checkout -q -b test2 master
 
-  test_expect_success "$GIT_CL patch $ISSUE"
+  test_expect_success "$GIT_CL patch $ISSUE --no-oauth2"
 )
 SUCCESS=$?
 
diff --git a/tests/post-dcommit-hook-test.sh b/tests/post-dcommit-hook-test.sh
index ec4e4d0..5ecb653 100755
--- a/tests/post-dcommit-hook-test.sh
+++ b/tests/post-dcommit-hook-test.sh
@@ -28,7 +28,7 @@
 TBR=foo"
 
   test_expect_success "dcommitted code" \
-      "$GIT_CL dcommit -f --bypass-hooks -m 'dcommit'"
+      "$GIT_CL dcommit --no-oauth2 -f --bypass-hooks -m 'dcommit'"
 
   test_expect_success "post-cl-dcommit hook executed" \
       "git symbolic-ref HEAD | grep -q COMMITTED"
diff --git a/tests/presubmit_unittest.py b/tests/presubmit_unittest.py
index 65c521c..87832d4 100755
--- a/tests/presubmit_unittest.py
+++ b/tests/presubmit_unittest.py
@@ -7,11 +7,12 @@
 
 # pylint: disable=E1101,E1103
 
+import StringIO
 import functools
 import itertools
 import logging
+import multiprocessing
 import os
-import StringIO
 import sys
 import time
 import unittest
@@ -156,6 +157,10 @@
     self.mox.StubOutWithMock(presubmit.scm.SVN, 'GenerateDiff')
     self.mox.StubOutWithMock(presubmit.scm.GIT, 'GenerateDiff')
 
+    # On some platforms this does all sorts of undesirable system calls, so
+    # just permanently mock it with a lambda that returns 2
+    multiprocessing.cpu_count = lambda: 2
+
 
 class PresubmitUnittest(PresubmitTestsBase):
   """General presubmit_support.py tests (excluding InputApi and OutputApi)."""
@@ -165,12 +170,13 @@
   def testMembersChanged(self):
     self.mox.ReplayAll()
     members = [
-      'AffectedFile', 'Change', 'DoGetTrySlaves', 'DoPresubmitChecks',
+      'AffectedFile', 'Change', 'DoGetTrySlaves',
+      'DoPostUploadExecuter', 'DoPresubmitChecks', 'GetPostUploadExecuter',
       'GetTrySlavesExecuter', 'GitAffectedFile', 'CallCommand', 'CommandData',
-      'GitChange', 'InputApi', 'ListRelevantPresubmitFiles', 'Main',
+      'GitChange', 'InputApi', 'ListRelevantPresubmitFiles', 'main',
       'NonexistantCannedCheckFilter', 'OutputApi', 'ParseFiles',
       'PresubmitFailure', 'PresubmitExecuter', 'PresubmitOutput', 'ScanSubDirs',
-      'SvnAffectedFile', 'SvnChange', 'cPickle', 'cpplint', 'cStringIO',
+      'SvnAffectedFile', 'SvnChange', 'auth', 'cPickle', 'cpplint', 'cStringIO',
       'contextlib', 'canned_check_filter', 'fix_encoding', 'fnmatch',
       'gclient_utils', 'glob', 'inspect', 'json', 'load_files', 'logging',
       'marshal', 'normpath', 'optparse', 'os', 'owners', 'pickle',
@@ -1152,7 +1158,7 @@
 
     self.assertEquals(
         True,
-        presubmit.Main(['--root', self.fake_root_dir, 'random_file.txt']))
+        presubmit.main(['--root', self.fake_root_dir, 'random_file.txt']))
 
   def testMainUnversionedFail(self):
     # OptParser calls presubmit.os.path.exists and is a pain when mocked.
@@ -1170,7 +1176,7 @@
     self.mox.ReplayAll()
 
     try:
-      presubmit.Main(['--root', self.fake_root_dir])
+      presubmit.main(['--root', self.fake_root_dir])
       self.fail()
     except SystemExit, e:
       self.assertEquals(2, e.code)
@@ -1182,17 +1188,15 @@
     self.mox.ReplayAll()
     members = [
       'AbsoluteLocalPaths', 'AffectedFiles', 'AffectedSourceFiles',
-      'AffectedTextFiles',
-      'DEFAULT_BLACK_LIST', 'DEFAULT_WHITE_LIST',
-      'DepotToLocalPath', 'FilterSourceFile', 'LocalPaths',
-      'LocalToDepotPath', 'Command', 'RunTests',
-      'PresubmitLocalPath', 'ReadFile', 'RightHandSideLines', 'ServerPaths',
-      'basename', 'cPickle', 'cpplint', 'cStringIO', 'canned_checks', 'change',
-      'environ', 'glob', 'host_url', 'is_committing', 'json', 'logging',
-      'marshal', 'os_listdir', 'os_walk', 'os_path', 'owners_db', 'pickle',
-      'platform', 'python_executable', 're', 'rietveld', 'subprocess', 'tbr',
-      'tempfile', 'time', 'traceback', 'unittest', 'urllib2', 'version',
-      'verbose',
+      'AffectedTextFiles', 'DEFAULT_BLACK_LIST', 'DEFAULT_WHITE_LIST',
+      'DepotToLocalPath', 'FilterSourceFile', 'LocalPaths', 'LocalToDepotPath',
+      'Command', 'RunTests', 'PresubmitLocalPath', 'ReadFile',
+      'RightHandSideLines', 'ServerPaths', 'basename', 'cPickle', 'cpplint',
+      'cStringIO', 'canned_checks', 'change', 'cpu_count', 'environ', 'glob',
+      'host_url', 'is_committing', 'json', 'logging', 'marshal', 'os_listdir',
+      'os_walk', 'os_path', 'os_stat', 'owners_db', 'pickle', 'platform',
+      'python_executable', 're', 'rietveld', 'subprocess', 'tbr', 'tempfile',
+      'time', 'traceback', 'unittest', 'urllib2', 'version', 'verbose',
     ]
     # If this test fails, you should add the relevant test.
     self.compareMembers(
@@ -1851,6 +1855,7 @@
     input_api.tbr = False
     input_api.python_executable = 'pyyyyython'
     input_api.platform = sys.platform
+    input_api.cpu_count = 2
     input_api.time = time
     input_api.canned_checks = presubmit_canned_checks
     input_api.Command = presubmit.CommandData
@@ -1861,6 +1866,7 @@
   def testMembersChanged(self):
     self.mox.ReplayAll()
     members = [
+      'DEFAULT_LINT_FILTERS',
       'CheckBuildbotPendingBuilds',
       'CheckChangeHasBugField', 'CheckChangeHasDescription',
       'CheckChangeHasNoStrayWhitespace',
@@ -1878,6 +1884,7 @@
       'CheckLicense',
       'CheckOwners',
       'CheckPatchFormatted',
+      'CheckGNFormatted',
       'CheckRietveldTryJobExecution',
       'CheckSingletonInHeaders',
       'CheckSvnModifiedDirectories',
@@ -1998,7 +2005,7 @@
     if use_source_file:
       input_api1.AffectedSourceFiles(None).AndReturn(files1)
     else:
-      input_api1.AffectedFiles(include_deleted=False).AndReturn(files1)
+      input_api1.AffectedFiles(include_deletes=False).AndReturn(files1)
     presubmit.scm.SVN.GetFileProperty(
         presubmit.normpath('foo/bar.cc'), property_name, self.fake_root_dir
         ).AndReturn(value1)
@@ -2015,7 +2022,7 @@
     if use_source_file:
       input_api2.AffectedSourceFiles(None).AndReturn(files2)
     else:
-      input_api2.AffectedFiles(include_deleted=False).AndReturn(files2)
+      input_api2.AffectedFiles(include_deletes=False).AndReturn(files2)
 
     presubmit.scm.SVN.GetFileProperty(
         presubmit.normpath('foo/bar.cc'), property_name, self.fake_root_dir
@@ -2085,41 +2092,6 @@
         'Foo', None, 'Foo ', None,
         presubmit.OutputApi.PresubmitPromptWarning)
 
-  def testCheckSingletonInHeaders(self):
-    change1 = presubmit.Change(
-        'foo1', 'foo1\n', self.fake_root_dir, None, 0, 0, None)
-    input_api1 = self.MockInputApi(change1, False)
-    affected_file1 = self.mox.CreateMock(presubmit.SvnAffectedFile)
-    affected_file2 = self.mox.CreateMock(presubmit.SvnAffectedFile)
-    input_api1.AffectedSourceFiles(None).AndReturn(
-        [affected_file1, affected_file2])
-    affected_file1.LocalPath().AndReturn('foo.h')
-    input_api1.ReadFile(affected_file1).AndReturn(
-        '// Comment mentioning Singleton<Foo>.\n' +
-        'friend class Singleton<Foo>;')
-    for _ in range(4):
-      affected_file2.LocalPath().AndReturn('foo.cc')
-
-    change2 = presubmit.Change(
-        'foo2', 'foo2\n', self.fake_root_dir, None, 0, 0, None)
-    input_api2 = self.MockInputApi(change2, False)
-
-    affected_file3 = self.mox.CreateMock(presubmit.SvnAffectedFile)
-    input_api2.AffectedSourceFiles(None).AndReturn([affected_file3])
-    affected_file3.LocalPath().AndReturn('foo.h')
-    input_api2.ReadFile(affected_file3).AndReturn(
-        'Foo* foo = Singleton<Foo>::get();')
-
-    self.mox.ReplayAll()
-
-    results1 = presubmit_canned_checks.CheckSingletonInHeaders(
-        input_api1, presubmit.OutputApi)
-    self.assertEquals(results1, [])
-    results2 = presubmit_canned_checks.CheckSingletonInHeaders(
-        input_api2, presubmit.OutputApi)
-    self.assertEquals(len(results2), 1)
-    self.assertEquals(results2[0].__class__, presubmit.OutputApi.PresubmitError)
-
   def testCheckChangeHasOnlyOneEol(self):
     self.ReadFileTest(presubmit_canned_checks.CheckChangeHasOnlyOneEol,
                       "Hey!\nHo!\n", "Hey!\nHo!\n\n",
@@ -2173,7 +2145,7 @@
     affected_files = (affected_file1, affected_file2,
                       affected_file3, affected_file4)
 
-    def test(file_filter, include_deletes):
+    def test(include_dirs=False, include_deletes=True, file_filter=None):
       self.assertFalse(include_deletes)
       for x in affected_files:
         if file_filter(x):
@@ -2237,20 +2209,20 @@
     check = lambda x, y, z: presubmit_canned_checks.CheckLongLines(x, y, 10, z)
     self.ContentTest(
         check,
-        ' http:// 0 23 5',
-        None,
         ' http:// 0 23 56',
         None,
+        ' foob:// 0 23 56',
+        None,
         presubmit.OutputApi.PresubmitPromptWarning)
 
   def testCannedCheckLongLinesFile(self):
     check = lambda x, y, z: presubmit_canned_checks.CheckLongLines(x, y, 10, z)
     self.ContentTest(
         check,
-        ' file:// 0 23 5',
-        None,
         ' file:// 0 23 56',
         None,
+        ' foob:// 0 23 56',
+        None,
         presubmit.OutputApi.PresubmitPromptWarning)
 
   def testCannedCheckLongLinesCssUrl(self):
@@ -2558,7 +2530,14 @@
 
     CommHelper(input_api,
         ['pyyyyython', pylint, '--args-on-stdin'],
-        env=mox.IgnoreArg(), stdin='file1.py\n--rcfile=%s' % pylintrc)
+        env=mox.IgnoreArg(), stdin=
+               '--rcfile=%s\n--disable=cyclic-import\n--jobs=2\nfile1.py'
+               % pylintrc)
+    CommHelper(input_api,
+        ['pyyyyython', pylint, '--args-on-stdin'],
+        env=mox.IgnoreArg(), stdin=
+               '--rcfile=%s\n--disable=all\n--enable=cyclic-import\nfile1.py'
+               % pylintrc)
     self.mox.ReplayAll()
 
     results = presubmit_canned_checks.RunPylint(
@@ -2601,8 +2580,8 @@
 
   def AssertOwnersWorks(self, tbr=False, issue='1', approvers=None,
       reviewers=None, is_committing=True, rietveld_response=None,
-      uncovered_files=None, expected_output='', author_counts_as_owner=True,
-      manually_specified_reviewers=None):
+      uncovered_files=None, expected_output='',
+      manually_specified_reviewers=None, cq_dry_run=False):
     if approvers is None:
       # The set of people who lgtm'ed a change.
       approvers = set()
@@ -2630,8 +2609,9 @@
     input_api.tbr = tbr
 
     if not is_committing or (not tbr and issue):
-      affected_file.LocalPath().AndReturn('foo/xyz.cc')
-      change.AffectedFiles(file_filter=None).AndReturn([affected_file])
+      if not cq_dry_run:
+        affected_file.LocalPath().AndReturn('foo/xyz.cc')
+        change.AffectedFiles(file_filter=None).AndReturn([affected_file])
       if issue and not rietveld_response:
         rietveld_response = {
           "owner_email": change.author_email,
@@ -2644,34 +2624,51 @@
 
       if is_committing:
         people = approvers
+        if issue:
+          input_api.rietveld.get_issue_properties(
+              issue=int(input_api.change.issue), messages=None).AndReturn(
+                  rietveld_response)
       else:
         people = reviewers
 
-      if issue:
-        input_api.rietveld.get_issue_properties(
-            issue=int(input_api.change.issue), messages=True).AndReturn(
-                rietveld_response)
+      if not cq_dry_run:
+        if issue:
+          input_api.rietveld.get_issue_properties(
+              issue=int(input_api.change.issue), messages=True).AndReturn(
+                  rietveld_response)
 
-      if author_counts_as_owner:
         people.add(change.author_email)
         fake_db.files_not_covered_by(set(['foo/xyz.cc']),
             people).AndReturn(uncovered_files)
-      else:
-        people.discard(change.author_email)
-        fake_db.files_not_covered_by(set(['foo/xyz.cc']),
-            people).AndReturn(uncovered_files)
-      if not is_committing and uncovered_files:
-        fake_db.reviewers_for(set(['foo']),
-            change.author_email).AndReturn(change.author_email)
+        if not is_committing and uncovered_files:
+          fake_db.reviewers_for(set(['foo']),
+              change.author_email).AndReturn(change.author_email)
 
     self.mox.ReplayAll()
     output = presubmit.PresubmitOutput()
     results = presubmit_canned_checks.CheckOwners(input_api,
-        presubmit.OutputApi, author_counts_as_owner=author_counts_as_owner)
+        presubmit.OutputApi)
     if results:
       results[0].handle(output)
     self.assertEquals(output.getvalue(), expected_output)
 
+  def testCannedCheckOwners_DryRun(self):
+    response = {
+      "owner_email": "john@example.com",
+      "cq_dry_run": True,
+      "reviewers": ["ben@example.com"],
+    }
+    self.AssertOwnersWorks(approvers=set(),
+        cq_dry_run=True,
+        rietveld_response=response,
+        reviewers=set(["ben@example.com"]),
+        expected_output='This is a CQ dry run, skipping OWNERS check\n')
+
+    self.AssertOwnersWorks(approvers=set(['ben@example.com']),
+        is_committing=False,
+        rietveld_response=response,
+        expected_output='')
+
   def testCannedCheckOwners_Approved(self):
     response = {
       "owner_email": "john@example.com",
@@ -2795,18 +2792,6 @@
                            is_committing=False,
                            expected_output='')
 
-  def testCannedCheckOwners_AuthorCountsAsOwner(self):
-    self.AssertOwnersWorks(approvers=set(['john@example.com',
-                                          'brett@example.com']),
-                           reviewers=set(['john@example.com',
-                                          'ben@example.com']),
-                           uncovered_files=set(['foo/xyz.cc', 'foo/bar.cc']),
-                           expected_output='Missing LGTM from an OWNER '
-                                           'for these files:\n'
-                                           '    foo/bar.cc\n'
-                                           '    foo/xyz.cc\n',
-                           author_counts_as_owner=False)
-
   def testCannedCheckOwners_TBR(self):
     self.AssertOwnersWorks(tbr=True,
         expected_output='--tbr was specified, skipping OWNERS check\n')
@@ -2904,9 +2889,6 @@
       affected_file.LocalPath().AndReturn('hello.py')
     input_api.AffectedSourceFiles(mox.IgnoreArg()).AndReturn([affected_file])
     input_api.ReadFile(affected_file).AndReturn('Hey!\nHo!\nHey!\nHo!\n\n')
-    input_api.AffectedSourceFiles(mox.IgnoreArg()).AndReturn([affected_file])
-    for _ in range(4):
-      affected_file.LocalPath().AndReturn('hello.py')
 
     self.mox.ReplayAll()
     results = presubmit_canned_checks.PanProjectChecks(
diff --git a/tests/push-basic.sh b/tests/push-basic.sh
index b68f804..0d434fa 100755
--- a/tests/push-basic.sh
+++ b/tests/push-basic.sh
@@ -21,17 +21,18 @@
   git add test; git commit -q -m "branch work"
 
   test_expect_success "git-cl upload wants a server" \
-    "$GIT_CL upload 2>&1 | grep -q 'You must configure'"
+    "$GIT_CL upload --no-oauth2 2>&1 | grep -q 'You must configure'"
 
   git config rietveld.server localhost:10000
 
+  # echo $($GIT_CL_STATUS)
   test_expect_success "git-cl status has no issue" \
-    "$GIT_CL_STATUS | grep -q 'no issue'"
+    "$GIT_CL_STATUS | grep -q 'No issue assigned'"
 
   # Prevent the editor from coming up when you upload.
   export GIT_EDITOR=$(which true)
   test_expect_success "upload succeeds (needs a server running on localhost)" \
-    "$GIT_CL upload -m test master | grep -q 'Issue created'"
+    "$GIT_CL upload  --no-oauth2  -m test master | grep -q 'Issue created'"
 
   test_expect_success "git-cl status now knows the issue" \
     "$GIT_CL_STATUS | grep -q 'Issue number'"
@@ -44,11 +45,12 @@
        --data-urlencode xsrf_token="$(print_xsrf_token)" \
        $URL/edit
 
+  API=$(echo $URL | sed -e 's/\([0-9]\+\)$/api\/\1/')
   test_expect_success "Base URL contains branch name" \
-      "curl -s $($GIT_CL_STATUS --field=url) | grep 'URL:[[:space:]]*[^<]' | grep -q '@master'"
+      "curl -s $API | python -mjson.tool | grep base_url | grep -q '@master'"
 
   test_expect_success "git-cl land ok" \
-    "$GIT_CL land -f"
+    "$GIT_CL land -f --no-oauth2"
 
   git checkout -q master > /dev/null 2>&1
   git pull -q > /dev/null 2>&1
diff --git a/tests/push-from-logs.sh b/tests/push-from-logs.sh
index 0fef2a7..f835c77 100755
--- a/tests/push-from-logs.sh
+++ b/tests/push-from-logs.sh
@@ -21,7 +21,7 @@
   git add test; git commit -q -m "branch work"
 
   test_expect_success "git-cl upload wants a server" \
-    "$GIT_CL upload 2>&1 | grep -q 'You must configure'"
+    "$GIT_CL upload --no-oauth2 2>&1 | grep -q 'You must configure'"
 
   git config rietveld.server localhost:10000
 
@@ -31,7 +31,7 @@
   # Prevent the editor from coming up when you upload.
   export EDITOR=$(which true)
   test_expect_success "upload succeeds (needs a server running on localhost)" \
-      "$GIT_CL upload -m test master | \
+      "$GIT_CL upload --no-oauth2 -m test master | \
       grep -q 'Issue created'"
 
   test_expect_success "git-cl status now knows the issue" \
@@ -41,13 +41,13 @@
   # Should contain 'branch work' x 2.
   test_expect_success "git-cl status has the right description for the log" \
       "$GIT_CL_STATUS --field desc | [ $( egrep -q '^branch work$' -c ) -eq 2 ]
-  
+
   test_expect_success "git-cl status has the right subject from message" \
       "$GIT_CL_STATUS --field desc | \
       [ $( egrep -q '^test$' --byte-offset) | grep '^0:' ]
 
   test_expect_success "git-cl push ok" \
-    "$GIT_CL push -f"
+    "$GIT_CL push -f --no-oauth2"
 
   git checkout -q master > /dev/null 2>&1
   git pull -q > /dev/null 2>&1
diff --git a/tests/rename.sh b/tests/rename.sh
index b16bea1..9b5d6dd 100755
--- a/tests/rename.sh
+++ b/tests/rename.sh
@@ -24,7 +24,7 @@
   git commit -q -m "renamed"
   export GIT_EDITOR=$(which true)
   test_expect_success "upload succeeds" \
-    "$GIT_CL upload -m test master | grep -q 'Issue created'"
+    "$GIT_CL upload --no-oauth2 -m test master | grep -q 'Issue created'"
 
   # Look at the uploaded patch and verify it is a rename patch.
   echo "Rename test not fully implemented yet.  :("
diff --git a/tests/rietveld_test.py b/tests/rietveld_test.py
index 20edb50..7b8c5ba 100755
--- a/tests/rietveld_test.py
+++ b/tests/rietveld_test.py
@@ -47,7 +47,7 @@
     super(BaseFixture, self).setUp()
     # Access to a protected member XX of a client class
     # pylint: disable=W0212
-    self.rietveld = self.TESTED_CLASS('url', 'email', 'password')
+    self.rietveld = self.TESTED_CLASS('url', None, 'email')
     self.rietveld._send = self._rietveld_send
     self.requests = []
 
@@ -456,7 +456,7 @@
 
   def setUp(self):
     super(DefaultTimeoutTest, self).setUp()
-    self.rietveld = self.TESTED_CLASS('url', 'email', 'password')
+    self.rietveld = self.TESTED_CLASS('url', None, 'email')
     self.mock(self.rietveld.rpc_server, 'Send', MockSend)
     self.sleep_time = 0
 
diff --git a/tests/save-description-on-failure.sh b/tests/save-description-on-failure.sh
index 3584688..1297fa0 100755
--- a/tests/save-description-on-failure.sh
+++ b/tests/save-description-on-failure.sh
@@ -34,7 +34,8 @@
   # Try to upload the change to an unresolvable hostname; git-cl should fail.
   export GIT_EDITOR=$(which true)
   git config rietveld.server bogus.example.com:80
-  test_expect_failure "uploading to bogus server" "$GIT_CL upload 2>/dev/null"
+  test_expect_failure "uploading to bogus server" \
+    "$GIT_CL upload --no-oauth2 2>/dev/null"
 
   # Check that the change's description was saved.
   test_expect_success "description was backed up" \
diff --git a/tests/scm_unittest.py b/tests/scm_unittest.py
index 239740e..49e9ee2 100755
--- a/tests/scm_unittest.py
+++ b/tests/scm_unittest.py
@@ -77,6 +77,7 @@
         'AssertVersion',
         'Capture',
         'CaptureStatus',
+        'CleanupDir',
         'current_version',
         'FetchUpstreamTuple',
         'GenerateDiff',
@@ -92,6 +93,7 @@
         'GetSha1ForSvnRev',
         'GetSVNBranch',
         'GetUpstreamBranch',
+        'IsDirectoryVersioned',
         'IsGitSvn',
         'IsInsideWorkTree',
         'IsValidRevision',
@@ -187,347 +189,6 @@
     self.assertTrue(scm.GIT.IsValidRevision(cwd=self.clone_dir, rev='HEAD'))
 
 
-class RealGitSvnTest(fake_repos.FakeReposTestBase):
-  def setUp(self):
-    super(RealGitSvnTest, self).setUp()
-    self.enabled = self.FAKE_REPOS.set_up_git() and self.FAKE_REPOS.set_up_svn()
-    if self.enabled:
-      self.tree_name = 'git-svn'
-      self.svn_url = scm.os.path.join(self.FAKE_REPOS.svn_base, 'trunk')
-      self.clone_dir = scm.os.path.join(self.FAKE_REPOS.git_root,
-                                        self.tree_name)
-      scm.os.makedirs(self.clone_dir)
-      self._capture(['svn', 'clone', '-q', '-q', self.svn_url, self.clone_dir])
-      # git rev-list gives revisions in reverse chronological order.
-      hashes = reversed(self._capture(['rev-list', 'HEAD']).splitlines())
-      # We insert a null value at 0 to do 1-based indexing, not 0-based, as SVN
-      # revisions are 1-based (i.e. they start at r1, not r0).
-      self.git_hashes = ([None] + list(hashes))
-
-  def tearDown(self):
-    scm.gclient_utils.rmtree(self.clone_dir)
-
-  def _capture(self, cmd, **kwargs):
-    kwargs.setdefault('cwd', self.clone_dir)
-    return scm.GIT.Capture(cmd, **kwargs)
-
-  def testGetGitSvnHeadRev(self):
-    if not self.enabled:
-      return
-    self.assertEquals(scm.GIT.GetGitSvnHeadRev(cwd=self.clone_dir), 2)
-    self._capture(['reset', '--hard', 'HEAD^'])
-    self.assertEquals(scm.GIT.GetGitSvnHeadRev(cwd=self.clone_dir), 1)
-
-  def testIsGitSvn(self):
-    if not self.enabled:
-      return
-    # Git-svn
-    self.assertTrue(scm.GIT.IsGitSvn(self.clone_dir))
-    # Pure git
-    git_dir = scm.os.path.join(self.FAKE_REPOS.git_root, 'repo_1')
-    self.assertFalse(scm.GIT.IsGitSvn(git_dir))
-    # Pure svn
-    svn_dir = scm.os.path.join(self.FAKE_REPOS.svn_checkout, 'trunk')
-    self.assertFalse(scm.GIT.IsGitSvn(svn_dir))
-
-  def testParseGitSvnSha1(self):
-    test_sha1 = 'a5c63ce8671922e5c59c0dea49ef4f9d4a3020c9'
-    expected_output = test_sha1 + '\n'
-    # Cygwin git-svn 1.7.9 prints extra escape sequences when run under
-    # TERM=xterm
-    cygwin_output = test_sha1 + '\n\033[?1034h'
-
-    self.assertEquals(scm.GIT.ParseGitSvnSha1(expected_output), test_sha1)
-    self.assertEquals(scm.GIT.ParseGitSvnSha1(cygwin_output), test_sha1)
-
-  def testGetGetSha1ForSvnRev(self):
-    if not self.enabled:
-      return
-    self.assertEquals(scm.GIT.GetSha1ForSvnRev(cwd=self.clone_dir, rev=1),
-                      self.git_hashes[1])
-    self.assertEquals(scm.GIT.GetSha1ForSvnRev(cwd=self.clone_dir, rev=2),
-                      self.git_hashes[2])
-
-
-class SVNTestCase(BaseSCMTestCase):
-  def setUp(self):
-    BaseSCMTestCase.setUp(self)
-    self.mox.StubOutWithMock(scm.SVN, 'Capture')
-    self.url = self.SvnUrl()
-
-  def testMembersChanged(self):
-    self.mox.ReplayAll()
-    members = [
-        'AssertVersion',
-        'Capture',
-        'CaptureLocalInfo',
-        'CaptureRemoteInfo',
-        'CaptureRevision',
-        'CaptureStatus',
-        'current_version',
-        'GenerateDiff',
-        'GetCheckoutRoot',
-        'GetEmail',
-        'GetFileProperty',
-        'IsMoved',
-        'IsMovedInfo',
-        'IsValidRevision',
-        'ReadSimpleAuth',
-        'Revert',
-        'RunAndGetFileList',
-    ]
-    # If this test fails, you should add the relevant test.
-    self.compareMembers(scm.SVN, members)
-
-  def testGetCheckoutRoot(self):
-    # pylint: disable=E1103
-    self.mox.StubOutWithMock(scm.SVN, '_CaptureInfo')
-    self.mox.StubOutWithMock(scm, 'GetCasedPath')
-    scm.os.path.abspath = lambda x: x
-    scm.GetCasedPath = lambda x: x
-    scm.SVN._CaptureInfo([], self.root_dir + '/foo/bar').AndReturn({
-        'Repository Root': 'svn://svn.chromium.org/chrome',
-        'URL': 'svn://svn.chromium.org/chrome/trunk/src',
-    })
-    scm.SVN._CaptureInfo([], self.root_dir + '/foo').AndReturn({
-        'Repository Root': 'svn://svn.chromium.org/chrome',
-        'URL': 'svn://svn.chromium.org/chrome/trunk',
-    })
-    scm.SVN._CaptureInfo([], self.root_dir).AndReturn({
-        'Repository Root': 'svn://svn.chromium.org/chrome',
-        'URL': 'svn://svn.chromium.org/chrome/trunk/tools/commit-queue/workdir',
-    })
-    self.mox.ReplayAll()
-    self.assertEquals(scm.SVN.GetCheckoutRoot(self.root_dir + '/foo/bar'),
-                      self.root_dir + '/foo')
-
-  def testGetFileInfo(self):
-    xml_text = r"""<?xml version="1.0"?>
-<info>
-<entry kind="file" path="%s" revision="14628">
-<url>http://src.chromium.org/svn/trunk/src/chrome/app/d</url>
-<repository><root>http://src.chromium.org/svn</root></repository>
-<wc-info>
-<schedule>add</schedule>
-<depth>infinity</depth>
-<copy-from-url>http://src.chromium.org/svn/trunk/src/chrome/app/DEPS</copy-from-url>
-<copy-from-rev>14628</copy-from-rev>
-<checksum>369f59057ba0e6d9017e28f8bdfb1f43</checksum>
-</wc-info>
-</entry>
-</info>
-""" % self.url
-    scm.SVN.Capture(['info', '--xml', self.url], None).AndReturn(xml_text)
-    expected = {
-      'URL': 'http://src.chromium.org/svn/trunk/src/chrome/app/d',
-      'UUID': None,
-      'Repository Root': 'http://src.chromium.org/svn',
-      'Schedule': 'add',
-      'Copied From URL':
-        'http://src.chromium.org/svn/trunk/src/chrome/app/DEPS',
-      'Copied From Rev': '14628',
-      'Path': self.url,
-      'Revision': 14628,
-      'Node Kind': 'file',
-    }
-    self.mox.ReplayAll()
-    file_info = scm.SVN._CaptureInfo([self.url], None)
-    self.assertEquals(sorted(file_info.items()), sorted(expected.items()))
-
-  def testCaptureInfo(self):
-    xml_text = """<?xml version="1.0"?>
-<info>
-<entry
-   kind="dir"
-   path="."
-   revision="35">
-<url>%s</url>
-<repository>
-<root>%s</root>
-<uuid>7b9385f5-0452-0410-af26-ad4892b7a1fb</uuid>
-</repository>
-<wc-info>
-<schedule>normal</schedule>
-<depth>infinity</depth>
-</wc-info>
-<commit
-   revision="35">
-<author>maruel</author>
-<date>2008-12-04T20:12:19.685120Z</date>
-</commit>
-</entry>
-</info>
-""" % (self.url, self.root_dir)
-    scm.SVN.Capture(['info', '--xml', self.url], None).AndReturn(xml_text)
-    self.mox.ReplayAll()
-    file_info = scm.SVN._CaptureInfo([self.url], None)
-    expected = {
-      'URL': self.url,
-      'UUID': '7b9385f5-0452-0410-af26-ad4892b7a1fb',
-      'Revision': 35,
-      'Repository Root': self.root_dir,
-      'Schedule': 'normal',
-      'Copied From URL': None,
-      'Copied From Rev': None,
-      'Path': '.',
-      'Node Kind': 'directory',
-    }
-    self.assertEqual(file_info, expected)
-
-  def testCaptureStatus(self):
-    text = r"""<?xml version="1.0"?>
-<status>
-<target path=".">
-<entry path="unversionned_file.txt">
-<wc-status props="none" item="unversioned"></wc-status>
-</entry>
-<entry path="build\internal\essential.vsprops">
-<wc-status props="normal" item="modified" revision="14628">
-<commit revision="13818">
-<author>ajwong@chromium.org</author>
-<date>2009-04-16T00:42:06.872358Z</date>
-</commit>
-</wc-status>
-</entry>
-<entry path="chrome\app\d">
-<wc-status props="none" copied="true" tree-conflicted="true" item="added">
-</wc-status>
-</entry>
-<entry path="chrome\app\DEPS">
-<wc-status props="modified" item="modified" revision="14628">
-<commit revision="1279">
-<author>brettw@google.com</author>
-<date>2008-08-23T17:16:42.090152Z</date>
-</commit>
-</wc-status>
-</entry>
-<entry path="scripts\master\factory\gclient_factory.py">
-<wc-status props="normal" item="conflicted" revision="14725">
-<commit revision="14633">
-<author>nsylvain@chromium.org</author>
-<date>2009-04-27T19:37:17.977400Z</date>
-</commit>
-</wc-status>
-</entry>
-</target>
-</status>
-"""
-    scm.SVN.Capture(['status', '--xml'], '.').AndReturn(text)
-
-    self.mox.ReplayAll()
-    info = scm.SVN.CaptureStatus(None, '.')
-    expected = [
-      ('?      ', 'unversionned_file.txt'),
-      ('M      ', 'build\\internal\\essential.vsprops'),
-      ('A  +   ', 'chrome\\app\\d'),
-      ('MM     ', 'chrome\\app\\DEPS'),
-      ('C      ', 'scripts\\master\\factory\\gclient_factory.py'),
-    ]
-    self.assertEquals(sorted(info), sorted(expected))
-
-  def testCaptureStatusEmpty(self):
-    text = r"""<?xml version="1.0"?>
-    <status>
-    <target
-       path="perf">
-       </target>
-       </status>"""
-    scm.SVN.Capture(['status', '--xml'], None).AndReturn(text)
-    self.mox.ReplayAll()
-    info = scm.SVN.CaptureStatus(None, None)
-    self.assertEquals(info, [])
-
-
-class RealSvnTest(fake_repos.FakeReposTestBase):
-  # Tests that work with a checkout.
-  def setUp(self):
-    super(RealSvnTest, self).setUp()
-    self.enabled = self.FAKE_REPOS.set_up_svn()
-    if self.enabled:
-      self.svn_root = scm.os.path.join(self.root_dir, 'base')
-      scm.SVN.Capture(
-          ['checkout', self.svn_base + 'trunk/third_party', 'base'],
-          cwd=self.root_dir)
-      self.tree = self.mangle_svn_tree(('trunk/third_party@-1', ''),)
-
-  def _capture(self, cmd, **kwargs):
-    kwargs.setdefault('cwd', self.svn_root)
-    return scm.SVN.Capture(cmd, **kwargs)
-
-  def testCheckout(self):
-    if not self.enabled:
-      return
-    # Checkout and verify the tree.
-    self.assertTree(self.tree, self.svn_root)
-
-  def testIsValidRevision(self):
-    if not self.enabled:
-      return
-    url_at_rev = self.svn_base + 'trunk/third_party@%s'
-    # Invalid or non-existent.
-    self.assertFalse(scm.SVN.IsValidRevision('url://totally_invalid/trunk/foo'))
-    self.assertFalse(scm.SVN.IsValidRevision(url_at_rev % 0))
-    self.assertFalse(scm.SVN.IsValidRevision(url_at_rev % 123))
-    # Valid.
-    self.assertTrue(scm.SVN.IsValidRevision(url_at_rev % 1))
-    self.assertTrue(scm.SVN.IsValidRevision(url_at_rev % 2))
-    self.assertTrue(scm.SVN.IsValidRevision(url_at_rev % 'HEAD'))
-
-  def testRevert(self):
-    if not self.enabled:
-      return
-    # Mess around and make sure revert works for all corner cases.
-    # - svn add a file
-    # - svn add a file and delete it
-    # - Delete a file
-    # - svn delete a file
-    # - svn move a directory and svn rename files in it
-    # - add a directory tree.
-    def join(*args):
-      return scm.os.path.join(self.svn_root, *args)
-    self._capture(['move', 'foo', 'foo2'])
-    self._capture(
-        ['move',
-         scm.os.path.join('foo2', 'origin'),
-         scm.os.path.join('foo2', 'o')])
-    scm.os.remove(join('origin'))
-    self._capture(['propset', 'foo', 'bar', join('prout', 'origin')])
-    fake_repos.gclient_utils.rmtree(join('prout'))
-    with open(join('faa'), 'w') as f:
-      f.write('eh')
-    with open(join('faala'), 'w') as f:
-      f.write('oh')
-    self._capture(['add', join('faala')])
-    added_and_removed = join('added_and_removed')
-    with open(added_and_removed, 'w') as f:
-      f.write('oh')
-    self._capture(['add', added_and_removed])
-    scm.os.remove(added_and_removed)
-    # Make sure a tree of directories can be removed.
-    scm.os.makedirs(join('new_dir', 'subdir'))
-    with open(join('new_dir', 'subdir', 'newfile'), 'w') as f:
-      f.write('ah!')
-    self._capture(['add', join('new_dir')])
-    self._capture(['add', join('new_dir', 'subdir')])
-    self._capture(['add', join('new_dir', 'subdir', 'newfile')])
-    # A random file in an added directory confuses svn.
-    scm.os.makedirs(join('new_dir2', 'subdir'))
-    with open(join('new_dir2', 'subdir', 'newfile'), 'w') as f:
-      f.write('ah!')
-    self._capture(['add', join('new_dir2')])
-    self._capture(['add', join('new_dir2', 'subdir')])
-    self._capture(['add', join('new_dir2', 'subdir', 'newfile')])
-    with open(join('new_dir2', 'subdir', 'unversionedfile'), 'w') as f:
-      f.write('unadded file!')
-
-    scm.SVN.Revert(self.svn_root)
-    self._capture(['update', '--revision', 'base'])
-
-    self.assertTree(self.tree, self.svn_root)
-    # Asserting the tree is not sufficient, svn status must come out clear too.
-    self.assertEquals('', self._capture(['status']))
-
-
 if __name__ == '__main__':
   if '-v' in sys.argv:
     logging.basicConfig(level=logging.DEBUG)
diff --git a/tests/submit-from-new-dir.sh b/tests/submit-from-new-dir.sh
index f04d9dc..063568a 100755
--- a/tests/submit-from-new-dir.sh
+++ b/tests/submit-from-new-dir.sh
@@ -28,9 +28,9 @@
   git add test; git commit -q -m "branch work"
   export GIT_EDITOR=$(which true)
   test_expect_success "upload succeeds" \
-    "$GIT_CL upload -m test master | grep -q 'Issue created'"
+    "$GIT_CL upload --no-oauth2 -m test master | grep -q 'Issue created'"
   test_expect_success "git-cl dcommits ok" \
-    "$GIT_CL dcommit -f"
+    "$GIT_CL dcommit -f --no-oauth2"
 )
 
 SUCCESS=$?
diff --git a/tests/submodule-merge-test.sh b/tests/submodule-merge-test.sh
index 8818a2a..a703958 100755
--- a/tests/submodule-merge-test.sh
+++ b/tests/submodule-merge-test.sh
@@ -5,6 +5,8 @@
 # found in the LICENSE file.
 
 set -e
+SCRIPT_DIR=$(cd $(dirname "$BASH_SOURCE") && pwd)
+cd ${SCRIPT_DIR}
 
 . ./test-lib.sh
 
@@ -25,7 +27,7 @@
   git_diff=`git diff HEAD^ | sed -n '/^@@/,$p' | xargs`
 
   test_expect_success "dcommitted code" \
-      "$GIT_CL dcommit -f --bypass-hooks -m 'dcommit'"
+      "$GIT_CL dcommit --no-oauth2 -f --bypass-hooks -m 'dcommit'"
 
   cd ..
 
@@ -43,7 +45,7 @@
 
   cd git-svn-submodule
   git svn fetch
-  last_svn_rev=`git show refs/remotes/trunk | grep git-svn-id: | \
+  last_svn_rev=`git show refs/remotes/origin/trunk | grep git-svn-id: | \
       grep -o trunk@[0-9]* | xargs`
 
   test_expect_success "git svn fetch gets new svn revision" \
diff --git a/tests/test-lib.sh b/tests/test-lib.sh
index 1e7223d..bcda6c7 100755
--- a/tests/test-lib.sh
+++ b/tests/test-lib.sh
@@ -9,14 +9,14 @@
 
 export DEPOT_TOOLS_UPDATE=0
 
-PWD=`pwd`
+PWD=$(pwd)
 REPO_URL=file://$PWD/svnrepo
 TRUNK_URL=$REPO_URL/trunk
 BRANCH_URL=$REPO_URL/branches/some_branch
 GITREPO_PATH=$PWD/gitrepo
 GITREPO_URL=file://$GITREPO_PATH
-PATH="$PWD/..:$PATH"
-GIT_CL=$PWD/../git-cl
+PATH="$(dirname $PWD):$PATH"
+GIT_CL=$(dirname $PWD)/git-cl
 GIT_CL_STATUS="$GIT_CL status -f"
 
 # Set up an SVN repo that has a few commits to trunk.
@@ -49,6 +49,8 @@
   rm -rf git-svn
   # There appears to be no way to make git-svn completely shut up, so we
   # redirect its output.
+  # clone with --prefix origin/ to ensure the same behaviour with old and new
+  # versions of git (The default prefix was "" prior to Git 2.0)
   git svn --prefix origin/ -q clone -s $REPO_URL git-svn >/dev/null 2>&1
   (
     cd git-svn
@@ -63,7 +65,9 @@
 setup_gitsvn_submodule() {
   echo "Setting up test remote git-svn-submodule repo..."
   rm -rf git-svn-submodule
-  git svn -q clone -s $REPO_URL git-svn-submodule >/dev/null 2>&1
+  # clone with --prefix origin/ to ensure the same behaviour with old and new
+  # versions of git (The default prefix was "" prior to Git 2.0)
+  git svn --prefix origin/ -q clone -s $REPO_URL git-svn-submodule >/dev/null 2>&1
   svn_revision=`svn info file://$PWD/svnrepo | grep ^Revision | \
                 sed s/^.*:// | xargs`
   (
@@ -72,7 +76,7 @@
     git config user.email 'TestDood@example.com'
     echo 'merge-file line 1' > merge-file
     git add merge-file; git commit -q -m 'First non-svn commit on master'
-    git checkout -q refs/remotes/trunk
+    git checkout -q refs/remotes/origin/trunk
     git merge -q --no-commit --no-ff refs/heads/master >/dev/null 2>&1
     echo 'merge-edit-file line 1' > merge-edit-file
     git add merge-edit-file
diff --git a/tests/upload-local-tracking-branch.sh b/tests/upload-local-tracking-branch.sh
index 0efd6e0..8c2e6fb 100755
--- a/tests/upload-local-tracking-branch.sh
+++ b/tests/upload-local-tracking-branch.sh
@@ -24,7 +24,7 @@
   # Prevent the editor from coming up when you upload.
   export GIT_EDITOR=$(which true)
   test_expect_success "upload succeeds (needs a server running on localhost)" \
-    "$GIT_CL upload -m test | grep -q 'Issue created'"
+    "$GIT_CL upload --no-oauth2 -m test | grep -q 'Issue created'"
 )
 SUCCESS=$?
 
diff --git a/tests/upload-stale.sh b/tests/upload-stale.sh
index 72d1ad3..3ce6141 100755
--- a/tests/upload-stale.sh
+++ b/tests/upload-stale.sh
@@ -23,7 +23,7 @@
   # Prevent the editor from coming up when you upload.
   export GIT_EDITOR=$(which true)
   test_expect_success "upload succeeds (needs a server running on localhost)" \
-    "$GIT_CL upload -m test | grep -q 'Issue created'"
+    "$GIT_CL upload --no-oauth2 -m test | grep -q 'Issue created'"
 
   test_expect_failure "description shouldn't contain unrelated commits" \
     "$GIT_CL_STATUS | grep -q 'second commit'"
diff --git a/tests/upload_to_google_storage_unittests.py b/tests/upload_to_google_storage_unittests.py
index 24ac6b8..3bac038 100755
--- a/tests/upload_to_google_storage_unittests.py
+++ b/tests/upload_to_google_storage_unittests.py
@@ -48,14 +48,14 @@
     filenames = [self.lorem_ipsum]
     output_filename = '%s.sha1'  % self.lorem_ipsum
     code = upload_to_google_storage.upload_to_google_storage(
-        filenames, self.base_url, self.gsutil, True, False, 1, False)
+        filenames, self.base_url, self.gsutil, True, False, 1, False, 'txt')
     self.assertEqual(
         self.gsutil.history,
         [('check_call',
           ('ls', '%s/%s' % (self.base_url, self.lorem_ipsum_sha1))),
          ('check_call',
-          ('cp', '-q', filenames[0], '%s/%s' % (self.base_url,
-                                                self.lorem_ipsum_sha1)))])
+          ('cp', '-z', 'txt', filenames[0],
+           '%s/%s' % (self.base_url, self.lorem_ipsum_sha1)))])
     self.assertTrue(os.path.exists(output_filename))
     self.assertEqual(
         open(output_filename, 'rb').read(),
@@ -70,7 +70,7 @@
     self.gsutil.add_expected(0, '', '')
     self.gsutil.add_expected(0, etag_string, '')
     code = upload_to_google_storage.upload_to_google_storage(
-        filenames, self.base_url, self.gsutil, False, False, 1, False)
+        filenames, self.base_url, self.gsutil, False, False, 1, False, None)
     self.assertEqual(
         self.gsutil.history,
         [('check_call',
@@ -100,7 +100,8 @@
         False,
         False,
         self.stdout_queue,
-        self.ret_codes)
+        self.ret_codes,
+        None)
     expected_ret_codes = [
       (20,
        'Encountered error on uploading %s to %s/%s\nExpected error message' %
@@ -114,7 +115,7 @@
     with open(output_filename, 'wb') as f:
       f.write(fake_hash)  # Fake hash.
     code = upload_to_google_storage.upload_to_google_storage(
-        filenames, self.base_url, self.gsutil, False, False, 1, True)
+        filenames, self.base_url, self.gsutil, False, False, 1, True, None)
     self.assertEqual(
         self.gsutil.history,
         [('check_call',
@@ -122,7 +123,7 @@
          ('check_call',
           ('ls', '-L', '%s/%s' % (self.base_url, fake_hash))),
          ('check_call',
-          ('cp', '-q', filenames[0], '%s/%s' % (self.base_url, fake_hash)))])
+          ('cp', filenames[0], '%s/%s' % (self.base_url, fake_hash)))])
     self.assertEqual(
         open(output_filename, 'rb').read(), fake_hash)
     os.remove(output_filename)
@@ -162,4 +163,4 @@
 
 
 if __name__ == '__main__':
-  unittest.main()
\ No newline at end of file
+  unittest.main()
diff --git a/third_party/cq_client/OWNERS b/third_party/cq_client/OWNERS
new file mode 100644
index 0000000..aa2ee95
--- /dev/null
+++ b/third_party/cq_client/OWNERS
@@ -0,0 +1,4 @@
+akuegel@chromium.org
+phajdan.jr@chromium.org
+sergiyb@chromium.org
+tandrii@chromium.org
diff --git a/third_party/cq_client/README.md b/third_party/cq_client/README.md
new file mode 100644
index 0000000..d37caa6
--- /dev/null
+++ b/third_party/cq_client/README.md
@@ -0,0 +1,17 @@
+This directory contains CQ client library to be distributed to other repos. If
+you need to modify some files in this directory, please make sure that you are
+changing the canonical version of the source code and not one of the copies,
+which should only be updated as a whole using Glyco (when available, see
+http://crbug.com/489420).
+
+The canonical version is located at `https://chrome-internal.googlesource.com/
+infra/infra_internal/+/master/commit_queue/cq_client`.
+
+To generate `cq_pb2.py`, please use protoc version 2.6.1:
+
+    cd commit_queue/cq_client
+    protoc cq.proto --python_out $(pwd)
+
+Additionally, please make sure to use proto3-compatible syntax, e.g. no default
+values, no required fields. Ideally, we should use proto3 generator already,
+however alpha version thereof is still unstable.
diff --git a/third_party/cq_client/__init__.py b/third_party/cq_client/__init__.py
new file mode 100644
index 0000000..50b23df
--- /dev/null
+++ b/third_party/cq_client/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/third_party/cq_client/cq.proto b/third_party/cq_client/cq.proto
new file mode 100644
index 0000000..8422873
--- /dev/null
+++ b/third_party/cq_client/cq.proto
@@ -0,0 +1,154 @@
+syntax = "proto2";
+
+// This message describes a Commit Queue configuration. The config file cq.cfg
+// should be stored in the config directory located on the branch that this CQ
+// should commit to.
+message Config {
+  // Required. Version of the config format.
+  optional int32 version = 1;
+
+  // Required. Name of the CQ. May only contain characters [a-zA-Z0-9_]. It is
+  // used for various purposes, including, but not limited to, matching the
+  // project name for CLs on Rietveld, name of the project in the status app,
+  // name for logging etc. CQ name should not be confused with the project name
+  // in LUCI as there may be multiple CQs per project.
+  optional string cq_name = 2;
+
+  // List of verifiers that verify if the CL is ready to be committed.
+  optional Verifiers verifiers = 3;
+
+  // URL of the CQ status app to push updates to.
+  optional string cq_status_url = 4;
+
+  // When true, hash of the commit is not posted by CQ. This is used for
+  // projects using gnumbd as the latter publishes actual hash later. Default value
+  // is false.
+  optional bool hide_ref_in_committed_msg = 5;
+
+  // Delay between commit bursts in seconds. Default value is 480.
+  optional int32 commit_burst_delay = 6;
+
+  // Maximum number of commits done sequentially, before waiting for
+  // commit_burst_delay. Default value is 4.
+  optional int32 max_commit_burst = 7;
+
+  // Defines whether a CQ is used in production. Allows to disable CQ for a
+  // given branch. Default is true.
+  optional bool in_production = 8;
+
+  // Configuration options for Rietveld code review.
+  optional Rietveld rietveld = 9;
+
+  // This can be used to override the Git repository URL used to checkout and
+  // commit changes on CQ host. This should only be used in case, when the
+  // source repository is not supported by luci-config (e.g. GitHub).
+  optional string git_repo_url = 10;
+
+  // Target ref to commit to. This can be used to specify a different ref than
+  // the one where the luci config is located. This is useful, e.g. for projects
+  // that use gnumbd where CQ should commit into a pending ref.
+  optional string target_ref = 11;
+
+  // Deprecated. URL of the SVN repository. We are deprecating SVN support.
+  optional string svn_repo_url = 12;
+
+  // Deprecated. Should be set to true, when the project's SVN repository does
+  // not have server-side hooks configured.
+  optional bool server_hooks_missing = 13;
+
+  // Deprecated. Specifies a list of verifiers that are run on a local checkout
+  // with patch applied. The only remaining use case for this is PRESUBMIT_CHECK
+  // verifier, which we are deprecating as well.
+  optional Verifiers verifiers_with_patch = 14;
+}
+
+message Rietveld {
+  // Required. URL of the codereview site.
+  optional string url = 1;
+
+  // List of regular expressions used to check if CL's base URL should be
+  // processed by this CQ. This may be useful if a single branch has multiple
+  // sub-directories that are handled by different CQs. When no regular
+  // expressions are specified, the regular expression '.*', which matches any
+  // directory, is used.
+  repeated string project_bases = 2;
+}
+
+// Verifiers are various types of checks that a Commit Queue performs on a CL.
+// All verifiers must pass in order for a CL to be landed. Configuration file
+// describes types of verifiers that should be applied to each CL and their
+// parameters.
+message Verifiers {
+  // This verifier is used to ensure that an LGTM was posted to the code review
+  // site from a valid project committer.
+  optional ReviewerLgtmVerifier reviewer_lgtm = 1;
+
+  // This verifier is used to check tree status before committing a CL. If the
+  // tree is closed, then the verifier will wait until it is reopened.
+  optional TreeStatusLgtmVerifier tree_status = 2;
+
+  // This verifier triggers a set of tryjobs that are to be run on builders on
+  // Buildbot. It automatically retries failed try-jobs and only allows CL to
+  // land if each builder has succeeded in the latest retry. If a given tryjob
+  // result is too old (>1 day) it is ignored.
+  optional TryJobVerifier try_job = 3;
+
+  // This verifier is used to ensure that the author has signed Google's
+  // Contributor License Agreement.
+  optional SignCLAVerifier sign_cla = 4;
+
+  message ReviewerLgtmVerifier {
+    // Required. Name of the project, whose committer list to use. This allows
+    // to reuse committer lists. Note that if you are adding a new list, then
+    // currently you will need to make changes to the CQ code to add support for
+    // retrieving such a list. We are working on removing the need to modify CQ
+    // code.
+    optional string committer_list = 1;
+
+    // Number of seconds to wait for LGTM on CQ. Default value is 0.
+    optional int32 max_wait_secs = 2;
+
+    // Message to be posted to code review site when no LGTM is found. Default
+    // value is "No LGTM from a valid reviewer yet. Only full committers are "
+    // "accepted.\nEven if an LGTM may have been provided, it was from a "
+    // "non-committer,\n_not_ a full super star committer.\nSee "
+    // "http://www.chromium.org/getting-involved/become-a-committer\nNote that "
+    // "this has nothing to do with OWNERS files."
+    optional string no_lgtm_msg = 3;
+  }
+
+  message TreeStatusLgtmVerifier {
+    // Required. URL of the project tree status app.
+    optional string tree_status_url = 1;
+  }
+
+  message TryJobVerifier {
+    message Builder {
+      // Name of the builder.
+      optional string name = 1;
+
+      // When true, the builder is triggered by CQ. Otherwise, it is expected to
+      // be triggered from another tryjob. Default value is true.
+      optional bool triggered = 2;
+
+      // When this field is present, it marks given builder as experimental. It
+      // is only executed on a given percentage of the CLs and the outcome does
+      // not affect the decision whether a CL can land or not. This is typically
+      // used to test new builders and estimate their capacity requirements.
+      optional float experiment_percentage = 4;
+    }
+
+    message Bucket {
+      // Name of the bucket. This is typically the same as a master name.
+      optional string name = 1;
+
+      // Builders on which tryjobs should be triggered.
+      repeated Builder builders = 2;
+    }
+
+    // Buckets on which tryjobs are triggered/watched.
+    repeated Bucket buckets = 1;
+  }
+
+  message SignCLAVerifier {}
+}
diff --git a/third_party/cq_client/cq_pb2.py b/third_party/cq_client/cq_pb2.py
new file mode 100644
index 0000000..f317885
--- /dev/null
+++ b/third_party/cq_client/cq_pb2.py
@@ -0,0 +1,527 @@
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: cq.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='cq.proto',
+  package='',
+  serialized_pb=_b('\n\x08\x63q.proto\"\xf5\x02\n\x06\x43onfig\x12\x0f\n\x07version\x18\x01 \x01(\x05\x12\x0f\n\x07\x63q_name\x18\x02 \x01(\t\x12\x1d\n\tverifiers\x18\x03 \x01(\x0b\x32\n.Verifiers\x12\x15\n\rcq_status_url\x18\x04 \x01(\t\x12!\n\x19hide_ref_in_committed_msg\x18\x05 \x01(\x08\x12\x1a\n\x12\x63ommit_burst_delay\x18\x06 \x01(\x05\x12\x18\n\x10max_commit_burst\x18\x07 \x01(\x05\x12\x15\n\rin_production\x18\x08 \x01(\x08\x12\x1b\n\x08rietveld\x18\t \x01(\x0b\x32\t.Rietveld\x12\x14\n\x0cgit_repo_url\x18\n \x01(\t\x12\x12\n\ntarget_ref\x18\x0b \x01(\t\x12\x14\n\x0csvn_repo_url\x18\x0c \x01(\t\x12\x1c\n\x14server_hooks_missing\x18\r \x01(\x08\x12(\n\x14verifiers_with_patch\x18\x0e \x01(\x0b\x32\n.Verifiers\".\n\x08Rietveld\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\x15\n\rproject_bases\x18\x02 \x03(\t\"\xd5\x04\n\tVerifiers\x12\x36\n\rreviewer_lgtm\x18\x01 \x01(\x0b\x32\x1f.Verifiers.ReviewerLgtmVerifier\x12\x36\n\x0btree_status\x18\x02 \x01(\x0b\x32!.Verifiers.TreeStatusLgtmVerifier\x12*\n\x07try_job\x18\x03 \x01(\x0b\x32\x19.Verifiers.TryJobVerifier\x12,\n\x08sign_cla\x18\x04 \x01(\x0b\x32\x1a.Verifiers.SignCLAVerifier\x1aZ\n\x14ReviewerLgtmVerifier\x12\x16\n\x0e\x63ommitter_list\x18\x01 \x01(\t\x12\x15\n\rmax_wait_secs\x18\x02 \x01(\x05\x12\x13\n\x0bno_lgtm_msg\x18\x03 \x01(\t\x1a\x31\n\x16TreeStatusLgtmVerifier\x12\x17\n\x0ftree_status_url\x18\x01 \x01(\t\x1a\xdb\x01\n\x0eTryJobVerifier\x12\x31\n\x07\x62uckets\x18\x01 \x03(\x0b\x32 .Verifiers.TryJobVerifier.Bucket\x1aI\n\x07\x42uilder\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\ttriggered\x18\x02 \x01(\x08\x12\x1d\n\x15\x65xperiment_percentage\x18\x04 \x01(\x02\x1aK\n\x06\x42ucket\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x08\x62uilders\x18\x02 \x03(\x0b\x32!.Verifiers.TryJobVerifier.Builder\x1a\x11\n\x0fSignCLAVerifier')
+)
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+
+
+_CONFIG = _descriptor.Descriptor(
+  name='Config',
+  full_name='Config',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='version', full_name='Config.version', index=0,
+      number=1, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='cq_name', full_name='Config.cq_name', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='verifiers', full_name='Config.verifiers', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='cq_status_url', full_name='Config.cq_status_url', index=3,
+      number=4, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='hide_ref_in_committed_msg', full_name='Config.hide_ref_in_committed_msg', index=4,
+      number=5, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='commit_burst_delay', full_name='Config.commit_burst_delay', index=5,
+      number=6, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='max_commit_burst', full_name='Config.max_commit_burst', index=6,
+      number=7, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='in_production', full_name='Config.in_production', index=7,
+      number=8, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='rietveld', full_name='Config.rietveld', index=8,
+      number=9, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='git_repo_url', full_name='Config.git_repo_url', index=9,
+      number=10, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='target_ref', full_name='Config.target_ref', index=10,
+      number=11, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='svn_repo_url', full_name='Config.svn_repo_url', index=11,
+      number=12, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='server_hooks_missing', full_name='Config.server_hooks_missing', index=12,
+      number=13, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='verifiers_with_patch', full_name='Config.verifiers_with_patch', index=13,
+      number=14, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=13,
+  serialized_end=386,
+)
+
+
+_RIETVELD = _descriptor.Descriptor(
+  name='Rietveld',
+  full_name='Rietveld',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='url', full_name='Rietveld.url', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='project_bases', full_name='Rietveld.project_bases', index=1,
+      number=2, type=9, cpp_type=9, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=388,
+  serialized_end=434,
+)
+
+
+_VERIFIERS_REVIEWERLGTMVERIFIER = _descriptor.Descriptor(
+  name='ReviewerLgtmVerifier',
+  full_name='Verifiers.ReviewerLgtmVerifier',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='committer_list', full_name='Verifiers.ReviewerLgtmVerifier.committer_list', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='max_wait_secs', full_name='Verifiers.ReviewerLgtmVerifier.max_wait_secs', index=1,
+      number=2, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='no_lgtm_msg', full_name='Verifiers.ReviewerLgtmVerifier.no_lgtm_msg', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=652,
+  serialized_end=742,
+)
+
+_VERIFIERS_TREESTATUSLGTMVERIFIER = _descriptor.Descriptor(
+  name='TreeStatusLgtmVerifier',
+  full_name='Verifiers.TreeStatusLgtmVerifier',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='tree_status_url', full_name='Verifiers.TreeStatusLgtmVerifier.tree_status_url', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=744,
+  serialized_end=793,
+)
+
+_VERIFIERS_TRYJOBVERIFIER_BUILDER = _descriptor.Descriptor(
+  name='Builder',
+  full_name='Verifiers.TryJobVerifier.Builder',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='Verifiers.TryJobVerifier.Builder.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='triggered', full_name='Verifiers.TryJobVerifier.Builder.triggered', index=1,
+      number=2, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='experiment_percentage', full_name='Verifiers.TryJobVerifier.Builder.experiment_percentage', index=2,
+      number=4, type=2, cpp_type=6, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=865,
+  serialized_end=938,
+)
+
+_VERIFIERS_TRYJOBVERIFIER_BUCKET = _descriptor.Descriptor(
+  name='Bucket',
+  full_name='Verifiers.TryJobVerifier.Bucket',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='Verifiers.TryJobVerifier.Bucket.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='builders', full_name='Verifiers.TryJobVerifier.Bucket.builders', index=1,
+      number=2, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=940,
+  serialized_end=1015,
+)
+
+_VERIFIERS_TRYJOBVERIFIER = _descriptor.Descriptor(
+  name='TryJobVerifier',
+  full_name='Verifiers.TryJobVerifier',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='buckets', full_name='Verifiers.TryJobVerifier.buckets', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[_VERIFIERS_TRYJOBVERIFIER_BUILDER, _VERIFIERS_TRYJOBVERIFIER_BUCKET, ],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=796,
+  serialized_end=1015,
+)
+
+_VERIFIERS_SIGNCLAVERIFIER = _descriptor.Descriptor(
+  name='SignCLAVerifier',
+  full_name='Verifiers.SignCLAVerifier',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1017,
+  serialized_end=1034,
+)
+
+_VERIFIERS = _descriptor.Descriptor(
+  name='Verifiers',
+  full_name='Verifiers',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='reviewer_lgtm', full_name='Verifiers.reviewer_lgtm', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='tree_status', full_name='Verifiers.tree_status', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='try_job', full_name='Verifiers.try_job', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='sign_cla', full_name='Verifiers.sign_cla', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[_VERIFIERS_REVIEWERLGTMVERIFIER, _VERIFIERS_TREESTATUSLGTMVERIFIER, _VERIFIERS_TRYJOBVERIFIER, _VERIFIERS_SIGNCLAVERIFIER, ],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=437,
+  serialized_end=1034,
+)
+
+_CONFIG.fields_by_name['verifiers'].message_type = _VERIFIERS
+_CONFIG.fields_by_name['rietveld'].message_type = _RIETVELD
+_CONFIG.fields_by_name['verifiers_with_patch'].message_type = _VERIFIERS
+_VERIFIERS_REVIEWERLGTMVERIFIER.containing_type = _VERIFIERS
+_VERIFIERS_TREESTATUSLGTMVERIFIER.containing_type = _VERIFIERS
+_VERIFIERS_TRYJOBVERIFIER_BUILDER.containing_type = _VERIFIERS_TRYJOBVERIFIER
+_VERIFIERS_TRYJOBVERIFIER_BUCKET.fields_by_name['builders'].message_type = _VERIFIERS_TRYJOBVERIFIER_BUILDER
+_VERIFIERS_TRYJOBVERIFIER_BUCKET.containing_type = _VERIFIERS_TRYJOBVERIFIER
+_VERIFIERS_TRYJOBVERIFIER.fields_by_name['buckets'].message_type = _VERIFIERS_TRYJOBVERIFIER_BUCKET
+_VERIFIERS_TRYJOBVERIFIER.containing_type = _VERIFIERS
+_VERIFIERS_SIGNCLAVERIFIER.containing_type = _VERIFIERS
+_VERIFIERS.fields_by_name['reviewer_lgtm'].message_type = _VERIFIERS_REVIEWERLGTMVERIFIER
+_VERIFIERS.fields_by_name['tree_status'].message_type = _VERIFIERS_TREESTATUSLGTMVERIFIER
+_VERIFIERS.fields_by_name['try_job'].message_type = _VERIFIERS_TRYJOBVERIFIER
+_VERIFIERS.fields_by_name['sign_cla'].message_type = _VERIFIERS_SIGNCLAVERIFIER
+DESCRIPTOR.message_types_by_name['Config'] = _CONFIG
+DESCRIPTOR.message_types_by_name['Rietveld'] = _RIETVELD
+DESCRIPTOR.message_types_by_name['Verifiers'] = _VERIFIERS
+
+Config = _reflection.GeneratedProtocolMessageType('Config', (_message.Message,), dict(
+  DESCRIPTOR = _CONFIG,
+  __module__ = 'cq_pb2'
+  # @@protoc_insertion_point(class_scope:Config)
+  ))
+_sym_db.RegisterMessage(Config)
+
+Rietveld = _reflection.GeneratedProtocolMessageType('Rietveld', (_message.Message,), dict(
+  DESCRIPTOR = _RIETVELD,
+  __module__ = 'cq_pb2'
+  # @@protoc_insertion_point(class_scope:Rietveld)
+  ))
+_sym_db.RegisterMessage(Rietveld)
+
+Verifiers = _reflection.GeneratedProtocolMessageType('Verifiers', (_message.Message,), dict(
+
+  ReviewerLgtmVerifier = _reflection.GeneratedProtocolMessageType('ReviewerLgtmVerifier', (_message.Message,), dict(
+    DESCRIPTOR = _VERIFIERS_REVIEWERLGTMVERIFIER,
+    __module__ = 'cq_pb2'
+    # @@protoc_insertion_point(class_scope:Verifiers.ReviewerLgtmVerifier)
+    ))
+  ,
+
+  TreeStatusLgtmVerifier = _reflection.GeneratedProtocolMessageType('TreeStatusLgtmVerifier', (_message.Message,), dict(
+    DESCRIPTOR = _VERIFIERS_TREESTATUSLGTMVERIFIER,
+    __module__ = 'cq_pb2'
+    # @@protoc_insertion_point(class_scope:Verifiers.TreeStatusLgtmVerifier)
+    ))
+  ,
+
+  TryJobVerifier = _reflection.GeneratedProtocolMessageType('TryJobVerifier', (_message.Message,), dict(
+
+    Builder = _reflection.GeneratedProtocolMessageType('Builder', (_message.Message,), dict(
+      DESCRIPTOR = _VERIFIERS_TRYJOBVERIFIER_BUILDER,
+      __module__ = 'cq_pb2'
+      # @@protoc_insertion_point(class_scope:Verifiers.TryJobVerifier.Builder)
+      ))
+    ,
+
+    Bucket = _reflection.GeneratedProtocolMessageType('Bucket', (_message.Message,), dict(
+      DESCRIPTOR = _VERIFIERS_TRYJOBVERIFIER_BUCKET,
+      __module__ = 'cq_pb2'
+      # @@protoc_insertion_point(class_scope:Verifiers.TryJobVerifier.Bucket)
+      ))
+    ,
+    DESCRIPTOR = _VERIFIERS_TRYJOBVERIFIER,
+    __module__ = 'cq_pb2'
+    # @@protoc_insertion_point(class_scope:Verifiers.TryJobVerifier)
+    ))
+  ,
+
+  SignCLAVerifier = _reflection.GeneratedProtocolMessageType('SignCLAVerifier', (_message.Message,), dict(
+    DESCRIPTOR = _VERIFIERS_SIGNCLAVERIFIER,
+    __module__ = 'cq_pb2'
+    # @@protoc_insertion_point(class_scope:Verifiers.SignCLAVerifier)
+    ))
+  ,
+  DESCRIPTOR = _VERIFIERS,
+  __module__ = 'cq_pb2'
+  # @@protoc_insertion_point(class_scope:Verifiers)
+  ))
+_sym_db.RegisterMessage(Verifiers)
+_sym_db.RegisterMessage(Verifiers.ReviewerLgtmVerifier)
+_sym_db.RegisterMessage(Verifiers.TreeStatusLgtmVerifier)
+_sym_db.RegisterMessage(Verifiers.TryJobVerifier)
+_sym_db.RegisterMessage(Verifiers.TryJobVerifier.Builder)
+_sym_db.RegisterMessage(Verifiers.TryJobVerifier.Bucket)
+_sym_db.RegisterMessage(Verifiers.SignCLAVerifier)
+
+
+# @@protoc_insertion_point(module_scope)
diff --git a/third_party/cq_client/test/cq_example.cfg b/third_party/cq_client/test/cq_example.cfg
new file mode 100644
index 0000000..dc69acf
--- /dev/null
+++ b/third_party/cq_client/test/cq_example.cfg
@@ -0,0 +1,55 @@
+version: 1
+cq_name: "infra"
+cq_status_url: "https://chromium-cq-status.appspot.com"
+hide_ref_in_committed_msg: true
+commit_burst_delay: 600
+max_commit_burst: 10
+in_production: false
+git_repo_url: "http://github.com/infra/infra.git"
+target_ref: "refs/pending/heads/master"
+
+rietveld {
+  url: "https://codereview.chromium.org"
+  project_bases: "https://chromium.googlesource.com/infra/infra.git@master"
+}
+
+verifiers {
+  reviewer_lgtm: {
+     committer_list: "chromium"
+     max_wait_secs: 600
+     no_lgtm_msg: "LGTM is missing"
+  }
+
+  tree_status: {
+     tree_status_url: "https://infra-status.appspot.com"
+  }
+
+  try_job {
+    buckets {
+      name: "tryserver.blink"
+      builders { name: "android_blink_compile_dbg" }
+      builders { name: "android_blink_compile_rel" }
+      builders {
+        name: "win_blink_rel"
+        triggered: true
+      }
+    }
+    buckets {
+      name: "tryserver.chromium.linux"
+      builders {
+        name: "android_arm64_dbg_recipe"
+      }
+      builders {
+        name: "linux_chromium_rel_ng"
+        experiment_percentage: 0.1
+      }
+    }
+    buckets {
+      name: "tryserver.chromium.mac"
+      builders {
+        name: "ios_dbg_simulator_ninja"
+        experiment_percentage: 1.0
+      }
+    }
+  }
+}
\ No newline at end of file
diff --git a/third_party/cq_client/test/validate_config_test.py b/third_party/cq_client/test/validate_config_test.py
new file mode 100755
index 0000000..d7c0971
--- /dev/null
+++ b/third_party/cq_client/test/validate_config_test.py
@@ -0,0 +1,52 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for tools/validate_config.py."""
+
+import mock
+import os
+import unittest
+
+from cq_client import cq_pb2
+from cq_client import validate_config
+
+
+TEST_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+class TestValidateConfig(unittest.TestCase):
+  def test_is_valid(self):
+    with open(os.path.join(TEST_DIR, 'cq_example.cfg'), 'r') as test_config:
+      self.assertTrue(validate_config.IsValid(test_config.read()))
+
+  def test_has_field(self):
+    config = cq_pb2.Config()
+
+    self.assertFalse(validate_config._HasField(config, 'version'))
+    config.version = 1
+    self.assertTrue(validate_config._HasField(config, 'version'))
+
+    self.assertFalse(validate_config._HasField(
+        config, 'rietveld.project_bases'))
+    config.rietveld.project_bases.append('foo://bar')
+    self.assertTrue(validate_config._HasField(
+        config, 'rietveld.project_bases'))
+
+    self.assertFalse(validate_config._HasField(
+        config, 'verifiers.try_job.buckets'))
+    self.assertFalse(validate_config._HasField(
+        config, 'verifiers.try_job.buckets.name'))
+
+    bucket = config.verifiers.try_job.buckets.add()
+    bucket.name = 'tryserver.chromium.linux'
+
+
+    self.assertTrue(validate_config._HasField(
+        config, 'verifiers.try_job.buckets'))
+    self.assertTrue(validate_config._HasField(
+        config, 'verifiers.try_job.buckets.name'))
+
+    config.verifiers.try_job.buckets.add()
+    self.assertFalse(validate_config._HasField(
+        config, 'verifiers.try_job.buckets.name'))
diff --git a/third_party/cq_client/validate_config.py b/third_party/cq_client/validate_config.py
new file mode 100644
index 0000000..17d91db
--- /dev/null
+++ b/third_party/cq_client/validate_config.py
@@ -0,0 +1,108 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""CQ config validation library."""
+
+import argparse
+from google import protobuf
+import logging
+import re
+import sys
+
+from cq_client import cq_pb2
+
+
+REQUIRED_FIELDS = [
+  'version',
+  'rietveld',
+  'rietveld.url',
+  'verifiers',
+  'cq_name',
+]
+
+LEGACY_FIELDS = [
+  'svn_repo_url',
+  'server_hooks_missing',
+  'verifiers_with_patch',
+]
+
+EMAIL_REGEXP = r'^[^@]+@[^@]+\.[^@]+$'
+
+
+def _HasField(message, field_path):
+  """Checks that at least one field with given path exist in the proto message.
+
+  This function correctly handles repeated fields and will make sure that each
+  repeated field will have required sub-path, e.g. if 'abc' is a repeated field
+  and field_path is 'abc.def', then the function will only return True when each
+  entry for 'abc' will contain at least one value for 'def'.
+
+  Args:
+    message (google.protobuf.message.Message): Protocol Buffer message to check.
+    field_path (string): Path to the target field separated with ".".
+
+  Return:
+    True if at least one such field is explicitly set in the message.
+  """
+  path_parts = field_path.split('.', 1)
+  field_name = path_parts[0]
+  sub_path = path_parts[1] if len(path_parts) == 2 else None
+
+  field_labels = {fd.name: fd.label for fd in message.DESCRIPTOR.fields}
+  repeated_field = (field_labels[field_name] ==
+                    protobuf.descriptor.FieldDescriptor.LABEL_REPEATED)
+
+  if sub_path:
+    field = getattr(message, field_name)
+    if repeated_field:
+      if not field:
+        return False
+      return all(_HasField(entry, sub_path) for entry in field)
+    else:
+      return _HasField(field, sub_path)
+  else:
+    if repeated_field:
+      return len(getattr(message, field_name)) > 0
+    else:
+      return message.HasField(field_name)
+
+
+def IsValid(cq_config):
+  """Validates a CQ config and prints errors/warnings to the screen.
+
+  Args:
+    cq_config (string): Unparsed text format of the CQ config proto.
+
+  Returns:
+    True if the config is valid.
+  """
+  try:
+    config = cq_pb2.Config()
+    protobuf.text_format.Merge(cq_config, config)
+  except protobuf.text_format.ParseError as e:
+    logging.error('Failed to parse config as protobuf:\n%s', e)
+    return False
+
+  for fname in REQUIRED_FIELDS:
+    if not _HasField(config, fname):
+      logging.error('%s is a required field', fname)
+      return False
+
+  for fname in LEGACY_FIELDS:
+    if _HasField(config, fname):
+      logging.warning('%s is a legacy field', fname)
+
+
+  for base in config.rietveld.project_bases:
+    try:
+      re.compile(base)
+    except re.error:
+      logging.error('failed to parse "%s" in project_bases as a regexp', base)
+      return False
+
+  # TODO(sergiyb): For each field, check valid values depending on its
+  # semantics, e.g. email addresses, regular expressions etc.
+
+  return True
diff --git a/third_party/gsutil/gsutil b/third_party/gsutil/gsutil
index 49c83d0..53249b1 100755
--- a/third_party/gsutil/gsutil
+++ b/third_party/gsutil/gsutil
@@ -28,7 +28,7 @@
 import sys
 import traceback
 
-third_party_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+third_party_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
 sys.path.insert(0, os.path.dirname(third_party_dir))
 sys.path.insert(0, third_party_dir)
 
diff --git a/third_party/logilab/astng/__init__.py b/third_party/logilab/astng/__init__.py
deleted file mode 100644
index 70b2f3e..0000000
--- a/third_party/logilab/astng/__init__.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-# copyright 2003-2010 Sylvain Thenault, all rights reserved.
-# contact mailto:thenault@gmail.com
-#
-# This file is part of logilab-astng.
-#
-# logilab-astng is free software: you can redistribute it and/or modify it
-# under the terms of the GNU Lesser General Public License as published by the
-# Free Software Foundation, either version 2.1 of the License, or (at your
-# option) any later version.
-#
-# logilab-astng is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
-# for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
-"""Python Abstract Syntax Tree New Generation
-
-The aim of this module is to provide a common base representation of
-python source code for projects such as pychecker, pyreverse,
-pylint... Well, actually the development of this library is essentially
-governed by pylint's needs.
-
-It extends class defined in the python's _ast module with some
-additional methods and attributes. Instance attributes are added by a
-builder object, which can either generate extended ast (let's call
-them astng ;) by visiting an existent ast tree or by inspecting living
-object. Methods are added by monkey patching ast classes.
-
-Main modules are:
-
-* nodes and scoped_nodes for more information about methods and
-  attributes added to different node classes
-
-* the manager contains a high level object to get astng trees from
-  source files and living objects. It maintains a cache of previously
-  constructed tree for quick access
-
-* builder contains the class responsible to build astng trees
-"""
-__doctype__ = "restructuredtext en"
-
-import sys
-if sys.version_info >= (3, 0):
-    BUILTINS_MODULE = 'builtins'
-else:
-    BUILTINS_MODULE = '__builtin__'
-
-# WARNING: internal imports order matters !
-
-# make all exception classes accessible from astng package
-from logilab.astng.exceptions import *
-
-# make all node classes accessible from astng package
-from logilab.astng.nodes import *
-
-# trigger extra monkey-patching
-from logilab.astng import inference
-
-# more stuff available
-from logilab.astng import raw_building
-from logilab.astng.bases import YES, Instance, BoundMethod, UnboundMethod
-from logilab.astng.node_classes import are_exclusive, unpack_infer
-from logilab.astng.scoped_nodes import builtin_lookup
-
-# make a manager instance (borg) as well as Project and Package classes
-# accessible from astng package
-from logilab.astng.manager import ASTNGManager, Project
-MANAGER = ASTNGManager()
-del ASTNGManager
diff --git a/third_party/logilab/astng/__pkginfo__.py b/third_party/logilab/astng/__pkginfo__.py
deleted file mode 100644
index f671ac2..0000000
--- a/third_party/logilab/astng/__pkginfo__.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-# copyright 2003-2010 Sylvain Thenault, all rights reserved.
-# contact mailto:thenault@gmail.com
-#
-# This file is part of logilab-astng.
-#
-# logilab-astng is free software: you can redistribute it and/or modify it
-# under the terms of the GNU Lesser General Public License as published by the
-# Free Software Foundation, either version 2.1 of the License, or (at your
-# option) any later version.
-#
-# logilab-astng is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
-# for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
-"""logilab.astng packaging information"""
-
-distname = 'logilab-astng'
-
-modname = 'astng'
-subpackage_of = 'logilab'
-
-numversion = (0, 23, 1)
-version = '.'.join([str(num) for num in numversion])
-
-install_requires = ['logilab-common >= 0.53.0']
-
-license = 'LGPL'
-
-author = 'Logilab'
-author_email = 'python-projects@lists.logilab.org'
-mailinglist = "mailto://%s" % author_email
-web = "http://www.logilab.org/project/%s" % distname
-ftp = "ftp://ftp.logilab.org/pub/%s" % modname
-
-description = "rebuild a new abstract syntax tree from Python's ast"
-
-from os.path import join
-include_dirs = [join('test', 'regrtest_data'),
-                join('test', 'data'), join('test', 'data2')]
diff --git a/third_party/logilab/astng/as_string.py b/third_party/logilab/astng/as_string.py
deleted file mode 100644
index 0a42668..0000000
--- a/third_party/logilab/astng/as_string.py
+++ /dev/null
@@ -1,427 +0,0 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-# copyright 2003-2010 Sylvain Thenault, all rights reserved.
-# contact mailto:thenault@gmail.com
-#
-# This file is part of logilab-astng.
-#
-# logilab-astng is free software: you can redistribute it and/or modify it
-# under the terms of the GNU Lesser General Public License as published by the
-# Free Software Foundation, either version 2.1 of the License, or (at your
-# option) any later version.
-#
-# logilab-astng is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
-# for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
-"""This module renders ASTNG nodes to string representation.
-
-It will probably not work on bare _ast trees.
-"""
-import sys
-
-
-INDENT = '    ' # 4 spaces ; keep indentation variable
-
-
-def _import_string(names):
-    """return a list of (name, asname) formatted as a string"""
-    _names = []
-    for name, asname in names:
-        if asname is not None:
-            _names.append('%s as %s' % (name, asname))
-        else:
-            _names.append(name)
-    return  ', '.join(_names)
-
-
-class AsStringVisitor(object):
-    """Visitor to render an ASTNG node as string """
-
-    def __call__(self, node):
-        """Makes this visitor behave as a simple function"""
-        return node.accept(self)
-
-    def _stmt_list(self, stmts):
-        """return a list of nodes to string"""
-        stmts = '\n'.join([nstr for nstr in [n.accept(self) for n in stmts] if nstr])
-        return INDENT + stmts.replace('\n', '\n'+INDENT)
-
-
-    ## visit_<node> methods ###########################################
-
-    def visit_arguments(self, node):
-        """return an astng.Function node as string"""
-        return node.format_args()
-
-    def visit_assattr(self, node):
-        """return an astng.AssAttr node as string"""
-        return self.visit_getattr(node)
-
-    def visit_assert(self, node):
-        """return an astng.Assert node as string"""
-        if node.fail:
-            return 'assert %s, %s' % (node.test.accept(self),
-                                        node.fail.accept(self))
-        return 'assert %s' % node.test.accept(self)
-
-    def visit_assname(self, node):
-        """return an astng.AssName node as string"""
-        return node.name
-
-    def visit_assign(self, node):
-        """return an astng.Assign node as string"""
-        lhs = ' = '.join([n.accept(self) for n in node.targets])
-        return '%s = %s' % (lhs, node.value.accept(self))
-
-    def visit_augassign(self, node):
-        """return an astng.AugAssign node as string"""
-        return '%s %s %s' % (node.target.accept(self), node.op, node.value.accept(self))
-
-    def visit_backquote(self, node):
-        """return an astng.Backquote node as string"""
-        return '`%s`' % node.value.accept(self)
-
-    def visit_binop(self, node):
-        """return an astng.BinOp node as string"""
-        return '(%s) %s (%s)' % (node.left.accept(self), node.op, node.right.accept(self))
-
-    def visit_boolop(self, node):
-        """return an astng.BoolOp node as string"""
-        return (' %s ' % node.op).join(['(%s)' % n.accept(self)
-                                            for n in node.values])
-
-    def visit_break(self, node):
-        """return an astng.Break node as string"""
-        return 'break'
-
-    def visit_callfunc(self, node):
-        """return an astng.CallFunc node as string"""
-        expr_str = node.func.accept(self)
-        args = [arg.accept(self) for arg in node.args]
-        if node.starargs:
-            args.append( '*' + node.starargs.accept(self))
-        if node.kwargs:
-            args.append( '**' + node.kwargs.accept(self))
-        return '%s(%s)' % (expr_str, ', '.join(args))
-
-    def visit_class(self, node):
-        """return an astng.Class node as string"""
-        decorate = node.decorators and node.decorators.accept(self)  or ''
-        bases =  ', '.join([n.accept(self) for n in node.bases])
-        bases = bases and '(%s)' % bases or ''
-        docs = node.doc and '\n%s"""%s"""' % (INDENT, node.doc) or ''
-        return '\n\n%sclass %s%s:%s\n%s\n' % (decorate, node.name, bases, docs,
-                                            self._stmt_list( node.body))
-
-    def visit_compare(self, node):
-        """return an astng.Compare node as string"""
-        rhs_str = ' '.join(['%s %s' % (op, expr.accept(self))
-                            for op, expr in node.ops])
-        return '%s %s' % (node.left.accept(self), rhs_str)
-
-    def visit_comprehension(self, node):
-        """return an astng.Comprehension node as string"""
-        ifs = ''.join([ ' if %s' % n.accept(self) for n in node.ifs])
-        return 'for %s in %s%s' % (node.target.accept(self),
-                                    node.iter.accept(self), ifs )
-
-    def visit_const(self, node):
-        """return an astng.Const node as string"""
-        return repr(node.value)
-
-    def visit_continue(self, node):
-        """return an astng.Continue node as string"""
-        return 'continue'
-
-    def visit_delete(self, node): # XXX check if correct
-        """return an astng.Delete node as string"""
-        return 'del %s' % ', '.join([child.accept(self)
-                                for child in node.targets])
-
-    def visit_delattr(self, node):
-        """return an astng.DelAttr node as string"""
-        return self.visit_getattr(node)
-
-    def visit_delname(self, node):
-        """return an astng.DelName node as string"""
-        return node.name
-
-    def visit_decorators(self, node):
-        """return an astng.Decorators node as string"""
-        return '@%s\n' % '\n@'.join([item.accept(self) for item in node.nodes])
-
-    def visit_dict(self, node):
-        """return an astng.Dict node as string"""
-        return '{%s}' % ', '.join(['%s: %s' % (key.accept(self),
-                            value.accept(self)) for key, value in node.items])
-
-    def visit_dictcomp(self, node):
-        """return an astng.DictComp node as string"""
-        return '{%s: %s %s}' % (node.key.accept(self), node.value.accept(self),
-                ' '.join([n.accept(self) for n in node.generators]))
-
-    def visit_discard(self, node):
-        """return an astng.Discard node as string"""
-        return node.value.accept(self)
-
-    def visit_emptynode(self, node):
-        """dummy method for visiting an Empty node"""
-        return ''
-
-    def visit_excepthandler(self, node):
-        if node.type:
-            if node.name:
-                excs = 'except %s, %s' % (node.type.accept(self),
-                                        node.name.accept(self))
-            else:
-                excs = 'except %s' % node.type.accept(self)
-        else:
-            excs = 'except'
-        return '%s:\n%s' % (excs, self._stmt_list(node.body))
-
-    def visit_ellipsis(self, node):
-        """return an astng.Ellipsis node as string"""
-        return '...'
-
-    def visit_empty(self, node):
-        """return an Empty node as string"""
-        return ''
-
-    def visit_exec(self, node):
-        """return an astng.Exec node as string"""
-        if node.locals:
-            return 'exec %s in %s, %s' % (node.expr.accept(self),
-                                          node.locals.accept(self),
-                                          node.globals.accept(self))
-        if node.globals:
-            return 'exec %s in %s' % (node.expr.accept(self),
-                                      node.globals.accept(self))
-        return 'exec %s' % node.expr.accept(self)
-
-    def visit_extslice(self, node):
-        """return an astng.ExtSlice node as string"""
-        return ','.join( [dim.accept(self) for dim in node.dims] )
-
-    def visit_for(self, node):
-        """return an astng.For node as string"""
-        fors = 'for %s in %s:\n%s' % (node.target.accept(self),
-                                    node.iter.accept(self),
-                                    self._stmt_list( node.body))
-        if node.orelse:
-            fors = '%s\nelse:\n%s' % (fors, self._stmt_list(node.orelse))
-        return fors
-
-    def visit_from(self, node):
-        """return an astng.From node as string"""
-        return 'from %s import %s' % ('.' * (node.level or 0) + node.modname,
-                                      _import_string(node.names))
-
-    def visit_function(self, node):
-        """return an astng.Function node as string"""
-        decorate = node.decorators and node.decorators.accept(self)  or ''
-        docs = node.doc and '\n%s"""%s"""' % (INDENT, node.doc) or ''
-        return '\n%sdef %s(%s):%s\n%s' % (decorate, node.name, node.args.accept(self),
-                                        docs, self._stmt_list(node.body))
-
-    def visit_genexpr(self, node):
-        """return an astng.GenExpr node as string"""
-        return '(%s %s)' % (node.elt.accept(self), ' '.join([n.accept(self)
-                                                    for n in node.generators]))
-
-    def visit_getattr(self, node):
-        """return an astng.Getattr node as string"""
-        return '%s.%s' % (node.expr.accept(self), node.attrname)
-
-    def visit_global(self, node):
-        """return an astng.Global node as string"""
-        return 'global %s' % ', '.join(node.names)
-
-    def visit_if(self, node):
-        """return an astng.If node as string"""
-        ifs = ['if %s:\n%s' % (node.test.accept(self), self._stmt_list(node.body))]
-        if node.orelse:# XXX use elif ???
-            ifs.append('else:\n%s' % self._stmt_list(node.orelse))
-        return '\n'.join(ifs)
-
-    def visit_ifexp(self, node):
-        """return an astng.IfExp node as string"""
-        return '%s if %s else %s' % (node.body.accept(self),
-                node.test.accept(self), node.orelse.accept(self))
-
-    def visit_import(self, node):
-        """return an astng.Import node as string"""
-        return 'import %s' % _import_string(node.names)
-
-    def visit_keyword(self, node):
-        """return an astng.Keyword node as string"""
-        return '%s=%s' % (node.arg, node.value.accept(self))
-
-    def visit_lambda(self, node):
-        """return an astng.Lambda node as string"""
-        return 'lambda %s: %s' % (node.args.accept(self), node.body.accept(self))
-
-    def visit_list(self, node):
-        """return an astng.List node as string"""
-        return '[%s]' % ', '.join([child.accept(self) for child in node.elts])
-
-    def visit_listcomp(self, node):
-        """return an astng.ListComp node as string"""
-        return '[%s %s]' % (node.elt.accept(self), ' '.join([n.accept(self)
-                                                for n in node.generators]))
-
-    def visit_module(self, node):
-        """return an astng.Module node as string"""
-        docs = node.doc and '"""%s"""\n\n' % node.doc or ''
-        return docs + '\n'.join([n.accept(self) for n in node.body]) + '\n\n'
-
-    def visit_name(self, node):
-        """return an astng.Name node as string"""
-        return node.name
-
-    def visit_pass(self, node):
-        """return an astng.Pass node as string"""
-        return 'pass'
-
-    def visit_print(self, node):
-        """return an astng.Print node as string"""
-        nodes = ', '.join([n.accept(self) for n in node.values])
-        if not node.nl:
-            nodes = '%s,' % nodes
-        if node.dest:
-            return 'print >> %s, %s' % (node.dest.accept(self), nodes)
-        return 'print %s' % nodes
-
-    def visit_raise(self, node):
-        """return an astng.Raise node as string"""
-        if node.exc:
-            if node.inst:
-                if node.tback:
-                    return 'raise %s, %s, %s' % (node.exc.accept(self),
-                                                node.inst.accept(self),
-                                                node.tback.accept(self))
-                return 'raise %s, %s' % (node.exc.accept(self),
-                                        node.inst.accept(self))
-            return 'raise %s' % node.exc.accept(self)
-        return 'raise'
-
-    def visit_return(self, node):
-        """return an astng.Return node as string"""
-        if node.value:
-            return 'return %s' % node.value.accept(self)
-        else:
-            return 'return'
-
-    def visit_index(self, node):
-        """return a astng.Index node as string"""
-        return node.value.accept(self)
-
-    def visit_set(self, node):
-        """return an astng.Set node as string"""
-        return '{%s}' % ', '.join([child.accept(self) for child in node.elts])
-
-    def visit_setcomp(self, node):
-        """return an astng.SetComp node as string"""
-        return '{%s %s}' % (node.elt.accept(self), ' '.join([n.accept(self)
-                                                for n in node.generators]))
-
-    def visit_slice(self, node):
-        """return a astng.Slice node as string"""
-        lower = node.lower and node.lower.accept(self) or ''
-        upper = node.upper and node.upper.accept(self) or ''
-        step = node.step and node.step.accept(self) or ''
-        if step:
-            return '%s:%s:%s' % (lower, upper, step)
-        return  '%s:%s' % (lower, upper)
-
-    def visit_subscript(self, node):
-        """return an astng.Subscript node as string"""
-        return '%s[%s]' % (node.value.accept(self), node.slice.accept(self))
-
-    def visit_tryexcept(self, node):
-        """return an astng.TryExcept node as string"""
-        trys = ['try:\n%s' % self._stmt_list( node.body)]
-        for handler in node.handlers:
-            trys.append(handler.accept(self))
-        if node.orelse:
-            trys.append('else:\n%s' % self._stmt_list(node.orelse))
-        return '\n'.join(trys)
-
-    def visit_tryfinally(self, node):
-        """return an astng.TryFinally node as string"""
-        return 'try:\n%s\nfinally:\n%s' % (self._stmt_list( node.body),
-                                        self._stmt_list(node.finalbody))
-
-    def visit_tuple(self, node):
-        """return an astng.Tuple node as string"""
-        return '(%s)' % ', '.join([child.accept(self) for child in node.elts])
-
-    def visit_unaryop(self, node):
-        """return an astng.UnaryOp node as string"""
-        if node.op == 'not':
-            operator = 'not '
-        else:
-            operator = node.op
-        return '%s%s' % (operator, node.operand.accept(self))
-
-    def visit_while(self, node):
-        """return an astng.While node as string"""
-        whiles = 'while %s:\n%s' % (node.test.accept(self),
-                                    self._stmt_list(node.body))
-        if node.orelse:
-            whiles = '%s\nelse:\n%s' % (whiles, self._stmt_list(node.orelse))
-        return whiles
-
-    def visit_with(self, node): # 'with' without 'as' is possible
-        """return an astng.With node as string"""
-        as_var = node.vars and " as (%s)" % (node.vars.accept(self)) or ""
-        withs = 'with (%s)%s:\n%s' % (node.expr.accept(self), as_var,
-                                        self._stmt_list( node.body))
-        return withs
-
-    def visit_yield(self, node):
-        """yield an ast.Yield node as string"""
-        yi_val = node.value and (" " + node.value.accept(self)) or ""
-        return 'yield' + yi_val
-
-
-class AsStringVisitor3k(AsStringVisitor):
-    """AsStringVisitor3k overwrites some AsStringVisitor methods"""
-
-    def visit_excepthandler(self, node):
-        if node.type:
-            if node.name:
-                excs = 'except %s as %s' % (node.type.accept(self),
-                                        node.name.accept(self))
-            else:
-                excs = 'except %s' % node.type.accept(self)
-        else:
-            excs = 'except'
-        return '%s:\n%s' % (excs, self._stmt_list(node.body))
-
-    def visit_nonlocal(self, node):
-        """return an astng.Nonlocal node as string"""
-        return 'nonlocal %s' % ', '.join(node.names)
-
-    def visit_raise(self, node):
-        """return an astng.Raise node as string"""
-        if node.exc:
-            if node.cause:
-                return 'raise %s from %s' % (node.exc.accept(self),
-                                             node.cause.accept(self))
-            return 'raise %s' % node.exc.accept(self)
-        return 'raise'
-
-    def visit_starred(self, node):
-        """return Starred node as string"""
-        return "*" + node.value.accept(self)
-
-if sys.version_info >= (3, 0):
-    AsStringVisitor = AsStringVisitor3k
-
-# this visitor is stateless, thus it can be reused
-as_string = AsStringVisitor()
-
diff --git a/third_party/logilab/astng/exceptions.py b/third_party/logilab/astng/exceptions.py
deleted file mode 100644
index 7dd6135..0000000
--- a/third_party/logilab/astng/exceptions.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# This program is free software; you can redistribute it and/or modify it under
-# the terms of the GNU Lesser General Public License as published by the Free Software
-# Foundation; either version 2 of the License, or (at your option) any later
-# version.
-#
-# This program is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License along with
-# this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-# copyright 2003-2010 Sylvain Thenault, all rights reserved.
-# contact mailto:thenault@gmail.com
-#
-# This file is part of logilab-astng.
-#
-# logilab-astng is free software: you can redistribute it and/or modify it
-# under the terms of the GNU Lesser General Public License as published by the
-# Free Software Foundation, either version 2.1 of the License, or (at your
-# option) any later version.
-#
-# logilab-astng is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
-# for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
-"""this module contains exceptions used in the astng library
-
-"""
-
-__doctype__ = "restructuredtext en"
-
-class ASTNGError(Exception):
-    """base exception class for all astng related exceptions"""
-
-class ASTNGBuildingException(ASTNGError):
-    """exception class when we are unable to build an astng representation"""
-
-class ResolveError(ASTNGError):
-    """base class of astng resolution/inference error"""
-
-class NotFoundError(ResolveError):
-    """raised when we are unable to resolve a name"""
-
-class InferenceError(ResolveError):
-    """raised when we are unable to infer a node"""
-
-class UnresolvableName(InferenceError):
-    """raised when we are unable to resolve a name"""
-
-class NoDefault(ASTNGError):
-    """raised by function's `default_value` method when an argument has
-    no default value
-    """
-
diff --git a/third_party/logilab/astng/manager.py b/third_party/logilab/astng/manager.py
deleted file mode 100644
index 8a4f02b..0000000
--- a/third_party/logilab/astng/manager.py
+++ /dev/null
@@ -1,299 +0,0 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-# copyright 2003-2010 Sylvain Thenault, all rights reserved.
-# contact mailto:thenault@gmail.com
-#
-# This file is part of logilab-astng.
-#
-# logilab-astng is free software: you can redistribute it and/or modify it
-# under the terms of the GNU Lesser General Public License as published by the
-# Free Software Foundation, either version 2.1 of the License, or (at your
-# option) any later version.
-#
-# logilab-astng is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
-# for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
-"""astng manager: avoid multiple astng build of a same module when
-possible by providing a class responsible to get astng representation
-from various source and using a cache of built modules)
-"""
-
-__docformat__ = "restructuredtext en"
-
-import sys
-import os
-from os.path import dirname, basename, abspath, join, isdir, exists
-
-from logilab.common.modutils import NoSourceFile, is_python_source, \
-     file_from_modpath, load_module_from_name, modpath_from_file, \
-     get_module_files, get_source_file, zipimport
-from logilab.common.configuration import OptionsProviderMixIn
-
-from logilab.astng.exceptions import ASTNGBuildingException
-
-def astng_wrapper(func, modname):
-    """wrapper to give to ASTNGManager.project_from_files"""
-    print 'parsing %s...' % modname
-    try:
-        return func(modname)
-    except ASTNGBuildingException, exc:
-        print exc
-    except Exception, exc:
-        import traceback
-        traceback.print_exc()
-
-def _silent_no_wrap(func, modname):
-    """silent wrapper that doesn't do anything; can be used for tests"""
-    return func(modname)
-
-def safe_repr(obj):
-    try:
-        return repr(obj)
-    except:
-        return '???'
-
-
-
-class ASTNGManager(OptionsProviderMixIn):
-    """the astng manager, responsible to build astng from files
-     or modules.
-
-    Use the Borg pattern.
-    """
-
-    name = 'astng loader'
-    options = (("ignore",
-                {'type' : "csv", 'metavar' : "<file>",
-                 'dest' : "black_list", "default" : ('CVS',),
-                 'help' : "add <file> (may be a directory) to the black list\
-. It should be a base name, not a path. You may set this option multiple times\
-."}),
-               ("project",
-                {'default': "No Name", 'type' : 'string', 'short': 'p',
-                 'metavar' : '<project name>',
-                 'help' : 'set the project name.'}),
-               )
-    brain = {}
-    def __init__(self):
-        self.__dict__ = ASTNGManager.brain
-        if not self.__dict__:
-            OptionsProviderMixIn.__init__(self)
-            self.load_defaults()
-            # NOTE: cache entries are added by the [re]builder
-            self.astng_cache = {}
-            self._mod_file_cache = {}
-            self.transformers = []
-
-    def astng_from_file(self, filepath, modname=None, fallback=True, source=False):
-        """given a module name, return the astng object"""
-        try:
-            filepath = get_source_file(filepath, include_no_ext=True)
-            source = True
-        except NoSourceFile:
-            pass
-        if modname is None:
-            try:
-                modname = '.'.join(modpath_from_file(filepath))
-            except ImportError:
-                modname = filepath
-        if modname in self.astng_cache:
-            return self.astng_cache[modname]
-        if source:
-            from logilab.astng.builder import ASTNGBuilder
-            return ASTNGBuilder(self).file_build(filepath, modname)
-        elif fallback and modname:
-            return self.astng_from_module_name(modname)
-        raise ASTNGBuildingException('unable to get astng for file %s' %
-                                     filepath)
-
-    def astng_from_module_name(self, modname, context_file=None):
-        """given a module name, return the astng object"""
-        if modname in self.astng_cache:
-            return self.astng_cache[modname]
-        if modname == '__main__':
-            from logilab.astng.builder import ASTNGBuilder
-            return ASTNGBuilder(self).string_build('', modname)
-        old_cwd = os.getcwd()
-        if context_file:
-            os.chdir(dirname(context_file))
-        try:
-            filepath = self.file_from_module_name(modname, context_file)
-            if filepath is not None and not is_python_source(filepath):
-                module = self.zip_import_data(filepath)
-                if module is not None:
-                    return module
-            if filepath is None or not is_python_source(filepath):
-                try:
-                    module = load_module_from_name(modname)
-                except Exception, ex:
-                    msg = 'Unable to load module %s (%s)' % (modname, ex)
-                    raise ASTNGBuildingException(msg)
-                return self.astng_from_module(module, modname)
-            return self.astng_from_file(filepath, modname, fallback=False)
-        finally:
-            os.chdir(old_cwd)
-
-    def zip_import_data(self, filepath):
-        if zipimport is None:
-            return None
-        from logilab.astng.builder import ASTNGBuilder
-        builder = ASTNGBuilder(self)
-        for ext in ('.zip', '.egg'):
-            try:
-                eggpath, resource = filepath.rsplit(ext + '/', 1)
-            except ValueError:
-                continue
-            try:
-                importer = zipimport.zipimporter(eggpath + ext)
-                zmodname = resource.replace('/', '.')
-                if importer.is_package(resource):
-                    zmodname =  zmodname + '.__init__'
-                module = builder.string_build(importer.get_source(resource),
-                                              zmodname, filepath)
-                return module
-            except:
-                continue
-        return None
-
-    def file_from_module_name(self, modname, contextfile):
-        try:
-            value = self._mod_file_cache[(modname, contextfile)]
-        except KeyError:
-            try:
-                value = file_from_modpath(modname.split('.'),
-                                          context_file=contextfile)
-            except ImportError, ex:
-                msg = 'Unable to load module %s (%s)' % (modname, ex)
-                value = ASTNGBuildingException(msg)
-            self._mod_file_cache[(modname, contextfile)] = value
-        if isinstance(value, ASTNGBuildingException):
-            raise value
-        return value
-
-    def astng_from_module(self, module, modname=None):
-        """given an imported module, return the astng object"""
-        modname = modname or module.__name__
-        if modname in self.astng_cache:
-            return self.astng_cache[modname]
-        try:
-            # some builtin modules don't have __file__ attribute
-            filepath = module.__file__
-            if is_python_source(filepath):
-                return self.astng_from_file(filepath, modname)
-        except AttributeError:
-            pass
-        from logilab.astng.builder import ASTNGBuilder
-        return ASTNGBuilder(self).module_build(module, modname)
-
-    def astng_from_class(self, klass, modname=None):
-        """get astng for the given class"""
-        if modname is None:
-            try:
-                modname = klass.__module__
-            except AttributeError:
-                raise ASTNGBuildingException(
-                    'Unable to get module for class %s' % safe_repr(klass))
-        modastng = self.astng_from_module_name(modname)
-        return modastng.getattr(klass.__name__)[0] # XXX
-
-
-    def infer_astng_from_something(self, obj, context=None):
-        """infer astng for the given class"""
-        if hasattr(obj, '__class__') and not isinstance(obj, type):
-            klass = obj.__class__
-        else:
-            klass = obj
-        try:
-            modname = klass.__module__
-        except AttributeError:
-            raise ASTNGBuildingException(
-                'Unable to get module for %s' % safe_repr(klass))
-        except Exception, ex:
-            raise ASTNGBuildingException(
-                'Unexpected error while retrieving module for %s: %s'
-                % (safe_repr(klass), ex))
-        try:
-            name = klass.__name__
-        except AttributeError:
-            raise ASTNGBuildingException(
-                'Unable to get name for %s' % safe_repr(klass))
-        except Exception, ex:
-            raise ASTNGBuildingException(
-                'Unexpected error while retrieving name for %s: %s'
-                % (safe_repr(klass), ex))
-        # take care, on living object __module__ is regularly wrong :(
-        modastng = self.astng_from_module_name(modname)
-        if klass is obj:
-            for  infered in modastng.igetattr(name, context):
-                yield infered
-        else:
-            for infered in modastng.igetattr(name, context):
-                yield infered.instanciate_class()
-
-    def project_from_files(self, files, func_wrapper=astng_wrapper,
-                           project_name=None, black_list=None):
-        """return a Project from a list of files or modules"""
-        # build the project representation
-        project_name = project_name or self.config.project
-        black_list = black_list or self.config.black_list
-        project = Project(project_name)
-        for something in files:
-            if not exists(something):
-                fpath = file_from_modpath(something.split('.'))
-            elif isdir(something):
-                fpath = join(something, '__init__.py')
-            else:
-                fpath = something
-            astng = func_wrapper(self.astng_from_file, fpath)
-            if astng is None:
-                continue
-            # XXX why is first file defining the project.path ?
-            project.path = project.path or astng.file
-            project.add_module(astng)
-            base_name = astng.name
-            # recurse in package except if __init__ was explicitly given
-            if astng.package and something.find('__init__') == -1:
-                # recurse on others packages / modules if this is a package
-                for fpath in get_module_files(dirname(astng.file),
-                                              black_list):
-                    astng = func_wrapper(self.astng_from_file, fpath)
-                    if astng is None or astng.name == base_name:
-                        continue
-                    project.add_module(astng)
-        return project
-
-    def register_transformer(self, transformer):
-        self.transformers.append(transformer)
-
-class Project:
-    """a project handle a set of modules / packages"""
-    def __init__(self, name=''):
-        self.name = name
-        self.path = None
-        self.modules = []
-        self.locals = {}
-        self.__getitem__ = self.locals.__getitem__
-        self.__iter__ = self.locals.__iter__
-        self.values = self.locals.values
-        self.keys = self.locals.keys
-        self.items = self.locals.items
-
-    def add_module(self, node):
-        self.locals[node.name] = node
-        self.modules.append(node)
-
-    def get_module(self, name):
-        return self.locals[name]
-
-    def get_children(self):
-        return self.modules
-
-    def __repr__(self):
-        return '<Project %r at %s (%s modules)>' % (self.name, id(self),
-                                                    len(self.modules))
-
-
diff --git a/third_party/logilab/astng/scoped_nodes.py b/third_party/logilab/astng/scoped_nodes.py
deleted file mode 100644
index 52b9746..0000000
--- a/third_party/logilab/astng/scoped_nodes.py
+++ /dev/null
@@ -1,977 +0,0 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-# copyright 2003-2010 Sylvain Thenault, all rights reserved.
-# contact mailto:thenault@gmail.com
-#
-# This file is part of logilab-astng.
-#
-# logilab-astng is free software: you can redistribute it and/or modify it
-# under the terms of the GNU Lesser General Public License as published by the
-# Free Software Foundation, either version 2.1 of the License, or (at your
-# option) any later version.
-#
-# logilab-astng is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
-# for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
-"""This module contains the classes for "scoped" node, i.e. which are opening a
-new local scope in the language definition : Module, Class, Function (and
-Lambda, GenExpr, DictComp and SetComp to some extent).
-"""
-from __future__ import with_statement
-
-__doctype__ = "restructuredtext en"
-
-import sys
-from itertools import chain
-
-from logilab.common.compat import builtins
-from logilab.common.decorators import cached
-
-from logilab.astng import BUILTINS_MODULE
-from logilab.astng.exceptions import NotFoundError, NoDefault, \
-     ASTNGBuildingException, InferenceError
-from logilab.astng.node_classes import Const, DelName, DelAttr, \
-     Dict, From, List, Name, Pass, Raise, Return, Tuple, Yield, \
-     are_exclusive, LookupMixIn, const_factory as cf, unpack_infer
-from logilab.astng.bases import NodeNG, InferenceContext, Instance,\
-     YES, Generator, UnboundMethod, BoundMethod, _infer_stmts, copy_context, \
-     BUILTINS_NAME
-from logilab.astng.mixins import FilterStmtsMixin
-from logilab.astng.bases import Statement
-from logilab.astng.manager import ASTNGManager
-
-
-def remove_nodes(func, cls):
-    def wrapper(*args, **kwargs):
-        nodes = [n for n in func(*args, **kwargs) if not isinstance(n, cls)]
-        if not nodes:
-            raise NotFoundError()
-        return nodes
-    return wrapper
-
-
-def function_to_method(n, klass):
-    if isinstance(n, Function):
-        if n.type == 'classmethod':
-            return BoundMethod(n, klass)
-        if n.type != 'staticmethod':
-            return UnboundMethod(n)
-    return n
-
-def std_special_attributes(self, name, add_locals=True):
-    if add_locals:
-        locals = self.locals
-    else:
-        locals = {}
-    if name == '__name__':
-        return [cf(self.name)] + locals.get(name, [])
-    if name == '__doc__':
-        return [cf(self.doc)] + locals.get(name, [])
-    if name == '__dict__':
-        return [Dict()] + locals.get(name, [])
-    raise NotFoundError(name)
-
-MANAGER = ASTNGManager()
-def builtin_lookup(name):
-    """lookup a name into the builtin module
-    return the list of matching statements and the astng for the builtin
-    module
-    """
-    builtin_astng = MANAGER.astng_from_module(builtins)
-    if name == '__dict__':
-        return builtin_astng, ()
-    try:
-        stmts = builtin_astng.locals[name]
-    except KeyError:
-        stmts = ()
-    return builtin_astng, stmts
-
-
-# TODO move this Mixin to mixins.py; problem: 'Function' in _scope_lookup
-class LocalsDictNodeNG(LookupMixIn, NodeNG):
-    """ this class provides locals handling common to Module, Function
-    and Class nodes, including a dict like interface for direct access
-    to locals information
-    """
-
-    # attributes below are set by the builder module or by raw factories
-
-    # dictionary of locals with name as key and node defining the local as
-    # value
-
-    def qname(self):
-        """return the 'qualified' name of the node, eg module.name,
-        module.class.name ...
-        """
-        if self.parent is None:
-            return self.name
-        return '%s.%s' % (self.parent.frame().qname(), self.name)
-
-    def frame(self):
-        """return the first parent frame node (i.e. Module, Function or Class)
-        """
-        return self
-
-    def scope(self):
-        """return the first node defining a new scope (i.e. Module,
-        Function, Class, Lambda but also GenExpr, DictComp and SetComp)
-        """
-        return self
-
-
-    def _scope_lookup(self, node, name, offset=0):
-        """XXX method for interfacing the scope lookup"""
-        try:
-            stmts = node._filter_stmts(self.locals[name], self, offset)
-        except KeyError:
-            stmts = ()
-        if stmts:
-            return self, stmts
-        if self.parent: # i.e. not Module
-            # nested scope: if parent scope is a function, that's fine
-            # else jump to the module
-            pscope = self.parent.scope()
-            if not pscope.is_function:
-                pscope = pscope.root()
-            return pscope.scope_lookup(node, name)
-        return builtin_lookup(name) # Module
-
-
-
-    def set_local(self, name, stmt):
-        """define <name> in locals (<stmt> is the node defining the name)
-        if the node is a Module node (i.e. has globals), add the name to
-        globals
-
-        if the name is already defined, ignore it
-        """
-        #assert not stmt in self.locals.get(name, ()), (self, stmt)
-        self.locals.setdefault(name, []).append(stmt)
-
-    __setitem__ = set_local
-
-    def _append_node(self, child):
-        """append a child, linking it in the tree"""
-        self.body.append(child)
-        child.parent = self
-
-    def add_local_node(self, child_node, name=None):
-        """append a child which should alter locals to the given node"""
-        if name != '__class__':
-            # add __class__ node as a child will cause infinite recursion later!
-            self._append_node(child_node)
-        self.set_local(name or child_node.name, child_node)
-
-
-    def __getitem__(self, item):
-        """method from the `dict` interface returning the first node
-        associated with the given name in the locals dictionary
-
-        :type item: str
-        :param item: the name of the locally defined object
-        :raises KeyError: if the name is not defined
-        """
-        return self.locals[item][0]
-
-    def __iter__(self):
-        """method from the `dict` interface returning an iterator on
-        `self.keys()`
-        """
-        return iter(self.keys())
-
-    def keys(self):
-        """method from the `dict` interface returning a tuple containing
-        locally defined names
-        """
-        return self.locals.keys()
-
-    def values(self):
-        """method from the `dict` interface returning a tuple containing
-        locally defined nodes which are instance of `Function` or `Class`
-        """
-        return [self[key] for key in self.keys()]
-
-    def items(self):
-        """method from the `dict` interface returning a list of tuple
-        containing each locally defined name with its associated node,
-        which is an instance of `Function` or `Class`
-        """
-        return zip(self.keys(), self.values())
-
-
-    def __contains__(self, name):
-        return name in self.locals
-    has_key = __contains__
-
-# Module  #####################################################################
-
-class Module(LocalsDictNodeNG):
-    _astng_fields = ('body',)
-
-    fromlineno = 0
-    lineno = 0
-
-    # attributes below are set by the builder module or by raw factories
-
-    # the file from which as been extracted the astng representation. It may
-    # be None if the representation has been built from a built-in module
-    file = None
-    # the module name
-    name = None
-    # boolean for astng built from source (i.e. ast)
-    pure_python = None
-    # boolean for package module
-    package = None
-    # dictionary of globals with name as key and node defining the global
-    # as value
-    globals = None
-
-    # names of python special attributes (handled by getattr impl.)
-    special_attributes = set(('__name__', '__doc__', '__file__', '__path__',
-                              '__dict__'))
-    # names of module attributes available through the global scope
-    scope_attrs = set(('__name__', '__doc__', '__file__', '__path__'))
-
-    def __init__(self, name, doc, pure_python=True):
-        self.name = name
-        self.doc = doc
-        self.pure_python = pure_python
-        self.locals = self.globals = {}
-        self.body = []
-
-    @property
-    def file_stream(self):
-        if self.file is not None:
-            return file(self.file)
-        return None
-
-    def block_range(self, lineno):
-        """return block line numbers.
-
-        start from the beginning whatever the given lineno
-        """
-        return self.fromlineno, self.tolineno
-
-    def scope_lookup(self, node, name, offset=0):
-        if name in self.scope_attrs and not name in self.locals:
-            try:
-                return self, self.getattr(name)
-            except NotFoundError:
-                return self, ()
-        return self._scope_lookup(node, name, offset)
-
-    def pytype(self):
-        return '%s.module' % BUILTINS_MODULE
-
-    def display_type(self):
-        return 'Module'
-
-    def getattr(self, name, context=None, ignore_locals=False):
-        if name in self.special_attributes:
-            if name == '__file__':
-                return [cf(self.file)] + self.locals.get(name, [])
-            if name == '__path__' and self.package:
-                return [List()] + self.locals.get(name, [])
-            return std_special_attributes(self, name)
-        if not ignore_locals and name in self.locals:
-            return self.locals[name]
-        if self.package:
-            try:
-                return [self.import_module(name, relative_only=True)]
-            except ASTNGBuildingException:
-                raise NotFoundError(name)
-            except Exception:# XXX pylint tests never pass here; do we need it?
-                import traceback
-                traceback.print_exc()
-        raise NotFoundError(name)
-    getattr = remove_nodes(getattr, DelName)
-
-    def igetattr(self, name, context=None):
-        """inferred getattr"""
-        # set lookup name since this is necessary to infer on import nodes for
-        # instance
-        context = copy_context(context)
-        context.lookupname = name
-        try:
-            return _infer_stmts(self.getattr(name, context), context, frame=self)
-        except NotFoundError:
-            raise InferenceError(name)
-
-    def fully_defined(self):
-        """return True if this module has been built from a .py file
-        and so contains a complete representation including the code
-        """
-        return self.file is not None and self.file.endswith('.py')
-
-    def statement(self):
-        """return the first parent node marked as statement node
-        consider a module as a statement...
-        """
-        return self
-
-    def previous_sibling(self):
-        """module has no sibling"""
-        return
-
-    def next_sibling(self):
-        """module has no sibling"""
-        return
-
-    if sys.version_info < (2, 8):
-        def absolute_import_activated(self):
-            for stmt in self.locals.get('absolute_import', ()):
-                if isinstance(stmt, From) and stmt.modname == '__future__':
-                    return True
-            return False
-    else:
-        absolute_import_activated = lambda self: True
-
-    def import_module(self, modname, relative_only=False, level=None):
-        """import the given module considering self as context"""
-        if relative_only and level is None:
-            level = 0
-        absmodname = self.relative_to_absolute_name(modname, level)
-        try:
-            return MANAGER.astng_from_module_name(absmodname)
-        except ASTNGBuildingException:
-            # we only want to import a sub module or package of this module,
-            # skip here
-            if relative_only:
-                raise
-        return MANAGER.astng_from_module_name(modname)
-
-    def relative_to_absolute_name(self, modname, level):
-        """return the absolute module name for a relative import.
-
-        The relative import can be implicit or explicit.
-        """
-        # XXX this returns non sens when called on an absolute import
-        # like 'pylint.checkers.logilab.astng.utils'
-        # XXX doesn't return absolute name if self.name isn't absolute name
-        if self.absolute_import_activated() and level is None:
-            return modname
-        if level:
-            if self.package:
-                level = level - 1
-            package_name = self.name.rsplit('.', level)[0]
-        elif self.package:
-            package_name = self.name
-        else:
-            package_name = self.name.rsplit('.', 1)[0]
-        if package_name:
-            if not modname:
-                return package_name
-            return '%s.%s' % (package_name, modname)
-        return modname
-
-
-    def wildcard_import_names(self):
-        """return the list of imported names when this module is 'wildcard
-        imported'
-
-        It doesn't include the '__builtins__' name which is added by the
-        current CPython implementation of wildcard imports.
-        """
-        # take advantage of a living module if it exists
-        try:
-            living = sys.modules[self.name]
-        except KeyError:
-            pass
-        else:
-            try:
-                return living.__all__
-            except AttributeError:
-                return [name for name in living.__dict__.keys()
-                        if not name.startswith('_')]
-        # else lookup the astng
-        #
-        # We separate the different steps of lookup in try/excepts
-        # to avoid catching too many Exceptions
-        # However, we can not analyse dynamically constructed __all__
-        try:
-            all = self['__all__']
-        except KeyError:
-            return [name for name in self.keys() if not name.startswith('_')]
-        try:
-            explicit = all.assigned_stmts().next()
-        except InferenceError:
-            return [name for name in self.keys() if not name.startswith('_')]
-        except AttributeError:
-            # not an assignment node
-            # XXX infer?
-            return [name for name in self.keys() if not name.startswith('_')]
-        try:
-            # should be a Tuple/List of constant string / 1 string not allowed
-            return [const.value for const in explicit.elts]
-        except AttributeError:
-            return [name for name in self.keys() if not name.startswith('_')]
-
-
-class ComprehensionScope(LocalsDictNodeNG):
-    def frame(self):
-        return self.parent.frame()
-
-    scope_lookup = LocalsDictNodeNG._scope_lookup
-
-
-class GenExpr(ComprehensionScope):
-    _astng_fields = ('elt', 'generators')
-
-    def __init__(self):
-        self.locals = {}
-        self.elt = None
-        self.generators = []
-
-
-class DictComp(ComprehensionScope):
-    _astng_fields = ('key', 'value', 'generators')
-
-    def __init__(self):
-        self.locals = {}
-        self.key = None
-        self.value = None
-        self.generators = []
-
-
-class SetComp(ComprehensionScope):
-    _astng_fields = ('elt', 'generators')
-
-    def __init__(self):
-        self.locals = {}
-        self.elt = None
-        self.generators = []
-
-
-class _ListComp(NodeNG):
-    """class representing a ListComp node"""
-    _astng_fields = ('elt', 'generators')
-    elt = None
-    generators = None
-
-if sys.version_info >= (3, 0):
-    class ListComp(_ListComp, ComprehensionScope):
-        """class representing a ListComp node"""
-        def __init__(self):
-            self.locals = {}
-else:
-    class ListComp(_ListComp):
-        """class representing a ListComp node"""
-
-# Function  ###################################################################
-
-
-class Lambda(LocalsDictNodeNG, FilterStmtsMixin):
-    _astng_fields = ('args', 'body',)
-    name = '<lambda>'
-
-    # function's type, 'function' | 'method' | 'staticmethod' | 'classmethod'
-    type = 'function'
-
-    def __init__(self):
-        self.locals = {}
-        self.args = []
-        self.body = []
-
-    def pytype(self):
-        if 'method' in self.type:
-            return '%s.instancemethod' % BUILTINS_MODULE
-        return '%s.function' % BUILTINS_MODULE
-
-    def display_type(self):
-        if 'method' in self.type:
-            return 'Method'
-        return 'Function'
-
-    def callable(self):
-        return True
-
-    def argnames(self):
-        """return a list of argument names"""
-        if self.args.args: # maybe None with builtin functions
-            names = _rec_get_names(self.args.args)
-        else:
-            names = []
-        if self.args.vararg:
-            names.append(self.args.vararg)
-        if self.args.kwarg:
-            names.append(self.args.kwarg)
-        return names
-
-    def infer_call_result(self, caller, context=None):
-        """infer what a function is returning when called"""
-        return self.body.infer(context)
-
-    def scope_lookup(self, node, name, offset=0):
-        if node in self.args.defaults:
-            frame = self.parent.frame()
-            # line offset to avoid that def func(f=func) resolve the default
-            # value to the defined function
-            offset = -1
-        else:
-            # check this is not used in function decorators
-            frame = self
-        return frame._scope_lookup(node, name, offset)
-
-
-class Function(Statement, Lambda):
-    _astng_fields = ('decorators', 'args', 'body')
-
-    special_attributes = set(('__name__', '__doc__', '__dict__'))
-    is_function = True
-    # attributes below are set by the builder module or by raw factories
-    blockstart_tolineno = None
-    decorators = None
-
-    def __init__(self, name, doc):
-        self.locals = {}
-        self.args = []
-        self.body = []
-        self.decorators = None
-        self.name = name
-        self.doc = doc
-        self.extra_decorators = []
-        self.instance_attrs = {}
-
-    def set_line_info(self, lastchild):
-        self.fromlineno = self.lineno
-        # lineno is the line number of the first decorator, we want the def statement lineno
-        if self.decorators is not None:
-            self.fromlineno += len(self.decorators.nodes)
-        self.tolineno = lastchild.tolineno
-        self.blockstart_tolineno = self.args.tolineno
-
-    def block_range(self, lineno):
-        """return block line numbers.
-
-        start from the "def" position whatever the given lineno
-        """
-        return self.fromlineno, self.tolineno
-
-    def getattr(self, name, context=None):
-        """this method doesn't look in the instance_attrs dictionary since it's
-        done by an Instance proxy at inference time.
-        """
-        if name == '__module__':
-            return [cf(self.root().qname())]
-        if name in self.instance_attrs:
-            return self.instance_attrs[name]
-        return std_special_attributes(self, name, False)
-
-    def is_method(self):
-        """return true if the function node should be considered as a method"""
-        # check we are defined in a Class, because this is usually expected
-        # (e.g. pylint...) when is_method() return True
-        return self.type != 'function' and isinstance(self.parent.frame(), Class)
-
-    def decoratornames(self):
-        """return a list of decorator qualified names"""
-        result = set()
-        decoratornodes = []
-        if self.decorators is not None:
-            decoratornodes += self.decorators.nodes
-        decoratornodes += self.extra_decorators
-        for decnode in decoratornodes:
-            for infnode in decnode.infer():
-                result.add(infnode.qname())
-        return result
-    decoratornames = cached(decoratornames)
-
-    def is_bound(self):
-        """return true if the function is bound to an Instance or a class"""
-        return self.type == 'classmethod'
-
-    def is_abstract(self, pass_is_abstract=True):
-        """return true if the method is abstract
-        It's considered as abstract if the only statement is a raise of
-        NotImplementError, or, if pass_is_abstract, a pass statement
-        """
-        for child_node in self.body:
-            if isinstance(child_node, Raise):
-                if child_node.raises_not_implemented():
-                    return True
-            if pass_is_abstract and isinstance(child_node, Pass):
-                return True
-            return False
-        # empty function is the same as function with a single "pass" statement
-        if pass_is_abstract:
-            return True
-
-    def is_generator(self):
-        """return true if this is a generator function"""
-        # XXX should be flagged, not computed
-        try:
-            return self.nodes_of_class(Yield, skip_klass=Function).next()
-        except StopIteration:
-            return False
-
-    def infer_call_result(self, caller, context=None):
-        """infer what a function is returning when called"""
-        if self.is_generator():
-            yield Generator(self)
-            return
-        returns = self.nodes_of_class(Return, skip_klass=Function)
-        for returnnode in returns:
-            if returnnode.value is None:
-                yield Const(None)
-            else:
-                try:
-                    for infered in returnnode.value.infer(context):
-                        yield infered
-                except InferenceError:
-                    yield YES
-
-
-def _rec_get_names(args, names=None):
-    """return a list of all argument names"""
-    if names is None:
-        names = []
-    for arg in args:
-        if isinstance(arg, Tuple):
-            _rec_get_names(arg.elts, names)
-        else:
-            names.append(arg.name)
-    return names
-
-
-# Class ######################################################################
-
-def _class_type(klass, ancestors=None):
-    """return a Class node type to differ metaclass, interface and exception
-    from 'regular' classes
-    """
-    # XXX we have to store ancestors in case we have a ancestor loop
-    if klass._type is not None:
-        return klass._type
-    if klass.name == 'type':
-        klass._type = 'metaclass'
-    elif klass.name.endswith('Interface'):
-        klass._type = 'interface'
-    elif klass.name.endswith('Exception'):
-        klass._type = 'exception'
-    else:
-        if ancestors is None:
-            ancestors = set()
-        if klass in ancestors:
-            # XXX we are in loop ancestors, and have found no type
-            klass._type = 'class'
-            return 'class'
-        ancestors.add(klass)
-        # print >> sys.stderr, '_class_type', repr(klass)
-        for base in klass.ancestors(recurs=False):
-            if _class_type(base, ancestors) != 'class':
-                klass._type = base.type
-                break
-    if klass._type is None:
-        klass._type = 'class'
-    return klass._type
-
-def _iface_hdlr(iface_node):
-    """a handler function used by interfaces to handle suspicious
-    interface nodes
-    """
-    return True
-
-
-class Class(Statement, LocalsDictNodeNG, FilterStmtsMixin):
-
-    # some of the attributes below are set by the builder module or
-    # by a raw factories
-
-    # a dictionary of class instances attributes
-    _astng_fields = ('decorators', 'bases', 'body') # name
-
-    decorators = None
-    special_attributes = set(('__name__', '__doc__', '__dict__', '__module__',
-                              '__bases__', '__mro__', '__subclasses__'))
-    blockstart_tolineno = None
-
-    _type = None
-    type = property(_class_type,
-                    doc="class'type, possible values are 'class' | "
-                    "'metaclass' | 'interface' | 'exception'")
-
-    def __init__(self, name, doc):
-        self.instance_attrs = {}
-        self.locals = {}
-        self.bases = []
-        self.body = []
-        self.name = name
-        self.doc = doc
-
-    def _newstyle_impl(self, context=None):
-        if context is None:
-            context = InferenceContext()
-        if self._newstyle is not None:
-            return self._newstyle
-        for base in self.ancestors(recurs=False, context=context):
-            if base._newstyle_impl(context):
-                self._newstyle = True
-                break
-        if self._newstyle is None:
-            self._newstyle = False
-        return self._newstyle
-
-    _newstyle = None
-    newstyle = property(_newstyle_impl,
-                        doc="boolean indicating if it's a new style class"
-                        "or not")
-
-    def set_line_info(self, lastchild):
-        self.fromlineno = self.lineno
-        self.blockstart_tolineno = self.bases and self.bases[-1].tolineno or self.fromlineno
-        if lastchild is not None:
-            self.tolineno = lastchild.tolineno
-        # else this is a class with only a docstring, then tolineno is (should be) already ok
-
-    def block_range(self, lineno):
-        """return block line numbers.
-
-        start from the "class" position whatever the given lineno
-        """
-        return self.fromlineno, self.tolineno
-
-    def pytype(self):
-        if self.newstyle:
-            return '%s.type' % BUILTINS_MODULE
-        return '%s.classobj' % BUILTINS_MODULE
-
-    def display_type(self):
-        return 'Class'
-
-    def callable(self):
-        return True
-
-    def infer_call_result(self, caller, context=None):
-        """infer what a class is returning when called"""
-        yield Instance(self)
-
-    def scope_lookup(self, node, name, offset=0):
-        if node in self.bases:
-            frame = self.parent.frame()
-            # line offset to avoid that class A(A) resolve the ancestor to
-            # the defined class
-            offset = -1
-        else:
-            frame = self
-        return frame._scope_lookup(node, name, offset)
-
-    # list of parent class as a list of string (i.e. names as they appear
-    # in the class definition) XXX bw compat
-    def basenames(self):
-        return [bnode.as_string() for bnode in self.bases]
-    basenames = property(basenames)
-
-    def ancestors(self, recurs=True, context=None):
-        """return an iterator on the node base classes in a prefixed
-        depth first order
-
-        :param recurs:
-          boolean indicating if it should recurse or return direct
-          ancestors only
-        """
-        # FIXME: should be possible to choose the resolution order
-        # XXX inference make infinite loops possible here (see BaseTransformer
-        # manipulation in the builder module for instance)
-        yielded = set([self])
-        if context is None:
-            context = InferenceContext()
-        for stmt in self.bases:
-            with context.restore_path():
-                try:
-                    for baseobj in stmt.infer(context):
-                        if not isinstance(baseobj, Class):
-                            # duh ?
-                            continue
-                        if baseobj in yielded:
-                            continue # cf xxx above
-                        yielded.add(baseobj)
-                        yield baseobj
-                        if recurs:
-                            for grandpa in baseobj.ancestors(True, context):
-                                if grandpa in yielded:
-                                    continue # cf xxx above
-                                yielded.add(grandpa)
-                                yield grandpa
-                except InferenceError:
-                    # XXX log error ?
-                    continue
-
-    def local_attr_ancestors(self, name, context=None):
-        """return an iterator on astng representation of parent classes
-        which have <name> defined in their locals
-        """
-        for astng in self.ancestors(context=context):
-            if name in astng:
-                yield astng
-
-    def instance_attr_ancestors(self, name, context=None):
-        """return an iterator on astng representation of parent classes
-        which have <name> defined in their instance attribute dictionary
-        """
-        for astng in self.ancestors(context=context):
-            if name in astng.instance_attrs:
-                yield astng
-
-    def has_base(self, node):
-        return node in self.bases
-
-    def local_attr(self, name, context=None):
-        """return the list of assign node associated to name in this class
-        locals or in its parents
-
-        :raises `NotFoundError`:
-          if no attribute with this name has been find in this class or
-          its parent classes
-        """
-        try:
-            return self.locals[name]
-        except KeyError:
-            # get if from the first parent implementing it if any
-            for class_node in self.local_attr_ancestors(name, context):
-                return class_node.locals[name]
-        raise NotFoundError(name)
-    local_attr = remove_nodes(local_attr, DelAttr)
-
-    def instance_attr(self, name, context=None):
-        """return the astng nodes associated to name in this class instance
-        attributes dictionary and in its parents
-
-        :raises `NotFoundError`:
-          if no attribute with this name has been find in this class or
-          its parent classes
-        """
-        values = self.instance_attrs.get(name, [])
-        # get all values from parents
-        for class_node in self.instance_attr_ancestors(name, context):
-            values += class_node.instance_attrs[name]
-        if not values:
-            raise NotFoundError(name)
-        return values
-    instance_attr = remove_nodes(instance_attr, DelAttr)
-
-    def instanciate_class(self):
-        """return Instance of Class node, else return self"""
-        return Instance(self)
-
-    def getattr(self, name, context=None):
-        """this method doesn't look in the instance_attrs dictionary since it's
-        done by an Instance proxy at inference time.
-
-        It may return a YES object if the attribute has not been actually
-        found but a __getattr__ or __getattribute__ method is defined
-        """
-        values = self.locals.get(name, [])
-        if name in self.special_attributes:
-            if name == '__module__':
-                return [cf(self.root().qname())] + values
-            # FIXME : what is expected by passing the list of ancestors to cf:
-            # you can just do [cf(tuple())] + values without breaking any test
-            # this is ticket http://www.logilab.org/ticket/52785
-            if name == '__bases__':
-                return [cf(tuple(self.ancestors(recurs=False, context=context)))] + values
-            # XXX need proper meta class handling + MRO implementation
-            if name == '__mro__' and self.newstyle:
-                # XXX mro is read-only but that's not our job to detect that
-                return [cf(tuple(self.ancestors(recurs=True, context=context)))] + values
-            return std_special_attributes(self, name)
-        # don't modify the list in self.locals!
-        values = list(values)
-        for classnode in self.ancestors(recurs=True, context=context):
-            values += classnode.locals.get(name, [])
-        if not values:
-            raise NotFoundError(name)
-        return values
-
-    def igetattr(self, name, context=None):
-        """inferred getattr, need special treatment in class to handle
-        descriptors
-        """
-        # set lookup name since this is necessary to infer on import nodes for
-        # instance
-        context = copy_context(context)
-        context.lookupname = name
-        try:
-            for infered in _infer_stmts(self.getattr(name, context), context,
-                                        frame=self):
-                # yield YES object instead of descriptors when necessary
-                if not isinstance(infered, Const) and isinstance(infered, Instance):
-                    try:
-                        infered._proxied.getattr('__get__', context)
-                    except NotFoundError:
-                        yield infered
-                    else:
-                        yield YES
-                else:
-                    yield function_to_method(infered, self)
-        except NotFoundError:
-            if not name.startswith('__') and self.has_dynamic_getattr(context):
-                # class handle some dynamic attributes, return a YES object
-                yield YES
-            else:
-                raise InferenceError(name)
-
-    def has_dynamic_getattr(self, context=None):
-        """return True if the class has a custom __getattr__ or
-        __getattribute__ method
-        """
-        # need to explicitly handle optparse.Values (setattr is not detected)
-        if self.name == 'Values' and self.root().name == 'optparse':
-            return True
-        try:
-            self.getattr('__getattr__', context)
-            return True
-        except NotFoundError:
-            #if self.newstyle: XXX cause an infinite recursion error
-            try:
-                getattribute = self.getattr('__getattribute__', context)[0]
-                if getattribute.root().name != BUILTINS_NAME:
-                    # class has a custom __getattribute__ defined
-                    return True
-            except NotFoundError:
-                pass
-        return False
-
-    def methods(self):
-        """return an iterator on all methods defined in the class and
-        its ancestors
-        """
-        done = {}
-        for astng in chain(iter((self,)), self.ancestors()):
-            for meth in astng.mymethods():
-                if meth.name in done:
-                    continue
-                done[meth.name] = None
-                yield meth
-
-    def mymethods(self):
-        """return an iterator on all methods defined in the class"""
-        for member in self.values():
-            if isinstance(member, Function):
-                yield member
-
-    def interfaces(self, herited=True, handler_func=_iface_hdlr):
-        """return an iterator on interfaces implemented by the given
-        class node
-        """
-        # FIXME: what if __implements__ = (MyIFace, MyParent.__implements__)...
-        try:
-            implements = Instance(self).getattr('__implements__')[0]
-        except NotFoundError:
-            return
-        if not herited and not implements.frame() is self:
-            return
-        found = set()
-        missing = False
-        for iface in unpack_infer(implements):
-            if iface is YES:
-                missing = True
-                continue
-            if not iface in found and handler_func(iface):
-                found.add(iface)
-                yield iface
-        if missing:
-            raise InferenceError()
diff --git a/third_party/logilab/astng/LICENSE.txt b/third_party/logilab/astroid/LICENSE.txt
similarity index 100%
rename from third_party/logilab/astng/LICENSE.txt
rename to third_party/logilab/astroid/LICENSE.txt
diff --git a/third_party/logilab/astng/README.chromium b/third_party/logilab/astroid/README.chromium
similarity index 92%
rename from third_party/logilab/astng/README.chromium
rename to third_party/logilab/astroid/README.chromium
index b78f091..01387b8 100644
--- a/third_party/logilab/astng/README.chromium
+++ b/third_party/logilab/astroid/README.chromium
@@ -1,5 +1,5 @@
 URL: http://www.logilab.org/project/logilab-astng
-Version: 0.23.1
+Version: 1.3.4
 License: GPL
 License File: LICENSE.txt
 
diff --git a/third_party/logilab/astroid/__init__.py b/third_party/logilab/astroid/__init__.py
new file mode 100644
index 0000000..d4fd12c
--- /dev/null
+++ b/third_party/logilab/astroid/__init__.py
@@ -0,0 +1,131 @@
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of astroid.
+#
+# astroid is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 2.1 of the License, or (at your
+# option) any later version.
+#
+# astroid is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
+# for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with astroid. If not, see <http://www.gnu.org/licenses/>.
+"""Python Abstract Syntax Tree New Generation
+
+The aim of this module is to provide a common base representation of
+python source code for projects such as pychecker, pyreverse,
+pylint... Well, actually the development of this library is essentially
+governed by pylint's needs.
+
+It extends class defined in the python's _ast module with some
+additional methods and attributes. Instance attributes are added by a
+builder object, which can either generate extended ast (let's call
+them astroid ;) by visiting an existent ast tree or by inspecting living
+object. Methods are added by monkey patching ast classes.
+
+Main modules are:
+
+* nodes and scoped_nodes for more information about methods and
+  attributes added to different node classes
+
+* the manager contains a high level object to get astroid trees from
+  source files and living objects. It maintains a cache of previously
+  constructed tree for quick access
+
+* builder contains the class responsible to build astroid trees
+"""
+__doctype__ = "restructuredtext en"
+
+import sys
+import re
+from operator import attrgetter
+
+# WARNING: internal imports order matters !
+
+# make all exception classes accessible from astroid package
+from astroid.exceptions import *
+
+# make all node classes accessible from astroid package
+from astroid.nodes import *
+
+# trigger extra monkey-patching
+from astroid import inference
+
+# more stuff available
+from astroid import raw_building
+from astroid.bases import YES, Instance, BoundMethod, UnboundMethod
+from astroid.node_classes import are_exclusive, unpack_infer
+from astroid.scoped_nodes import builtin_lookup
+
+# make a manager instance (borg) as well as Project and Package classes
+# accessible from astroid package
+from astroid.manager import AstroidManager, Project
+MANAGER = AstroidManager()
+del AstroidManager
+
+# transform utilities (filters and decorator)
+
+class AsStringRegexpPredicate(object):
+    """Class to be used as predicate that may be given to `register_transform`
+
+    First argument is a regular expression that will be searched against the `as_string`
+    representation of the node onto which it's applied.
+
+    If specified, the second argument is an `attrgetter` expression that will be
+    applied on the node first to get the actual node on which `as_string` should
+    be called.
+
+    WARNING: This can be fairly slow, as it has to convert every AST node back
+    to Python code; you should consider examining the AST directly instead.
+    """
+    def __init__(self, regexp, expression=None):
+        self.regexp = re.compile(regexp)
+        self.expression = expression
+
+    def __call__(self, node):
+        if self.expression is not None:
+            node = attrgetter(self.expression)(node)
+        return self.regexp.search(node.as_string())
+
+def inference_tip(infer_function):
+    """Given an instance specific inference function, return a function to be
+    given to MANAGER.register_transform to set this inference function.
+
+    Typical usage
+
+    .. sourcecode:: python
+
+       MANAGER.register_transform(CallFunc, inference_tip(infer_named_tuple),
+                                  predicate)
+    """
+    def transform(node, infer_function=infer_function):
+        node._explicit_inference = infer_function
+        return node
+    return transform
+
+
+def register_module_extender(manager, module_name, get_extension_mod):
+    def transform(node):
+        extension_module = get_extension_mod()
+        for name, obj in extension_module.locals.items():
+            node.locals[name] = obj
+
+    manager.register_transform(Module, transform, lambda n: n.name == module_name)
+
+
+# load brain plugins
+from os import listdir
+from os.path import join, dirname
+BRAIN_MODULES_DIR = join(dirname(__file__), 'brain')
+if BRAIN_MODULES_DIR not in sys.path:
+    # add it to the end of the list so user path take precedence
+    sys.path.append(BRAIN_MODULES_DIR)
+# load modules in this directory
+for module in listdir(BRAIN_MODULES_DIR):
+    if module.endswith('.py'):
+        __import__(module[:-3])
diff --git a/third_party/logilab/astroid/__pkginfo__.py b/third_party/logilab/astroid/__pkginfo__.py
new file mode 100644
index 0000000..0c92414
--- /dev/null
+++ b/third_party/logilab/astroid/__pkginfo__.py
@@ -0,0 +1,42 @@
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of astroid.
+#
+# astroid is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 2.1 of the License, or (at your
+# option) any later version.
+#
+# astroid is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
+# for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with astroid. If not, see <http://www.gnu.org/licenses/>.
+"""astroid packaging information"""
+distname = 'astroid'
+
+modname = 'astroid'
+
+numversion = (1, 3, 4)
+version = '.'.join([str(num) for num in numversion])
+
+install_requires = ['logilab-common >= 0.60.0', 'six']
+
+license = 'LGPL'
+
+author = 'Logilab'
+author_email = 'pylint-dev@lists.logilab.org'
+mailinglist = "mailto://%s" % author_email
+web = 'http://bitbucket.org/logilab/astroid'
+
+description = "A abstract syntax tree for Python with inference support."
+
+classifiers = ["Topic :: Software Development :: Libraries :: Python Modules",
+               "Topic :: Software Development :: Quality Assurance",
+               "Programming Language :: Python",
+               "Programming Language :: Python :: 2",
+               "Programming Language :: Python :: 3",
+              ]
diff --git a/third_party/logilab/astroid/as_string.py b/third_party/logilab/astroid/as_string.py
new file mode 100644
index 0000000..f627f9e
--- /dev/null
+++ b/third_party/logilab/astroid/as_string.py
@@ -0,0 +1,499 @@
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of astroid.
+#
+# astroid is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 2.1 of the License, or (at your
+# option) any later version.
+#
+# astroid is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
+# for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with astroid. If not, see <http://www.gnu.org/licenses/>.
+"""This module renders Astroid nodes as string:
+
+* :func:`to_code` function return equivalent (hopefuly valid) python string
+
+* :func:`dump` function return an internal representation of nodes found
+  in the tree, useful for debugging or understanding the tree structure
+"""
+
+import sys
+
+INDENT = '    ' # 4 spaces ; keep indentation variable
+
+
+def dump(node, ids=False):
+    """print a nice astroid tree representation.
+
+    :param ids: if true, we also print the ids (usefull for debugging)
+    """
+    result = []
+    _repr_tree(node, result, ids=ids)
+    return "\n".join(result)
+
+def _repr_tree(node, result, indent='', _done=None, ids=False):
+    """built a tree representation of a node as a list of lines"""
+    if _done is None:
+        _done = set()
+    if not hasattr(node, '_astroid_fields'): # not a astroid node
+        return
+    if node in _done:
+        result.append(indent + 'loop in tree: %s' % node)
+        return
+    _done.add(node)
+    node_str = str(node)
+    if ids:
+        node_str += '  . \t%x' % id(node)
+    result.append(indent + node_str)
+    indent += INDENT
+    for field in node._astroid_fields:
+        value = getattr(node, field)
+        if isinstance(value, (list, tuple)):
+            result.append(indent + field + " = [")
+            for child in value:
+                if isinstance(child, (list, tuple)):
+                    # special case for Dict # FIXME
+                    _repr_tree(child[0], result, indent, _done, ids)
+                    _repr_tree(child[1], result, indent, _done, ids)
+                    result.append(indent + ',')
+                else:
+                    _repr_tree(child, result, indent, _done, ids)
+            result.append(indent + "]")
+        else:
+            result.append(indent + field + " = ")
+            _repr_tree(value, result, indent, _done, ids)
+
+
+class AsStringVisitor(object):
+    """Visitor to render an Astroid node as a valid python code string"""
+
+    def __call__(self, node):
+        """Makes this visitor behave as a simple function"""
+        return node.accept(self)
+
+    def _stmt_list(self, stmts):
+        """return a list of nodes to string"""
+        stmts = '\n'.join([nstr for nstr in [n.accept(self) for n in stmts] if nstr])
+        return INDENT + stmts.replace('\n', '\n'+INDENT)
+
+
+    ## visit_<node> methods ###########################################
+
+    def visit_arguments(self, node):
+        """return an astroid.Function node as string"""
+        return node.format_args()
+
+    def visit_assattr(self, node):
+        """return an astroid.AssAttr node as string"""
+        return self.visit_getattr(node)
+
+    def visit_assert(self, node):
+        """return an astroid.Assert node as string"""
+        if node.fail:
+            return 'assert %s, %s' % (node.test.accept(self),
+                                      node.fail.accept(self))
+        return 'assert %s' % node.test.accept(self)
+
+    def visit_assname(self, node):
+        """return an astroid.AssName node as string"""
+        return node.name
+
+    def visit_assign(self, node):
+        """return an astroid.Assign node as string"""
+        lhs = ' = '.join([n.accept(self) for n in node.targets])
+        return '%s = %s' % (lhs, node.value.accept(self))
+
+    def visit_augassign(self, node):
+        """return an astroid.AugAssign node as string"""
+        return '%s %s %s' % (node.target.accept(self), node.op, node.value.accept(self))
+
+    def visit_backquote(self, node):
+        """return an astroid.Backquote node as string"""
+        return '`%s`' % node.value.accept(self)
+
+    def visit_binop(self, node):
+        """return an astroid.BinOp node as string"""
+        return '(%s) %s (%s)' % (node.left.accept(self), node.op, node.right.accept(self))
+
+    def visit_boolop(self, node):
+        """return an astroid.BoolOp node as string"""
+        return (' %s ' % node.op).join(['(%s)' % n.accept(self)
+                                        for n in node.values])
+
+    def visit_break(self, node):
+        """return an astroid.Break node as string"""
+        return 'break'
+
+    def visit_callfunc(self, node):
+        """return an astroid.CallFunc node as string"""
+        expr_str = node.func.accept(self)
+        args = [arg.accept(self) for arg in node.args]
+        if node.starargs:
+            args.append('*' + node.starargs.accept(self))
+        if node.kwargs:
+            args.append('**' + node.kwargs.accept(self))
+        return '%s(%s)' % (expr_str, ', '.join(args))
+
+    def visit_class(self, node):
+        """return an astroid.Class node as string"""
+        decorate = node.decorators and node.decorators.accept(self)  or ''
+        bases = ', '.join([n.accept(self) for n in node.bases])
+        if sys.version_info[0] == 2:
+            bases = bases and '(%s)' % bases or ''
+        else:
+            metaclass = node.metaclass()
+            if metaclass and not node.has_metaclass_hack():
+                if bases:
+                    bases = '(%s, metaclass=%s)' % (bases, metaclass.name)
+                else:
+                    bases = '(metaclass=%s)' % metaclass.name
+            else:
+                bases = bases and '(%s)' % bases or ''
+        docs = node.doc and '\n%s"""%s"""' % (INDENT, node.doc) or ''
+        return '\n\n%sclass %s%s:%s\n%s\n' % (decorate, node.name, bases, docs,
+                                              self._stmt_list(node.body))
+
+    def visit_compare(self, node):
+        """return an astroid.Compare node as string"""
+        rhs_str = ' '.join(['%s %s' % (op, expr.accept(self))
+                            for op, expr in node.ops])
+        return '%s %s' % (node.left.accept(self), rhs_str)
+
+    def visit_comprehension(self, node):
+        """return an astroid.Comprehension node as string"""
+        ifs = ''.join([' if %s' % n.accept(self) for n in node.ifs])
+        return 'for %s in %s%s' % (node.target.accept(self),
+                                   node.iter.accept(self), ifs)
+
+    def visit_const(self, node):
+        """return an astroid.Const node as string"""
+        return repr(node.value)
+
+    def visit_continue(self, node):
+        """return an astroid.Continue node as string"""
+        return 'continue'
+
+    def visit_delete(self, node): # XXX check if correct
+        """return an astroid.Delete node as string"""
+        return 'del %s' % ', '.join([child.accept(self)
+                                     for child in node.targets])
+
+    def visit_delattr(self, node):
+        """return an astroid.DelAttr node as string"""
+        return self.visit_getattr(node)
+
+    def visit_delname(self, node):
+        """return an astroid.DelName node as string"""
+        return node.name
+
+    def visit_decorators(self, node):
+        """return an astroid.Decorators node as string"""
+        return '@%s\n' % '\n@'.join([item.accept(self) for item in node.nodes])
+
+    def visit_dict(self, node):
+        """return an astroid.Dict node as string"""
+        return '{%s}' % ', '.join(['%s: %s' % (key.accept(self),
+                                               value.accept(self))
+                                   for key, value in node.items])
+
+    def visit_dictcomp(self, node):
+        """return an astroid.DictComp node as string"""
+        return '{%s: %s %s}' % (node.key.accept(self), node.value.accept(self),
+                                ' '.join([n.accept(self) for n in node.generators]))
+
+    def visit_discard(self, node):
+        """return an astroid.Discard node as string"""
+        return node.value.accept(self)
+
+    def visit_emptynode(self, node):
+        """dummy method for visiting an Empty node"""
+        return ''
+
+    def visit_excepthandler(self, node):
+        if node.type:
+            if node.name:
+                excs = 'except %s, %s' % (node.type.accept(self),
+                                          node.name.accept(self))
+            else:
+                excs = 'except %s' % node.type.accept(self)
+        else:
+            excs = 'except'
+        return '%s:\n%s' % (excs, self._stmt_list(node.body))
+
+    def visit_ellipsis(self, node):
+        """return an astroid.Ellipsis node as string"""
+        return '...'
+
+    def visit_empty(self, node):
+        """return an Empty node as string"""
+        return ''
+
+    def visit_exec(self, node):
+        """return an astroid.Exec node as string"""
+        if node.locals:
+            return 'exec %s in %s, %s' % (node.expr.accept(self),
+                                          node.locals.accept(self),
+                                          node.globals.accept(self))
+        if node.globals:
+            return 'exec %s in %s' % (node.expr.accept(self),
+                                      node.globals.accept(self))
+        return 'exec %s' % node.expr.accept(self)
+
+    def visit_extslice(self, node):
+        """return an astroid.ExtSlice node as string"""
+        return ','.join([dim.accept(self) for dim in node.dims])
+
+    def visit_for(self, node):
+        """return an astroid.For node as string"""
+        fors = 'for %s in %s:\n%s' % (node.target.accept(self),
+                                      node.iter.accept(self),
+                                      self._stmt_list(node.body))
+        if node.orelse:
+            fors = '%s\nelse:\n%s' % (fors, self._stmt_list(node.orelse))
+        return fors
+
+    def visit_from(self, node):
+        """return an astroid.From node as string"""
+        return 'from %s import %s' % ('.' * (node.level or 0) + node.modname,
+                                      _import_string(node.names))
+
+    def visit_function(self, node):
+        """return an astroid.Function node as string"""
+        decorate = node.decorators and node.decorators.accept(self)  or ''
+        docs = node.doc and '\n%s"""%s"""' % (INDENT, node.doc) or ''
+        return '\n%sdef %s(%s):%s\n%s' % (decorate, node.name, node.args.accept(self),
+                                          docs, self._stmt_list(node.body))
+
+    def visit_genexpr(self, node):
+        """return an astroid.GenExpr node as string"""
+        return '(%s %s)' % (node.elt.accept(self),
+                            ' '.join([n.accept(self) for n in node.generators]))
+
+    def visit_getattr(self, node):
+        """return an astroid.Getattr node as string"""
+        return '%s.%s' % (node.expr.accept(self), node.attrname)
+
+    def visit_global(self, node):
+        """return an astroid.Global node as string"""
+        return 'global %s' % ', '.join(node.names)
+
+    def visit_if(self, node):
+        """return an astroid.If node as string"""
+        ifs = ['if %s:\n%s' % (node.test.accept(self), self._stmt_list(node.body))]
+        if node.orelse:# XXX use elif ???
+            ifs.append('else:\n%s' % self._stmt_list(node.orelse))
+        return '\n'.join(ifs)
+
+    def visit_ifexp(self, node):
+        """return an astroid.IfExp node as string"""
+        return '%s if %s else %s' % (node.body.accept(self),
+                                     node.test.accept(self),
+                                     node.orelse.accept(self))
+
+    def visit_import(self, node):
+        """return an astroid.Import node as string"""
+        return 'import %s' % _import_string(node.names)
+
+    def visit_keyword(self, node):
+        """return an astroid.Keyword node as string"""
+        return '%s=%s' % (node.arg, node.value.accept(self))
+
+    def visit_lambda(self, node):
+        """return an astroid.Lambda node as string"""
+        return 'lambda %s: %s' % (node.args.accept(self),
+                                  node.body.accept(self))
+
+    def visit_list(self, node):
+        """return an astroid.List node as string"""
+        return '[%s]' % ', '.join([child.accept(self) for child in node.elts])
+
+    def visit_listcomp(self, node):
+        """return an astroid.ListComp node as string"""
+        return '[%s %s]' % (node.elt.accept(self),
+                            ' '.join([n.accept(self) for n in node.generators]))
+
+    def visit_module(self, node):
+        """return an astroid.Module node as string"""
+        docs = node.doc and '"""%s"""\n\n' % node.doc or ''
+        return docs + '\n'.join([n.accept(self) for n in node.body]) + '\n\n'
+
+    def visit_name(self, node):
+        """return an astroid.Name node as string"""
+        return node.name
+
+    def visit_pass(self, node):
+        """return an astroid.Pass node as string"""
+        return 'pass'
+
+    def visit_print(self, node):
+        """return an astroid.Print node as string"""
+        nodes = ', '.join([n.accept(self) for n in node.values])
+        if not node.nl:
+            nodes = '%s,' % nodes
+        if node.dest:
+            return 'print >> %s, %s' % (node.dest.accept(self), nodes)
+        return 'print %s' % nodes
+
+    def visit_raise(self, node):
+        """return an astroid.Raise node as string"""
+        if node.exc:
+            if node.inst:
+                if node.tback:
+                    return 'raise %s, %s, %s' % (node.exc.accept(self),
+                                                 node.inst.accept(self),
+                                                 node.tback.accept(self))
+                return 'raise %s, %s' % (node.exc.accept(self),
+                                         node.inst.accept(self))
+            return 'raise %s' % node.exc.accept(self)
+        return 'raise'
+
+    def visit_return(self, node):
+        """return an astroid.Return node as string"""
+        if node.value:
+            return 'return %s' % node.value.accept(self)
+        else:
+            return 'return'
+
+    def visit_index(self, node):
+        """return a astroid.Index node as string"""
+        return node.value.accept(self)
+
+    def visit_set(self, node):
+        """return an astroid.Set node as string"""
+        return '{%s}' % ', '.join([child.accept(self) for child in node.elts])
+
+    def visit_setcomp(self, node):
+        """return an astroid.SetComp node as string"""
+        return '{%s %s}' % (node.elt.accept(self),
+                            ' '.join([n.accept(self) for n in node.generators]))
+
+    def visit_slice(self, node):
+        """return a astroid.Slice node as string"""
+        lower = node.lower and node.lower.accept(self) or ''
+        upper = node.upper and node.upper.accept(self) or ''
+        step = node.step and node.step.accept(self) or ''
+        if step:
+            return '%s:%s:%s' % (lower, upper, step)
+        return  '%s:%s' % (lower, upper)
+
+    def visit_subscript(self, node):
+        """return an astroid.Subscript node as string"""
+        return '%s[%s]' % (node.value.accept(self), node.slice.accept(self))
+
+    def visit_tryexcept(self, node):
+        """return an astroid.TryExcept node as string"""
+        trys = ['try:\n%s' % self._stmt_list(node.body)]
+        for handler in node.handlers:
+            trys.append(handler.accept(self))
+        if node.orelse:
+            trys.append('else:\n%s' % self._stmt_list(node.orelse))
+        return '\n'.join(trys)
+
+    def visit_tryfinally(self, node):
+        """return an astroid.TryFinally node as string"""
+        return 'try:\n%s\nfinally:\n%s' % (self._stmt_list(node.body),
+                                           self._stmt_list(node.finalbody))
+
+    def visit_tuple(self, node):
+        """return an astroid.Tuple node as string"""
+        if len(node.elts) == 1:
+            return '(%s, )' % node.elts[0].accept(self)
+        return '(%s)' % ', '.join([child.accept(self) for child in node.elts])
+
+    def visit_unaryop(self, node):
+        """return an astroid.UnaryOp node as string"""
+        if node.op == 'not':
+            operator = 'not '
+        else:
+            operator = node.op
+        return '%s%s' % (operator, node.operand.accept(self))
+
+    def visit_while(self, node):
+        """return an astroid.While node as string"""
+        whiles = 'while %s:\n%s' % (node.test.accept(self),
+                                    self._stmt_list(node.body))
+        if node.orelse:
+            whiles = '%s\nelse:\n%s' % (whiles, self._stmt_list(node.orelse))
+        return whiles
+
+    def visit_with(self, node): # 'with' without 'as' is possible
+        """return an astroid.With node as string"""
+        items = ', '.join(('(%s)' % expr.accept(self)) +
+                          (vars and ' as (%s)' % (vars.accept(self)) or '')
+                          for expr, vars in node.items)
+        return 'with %s:\n%s' % (items, self._stmt_list(node.body))
+
+    def visit_yield(self, node):
+        """yield an ast.Yield node as string"""
+        yi_val = node.value and (" " + node.value.accept(self)) or ""
+        expr = 'yield' + yi_val
+        if node.parent.is_statement:
+            return expr
+        else:
+            return "(%s)" % (expr,)
+
+
+class AsStringVisitor3k(AsStringVisitor):
+    """AsStringVisitor3k overwrites some AsStringVisitor methods"""
+
+    def visit_excepthandler(self, node):
+        if node.type:
+            if node.name:
+                excs = 'except %s as %s' % (node.type.accept(self),
+                                            node.name.accept(self))
+            else:
+                excs = 'except %s' % node.type.accept(self)
+        else:
+            excs = 'except'
+        return '%s:\n%s' % (excs, self._stmt_list(node.body))
+
+    def visit_nonlocal(self, node):
+        """return an astroid.Nonlocal node as string"""
+        return 'nonlocal %s' % ', '.join(node.names)
+
+    def visit_raise(self, node):
+        """return an astroid.Raise node as string"""
+        if node.exc:
+            if node.cause:
+                return 'raise %s from %s' % (node.exc.accept(self),
+                                             node.cause.accept(self))
+            return 'raise %s' % node.exc.accept(self)
+        return 'raise'
+
+    def visit_starred(self, node):
+        """return Starred node as string"""
+        return "*" + node.value.accept(self)
+
+    def visit_yieldfrom(self, node):
+        """ Return an astroid.YieldFrom node as string. """
+        yi_val = node.value and (" " + node.value.accept(self)) or ""
+        expr = 'yield from' + yi_val
+        if node.parent.is_statement:
+            return expr
+        else:
+            return "(%s)" % (expr,)
+
+
+def _import_string(names):
+    """return a list of (name, asname) formatted as a string"""
+    _names = []
+    for name, asname in names:
+        if asname is not None:
+            _names.append('%s as %s' % (name, asname))
+        else:
+            _names.append(name)
+    return  ', '.join(_names)
+
+
+if sys.version_info >= (3, 0):
+    AsStringVisitor = AsStringVisitor3k
+
+# this visitor is stateless, thus it can be reused
+to_code = AsStringVisitor()
+
diff --git a/third_party/logilab/astng/bases.py b/third_party/logilab/astroid/bases.py
similarity index 70%
rename from third_party/logilab/astng/bases.py
rename to third_party/logilab/astroid/bases.py
index 92f12aa..f1f4cc4 100644
--- a/third_party/logilab/astng/bases.py
+++ b/third_party/logilab/astroid/bases.py
@@ -1,43 +1,45 @@
-# -*- coding: utf-8 -*-
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-# copyright 2003-2010 Sylvain Thenault, all rights reserved.
-# contact mailto:thenault@gmail.com
 #
-# This file is part of logilab-astng.
+# This file is part of astroid.
 #
-# logilab-astng is free software: you can redistribute it and/or modify it
+# astroid is free software: you can redistribute it and/or modify it
 # under the terms of the GNU Lesser General Public License as published by the
 # Free Software Foundation, either version 2.1 of the License, or (at your
 # option) any later version.
 #
-# logilab-astng is distributed in the hope that it will be useful, but
+# astroid is distributed in the hope that it will be useful, but
 # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 # FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
 # for more details.
 #
 # You should have received a copy of the GNU Lesser General Public License along
-# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
+# with astroid. If not, see <http://www.gnu.org/licenses/>.
 """This module contains base classes and functions for the nodes and some
 inference utils.
 """
 
 __docformat__ = "restructuredtext en"
 
+import sys
 from contextlib import contextmanager
 
-from logilab.common.compat import builtins
+from logilab.common.decorators import cachedproperty
 
-from logilab.astng import BUILTINS_MODULE
-from logilab.astng.exceptions import InferenceError, ASTNGError, \
-                                       NotFoundError, UnresolvableName
-from logilab.astng.as_string import as_string
+from astroid.exceptions import (InferenceError, AstroidError, NotFoundError,
+                                UnresolvableName, UseInferenceDefault)
 
-BUILTINS_NAME = builtins.__name__
+
+if sys.version_info >= (3, 0):
+    BUILTINS = 'builtins'
+else:
+    BUILTINS = '__builtin__'
+
 
 class Proxy(object):
     """a simple proxy object"""
-    _proxied = None
+
+    _proxied = None # proxied object may be set by class or by instance
 
     def __init__(self, proxied=None):
         if proxied is not None:
@@ -56,63 +58,84 @@
 
 # Inference ##################################################################
 
-class InferenceContext(object):
-    __slots__ = ('path', 'lookupname', 'callcontext', 'boundnode')
+MISSING = object()
 
-    def __init__(self, path=None):
+
+class InferenceContext(object):
+    __slots__ = ('path', 'callcontext', 'boundnode', 'infered')
+
+    def __init__(self,
+            path=None, callcontext=None, boundnode=None, infered=None):
         if path is None:
-            self.path = set()
+            self.path = frozenset()
         else:
             self.path = path
-        self.lookupname = None
-        self.callcontext = None
-        self.boundnode = None
+        self.callcontext = callcontext
+        self.boundnode = boundnode
+        if infered is None:
+            self.infered = {}
+        else:
+            self.infered = infered
 
-    def push(self, node):
-        name = self.lookupname
-        if (node, name) in self.path:
-            raise StopIteration()
-        self.path.add( (node, name) )
+    def push(self, key):
+        # This returns a NEW context with the same attributes, but a new key
+        # added to `path`.  The intention is that it's only passed to callees
+        # and then destroyed; otherwise scope() may not work correctly.
+        # The cache will be shared, since it's the same exact dict.
+        if key in self.path:
+            # End the containing generator
+            raise StopIteration
 
-    def clone(self):
-        # XXX copy lookupname/callcontext ?
-        clone = InferenceContext(self.path)
-        clone.callcontext = self.callcontext
-        clone.boundnode = self.boundnode
-        return clone
+        return InferenceContext(
+            self.path.union([key]),
+            self.callcontext,
+            self.boundnode,
+            self.infered,
+        )
 
     @contextmanager
-    def restore_path(self):
-        path = set(self.path)
-        yield
-        self.path = path
+    def scope(self, callcontext=MISSING, boundnode=MISSING):
+        try:
+            orig = self.callcontext, self.boundnode
+            if callcontext is not MISSING:
+                self.callcontext = callcontext
+            if boundnode is not MISSING:
+                self.boundnode = boundnode
+            yield
+        finally:
+            self.callcontext, self.boundnode = orig
 
-def copy_context(context):
-    if context is not None:
-        return context.clone()
-    else:
-        return InferenceContext()
+    def cache_generator(self, key, generator):
+        results = []
+        for result in generator:
+            results.append(result)
+            yield result
+
+        self.infered[key] = tuple(results)
+        return
 
 
-def _infer_stmts(stmts, context, frame=None):
+def _infer_stmts(stmts, context, frame=None, lookupname=None):
     """return an iterator on statements inferred by each statement in <stmts>
     """
     stmt = None
     infered = False
-    if context is not None:
-        name = context.lookupname
-        context = context.clone()
-    else:
-        name = None
+    if context is None:
         context = InferenceContext()
     for stmt in stmts:
         if stmt is YES:
             yield stmt
             infered = True
             continue
-        context.lookupname = stmt._infer_name(frame, name)
+
+        kw = {}
+        infered_name = stmt._infer_name(frame, lookupname)
+        if infered_name is not None:
+            # only returns not None if .infer() accepts a lookupname kwarg
+            kw['lookupname'] = infered_name
+
         try:
-            for infered in stmt.infer(context):
+            for infered in stmt.infer(context, **kw):
                 yield infered
                 infered = True
         except UnresolvableName:
@@ -131,6 +154,8 @@
     def __repr__(self):
         return 'YES'
     def __getattribute__(self, name):
+        if name == 'next':
+            raise AttributeError('next method should not be called')
         if name.startswith('__') and name.endswith('__'):
             # to avoid inspection pb
             return super(_Yes, self).__getattribute__(name)
@@ -168,17 +193,24 @@
 
     def igetattr(self, name, context=None):
         """inferred getattr"""
+        if not context:
+            context = InferenceContext()
         try:
+            # avoid recursively inferring the same attr on the same class
+            new_context = context.push((self._proxied, name))
             # XXX frame should be self._proxied, or not ?
-            get_attr = self.getattr(name, context, lookupclass=False)
-            return _infer_stmts(self._wrap_attr(get_attr, context), context,
-                                frame=self)
+            get_attr = self.getattr(name, new_context, lookupclass=False)
+            return _infer_stmts(
+                self._wrap_attr(get_attr, new_context),
+                new_context,
+                frame=self,
+            )
         except NotFoundError:
             try:
                 # fallback to class'igetattr since it has some logic to handle
                 # descriptors
                 return self._wrap_attr(self._proxied.igetattr(name, context),
-                                       context)
+                                            context)
             except NotFoundError:
                 raise InferenceError(name)
 
@@ -186,7 +218,7 @@
         """wrap bound methods of attrs in a InstanceMethod proxies"""
         for attr in attrs:
             if isinstance(attr, UnboundMethod):
-                if BUILTINS_NAME + '.property' in attr.decoratornames():
+                if BUILTINS + '.property' in attr.decoratornames():
                     for infered in attr.infer_call_result(self, context):
                         yield infered
                 else:
@@ -198,6 +230,8 @@
         """infer what a class instance is returning when called"""
         infered = False
         for node in self._proxied.igetattr('__call__', context):
+            if node is YES:
+                continue
             for res in node.infer_call_result(caller, context):
                 infered = True
                 yield res
@@ -251,14 +285,15 @@
         # If we're unbound method __new__ of builtin object, the result is an
         # instance of the class given as first argument.
         if (self._proxied.name == '__new__' and
-                self._proxied.parent.frame().qname() == '%s.object' % BUILTINS_MODULE):
-            return (x is YES and x or Instance(x) for x in caller.args[0].infer())
+                self._proxied.parent.frame().qname() == '%s.object' % BUILTINS):
+            infer = caller.args[0].infer() if caller.args else []
+            return ((x is YES and x or Instance(x)) for x in infer)
         return self._proxied.infer_call_result(caller, context)
 
 
 class BoundMethod(UnboundMethod):
     """a special node representing a method bound to an instance"""
-    def __init__(self,  proxy, bound):
+    def __init__(self, proxy, bound):
         UnboundMethod.__init__(self, proxy)
         self.bound = bound
 
@@ -266,18 +301,21 @@
         return True
 
     def infer_call_result(self, caller, context):
-        context = context.clone()
-        context.boundnode = self.bound
-        return self._proxied.infer_call_result(caller, context)
+        with context.scope(boundnode=self.bound):
+            for infered in self._proxied.infer_call_result(caller, context):
+                yield infered
 
 
 class Generator(Instance):
-    """a special node representing a generator"""
+    """a special node representing a generator.
+
+    Proxied class is set once for all in raw_building.
+    """
     def callable(self):
-        return True
+        return False
 
     def pytype(self):
-        return '%s.generator' % BUILTINS_MODULE
+        return '%s.generator' % BUILTINS
 
     def display_type(self):
         return 'Generator'
@@ -297,7 +335,8 @@
         """wrapper function handling context"""
         if context is None:
             context = InferenceContext()
-        context.push(node)
+        context = context.push((node, kwargs.get('lookupname')))
+
         yielded = set()
         for res in _func(node, context, **kwargs):
             # unproxy only true instance, not const, tuple, dict...
@@ -334,7 +373,7 @@
 # Node  ######################################################################
 
 class NodeNG(object):
-    """Base Class for all ASTNG node classes.
+    """Base Class for all Astroid node classes.
 
     It represents a node of the new abstract syntax tree.
     """
@@ -349,7 +388,32 @@
     # parent node in the tree
     parent = None
     # attributes containing child node(s) redefined in most concrete classes:
-    _astng_fields = ()
+    _astroid_fields = ()
+    # instance specific inference function infer(node, context)
+    _explicit_inference = None
+
+    def infer(self, context=None, **kwargs):
+        """main interface to the interface system, return a generator on infered
+        values.
+
+        If the instance has some explicit inference function set, it will be
+        called instead of the default interface.
+        """
+        if self._explicit_inference is not None:
+            # explicit_inference is not bound, give it self explicitly
+            try:
+                return self._explicit_inference(self, context, **kwargs)
+            except UseInferenceDefault:
+                pass
+
+        if not context:
+            return self._infer(context, **kwargs)
+
+        key = (self, kwargs.get('lookupname'), context.callcontext, context.boundnode)
+        if key in context.infered:
+            return iter(context.infered[key])
+
+        return context.cache_generator(key, self._infer(context, **kwargs))
 
     def _repr_name(self):
         """return self.name or self.attrname or '' for nice representation"""
@@ -359,20 +423,19 @@
         return '%s(%s)' % (self.__class__.__name__, self._repr_name())
 
     def __repr__(self):
-        return '<%s(%s) l.%s [%s] at Ox%x>' % (self.__class__.__name__,
-                                           self._repr_name(),
-                                           self.fromlineno,
-                                           self.root().name,
-                                           id(self))
+        return '<%s(%s) l.%s [%s] at 0x%x>' % (self.__class__.__name__,
+                                               self._repr_name(),
+                                               self.fromlineno,
+                                               self.root().name,
+                                               id(self))
 
 
     def accept(self, visitor):
-        klass = self.__class__.__name__
         func = getattr(visitor, "visit_" + self.__class__.__name__.lower())
         return func(self)
 
     def get_children(self):
-        for field in self._astng_fields:
+        for field in self._astroid_fields:
             attr = getattr(self, field)
             if attr is None:
                 continue
@@ -384,11 +447,11 @@
 
     def last_child(self):
         """an optimized version of list(get_children())[-1]"""
-        for field in self._astng_fields[::-1]:
+        for field in self._astroid_fields[::-1]:
             attr = getattr(self, field)
             if not attr: # None or empty listy / tuple
                 continue
-            if isinstance(attr, (list, tuple)):
+            if attr.__class__ in (list, tuple):
                 return attr[-1]
             else:
                 return attr
@@ -428,7 +491,7 @@
 
     def child_sequence(self, child):
         """search for the right sequence where the child lies in"""
-        for field in self._astng_fields:
+        for field in self._astroid_fields:
             node_or_sequence = getattr(self, field)
             if node_or_sequence is child:
                 return [node_or_sequence]
@@ -436,20 +499,20 @@
             if isinstance(node_or_sequence, (tuple, list)) and child in node_or_sequence:
                 return node_or_sequence
         else:
-            msg = 'Could not found %s in %s\'s children'
-            raise ASTNGError(msg % (repr(child), repr(self)))
+            msg = 'Could not find %s in %s\'s children'
+            raise AstroidError(msg % (repr(child), repr(self)))
 
     def locate_child(self, child):
         """return a 2-uple (child attribute name, sequence or node)"""
-        for field in self._astng_fields:
+        for field in self._astroid_fields:
             node_or_sequence = getattr(self, field)
             # /!\ compiler.ast Nodes have an __iter__ walking over child nodes
             if child is node_or_sequence:
                 return field, child
             if isinstance(node_or_sequence, (tuple, list)) and child in node_or_sequence:
                 return field, node_or_sequence
-        msg = 'Could not found %s in %s\'s children'
-        raise ASTNGError(msg % (repr(child), repr(self)))
+        msg = 'Could not find %s in %s\'s children'
+        raise AstroidError(msg % (repr(child), repr(self)))
     # FIXME : should we merge child_sequence and locate_child ? locate_child
     # is only used in are_exclusive, child_sequence one time in pylint.
 
@@ -479,16 +542,28 @@
         # FIXME: raise an exception if nearest is None ?
         return nearest[0]
 
-    def set_line_info(self, lastchild):
+    # these are lazy because they're relatively expensive to compute for every
+    # single node, and they rarely get looked at
+
+    @cachedproperty
+    def fromlineno(self):
         if self.lineno is None:
-            self.fromlineno = self._fixed_source_line()
+            return self._fixed_source_line()
         else:
-            self.fromlineno = self.lineno
+            return self.lineno
+
+    @cachedproperty
+    def tolineno(self):
+        if not self._astroid_fields:
+            # can't have children
+            lastchild = None
+        else:
+            lastchild = self.last_child()
         if lastchild is None:
-            self.tolineno = self.fromlineno
+            return self.fromlineno
         else:
-            self.tolineno = lastchild.tolineno
-        return
+            return lastchild.tolineno
+
         # TODO / FIXME:
         assert self.fromlineno is not None, self
         assert self.tolineno is not None, self
@@ -503,7 +578,7 @@
         _node = self
         try:
             while line is None:
-                _node = _node.get_children().next()
+                _node = next(_node.get_children())
                 line = _node.lineno
         except StopIteration:
             _node = self.parent
@@ -538,7 +613,7 @@
         # overridden for From, Import, Global, TryExcept and Arguments
         return None
 
-    def infer(self, context=None):
+    def _infer(self, context=None):
         """we don't know how to resolve a statement by default"""
         # this method is overridden by most concrete classes
         raise InferenceError(self.__class__.__name__)
@@ -561,15 +636,12 @@
         return False
 
     def as_string(self):
-        return as_string(self)
+        from astroid.as_string import to_code
+        return to_code(self)
 
     def repr_tree(self, ids=False):
-        """print a nice astng tree representation.
-
-        :param ids: if true, we also print the ids (usefull for debugging)"""
-        result = []
-        _repr_tree(self, result, ids=ids)
-        return "\n".join(result)
+        from astroid.as_string import dump
+        return dump(self)
 
 
 class Statement(NodeNG):
@@ -591,39 +663,3 @@
         index = stmts.index(self)
         if index >= 1:
             return stmts[index -1]
-
-INDENT = "    "
-
-def _repr_tree(node, result, indent='', _done=None, ids=False):
-    """built a tree representation of a node as a list of lines"""
-    if _done is None:
-        _done = set()
-    if not hasattr(node, '_astng_fields'): # not a astng node
-        return
-    if node in _done:
-        result.append( indent + 'loop in tree: %s' % node )
-        return
-    _done.add(node)
-    node_str = str(node)
-    if ids:
-        node_str += '  . \t%x' % id(node)
-    result.append( indent + node_str )
-    indent += INDENT
-    for field in node._astng_fields:
-        value = getattr(node, field)
-        if isinstance(value, (list, tuple) ):
-            result.append(  indent + field + " = [" )
-            for child in value:
-                if isinstance(child, (list, tuple) ):
-                    # special case for Dict # FIXME
-                    _repr_tree(child[0], result, indent, _done, ids)
-                    _repr_tree(child[1], result, indent, _done, ids)
-                    result.append(indent + ',')
-                else:
-                    _repr_tree(child, result, indent, _done, ids)
-            result.append(  indent + "]" )
-        else:
-            result.append(  indent + field + " = " )
-            _repr_tree(value, result, indent, _done, ids)
-
-
diff --git a/third_party/logilab/astroid/brain/builtin_inference.py b/third_party/logilab/astroid/brain/builtin_inference.py
new file mode 100644
index 0000000..f60e791
--- /dev/null
+++ b/third_party/logilab/astroid/brain/builtin_inference.py
@@ -0,0 +1,245 @@
+"""Astroid hooks for various builtins."""
+
+import sys
+from functools import partial
+from textwrap import dedent
+
+import six
+from astroid import (MANAGER, UseInferenceDefault,
+                     inference_tip, YES, InferenceError, UnresolvableName)
+from astroid import nodes
+from astroid.builder import AstroidBuilder
+
+
+def _extend_str(class_node, rvalue):
+    """function to extend builtin str/unicode class"""
+    # TODO(cpopa): this approach will make astroid to believe
+    # that some arguments can be passed by keyword, but
+    # unfortunately, strings and bytes don't accept keyword arguments.
+    code = dedent('''
+    class whatever(object):
+        def join(self, iterable):
+            return {rvalue}
+        def replace(self, old, new, count=None):
+            return {rvalue}
+        def format(self, *args, **kwargs):
+            return {rvalue}
+        def encode(self, encoding='ascii', errors=None):
+            return ''
+        def decode(self, encoding='ascii', errors=None):
+            return u''
+        def capitalize(self):
+            return {rvalue}
+        def title(self):
+            return {rvalue}
+        def lower(self):
+            return {rvalue}
+        def upper(self):
+            return {rvalue}
+        def swapcase(self):
+            return {rvalue}
+        def index(self, sub, start=None, end=None):
+            return 0
+        def find(self, sub, start=None, end=None):
+            return 0
+        def count(self, sub, start=None, end=None):
+            return 0
+        def strip(self, chars=None):
+            return {rvalue}
+        def lstrip(self, chars=None):
+            return {rvalue}
+        def rstrip(self, chars=None):
+            return {rvalue}
+        def rjust(self, width, fillchar=None):
+            return {rvalue} 
+        def center(self, width, fillchar=None):
+            return {rvalue}
+        def ljust(self, width, fillchar=None):
+            return {rvalue}
+    ''')
+    code = code.format(rvalue=rvalue)
+    fake = AstroidBuilder(MANAGER).string_build(code)['whatever']
+    for method in fake.mymethods():
+        class_node.locals[method.name] = [method]
+        method.parent = class_node
+
+def extend_builtins(class_transforms):
+    from astroid.bases import BUILTINS
+    builtin_ast = MANAGER.astroid_cache[BUILTINS]
+    for class_name, transform in class_transforms.items():
+        transform(builtin_ast[class_name])
+
+if sys.version_info > (3, 0):
+    extend_builtins({'bytes': partial(_extend_str, rvalue="b''"),
+                     'str': partial(_extend_str, rvalue="''")})
+else:
+    extend_builtins({'str': partial(_extend_str, rvalue="''"),
+                     'unicode': partial(_extend_str, rvalue="u''")})
+
+
+def register_builtin_transform(transform, builtin_name):
+    """Register a new transform function for the given *builtin_name*.
+
+    The transform function must accept two parameters, a node and
+    an optional context.
+    """
+    def _transform_wrapper(node, context=None):
+        result = transform(node, context=context)
+        if result:
+            result.parent = node
+            result.lineno = node.lineno
+            result.col_offset = node.col_offset
+        return iter([result])
+
+    MANAGER.register_transform(nodes.CallFunc,
+                               inference_tip(_transform_wrapper),
+                               lambda n: (isinstance(n.func, nodes.Name) and
+                                          n.func.name == builtin_name))
+
+
+def _generic_inference(node, context, node_type, transform):
+    args = node.args
+    if not args:
+        return node_type()
+    if len(node.args) > 1:
+        raise UseInferenceDefault()
+
+    arg, = args
+    transformed = transform(arg)
+    if not transformed:
+        try:
+            infered = next(arg.infer(context=context))
+        except (InferenceError, StopIteration):
+            raise UseInferenceDefault()
+        if infered is YES:
+            raise UseInferenceDefault()
+        transformed = transform(infered)
+    if not transformed or transformed is YES:
+        raise UseInferenceDefault()
+    return transformed
+
+
+def _generic_transform(arg, klass, iterables, build_elts):
+    if isinstance(arg, klass):
+        return arg
+    elif isinstance(arg, iterables):
+        if not all(isinstance(elt, nodes.Const)
+                   for elt in arg.elts):
+            # TODO(cpopa): Don't support heterogeneous elements.
+            # Not yet, though.
+            raise UseInferenceDefault()
+        elts = [elt.value for elt in arg.elts]
+    elif isinstance(arg, nodes.Dict):
+        if not all(isinstance(elt[0], nodes.Const)
+                   for elt in arg.items):
+            raise UseInferenceDefault()
+        elts = [item[0].value for item in arg.items]
+    elif (isinstance(arg, nodes.Const) and
+          isinstance(arg.value, (six.string_types, six.binary_type))):
+        elts = arg.value
+    else:
+        return
+    return klass(elts=build_elts(elts))
+
+
+def _infer_builtin(node, context,
+                   klass=None, iterables=None,
+                   build_elts=None):
+    transform_func = partial(
+        _generic_transform,
+        klass=klass,
+        iterables=iterables,
+        build_elts=build_elts)
+
+    return _generic_inference(node, context, klass, transform_func)
+
+# pylint: disable=invalid-name
+infer_tuple = partial(
+    _infer_builtin,
+    klass=nodes.Tuple,
+    iterables=(nodes.List, nodes.Set),
+    build_elts=tuple)
+
+infer_list = partial(
+    _infer_builtin,
+    klass=nodes.List,
+    iterables=(nodes.Tuple, nodes.Set),
+    build_elts=list)
+
+infer_set = partial(
+    _infer_builtin,
+    klass=nodes.Set,
+    iterables=(nodes.List, nodes.Tuple),
+    build_elts=set)
+
+
+def _get_elts(arg, context):
+    is_iterable = lambda n: isinstance(n,
+                                       (nodes.List, nodes.Tuple, nodes.Set))
+    try:
+        infered = next(arg.infer(context))
+    except (InferenceError, UnresolvableName):
+        raise UseInferenceDefault()
+    if isinstance(infered, nodes.Dict):
+        items = infered.items
+    elif is_iterable(infered):
+        items = []
+        for elt in infered.elts:
+            # If an item is not a pair of two items,
+            # then fallback to the default inference.
+            # Also, take into consideration only hashable items,
+            # tuples and consts. We are choosing Names as well.
+            if not is_iterable(elt):
+                raise UseInferenceDefault()
+            if len(elt.elts) != 2:
+                raise UseInferenceDefault()
+            if not isinstance(elt.elts[0],
+                              (nodes.Tuple, nodes.Const, nodes.Name)):
+                raise UseInferenceDefault()
+            items.append(tuple(elt.elts))
+    else:
+        raise UseInferenceDefault()
+    return items
+
+def infer_dict(node, context=None):
+    """Try to infer a dict call to a Dict node.
+
+    The function treats the following cases:
+
+        * dict()
+        * dict(mapping)
+        * dict(iterable)
+        * dict(iterable, **kwargs)
+        * dict(mapping, **kwargs)
+        * dict(**kwargs)
+
+    If a case can't be inferred, we'll fall back to default inference.
+    """
+    has_keywords = lambda args: all(isinstance(arg, nodes.Keyword)
+                                    for arg in args)
+    if not node.args and not node.kwargs:
+        # dict()
+        return nodes.Dict()
+    elif has_keywords(node.args) and node.args:
+        # dict(a=1, b=2, c=4)
+        items = [(nodes.Const(arg.arg), arg.value) for arg in node.args]
+    elif (len(node.args) >= 2 and
+          has_keywords(node.args[1:])):
+        # dict(some_iterable, b=2, c=4)
+        elts = _get_elts(node.args[0], context)
+        keys = [(nodes.Const(arg.arg), arg.value) for arg in node.args[1:]]
+        items = elts + keys
+    elif len(node.args) == 1:
+        items = _get_elts(node.args[0], context)
+    else:
+        raise UseInferenceDefault()
+
+    empty = nodes.Dict()
+    empty.items = items
+    return empty
+
+# Builtins inference
+register_builtin_transform(infer_tuple, 'tuple')
+register_builtin_transform(infer_set, 'set')
+register_builtin_transform(infer_list, 'list')
+register_builtin_transform(infer_dict, 'dict')
diff --git a/third_party/logilab/astroid/brain/py2gi.py b/third_party/logilab/astroid/brain/py2gi.py
new file mode 100644
index 0000000..6747898
--- /dev/null
+++ b/third_party/logilab/astroid/brain/py2gi.py
@@ -0,0 +1,155 @@
+"""Astroid hooks for the Python 2 GObject introspection bindings.
+
+Helps with understanding everything imported from 'gi.repository'
+"""
+
+import inspect
+import itertools
+import sys
+import re
+
+from astroid import MANAGER, AstroidBuildingException
+from astroid.builder import AstroidBuilder
+
+
+_inspected_modules = {}
+
+_identifier_re = r'^[A-Za-z_]\w*$'
+
+def _gi_build_stub(parent):
+    """
+    Inspect the passed module recursively and build stubs for functions,
+    classes, etc.
+    """
+    classes = {}
+    functions = {}
+    constants = {}
+    methods = {}
+    for name in dir(parent):
+        if name.startswith("__"):
+            continue
+
+        # Check if this is a valid name in python
+        if not re.match(_identifier_re, name):
+            continue
+
+        try:
+            obj = getattr(parent, name)
+        except:
+            continue
+
+        if inspect.isclass(obj):
+            classes[name] = obj
+        elif (inspect.isfunction(obj) or
+              inspect.isbuiltin(obj)):
+            functions[name] = obj
+        elif (inspect.ismethod(obj) or
+              inspect.ismethoddescriptor(obj)):
+            methods[name] = obj
+        elif type(obj) in [int, str]:
+            constants[name] = obj
+        elif (str(obj).startswith("<flags") or
+              str(obj).startswith("<enum ") or
+              str(obj).startswith("<GType ") or
+              inspect.isdatadescriptor(obj)):
+            constants[name] = 0
+        elif callable(obj):
+            # Fall back to a function for anything callable
+            functions[name] = obj
+        else:
+            # Assume everything else is some manner of constant
+            constants[name] = 0
+
+    ret = ""
+
+    if constants:
+        ret += "# %s contants\n\n" % parent.__name__
+    for name in sorted(constants):
+        if name[0].isdigit():
+            # GDK has some busted constant names like
+            # Gdk.EventType.2BUTTON_PRESS
+            continue
+
+        val = constants[name]
+
+        strval = str(val)
+        if type(val) is str:
+            strval = '"%s"' % str(val).replace("\\", "\\\\")
+        ret += "%s = %s\n" % (name, strval)
+
+    if ret:
+        ret += "\n\n"
+    if functions:
+        ret += "# %s functions\n\n" % parent.__name__
+    for name in sorted(functions):
+        func = functions[name]
+        ret += "def %s(*args, **kwargs):\n" % name
+        ret += "    pass\n"
+
+    if ret:
+        ret += "\n\n"
+    if methods:
+        ret += "# %s methods\n\n" % parent.__name__
+    for name in sorted(methods):
+        func = methods[name]
+        ret += "def %s(self, *args, **kwargs):\n" % name
+        ret += "    pass\n"
+
+    if ret:
+        ret += "\n\n"
+    if classes:
+        ret += "# %s classes\n\n" % parent.__name__
+    for name in sorted(classes):
+        ret += "class %s(object):\n" % name
+
+        classret = _gi_build_stub(classes[name])
+        if not classret:
+            classret = "pass\n"
+
+        for line in classret.splitlines():
+            ret += "    " + line + "\n"
+        ret += "\n"
+
+    return ret
+
+def _import_gi_module(modname):
+    # we only consider gi.repository submodules
+    if not modname.startswith('gi.repository.'):
+        raise AstroidBuildingException()
+    # build astroid representation unless we already tried so
+    if modname not in _inspected_modules:
+        modnames = [modname]
+        optional_modnames = []
+
+        # GLib and GObject may have some special case handling
+        # in pygobject that we need to cope with. However at
+        # least as of pygobject3-3.13.91 the _glib module doesn't
+        # exist anymore, so we treat these modules as optional.
+        if modname == 'gi.repository.GLib':
+            optional_modnames.append('gi._glib')
+        elif modname == 'gi.repository.GObject':
+            optional_modnames.append('gi._gobject')
+
+        try:
+            modcode = ''
+            for m in itertools.chain(modnames, optional_modnames):
+                try:
+                    __import__(m)
+                    modcode += _gi_build_stub(sys.modules[m])
+                except ImportError:
+                    if m not in optional_modnames:
+                        raise
+        except ImportError:
+            astng = _inspected_modules[modname] = None
+        else:
+            astng = AstroidBuilder(MANAGER).string_build(modcode, modname)
+            _inspected_modules[modname] = astng
+    else:
+        astng = _inspected_modules[modname]
+    if astng is None:
+        raise AstroidBuildingException('Failed to import module %r' % modname)
+    return astng
+
+
+MANAGER.register_failed_import_hook(_import_gi_module)
+
diff --git a/third_party/logilab/astroid/brain/py2mechanize.py b/third_party/logilab/astroid/brain/py2mechanize.py
new file mode 100644
index 0000000..20a253a
--- /dev/null
+++ b/third_party/logilab/astroid/brain/py2mechanize.py
@@ -0,0 +1,18 @@
+from astroid import MANAGER, register_module_extender
+from astroid.builder import AstroidBuilder
+
+def mechanize_transform():
+    return AstroidBuilder(MANAGER).string_build('''
+
+class Browser(object):
+    def open(self, url, data=None, timeout=None):
+        return None
+    def open_novisit(self, url, data=None, timeout=None):
+        return None
+    def open_local_file(self, filename):
+        return None
+
+''')
+
+
+register_module_extender(MANAGER, 'mechanize', mechanize_transform)
diff --git a/third_party/logilab/astroid/brain/py2pytest.py b/third_party/logilab/astroid/brain/py2pytest.py
new file mode 100644
index 0000000..e24d449
--- /dev/null
+++ b/third_party/logilab/astroid/brain/py2pytest.py
@@ -0,0 +1,31 @@
+"""Astroid hooks for pytest."""
+
+from astroid import MANAGER, register_module_extender
+from astroid.builder import AstroidBuilder
+
+
+def pytest_transform():
+    return AstroidBuilder(MANAGER).string_build('''
+
+try:
+    import _pytest.mark
+    import _pytest.recwarn
+    import _pytest.runner
+    import _pytest.python
+except ImportError:
+    pass
+else:
+    deprecated_call = _pytest.recwarn.deprecated_call
+    exit = _pytest.runner.exit
+    fail = _pytest.runner.fail
+    fixture = _pytest.python.fixture
+    importorskip = _pytest.runner.importorskip
+    mark = _pytest.mark.MarkGenerator()
+    raises = _pytest.python.raises
+    skip = _pytest.runner.skip
+    yield_fixture = _pytest.python.yield_fixture
+
+''')
+
+register_module_extender(MANAGER, 'pytest', pytest_transform)
+register_module_extender(MANAGER, 'py.test', pytest_transform)
diff --git a/third_party/logilab/astroid/brain/py2qt4.py b/third_party/logilab/astroid/brain/py2qt4.py
new file mode 100644
index 0000000..d557809
--- /dev/null
+++ b/third_party/logilab/astroid/brain/py2qt4.py
@@ -0,0 +1,22 @@
+"""Astroid hooks for the Python 2 qt4 module.
+
+Currently help understanding of :
+
+* PyQT4.QtCore
+"""
+
+from astroid import MANAGER, register_module_extender
+from astroid.builder import AstroidBuilder
+
+
+def pyqt4_qtcore_transform():
+    return AstroidBuilder(MANAGER).string_build('''
+
+def SIGNAL(signal_name): pass
+
+class QObject(object):
+    def emit(self, signal): pass
+''')
+
+
+register_module_extender(MANAGER, 'PyQt4.QtCore', pyqt4_qtcore_transform)
diff --git a/third_party/logilab/astroid/brain/py2stdlib.py b/third_party/logilab/astroid/brain/py2stdlib.py
new file mode 100644
index 0000000..2bfcbcd
--- /dev/null
+++ b/third_party/logilab/astroid/brain/py2stdlib.py
@@ -0,0 +1,334 @@
+
+"""Astroid hooks for the Python 2 standard library.
+
+Currently help understanding of :
+
+* hashlib.md5 and hashlib.sha1
+"""
+
+import sys
+from functools import partial
+from textwrap import dedent
+
+from astroid import (
+    MANAGER, AsStringRegexpPredicate,
+    UseInferenceDefault, inference_tip,
+    YES, InferenceError, register_module_extender)
+from astroid import exceptions
+from astroid import nodes
+from astroid.builder import AstroidBuilder
+
+PY3K = sys.version_info > (3, 0)
+PY33 = sys.version_info >= (3, 3)
+
+# general function
+
+def infer_func_form(node, base_type, context=None, enum=False):
+    """Specific inference function for namedtuple or Python 3 enum. """
+    def infer_first(node):
+        try:
+            value = next(node.infer(context=context))
+            if value is YES:
+                raise UseInferenceDefault()
+            else:
+                return value
+        except StopIteration:
+            raise InferenceError()
+
+    # node is a CallFunc node, class name as first argument and generated class
+    # attributes as second argument
+    if len(node.args) != 2:
+        # something weird here, go back to class implementation
+        raise UseInferenceDefault()
+    # namedtuple or enums list of attributes can be a list of strings or a
+    # whitespace-separated string
+    try:
+        name = infer_first(node.args[0]).value
+        names = infer_first(node.args[1])
+        try:
+            attributes = names.value.replace(',', ' ').split()
+        except AttributeError:
+            if not enum:
+                attributes = [infer_first(const).value for const in names.elts]
+            else:
+                # Enums support either an iterator of (name, value) pairs
+                # or mappings.
+                # TODO: support only list, tuples and mappings.
+                if hasattr(names, 'items') and isinstance(names.items, list):
+                    attributes = [infer_first(const[0]).value
+                                  for const in names.items
+                                  if isinstance(const[0], nodes.Const)]
+                elif hasattr(names, 'elts'):
+                    # Enums can support either ["a", "b", "c"]
+                    # or [("a", 1), ("b", 2), ...], but they can't
+                    # be mixed.
+                    if all(isinstance(const, nodes.Tuple)
+                           for const in names.elts):
+                        attributes = [infer_first(const.elts[0]).value
+                                      for const in names.elts
+                                      if isinstance(const, nodes.Tuple)]
+                    else:
+                        attributes = [infer_first(const).value
+                                      for const in names.elts]
+                else:
+                    raise AttributeError
+                if not attributes:
+                    raise AttributeError
+    except (AttributeError, exceptions.InferenceError) as exc:
+        raise UseInferenceDefault()
+    # we want to return a Class node instance with proper attributes set
+    class_node = nodes.Class(name, 'docstring')
+    class_node.parent = node.parent
+    # set base class=tuple
+    class_node.bases.append(base_type)
+    # XXX add __init__(*attributes) method
+    for attr in attributes:
+        fake_node = nodes.EmptyNode()
+        fake_node.parent = class_node
+        class_node.instance_attrs[attr] = [fake_node]
+    return class_node, name, attributes
+
+
+# module specific transformation functions #####################################
+
+def hashlib_transform():
+    template = '''
+
+class %(name)s(object):
+  def __init__(self, value=''): pass
+  def digest(self):
+    return %(digest)s
+  def copy(self):
+    return self
+  def update(self, value): pass
+  def hexdigest(self):
+    return ''
+  @property
+  def name(self):
+    return %(name)r
+  @property
+  def block_size(self):
+    return 1
+  @property
+  def digest_size(self):
+    return 1
+'''
+    algorithms = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
+    classes = "".join(
+        template % {'name': hashfunc, 'digest': 'b""' if PY3K else '""'} 
+        for hashfunc in algorithms)
+    return AstroidBuilder(MANAGER).string_build(classes)
+
+
+def collections_transform():
+    return AstroidBuilder(MANAGER).string_build('''
+
+class defaultdict(dict):
+    default_factory = None
+    def __missing__(self, key): pass
+
+class deque(object):
+    maxlen = 0
+    def __init__(self, iterable=None, maxlen=None): pass
+    def append(self, x): pass
+    def appendleft(self, x): pass
+    def clear(self): pass
+    def count(self, x): return 0
+    def extend(self, iterable): pass
+    def extendleft(self, iterable): pass
+    def pop(self): pass
+    def popleft(self): pass
+    def remove(self, value): pass
+    def reverse(self): pass
+    def rotate(self, n): pass
+    def __iter__(self): return self
+
+''')
+
+
+def pkg_resources_transform():
+    return AstroidBuilder(MANAGER).string_build('''
+
+def resource_exists(package_or_requirement, resource_name):
+    pass
+
+def resource_isdir(package_or_requirement, resource_name):
+    pass
+
+def resource_filename(package_or_requirement, resource_name):
+    pass
+
+def resource_stream(package_or_requirement, resource_name):
+    pass
+
+def resource_string(package_or_requirement, resource_name):
+    pass
+
+def resource_listdir(package_or_requirement, resource_name):
+    pass
+
+def extraction_error():
+    pass
+
+def get_cache_path(archive_name, names=()):
+    pass
+
+def postprocess(tempname, filename):
+    pass
+
+def set_extraction_path(path):
+    pass
+
+def cleanup_resources(force=False):
+    pass
+
+''')
+
+
+def subprocess_transform():
+    if PY3K:
+        communicate = (bytes('string', 'ascii'), bytes('string', 'ascii'))
+        init = """
+    def __init__(self, args, bufsize=0, executable=None,
+                 stdin=None, stdout=None, stderr=None,
+                 preexec_fn=None, close_fds=False, shell=False,
+                 cwd=None, env=None, universal_newlines=False,
+                 startupinfo=None, creationflags=0, restore_signals=True,
+                 start_new_session=False, pass_fds=()):
+        pass
+        """
+    else:
+        communicate = ('string', 'string')
+        init = """
+    def __init__(self, args, bufsize=0, executable=None,
+                 stdin=None, stdout=None, stderr=None,
+                 preexec_fn=None, close_fds=False, shell=False,
+                 cwd=None, env=None, universal_newlines=False,
+                 startupinfo=None, creationflags=0):
+        pass
+        """
+    if PY33:
+        wait_signature = 'def wait(self, timeout=None)'
+    else:
+        wait_signature = 'def wait(self)'
+    return AstroidBuilder(MANAGER).string_build('''
+
+class Popen(object):
+    returncode = pid = 0
+    stdin = stdout = stderr = file()
+
+    %(init)s
+
+    def communicate(self, input=None):
+        return %(communicate)r
+    %(wait_signature)s:
+        return self.returncode
+    def poll(self):
+        return self.returncode
+    def send_signal(self, signal):
+        pass
+    def terminate(self):
+        pass
+    def kill(self):
+        pass
+   ''' % {'init': init,
+          'communicate': communicate,
+          'wait_signature': wait_signature})
+
+
+# namedtuple support ###########################################################
+
+def looks_like_namedtuple(node):
+    func = node.func
+    if type(func) is nodes.Getattr:
+        return func.attrname == 'namedtuple'
+    if type(func) is nodes.Name:
+        return func.name == 'namedtuple'
+    return False
+
+def infer_named_tuple(node, context=None):
+    """Specific inference function for namedtuple CallFunc node"""
+    class_node, name, attributes = infer_func_form(node, nodes.Tuple._proxied,
+                                                   context=context)
+    fake = AstroidBuilder(MANAGER).string_build('''
+class %(name)s(tuple):
+    _fields = %(fields)r
+    def _asdict(self):
+        return self.__dict__
+    @classmethod
+    def _make(cls, iterable, new=tuple.__new__, len=len):
+        return new(cls, iterable)
+    def _replace(_self, **kwds):
+        result = _self._make(map(kwds.pop, %(fields)r, _self))
+        if kwds:
+            raise ValueError('Got unexpected field names: %%r' %% list(kwds))
+        return result
+    ''' % {'name': name, 'fields': attributes})
+    class_node.locals['_asdict'] = fake.body[0].locals['_asdict']
+    class_node.locals['_make'] = fake.body[0].locals['_make']
+    class_node.locals['_replace'] = fake.body[0].locals['_replace']
+    class_node.locals['_fields'] = fake.body[0].locals['_fields']
+    # we use UseInferenceDefault, we can't be a generator so return an iterator
+    return iter([class_node])
+
+def infer_enum(node, context=None):
+    """ Specific inference function for enum CallFunc node. """
+    enum_meta = nodes.Class("EnumMeta", 'docstring')
+    class_node = infer_func_form(node, enum_meta,
+                                 context=context, enum=True)[0]
+    return iter([class_node.instanciate_class()])
+
+def infer_enum_class(node):
+    """ Specific inference for enums. """
+    names = set(('Enum', 'IntEnum', 'enum.Enum', 'enum.IntEnum'))
+    for basename in node.basenames:
+        # TODO: doesn't handle subclasses yet. This implementation
+        # is a hack to support enums.
+        if basename not in names:
+            continue
+        if node.root().name == 'enum':
+            # Skip if the class is directly from enum module.
+            break
+        for local, values in node.locals.items():
+            if any(not isinstance(value, nodes.AssName)
+                   for value in values):
+                continue
+
+            stmt = values[0].statement()
+            if isinstance(stmt.targets[0], nodes.Tuple):
+                targets = stmt.targets[0].itered()
+            else:
+                targets = stmt.targets
+
+            new_targets = []
+            for target in targets:
+                # Replace all the assignments with our mocked class.
+                classdef = dedent('''
+                class %(name)s(object):
+                    @property
+                    def value(self):
+                        # Not the best return.
+                        return None 
+                    @property
+                    def name(self):
+                        return %(name)r
+                ''' % {'name': target.name})
+                fake = AstroidBuilder(MANAGER).string_build(classdef)[target.name]
+                fake.parent = target.parent
+                for method in node.mymethods():
+                    fake.locals[method.name] = [method]
+                new_targets.append(fake.instanciate_class())
+            node.locals[local] = new_targets
+        break
+    return node
+
+
+MANAGER.register_transform(nodes.CallFunc, inference_tip(infer_named_tuple),
+                           looks_like_namedtuple)
+MANAGER.register_transform(nodes.CallFunc, inference_tip(infer_enum),
+                           AsStringRegexpPredicate('Enum', 'func'))
+MANAGER.register_transform(nodes.Class, infer_enum_class)
+register_module_extender(MANAGER, 'hashlib', hashlib_transform)
+register_module_extender(MANAGER, 'collections', collections_transform)
+register_module_extender(MANAGER, 'pkg_resources', pkg_resources_transform)
+register_module_extender(MANAGER, 'subprocess', subprocess_transform)
diff --git a/third_party/logilab/astroid/brain/pynose.py b/third_party/logilab/astroid/brain/pynose.py
new file mode 100644
index 0000000..6315a34
--- /dev/null
+++ b/third_party/logilab/astroid/brain/pynose.py
@@ -0,0 +1,56 @@
+# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of astroid.
+#
+# astroid is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 2.1 of the License, or (at your
+# option) any later version.
+#
+# astroid is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
+# for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with astroid. If not, see <http://www.gnu.org/licenses/>.
+
+"""Hooks for nose library."""
+
+import re
+import unittest
+
+from astroid import List, MANAGER, register_module_extender
+from astroid.builder import AstroidBuilder
+
+
+def _pep8(name, caps=re.compile('([A-Z])')):
+    return caps.sub(lambda m: '_' + m.groups()[0].lower(), name)
+
+
+def nose_transform():
+    """Custom transform for the nose.tools module."""
+
+    builder = AstroidBuilder(MANAGER)
+    stub = AstroidBuilder(MANAGER).string_build('''__all__ = []''')
+    unittest_module = builder.module_build(unittest.case)
+    case = unittest_module['TestCase']
+    all_entries = ['ok_', 'eq_']
+
+    for method_name, method in case.locals.items():
+        if method_name.startswith('assert') and '_' not in method_name:
+            pep8_name = _pep8(method_name)
+            all_entries.append(pep8_name)
+            stub[pep8_name] = method[0]
+
+    # Update the __all__ variable, since nose.tools
+    # does this manually with .append.
+    all_assign = stub['__all__'].parent
+    all_object = List(all_entries)
+    all_object.parent = all_assign
+    all_assign.value = all_object
+    return stub
+
+
+register_module_extender(MANAGER, 'nose.tools.trivial', nose_transform)
diff --git a/third_party/logilab/astroid/brain/pysix_moves.py b/third_party/logilab/astroid/brain/pysix_moves.py
new file mode 100644
index 0000000..5648278
--- /dev/null
+++ b/third_party/logilab/astroid/brain/pysix_moves.py
@@ -0,0 +1,225 @@
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of astroid.
+#
+# astroid is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# astroid is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with astroid.  If not, see <http://www.gnu.org/licenses/>.
+
+"""Astroid hooks for six.moves."""
+
+import sys
+from textwrap import dedent
+
+from astroid import MANAGER, register_module_extender
+from astroid.builder import AstroidBuilder
+
+
+def six_moves_transform_py2():
+    return AstroidBuilder(MANAGER).string_build(dedent('''
+    import urllib as _urllib
+    import urllib2 as _urllib2
+    import urlparse as _urlparse
+
+    class Moves(object):
+        import BaseHTTPServer
+        import CGIHTTPServer
+        import SimpleHTTPServer
+
+        from StringIO import StringIO
+        from cStringIO import StringIO as cStringIO
+        from UserDict import UserDict
+        from UserList import UserList
+        from UserString import UserString
+
+        import __builtin__ as builtins
+        import thread as _thread
+        import dummy_thread as _dummy_thread
+        import ConfigParser as configparser
+        import copy_reg as copyreg
+        from itertools import (imap as map,
+                               ifilter as filter,
+                               ifilterfalse as filterfalse,
+                               izip_longest as zip_longest,
+                               izip as zip)
+        import htmlentitydefs as html_entities
+        import HTMLParser as html_parser
+        import httplib as http_client
+        import cookielib as http_cookiejar
+        import Cookie as http_cookies
+        import Queue as queue
+        import repr as reprlib
+        from pipes import quote as shlex_quote
+        import SocketServer as socketserver
+        import SimpleXMLRPCServer as xmlrpc_server
+        import xmlrpclib as xmlrpc_client
+        import _winreg as winreg
+        import robotparser as urllib_robotparser
+
+        input = raw_input
+        intern = intern
+        range = xrange
+        xrange = xrange
+        reduce = reduce
+        reload_module = reload
+
+        class UrllibParse(object):
+            ParseResult = _urlparse.ParseResult
+            SplitResult = _urlparse.SplitResult
+            parse_qs = _urlparse.parse_qs
+            parse_qsl = _urlparse.parse_qsl
+            urldefrag = _urlparse.urldefrag
+            urljoin = _urlparse.urljoin
+            urlparse = _urlparse.urlparse
+            urlsplit = _urlparse.urlsplit
+            urlunparse = _urlparse.urlunparse
+            urlunsplit = _urlparse.urlunsplit
+            quote = _urllib.quote
+            quote_plus = _urllib.quote_plus
+            unquote = _urllib.unquote
+            unquote_plus = _urllib.unquote_plus
+            urlencode = _urllib.urlencode
+            splitquery = _urllib.splitquery
+            splittag = _urllib.splittag
+            splituser = _urllib.splituser
+            uses_fragment = _urlparse.uses_fragment       
+            uses_netloc = _urlparse.uses_netloc
+            uses_params = _urlparse.uses_params
+            uses_query = _urlparse.uses_query
+            uses_relative = _urlparse.uses_relative
+
+        class UrllibError(object):
+            URLError = _urllib2.URLError
+            HTTPError = _urllib2.HTTPError
+            ContentTooShortError = _urllib.ContentTooShortError
+
+        class DummyModule(object):
+            pass
+
+        class UrllibRequest(object):
+            urlopen = _urllib2.urlopen
+            install_opener = _urllib2.install_opener
+            build_opener = _urllib2.build_opener
+            pathname2url = _urllib.pathname2url
+            url2pathname = _urllib.url2pathname
+            getproxies = _urllib.getproxies
+            Request = _urllib2.Request
+            OpenerDirector = _urllib2.OpenerDirector
+            HTTPDefaultErrorHandler = _urllib2.HTTPDefaultErrorHandler
+            HTTPRedirectHandler = _urllib2.HTTPRedirectHandler
+            HTTPCookieProcessor = _urllib2.HTTPCookieProcessor
+            ProxyHandler = _urllib2.ProxyHandler
+            BaseHandler = _urllib2.BaseHandler
+            HTTPPasswordMgr = _urllib2.HTTPPasswordMgr
+            HTTPPasswordMgrWithDefaultRealm = _urllib2.HTTPPasswordMgrWithDefaultRealm
+            AbstractBasicAuthHandler = _urllib2.AbstractBasicAuthHandler
+            HTTPBasicAuthHandler = _urllib2.HTTPBasicAuthHandler
+            ProxyBasicAuthHandler = _urllib2.ProxyBasicAuthHandler
+            AbstractDigestAuthHandler = _urllib2.AbstractDigestAuthHandler
+            HTTPDigestAuthHandler = _urllib2.HTTPDigestAuthHandler
+            ProxyDigestAuthHandler = _urllib2.ProxyDigestAuthHandler
+            HTTPHandler = _urllib2.HTTPHandler
+            HTTPSHandler = _urllib2.HTTPSHandler
+            FileHandler = _urllib2.FileHandler
+            FTPHandler = _urllib2.FTPHandler
+            CacheFTPHandler = _urllib2.CacheFTPHandler
+            UnknownHandler = _urllib2.UnknownHandler
+            HTTPErrorProcessor = _urllib2.HTTPErrorProcessor
+            urlretrieve = _urllib.urlretrieve
+            urlcleanup = _urllib.urlcleanup
+            proxy_bypass = _urllib.proxy_bypass
+
+        urllib_parse = UrllibParse()
+        urllib_error = UrllibError()
+        urllib = DummyModule()
+        urllib.request = UrllibRequest()
+        urllib.parse = UrllibParse()
+        urllib.error = UrllibError()
+
+    moves = Moves()
+
+    '''))
+
+
+def six_moves_transform_py3():
+    return AstroidBuilder(MANAGER).string_build(dedent('''
+    class Moves(object):
+        import _io
+        cStringIO = _io.StringIO
+        filter = filter
+        from itertools import filterfalse
+        input = input
+        from sys import intern
+        map = map
+        range = range
+        from imp import reload as reload_module
+        from functools import reduce
+        from shlex import quote as shlex_quote
+        from io import StringIO
+        from collections import UserDict, UserList, UserString
+        xrange = range
+        zip = zip
+        from itertools import zip_longest
+        import builtins
+        import configparser
+        import copyreg
+        import _dummy_thread
+        import http.cookiejar as http_cookiejar
+        import http.cookies as http_cookies
+        import html.entities as html_entities
+        import html.parser as html_parser
+        import http.client as http_client
+        import http.server
+        BaseHTTPServer = CGIHTTPServer = SimpleHTTPServer = http.server
+        import pickle as cPickle
+        import queue
+        import reprlib
+        import socketserver
+        import _thread
+        import winreg
+        import xmlrpc.server as xmlrpc_server
+        import xmlrpc.client as xmlrpc_client
+        import urllib.robotparser as urllib_robotparser
+        import email.mime.multipart as email_mime_multipart
+        import email.mime.nonmultipart as email_mime_nonmultipart
+        import email.mime.text as email_mime_text
+        import email.mime.base as email_mime_base
+        import urllib.parse as urllib_parse
+        import urllib.error as urllib_error
+        import tkinter
+        import tkinter.dialog as tkinter_dialog
+        import tkinter.filedialog as tkinter_filedialog
+        import tkinter.scrolledtext as tkinter_scrolledtext
+        import tkinter.simpledialog as tkinder_simpledialog
+        import tkinter.tix as tkinter_tix
+        import tkinter.ttk as tkinter_ttk
+        import tkinter.constants as tkinter_constants
+        import tkinter.dnd as tkinter_dnd
+        import tkinter.colorchooser as tkinter_colorchooser
+        import tkinter.commondialog as tkinter_commondialog
+        import tkinter.filedialog as tkinter_tkfiledialog
+        import tkinter.font as tkinter_font
+        import tkinter.messagebox as tkinter_messagebox
+        import urllib.request
+        import urllib.robotparser as urllib_robotparser
+        import urllib.parse as urllib_parse
+        import urllib.error as urllib_error
+    moves = Moves()
+    '''))
+
+if sys.version_info[0] == 2:
+    TRANSFORM = six_moves_transform_py2
+else:
+    TRANSFORM = six_moves_transform_py3
+
+register_module_extender(MANAGER, 'six', TRANSFORM)
diff --git a/third_party/logilab/astng/builder.py b/third_party/logilab/astroid/builder.py
similarity index 63%
rename from third_party/logilab/astng/builder.py
rename to third_party/logilab/astroid/builder.py
index 9309793..1fe7a36 100644
--- a/third_party/logilab/astng/builder.py
+++ b/third_party/logilab/astroid/builder.py
@@ -1,40 +1,38 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-# copyright 2003-2010 Sylvain Thenault, all rights reserved.
-# contact mailto:thenault@gmail.com
 #
-# This file is part of logilab-astng.
+# This file is part of astroid.
 #
-# logilab-astng is free software: you can redistribute it and/or modify it
+# astroid is free software: you can redistribute it and/or modify it
 # under the terms of the GNU Lesser General Public License as published by the
 # Free Software Foundation, either version 2.1 of the License, or (at your
 # option) any later version.
 #
-# logilab-astng is distributed in the hope that it will be useful, but
+# astroid is distributed in the hope that it will be useful, but
 # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 # FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
 # for more details.
 #
 # You should have received a copy of the GNU Lesser General Public License along
-# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
-"""The ASTNGBuilder makes astng from living object and / or from _ast
+# with astroid. If not, see <http://www.gnu.org/licenses/>.
+"""The AstroidBuilder makes astroid from living object and / or from _ast
 
 The builder is not thread safe and can't be used to parse different sources
 at the same time.
 """
+from __future__ import with_statement
 
 __docformat__ = "restructuredtext en"
 
-import sys, re
-from os.path import splitext, basename, dirname, exists, abspath
+import sys
+from os.path import splitext, basename, exists, abspath
 
-from logilab.common.modutils import modpath_from_file
-
-from logilab.astng.exceptions import ASTNGBuildingException, InferenceError
-from logilab.astng.raw_building import InspectBuilder
-from logilab.astng.rebuilder import TreeRebuilder
-from logilab.astng.manager import ASTNGManager
-from logilab.astng.bases import YES, Instance
+from astroid.exceptions import AstroidBuildingException, InferenceError
+from astroid.raw_building import InspectBuilder
+from astroid.rebuilder import TreeRebuilder
+from astroid.manager import AstroidManager
+from astroid.bases import YES, Instance
+from astroid.modutils import modpath_from_file
 
 from _ast import PyCF_ONLY_AST
 def parse(string):
@@ -44,21 +42,21 @@
     from tokenize import detect_encoding
 
     def open_source_file(filename):
-        byte_stream = open(filename, 'bU')
-        encoding = detect_encoding(byte_stream.readline)[0]
-        stream = open(filename, 'U', encoding=encoding)
+        with open(filename, 'rb') as byte_stream:
+            encoding = detect_encoding(byte_stream.readline)[0]
+        stream = open(filename, 'r', newline=None, encoding=encoding)
         try:
             data = stream.read()
-        except UnicodeError, uex: # wrong encodingg
+        except UnicodeError: # wrong encodingg
             # detect_encoding returns utf-8 if no encoding specified
             msg = 'Wrong (%s) or no encoding specified' % encoding
-            raise ASTNGBuildingException(msg)
+            raise AstroidBuildingException(msg)
         return stream, encoding, data
 
 else:
     import re
 
-    _ENCODING_RGX = re.compile("\s*#+.*coding[:=]\s*([-\w.]+)")
+    _ENCODING_RGX = re.compile(r"\s*#+.*coding[:=]\s*([-\w.]+)")
 
     def _guess_encoding(string):
         """get encoding from a python file as string or return None if not found
@@ -81,17 +79,17 @@
 
 # ast NG builder ##############################################################
 
-MANAGER = ASTNGManager()
+MANAGER = AstroidManager()
 
-class ASTNGBuilder(InspectBuilder):
-    """provide astng building methods"""
-    rebuilder = TreeRebuilder()
+class AstroidBuilder(InspectBuilder):
+    """provide astroid building methods"""
 
     def __init__(self, manager=None):
+        InspectBuilder.__init__(self)
         self._manager = manager or MANAGER
 
     def module_build(self, module, modname=None):
-        """build an astng from a living module instance
+        """build an astroid from a living module instance
         """
         node = None
         path = getattr(module, '__file__', None)
@@ -103,52 +101,69 @@
             # this is a built-in module
             # get a partial representation by introspection
             node = self.inspect_build(module, modname=modname, path=path)
+            # we have to handle transformation by ourselves since the rebuilder
+            # isn't called for builtin nodes
+            #
+            # XXX it's then only called for Module nodes, not for underlying
+            # nodes
+            node = self._manager.transform(node)
         return node
 
     def file_build(self, path, modname=None):
-        """build astng from a source code file (i.e. from an ast)
+        """build astroid from a source code file (i.e. from an ast)
 
         path is expected to be a python source file
         """
         try:
             stream, encoding, data = open_source_file(path)
-        except IOError, exc:
+        except IOError as exc:
             msg = 'Unable to load file %r (%s)' % (path, exc)
-            raise ASTNGBuildingException(msg)
-        except SyntaxError, exc: # py3k encoding specification error
-            raise ASTNGBuildingException(exc)
-        except LookupError, exc: # unknown encoding
-            raise ASTNGBuildingException(exc)
-        # get module name if necessary
-        if modname is None:
-            try:
-                modname = '.'.join(modpath_from_file(path))
-            except ImportError:
-                modname = splitext(basename(path))[0]
-        # build astng representation
-        node = self.string_build(data, modname, path)
-        node.file_encoding = encoding
-        return node
+            raise AstroidBuildingException(msg)
+        except SyntaxError as exc: # py3k encoding specification error
+            raise AstroidBuildingException(exc)
+        except LookupError as exc: # unknown encoding
+            raise AstroidBuildingException(exc)
+        with stream:
+            # get module name if necessary
+            if modname is None:
+                try:
+                    modname = '.'.join(modpath_from_file(path))
+                except ImportError:
+                    modname = splitext(basename(path))[0]
+            # build astroid representation
+            module = self._data_build(data, modname, path)
+            return self._post_build(module, encoding)
 
     def string_build(self, data, modname='', path=None):
-        """build astng from source code string and return rebuilded astng"""
+        """build astroid from source code string and return rebuilded astroid"""
         module = self._data_build(data, modname, path)
-        self._manager.astng_cache[module.name] = module
+        module.file_bytes = data.encode('utf-8')
+        return self._post_build(module, 'utf-8')
+
+    def _post_build(self, module, encoding):
+        """handles encoding and delayed nodes
+        after a module has been built
+        """
+        module.file_encoding = encoding
+        self._manager.cache_module(module)
         # post tree building steps after we stored the module in the cache:
         for from_node in module._from_nodes:
+            if from_node.modname == '__future__':
+                for symbol, _ in from_node.names:
+                    module.future_imports.add(symbol)
             self.add_from_names_to_locals(from_node)
         # handle delayed assattr nodes
         for delayed in module._delayed_assattr:
             self.delayed_assattr(delayed)
-        if modname:
-            for transformer in self._manager.transformers:
-                transformer(module)
         return module
 
     def _data_build(self, data, modname, path):
         """build tree node from data and add some informations"""
         # this method could be wrapped with a pickle/cache function
-        node = parse(data + '\n')
+        try:
+            node = parse(data + '\n')
+        except TypeError as exc:
+            raise AstroidBuildingException(exc)
         if path is not None:
             node_file = abspath(path)
         else:
@@ -158,11 +173,10 @@
             package = True
         else:
             package = path and path.find('__init__.py') > -1 or False
-        self.rebuilder.init()
-        module = self.rebuilder.visit_module(node, modname, package)
-        module.file = module.path = node_file
-        module._from_nodes = self.rebuilder._from_nodes
-        module._delayed_assattr = self.rebuilder._delayed_assattr
+        rebuilder = TreeRebuilder(self._manager)
+        module = rebuilder.visit_module(node, modname, node_file, package)
+        module._from_nodes = rebuilder._from_nodes
+        module._delayed_assattr = rebuilder._delayed_assattr
         return module
 
     def add_from_names_to_locals(self, node):
@@ -176,8 +190,8 @@
         for (name, asname) in node.names:
             if name == '*':
                 try:
-                    imported = node.root().import_module(node.modname)
-                except ASTNGBuildingException:
+                    imported = node.do_import_module()
+                except InferenceError:
                     continue
                 for name in imported.wildcard_import_names():
                     node.parent.set_local(name, node)
diff --git a/third_party/logilab/astroid/exceptions.py b/third_party/logilab/astroid/exceptions.py
new file mode 100644
index 0000000..3889e2e
--- /dev/null
+++ b/third_party/logilab/astroid/exceptions.py
@@ -0,0 +1,51 @@
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of astroid.
+#
+# astroid is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 2.1 of the License, or (at your
+# option) any later version.
+#
+# astroid is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
+# for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with astroid. If not, see <http://www.gnu.org/licenses/>.
+"""this module contains exceptions used in the astroid library
+
+"""
+
+__doctype__ = "restructuredtext en"
+
+class AstroidError(Exception):
+    """base exception class for all astroid related exceptions"""
+
+class AstroidBuildingException(AstroidError):
+    """exception class when we are unable to build an astroid representation"""
+
+class ResolveError(AstroidError):
+    """base class of astroid resolution/inference error"""
+
+class NotFoundError(ResolveError):
+    """raised when we are unable to resolve a name"""
+
+class InferenceError(ResolveError):
+    """raised when we are unable to infer a node"""
+
+class UseInferenceDefault(Exception):
+    """exception to be raised in custom inference function to indicate that it
+    should go back to the default behaviour
+    """
+
+class UnresolvableName(InferenceError):
+    """raised when we are unable to resolve a name"""
+
+class NoDefault(AstroidError):
+    """raised by function's `default_value` method when an argument has
+    no default value
+    """
+
diff --git a/third_party/logilab/astng/inference.py b/third_party/logilab/astroid/inference.py
similarity index 60%
rename from third_party/logilab/astng/inference.py
rename to third_party/logilab/astroid/inference.py
index 62bd7d9..f29b3d1 100644
--- a/third_party/logilab/astng/inference.py
+++ b/third_party/logilab/astroid/inference.py
@@ -1,43 +1,43 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-# copyright 2003-2010 Sylvain Thenault, all rights reserved.
-# contact mailto:thenault@gmail.com
 #
-# This file is part of logilab-astng.
+# This file is part of astroid.
 #
-# logilab-astng is free software: you can redistribute it and/or modify it
+# astroid is free software: you can redistribute it and/or modify it
 # under the terms of the GNU Lesser General Public License as published by the
 # Free Software Foundation, either version 2.1 of the License, or (at your
 # option) any later version.
 #
-# logilab-astng is distributed in the hope that it will be useful, but
+# astroid is distributed in the hope that it will be useful, but
 # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 # FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
 # for more details.
 #
 # You should have received a copy of the GNU Lesser General Public License along
-# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
-"""this module contains a set of functions to handle inference on astng trees
+# with astroid. If not, see <http://www.gnu.org/licenses/>.
+"""this module contains a set of functions to handle inference on astroid trees
 """
 
 __doctype__ = "restructuredtext en"
 
 from itertools import chain
-import sys
 
-from logilab.astng import nodes
+from astroid import nodes
 
-from logilab.astng.manager import ASTNGManager
-from logilab.astng.exceptions import (ASTNGBuildingException, ASTNGError,
-    InferenceError, NoDefault, NotFoundError, UnresolvableName)
-from logilab.astng.bases import YES, Instance, InferenceContext, Generator, \
-     _infer_stmts, copy_context, path_wrapper, raise_if_nothing_infered
-from logilab.astng.protocols import _arguments_infer_argname
+from astroid.manager import AstroidManager
+from astroid.exceptions import (AstroidError, InferenceError, NoDefault,
+                                NotFoundError, UnresolvableName)
+from astroid.bases import (YES, Instance, InferenceContext,
+                           _infer_stmts, path_wrapper,
+                           raise_if_nothing_infered)
+from astroid.protocols import (
+    _arguments_infer_argname,
+    BIN_OP_METHOD, UNARY_OP_METHOD)
 
-MANAGER = ASTNGManager()
+MANAGER = AstroidManager()
 
 
-class CallContext:
+class CallContext(object):
     """when inferring a function call, this class is used to remember values
     given as argument
     """
@@ -58,7 +58,7 @@
         try:
             return self.nargs[name].infer(context)
         except KeyError:
-            # Function.args.args can be None in astng (means that we don't have
+            # Function.args.args can be None in astroid (means that we don't have
             # information on argnames)
             argindex = funcnode.args.find_argname(name)[0]
             if argindex is not None:
@@ -75,6 +75,11 @@
                         return iter((boundnode,))
                     if funcnode.type == 'classmethod':
                         return iter((boundnode,))
+                # if we have a method, extract one position
+                # from the index, so we'll take in account
+                # the extra parameter represented by `self` or `cls`
+                if funcnode.type in ('method', 'classmethod'):
+                    argindex -= 1
                 # 2. search arg index
                 try:
                     return self.args[argindex].infer(context)
@@ -129,142 +134,168 @@
     """inference's end for node such as Module, Class, Function, Const...
     """
     yield self
-nodes.Module.infer = infer_end
-nodes.Class.infer = infer_end
-nodes.Function.infer = infer_end
-nodes.Lambda.infer = infer_end
-nodes.Const.infer = infer_end
-nodes.List.infer = infer_end
-nodes.Tuple.infer = infer_end
-nodes.Dict.infer = infer_end
+nodes.Module._infer = infer_end
+nodes.Class._infer = infer_end
+nodes.Function._infer = infer_end
+nodes.Lambda._infer = infer_end
+nodes.Const._infer = infer_end
+nodes.List._infer = infer_end
+nodes.Tuple._infer = infer_end
+nodes.Dict._infer = infer_end
+nodes.Set._infer = infer_end
 
+def _higher_function_scope(node):
+    """ Search for the first function which encloses the given
+    scope. This can be used for looking up in that function's
+    scope, in case looking up in a lower scope for a particular
+    name fails.
+
+    :param node: A scope node.
+    :returns:
+        ``None``, if no parent function scope was found,
+        otherwise an instance of :class:`astroid.scoped_nodes.Function`,
+        which encloses the given node.
+    """
+    current = node
+    while current.parent and not isinstance(current.parent, nodes.Function):
+        current = current.parent
+    if current and current.parent:
+        return current.parent
 
 def infer_name(self, context=None):
     """infer a Name: use name lookup rules"""
     frame, stmts = self.lookup(self.name)
     if not stmts:
-        raise UnresolvableName(self.name)
-    context = context.clone()
-    context.lookupname = self.name
-    return _infer_stmts(stmts, context, frame)
-nodes.Name.infer = path_wrapper(infer_name)
+        # Try to see if the name is enclosed in a nested function
+        # and use the higher (first function) scope for searching.
+        # TODO: should this be promoted to other nodes as well?
+        parent_function = _higher_function_scope(self.scope())
+        if parent_function:
+            _, stmts = parent_function.lookup(self.name)
+
+        if not stmts:
+            raise UnresolvableName(self.name)
+    return _infer_stmts(stmts, context, frame, self.name)
+nodes.Name._infer = path_wrapper(infer_name)
 nodes.AssName.infer_lhs = infer_name # won't work with a path wrapper
 
 
 def infer_callfunc(self, context=None):
     """infer a CallFunc node by trying to guess what the function returns"""
-    callcontext = context.clone()
-    callcontext.callcontext = CallContext(self.args, self.starargs, self.kwargs)
-    callcontext.boundnode = None
+    if context is None:
+        context = InferenceContext()
     for callee in self.func.infer(context):
-        if callee is YES:
-            yield callee
-            continue
-        try:
-            if hasattr(callee, 'infer_call_result'):
-                for infered in callee.infer_call_result(self, callcontext):
-                    yield infered
-        except InferenceError:
-            ## XXX log error ?
-            continue
-nodes.CallFunc.infer = path_wrapper(raise_if_nothing_infered(infer_callfunc))
+        with context.scope(
+            callcontext=CallContext(self.args, self.starargs, self.kwargs),
+            boundnode=None,
+        ):
+            if callee is YES:
+                yield callee
+                continue
+            try:
+                if hasattr(callee, 'infer_call_result'):
+                    for infered in callee.infer_call_result(self, context):
+                        yield infered
+            except InferenceError:
+                ## XXX log error ?
+                continue
+nodes.CallFunc._infer = path_wrapper(raise_if_nothing_infered(infer_callfunc))
 
 
-def infer_import(self, context=None, asname=True):
+def infer_import(self, context=None, asname=True, lookupname=None):
     """infer an Import node: return the imported module/object"""
-    name = context.lookupname
-    if name is None:
+    if lookupname is None:
         raise InferenceError()
     if asname:
-        yield self.do_import_module(self.real_name(name))
+        yield self.do_import_module(self.real_name(lookupname))
     else:
-        yield self.do_import_module(name)
-nodes.Import.infer = path_wrapper(infer_import)
+        yield self.do_import_module(lookupname)
+nodes.Import._infer = path_wrapper(infer_import)
 
 def infer_name_module(self, name):
     context = InferenceContext()
-    context.lookupname = name
-    return self.infer(context, asname=False)
+    return self.infer(context, asname=False, lookupname=name)
 nodes.Import.infer_name_module = infer_name_module
 
 
-def infer_from(self, context=None, asname=True):
+def infer_from(self, context=None, asname=True, lookupname=None):
     """infer a From nodes: return the imported module/object"""
-    name = context.lookupname
-    if name is None:
+    if lookupname is None:
         raise InferenceError()
     if asname:
-        name = self.real_name(name)
-    module = self.do_import_module(self.modname)
+        lookupname = self.real_name(lookupname)
+    module = self.do_import_module()
     try:
-        context = copy_context(context)
-        context.lookupname = name
-        return _infer_stmts(module.getattr(name, ignore_locals=module is self.root()), context)
+        return _infer_stmts(module.getattr(lookupname, ignore_locals=module is self.root()), context, lookupname=lookupname)
     except NotFoundError:
-        raise InferenceError(name)
-nodes.From.infer = path_wrapper(infer_from)
+        raise InferenceError(lookupname)
+nodes.From._infer = path_wrapper(infer_from)
 
 
 def infer_getattr(self, context=None):
     """infer a Getattr node by using getattr on the associated object"""
-    #context = context.clone()
+    if not context:
+        context = InferenceContext()
     for owner in self.expr.infer(context):
         if owner is YES:
             yield owner
             continue
         try:
-            context.boundnode = owner
-            for obj in owner.igetattr(self.attrname, context):
-                yield obj
-            context.boundnode = None
+            with context.scope(boundnode=owner):
+                for obj in owner.igetattr(self.attrname, context):
+                    yield obj
         except (NotFoundError, InferenceError):
-            context.boundnode = None
+            pass
         except AttributeError:
             # XXX method / function
-            context.boundnode = None
-nodes.Getattr.infer = path_wrapper(raise_if_nothing_infered(infer_getattr))
+            pass
+nodes.Getattr._infer = path_wrapper(raise_if_nothing_infered(infer_getattr))
 nodes.AssAttr.infer_lhs = raise_if_nothing_infered(infer_getattr) # # won't work with a path wrapper
 
 
-def infer_global(self, context=None):
-    if context.lookupname is None:
+def infer_global(self, context=None, lookupname=None):
+    if lookupname is None:
         raise InferenceError()
     try:
-        return _infer_stmts(self.root().getattr(context.lookupname), context)
+        return _infer_stmts(self.root().getattr(lookupname), context)
     except NotFoundError:
         raise InferenceError()
-nodes.Global.infer = path_wrapper(infer_global)
+nodes.Global._infer = path_wrapper(infer_global)
 
 
 def infer_subscript(self, context=None):
     """infer simple subscription such as [1,2,3][0] or (1,2,3)[-1]"""
-    if isinstance(self.slice, nodes.Index):
-        index = self.slice.value.infer(context).next()
-        if index is YES:
-            yield YES
-            return
+    value = next(self.value.infer(context))
+    if value is YES:
+        yield YES
+        return
+
+    index = next(self.slice.infer(context))
+    if index is YES:
+        yield YES
+        return
+
+    if isinstance(index, nodes.Const):
         try:
-            # suppose it's a Tuple/List node (attribute error else)
-            assigned = self.value.getitem(index.value, context)
+            assigned = value.getitem(index.value, context)
         except AttributeError:
             raise InferenceError()
         except (IndexError, TypeError):
             yield YES
             return
+
+        # Prevent inferring if the infered subscript
+        # is the same as the original subscripted object.
+        if self is assigned:
+            yield YES
+            return
         for infered in assigned.infer(context):
             yield infered
     else:
         raise InferenceError()
-nodes.Subscript.infer = path_wrapper(infer_subscript)
+nodes.Subscript._infer = path_wrapper(infer_subscript)
 nodes.Subscript.infer_lhs = raise_if_nothing_infered(infer_subscript)
 
-
-UNARY_OP_METHOD = {'+': '__pos__',
-                   '-': '__neg__',
-                   '~': '__invert__',
-                   'not': None, # XXX not '__nonzero__'
-                  }
-
 def infer_unaryop(self, context=None):
     for operand in self.operand.infer(context):
         try:
@@ -285,22 +316,7 @@
                     raise
                 except:
                     yield YES
-nodes.UnaryOp.infer = path_wrapper(infer_unaryop)
-
-
-BIN_OP_METHOD = {'+':  '__add__',
-                 '-':  '__sub__',
-                 '/':  '__div__',
-                 '//': '__floordiv__',
-                 '*':  '__mul__',
-                 '**': '__power__',
-                 '%':  '__mod__',
-                 '&':  '__and__',
-                 '|':  '__or__',
-                 '^':  '__xor__',
-                 '<<': '__lshift__',
-                 '>>': '__rshift__',
-                 }
+nodes.UnaryOp._infer = path_wrapper(infer_unaryop)
 
 def _infer_binop(operator, operand1, operand2, context, failures=None):
     if operand1 is YES:
@@ -330,15 +346,14 @@
         for rhs in self.right.infer(context):
             for val in _infer_binop(self.op, rhs, lhs, context):
                 yield val
-nodes.BinOp.infer = path_wrapper(infer_binop)
+nodes.BinOp._infer = path_wrapper(infer_binop)
 
 
-def infer_arguments(self, context=None):
-    name = context.lookupname
-    if name is None:
+def infer_arguments(self, context=None, lookupname=None):
+    if lookupname is None:
         raise InferenceError()
-    return _arguments_infer_argname(self, name, context)
-nodes.Arguments.infer = infer_arguments
+    return _arguments_infer_argname(self, lookupname, context)
+nodes.Arguments._infer = infer_arguments
 
 
 def infer_ass(self, context=None):
@@ -350,8 +365,8 @@
         return stmt.infer(context)
     stmts = list(self.assigned_stmts(context=context))
     return _infer_stmts(stmts, context)
-nodes.AssName.infer = path_wrapper(infer_ass)
-nodes.AssAttr.infer = path_wrapper(infer_ass)
+nodes.AssName._infer = path_wrapper(infer_ass)
+nodes.AssAttr._infer = path_wrapper(infer_ass)
 
 def infer_augassign(self, context=None):
     failures = []
@@ -362,7 +377,7 @@
         for rhs in self.value.infer(context):
             for val in _infer_binop(self.op, rhs, lhs, context):
                 yield val
-nodes.AugAssign.infer = path_wrapper(infer_augassign)
+nodes.AugAssign._infer = path_wrapper(infer_augassign)
 
 
 # no infer method on DelName and DelAttr (expected InferenceError)
@@ -373,10 +388,14 @@
         yield YES
     else:
         try:
-            for infered in MANAGER.infer_astng_from_something(self.object,
-                                                              context=context):
+            for infered in MANAGER.infer_ast_from_something(self.object,
+                                                            context=context):
                 yield infered
-        except ASTNGError:
+        except AstroidError:
             yield YES
-nodes.EmptyNode.infer = path_wrapper(infer_empty_node)
+nodes.EmptyNode._infer = path_wrapper(infer_empty_node)
 
+
+def infer_index(self, context=None):
+    return self.value.infer(context)
+nodes.Index._infer = infer_index
diff --git a/third_party/logilab/astng/inspector.py b/third_party/logilab/astroid/inspector.py
similarity index 75%
rename from third_party/logilab/astng/inspector.py
rename to third_party/logilab/astroid/inspector.py
index a4abd1f..1fc3192 100644
--- a/third_party/logilab/astng/inspector.py
+++ b/third_party/logilab/astroid/inspector.py
@@ -1,35 +1,21 @@
-# This program is free software; you can redistribute it and/or modify it under
-# the terms of the GNU Lesser General Public License as published by the Free Software
-# Foundation; either version 2 of the License, or (at your option) any later
-# version.
-#
-# This program is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License along with
-# this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-# copyright 2003-2010 Sylvain Thenault, all rights reserved.
-# contact mailto:thenault@gmail.com
 #
-# This file is part of logilab-astng.
+# This file is part of astroid.
 #
-# logilab-astng is free software: you can redistribute it and/or modify it
+# astroid is free software: you can redistribute it and/or modify it
 # under the terms of the GNU Lesser General Public License as published by the
 # Free Software Foundation, either version 2.1 of the License, or (at your
 # option) any later version.
 #
-# logilab-astng is distributed in the hope that it will be useful, but
+# astroid is distributed in the hope that it will be useful, but
 # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 # FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
 # for more details.
 #
 # You should have received a copy of the GNU Lesser General Public License along
-# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
-"""visitor doing some postprocessing on the astng tree.
+# with astroid. If not, see <http://www.gnu.org/licenses/>.
+"""visitor doing some postprocessing on the astroid tree.
 Try to resolve definitions (namespace) dictionary, relationship...
 
 This module has been imported from pyreverse
@@ -39,25 +25,23 @@
 
 from os.path import dirname
 
-from logilab.common.modutils import get_module_part, is_relative, \
-     is_standard_module
+import astroid
+from astroid.exceptions import InferenceError
+from astroid.utils import LocalsVisitor
+from astroid.modutils import get_module_part, is_relative, is_standard_module
 
-from logilab import astng
-from logilab.astng.exceptions import InferenceError
-from logilab.astng.utils import LocalsVisitor
-
-class IdGeneratorMixIn:
+class IdGeneratorMixIn(object):
     """
     Mixin adding the ability to generate integer uid
     """
     def __init__(self, start_value=0):
         self.id_count = start_value
-    
+
     def init_counter(self, start_value=0):
         """init the id counter
         """
         self.id_count = start_value
-        
+
     def generate_id(self):
         """generate a new identifier
         """
@@ -68,26 +52,26 @@
 class Linker(IdGeneratorMixIn, LocalsVisitor):
     """
     walk on the project tree and resolve relationships.
-    
+
     According to options the following attributes may be added to visited nodes:
-    
+
     * uid,
-      a unique identifier for the node (on astng.Project, astng.Module,
-      astng.Class and astng.locals_type). Only if the linker has been instantiated
+      a unique identifier for the node (on astroid.Project, astroid.Module,
+      astroid.Class and astroid.locals_type). Only if the linker has been instantiated
       with tag=True parameter (False by default).
-            
+
     * Function
       a mapping from locals names to their bounded value, which may be a
-      constant like a string or an integer, or an astng node (on astng.Module,
-      astng.Class and astng.Function).
+      constant like a string or an integer, or an astroid node (on astroid.Module,
+      astroid.Class and astroid.Function).
 
     * instance_attrs_type
-      as locals_type but for klass member attributes (only on astng.Class)
-      
+      as locals_type but for klass member attributes (only on astroid.Class)
+
     * implements,
-      list of implemented interface _objects_ (only on astng.Class nodes)
+      list of implemented interface _objects_ (only on astroid.Class nodes)
     """
-    
+
     def __init__(self, project, inherited_interfaces=0, tag=False):
         IdGeneratorMixIn.__init__(self)
         LocalsVisitor.__init__(self)
@@ -98,30 +82,30 @@
         # visited project
         self.project = project
 
-        
+
     def visit_project(self, node):
-        """visit an astng.Project node
-        
+        """visit an astroid.Project node
+
          * optionally tag the node with a unique id
         """
         if self.tag:
             node.uid = self.generate_id()
         for module in node.modules:
             self.visit(module)
-            
+
     def visit_package(self, node):
-        """visit an astng.Package node
-        
+        """visit an astroid.Package node
+
          * optionally tag the node with a unique id
         """
         if self.tag:
             node.uid = self.generate_id()
         for subelmt in node.values():
             self.visit(subelmt)
-            
+
     def visit_module(self, node):
-        """visit an astng.Module node
-        
+        """visit an astroid.Module node
+
          * set the locals_type mapping
          * set the depends mapping
          * optionally tag the node with a unique id
@@ -132,10 +116,10 @@
         node.depends = []
         if self.tag:
             node.uid = self.generate_id()
-    
+
     def visit_class(self, node):
-        """visit an astng.Class node
-        
+        """visit an astroid.Class node
+
          * set the locals_type and instance_attrs_type mappings
          * set the implements list and build it
          * optionally tag the node with a unique id
@@ -162,8 +146,8 @@
             node.implements = ()
 
     def visit_function(self, node):
-        """visit an astng.Function node
-        
+        """visit an astroid.Function node
+
          * set the locals_type mapping
          * optionally tag the node with a unique id
         """
@@ -172,14 +156,14 @@
         node.locals_type = {}
         if self.tag:
             node.uid = self.generate_id()
-            
+
     link_project = visit_project
     link_module = visit_module
     link_class = visit_class
     link_function = visit_function
-        
+
     def visit_assname(self, node):
-        """visit an astng.AssName node
+        """visit an astroid.AssName node
 
         handle locals_type
         """
@@ -192,7 +176,7 @@
             frame = node.frame()
         else:
             # the name has been defined as 'global' in the frame and belongs
-            # there. Btw the frame is not yet visited as the name is in the 
+            # there. Btw the frame is not yet visited as the name is in the
             # root locals; the frame hence has no locals_type attribute
             frame = node.root()
         try:
@@ -204,11 +188,11 @@
                         already_infered.append(valnode)
             except KeyError:
                 frame.locals_type[node.name] = values
-        except astng.InferenceError:
+        except astroid.InferenceError:
             pass
 
     def handle_assattr_type(self, node, parent):
-        """handle an astng.AssAttr node
+        """handle an astroid.AssAttr node
 
         handle instance_attrs_type
         """
@@ -221,23 +205,23 @@
                         already_infered.append(valnode)
             except KeyError:
                 parent.instance_attrs_type[node.attrname] = values
-        except astng.InferenceError:
+        except astroid.InferenceError:
             pass
-            
+
     def visit_import(self, node):
-        """visit an astng.Import node
-        
+        """visit an astroid.Import node
+
         resolve module dependencies
         """
         context_file = node.root().file
         for name in node.names:
             relative = is_relative(name[0], context_file)
             self._imported_module(node, name[0], relative)
-        
+
 
     def visit_from(self, node):
-        """visit an astng.From node
-        
+        """visit an astroid.From node
+
         resolve module dependencies
         """
         basename = node.modname
@@ -254,13 +238,13 @@
             if fullname.find('.') > -1:
                 try:
                     # XXX: don't use get_module_part, missing package precedence
-                    fullname = get_module_part(fullname)
+                    fullname = get_module_part(fullname, context_file)
                 except ImportError:
                     continue
             if fullname != basename:
                 self._imported_module(node, fullname, relative)
 
-        
+
     def compute_module(self, context_name, mod_path):
         """return true if the module should be added to dependencies"""
         package_dir = dirname(self.project.path)
@@ -269,7 +253,7 @@
         elif is_standard_module(mod_path, (package_dir,)):
             return 1
         return 0
-    
+
     # protected methods ########################################################
 
     def _imported_module(self, node, mod_path, relative):
diff --git a/third_party/logilab/astroid/manager.py b/third_party/logilab/astroid/manager.py
new file mode 100644
index 0000000..fe78713
--- /dev/null
+++ b/third_party/logilab/astroid/manager.py
@@ -0,0 +1,390 @@
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of astroid.
+#
+# astroid is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 2.1 of the License, or (at your
+# option) any later version.
+#
+# astroid is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
+# for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with astroid. If not, see <http://www.gnu.org/licenses/>.
+"""astroid manager: avoid multiple astroid build of a same module when
+possible by providing a class responsible to get astroid representation
+from various source and using a cache of built modules)
+"""
+from __future__ import print_function
+
+__docformat__ = "restructuredtext en"
+
+import collections
+import imp
+import os
+from os.path import dirname, join, isdir, exists
+from warnings import warn
+import zipimport
+
+from logilab.common.configuration import OptionsProviderMixIn
+
+from astroid.exceptions import AstroidBuildingException
+from astroid import modutils
+
+
+def astroid_wrapper(func, modname):
+    """wrapper to give to AstroidManager.project_from_files"""
+    print('parsing %s...' % modname)
+    try:
+        return func(modname)
+    except AstroidBuildingException as exc:
+        print(exc)
+    except Exception as exc:
+        import traceback
+        traceback.print_exc()
+
+def _silent_no_wrap(func, modname):
+    """silent wrapper that doesn't do anything; can be used for tests"""
+    return func(modname)
+
+def safe_repr(obj):
+    try:
+        return repr(obj)
+    except:
+        return '???'
+
+
+
+class AstroidManager(OptionsProviderMixIn):
+    """the astroid manager, responsible to build astroid from files
+     or modules.
+
+    Use the Borg pattern.
+    """
+
+    name = 'astroid loader'
+    options = (("ignore",
+                {'type' : "csv", 'metavar' : "<file>",
+                 'dest' : "black_list", "default" : ('CVS',),
+                 'help' : "add <file> (may be a directory) to the black list\
+. It should be a base name, not a path. You may set this option multiple times\
+."}),
+               ("project",
+                {'default': "No Name", 'type' : 'string', 'short': 'p',
+                 'metavar' : '<project name>',
+                 'help' : 'set the project name.'}),
+              )
+    brain = {}
+    def __init__(self):
+        self.__dict__ = AstroidManager.brain
+        if not self.__dict__:
+            OptionsProviderMixIn.__init__(self)
+            self.load_defaults()
+            # NOTE: cache entries are added by the [re]builder
+            self.astroid_cache = {}
+            self._mod_file_cache = {}
+            self.transforms = collections.defaultdict(list)
+            self._failed_import_hooks = []
+            self.always_load_extensions = False
+            self.extension_package_whitelist = set()
+
+    def ast_from_file(self, filepath, modname=None, fallback=True, source=False):
+        """given a module name, return the astroid object"""
+        try:
+            filepath = modutils.get_source_file(filepath, include_no_ext=True)
+            source = True
+        except modutils.NoSourceFile:
+            pass
+        if modname is None:
+            try:
+                modname = '.'.join(modutils.modpath_from_file(filepath))
+            except ImportError:
+                modname = filepath
+        if modname in self.astroid_cache and self.astroid_cache[modname].file == filepath:
+            return self.astroid_cache[modname]
+        if source:
+            from astroid.builder import AstroidBuilder
+            return AstroidBuilder(self).file_build(filepath, modname)
+        elif fallback and modname:
+            return self.ast_from_module_name(modname)
+        raise AstroidBuildingException('unable to get astroid for file %s' %
+                                       filepath)
+
+    def _build_stub_module(self, modname):
+        from astroid.builder import AstroidBuilder
+        return AstroidBuilder(self).string_build('', modname)
+
+    def _can_load_extension(self, modname):
+        if self.always_load_extensions:
+            return True
+        if modutils.is_standard_module(modname):
+            return True
+        parts = modname.split('.')
+        return any(
+            '.'.join(parts[:x]) in self.extension_package_whitelist
+            for x in range(1, len(parts) + 1))
+
+    def ast_from_module_name(self, modname, context_file=None):
+        """given a module name, return the astroid object"""
+        if modname in self.astroid_cache:
+            return self.astroid_cache[modname]
+        if modname == '__main__':
+            return self._build_stub_module(modname)
+        old_cwd = os.getcwd()
+        if context_file:
+            os.chdir(dirname(context_file))
+        try:
+            filepath, mp_type = self.file_from_module_name(modname, context_file)
+            if mp_type == modutils.PY_ZIPMODULE:
+                module = self.zip_import_data(filepath)
+                if module is not None:
+                    return module
+            elif mp_type in (imp.C_BUILTIN, imp.C_EXTENSION):
+                if mp_type == imp.C_EXTENSION and not self._can_load_extension(modname):
+                    return self._build_stub_module(modname)
+                try:
+                    module = modutils.load_module_from_name(modname)
+                except Exception as ex:
+                    msg = 'Unable to load module %s (%s)' % (modname, ex)
+                    raise AstroidBuildingException(msg)
+                return self.ast_from_module(module, modname)
+            elif mp_type == imp.PY_COMPILED:
+                raise AstroidBuildingException("Unable to load compiled module %s" % (modname,))
+            if filepath is None:
+                raise AstroidBuildingException("Unable to load module %s" % (modname,))
+            return self.ast_from_file(filepath, modname, fallback=False)
+        except AstroidBuildingException as e:
+            for hook in self._failed_import_hooks:
+                try:
+                    return hook(modname)
+                except AstroidBuildingException:
+                    pass
+            raise e
+        finally:
+            os.chdir(old_cwd)
+
+    def zip_import_data(self, filepath):
+        if zipimport is None:
+            return None
+        from astroid.builder import AstroidBuilder
+        builder = AstroidBuilder(self)
+        for ext in ('.zip', '.egg'):
+            try:
+                eggpath, resource = filepath.rsplit(ext + os.path.sep, 1)
+            except ValueError:
+                continue
+            try:
+                importer = zipimport.zipimporter(eggpath + ext)
+                zmodname = resource.replace(os.path.sep, '.')
+                if importer.is_package(resource):
+                    zmodname = zmodname + '.__init__'
+                module = builder.string_build(importer.get_source(resource),
+                                              zmodname, filepath)
+                return module
+            except:
+                continue
+        return None
+
+    def file_from_module_name(self, modname, contextfile):
+        try:
+            value = self._mod_file_cache[(modname, contextfile)]
+        except KeyError:
+            try:
+                value = modutils.file_info_from_modpath(
+                    modname.split('.'), context_file=contextfile)
+            except ImportError as ex:
+                msg = 'Unable to load module %s (%s)' % (modname, ex)
+                value = AstroidBuildingException(msg)
+            self._mod_file_cache[(modname, contextfile)] = value
+        if isinstance(value, AstroidBuildingException):
+            raise value
+        return value
+
+    def ast_from_module(self, module, modname=None):
+        """given an imported module, return the astroid object"""
+        modname = modname or module.__name__
+        if modname in self.astroid_cache:
+            return self.astroid_cache[modname]
+        try:
+            # some builtin modules don't have __file__ attribute
+            filepath = module.__file__
+            if modutils.is_python_source(filepath):
+                return self.ast_from_file(filepath, modname)
+        except AttributeError:
+            pass
+        from astroid.builder import AstroidBuilder
+        return AstroidBuilder(self).module_build(module, modname)
+
+    def ast_from_class(self, klass, modname=None):
+        """get astroid for the given class"""
+        if modname is None:
+            try:
+                modname = klass.__module__
+            except AttributeError:
+                raise AstroidBuildingException(
+                    'Unable to get module for class %s' % safe_repr(klass))
+        modastroid = self.ast_from_module_name(modname)
+        return modastroid.getattr(klass.__name__)[0] # XXX
+
+
+    def infer_ast_from_something(self, obj, context=None):
+        """infer astroid for the given class"""
+        if hasattr(obj, '__class__') and not isinstance(obj, type):
+            klass = obj.__class__
+        else:
+            klass = obj
+        try:
+            modname = klass.__module__
+        except AttributeError:
+            raise AstroidBuildingException(
+                'Unable to get module for %s' % safe_repr(klass))
+        except Exception as ex:
+            raise AstroidBuildingException(
+                'Unexpected error while retrieving module for %s: %s'
+                % (safe_repr(klass), ex))
+        try:
+            name = klass.__name__
+        except AttributeError:
+            raise AstroidBuildingException(
+                'Unable to get name for %s' % safe_repr(klass))
+        except Exception as ex:
+            raise AstroidBuildingException(
+                'Unexpected error while retrieving name for %s: %s'
+                % (safe_repr(klass), ex))
+        # take care, on living object __module__ is regularly wrong :(
+        modastroid = self.ast_from_module_name(modname)
+        if klass is obj:
+            for  infered in modastroid.igetattr(name, context):
+                yield infered
+        else:
+            for infered in modastroid.igetattr(name, context):
+                yield infered.instanciate_class()
+
+    def project_from_files(self, files, func_wrapper=astroid_wrapper,
+                           project_name=None, black_list=None):
+        """return a Project from a list of files or modules"""
+        # build the project representation
+        project_name = project_name or self.config.project
+        black_list = black_list or self.config.black_list
+        project = Project(project_name)
+        for something in files:
+            if not exists(something):
+                fpath = modutils.file_from_modpath(something.split('.'))
+            elif isdir(something):
+                fpath = join(something, '__init__.py')
+            else:
+                fpath = something
+            astroid = func_wrapper(self.ast_from_file, fpath)
+            if astroid is None:
+                continue
+            # XXX why is first file defining the project.path ?
+            project.path = project.path or astroid.file
+            project.add_module(astroid)
+            base_name = astroid.name
+            # recurse in package except if __init__ was explicitly given
+            if astroid.package and something.find('__init__') == -1:
+                # recurse on others packages / modules if this is a package
+                for fpath in modutils.get_module_files(dirname(astroid.file),
+                                                       black_list):
+                    astroid = func_wrapper(self.ast_from_file, fpath)
+                    if astroid is None or astroid.name == base_name:
+                        continue
+                    project.add_module(astroid)
+        return project
+
+    def register_transform(self, node_class, transform, predicate=None):
+        """Register `transform(node)` function to be applied on the given
+        Astroid's `node_class` if `predicate` is None or returns true
+        when called with the node as argument.
+
+        The transform function may return a value which is then used to
+        substitute the original node in the tree.
+        """
+        self.transforms[node_class].append((transform, predicate))
+
+    def unregister_transform(self, node_class, transform, predicate=None):
+        """Unregister the given transform."""
+        self.transforms[node_class].remove((transform, predicate))
+
+    def register_failed_import_hook(self, hook):
+        """Registers a hook to resolve imports that cannot be found otherwise.
+
+        `hook` must be a function that accepts a single argument `modname` which
+        contains the name of the module or package that could not be imported.
+        If `hook` can resolve the import, must return a node of type `astroid.Module`,
+        otherwise, it must raise `AstroidBuildingException`.
+        """
+        self._failed_import_hooks.append(hook)
+
+    def transform(self, node):
+        """Call matching transforms for the given node if any and return the
+        transformed node.
+        """
+        cls = node.__class__
+        if cls not in self.transforms:
+            # no transform registered for this class of node
+            return node
+
+        transforms = self.transforms[cls]
+        orig_node = node  # copy the reference
+        for transform_func, predicate in transforms:
+            if predicate is None or predicate(node):
+                ret = transform_func(node)
+                # if the transformation function returns something, it's
+                # expected to be a replacement for the node
+                if ret is not None:
+                    if node is not orig_node:
+                        # node has already be modified by some previous
+                        # transformation, warn about it
+                        warn('node %s substituted multiple times' % node)
+                    node = ret
+        return node
+
+    def cache_module(self, module):
+        """Cache a module if no module with the same name is known yet."""
+        self.astroid_cache.setdefault(module.name, module)
+
+    def clear_cache(self, astroid_builtin=None):
+        # XXX clear transforms
+        self.astroid_cache.clear()
+        # force bootstrap again, else we may ends up with cache inconsistency
+        # between the manager and CONST_PROXY, making
+        # unittest_lookup.LookupTC.test_builtin_lookup fail depending on the
+        # test order
+        import astroid.raw_building
+        astroid.raw_building._astroid_bootstrapping(
+            astroid_builtin=astroid_builtin)
+
+
+class Project(object):
+    """a project handle a set of modules / packages"""
+    def __init__(self, name=''):
+        self.name = name
+        self.path = None
+        self.modules = []
+        self.locals = {}
+        self.__getitem__ = self.locals.__getitem__
+        self.__iter__ = self.locals.__iter__
+        self.values = self.locals.values
+        self.keys = self.locals.keys
+        self.items = self.locals.items
+
+    def add_module(self, node):
+        self.locals[node.name] = node
+        self.modules.append(node)
+
+    def get_module(self, name):
+        return self.locals[name]
+
+    def get_children(self):
+        return self.modules
+
+    def __repr__(self):
+        return '<Project %r at %s (%s modules)>' % (self.name, id(self),
+                                                    len(self.modules))
+
+
diff --git a/third_party/logilab/astng/mixins.py b/third_party/logilab/astroid/mixins.py
similarity index 69%
rename from third_party/logilab/astng/mixins.py
rename to third_party/logilab/astroid/mixins.py
index 869a25a..dbf1673 100644
--- a/third_party/logilab/astng/mixins.py
+++ b/third_party/logilab/astroid/mixins.py
@@ -1,47 +1,35 @@
-# This program is free software; you can redistribute it and/or modify it under
-# the terms of the GNU Lesser General Public License as published by the Free Software
-# Foundation; either version 2 of the License, or (at your option) any later
-# version.
-#
-# This program is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License along with
-# this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-# copyright 2003-2010 Sylvain Thenault, all rights reserved.
-# contact mailto:thenault@gmail.com
 #
-# This file is part of logilab-astng.
+# This file is part of astroid.
 #
-# logilab-astng is free software: you can redistribute it and/or modify it
+# astroid is free software: you can redistribute it and/or modify it
 # under the terms of the GNU Lesser General Public License as published by the
 # Free Software Foundation, either version 2.1 of the License, or (at your
 # option) any later version.
 #
-# logilab-astng is distributed in the hope that it will be useful, but
+# astroid is distributed in the hope that it will be useful, but
 # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 # FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
 # for more details.
 #
 # You should have received a copy of the GNU Lesser General Public License along
-# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
+# with astroid. If not, see <http://www.gnu.org/licenses/>.
 """This module contains some mixins for the different nodes.
 """
 
-from logilab.astng.exceptions import (ASTNGBuildingException, InferenceError,
-                                      NotFoundError)
+from logilab.common.decorators import cachedproperty
+
+from astroid.exceptions import (AstroidBuildingException, InferenceError,
+                                NotFoundError)
 
 
 class BlockRangeMixIn(object):
     """override block range """
-    def set_line_info(self, lastchild):
-        self.fromlineno = self.lineno
-        self.tolineno = lastchild.tolineno
-        self.blockstart_tolineno = self._blockstart_toline()
+
+    @cachedproperty
+    def blockstart_tolineno(self):
+        return self.lineno
 
     def _elsed_block_range(self, lineno, orelse, last=None):
         """handle block line numbers range for try/finally, for, if and while
@@ -55,6 +43,7 @@
             return lineno, orelse[0].fromlineno - 1
         return lineno, last or self.tolineno
 
+
 class FilterStmtsMixin(object):
     """Mixin for statement filtering and assignment type"""
 
@@ -92,14 +81,13 @@
         return self.parent.ass_type()
 
 
-
 class FromImportMixIn(FilterStmtsMixin):
     """MixIn for From and Import Nodes"""
 
     def _infer_name(self, frame, name):
         return name
 
-    def do_import_module(self, modname):
+    def do_import_module(self, modname=None):
         """return the ast for a module whose name is <modname> imported by <self>
         """
         # handle special case where we are on a package node importing a module
@@ -108,6 +96,8 @@
         # XXX: no more needed ?
         mymodule = self.root()
         level = getattr(self, 'level', None) # Import as no level
+        if modname is None:
+            modname = self.modname
         # XXX we should investigate deeper if we really want to check
         # importing itself: modname and mymodule.name be relative or absolute
         if mymodule.relative_to_absolute_name(modname, level) == mymodule.name:
@@ -115,9 +105,9 @@
             return mymodule
         try:
             return mymodule.import_module(modname, level=level)
-        except ASTNGBuildingException:
+        except AstroidBuildingException:
             raise InferenceError(modname)
-        except SyntaxError, ex:
+        except SyntaxError as ex:
             raise InferenceError(str(ex))
 
     def real_name(self, asname):
@@ -132,5 +122,3 @@
                 return name
         raise NotFoundError(asname)
 
-
-
diff --git a/third_party/logilab/astroid/modutils.py b/third_party/logilab/astroid/modutils.py
new file mode 100644
index 0000000..68a2086
--- /dev/null
+++ b/third_party/logilab/astroid/modutils.py
@@ -0,0 +1,670 @@
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of astroid.
+#
+# astroid is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# astroid is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with astroid.  If not, see <http://www.gnu.org/licenses/>.
+"""Python modules manipulation utility functions.
+
+:type PY_SOURCE_EXTS: tuple(str)
+:var PY_SOURCE_EXTS: list of possible python source file extension
+
+:type STD_LIB_DIRS: set of str
+:var STD_LIB_DIRS: directories where standard modules are located
+
+:type BUILTIN_MODULES: dict
+:var BUILTIN_MODULES: dictionary with builtin module names has key
+"""
+from __future__ import with_statement
+
+__docformat__ = "restructuredtext en"
+
+import imp
+import os
+import sys
+from distutils.sysconfig import get_python_lib
+from distutils.errors import DistutilsPlatformError
+import zipimport
+
+try:
+    import pkg_resources
+except ImportError:
+    pkg_resources = None
+
+from logilab.common import _handle_blacklist
+
+PY_ZIPMODULE = object()
+
+if sys.platform.startswith('win'):
+    PY_SOURCE_EXTS = ('py', 'pyw')
+    PY_COMPILED_EXTS = ('dll', 'pyd')
+else:
+    PY_SOURCE_EXTS = ('py',)
+    PY_COMPILED_EXTS = ('so',)
+
+# Notes about STD_LIB_DIRS
+# Consider arch-specific installation for STD_LIB_DIRS definition
+# :mod:`distutils.sysconfig` contains to much hardcoded values to rely on
+#
+# :see: `Problems with /usr/lib64 builds <http://bugs.python.org/issue1294959>`_
+# :see: `FHS <http://www.pathname.com/fhs/pub/fhs-2.3.html#LIBLTQUALGTALTERNATEFORMATESSENTIAL>`_
+try:
+    # The explicit sys.prefix is to work around a patch in virtualenv that
+    # replaces the 'real' sys.prefix (i.e. the location of the binary)
+    # with the prefix from which the virtualenv was created. This throws
+    # off the detection logic for standard library modules, thus the
+    # workaround.
+    STD_LIB_DIRS = {
+        get_python_lib(standard_lib=True, prefix=sys.prefix),
+        # Take care of installations where exec_prefix != prefix.
+        get_python_lib(standard_lib=True, prefix=sys.exec_prefix),
+        get_python_lib(standard_lib=True)}
+    if os.name == 'nt':
+        STD_LIB_DIRS.add(os.path.join(sys.prefix, 'dlls'))
+        try:
+            # real_prefix is defined when running inside virtualenv.
+            STD_LIB_DIRS.add(os.path.join(sys.real_prefix, 'dlls'))
+        except AttributeError:
+            pass
+# get_python_lib(standard_lib=1) is not available on pypy, set STD_LIB_DIR to
+# non-valid path, see https://bugs.pypy.org/issue1164
+except DistutilsPlatformError:
+    STD_LIB_DIRS = set()
+
+EXT_LIB_DIR = get_python_lib()
+
+BUILTIN_MODULES = dict(zip(sys.builtin_module_names,
+                           [1]*len(sys.builtin_module_names)))
+
+
+class NoSourceFile(Exception):
+    """exception raised when we are not able to get a python
+    source file for a precompiled file
+    """
+
+def _normalize_path(path):
+    return os.path.normcase(os.path.abspath(path))
+
+
+_NORM_PATH_CACHE = {}
+
+def _cache_normalize_path(path):
+    """abspath with caching"""
+    # _module_file calls abspath on every path in sys.path every time it's
+    # called; on a larger codebase this easily adds up to half a second just
+    # assembling path components. This cache alleviates that.
+    try:
+        return _NORM_PATH_CACHE[path]
+    except KeyError:
+        if not path: # don't cache result for ''
+            return _normalize_path(path)
+        result = _NORM_PATH_CACHE[path] = _normalize_path(path)
+        return result
+
+def load_module_from_name(dotted_name, path=None, use_sys=1):
+    """Load a Python module from its name.
+
+    :type dotted_name: str
+    :param dotted_name: python name of a module or package
+
+    :type path: list or None
+    :param path:
+      optional list of path where the module or package should be
+      searched (use sys.path if nothing or None is given)
+
+    :type use_sys: bool
+    :param use_sys:
+      boolean indicating whether the sys.modules dictionary should be
+      used or not
+
+
+    :raise ImportError: if the module or package is not found
+
+    :rtype: module
+    :return: the loaded module
+    """
+    return load_module_from_modpath(dotted_name.split('.'), path, use_sys)
+
+
+def load_module_from_modpath(parts, path=None, use_sys=1):
+    """Load a python module from its splitted name.
+
+    :type parts: list(str) or tuple(str)
+    :param parts:
+      python name of a module or package splitted on '.'
+
+    :type path: list or None
+    :param path:
+      optional list of path where the module or package should be
+      searched (use sys.path if nothing or None is given)
+
+    :type use_sys: bool
+    :param use_sys:
+      boolean indicating whether the sys.modules dictionary should be used or not
+
+    :raise ImportError: if the module or package is not found
+
+    :rtype: module
+    :return: the loaded module
+    """
+    if use_sys:
+        try:
+            return sys.modules['.'.join(parts)]
+        except KeyError:
+            pass
+    modpath = []
+    prevmodule = None
+    for part in parts:
+        modpath.append(part)
+        curname = '.'.join(modpath)
+        module = None
+        if len(modpath) != len(parts):
+            # even with use_sys=False, should try to get outer packages from sys.modules
+            module = sys.modules.get(curname)
+        elif use_sys:
+            # because it may have been indirectly loaded through a parent
+            module = sys.modules.get(curname)
+        if module is None:
+            mp_file, mp_filename, mp_desc = imp.find_module(part, path)
+            module = imp.load_module(curname, mp_file, mp_filename, mp_desc)
+            # mp_file still needs to be closed.
+            if mp_file:
+                mp_file.close()
+        if prevmodule:
+            setattr(prevmodule, part, module)
+        _file = getattr(module, '__file__', '')
+        if not _file and len(modpath) != len(parts):
+            raise ImportError('no module in %s' % '.'.join(parts[len(modpath):]))
+        path = [os.path.dirname(_file)]
+        prevmodule = module
+    return module
+
+
+def load_module_from_file(filepath, path=None, use_sys=1, extrapath=None):
+    """Load a Python module from it's path.
+
+    :type filepath: str
+    :param filepath: path to the python module or package
+
+    :type path: list or None
+    :param path:
+      optional list of path where the module or package should be
+      searched (use sys.path if nothing or None is given)
+
+    :type use_sys: bool
+    :param use_sys:
+      boolean indicating whether the sys.modules dictionary should be
+      used or not
+
+
+    :raise ImportError: if the module or package is not found
+
+    :rtype: module
+    :return: the loaded module
+    """
+    modpath = modpath_from_file(filepath, extrapath)
+    return load_module_from_modpath(modpath, path, use_sys)
+
+
+def _check_init(path, mod_path):
+    """check there are some __init__.py all along the way"""
+    for part in mod_path:
+        path = os.path.join(path, part)
+        if not _has_init(path):
+            return False
+    return True
+
+
+def modpath_from_file(filename, extrapath=None):
+    """given a file path return the corresponding splitted module's name
+    (i.e name of a module or package splitted on '.')
+
+    :type filename: str
+    :param filename: file's path for which we want the module's name
+
+    :type extrapath: dict
+    :param extrapath:
+      optional extra search path, with path as key and package name for the path
+      as value. This is usually useful to handle package splitted in multiple
+      directories using __path__ trick.
+
+
+    :raise ImportError:
+      if the corresponding module's name has not been found
+
+    :rtype: list(str)
+    :return: the corresponding splitted module's name
+    """
+    base = os.path.splitext(os.path.abspath(filename))[0]
+    if extrapath is not None:
+        for path_ in extrapath:
+            path = os.path.abspath(path_)
+            if path and os.path.normcase(base[:len(path)]) == os.path.normcase(path):
+                submodpath = [pkg for pkg in base[len(path):].split(os.sep)
+                              if pkg]
+                if _check_init(path, submodpath[:-1]):
+                    return extrapath[path_].split('.') + submodpath
+    for path in sys.path:
+        path = _cache_normalize_path(path)
+        if path and os.path.normcase(base).startswith(path):
+            modpath = [pkg for pkg in base[len(path):].split(os.sep) if pkg]
+            if _check_init(path, modpath[:-1]):
+                return modpath
+    raise ImportError('Unable to find module for %s in %s' % (
+        filename, ', \n'.join(sys.path)))
+
+
+def file_from_modpath(modpath, path=None, context_file=None):
+    return file_info_from_modpath(modpath, path, context_file)[0]
+
+def file_info_from_modpath(modpath, path=None, context_file=None):
+    """given a mod path (i.e. splitted module / package name), return the
+    corresponding file, giving priority to source file over precompiled
+    file if it exists
+
+    :type modpath: list or tuple
+    :param modpath:
+      splitted module's name (i.e name of a module or package splitted
+      on '.')
+      (this means explicit relative imports that start with dots have
+      empty strings in this list!)
+
+    :type path: list or None
+    :param path:
+      optional list of path where the module or package should be
+      searched (use sys.path if nothing or None is given)
+
+    :type context_file: str or None
+    :param context_file:
+      context file to consider, necessary if the identifier has been
+      introduced using a relative import unresolvable in the actual
+      context (i.e. modutils)
+
+    :raise ImportError: if there is no such module in the directory
+
+    :rtype: (str or None, import type)
+    :return:
+      the path to the module's file or None if it's an integrated
+      builtin module such as 'sys'
+    """
+    if context_file is not None:
+        context = os.path.dirname(context_file)
+    else:
+        context = context_file
+    if modpath[0] == 'xml':
+        # handle _xmlplus
+        try:
+            return _file_from_modpath(['_xmlplus'] + modpath[1:], path, context)
+        except ImportError:
+            return _file_from_modpath(modpath, path, context)
+    elif modpath == ['os', 'path']:
+        # FIXME: currently ignoring search_path...
+        return os.path.__file__, imp.PY_SOURCE
+    return _file_from_modpath(modpath, path, context)
+
+
+def get_module_part(dotted_name, context_file=None):
+    """given a dotted name return the module part of the name :
+
+    >>> get_module_part('logilab.common.modutils.get_module_part')
+    'logilab.common.modutils'
+
+    :type dotted_name: str
+    :param dotted_name: full name of the identifier we are interested in
+
+    :type context_file: str or None
+    :param context_file:
+      context file to consider, necessary if the identifier has been
+      introduced using a relative import unresolvable in the actual
+      context (i.e. modutils)
+
+
+    :raise ImportError: if there is no such module in the directory
+
+    :rtype: str or None
+    :return:
+      the module part of the name or None if we have not been able at
+      all to import the given name
+
+    XXX: deprecated, since it doesn't handle package precedence over module
+    (see #10066)
+    """
+    # os.path trick
+    if dotted_name.startswith('os.path'):
+        return 'os.path'
+    parts = dotted_name.split('.')
+    if context_file is not None:
+        # first check for builtin module which won't be considered latter
+        # in that case (path != None)
+        if parts[0] in BUILTIN_MODULES:
+            if len(parts) > 2:
+                raise ImportError(dotted_name)
+            return parts[0]
+        # don't use += or insert, we want a new list to be created !
+    path = None
+    starti = 0
+    if parts[0] == '':
+        assert context_file is not None, \
+                'explicit relative import, but no context_file?'
+        path = [] # prevent resolving the import non-relatively
+        starti = 1
+    while parts[starti] == '': # for all further dots: change context
+        starti += 1
+        context_file = os.path.dirname(context_file)
+    for i in range(starti, len(parts)):
+        try:
+            file_from_modpath(parts[starti:i+1], path=path,
+                              context_file=context_file)
+        except ImportError:
+            if not i >= max(1, len(parts) - 2):
+                raise
+            return '.'.join(parts[:i])
+    return dotted_name
+
+
+def get_module_files(src_directory, blacklist):
+    """given a package directory return a list of all available python
+    module's files in the package and its subpackages
+
+    :type src_directory: str
+    :param src_directory:
+      path of the directory corresponding to the package
+
+    :type blacklist: list or tuple
+    :param blacklist:
+      optional list of files or directory to ignore, default to the value of
+      `logilab.common.STD_BLACKLIST`
+
+    :rtype: list
+    :return:
+      the list of all available python module's files in the package and
+      its subpackages
+    """
+    files = []
+    for directory, dirnames, filenames in os.walk(src_directory):
+        _handle_blacklist(blacklist, dirnames, filenames)
+        # check for __init__.py
+        if not '__init__.py' in filenames:
+            dirnames[:] = ()
+            continue
+        for filename in filenames:
+            if _is_python_file(filename):
+                src = os.path.join(directory, filename)
+                files.append(src)
+    return files
+
+
+def get_source_file(filename, include_no_ext=False):
+    """given a python module's file name return the matching source file
+    name (the filename will be returned identically if it's a already an
+    absolute path to a python source file...)
+
+    :type filename: str
+    :param filename: python module's file name
+
+
+    :raise NoSourceFile: if no source file exists on the file system
+
+    :rtype: str
+    :return: the absolute path of the source file if it exists
+    """
+    base, orig_ext = os.path.splitext(os.path.abspath(filename))
+    for ext in PY_SOURCE_EXTS:
+        source_path = '%s.%s' % (base, ext)
+        if os.path.exists(source_path):
+            return source_path
+    if include_no_ext and not orig_ext and os.path.exists(base):
+        return base
+    raise NoSourceFile(filename)
+
+
+def is_python_source(filename):
+    """
+    rtype: bool
+    return: True if the filename is a python source file
+    """
+    return os.path.splitext(filename)[1][1:] in PY_SOURCE_EXTS
+
+
+def is_standard_module(modname, std_path=None):
+    """try to guess if a module is a standard python module (by default,
+    see `std_path` parameter's description)
+
+    :type modname: str
+    :param modname: name of the module we are interested in
+
+    :type std_path: list(str) or tuple(str)
+    :param std_path: list of path considered has standard
+
+
+    :rtype: bool
+    :return:
+      true if the module:
+      - is located on the path listed in one of the directory in `std_path`
+      - is a built-in module
+    """
+    modname = modname.split('.')[0]
+    try:
+        filename = file_from_modpath([modname])
+    except ImportError:
+        # import failed, i'm probably not so wrong by supposing it's
+        # not standard...
+        return False
+    # modules which are not living in a file are considered standard
+    # (sys and __builtin__ for instance)
+    if filename is None:
+        return True
+    filename = _normalize_path(filename)
+    if filename.startswith(_cache_normalize_path(EXT_LIB_DIR)):
+        return False
+    if std_path is None:
+        std_path = STD_LIB_DIRS
+    for path in std_path:
+        if filename.startswith(_cache_normalize_path(path)):
+            return True
+    return False
+
+
+
+def is_relative(modname, from_file):
+    """return true if the given module name is relative to the given
+    file name
+
+    :type modname: str
+    :param modname: name of the module we are interested in
+
+    :type from_file: str
+    :param from_file:
+      path of the module from which modname has been imported
+
+    :rtype: bool
+    :return:
+      true if the module has been imported relatively to `from_file`
+    """
+    if not os.path.isdir(from_file):
+        from_file = os.path.dirname(from_file)
+    if from_file in sys.path:
+        return False
+    try:
+        stream, _, _ = imp.find_module(modname.split('.')[0], [from_file])
+
+        # Close the stream to avoid ResourceWarnings.
+        if stream:
+            stream.close()
+        return True
+    except ImportError:
+        return False
+
+
+# internal only functions #####################################################
+
+def _file_from_modpath(modpath, path=None, context=None):
+    """given a mod path (i.e. splitted module / package name), return the
+    corresponding file
+
+    this function is used internally, see `file_from_modpath`'s
+    documentation for more information
+    """
+    assert len(modpath) > 0
+    if context is not None:
+        try:
+            mtype, mp_filename = _module_file(modpath, [context])
+        except ImportError:
+            mtype, mp_filename = _module_file(modpath, path)
+    else:
+        mtype, mp_filename = _module_file(modpath, path)
+    if mtype == imp.PY_COMPILED:
+        try:
+            return get_source_file(mp_filename), imp.PY_SOURCE
+        except NoSourceFile:
+            return mp_filename, imp.PY_COMPILED
+    elif mtype == imp.C_BUILTIN:
+        # integrated builtin module
+        return None, imp.C_BUILTIN
+    elif mtype == imp.PKG_DIRECTORY:
+        mp_filename = _has_init(mp_filename)
+        mtype = imp.PY_SOURCE
+    return mp_filename, mtype
+
+def _search_zip(modpath, pic):
+    for filepath, importer in pic.items():
+        if importer is not None:
+            if importer.find_module(modpath[0]):
+                if not importer.find_module(os.path.sep.join(modpath)):
+                    raise ImportError('No module named %s in %s/%s' % (
+                        '.'.join(modpath[1:]), filepath, modpath))
+                return PY_ZIPMODULE, os.path.abspath(filepath) + os.path.sep + os.path.sep.join(modpath), filepath
+    raise ImportError('No module named %s' % '.'.join(modpath))
+
+
+def _module_file(modpath, path=None):
+    """get a module type / file path
+
+    :type modpath: list or tuple
+    :param modpath:
+      splitted module's name (i.e name of a module or package splitted
+      on '.'), with leading empty strings for explicit relative import
+
+    :type path: list or None
+    :param path:
+      optional list of path where the module or package should be
+      searched (use sys.path if nothing or None is given)
+
+
+    :rtype: tuple(int, str)
+    :return: the module type flag and the file path for a module
+    """
+    # egg support compat
+    try:
+        pic = sys.path_importer_cache
+        _path = (path is None and sys.path or path)
+        for __path in _path:
+            if not __path in pic:
+                try:
+                    pic[__path] = zipimport.zipimporter(__path)
+                except zipimport.ZipImportError:
+                    pic[__path] = None
+        checkeggs = True
+    except AttributeError:
+        checkeggs = False
+    # pkg_resources support (aka setuptools namespace packages)
+    if (pkg_resources is not None
+            and modpath[0] in pkg_resources._namespace_packages
+            and modpath[0] in sys.modules
+            and len(modpath) > 1):
+        # setuptools has added into sys.modules a module object with proper
+        # __path__, get back information from there
+        module = sys.modules[modpath.pop(0)]
+        path = module.__path__
+    imported = []
+    while modpath:
+        modname = modpath[0]
+        # take care to changes in find_module implementation wrt builtin modules
+        #
+        # Python 2.6.6 (r266:84292, Sep 11 2012, 08:34:23)
+        # >>> imp.find_module('posix')
+        # (None, 'posix', ('', '', 6))
+        #
+        # Python 3.3.1 (default, Apr 26 2013, 12:08:46)
+        # >>> imp.find_module('posix')
+        # (None, None, ('', '', 6))
+        try:
+            stream, mp_filename, mp_desc = imp.find_module(modname, path)
+        except ImportError:
+            if checkeggs:
+                return _search_zip(modpath, pic)[:2]
+            raise
+        else:
+            # Don't forget to close the stream to avoid
+            # spurious ResourceWarnings.
+            if stream:
+               stream.close()
+
+            if checkeggs and mp_filename:
+                fullabspath = [_cache_normalize_path(x) for x in _path]
+                try:
+                    pathindex = fullabspath.index(os.path.dirname(_normalize_path(mp_filename)))
+                    emtype, emp_filename, zippath = _search_zip(modpath, pic)
+                    if pathindex > _path.index(zippath):
+                        # an egg takes priority
+                        return emtype, emp_filename
+                except ValueError:
+                    # XXX not in _path
+                    pass
+                except ImportError:
+                    pass
+                checkeggs = False
+        imported.append(modpath.pop(0))
+        mtype = mp_desc[2]
+        if modpath:
+            if mtype != imp.PKG_DIRECTORY:
+                raise ImportError('No module %s in %s' % ('.'.join(modpath),
+                                                          '.'.join(imported)))
+            # XXX guess if package is using pkgutil.extend_path by looking for
+            # those keywords in the first four Kbytes
+            try:
+                with open(os.path.join(mp_filename, '__init__.py'), 'rb') as stream:
+                    data = stream.read(4096)
+            except IOError:
+                path = [mp_filename]
+            else:
+                if b'pkgutil' in data and b'extend_path' in data:
+                    # extend_path is called, search sys.path for module/packages
+                    # of this name see pkgutil.extend_path documentation
+                    path = [os.path.join(p, *imported) for p in sys.path
+                            if os.path.isdir(os.path.join(p, *imported))]
+                else:
+                    path = [mp_filename]
+    return mtype, mp_filename
+
+def _is_python_file(filename):
+    """return true if the given filename should be considered as a python file
+
+    .pyc and .pyo are ignored
+    """
+    for ext in ('.py', '.so', '.pyd', '.pyw'):
+        if filename.endswith(ext):
+            return True
+    return False
+
+
+def _has_init(directory):
+    """if the given directory has a valid __init__ file, return its path,
+    else return None
+    """
+    mod_or_pack = os.path.join(directory, '__init__')
+    for ext in PY_SOURCE_EXTS + ('pyc', 'pyo'):
+        if os.path.exists(mod_or_pack + '.' + ext):
+            return mod_or_pack + '.' + ext
+    return None
diff --git a/third_party/logilab/astng/node_classes.py b/third_party/logilab/astroid/node_classes.py
similarity index 73%
rename from third_party/logilab/astng/node_classes.py
rename to third_party/logilab/astroid/node_classes.py
index 607ad90..71e512f 100644
--- a/third_party/logilab/astng/node_classes.py
+++ b/third_party/logilab/astroid/node_classes.py
@@ -1,33 +1,35 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-# copyright 2003-2010 Sylvain Thenault, all rights reserved.
-# contact mailto:thenault@gmail.com
 #
-# This file is part of logilab-astng.
+# This file is part of astroid.
 #
-# logilab-astng is free software: you can redistribute it and/or modify it
+# astroid is free software: you can redistribute it and/or modify it
 # under the terms of the GNU Lesser General Public License as published by the
 # Free Software Foundation, either version 2.1 of the License, or (at your
 # option) any later version.
 #
-# logilab-astng is distributed in the hope that it will be useful, but
+# astroid is distributed in the hope that it will be useful, but
 # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 # FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
 # for more details.
 #
 # You should have received a copy of the GNU Lesser General Public License along
-# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
+# with astroid. If not, see <http://www.gnu.org/licenses/>.
 """Module for some node classes. More nodes in scoped_nodes.py
 """
 
 import sys
 
-from logilab.astng import BUILTINS_MODULE
-from logilab.astng.exceptions import NoDefault
-from logilab.astng.bases import (NodeNG, Statement, Instance, InferenceContext,
-                                 _infer_stmts, YES)
-from logilab.astng.mixins import BlockRangeMixIn, AssignTypeMixin, \
-                                 ParentAssignTypeMixin, FromImportMixIn
+import six
+from logilab.common.decorators import cachedproperty
+
+from astroid.exceptions import NoDefault
+from astroid.bases import (NodeNG, Statement, Instance, InferenceContext,
+                           _infer_stmts, YES, BUILTINS)
+from astroid.mixins import (BlockRangeMixIn, AssignTypeMixin,
+                            ParentAssignTypeMixin, FromImportMixIn)
+
+PY3K = sys.version_info >= (3, 0)
 
 
 def unpack_infer(stmt, context=None):
@@ -39,13 +41,18 @@
             for infered_elt in unpack_infer(elt, context):
                 yield infered_elt
         return
-    infered = stmt.infer(context).next()
-    if infered is stmt or infered is YES:
+    # if infered is a final node, return it and stop
+    infered = next(stmt.infer(context))
+    if infered is stmt:
         yield infered
         return
+    # else, infer recursivly, except YES object that should be returned as is
     for infered in stmt.infer(context):
-        for inf_inf in unpack_infer(infered, context):
-            yield inf_inf
+        if infered is YES:
+            yield infered
+        else:
+            for inf_inf in unpack_infer(infered, context):
+                yield inf_inf
 
 
 def are_exclusive(stmt1, stmt2, exceptions=None):
@@ -80,16 +87,16 @@
             # nodes are in exclusive branches
             if isinstance(node, If) and exceptions is None:
                 if (node.locate_child(previous)[1]
-                    is not node.locate_child(children[node])[1]):
+                        is not node.locate_child(children[node])[1]):
                     return True
             elif isinstance(node, TryExcept):
                 c2attr, c2node = node.locate_child(previous)
                 c1attr, c1node = node.locate_child(children[node])
                 if c1node is not c2node:
                     if ((c2attr == 'body' and c1attr == 'handlers' and children[node].catch(exceptions)) or
-                        (c2attr == 'handlers' and c1attr == 'body' and previous.catch(exceptions)) or
-                        (c2attr == 'handlers' and c1attr == 'orelse') or
-                        (c2attr == 'orelse' and c1attr == 'handlers')):
+                            (c2attr == 'handlers' and c1attr == 'body' and previous.catch(exceptions)) or
+                            (c2attr == 'handlers' and c1attr == 'orelse') or
+                            (c2attr == 'orelse' and c1attr == 'handlers')):
                         return True
                 elif c2attr == 'handlers' and c1attr == 'handlers':
                     return previous is not children[node]
@@ -106,13 +113,13 @@
     def lookup(self, name):
         """lookup a variable name
 
-        return the scope node and the list of assignments associated to the given
-        name according to the scope where it has been found (locals, globals or
-        builtin)
+        return the scope node and the list of assignments associated to the
+        given name according to the scope where it has been found (locals,
+        globals or builtin)
 
-        The lookup is starting from self's scope. If self is not a frame itself and
-        the name is found in the inner frame locals, statements will be filtered
-        to remove ignorable statements according to self's location
+        The lookup is starting from self's scope. If self is not a frame itself
+        and the name is found in the inner frame locals, statements will be
+        filtered to remove ignorable statements according to self's location
         """
         return self.scope().scope_lookup(self, name)
 
@@ -123,8 +130,7 @@
         the lookup method
         """
         frame, stmts = self.lookup(name)
-        context = InferenceContext()
-        return _infer_stmts(stmts, context, frame)
+        return _infer_stmts(stmts, None, frame)
 
     def _filter_stmts(self, stmts, frame, offset):
         """filter statements to remove ignorable statements.
@@ -142,6 +148,20 @@
             myframe = self.frame().parent.frame()
         else:
             myframe = self.frame()
+            # If the frame of this node is the same as the statement
+            # of this node, then the node is part of a class or
+            # a function definition and the frame of this node should be the
+            # the upper frame, not the frame of the definition.
+            # For more information why this is important,
+            # see Pylint issue #295.
+            # For example, for 'b', the statement is the same
+            # as the frame / scope:
+            #
+            # def test(b=1):
+            #     ...
+
+            if self.statement() is myframe and myframe.parent:
+                myframe = myframe.parent.frame()
         if not myframe is frame or self is frame:
             return stmts
         mystmt = self.statement()
@@ -251,9 +271,30 @@
 
 class Arguments(NodeNG, AssignTypeMixin):
     """class representing an Arguments node"""
-    _astng_fields = ('args', 'defaults')
+    if PY3K:
+        # Python 3.4+ uses a different approach regarding annotations,
+        # each argument is a new class, _ast.arg, which exposes an
+        # 'annotation' attribute. In astroid though, arguments are exposed
+        # as is in the Arguments node and the only way to expose annotations
+        # is by using something similar with Python 3.3:
+        #  - we expose 'varargannotation' and 'kwargannotation' of annotations
+        #    of varargs and kwargs.
+        #  - we expose 'annotation', a list with annotations for
+        #    for each normal argument. If an argument doesn't have an
+        #    annotation, its value will be None.
+
+        _astroid_fields = ('args', 'defaults', 'kwonlyargs',
+                           'kw_defaults', 'annotations',
+                           'varargannotation', 'kwargannotation')
+        annotations = None
+        varargannotation = None
+        kwargannotation = None
+    else:
+        _astroid_fields = ('args', 'defaults', 'kwonlyargs', 'kw_defaults')
     args = None
     defaults = None
+    kwonlyargs = None
+    kw_defaults = None
 
     def __init__(self, vararg=None, kwarg=None):
         self.vararg = vararg
@@ -264,13 +305,24 @@
             return name
         return None
 
+    @cachedproperty
+    def fromlineno(self):
+        lineno = super(Arguments, self).fromlineno
+        return max(lineno, self.parent.fromlineno)
+
     def format_args(self):
         """return arguments formatted as string"""
-        result = [_format_args(self.args, self.defaults)]
+        result = []
+        if self.args:
+            result.append(_format_args(self.args, self.defaults))
         if self.vararg:
             result.append('*%s' % self.vararg)
         if self.kwarg:
             result.append('**%s' % self.kwarg)
+        if self.kwonlyargs:
+            if not self.vararg:
+                result.append('*')
+            result.append(_format_args(self.kwonlyargs, self.kw_defaults))
         return ', '.join(result)
 
     def default_value(self, argname):
@@ -283,6 +335,9 @@
             idx = i - (len(self.args) - len(self.defaults))
             if idx >= 0:
                 return self.defaults[idx]
+        i = _find_arg(argname, self.kwonlyargs)[0]
+        if i is not None and self.kw_defaults[i] is not None:
+            return self.kw_defaults[i]
         raise NoDefault()
 
     def is_argument(self, name):
@@ -299,6 +354,12 @@
             return _find_arg(argname, self.args, rec)
         return None, None
 
+    def get_children(self):
+        """override get_children to skip over None elements in kw_defaults"""
+        for child in super(Arguments, self).get_children():
+            if child is not None:
+                yield child
+
 
 def _find_arg(argname, args, rec=False):
     for i, arg in enumerate(args):
@@ -324,47 +385,48 @@
         else:
             values.append(arg.name)
             if defaults is not None and i >= default_offset:
-                values[-1] += '=' + defaults[i-default_offset].as_string()
+                if defaults[i-default_offset] is not None:
+                    values[-1] += '=' + defaults[i-default_offset].as_string()
     return ', '.join(values)
 
 
 class AssAttr(NodeNG, ParentAssignTypeMixin):
     """class representing an AssAttr node"""
-    _astng_fields = ('expr',)
+    _astroid_fields = ('expr',)
     expr = None
 
 class Assert(Statement):
     """class representing an Assert node"""
-    _astng_fields = ('test', 'fail',)
+    _astroid_fields = ('test', 'fail',)
     test = None
     fail = None
 
 class Assign(Statement, AssignTypeMixin):
     """class representing an Assign node"""
-    _astng_fields = ('targets', 'value',)
+    _astroid_fields = ('targets', 'value',)
     targets = None
     value = None
 
 class AugAssign(Statement, AssignTypeMixin):
     """class representing an AugAssign node"""
-    _astng_fields = ('target', 'value',)
+    _astroid_fields = ('target', 'value',)
     target = None
     value = None
 
 class Backquote(NodeNG):
     """class representing a Backquote node"""
-    _astng_fields = ('value',)
+    _astroid_fields = ('value',)
     value = None
 
 class BinOp(NodeNG):
     """class representing a BinOp node"""
-    _astng_fields = ('left', 'right',)
+    _astroid_fields = ('left', 'right',)
     left = None
     right = None
 
 class BoolOp(NodeNG):
     """class representing a BoolOp node"""
-    _astng_fields = ('values',)
+    _astroid_fields = ('values',)
     values = None
 
 class Break(Statement):
@@ -373,7 +435,7 @@
 
 class CallFunc(NodeNG):
     """class representing a CallFunc node"""
-    _astng_fields = ('func', 'args', 'starargs', 'kwargs')
+    _astroid_fields = ('func', 'args', 'starargs', 'kwargs')
     func = None
     args = None
     starargs = None
@@ -385,7 +447,7 @@
 
 class Compare(NodeNG):
     """class representing a Compare node"""
-    _astng_fields = ('left', 'ops',)
+    _astroid_fields = ('left', 'ops',)
     left = None
     ops = None
 
@@ -403,7 +465,7 @@
 
 class Comprehension(NodeNG):
     """class representing a Comprehension node"""
-    _astng_fields = ('target', 'iter' ,'ifs')
+    _astroid_fields = ('target', 'iter', 'ifs')
     target = None
     iter = None
     ifs = None
@@ -434,7 +496,7 @@
         self.value = value
 
     def getitem(self, index, context=None):
-        if isinstance(self.value, basestring):
+        if isinstance(self.value, six.string_types):
             return Const(self.value[index])
         raise TypeError('%r (value=%s)' % (self, self.value))
 
@@ -442,7 +504,7 @@
         return False
 
     def itered(self):
-        if isinstance(self.value, basestring):
+        if isinstance(self.value, six.string_types):
             return self.value
         raise TypeError()
 
@@ -456,7 +518,7 @@
 
 class Decorators(NodeNG):
     """class representing a Decorators node"""
-    _astng_fields = ('nodes',)
+    _astroid_fields = ('nodes',)
     nodes = None
 
     def __init__(self, nodes=None):
@@ -468,29 +530,29 @@
 
 class DelAttr(NodeNG, ParentAssignTypeMixin):
     """class representing a DelAttr node"""
-    _astng_fields = ('expr',)
+    _astroid_fields = ('expr',)
     expr = None
 
 
 class Delete(Statement, AssignTypeMixin):
     """class representing a Delete node"""
-    _astng_fields = ('targets',)
+    _astroid_fields = ('targets',)
     targets = None
 
 
 class Dict(NodeNG, Instance):
     """class representing a Dict node"""
-    _astng_fields = ('items',)
+    _astroid_fields = ('items',)
 
     def __init__(self, items=None):
         if items is None:
             self.items = []
         else:
             self.items = [(const_factory(k), const_factory(v))
-                          for k,v in items.iteritems()]
+                          for k, v in items.items()]
 
     def pytype(self):
-        return '%s.dict' % BUILTINS_MODULE
+        return '%s.dict' % BUILTINS
 
     def get_children(self):
         """get children of a Dict node"""
@@ -508,19 +570,22 @@
     def itered(self):
         return self.items[::2]
 
-    def getitem(self, key, context=None):
-        for i in xrange(0, len(self.items), 2):
-            for inferedkey in self.items[i].infer(context):
+    def getitem(self, lookup_key, context=None):
+        for key, value in self.items:
+            for inferedkey in key.infer(context):
                 if inferedkey is YES:
                     continue
-                if isinstance(inferedkey, Const) and inferedkey.value == key:
-                    return self.items[i+1]
-        raise IndexError(key)
+                if isinstance(inferedkey, Const) \
+                        and inferedkey.value == lookup_key:
+                    return value
+        # This should raise KeyError, but all call sites only catch
+        # IndexError. Let's leave it like that for now.
+        raise IndexError(lookup_key)
 
 
 class Discard(Statement):
     """class representing a Discard node"""
-    _astng_fields = ('value',)
+    _astroid_fields = ('value',)
     value = None
 
 
@@ -534,12 +599,13 @@
 
 class ExceptHandler(Statement, AssignTypeMixin):
     """class representing an ExceptHandler node"""
-    _astng_fields = ('type', 'name', 'body',)
+    _astroid_fields = ('type', 'name', 'body',)
     type = None
     name = None
     body = None
 
-    def _blockstart_toline(self):
+    @cachedproperty
+    def blockstart_tolineno(self):
         if self.name:
             return self.name.tolineno
         elif self.type:
@@ -547,11 +613,6 @@
         else:
             return self.lineno
 
-    def set_line_info(self, lastchild):
-        self.fromlineno = self.lineno
-        self.tolineno = lastchild.tolineno
-        self.blockstart_tolineno = self._blockstart_toline()
-
     def catch(self, exceptions):
         if self.type is None or exceptions is None:
             return True
@@ -562,7 +623,7 @@
 
 class Exec(Statement):
     """class representing an Exec node"""
-    _astng_fields = ('expr', 'globals', 'locals',)
+    _astroid_fields = ('expr', 'globals', 'locals',)
     expr = None
     globals = None
     locals = None
@@ -570,33 +631,34 @@
 
 class ExtSlice(NodeNG):
     """class representing an ExtSlice node"""
-    _astng_fields = ('dims',)
+    _astroid_fields = ('dims',)
     dims = None
 
 class For(BlockRangeMixIn, AssignTypeMixin, Statement):
     """class representing a For node"""
-    _astng_fields = ('target', 'iter', 'body', 'orelse',)
+    _astroid_fields = ('target', 'iter', 'body', 'orelse',)
     target = None
     iter = None
     body = None
     orelse = None
 
     optional_assign = True
-    def _blockstart_toline(self):
+    @cachedproperty
+    def blockstart_tolineno(self):
         return self.iter.tolineno
 
 
 class From(FromImportMixIn, Statement):
     """class representing a From node"""
 
-    def __init__(self,  fromname, names, level=0):
+    def __init__(self, fromname, names, level=0):
         self.modname = fromname
         self.names = names
         self.level = level
 
 class Getattr(NodeNG):
     """class representing a Getattr node"""
-    _astng_fields = ('expr',)
+    _astroid_fields = ('expr',)
     expr = None
 
 
@@ -612,12 +674,13 @@
 
 class If(BlockRangeMixIn, Statement):
     """class representing an If node"""
-    _astng_fields = ('test', 'body', 'orelse')
+    _astroid_fields = ('test', 'body', 'orelse')
     test = None
     body = None
     orelse = None
 
-    def _blockstart_toline(self):
+    @cachedproperty
+    def blockstart_tolineno(self):
         return self.test.tolineno
 
     def block_range(self, lineno):
@@ -632,7 +695,7 @@
 
 class IfExp(NodeNG):
     """class representing an IfExp node"""
-    _astng_fields = ('test', 'body', 'orelse')
+    _astroid_fields = ('test', 'body', 'orelse')
     test = None
     body = None
     orelse = None
@@ -644,19 +707,19 @@
 
 class Index(NodeNG):
     """class representing an Index node"""
-    _astng_fields = ('value',)
+    _astroid_fields = ('value',)
     value = None
 
 
 class Keyword(NodeNG):
     """class representing a Keyword node"""
-    _astng_fields = ('value',)
+    _astroid_fields = ('value',)
     value = None
 
 
 class List(NodeNG, Instance, ParentAssignTypeMixin):
     """class representing a List node"""
-    _astng_fields = ('elts',)
+    _astroid_fields = ('elts',)
 
     def __init__(self, elts=None):
         if elts is None:
@@ -665,7 +728,7 @@
             self.elts = [const_factory(e) for e in elts]
 
     def pytype(self):
-        return '%s.list' % BUILTINS_MODULE
+        return '%s.list' % BUILTINS
 
     def getitem(self, index, context=None):
         return self.elts[index]
@@ -690,7 +753,7 @@
 
 class Print(Statement):
     """class representing a Print node"""
-    _astng_fields = ('dest', 'values',)
+    _astroid_fields = ('dest', 'values',)
     dest = None
     values = None
 
@@ -699,11 +762,11 @@
     """class representing a Raise node"""
     exc = None
     if sys.version_info < (3, 0):
-        _astng_fields = ('exc', 'inst', 'tback')
+        _astroid_fields = ('exc', 'inst', 'tback')
         inst = None
         tback = None
     else:
-        _astng_fields = ('exc', 'cause')
+        _astroid_fields = ('exc', 'cause')
         exc = None
         cause = None
 
@@ -717,13 +780,13 @@
 
 class Return(Statement):
     """class representing a Return node"""
-    _astng_fields = ('value',)
+    _astroid_fields = ('value',)
     value = None
 
 
 class Set(NodeNG, Instance, ParentAssignTypeMixin):
     """class representing a Set node"""
-    _astng_fields = ('elts',)
+    _astroid_fields = ('elts',)
 
     def __init__(self, elts=None):
         if elts is None:
@@ -732,7 +795,7 @@
             self.elts = [const_factory(e) for e in elts]
 
     def pytype(self):
-        return '%s.set' % BUILTINS_MODULE
+        return '%s.set' % BUILTINS
 
     def itered(self):
         return self.elts
@@ -740,27 +803,27 @@
 
 class Slice(NodeNG):
     """class representing a Slice node"""
-    _astng_fields = ('lower', 'upper', 'step')
+    _astroid_fields = ('lower', 'upper', 'step')
     lower = None
     upper = None
     step = None
 
-class Starred(NodeNG):
+class Starred(NodeNG, ParentAssignTypeMixin):
     """class representing a Starred node"""
-    _astng_fields = ('value',)
+    _astroid_fields = ('value',)
     value = None
 
 
 class Subscript(NodeNG):
     """class representing a Subscript node"""
-    _astng_fields = ('value', 'slice')
+    _astroid_fields = ('value', 'slice')
     value = None
     slice = None
 
 
 class TryExcept(BlockRangeMixIn, Statement):
     """class representing a TryExcept node"""
-    _astng_fields = ('body', 'handlers', 'orelse',)
+    _astroid_fields = ('body', 'handlers', 'orelse',)
     body = None
     handlers = None
     orelse = None
@@ -768,9 +831,6 @@
     def _infer_name(self, frame, name):
         return name
 
-    def _blockstart_toline(self):
-        return self.lineno
-
     def block_range(self, lineno):
         """handle block line numbers range for try/except statements"""
         last = None
@@ -786,26 +846,23 @@
 
 class TryFinally(BlockRangeMixIn, Statement):
     """class representing a TryFinally node"""
-    _astng_fields = ('body', 'finalbody',)
+    _astroid_fields = ('body', 'finalbody',)
     body = None
     finalbody = None
 
-    def _blockstart_toline(self):
-        return self.lineno
-
     def block_range(self, lineno):
         """handle block line numbers range for try/finally statements"""
         child = self.body[0]
         # py2.5 try: except: finally:
         if (isinstance(child, TryExcept) and child.fromlineno == self.fromlineno
-            and lineno > self.fromlineno and lineno <= child.tolineno):
+                and lineno > self.fromlineno and lineno <= child.tolineno):
             return child.block_range(lineno)
         return self._elsed_block_range(lineno, self.finalbody)
 
 
 class Tuple(NodeNG, Instance, ParentAssignTypeMixin):
     """class representing a Tuple node"""
-    _astng_fields = ('elts',)
+    _astroid_fields = ('elts',)
 
     def __init__(self, elts=None):
         if elts is None:
@@ -814,7 +871,7 @@
             self.elts = [const_factory(e) for e in elts]
 
     def pytype(self):
-        return '%s.tuple' % BUILTINS_MODULE
+        return '%s.tuple' % BUILTINS
 
     def getitem(self, index, context=None):
         return self.elts[index]
@@ -825,18 +882,19 @@
 
 class UnaryOp(NodeNG):
     """class representing an UnaryOp node"""
-    _astng_fields = ('operand',)
+    _astroid_fields = ('operand',)
     operand = None
 
 
 class While(BlockRangeMixIn, Statement):
     """class representing a While node"""
-    _astng_fields = ('test', 'body', 'orelse',)
+    _astroid_fields = ('test', 'body', 'orelse',)
     test = None
     body = None
     orelse = None
 
-    def _blockstart_toline(self):
+    @cachedproperty
+    def blockstart_tolineno(self):
         return self.test.tolineno
 
     def block_range(self, lineno):
@@ -846,23 +904,30 @@
 
 class With(BlockRangeMixIn, AssignTypeMixin, Statement):
     """class representing a With node"""
-    _astng_fields = ('expr', 'vars', 'body')
-    expr = None
-    vars = None
+    _astroid_fields = ('items', 'body')
+    items = None
     body = None
 
-    def _blockstart_toline(self):
-        if self.vars:
-            return self.vars.tolineno
-        else:
-            return self.expr.tolineno
+    @cachedproperty
+    def blockstart_tolineno(self):
+        return self.items[-1][0].tolineno
 
+    def get_children(self):
+        for expr, var in self.items:
+            yield expr
+            if var:
+                yield var
+        for elt in self.body:
+            yield elt
 
 class Yield(NodeNG):
     """class representing a Yield node"""
-    _astng_fields = ('value',)
+    _astroid_fields = ('value',)
     value = None
 
+class YieldFrom(Yield):
+    """ Class representing a YieldFrom node. """
+
 # constants ##############################################################
 
 CONST_CLS = {
@@ -885,19 +950,16 @@
 _update_const_classes()
 
 def const_factory(value):
-    """return an astng node for a python value"""
-    # since const_factory is called to evaluate content of container (eg list,
-    # tuple), it may be called with some node as argument that should be left
-    # untouched
-    if isinstance(value, NodeNG):
-        return value
+    """return an astroid node for a python value"""
+    # XXX we should probably be stricter here and only consider stuff in
+    # CONST_CLS or do better treatment: in case where value is not in CONST_CLS,
+    # we should rather recall the builder on this value than returning an empty
+    # node (another option being that const_factory shouldn't be called with something
+    # not in CONST_CLS)
+    assert not isinstance(value, NodeNG)
     try:
         return CONST_CLS[value.__class__](value)
     except (KeyError, AttributeError):
-        # some constants (like from gtk._gtk) don't have their class in
-        # CONST_CLS, though we can "assert isinstance(value, tuple(CONST_CLS))"
-        if isinstance(value, tuple(CONST_CLS)):
-            return Const(value)
         node = EmptyNode()
         node.object = value
         return node
diff --git a/third_party/logilab/astng/nodes.py b/third_party/logilab/astroid/nodes.py
similarity index 75%
rename from third_party/logilab/astng/nodes.py
rename to third_party/logilab/astroid/nodes.py
index 56b9980..263ab47 100644
--- a/third_party/logilab/astng/nodes.py
+++ b/third_party/logilab/astroid/nodes.py
@@ -1,22 +1,20 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-# copyright 2003-2010 Sylvain Thenault, all rights reserved.
-# contact mailto:thenault@gmail.com
 #
-# This file is part of logilab-astng.
+# This file is part of astroid.
 #
-# logilab-astng is free software: you can redistribute it and/or modify it
+# astroid is free software: you can redistribute it and/or modify it
 # under the terms of the GNU Lesser General Public License as published by the
 # Free Software Foundation, either version 2.1 of the License, or (at your
 # option) any later version.
 #
-# logilab-astng is distributed in the hope that it will be useful, but
+# astroid is distributed in the hope that it will be useful, but
 # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 # FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
 # for more details.
 #
 # You should have received a copy of the GNU Lesser General Public License along
-# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
+# with astroid. If not, see <http://www.gnu.org/licenses/>.
 """
 on all nodes :
  .is_statement, returning true if the node should be considered as a
@@ -28,7 +26,7 @@
  .frame(), returning the first node defining a new local scope (i.e.
   Module, Function or Class)
  .set_local(name, node), define an identifier <name> on the first parent frame,
-  with the node defining it. This is used by the astng builder and should not
+  with the node defining it. This is used by the astroid builder and should not
   be used from out there.
 
 on From and Import :
@@ -39,15 +37,15 @@
 
 __docformat__ = "restructuredtext en"
 
-from logilab.astng.node_classes import Arguments, AssAttr, Assert, Assign, \
+from astroid.node_classes import Arguments, AssAttr, Assert, Assign, \
     AssName, AugAssign, Backquote, BinOp, BoolOp, Break, CallFunc, Compare, \
     Comprehension, Const, Continue, Decorators, DelAttr, DelName, Delete, \
     Dict, Discard, Ellipsis, EmptyNode, ExceptHandler, Exec, ExtSlice, For, \
     From, Getattr, Global, If, IfExp, Import, Index, Keyword, \
     List, Name, Nonlocal, Pass, Print, Raise, Return, Set, Slice, Starred, Subscript, \
-    TryExcept, TryFinally, Tuple, UnaryOp, While, With, Yield, \
+    TryExcept, TryFinally, Tuple, UnaryOp, While, With, Yield, YieldFrom, \
     const_factory
-from logilab.astng.scoped_nodes import Module, GenExpr, Lambda, DictComp, \
+from astroid.scoped_nodes import Module, GenExpr, Lambda, DictComp, \
     ListComp, SetComp, Function, Class
 
 ALL_NODE_CLASSES = (
@@ -70,6 +68,6 @@
     TryExcept, TryFinally, Tuple,
     UnaryOp,
     While, With,
-    Yield,
+    Yield, YieldFrom
     )
 
diff --git a/third_party/logilab/astng/protocols.py b/third_party/logilab/astroid/protocols.py
similarity index 80%
rename from third_party/logilab/astng/protocols.py
rename to third_party/logilab/astroid/protocols.py
index d8c02e3..4dd515f 100644
--- a/third_party/logilab/astng/protocols.py
+++ b/third_party/logilab/astroid/protocols.py
@@ -1,34 +1,52 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-# copyright 2003-2010 Sylvain Thenault, all rights reserved.
-# contact mailto:thenault@gmail.com
 #
-# This file is part of logilab-astng.
+# This file is part of astroid.
 #
-# logilab-astng is free software: you can redistribute it and/or modify it
+# astroid is free software: you can redistribute it and/or modify it
 # under the terms of the GNU Lesser General Public License as published by the
 # Free Software Foundation, either version 2.1 of the License, or (at your
 # option) any later version.
 #
-# logilab-astng is distributed in the hope that it will be useful, but
+# astroid is distributed in the hope that it will be useful, but
 # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 # FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
 # for more details.
 #
 # You should have received a copy of the GNU Lesser General Public License along
-# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
+# with astroid. If not, see <http://www.gnu.org/licenses/>.
 """this module contains a set of functions to handle python protocols for nodes
 where it makes sense.
 """
 
 __doctype__ = "restructuredtext en"
 
-from logilab.astng.exceptions import InferenceError, NoDefault
-from logilab.astng.node_classes import unpack_infer
-from logilab.astng.bases import copy_context, \
-     raise_if_nothing_infered, yes_if_nothing_infered, Instance, Generator, YES
-from logilab.astng.nodes import const_factory
-from logilab.astng import nodes
+from astroid.exceptions import InferenceError, NoDefault, NotFoundError
+from astroid.node_classes import unpack_infer
+from astroid.bases import InferenceContext, \
+     raise_if_nothing_infered, yes_if_nothing_infered, Instance, YES
+from astroid.nodes import const_factory
+from astroid import nodes
+
+BIN_OP_METHOD = {'+':  '__add__',
+                 '-':  '__sub__',
+                 '/':  '__div__',
+                 '//': '__floordiv__',
+                 '*':  '__mul__',
+                 '**': '__power__',
+                 '%':  '__mod__',
+                 '&':  '__and__',
+                 '|':  '__or__',
+                 '^':  '__xor__',
+                 '<<': '__lshift__',
+                 '>>': '__rshift__',
+                }
+
+UNARY_OP_METHOD = {'+': '__pos__',
+                   '-': '__neg__',
+                   '~': '__invert__',
+                   'not': None, # XXX not '__nonzero__'
+                  }
 
 # unary operations ############################################################
 
@@ -72,8 +90,8 @@
                '^':  lambda a, b: a ^ b,
                '<<': lambda a, b: a << b,
                '>>': lambda a, b: a >> b,
-               }
-for key, impl in BIN_OP_IMPL.items():
+              }
+for key, impl in list(BIN_OP_IMPL.items()):
     BIN_OP_IMPL[key+'='] = impl
 
 def const_infer_binary_op(self, operator, other, context):
@@ -135,6 +153,25 @@
         # XXX else log TypeError
 nodes.Dict.infer_binary_op = yes_if_nothing_infered(dict_infer_binary_op)
 
+def instance_infer_binary_op(self, operator, other, context):
+    try:
+        methods = self.getattr(BIN_OP_METHOD[operator])
+    except (NotFoundError, KeyError):
+        # Unknown operator
+        yield YES
+    else:
+        for method in methods:
+            if not isinstance(method, nodes.Function):
+                continue
+            for result in method.infer_call_result(self, context):
+                if result is not YES:
+                    yield result
+            # We are interested only in the first infered method,
+            # don't go looking in the rest of the methods of the ancestors.
+            break
+
+Instance.infer_binary_op = yes_if_nothing_infered(instance_infer_binary_op)
+
 
 # assignment ##################################################################
 
@@ -168,7 +205,7 @@
                 assigned = stmt.getitem(index, context)
             except (AttributeError, IndexError):
                 continue
-            except TypeError, exc: # stmt is unsubscriptable Const
+            except TypeError: # stmt is unsubscriptable Const
                 continue
             if not asspath:
                 # we achieved to resolved the assignment path,
@@ -233,15 +270,20 @@
             yield self.parent.parent.frame()
             return
     if name == self.vararg:
-        yield const_factory(())
+        vararg = const_factory(())
+        vararg.parent = self
+        yield vararg
         return
     if name == self.kwarg:
-        yield const_factory({})
+        kwarg = const_factory({})
+        kwarg.parent = self
+        yield kwarg
         return
     # if there is a default value, yield it. And then yield YES to reflect
     # we can't guess given argument value
     try:
-        context = copy_context(context)
+        if context is None:
+            context = InferenceContext()
         for infered in self.default_value(name).infer(context):
             yield infered
         yield YES
@@ -253,13 +295,8 @@
     if context.callcontext:
         # reset call context/name
         callcontext = context.callcontext
-        context = copy_context(context)
-        context.callcontext = None
-        for infered in callcontext.infer_argument(self.parent, node.name, context):
-            yield infered
-        return
-    for infered in _arguments_infer_argname(self, node.name, context):
-        yield infered
+        return callcontext.infer_argument(self.parent, node.name, context)
+    return _arguments_infer_argname(self, node.name, context)
 nodes.Arguments.assigned_stmts = arguments_assigned_stmts
 
 
@@ -312,10 +349,13 @@
 
 def with_assigned_stmts(self, node, context=None, asspath=None):
     if asspath is None:
-        for lst in self.vars.infer(context):
-            if isinstance(lst, (nodes.Tuple, nodes.List)):
-                for item in lst.nodes:
-                    yield item
+        for _, vars in self.items:
+            if vars is None:
+                continue
+            for lst in vars.infer(context):
+                if isinstance(lst, (nodes.Tuple, nodes.List)):
+                    for item in lst.nodes:
+                        yield item
 nodes.With.assigned_stmts = raise_if_nothing_infered(with_assigned_stmts)
 
 
diff --git a/third_party/logilab/astng/raw_building.py b/third_party/logilab/astroid/raw_building.py
similarity index 74%
rename from third_party/logilab/astng/raw_building.py
rename to third_party/logilab/astroid/raw_building.py
index 395c26e..d5e8b3d 100644
--- a/third_party/logilab/astng/raw_building.py
+++ b/third_party/logilab/astroid/raw_building.py
@@ -1,23 +1,21 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-# copyright 2003-2010 Sylvain Thenault, all rights reserved.
-# contact mailto:thenault@gmail.com
 #
-# This file is part of logilab-astng.
+# This file is part of astroid.
 #
-# logilab-astng is free software: you can redistribute it and/or modify it
+# astroid is free software: you can redistribute it and/or modify it
 # under the terms of the GNU Lesser General Public License as published by the
 # Free Software Foundation, either version 2.1 of the License, or (at your
 # option) any later version.
 #
-# logilab-astng is distributed in the hope that it will be useful, but
+# astroid is distributed in the hope that it will be useful, but
 # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 # FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
 # for more details.
 #
 # You should have received a copy of the GNU Lesser General Public License along
-# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
-"""this module contains a set of functions to create astng trees from scratch
+# with astroid. If not, see <http://www.gnu.org/licenses/>.
+"""this module contains a set of functions to create astroid trees from scratch
 (build_* functions) or from living object (object_build_* functions)
 """
 
@@ -26,18 +24,26 @@
 import sys
 from os.path import abspath
 from inspect import (getargspec, isdatadescriptor, isfunction, ismethod,
-                     ismethoddescriptor, isclass, isbuiltin)
+                     ismethoddescriptor, isclass, isbuiltin, ismodule)
+import six
 
-from logilab.astng import BUILTINS_MODULE
-from logilab.astng.node_classes import CONST_CLS
-from logilab.astng.nodes import (Module, Class, Const, const_factory, From,
-    Function, EmptyNode, Name, Arguments, Dict, List, Set, Tuple)
-from logilab.astng.bases import Generator
-from logilab.astng.manager import ASTNGManager
-MANAGER = ASTNGManager()
+from astroid.node_classes import CONST_CLS
+from astroid.nodes import (Module, Class, Const, const_factory, From,
+                           Function, EmptyNode, Name, Arguments)
+from astroid.bases import BUILTINS, Generator
+from astroid.manager import AstroidManager
+MANAGER = AstroidManager()
 
 _CONSTANTS = tuple(CONST_CLS) # the keys of CONST_CLS eg python builtin types
 
+def _io_discrepancy(member):
+    # _io module names itself `io`: http://bugs.python.org/issue18602
+    member_self = getattr(member, '__self__', None)
+    return (member_self and
+            ismodule(member_self) and
+            member_self.__name__ == '_io' and
+            member.__module__ == 'io')
+
 def _attach_local_node(parent, node, name):
     node.name = name # needed by add_local_node
     parent.add_local_node(node)
@@ -52,7 +58,10 @@
     enode.object = object
     _attach_local_node(node, enode, name)
 
-EmptyNode.has_underlying_object = lambda self: self.object is not _marker
+def _has_underlying_object(self):
+    return hasattr(self, 'object') and self.object is not _marker
+
+EmptyNode.has_underlying_object = _has_underlying_object
 
 def attach_const_node(node, name, value):
     """create a Const node and register it in the locals of the given
@@ -70,14 +79,14 @@
 
 
 def build_module(name, doc=None):
-    """create and initialize a astng Module node"""
+    """create and initialize a astroid Module node"""
     node = Module(name, doc, pure_python=False)
     node.package = False
     node.parent = None
     return node
 
 def build_class(name, basenames=(), doc=None):
-    """create and initialize a astng Class node"""
+    """create and initialize a astroid Class node"""
     node = Class(name, doc)
     for base in basenames:
         basenode = Name()
@@ -87,7 +96,7 @@
     return node
 
 def build_function(name, args=None, defaults=None, flag=0, doc=None):
-    """create and initialize a astng Function node"""
+    """create and initialize a astroid Function node"""
     args, defaults = args or [], defaults or []
     # first argument is now a list of decorators
     func = Function(name, doc)
@@ -110,7 +119,7 @@
 
 
 def build_from_import(fromname, names):
-    """create and initialize an astng From import statement"""
+    """create and initialize an astroid From import statement"""
     return From(fromname, [(name, None) for name in names])
 
 def register_arguments(func, args=None):
@@ -132,13 +141,13 @@
             register_arguments(func, arg.elts)
 
 def object_build_class(node, member, localname):
-    """create astng for a living class object"""
+    """create astroid for a living class object"""
     basenames = [base.__name__ for base in member.__bases__]
     return _base_class_object_build(node, member, basenames,
                                     localname=localname)
 
 def object_build_function(node, member, localname):
-    """create astng for a living function object"""
+    """create astroid for a living function object"""
     args, varargs, varkw, defaults = getargspec(member)
     if varargs is not None:
         args.append(varargs)
@@ -149,11 +158,11 @@
     node.add_local_node(func, localname)
 
 def object_build_datadescriptor(node, member, name):
-    """create astng for a living data descriptor object"""
+    """create astroid for a living data descriptor object"""
     return _base_class_object_build(node, member, [], name)
 
 def object_build_methoddescriptor(node, member, localname):
-    """create astng for a living method descriptor object"""
+    """create astroid for a living method descriptor object"""
     # FIXME get arguments ?
     func = build_function(getattr(member, '__name__', None) or localname,
                           doc=member.__doc__)
@@ -163,7 +172,7 @@
     node.add_local_node(func, localname)
 
 def _base_class_object_build(node, member, basenames, name=None, localname=None):
-    """create astng for a living class object, with a given set of base names
+    """create astroid for a living class object, with a given set of base names
     (e.g. ancestors)
     """
     klass = build_class(name or getattr(member, '__name__', None) or localname,
@@ -200,23 +209,28 @@
     Function and Class nodes and some others as guessed.
     """
 
-    # astng from living objects ###############################################
+    # astroid from living objects ###############################################
 
     def __init__(self):
         self._done = {}
         self._module = None
 
     def inspect_build(self, module, modname=None, path=None):
-        """build astng from a living module (i.e. using inspect)
+        """build astroid from a living module (i.e. using inspect)
         this is used when there is no python source code available (either
         because it's a built-in module or because the .py is not available)
         """
         self._module = module
         if modname is None:
             modname = module.__name__
-        node = build_module(modname, module.__doc__)
+        try:
+            node = build_module(modname, module.__doc__)
+        except AttributeError:
+            # in jython, java modules have no __doc__ (see #109562)
+            node = build_module(modname)
         node.file = node.path = path and abspath(path) or path
-        MANAGER.astng_cache[modname] = node
+        node.name = modname
+        MANAGER.cache_module(node)
         node.package = hasattr(module, '__path__')
         self._done = {}
         self.object_build(node, module)
@@ -237,17 +251,21 @@
                 attach_dummy_node(node, name)
                 continue
             if ismethod(member):
-                member = member.im_func
+                member = six.get_method_function(member)
             if isfunction(member):
                 # verify this is not an imported function
-                if member.func_code.co_filename != getattr(self._module, '__file__', None):
+                filename = getattr(six.get_function_code(member),
+                                   'co_filename', None)
+                if filename is None:
+                    assert isinstance(member, object)
+                    object_build_methoddescriptor(node, member, name)
+                elif filename != getattr(self._module, '__file__', None):
                     attach_dummy_node(node, name, member)
-                    continue
-                object_build_function(node, member, name)
+                else:
+                    object_build_function(node, member, name)
             elif isbuiltin(member):
-                if self.imported_member(node, member, name):
-                    #if obj is object:
-                    #    print 'skippp', obj, name, member
+                if (not _io_discrepancy(member) and
+                        self.imported_member(node, member, name)):
                     continue
                 object_build_methoddescriptor(node, member, name)
             elif isclass(member):
@@ -269,7 +287,7 @@
             elif isdatadescriptor(member):
                 assert isinstance(member, object)
                 object_build_datadescriptor(node, member, name)
-            elif isinstance(member, _CONSTANTS):
+            elif type(member) in _CONSTANTS:
                 attach_const_node(node, name, member)
             else:
                 # create an empty node so that the name is actually defined
@@ -284,7 +302,7 @@
             modname = getattr(member, '__module__', None)
         except:
             # XXX use logging
-            print 'unexpected error while building astng from living object'
+            print('unexpected error while building astroid from living object')
             import traceback
             traceback.print_exc()
             modname = None
@@ -293,7 +311,7 @@
                 # Python 2.5.1 (r251:54863, Sep  1 2010, 22:03:14)
                 # >>> print object.__new__.__module__
                 # None
-                modname = BUILTINS_MODULE
+                modname = BUILTINS
             else:
                 attach_dummy_node(node, name, member)
                 return True
@@ -310,28 +328,30 @@
         return False
 
 
-### astng boot strapping ################################################### ###
+### astroid bootstrapping ######################################################
+Astroid_BUILDER = InspectBuilder()
 
 _CONST_PROXY = {}
-def astng_boot_strapping():
-    """astng boot strapping the builtins module"""
+def _astroid_bootstrapping(astroid_builtin=None):
+    """astroid boot strapping the builtins module"""
     # this boot strapping is necessary since we need the Const nodes to
     # inspect_build builtins, and then we can proxy Const
-    builder = InspectBuilder()
-    from logilab.common.compat import builtins
-    astng_builtin = builder.inspect_build(builtins)
+    if astroid_builtin is None:
+        from logilab.common.compat import builtins
+        astroid_builtin = Astroid_BUILDER.inspect_build(builtins)
+
     for cls, node_cls in CONST_CLS.items():
         if cls is type(None):
             proxy = build_class('NoneType')
-            proxy.parent = astng_builtin
+            proxy.parent = astroid_builtin
         else:
-            proxy = astng_builtin.getattr(cls.__name__)[0] # XXX
+            proxy = astroid_builtin.getattr(cls.__name__)[0]
         if cls in (dict, list, set, tuple):
             node_cls._proxied = proxy
         else:
             _CONST_PROXY[cls] = proxy
 
-astng_boot_strapping()
+_astroid_bootstrapping()
 
 # TODO : find a nicer way to handle this situation;
 # However __proxied introduced an
@@ -340,6 +360,7 @@
     return _CONST_PROXY[const.value.__class__]
 Const._proxied = property(_set_proxied)
 
-# FIXME : is it alright that Generator._proxied is not a astng node?
-Generator._proxied = MANAGER.infer_astng_from_something(type(a for a in ()))
+from types import GeneratorType
+Generator._proxied = Class(GeneratorType.__name__, GeneratorType.__doc__)
+Astroid_BUILDER.object_build(Generator._proxied, GeneratorType)
 
diff --git a/third_party/logilab/astng/rebuilder.py b/third_party/logilab/astroid/rebuilder.py
similarity index 78%
rename from third_party/logilab/astng/rebuilder.py
rename to third_party/logilab/astroid/rebuilder.py
index bac7a09..14c606e 100644
--- a/third_party/logilab/astng/rebuilder.py
+++ b/third_party/logilab/astroid/rebuilder.py
@@ -1,30 +1,29 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-# copyright 2003-2010 Sylvain Thenault, all rights reserved.
-# contact mailto:thenault@gmail.com
 #
-# This file is part of logilab-astng.
+# This file is part of astroid.
 #
-# logilab-astng is free software: you can redistribute it and/or modify it
+# astroid is free software: you can redistribute it and/or modify it
 # under the terms of the GNU Lesser General Public License as published by the
 # Free Software Foundation, either version 2.1 of the License, or (at your
 # option) any later version.
 #
-# logilab-astng is distributed in the hope that it will be useful, but
+# astroid is distributed in the hope that it will be useful, but
 # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 # FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
 # for more details.
 #
 # You should have received a copy of the GNU Lesser General Public License along
-# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
+# with astroid. If not, see <http://www.gnu.org/licenses/>.
 """this module contains utilities for rebuilding a _ast tree in
-order to get a single ASTNG representation
+order to get a single Astroid representation
 """
 
 import sys
-from _ast import (Expr as Discard, Str,
+from _ast import (
+    Expr as Discard, Str,
     # binary operators
-    Add, Div, FloorDiv,  Mod, Mult, Pow, Sub, BitAnd, BitOr, BitXor,
+    Add, Div, FloorDiv, Mod, Mult, Pow, Sub, BitAnd, BitOr, BitXor,
     LShift, RShift,
     # logical operators
     And, Or,
@@ -34,8 +33,7 @@
     Eq, Gt, GtE, In, Is, IsNot, Lt, LtE, NotEq, NotIn,
     )
 
-from logilab.astng.exceptions import ASTNGBuildingException
-from logilab.astng import nodes as new
+from astroid import nodes as new
 
 
 _BIN_OP_CLASSES = {Add: '+',
@@ -49,15 +47,18 @@
                    Pow: '**',
                    Sub: '-',
                    LShift: '<<',
-                   RShift: '>>'}
+                   RShift: '>>',
+                  }
 
 _BOOL_OP_CLASSES = {And: 'and',
-                    Or: 'or'}
+                    Or: 'or',
+                   }
 
 _UNARY_OP_CLASSES = {UAdd: '+',
                      USub: '-',
                      Not: 'not',
-                     Invert: '~'}
+                     Invert: '~',
+                    }
 
 _CMP_OP_CLASSES = {Eq: '==',
                    Gt: '>',
@@ -68,11 +69,13 @@
                    Lt: '<',
                    LtE: '<=',
                    NotEq: '!=',
-                   NotIn: 'not in'}
+                   NotIn: 'not in',
+                  }
 
 CONST_NAME_TRANSFORMS = {'None':  None,
                          'True':  True,
-                         'False': False}
+                         'False': False,
+                        }
 
 REDIRECT = {'arguments': 'Arguments',
             'Attribute': 'Getattr',
@@ -88,13 +91,14 @@
             'ImportFrom': 'From',
             'keyword': 'Keyword',
             'Repr': 'Backquote',
-            }
+           }
+PY3K = sys.version_info >= (3, 0)
+PY34 = sys.version_info >= (3, 4)
 
 def _init_set_doc(node, newnode):
     newnode.doc = None
     try:
         if isinstance(node.body[0], Discard) and isinstance(node.body[0].value, Str):
-            newnode.tolineno = node.body[0].lineno
             newnode.doc = node.body[0].value.s
             node.body = node.body[1:]
 
@@ -103,10 +107,8 @@
 
 def _lineno_parent(oldnode, newnode, parent):
     newnode.parent = parent
-    if hasattr(oldnode, 'lineno'):
-        newnode.lineno = oldnode.lineno
-    if hasattr(oldnode, 'col_offset'):
-        newnode.col_offset = oldnode.col_offset
+    newnode.lineno = oldnode.lineno
+    newnode.col_offset = oldnode.col_offset
 
 def _set_infos(oldnode, newnode, parent):
     newnode.parent = parent
@@ -114,35 +116,47 @@
         newnode.lineno = oldnode.lineno
     if hasattr(oldnode, 'col_offset'):
         newnode.col_offset = oldnode.col_offset
-    newnode.set_line_info(newnode.last_child()) # set_line_info accepts None
 
-
+def _create_yield_node(node, parent, rebuilder, factory):
+    newnode = factory()
+    _lineno_parent(node, newnode, parent)
+    if node.value is not None:
+        newnode.value = rebuilder.visit(node.value, newnode)
+    return newnode
 
 
 class TreeRebuilder(object):
-    """Rebuilds the _ast tree to become an ASTNG tree"""
+    """Rebuilds the _ast tree to become an Astroid tree"""
 
-    _visit_meths = {}
-    def __init__(self):
-        self.init()
-
-    def init(self):
+    def __init__(self, manager):
+        self._manager = manager
         self.asscontext = None
-        self._metaclass = ['']
         self._global_names = []
         self._from_nodes = []
         self._delayed_assattr = []
+        self._visit_meths = {}
+        self._transform = manager.transform
+
+    def visit_module(self, node, modname, modpath, package):
+        """visit a Module node by returning a fresh instance of it"""
+        newnode = new.Module(modname, None)
+        newnode.package = package
+        newnode.parent = None
+        _init_set_doc(node, newnode)
+        newnode.body = [self.visit(child, newnode) for child in node.body]
+        newnode.file = newnode.path = modpath
+        return self._transform(newnode)
 
     def visit(self, node, parent):
         cls = node.__class__
         if cls in self._visit_meths:
-            return self._visit_meths[cls](node, parent)
+            visit_method = self._visit_meths[cls]
         else:
             cls_name = cls.__name__
             visit_name = 'visit_' + REDIRECT.get(cls_name, cls_name).lower()
             visit_method = getattr(self, visit_name)
             self._visit_meths[cls] = visit_method
-            return visit_method(node, parent)
+        return self._transform(visit_method(node, parent))
 
     def _save_assignment(self, node, name=None):
         """save assignement situation since node.parent is not available yet"""
@@ -155,19 +169,42 @@
     def visit_arguments(self, node, parent):
         """visit a Arguments node by returning a fresh instance of it"""
         newnode = new.Arguments()
-        _lineno_parent(node, newnode, parent)
+        newnode.parent = parent
         self.asscontext = "Ass"
         newnode.args = [self.visit(child, newnode) for child in node.args]
         self.asscontext = None
         newnode.defaults = [self.visit(child, newnode) for child in node.defaults]
-        newnode.vararg = node.vararg
-        newnode.kwarg = node.kwarg
+        newnode.kwonlyargs = []
+        newnode.kw_defaults = []
+        vararg, kwarg = node.vararg, node.kwarg
+        # change added in 82732 (7c5c678e4164), vararg and kwarg
+        # are instances of `_ast.arg`, not strings
+        if vararg:
+            if PY34:
+                if vararg.annotation:
+                    newnode.varargannotation = self.visit(vararg.annotation,
+                                                          newnode)
+                vararg = vararg.arg
+            elif PY3K and node.varargannotation:
+                newnode.varargannotation = self.visit(node.varargannotation,
+                                                      newnode)
+        if kwarg:
+            if PY34:
+                if kwarg.annotation:
+                    newnode.kwargannotation = self.visit(kwarg.annotation,
+                                                         newnode)
+                kwarg = kwarg.arg
+            elif PY3K:
+                if node.kwargannotation:
+                    newnode.kwargannotation = self.visit(node.kwargannotation,
+                                                         newnode)
+        newnode.vararg = vararg
+        newnode.kwarg = kwarg
         # save argument names in locals:
-        if node.vararg:
-            newnode.parent.set_local(newnode.vararg, newnode)
-        if node.kwarg:
-            newnode.parent.set_local(newnode.kwarg, newnode)
-        newnode.set_line_info(newnode.last_child())
+        if vararg:
+            newnode.parent.set_local(vararg, newnode)
+        if kwarg:
+            newnode.parent.set_local(kwarg, newnode)
         return newnode
 
     def visit_assattr(self, node, parent):
@@ -178,7 +215,6 @@
         newnode.expr = self.visit(node.expr, newnode)
         self.asscontext = assc
         self._delayed_assattr.append(newnode)
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_assert(self, node, parent):
@@ -188,7 +224,6 @@
         newnode.test = self.visit(node.test, newnode)
         if node.msg is not None:
             newnode.fail = self.visit(node.msg, newnode)
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_assign(self, node, parent):
@@ -202,8 +237,8 @@
         # set some function or metaclass infos  XXX explain ?
         klass = newnode.parent.frame()
         if (isinstance(klass, new.Class)
-            and isinstance(newnode.value, new.CallFunc)
-            and isinstance(newnode.value.func, new.Name)):
+                and isinstance(newnode.value, new.CallFunc)
+                and isinstance(newnode.value.func, new.Name)):
             func_name = newnode.value.func.name
             for ass_node in newnode.targets:
                 try:
@@ -216,10 +251,6 @@
                         meth.extra_decorators.append(newnode.value)
                 except (AttributeError, KeyError):
                     continue
-        elif getattr(newnode.targets[0], 'name', None) == '__metaclass__':
-            # XXX check more...
-            self._metaclass[-1] = 'type' # XXX get the actual metaclass
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_assname(self, node, parent, node_name=None):
@@ -239,7 +270,6 @@
         newnode.target = self.visit(node.target, newnode)
         self.asscontext = None
         newnode.value = self.visit(node.value, newnode)
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_backquote(self, node, parent):
@@ -247,7 +277,6 @@
         newnode = new.Backquote()
         _lineno_parent(node, newnode, parent)
         newnode.value = self.visit(node.value, newnode)
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_binop(self, node, parent):
@@ -257,7 +286,6 @@
         newnode.left = self.visit(node.left, newnode)
         newnode.right = self.visit(node.right, newnode)
         newnode.op = _BIN_OP_CLASSES[node.op.__class__]
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_boolop(self, node, parent):
@@ -266,7 +294,6 @@
         _lineno_parent(node, newnode, parent)
         newnode.values = [self.visit(child, newnode) for child in node.values]
         newnode.op = _BOOL_OP_CLASSES[node.op.__class__]
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_break(self, node, parent):
@@ -285,13 +312,12 @@
             newnode.starargs = self.visit(node.starargs, newnode)
         if node.kwargs is not None:
             newnode.kwargs = self.visit(node.kwargs, newnode)
-        newnode.args.extend(self.visit(child, newnode) for child in node.keywords)
-        newnode.set_line_info(newnode.last_child())
+        for child in node.keywords:
+            newnode.args.append(self.visit(child, newnode))
         return newnode
 
     def visit_class(self, node, parent):
-        """visit a Class node to become astng"""
-        self._metaclass.append(self._metaclass[-1])
+        """visit a Class node to become astroid"""
         newnode = new.Class(node.name, None)
         _lineno_parent(node, newnode, parent)
         _init_set_doc(node, newnode)
@@ -299,12 +325,6 @@
         newnode.body = [self.visit(child, newnode) for child in node.body]
         if 'decorator_list' in node._fields and node.decorator_list:# py >= 2.6
             newnode.decorators = self.visit_decorators(node, newnode)
-        newnode.set_line_info(newnode.last_child())
-        metaclass = self._metaclass.pop()
-        if not newnode.bases:
-            # no base classes, detect new / style old style according to
-            # current scope
-            newnode._newstyle = metaclass == 'type'
         newnode.parent.frame().set_local(newnode.name, newnode)
         return newnode
 
@@ -326,34 +346,31 @@
         _lineno_parent(node, newnode, parent)
         newnode.left = self.visit(node.left, newnode)
         newnode.ops = [(_CMP_OP_CLASSES[op.__class__], self.visit(expr, newnode))
-                    for (op, expr) in zip(node.ops, node.comparators)]
-        newnode.set_line_info(newnode.last_child())
+                       for (op, expr) in zip(node.ops, node.comparators)]
         return newnode
 
     def visit_comprehension(self, node, parent):
         """visit a Comprehension node by returning a fresh instance of it"""
         newnode = new.Comprehension()
-        _lineno_parent(node, newnode, parent)
+        newnode.parent = parent
         self.asscontext = "Ass"
         newnode.target = self.visit(node.target, newnode)
         self.asscontext = None
         newnode.iter = self.visit(node.iter, newnode)
         newnode.ifs = [self.visit(child, newnode) for child in node.ifs]
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_decorators(self, node, parent):
         """visit a Decorators node by returning a fresh instance of it"""
         # /!\ node is actually a _ast.Function node while
-        # parent is a astng.nodes.Function node
+        # parent is a astroid.nodes.Function node
         newnode = new.Decorators()
         _lineno_parent(node, newnode, parent)
         if 'decorators' in node._fields: # py < 2.6, i.e. 2.5
             decorators = node.decorators
         else:
-            decorators= node.decorator_list
+            decorators = node.decorator_list
         newnode.nodes = [self.visit(child, newnode) for child in decorators]
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_delete(self, node, parent):
@@ -363,7 +380,6 @@
         self.asscontext = "Del"
         newnode.targets = [self.visit(child, newnode) for child in node.targets]
         self.asscontext = None
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_dict(self, node, parent):
@@ -371,8 +387,7 @@
         newnode = new.Dict()
         _lineno_parent(node, newnode, parent)
         newnode.items = [(self.visit(key, newnode), self.visit(value, newnode))
-                          for key, value in zip(node.keys, node.values)]
-        newnode.set_line_info(newnode.last_child())
+                         for key, value in zip(node.keys, node.values)]
         return newnode
 
     def visit_dictcomp(self, node, parent):
@@ -383,7 +398,6 @@
         newnode.value = self.visit(node.value, newnode)
         newnode.generators = [self.visit(child, newnode)
                               for child in node.generators]
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_discard(self, node, parent):
@@ -391,7 +405,6 @@
         newnode = new.Discard()
         _lineno_parent(node, newnode, parent)
         newnode.value = self.visit(node.value, newnode)
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_ellipsis(self, node, parent):
@@ -418,7 +431,6 @@
             newnode.name = self.visit(node.name, newnode)
             self.asscontext = None
         newnode.body = [self.visit(child, newnode) for child in node.body]
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_exec(self, node, parent):
@@ -430,15 +442,13 @@
             newnode.globals = self.visit(node.globals, newnode)
         if node.locals is not None:
             newnode.locals = self.visit(node.locals, newnode)
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_extslice(self, node, parent):
         """visit an ExtSlice node by returning a fresh instance of it"""
         newnode = new.ExtSlice()
-        _lineno_parent(node, newnode, parent)
+        newnode.parent = parent
         newnode.dims = [self.visit(dim, newnode) for dim in node.dims]
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_for(self, node, parent):
@@ -451,20 +461,19 @@
         newnode.iter = self.visit(node.iter, newnode)
         newnode.body = [self.visit(child, newnode) for child in node.body]
         newnode.orelse = [self.visit(child, newnode) for child in node.orelse]
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_from(self, node, parent):
         """visit a From node by returning a fresh instance of it"""
         names = [(alias.name, alias.asname) for alias in node.names]
-        newnode = new.From(node.module or '', names, node.level)
+        newnode = new.From(node.module or '', names, node.level or None)
         _set_infos(node, newnode, parent)
         # store From names to add them to locals after building
         self._from_nodes.append(newnode)
         return newnode
 
     def visit_function(self, node, parent):
-        """visit an Function node to become astng"""
+        """visit an Function node to become astroid"""
         self._global_names.append({})
         newnode = new.Function(node.name, None)
         _lineno_parent(node, newnode, parent)
@@ -478,21 +487,22 @@
         decorators = getattr(node, attr)
         if decorators:
             newnode.decorators = self.visit_decorators(node, newnode)
-        newnode.set_line_info(newnode.last_child())
+        if PY3K and node.returns:
+            newnode.returns = self.visit(node.returns, newnode)
         self._global_names.pop()
         frame = newnode.parent.frame()
         if isinstance(frame, new.Class):
             if newnode.name == '__new__':
-                newnode.type = 'classmethod'
+                newnode._type = 'classmethod'
             else:
-                newnode.type = 'method'
+                newnode._type = 'method'
         if newnode.decorators is not None:
             for decorator_expr in newnode.decorators.nodes:
                 if isinstance(decorator_expr, new.Name):
                     if decorator_expr.name in ('classmethod', 'staticmethod'):
-                        newnode.type = decorator_expr.name
+                        newnode._type = decorator_expr.name
                     elif decorator_expr.name == 'classproperty':
-                        newnode.type = 'classmethod'
+                        newnode._type = 'classmethod'
         frame.set_local(newnode.name, newnode)
         return newnode
 
@@ -502,7 +512,6 @@
         _lineno_parent(node, newnode, parent)
         newnode.elt = self.visit(node.elt, newnode)
         newnode.generators = [self.visit(child, newnode) for child in node.generators]
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_getattr(self, node, parent):
@@ -522,11 +531,10 @@
         newnode.expr = self.visit(node.value, newnode)
         self.asscontext = asscontext
         newnode.attrname = node.attr
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_global(self, node, parent):
-        """visit an Global node to become astng"""
+        """visit an Global node to become astroid"""
         newnode = new.Global(node.names)
         _set_infos(node, newnode, parent)
         if self._global_names: # global at the module level, no effect
@@ -541,7 +549,6 @@
         newnode.test = self.visit(node.test, newnode)
         newnode.body = [self.visit(child, newnode) for child in node.body]
         newnode.orelse = [self.visit(child, newnode) for child in node.orelse]
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_ifexp(self, node, parent):
@@ -551,7 +558,6 @@
         newnode.test = self.visit(node.test, newnode)
         newnode.body = self.visit(node.body, newnode)
         newnode.orelse = self.visit(node.orelse, newnode)
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_import(self, node, parent):
@@ -568,18 +574,16 @@
     def visit_index(self, node, parent):
         """visit a Index node by returning a fresh instance of it"""
         newnode = new.Index()
-        _lineno_parent(node, newnode, parent)
+        newnode.parent = parent
         newnode.value = self.visit(node.value, newnode)
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_keyword(self, node, parent):
         """visit a Keyword node by returning a fresh instance of it"""
         newnode = new.Keyword()
-        _lineno_parent(node, newnode, parent)
+        newnode.parent = parent
         newnode.arg = node.arg
         newnode.value = self.visit(node.value, newnode)
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_lambda(self, node, parent):
@@ -588,7 +592,6 @@
         _lineno_parent(node, newnode, parent)
         newnode.args = self.visit(node.args, newnode)
         newnode.body = self.visit(node.body, newnode)
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_list(self, node, parent):
@@ -596,7 +599,6 @@
         newnode = new.List()
         _lineno_parent(node, newnode, parent)
         newnode.elts = [self.visit(child, newnode) for child in node.elts]
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_listcomp(self, node, parent):
@@ -606,17 +608,6 @@
         newnode.elt = self.visit(node.elt, newnode)
         newnode.generators = [self.visit(child, newnode)
                               for child in node.generators]
-        newnode.set_line_info(newnode.last_child())
-        return newnode
-
-    def visit_module(self, node, modname, package):
-        """visit a Module node by returning a fresh instance of it"""
-        newnode = new.Module(modname, None)
-        newnode.package = package
-        _lineno_parent(node, newnode, parent=None)
-        _init_set_doc(node, newnode)
-        newnode.body = [self.visit(child, newnode) for child in node.body]
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_name(self, node, parent):
@@ -639,7 +630,6 @@
         # XXX REMOVE me :
         if self.asscontext in ('Del', 'Ass'): # 'Aug' ??
             self._save_assignment(newnode)
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_bytes(self, node, parent):
@@ -674,7 +664,6 @@
         if node.dest is not None:
             newnode.dest = self.visit(node.dest, newnode)
         newnode.values = [self.visit(child, newnode) for child in node.values]
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_raise(self, node, parent):
@@ -687,7 +676,6 @@
             newnode.inst = self.visit(node.inst, newnode)
         if node.tback is not None:
             newnode.tback = self.visit(node.tback, newnode)
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_return(self, node, parent):
@@ -696,15 +684,13 @@
         _lineno_parent(node, newnode, parent)
         if node.value is not None:
             newnode.value = self.visit(node.value, newnode)
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_set(self, node, parent):
-        """visit a Tuple node by returning a fresh instance of it"""
+        """visit a Set node by returning a fresh instance of it"""
         newnode = new.Set()
         _lineno_parent(node, newnode, parent)
         newnode.elts = [self.visit(child, newnode) for child in node.elts]
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_setcomp(self, node, parent):
@@ -714,20 +700,18 @@
         newnode.elt = self.visit(node.elt, newnode)
         newnode.generators = [self.visit(child, newnode)
                               for child in node.generators]
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_slice(self, node, parent):
         """visit a Slice node by returning a fresh instance of it"""
         newnode = new.Slice()
-        _lineno_parent(node, newnode, parent)
+        newnode.parent = parent
         if node.lower is not None:
             newnode.lower = self.visit(node.lower, newnode)
         if node.upper is not None:
             newnode.upper = self.visit(node.upper, newnode)
         if node.step is not None:
             newnode.step = self.visit(node.step, newnode)
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_subscript(self, node, parent):
@@ -738,7 +722,6 @@
         newnode.value = self.visit(node.value, newnode)
         newnode.slice = self.visit(node.slice, newnode)
         self.asscontext = subcontext
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_tryexcept(self, node, parent):
@@ -748,7 +731,6 @@
         newnode.body = [self.visit(child, newnode) for child in node.body]
         newnode.handlers = [self.visit(child, newnode) for child in node.handlers]
         newnode.orelse = [self.visit(child, newnode) for child in node.orelse]
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_tryfinally(self, node, parent):
@@ -757,7 +739,6 @@
         _lineno_parent(node, newnode, parent)
         newnode.body = [self.visit(child, newnode) for child in node.body]
         newnode.finalbody = [self.visit(n, newnode) for n in node.finalbody]
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_tuple(self, node, parent):
@@ -765,7 +746,6 @@
         newnode = new.Tuple()
         _lineno_parent(node, newnode, parent)
         newnode.elts = [self.visit(child, newnode) for child in node.elts]
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_unaryop(self, node, parent):
@@ -774,7 +754,6 @@
         _lineno_parent(node, newnode, parent)
         newnode.operand = self.visit(node.operand, newnode)
         newnode.op = _UNARY_OP_CLASSES[node.op.__class__]
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_while(self, node, parent):
@@ -784,31 +763,25 @@
         newnode.test = self.visit(node.test, newnode)
         newnode.body = [self.visit(child, newnode) for child in node.body]
         newnode.orelse = [self.visit(child, newnode) for child in node.orelse]
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_with(self, node, parent):
-        """visit a With node by returning a fresh instance of it"""
         newnode = new.With()
         _lineno_parent(node, newnode, parent)
-        newnode.expr = self.visit(node.context_expr, newnode)
+        expr = self.visit(node.context_expr, newnode)
         self.asscontext = "Ass"
         if node.optional_vars is not None:
-            newnode.vars = self.visit(node.optional_vars, newnode)
+            vars = self.visit(node.optional_vars, newnode)
+        else:
+            vars = None
         self.asscontext = None
+        newnode.items = [(expr, vars)]
         newnode.body = [self.visit(child, newnode) for child in node.body]
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_yield(self, node, parent):
         """visit a Yield node by returning a fresh instance of it"""
-        newnode = new.Yield()
-        _lineno_parent(node, newnode, parent)
-        if node.value is not None:
-            newnode.value = self.visit(node.value, newnode)
-        newnode.set_line_info(newnode.last_child())
-        return newnode
-
+        return _create_yield_node(node, parent, self, new.Yield)
 
 class TreeRebuilder3k(TreeRebuilder):
     """extend and overwrite TreeRebuilder for python3k"""
@@ -816,9 +789,26 @@
     def visit_arg(self, node, parent):
         """visit a arg node by returning a fresh AssName instance"""
         # the <arg> node is coming from py>=3.0, but we use AssName in py2.x
-        # XXX or we should instead introduce a Arg node in astng ?
+        # XXX or we should instead introduce a Arg node in astroid ?
         return self.visit_assname(node, parent, node.arg)
 
+    def visit_nameconstant(self, node, parent):
+        # in Python 3.4 we have NameConstant for True / False / None
+        newnode = new.Const(node.value)
+        _set_infos(node, newnode, parent)
+        return newnode
+
+    def visit_arguments(self, node, parent):
+        newnode = super(TreeRebuilder3k, self).visit_arguments(node, parent)
+        self.asscontext = "Ass"
+        newnode.kwonlyargs = [self.visit(child, newnode) for child in node.kwonlyargs]
+        self.asscontext = None
+        newnode.kw_defaults = [self.visit(child, newnode) if child else None for child in node.kw_defaults]
+        newnode.annotations = [
+            self.visit(arg.annotation, newnode) if arg.annotation else None
+            for arg in node.args]
+        return newnode
+
     def visit_excepthandler(self, node, parent):
         """visit an ExceptHandler node by returning a fresh instance of it"""
         newnode = new.ExceptHandler()
@@ -828,7 +818,6 @@
         if node.name is not None:
             newnode.name = self.visit_assname(node, newnode, node.name)
         newnode.body = [self.visit(child, newnode) for child in node.body]
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_nonlocal(self, node, parent):
@@ -846,7 +835,6 @@
             newnode.exc = self.visit(node.exc, newnode)
         if node.cause is not None:
             newnode.cause = self.visit(node.cause, newnode)
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
     def visit_starred(self, node, parent):
@@ -854,9 +842,63 @@
         newnode = new.Starred()
         _lineno_parent(node, newnode, parent)
         newnode.value = self.visit(node.value, newnode)
-        newnode.set_line_info(newnode.last_child())
         return newnode
 
+    def visit_try(self, node, parent):
+        # python 3.3 introduce a new Try node replacing TryFinally/TryExcept nodes
+        if node.finalbody:
+            newnode = new.TryFinally()
+            _lineno_parent(node, newnode, parent)
+            newnode.finalbody = [self.visit(n, newnode) for n in node.finalbody]
+            if node.handlers:
+                excnode = new.TryExcept()
+                _lineno_parent(node, excnode, newnode)
+                excnode.body = [self.visit(child, excnode) for child in node.body]
+                excnode.handlers = [self.visit(child, excnode) for child in node.handlers]
+                excnode.orelse = [self.visit(child, excnode) for child in node.orelse]
+                newnode.body = [excnode]
+            else:
+                newnode.body = [self.visit(child, newnode) for child in node.body]
+        elif node.handlers:
+            newnode = new.TryExcept()
+            _lineno_parent(node, newnode, parent)
+            newnode.body = [self.visit(child, newnode) for child in node.body]
+            newnode.handlers = [self.visit(child, newnode) for child in node.handlers]
+            newnode.orelse = [self.visit(child, newnode) for child in node.orelse]
+        return newnode
+
+    def visit_with(self, node, parent):
+        if 'items' not in node._fields:
+            # python < 3.3
+            return super(TreeRebuilder3k, self).visit_with(node, parent)
+
+        newnode = new.With()
+        _lineno_parent(node, newnode, parent)
+        def visit_child(child):
+            expr = self.visit(child.context_expr, newnode)
+            self.asscontext = 'Ass'
+            if child.optional_vars:
+                var = self.visit(child.optional_vars, newnode)
+            else:
+                var = None
+            self.asscontext = None
+            return expr, var
+        newnode.items = [visit_child(child)
+                         for child in node.items]
+        newnode.body = [self.visit(child, newnode) for child in node.body]
+        return newnode
+
+    def visit_yieldfrom(self, node, parent):
+        return _create_yield_node(node, parent, self, new.YieldFrom)
+
+    def visit_class(self, node, parent):
+        newnode = super(TreeRebuilder3k, self).visit_class(node, parent)
+        newnode._newstyle = True
+        for keyword in node.keywords:
+            if keyword.arg == 'metaclass':
+                newnode._metaclass = self.visit(keyword, newnode).value
+                break
+        return newnode
 
 if sys.version_info >= (3, 0):
     TreeRebuilder = TreeRebuilder3k
diff --git a/third_party/logilab/astroid/scoped_nodes.py b/third_party/logilab/astroid/scoped_nodes.py
new file mode 100644
index 0000000..db39b8b
--- /dev/null
+++ b/third_party/logilab/astroid/scoped_nodes.py
@@ -0,0 +1,1459 @@
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of astroid.
+#
+# astroid is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 2.1 of the License, or (at your
+# option) any later version.
+#
+# astroid is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
+# for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with astroid. If not, see <http://www.gnu.org/licenses/>.
+"""This module contains the classes for "scoped" node, i.e. which are opening a
+new local scope in the language definition : Module, Class, Function (and
+Lambda, GenExpr, DictComp and SetComp to some extent).
+"""
+from __future__ import with_statement
+
+__doctype__ = "restructuredtext en"
+
+import sys
+import warnings
+from itertools import chain
+try:
+    from io import BytesIO
+except ImportError:
+    from cStringIO import StringIO as BytesIO
+
+import six
+from logilab.common.compat import builtins
+from logilab.common.decorators import cached, cachedproperty
+
+from astroid.exceptions import NotFoundError, \
+     AstroidBuildingException, InferenceError, ResolveError
+from astroid.node_classes import Const, DelName, DelAttr, \
+     Dict, From, List, Pass, Raise, Return, Tuple, Yield, YieldFrom, \
+     LookupMixIn, const_factory as cf, unpack_infer, Name, CallFunc
+from astroid.bases import NodeNG, InferenceContext, Instance,\
+     YES, Generator, UnboundMethod, BoundMethod, _infer_stmts, \
+     BUILTINS
+from astroid.mixins import FilterStmtsMixin
+from astroid.bases import Statement
+from astroid.manager import AstroidManager
+
+ITER_METHODS = ('__iter__', '__getitem__')
+PY3K = sys.version_info >= (3, 0)
+
+def _c3_merge(sequences):
+    """Merges MROs in *sequences* to a single MRO using the C3 algorithm.
+
+    Adapted from http://www.python.org/download/releases/2.3/mro/.
+
+    """
+    result = []
+    while True:
+        sequences = [s for s in sequences if s]   # purge empty sequences
+        if not sequences:
+            return result
+        for s1 in sequences:   # find merge candidates among seq heads
+            candidate = s1[0]
+            for s2 in sequences:
+                if candidate in s2[1:]:
+                    candidate = None
+                    break      # reject the current head, it appears later
+            else:
+                break
+        if not candidate:
+            # Show all the remaining bases, which were considered as
+            # candidates for the next mro sequence.
+            bases = ["({})".format(", ".join(base.name
+                                             for base in subsequence))
+                     for subsequence in sequences]
+            raise ResolveError("Cannot create a consistent method resolution "
+                               "order for bases %s" % ", ".join(bases))
+        result.append(candidate)
+        # remove the chosen candidate
+        for seq in sequences:
+            if seq[0] == candidate:
+                del seq[0]
+
+
+def _verify_duplicates_mro(sequences):
+    for sequence in sequences:
+        names = [node.qname() for node in sequence]
+        if len(names) != len(set(names)):
+            raise ResolveError('Duplicates found in the mro.')
+
+
+def remove_nodes(func, cls):
+    def wrapper(*args, **kwargs):
+        nodes = [n for n in func(*args, **kwargs) if not isinstance(n, cls)]
+        if not nodes:
+            raise NotFoundError()
+        return nodes
+    return wrapper
+
+
+def function_to_method(n, klass):
+    if isinstance(n, Function):
+        if n.type == 'classmethod':
+            return BoundMethod(n, klass)
+        if n.type != 'staticmethod':
+            return UnboundMethod(n)
+    return n
+
+def std_special_attributes(self, name, add_locals=True):
+    if add_locals:
+        locals = self.locals
+    else:
+        locals = {}
+    if name == '__name__':
+        return [cf(self.name)] + locals.get(name, [])
+    if name == '__doc__':
+        return [cf(self.doc)] + locals.get(name, [])
+    if name == '__dict__':
+        return [Dict()] + locals.get(name, [])
+    raise NotFoundError(name)
+
+MANAGER = AstroidManager()
+def builtin_lookup(name):
+    """lookup a name into the builtin module
+    return the list of matching statements and the astroid for the builtin
+    module
+    """
+    builtin_astroid = MANAGER.ast_from_module(builtins)
+    if name == '__dict__':
+        return builtin_astroid, ()
+    try:
+        stmts = builtin_astroid.locals[name]
+    except KeyError:
+        stmts = ()
+    return builtin_astroid, stmts
+
+
+# TODO move this Mixin to mixins.py; problem: 'Function' in _scope_lookup
+class LocalsDictNodeNG(LookupMixIn, NodeNG):
+    """ this class provides locals handling common to Module, Function
+    and Class nodes, including a dict like interface for direct access
+    to locals information
+    """
+
+    # attributes below are set by the builder module or by raw factories
+
+    # dictionary of locals with name as key and node defining the local as
+    # value
+
+    def qname(self):
+        """return the 'qualified' name of the node, eg module.name,
+        module.class.name ...
+        """
+        if self.parent is None:
+            return self.name
+        return '%s.%s' % (self.parent.frame().qname(), self.name)
+
+    def frame(self):
+        """return the first parent frame node (i.e. Module, Function or Class)
+        """
+        return self
+
+    def scope(self):
+        """return the first node defining a new scope (i.e. Module,
+        Function, Class, Lambda but also GenExpr, DictComp and SetComp)
+        """
+        return self
+
+
+    def _scope_lookup(self, node, name, offset=0):
+        """XXX method for interfacing the scope lookup"""
+        try:
+            stmts = node._filter_stmts(self.locals[name], self, offset)
+        except KeyError:
+            stmts = ()
+        if stmts:
+            return self, stmts
+        if self.parent: # i.e. not Module
+            # nested scope: if parent scope is a function, that's fine
+            # else jump to the module
+            pscope = self.parent.scope()
+            if not pscope.is_function:
+                pscope = pscope.root()
+            return pscope.scope_lookup(node, name)
+        return builtin_lookup(name) # Module
+
+
+
+    def set_local(self, name, stmt):
+        """define <name> in locals (<stmt> is the node defining the name)
+        if the node is a Module node (i.e. has globals), add the name to
+        globals
+
+        if the name is already defined, ignore it
+        """
+        #assert not stmt in self.locals.get(name, ()), (self, stmt)
+        self.locals.setdefault(name, []).append(stmt)
+
+    __setitem__ = set_local
+
+    def _append_node(self, child):
+        """append a child, linking it in the tree"""
+        self.body.append(child)
+        child.parent = self
+
+    def add_local_node(self, child_node, name=None):
+        """append a child which should alter locals to the given node"""
+        if name != '__class__':
+            # add __class__ node as a child will cause infinite recursion later!
+            self._append_node(child_node)
+        self.set_local(name or child_node.name, child_node)
+
+
+    def __getitem__(self, item):
+        """method from the `dict` interface returning the first node
+        associated with the given name in the locals dictionary
+
+        :type item: str
+        :param item: the name of the locally defined object
+        :raises KeyError: if the name is not defined
+        """
+        return self.locals[item][0]
+
+    def __iter__(self):
+        """method from the `dict` interface returning an iterator on
+        `self.keys()`
+        """
+        return iter(self.keys())
+
+    def keys(self):
+        """method from the `dict` interface returning a tuple containing
+        locally defined names
+        """
+        return list(self.locals.keys())
+
+    def values(self):
+        """method from the `dict` interface returning a tuple containing
+        locally defined nodes which are instance of `Function` or `Class`
+        """
+        return [self[key] for key in self.keys()]
+
+    def items(self):
+        """method from the `dict` interface returning a list of tuple
+        containing each locally defined name with its associated node,
+        which is an instance of `Function` or `Class`
+        """
+        return list(zip(self.keys(), self.values()))
+
+
+    def __contains__(self, name):
+        return name in self.locals
+    has_key = __contains__
+
+# Module  #####################################################################
+
+class Module(LocalsDictNodeNG):
+    _astroid_fields = ('body',)
+
+    fromlineno = 0
+    lineno = 0
+
+    # attributes below are set by the builder module or by raw factories
+
+    # the file from which as been extracted the astroid representation. It may
+    # be None if the representation has been built from a built-in module
+    file = None
+    # Alternatively, if built from a string/bytes, this can be set
+    file_bytes = None
+    # encoding of python source file, so we can get unicode out of it (python2
+    # only)
+    file_encoding = None
+    # the module name
+    name = None
+    # boolean for astroid built from source (i.e. ast)
+    pure_python = None
+    # boolean for package module
+    package = None
+    # dictionary of globals with name as key and node defining the global
+    # as value
+    globals = None
+
+    # Future imports
+    future_imports = None
+
+    # names of python special attributes (handled by getattr impl.)
+    special_attributes = set(('__name__', '__doc__', '__file__', '__path__',
+                              '__dict__'))
+    # names of module attributes available through the global scope
+    scope_attrs = set(('__name__', '__doc__', '__file__', '__path__'))
+
+    def __init__(self, name, doc, pure_python=True):
+        self.name = name
+        self.doc = doc
+        self.pure_python = pure_python
+        self.locals = self.globals = {}
+        self.body = []
+        self.future_imports = set()
+
+    def _get_stream(self):
+        if self.file_bytes is not None:
+            return BytesIO(self.file_bytes)
+        if self.file is not None:
+            stream = open(self.file, 'rb')
+            return stream
+        return None
+
+    @property
+    def file_stream(self):
+        warnings.warn("file_stream property is deprecated and "
+                      "it is slated for removal in astroid 1.6."
+                      "Use the new method 'stream' instead.",
+                      PendingDeprecationWarning,
+                      stacklevel=2)
+        return self._get_stream()
+
+    def stream(self):
+        """Get a stream to the underlying file or bytes."""
+        return self._get_stream()
+
+    def close(self):
+        """Close the underlying file streams."""
+        warnings.warn("close method is deprecated and it is "
+                      "slated for removal in astroid 1.6, along "
+                      "with 'file_stream' property. "
+                      "Its behaviour is replaced by managing each "
+                      "file stream returned by the 'stream' method.",
+                      PendingDeprecationWarning,
+                      stacklevel=2)
+
+    def block_range(self, lineno):
+        """return block line numbers.
+
+        start from the beginning whatever the given lineno
+        """
+        return self.fromlineno, self.tolineno
+
+    def scope_lookup(self, node, name, offset=0):
+        if name in self.scope_attrs and not name in self.locals:
+            try:
+                return self, self.getattr(name)
+            except NotFoundError:
+                return self, ()
+        return self._scope_lookup(node, name, offset)
+
+    def pytype(self):
+        return '%s.module' % BUILTINS
+
+    def display_type(self):
+        return 'Module'
+
+    def getattr(self, name, context=None, ignore_locals=False):
+        if name in self.special_attributes:
+            if name == '__file__':
+                return [cf(self.file)] + self.locals.get(name, [])
+            if name == '__path__' and self.package:
+                return [List()] + self.locals.get(name, [])
+            return std_special_attributes(self, name)
+        if not ignore_locals and name in self.locals:
+            return self.locals[name]
+        if self.package:
+            try:
+                return [self.import_module(name, relative_only=True)]
+            except AstroidBuildingException:
+                raise NotFoundError(name)
+            except SyntaxError:
+                raise NotFoundError(name)
+            except Exception:# XXX pylint tests never pass here; do we need it?
+                import traceback
+                traceback.print_exc()
+        raise NotFoundError(name)
+    getattr = remove_nodes(getattr, DelName)
+
+    def igetattr(self, name, context=None):
+        """inferred getattr"""
+        # set lookup name since this is necessary to infer on import nodes for
+        # instance
+        if not context:
+            context = InferenceContext()
+        try:
+            return _infer_stmts(self.getattr(name, context), context, frame=self, lookupname=name)
+        except NotFoundError:
+            raise InferenceError(name)
+
+    def fully_defined(self):
+        """return True if this module has been built from a .py file
+        and so contains a complete representation including the code
+        """
+        return self.file is not None and self.file.endswith('.py')
+
+    def statement(self):
+        """return the first parent node marked as statement node
+        consider a module as a statement...
+        """
+        return self
+
+    def previous_sibling(self):
+        """module has no sibling"""
+        return
+
+    def next_sibling(self):
+        """module has no sibling"""
+        return
+
+    if sys.version_info < (2, 8):
+        @cachedproperty
+        def _absolute_import_activated(self):
+            for stmt in self.locals.get('absolute_import', ()):
+                if isinstance(stmt, From) and stmt.modname == '__future__':
+                    return True
+            return False
+    else:
+        _absolute_import_activated = True
+
+    def absolute_import_activated(self):
+        return self._absolute_import_activated
+
+    def import_module(self, modname, relative_only=False, level=None):
+        """import the given module considering self as context"""
+        if relative_only and level is None:
+            level = 0
+        absmodname = self.relative_to_absolute_name(modname, level)
+        try:
+            return MANAGER.ast_from_module_name(absmodname)
+        except AstroidBuildingException:
+            # we only want to import a sub module or package of this module,
+            # skip here
+            if relative_only:
+                raise
+        return MANAGER.ast_from_module_name(modname)
+
+    def relative_to_absolute_name(self, modname, level):
+        """return the absolute module name for a relative import.
+
+        The relative import can be implicit or explicit.
+        """
+        # XXX this returns non sens when called on an absolute import
+        # like 'pylint.checkers.astroid.utils'
+        # XXX doesn't return absolute name if self.name isn't absolute name
+        if self.absolute_import_activated() and level is None:
+            return modname
+        if level:
+            if self.package:
+                level = level - 1
+            package_name = self.name.rsplit('.', level)[0]
+        elif self.package:
+            package_name = self.name
+        else:
+            package_name = self.name.rsplit('.', 1)[0]
+        if package_name:
+            if not modname:
+                return package_name
+            return '%s.%s' % (package_name, modname)
+        return modname
+
+
+    def wildcard_import_names(self):
+        """return the list of imported names when this module is 'wildcard
+        imported'
+
+        It doesn't include the '__builtins__' name which is added by the
+        current CPython implementation of wildcard imports.
+        """
+        # take advantage of a living module if it exists
+        try:
+            living = sys.modules[self.name]
+        except KeyError:
+            pass
+        else:
+            try:
+                return living.__all__
+            except AttributeError:
+                return [name for name in living.__dict__.keys()
+                        if not name.startswith('_')]
+        # else lookup the astroid
+        #
+        # We separate the different steps of lookup in try/excepts
+        # to avoid catching too many Exceptions
+        default = [name for name in self.keys() if not name.startswith('_')]
+        try:
+            all = self['__all__']
+        except KeyError:
+            return default
+        try:
+            explicit = next(all.assigned_stmts())
+        except InferenceError:
+            return default
+        except AttributeError:
+            # not an assignment node
+            # XXX infer?
+            return default
+
+        # Try our best to detect the exported name.
+        infered = []
+        try:
+            explicit = next(explicit.infer())
+        except InferenceError:
+            return default
+        if not isinstance(explicit, (Tuple, List)):
+            return default
+
+        str_const = lambda node: (isinstance(node, Const) and
+                                  isinstance(node.value, six.string_types))
+        for node in explicit.elts:
+            if str_const(node):
+                infered.append(node.value)
+            else:
+                try:
+                    infered_node = next(node.infer())
+                except InferenceError:
+                    continue
+                if str_const(infered_node):
+                    infered.append(infered_node.value)
+        return infered
+
+
+
+class ComprehensionScope(LocalsDictNodeNG):
+    def frame(self):
+        return self.parent.frame()
+
+    scope_lookup = LocalsDictNodeNG._scope_lookup
+
+
+class GenExpr(ComprehensionScope):
+    _astroid_fields = ('elt', 'generators')
+
+    def __init__(self):
+        self.locals = {}
+        self.elt = None
+        self.generators = []
+
+
+class DictComp(ComprehensionScope):
+    """class representing a DictComp (dict comprehension) node"""
+    _astroid_fields = ('key', 'value', 'generators')
+
+    def __init__(self):
+        # locals: name -> list of assignment nodes for this scope
+        self.locals = {}
+        self.key = None
+        self.value = None
+        self.generators = []
+
+
+class SetComp(ComprehensionScope):
+    """class representing a SetComp (set comprehension) node"""
+    _astroid_fields = ('elt', 'generators')
+
+    def __init__(self):
+        # locals: name -> list of assignment nodes for this scope
+        self.locals = {}
+        self.elt = None
+        self.generators = []
+
+
+class _ListComp(NodeNG):
+    """class representing a ListComp node"""
+    # Shared base for the py2/py3 ListComp variants defined just below;
+    # only Python 3 list comprehensions get their own scope.
+    _astroid_fields = ('elt', 'generators')
+    elt = None
+    generators = None
+
+if sys.version_info >= (3, 0):
+    # Python 3: list comprehensions have their own scope, like genexps.
+    class ListComp(_ListComp, ComprehensionScope):
+        """class representing a ListComp node"""
+        def __init__(self):
+            self.locals = {}
+else:
+    # Python 2: list comprehensions share the enclosing scope.
+    class ListComp(_ListComp):
+        """class representing a ListComp node"""
+
+# Function  ###################################################################
+
+def _infer_decorator_callchain(node):
+    """Detect decorator call chaining and see if the end result is a
+    static or a classmethod.
+    """
+    if not isinstance(node, Function):
+        return
+    if not node.parent:
+        return
+    try:
+       # TODO: We don't handle multiple inference results right now,
+       #       because there's no flow to reason when the return
+       #       is what we are looking for, a static or a class method.
+       result = next(node.infer_call_result(node.parent))
+    except (StopIteration, InferenceError):
+       return
+    if isinstance(result, Instance):
+       result = result._proxied
+    if isinstance(result, Class):
+       if result.is_subtype_of('%s.classmethod' % BUILTINS):
+           return 'classmethod'
+       if result.is_subtype_of('%s.staticmethod' % BUILTINS):
+           return 'staticmethod'
+
+
+def _function_type(self):
+    """
+    Function type, possible values are:
+    method, function, staticmethod, classmethod.
+    """
+    # Can't infer that this node is decorated
+    # with a subclass of `classmethod` where `type` is first set,
+    # so do it here.
+    if self.decorators:
+        for node in self.decorators.nodes:
+            if isinstance(node, CallFunc):
+                # Handle the following case:
+                # @some_decorator(arg1, arg2)
+                # def func(...)
+                #
+                try:
+                    current = next(node.func.infer())
+                except InferenceError:
+                    continue
+                _type = _infer_decorator_callchain(current)
+                if _type is not None:
+                    return _type
+
+            # Fall through: also inspect the decorator itself (not just a
+            # call chain) to see if it is, or derives from, the builtin
+            # classmethod/staticmethod.
+            try:
+                for infered in node.infer():
+                    # Check to see if this returns a static or a class method.
+                    _type = _infer_decorator_callchain(infered)
+                    if _type is not None:
+                        return _type
+
+                    if not isinstance(infered, Class):
+                        continue
+                    for ancestor in infered.ancestors():
+                        if not isinstance(ancestor, Class):
+                            continue
+                        if ancestor.is_subtype_of('%s.classmethod' % BUILTINS):
+                            return 'classmethod'
+                        elif ancestor.is_subtype_of('%s.staticmethod' % BUILTINS):
+                            return 'staticmethod'
+            except InferenceError:
+                pass
+    # No decorator decided the type: use the value set by the builder.
+    return self._type
+
+
+class Lambda(LocalsDictNodeNG, FilterStmtsMixin):
+    """Node for a lambda expression; also the base class for Function."""
+    _astroid_fields = ('args', 'body',)
+    name = '<lambda>'
+
+    # function's type, 'function' | 'method' | 'staticmethod' | 'classmethod'
+    type = 'function'
+
+    def __init__(self):
+        self.locals = {}
+        self.args = []
+        self.body = []
+
+    def pytype(self):
+        """Return the builtin python type name for this callable."""
+        if 'method' in self.type:
+            return '%s.instancemethod' % BUILTINS
+        return '%s.function' % BUILTINS
+
+    def display_type(self):
+        """Human readable type ('Method' or 'Function')."""
+        if 'method' in self.type:
+            return 'Method'
+        return 'Function'
+
+    def callable(self):
+        return True
+
+    def argnames(self):
+        """return a list of argument names"""
+        if self.args.args: # maybe None with builtin functions
+            names = _rec_get_names(self.args.args)
+        else:
+            names = []
+        if self.args.vararg:
+            names.append(self.args.vararg)
+        if self.args.kwarg:
+            names.append(self.args.kwarg)
+        return names
+
+    def infer_call_result(self, caller, context=None):
+        """infer what a function is returning when called"""
+        # For a lambda the body is a single expression node.
+        return self.body.infer(context)
+
+    def scope_lookup(self, node, name, offset=0):
+        """Resolve `name` for `node`, honoring default-value scoping rules."""
+        if node in self.args.defaults or node in self.args.kw_defaults:
+            frame = self.parent.frame()
+            # line offset to avoid that def func(f=func) resolve the default
+            # value to the defined function
+            offset = -1
+        else:
+            # check this is not used in function decorators
+            frame = self
+        return frame._scope_lookup(node, name, offset)
+
+
+class Function(Statement, Lambda):
+    """Node for a function definition statement."""
+    # Python 3 functions carry an extra `returns` annotation field.
+    if PY3K:
+        _astroid_fields = ('decorators', 'args', 'body', 'returns')
+        returns = None
+    else:
+        _astroid_fields = ('decorators', 'args', 'body')
+
+    special_attributes = set(('__name__', '__doc__', '__dict__'))
+    is_function = True
+    # attributes below are set by the builder module or by raw factories
+    blockstart_tolineno = None
+    decorators = None
+    _type = "function"
+    type = cachedproperty(_function_type)
+
+    def __init__(self, name, doc):
+        self.locals = {}
+        self.args = []
+        self.body = []
+        self.name = name
+        self.doc = doc
+        self.extra_decorators = []
+        self.instance_attrs = {}
+
+    @cachedproperty
+    def fromlineno(self):
+        # lineno is the line number of the first decorator, we want the def
+        # statement lineno
+        lineno = self.lineno
+        if self.decorators is not None:
+            lineno += sum(node.tolineno - node.lineno + 1
+                                   for node in self.decorators.nodes)
+
+        return lineno
+
+    @cachedproperty
+    def blockstart_tolineno(self):
+        return self.args.tolineno
+
+    def block_range(self, lineno):
+        """return block line numbers.
+
+        start from the "def" position whatever the given lineno
+        """
+        return self.fromlineno, self.tolineno
+
+    def getattr(self, name, context=None):
+        """this method doesn't look in the instance_attrs dictionary since it's
+        done by an Instance proxy at inference time.
+        """
+        if name == '__module__':
+            return [cf(self.root().qname())]
+        if name in self.instance_attrs:
+            return self.instance_attrs[name]
+        return std_special_attributes(self, name, False)
+
+    def is_method(self):
+        """return true if the function node should be considered as a method"""
+        # check we are defined in a Class, because this is usually expected
+        # (e.g. pylint...) when is_method() return True
+        return self.type != 'function' and isinstance(self.parent.frame(), Class)
+
+    def decoratornames(self):
+        """return a list of decorator qualified names"""
+        result = set()
+        decoratornodes = []
+        if self.decorators is not None:
+            decoratornodes += self.decorators.nodes
+        decoratornodes += self.extra_decorators
+        for decnode in decoratornodes:
+            for infnode in decnode.infer():
+                result.add(infnode.qname())
+        return result
+    decoratornames = cached(decoratornames)
+
+    def is_bound(self):
+        """return true if the function is bound to an Instance or a class"""
+        return self.type == 'classmethod'
+
+    def is_abstract(self, pass_is_abstract=True):
+        """Returns True if the method is abstract.
+
+        A method is considered abstract if
+         - the only statement is 'raise NotImplementedError', or
+         - the only statement is 'pass' and pass_is_abstract is True, or
+         - the method is annotated with abc.abstractproperty/abc.abstractmethod
+        """
+        if self.decorators:
+            for node in self.decorators.nodes:
+                try:
+                    infered = next(node.infer())
+                except InferenceError:
+                    continue
+                if infered and infered.qname() in ('abc.abstractproperty',
+                                                   'abc.abstractmethod'):
+                    return True
+
+        for child_node in self.body:
+            if isinstance(child_node, Raise):
+                if child_node.raises_not_implemented():
+                    return True
+            if pass_is_abstract and isinstance(child_node, Pass):
+                return True
+            # NOTE(review): this returns on the first iteration, so only the
+            # first body statement is examined — presumably intentional
+            # ("the only statement is..."), confirm before changing.
+            return False
+        # empty function is the same as function with a single "pass" statement
+        if pass_is_abstract:
+            return True
+
+    def is_generator(self):
+        """return true if this is a generator function"""
+        # XXX should be flagged, not computed
+        return next(self.nodes_of_class((Yield, YieldFrom),
+                                        skip_klass=(Function, Lambda)), False)
+
+    def infer_call_result(self, caller, context=None):
+        """infer what a function is returning when called"""
+        if self.is_generator():
+            yield Generator()
+            return
+        # This is really a gigantic hack to work around metaclass generators
+        # that return transient class-generating functions. Pylint's AST structure
+        # cannot handle a base class object that is only used for calling __new__,
+        # but does not contribute to the inheritance structure itself. We inject
+        # a fake class into the hierarchy here for several well-known metaclass
+        # generators, and filter it out later.
+        if (self.name == 'with_metaclass' and
+                len(self.args.args) == 1 and
+                self.args.vararg is not None):
+            metaclass = next(caller.args[0].infer(context))
+            if isinstance(metaclass, Class):
+                c = Class('temporary_class', None)
+                c.hide = True
+                c.parent = self
+                c.bases = [next(b.infer(context)) for b in caller.args[1:]]
+                c._metaclass = metaclass
+                yield c
+                return
+        returns = self.nodes_of_class(Return, skip_klass=Function)
+        for returnnode in returns:
+            if returnnode.value is None:
+                yield Const(None)
+            else:
+                try:
+                    for infered in returnnode.value.infer(context):
+                        yield infered
+                except InferenceError:
+                    yield YES
+
+
+def _rec_get_names(args, names=None):
+    """return a list of all argument names"""
+    if names is None:
+        names = []
+    for arg in args:
+        if isinstance(arg, Tuple):
+            _rec_get_names(arg.elts, names)
+        else:
+            names.append(arg.name)
+    return names
+
+
+# Class ######################################################################
+
+
+def _is_metaclass(klass, seen=None):
+    """ Return if the given class can be
+    used as a metaclass.
+
+    Walks the (inferred) base classes recursively; `seen` guards against
+    inheritance cycles.
+    """
+    # `type` itself is the canonical metaclass.
+    if klass.name == 'type':
+        return True
+    if seen is None:
+        seen = set()
+    for base in klass.bases:
+        try:
+            for baseobj in base.infer():
+                if baseobj in seen:
+                    continue
+                else:
+                    seen.add(baseobj)
+                if isinstance(baseobj, Instance):
+                    # not abstract
+                    return False
+                if baseobj is YES:
+                    continue
+                if baseobj is klass:
+                    continue
+                if not isinstance(baseobj, Class):
+                    continue
+                if baseobj._type == 'metaclass':
+                    return True
+                if _is_metaclass(baseobj, seen):
+                    return True
+        except InferenceError:
+            continue
+    return False
+
+
+def _class_type(klass, ancestors=None):
+    """return a Class node type to differ metaclass, interface and exception
+    from 'regular' classes
+
+    The result is cached on klass._type; `ancestors` guards against loops
+    in the ancestor chain.
+    """
+    # XXX we have to store ancestors in case we have a ancestor loop
+    if klass._type is not None:
+        return klass._type
+    if _is_metaclass(klass):
+        klass._type = 'metaclass'
+    # name-based heuristics for interfaces and exceptions
+    elif klass.name.endswith('Interface'):
+        klass._type = 'interface'
+    elif klass.name.endswith('Exception'):
+        klass._type = 'exception'
+    else:
+        if ancestors is None:
+            ancestors = set()
+        if klass in ancestors:
+            # XXX we are in loop ancestors, and have found no type
+            klass._type = 'class'
+            return 'class'
+        ancestors.add(klass)
+        for base in klass.ancestors(recurs=False):
+            name = _class_type(base, ancestors)
+            if name != 'class':
+                if name == 'metaclass' and not _is_metaclass(klass):
+                    # don't propagate it if the current class
+                    # can't be a metaclass
+                    continue
+                klass._type = base.type
+                break
+    if klass._type is None:
+        klass._type = 'class'
+    return klass._type
+
+def _iface_hdlr(iface_node):
+    """a handler function used by interfaces to handle suspicious
+    interface nodes
+
+    Default policy: accept every candidate node.
+    """
+    return True
+
+
+class Class(Statement, LocalsDictNodeNG, FilterStmtsMixin):
+    """Node for a class definition statement.
+
+    Holds the class locals, instance attributes, bases and body, and
+    provides the inference helpers (ancestors, getattr/igetattr, mro,
+    metaclass resolution, slots...).
+    """
+
+    # some of the attributes below are set by the builder module or
+    # by a raw factories
+
+    # a dictionary of class instances attributes
+    _astroid_fields = ('decorators', 'bases', 'body') # name
+
+    decorators = None
+    special_attributes = set(('__name__', '__doc__', '__dict__', '__module__',
+                              '__bases__', '__mro__', '__subclasses__'))
+    blockstart_tolineno = None
+
+    _type = None
+    _metaclass_hack = False
+    # hide: set on synthetic classes (e.g. the with_metaclass hack) that
+    # should not show up in the inferred hierarchy
+    hide = False
+    type = property(_class_type,
+                    doc="class'type, possible values are 'class' | "
+                    "'metaclass' | 'interface' | 'exception'")
+
+    def __init__(self, name, doc):
+        self.instance_attrs = {}
+        self.locals = {}
+        self.bases = []
+        self.body = []
+        self.name = name
+        self.doc = doc
+
+    def _newstyle_impl(self, context=None):
+        """Compute (and cache in _newstyle) whether this is a new-style class."""
+        if context is None:
+            context = InferenceContext()
+        if self._newstyle is not None:
+            return self._newstyle
+        for base in self.ancestors(recurs=False, context=context):
+            if base._newstyle_impl(context):
+                self._newstyle = True
+                break
+        klass = self._explicit_metaclass()
+        # could be any callable, we'd need to infer the result of klass(name,
+        # bases, dict).  punt if it's not a class node.
+        if klass is not None and isinstance(klass, Class):
+            self._newstyle = klass._newstyle_impl(context)
+        if self._newstyle is None:
+            self._newstyle = False
+        return self._newstyle
+
+    _newstyle = None
+    newstyle = property(_newstyle_impl,
+                        doc="boolean indicating if it's a new style class"
+                        "or not")
+
+    @cachedproperty
+    def blockstart_tolineno(self):
+        if self.bases:
+            return self.bases[-1].tolineno
+        else:
+            return self.fromlineno
+
+    def block_range(self, lineno):
+        """return block line numbers.
+
+        start from the "class" position whatever the given lineno
+        """
+        return self.fromlineno, self.tolineno
+
+    def pytype(self):
+        """Return the builtin python type name for this class object."""
+        if self.newstyle:
+            return '%s.type' % BUILTINS
+        return '%s.classobj' % BUILTINS
+
+    def display_type(self):
+        return 'Class'
+
+    def callable(self):
+        return True
+
+    def is_subtype_of(self, type_name, context=None):
+        """Return True if this class or one of its ancestors has qname type_name."""
+        if self.qname() == type_name:
+            return True
+        for anc in self.ancestors(context=context):
+            if anc.qname() == type_name:
+                return True
+
+    def infer_call_result(self, caller, context=None):
+        """infer what a class is returning when called"""
+        # A 3-argument call on a `type` subtype is a dynamic class creation.
+        if self.is_subtype_of('%s.type' % (BUILTINS,), context) and len(caller.args) == 3:
+            name_node = next(caller.args[0].infer(context))
+            if (isinstance(name_node, Const) and
+                    isinstance(name_node.value, six.string_types)):
+                name = name_node.value
+            else:
+                yield YES
+                return
+            result = Class(name, None)
+            bases = next(caller.args[1].infer(context))
+            if isinstance(bases, (Tuple, List)):
+                result.bases = bases.itered()
+            else:
+                # There is currently no AST node that can represent an 'unknown'
+                # node (YES is not an AST node), therefore we simply return YES here
+                # although we know at least the name of the class.
+                yield YES
+                return
+            result.parent = caller.parent
+            yield result
+        else:
+            yield Instance(self)
+
+    def scope_lookup(self, node, name, offset=0):
+        """Resolve `name` for `node`, with special handling for base-class exprs."""
+        if node in self.bases:
+            frame = self.parent.frame()
+            # line offset to avoid that class A(A) resolve the ancestor to
+            # the defined class
+            offset = -1
+        else:
+            frame = self
+        return frame._scope_lookup(node, name, offset)
+
+    # list of parent class as a list of string (i.e. names as they appear
+    # in the class definition) XXX bw compat
+    def basenames(self):
+        return [bnode.as_string() for bnode in self.bases]
+    basenames = property(basenames)
+
+    def ancestors(self, recurs=True, context=None):
+        """return an iterator on the node base classes in a prefixed
+        depth first order
+
+        :param recurs:
+          boolean indicating if it should recurse or return direct
+          ancestors only
+        """
+        # FIXME: should be possible to choose the resolution order
+        # FIXME: inference make infinite loops possible here
+        yielded = set([self])
+        if context is None:
+            context = InferenceContext()
+        if sys.version_info[0] >= 3:
+            if not self.bases and self.qname() != 'builtins.object':
+                yield builtin_lookup("object")[1][0]
+                return
+
+        for stmt in self.bases:
+            try:
+                for baseobj in stmt.infer(context):
+                    if not isinstance(baseobj, Class):
+                        if isinstance(baseobj, Instance):
+                            baseobj = baseobj._proxied
+                        else:
+                            # duh ?
+                            continue
+                    if not baseobj.hide:
+                        if baseobj in yielded:
+                            continue # cf xxx above
+                        yielded.add(baseobj)
+                        yield baseobj
+                    if recurs:
+                        for grandpa in baseobj.ancestors(recurs=True,
+                                                         context=context):
+                            if grandpa in yielded:
+                                continue # cf xxx above
+                            yielded.add(grandpa)
+                            yield grandpa
+            except InferenceError:
+                # XXX log error ?
+                continue
+
+    def local_attr_ancestors(self, name, context=None):
+        """return an iterator on astroid representation of parent classes
+        which have <name> defined in their locals
+        """
+        for astroid in self.ancestors(context=context):
+            if name in astroid:
+                yield astroid
+
+    def instance_attr_ancestors(self, name, context=None):
+        """return an iterator on astroid representation of parent classes
+        which have <name> defined in their instance attribute dictionary
+        """
+        for astroid in self.ancestors(context=context):
+            if name in astroid.instance_attrs:
+                yield astroid
+
+    def has_base(self, node):
+        return node in self.bases
+
+    def local_attr(self, name, context=None):
+        """return the list of assign node associated to name in this class
+        locals or in its parents
+
+        :raises `NotFoundError`:
+          if no attribute with this name has been find in this class or
+          its parent classes
+        """
+        try:
+            return self.locals[name]
+        except KeyError:
+            # get if from the first parent implementing it if any
+            for class_node in self.local_attr_ancestors(name, context):
+                return class_node.locals[name]
+        raise NotFoundError(name)
+    local_attr = remove_nodes(local_attr, DelAttr)
+
+    def instance_attr(self, name, context=None):
+        """return the astroid nodes associated to name in this class instance
+        attributes dictionary and in its parents
+
+        :raises `NotFoundError`:
+          if no attribute with this name has been find in this class or
+          its parent classes
+        """
+        # Return a copy, so we don't modify self.instance_attrs,
+        # which could lead to infinite loop.
+        values = list(self.instance_attrs.get(name, []))
+        # get all values from parents
+        for class_node in self.instance_attr_ancestors(name, context):
+            values += class_node.instance_attrs[name]
+        if not values:
+            raise NotFoundError(name)
+        return values
+    instance_attr = remove_nodes(instance_attr, DelAttr)
+
+    def instanciate_class(self):
+        """return Instance of Class node, else return self"""
+        return Instance(self)
+
+    def getattr(self, name, context=None):
+        """this method doesn't look in the instance_attrs dictionary since it's
+        done by an Instance proxy at inference time.
+
+        It may return a YES object if the attribute has not been actually
+        found but a __getattr__ or __getattribute__ method is defined
+        """
+        values = self.locals.get(name, [])
+        if name in self.special_attributes:
+            if name == '__module__':
+                return [cf(self.root().qname())] + values
+            # FIXME: do we really need the actual list of ancestors?
+            # returning [Tuple()] + values don't break any test
+            # this is ticket http://www.logilab.org/ticket/52785
+            # XXX need proper meta class handling + MRO implementation
+            if name == '__bases__' or (name == '__mro__' and self.newstyle):
+                node = Tuple()
+                node.items = self.ancestors(recurs=True, context=context)
+                return [node] + values
+            return std_special_attributes(self, name)
+        # don't modify the list in self.locals!
+        values = list(values)
+        for classnode in self.ancestors(recurs=True, context=context):
+            values += classnode.locals.get(name, [])
+        if not values:
+            raise NotFoundError(name)
+        return values
+
+    def igetattr(self, name, context=None):
+        """inferred getattr, need special treatment in class to handle
+        descriptors
+        """
+        # set lookup name since this is necessary to infer on import nodes for
+        # instance
+        if not context:
+            context = InferenceContext()
+        try:
+            for infered in _infer_stmts(self.getattr(name, context), context,
+                                        frame=self, lookupname=name):
+                # yield YES object instead of descriptors when necessary
+                if not isinstance(infered, Const) and isinstance(infered, Instance):
+                    try:
+                        infered._proxied.getattr('__get__', context)
+                    except NotFoundError:
+                        yield infered
+                    else:
+                        yield YES
+                else:
+                    yield function_to_method(infered, self)
+        except NotFoundError:
+            if not name.startswith('__') and self.has_dynamic_getattr(context):
+                # class handle some dynamic attributes, return a YES object
+                yield YES
+            else:
+                raise InferenceError(name)
+
+    def has_dynamic_getattr(self, context=None):
+        """return True if the class has a custom __getattr__ or
+        __getattribute__ method
+        """
+        # need to explicitly handle optparse.Values (setattr is not detected)
+        if self.name == 'Values' and self.root().name == 'optparse':
+            return True
+        try:
+            self.getattr('__getattr__', context)
+            return True
+        except NotFoundError:
+            #if self.newstyle: XXX cause an infinite recursion error
+            try:
+                getattribute = self.getattr('__getattribute__', context)[0]
+                if getattribute.root().name != BUILTINS:
+                    # class has a custom __getattribute__ defined
+                    return True
+            except NotFoundError:
+                pass
+        return False
+
+    def methods(self):
+        """return an iterator on all methods defined in the class and
+        its ancestors
+        """
+        done = {}
+        for astroid in chain(iter((self,)), self.ancestors()):
+            for meth in astroid.mymethods():
+                if meth.name in done:
+                    continue
+                done[meth.name] = None
+                yield meth
+
+    def mymethods(self):
+        """return an iterator on all methods defined in the class"""
+        for member in self.values():
+            if isinstance(member, Function):
+                yield member
+
+    def interfaces(self, herited=True, handler_func=_iface_hdlr):
+        """return an iterator on interfaces implemented by the given
+        class node
+        """
+        # FIXME: what if __implements__ = (MyIFace, MyParent.__implements__)...
+        try:
+            implements = Instance(self).getattr('__implements__')[0]
+        except NotFoundError:
+            return
+        if not herited and not implements.frame() is self:
+            return
+        found = set()
+        missing = False
+        for iface in unpack_infer(implements):
+            if iface is YES:
+                missing = True
+                continue
+            if not iface in found and handler_func(iface):
+                found.add(iface)
+                yield iface
+        if missing:
+            raise InferenceError()
+
+    _metaclass = None
+    def _explicit_metaclass(self):
+        """ Return the explicit defined metaclass
+        for the current class.
+
+        An explicit defined metaclass is defined
+        either by passing the ``metaclass`` keyword argument
+        in the class definition line (Python 3) or (Python 2) by
+        having a ``__metaclass__`` class attribute, or if there are
+        no explicit bases but there is a global ``__metaclass__`` variable.
+        """
+        # Pick up the metaclass propagated through a hidden with_metaclass base.
+        for base in self.bases:
+            try:
+                for baseobj in base.infer():
+                    if isinstance(baseobj, Class) and baseobj.hide:
+                        self._metaclass = baseobj._metaclass
+                        self._metaclass_hack = True
+                        break
+            except InferenceError:
+                pass
+
+        if self._metaclass:
+            # Expects this from Py3k TreeRebuilder
+            try:
+                return next(node for node in self._metaclass.infer()
+                            if node is not YES)
+            except (InferenceError, StopIteration):
+                return None
+        if sys.version_info >= (3, ):
+            return None
+
+        if '__metaclass__' in self.locals:
+            assignment = self.locals['__metaclass__'][-1]
+        elif self.bases:
+            return None
+        elif '__metaclass__' in self.root().locals:
+            assignments = [ass for ass in self.root().locals['__metaclass__']
+                           if ass.lineno < self.lineno]
+            if not assignments:
+                return None
+            assignment = assignments[-1]
+        else:
+            return None
+
+        try:
+            infered = next(assignment.infer())
+        except InferenceError:
+            return
+        if infered is YES: # don't expose this
+            return None
+        return infered
+
+    def metaclass(self):
+        """ Return the metaclass of this class.
+
+        If this class does not define explicitly a metaclass,
+        then the first defined metaclass in ancestors will be used
+        instead.
+        """
+        klass = self._explicit_metaclass()
+        if klass is None:
+            for parent in self.ancestors():
+                klass = parent.metaclass()
+                if klass is not None:
+                    break
+        return klass
+
+    def has_metaclass_hack(self):
+        return self._metaclass_hack
+
+    def _islots(self):
+        """ Return an iterator with the inferred slots. """
+        if '__slots__' not in self.locals:
+            return
+        for slots in self.igetattr('__slots__'):
+            # check if __slots__ is a valid type
+            for meth in ITER_METHODS:
+                try:
+                    slots.getattr(meth)
+                    break
+                except NotFoundError:
+                    continue
+            else:
+                continue
+
+            if isinstance(slots, Const):
+                # a string. Ignore the following checks,
+                # but yield the node, only if it has a value
+                if slots.value:
+                    yield slots
+                continue
+            if not hasattr(slots, 'itered'):
+                # we can't obtain the values, maybe a .deque?
+                continue
+
+            if isinstance(slots, Dict):
+                values = [item[0] for item in slots.items]
+            else:
+                values = slots.itered()
+            if values is YES:
+                continue
+
+            # Only non-empty string constants count as slot names.
+            for elt in values:
+                try:
+                    for infered in elt.infer():
+                        if infered is YES:
+                            continue
+                        if (not isinstance(infered, Const) or
+                                not isinstance(infered.value,
+                                               six.string_types)):
+                            continue
+                        if not infered.value:
+                            continue
+                        yield infered
+                except InferenceError:
+                    continue
+
+    # Cached, because inferring them all the time is expensive
+    @cached
+    def slots(self):
+        """Get all the slots for this node.
+
+        If the class doesn't define any slot, through `__slots__`
+        variable, then this function will return a None.
+        Also, it will return None in the case the slots weren't inferred.
+        Otherwise, it will return a list of slot names.
+        """
+        slots = self._islots()
+        try:
+            first = next(slots)
+        except StopIteration:
+            # The class doesn't have a __slots__ definition.
+            return None
+        return [first] + list(slots)
+
+    def _inferred_bases(self, recurs=True, context=None):
+        # TODO(cpopa): really similar with .ancestors,
+        # but the difference is when one base is inferred,
+        # only the first object is wanted. That's because
+        # we aren't interested in superclasses, as in the following
+        # example:
+        #
+        # class SomeSuperClass(object): pass
+        # class SomeClass(SomeSuperClass): pass
+        # class Test(SomeClass): pass
+        #
+        # Inferring SomeClass from the Test's bases will give
+        # us both SomeClass and SomeSuperClass, but we are interested
+        # only in SomeClass.
+
+        if context is None:
+            context = InferenceContext()
+        if sys.version_info[0] >= 3:
+            if not self.bases and self.qname() != 'builtins.object':
+                yield builtin_lookup("object")[1][0]
+                return
+
+        for stmt in self.bases:
+            try:
+                baseobj = next(stmt.infer(context=context))
+            except InferenceError:
+                # XXX log error ?
+                continue
+            if isinstance(baseobj, Instance):
+                baseobj = baseobj._proxied
+            if not isinstance(baseobj, Class):
+                continue
+            if not baseobj.hide:
+                yield baseobj
+
+    def mro(self, context=None):
+        """Get the method resolution order, using C3 linearization.
+
+        It returns the list of ancestors sorted by the mro.
+        This will raise `NotImplementedError` for old-style classes, since
+        they don't have the concept of MRO.
+        """
+        if not self.newstyle:
+            raise NotImplementedError(
+                "Could not obtain mro for old-style classes.")
+
+        bases = list(self._inferred_bases(context=context))
+        unmerged_mro = [[self]] + [base.mro() for base in bases] + [bases]
+
+        _verify_duplicates_mro(unmerged_mro)
+        return _c3_merge(unmerged_mro)
diff --git a/third_party/logilab/astroid/test_utils.py b/third_party/logilab/astroid/test_utils.py
new file mode 100644
index 0000000..19bd7b9
--- /dev/null
+++ b/third_party/logilab/astroid/test_utils.py
@@ -0,0 +1,218 @@
+"""Utility functions for test code that uses astroid ASTs as input."""
+import functools
+import sys
+import textwrap
+
+from astroid import nodes
+from astroid import builder
+# The name of the transient function that is used to
+# wrap expressions to be extracted when calling
+# extract_node.
+_TRANSIENT_FUNCTION = '__'
+
+# The comment used to select a statement to be extracted
+# when calling extract_node.
+_STATEMENT_SELECTOR = '#@'
+
+
+def _extract_expressions(node):
+    """Find expressions in a call to _TRANSIENT_FUNCTION and extract them.
+
+    The function walks the AST recursively to search for expressions that
+    are wrapped into a call to _TRANSIENT_FUNCTION. If it finds such an
+    expression, it completely removes the function call node from the tree,
+    replacing it by the wrapped expression inside the parent.
+
+    :param node: An astroid node.
+    :type node:  astroid.bases.NodeNG
+    :yields: The sequence of wrapped expressions found on the
+    modified tree.
+    """
+    if (isinstance(node, nodes.CallFunc)
+            and isinstance(node.func, nodes.Name)
+            and node.func.name == _TRANSIENT_FUNCTION):
+        real_expr = node.args[0]
+        real_expr.parent = node.parent
+        # Search for node in all _astng_fields (the fields checked when
+        # get_children is called) of its parent. Some of those fields may
+        # be lists or tuples, in which case the elements need to be checked.
+        # When we find it, replace it by real_expr, so that the AST looks
+        # like no call to _TRANSIENT_FUNCTION ever took place.
+        for name in node.parent._astroid_fields:
+            child = getattr(node.parent, name)
+            if isinstance(child, (list, tuple)):
+                for idx, compound_child in enumerate(child):
+                    if compound_child is node:
+                        child[idx] = real_expr
+            elif child is node:
+                setattr(node.parent, name, real_expr)
+        yield real_expr
+    else:
+        for child in node.get_children():
+            for result in _extract_expressions(child):
+                yield result
+
+
+def _find_statement_by_line(node, line):
+    """Extracts the statement on a specific line from an AST.
+
+    If the line number of node matches line, it will be returned;
+    otherwise its children are iterated and the function is called
+    recursively.
+
+    :param node: An astroid node.
+    :type node: astroid.bases.NodeNG
+    :param line: The line number of the statement to extract.
+    :type line: int
+    :returns: The statement on the line, or None if no statement for the line
+      can be found.
+    :rtype:  astroid.bases.NodeNG or None
+    """
+    if isinstance(node, (nodes.Class, nodes.Function)):
+        # This is an inaccuracy in the AST: the nodes that can be
+        # decorated do not carry explicit information on which line
+        # the actual definition (class/def) starts, but .fromlineno seems to
+        # be close enough.
+        node_line = node.fromlineno
+    else:
+        node_line = node.lineno
+
+    if node_line == line:
+        return node
+
+    for child in node.get_children():
+        result = _find_statement_by_line(child, line)
+        if result:
+            return result
+
+    return None
+
+def extract_node(code, module_name=''):
+    """Parses some Python code as a module and extracts a designated AST node.
+
+    Statements:
+     To extract one or more statement nodes, append #@ to the end of the line
+
+     Examples:
+       >>> def x():
+       >>>   def y():
+       >>>     return 1 #@
+
+       The return statement will be extracted.
+
+       >>> class X(object):
+       >>>   def meth(self): #@
+       >>>     pass
+
+      The function object 'meth' will be extracted.
+
+    Expressions:
+     To extract arbitrary expressions, surround them with the fake
+     function call __(...). After parsing, the surrounded expression
+     will be returned and the whole AST (accessible via the returned
+     node's parent attribute) will look like the function call was
+     never there in the first place.
+
+     Examples:
+       >>> a = __(1)
+
+       The const node will be extracted.
+
+       >>> def x(d=__(foo.bar)): pass
+
+       The node containing the default argument will be extracted.
+
+       >>> def foo(a, b):
+       >>>   return 0 < __(len(a)) < b
+
+       The node containing the function call 'len' will be extracted.
+
+    If no statements or expressions are selected, the last toplevel
+    statement will be returned.
+
+    If the selected statement is a discard statement, (i.e. an expression
+    turned into a statement), the wrapped expression is returned instead.
+
+    For convenience, singleton lists are unpacked.
+
+    :param str code: A piece of Python code that is parsed as
+    a module. Will be passed through textwrap.dedent first.
+    :param str module_name: The name of the module.
+    :returns: The designated node from the parse tree, or a list of nodes.
+    :rtype: astroid.bases.NodeNG, or a list of nodes.
+    """
+    def _extract(node):
+        if isinstance(node, nodes.Discard):
+            return node.value
+        else:
+            return node
+
+    requested_lines = []
+    for idx, line in enumerate(code.splitlines()):
+        if line.strip().endswith(_STATEMENT_SELECTOR):
+            requested_lines.append(idx + 1)
+
+    tree = build_module(code, module_name=module_name)
+    extracted = []
+    if requested_lines:
+        for line in requested_lines:
+            extracted.append(_find_statement_by_line(tree, line))
+
+    # Modifies the tree.
+    extracted.extend(_extract_expressions(tree))
+
+    if not extracted:
+        extracted.append(tree.body[-1])
+
+    extracted = [_extract(node) for node in extracted]
+    if len(extracted) == 1:
+        return extracted[0]
+    else:
+        return extracted
+
+
+def build_module(code, module_name='', path=None):
+    """Parses a string module with a builder.
+    :param code: The code for the module.
+    :type code: str
+    :param module_name: The name for the module
+    :type module_name: str
+    :param path: The path for the module
+    :type path: str
+    :returns: The module AST.
+    :rtype:  astroid.bases.NodeNG
+    """
+    code = textwrap.dedent(code)
+    return builder.AstroidBuilder(None).string_build(code, modname=module_name, path=path)
+
+
+def require_version(minver=None, maxver=None):
+    """ Compare version of python interpreter to the given one. Skip the test
+    if older.
+    """
+    def parse(string, default=None):
+        string = string or default
+        try:
+            return tuple(int(v) for v in string.split('.'))
+        except ValueError:
+            raise ValueError('%s is not a correct version : should be X.Y[.Z].' % version)
+
+    def check_require_version(f):
+        current = sys.version_info[:3]
+        if parse(minver, "0") < current <= parse(maxver, "4"):
+            return f
+        else:
+            str_version = '.'.join(str(v) for v in sys.version_info)
+            @functools.wraps(f)
+            def new_f(self, *args, **kwargs):
+                if minver is not None:
+                    self.skipTest('Needs Python > %s. Current version is %s.' % (minver, str_version))
+                elif maxver is not None:
+                    self.skipTest('Needs Python <= %s. Current version is %s.' % (maxver, str_version))
+            return new_f
+
+
+    return check_require_version
+
+def get_name_node(start_from, name, index=0):
+    return [n for n in start_from.nodes_of_class(nodes.Name) if n.name == name][index]
diff --git a/third_party/logilab/astng/utils.py b/third_party/logilab/astroid/utils.py
similarity index 77%
rename from third_party/logilab/astng/utils.py
rename to third_party/logilab/astroid/utils.py
index ba317c8..ae72a92 100644
--- a/third_party/logilab/astng/utils.py
+++ b/third_party/logilab/astroid/utils.py
@@ -1,32 +1,32 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-# copyright 2003-2010 Sylvain Thenault, all rights reserved.
-# contact mailto:thenault@gmail.com
 #
-# This file is part of logilab-astng.
+# This file is part of astroid.
 #
-# logilab-astng is free software: you can redistribute it and/or modify it
+# astroid is free software: you can redistribute it and/or modify it
 # under the terms of the GNU Lesser General Public License as published by the
 # Free Software Foundation, either version 2.1 of the License, or (at your
 # option) any later version.
 #
-# logilab-astng is distributed in the hope that it will be useful, but
+# astroid is distributed in the hope that it will be useful, but
 # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 # FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
 # for more details.
 #
 # You should have received a copy of the GNU Lesser General Public License along
-# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
+# with astroid. If not, see <http://www.gnu.org/licenses/>.
 """this module contains some utilities to navigate in the tree or to
 extract information from it
 """
+from __future__ import print_function
 
 __docformat__ = "restructuredtext en"
 
-from logilab.astng.exceptions import ASTNGBuildingException
+from astroid.exceptions import AstroidBuildingException
+from astroid.builder import parse
 
 
-class ASTWalker:
+class ASTWalker(object):
     """a walker visiting a tree in preorder, calling on the handler:
 
     * visit_<class name> on entering a node, where class name is the class of
@@ -99,7 +99,7 @@
         if methods[0] is not None:
             methods[0](node)
         if 'locals' in node.__dict__: # skip Instance and other proxy
-            for name, local_node in node.items():
+            for local_node in node.values():
                 self.visit(local_node)
         if methods[1] is not None:
             return methods[1](node)
@@ -110,30 +110,28 @@
     for child in node.get_children():
         ok = False
         if child is None:
-            print "Hm, child of %s is None" % node
+            print("Hm, child of %s is None" % node)
             continue
         if not hasattr(child, 'parent'):
-            print " ERROR: %s has child %s %x with no parent" % (node, child, id(child))
+            print(" ERROR: %s has child %s %x with no parent" % (
+                node, child, id(child)))
         elif not child.parent:
-            print " ERROR: %s has child %s %x with parent %r" % (node, child, id(child), child.parent)
+            print(" ERROR: %s has child %s %x with parent %r" % (
+                node, child, id(child), child.parent))
         elif child.parent is not node:
-            print " ERROR: %s %x has child %s %x with wrong parent %s" % (node,
-                                      id(node), child, id(child), child.parent)
+            print(" ERROR: %s %x has child %s %x with wrong parent %s" % (
+                node, id(node), child, id(child), child.parent))
         else:
             ok = True
         if not ok:
-            print "lines;", node.lineno, child.lineno
-            print "of module", node.root(), node.root().name
-            raise ASTNGBuildingException
+            print("lines;", node.lineno, child.lineno)
+            print("of module", node.root(), node.root().name)
+            raise AstroidBuildingException
         _check_children(child)
 
 
-from _ast import PyCF_ONLY_AST
-def parse(string):
-    return compile(string, "<string>", 'exec', PyCF_ONLY_AST)
-
 class TreeTester(object):
-    '''A helper class to see _ast tree and compare with astng tree
+    '''A helper class to see _ast tree and compare with astroid tree
 
     indent: string for tree indent representation
     lineno: bool to tell if we should print the line numbers
@@ -146,11 +144,11 @@
     .   <Print>
     .   .   nl = True
     .   ]
-    >>> print tester.astng_tree_repr()
+    >>> print tester.astroid_tree_repr()
     Module()
         body = [
         Print()
-            dest = 
+            dest =
             values = [
             ]
         ]
@@ -185,8 +183,8 @@
         if _done is None:
             _done = set()
         if node in _done:
-            self._string += '\nloop in tree: %r (%s)' % (node,
-                                            getattr(node, 'lineno', None))
+            self._string += '\nloop in tree: %r (%s)' % (
+                node, getattr(node, 'lineno', None))
             return
         _done.add(node)
         self._string += '\n' + indent +  '<%s>' % node.__class__.__name__
@@ -202,7 +200,7 @@
                     continue
                 if a in ("lineno", "col_offset") and not self.lineno:
                     continue
-                self._string +='\n' +  indent + a + " = " + repr(attr)
+                self._string += '\n' +  indent + a + " = " + repr(attr)
         for field in node._fields or ():
             attr = node_dict[field]
             if attr is None:
@@ -224,16 +222,16 @@
                 self._string += '\n' + indent + field + " = " + repr(attr)
 
 
-    def build_astng_tree(self):
-        """build astng tree from the _ast tree
+    def build_astroid_tree(self):
+        """build astroid tree from the _ast tree
         """
-        from logilab.astng.builder import ASTNGBuilder
-        tree = ASTNGBuilder().string_build(self.sourcecode)
+        from astroid.builder import AstroidBuilder
+        tree = AstroidBuilder().string_build(self.sourcecode)
         return tree
 
-    def astng_tree_repr(self, ids=False):
-        """build the astng tree and return a nice tree representation"""
-        mod = self.build_astng_tree()
+    def astroid_tree_repr(self, ids=False):
+        """build the astroid tree and return a nice tree representation"""
+        mod = self.build_astroid_tree()
         return mod.repr_tree(ids)
 
 
diff --git a/third_party/logilab/common/LICENSE.txt b/third_party/logilab/common/LICENSE.txt
index b7b5f53..d511905 100644
--- a/third_party/logilab/common/LICENSE.txt
+++ b/third_party/logilab/common/LICENSE.txt
@@ -1,8 +1,8 @@
 		    GNU GENERAL PUBLIC LICENSE
 		       Version 2, June 1991
 
- Copyright (C) 1989, 1991 Free Software Foundation, Inc.
-	51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  Everyone is permitted to copy and distribute verbatim copies
  of this license document, but changing it is not allowed.
 
@@ -15,7 +15,7 @@
 General Public License applies to most of the Free Software
 Foundation's software and to any other program whose authors commit to
 using it.  (Some other Free Software Foundation software is covered by
-the GNU Library General Public License instead.)  You can apply it to
+the GNU Lesser General Public License instead.)  You can apply it to
 your programs, too.
 
   When we speak of free software, we are referring to freedom, not
@@ -55,7 +55,7 @@
 
   The precise terms and conditions for copying, distribution and
 modification follow.
-
+
 		    GNU GENERAL PUBLIC LICENSE
    TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
 
@@ -110,7 +110,7 @@
     License.  (Exception: if the Program itself is interactive but
     does not normally print such an announcement, your work based on
     the Program is not required to print an announcement.)
-
+
 These requirements apply to the modified work as a whole.  If
 identifiable sections of that work are not derived from the Program,
 and can be reasonably considered independent and separate works in
@@ -168,7 +168,7 @@
 access to copy the source code from the same place counts as
 distribution of the source code, even though third parties are not
 compelled to copy the source along with the object code.
-
+
   4. You may not copy, modify, sublicense, or distribute the Program
 except as expressly provided under this License.  Any attempt
 otherwise to copy, modify, sublicense or distribute the Program is
@@ -225,7 +225,7 @@
 
 This section is intended to make thoroughly clear what is believed to
 be a consequence of the rest of this License.
-
+
   8. If the distribution and/or use of the Program is restricted in
 certain countries either by patents or by copyrighted interfaces, the
 original copyright holder who places the Program under this License
@@ -278,7 +278,7 @@
 POSSIBILITY OF SUCH DAMAGES.
 
 		     END OF TERMS AND CONDITIONS
-
+
 	    How to Apply These Terms to Your New Programs
 
   If you develop a new program, and you want it to be of the greatest
@@ -303,17 +303,16 @@
     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
     GNU General Public License for more details.
 
-    You should have received a copy of the GNU General Public License
-    along with this program; if not, write to the Free Software
-    Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301 USA
-
+    You should have received a copy of the GNU General Public License along
+    with this program; if not, write to the Free Software Foundation, Inc.,
+    51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 Also add information on how to contact you by electronic and paper mail.
 
 If the program is interactive, make it output a short notice like this
 when it starts in an interactive mode:
 
-    Gnomovision version 69, Copyright (C) year  name of author
+    Gnomovision version 69, Copyright (C) year name of author
     Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
     This is free software, and you are welcome to redistribute it
     under certain conditions; type `show c' for details.
@@ -336,5 +335,5 @@
 This General Public License does not permit incorporating your program into
 proprietary programs.  If your program is a subroutine library, you may
 consider it more useful to permit linking proprietary applications with the
-library.  If this is what you want to do, use the GNU Library General
+library.  If this is what you want to do, use the GNU Lesser General
 Public License instead of this License.
diff --git a/third_party/logilab/common/README.chromium b/third_party/logilab/common/README.chromium
index 98e5135..2c66742 100644
--- a/third_party/logilab/common/README.chromium
+++ b/third_party/logilab/common/README.chromium
@@ -1,5 +1,5 @@
 URL: http://www.logilab.org/project/logilab-common
-Version: 0.57.1
+Version: 0.63.2
 License: GPL
 License File: LICENSE.txt
 
diff --git a/third_party/logilab/common/__init__.py b/third_party/logilab/common/__init__.py
index 8d063e2..2d1324c 100644
--- a/third_party/logilab/common/__init__.py
+++ b/third_party/logilab/common/__init__.py
@@ -25,6 +25,9 @@
 :var IGNORED_EXTENSIONS: file extensions that may usually be ignored
 """
 __docformat__ = "restructuredtext en"
+
+from six.moves import range
+
 from logilab.common.__pkginfo__ import version as __version__
 
 STD_BLACKLIST = ('CVS', '.svn', '.hg', 'debian', 'dist', 'build')
@@ -57,8 +60,9 @@
 class nullobject(object):
     def __repr__(self):
         return '<nullobject>'
-    def __nonzero__(self):
+    def __bool__(self):
         return False
+    __nonzero__ = __bool__
 
 class tempattr(object):
     def __init__(self, obj, attr, value):
diff --git a/third_party/logilab/common/__pkginfo__.py b/third_party/logilab/common/__pkginfo__.py
index 83a43cf..55a2cc3 100644
--- a/third_party/logilab/common/__pkginfo__.py
+++ b/third_party/logilab/common/__pkginfo__.py
@@ -1,4 +1,4 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of logilab-common.
@@ -18,19 +18,19 @@
 """logilab.common packaging information"""
 __docformat__ = "restructuredtext en"
 import sys
+import os
 
 distname = 'logilab-common'
 modname = 'common'
 subpackage_of = 'logilab'
 subpackage_master = True
 
-numversion = (0, 57, 1)
+numversion = (0, 63, 2)
 version = '.'.join([str(num) for num in numversion])
 
 license = 'LGPL' # 2.1 or later
 description = "collection of low-level Python packages and modules used by Logilab projects"
 web = "http://www.logilab.org/project/%s" % distname
-ftp = "ftp://ftp.logilab.org/pub/%s" % modname
 mailinglist = "mailto://python-projects@lists.logilab.org"
 author = "Logilab"
 author_email = "contact@logilab.fr"
@@ -40,6 +40,18 @@
 scripts = [join('bin', 'pytest')]
 include_dirs = [join('test', 'data')]
 
-if sys.version_info < (2, 7):
-    install_requires = ['unittest2 >= 0.5.1']
+install_requires = [
+        'six >= 1.4.0',
+        ]
+test_require = ['pytz']
 
+if sys.version_info < (2, 7):
+    install_requires.append('unittest2 >= 0.5.1')
+if os.name == 'nt':
+    install_requires.append('colorama')
+
+classifiers = ["Topic :: Utilities",
+               "Programming Language :: Python",
+               "Programming Language :: Python :: 2",
+               "Programming Language :: Python :: 3",
+               ]
diff --git a/third_party/logilab/common/changelog.py b/third_party/logilab/common/changelog.py
index 74f5124..2fff2ed 100644
--- a/third_party/logilab/common/changelog.py
+++ b/third_party/logilab/common/changelog.py
@@ -49,6 +49,8 @@
 import sys
 from stat import S_IWRITE
 
+from six import string_types
+
 BULLET = '*'
 SUBBULLET = '-'
 INDENT = ' ' * 4
@@ -64,7 +66,7 @@
     correctly printing it as X.Y.Z
     """
     def __new__(cls, versionstr):
-        if isinstance(versionstr, basestring):
+        if isinstance(versionstr, string_types):
             versionstr = versionstr.strip(' :') # XXX (syt) duh?
             parsed = cls.parse(versionstr)
         else:
@@ -76,7 +78,7 @@
         versionstr = versionstr.strip(' :')
         try:
             return [int(i) for i in versionstr.split('.')]
-        except ValueError, ex:
+        except ValueError as ex:
             raise ValueError("invalid literal for version '%s' (%s)"%(versionstr, ex))
 
     def __str__(self):
diff --git a/third_party/logilab/common/clcommands.py b/third_party/logilab/common/clcommands.py
index 411931b..4778b99 100644
--- a/third_party/logilab/common/clcommands.py
+++ b/third_party/logilab/common/clcommands.py
@@ -22,6 +22,8 @@
 command'specific
 """
 
+from __future__ import print_function
+
 __docformat__ = "restructuredtext en"
 
 import sys
@@ -115,7 +117,7 @@
         if arg in ('-h', '--help'):
             self.usage_and_exit(0)
         if self.version is not None and arg in ('--version'):
-            print self.version
+            print(self.version)
             sys.exit(0)
         rcfile = self.rcfile
         if rcfile is not None and arg in ('-C', '--rc-file'):
@@ -127,21 +129,21 @@
         try:
             command = self.get_command(arg)
         except KeyError:
-            print 'ERROR: no %s command' % arg
-            print
+            print('ERROR: no %s command' % arg)
+            print()
             self.usage_and_exit(1)
         try:
             sys.exit(command.main_run(args, rcfile))
-        except KeyboardInterrupt, exc:
-            print 'Interrupted',
+        except KeyboardInterrupt as exc:
+            print('Interrupted', end=' ')
             if str(exc):
-                print ': %s' % exc,
-            print
+                print(': %s' % exc, end=' ')
+            print()
             sys.exit(4)
-        except BadCommandUsage, err:
-            print 'ERROR:', err
-            print
-            print command.help()
+        except BadCommandUsage as err:
+            print('ERROR:', err)
+            print()
+            print(command.help())
             sys.exit(1)
 
     def create_logger(self, handler, logthreshold=None):
@@ -164,32 +166,32 @@
         """display usage for the main program (i.e. when no command supplied)
         and exit
         """
-        print 'usage:', self.pgm,
+        print('usage:', self.pgm, end=' ')
         if self.rcfile:
-            print '[--rc-file=<configuration file>]',
-        print '<command> [options] <command argument>...'
+            print('[--rc-file=<configuration file>]', end=' ')
+        print('<command> [options] <command argument>...')
         if self.doc:
-            print '\n%s' % self.doc
-        print  '''
+            print('\n%s' % self.doc)
+        print('''
 Type "%(pgm)s <command> --help" for more information about a specific
-command. Available commands are :\n''' % self.__dict__
+command. Available commands are :\n''' % self.__dict__)
         max_len = max([len(cmd) for cmd in self])
         padding = ' ' * max_len
         for cmdname, cmd in sorted(self.items()):
             if not cmd.hidden:
-                print ' ', (cmdname + padding)[:max_len], cmd.short_description()
+                print(' ', (cmdname + padding)[:max_len], cmd.short_description())
         if self.rcfile:
-            print '''
+            print('''
 Use --rc-file=<configuration file> / -C <configuration file> before the command
 to specify a configuration file. Default to %s.
-''' % self.rcfile
-        print  '''%(pgm)s -h/--help
-      display this usage information and exit''' % self.__dict__
+''' % self.rcfile)
+        print('''%(pgm)s -h/--help
+      display this usage information and exit''' % self.__dict__)
         if self.version:
-            print  '''%(pgm)s -v/--version
-      display version configuration and exit''' % self.__dict__
+            print('''%(pgm)s -v/--version
+      display version configuration and exit''' % self.__dict__)
         if self.copyright:
-            print '\n', self.copyright
+            print('\n', self.copyright)
 
     def usage_and_exit(self, status):
         self.usage()
@@ -261,7 +263,7 @@
         try:
             self.check_args(args)
             self.run(args)
-        except CommandError, err:
+        except CommandError as err:
             self.logger.error(err)
             return 2
         return 0
@@ -283,14 +285,14 @@
             command = args.pop()
             cmd = _COMMANDS[command]
             for optname, optdict in cmd.options:
-                print '--help'
-                print '--' + optname
+                print('--help')
+                print('--' + optname)
         else:
             commands = sorted(_COMMANDS.keys())
             for command in commands:
                 cmd = _COMMANDS[command]
                 if not cmd.hidden:
-                    print command
+                    print(command)
 
 
 # deprecated stuff #############################################################
diff --git a/third_party/logilab/common/cli.py b/third_party/logilab/common/cli.py
index 4283732..cdeef97 100644
--- a/third_party/logilab/common/cli.py
+++ b/third_party/logilab/common/cli.py
@@ -33,18 +33,21 @@
 
         help_do_pionce = ("pionce", "pionce duree", _("met ton corps en veille"))
         def do_pionce(self):
-            print 'nap is good'
+            print('nap is good')
 
         help_do_ronfle = ("ronfle", "ronfle volume", _("met les autres en veille"))
         def do_ronfle(self):
-            print 'fuuuuuuuuuuuu rhhhhhrhrhrrh'
+            print('fuuuuuuuuuuuu rhhhhhrhrhrrh')
 
     cl = BookShell()
 """
 
+from __future__ import print_function
+
 __docformat__ = "restructuredtext en"
 
-from logilab.common.compat import raw_input, builtins
+from six.moves import builtins, input
+
 if not hasattr(builtins, '_'):
     builtins._ = str
 
@@ -65,7 +68,7 @@
             import atexit
             atexit.register(readline.write_history_file, histfile)
     except:
-        print 'readline is not available :-('
+        print('readline is not available :-(')
 
 
 class Completer :
@@ -107,7 +110,7 @@
         """loop on user input, exit on EOF"""
         while True:
             try:
-                line = raw_input('>>> ')
+                line = input('>>> ')
             except EOFError:
                 print
                 break
@@ -156,10 +159,10 @@
         return self.commands.keys()
 
     def _print_help(self, cmd, syntax, explanation):
-        print _('Command %s') % cmd
-        print _('Syntax: %s') % syntax
-        print '\t', explanation
-        print
+        print(_('Command %s') % cmd)
+        print(_('Syntax: %s') % syntax)
+        print('\t', explanation)
+        print()
 
 
     # predefined commands #####################################################
@@ -169,20 +172,20 @@
         if command in self._command_help:
             self._print_help(*self._command_help[command])
         elif command is None or command not in self._topics:
-            print _("Use help <topic> or help <command>.")
-            print _("Available topics are:")
+            print(_("Use help <topic> or help <command>."))
+            print(_("Available topics are:"))
             topics = sorted(self._topics.keys())
             for topic in topics:
-                print '\t', topic
-            print
-            print _("Available commands are:")
+                print('\t', topic)
+            print()
+            print(_("Available commands are:"))
             commands = self.commands.keys()
             commands.sort()
             for command in commands:
-                print '\t', command[len(self.CMD_PREFIX):]
+                print('\t', command[len(self.CMD_PREFIX):])
 
         else:
-            print _('Available commands about %s:') % command
+            print(_('Available commands about %s:') % command)
             print
             for command_help_method in self._topics[command]:
                 try:
@@ -193,8 +196,8 @@
                 except:
                     import traceback
                     traceback.print_exc()
-                    print 'ERROR in help method %s'% (
-                        command_help_method.func_name)
+                    print('ERROR in help method %s'% (
+                        command_help_method.__name__))
 
     help_do_help = ("help", "help [topic|command]",
                     _("print help message for the given topic/command or \
diff --git a/third_party/logilab/common/compat.py b/third_party/logilab/common/compat.py
index 8983ece..f2eb590 100644
--- a/third_party/logilab/common/compat.py
+++ b/third_party/logilab/common/compat.py
@@ -26,7 +26,6 @@
     :mod:`unittest2.compatibility`
 """
 
-from __future__ import generators
 
 __docformat__ = "restructuredtext en"
 
@@ -35,7 +34,8 @@
 import types
 from warnings import warn
 
-import __builtin__ as builtins # 2to3 will tranform '__builtin__' to 'builtins'
+# not used here, but imported to preserve API
+from six.moves import builtins
 
 if sys.version_info < (3, 0):
     str_to_bytes = str
@@ -51,15 +51,6 @@
     def str_encode(string, encoding):
         return str(string)
 
-# XXX callable built-in seems back in all python versions
-try:
-    callable = builtins.callable
-except AttributeError:
-    from collections import Callable
-    def callable(something):
-        return isinstance(something, Callable)
-    del Callable
-
 # See also http://bugs.python.org/issue11776
 if sys.version_info[0] == 3:
     def method_type(callable, instance, klass):
@@ -69,11 +60,6 @@
     # alias types otherwise
     method_type = types.MethodType
 
-if sys.version_info < (3, 0):
-    raw_input = raw_input
-else:
-    raw_input = input
-
 # Pythons 2 and 3 differ on where to get StringIO
 if sys.version_info < (3, 0):
     from cStringIO import StringIO
@@ -84,160 +70,9 @@
     from io import FileIO, BytesIO, StringIO
     from imp import reload
 
-# Where do pickles come from?
-try:
-    import cPickle as pickle
-except ImportError:
-    import pickle
-
 from logilab.common.deprecation import deprecated
 
-from itertools import izip, chain, imap
-if sys.version_info < (3, 0):# 2to3 will remove the imports
-    izip = deprecated('izip exists in itertools since py2.3')(izip)
-    imap = deprecated('imap exists in itertools since py2.3')(imap)
-chain = deprecated('chain exists in itertools since py2.3')(chain)
-
-sum = deprecated('sum exists in builtins since py2.3')(sum)
-enumerate = deprecated('enumerate exists in builtins since py2.3')(enumerate)
-frozenset = deprecated('frozenset exists in builtins since py2.4')(frozenset)
-reversed = deprecated('reversed exists in builtins since py2.4')(reversed)
-sorted = deprecated('sorted exists in builtins since py2.4')(sorted)
-max = deprecated('max exists in builtins since py2.4')(max)
-
-
-# Python2.5 builtins
-try:
-    any = any
-    all = all
-except NameError:
-    def any(iterable):
-        """any(iterable) -> bool
-
-        Return True if bool(x) is True for any x in the iterable.
-        """
-        for elt in iterable:
-            if elt:
-                return True
-        return False
-
-    def all(iterable):
-        """all(iterable) -> bool
-
-        Return True if bool(x) is True for all values x in the iterable.
-        """
-        for elt in iterable:
-            if not elt:
-                return False
-        return True
-
-
-# Python2.5 subprocess added functions and exceptions
-try:
-    from subprocess import Popen
-except ImportError:
-    # gae or python < 2.3
-
-    class CalledProcessError(Exception):
-        """This exception is raised when a process run by check_call() returns
-        a non-zero exit status.  The exit status will be stored in the
-        returncode attribute."""
-        def __init__(self, returncode, cmd):
-            self.returncode = returncode
-            self.cmd = cmd
-        def __str__(self):
-            return "Command '%s' returned non-zero exit status %d" % (self.cmd,
-    self.returncode)
-
-    def call(*popenargs, **kwargs):
-        """Run command with arguments.  Wait for command to complete, then
-        return the returncode attribute.
-
-        The arguments are the same as for the Popen constructor.  Example:
-
-        retcode = call(["ls", "-l"])
-        """
-        # workaround: subprocess.Popen(cmd, stdout=sys.stdout) fails
-        # see http://bugs.python.org/issue1531862
-        if "stdout" in kwargs:
-            fileno = kwargs.get("stdout").fileno()
-            del kwargs['stdout']
-            return Popen(stdout=os.dup(fileno), *popenargs, **kwargs).wait()
-        return Popen(*popenargs, **kwargs).wait()
-
-    def check_call(*popenargs, **kwargs):
-        """Run command with arguments.  Wait for command to complete.  If
-        the exit code was zero then return, otherwise raise
-        CalledProcessError.  The CalledProcessError object will have the
-        return code in the returncode attribute.
-
-        The arguments are the same as for the Popen constructor.  Example:
-
-        check_call(["ls", "-l"])
-        """
-        retcode = call(*popenargs, **kwargs)
-        cmd = kwargs.get("args")
-        if cmd is None:
-            cmd = popenargs[0]
-        if retcode:
-            raise CalledProcessError(retcode, cmd)
-        return retcode
-
-try:
-    from os.path import relpath
-except ImportError: # python < 2.6
-    from os.path import curdir, abspath, sep, commonprefix, pardir, join
-    def relpath(path, start=curdir):
-        """Return a relative version of a path"""
-
-        if not path:
-            raise ValueError("no path specified")
-
-        start_list = abspath(start).split(sep)
-        path_list = abspath(path).split(sep)
-
-        # Work out how much of the filepath is shared by start and path.
-        i = len(commonprefix([start_list, path_list]))
-
-        rel_list = [pardir] * (len(start_list)-i) + path_list[i:]
-        if not rel_list:
-            return curdir
-        return join(*rel_list)
-
-
-# XXX don't know why tests don't pass if I don't do that :
-_real_set, set = set, deprecated('set exists in builtins since py2.4')(set)
-if (2, 5) <= sys.version_info[:2]:
-    InheritableSet = _real_set
-else:
-    class InheritableSet(_real_set):
-        """hacked resolving inheritancy issue from old style class in 2.4"""
-        def __new__(cls, *args, **kwargs):
-            if args:
-                new_args = (args[0], )
-            else:
-                new_args = ()
-            obj = _real_set.__new__(cls, *new_args)
-            obj.__init__(*args, **kwargs)
-            return obj
-
-# XXX shouldn't we remove this and just let 2to3 do his job ?
-# range or xrange?
-try:
-    range = xrange
-except NameError:
-    range = range
-
-# ConfigParser was renamed to the more-standard configparser
-try:
-    import configparser
-except ImportError:
-    import ConfigParser as configparser
-
-try:
-    import json
-except ImportError:
-    try:
-        import simplejson as json
-    except ImportError:
-        json = None
+# Other projects import these from here, keep providing them for
+# backwards compat
+any = deprecated('use builtin "any"')(any)
+all = deprecated('use builtin "all"')(all)
diff --git a/third_party/logilab/common/configuration.py b/third_party/logilab/common/configuration.py
index 0eafa10..b292427 100644
--- a/third_party/logilab/common/configuration.py
+++ b/third_party/logilab/common/configuration.py
@@ -1,4 +1,4 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of logilab-common.
@@ -96,8 +96,19 @@
   multiple=4,5,6
 
   number=3
-  >>>
+
+  Note : starting with Python 2.7 ConfigParser is able to take into
+  account the order of occurrences of the options into a file (by
+  using an OrderedDict). If you have two options changing some common
+  state, like a 'disable-all-stuff' and an 'enable-some-stuff-a', their
+  order of appearance will be significant : the last specified in the
+  file wins. For earlier versions of Python and logilab.common newer
+  than 0.61 the behaviour is unspecified.
+
 """
+
+from __future__ import print_function
+
 __docformat__ = "restructuredtext en"
 
 __all__ = ('OptionsManagerMixIn', 'OptionsProviderMixIn',
@@ -109,16 +120,17 @@
 import re
 from os.path import exists, expanduser
 from copy import copy
-from ConfigParser import ConfigParser, NoOptionError, NoSectionError, \
-     DuplicateSectionError
 from warnings import warn
 
-from logilab.common.compat import callable, raw_input, str_encode as _encode
+from six import string_types
+from six.moves import range, configparser as cp, input
 
+from logilab.common.compat import str_encode as _encode
+from logilab.common.deprecation import deprecated
 from logilab.common.textutils import normalize_text, unquote
-from logilab.common import optik_ext as optparse
+from logilab.common import optik_ext
 
-OptionError = optparse.OptionError
+OptionError = optik_ext.OptionError
 
 REQUIRED = []
 
@@ -136,63 +148,66 @@
 
 # validation functions ########################################################
 
+# validators will return the validated value or raise optparse.OptionValueError
+# XXX add to documentation
+
 def choice_validator(optdict, name, value):
     """validate and return a converted value for option of type 'choice'
     """
     if not value in optdict['choices']:
         msg = "option %s: invalid value: %r, should be in %s"
-        raise optparse.OptionValueError(msg % (name, value, optdict['choices']))
+        raise optik_ext.OptionValueError(msg % (name, value, optdict['choices']))
     return value
 
 def multiple_choice_validator(optdict, name, value):
     """validate and return a converted value for option of type 'choice'
     """
     choices = optdict['choices']
-    values = optparse.check_csv(None, name, value)
+    values = optik_ext.check_csv(None, name, value)
     for value in values:
         if not value in choices:
             msg = "option %s: invalid value: %r, should be in %s"
-            raise optparse.OptionValueError(msg % (name, value, choices))
+            raise optik_ext.OptionValueError(msg % (name, value, choices))
     return values
 
 def csv_validator(optdict, name, value):
     """validate and return a converted value for option of type 'csv'
     """
-    return optparse.check_csv(None, name, value)
+    return optik_ext.check_csv(None, name, value)
 
 def yn_validator(optdict, name, value):
     """validate and return a converted value for option of type 'yn'
     """
-    return optparse.check_yn(None, name, value)
+    return optik_ext.check_yn(None, name, value)
 
 def named_validator(optdict, name, value):
     """validate and return a converted value for option of type 'named'
     """
-    return optparse.check_named(None, name, value)
+    return optik_ext.check_named(None, name, value)
 
 def file_validator(optdict, name, value):
     """validate and return a filepath for option of type 'file'"""
-    return optparse.check_file(None, name, value)
+    return optik_ext.check_file(None, name, value)
 
 def color_validator(optdict, name, value):
     """validate and return a valid color for option of type 'color'"""
-    return optparse.check_color(None, name, value)
+    return optik_ext.check_color(None, name, value)
 
 def password_validator(optdict, name, value):
     """validate and return a string for option of type 'password'"""
-    return optparse.check_password(None, name, value)
+    return optik_ext.check_password(None, name, value)
 
 def date_validator(optdict, name, value):
     """validate and return a mx DateTime object for option of type 'date'"""
-    return optparse.check_date(None, name, value)
+    return optik_ext.check_date(None, name, value)
 
 def time_validator(optdict, name, value):
     """validate and return a time object for option of type 'time'"""
-    return optparse.check_time(None, name, value)
+    return optik_ext.check_time(None, name, value)
 
 def bytes_validator(optdict, name, value):
     """validate and return an integer for option of type 'bytes'"""
-    return optparse.check_bytes(None, name, value)
+    return optik_ext.check_bytes(None, name, value)
 
 
 VALIDATORS = {'string': unquote,
@@ -222,14 +237,18 @@
     except TypeError:
         try:
             return VALIDATORS[opttype](value)
-        except optparse.OptionValueError:
+        except optik_ext.OptionValueError:
             raise
         except:
-            raise optparse.OptionValueError('%s value (%r) should be of type %s' %
+            raise optik_ext.OptionValueError('%s value (%r) should be of type %s' %
                                    (option, value, opttype))
 
 # user input functions ########################################################
 
+# user input functions will ask the user for input on stdin then validate
+# the result and return the validated value or raise optparse.OptionValueError
+# XXX add to documentation
+
 def input_password(optdict, question='password:'):
     from getpass import getpass
     while True:
@@ -237,23 +256,23 @@
         value2 = getpass('confirm: ')
         if value == value2:
             return value
-        print 'password mismatch, try again'
+        print('password mismatch, try again')
 
 def input_string(optdict, question):
-    value = raw_input(question).strip()
+    value = input(question).strip()
     return value or None
 
 def _make_input_function(opttype):
     def input_validator(optdict, question):
         while True:
-            value = raw_input(question)
+            value = input(question)
             if not value.strip():
                 return None
             try:
                 return _call_validator(opttype, optdict, None, value)
-            except optparse.OptionValueError, ex:
+            except optik_ext.OptionValueError as ex:
                 msg = str(ex).split(':', 1)[-1].strip()
-                print 'bad value: %s' % msg
+                print('bad value: %s' % msg)
     return input_validator
 
 INPUT_FUNCTIONS = {
@@ -264,6 +283,8 @@
 for opttype in VALIDATORS.keys():
     INPUT_FUNCTIONS.setdefault(opttype, _make_input_function(opttype))
 
+# utility functions ############################################################
+
 def expand_default(self, option):
     """monkey patch OptionParser.expand_default since we have a particular
     way to handle defaults to avoid overriding values in the configuration
@@ -278,15 +299,15 @@
         value = None
     else:
         optdict = provider.get_option_def(optname)
-        optname = provider.option_name(optname, optdict)
+        optname = provider.option_attrname(optname, optdict)
         value = getattr(provider.config, optname, optdict)
         value = format_option_value(optdict, value)
-    if value is optparse.NO_DEFAULT or not value:
+    if value is optik_ext.NO_DEFAULT or not value:
         value = self.NO_DEFAULT_VALUE
     return option.help.replace(self.default_tag, str(value))
 
 
-def convert(value, optdict, name=''):
+def _validate(value, optdict, name=''):
     """return a validated value for an option according to its type
 
     optional argument name is only used for error message formatting
@@ -297,6 +318,9 @@
         # FIXME
         return value
     return _call_validator(_type, optdict, name, value)
+convert = deprecated('[0.60] convert() was renamed _validate()')(_validate)
+
+# format and output functions ##################################################
 
 def comment(string):
     """return string as a comment"""
@@ -346,7 +370,7 @@
         value = value.pattern
     elif optdict.get('type') == 'yn':
         value = value and 'yes' or 'no'
-    elif isinstance(value, (str, unicode)) and value.isspace():
+    elif isinstance(value, string_types) and value.isspace():
         value = "'%s'" % value
     elif optdict.get('type') == 'time' and isinstance(value, (float, int, long)):
         value = format_time(value)
@@ -358,8 +382,8 @@
     """format an options section using the INI format"""
     encoding = _get_encoding(encoding, stream)
     if doc:
-        print >> stream, _encode(comment(doc), encoding)
-    print >> stream, '[%s]' % section
+        print(_encode(comment(doc), encoding), file=stream)
+    print('[%s]' % section, file=stream)
     ini_format(stream, options, encoding)
 
 def ini_format(stream, options, encoding):
@@ -369,38 +393,38 @@
         help = optdict.get('help')
         if help:
             help = normalize_text(help, line_len=79, indent='# ')
-            print >> stream
-            print >> stream, _encode(help, encoding)
+            print(file=stream)
+            print(_encode(help, encoding), file=stream)
         else:
-            print >> stream
+            print(file=stream)
         if value is None:
-            print >> stream, '#%s=' % optname
+            print('#%s=' % optname, file=stream)
         else:
             value = _encode(value, encoding).strip()
-            print >> stream, '%s=%s' % (optname, value)
+            print('%s=%s' % (optname, value), file=stream)
 
 format_section = ini_format_section
 
 def rest_format_section(stream, section, options, encoding=None, doc=None):
-    """format an options section using the INI format"""
+    """format an options section as ReST formatted output"""
     encoding = _get_encoding(encoding, stream)
     if section:
-        print >> stream, '%s\n%s' % (section, "'"*len(section))
+        print('%s\n%s' % (section, "'"*len(section)), file=stream)
     if doc:
-        print >> stream, _encode(normalize_text(doc, line_len=79, indent=''),
-                                 encoding)
-        print >> stream
+        print(_encode(normalize_text(doc, line_len=79, indent=''), encoding), file=stream)
+        print(file=stream)
     for optname, optdict, value in options:
         help = optdict.get('help')
-        print >> stream, ':%s:' % optname
+        print(':%s:' % optname, file=stream)
         if help:
             help = normalize_text(help, line_len=79, indent='  ')
-            print >> stream, _encode(help, encoding)
+            print(_encode(help, encoding), file=stream)
         if value:
             value = _encode(format_option_value(optdict, value), encoding)
-            print >> stream, ''
-            print >> stream, '  Default: ``%s``' % value.replace("`` ", "```` ``")
+            print(file=stream)
+            print('  Default: ``%s``' % value.replace("`` ", "```` ``"), file=stream)
 
+# Options Manager ##############################################################
 
 class OptionsManagerMixIn(object):
     """MixIn to handle a configuration from both a configuration file and
@@ -423,9 +447,9 @@
 
     def reset_parsers(self, usage='', version=None):
         # configuration file parser
-        self.cfgfile_parser = ConfigParser()
+        self.cfgfile_parser = cp.ConfigParser()
         # command line parser
-        self.cmdline_parser = optparse.OptionParser(usage=usage, version=version)
+        self.cmdline_parser = optik_ext.OptionParser(usage=usage, version=version)
         self.cmdline_parser.options_manager = self
         self._optik_option_attrs = set(self.cmdline_parser.option_class.ATTRS)
 
@@ -461,7 +485,7 @@
         if group_name in self._mygroups:
             group = self._mygroups[group_name]
         else:
-            group = optparse.OptionGroup(self.cmdline_parser,
+            group = optik_ext.OptionGroup(self.cmdline_parser,
                                          title=group_name.capitalize())
             self.cmdline_parser.add_option_group(group)
             group.level = provider.level
@@ -497,9 +521,9 @@
         # default is handled here and *must not* be given to optik if you
         # want the whole machinery to work
         if 'default' in optdict:
-            if (optparse.OPTPARSE_FORMAT_DEFAULT and 'help' in optdict and
-                optdict.get('default') is not None and
-                not optdict['action'] in ('store_true', 'store_false')):
+            if ('help' in optdict
+                and optdict.get('default') is not None
+                and not optdict['action'] in ('store_true', 'store_false')):
                 optdict['help'] += ' [current: %default]'
             del optdict['default']
         args = ['--' + str(opt)]
@@ -508,7 +532,7 @@
             args.append('-' + optdict['short'])
             del optdict['short']
         # cleanup option definition dict before giving it to optik
-        for key in optdict.keys():
+        for key in list(optdict.keys()):
             if not key in self._optik_option_attrs:
                 optdict.pop(key)
         return args, optdict
@@ -555,7 +579,7 @@
         printed = False
         for section in sections:
             if printed:
-                print >> stream, '\n'
+                print('\n', file=stream)
             format_section(stream, section.upper(), options_by_section[section],
                            encoding)
             printed = True
@@ -566,7 +590,7 @@
         """
         self._monkeypatch_expand_default()
         try:
-            optparse.generate_manpage(self.cmdline_parser, pkginfo,
+            optik_ext.generate_manpage(self.cmdline_parser, pkginfo,
                                       section, stream=stream or sys.stdout,
                                       level=self._maxlevel)
         finally:
@@ -594,7 +618,7 @@
             if opt in self._all_options:
                 break # already processed
             def helpfunc(option, opt, val, p, level=helplevel):
-                print self.help(level)
+                print(self.help(level))
                 sys.exit(0)
             helpmsg = '%s verbose help.' % ' '.join(['more'] * helplevel)
             optdict = {'action' : 'callback', 'callback' : helpfunc,
@@ -616,7 +640,7 @@
                     parser._sections[sect.upper()] = values
         elif not self.quiet:
             msg = 'No config file found, using default configuration'
-            print >> sys.stderr, msg
+            print(msg, file=sys.stderr)
             return
 
     def input_config(self, onlysection=None, inputlevel=0, stream=None):
@@ -642,13 +666,13 @@
         options provider)
         """
         parser = self.cfgfile_parser
-        for provider in self.options_providers:
-            for section, option, optdict in provider.all_options():
-                try:
-                    value = parser.get(section, option)
-                    provider.set_option(option, value, optdict=optdict)
-                except (NoSectionError, NoOptionError), ex:
-                    continue
+        for section in parser.sections():
+             for option, value in parser.items(section):
+                  try:
+                       self.global_set_option(option, value)
+                  except (KeyError, OptionError):
+                       # TODO handle here undeclared options appearing in the config file
+                       continue
 
     def load_configuration(self, **kwargs):
         """override configuration according to given parameters
@@ -686,7 +710,7 @@
 
     def add_help_section(self, title, description, level=0):
         """add a dummy option section for help purpose """
-        group = optparse.OptionGroup(self.cmdline_parser,
+        group = optik_ext.OptionGroup(self.cmdline_parser,
                                      title=title.capitalize(),
                                      description=description)
         group.level = level
@@ -694,18 +718,18 @@
         self.cmdline_parser.add_option_group(group)
 
     def _monkeypatch_expand_default(self):
-        # monkey patch optparse to deal with our default values
+        # monkey patch optik_ext to deal with our default values
         try:
-            self.__expand_default_backup = optparse.HelpFormatter.expand_default
-            optparse.HelpFormatter.expand_default = expand_default
+            self.__expand_default_backup = optik_ext.HelpFormatter.expand_default
+            optik_ext.HelpFormatter.expand_default = expand_default
         except AttributeError:
             # python < 2.4: nothing to be done
             pass
     def _unmonkeypatch_expand_default(self):
         # remove monkey patch
-        if hasattr(optparse.HelpFormatter, 'expand_default'):
-            # unpatch optparse to avoid side effects
-            optparse.HelpFormatter.expand_default = self.__expand_default_backup
+        if hasattr(optik_ext.HelpFormatter, 'expand_default'):
+            # unpatch optik_ext to avoid side effects
+            optik_ext.HelpFormatter.expand_default = self.__expand_default_backup
 
     def help(self, level=0):
         """return the usage string for available options """
@@ -734,6 +758,7 @@
         assert self._inst, 'unbound method'
         return getattr(self._inst, self.method)(*args, **kwargs)
 
+# Options Provider #############################################################
 
 class OptionsProviderMixIn(object):
     """Mixin to provide options to an OptionsManager"""
@@ -745,7 +770,7 @@
     level = 0
 
     def __init__(self):
-        self.config = optparse.Values()
+        self.config = optik_ext.Values()
         for option in self.options:
             try:
                 option, optdict = option
@@ -777,41 +802,41 @@
             default = default()
         return default
 
-    def option_name(self, opt, optdict=None):
+    def option_attrname(self, opt, optdict=None):
         """get the config attribute corresponding to opt
         """
         if optdict is None:
             optdict = self.get_option_def(opt)
         return optdict.get('dest', opt.replace('-', '_'))
+    option_name = deprecated('[0.60] OptionsProviderMixIn.option_name() was renamed to option_attrname()')(option_attrname)
 
     def option_value(self, opt):
         """get the current value for the given option"""
-        return getattr(self.config, self.option_name(opt), None)
+        return getattr(self.config, self.option_attrname(opt), None)
 
     def set_option(self, opt, value, action=None, optdict=None):
         """method called to set an option (registered in the options list)
         """
-        # print "************ setting option", opt," to value", value
         if optdict is None:
             optdict = self.get_option_def(opt)
         if value is not None:
-            value = convert(value, optdict, opt)
+            value = _validate(value, optdict, opt)
         if action is None:
             action = optdict.get('action', 'store')
         if optdict.get('type') == 'named': # XXX need specific handling
-            optname = self.option_name(opt, optdict)
+            optname = self.option_attrname(opt, optdict)
             currentvalue = getattr(self.config, optname, None)
             if currentvalue:
                 currentvalue.update(value)
                 value = currentvalue
         if action == 'store':
-            setattr(self.config, self.option_name(opt, optdict), value)
+            setattr(self.config, self.option_attrname(opt, optdict), value)
         elif action in ('store_true', 'count'):
-            setattr(self.config, self.option_name(opt, optdict), 0)
+            setattr(self.config, self.option_attrname(opt, optdict), 0)
         elif action == 'store_false':
-            setattr(self.config, self.option_name(opt, optdict), 1)
+            setattr(self.config, self.option_attrname(opt, optdict), 1)
         elif action == 'append':
-            opt = self.option_name(opt, optdict)
+            opt = self.option_attrname(opt, optdict)
             _list = getattr(self.config, opt, None)
             if _list is None:
                 if isinstance(value, (list, tuple)):
@@ -839,12 +864,12 @@
             defaultstr = ': '
         else:
             defaultstr = '(default: %s): ' % format_option_value(optdict, default)
-        print ':%s:' % option
-        print optdict.get('help') or option
+        print(':%s:' % option)
+        print(optdict.get('help') or option)
         inputfunc = INPUT_FUNCTIONS[optdict['type']]
         value = inputfunc(optdict, defaultstr)
         while default is REQUIRED and not value:
-            print 'please specify a value'
+            print('please specify a value')
             value = inputfunc(optdict, '%s: ' % option)
         if value is None and default is not None:
             value = default
@@ -893,6 +918,7 @@
         for optname, optdict in options:
             yield (optname, optdict, self.option_value(optname))
 
+# configuration ################################################################
 
 class ConfigurationMixIn(OptionsManagerMixIn, OptionsProviderMixIn):
     """basic mixin for simple configurations which don't need the
@@ -913,7 +939,7 @@
                     continue
                 if not gdef in self.option_groups:
                     self.option_groups.append(gdef)
-        self.register_options_provider(self, own_group=0)
+        self.register_options_provider(self, own_group=False)
 
     def register_options(self, options):
         """add some options to the configuration"""
@@ -932,8 +958,8 @@
 
     def __getitem__(self, key):
         try:
-            return getattr(self.config, self.option_name(key))
-        except (optparse.OptionValueError, AttributeError):
+            return getattr(self.config, self.option_attrname(key))
+        except (optik_ext.OptionValueError, AttributeError):
             raise KeyError(key)
 
     def __setitem__(self, key, value):
@@ -941,7 +967,7 @@
 
     def get(self, key, default=None):
         try:
-            return getattr(self.config, self.option_name(key))
+            return getattr(self.config, self.option_attrname(key))
         except (OptionError, AttributeError):
             return default
 
@@ -977,20 +1003,21 @@
     def __getitem__(self, key):
         provider = self.config._all_options[key]
         try:
-            return getattr(provider.config, provider.option_name(key))
+            return getattr(provider.config, provider.option_attrname(key))
         except AttributeError:
             raise KeyError(key)
 
     def __setitem__(self, key, value):
-        self.config.global_set_option(self.config.option_name(key), value)
+        self.config.global_set_option(self.config.option_attrname(key), value)
 
     def get(self, key, default=None):
         provider = self.config._all_options[key]
         try:
-            return getattr(provider.config, provider.option_name(key))
+            return getattr(provider.config, provider.option_attrname(key))
         except AttributeError:
             return default
 
+# other functions ##############################################################
 
 def read_old_config(newconfig, changes, configfile):
     """initialize newconfig from a deprecated configuration file
@@ -1055,8 +1082,13 @@
             newconfig.set_option(optname, oldconfig[optname], optdict=optdef)
 
 
-def merge_options(options):
-    """preprocess options to remove duplicate"""
+def merge_options(options, optgroup=None):
+    """preprocess a list of options and remove duplicates, returning a new list
+    (tuple actually) of options.
+
+    Options dictionaries are copied to avoid later side-effects. Also, if
+    `optgroup` argument is specified, ensure all options are in the given group.
+    """
     alloptions = {}
     options = list(options)
     for i in range(len(options)-1, -1, -1):
@@ -1065,5 +1097,9 @@
             options.pop(i)
             alloptions[optname].update(optdict)
         else:
+            optdict = optdict.copy()
+            options[i] = (optname, optdict)
             alloptions[optname] = optdict
+        if optgroup is not None:
+            alloptions[optname]['group'] = optgroup
     return tuple(options)
diff --git a/third_party/logilab/common/corbautils.py b/third_party/logilab/common/corbautils.py
index 8dfb2ba..65c301d 100644
--- a/third_party/logilab/common/corbautils.py
+++ b/third_party/logilab/common/corbautils.py
@@ -72,7 +72,7 @@
         name = [CosNaming.NameComponent(id, kind)]
         try:
             context = context.bind_new_context(name)
-        except CosNaming.NamingContext.AlreadyBound, ex:
+        except CosNaming.NamingContext.AlreadyBound as ex:
             context = context.resolve(name)._narrow(CosNaming.NamingContext)
             assert context is not None, \
                    'test context exists but is not a NamingContext'
@@ -81,7 +81,7 @@
     name = [CosNaming.NameComponent(id, kind)]
     try:
         context.bind(name, object._this())
-    except CosNaming.NamingContext.AlreadyBound, ex:
+    except CosNaming.NamingContext.AlreadyBound as ex:
         context.rebind(name, object._this())
 
 def activate_POA():
diff --git a/third_party/logilab/common/daemon.py b/third_party/logilab/common/daemon.py
index c8342a8..40319a4 100644
--- a/third_party/logilab/common/daemon.py
+++ b/third_party/logilab/common/daemon.py
@@ -26,6 +26,8 @@
 import time
 import warnings
 
+from six.moves import range
+
 def setugid(user):
     """Change process user and group ID
 
@@ -46,10 +48,10 @@
             raise OSError(err, os.strerror(err), 'initgroups')
     os.setgid(passwd.pw_gid)
     os.setuid(passwd.pw_uid)
-    os.putenv('HOME', passwd.pw_dir)
+    os.environ['HOME'] = passwd.pw_dir
 
 
-def daemonize(pidfile=None, uid=None, umask=077):
+def daemonize(pidfile=None, uid=None, umask=0o77):
     """daemonize a Unix process. Set paranoid umask by default.
 
     Return 1 in the original process, 2 in the first fork, and None for the
@@ -69,15 +71,12 @@
         return 2
     # move to the root to avoit mount pb
     os.chdir('/')
-    # set umask if specified
-    if umask is not None:
-        os.umask(umask)
     # redirect standard descriptors
     null = os.open('/dev/null', os.O_RDWR)
     for i in range(3):
         try:
             os.dup2(null, i)
-        except OSError, e:
+        except OSError as e:
             if e.errno != errno.EBADF:
                 raise
     os.close(null)
@@ -93,7 +92,9 @@
         f = file(pidfile, 'w')
         f.write(str(os.getpid()))
         f.close()
-        os.chmod(pidfile, 0644)
+    # set umask if specified
+    if umask is not None:
+        os.umask(umask)
     # change process uid
     if uid:
         setugid(uid)
diff --git a/third_party/logilab/common/date.py b/third_party/logilab/common/date.py
index b069a6f..a093a8a 100644
--- a/third_party/logilab/common/date.py
+++ b/third_party/logilab/common/date.py
@@ -1,4 +1,4 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of logilab-common.
@@ -22,11 +22,14 @@
 
 import math
 import re
-from locale import getpreferredencoding
+import sys
+from locale import getlocale, LC_TIME
 from datetime import date, time, datetime, timedelta
 from time import strptime as time_strptime
 from calendar import monthrange, timegm
 
+from six.moves import range
+
 try:
     from mx.DateTime import RelativeDateTime, Date, DateTimeType
 except ImportError:
@@ -130,7 +133,7 @@
     end = datefactory(end.year, end.month, end.day, end)
     holidays = [str2date(datestr, begin)
                 for datestr in FRENCH_MOBILE_HOLIDAYS.values()]
-    for year in xrange(begin.year, end.year+1):
+    for year in range(begin.year, end.year+1):
         for datestr in FRENCH_FIXED_HOLIDAYS.values():
             date = str2date(datestr % year, begin)
             if date not in holidays:
@@ -187,8 +190,8 @@
     end = todate(end)
     if incmonth:
         while begin < end:
-            begin = next_month(begin, incmonth)
             yield begin
+            begin = next_month(begin, incmonth)
     else:
         incr = get_step(begin, incday or 1)
         while begin < end:
@@ -279,34 +282,39 @@
 
 def ustrftime(somedate, fmt='%Y-%m-%d'):
     """like strftime, but returns a unicode string instead of an encoded
-    string which' may be problematic with localized date.
-
-    encoding is guessed by locale.getpreferredencoding()
+    string which may be problematic with localized date.
     """
-    encoding = getpreferredencoding(do_setlocale=False) or 'UTF-8'
-    try:
-        return unicode(somedate.strftime(str(fmt)), encoding)
-    except ValueError, exc:
-        if somedate.year >= 1900:
-            raise
-        # datetime is not happy with dates before 1900
-        # we try to work around this, assuming a simple
-        # format string
-        fields = {'Y': somedate.year,
-                  'm': somedate.month,
-                  'd': somedate.day,
-                  }
-        if isinstance(somedate, datetime):
-            fields.update({'H': somedate.hour,
-                           'M': somedate.minute,
-                           'S': somedate.second})
-        fmt = re.sub('%([YmdHMS])', r'%(\1)02d', fmt)
-        return unicode(fmt) % fields
+    if sys.version_info >= (3, 3):
+        # datetime.date.strftime() supports dates since year 1 in Python >=3.3.
+        return somedate.strftime(fmt)
+    else:
+        try:
+            if sys.version_info < (3, 0):
+                encoding = getlocale(LC_TIME)[1] or 'ascii'
+                return unicode(somedate.strftime(str(fmt)), encoding)
+            else:
+                return somedate.strftime(fmt)
+        except ValueError:
+            if somedate.year >= 1900:
+                raise
+            # datetime is not happy with dates before 1900
+            # we try to work around this, assuming a simple
+            # format string
+            fields = {'Y': somedate.year,
+                      'm': somedate.month,
+                      'd': somedate.day,
+                      }
+            if isinstance(somedate, datetime):
+                fields.update({'H': somedate.hour,
+                               'M': somedate.minute,
+                               'S': somedate.second})
+            fmt = re.sub('%([YmdHMS])', r'%(\1)02d', fmt)
+            return unicode(fmt) % fields
 
 def utcdatetime(dt):
     if dt.tzinfo is None:
         return dt
-    return datetime(*dt.utctimetuple()[:7])
+    return (dt.replace(tzinfo=None) - dt.utcoffset())
 
 def utctime(dt):
     if dt.tzinfo is None:
diff --git a/third_party/logilab/common/dbf.py b/third_party/logilab/common/dbf.py
index 8def2d2..ab142b2 100644
--- a/third_party/logilab/common/dbf.py
+++ b/third_party/logilab/common/dbf.py
@@ -30,13 +30,15 @@
 http://www.physics.ox.ac.uk/users/santoso/Software.Repository.html
 page says code is "available as is without any warranty or support".
 """
+from __future__ import print_function
 
 import struct
 import os, os.path
 import sys
 import csv
 import tempfile
-import ConfigParser
+
+from six.moves import range
 
 class Dbase:
     def __init__(self):
@@ -78,7 +80,7 @@
     def open(self, db_name):
         filesize = os.path.getsize(db_name)
         if filesize <= 68:
-            raise IOError, 'The file is not large enough to be a dbf file'
+            raise IOError('The file is not large enough to be a dbf file')
 
         self.fdb = open(db_name, 'rb')
 
@@ -151,7 +153,7 @@
         This function accept record number from 0 to N-1
         """
         if rec_no < 0 or rec_no > self.num_records:
-            raise Exception, 'Unable to extract data outside the range'
+            raise Exception('Unable to extract data outside the range')
 
         offset = self.header['Record Size'] * rec_no
         data = self.db_data[offset:offset+self.row_len]
@@ -226,4 +228,4 @@
 if __name__=='__main__':
     rec = readDbf('dbf/sptable.dbf')
     for line in rec:
-        print '%s %s' % (line['GENUS'].strip(), line['SPECIES'].strip())
+        print('%s %s' % (line['GENUS'].strip(), line['SPECIES'].strip()))
diff --git a/third_party/logilab/common/debugger.py b/third_party/logilab/common/debugger.py
index 7556322..1f540a1 100644
--- a/third_party/logilab/common/debugger.py
+++ b/third_party/logilab/common/debugger.py
@@ -26,6 +26,9 @@
 
 
 """
+
+from __future__ import print_function
+
 __docformat__ = "restructuredtext en"
 
 try:
@@ -36,9 +39,10 @@
 import os.path as osp
 import sys
 from pdb import Pdb
-from cStringIO import StringIO
 import inspect
 
+from logilab.common.compat import StringIO
+
 try:
     from IPython import PyColorize
 except ImportError:
@@ -182,8 +186,8 @@
         if not arg:
             try:
                 source, start_lineno = getsource(self.curframe)
-                print colorize(''.join(source), start_lineno,
-                               self.curframe.f_lineno)
+                print(colorize(''.join(source), start_lineno,
+                               self.curframe.f_lineno))
             except KeyboardInterrupt:
                 pass
             except IOError:
diff --git a/third_party/logilab/common/decorators.py b/third_party/logilab/common/decorators.py
index 43c3652..beafa20 100644
--- a/third_party/logilab/common/decorators.py
+++ b/third_party/logilab/common/decorators.py
@@ -1,4 +1,4 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of logilab-common.
@@ -16,26 +16,28 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with logilab-common.  If not, see <http://www.gnu.org/licenses/>.
 """ A few useful function/method decorators. """
+
+from __future__ import print_function
+
 __docformat__ = "restructuredtext en"
 
 import sys
+import types
 from time import clock, time
+from inspect import isgeneratorfunction, getargspec
 
-from logilab.common.compat import callable, method_type
+from logilab.common.compat import method_type
 
 # XXX rewrite so we can use the decorator syntax when keyarg has to be specified
 
-def _is_generator_function(callableobj):
-    return callableobj.func_code.co_flags & 0x20
-
 class cached_decorator(object):
     def __init__(self, cacheattr=None, keyarg=None):
         self.cacheattr = cacheattr
         self.keyarg = keyarg
     def __call__(self, callableobj=None):
-        assert not _is_generator_function(callableobj), \
+        assert not isgeneratorfunction(callableobj), \
                'cannot cache generator function: %s' % callableobj
-        if callableobj.func_code.co_argcount == 1 or self.keyarg == 0:
+        if len(getargspec(callableobj).args) == 1 or self.keyarg == 0:
             cache = _SingleValueCache(callableobj, self.cacheattr)
         elif self.keyarg:
             cache = _MultiValuesKeyArgCache(callableobj, self.keyarg, self.cacheattr)
@@ -67,7 +69,6 @@
         try:
             wrapped.__doc__ = self.callable.__doc__
             wrapped.__name__ = self.callable.__name__
-            wrapped.func_name = self.callable.func_name
         except:
             pass
         return wrapped
@@ -226,8 +227,8 @@
         t = time()
         c = clock()
         res = f(*args, **kwargs)
-        print '%s clock: %.9f / time: %.9f' % (f.__name__,
-                                               clock() - c, time() - t)
+        print('%s clock: %.9f / time: %.9f' % (f.__name__,
+                                               clock() - c, time() - t))
         return res
     return wrap
 
@@ -249,7 +250,9 @@
 
 
 def monkeypatch(klass, methodname=None):
-    """Decorator extending class with the decorated callable
+    """Decorator extending class with the decorated callable. This is basically
+    a syntactic sugar vs class assignment.
+
     >>> class A:
     ...     pass
     >>> @monkeypatch(A)
@@ -273,11 +276,6 @@
             raise AttributeError('%s has no __name__ attribute: '
                                  'you should provide an explicit `methodname`'
                                  % func)
-        if callable(func) and sys.version_info < (3, 0):
-            setattr(klass, name, method_type(func, None, klass))
-        else:
-            # likely a property
-            # this is quite borderline but usage already in the wild ...
-            setattr(klass, name, func)
+        setattr(klass, name, func)
         return func
     return decorator
diff --git a/third_party/logilab/common/deprecation.py b/third_party/logilab/common/deprecation.py
index 4b2c3f4..1c81b63 100644
--- a/third_party/logilab/common/deprecation.py
+++ b/third_party/logilab/common/deprecation.py
@@ -1,4 +1,4 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of logilab-common.
@@ -22,15 +22,154 @@
 import sys
 from warnings import warn
 
-class class_deprecated(type):
-    """metaclass to print a warning on instantiation of a deprecated class"""
+from logilab.common.changelog import Version
 
-    def __call__(cls, *args, **kwargs):
-        msg = getattr(cls, "__deprecation_warning__",
-                      "%(cls)s is deprecated") % {'cls': cls.__name__}
-        warn(msg, DeprecationWarning, stacklevel=2)
-        return type.__call__(cls, *args, **kwargs)
 
+class DeprecationWrapper(object):
+    """proxy to print a warning on access to any attribute of the wrapped object
+    """
+    def __init__(self, proxied, msg=None):
+        self._proxied = proxied
+        self._msg = msg
+
+    def __getattr__(self, attr):
+        warn(self._msg, DeprecationWarning, stacklevel=2)
+        return getattr(self._proxied, attr)
+
+    def __setattr__(self, attr, value):
+        if attr in ('_proxied', '_msg'):
+            self.__dict__[attr] = value
+        else:
+            warn(self._msg, DeprecationWarning, stacklevel=2)
+            setattr(self._proxied, attr, value)
+
+
+class DeprecationManager(object):
+    """Manage the deprecation message handling. Messages are dropped for
+    versions more recent than the 'compatible' version. Example::
+
+        deprecator = deprecation.DeprecationManager("module_name")
+        deprecator.compatibility('1.3')
+
+        deprecator.warn('1.2', "message.")
+
+        @deprecator.deprecated('1.2', 'Message')
+        def any_func():
+            pass
+
+        class AnyClass(object):
+            __metaclass__ = deprecator.class_deprecated('1.2')
+    """
+    def __init__(self, module_name=None):
+        """
+        """
+        self.module_name = module_name
+        self.compatible_version = None
+
+    def compatibility(self, compatible_version):
+        """Set the compatible version.
+        """
+        self.compatible_version = Version(compatible_version)
+
+    def deprecated(self, version=None, reason=None, stacklevel=2, name=None, doc=None):
+        """Display a deprecation message only if the version is older than the
+        compatible version.
+        """
+        def decorator(func):
+            message = reason or 'The function "%s" is deprecated'
+            if '%s' in message:
+                message %= func.__name__
+            def wrapped(*args, **kwargs):
+                self.warn(version, message, stacklevel+1)
+                return func(*args, **kwargs)
+            return wrapped
+        return decorator
+
+    def class_deprecated(self, version=None):
+        class metaclass(type):
+            """metaclass to print a warning on instantiation of a deprecated class"""
+
+            def __call__(cls, *args, **kwargs):
+                msg = getattr(cls, "__deprecation_warning__",
+                              "%(cls)s is deprecated") % {'cls': cls.__name__}
+                self.warn(version, msg, stacklevel=3)
+                return type.__call__(cls, *args, **kwargs)
+        return metaclass
+
+    def moved(self, version, modpath, objname):
+        """use to tell that a callable has been moved to a new module.
+
+        It returns a callable wrapper, so that when its called a warning is printed
+        telling where the object can be found, import is done (and not before) and
+        the actual object is called.
+
+        NOTE: the usage is somewhat limited on classes since it will fail if the
+        wrapper is use in a class ancestors list, use the `class_moved` function
+        instead (which has no lazy import feature though).
+        """
+        def callnew(*args, **kwargs):
+            from logilab.common.modutils import load_module_from_name
+            message = "object %s has been moved to module %s" % (objname, modpath)
+            self.warn(version, message)
+            m = load_module_from_name(modpath)
+            return getattr(m, objname)(*args, **kwargs)
+        return callnew
+
+    def class_renamed(self, version, old_name, new_class, message=None):
+        clsdict = {}
+        if message is None:
+            message = '%s is deprecated, use %s' % (old_name, new_class.__name__)
+        clsdict['__deprecation_warning__'] = message
+        try:
+            # new-style class
+            return self.class_deprecated(version)(old_name, (new_class,), clsdict)
+        except (NameError, TypeError):
+            # old-style class
+            warn = self.warn
+            class DeprecatedClass(new_class):
+                """FIXME: There might be a better way to handle old/new-style class
+                """
+                def __init__(self, *args, **kwargs):
+                    warn(version, message, stacklevel=3)
+                    new_class.__init__(self, *args, **kwargs)
+            return DeprecatedClass
+
+    def class_moved(self, version, new_class, old_name=None, message=None):
+        """nice wrapper around class_renamed when a class has been moved into
+        another module
+        """
+        if old_name is None:
+            old_name = new_class.__name__
+        if message is None:
+            message = 'class %s is now available as %s.%s' % (
+                old_name, new_class.__module__, new_class.__name__)
+        return self.class_renamed(version, old_name, new_class, message)
+
+    def warn(self, version=None, reason="", stacklevel=2):
+        """Display a deprecation message only if the version is older than the
+        compatible version.
+        """
+        if (self.compatible_version is None
+            or version is None
+            or Version(version) < self.compatible_version):
+            if self.module_name and version:
+                reason = '[%s %s] %s' % (self.module_name, version, reason)
+            elif self.module_name:
+                reason = '[%s] %s' % (self.module_name, reason)
+            elif version:
+                reason = '[%s] %s' % (version, reason)
+            warn(reason, DeprecationWarning, stacklevel=stacklevel)
+
+_defaultdeprecator = DeprecationManager()
+
+def deprecated(reason=None, stacklevel=2, name=None, doc=None):
+    return _defaultdeprecator.deprecated(None, reason, stacklevel, name, doc)
+
+class_deprecated = _defaultdeprecator.class_deprecated()
+
+def moved(modpath, objname):
+    return _defaultdeprecator.moved(None, modpath, objname)
+moved.__doc__ = _defaultdeprecator.moved.__doc__
 
 def class_renamed(old_name, new_class, message=None):
     """automatically creates a class which fires a DeprecationWarning
@@ -39,74 +178,12 @@
     >>> Set = class_renamed('Set', set, 'Set is now replaced by set')
     >>> s = Set()
     sample.py:57: DeprecationWarning: Set is now replaced by set
-      s = Set()
+    s = Set()
     >>>
     """
-    clsdict = {}
-    if message is None:
-        message = '%s is deprecated, use %s' % (old_name, new_class.__name__)
-    clsdict['__deprecation_warning__'] = message
-    try:
-        # new-style class
-        return class_deprecated(old_name, (new_class,), clsdict)
-    except (NameError, TypeError):
-        # old-style class
-        class DeprecatedClass(new_class):
-            """FIXME: There might be a better way to handle old/new-style class
-            """
-            def __init__(self, *args, **kwargs):
-                warn(message, DeprecationWarning, stacklevel=2)
-                new_class.__init__(self, *args, **kwargs)
-        return DeprecatedClass
-
+    return _defaultdeprecator.class_renamed(None, old_name, new_class, message)
 
 def class_moved(new_class, old_name=None, message=None):
-    """nice wrapper around class_renamed when a class has been moved into
-    another module
-    """
-    if old_name is None:
-        old_name = new_class.__name__
-    if message is None:
-        message = 'class %s is now available as %s.%s' % (
-            old_name, new_class.__module__, new_class.__name__)
-    return class_renamed(old_name, new_class, message)
-
-def deprecated(reason=None, stacklevel=2, name=None, doc=None):
-    """Decorator that raises a DeprecationWarning to print a message
-    when the decorated function is called.
-    """
-    def deprecated_decorator(func):
-        message = reason or 'The function "%s" is deprecated'
-        if '%s' in message:
-            message = message % func.func_name
-        def wrapped(*args, **kwargs):
-            warn(message, DeprecationWarning, stacklevel=stacklevel)
-            return func(*args, **kwargs)
-        try:
-            wrapped.__name__ = name or func.__name__
-        except TypeError: # readonly attribute in 2.3
-            pass
-        wrapped.__doc__ = doc or func.__doc__
-        return wrapped
-    return deprecated_decorator
-
-def moved(modpath, objname):
-    """use to tell that a callable has been moved to a new module.
-
-    It returns a callable wrapper, so that when its called a warning is printed
-    telling where the object can be found, import is done (and not before) and
-    the actual object is called.
-
-    NOTE: the usage is somewhat limited on classes since it will fail if the
-    wrapper is use in a class ancestors list, use the `class_moved` function
-    instead (which has no lazy import feature though).
-    """
-    def callnew(*args, **kwargs):
-        from logilab.common.modutils import load_module_from_name
-        message = "object %s has been moved to module %s" % (objname, modpath)
-        warn(message, DeprecationWarning, stacklevel=2)
-        m = load_module_from_name(modpath)
-        return getattr(m, objname)(*args, **kwargs)
-    return callnew
-
+    return _defaultdeprecator.class_moved(None, new_class, old_name, message)
+class_moved.__doc__ = _defaultdeprecator.class_moved.__doc__
 
diff --git a/third_party/logilab/common/fileutils.py b/third_party/logilab/common/fileutils.py
index 4ac9270..b30cf5f 100644
--- a/third_party/logilab/common/fileutils.py
+++ b/third_party/logilab/common/fileutils.py
@@ -23,6 +23,9 @@
 write_open_mode, ensure_fs_mode, export
 :sort: path manipulation, file manipulation
 """
+
+from __future__ import print_function
+
 __docformat__ = "restructuredtext en"
 
 import sys
@@ -32,12 +35,11 @@
 from os.path import abspath
 from os import sep, mkdir, remove, listdir, stat, chmod, walk
 from stat import ST_MODE, S_IWRITE
-from cStringIO import StringIO
 
 from logilab.common import STD_BLACKLIST as BASE_BLACKLIST, IGNORED_EXTENSIONS
 from logilab.common.shellutils import find
 from logilab.common.deprecation import deprecated
-from logilab.common.compat import FileIO, any
+from logilab.common.compat import FileIO
 
 def first_level_directory(path):
     """Return the first level directory of a path.
@@ -375,7 +377,7 @@
             src = join(directory, filename)
             dest = to_dir + src[len(from_dir):]
             if verbose:
-                print >> sys.stderr, src, '->', dest
+                print(src, '->', dest, file=sys.stderr)
             if exists(dest):
                 remove(dest)
             shutil.copy2(src, dest)
@@ -397,6 +399,6 @@
             src = join(dirpath, filename)
             if islink(src) and not exists(src):
                 if verbose:
-                    print 'remove dead link', src
+                    print('remove dead link', src)
                 remove(src)
 
diff --git a/third_party/logilab/common/graph.py b/third_party/logilab/common/graph.py
index 75a2ee7..cef1c98 100644
--- a/third_party/logilab/common/graph.py
+++ b/third_party/logilab/common/graph.py
@@ -28,7 +28,8 @@
 import os
 import sys
 import tempfile
-from logilab.common.compat import str_encode
+import codecs
+import errno
 
 def escape(value):
     """Make <value> usable in a dot file."""
@@ -63,7 +64,7 @@
             assert charset.lower() in ('utf-8', 'iso-8859-1', 'latin1'), \
                    'unsupported charset %s' % charset
             self.emit('charset="%s"' % charset)
-        for param in additionnal_param.iteritems():
+        for param in sorted(additionnal_param.items()):
             self.emit('='.join(param))
 
     def get_source(self):
@@ -106,21 +107,26 @@
             ppng, outputfile = tempfile.mkstemp(".png", name)
             os.close(pdot)
             os.close(ppng)
-        pdot = open(dot_sourcepath, 'w')
-        pdot.write(str_encode(self.source, 'utf8'))
+        pdot = codecs.open(dot_sourcepath, 'w', encoding='utf8')
+        pdot.write(self.source)
         pdot.close()
         if target != 'dot':
             if sys.platform == 'win32':
                 use_shell = True
             else:
                 use_shell = False
-            if mapfile:
-                subprocess.call([self.renderer,  '-Tcmapx', '-o', mapfile, '-T', target, dot_sourcepath, '-o', outputfile],
-                                shell=use_shell)
-            else:
-                subprocess.call([self.renderer, '-T',  target,
-                                 dot_sourcepath, '-o',  outputfile],
-                                shell=use_shell)
+            try:
+                if mapfile:
+                    subprocess.call([self.renderer,  '-Tcmapx', '-o', mapfile, '-T', target, dot_sourcepath, '-o', outputfile],
+                                    shell=use_shell)
+                else:
+                    subprocess.call([self.renderer, '-T',  target,
+                                     dot_sourcepath, '-o',  outputfile],
+                                    shell=use_shell)
+            except OSError as e:
+                if e.errno == errno.ENOENT:
+                    e.strerror = 'File not found: {0}'.format(self.renderer)
+                    raise
             os.unlink(dot_sourcepath)
         return outputfile
 
@@ -134,14 +140,14 @@
         """
         attrs = ['%s="%s"' % (prop, value) for prop, value in props.items()]
         n_from, n_to = normalize_node_id(name1), normalize_node_id(name2)
-        self.emit('%s -> %s [%s];' % (n_from, n_to, ", ".join(attrs)) )
+        self.emit('%s -> %s [%s];' % (n_from, n_to, ', '.join(sorted(attrs))) )
 
     def emit_node(self, name, **props):
         """emit a node with given properties.
         node properties: see http://www.graphviz.org/doc/info/attrs.html
         """
         attrs = ['%s="%s"' % (prop, value) for prop, value in props.items()]
-        self.emit('%s [%s];' % (normalize_node_id(name), ", ".join(attrs)))
+        self.emit('%s [%s];' % (normalize_node_id(name), ', '.join(sorted(attrs))))
 
 def normalize_node_id(nid):
     """Returns a suitable DOT node id for `nid`."""
@@ -226,10 +232,10 @@
     if vertices is None:
         vertices = graph_dict.keys()
     for vertice in vertices:
-        _get_cycles(graph_dict, vertice, [], result)
+        _get_cycles(graph_dict, [], set(), result, vertice)
     return result
 
-def _get_cycles(graph_dict, vertice=None, path=None, result=None):
+def _get_cycles(graph_dict, path, visited, result, vertice):
     """recursive function doing the real work for get_cycles"""
     if vertice in path:
         cycle = [vertice]
@@ -248,7 +254,10 @@
     path.append(vertice)
     try:
         for node in graph_dict[vertice]:
-            _get_cycles(graph_dict, node, path, result)
+            # don't check already visited nodes again
+            if node not in visited:
+                _get_cycles(graph_dict, path, visited, result, node)
+                visited.add(node)
     except KeyError:
         pass
     path.pop()
diff --git a/third_party/logilab/common/hg.py b/third_party/logilab/common/hg.py
deleted file mode 100644
index edf2d3b..0000000
--- a/third_party/logilab/common/hg.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of logilab-common.
-#
-# logilab-common is free software: you can redistribute it and/or modify it under
-# the terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option) any
-# later version.
-#
-# logilab-common is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with logilab-common.  If not, see <http://www.gnu.org/licenses/>.
-"""mercurial utilities (mercurial should be installed)"""
-
-__docformat__ = "restructuredtext en"
-
-import os
-import sys
-import os.path as osp
-
-try:
-    from mercurial.error import RepoError
-    from mercurial.__version__ import version as hg_version
-except ImportError:
-    from mercurial.repo import RepoError
-    from mercurial.version import get_version
-    hg_version = get_version()
-
-from mercurial.hg import repository as Repository
-from mercurial.ui import ui as Ui
-from mercurial.node import short
-try:
-    # mercurial >= 1.2 (?)
-    from mercurial.cmdutil import walkchangerevs
-except ImportError, ex:
-    from mercurial.commands import walkchangerevs
-try:
-    # mercurial >= 1.1 (.1?)
-    from mercurial.util import cachefunc
-except ImportError, ex:
-    def cachefunc(func):
-        return func
-try:
-    # mercurial >= 1.3.1
-    from mercurial import encoding
-    _encoding = encoding.encoding
-except ImportError:
-    try:
-        from mercurial.util import _encoding
-    except ImportError:
-        import locale
-        # stay compatible with mercurial 0.9.1 (etch debian release)
-        # (borrowed from mercurial.util 1.1.2)
-        try:
-            _encoding = os.environ.get("HGENCODING")
-            if sys.platform == 'darwin' and not _encoding:
-                # On darwin, getpreferredencoding ignores the locale environment and
-                # always returns mac-roman. We override this if the environment is
-                # not C (has been customized by the user).
-                locale.setlocale(locale.LC_CTYPE, '')
-                _encoding = locale.getlocale()[1]
-            if not _encoding:
-                _encoding = locale.getpreferredencoding() or 'ascii'
-        except locale.Error:
-            _encoding = 'ascii'
-try:
-    # demandimport causes problems when activated, ensure it isn't
-    # XXX put this in apycot where the pb has been noticed?
-    from mercurial import demandimport
-    demandimport.disable()
-except:
-    pass
-
-Ui.warn = lambda *args, **kwargs: 0 # make it quiet
-
-def find_repository(path):
-    """returns <path>'s mercurial repository
-
-    None if <path> is not under hg control
-    """
-    path = osp.realpath(osp.abspath(path))
-    while not osp.isdir(osp.join(path, ".hg")):
-        oldpath = path
-        path = osp.dirname(path)
-        if path == oldpath:
-            return None
-    return path
-
-
-def get_repository(path):
-    """Simple function that open a hg repository"""
-    repopath = find_repository(path)
-    if repopath is None:
-        raise RuntimeError('no repository found in %s' % osp.abspath(path))
-    return Repository(Ui(), path=repopath)
-
-def incoming(wdrepo, masterrepo):
-    try:
-        return wdrepo.findincoming(masterrepo)
-    except AttributeError:
-        from mercurial import hg, discovery
-        revs, checkout = hg.addbranchrevs(wdrepo, masterrepo, ('', []), None)
-        common, incoming, rheads = discovery.findcommonincoming(
-            wdrepo, masterrepo, heads=revs)
-        if not masterrepo.local():
-            from mercurial import bundlerepo, changegroup
-            if revs is None and masterrepo.capable('changegroupsubset'):
-                revs = rheads
-            if revs is None:
-                cg = masterrepo.changegroup(incoming, "incoming")
-            else:
-                cg = masterrepo.changegroupsubset(incoming, revs, 'incoming')
-            fname = changegroup.writebundle(cg, None, "HG10UN")
-            # use the created uncompressed bundlerepo
-            masterrepo = bundlerepo.bundlerepository(wdrepo.ui, wdrepo.root, fname)
-        return masterrepo.changelog.nodesbetween(incoming, revs)[0]
-
-def outgoing(wdrepo, masterrepo):
-    try:
-        return wdrepo.findoutgoing(masterrepo)
-    except AttributeError:
-        from mercurial import hg, discovery
-        revs, checkout = hg.addbranchrevs(wdrepo, wdrepo, ('', []), None)
-        o = discovery.findoutgoing(wdrepo, masterrepo)
-        return wdrepo.changelog.nodesbetween(o, revs)[0]
diff --git a/third_party/logilab/common/logging_ext.py b/third_party/logilab/common/logging_ext.py
index 1b7a1e6..3b6a580 100644
--- a/third_party/logilab/common/logging_ext.py
+++ b/third_party/logilab/common/logging_ext.py
@@ -24,6 +24,8 @@
 import sys
 import logging
 
+from six import string_types
+
 from logilab.common.textutils import colorize_ansi
 
 
@@ -112,7 +114,11 @@
     else:
         try:
             if rotation_parameters is None:
-                handler = logging.FileHandler(logfile)
+                if os.name == 'posix' and sys.version_info >= (2, 6):
+                    from logging.handlers import WatchedFileHandler
+                    handler = WatchedFileHandler(logfile)
+                else:
+                    handler = logging.FileHandler(logfile)
             else:
                 from logging.handlers import TimedRotatingFileHandler
                 handler = TimedRotatingFileHandler(
@@ -127,14 +133,25 @@
             logthreshold = logging.DEBUG
         else:
             logthreshold = logging.ERROR
-    elif isinstance(logthreshold, basestring):
+    elif isinstance(logthreshold, string_types):
         logthreshold = getattr(logging, THRESHOLD_MAP.get(logthreshold,
                                                           logthreshold))
     return logthreshold
 
-def get_formatter(logformat=LOG_FORMAT, logdateformat=LOG_DATE_FORMAT):
+def _colorable_terminal():
     isatty = hasattr(sys.__stdout__, 'isatty') and sys.__stdout__.isatty()
-    if isatty and sys.platform != 'win32':
+    if not isatty:
+        return False
+    if os.name == 'nt':
+        try:
+            from colorama import init as init_win32_colors
+        except ImportError:
+            return False
+        init_win32_colors()
+    return True
+
+def get_formatter(logformat=LOG_FORMAT, logdateformat=LOG_DATE_FORMAT):
+    if _colorable_terminal():
         fmt = ColorFormatter(logformat, logdateformat)
         def col_fact(record):
             if 'XXX' in record.message:
diff --git a/third_party/logilab/common/modutils.py b/third_party/logilab/common/modutils.py
index ce0c297..a426a3a 100644
--- a/third_party/logilab/common/modutils.py
+++ b/third_party/logilab/common/modutils.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of logilab-common.
@@ -27,6 +27,7 @@
 :type BUILTIN_MODULES: dict
 :var BUILTIN_MODULES: dictionary with builtin module names has key
 """
+
 __docformat__ = "restructuredtext en"
 
 import sys
@@ -34,6 +35,9 @@
 from os.path import splitext, join, abspath, isdir, dirname, exists, basename
 from imp import find_module, load_module, C_BUILTIN, PY_COMPILED, PKG_DIRECTORY
 from distutils.sysconfig import get_config_var, get_python_lib, get_python_version
+from distutils.errors import DistutilsPlatformError
+
+from six.moves import range
 
 try:
     import zipimport
@@ -53,12 +57,18 @@
 if sys.platform.startswith('win'):
     PY_SOURCE_EXTS = ('py', 'pyw')
     PY_COMPILED_EXTS = ('dll', 'pyd')
-    STD_LIB_DIR = get_python_lib(standard_lib=1)
 else:
     PY_SOURCE_EXTS = ('py',)
     PY_COMPILED_EXTS = ('so',)
-    # extend lib dir with some arch-dependant paths
-    STD_LIB_DIR = join(get_config_var("LIBDIR"), "python%s" % get_python_version())
+
+try:
+    STD_LIB_DIR = get_python_lib(standard_lib=1)
+# get_python_lib(standard_lib=1) is not available on pypy, set STD_LIB_DIR to
+# non-valid path, see https://bugs.pypy.org/issue1164
+except DistutilsPlatformError:
+    STD_LIB_DIR = '//'
+
+EXT_LIB_DIR = get_python_lib()
 
 BUILTIN_MODULES = dict(zip(sys.builtin_module_names,
                            [1]*len(sys.builtin_module_names)))
@@ -84,7 +94,7 @@
     def __getattribute__(self, attr):
         try:
             return super(LazyObject, self).__getattribute__(attr)
-        except AttributeError, ex:
+        except AttributeError as ex:
             return getattr(self._getobj(), attr)
 
     def __call__(self, *args, **kwargs):
@@ -92,7 +102,7 @@
 
 
 def load_module_from_name(dotted_name, path=None, use_sys=1):
-    """Load a Python module from it's name.
+    """Load a Python module from its name.
 
     :type dotted_name: str
     :param dotted_name: python name of a module or package
@@ -117,7 +127,7 @@
 
 
 def load_module_from_modpath(parts, path=None, use_sys=1):
-    """Load a python module from it's splitted name.
+    """Load a python module from its splitted name.
 
     :type parts: list(str) or tuple(str)
     :param parts:
@@ -151,6 +161,9 @@
         if len(modpath) != len(parts):
             # even with use_sys=False, should try to get outer packages from sys.modules
             module = sys.modules.get(curname)
+        elif use_sys:
+            # because it may have been indirectly loaded through a parent
+            module = sys.modules.get(curname)
         if module is None:
             mp_file, mp_filename, mp_desc = find_module(part, path)
             module = load_module(curname, mp_file, mp_filename, mp_desc)
@@ -230,10 +243,7 @@
                     return extrapath[path_].split('.') + submodpath
     for path in sys.path:
         path = abspath(path)
-        if path and base[:len(path)] == path:
-            if filename.find('site-packages') != -1 and \
-                   path.find('site-packages') == -1:
-                continue
+        if path and base.startswith(path):
             modpath = [pkg for pkg in base[len(path):].split(os.sep) if pkg]
             if _check_init(path, modpath[:-1]):
                 return modpath
@@ -446,13 +456,16 @@
 
 def cleanup_sys_modules(directories):
     """remove submodules of `directories` from `sys.modules`"""
-    for modname, module in sys.modules.items():
+    cleaned = []
+    for modname, module in list(sys.modules.items()):
         modfile = getattr(module, '__file__', None)
         if modfile:
             for directory in directories:
                 if modfile.startswith(directory):
+                    cleaned.append(modname)
                     del sys.modules[modname]
                     break
+    return cleaned
 
 
 def is_python_source(filename):
@@ -484,7 +497,7 @@
     modname = modname.split('.')[0]
     try:
         filename = file_from_modpath([modname])
-    except ImportError, ex:
+    except ImportError as ex:
         # import failed, i'm probably not so wrong by supposing it's
         # not standard...
         return 0
@@ -493,13 +506,11 @@
     if filename is None:
         return 1
     filename = abspath(filename)
+    if filename.startswith(EXT_LIB_DIR):
+        return 0
     for path in std_path:
-        path = abspath(path)
-        if filename.startswith(path):
-            pfx_len = len(path)
-            if filename[pfx_len+1:pfx_len+14] != 'site-packages':
-                return 1
-            return 0
+        if filename.startswith(abspath(path)):
+            return 1
     return False
 
 
@@ -565,10 +576,15 @@
             if importer.find_module(modpath[0]):
                 if not importer.find_module('/'.join(modpath)):
                     raise ImportError('No module named %s in %s/%s' % (
-                        '.'.join(modpath[1:]), file, modpath))
+                        '.'.join(modpath[1:]), filepath, modpath))
                 return ZIPFILE, abspath(filepath) + '/' + '/'.join(modpath), filepath
     raise ImportError('No module named %s' % '.'.join(modpath))
 
+try:
+    import pkg_resources
+except ImportError:
+    pkg_resources = None
+
 def _module_file(modpath, path=None):
     """get a module type / file path
 
@@ -599,16 +615,35 @@
         checkeggs = True
     except AttributeError:
         checkeggs = False
+    # pkg_resources support (aka setuptools namespace packages)
+    if (pkg_resources is not None
+            and modpath[0] in pkg_resources._namespace_packages
+            and modpath[0] in sys.modules
+            and len(modpath) > 1):
+        # setuptools has added into sys.modules a module object with proper
+        # __path__, get back information from there
+        module = sys.modules[modpath.pop(0)]
+        path = module.__path__
     imported = []
     while modpath:
+        modname = modpath[0]
+        # take care to changes in find_module implementation wrt builtin modules
+        #
+        # Python 2.6.6 (r266:84292, Sep 11 2012, 08:34:23)
+        # >>> imp.find_module('posix')
+        # (None, 'posix', ('', '', 6))
+        #
+        # Python 3.3.1 (default, Apr 26 2013, 12:08:46)
+        # >>> imp.find_module('posix')
+        # (None, None, ('', '', 6))
         try:
-            _, mp_filename, mp_desc = find_module(modpath[0], path)
+            _, mp_filename, mp_desc = find_module(modname, path)
         except ImportError:
             if checkeggs:
                 return _search_zip(modpath, pic)[:2]
             raise
         else:
-            if checkeggs:
+            if checkeggs and mp_filename:
                 fullabspath = [abspath(x) for x in _path]
                 try:
                     pathindex = fullabspath.index(dirname(abspath(mp_filename)))
@@ -628,7 +663,21 @@
             if mtype != PKG_DIRECTORY:
                 raise ImportError('No module %s in %s' % ('.'.join(modpath),
                                                           '.'.join(imported)))
-            path = [mp_filename]
+            # XXX guess if package is using pkgutil.extend_path by looking for
+            # those keywords in the first four Kbytes
+            try:
+                with open(join(mp_filename, '__init__.py')) as stream:
+                    data = stream.read(4096)
+            except IOError:
+                path = [mp_filename]
+            else:
+                if 'pkgutil' in data and 'extend_path' in data:
+                    # extend_path is called, search sys.path for module/packages
+                    # of this name see pkgutil.extend_path documentation
+                    path = [join(p, *imported) for p in sys.path
+                            if isdir(join(p, *imported))]
+                else:
+                    path = [mp_filename]
     return mtype, mp_filename
 
 def _is_python_file(filename):
diff --git a/third_party/logilab/common/optik_ext.py b/third_party/logilab/common/optik_ext.py
index 39bbe18..1fd2a7f 100644
--- a/third_party/logilab/common/optik_ext.py
+++ b/third_party/logilab/common/optik_ext.py
@@ -46,6 +46,8 @@
     argument of this type will be converted to a float value in bytes
     according to byte units (b, kb, mb, gb, tb)
 """
+from __future__ import print_function
+
 __docformat__ = "restructuredtext en"
 
 import re
@@ -65,10 +67,9 @@
 except ImportError:
     HAS_MX_DATETIME = False
 
+from logilab.common.textutils import splitstrip, TIME_UNITS, BYTE_UNITS, \
+    apply_units
 
-OPTPARSE_FORMAT_DEFAULT = sys.version_info >= (2, 4)
-
-from logilab.common.textutils import splitstrip
 
 def check_regexp(option, opt, value):
     """check a regexp value by trying to compile it
@@ -168,18 +169,15 @@
     raise OptionValueError(msg % (opt, value))
 
 def check_time(option, opt, value):
-    from logilab.common.textutils import TIME_UNITS, apply_units
     if isinstance(value, (int, long, float)):
         return value
     return apply_units(value, TIME_UNITS)
 
 def check_bytes(option, opt, value):
-    from logilab.common.textutils import BYTE_UNITS, apply_units
     if hasattr(value, '__int__'):
         return value
     return apply_units(value, BYTE_UNITS)
 
-import types
 
 class Option(BaseOption):
     """override optik.Option to add some new option types
@@ -214,7 +212,7 @@
             if self.choices is None:
                 raise OptionError(
                     "must supply a list of choices for type 'choice'", self)
-            elif type(self.choices) not in (types.TupleType, types.ListType):
+            elif not isinstance(self.choices, (tuple, list)):
                 raise OptionError(
                     "choices must be a list of strings ('%s' supplied)"
                     % str(type(self.choices)).split("'")[1], self)
@@ -227,10 +225,7 @@
     def process(self, opt, value, values, parser):
         # First, convert the value(s) to the right type.  Howl if any
         # value(s) are bogus.
-        try:
-            value = self.convert_value(opt, value)
-        except AttributeError: # py < 2.4
-            value = self.check_value(opt, value)
+        value = self.convert_value(opt, value)
         if self.type == 'named':
             existant = getattr(values, self.dest)
             if existant:
@@ -388,9 +383,9 @@
     formatter = ManHelpFormatter()
     formatter.output_level = level
     formatter.parser = optparser
-    print >> stream, formatter.format_head(optparser, pkginfo, section)
-    print >> stream, optparser.format_option_help(formatter)
-    print >> stream, formatter.format_tail(pkginfo)
+    print(formatter.format_head(optparser, pkginfo, section), file=stream)
+    print(optparser.format_option_help(formatter), file=stream)
+    print(formatter.format_tail(pkginfo), file=stream)
 
 
 __all__ = ('OptionParser', 'Option', 'OptionGroup', 'OptionValueError',
diff --git a/third_party/logilab/common/optparser.py b/third_party/logilab/common/optparser.py
index 0263dab..aa17750 100644
--- a/third_party/logilab/common/optparser.py
+++ b/third_party/logilab/common/optparser.py
@@ -29,6 +29,8 @@
 
 With mymod.build that defines two functions run and add_options
 """
+from __future__ import print_function
+
 __docformat__ = "restructuredtext en"
 
 from warnings import warn
@@ -55,9 +57,9 @@
 
     def print_main_help(self):
         optparse.OptionParser.print_help(self)
-        print '\ncommands:'
+        print('\ncommands:')
         for cmdname, (_, help) in self._commands.items():
-            print '% 10s - %s' % (cmdname, help)
+            print('% 10s - %s' % (cmdname, help))
 
     def parse_command(self, args):
         if len(args) == 0:
@@ -78,7 +80,7 @@
         # optparse inserts self.description between usage and options help
         self.description = help
         if isinstance(mod_or_f, str):
-            exec 'from %s import run, add_options' % mod_or_f
+            exec('from %s import run, add_options' % mod_or_f)
         else:
             run, add_options = mod_or_f
         add_options(self)
diff --git a/third_party/logilab/common/pdf_ext.py b/third_party/logilab/common/pdf_ext.py
deleted file mode 100644
index 71c483b..0000000
--- a/third_party/logilab/common/pdf_ext.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of logilab-common.
-#
-# logilab-common is free software: you can redistribute it and/or modify it under
-# the terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option) any
-# later version.
-#
-# logilab-common is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with logilab-common.  If not, see <http://www.gnu.org/licenses/>.
-"""Manipulate pdf and fdf files (pdftk recommended).
-
-Notes regarding pdftk, pdf forms and fdf files (form definition file)
-fields names can be extracted with:
-
-    pdftk orig.pdf generate_fdf output truc.fdf
-
-to merge fdf and pdf:
-
-    pdftk orig.pdf fill_form test.fdf output result.pdf [flatten]
-
-without flatten, one could further edit the resulting form.
-with flatten, everything is turned into text.
-
-
-
-
-"""
-__docformat__ = "restructuredtext en"
-# XXX seems very unix specific
-# TODO: check availability of pdftk at import
-
-
-import os
-
-HEAD="""%FDF-1.2
-%\xE2\xE3\xCF\xD3
-1 0 obj
-<<
-/FDF
-<<
-/Fields [
-"""
-
-TAIL="""]
->>
->>
-endobj
-trailer
-
-<<
-/Root 1 0 R
->>
-%%EOF
-"""
-
-def output_field( f ):
-    return "\xfe\xff" + "".join( [ "\x00"+c for c in f ] )
-
-def extract_keys(lines):
-    keys = []
-    for line in lines:
-        if line.startswith('/V'):
-            pass #print 'value',line
-        elif line.startswith('/T'):
-            key = line[7:-2]
-            key = ''.join(key.split('\x00'))
-            keys.append( key )
-    return keys
-
-def write_field(out, key, value):
-    out.write("<<\n")
-    if value:
-        out.write("/V (%s)\n" %value)
-    else:
-        out.write("/V /\n")
-    out.write("/T (%s)\n" % output_field(key) )
-    out.write(">> \n")
-
-def write_fields(out, fields):
-    out.write(HEAD)
-    for (key, value, comment) in fields:
-        write_field(out, key, value)
-        write_field(out, key+"a", value) # pour copie-carbone sur autres pages
-    out.write(TAIL)
-
-def extract_keys_from_pdf(filename):
-    # what about using 'pdftk filename dump_data_fields' and parsing the output ?
-    os.system('pdftk %s generate_fdf output /tmp/toto.fdf' % filename)
-    lines = file('/tmp/toto.fdf').readlines()
-    return extract_keys(lines)
-
-
-def fill_pdf(infile, outfile, fields):
-    write_fields(file('/tmp/toto.fdf', 'w'), fields)
-    os.system('pdftk %s fill_form /tmp/toto.fdf output %s flatten' % (infile, outfile))
-
-def testfill_pdf(infile, outfile):
-    keys = extract_keys_from_pdf(infile)
-    fields = []
-    for key in keys:
-        fields.append( (key, key, '') )
-    fill_pdf(infile, outfile, fields)
-
diff --git a/third_party/logilab/common/pyro_ext.py b/third_party/logilab/common/pyro_ext.py
index 0f4d279..5204b1b 100644
--- a/third_party/logilab/common/pyro_ext.py
+++ b/third_party/logilab/common/pyro_ext.py
@@ -118,7 +118,7 @@
     nsgroup, nsid = ns_group_and_id(nsid, defaultnsgroup)
     try:
         nsd = locate_ns(nshost)
-    except errors.PyroError, ex:
+    except errors.PyroError as ex:
         # name server not responding
         _LOGGER.error('can\'t locate pyro name server: %s', ex)
     else:
@@ -159,7 +159,7 @@
     try:
         nsd = locate_ns(nshost)
         pyrouri = nsd.resolve('%s.%s' % (nsgroup, nsid))
-    except errors.ProtocolError, ex:
+    except errors.ProtocolError as ex:
         raise errors.PyroError(
             'Could not connect to the Pyro name server (host: %s)' % nshost)
     except errors.NamingError:
diff --git a/third_party/logilab/common/pytest.py b/third_party/logilab/common/pytest.py
index 2d6ccf9..58515a9 100644
--- a/third_party/logilab/common/pytest.py
+++ b/third_party/logilab/common/pytest.py
@@ -92,6 +92,9 @@
  * ``tata`` and ``titi`` match``rouge ^ carre``
  * ``titi`` match ``rouge and not carre``
 """
+
+from __future__ import print_function
+
 __docformat__ = "restructuredtext en"
 
 PYTEST_DOC = """%prog [OPTIONS] [testfile [testpattern]]
@@ -105,9 +108,6 @@
 
 pytest one (will run both test_thisone and test_thatone)
 pytest path/to/mytests.py -s not (will skip test_notthisone)
-
-pytest --coverage test_foo.py
-  (only if logilab.devtools is available)
 """
 
 ENABLE_DBC = False
@@ -118,13 +118,15 @@
 from time import time, clock
 import warnings
 import types
+from inspect import isgeneratorfunction, isclass
+from contextlib import contextmanager
 
 from logilab.common.fileutils import abspath_listdir
 from logilab.common import textutils
 from logilab.common import testlib, STD_BLACKLIST
 # use the same unittest module as testlib
 from logilab.common.testlib import unittest, start_interactive_mode
-from logilab.common.compat import any
+from logilab.common.deprecation import deprecated
 import doctest
 
 import unittest as unittest_legacy
@@ -145,28 +147,41 @@
 
 CONF_FILE = 'pytestconf.py'
 
-## coverage hacks, do not read this, do not read this, do not read this
+## coverage pausing tools
 
-# hey, but this is an aspect, right ?!!!
+@contextmanager
+def replace_trace(trace=None):
+    """A context manager that temporary replaces the trace function"""
+    oldtrace = sys.gettrace()
+    sys.settrace(trace)
+    try:
+        yield
+    finally:
+        # specific hack to work around a bug in pycoverage, see
+        # https://bitbucket.org/ned/coveragepy/issue/123
+        if (oldtrace is not None and not callable(oldtrace) and
+            hasattr(oldtrace, 'pytrace')):
+            oldtrace = oldtrace.pytrace
+        sys.settrace(oldtrace)
+
+
+def pause_trace():
+    """A context manager that temporary pauses any tracing"""
+    return replace_trace()
+
 class TraceController(object):
-    nesting = 0
+    ctx_stack = []
 
+    @classmethod
+    @deprecated('[lgc 0.63.1] Use the pause_trace() context manager')
     def pause_tracing(cls):
-        if not cls.nesting:
-            cls.tracefunc = staticmethod(getattr(sys, '__settrace__', sys.settrace))
-            cls.oldtracer = getattr(sys, '__tracer__', None)
-            sys.__notrace__ = True
-            cls.tracefunc(None)
-        cls.nesting += 1
-    pause_tracing = classmethod(pause_tracing)
+        cls.ctx_stack.append(pause_trace())
+        cls.ctx_stack[-1].__enter__()
 
+    @classmethod
+    @deprecated('[lgc 0.63.1] Use the pause_trace() context manager')
     def resume_tracing(cls):
-        cls.nesting -= 1
-        assert cls.nesting >= 0
-        if not cls.nesting:
-            cls.tracefunc(cls.oldtracer)
-            delattr(sys, '__notrace__')
-    resume_tracing = classmethod(resume_tracing)
+        cls.ctx_stack.pop().__exit__(None, None, None)
 
 
 pause_tracing = TraceController.pause_tracing
@@ -174,20 +189,18 @@
 
 
 def nocoverage(func):
+    """Function decorator that pauses tracing functions"""
     if hasattr(func, 'uncovered'):
         return func
     func.uncovered = True
+
     def not_covered(*args, **kwargs):
-        pause_tracing()
-        try:
+        with pause_trace():
             return func(*args, **kwargs)
-        finally:
-            resume_tracing()
     not_covered.uncovered = True
     return not_covered
 
-
-## end of coverage hacks
+## end of coverage pausing tools
 
 
 TESTFILE_RE = re.compile("^((unit)?test.*|smoketest)\.py$")
@@ -206,7 +219,7 @@
     and / or tester.
     """
     namespace = {}
-    execfile(path, namespace)
+    exec(open(path, 'rb').read(), namespace)
     if 'update_parser' in namespace:
         namespace['update_parser'](parser)
     return namespace.get('CustomPyTester', PyTester)
@@ -309,7 +322,7 @@
     we **have** to clean sys.modules to make sure the correct test_utils
     module is ran in B
     """
-    for modname, mod in sys.modules.items():
+    for modname, mod in list(sys.modules.items()):
         if mod is None:
             continue
         if not hasattr(mod, '__file__'):
@@ -336,8 +349,8 @@
     def show_report(self):
         """prints the report and returns appropriate exitcode"""
         # everything has been ran, print report
-        print "*" * 79
-        print self.report
+        print("*" * 79)
+        print(self.report)
 
     def get_errcode(self):
         # errcode set explicitly
@@ -360,13 +373,13 @@
                     dirs.remove(skipped)
             basename = osp.basename(dirname)
             if this_is_a_testdir(basename):
-                print "going into", dirname
+                print("going into", dirname)
                 # we found a testdir, let's explore it !
                 if not self.testonedir(dirname, exitfirst):
                     break
                 dirs[:] = []
         if self.report.ran == 0:
-            print "no test dir found testing here:", here
+            print("no test dir found testing here:", here)
             # if no test was found during the visit, consider
             # the local directory as a test directory even if
             # it doesn't have a traditional test directory name
@@ -385,10 +398,11 @@
                     try:
                         restartfile = open(FILE_RESTART, "w")
                         restartfile.close()
-                    except Exception, e:
-                        print >> sys.__stderr__, "Error while overwriting \
-succeeded test file :", osp.join(os.getcwd(), FILE_RESTART)
-                        raise e
+                    except Exception:
+                        print("Error while overwriting succeeded test file :",
+                              osp.join(os.getcwd(), FILE_RESTART),
+                              file=sys.__stderr__)
+                        raise
                 # run test and collect information
                 prog = self.testfile(filename, batchmode=True)
                 if exitfirst and (prog is None or not prog.result.wasSuccessful()):
@@ -412,15 +426,13 @@
             try:
                 restartfile = open(FILE_RESTART, "w")
                 restartfile.close()
-            except Exception, e:
-                print >> sys.__stderr__, "Error while overwriting \
-succeeded test file :", osp.join(os.getcwd(), FILE_RESTART)
-                raise e
+            except Exception:
+                print("Error while overwriting succeeded test file :",
+                      osp.join(os.getcwd(), FILE_RESTART), file=sys.__stderr__)
+                raise
         modname = osp.basename(filename)[:-3]
-        try:
-            print >> sys.stderr, ('  %s  ' % osp.basename(filename)).center(70, '=')
-        except TypeError: # < py 2.4 bw compat
-            print >> sys.stderr, ('  %s  ' % osp.basename(filename)).center(70)
+        print(('  %s  ' % osp.basename(filename)).center(70, '='),
+              file=sys.__stderr__)
         try:
             tstart, cstart = time(), clock()
             try:
@@ -428,16 +440,17 @@
                                                  options=self.options, outstream=sys.stderr)
             except KeyboardInterrupt:
                 raise
-            except SystemExit, exc:
+            except SystemExit as exc:
                 self.errcode = exc.code
                 raise
             except testlib.SkipTest:
-                print "Module skipped:", filename
+                print("Module skipped:", filename)
                 self.report.skip_module(filename)
                 return None
             except Exception:
                 self.report.failed_to_test_module(filename)
-                print >> sys.stderr, 'unhandled exception occurred while testing', modname
+                print('unhandled exception occurred while testing', modname,
+                      file=sys.stderr)
                 import traceback
                 traceback.print_exc(file=sys.stderr)
                 return None
@@ -488,7 +501,7 @@
         from django.test.utils import teardown_test_environment
         from django.test.utils import destroy_test_db
         teardown_test_environment()
-        print 'destroying', self.dbname
+        print('destroying', self.dbname)
         destroy_test_db(self.dbname, verbosity=0)
 
     def testall(self, exitfirst=False):
@@ -506,7 +519,7 @@
             else:
                 basename = osp.basename(dirname)
                 if basename in ('test', 'tests'):
-                    print "going into", dirname
+                    print("going into", dirname)
                     # we found a testdir, let's explore it !
                     if not self.testonedir(dirname, exitfirst):
                         break
@@ -547,7 +560,8 @@
             os.chdir(dirname)
         self.load_django_settings(dirname)
         modname = osp.basename(filename)[:-3]
-        print >>sys.stderr, ('  %s  ' % osp.basename(filename)).center(70, '=')
+        print(('  %s  ' % osp.basename(filename)).center(70, '='),
+              file=sys.stderr)
         try:
             try:
                 tstart, cstart = time(), clock()
@@ -559,12 +573,12 @@
                 return testprog
             except SystemExit:
                 raise
-            except Exception, exc:
+            except Exception as exc:
                 import traceback
                 traceback.print_exc()
                 self.report.failed_to_test_module(filename)
-                print 'unhandled exception occurred while testing', modname
-                print 'error: %s' % exc
+                print('unhandled exception occurred while testing', modname)
+                print('error: %s' % exc)
                 return None
         finally:
             self.after_testfile()
@@ -604,7 +618,7 @@
                       action="callback", help="Verbose output")
     parser.add_option('-i', '--pdb', callback=rebuild_and_store,
                       dest="pdb", action="callback",
-                      help="Enable test failure inspection (conflicts with --coverage)")
+                      help="Enable test failure inspection")
     parser.add_option('-x', '--exitfirst', callback=rebuild_and_store,
                       dest="exitfirst", default=False,
                       action="callback", help="Exit on first failure "
@@ -631,14 +645,6 @@
     parser.add_option('-m', '--match', default=None, dest='tags_pattern',
                       help="only execute test whose tag match the current pattern")
 
-    try:
-        from logilab.devtools.lib.coverage import Coverage
-        parser.add_option('--coverage', dest="coverage", default=False,
-                          action="store_true",
-                          help="run tests with pycoverage (conflicts with --pdb)")
-    except ImportError:
-        pass
-
     if DJANGO_FOUND:
         parser.add_option('-J', '--django', dest='django', default=False,
                           action="store_true",
@@ -652,8 +658,6 @@
     """
     # parse the command line
     options, args = parser.parse_args()
-    if options.pdb and getattr(options, 'coverage', False):
-        parser.error("'pdb' and 'coverage' options are exclusive")
     filenames = [arg for arg in args if arg.endswith('.py')]
     if filenames:
         if len(filenames) > 1:
@@ -683,16 +687,9 @@
     options, explicitfile = parseargs(parser)
     # mock a new command line
     sys.argv[1:] = parser.newargs
-    covermode = getattr(options, 'coverage', None)
     cvg = None
     if not '' in sys.path:
         sys.path.insert(0, '')
-    if covermode:
-        # control_import_coverage(rootdir)
-        from logilab.devtools.lib.coverage import Coverage
-        cvg = Coverage([rootdir])
-        cvg.erase()
-        cvg.start()
     if DJANGO_FOUND and options.django:
         tester = DjangoTester(cvg, options)
     else:
@@ -710,7 +707,7 @@
                 prof = hotshot.Profile(options.profile)
                 prof.runcall(cmd, *args)
                 prof.close()
-                print 'profile data saved in', options.profile
+                print('profile data saved in', options.profile)
             else:
                 cmd(*args)
         except SystemExit:
@@ -719,12 +716,7 @@
             import traceback
             traceback.print_exc()
     finally:
-        if covermode:
-            cvg.stop()
-            cvg.save()
         tester.show_report()
-        if covermode:
-            print 'coverage information stored, use it with pycoverage -ra'
         sys.exit(tester.errcode)
 
 class SkipAwareTestProgram(unittest.TestProgram):
@@ -816,7 +808,7 @@
             else:
                 self.testNames = (self.defaultTest, )
             self.createTests()
-        except getopt.error, msg:
+        except getopt.error as msg:
             self.usageExit(msg)
 
     def runTests(self):
@@ -865,7 +857,7 @@
                     removeSucceededTests(self.test, succeededtests)
                 finally:
                     restartfile.close()
-            except Exception, ex:
+            except Exception as ex:
                 raise Exception("Error while reading succeeded tests into %s: %s"
                                 % (osp.join(os.getcwd(), FILE_RESTART), ex))
 
@@ -907,17 +899,16 @@
         else:
             if isinstance(test, testlib.TestCase):
                 meth = test._get_test_method()
-                func = meth.im_func
-                testname = '%s.%s' % (meth.im_class.__name__, func.__name__)
+                testname = '%s.%s' % (test.__name__, meth.__name__)
             elif isinstance(test, types.FunctionType):
                 func = test
                 testname = func.__name__
             elif isinstance(test, types.MethodType):
-                func = test.im_func
-                testname = '%s.%s' % (test.im_class.__name__, func.__name__)
+                cls = test.__self__.__class__
+                testname = '%s.%s' % (cls.__name__, test.__name__)
             else:
                 return True # Not sure when this happens
-            if testlib.is_generator(test) and skipgenerator:
+            if isgeneratorfunction(test) and skipgenerator:
                 return self.does_match_tags(test) # Let inner tests decide at run time
         if self._this_is_skipped(testname):
             return False # this was explicitly skipped
@@ -1025,8 +1016,7 @@
     def _collect_tests(self, module):
         tests = {}
         for obj in vars(module).values():
-            if (issubclass(type(obj), (types.ClassType, type)) and
-                 issubclass(obj, unittest.TestCase)):
+            if isclass(obj) and issubclass(obj, unittest.TestCase):
                 classname = obj.__name__
                 if classname[0] == '_' or self._this_is_skipped(classname):
                     continue
@@ -1105,8 +1095,14 @@
                 testCaseClass)
         return [testname for testname in testnames if not is_skipped(testname)]
 
+
+# The 2 functions below are modified versions of the TestSuite.run method
+# that is provided with unittest2 for python 2.6, in unittest2/suite.py
+# It is used to monkeypatch the original implementation to support
+# extra runcondition and options arguments (see in testlib.py)
+
 def _ts_run(self, result, runcondition=None, options=None):
-    self._wrapped_run(result,runcondition=runcondition, options=options)
+    self._wrapped_run(result, runcondition=runcondition, options=options)
     self._tearDownPreviousClass(None, result)
     self._handleModuleTearDown(result)
     return result
@@ -1120,10 +1116,17 @@
             self._handleModuleFixture(test, result)
             self._handleClassSetUp(test, result)
             result._previousTestClass = test.__class__
-            if (getattr(test.__class__, '_classSetupFailed', False) or 
+            if (getattr(test.__class__, '_classSetupFailed', False) or
                 getattr(result, '_moduleSetUpFailed', False)):
                 continue
 
+        # --- modifications to deal with _wrapped_run ---
+        # original code is:
+        #
+        # if not debug:
+        #     test(result)
+        # else:
+        #     test.debug()
         if hasattr(test, '_wrapped_run'):
             try:
                 test._wrapped_run(result, debug, runcondition=runcondition, options=options)
@@ -1136,6 +1139,25 @@
                 test(result)
         else:
             test.debug()
+        # --- end of modifications to deal with _wrapped_run ---
+    return result
+
+if sys.version_info >= (2, 7):
+    # The function below implements a modified version of the
+    # TestSuite.run method that is provided with python 2.7, in
+    # unittest/suite.py
+    def _ts_run(self, result, debug=False, runcondition=None, options=None):
+        topLevel = False
+        if getattr(result, '_testRunEntered', False) is False:
+            result._testRunEntered = topLevel = True
+
+        self._wrapped_run(result, debug, runcondition, options)
+
+        if topLevel:
+            self._tearDownPreviousClass(None, result)
+            self._handleModuleTearDown(result)
+            result._testRunEntered = False
+        return result
 
 
 def enable_dbc(*args):
diff --git a/third_party/logilab/common/registry.py b/third_party/logilab/common/registry.py
new file mode 100644
index 0000000..a52b2eb
--- /dev/null
+++ b/third_party/logilab/common/registry.py
@@ -0,0 +1,1119 @@
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of Logilab-common.
+#
+# Logilab-common is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 2.1 of the License, or (at your
+# option) any later version.
+#
+# Logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with Logilab-common.  If not, see <http://www.gnu.org/licenses/>.
+"""This module provides bases for predicates dispatching (the pattern in use
+here is similar to what's referred to as multi-dispatch or predicate-dispatch in the
+literature, though a bit different since the idea is to select across different
+implementations (e.g. classes), not to dispatch a message to a function or
+method. It contains the following classes:
+
+* :class:`RegistryStore`, the top level object which loads implementation
+  objects and stores them into registries. You'll usually use it to access
+  registries and their contained objects;
+
+* :class:`Registry`, the base class which contains objects semantically grouped
+  (for instance, sharing a same API, hence the 'implementation' name). You'll
+  use it to select the proper implementation according to a context. Notice you
+  may use registries on their own without using the store.
+
+.. Note::
+
+  implementation objects are usually designed to be accessed through the
+  registry and not by direct instantiation, besides using it as a base class.
+
+The selection procedure is delegated to a selector, which is responsible for
+scoring the object according to some context. At the end of the selection, if an
+implementation has been found, an instance of this class is returned. A selector
+is built from one or more predicates combined together using AND, OR, NOT
+operators (actually `&`, `|` and `~`). You'll thus find some base classes to
+build predicates:
+
+* :class:`Predicate`, the abstract base predicate class
+
+* :class:`AndPredicate`, :class:`OrPredicate`, :class:`NotPredicate`, which you
+  shouldn't have to use directly. You'll use `&`, `|` and '~' operators between
+  predicates directly
+
+* :func:`objectify_predicate`
+
+You'll eventually find one concrete predicate: :class:`yes`
+
+.. autoclass:: RegistryStore
+.. autoclass:: Registry
+
+Predicates
+----------
+.. autoclass:: Predicate
+.. autofunction:: objectify_predicate
+.. autoclass:: yes
+
+Debugging
+---------
+.. autoclass:: traced_selection
+
+Exceptions
+----------
+.. autoclass:: RegistryException
+.. autoclass:: RegistryNotFound
+.. autoclass:: ObjectNotFound
+.. autoclass:: NoSelectableObject
+"""
+
+from __future__ import print_function
+
+__docformat__ = "restructuredtext en"
+
+import sys
+import types
+import weakref
+import traceback as tb
+from os import listdir, stat
+from os.path import join, isdir, exists
+from logging import getLogger
+from warnings import warn
+
+from six import string_types, add_metaclass
+
+from logilab.common.modutils import modpath_from_file
+from logilab.common.logging_ext import set_log_methods
+from logilab.common.decorators import classproperty
+
+
+class RegistryException(Exception):
+    """Base class for registry exception."""
+
+class RegistryNotFound(RegistryException):
+    """Raised when an unknown registry is requested.
+
+    This is usually a programming/typo error.
+    """
+
+class ObjectNotFound(RegistryException):
+    """Raised when an unregistered object is requested.
+
+    This may be a programming/typo or a misconfiguration error.
+    """
+
+class NoSelectableObject(RegistryException):
+    """Raised when no object is selectable for a given context."""
+    def __init__(self, args, kwargs, objects):
+        self.args = args
+        self.kwargs = kwargs
+        self.objects = objects
+
+    def __str__(self):
+        return ('args: %s, kwargs: %s\ncandidates: %s'
+                % (self.args, self.kwargs.keys(), self.objects))
+
+
+def _modname_from_path(path, extrapath=None):
+    modpath = modpath_from_file(path, extrapath)
+    # omit '__init__' from package's name to avoid loading that module
+    # once for each name when it is imported by some other object
+    # module. This supposes import in modules are done as::
+    #
+    #   from package import something
+    #
+    # not::
+    #
+    #   from package.__init__ import something
+    #
+    # which seems quite correct.
+    if modpath[-1] == '__init__':
+        modpath.pop()
+    return '.'.join(modpath)
+
+
+def _toload_info(path, extrapath, _toload=None):
+    """Return a dictionary of <modname>: <modpath> and an ordered list of
+    (file, module name) to load
+    """
+    if _toload is None:
+        assert isinstance(path, list)
+        _toload = {}, []
+    for fileordir in path:
+        if isdir(fileordir) and exists(join(fileordir, '__init__.py')):
+            subfiles = [join(fileordir, fname) for fname in listdir(fileordir)]
+            _toload_info(subfiles, extrapath, _toload)
+        elif fileordir[-3:] == '.py':
+            modname = _modname_from_path(fileordir, extrapath)
+            _toload[0][modname] = fileordir
+            _toload[1].append((fileordir, modname))
+    return _toload
+
+
+class RegistrableObject(object):
+    """This is the base class for registrable objects which are selected
+    according to a context.
+
+    :attr:`__registry__`
+      name of the registry for this object (string like 'views',
+      'templates'...). You may want to define `__registries__` directly if your
+      object should be registered in several registries.
+
+    :attr:`__regid__`
+      object's identifier in the registry (string like 'main',
+      'primary', 'folder_box')
+
+    :attr:`__select__`
+      the class's selector
+
+    Moreover, the `__abstract__` attribute may be set to True to indicate that a
+    class is abstract and should not be registered.
+
+    You don't have to inherit from this class to put it in a registry (having
+    `__regid__` and `__select__` is enough), though this is needed for classes
+    that should be automatically registered.
+    """
+
+    __registry__ = None
+    __regid__ = None
+    __select__ = None
+    __abstract__ = True # see doc snippets below (in Registry class)
+
+    @classproperty
+    def __registries__(cls):
+        if cls.__registry__ is None:
+            return ()
+        return (cls.__registry__,)
+
+
+class RegistrableInstance(RegistrableObject):
+    """Inherit this class if you want instances of the classes to be
+    automatically registered.
+    """
+
+    def __new__(cls, *args, **kwargs):
+        """Add a __module__ attribute telling the module where the instance was
+        created, for automatic registration.
+        """
+        obj = super(RegistrableInstance, cls).__new__(cls)
+        # XXX subclass must not override __new__
+        filepath = tb.extract_stack(limit=2)[0][0]
+        obj.__module__ = _modname_from_path(filepath)
+        return obj
+
+
+class Registry(dict):
+    """The registry store a set of implementations associated to identifier:
+
+    * to each identifier are associated a list of implementations
+
+    * to select an implementation of a given identifier, you should use one of the
+      :meth:`select` or :meth:`select_or_none` method
+
+    * to select a list of implementations for a context, you should use the
+      :meth:`possible_objects` method
+
+    * dictionary like access to an identifier will return the bare list of
+      implementations for this identifier.
+
+    To be usable in a registry, the only requirement is to have a `__select__`
+    attribute.
+
+    At the end of the registration process, the :meth:`__registered__`
+    method is called on each registered object which have them, given the
+    registry in which it's registered as argument.
+
+    Registration methods:
+
+    .. automethod: register
+    .. automethod: unregister
+
+    Selection methods:
+
+    .. automethod: select
+    .. automethod: select_or_none
+    .. automethod: possible_objects
+    .. automethod: object_by_id
+    """
+    def __init__(self, debugmode):
+        super(Registry, self).__init__()
+        self.debugmode = debugmode
+
+    def __getitem__(self, name):
+        """return the registry (list of implementation objects) associated to
+        this name
+        """
+        try:
+            return super(Registry, self).__getitem__(name)
+        except KeyError:
+            exc = ObjectNotFound(name)
+            exc.__traceback__ = sys.exc_info()[-1]
+            raise exc
+
+    @classmethod
+    def objid(cls, obj):
+        """returns a unique identifier for an object stored in the registry"""
+        return '%s.%s' % (obj.__module__, cls.objname(obj))
+
+    @classmethod
+    def objname(cls, obj):
+        """returns a readable name for an object stored in the registry"""
+        return getattr(obj, '__name__', id(obj))
+
+    def initialization_completed(self):
+        """call method __registered__() on registered objects when the callback
+        is defined"""
+        for objects in self.values():
+            for objectcls in objects:
+                registered = getattr(objectcls, '__registered__', None)
+                if registered:
+                    registered(self)
+        if self.debugmode:
+            wrap_predicates(_lltrace)
+
+    def register(self, obj, oid=None, clear=False):
+        """base method to add an object in the registry"""
+        assert not '__abstract__' in obj.__dict__, obj
+        assert obj.__select__, obj
+        oid = oid or obj.__regid__
+        assert oid, ('no explicit name supplied to register object %s, '
+                     'which has no __regid__ set' % obj)
+        if clear:
+            objects = self[oid] =  []
+        else:
+            objects = self.setdefault(oid, [])
+        assert not obj in objects, 'object %s is already registered' % obj
+        objects.append(obj)
+
+    def register_and_replace(self, obj, replaced):
+        """remove <replaced> and register <obj>"""
+        # XXXFIXME this is a duplication of unregister()
+        # remove register_and_replace in favor of unregister + register
+        # or simplify by calling unregister then register here
+        if not isinstance(replaced, string_types):
+            replaced = self.objid(replaced)
+        # prevent from misspelling
+        assert obj is not replaced, 'replacing an object by itself: %s' % obj
+        registered_objs = self.get(obj.__regid__, ())
+        for index, registered in enumerate(registered_objs):
+            if self.objid(registered) == replaced:
+                del registered_objs[index]
+                break
+        else:
+            self.warning('trying to replace %s that is not registered with %s',
+                         replaced, obj)
+        self.register(obj)
+
+    def unregister(self, obj):
+        """remove object <obj> from this registry"""
+        objid = self.objid(obj)
+        oid = obj.__regid__
+        for registered in self.get(oid, ()):
+            # use self.objid() to compare objects because vreg will probably
+            # have its own version of the object, loaded through execfile
+            if self.objid(registered) == objid:
+                self[oid].remove(registered)
+                break
+        else:
+            self.warning('can\'t remove %s, no id %s in the registry',
+                         objid, oid)
+
+    def all_objects(self):
+        """return a list containing all objects in this registry.
+        """
+        result = []
+        for objs in self.values():
+            result += objs
+        return result
+
+    # dynamic selection methods ################################################
+
+    def object_by_id(self, oid, *args, **kwargs):
+        """return object with the `oid` identifier. Only one object is expected
+        to be found.
+
+        raise :exc:`ObjectNotFound` if there are no object with id `oid` in this
+        registry
+
+        raise :exc:`AssertionError` if there is more than one object there
+        """
+        objects = self[oid]
+        assert len(objects) == 1, objects
+        return objects[0](*args, **kwargs)
+
+    def select(self, __oid, *args, **kwargs):
+        """return the most specific object among those with the given oid
+        according to the given context.
+
+        raise :exc:`ObjectNotFound` if there are no object with id `oid` in this
+        registry
+
+        raise :exc:`NoSelectableObject` if no object can be selected
+        """
+        obj =  self._select_best(self[__oid], *args, **kwargs)
+        if obj is None:
+            raise NoSelectableObject(args, kwargs, self[__oid] )
+        return obj
+
+    def select_or_none(self, __oid, *args, **kwargs):
+        """return the most specific object among those with the given oid
+        according to the given context, or None if no object applies.
+        """
+        try:
+            return self._select_best(self[__oid], *args, **kwargs)
+        except ObjectNotFound:
+            return None
+
+    def possible_objects(self, *args, **kwargs):
+        """return an iterator on possible objects in this registry for the given
+        context
+        """
+        for objects in self.values():
+            obj = self._select_best(objects,  *args, **kwargs)
+            if obj is None:
+                continue
+            yield obj
+
+    def _select_best(self, objects, *args, **kwargs):
+        """return an instance of the most specific object according
+        to parameters
+
+        return None if no object applies (don't raise `NoSelectableObject` since
+        it's costly when searching objects using `possible_objects`
+        (e.g. searching for hooks).
+        """
+        score, winners = 0, None
+        for obj in objects:
+            objectscore = obj.__select__(obj, *args, **kwargs)
+            if objectscore > score:
+                score, winners = objectscore, [obj]
+            elif objectscore > 0 and objectscore == score:
+                winners.append(obj)
+        if winners is None:
+            return None
+        if len(winners) > 1:
+            # log in production environment / test, error while debugging
+            msg = 'select ambiguity: %s\n(args: %s, kwargs: %s)'
+            if self.debugmode:
+                # raise bare exception in debug mode
+                raise Exception(msg % (winners, args, kwargs.keys()))
+            self.error(msg, winners, args, kwargs.keys())
+        # return the result of calling the object
+        return self.selected(winners[0], args, kwargs)
+
+    def selected(self, winner, args, kwargs):
+        """override here if for instance you don't want "instanciation"
+        """
+        return winner(*args, **kwargs)
+
+    # these are overridden by set_log_methods below
+    # only defining here to prevent pylint from complaining
+    info = warning = error = critical = exception = debug = lambda msg, *a, **kw: None
+
+
+def obj_registries(cls, registryname=None):
+    """return a tuple of registry names (see __registries__)"""
+    if registryname:
+        return (registryname,)
+    return cls.__registries__
+
+
+class RegistryStore(dict):
+    """This class is responsible for loading objects and storing them
+    in their registry which is created on the fly as needed.
+
+    It handles dynamic registration of objects and provides a
+    convenient api to access them. To be recognized as an object that
+    should be stored into one of the store's registry
+    (:class:`Registry`), an object must provide the following
+    attributes, used to control how they interact with the registry:
+
+    :attr:`__registries__`
+      list of registry names (string like 'views', 'templates'...) into which
+      the object should be registered
+
+    :attr:`__regid__`
+      object identifier in the registry (string like 'main',
+      'primary', 'folder_box')
+
+    :attr:`__select__`
+      the object predicate selectors
+
+    Moreover, the :attr:`__abstract__` attribute may be set to `True`
+    to indicate that an object is abstract and should not be registered
+    (such inherited attributes not considered).
+
+    .. Note::
+
+      When using the store to load objects dynamically, you *always* have
+      to use **super()** to get the methods and attributes of the
+      superclasses, and not use the class identifier. If not, you'll get into
+      trouble at reload time.
+
+      For example, instead of writing::
+
+          class Thing(Parent):
+              __regid__ = 'athing'
+              __select__ = yes()
+
+              def f(self, arg1):
+                  Parent.f(self, arg1)
+
+      You must write::
+
+          class Thing(Parent):
+              __regid__ = 'athing'
+              __select__ = yes()
+
+              def f(self, arg1):
+                  super(Thing, self).f(arg1)
+
+    Controlling object registration
+    -------------------------------
+
+    Dynamic loading is triggered by calling the
+    :meth:`register_objects` method, given a list of directories to
+    inspect for python modules.
+
+    .. automethod: register_objects
+
+    For each module, by default, all compatible objects are registered
+    automatically. However if some objects come as replacement of
+    other objects, or have to be included only if some condition is
+    met, you'll have to define a `registration_callback(vreg)`
+    function in the module and explicitly register **all objects** in
+    this module, using the api defined below.
+
+
+    .. automethod:: RegistryStore.register_all
+    .. automethod:: RegistryStore.register_and_replace
+    .. automethod:: RegistryStore.register
+    .. automethod:: RegistryStore.unregister
+
+    .. Note::
+        Once the function `registration_callback(vreg)` is implemented in a
+        module, all the objects from this module have to be explicitly
+        registered as it disables the automatic object registration.
+
+
+    Examples:
+
+    .. sourcecode:: python
+
+       def registration_callback(store):
+          # register everything in the module except BabarClass
+          store.register_all(globals().values(), __name__, (BabarClass,))
+
+          # conditionally register BabarClass
+          if 'babar_relation' in store.schema:
+              store.register(BabarClass)
+
+    In this example, we register all application object classes defined in the module
+    except `BabarClass`. This class is then registered only if the 'babar_relation'
+    relation type is defined in the instance schema.
+
+    .. sourcecode:: python
+
+       def registration_callback(store):
+          store.register(Elephant)
+          # replace Babar by Celeste
+          store.register_and_replace(Celeste, Babar)
+
+    In this example, we explicitly register classes one by one:
+
+    * the `Elephant` class
+    * the `Celeste` to replace `Babar`
+
+    If at some point we register a new appobject class in this module, it won't be
+    registered at all without modification to the `registration_callback`
+    implementation. The first example will register it though, thanks to the call
+    to the `register_all` method.
+
+    Controlling registry instantiation
+    ----------------------------------
+
+    The `REGISTRY_FACTORY` class dictionary allows to specify which class should
+    be instantiated for a given registry name. The class associated to `None`
+    key will be the class used when there is no specific class for a name.
+    """
+
+    def __init__(self, debugmode=False):
+        super(RegistryStore, self).__init__()
+        self.debugmode = debugmode
+
+    def reset(self):
+        """clear all registries managed by this store"""
+        # don't use self.clear, we want to keep existing subdictionaries
+        for subdict in self.values():
+            subdict.clear()
+        self._lastmodifs = {}
+
+    def __getitem__(self, name):
+        """return the registry (dictionary of class objects) associated to
+        this name
+        """
+        try:
+            return super(RegistryStore, self).__getitem__(name)
+        except KeyError:
+            exc = RegistryNotFound(name)
+            exc.__traceback__ = sys.exc_info()[-1]
+            raise exc
+
+    # methods for explicit (un)registration ###################################
+
+    # default class, when no specific class set
+    REGISTRY_FACTORY = {None: Registry}
+
+    def registry_class(self, regid):
+        """return existing registry named regid or use factory to create one and
+        return it"""
+        try:
+            return self.REGISTRY_FACTORY[regid]
+        except KeyError:
+            return self.REGISTRY_FACTORY[None]
+
+    def setdefault(self, regid):
+        try:
+            return self[regid]
+        except RegistryNotFound:
+            self[regid] = self.registry_class(regid)(self.debugmode)
+            return self[regid]
+
+    def register_all(self, objects, modname, butclasses=()):
+        """register registrable objects into `objects`.
+
+        Registrable objects are properly configured subclasses of
+        :class:`RegistrableObject`.  Objects which are not defined in the module
+        `modname` or which are in `butclasses` won't be registered.
+
+        Typical usage is:
+
+        .. sourcecode:: python
+
+            store.register_all(globals().values(), __name__, (ClassIWantToRegisterExplicitly,))
+
+        So you get partially automatic registration, keeping manual registration
+        for some object (to use
+        :meth:`~logilab.common.registry.RegistryStore.register_and_replace` for
+        instance).
+        """
+        assert isinstance(modname, string_types), \
+            'modname expected to be a module name (ie string), got %r' % modname
+        for obj in objects:
+            if self.is_registrable(obj) and obj.__module__ == modname and not obj in butclasses:
+                if isinstance(obj, type):
+                    self._load_ancestors_then_object(modname, obj, butclasses)
+                else:
+                    self.register(obj)
+
+    def register(self, obj, registryname=None, oid=None, clear=False):
+        """register `obj` implementation into `registryname` or
+        `obj.__registries__` if not specified, with identifier `oid` or
+        `obj.__regid__` if not specified.
+
+        If `clear` is true, all objects with the same identifier will be
+        previously unregistered.
+        """
+        assert not obj.__dict__.get('__abstract__'), obj
+        for registryname in obj_registries(obj, registryname):
+            registry = self.setdefault(registryname)
+            registry.register(obj, oid=oid, clear=clear)
+            self.debug("register %s in %s['%s']",
+                       registry.objname(obj), registryname, oid or obj.__regid__)
+            self._loadedmods.setdefault(obj.__module__, {})[registry.objid(obj)] = obj
+
+    def unregister(self, obj, registryname=None):
+        """unregister `obj` object from the registry `registryname` or
+        `obj.__registries__` if not specified.
+        """
+        for registryname in obj_registries(obj, registryname):
+            registry = self[registryname]
+            registry.unregister(obj)
+            self.debug("unregister %s from %s['%s']",
+                       registry.objname(obj), registryname, obj.__regid__)
+
+    def register_and_replace(self, obj, replaced, registryname=None):
+        """register `obj` object into `registryname` or
+        `obj.__registries__` if not specified. If found, the `replaced` object
+        will be unregistered first (else a warning will be issued as it is
+        generally unexpected).
+        """
+        for registryname in obj_registries(obj, registryname):
+            registry = self[registryname]
+            registry.register_and_replace(obj, replaced)
+            self.debug("register %s in %s['%s'] instead of %s",
+                       registry.objname(obj), registryname, obj.__regid__,
+                       registry.objname(replaced))
+
+    # initialization methods ###################################################
+
+    def init_registration(self, path, extrapath=None):
+        """reset registry and walk down path to return list of (path, name)
+        file modules to be loaded"""
+        # XXX make this private by renaming it to _init_registration ?
+        self.reset()
+        # compute list of all modules that have to be loaded
+        self._toloadmods, filemods = _toload_info(path, extrapath)
+        # XXX is _loadedmods still necessary ? It seems like it's useful
+        #     to avoid loading same module twice, especially with the
+        #     _load_ancestors_then_object logic but this needs to be checked
+        self._loadedmods = {}
+        return filemods
+
+    def register_objects(self, path, extrapath=None):
+        """register all objects found walking down <path>"""
+        # load views from each directory in the instance's path
+        # XXX inline init_registration ?
+        filemods = self.init_registration(path, extrapath)
+        for filepath, modname in filemods:
+            self.load_file(filepath, modname)
+        self.initialization_completed()
+
+    def initialization_completed(self):
+        """call initialization_completed() on all known registries"""
+        for reg in self.values():
+            reg.initialization_completed()
+
+    def _mdate(self, filepath):
+        """ return the modification date of a file path """
+        try:
+            return stat(filepath)[-2]
+        except OSError:
+            # this typically happens on emacs backup files (.#foo.py)
+            self.warning('Unable to load %s. It is likely to be a backup file',
+                         filepath)
+            return None
+
+    def is_reload_needed(self, path):
+        """return True if something module changed and the registry should be
+        reloaded
+        """
+        lastmodifs = self._lastmodifs
+        for fileordir in path:
+            if isdir(fileordir) and exists(join(fileordir, '__init__.py')):
+                if self.is_reload_needed([join(fileordir, fname)
+                                          for fname in listdir(fileordir)]):
+                    return True
+            elif fileordir[-3:] == '.py':
+                mdate = self._mdate(fileordir)
+                if mdate is None:
+                    continue # backup file, see _mdate implementation
+                elif "flymake" in fileordir:
+                    # flymake + pylint in use, don't consider these they will corrupt the registry
+                    continue
+                if fileordir not in lastmodifs or lastmodifs[fileordir] < mdate:
+                    self.info('File %s changed since last visit', fileordir)
+                    return True
+        return False
+
+    def load_file(self, filepath, modname):
+        """ load registrable objects (if any) from a python file """
+        from logilab.common.modutils import load_module_from_name
+        if modname in self._loadedmods:
+            return
+        self._loadedmods[modname] = {}
+        mdate = self._mdate(filepath)
+        if mdate is None:
+            return # backup file, see _mdate implementation
+        elif "flymake" in filepath:
+            # flymake + pylint in use, don't consider these they will corrupt the registry
+            return
+        # set update time before module loading, else we get some reloading
+        # weirdness in case of syntax error or other error while importing the
+        # module
+        self._lastmodifs[filepath] = mdate
+        # load the module
+        module = load_module_from_name(modname)
+        self.load_module(module)
+
+    def load_module(self, module):
+        """Automatically handle module objects registration.
+
+        Instances are registered as soon as they are hashable and have the
+        following attributes:
+
+        * __regid__ (a string)
+        * __select__ (a callable)
+        * __registries__ (a tuple/list of string)
+
+        For classes this is a bit more complicated :
+
+        - first ensure parent classes are already registered
+
+        - class with __abstract__ == True in their local dictionary are skipped
+
+        - object class needs to have registries and identifier properly set to a
+          non empty string to be registered.
+        """
+        self.info('loading %s from %s', module.__name__, module.__file__)
+        if hasattr(module, 'registration_callback'):
+            module.registration_callback(self)
+        else:
+            self.register_all(vars(module).values(), module.__name__)
+
+    def _load_ancestors_then_object(self, modname, objectcls, butclasses=()):
+        """handle class registration according to rules defined in
+        :meth:`load_module`
+        """
+        # backward compat, we used to allow whatever else than classes
+        if not isinstance(objectcls, type):
+            if self.is_registrable(objectcls) and objectcls.__module__ == modname:
+                self.register(objectcls)
+            return
+        # imported classes
+        objmodname = objectcls.__module__
+        if objmodname != modname:
+            # The module of the object is not the same as the currently
+            # worked on module, or this is actually an instance, which
+            # has no module at all
+            if objmodname in self._toloadmods:
+                # if this is still scheduled for loading, let's proceed immediately,
+                # but using the object module
+                self.load_file(self._toloadmods[objmodname], objmodname)
+            return
+        # ensure object hasn't been already processed
+        clsid = '%s.%s' % (modname, objectcls.__name__)
+        if clsid in self._loadedmods[modname]:
+            return
+        self._loadedmods[modname][clsid] = objectcls
+        # ensure ancestors are registered
+        for parent in objectcls.__bases__:
+            self._load_ancestors_then_object(modname, parent, butclasses)
+        # ensure object is registrable
+        if objectcls in butclasses or not self.is_registrable(objectcls):
+            return
+        # backward compat
+        reg = self.setdefault(obj_registries(objectcls)[0])
+        if reg.objname(objectcls)[0] == '_':
+            warn("[lgc 0.59] object whose name start with '_' won't be "
+                 "skipped anymore at some point, use __abstract__ = True "
+                 "instead (%s)" % objectcls, DeprecationWarning)
+            return
+        # register, finally
+        self.register(objectcls)
+
+    @classmethod
+    def is_registrable(cls, obj):
+        """ensure `obj` should be registered
+
+        as arbitrary stuff may be registered, do a lot of check and warn about
+        weird cases (think to dumb proxy objects)
+        """
+        if isinstance(obj, type):
+            if not issubclass(obj, RegistrableObject):
+                # ducktyping backward compat
+                if not (getattr(obj, '__registries__', None)
+                        and getattr(obj, '__regid__', None)
+                        and getattr(obj, '__select__', None)):
+                    return False
+            elif issubclass(obj, RegistrableInstance):
+                return False
+        elif not isinstance(obj, RegistrableInstance):
+            return False
+        if not obj.__regid__:
+            return False # no regid
+        registries = obj.__registries__
+        if not registries:
+            return False # no registries
+        selector = obj.__select__
+        if not selector:
+            return False # no selector
+        if obj.__dict__.get('__abstract__', False):
+            return False
+        # then detect potential problems that should be warned
+        if not isinstance(registries, (tuple, list)):
+            cls.warning('%s has __registries__ which is not a list or tuple', obj)
+            return False
+        if not callable(selector):
+            cls.warning('%s has not callable __select__', obj)
+            return False
+        return True
+
+    # these are overridden by set_log_methods below
+    # only defining here to prevent pylint from complaining
+    info = warning = error = critical = exception = debug = lambda msg, *a, **kw: None
+
+
+# init logging
+set_log_methods(RegistryStore, getLogger('registry.store'))
+set_log_methods(Registry, getLogger('registry'))
+
+
+# helpers for debugging selectors
+TRACED_OIDS = None
+
+def _trace_selector(cls, selector, args, ret):
+    vobj = args[0]
+    if TRACED_OIDS == 'all' or vobj.__regid__ in TRACED_OIDS:
+        print('%s -> %s for %s(%s)' % (cls, ret, vobj, vobj.__regid__))
+
+def _lltrace(selector):
+    """use this decorator on your predicates so they become traceable with
+    :class:`traced_selection`
+    """
+    def traced(cls, *args, **kwargs):
+        ret = selector(cls, *args, **kwargs)
+        if TRACED_OIDS is not None:
+            _trace_selector(cls, selector, args, ret)
+        return ret
+    traced.__name__ = selector.__name__
+    traced.__doc__ = selector.__doc__
+    return traced
+
+class traced_selection(object): # pylint: disable=C0103
+    """
+    Typical usage is :
+
+    .. sourcecode:: python
+
+        >>> from logilab.common.registry import traced_selection
+        >>> with traced_selection():
+        ...     # some code in which you want to debug selectors
+        ...     # for all objects
+
+    This will yield lines like this in the logs::
+
+        selector one_line_rset returned 0 for <class 'elephant.Babar'>
+
+    You can also give to :class:`traced_selection` the identifiers of objects on
+    which you want to debug selection ('oid1' and 'oid2' in the example above).
+
+    .. sourcecode:: python
+
+        >>> with traced_selection( ('regid1', 'regid2') ):
+        ...     # some code in which you want to debug selectors
+        ...     # for objects with __regid__ 'regid1' and 'regid2'
+
+    A potentially useful point to set up such a tracing function is
+    the `logilab.common.registry.Registry.select` method body.
+    """
+
+    def __init__(self, traced='all'):
+        self.traced = traced
+
+    def __enter__(self):
+        global TRACED_OIDS
+        TRACED_OIDS = self.traced
+
+    def __exit__(self, exctype, exc, traceback):
+        global TRACED_OIDS
+        TRACED_OIDS = None
+        return traceback is None
+
+# selector base classes and operations ########################################
+
+def objectify_predicate(selector_func):
+    """Most of the time, a simple score function is enough to build a selector.
+    The :func:`objectify_predicate` decorator turn it into a proper selector
+    class::
+
+        @objectify_predicate
+        def one(cls, req, rset=None, **kwargs):
+            return 1
+
+        class MyView(View):
+            __select__ = View.__select__ & one()
+
+    """
+    return type(selector_func.__name__, (Predicate,),
+                {'__doc__': selector_func.__doc__,
+                 '__call__': lambda self, *a, **kw: selector_func(*a, **kw)})
+
+
+_PREDICATES = {}
+
+def wrap_predicates(decorator):
+    for predicate in _PREDICATES.values():
+        if not '_decorators' in predicate.__dict__:
+            predicate._decorators = set()
+        if decorator in predicate._decorators:
+            continue
+        predicate._decorators.add(decorator)
+        predicate.__call__ = decorator(predicate.__call__)
+
+class PredicateMetaClass(type):
+    def __new__(mcs, *args, **kwargs):
+        # use __new__ so subclasses doesn't have to call Predicate.__init__
+        inst = type.__new__(mcs, *args, **kwargs)
+        proxy = weakref.proxy(inst, lambda p: _PREDICATES.pop(id(p)))
+        _PREDICATES[id(proxy)] = proxy
+        return inst
+
+
+@add_metaclass(PredicateMetaClass)
+class Predicate(object):
+    """base class for selector classes providing implementation
+    for operators ``&``, ``|`` and  ``~``
+
+    This class is only here to give access to binary operators, the selector
+    logic itself should be implemented in the :meth:`__call__` method. Notice it
+    should usually accept any arbitrary arguments (the context), though that may
+    vary depending on your usage of the registry.
+
+    a selector is called to help choosing the correct object for a
+    particular context by returning a score (`int`) telling how well
+    the implementation given as first argument fit to the given context.
+
+    0 score means that the class doesn't apply.
+    """
+
+    @property
+    def func_name(self):
+        # backward compatibility
+        return self.__class__.__name__
+
+    def search_selector(self, selector):
+        """search for the given selector, selector instance or tuple of
+        selectors in the selectors tree. Return None if not found.
+        """
+        if self is selector:
+            return self
+        if (isinstance(selector, type) or isinstance(selector, tuple)) and \
+               isinstance(self, selector):
+            return self
+        return None
+
+    def __str__(self):
+        return self.__class__.__name__
+
+    def __and__(self, other):
+        return AndPredicate(self, other)
+    def __rand__(self, other):
+        return AndPredicate(other, self)
+    def __iand__(self, other):
+        return AndPredicate(self, other)
+    def __or__(self, other):
+        return OrPredicate(self, other)
+    def __ror__(self, other):
+        return OrPredicate(other, self)
+    def __ior__(self, other):
+        return OrPredicate(self, other)
+
+    def __invert__(self):
+        return NotPredicate(self)
+
+    # XXX (function | function) or (function & function) not managed yet
+
+    def __call__(self, cls, *args, **kwargs):
+        return NotImplementedError("selector %s must implement its logic "
+                                   "in its __call__ method" % self.__class__)
+
+    def __repr__(self):
+        return u'<Predicate %s at %x>' % (self.__class__.__name__, id(self))
+
+
+class MultiPredicate(Predicate):
+    """base class for compound selector classes"""
+
+    def __init__(self, *selectors):
+        self.selectors = self.merge_selectors(selectors)
+
+    def __str__(self):
+        return '%s(%s)' % (self.__class__.__name__,
+                           ','.join(str(s) for s in self.selectors))
+
+    @classmethod
+    def merge_selectors(cls, selectors):
+        """deal with selector instanciation when necessary and merge
+        multi-selectors if possible:
+
+        AndPredicate(AndPredicate(sel1, sel2), AndPredicate(sel3, sel4))
+        ==> AndPredicate(sel1, sel2, sel3, sel4)
+        """
+        merged_selectors = []
+        for selector in selectors:
+            # XXX do we really want magic-transformations below?
+            # if so, wanna warn about them?
+            if isinstance(selector, types.FunctionType):
+                selector = objectify_predicate(selector)()
+            if isinstance(selector, type) and issubclass(selector, Predicate):
+                selector = selector()
+            assert isinstance(selector, Predicate), selector
+            if isinstance(selector, cls):
+                merged_selectors += selector.selectors
+            else:
+                merged_selectors.append(selector)
+        return merged_selectors
+
+    def search_selector(self, selector):
+        """search for the given selector or selector instance (or tuple of
+        selectors) in the selectors tree. Return None if not found
+        """
+        for childselector in self.selectors:
+            if childselector is selector:
+                return childselector
+            found = childselector.search_selector(selector)
+            if found is not None:
+                return found
+        # if not found in children, maybe we are looking for self?
+        return super(MultiPredicate, self).search_selector(selector)
+
+
+class AndPredicate(MultiPredicate):
+    """and-chained selectors"""
+    def __call__(self, cls, *args, **kwargs):
+        score = 0
+        for selector in self.selectors:
+            partscore = selector(cls, *args, **kwargs)
+            if not partscore:
+                return 0
+            score += partscore
+        return score
+
+
+class OrPredicate(MultiPredicate):
+    """or-chained selectors"""
+    def __call__(self, cls, *args, **kwargs):
+        for selector in self.selectors:
+            partscore = selector(cls, *args, **kwargs)
+            if partscore:
+                return partscore
+        return 0
+
+class NotPredicate(Predicate):
+    """negation selector"""
+    def __init__(self, selector):
+        self.selector = selector
+
+    def __call__(self, cls, *args, **kwargs):
+        score = self.selector(cls, *args, **kwargs)
+        return int(not score)
+
+    def __str__(self):
+        return 'NOT(%s)' % self.selector
+
+
+class yes(Predicate): # pylint: disable=C0103
+    """Return the score given as parameter, with a default score of 0.5 so any
+    other selector take precedence.
+
+    Usually used for objects which can be selected whatever the context, or
+    also sometimes to add arbitrary points to a score.
+
+    Take care, `yes(0)` could be named 'no'...
+    """
+    def __init__(self, score=0.5):
+        self.score = score
+
+    def __call__(self, *args, **kwargs):
+        return self.score
+
+
+# deprecated stuff #############################################################
+
+from logilab.common.deprecation import deprecated
+
+@deprecated('[lgc 0.59] use Registry.objid class method instead')
+def classid(cls):
+    return '%s.%s' % (cls.__module__, cls.__name__)
+
+@deprecated('[lgc 0.59] use obj_registries function instead')
+def class_registries(cls, registryname):
+    return obj_registries(cls, registryname)
+
diff --git a/third_party/logilab/common/shellutils.py b/third_party/logilab/common/shellutils.py
index c713913..4e68956 100644
--- a/third_party/logilab/common/shellutils.py
+++ b/third_party/logilab/common/shellutils.py
@@ -1,4 +1,4 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of logilab-common.
@@ -18,6 +18,9 @@
 """shell/term utilities, useful to write some python scripts instead of shell
 scripts.
 """
+
+from __future__ import print_function
+
 __docformat__ = "restructuredtext en"
 
 import os
@@ -31,11 +34,15 @@
 import errno
 import string
 import random
+import subprocess
 from os.path import exists, isdir, islink, basename, join
 
+from six import string_types
+from six.moves import range, input as raw_input
+
 from logilab.common import STD_BLACKLIST, _handle_blacklist
-from logilab.common.compat import raw_input
 from logilab.common.compat import str_to_bytes
+from logilab.common.deprecation import deprecated
 
 try:
     from logilab.common.proc import ProcInfo, NoSuchProcess
@@ -113,7 +120,7 @@
             destination = join(destination, basename(source))
         try:
             _action(source, destination)
-        except OSError, ex:
+        except OSError as ex:
             raise OSError('Unable to move %r to %r (%s)' % (
                 source, destination, ex))
 
@@ -159,7 +166,7 @@
     :return:
       the list of all matching files
     """
-    if isinstance(exts, basestring):
+    if isinstance(exts, string_types):
         exts = (exts,)
     if exclude:
         def match(filename, exts):
@@ -224,20 +231,19 @@
             outfile.write(zfobj.read(name))
             outfile.close()
 
+
 class Execute:
     """This is a deadlock safe version of popen2 (no stdin), that returns
     an object with errorlevel, out and err.
     """
 
     def __init__(self, command):
-        outfile = tempfile.mktemp()
-        errfile = tempfile.mktemp()
-        self.status = os.system("( %s ) >%s 2>%s" %
-                                (command, outfile, errfile)) >> 8
-        self.out = open(outfile, "r").read()
-        self.err = open(errfile, "r").read()
-        os.remove(outfile)
-        os.remove(errfile)
+        cmd = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        self.out, self.err = cmd.communicate()
+        self.status = os.WEXITSTATUS(cmd.returncode)
+
+Execute = deprecated('Use subprocess.Popen instead')(Execute)
+
 
 def acquire_lock(lock_file, max_try=10, delay=10, max_delay=3600):
     """Acquire a lock represented by a file on the file system
@@ -253,7 +259,7 @@
             os.write(fd, str_to_bytes(str(os.getpid())) )
             os.close(fd)
             return True
-        except OSError, e:
+        except OSError as e:
             if e.errno == errno.EEXIST:
                 try:
                     fd = open(lock_file, "r")
@@ -315,9 +321,22 @@
 
     text = property(_get_text, _set_text, _del_text)
 
-    def update(self):
-        """Update the progression bar."""
-        self._current += 1
+    def update(self, offset=1, exact=False):
+        """Move FORWARD to new cursor position (cursor will never go backward).
+
+        :offset: fraction of ``size``
+
+        :exact:
+
+          - False: offset relative to current cursor position if True
+          - True: offset as an asbsolute position
+
+        """
+        if exact:
+            self._current = offset
+        else:
+            self._current += offset
+
         progress = int((float(self._current)/float(self._total))*self._size)
         if progress > self._progress:
             self._progress = progress
@@ -325,7 +344,7 @@
 
     def refresh(self):
         """Refresh the progression bar display."""
-        self._stream.write(self._fstr % ('.' * min(self._progress, self._size)) )
+        self._stream.write(self._fstr % ('=' * min(self._progress, self._size)) )
         if self._last_text_write_size or self._current_text:
             template = ' %%-%is' % (self._last_text_write_size)
             text = self._current_text
@@ -412,7 +431,7 @@
             if self._print:
                 self._print(msg)
             else:
-                print msg
+                print(msg)
             tries -= 1
         raise Exception('unable to get a sensible answer')
 
@@ -438,6 +457,6 @@
 def generate_password(length=8, vocab=string.ascii_letters + string.digits):
     """dumb password generation function"""
     pwd = ''
-    for i in xrange(length):
+    for i in range(length):
         pwd += random.choice(vocab)
     return pwd
diff --git a/third_party/logilab/common/table.py b/third_party/logilab/common/table.py
index 744bb78..2f3df69 100644
--- a/third_party/logilab/common/table.py
+++ b/third_party/logilab/common/table.py
@@ -1,4 +1,4 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of logilab-common.
@@ -16,8 +16,12 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with logilab-common.  If not, see <http://www.gnu.org/licenses/>.
 """Table management module."""
+
+from __future__ import print_function
+
 __docformat__ = "restructuredtext en"
 
+from six.moves import range
 
 class Table(object):
     """Table defines a data table with column and row names.
@@ -48,6 +52,8 @@
         else:
             return list(self) == list(other)
 
+    __hash__ = object.__hash__
+
     def __ne__(self, other):
         return not self == other
 
@@ -440,7 +446,7 @@
         # The first cell <=> an empty one
         col_names_line = [' '*col_start]
         for col_name in self.col_names:
-            col_names_line.append(col_name.encode('iso-8859-1') + ' '*5)
+            col_names_line.append(col_name + ' '*5)
         lines.append('|' + '|'.join(col_names_line) + '|')
         max_line_length = len(lines[0])
 
@@ -448,7 +454,7 @@
         for row_index, row in enumerate(self.data):
             line = []
             # First, build the row_name's cell
-            row_name = self.row_names[row_index].encode('iso-8859-1')
+            row_name = self.row_names[row_index]
             line.append(row_name + ' '*(col_start-len(row_name)))
 
             # Then, build all the table's cell for this line.
@@ -648,7 +654,7 @@
                 'table.py', 'exec'))
             self.rules.append(rule)
         except SyntaxError:
-            print "Bad Stylesheet Rule : %s [skipped]"%rule
+            print("Bad Stylesheet Rule : %s [skipped]" % rule)
 
 
     def add_rowsum_rule(self, dest_cell, row_index, start_col, end_col):
@@ -743,14 +749,14 @@
     def render_row_cell(self, row_name, table, table_style):
         """Renders the cell for 'row_id' row
         """
-        cell_value = row_name.encode('iso-8859-1')
+        cell_value = row_name
         return self._render_cell_content(cell_value, table_style, 0)
 
 
     def render_col_cell(self, col_name, table, table_style):
         """Renders the cell for 'col_id' row
         """
-        cell_value = col_name.encode('iso-8859-1')
+        cell_value = col_name
         col_index = table.col_names.index(col_name)
         return self._render_cell_content(cell_value, table_style, col_index +1)
 
diff --git a/third_party/logilab/common/tasksqueue.py b/third_party/logilab/common/tasksqueue.py
index e95a77e..ed74cf5 100644
--- a/third_party/logilab/common/tasksqueue.py
+++ b/third_party/logilab/common/tasksqueue.py
@@ -20,7 +20,8 @@
 __docformat__ = "restructuredtext en"
 
 from bisect import insort_left
-from Queue import Queue
+
+from six.moves import queue
 
 LOW = 0
 MEDIUM = 10
@@ -31,11 +32,11 @@
     'MEDIUM': MEDIUM,
     'HIGH': HIGH,
     }
-REVERSE_PRIORITY = dict((values, key) for key, values in PRIORITY.iteritems())
+REVERSE_PRIORITY = dict((values, key) for key, values in PRIORITY.items())
 
 
 
-class PrioritizedTasksQueue(Queue):
+class PrioritizedTasksQueue(queue.Queue):
 
     def _init(self, maxsize):
         """Initialize the queue representation"""
@@ -94,5 +95,7 @@
     def __eq__(self, other):
         return self.id == other.id
 
+    __hash__ = object.__hash__
+
     def merge(self, other):
         pass
diff --git a/third_party/logilab/common/testlib.py b/third_party/logilab/common/testlib.py
index da49387..31efe56 100644
--- a/third_party/logilab/common/testlib.py
+++ b/third_party/logilab/common/testlib.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of logilab-common.
@@ -36,6 +36,9 @@
 'regrtest', 'smoketest' and 'unittest'.
 
 """
+
+from __future__ import print_function
+
 __docformat__ = "restructuredtext en"
 # modified copy of some functions from test/regrtest.py from PyXml
 # disable camel case warning
@@ -52,9 +55,13 @@
 import warnings
 from shutil import rmtree
 from operator import itemgetter
-from ConfigParser import ConfigParser
-from logilab.common.deprecation import deprecated
 from itertools import dropwhile
+from inspect import isgeneratorfunction
+
+from six import string_types
+from six.moves import builtins, range, configparser, input
+
+from logilab.common.deprecation import deprecated
 
 import unittest as unittest_legacy
 if not getattr(unittest_legacy, "__package__", None):
@@ -62,31 +69,13 @@
         import unittest2 as unittest
         from unittest2 import SkipTest
     except ImportError:
-        sys.exit("You have to install python-unittest2 to use this module")
+        raise ImportError("You have to install python-unittest2 to use %s" % __name__)
 else:
     import unittest
     from unittest import SkipTest
 
-try:
-    from functools import wraps
-except ImportError:
-    def wraps(wrapped):
-        def proxy(callable):
-            callable.__name__ = wrapped.__name__
-            return callable
-        return proxy
-try:
-    from test import test_support
-except ImportError:
-    # not always available
-    class TestSupport:
-        def unload(self, test):
-            pass
-    test_support = TestSupport()
+from functools import wraps
 
-# pylint: disable=W0622
-from logilab.common.compat import any, InheritableSet, callable
-# pylint: enable=W0622
 from logilab.common.debugger import Debugger, colorize_source
 from logilab.common.decorators import cached, classproperty
 from logilab.common import textutils
@@ -97,23 +86,7 @@
 DEFAULT_PREFIXES = ('test', 'regrtest', 'smoketest', 'unittest',
                     'func', 'validation')
 
-
-if sys.version_info >= (2, 6):
-    # FIXME : this does not work as expected / breaks tests on testlib
-    # however testlib does not work on py3k for many reasons ...
-    from inspect import CO_GENERATOR
-else:
-    from compiler.consts import CO_GENERATOR
-
-if sys.version_info >= (3, 0):
-    def is_generator(function):
-        flags = function.__code__.co_flags
-        return flags & CO_GENERATOR
-
-else:
-    def is_generator(function):
-        flags = function.func_code.co_flags
-        return flags & CO_GENERATOR
+is_generator = deprecated('[lgc 0.63] use inspect.isgeneratorfunction')(isgeneratorfunction)
 
 # used by unittest to count the number of relevant levels in the traceback
 __unittest = 1
@@ -122,6 +95,21 @@
 def with_tempdir(callable):
     """A decorator ensuring no temporary file left when the function return
     Work only for temporary file create with the tempfile module"""
+    if isgeneratorfunction(callable):
+        def proxy(*args, **kwargs):
+            old_tmpdir = tempfile.gettempdir()
+            new_tmpdir = tempfile.mkdtemp(prefix="temp-lgc-")
+            tempfile.tempdir = new_tmpdir
+            try:
+                for x in callable(*args, **kwargs):
+                    yield x
+            finally:
+                try:
+                    rmtree(new_tmpdir, ignore_errors=True)
+                finally:
+                    tempfile.tempdir = old_tmpdir
+        return proxy
+
     @wraps(callable)
     def proxy(*args, **kargs):
 
@@ -190,27 +178,27 @@
     else:
         while True:
             testindex = 0
-            print "Choose a test to debug:"
+            print("Choose a test to debug:")
             # order debuggers in the same way than errors were printed
-            print "\n".join(['\t%s : %s' % (i, descr) for i, (_, descr)
-                in enumerate(descrs)])
-            print "Type 'exit' (or ^D) to quit"
-            print
+            print("\n".join(['\t%s : %s' % (i, descr) for i, (_, descr)
+                  in enumerate(descrs)]))
+            print("Type 'exit' (or ^D) to quit")
+            print()
             try:
-                todebug = raw_input('Enter a test name: ')
+                todebug = input('Enter a test name: ')
                 if todebug.strip().lower() == 'exit':
-                    print
+                    print()
                     break
                 else:
                     try:
                         testindex = int(todebug)
                         debugger = debuggers[descrs[testindex][0]]
                     except (ValueError, IndexError):
-                        print "ERROR: invalid test number %r" % (todebug, )
+                        print("ERROR: invalid test number %r" % (todebug, ))
                     else:
                         debugger.start()
             except (EOFError, KeyboardInterrupt):
-                print
+                print()
                 break
 
 
@@ -364,7 +352,7 @@
     if tearDownModule is not None:
         try:
             tearDownModule()
-        except Exception, e:
+        except Exception as e:
             if isinstance(result, _DebugResult):
                 raise
             errorName = 'tearDownModule (%s)' % previousModule
@@ -392,7 +380,7 @@
     if setUpModule is not None:
         try:
             setUpModule()
-        except Exception, e:
+        except Exception as e:
             if isinstance(result, _DebugResult):
                 raise
             result._moduleSetUpFailed = True
@@ -448,7 +436,7 @@
         instance.name = name
         return instance
 
-class Tags(InheritableSet): # 2.4 compat
+class Tags(set):
     """A set of tag able validate an expression"""
 
     def __init__(self, *tags, **kwargs):
@@ -456,7 +444,7 @@
         if kwargs:
            raise TypeError("%s are an invalid keyword argument for this function" % kwargs.keys())
 
-        if len(tags) == 1 and not isinstance(tags[0], basestring):
+        if len(tags) == 1 and not isinstance(tags[0], string_types):
             tags = tags[0]
         super(Tags, self).__init__(tags, **kwargs)
 
@@ -484,14 +472,8 @@
 
     def __init__(self, methodName='runTest'):
         super(TestCase, self).__init__(methodName)
-        # internal API changed in python2.4 and needed by DocTestCase
-        if sys.version_info >= (2, 4):
-            self.__exc_info = sys.exc_info
-            self.__testMethodName = self._testMethodName
-        else:
-            # let's give easier access to _testMethodName to every subclasses
-            if hasattr(self, "__testMethodName"):
-                self._testMethodName = self.__testMethodName
+        self.__exc_info = sys.exc_info
+        self.__testMethodName = self._testMethodName
         self._current_test_descr = None
         self._options_ = None
 
@@ -533,6 +515,14 @@
             func(*args, **kwargs)
         except (KeyboardInterrupt, SystemExit):
             raise
+        except unittest.SkipTest as e:
+            if hasattr(result, 'addSkip'):
+                result.addSkip(self, str(e))
+            else:
+                warnings.warn("TestResult has no addSkip method, skips not reported",
+                              RuntimeWarning, 2)
+                result.addSuccess(self)
+            return False
         except:
             result.addError(self, self.__exc_info())
             return False
@@ -559,13 +549,23 @@
         # if result.cvg:
         #     result.cvg.start()
         testMethod = self._get_test_method()
+        if (getattr(self.__class__, "__unittest_skip__", False) or
+            getattr(testMethod, "__unittest_skip__", False)):
+            # If the class or method was skipped.
+            try:
+                skip_why = (getattr(self.__class__, '__unittest_skip_why__', '')
+                            or getattr(testMethod, '__unittest_skip_why__', ''))
+                self._addSkip(result, skip_why)
+            finally:
+                result.stopTest(self)
+            return
         if runcondition and not runcondition(testMethod):
             return # test is skipped
         result.startTest(self)
         try:
             if not self.quiet_run(result, self.setUp):
                 return
-            generative = is_generator(testMethod.im_func)
+            generative = isgeneratorfunction(testMethod)
             # generative tests
             if generative:
                 self._proceed_generative(result, testMethod,
@@ -587,10 +587,11 @@
                             restartfile.write(descr+os.linesep)
                         finally:
                             restartfile.close()
-                    except Exception, ex:
-                        print >> sys.__stderr__, "Error while saving \
-succeeded test into", osp.join(os.getcwd(), FILE_RESTART)
-                        raise ex
+                    except Exception:
+                        print("Error while saving succeeded test into",
+                              osp.join(os.getcwd(), FILE_RESTART),
+                              file=sys.__stderr__)
+                        raise
                 result.addSuccess(self)
         finally:
             # if result.cvg:
@@ -647,10 +648,10 @@
             return 1
         except KeyboardInterrupt:
             raise
-        except InnerTestSkipped, e:
+        except InnerTestSkipped as e:
             result.addSkip(self, e)
             return 1
-        except SkipTest, e:
+        except SkipTest as e:
             result.addSkip(self, e)
             return 0
         except:
@@ -704,7 +705,7 @@
                 base = ''
             self.fail(base + '\n'.join(msgs))
 
-    @deprecated('Please use assertItemsEqual instead.')
+    @deprecated('Please use assertCountEqual instead.')
     def assertUnorderedIterableEquals(self, got, expected, msg=None):
         """compares two iterable and shows difference between both
 
@@ -826,10 +827,10 @@
             parser = make_parser()
             try:
                 parser.parse(stream)
-            except SAXParseException, ex:
+            except SAXParseException as ex:
                 if msg is None:
                     stream.seek(0)
-                    for _ in xrange(ex.getLineNumber()):
+                    for _ in range(ex.getLineNumber()):
                         line = stream.readline()
                     pointer = ('' * (ex.getLineNumber() - 1)) + '^'
                     msg = 'XML stream not well formed: %s\n%s%s' % (ex, line, pointer)
@@ -867,7 +868,7 @@
             ParseError = ExpatError
         try:
             parse(data)
-        except (ExpatError, ParseError), ex:
+        except (ExpatError, ParseError) as ex:
             if msg is None:
                 if hasattr(data, 'readlines'): #file like object
                     data.seek(0)
@@ -888,11 +889,11 @@
                     line_number_length = len('%i' % end)
                     line_pattern = " %%%ii: %%s" % line_number_length
 
-                    for line_no in xrange(start, ex.lineno):
+                    for line_no in range(start, ex.lineno):
                         context_lines.append(line_pattern % (line_no, lines[line_no-1]))
                     context_lines.append(line_pattern % (ex.lineno, lines[ex.lineno-1]))
                     context_lines.append('%s^\n' % (' ' * (1 + line_number_length + 2 +ex.offset)))
-                    for line_no in xrange(ex.lineno+1, end+1):
+                    for line_no in range(ex.lineno+1, end+1):
                         context_lines.append(line_pattern % (line_no, lines[line_no-1]))
 
                 rich_context = ''.join(context_lines)
@@ -920,7 +921,7 @@
                 self.fail( "tuple %s has %i children%s (%i expected)"%(tup,
                     len(tup[2]),
                         ('', 's')[len(tup[2])>1], len(element)))
-            for index in xrange(len(tup[2])):
+            for index in range(len(tup[2])):
                 self.assertXMLEqualsTuple(element[index], tup[2][index])
         #check text
         if element.text or len(tup)>3:
@@ -950,7 +951,7 @@
     def assertTextEquals(self, text1, text2, junk=None,
             msg_prefix='Text differ', striplines=False):
         """compare two multiline strings (using difflib and splitlines())
-        
+
         :param text1: a Python BaseString
         :param text2: a second Python Basestring
         :param junk: List of Caracters
@@ -958,9 +959,9 @@
         :param striplines: Boolean to trigger line stripping before comparing
         """
         msg = []
-        if not isinstance(text1, basestring):
+        if not isinstance(text1, string_types):
             msg.append('text1 is not a string (%s)'%(type(text1)))
-        if not isinstance(text2, basestring):
+        if not isinstance(text2, string_types):
             msg.append('text2 is not a string (%s)'%(type(text2)))
         if msg:
             self.fail('\n'.join(msg))
@@ -1016,13 +1017,13 @@
         ipath_a, idirs_a, ifiles_a = data_a = None, None, None
         while True:
             try:
-                ipath_a, idirs_a, ifiles_a = datas_a = iter_a.next()
+                ipath_a, idirs_a, ifiles_a = datas_a = next(iter_a)
                 partial_iter = False
-                ipath_b, idirs_b, ifiles_b = datas_b = iter_b.next()
+                ipath_b, idirs_b, ifiles_b = datas_b = next(iter_b)
                 partial_iter = True
 
 
-                self.assert_(ipath_a == ipath_b,
+                self.assertTrue(ipath_a == ipath_b,
                     "unexpected %s in %s while looking %s from %s" %
                     (ipath_a, path_a, ipath_b, path_b))
 
@@ -1040,7 +1041,7 @@
 
 
                 msgs = [ "%s: %s"% (name, items)
-                    for name, items in errors.iteritems() if items]
+                    for name, items in errors.items() if items]
 
                 if msgs:
                     msgs.insert(0, "%s and %s differ :" % (
@@ -1080,9 +1081,9 @@
                 msg = '%r is not an instance of %s but of %s'
             msg = msg % (obj, klass, type(obj))
         if strict:
-            self.assert_(obj.__class__ is klass, msg)
+            self.assertTrue(obj.__class__ is klass, msg)
         else:
-            self.assert_(isinstance(obj, klass), msg)
+            self.assertTrue(isinstance(obj, klass), msg)
 
     @deprecated('Please use assertIsNone instead.')
     def assertNone(self, obj, msg=None):
@@ -1092,14 +1093,14 @@
         """
         if msg is None:
             msg = "reference to %r when None expected"%(obj,)
-        self.assert_( obj is None, msg )
+        self.assertTrue( obj is None, msg )
 
     @deprecated('Please use assertIsNotNone instead.')
     def assertNotNone(self, obj, msg=None):
         """assert obj is not None"""
         if msg is None:
             msg = "unexpected reference to None"
-        self.assert_( obj is not None, msg )
+        self.assertTrue( obj is not None, msg )
 
     @deprecated('Non-standard. Please use assertAlmostEqual instead.')
     def assertFloatAlmostEquals(self, obj, other, prec=1e-5,
@@ -1117,7 +1118,7 @@
             msg = "%r != %r" % (obj, other)
         if relative:
             prec = prec*math.fabs(obj)
-        self.assert_(math.fabs(obj - other) < prec, msg)
+        self.assertTrue(math.fabs(obj - other) < prec, msg)
 
     def failUnlessRaises(self, excClass, callableObj=None, *args, **kwargs):
         """override default failUnlessRaises method to return the raised
@@ -1146,7 +1147,7 @@
             return _assert(excClass, callableObj, *args, **kwargs)
         try:
             callableObj(*args, **kwargs)
-        except excClass, exc:
+        except excClass as exc:
             class ProxyException:
                 def __init__(self, obj):
                     self._obj = obj
@@ -1166,6 +1167,16 @@
 
     assertRaises = failUnlessRaises
 
+    if sys.version_info >= (3,2):
+        assertItemsEqual = unittest.TestCase.assertCountEqual
+    else:
+        assertCountEqual = unittest.TestCase.assertItemsEqual
+        if sys.version_info < (2,7):
+            def assertIsNotNone(self, value, *args, **kwargs):
+                self.assertNotEqual(None, value, *args, **kwargs)
+
+TestCase.assertItemsEqual = deprecated('assertItemsEqual is deprecated, use assertCountEqual')(
+    TestCase.assertItemsEqual)
 
 import doctest
 
@@ -1184,10 +1195,6 @@
     def _get_test(self, obj, name, module, globs, source_lines):
         """override default _get_test method to be able to skip tests
         according to skipped attribute's value
-
-        Note: Python (<=2.4) use a _name_filter which could be used for that
-              purpose but it's no longer available in 2.5
-              Python 2.5 seems to have a [SKIP] flag
         """
         if getattr(obj, '__name__', '') in self.skipped:
             return None
@@ -1205,16 +1212,19 @@
         # pylint: disable=W0613
         try:
             finder = DocTestFinder(skipped=self.skipped)
-            if sys.version_info >= (2, 4):
-                suite = doctest.DocTestSuite(self.module, test_finder=finder)
-                if sys.version_info >= (2, 5):
-                    # XXX iirk
-                    doctest.DocTestCase._TestCase__exc_info = sys.exc_info
-            else:
-                suite = doctest.DocTestSuite(self.module)
+            suite = doctest.DocTestSuite(self.module, test_finder=finder)
+            # XXX iirk
+            doctest.DocTestCase._TestCase__exc_info = sys.exc_info
         except AttributeError:
             suite = SkippedSuite()
-        return suite.run(result)
+        # doctest may gork the builtins dictionnary
+        # This happen to the "_" entry used by gettext
+        old_builtins = builtins.__dict__.copy()
+        try:
+            return suite.run(result)
+        finally:
+            builtins.__dict__.clear()
+            builtins.__dict__.update(old_builtins)
     run = __call__
 
     def test(self):
@@ -1242,11 +1252,11 @@
         """ignore quit"""
 
 
-class MockConfigParser(ConfigParser):
+class MockConfigParser(configparser.ConfigParser):
     """fake ConfigParser.ConfigParser"""
 
     def __init__(self, options):
-        ConfigParser.__init__(self)
+        configparser.ConfigParser.__init__(self)
         for section, pairs in options.iteritems():
             self.add_section(section)
             for key, value in pairs.iteritems():
diff --git a/third_party/logilab/common/textutils.py b/third_party/logilab/common/textutils.py
index bdeed41..9046f97 100644
--- a/third_party/logilab/common/textutils.py
+++ b/third_party/logilab/common/textutils.py
@@ -284,11 +284,14 @@
     dict of {'key': 'value'}. When the same key is encountered multiple time,
     value is turned into a list containing all values.
 
-    >>> text_to_dict('''multiple=1
+    >>> d = text_to_dict('''multiple=1
     ... multiple= 2
     ... single =3
     ... ''')
-    {'single': '3', 'multiple': ['1', '2']}
+    >>> d['single']
+    '3'
+    >>> d['multiple']
+    ['1', '2']
 
     """
     res = {}
@@ -313,6 +316,8 @@
 __VALUE_URE = r'-?(([0-9]+\.[0-9]*)|((0x?)?[0-9]+))'
 __UNITS_URE = r'[a-zA-Z]+'
 _VALUE_RE = re.compile(r'(?P<value>%s)(?P<unit>%s)?'%(__VALUE_URE, __UNITS_URE))
+_VALIDATION_RE = re.compile(r'^((%s)(%s))*(%s)?$' % (__VALUE_URE, __UNITS_URE,
+                                                    __VALUE_URE))
 
 BYTE_UNITS = {
     "b": 1,
@@ -352,12 +357,12 @@
     """
     if inter is None:
         inter = final
-    string = _BLANK_RE.sub('', string)
+    fstring = _BLANK_RE.sub('', string)
+    if not (fstring and _VALIDATION_RE.match(fstring)):
+        raise ValueError("Invalid unit string: %r." % string)
     values = []
-    for match in value_reg.finditer(string):
+    for match in value_reg.finditer(fstring):
         dic = match.groupdict()
-        #import sys
-        #print >> sys.stderr, dic
         lit, unit = dic["value"], dic.get("unit")
         value = inter(lit)
         if unit is not None:
diff --git a/third_party/logilab/common/umessage.py b/third_party/logilab/common/umessage.py
index 85d564c..a5e4799 100644
--- a/third_party/logilab/common/umessage.py
+++ b/third_party/logilab/common/umessage.py
@@ -1,4 +1,4 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of logilab-common.
@@ -15,12 +15,8 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with logilab-common.  If not, see <http://www.gnu.org/licenses/>.
-"""Unicode email support (extends email from stdlib).
+"""Unicode email support (extends email from stdlib)"""
 
-
-
-
-"""
 __docformat__ = "restructuredtext en"
 
 import email
@@ -48,9 +44,13 @@
     for decoded, charset in decode_header(string):
         if not charset :
             charset = 'iso-8859-15'
-        parts.append(unicode(decoded, charset, 'replace'))
+        parts.append(decoded.decode(charset, 'replace'))
 
-    return u' '.join(parts)
+    if sys.version_info < (3, 3):
+        # decoding was non-RFC compliant wrt to whitespace handling
+        # see http://bugs.python.org/issue1079
+        return u' '.join(parts)
+    return u''.join(parts)
 
 def message_from_file(fd):
     try:
@@ -79,27 +79,13 @@
             return decode_QP(value)
         return value
 
+    def __getitem__(self, header):
+        return self.get(header)
+
     def get_all(self, header, default=()):
         return [decode_QP(val) for val in self.message.get_all(header, default)
                 if val is not None]
 
-    def get_payload(self, index=None, decode=False):
-        message = self.message
-        if index is None:
-            payload = message.get_payload(index, decode)
-            if isinstance(payload, list):
-                return [UMessage(msg) for msg in payload]
-            if message.get_content_maintype() != 'text':
-                return payload
-
-            charset = message.get_content_charset() or 'iso-8859-1'
-            if search_function(charset) is None:
-                charset = 'iso-8859-1'
-            return unicode(payload or '', charset, "replace")
-        else:
-            payload = UMessage(message.get_payload(index, decode))
-        return payload
-
     def is_multipart(self):
         return self.message.is_multipart()
 
@@ -110,20 +96,61 @@
         for part in self.message.walk():
             yield UMessage(part)
 
-    def get_content_maintype(self):
-        return unicode(self.message.get_content_maintype())
+    if sys.version_info < (3, 0):
 
-    def get_content_type(self):
-        return unicode(self.message.get_content_type())
+        def get_payload(self, index=None, decode=False):
+            message = self.message
+            if index is None:
+                payload = message.get_payload(index, decode)
+                if isinstance(payload, list):
+                    return [UMessage(msg) for msg in payload]
+                if message.get_content_maintype() != 'text':
+                    return payload
 
-    def get_filename(self, failobj=None):
-        value = self.message.get_filename(failobj)
-        if value is failobj:
-            return value
-        try:
-            return unicode(value)
-        except UnicodeDecodeError:
-            return u'error decoding filename'
+                charset = message.get_content_charset() or 'iso-8859-1'
+                if search_function(charset) is None:
+                    charset = 'iso-8859-1'
+                return unicode(payload or '', charset, "replace")
+            else:
+                payload = UMessage(message.get_payload(index, decode))
+            return payload
+
+        def get_content_maintype(self):
+            return unicode(self.message.get_content_maintype())
+
+        def get_content_type(self):
+            return unicode(self.message.get_content_type())
+
+        def get_filename(self, failobj=None):
+            value = self.message.get_filename(failobj)
+            if value is failobj:
+                return value
+            try:
+                return unicode(value)
+            except UnicodeDecodeError:
+                return u'error decoding filename'
+
+    else:
+
+        def get_payload(self, index=None, decode=False):
+            message = self.message
+            if index is None:
+                payload = message.get_payload(index, decode)
+                if isinstance(payload, list):
+                    return [UMessage(msg) for msg in payload]
+                return payload
+            else:
+                payload = UMessage(message.get_payload(index, decode))
+            return payload
+
+        def get_content_maintype(self):
+            return self.message.get_content_maintype()
+
+        def get_content_type(self):
+            return self.message.get_content_type()
+
+        def get_filename(self, failobj=None):
+            return self.message.get_filename(failobj)
 
     # other convenience methods ###############################################
 
diff --git a/third_party/logilab/common/ureports/__init__.py b/third_party/logilab/common/ureports/__init__.py
index dcffcfa..d76ebe5 100644
--- a/third_party/logilab/common/ureports/__init__.py
+++ b/third_party/logilab/common/ureports/__init__.py
@@ -20,13 +20,11 @@
 A way to create simple reports using python objects, primarily designed to be
 formatted as text and html.
 """
-from __future__ import generators
 __docformat__ = "restructuredtext en"
 
 import sys
-from cStringIO import StringIO
-from StringIO import StringIO as UStringIO
 
+from logilab.common.compat import StringIO
 from logilab.common.textutils import linesep
 
 
@@ -44,13 +42,13 @@
     """
     for child in layout.children:
         if isinstance(child, Title):
-            return ' '.join([node.data for node in get_nodes(child, Text)])
+            return u' '.join([node.data for node in get_nodes(child, Text)])
 
 def build_summary(layout, level=1):
     """make a summary for the report, including X level"""
     assert level > 0
     level -= 1
-    summary = List(klass='summary')
+    summary = List(klass=u'summary')
     for child in layout.children:
         if not isinstance(child, Section):
             continue
@@ -59,7 +57,7 @@
             continue
         if not child.id:
             child.id = label.replace(' ', '-')
-        node = Link('#'+child.id, label=label or child.id)
+        node = Link(u'#'+child.id, label=label or child.id)
         # FIXME: Three following lines produce not very compliant
         # docbook: there are some useless <para><para>. They might be
         # replaced by the three commented lines but this then produces
@@ -101,7 +99,7 @@
         for child in getattr(layout, 'children', ()):
             child.accept(self)
 
-    def writeln(self, string=''):
+    def writeln(self, string=u''):
         """write a line in the output buffer"""
         self.write(string + linesep)
 
@@ -134,7 +132,7 @@
             result[-1].append(cell)
         # fill missing cells
         while len(result[-1]) < cols:
-            result[-1].append('')
+            result[-1].append(u'')
         return result
 
     def compute_content(self, layout):
@@ -149,7 +147,7 @@
                 stream.write(data)
             except UnicodeEncodeError:
                 stream.write(data.encode(self.encoding))
-        def writeln(data=''):
+        def writeln(data=u''):
             try:
                 stream.write(data+linesep)
             except UnicodeEncodeError:
@@ -158,7 +156,7 @@
         self.writeln = writeln
         self.__compute_funcs.append((write, writeln))
         for child in layout.children:
-            stream = UStringIO()
+            stream = StringIO()
             child.accept(self)
             yield stream.getvalue()
         self.__compute_funcs.pop()
diff --git a/third_party/logilab/common/ureports/docbook_writer.py b/third_party/logilab/common/ureports/docbook_writer.py
index e75cbe0..857068c 100644
--- a/third_party/logilab/common/ureports/docbook_writer.py
+++ b/third_party/logilab/common/ureports/docbook_writer.py
@@ -16,9 +16,10 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with logilab-common.  If not, see <http://www.gnu.org/licenses/>.
 """HTML formatting drivers for ureports"""
-from __future__ import generators
 __docformat__ = "restructuredtext en"
 
+from six.moves import range
+
 from logilab.common.ureports import HTMLWriter
 
 class DocbookWriter(HTMLWriter):
diff --git a/third_party/logilab/common/ureports/html_writer.py b/third_party/logilab/common/ureports/html_writer.py
index 1d09503..eba34ea 100644
--- a/third_party/logilab/common/ureports/html_writer.py
+++ b/third_party/logilab/common/ureports/html_writer.py
@@ -20,6 +20,8 @@
 
 from cgi import escape
 
+from six.moves import range
+
 from logilab.common.ureports import BaseWriter
 
 
@@ -32,100 +34,100 @@
 
     def handle_attrs(self, layout):
         """get an attribute string from layout member attributes"""
-        attrs = ''
+        attrs = u''
         klass = getattr(layout, 'klass', None)
         if klass:
-            attrs += ' class="%s"' % klass
+            attrs += u' class="%s"' % klass
         nid = getattr(layout, 'id', None)
         if nid:
-            attrs += ' id="%s"' % nid
+            attrs += u' id="%s"' % nid
         return attrs
 
     def begin_format(self, layout):
         """begin to format a layout"""
         super(HTMLWriter, self).begin_format(layout)
         if self.snippet is None:
-            self.writeln('<html>')
-            self.writeln('<body>')
+            self.writeln(u'<html>')
+            self.writeln(u'<body>')
 
     def end_format(self, layout):
         """finished to format a layout"""
         if self.snippet is None:
-            self.writeln('</body>')
-            self.writeln('</html>')
+            self.writeln(u'</body>')
+            self.writeln(u'</html>')
 
 
     def visit_section(self, layout):
         """display a section as html, using div + h[section level]"""
         self.section += 1
-        self.writeln('<div%s>' % self.handle_attrs(layout))
+        self.writeln(u'<div%s>' % self.handle_attrs(layout))
         self.format_children(layout)
-        self.writeln('</div>')
+        self.writeln(u'</div>')
         self.section -= 1
 
     def visit_title(self, layout):
         """display a title using <hX>"""
-        self.write('<h%s%s>' % (self.section, self.handle_attrs(layout)))
+        self.write(u'<h%s%s>' % (self.section, self.handle_attrs(layout)))
         self.format_children(layout)
-        self.writeln('</h%s>' % self.section)
+        self.writeln(u'</h%s>' % self.section)
 
     def visit_table(self, layout):
         """display a table as html"""
-        self.writeln('<table%s>' % self.handle_attrs(layout))
+        self.writeln(u'<table%s>' % self.handle_attrs(layout))
         table_content = self.get_table_content(layout)
         for i in range(len(table_content)):
             row = table_content[i]
             if i == 0 and layout.rheaders:
-                self.writeln('<tr class="header">')
+                self.writeln(u'<tr class="header">')
             elif i+1 == len(table_content) and layout.rrheaders:
-                self.writeln('<tr class="header">')
+                self.writeln(u'<tr class="header">')
             else:
-                self.writeln('<tr class="%s">' % (i%2 and 'even' or 'odd'))
+                self.writeln(u'<tr class="%s">' % (i%2 and 'even' or 'odd'))
             for j in range(len(row)):
-                cell = row[j] or '&#160;'
+                cell = row[j] or u'&#160;'
                 if (layout.rheaders and i == 0) or \
                    (layout.cheaders and j == 0) or \
                    (layout.rrheaders and i+1 == len(table_content)) or \
                    (layout.rcheaders and j+1 == len(row)):
-                    self.writeln('<th>%s</th>' % cell)
+                    self.writeln(u'<th>%s</th>' % cell)
                 else:
-                    self.writeln('<td>%s</td>' % cell)
-            self.writeln('</tr>')
-        self.writeln('</table>')
+                    self.writeln(u'<td>%s</td>' % cell)
+            self.writeln(u'</tr>')
+        self.writeln(u'</table>')
 
     def visit_list(self, layout):
         """display a list as html"""
-        self.writeln('<ul%s>' % self.handle_attrs(layout))
+        self.writeln(u'<ul%s>' % self.handle_attrs(layout))
         for row in list(self.compute_content(layout)):
-            self.writeln('<li>%s</li>' % row)
-        self.writeln('</ul>')
+            self.writeln(u'<li>%s</li>' % row)
+        self.writeln(u'</ul>')
 
     def visit_paragraph(self, layout):
         """display links (using <p>)"""
-        self.write('<p>')
+        self.write(u'<p>')
         self.format_children(layout)
-        self.write('</p>')
+        self.write(u'</p>')
 
     def visit_span(self, layout):
         """display links (using <p>)"""
-        self.write('<span%s>' % self.handle_attrs(layout))
+        self.write(u'<span%s>' % self.handle_attrs(layout))
         self.format_children(layout)
-        self.write('</span>')
+        self.write(u'</span>')
 
     def visit_link(self, layout):
         """display links (using <a>)"""
-        self.write(' <a href="%s"%s>%s</a>' % (layout.url,
-                                               self.handle_attrs(layout),
-                                               layout.label))
+        self.write(u' <a href="%s"%s>%s</a>' % (layout.url,
+                                                self.handle_attrs(layout),
+                                                layout.label))
     def visit_verbatimtext(self, layout):
         """display verbatim text (using <pre>)"""
-        self.write('<pre>')
-        self.write(layout.data.replace('&', '&amp;').replace('<', '&lt;'))
-        self.write('</pre>')
+        self.write(u'<pre>')
+        self.write(layout.data.replace(u'&', u'&amp;').replace(u'<', u'&lt;'))
+        self.write(u'</pre>')
 
     def visit_text(self, layout):
         """add some text"""
         data = layout.data
         if layout.escaped:
-            data = data.replace('&', '&amp;').replace('<', '&lt;')
+            data = data.replace(u'&', u'&amp;').replace(u'<', u'&lt;')
         self.write(data)
diff --git a/third_party/logilab/common/ureports/nodes.py b/third_party/logilab/common/ureports/nodes.py
index d63b582..a9585b3 100644
--- a/third_party/logilab/common/ureports/nodes.py
+++ b/third_party/logilab/common/ureports/nodes.py
@@ -23,6 +23,8 @@
 
 from logilab.common.tree import VNode
 
+from six import string_types
+
 class BaseComponent(VNode):
     """base report component
 
@@ -79,7 +81,7 @@
         super(Text, self).__init__(**kwargs)
         #if isinstance(data, unicode):
         #    data = data.encode('ascii')
-        assert isinstance(data, (str, unicode)), data.__class__
+        assert isinstance(data, string_types), data.__class__
         self.escaped = escaped
         self.data = data
 
diff --git a/third_party/logilab/common/ureports/text_writer.py b/third_party/logilab/common/ureports/text_writer.py
index 04c8f26..c87613c 100644
--- a/third_party/logilab/common/ureports/text_writer.py
+++ b/third_party/logilab/common/ureports/text_writer.py
@@ -16,14 +16,19 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with logilab-common.  If not, see <http://www.gnu.org/licenses/>.
 """Text formatting drivers for ureports"""
+
+from __future__ import print_function
+
 __docformat__ = "restructuredtext en"
 
+from six.moves import range
+
 from logilab.common.textutils import linesep
 from logilab.common.ureports import BaseWriter
 
 
-TITLE_UNDERLINES = ['', '=', '-', '`', '.', '~', '^']
-BULLETS = ['*', '-']
+TITLE_UNDERLINES = [u'', u'=', u'-', u'`', u'.', u'~', u'^']
+BULLETS = [u'*', u'-']
 
 class TextWriter(BaseWriter):
     """format layouts as text
@@ -43,18 +48,18 @@
         if self.pending_urls:
             self.writeln()
             for label, url in self.pending_urls:
-                self.writeln('.. _`%s`: %s' % (label, url))
+                self.writeln(u'.. _`%s`: %s' % (label, url))
             self.pending_urls = []
         self.section -= 1
         self.writeln()
 
     def visit_title(self, layout):
-        title = ''.join(list(self.compute_content(layout)))
+        title = u''.join(list(self.compute_content(layout)))
         self.writeln(title)
         try:
             self.writeln(TITLE_UNDERLINES[self.section] * len(title))
         except IndexError:
-            print "FIXME TITLE TOO DEEP. TURNING TITLE INTO TEXT"
+            print("FIXME TITLE TOO DEEP. TURNING TITLE INTO TEXT")
 
     def visit_paragraph(self, layout):
         """enter a paragraph"""
@@ -83,19 +88,19 @@
     def default_table(self, layout, table_content, cols_width):
         """format a table"""
         cols_width = [size+1 for size in cols_width]
-        format_strings = ' '.join(['%%-%ss'] * len(cols_width))
+        format_strings = u' '.join([u'%%-%ss'] * len(cols_width))
         format_strings = format_strings % tuple(cols_width)
         format_strings = format_strings.split(' ')
-        table_linesep = '\n+' + '+'.join(['-'*w for w in cols_width]) + '+\n'
-        headsep = '\n+' + '+'.join(['='*w for w in cols_width]) + '+\n'
+        table_linesep = u'\n+' + u'+'.join([u'-'*w for w in cols_width]) + u'+\n'
+        headsep = u'\n+' + u'+'.join([u'='*w for w in cols_width]) + u'+\n'
         # FIXME: layout.cheaders
         self.write(table_linesep)
         for i in range(len(table_content)):
-            self.write('|')
+            self.write(u'|')
             line = table_content[i]
             for j in range(len(line)):
                 self.write(format_strings[j] % line[j])
-                self.write('|')
+                self.write(u'|')
             if i == 0 and layout.rheaders:
                 self.write(headsep)
             else:
@@ -104,7 +109,7 @@
     def field_table(self, layout, table_content, cols_width):
         """special case for field table"""
         assert layout.cols == 2
-        format_string = '%s%%-%ss: %%s' % (linesep, cols_width[0])
+        format_string = u'%s%%-%ss: %%s' % (linesep, cols_width[0])
         for field, value in table_content:
             self.write(format_string % (field, value))
 
@@ -115,14 +120,14 @@
         indent = '  ' * self.list_level
         self.list_level += 1
         for child in layout.children:
-            self.write('%s%s%s ' % (linesep, indent, bullet))
+            self.write(u'%s%s%s ' % (linesep, indent, bullet))
             child.accept(self)
         self.list_level -= 1
 
     def visit_link(self, layout):
         """add a hyperlink"""
         if layout.label != layout.url:
-            self.write('`%s`_' % layout.label)
+            self.write(u'`%s`_' % layout.label)
             self.pending_urls.append( (layout.label, layout.url) )
         else:
             self.write(layout.url)
@@ -130,11 +135,11 @@
     def visit_verbatimtext(self, layout):
         """display a verbatim layout as text (so difficult ;)
         """
-        self.writeln('::\n')
+        self.writeln(u'::\n')
         for line in layout.data.splitlines():
-            self.writeln('    ' + line)
+            self.writeln(u'    ' + line)
         self.writeln()
 
     def visit_text(self, layout):
         """add some text"""
-        self.write(layout.data)
+        self.write(u'%s' % layout.data)
diff --git a/third_party/logilab/common/urllib2ext.py b/third_party/logilab/common/urllib2ext.py
index 08797a4..339aec0 100644
--- a/third_party/logilab/common/urllib2ext.py
+++ b/third_party/logilab/common/urllib2ext.py
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
 import logging
 import urllib2
 
@@ -62,7 +64,7 @@
                 if result < 1:
                     raise GssapiAuthError("HTTPGssapiAuthHandler: step 2 failed with %d" % result)
             return server_response
-        except GssapiAuthError, exc:
+        except GssapiAuthError as exc:
             logging.error(repr(exc))
         finally:
             self.clean_context()
@@ -84,4 +86,4 @@
     # test with url sys.argv[1]
     h = HTTPGssapiAuthHandler()
     response = urllib2.build_opener(h, ch).open(sys.argv[1])
-    print '\nresponse: %s\n--------------\n' % response.code, response.info()
+    print('\nresponse: %s\n--------------\n' % response.code, response.info())
diff --git a/third_party/logilab/common/visitor.py b/third_party/logilab/common/visitor.py
index 802d2be..ed2b70f 100644
--- a/third_party/logilab/common/visitor.py
+++ b/third_party/logilab/common/visitor.py
@@ -35,12 +35,14 @@
             filter_func = no_filter
         self._list = list_func(node, filter_func)
 
-    def next(self):
+    def __next__(self):
         try:
             return self._list.pop(0)
         except :
             return None
 
+    next = __next__
+
 # Base Visitor ################################################################
 class Visitor(object):
 
@@ -61,10 +63,10 @@
 
     def _visit(self, node):
         iterator = self._get_iterator(node)
-        n = iterator.next()
+        n = next(iterator)
         while n:
             result = n.accept(self)
-            n = iterator.next()
+            n = next(iterator)
         return result
 
     def _get_iterator(self, node):
diff --git a/third_party/oauth2client/MODIFICATIONS.diff b/third_party/oauth2client/MODIFICATIONS.diff
index 2dfb0da..7490d91 100644
--- a/third_party/oauth2client/MODIFICATIONS.diff
+++ b/third_party/oauth2client/MODIFICATIONS.diff
@@ -14,7 +14,7 @@
  import time
  import urllib
  import urlparse
- 
+
 -from oauth2client import GOOGLE_AUTH_URI
 -from oauth2client import GOOGLE_REVOKE_URI
 -from oauth2client import GOOGLE_TOKEN_URI
@@ -25,7 +25,7 @@
 +from . import GOOGLE_TOKEN_URI
 +from . import util
 +from .anyjson import simplejson
- 
+
  HAS_OPENSSL = False
  HAS_CRYPTO = False
  try:
@@ -34,3 +34,33 @@
    HAS_CRYPTO = True
    if crypt.OpenSSLVerifier is not None:
      HAS_OPENSSL = True
+diff --git a/third_party/oauth2client/locked_file.py b/third_party/oauth2client/locked_file.py
+index 31514dc..858b702 100644
+--- a/third_party/oauth2client/locked_file.py
++++ b/third_party/oauth2client/locked_file.py
+@@ -35,7 +35,7 @@ import logging
+ import os
+ import time
+
+-from oauth2client import util
++from . import util
+
+ logger = logging.getLogger(__name__)
+
+diff --git a/third_party/oauth2client/multistore_file.py b/third_party/oauth2client/multistore_file.py
+index ce7a519..ea89027 100644
+--- a/third_party/oauth2client/multistore_file.py
++++ b/third_party/oauth2client/multistore_file.py
+@@ -50,9 +50,9 @@ import os
+ import threading
+
+ from anyjson import simplejson
+-from oauth2client.client import Storage as BaseStorage
+-from oauth2client.client import Credentials
+-from oauth2client import util
++from .client import Storage as BaseStorage
++from .client import Credentials
++from . import util
+ from locked_file import LockedFile
+
+ logger = logging.getLogger(__name__)
diff --git a/third_party/oauth2client/locked_file.py b/third_party/oauth2client/locked_file.py
index 31514dc..858b702 100644
--- a/third_party/oauth2client/locked_file.py
+++ b/third_party/oauth2client/locked_file.py
@@ -35,7 +35,7 @@
 import os
 import time
 
-from oauth2client import util
+from . import util
 
 logger = logging.getLogger(__name__)
 
diff --git a/third_party/oauth2client/multistore_file.py b/third_party/oauth2client/multistore_file.py
index ce7a519..ea89027 100644
--- a/third_party/oauth2client/multistore_file.py
+++ b/third_party/oauth2client/multistore_file.py
@@ -50,9 +50,9 @@
 import threading
 
 from anyjson import simplejson
-from oauth2client.client import Storage as BaseStorage
-from oauth2client.client import Credentials
-from oauth2client import util
+from .client import Storage as BaseStorage
+from .client import Credentials
+from . import util
 from locked_file import LockedFile
 
 logger = logging.getLogger(__name__)
diff --git a/third_party/protobuf26/README.chromium b/third_party/protobuf26/README.chromium
new file mode 100644
index 0000000..35df332
--- /dev/null
+++ b/third_party/protobuf26/README.chromium
@@ -0,0 +1,20 @@
+This directory contains google.protobuf module version 2.6.0 build 0.
+
+sergiyb@: It has been manually renamed to protobuf26. This is needed to avoid
+conflicts with a built-in google.protobuf module found on many developer
+machines. The long-term solution to this problem, however, should be virtualenv.
+Unfortunately, due to limited time and lack of experience, it was not a
+reasonable short-term solution.
+
+If you need to update this package, please make sure that you replace all
+the references to google.protobuf in the package itself with protobuf26, e.g.
+
+  from google.protobuf import text_format
+  import google.protobuf
+
+becomes
+
+  from protobuf26 import text_format
+  import protobuf26
+
+Bug tracking setting up virtualenv for depot_tools is https://crbug.com/496241.
diff --git a/third_party/protobuf26/__init__.py b/third_party/protobuf26/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/third_party/protobuf26/__init__.py
diff --git a/third_party/protobuf26/compiler/__init__.py b/third_party/protobuf26/compiler/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/third_party/protobuf26/compiler/__init__.py
diff --git a/third_party/protobuf26/compiler/plugin_pb2.py b/third_party/protobuf26/compiler/plugin_pb2.py
new file mode 100644
index 0000000..806241d
--- /dev/null
+++ b/third_party/protobuf26/compiler/plugin_pb2.py
@@ -0,0 +1,184 @@
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: google/protobuf/compiler/plugin.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from protobuf26 import descriptor as _descriptor
+from protobuf26 import message as _message
+from protobuf26 import reflection as _reflection
+from protobuf26 import symbol_database as _symbol_database
+from protobuf26 import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+import protobuf26.descriptor_pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='google/protobuf/compiler/plugin.proto',
+  package='google.protobuf.compiler',
+  serialized_pb=_b('\n%google/protobuf/compiler/plugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"}\n\x14\x43odeGeneratorRequest\x12\x18\n\x10\x66ile_to_generate\x18\x01 \x03(\t\x12\x11\n\tparameter\x18\x02 \x01(\t\x12\x38\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xaa\x01\n\x15\x43odeGeneratorResponse\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x42\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.File\x1a>\n\x04\x46ile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0finsertion_point\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x0f \x01(\tB,\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtos')
+  ,
+  dependencies=[protobuf26.descriptor_pb2.DESCRIPTOR,])
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+
+
+_CODEGENERATORREQUEST = _descriptor.Descriptor(
+  name='CodeGeneratorRequest',
+  full_name='google.protobuf.compiler.CodeGeneratorRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='file_to_generate', full_name='google.protobuf.compiler.CodeGeneratorRequest.file_to_generate', index=0,
+      number=1, type=9, cpp_type=9, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='parameter', full_name='google.protobuf.compiler.CodeGeneratorRequest.parameter', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='proto_file', full_name='google.protobuf.compiler.CodeGeneratorRequest.proto_file', index=2,
+      number=15, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=101,
+  serialized_end=226,
+)
+
+
+_CODEGENERATORRESPONSE_FILE = _descriptor.Descriptor(
+  name='File',
+  full_name='google.protobuf.compiler.CodeGeneratorResponse.File',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='google.protobuf.compiler.CodeGeneratorResponse.File.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='insertion_point', full_name='google.protobuf.compiler.CodeGeneratorResponse.File.insertion_point', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='content', full_name='google.protobuf.compiler.CodeGeneratorResponse.File.content', index=2,
+      number=15, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=337,
+  serialized_end=399,
+)
+
+_CODEGENERATORRESPONSE = _descriptor.Descriptor(
+  name='CodeGeneratorResponse',
+  full_name='google.protobuf.compiler.CodeGeneratorResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='error', full_name='google.protobuf.compiler.CodeGeneratorResponse.error', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='file', full_name='google.protobuf.compiler.CodeGeneratorResponse.file', index=1,
+      number=15, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[_CODEGENERATORRESPONSE_FILE, ],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=229,
+  serialized_end=399,
+)
+
+_CODEGENERATORREQUEST.fields_by_name['proto_file'].message_type = protobuf26.descriptor_pb2._FILEDESCRIPTORPROTO
+_CODEGENERATORRESPONSE_FILE.containing_type = _CODEGENERATORRESPONSE
+_CODEGENERATORRESPONSE.fields_by_name['file'].message_type = _CODEGENERATORRESPONSE_FILE
+DESCRIPTOR.message_types_by_name['CodeGeneratorRequest'] = _CODEGENERATORREQUEST
+DESCRIPTOR.message_types_by_name['CodeGeneratorResponse'] = _CODEGENERATORRESPONSE
+
+CodeGeneratorRequest = _reflection.GeneratedProtocolMessageType('CodeGeneratorRequest', (_message.Message,), dict(
+  DESCRIPTOR = _CODEGENERATORREQUEST,
+  __module__ = 'google.protobuf.compiler.plugin_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorRequest)
+  ))
+_sym_db.RegisterMessage(CodeGeneratorRequest)
+
+CodeGeneratorResponse = _reflection.GeneratedProtocolMessageType('CodeGeneratorResponse', (_message.Message,), dict(
+
+  File = _reflection.GeneratedProtocolMessageType('File', (_message.Message,), dict(
+    DESCRIPTOR = _CODEGENERATORRESPONSE_FILE,
+    __module__ = 'google.protobuf.compiler.plugin_pb2'
+    # @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorResponse.File)
+    ))
+  ,
+  DESCRIPTOR = _CODEGENERATORRESPONSE,
+  __module__ = 'google.protobuf.compiler.plugin_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorResponse)
+  ))
+_sym_db.RegisterMessage(CodeGeneratorResponse)
+_sym_db.RegisterMessage(CodeGeneratorResponse.File)
+
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\034com.google.protobuf.compilerB\014PluginProtos'))
+# @@protoc_insertion_point(module_scope)
diff --git a/third_party/protobuf26/descriptor.py b/third_party/protobuf26/descriptor.py
new file mode 100644
index 0000000..109a09d
--- /dev/null
+++ b/third_party/protobuf26/descriptor.py
@@ -0,0 +1,849 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc.  All rights reserved.
+# http://code.google.com/p/protobuf/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Needs to stay compatible with Python 2.5 due to GAE.
+#
+# Copyright 2007 Google Inc. All Rights Reserved.
+
+"""Descriptors essentially contain exactly the information found in a .proto
+file, in types that make this information accessible in Python.
+"""
+
+__author__ = 'robinson@google.com (Will Robinson)'
+
+from protobuf26.internal import api_implementation
+
+
+if api_implementation.Type() == 'cpp':
+  # Used by MakeDescriptor in cpp mode
+  import os
+  import uuid
+
+  if api_implementation.Version() == 2:
+    from protobuf26.pyext import _message
+  else:
+    from protobuf26.internal import cpp_message
+
+
+class Error(Exception):
+  """Base error for this module."""
+
+
+class TypeTransformationError(Error):
+  """Error transforming between python proto type and corresponding C++ type."""
+
+
+class DescriptorBase(object):
+
+  """Descriptors base class.
+
+  This class is the base of all descriptor classes. It provides common options
+  related functionality.
+
+  Attributes:
+    has_options:  True if the descriptor has non-default options.  Usually it
+        is not necessary to read this -- just call GetOptions() which will
+        happily return the default instance.  However, it's sometimes useful
+        for efficiency, and also useful inside the protobuf implementation to
+        avoid some bootstrapping issues.
+  """
+
+  def __init__(self, options, options_class_name):
+    """Initialize the descriptor given its options message and the name of the
+    class of the options message. The name of the class is required in case
+    the options message is None and has to be created.
+    """
+    self._options = options
+    self._options_class_name = options_class_name
+
+    # Does this descriptor have non-default options?
+    self.has_options = options is not None
+
+  def _SetOptions(self, options, options_class_name):
+    """Sets the descriptor's options
+
+    This function is used in generated proto2 files to update descriptor
+    options. It must not be used outside proto2.
+    """
+    self._options = options
+    self._options_class_name = options_class_name
+
+    # Does this descriptor have non-default options?
+    self.has_options = options is not None
+
+  def GetOptions(self):
+    """Retrieves descriptor options.
+
+    This method returns the options set or creates the default options for the
+    descriptor.
+    """
+    if self._options:
+      return self._options
+    from protobuf26 import descriptor_pb2
+    try:
+      options_class = getattr(descriptor_pb2, self._options_class_name)
+    except AttributeError:
+      raise RuntimeError('Unknown options class name %s!' %
+                         (self._options_class_name))
+    self._options = options_class()
+    return self._options
+
+
+class _NestedDescriptorBase(DescriptorBase):
+  """Common class for descriptors that can be nested."""
+
+  def __init__(self, options, options_class_name, name, full_name,
+               file, containing_type, serialized_start=None,
+               serialized_end=None):
+    """Constructor.
+
+    Args:
+      options: Protocol message options or None
+        to use default message options.
+      options_class_name: (str) The class name of the above options.
+
+      name: (str) Name of this protocol message type.
+      full_name: (str) Fully-qualified name of this protocol message type,
+        which will include protocol "package" name and the name of any
+        enclosing types.
+      file: (FileDescriptor) Reference to file info.
+      containing_type: if provided, this is a nested descriptor, with this
+        descriptor as parent, otherwise None.
+      serialized_start: The start index (inclusive) in block in the
+        file.serialized_pb that describes this descriptor.
+      serialized_end: The end index (exclusive) in block in the
+        file.serialized_pb that describes this descriptor.
+    """
+    super(_NestedDescriptorBase, self).__init__(
+        options, options_class_name)
+
+    self.name = name
+    # TODO(falk): Add function to calculate full_name instead of having it in
+    #             memory?
+    self.full_name = full_name
+    self.file = file
+    self.containing_type = containing_type
+
+    self._serialized_start = serialized_start
+    self._serialized_end = serialized_end
+
+  def GetTopLevelContainingType(self):
+    """Returns the root if this is a nested type, or itself if it's the root."""
+    desc = self
+    while desc.containing_type is not None:
+      desc = desc.containing_type
+    return desc
+
+  def CopyToProto(self, proto):
+    """Copies this to the matching proto in descriptor_pb2.
+
+    Args:
+      proto: An empty proto instance from descriptor_pb2.
+
+    Raises:
+      Error: If self couldn't be serialized, due to too few constructor arguments.
+    """
+    if (self.file is not None and
+        self._serialized_start is not None and
+        self._serialized_end is not None):
+      proto.ParseFromString(self.file.serialized_pb[
+          self._serialized_start:self._serialized_end])
+    else:
+      raise Error('Descriptor does not contain serialization.')
+
+
+class Descriptor(_NestedDescriptorBase):
+
+  """Descriptor for a protocol message type.
+
+  A Descriptor instance has the following attributes:
+
+    name: (str) Name of this protocol message type.
+    full_name: (str) Fully-qualified name of this protocol message type,
+      which will include protocol "package" name and the name of any
+      enclosing types.
+
+    containing_type: (Descriptor) Reference to the descriptor of the
+      type containing us, or None if this is top-level.
+
+    fields: (list of FieldDescriptors) Field descriptors for all
+      fields in this type.
+    fields_by_number: (dict int -> FieldDescriptor) Same FieldDescriptor
+      objects as in |fields|, but indexed by "number" attribute in each
+      FieldDescriptor.
+    fields_by_name: (dict str -> FieldDescriptor) Same FieldDescriptor
+      objects as in |fields|, but indexed by "name" attribute in each
+      FieldDescriptor.
+
+    nested_types: (list of Descriptors) Descriptor references
+      for all protocol message types nested within this one.
+    nested_types_by_name: (dict str -> Descriptor) Same Descriptor
+      objects as in |nested_types|, but indexed by "name" attribute
+      in each Descriptor.
+
+    enum_types: (list of EnumDescriptors) EnumDescriptor references
+      for all enums contained within this type.
+    enum_types_by_name: (dict str ->EnumDescriptor) Same EnumDescriptor
+      objects as in |enum_types|, but indexed by "name" attribute
+      in each EnumDescriptor.
+    enum_values_by_name: (dict str -> EnumValueDescriptor) Dict mapping
+      from enum value name to EnumValueDescriptor for that value.
+
+    extensions: (list of FieldDescriptor) All extensions defined directly
+      within this message type (NOT within a nested type).
+    extensions_by_name: (dict, string -> FieldDescriptor) Same FieldDescriptor
+      objects as |extensions|, but indexed by "name" attribute of each
+      FieldDescriptor.
+
+    is_extendable:  Does this type define any extension ranges?
+
+    options: (descriptor_pb2.MessageOptions) Protocol message options or None
+      to use default message options.
+
+    oneofs: (list of OneofDescriptor) The list of descriptors for oneof fields
+      in this message.
+    oneofs_by_name: (dict str -> OneofDescriptor) Same objects as in |oneofs|,
+      but indexed by "name" attribute.
+
+    file: (FileDescriptor) Reference to file descriptor.
+  """
+
+  # NOTE(tmarek): The file argument redefining a builtin is nothing we can
+  # fix right now since we don't know how many clients already rely on the
+  # name of the argument.
+  def __init__(self, name, full_name, filename, containing_type, fields,
+               nested_types, enum_types, extensions, options=None,
+               is_extendable=True, extension_ranges=None, oneofs=None,
+               file=None, serialized_start=None, serialized_end=None):  # pylint:disable=redefined-builtin
+    """Arguments to __init__() are as described in the description
+    of Descriptor fields above.
+
+    Note that filename is an obsolete argument, that is not used anymore.
+    Please use file.name to access this as an attribute.
+    """
+    super(Descriptor, self).__init__(
+        options, 'MessageOptions', name, full_name, file,
+        containing_type, serialized_start=serialized_start,
+        serialized_end=serialized_end)
+
+    # We have fields in addition to fields_by_name and fields_by_number,
+    # so that:
+    #   1. Clients can index fields by "order in which they're listed."
+    #   2. Clients can easily iterate over all fields with the terse
+    #      syntax: for f in descriptor.fields: ...
+    self.fields = fields
+    for field in self.fields:
+      field.containing_type = self
+    self.fields_by_number = dict((f.number, f) for f in fields)
+    self.fields_by_name = dict((f.name, f) for f in fields)
+
+    self.nested_types = nested_types
+    for nested_type in nested_types:
+      nested_type.containing_type = self
+    self.nested_types_by_name = dict((t.name, t) for t in nested_types)
+
+    self.enum_types = enum_types
+    for enum_type in self.enum_types:
+      enum_type.containing_type = self
+    self.enum_types_by_name = dict((t.name, t) for t in enum_types)
+    self.enum_values_by_name = dict(
+        (v.name, v) for t in enum_types for v in t.values)
+
+    self.extensions = extensions
+    for extension in self.extensions:
+      extension.extension_scope = self
+    self.extensions_by_name = dict((f.name, f) for f in extensions)
+    self.is_extendable = is_extendable
+    self.extension_ranges = extension_ranges
+    self.oneofs = oneofs if oneofs is not None else []
+    self.oneofs_by_name = dict((o.name, o) for o in self.oneofs)
+    for oneof in self.oneofs:
+      oneof.containing_type = self
+
+  def EnumValueName(self, enum, value):
+    """Returns the string name of an enum value.
+
+    This is just a small helper method to simplify a common operation.
+
+    Args:
+      enum: string name of the Enum.
+      value: int, value of the enum.
+
+    Returns:
+      string name of the enum value.
+
+    Raises:
+      KeyError if either the Enum doesn't exist or the value is not a valid
+        value for the enum.
+    """
+    return self.enum_types_by_name[enum].values_by_number[value].name
+
+  def CopyToProto(self, proto):
+    """Copies this to a descriptor_pb2.DescriptorProto.
+
+    Args:
+      proto: An empty descriptor_pb2.DescriptorProto.
+    """
+    # This function is overriden to give a better doc comment.
+    super(Descriptor, self).CopyToProto(proto)
+
+
+# TODO(robinson): We should have aggressive checking here,
+# for example:
+#   * If you specify a repeated field, you should not be allowed
+#     to specify a default value.
+#   * [Other examples here as needed].
+#
+# TODO(robinson): for this and other *Descriptor classes, we
+# might also want to lock things down aggressively (e.g.,
+# prevent clients from setting the attributes).  Having
+# stronger invariants here in general will reduce the number
+# of runtime checks we must do in reflection.py...
+class FieldDescriptor(DescriptorBase):
+
+  """Descriptor for a single field in a .proto file.
+
+  A FieldDescriptor instance has the following attributes:
+
+    name: (str) Name of this field, exactly as it appears in .proto.
+    full_name: (str) Name of this field, including containing scope.  This is
+      particularly relevant for extensions.
+    index: (int) Dense, 0-indexed index giving the order that this
+      field textually appears within its message in the .proto file.
+    number: (int) Tag number declared for this field in the .proto file.
+
+    type: (One of the TYPE_* constants below) Declared type.
+    cpp_type: (One of the CPPTYPE_* constants below) C++ type used to
+      represent this field.
+
+    label: (One of the LABEL_* constants below) Tells whether this
+      field is optional, required, or repeated.
+    has_default_value: (bool) True if this field has a default value defined,
+      otherwise false.
+    default_value: (Varies) Default value of this field.  Only
+      meaningful for non-repeated scalar fields.  Repeated fields
+      should always set this to [], and non-repeated composite
+      fields should always set this to None.
+
+    containing_type: (Descriptor) Descriptor of the protocol message
+      type that contains this field.  Set by the Descriptor constructor
+      if we're passed into one.
+      Somewhat confusingly, for extension fields, this is the
+      descriptor of the EXTENDED message, not the descriptor
+      of the message containing this field.  (See is_extension and
+      extension_scope below).
+    message_type: (Descriptor) If a composite field, a descriptor
+      of the message type contained in this field.  Otherwise, this is None.
+    enum_type: (EnumDescriptor) If this field contains an enum, a
+      descriptor of that enum.  Otherwise, this is None.
+
+    is_extension: True iff this describes an extension field.
+    extension_scope: (Descriptor) Only meaningful if is_extension is True.
+      Gives the message that immediately contains this extension field.
+      Will be None iff we're a top-level (file-level) extension field.
+
+    options: (descriptor_pb2.FieldOptions) Protocol message field options or
+      None to use default field options.
+
+    containing_oneof: (OneofDescriptor) If the field is a member of a oneof
+      union, contains its descriptor. Otherwise, None.
+  """
+
+  # Must be consistent with C++ FieldDescriptor::Type enum in
+  # descriptor.h.
+  #
+  # TODO(robinson): Find a way to eliminate this repetition.
+  TYPE_DOUBLE         = 1
+  TYPE_FLOAT          = 2
+  TYPE_INT64          = 3
+  TYPE_UINT64         = 4
+  TYPE_INT32          = 5
+  TYPE_FIXED64        = 6
+  TYPE_FIXED32        = 7
+  TYPE_BOOL           = 8
+  TYPE_STRING         = 9
+  TYPE_GROUP          = 10
+  TYPE_MESSAGE        = 11
+  TYPE_BYTES          = 12
+  TYPE_UINT32         = 13
+  TYPE_ENUM           = 14
+  TYPE_SFIXED32       = 15
+  TYPE_SFIXED64       = 16
+  TYPE_SINT32         = 17
+  TYPE_SINT64         = 18
+  MAX_TYPE            = 18
+
+  # Must be consistent with C++ FieldDescriptor::CppType enum in
+  # descriptor.h.
+  #
+  # TODO(robinson): Find a way to eliminate this repetition.
+  CPPTYPE_INT32       = 1
+  CPPTYPE_INT64       = 2
+  CPPTYPE_UINT32      = 3
+  CPPTYPE_UINT64      = 4
+  CPPTYPE_DOUBLE      = 5
+  CPPTYPE_FLOAT       = 6
+  CPPTYPE_BOOL        = 7
+  CPPTYPE_ENUM        = 8
+  CPPTYPE_STRING      = 9
+  CPPTYPE_MESSAGE     = 10
+  MAX_CPPTYPE         = 10
+
+  _PYTHON_TO_CPP_PROTO_TYPE_MAP = {
+      TYPE_DOUBLE: CPPTYPE_DOUBLE,
+      TYPE_FLOAT: CPPTYPE_FLOAT,
+      TYPE_ENUM: CPPTYPE_ENUM,
+      TYPE_INT64: CPPTYPE_INT64,
+      TYPE_SINT64: CPPTYPE_INT64,
+      TYPE_SFIXED64: CPPTYPE_INT64,
+      TYPE_UINT64: CPPTYPE_UINT64,
+      TYPE_FIXED64: CPPTYPE_UINT64,
+      TYPE_INT32: CPPTYPE_INT32,
+      TYPE_SFIXED32: CPPTYPE_INT32,
+      TYPE_SINT32: CPPTYPE_INT32,
+      TYPE_UINT32: CPPTYPE_UINT32,
+      TYPE_FIXED32: CPPTYPE_UINT32,
+      TYPE_BYTES: CPPTYPE_STRING,
+      TYPE_STRING: CPPTYPE_STRING,
+      TYPE_BOOL: CPPTYPE_BOOL,
+      TYPE_MESSAGE: CPPTYPE_MESSAGE,
+      TYPE_GROUP: CPPTYPE_MESSAGE
+      }
+
+  # Must be consistent with C++ FieldDescriptor::Label enum in
+  # descriptor.h.
+  #
+  # TODO(robinson): Find a way to eliminate this repetition.
+  LABEL_OPTIONAL      = 1
+  LABEL_REQUIRED      = 2
+  LABEL_REPEATED      = 3
+  MAX_LABEL           = 3
+
+  # Must be consistent with C++ constants kMaxNumber, kFirstReservedNumber,
+  # and kLastReservedNumber in descriptor.h
+  MAX_FIELD_NUMBER = (1 << 29) - 1
+  FIRST_RESERVED_FIELD_NUMBER = 19000
+  LAST_RESERVED_FIELD_NUMBER = 19999
+
  def __init__(self, name, full_name, index, number, type, cpp_type, label,
               default_value, message_type, enum_type, containing_type,
               is_extension, extension_scope, options=None,
               has_default_value=True, containing_oneof=None):
    """The arguments are as described in the description of FieldDescriptor
    attributes above.

    Note that containing_type may be None, and may be set later if necessary
    (to deal with circular references between message types, for example).
    Likewise for extension_scope.
    """
    super(FieldDescriptor, self).__init__(options, 'FieldOptions')
    self.name = name
    self.full_name = full_name
    self.index = index
    self.number = number
    self.type = type
    self.cpp_type = cpp_type
    self.label = label
    self.has_default_value = has_default_value
    self.default_value = default_value
    self.containing_type = containing_type
    self.message_type = message_type
    self.enum_type = enum_type
    self.is_extension = is_extension
    self.extension_scope = extension_scope
    self.containing_oneof = containing_oneof
    # When the C++ implementation backs the Python API, keep a handle to the
    # corresponding C++ descriptor.  API version 2 looks it up through the
    # _message extension module; version 1 goes through cpp_message.  The
    # lookup by full_name assumes the C++ pool already knows this field
    # (i.e. the containing file was built into the pool first).
    if api_implementation.Type() == 'cpp':
      if is_extension:
        if api_implementation.Version() == 2:
          # pylint: disable=protected-access
          self._cdescriptor = (
              _message.Message._GetExtensionDescriptor(full_name))
          # pylint: enable=protected-access
        else:
          self._cdescriptor = cpp_message.GetExtensionDescriptor(full_name)
      else:
        if api_implementation.Version() == 2:
          # pylint: disable=protected-access
          self._cdescriptor = _message.Message._GetFieldDescriptor(full_name)
          # pylint: enable=protected-access
        else:
          self._cdescriptor = cpp_message.GetFieldDescriptor(full_name)
    else:
      # Pure-Python implementation: no C++ counterpart exists.
      self._cdescriptor = None
+
+  @staticmethod
+  def ProtoTypeToCppProtoType(proto_type):
+    """Converts from a Python proto type to a C++ Proto Type.
+
+    The Python ProtocolBuffer classes specify both the 'Python' datatype and the
+    'C++' datatype - and they're not the same. This helper method should
+    translate from one to another.
+
+    Args:
+      proto_type: the Python proto type (descriptor.FieldDescriptor.TYPE_*)
+    Returns:
+      descriptor.FieldDescriptor.CPPTYPE_*, the C++ type.
+    Raises:
+      TypeTransformationError: when the Python proto type isn't known.
+    """
+    try:
+      return FieldDescriptor._PYTHON_TO_CPP_PROTO_TYPE_MAP[proto_type]
+    except KeyError:
+      raise TypeTransformationError('Unknown proto_type: %s' % proto_type)
+
+
class EnumDescriptor(_NestedDescriptorBase):

  """Descriptor for an enum defined in a .proto file.

  An EnumDescriptor instance has the following attributes:

    name: (str) Name of the enum type.
    full_name: (str) Full name of the type, including package name
      and any enclosing type(s).

    values: (list of EnumValueDescriptors) List of the values
      in this enum.
    values_by_name: (dict str -> EnumValueDescriptor) Same as |values|,
      but indexed by the "name" field of each EnumValueDescriptor.
    values_by_number: (dict int -> EnumValueDescriptor) Same as |values|,
      but indexed by the "number" field of each EnumValueDescriptor.
    containing_type: (Descriptor) Descriptor of the immediate containing
      type of this enum, or None if this is an enum defined at the
      top level in a .proto file.  Set by Descriptor's constructor
      if we're passed into one.
    file: (FileDescriptor) Reference to file descriptor.
    options: (descriptor_pb2.EnumOptions) Enum options message or
      None to use default enum options.
  """

  def __init__(self, name, full_name, filename, values,
               containing_type=None, options=None, file=None,
               serialized_start=None, serialized_end=None):
    """Arguments are as described in the attribute description above.

    Note that filename is an obsolete argument that is no longer used.
    Please use file.name to access this as an attribute.
    """
    super(EnumDescriptor, self).__init__(
        options, 'EnumOptions', name, full_name, file,
        containing_type, serialized_start=serialized_start,
        serialized_end=serialized_end)

    self.values = values
    self.values_by_name = {}
    self.values_by_number = {}
    # One pass wires each value back to this enum and fills both indexes.
    # When numbers collide (allow_alias), the last value with a given number
    # wins, matching plain dict construction.
    for value in values:
      value.type = self
      self.values_by_name[value.name] = value
      self.values_by_number[value.number] = value

  def CopyToProto(self, proto):
    """Copies this to a descriptor_pb2.EnumDescriptorProto.

    Args:
      proto: An empty descriptor_pb2.EnumDescriptorProto.
    """
    # Overridden only to provide a more specific doc comment.
    super(EnumDescriptor, self).CopyToProto(proto)
+
+
class EnumValueDescriptor(DescriptorBase):

  """Descriptor for a single value within an enum.

    name: (str) Name of this value.
    index: (int) Dense, 0-indexed index giving the order that this
      value appears textually within its enum in the .proto file.
    number: (int) Actual number assigned to this enum value.
    type: (EnumDescriptor) EnumDescriptor to which this value
      belongs.  Set by EnumDescriptor's constructor if we're
      passed into one.
    options: (descriptor_pb2.EnumValueOptions) Enum value options message or
      None to use default enum value options.
  """

  def __init__(self, name, index, number, type=None, options=None):
    """Arguments are described in the class docstring above."""
    super(EnumValueDescriptor, self).__init__(options, 'EnumValueOptions')
    self.name = name
    # Declaration order within the enum, not the wire number.
    self.index = index
    self.number = number
    # Back-reference; normally filled in by EnumDescriptor.__init__.
    self.type = type
+
+
class OneofDescriptor(object):
  """Descriptor for a oneof field.

    name: (str) Name of the oneof field.
    full_name: (str) Full name of the oneof field, including package name.
    index: (int) 0-based index giving the order of the oneof field inside
      its containing type.
    containing_type: (Descriptor) Descriptor of the protocol message
      type that contains this field.  Set by the Descriptor constructor
      if we're passed into one.
    fields: (list of FieldDescriptor) The list of field descriptors this
      oneof can contain.
  """

  def __init__(self, name, full_name, index, containing_type, fields):
    """Arguments are described in the class docstring above."""
    # Plain value object: every constructor argument becomes an attribute.
    (self.name, self.full_name, self.index,
     self.containing_type, self.fields) = (
         name, full_name, index, containing_type, fields)
+
+
class ServiceDescriptor(_NestedDescriptorBase):

  """Descriptor for a service.

    name: (str) Name of the service.
    full_name: (str) Full name of the service, including package name.
    index: (int) 0-indexed index giving the order that this service
      definition appears within the .proto file.
    methods: (list of MethodDescriptor) List of methods provided by this
      service.
    options: (descriptor_pb2.ServiceOptions) Service options message or
      None to use default service options.
    file: (FileDescriptor) Reference to file info.
  """

  def __init__(self, name, full_name, index, methods, options=None, file=None,
               serialized_start=None, serialized_end=None):
    super(ServiceDescriptor, self).__init__(
        options, 'ServiceOptions', name, full_name, file,
        None, serialized_start=serialized_start,
        serialized_end=serialized_end)
    self.index = index
    self.methods = methods
    # Wire a back-reference from every method to the service that owns it.
    for method in methods:
      method.containing_service = self

  def FindMethodByName(self, name):
    """Returns the descriptor of the named method, or None if absent."""
    return next(
        (method for method in self.methods if method.name == name), None)

  def CopyToProto(self, proto):
    """Copies this to a descriptor_pb2.ServiceDescriptorProto.

    Args:
      proto: An empty descriptor_pb2.ServiceDescriptorProto.
    """
    # Overridden only to provide a more specific doc comment.
    super(ServiceDescriptor, self).CopyToProto(proto)
+
+
class MethodDescriptor(DescriptorBase):

  """Descriptor for a method in a service.

  name: (str) Name of the method within the service.
  full_name: (str) Full name of the method.
  index: (int) 0-indexed index of the method inside the service.
  containing_service: (ServiceDescriptor) The service that contains this
    method.
  input_type: The descriptor of the message that this method accepts.
  output_type: The descriptor of the message that this method returns.
  options: (descriptor_pb2.MethodOptions) Method options message or
    None to use default method options.
  """

  def __init__(self, name, full_name, index, containing_service,
               input_type, output_type, options=None):
    """Constructor; arguments match the attributes in the class docstring.

    Note that containing_service may be None, and may be set later if
    necessary (ServiceDescriptor's constructor does exactly that).
    """
    super(MethodDescriptor, self).__init__(options, 'MethodOptions')
    self.name = name
    self.full_name = full_name
    self.index = index
    self.input_type = input_type
    self.output_type = output_type
    self.containing_service = containing_service
+
+
class FileDescriptor(DescriptorBase):
  """Descriptor for a file. Mimics the descriptor_pb2.FileDescriptorProto.

  Note that enum_types_by_name, extensions_by_name, and dependencies
  fields are only set by the message_factory module, and not by the
  generated proto code.

  name: name of the file, relative to the root of the source tree.
  package: name of the package.
  serialized_pb: (str) Byte string of the serialized
    descriptor_pb2.FileDescriptorProto.
  dependencies: List of other FileDescriptors this FileDescriptor depends on.
  message_types_by_name: Dict of message names and their descriptors.
  enum_types_by_name: Dict of enum names and their descriptors.
  extensions_by_name: Dict of extension names and their descriptors.
  """

  def __init__(self, name, package, options=None, serialized_pb=None,
               dependencies=None):
    """Constructor."""
    super(FileDescriptor, self).__init__(options, 'FileOptions')

    self.name = name
    self.package = package
    self.serialized_pb = serialized_pb
    self.dependencies = dependencies or []
    # Lookup tables; left empty here and populated by message_factory.
    self.message_types_by_name = {}
    self.enum_types_by_name = {}
    self.extensions_by_name = {}

    # When the C++ implementation backs the Python API, register this file's
    # serialized form with the C++ descriptor pool as well.
    if (api_implementation.Type() == 'cpp' and
        self.serialized_pb is not None):
      if api_implementation.Version() == 2:
        # pylint: disable=protected-access
        _message.Message._BuildFile(self.serialized_pb)
        # pylint: enable=protected-access
      else:
        cpp_message.BuildFile(self.serialized_pb)

  def CopyToProto(self, proto):
    """Copies this to a descriptor_pb2.FileDescriptorProto.

    Args:
      proto: An empty descriptor_pb2.FileDescriptorProto.
    """
    proto.ParseFromString(self.serialized_pb)
+
+
+def _ParseOptions(message, string):
+  """Parses serialized options.
+
+  This helper function is used to parse serialized options in generated
+  proto2 files. It must not be used outside proto2.
+  """
+  message.ParseFromString(string)
+  return message
+
+
def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True):
  """Make a protobuf Descriptor given a DescriptorProto protobuf.

  Handles nested descriptors. Note that this is limited to the scope of defining
  a message inside of another message. Composite fields can currently only be
  resolved if the message is defined in the same scope as the field.

  Args:
    desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
    package: Optional package name for the new message Descriptor (string).
    build_file_if_cpp: Update the C++ descriptor pool if api matches.
                       Set to False on recursion, so no duplicates are created.
  Returns:
    A Descriptor for protobuf messages.
  """
  if api_implementation.Type() == 'cpp' and build_file_if_cpp:
    # The C++ implementation requires all descriptors to be backed by the same
    # definition in the C++ descriptor pool. To do this, we build a
    # FileDescriptorProto with the same definition as this descriptor and build
    # it into the pool.
    from protobuf26 import descriptor_pb2
    file_descriptor_proto = descriptor_pb2.FileDescriptorProto()
    file_descriptor_proto.message_type.add().MergeFrom(desc_proto)

    # Generate a random name for this proto file to prevent conflicts with
    # any imported ones. We need to specify a file name so BuildFile accepts
    # our FileDescriptorProto, but it is not important what that file name
    # is actually set to.
    proto_name = str(uuid.uuid4())

    if package:
      file_descriptor_proto.name = os.path.join(package.replace('.', '/'),
                                                proto_name + '.proto')
      file_descriptor_proto.package = package
    else:
      file_descriptor_proto.name = proto_name + '.proto'

    # Register the synthetic file with the C++ pool via whichever API
    # version is in use.
    if api_implementation.Version() == 2:
      # pylint: disable=protected-access
      _message.Message._BuildFile(file_descriptor_proto.SerializeToString())
      # pylint: enable=protected-access
    else:
      cpp_message.BuildFile(file_descriptor_proto.SerializeToString())

  # full_message_name accumulates the dotted path used to qualify every
  # nested symbol below.
  full_message_name = [desc_proto.name]
  if package: full_message_name.insert(0, package)

  # Create Descriptors for enum types
  enum_types = {}
  for enum_proto in desc_proto.enum_type:
    full_name = '.'.join(full_message_name + [enum_proto.name])
    enum_desc = EnumDescriptor(
      enum_proto.name, full_name, None, [
          EnumValueDescriptor(enum_val.name, ii, enum_val.number)
          for ii, enum_val in enumerate(enum_proto.value)])
    enum_types[full_name] = enum_desc

  # Create Descriptors for nested types
  nested_types = {}
  for nested_proto in desc_proto.nested_type:
    full_name = '.'.join(full_message_name + [nested_proto.name])
    # Nested types are just those defined inside of the message, not all types
    # used by fields in the message, so no loops are possible here.
    nested_desc = MakeDescriptor(nested_proto,
                                 package='.'.join(full_message_name),
                                 build_file_if_cpp=False)
    nested_types[full_name] = nested_desc

  fields = []
  for field_proto in desc_proto.field:
    full_name = '.'.join(full_message_name + [field_proto.name])
    enum_desc = None
    nested_desc = None
    if field_proto.HasField('type_name'):
      # Only the last path component of type_name is used, so the referenced
      # type must live in this same message's scope to be resolved.
      type_name = field_proto.type_name
      full_type_name = '.'.join(full_message_name +
                                [type_name[type_name.rfind('.')+1:]])
      if full_type_name in nested_types:
        nested_desc = nested_types[full_type_name]
      elif full_type_name in enum_types:
        enum_desc = enum_types[full_type_name]
      # Else type_name references a non-local type, which isn't implemented
    # NOTE(review): the field's index is derived from its wire number
    # (number - 1), not from declaration order — presumably assumes densely
    # numbered fields starting at 1; confirm before relying on .index.
    field = FieldDescriptor(
        field_proto.name, full_name, field_proto.number - 1,
        field_proto.number, field_proto.type,
        FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type),
        field_proto.label, None, nested_desc, enum_desc, None, False, None,
        has_default_value=False)
    fields.append(field)

  desc_name = '.'.join(full_message_name)
  return Descriptor(desc_proto.name, desc_name, None, None, fields,
                    nested_types.values(), enum_types.values(), [])
diff --git a/third_party/protobuf26/descriptor_database.py b/third_party/protobuf26/descriptor_database.py
new file mode 100644
index 0000000..9f5a117
--- /dev/null
+++ b/third_party/protobuf26/descriptor_database.py
@@ -0,0 +1,137 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc.  All rights reserved.
+# http://code.google.com/p/protobuf/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Provides a container for DescriptorProtos."""
+
+__author__ = 'matthewtoia@google.com (Matt Toia)'
+
+
class Error(Exception):
  """Base class for errors raised by this module."""
  pass
+
+
class DescriptorDatabaseConflictingDefinitionError(Error):
  """Raised when a proto is added with the same name and a different descriptor."""
+
+
class DescriptorDatabase(object):
  """A container accepting FileDescriptorProtos and maps DescriptorProtos."""

  def __init__(self):
    # File name -> FileDescriptorProto.
    self._file_desc_protos_by_file = {}
    # Fully qualified symbol name -> FileDescriptorProto defining it.
    self._file_desc_protos_by_symbol = {}

  def Add(self, file_desc_proto):
    """Adds the FileDescriptorProto and its types to this database.

    Args:
      file_desc_proto: The FileDescriptorProto to add.
    Raises:
      DescriptorDatabaseConflictingDefinitionError: if an attempt is made to
        add a proto with the same name but a different definition than an
        existing proto in the database.
    """
    proto_name = file_desc_proto.name
    by_file = self._file_desc_protos_by_file
    if proto_name not in by_file:
      by_file[proto_name] = file_desc_proto
    elif by_file[proto_name] != file_desc_proto:
      raise DescriptorDatabaseConflictingDefinitionError(
          '%s already added, but with different descriptor.' % proto_name)

    # Index every symbol (messages recursively, plus top-level enums) back
    # to the file that defines it.
    package = file_desc_proto.package
    for message in file_desc_proto.message_type:
      for symbol in _ExtractSymbols(message, package):
        self._file_desc_protos_by_symbol[symbol] = file_desc_proto
    for enum in file_desc_proto.enum_type:
      enum_symbol = '.'.join((package, enum.name))
      self._file_desc_protos_by_symbol[enum_symbol] = file_desc_proto

  def FindFileByName(self, name):
    """Finds the file descriptor proto by file name.

    Typically the file name is a relative path ending in a .proto file. The
    proto with the given name will have to have been added to this database
    using the Add method or else an error will be raised.

    Args:
      name: The file name to find.

    Returns:
      The file descriptor proto matching the name.

    Raises:
      KeyError if no file by the given name was added.
    """
    return self._file_desc_protos_by_file[name]

  def FindFileContainingSymbol(self, symbol):
    """Finds the file descriptor proto containing the specified symbol.

    The symbol should be a fully qualified name including the file descriptor's
    package and any containing messages. Some examples:

    'some.package.name.Message'
    'some.package.name.Message.NestedEnum'

    The file descriptor proto containing the specified symbol must be added to
    this database using the Add method or else an error will be raised.

    Args:
      symbol: The fully qualified symbol name.

    Returns:
      The file descriptor proto containing the symbol.

    Raises:
      KeyError if no file contains the specified symbol.
    """
    return self._file_desc_protos_by_symbol[symbol]
+
+
+def _ExtractSymbols(desc_proto, package):
+  """Pulls out all the symbols from a descriptor proto.
+
+  Args:
+    desc_proto: The proto to extract symbols from.
+    package: The package containing the descriptor type.
+
+  Yields:
+    The fully qualified name found in the descriptor.
+  """
+
+  message_name = '.'.join((package, desc_proto.name))
+  yield message_name
+  for nested_type in desc_proto.nested_type:
+    for symbol in _ExtractSymbols(nested_type, message_name):
+      yield symbol
+    for enum_type in desc_proto.enum_type:
+      yield '.'.join((message_name, enum_type.name))
diff --git a/third_party/protobuf26/descriptor_pb2.py b/third_party/protobuf26/descriptor_pb2.py
new file mode 100644
index 0000000..ae81b36
--- /dev/null
+++ b/third_party/protobuf26/descriptor_pb2.py
@@ -0,0 +1,1522 @@
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: google/protobuf/descriptor.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from protobuf26 import descriptor as _descriptor
+from protobuf26 import message as _message
+from protobuf26 import reflection as _reflection
+from protobuf26 import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='google/protobuf/descriptor.proto',
+  package='google.protobuf',
+  serialized_pb=_b('\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xcb\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x39\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfo\"\xe4\x03\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x39\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProto\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x1a,\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"\xa9\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 \x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 
\x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"$\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x8c\x01\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptions\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\x7f\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\"\xab\x04\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n 
\x01(\x08:\x05\x66\x61lse\x12,\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08:\x05\x66\x61lse\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd3\x01\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xbe\x02\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x1c\n\x14\x65xperimental_map_key\x18\t \x01(\t\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x8d\x01\n\x0b\x45numOptions\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 
\x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"}\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"{\n\x0eServiceOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"z\n\rMethodOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9e\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xb1\x01\n\x0eSourceCodeInfo\x12:\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.Location\x1a\x63\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\tB)\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01')
+)
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+
+_FIELDDESCRIPTORPROTO_TYPE = _descriptor.EnumDescriptor(
+  name='Type',
+  full_name='google.protobuf.FieldDescriptorProto.Type',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_DOUBLE', index=0, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_FLOAT', index=1, number=2,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_INT64', index=2, number=3,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_UINT64', index=3, number=4,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_INT32', index=4, number=5,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_FIXED64', index=5, number=6,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_FIXED32', index=6, number=7,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_BOOL', index=7, number=8,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_STRING', index=8, number=9,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_GROUP', index=9, number=10,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_MESSAGE', index=10, number=11,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_BYTES', index=11, number=12,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_UINT32', index=12, number=13,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_ENUM', index=13, number=14,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_SFIXED32', index=14, number=15,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_SFIXED64', index=15, number=16,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_SINT32', index=16, number=17,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_SINT64', index=17, number=18,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=1378,
+  serialized_end=1688,
+)
+_sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_TYPE)
+
+_FIELDDESCRIPTORPROTO_LABEL = _descriptor.EnumDescriptor(
+  name='Label',
+  full_name='google.protobuf.FieldDescriptorProto.Label',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='LABEL_OPTIONAL', index=0, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='LABEL_REQUIRED', index=1, number=2,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='LABEL_REPEATED', index=2, number=3,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=1690,
+  serialized_end=1757,
+)
+_sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_LABEL)
+
+_FILEOPTIONS_OPTIMIZEMODE = _descriptor.EnumDescriptor(
+  name='OptimizeMode',
+  full_name='google.protobuf.FileOptions.OptimizeMode',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='SPEED', index=0, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='CODE_SIZE', index=1, number=2,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='LITE_RUNTIME', index=2, number=3,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=2813,
+  serialized_end=2871,
+)
+_sym_db.RegisterEnumDescriptor(_FILEOPTIONS_OPTIMIZEMODE)
+
+_FIELDOPTIONS_CTYPE = _descriptor.EnumDescriptor(
+  name='CType',
+  full_name='google.protobuf.FieldOptions.CType',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='STRING', index=0, number=0,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='CORD', index=1, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='STRING_PIECE', index=2, number=2,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=3359,
+  serialized_end=3406,
+)
+_sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_CTYPE)
+
+
+_FILEDESCRIPTORSET = _descriptor.Descriptor(
+  name='FileDescriptorSet',
+  full_name='google.protobuf.FileDescriptorSet',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='file', full_name='google.protobuf.FileDescriptorSet.file', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=53,
+  serialized_end=124,
+)
+
+
+_FILEDESCRIPTORPROTO = _descriptor.Descriptor(
+  name='FileDescriptorProto',
+  full_name='google.protobuf.FileDescriptorProto',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='google.protobuf.FileDescriptorProto.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='package', full_name='google.protobuf.FileDescriptorProto.package', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='dependency', full_name='google.protobuf.FileDescriptorProto.dependency', index=2,
+      number=3, type=9, cpp_type=9, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='public_dependency', full_name='google.protobuf.FileDescriptorProto.public_dependency', index=3,
+      number=10, type=5, cpp_type=1, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='weak_dependency', full_name='google.protobuf.FileDescriptorProto.weak_dependency', index=4,
+      number=11, type=5, cpp_type=1, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='message_type', full_name='google.protobuf.FileDescriptorProto.message_type', index=5,
+      number=4, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='enum_type', full_name='google.protobuf.FileDescriptorProto.enum_type', index=6,
+      number=5, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='service', full_name='google.protobuf.FileDescriptorProto.service', index=7,
+      number=6, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='extension', full_name='google.protobuf.FileDescriptorProto.extension', index=8,
+      number=7, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='options', full_name='google.protobuf.FileDescriptorProto.options', index=9,
+      number=8, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='source_code_info', full_name='google.protobuf.FileDescriptorProto.source_code_info', index=10,
+      number=9, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=127,
+  serialized_end=586,
+)
+
+
+_DESCRIPTORPROTO_EXTENSIONRANGE = _descriptor.Descriptor(
+  name='ExtensionRange',
+  full_name='google.protobuf.DescriptorProto.ExtensionRange',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='start', full_name='google.protobuf.DescriptorProto.ExtensionRange.start', index=0,
+      number=1, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='end', full_name='google.protobuf.DescriptorProto.ExtensionRange.end', index=1,
+      number=2, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1029,
+  serialized_end=1073,
+)
+
+_DESCRIPTORPROTO = _descriptor.Descriptor(
+  name='DescriptorProto',
+  full_name='google.protobuf.DescriptorProto',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='google.protobuf.DescriptorProto.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='field', full_name='google.protobuf.DescriptorProto.field', index=1,
+      number=2, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='extension', full_name='google.protobuf.DescriptorProto.extension', index=2,
+      number=6, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='nested_type', full_name='google.protobuf.DescriptorProto.nested_type', index=3,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='enum_type', full_name='google.protobuf.DescriptorProto.enum_type', index=4,
+      number=4, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='extension_range', full_name='google.protobuf.DescriptorProto.extension_range', index=5,
+      number=5, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='oneof_decl', full_name='google.protobuf.DescriptorProto.oneof_decl', index=6,
+      number=8, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='options', full_name='google.protobuf.DescriptorProto.options', index=7,
+      number=7, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[_DESCRIPTORPROTO_EXTENSIONRANGE, ],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=589,
+  serialized_end=1073,
+)
+
+
+_FIELDDESCRIPTORPROTO = _descriptor.Descriptor(
+  name='FieldDescriptorProto',
+  full_name='google.protobuf.FieldDescriptorProto',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='google.protobuf.FieldDescriptorProto.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='number', full_name='google.protobuf.FieldDescriptorProto.number', index=1,
+      number=3, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='label', full_name='google.protobuf.FieldDescriptorProto.label', index=2,
+      number=4, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=1,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='type', full_name='google.protobuf.FieldDescriptorProto.type', index=3,
+      number=5, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=1,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='type_name', full_name='google.protobuf.FieldDescriptorProto.type_name', index=4,
+      number=6, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='extendee', full_name='google.protobuf.FieldDescriptorProto.extendee', index=5,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='default_value', full_name='google.protobuf.FieldDescriptorProto.default_value', index=6,
+      number=7, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='oneof_index', full_name='google.protobuf.FieldDescriptorProto.oneof_index', index=7,
+      number=9, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='options', full_name='google.protobuf.FieldDescriptorProto.options', index=8,
+      number=8, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _FIELDDESCRIPTORPROTO_TYPE,
+    _FIELDDESCRIPTORPROTO_LABEL,
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1076,
+  serialized_end=1757,
+)
+
+
+_ONEOFDESCRIPTORPROTO = _descriptor.Descriptor(
+  name='OneofDescriptorProto',
+  full_name='google.protobuf.OneofDescriptorProto',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='google.protobuf.OneofDescriptorProto.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1759,
+  serialized_end=1795,
+)
+
+
+_ENUMDESCRIPTORPROTO = _descriptor.Descriptor(
+  name='EnumDescriptorProto',
+  full_name='google.protobuf.EnumDescriptorProto',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='google.protobuf.EnumDescriptorProto.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='value', full_name='google.protobuf.EnumDescriptorProto.value', index=1,
+      number=2, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='options', full_name='google.protobuf.EnumDescriptorProto.options', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1798,
+  serialized_end=1938,
+)
+
+
+_ENUMVALUEDESCRIPTORPROTO = _descriptor.Descriptor(
+  name='EnumValueDescriptorProto',
+  full_name='google.protobuf.EnumValueDescriptorProto',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='google.protobuf.EnumValueDescriptorProto.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='number', full_name='google.protobuf.EnumValueDescriptorProto.number', index=1,
+      number=2, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='options', full_name='google.protobuf.EnumValueDescriptorProto.options', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1940,
+  serialized_end=2048,
+)
+
+
+_SERVICEDESCRIPTORPROTO = _descriptor.Descriptor(
+  name='ServiceDescriptorProto',
+  full_name='google.protobuf.ServiceDescriptorProto',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='google.protobuf.ServiceDescriptorProto.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='method', full_name='google.protobuf.ServiceDescriptorProto.method', index=1,
+      number=2, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='options', full_name='google.protobuf.ServiceDescriptorProto.options', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2051,
+  serialized_end=2195,
+)
+
+
+_METHODDESCRIPTORPROTO = _descriptor.Descriptor(
+  name='MethodDescriptorProto',
+  full_name='google.protobuf.MethodDescriptorProto',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='google.protobuf.MethodDescriptorProto.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='input_type', full_name='google.protobuf.MethodDescriptorProto.input_type', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='output_type', full_name='google.protobuf.MethodDescriptorProto.output_type', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='options', full_name='google.protobuf.MethodDescriptorProto.options', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2197,
+  serialized_end=2324,
+)
+
+
+_FILEOPTIONS = _descriptor.Descriptor(
+  name='FileOptions',
+  full_name='google.protobuf.FileOptions',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='java_package', full_name='google.protobuf.FileOptions.java_package', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='java_outer_classname', full_name='google.protobuf.FileOptions.java_outer_classname', index=1,
+      number=8, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='java_multiple_files', full_name='google.protobuf.FileOptions.java_multiple_files', index=2,
+      number=10, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='java_generate_equals_and_hash', full_name='google.protobuf.FileOptions.java_generate_equals_and_hash', index=3,
+      number=20, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='java_string_check_utf8', full_name='google.protobuf.FileOptions.java_string_check_utf8', index=4,
+      number=27, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='optimize_for', full_name='google.protobuf.FileOptions.optimize_for', index=5,
+      number=9, type=14, cpp_type=8, label=1,
+      has_default_value=True, default_value=1,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='go_package', full_name='google.protobuf.FileOptions.go_package', index=6,
+      number=11, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='cc_generic_services', full_name='google.protobuf.FileOptions.cc_generic_services', index=7,
+      number=16, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='java_generic_services', full_name='google.protobuf.FileOptions.java_generic_services', index=8,
+      number=17, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='py_generic_services', full_name='google.protobuf.FileOptions.py_generic_services', index=9,
+      number=18, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='deprecated', full_name='google.protobuf.FileOptions.deprecated', index=10,
+      number=23, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='uninterpreted_option', full_name='google.protobuf.FileOptions.uninterpreted_option', index=11,
+      number=999, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _FILEOPTIONS_OPTIMIZEMODE,
+  ],
+  options=None,
+  is_extendable=True,
+  extension_ranges=[(1000, 536870912), ],
+  oneofs=[
+  ],
+  serialized_start=2327,
+  serialized_end=2882,
+)
+
+
+_MESSAGEOPTIONS = _descriptor.Descriptor(
+  name='MessageOptions',
+  full_name='google.protobuf.MessageOptions',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='message_set_wire_format', full_name='google.protobuf.MessageOptions.message_set_wire_format', index=0,
+      number=1, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='no_standard_descriptor_accessor', full_name='google.protobuf.MessageOptions.no_standard_descriptor_accessor', index=1,
+      number=2, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='deprecated', full_name='google.protobuf.MessageOptions.deprecated', index=2,
+      number=3, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='uninterpreted_option', full_name='google.protobuf.MessageOptions.uninterpreted_option', index=3,
+      number=999, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=True,
+  extension_ranges=[(1000, 536870912), ],
+  oneofs=[
+  ],
+  serialized_start=2885,
+  serialized_end=3096,
+)
+
+
+_FIELDOPTIONS = _descriptor.Descriptor(
+  name='FieldOptions',
+  full_name='google.protobuf.FieldOptions',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='ctype', full_name='google.protobuf.FieldOptions.ctype', index=0,
+      number=1, type=14, cpp_type=8, label=1,
+      has_default_value=True, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='packed', full_name='google.protobuf.FieldOptions.packed', index=1,
+      number=2, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='lazy', full_name='google.protobuf.FieldOptions.lazy', index=2,
+      number=5, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='deprecated', full_name='google.protobuf.FieldOptions.deprecated', index=3,
+      number=3, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='experimental_map_key', full_name='google.protobuf.FieldOptions.experimental_map_key', index=4,
+      number=9, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='weak', full_name='google.protobuf.FieldOptions.weak', index=5,
+      number=10, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='uninterpreted_option', full_name='google.protobuf.FieldOptions.uninterpreted_option', index=6,
+      number=999, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _FIELDOPTIONS_CTYPE,
+  ],
+  options=None,
+  is_extendable=True,
+  extension_ranges=[(1000, 536870912), ],
+  oneofs=[
+  ],
+  serialized_start=3099,
+  serialized_end=3417,
+)
+
+
+_ENUMOPTIONS = _descriptor.Descriptor(
+  name='EnumOptions',
+  full_name='google.protobuf.EnumOptions',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='allow_alias', full_name='google.protobuf.EnumOptions.allow_alias', index=0,
+      number=2, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='deprecated', full_name='google.protobuf.EnumOptions.deprecated', index=1,
+      number=3, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='uninterpreted_option', full_name='google.protobuf.EnumOptions.uninterpreted_option', index=2,
+      number=999, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=True,
+  extension_ranges=[(1000, 536870912), ],
+  oneofs=[
+  ],
+  serialized_start=3420,
+  serialized_end=3561,
+)
+
+
+_ENUMVALUEOPTIONS = _descriptor.Descriptor(
+  name='EnumValueOptions',
+  full_name='google.protobuf.EnumValueOptions',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='deprecated', full_name='google.protobuf.EnumValueOptions.deprecated', index=0,
+      number=1, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='uninterpreted_option', full_name='google.protobuf.EnumValueOptions.uninterpreted_option', index=1,
+      number=999, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=True,
+  extension_ranges=[(1000, 536870912), ],
+  oneofs=[
+  ],
+  serialized_start=3563,
+  serialized_end=3688,
+)
+
+
+_SERVICEOPTIONS = _descriptor.Descriptor(
+  name='ServiceOptions',
+  full_name='google.protobuf.ServiceOptions',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='deprecated', full_name='google.protobuf.ServiceOptions.deprecated', index=0,
+      number=33, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='uninterpreted_option', full_name='google.protobuf.ServiceOptions.uninterpreted_option', index=1,
+      number=999, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=True,
+  extension_ranges=[(1000, 536870912), ],
+  oneofs=[
+  ],
+  serialized_start=3690,
+  serialized_end=3813,
+)
+
+
+_METHODOPTIONS = _descriptor.Descriptor(
+  name='MethodOptions',
+  full_name='google.protobuf.MethodOptions',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='deprecated', full_name='google.protobuf.MethodOptions.deprecated', index=0,
+      number=33, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='uninterpreted_option', full_name='google.protobuf.MethodOptions.uninterpreted_option', index=1,
+      number=999, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=True,
+  extension_ranges=[(1000, 536870912), ],
+  oneofs=[
+  ],
+  serialized_start=3815,
+  serialized_end=3937,
+)
+
+
+_UNINTERPRETEDOPTION_NAMEPART = _descriptor.Descriptor(
+  name='NamePart',
+  full_name='google.protobuf.UninterpretedOption.NamePart',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name_part', full_name='google.protobuf.UninterpretedOption.NamePart.name_part', index=0,
+      number=1, type=9, cpp_type=9, label=2,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='is_extension', full_name='google.protobuf.UninterpretedOption.NamePart.is_extension', index=1,
+      number=2, type=8, cpp_type=7, label=2,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=4175,
+  serialized_end=4226,
+)
+
+_UNINTERPRETEDOPTION = _descriptor.Descriptor(
+  name='UninterpretedOption',
+  full_name='google.protobuf.UninterpretedOption',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='google.protobuf.UninterpretedOption.name', index=0,
+      number=2, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='identifier_value', full_name='google.protobuf.UninterpretedOption.identifier_value', index=1,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='positive_int_value', full_name='google.protobuf.UninterpretedOption.positive_int_value', index=2,
+      number=4, type=4, cpp_type=4, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='negative_int_value', full_name='google.protobuf.UninterpretedOption.negative_int_value', index=3,
+      number=5, type=3, cpp_type=2, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='double_value', full_name='google.protobuf.UninterpretedOption.double_value', index=4,
+      number=6, type=1, cpp_type=5, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='string_value', full_name='google.protobuf.UninterpretedOption.string_value', index=5,
+      number=7, type=12, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b(""),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='aggregate_value', full_name='google.protobuf.UninterpretedOption.aggregate_value', index=6,
+      number=8, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[_UNINTERPRETEDOPTION_NAMEPART, ],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=3940,
+  serialized_end=4226,
+)
+
+
+_SOURCECODEINFO_LOCATION = _descriptor.Descriptor(
+  name='Location',
+  full_name='google.protobuf.SourceCodeInfo.Location',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='path', full_name='google.protobuf.SourceCodeInfo.Location.path', index=0,
+      number=1, type=5, cpp_type=1, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='span', full_name='google.protobuf.SourceCodeInfo.Location.span', index=1,
+      number=2, type=5, cpp_type=1, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='leading_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_comments', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='trailing_comments', full_name='google.protobuf.SourceCodeInfo.Location.trailing_comments', index=3,
+      number=4, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=4307,
+  serialized_end=4406,
+)
+
+_SOURCECODEINFO = _descriptor.Descriptor(
+  name='SourceCodeInfo',
+  full_name='google.protobuf.SourceCodeInfo',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='location', full_name='google.protobuf.SourceCodeInfo.location', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[_SOURCECODEINFO_LOCATION, ],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=4229,
+  serialized_end=4406,
+)
+
+_FILEDESCRIPTORSET.fields_by_name['file'].message_type = _FILEDESCRIPTORPROTO
+_FILEDESCRIPTORPROTO.fields_by_name['message_type'].message_type = _DESCRIPTORPROTO
+_FILEDESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO
+_FILEDESCRIPTORPROTO.fields_by_name['service'].message_type = _SERVICEDESCRIPTORPROTO
+_FILEDESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO
+_FILEDESCRIPTORPROTO.fields_by_name['options'].message_type = _FILEOPTIONS
+_FILEDESCRIPTORPROTO.fields_by_name['source_code_info'].message_type = _SOURCECODEINFO
+_DESCRIPTORPROTO_EXTENSIONRANGE.containing_type = _DESCRIPTORPROTO
+_DESCRIPTORPROTO.fields_by_name['field'].message_type = _FIELDDESCRIPTORPROTO
+_DESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO
+_DESCRIPTORPROTO.fields_by_name['nested_type'].message_type = _DESCRIPTORPROTO
+_DESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO
+_DESCRIPTORPROTO.fields_by_name['extension_range'].message_type = _DESCRIPTORPROTO_EXTENSIONRANGE
+_DESCRIPTORPROTO.fields_by_name['oneof_decl'].message_type = _ONEOFDESCRIPTORPROTO
+_DESCRIPTORPROTO.fields_by_name['options'].message_type = _MESSAGEOPTIONS
+_FIELDDESCRIPTORPROTO.fields_by_name['label'].enum_type = _FIELDDESCRIPTORPROTO_LABEL
+_FIELDDESCRIPTORPROTO.fields_by_name['type'].enum_type = _FIELDDESCRIPTORPROTO_TYPE
+_FIELDDESCRIPTORPROTO.fields_by_name['options'].message_type = _FIELDOPTIONS
+_FIELDDESCRIPTORPROTO_TYPE.containing_type = _FIELDDESCRIPTORPROTO
+_FIELDDESCRIPTORPROTO_LABEL.containing_type = _FIELDDESCRIPTORPROTO
+_ENUMDESCRIPTORPROTO.fields_by_name['value'].message_type = _ENUMVALUEDESCRIPTORPROTO
+_ENUMDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMOPTIONS
+_ENUMVALUEDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMVALUEOPTIONS
+_SERVICEDESCRIPTORPROTO.fields_by_name['method'].message_type = _METHODDESCRIPTORPROTO
+_SERVICEDESCRIPTORPROTO.fields_by_name['options'].message_type = _SERVICEOPTIONS
+_METHODDESCRIPTORPROTO.fields_by_name['options'].message_type = _METHODOPTIONS
+_FILEOPTIONS.fields_by_name['optimize_for'].enum_type = _FILEOPTIONS_OPTIMIZEMODE
+_FILEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+_FILEOPTIONS_OPTIMIZEMODE.containing_type = _FILEOPTIONS
+_MESSAGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+_FIELDOPTIONS.fields_by_name['ctype'].enum_type = _FIELDOPTIONS_CTYPE
+_FIELDOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+_FIELDOPTIONS_CTYPE.containing_type = _FIELDOPTIONS
+_ENUMOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+_ENUMVALUEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+_SERVICEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+_METHODOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+_UNINTERPRETEDOPTION_NAMEPART.containing_type = _UNINTERPRETEDOPTION
+_UNINTERPRETEDOPTION.fields_by_name['name'].message_type = _UNINTERPRETEDOPTION_NAMEPART
+_SOURCECODEINFO_LOCATION.containing_type = _SOURCECODEINFO
+_SOURCECODEINFO.fields_by_name['location'].message_type = _SOURCECODEINFO_LOCATION
+DESCRIPTOR.message_types_by_name['FileDescriptorSet'] = _FILEDESCRIPTORSET
+DESCRIPTOR.message_types_by_name['FileDescriptorProto'] = _FILEDESCRIPTORPROTO
+DESCRIPTOR.message_types_by_name['DescriptorProto'] = _DESCRIPTORPROTO
+DESCRIPTOR.message_types_by_name['FieldDescriptorProto'] = _FIELDDESCRIPTORPROTO
+DESCRIPTOR.message_types_by_name['OneofDescriptorProto'] = _ONEOFDESCRIPTORPROTO
+DESCRIPTOR.message_types_by_name['EnumDescriptorProto'] = _ENUMDESCRIPTORPROTO
+DESCRIPTOR.message_types_by_name['EnumValueDescriptorProto'] = _ENUMVALUEDESCRIPTORPROTO
+DESCRIPTOR.message_types_by_name['ServiceDescriptorProto'] = _SERVICEDESCRIPTORPROTO
+DESCRIPTOR.message_types_by_name['MethodDescriptorProto'] = _METHODDESCRIPTORPROTO
+DESCRIPTOR.message_types_by_name['FileOptions'] = _FILEOPTIONS
+DESCRIPTOR.message_types_by_name['MessageOptions'] = _MESSAGEOPTIONS
+DESCRIPTOR.message_types_by_name['FieldOptions'] = _FIELDOPTIONS
+DESCRIPTOR.message_types_by_name['EnumOptions'] = _ENUMOPTIONS
+DESCRIPTOR.message_types_by_name['EnumValueOptions'] = _ENUMVALUEOPTIONS
+DESCRIPTOR.message_types_by_name['ServiceOptions'] = _SERVICEOPTIONS
+DESCRIPTOR.message_types_by_name['MethodOptions'] = _METHODOPTIONS
+DESCRIPTOR.message_types_by_name['UninterpretedOption'] = _UNINTERPRETEDOPTION
+DESCRIPTOR.message_types_by_name['SourceCodeInfo'] = _SOURCECODEINFO
+
+FileDescriptorSet = _reflection.GeneratedProtocolMessageType('FileDescriptorSet', (_message.Message,), dict(
+  DESCRIPTOR = _FILEDESCRIPTORSET,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.FileDescriptorSet)
+  ))
+_sym_db.RegisterMessage(FileDescriptorSet)
+
+FileDescriptorProto = _reflection.GeneratedProtocolMessageType('FileDescriptorProto', (_message.Message,), dict(
+  DESCRIPTOR = _FILEDESCRIPTORPROTO,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.FileDescriptorProto)
+  ))
+_sym_db.RegisterMessage(FileDescriptorProto)
+
+DescriptorProto = _reflection.GeneratedProtocolMessageType('DescriptorProto', (_message.Message,), dict(
+
+  ExtensionRange = _reflection.GeneratedProtocolMessageType('ExtensionRange', (_message.Message,), dict(
+    DESCRIPTOR = _DESCRIPTORPROTO_EXTENSIONRANGE,
+    __module__ = 'google.protobuf.descriptor_pb2'
+    # @@protoc_insertion_point(class_scope:google.protobuf.DescriptorProto.ExtensionRange)
+    ))
+  ,
+  DESCRIPTOR = _DESCRIPTORPROTO,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.DescriptorProto)
+  ))
+_sym_db.RegisterMessage(DescriptorProto)
+_sym_db.RegisterMessage(DescriptorProto.ExtensionRange)
+
+FieldDescriptorProto = _reflection.GeneratedProtocolMessageType('FieldDescriptorProto', (_message.Message,), dict(
+  DESCRIPTOR = _FIELDDESCRIPTORPROTO,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.FieldDescriptorProto)
+  ))
+_sym_db.RegisterMessage(FieldDescriptorProto)
+
+OneofDescriptorProto = _reflection.GeneratedProtocolMessageType('OneofDescriptorProto', (_message.Message,), dict(
+  DESCRIPTOR = _ONEOFDESCRIPTORPROTO,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.OneofDescriptorProto)
+  ))
+_sym_db.RegisterMessage(OneofDescriptorProto)
+
+EnumDescriptorProto = _reflection.GeneratedProtocolMessageType('EnumDescriptorProto', (_message.Message,), dict(
+  DESCRIPTOR = _ENUMDESCRIPTORPROTO,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.EnumDescriptorProto)
+  ))
+_sym_db.RegisterMessage(EnumDescriptorProto)
+
+EnumValueDescriptorProto = _reflection.GeneratedProtocolMessageType('EnumValueDescriptorProto', (_message.Message,), dict(
+  DESCRIPTOR = _ENUMVALUEDESCRIPTORPROTO,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.EnumValueDescriptorProto)
+  ))
+_sym_db.RegisterMessage(EnumValueDescriptorProto)
+
+ServiceDescriptorProto = _reflection.GeneratedProtocolMessageType('ServiceDescriptorProto', (_message.Message,), dict(
+  DESCRIPTOR = _SERVICEDESCRIPTORPROTO,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.ServiceDescriptorProto)
+  ))
+_sym_db.RegisterMessage(ServiceDescriptorProto)
+
+MethodDescriptorProto = _reflection.GeneratedProtocolMessageType('MethodDescriptorProto', (_message.Message,), dict(
+  DESCRIPTOR = _METHODDESCRIPTORPROTO,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.MethodDescriptorProto)
+  ))
+_sym_db.RegisterMessage(MethodDescriptorProto)
+
+FileOptions = _reflection.GeneratedProtocolMessageType('FileOptions', (_message.Message,), dict(
+  DESCRIPTOR = _FILEOPTIONS,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.FileOptions)
+  ))
+_sym_db.RegisterMessage(FileOptions)
+
+MessageOptions = _reflection.GeneratedProtocolMessageType('MessageOptions', (_message.Message,), dict(
+  DESCRIPTOR = _MESSAGEOPTIONS,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.MessageOptions)
+  ))
+_sym_db.RegisterMessage(MessageOptions)
+
+FieldOptions = _reflection.GeneratedProtocolMessageType('FieldOptions', (_message.Message,), dict(
+  DESCRIPTOR = _FIELDOPTIONS,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.FieldOptions)
+  ))
+_sym_db.RegisterMessage(FieldOptions)
+
+EnumOptions = _reflection.GeneratedProtocolMessageType('EnumOptions', (_message.Message,), dict(
+  DESCRIPTOR = _ENUMOPTIONS,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.EnumOptions)
+  ))
+_sym_db.RegisterMessage(EnumOptions)
+
+EnumValueOptions = _reflection.GeneratedProtocolMessageType('EnumValueOptions', (_message.Message,), dict(
+  DESCRIPTOR = _ENUMVALUEOPTIONS,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.EnumValueOptions)
+  ))
+_sym_db.RegisterMessage(EnumValueOptions)
+
+ServiceOptions = _reflection.GeneratedProtocolMessageType('ServiceOptions', (_message.Message,), dict(
+  DESCRIPTOR = _SERVICEOPTIONS,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.ServiceOptions)
+  ))
+_sym_db.RegisterMessage(ServiceOptions)
+
+MethodOptions = _reflection.GeneratedProtocolMessageType('MethodOptions', (_message.Message,), dict(
+  DESCRIPTOR = _METHODOPTIONS,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.MethodOptions)
+  ))
+_sym_db.RegisterMessage(MethodOptions)
+
+UninterpretedOption = _reflection.GeneratedProtocolMessageType('UninterpretedOption', (_message.Message,), dict(
+
+  NamePart = _reflection.GeneratedProtocolMessageType('NamePart', (_message.Message,), dict(
+    DESCRIPTOR = _UNINTERPRETEDOPTION_NAMEPART,
+    __module__ = 'google.protobuf.descriptor_pb2'
+    # @@protoc_insertion_point(class_scope:google.protobuf.UninterpretedOption.NamePart)
+    ))
+  ,
+  DESCRIPTOR = _UNINTERPRETEDOPTION,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.UninterpretedOption)
+  ))
+_sym_db.RegisterMessage(UninterpretedOption)
+_sym_db.RegisterMessage(UninterpretedOption.NamePart)
+
+SourceCodeInfo = _reflection.GeneratedProtocolMessageType('SourceCodeInfo', (_message.Message,), dict(
+
+  Location = _reflection.GeneratedProtocolMessageType('Location', (_message.Message,), dict(
+    DESCRIPTOR = _SOURCECODEINFO_LOCATION,
+    __module__ = 'google.protobuf.descriptor_pb2'
+    # @@protoc_insertion_point(class_scope:google.protobuf.SourceCodeInfo.Location)
+    ))
+  ,
+  DESCRIPTOR = _SOURCECODEINFO,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.SourceCodeInfo)
+  ))
+_sym_db.RegisterMessage(SourceCodeInfo)
+_sym_db.RegisterMessage(SourceCodeInfo.Location)
+
+
+# @@protoc_insertion_point(module_scope)
diff --git a/third_party/protobuf26/descriptor_pool.py b/third_party/protobuf26/descriptor_pool.py
new file mode 100644
index 0000000..882fd9f
--- /dev/null
+++ b/third_party/protobuf26/descriptor_pool.py
@@ -0,0 +1,643 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc.  All rights reserved.
+# http://code.google.com/p/protobuf/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Provides DescriptorPool to use as a container for proto2 descriptors.
+
+The DescriptorPool is used in conjunction with a DescriptorDatabase to maintain
+a collection of protocol buffer descriptors for use when dynamically creating
+message types at runtime.
+
+For most applications protocol buffers should be used via modules generated by
+the protocol buffer compiler tool. This should only be used when the type of
+protocol buffers used in an application or library cannot be predetermined.
+
+Below is a straightforward example on how to use this class:
+
+  pool = DescriptorPool()
+  file_descriptor_protos = [ ... ]
+  for file_descriptor_proto in file_descriptor_protos:
+    pool.Add(file_descriptor_proto)
+  my_message_descriptor = pool.FindMessageTypeByName('some.package.MessageType')
+
+The message descriptor can be used in conjunction with the message_factory
+module in order to create a protocol buffer class that can be encoded and
+decoded.
+
+If you want to get a Python class for the specified proto, use the
+helper functions inside google.protobuf.message_factory
+directly instead of this class.
+"""
+
+__author__ = 'matthewtoia@google.com (Matt Toia)'
+
+import sys
+
+from protobuf26 import descriptor
+from protobuf26 import descriptor_database
+from protobuf26 import text_encoding
+
+
+def _NormalizeFullyQualifiedName(name):
+  """Remove leading period from fully-qualified type name.
+
+  Due to b/13860351 in descriptor_database.py, types in the root namespace are
+  generated with a leading period. This function removes that prefix.
+
+  Args:
+    name: A str, the fully-qualified symbol name.
+
+  Returns:
+    A str, the normalized fully-qualified symbol name.
+  """
+  return name.lstrip('.')
+
+
+class DescriptorPool(object):
+  """A collection of protobufs dynamically constructed by descriptor protos."""
+
+  def __init__(self, descriptor_db=None):
+    """Initializes a pool of protocol buffers.
+
+    The descriptor_db argument to the constructor is provided to allow
+    specialized file descriptor proto lookup code to be triggered on demand. An
+    example would be an implementation which will read and compile a file
+    specified in a call to FindFileByName() and not require the call to Add()
+    at all. Results from this database will be cached internally here as well.
+
+    Args:
+      descriptor_db: A secondary source of file descriptors.
+    """
+
+    self._internal_db = descriptor_database.DescriptorDatabase()
+    self._descriptor_db = descriptor_db
+    self._descriptors = {}
+    self._enum_descriptors = {}
+    self._file_descriptors = {}
+
+  def Add(self, file_desc_proto):
+    """Adds the FileDescriptorProto and its types to this pool.
+
+    Args:
+      file_desc_proto: The FileDescriptorProto to add.
+    """
+
+    self._internal_db.Add(file_desc_proto)
+
+  def AddDescriptor(self, desc):
+    """Adds a Descriptor to the pool, non-recursively.
+
+    If the Descriptor contains nested messages or enums, the caller must
+    explicitly register them. This method also registers the FileDescriptor
+    associated with the message.
+
+    Args:
+      desc: A Descriptor.
+    """
+    if not isinstance(desc, descriptor.Descriptor):
+      raise TypeError('Expected instance of descriptor.Descriptor.')
+
+    self._descriptors[desc.full_name] = desc
+    self.AddFileDescriptor(desc.file)
+
+  def AddEnumDescriptor(self, enum_desc):
+    """Adds an EnumDescriptor to the pool.
+
+    This method also registers the FileDescriptor associated with the message.
+
+    Args:
+      enum_desc: An EnumDescriptor.
+    """
+
+    if not isinstance(enum_desc, descriptor.EnumDescriptor):
+      raise TypeError('Expected instance of descriptor.EnumDescriptor.')
+
+    self._enum_descriptors[enum_desc.full_name] = enum_desc
+    self.AddFileDescriptor(enum_desc.file)
+
+  def AddFileDescriptor(self, file_desc):
+    """Adds a FileDescriptor to the pool, non-recursively.
+
+    If the FileDescriptor contains messages or enums, the caller must explicitly
+    register them.
+
+    Args:
+      file_desc: A FileDescriptor.
+    """
+
+    if not isinstance(file_desc, descriptor.FileDescriptor):
+      raise TypeError('Expected instance of descriptor.FileDescriptor.')
+    self._file_descriptors[file_desc.name] = file_desc
+
+  def FindFileByName(self, file_name):
+    """Gets a FileDescriptor by file name.
+
+    Args:
+      file_name: The path to the file to get a descriptor for.
+
+    Returns:
+      A FileDescriptor for the named file.
+
+    Raises:
+      KeyError: if the file can not be found in the pool.
+    """
+
+    try:
+      return self._file_descriptors[file_name]
+    except KeyError:
+      pass
+
+    try:
+      file_proto = self._internal_db.FindFileByName(file_name)
+    except KeyError:
+      _, error, _ = sys.exc_info()  #PY25 compatible for GAE.
+      if self._descriptor_db:
+        file_proto = self._descriptor_db.FindFileByName(file_name)
+      else:
+        raise error
+    if not file_proto:
+      raise KeyError('Cannot find a file named %s' % file_name)
+    return self._ConvertFileProtoToFileDescriptor(file_proto)
+
+  def FindFileContainingSymbol(self, symbol):
+    """Gets the FileDescriptor for the file containing the specified symbol.
+
+    Args:
+      symbol: The name of the symbol to search for.
+
+    Returns:
+      A FileDescriptor that contains the specified symbol.
+
+    Raises:
+      KeyError: if the file can not be found in the pool.
+    """
+
+    symbol = _NormalizeFullyQualifiedName(symbol)
+    try:
+      return self._descriptors[symbol].file
+    except KeyError:
+      pass
+
+    try:
+      return self._enum_descriptors[symbol].file
+    except KeyError:
+      pass
+
+    try:
+      file_proto = self._internal_db.FindFileContainingSymbol(symbol)
+    except KeyError:
+      _, error, _ = sys.exc_info()  #PY25 compatible for GAE.
+      if self._descriptor_db:
+        file_proto = self._descriptor_db.FindFileContainingSymbol(symbol)
+      else:
+        raise error
+    if not file_proto:
+      raise KeyError('Cannot find a file containing %s' % symbol)
+    return self._ConvertFileProtoToFileDescriptor(file_proto)
+
+  def FindMessageTypeByName(self, full_name):
+    """Loads the named descriptor from the pool.
+
+    Args:
+      full_name: The full name of the descriptor to load.
+
+    Returns:
+      The descriptor for the named type.
+    """
+
+    full_name = _NormalizeFullyQualifiedName(full_name)
+    if full_name not in self._descriptors:
+      self.FindFileContainingSymbol(full_name)
+    return self._descriptors[full_name]
+
+  def FindEnumTypeByName(self, full_name):
+    """Loads the named enum descriptor from the pool.
+
+    Args:
+      full_name: The full name of the enum descriptor to load.
+
+    Returns:
+      The enum descriptor for the named type.
+    """
+
+    full_name = _NormalizeFullyQualifiedName(full_name)
+    if full_name not in self._enum_descriptors:
+      self.FindFileContainingSymbol(full_name)
+    return self._enum_descriptors[full_name]
+
+  def _ConvertFileProtoToFileDescriptor(self, file_proto):
+    """Creates a FileDescriptor from a proto or returns a cached copy.
+
+    This method also has the side effect of loading all the symbols found in
+    the file into the appropriate dictionaries in the pool.
+
+    Args:
+      file_proto: The proto to convert.
+
+    Returns:
+      A FileDescriptor matching the passed in proto.
+    """
+
+    if file_proto.name not in self._file_descriptors:
+      built_deps = list(self._GetDeps(file_proto.dependency))
+      direct_deps = [self.FindFileByName(n) for n in file_proto.dependency]
+
+      file_descriptor = descriptor.FileDescriptor(
+          name=file_proto.name,
+          package=file_proto.package,
+          options=file_proto.options,
+          serialized_pb=file_proto.SerializeToString(),
+          dependencies=direct_deps)
+      scope = {}
+
+      # This loop extracts all the message and enum types from all the
+      # dependencies of the file_proto. This is necessary to create the
+      # scope of available message types when defining the passed in
+      # file proto.
+      for dependency in built_deps:
+        scope.update(self._ExtractSymbols(
+            dependency.message_types_by_name.values()))
+        scope.update((_PrefixWithDot(enum.full_name), enum)
+                     for enum in dependency.enum_types_by_name.values())
+
+      for message_type in file_proto.message_type:
+        message_desc = self._ConvertMessageDescriptor(
+            message_type, file_proto.package, file_descriptor, scope)
+        file_descriptor.message_types_by_name[message_desc.name] = message_desc
+
+      for enum_type in file_proto.enum_type:
+        file_descriptor.enum_types_by_name[enum_type.name] = (
+            self._ConvertEnumDescriptor(enum_type, file_proto.package,
+                                        file_descriptor, None, scope))
+
+      for index, extension_proto in enumerate(file_proto.extension):
+        extension_desc = self.MakeFieldDescriptor(
+            extension_proto, file_proto.package, index, is_extension=True)
+        extension_desc.containing_type = self._GetTypeFromScope(
+            file_descriptor.package, extension_proto.extendee, scope)
+        self.SetFieldType(extension_proto, extension_desc,
+                          file_descriptor.package, scope)
+        file_descriptor.extensions_by_name[extension_desc.name] = extension_desc
+
+      for desc_proto in file_proto.message_type:
+        self.SetAllFieldTypes(file_proto.package, desc_proto, scope)
+
+      if file_proto.package:
+        desc_proto_prefix = _PrefixWithDot(file_proto.package)
+      else:
+        desc_proto_prefix = ''
+
+      for desc_proto in file_proto.message_type:
+        desc = self._GetTypeFromScope(desc_proto_prefix, desc_proto.name, scope)
+        file_descriptor.message_types_by_name[desc_proto.name] = desc
+      self.Add(file_proto)
+      self._file_descriptors[file_proto.name] = file_descriptor
+
+    return self._file_descriptors[file_proto.name]
+
+  def _ConvertMessageDescriptor(self, desc_proto, package=None, file_desc=None,
+                                scope=None):
+    """Adds the proto to the pool in the specified package.
+
+    Args:
+      desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
+      package: The package the proto should be located in.
+      file_desc: The file containing this message.
+      scope: Dict mapping short and full symbols to message and enum types.
+
+    Returns:
+      The added descriptor.
+    """
+
+    if package:
+      desc_name = '.'.join((package, desc_proto.name))
+    else:
+      desc_name = desc_proto.name
+
+    if file_desc is None:
+      file_name = None
+    else:
+      file_name = file_desc.name
+
+    if scope is None:
+      scope = {}
+
+    nested = [
+        self._ConvertMessageDescriptor(nested, desc_name, file_desc, scope)
+        for nested in desc_proto.nested_type]
+    enums = [
+        self._ConvertEnumDescriptor(enum, desc_name, file_desc, None, scope)
+        for enum in desc_proto.enum_type]
+    fields = [self.MakeFieldDescriptor(field, desc_name, index)
+              for index, field in enumerate(desc_proto.field)]
+    extensions = [
+        self.MakeFieldDescriptor(extension, desc_name, index, is_extension=True)
+        for index, extension in enumerate(desc_proto.extension)]
+    oneofs = [
+        descriptor.OneofDescriptor(desc.name, '.'.join((desc_name, desc.name)),
+                                   index, None, [])
+        for index, desc in enumerate(desc_proto.oneof_decl)]
+    extension_ranges = [(r.start, r.end) for r in desc_proto.extension_range]
+    if extension_ranges:
+      is_extendable = True
+    else:
+      is_extendable = False
+    desc = descriptor.Descriptor(
+        name=desc_proto.name,
+        full_name=desc_name,
+        filename=file_name,
+        containing_type=None,
+        fields=fields,
+        oneofs=oneofs,
+        nested_types=nested,
+        enum_types=enums,
+        extensions=extensions,
+        options=desc_proto.options,
+        is_extendable=is_extendable,
+        extension_ranges=extension_ranges,
+        file=file_desc,
+        serialized_start=None,
+        serialized_end=None)
+    for nested in desc.nested_types:
+      nested.containing_type = desc
+    for enum in desc.enum_types:
+      enum.containing_type = desc
+    for field_index, field_desc in enumerate(desc_proto.field):
+      if field_desc.HasField('oneof_index'):
+        oneof_index = field_desc.oneof_index
+        oneofs[oneof_index].fields.append(fields[field_index])
+        fields[field_index].containing_oneof = oneofs[oneof_index]
+
+    scope[_PrefixWithDot(desc_name)] = desc
+    self._descriptors[desc_name] = desc
+    return desc
+
+  def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None,
+                             containing_type=None, scope=None):
+    """Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf.
+
+    Args:
+      enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message.
+      package: Optional package name for the new message EnumDescriptor.
+      file_desc: The file containing the enum descriptor.
+      containing_type: The type containing this enum.
+      scope: Scope containing available types.
+
+    Returns:
+      The added descriptor
+    """
+
+    if package:
+      enum_name = '.'.join((package, enum_proto.name))
+    else:
+      enum_name = enum_proto.name
+
+    if file_desc is None:
+      file_name = None
+    else:
+      file_name = file_desc.name
+
+    values = [self._MakeEnumValueDescriptor(value, index)
+              for index, value in enumerate(enum_proto.value)]
+    desc = descriptor.EnumDescriptor(name=enum_proto.name,
+                                     full_name=enum_name,
+                                     filename=file_name,
+                                     file=file_desc,
+                                     values=values,
+                                     containing_type=containing_type,
+                                     options=enum_proto.options)
+    scope['.%s' % enum_name] = desc
+    self._enum_descriptors[enum_name] = desc
+    return desc
+
+  def MakeFieldDescriptor(self, field_proto, message_name, index,
+                          is_extension=False):
+    """Creates a field descriptor from a FieldDescriptorProto.
+
+    For message and enum type fields, this method will do a look up
+    in the pool for the appropriate descriptor for that type. If it
+    is unavailable, it will fall back to the _source function to
+    create it. If this type is still unavailable, construction will
+    fail.
+
+    Args:
+      field_proto: The proto describing the field.
+      message_name: The name of the containing message.
+      index: Index of the field
+      is_extension: Indication that this field is for an extension.
+
+    Returns:
+      An initialized FieldDescriptor object
+    """
+
+    if message_name:
+      full_name = '.'.join((message_name, field_proto.name))
+    else:
+      full_name = field_proto.name
+
+    return descriptor.FieldDescriptor(
+        name=field_proto.name,
+        full_name=full_name,
+        index=index,
+        number=field_proto.number,
+        type=field_proto.type,
+        cpp_type=None,
+        message_type=None,
+        enum_type=None,
+        containing_type=None,
+        label=field_proto.label,
+        has_default_value=False,
+        default_value=None,
+        is_extension=is_extension,
+        extension_scope=None,
+        options=field_proto.options)
+
+  def SetAllFieldTypes(self, package, desc_proto, scope):
+    """Sets all the descriptor's fields' types.
+
+    This method also sets the containing types on any extensions.
+
+    Args:
+      package: The current package of desc_proto.
+      desc_proto: The message descriptor to update.
+      scope: Enclosing scope of available types.
+    """
+
+    package = _PrefixWithDot(package)
+
+    main_desc = self._GetTypeFromScope(package, desc_proto.name, scope)
+
+    if package == '.':
+      nested_package = _PrefixWithDot(desc_proto.name)
+    else:
+      nested_package = '.'.join([package, desc_proto.name])
+
+    for field_proto, field_desc in zip(desc_proto.field, main_desc.fields):
+      self.SetFieldType(field_proto, field_desc, nested_package, scope)
+
+    for extension_proto, extension_desc in (
+        zip(desc_proto.extension, main_desc.extensions)):
+      extension_desc.containing_type = self._GetTypeFromScope(
+          nested_package, extension_proto.extendee, scope)
+      self.SetFieldType(extension_proto, extension_desc, nested_package, scope)
+
+    for nested_type in desc_proto.nested_type:
+      self.SetAllFieldTypes(nested_package, nested_type, scope)
+
+  def SetFieldType(self, field_proto, field_desc, package, scope):
+    """Sets the field's type, cpp_type, message_type and enum_type.
+
+    Args:
+      field_proto: Data about the field in proto format.
+      field_desc: The descriptor to modify.
+      package: The package the field's container is in.
+      scope: Enclosing scope of available types.
+    """
+    if field_proto.type_name:
+      desc = self._GetTypeFromScope(package, field_proto.type_name, scope)
+    else:
+      desc = None
+
+    if not field_proto.HasField('type'):
+      if isinstance(desc, descriptor.Descriptor):
+        field_proto.type = descriptor.FieldDescriptor.TYPE_MESSAGE
+      else:
+        field_proto.type = descriptor.FieldDescriptor.TYPE_ENUM
+
+    field_desc.cpp_type = descriptor.FieldDescriptor.ProtoTypeToCppProtoType(
+        field_proto.type)
+
+    if (field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE
+        or field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP):
+      field_desc.message_type = desc
+
+    if field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
+      field_desc.enum_type = desc
+
+    if field_proto.label == descriptor.FieldDescriptor.LABEL_REPEATED:
+      field_desc.has_default_value = False
+      field_desc.default_value = []
+    elif field_proto.HasField('default_value'):
+      field_desc.has_default_value = True
+      if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
+          field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
+        field_desc.default_value = float(field_proto.default_value)
+      elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
+        field_desc.default_value = field_proto.default_value
+      elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
+        field_desc.default_value = field_proto.default_value.lower() == 'true'
+      elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
+        field_desc.default_value = field_desc.enum_type.values_by_name[
+            field_proto.default_value].index
+      elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
+        field_desc.default_value = text_encoding.CUnescape(
+            field_proto.default_value)
+      else:
+        field_desc.default_value = int(field_proto.default_value)
+    else:
+      field_desc.has_default_value = False
+      field_desc.default_value = None
+
+    field_desc.type = field_proto.type
+
+  def _MakeEnumValueDescriptor(self, value_proto, index):
+    """Creates an enum value descriptor object from an enum value proto.
+
+    Args:
+      value_proto: The proto describing the enum value.
+      index: The index of the enum value.
+
+    Returns:
+      An initialized EnumValueDescriptor object.
+    """
+
+    return descriptor.EnumValueDescriptor(
+        name=value_proto.name,
+        index=index,
+        number=value_proto.number,
+        options=value_proto.options,
+        type=None)
+
+  def _ExtractSymbols(self, descriptors):
+    """Pulls out all the symbols from descriptor protos.
+
+    Args:
+      descriptors: The messages to extract descriptors from.
+    Yields:
+      A two element tuple of the type name and descriptor object.
+    """
+
+    for desc in descriptors:
+      yield (_PrefixWithDot(desc.full_name), desc)
+      for symbol in self._ExtractSymbols(desc.nested_types):
+        yield symbol
+      for enum in desc.enum_types:
+        yield (_PrefixWithDot(enum.full_name), enum)
+
+  def _GetDeps(self, dependencies):
+    """Recursively finds dependencies for file protos.
+
+    Args:
+      dependencies: The names of the files being depended on.
+
+    Yields:
+      Each direct and indirect dependency.
+    """
+
+    for dependency in dependencies:
+      dep_desc = self.FindFileByName(dependency)
+      yield dep_desc
+      for parent_dep in dep_desc.dependencies:
+        yield parent_dep
+
+  def _GetTypeFromScope(self, package, type_name, scope):
+    """Finds a given type name in the current scope.
+
+    Args:
+      package: The package the proto should be located in.
+      type_name: The name of the type to be found in the scope.
+      scope: Dict mapping short and full symbols to message and enum types.
+
+    Returns:
+      The descriptor for the requested type.
+    """
+    if type_name not in scope:
+      components = _PrefixWithDot(package).split('.')
+      while components:
+        possible_match = '.'.join(components + [type_name])
+        if possible_match in scope:
+          type_name = possible_match
+          break
+        else:
+          components.pop(-1)
+    return scope[type_name]
+
+
+def _PrefixWithDot(name):
+  return name if name.startswith('.') else '.%s' % name
diff --git a/third_party/protobuf26/internal/__init__.py b/third_party/protobuf26/internal/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/third_party/protobuf26/internal/__init__.py
diff --git a/third_party/protobuf26/internal/api_implementation.py b/third_party/protobuf26/internal/api_implementation.py
new file mode 100644
index 0000000..b5615d1
--- /dev/null
+++ b/third_party/protobuf26/internal/api_implementation.py
@@ -0,0 +1,89 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc.  All rights reserved.
+# http://code.google.com/p/protobuf/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Determine which implementation of the protobuf API is used in this process.
+"""
+
+import os
+import sys
+
+try:
+  # pylint: disable=g-import-not-at-top
+  from protobuf26.internal import _api_implementation
+  # The compile-time constants in the _api_implementation module can be used to
+  # switch to a certain implementation of the Python API at build time.
+  _api_version = _api_implementation.api_version
+  del _api_implementation
+except ImportError:
+  _api_version = 0
+
+_default_implementation_type = (
+    'python' if _api_version == 0 else 'cpp')
+_default_version_str = (
+    '1' if _api_version <= 1 else '2')
+
+# This environment variable can be used to switch to a certain implementation
+# of the Python API, overriding the compile-time constants in the
+# _api_implementation module. Right now only 'python' and 'cpp' are valid
+# values. Any other value will be ignored.
+_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION',
+                                 _default_implementation_type)
+
+if _implementation_type != 'python':
+  _implementation_type = 'cpp'
+
+# This environment variable can be used to switch between the two
+# 'cpp' implementations, overriding the compile-time constants in the
+# _api_implementation module. Right now only 1 and 2 are valid values. Any other
+# value will be ignored.
+_implementation_version_str = os.getenv(
+    'PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION',
+    _default_version_str)
+
+if _implementation_version_str not in ('1', '2'):
+  raise ValueError(
+      "unsupported PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION: '" +
+      _implementation_version_str + "' (supported versions: 1, 2)"
+      )
+
+_implementation_version = int(_implementation_version_str)
+
+
+# Usage of this function is discouraged. Clients shouldn't care which
+# implementation of the API is in use. Note that there is no guarantee
+# that differences between APIs will be maintained.
+# Please don't use this function if possible.
+def Type():
+  return _implementation_type
+
+
+# See comment on 'Type' above.
+def Version():
+  return _implementation_version
diff --git a/third_party/protobuf26/internal/containers.py b/third_party/protobuf26/internal/containers.py
new file mode 100644
index 0000000..5797e81
--- /dev/null
+++ b/third_party/protobuf26/internal/containers.py
@@ -0,0 +1,269 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc.  All rights reserved.
+# http://code.google.com/p/protobuf/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Contains container classes to represent different protocol buffer types.
+
+This file defines container classes which represent categories of protocol
+buffer field types which need extra maintenance. Currently these categories
+are:
+  - Repeated scalar fields - These are all repeated fields which aren't
+    composite (e.g. they are of simple types like int32, string, etc).
+  - Repeated composite fields - Repeated fields which are composite. This
+    includes groups and nested messages.
+"""
+
+__author__ = 'petar@google.com (Petar Petrov)'
+
+
+class BaseContainer(object):
+
+  """Base container class."""
+
+  # Minimizes memory usage and disallows assignment to other attributes.
+  __slots__ = ['_message_listener', '_values']
+
+  def __init__(self, message_listener):
+    """
+    Args:
+      message_listener: A MessageListener implementation.
+        The RepeatedScalarFieldContainer will call this object's
+        Modified() method when it is modified.
+    """
+    self._message_listener = message_listener
+    self._values = []
+
+  def __getitem__(self, key):
+    """Retrieves item by the specified key."""
+    return self._values[key]
+
+  def __len__(self):
+    """Returns the number of elements in the container."""
+    return len(self._values)
+
+  def __ne__(self, other):
+    """Checks if another instance isn't equal to this one."""
+    # The concrete classes should define __eq__.
+    return not self == other
+
+  def __hash__(self):
+    raise TypeError('unhashable object')
+
+  def __repr__(self):
+    return repr(self._values)
+
+  def sort(self, *args, **kwargs):
+    # Continue to support the old sort_function keyword argument.
+    # This is expected to be a rare occurrence, so use LBYL to avoid
+    # the overhead of actually catching KeyError.
+    if 'sort_function' in kwargs:
+      kwargs['cmp'] = kwargs.pop('sort_function')
+    self._values.sort(*args, **kwargs)
+
+
+class RepeatedScalarFieldContainer(BaseContainer):
+
+  """Simple, type-checked, list-like container for holding repeated scalars."""
+
+  # Disallows assignment to other attributes.
+  __slots__ = ['_type_checker']
+
+  def __init__(self, message_listener, type_checker):
+    """
+    Args:
+      message_listener: A MessageListener implementation.
+        The RepeatedScalarFieldContainer will call this object's
+        Modified() method when it is modified.
+      type_checker: A type_checkers.ValueChecker instance to run on elements
+        inserted into this container.
+    """
+    super(RepeatedScalarFieldContainer, self).__init__(message_listener)
+    self._type_checker = type_checker
+
+  def append(self, value):
+    """Appends an item to the list. Similar to list.append()."""
+    self._values.append(self._type_checker.CheckValue(value))
+    if not self._message_listener.dirty:
+      self._message_listener.Modified()
+
+  def insert(self, key, value):
+    """Inserts the item at the specified position. Similar to list.insert()."""
+    self._values.insert(key, self._type_checker.CheckValue(value))
+    if not self._message_listener.dirty:
+      self._message_listener.Modified()
+
+  def extend(self, elem_seq):
+    """Extends by appending the given sequence. Similar to list.extend()."""
+    if not elem_seq:
+      return
+
+    new_values = []
+    for elem in elem_seq:
+      new_values.append(self._type_checker.CheckValue(elem))
+    self._values.extend(new_values)
+    self._message_listener.Modified()
+
+  def MergeFrom(self, other):
+    """Appends the contents of another repeated field of the same type to this
+    one. We do not check the types of the individual fields.
+    """
+    self._values.extend(other._values)
+    self._message_listener.Modified()
+
+  def remove(self, elem):
+    """Removes an item from the list. Similar to list.remove()."""
+    self._values.remove(elem)
+    self._message_listener.Modified()
+
+  def __setitem__(self, key, value):
+    """Sets the item on the specified position."""
+    if isinstance(key, slice):  # PY3
+      if key.step is not None:
+        raise ValueError('Extended slices not supported')
+      self.__setslice__(key.start, key.stop, value)
+    else:
+      self._values[key] = self._type_checker.CheckValue(value)
+      self._message_listener.Modified()
+
+  def __getslice__(self, start, stop):
+    """Retrieves the subset of items from between the specified indices."""
+    return self._values[start:stop]
+
+  def __setslice__(self, start, stop, values):
+    """Sets the subset of items from between the specified indices."""
+    new_values = []
+    for value in values:
+      new_values.append(self._type_checker.CheckValue(value))
+    self._values[start:stop] = new_values
+    self._message_listener.Modified()
+
+  def __delitem__(self, key):
+    """Deletes the item at the specified position."""
+    del self._values[key]
+    self._message_listener.Modified()
+
+  def __delslice__(self, start, stop):
+    """Deletes the subset of items from between the specified indices."""
+    del self._values[start:stop]
+    self._message_listener.Modified()
+
+  def __eq__(self, other):
+    """Compares the current instance with another one."""
+    if self is other:
+      return True
+    # Special case for the same type which should be common and fast.
+    if isinstance(other, self.__class__):
+      return other._values == self._values
+    # We are presumably comparing against some other sequence type.
+    return other == self._values
+
+
+class RepeatedCompositeFieldContainer(BaseContainer):
+
+  """Simple, list-like container for holding repeated composite fields."""
+
+  # Disallows assignment to other attributes.
+  __slots__ = ['_message_descriptor']
+
+  def __init__(self, message_listener, message_descriptor):
+    """
+    Note that we pass in a descriptor instead of the generated directly,
+    since at the time we construct a _RepeatedCompositeFieldContainer we
+    haven't yet necessarily initialized the type that will be contained in the
+    container.
+
+    Args:
+      message_listener: A MessageListener implementation.
+        The RepeatedCompositeFieldContainer will call this object's
+        Modified() method when it is modified.
+      message_descriptor: A Descriptor instance describing the protocol type
+        that should be present in this container.  We'll use the
+        _concrete_class field of this descriptor when the client calls add().
+    """
+    super(RepeatedCompositeFieldContainer, self).__init__(message_listener)
+    self._message_descriptor = message_descriptor
+
+  def add(self, **kwargs):
+    """Adds a new element at the end of the list and returns it. Keyword
+    arguments may be used to initialize the element.
+    """
+    new_element = self._message_descriptor._concrete_class(**kwargs)
+    new_element._SetListener(self._message_listener)
+    self._values.append(new_element)
+    if not self._message_listener.dirty:
+      self._message_listener.Modified()
+    return new_element
+
+  def extend(self, elem_seq):
+    """Extends by appending the given sequence of elements of the same type
+    as this one, copying each individual message.
+    """
+    message_class = self._message_descriptor._concrete_class
+    listener = self._message_listener
+    values = self._values
+    for message in elem_seq:
+      new_element = message_class()
+      new_element._SetListener(listener)
+      new_element.MergeFrom(message)
+      values.append(new_element)
+    listener.Modified()
+
+  def MergeFrom(self, other):
+    """Appends the contents of another repeated field of the same type to this
+    one, copying each individual message.
+    """
+    self.extend(other._values)
+
+  def remove(self, elem):
+    """Removes an item from the list. Similar to list.remove()."""
+    self._values.remove(elem)
+    self._message_listener.Modified()
+
+  def __getslice__(self, start, stop):
+    """Retrieves the subset of items from between the specified indices."""
+    return self._values[start:stop]
+
+  def __delitem__(self, key):
+    """Deletes the item at the specified position."""
+    del self._values[key]
+    self._message_listener.Modified()
+
+  def __delslice__(self, start, stop):
+    """Deletes the subset of items from between the specified indices."""
+    del self._values[start:stop]
+    self._message_listener.Modified()
+
+  def __eq__(self, other):
+    """Compares the current instance with another one."""
+    if self is other:
+      return True
+    if not isinstance(other, self.__class__):
+      raise TypeError('Can only compare repeated composite fields against '
+                      'other repeated composite fields.')
+    return self._values == other._values
diff --git a/third_party/protobuf26/internal/cpp_message.py b/third_party/protobuf26/internal/cpp_message.py
new file mode 100644
index 0000000..1904a04
--- /dev/null
+++ b/third_party/protobuf26/internal/cpp_message.py
@@ -0,0 +1,663 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc.  All rights reserved.
+# http://code.google.com/p/protobuf/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Contains helper functions used to create protocol message classes from
+Descriptor objects at runtime backed by the protocol buffer C++ API.
+"""
+
+__author__ = 'petar@google.com (Petar Petrov)'
+
+import copy_reg
+import operator
+from protobuf26.internal import _net_proto2___python
+from protobuf26.internal import enum_type_wrapper
+from protobuf26 import message
+
+
+_LABEL_REPEATED = _net_proto2___python.LABEL_REPEATED
+_LABEL_OPTIONAL = _net_proto2___python.LABEL_OPTIONAL
+_CPPTYPE_MESSAGE = _net_proto2___python.CPPTYPE_MESSAGE
+_TYPE_MESSAGE = _net_proto2___python.TYPE_MESSAGE
+
+
+def GetDescriptorPool():
+  """Creates a new DescriptorPool C++ object."""
+  return _net_proto2___python.NewCDescriptorPool()
+
+
+_pool = GetDescriptorPool()
+
+
+def GetFieldDescriptor(full_field_name):
+  """Searches for a field descriptor given a full field name."""
+  return _pool.FindFieldByName(full_field_name)
+
+
+def BuildFile(content):
+  """Registers a new proto file in the underlying C++ descriptor pool."""
+  _net_proto2___python.BuildFile(content)
+
+
+def GetExtensionDescriptor(full_extension_name):
+  """Searches for extension descriptor given a full field name."""
+  return _pool.FindExtensionByName(full_extension_name)
+
+
+def NewCMessage(full_message_name):
+  """Creates a new C++ protocol message by its name."""
+  return _net_proto2___python.NewCMessage(full_message_name)
+
+
+def ScalarProperty(cdescriptor):
+  """Returns a scalar property for the given descriptor."""
+
+  def Getter(self):
+    return self._cmsg.GetScalar(cdescriptor)
+
+  def Setter(self, value):
+    self._cmsg.SetScalar(cdescriptor, value)
+
+  return property(Getter, Setter)
+
+
+def CompositeProperty(cdescriptor, message_type):
+  """Returns a Python property the given composite field."""
+
+  def Getter(self):
+    sub_message = self._composite_fields.get(cdescriptor.name, None)
+    if sub_message is None:
+      cmessage = self._cmsg.NewSubMessage(cdescriptor)
+      sub_message = message_type._concrete_class(__cmessage=cmessage)
+      self._composite_fields[cdescriptor.name] = sub_message
+    return sub_message
+
+  return property(Getter)
+
+
+class RepeatedScalarContainer(object):
+  """Container for repeated scalar fields."""
+
+  __slots__ = ['_message', '_cfield_descriptor', '_cmsg']
+
+  def __init__(self, msg, cfield_descriptor):
+    self._message = msg
+    self._cmsg = msg._cmsg
+    self._cfield_descriptor = cfield_descriptor
+
+  def append(self, value):
+    self._cmsg.AddRepeatedScalar(
+        self._cfield_descriptor, value)
+
+  def extend(self, sequence):
+    for element in sequence:
+      self.append(element)
+
+  def insert(self, key, value):
+    values = self[slice(None, None, None)]
+    values.insert(key, value)
+    self._cmsg.AssignRepeatedScalar(self._cfield_descriptor, values)
+
+  def remove(self, value):
+    values = self[slice(None, None, None)]
+    values.remove(value)
+    self._cmsg.AssignRepeatedScalar(self._cfield_descriptor, values)
+
+  def __setitem__(self, key, value):
+    values = self[slice(None, None, None)]
+    values[key] = value
+    self._cmsg.AssignRepeatedScalar(self._cfield_descriptor, values)
+
+  def __getitem__(self, key):
+    return self._cmsg.GetRepeatedScalar(self._cfield_descriptor, key)
+
+  def __delitem__(self, key):
+    self._cmsg.DeleteRepeatedField(self._cfield_descriptor, key)
+
+  def __len__(self):
+    return len(self[slice(None, None, None)])
+
+  def __eq__(self, other):
+    if self is other:
+      return True
+    if not operator.isSequenceType(other):
+      raise TypeError(
+          'Can only compare repeated scalar fields against sequences.')
+    # We are presumably comparing against some other sequence type.
+    return other == self[slice(None, None, None)]
+
+  def __ne__(self, other):
+    return not self == other
+
+  def __hash__(self):
+    raise TypeError('unhashable object')
+
+  def sort(self, *args, **kwargs):
+    # Maintain compatibility with the previous interface.
+    if 'sort_function' in kwargs:
+      kwargs['cmp'] = kwargs.pop('sort_function')
+    self._cmsg.AssignRepeatedScalar(self._cfield_descriptor,
+                                    sorted(self, *args, **kwargs))
+
+
+def RepeatedScalarProperty(cdescriptor):
+  """Returns a Python property the given repeated scalar field."""
+
+  def Getter(self):
+    container = self._composite_fields.get(cdescriptor.name, None)
+    if container is None:
+      container = RepeatedScalarContainer(self, cdescriptor)
+      self._composite_fields[cdescriptor.name] = container
+    return container
+
+  def Setter(self, new_value):
+    raise AttributeError('Assignment not allowed to repeated field '
+                         '"%s" in protocol message object.' % cdescriptor.name)
+
+  doc = 'Magic attribute generated for "%s" proto field.' % cdescriptor.name
+  return property(Getter, Setter, doc=doc)
+
+
+class RepeatedCompositeContainer(object):
+  """Container for repeated composite fields."""
+
+  __slots__ = ['_message', '_subclass', '_cfield_descriptor', '_cmsg']
+
+  def __init__(self, msg, cfield_descriptor, subclass):
+    self._message = msg
+    self._cmsg = msg._cmsg
+    self._subclass = subclass
+    self._cfield_descriptor = cfield_descriptor
+
+  def add(self, **kwargs):
+    cmessage = self._cmsg.AddMessage(self._cfield_descriptor)
+    return self._subclass(__cmessage=cmessage, __owner=self._message, **kwargs)
+
+  def extend(self, elem_seq):
+    """Extends by appending the given sequence of elements of the same type
+    as this one, copying each individual message.
+    """
+    for message in elem_seq:
+      self.add().MergeFrom(message)
+
+  def remove(self, value):
+    # TODO(protocol-devel): This is inefficient as it needs to generate a
+    # message pointer for each message only to do index().  Move this to a C++
+    # extension function.
+    self.__delitem__(self[slice(None, None, None)].index(value))
+
+  def MergeFrom(self, other):
+    for message in other[:]:
+      self.add().MergeFrom(message)
+
+  def __getitem__(self, key):
+    cmessages = self._cmsg.GetRepeatedMessage(
+        self._cfield_descriptor, key)
+    subclass = self._subclass
+    if not isinstance(cmessages, list):
+      return subclass(__cmessage=cmessages, __owner=self._message)
+
+    return [subclass(__cmessage=m, __owner=self._message) for m in cmessages]
+
+  def __delitem__(self, key):
+    self._cmsg.DeleteRepeatedField(
+        self._cfield_descriptor, key)
+
+  def __len__(self):
+    return self._cmsg.FieldLength(self._cfield_descriptor)
+
+  def __eq__(self, other):
+    """Compares the current instance with another one."""
+    if self is other:
+      return True
+    if not isinstance(other, self.__class__):
+      raise TypeError('Can only compare repeated composite fields against '
+                      'other repeated composite fields.')
+    messages = self[slice(None, None, None)]
+    other_messages = other[slice(None, None, None)]
+    return messages == other_messages
+
+  def __hash__(self):
+    raise TypeError('unhashable object')
+
+  def sort(self, cmp=None, key=None, reverse=False, **kwargs):
+    # Maintain compatibility with the old interface.
+    if cmp is None and 'sort_function' in kwargs:
+      cmp = kwargs.pop('sort_function')
+
+    # The cmp function, if provided, is passed the results of the key function,
+    # so we only need to wrap one of them.
+    if key is None:
+      index_key = self.__getitem__
+    else:
+      index_key = lambda i: key(self[i])
+
+    # Sort the list of current indexes by the underlying object.
+    indexes = range(len(self))
+    indexes.sort(cmp=cmp, key=index_key, reverse=reverse)
+
+    # Apply the transposition.
+    for dest, src in enumerate(indexes):
+      if dest == src:
+        continue
+      self._cmsg.SwapRepeatedFieldElements(self._cfield_descriptor, dest, src)
+      # Don't swap the same value twice.
+      indexes[src] = src
+
+
+def RepeatedCompositeProperty(cdescriptor, message_type):
+  """Returns a Python property for the given repeated composite field."""
+
+  def Getter(self):
+    container = self._composite_fields.get(cdescriptor.name, None)
+    if container is None:
+      container = RepeatedCompositeContainer(
+          self, cdescriptor, message_type._concrete_class)
+      self._composite_fields[cdescriptor.name] = container
+    return container
+
+  def Setter(self, new_value):
+    raise AttributeError('Assignment not allowed to repeated field '
+                         '"%s" in protocol message object.' % cdescriptor.name)
+
+  doc = 'Magic attribute generated for "%s" proto field.' % cdescriptor.name
+  return property(Getter, Setter, doc=doc)
+
+
+class ExtensionDict(object):
+  """Extension dictionary added to each protocol message."""
+
+  def __init__(self, msg):
+    self._message = msg
+    self._cmsg = msg._cmsg
+    self._values = {}
+
+  def __setitem__(self, extension, value):
+    from protobuf26 import descriptor
+    if not isinstance(extension, descriptor.FieldDescriptor):
+      raise KeyError('Bad extension %r.' % (extension,))
+    cdescriptor = extension._cdescriptor
+    if (cdescriptor.label != _LABEL_OPTIONAL or
+        cdescriptor.cpp_type == _CPPTYPE_MESSAGE):
+      raise TypeError('Extension %r is repeated and/or a composite type.' % (
+          extension.full_name,))
+    self._cmsg.SetScalar(cdescriptor, value)
+    self._values[extension] = value
+
+  def __getitem__(self, extension):
+    from protobuf26 import descriptor
+    if not isinstance(extension, descriptor.FieldDescriptor):
+      raise KeyError('Bad extension %r.' % (extension,))
+
+    cdescriptor = extension._cdescriptor
+    if (cdescriptor.label != _LABEL_REPEATED and
+        cdescriptor.cpp_type != _CPPTYPE_MESSAGE):
+      return self._cmsg.GetScalar(cdescriptor)
+
+    ext = self._values.get(extension, None)
+    if ext is not None:
+      return ext
+
+    ext = self._CreateNewHandle(extension)
+    self._values[extension] = ext
+    return ext
+
+  def ClearExtension(self, extension):
+    from protobuf26 import descriptor
+    if not isinstance(extension, descriptor.FieldDescriptor):
+      raise KeyError('Bad extension %r.' % (extension,))
+    self._cmsg.ClearFieldByDescriptor(extension._cdescriptor)
+    if extension in self._values:
+      del self._values[extension]
+
+  def HasExtension(self, extension):
+    from protobuf26 import descriptor
+    if not isinstance(extension, descriptor.FieldDescriptor):
+      raise KeyError('Bad extension %r.' % (extension,))
+    return self._cmsg.HasFieldByDescriptor(extension._cdescriptor)
+
+  def _FindExtensionByName(self, name):
+    """Tries to find a known extension with the specified name.
+
+    Args:
+      name: Extension full name.
+
+    Returns:
+      Extension field descriptor.
+    """
+    return self._message._extensions_by_name.get(name, None)
+
+  def _CreateNewHandle(self, extension):
+    cdescriptor = extension._cdescriptor
+    if (cdescriptor.label != _LABEL_REPEATED and
+        cdescriptor.cpp_type == _CPPTYPE_MESSAGE):
+      cmessage = self._cmsg.NewSubMessage(cdescriptor)
+      return extension.message_type._concrete_class(__cmessage=cmessage)
+
+    if cdescriptor.label == _LABEL_REPEATED:
+      if cdescriptor.cpp_type == _CPPTYPE_MESSAGE:
+        return RepeatedCompositeContainer(
+            self._message, cdescriptor, extension.message_type._concrete_class)
+      else:
+        return RepeatedScalarContainer(self._message, cdescriptor)
+    # This shouldn't happen!
+    assert False
+    return None
+
+
+def NewMessage(bases, message_descriptor, dictionary):
+  """Creates a new protocol message *class*."""
+  _AddClassAttributesForNestedExtensions(message_descriptor, dictionary)
+  _AddEnumValues(message_descriptor, dictionary)
+  _AddDescriptors(message_descriptor, dictionary)
+  return bases
+
+
+def InitMessage(message_descriptor, cls):
+  """Constructs a new message instance (called before instance's __init__)."""
+  cls._extensions_by_name = {}
+  _AddInitMethod(message_descriptor, cls)
+  _AddMessageMethods(message_descriptor, cls)
+  _AddPropertiesForExtensions(message_descriptor, cls)
+  copy_reg.pickle(cls, lambda obj: (cls, (), obj.__getstate__()))
+
+
+def _AddDescriptors(message_descriptor, dictionary):
+  """Sets up a new protocol message class dictionary.
+
+  Args:
+    message_descriptor: A Descriptor instance describing this message type.
+    dictionary: Class dictionary to which we'll add a '__slots__' entry.
+  """
+  dictionary['__descriptors'] = {}
+  for field in message_descriptor.fields:
+    dictionary['__descriptors'][field.name] = GetFieldDescriptor(
+        field.full_name)
+
+  dictionary['__slots__'] = list(dictionary['__descriptors'].iterkeys()) + [
+      '_cmsg', '_owner', '_composite_fields', 'Extensions', '_HACK_REFCOUNTS']
+
+
+def _AddEnumValues(message_descriptor, dictionary):
+  """Sets class-level attributes for all enum fields defined in this message.
+
+  Args:
+    message_descriptor: Descriptor object for this message type.
+    dictionary: Class dictionary that should be populated.
+  """
+  for enum_type in message_descriptor.enum_types:
+    dictionary[enum_type.name] = enum_type_wrapper.EnumTypeWrapper(enum_type)
+    for enum_value in enum_type.values:
+      dictionary[enum_value.name] = enum_value.number
+
+
+def _AddClassAttributesForNestedExtensions(message_descriptor, dictionary):
+  """Adds class attributes for the nested extensions."""
+  extension_dict = message_descriptor.extensions_by_name
+  for extension_name, extension_field in extension_dict.iteritems():
+    assert extension_name not in dictionary
+    dictionary[extension_name] = extension_field
+
+
+def _AddInitMethod(message_descriptor, cls):
+  """Adds an __init__ method to cls."""
+
+  # Create and attach message field properties to the message class.
+  # This can be done just once per message class, since property setters and
+  # getters are passed the message instance.
+  # This makes message instantiation extremely fast, and at the same time it
+  # doesn't require the creation of property objects for each message instance,
+  # which saves a lot of memory.
+  for field in message_descriptor.fields:
+    field_cdescriptor = cls.__descriptors[field.name]
+    if field.label == _LABEL_REPEATED:
+      if field.cpp_type == _CPPTYPE_MESSAGE:
+        value = RepeatedCompositeProperty(field_cdescriptor, field.message_type)
+      else:
+        value = RepeatedScalarProperty(field_cdescriptor)
+    elif field.cpp_type == _CPPTYPE_MESSAGE:
+      value = CompositeProperty(field_cdescriptor, field.message_type)
+    else:
+      value = ScalarProperty(field_cdescriptor)
+    setattr(cls, field.name, value)
+
+    # Attach a constant with the field number.
+    constant_name = field.name.upper() + '_FIELD_NUMBER'
+    setattr(cls, constant_name, field.number)
+
+  def Init(self, **kwargs):
+    """Message constructor."""
+    cmessage = kwargs.pop('__cmessage', None)
+    if cmessage:
+      self._cmsg = cmessage
+    else:
+      self._cmsg = NewCMessage(message_descriptor.full_name)
+
+    # Keep a reference to the owner, as the owner keeps a reference to the
+    # underlying protocol buffer message.
+    owner = kwargs.pop('__owner', None)
+    if owner:
+      self._owner = owner
+
+    if message_descriptor.is_extendable:
+      self.Extensions = ExtensionDict(self)
+    else:
+      # Reference counting in the C++ code is broken and depends on
+      # the Extensions reference to keep this object alive during unit
+      # tests (see b/4856052).  Remove this once b/4945904 is fixed.
+      self._HACK_REFCOUNTS = self
+    self._composite_fields = {}
+
+    for field_name, field_value in kwargs.iteritems():
+      field_cdescriptor = self.__descriptors.get(field_name, None)
+      if not field_cdescriptor:
+        raise ValueError('Protocol message has no "%s" field.' % field_name)
+      if field_cdescriptor.label == _LABEL_REPEATED:
+        if field_cdescriptor.cpp_type == _CPPTYPE_MESSAGE:
+          field_name = getattr(self, field_name)
+          for val in field_value:
+            field_name.add().MergeFrom(val)
+        else:
+          getattr(self, field_name).extend(field_value)
+      elif field_cdescriptor.cpp_type == _CPPTYPE_MESSAGE:
+        getattr(self, field_name).MergeFrom(field_value)
+      else:
+        setattr(self, field_name, field_value)
+
+  Init.__module__ = None
+  Init.__doc__ = None
+  cls.__init__ = Init
+
+
+def _IsMessageSetExtension(field):
+  """Checks if a field is a message set extension."""
+  return (field.is_extension and
+          field.containing_type.has_options and
+          field.containing_type.GetOptions().message_set_wire_format and
+          field.type == _TYPE_MESSAGE and
+          field.message_type == field.extension_scope and
+          field.label == _LABEL_OPTIONAL)
+
+
+def _AddMessageMethods(message_descriptor, cls):
+  """Adds the methods to a protocol message class."""
+  if message_descriptor.is_extendable:
+
+    def ClearExtension(self, extension):
+      self.Extensions.ClearExtension(extension)
+
+    def HasExtension(self, extension):
+      return self.Extensions.HasExtension(extension)
+
+  def HasField(self, field_name):
+    return self._cmsg.HasField(field_name)
+
+  def ClearField(self, field_name):
+    child_cmessage = None
+    if field_name in self._composite_fields:
+      child_field = self._composite_fields[field_name]
+      del self._composite_fields[field_name]
+
+      child_cdescriptor = self.__descriptors[field_name]
+      # TODO(anuraag): Support clearing repeated message fields as well.
+      if (child_cdescriptor.label != _LABEL_REPEATED and
+          child_cdescriptor.cpp_type == _CPPTYPE_MESSAGE):
+        child_field._owner = None
+        child_cmessage = child_field._cmsg
+
+    if child_cmessage is not None:
+      self._cmsg.ClearField(field_name, child_cmessage)
+    else:
+      self._cmsg.ClearField(field_name)
+
+  def Clear(self):
+    cmessages_to_release = []
+    for field_name, child_field in self._composite_fields.iteritems():
+      child_cdescriptor = self.__descriptors[field_name]
+      # TODO(anuraag): Support clearing repeated message fields as well.
+      if (child_cdescriptor.label != _LABEL_REPEATED and
+          child_cdescriptor.cpp_type == _CPPTYPE_MESSAGE):
+        child_field._owner = None
+        cmessages_to_release.append((child_cdescriptor, child_field._cmsg))
+    self._composite_fields.clear()
+    self._cmsg.Clear(cmessages_to_release)
+
+  def IsInitialized(self, errors=None):
+    if self._cmsg.IsInitialized():
+      return True
+    if errors is not None:
+      errors.extend(self.FindInitializationErrors());
+    return False
+
+  def SerializeToString(self):
+    if not self.IsInitialized():
+      raise message.EncodeError(
+          'Message %s is missing required fields: %s' % (
+          self._cmsg.full_name, ','.join(self.FindInitializationErrors())))
+    return self._cmsg.SerializeToString()
+
+  def SerializePartialToString(self):
+    return self._cmsg.SerializePartialToString()
+
+  def ParseFromString(self, serialized):
+    self.Clear()
+    self.MergeFromString(serialized)
+
+  def MergeFromString(self, serialized):
+    byte_size = self._cmsg.MergeFromString(serialized)
+    if byte_size < 0:
+      raise message.DecodeError('Unable to merge from string.')
+    return byte_size
+
+  def MergeFrom(self, msg):
+    if not isinstance(msg, cls):
+      raise TypeError(
+          "Parameter to MergeFrom() must be instance of same class: "
+          "expected %s got %s." % (cls.__name__, type(msg).__name__))
+    self._cmsg.MergeFrom(msg._cmsg)
+
+  def CopyFrom(self, msg):
+    self._cmsg.CopyFrom(msg._cmsg)
+
+  def ByteSize(self):
+    return self._cmsg.ByteSize()
+
+  def SetInParent(self):
+    return self._cmsg.SetInParent()
+
+  def ListFields(self):
+    all_fields = []
+    field_list = self._cmsg.ListFields()
+    fields_by_name = cls.DESCRIPTOR.fields_by_name
+    for is_extension, field_name in field_list:
+      if is_extension:
+        extension = cls._extensions_by_name[field_name]
+        all_fields.append((extension, self.Extensions[extension]))
+      else:
+        field_descriptor = fields_by_name[field_name]
+        all_fields.append(
+            (field_descriptor, getattr(self, field_name)))
+    all_fields.sort(key=lambda item: item[0].number)
+    return all_fields
+
+  def FindInitializationErrors(self):
+    return self._cmsg.FindInitializationErrors()
+
+  def __str__(self):
+    return str(self._cmsg)
+
+  def __eq__(self, other):
+    if self is other:
+      return True
+    if not isinstance(other, self.__class__):
+      return False
+    return self.ListFields() == other.ListFields()
+
+  def __ne__(self, other):
+    return not self == other
+
+  def __hash__(self):
+    raise TypeError('unhashable object')
+
+  def __unicode__(self):
+    # Lazy import to prevent circular import when text_format imports this file.
+    from protobuf26 import text_format
+    return text_format.MessageToString(self, as_utf8=True).decode('utf-8')
+
+  # Attach the local methods to the message class.
+  for key, value in locals().copy().iteritems():
+    if key not in ('key', 'value', '__builtins__', '__name__', '__doc__'):
+      setattr(cls, key, value)
+
+  # Static methods:
+
+  def RegisterExtension(extension_handle):
+    extension_handle.containing_type = cls.DESCRIPTOR
+    cls._extensions_by_name[extension_handle.full_name] = extension_handle
+
+    if _IsMessageSetExtension(extension_handle):
+      # MessageSet extension.  Also register under type name.
+      cls._extensions_by_name[
+          extension_handle.message_type.full_name] = extension_handle
+  cls.RegisterExtension = staticmethod(RegisterExtension)
+
+  def FromString(string):
+    msg = cls()
+    msg.MergeFromString(string)
+    return msg
+  cls.FromString = staticmethod(FromString)
+
+
+
+def _AddPropertiesForExtensions(message_descriptor, cls):
+  """Adds properties for all fields in this protocol message type."""
+  extension_dict = message_descriptor.extensions_by_name
+  for extension_name, extension_field in extension_dict.iteritems():
+    constant_name = extension_name.upper() + '_FIELD_NUMBER'
+    setattr(cls, constant_name, extension_field.number)
diff --git a/third_party/protobuf26/internal/decoder.py b/third_party/protobuf26/internal/decoder.py
new file mode 100644
index 0000000..800f04b
--- /dev/null
+++ b/third_party/protobuf26/internal/decoder.py
@@ -0,0 +1,831 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc.  All rights reserved.
+# http://code.google.com/p/protobuf/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#PY25 compatible for GAE.
+#
+# Copyright 2009 Google Inc. All Rights Reserved.
+
+"""Code for decoding protocol buffer primitives.
+
+This code is very similar to encoder.py -- read the docs for that module first.
+
+A "decoder" is a function with the signature:
+  Decode(buffer, pos, end, message, field_dict)
+The arguments are:
+  buffer:     The string containing the encoded message.
+  pos:        The current position in the string.
+  end:        The position in the string where the current message ends.  May be
+              less than len(buffer) if we're reading a sub-message.
+  message:    The message object into which we're parsing.
+  field_dict: message._fields (avoids a hashtable lookup).
+The decoder reads the field and stores it into field_dict, returning the new
+buffer position.  A decoder for a repeated field may proactively decode all of
+the elements of that field, if they appear consecutively.
+
+Note that decoders may throw any of the following:
+  IndexError:  Indicates a truncated message.
+  struct.error:  Unpacking of a fixed-width field failed.
+  message.DecodeError:  Other errors.
+
+Decoders are expected to raise an exception if they are called with pos > end.
+This allows callers to be lax about bounds checking:  it's fine to read past
+"end" as long as you are sure that someone else will notice and throw an
+exception later on.
+
+Something up the call stack is expected to catch IndexError and struct.error
+and convert them to message.DecodeError.
+
+Decoders are constructed using decoder constructors with the signature:
+  MakeDecoder(field_number, is_repeated, is_packed, key, new_default)
+The arguments are:
+  field_number:  The field number of the field we want to decode.
+  is_repeated:   Is the field a repeated field? (bool)
+  is_packed:     Is the field a packed field? (bool)
+  key:           The key to use when looking up the field within field_dict.
+                 (This is actually the FieldDescriptor but nothing in this
+                 file should depend on that.)
+  new_default:   A function which takes a message object as a parameter and
+                 returns a new instance of the default value for this field.
+                 (This is called for repeated fields and sub-messages, when an
+                 instance does not already exist.)
+
+As with encoders, we define a decoder constructor for every type of field.
+Then, for every field of every message class we construct an actual decoder.
+That decoder goes into a dict indexed by tag, so when we decode a message
+we repeatedly read a tag, look up the corresponding decoder, and invoke it.
+"""
+
+__author__ = 'kenton@google.com (Kenton Varda)'
+
+import struct
+import sys  ##PY25
+_PY2 = sys.version_info[0] < 3  ##PY25
+from protobuf26.internal import encoder
+from protobuf26.internal import wire_format
+from protobuf26 import message
+
+
+# This will overflow and thus become IEEE-754 "infinity".  We would use
+# "float('inf')" but it doesn't work on Windows pre-Python-2.6.
+_POS_INF = 1e10000
+_NEG_INF = -_POS_INF
+_NAN = _POS_INF * 0
+
+
+# This is not for optimization, but rather to avoid conflicts with local
+# variables named "message".
+_DecodeError = message.DecodeError
+
+
+def _VarintDecoder(mask, result_type):
+  """Return an encoder for a basic varint value (does not include tag).
+
+  Decoded values will be bitwise-anded with the given mask before being
+  returned, e.g. to limit them to 32 bits.  The returned decoder does not
+  take the usual "end" parameter -- the caller is expected to do bounds checking
+  after the fact (often the caller can defer such checking until later).  The
+  decoder returns a (value, new_pos) pair.
+  """
+
+  local_ord = ord
+  py2 = _PY2  ##PY25
+##!PY25  py2 = str is bytes
+  def DecodeVarint(buffer, pos):
+    result = 0
+    shift = 0
+    while 1:
+      b = local_ord(buffer[pos]) if py2 else buffer[pos]
+      result |= ((b & 0x7f) << shift)
+      pos += 1
+      if not (b & 0x80):
+        result &= mask
+        result = result_type(result)
+        return (result, pos)
+      shift += 7
+      if shift >= 64:
+        raise _DecodeError('Too many bytes when decoding varint.')
+  return DecodeVarint
+
+
+def _SignedVarintDecoder(mask, result_type):
+  """Like _VarintDecoder() but decodes signed values."""
+
+  local_ord = ord
+  py2 = _PY2  ##PY25
+##!PY25  py2 = str is bytes
+  def DecodeVarint(buffer, pos):
+    result = 0
+    shift = 0
+    while 1:
+      b = local_ord(buffer[pos]) if py2 else buffer[pos]
+      result |= ((b & 0x7f) << shift)
+      pos += 1
+      if not (b & 0x80):
+        if result > 0x7fffffffffffffff:
+          result -= (1 << 64)
+          result |= ~mask
+        else:
+          result &= mask
+        result = result_type(result)
+        return (result, pos)
+      shift += 7
+      if shift >= 64:
+        raise _DecodeError('Too many bytes when decoding varint.')
+  return DecodeVarint
+
+# We force 32-bit values to int and 64-bit values to long to make
+# alternate implementations where the distinction is more significant
+# (e.g. the C++ implementation) simpler.
+
+_DecodeVarint = _VarintDecoder((1 << 64) - 1, long)
+_DecodeSignedVarint = _SignedVarintDecoder((1 << 64) - 1, long)
+
+# Use these versions for values which must be limited to 32 bits.
+_DecodeVarint32 = _VarintDecoder((1 << 32) - 1, int)
+_DecodeSignedVarint32 = _SignedVarintDecoder((1 << 32) - 1, int)
+
+
+def ReadTag(buffer, pos):
+  """Read a tag from the buffer, and return a (tag_bytes, new_pos) tuple.
+
+  We return the raw bytes of the tag rather than decoding them.  The raw
+  bytes can then be used to look up the proper decoder.  This effectively allows
+  us to trade some work that would be done in pure-python (decoding a varint)
+  for work that is done in C (searching for a byte string in a hash table).
+  In a low-level language it would be much cheaper to decode the varint and
+  use that, but not in Python.
+  """
+
+  py2 = _PY2  ##PY25
+##!PY25  py2 = str is bytes
+  start = pos
+  while (ord(buffer[pos]) if py2 else buffer[pos]) & 0x80:
+    pos += 1
+  pos += 1
+  return (buffer[start:pos], pos)
+
+
+# --------------------------------------------------------------------
+
+
+def _SimpleDecoder(wire_type, decode_value):
+  """Return a constructor for a decoder for fields of a particular type.
+
+  Args:
+      wire_type:  The field's wire type.
+      decode_value:  A function which decodes an individual value, e.g.
+        _DecodeVarint()
+  """
+
+  def SpecificDecoder(field_number, is_repeated, is_packed, key, new_default):
+    if is_packed:
+      local_DecodeVarint = _DecodeVarint
+      def DecodePackedField(buffer, pos, end, message, field_dict):
+        value = field_dict.get(key)
+        if value is None:
+          value = field_dict.setdefault(key, new_default(message))
+        (endpoint, pos) = local_DecodeVarint(buffer, pos)
+        endpoint += pos
+        if endpoint > end:
+          raise _DecodeError('Truncated message.')
+        while pos < endpoint:
+          (element, pos) = decode_value(buffer, pos)
+          value.append(element)
+        if pos > endpoint:
+          del value[-1]   # Discard corrupt value.
+          raise _DecodeError('Packed element was truncated.')
+        return pos
+      return DecodePackedField
+    elif is_repeated:
+      tag_bytes = encoder.TagBytes(field_number, wire_type)
+      tag_len = len(tag_bytes)
+      def DecodeRepeatedField(buffer, pos, end, message, field_dict):
+        value = field_dict.get(key)
+        if value is None:
+          value = field_dict.setdefault(key, new_default(message))
+        while 1:
+          (element, new_pos) = decode_value(buffer, pos)
+          value.append(element)
+          # Predict that the next tag is another copy of the same repeated
+          # field.
+          pos = new_pos + tag_len
+          if buffer[new_pos:pos] != tag_bytes or new_pos >= end:
+            # Prediction failed.  Return.
+            if new_pos > end:
+              raise _DecodeError('Truncated message.')
+            return new_pos
+      return DecodeRepeatedField
+    else:
+      def DecodeField(buffer, pos, end, message, field_dict):
+        (field_dict[key], pos) = decode_value(buffer, pos)
+        if pos > end:
+          del field_dict[key]  # Discard corrupt value.
+          raise _DecodeError('Truncated message.')
+        return pos
+      return DecodeField
+
+  return SpecificDecoder
+
+
+def _ModifiedDecoder(wire_type, decode_value, modify_value):
+  """Like SimpleDecoder but additionally invokes modify_value on every value
+  before storing it.  Usually modify_value is ZigZagDecode.
+  """
+
+  # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but
+  # not enough to make a significant difference.
+
+  def InnerDecode(buffer, pos):
+    (result, new_pos) = decode_value(buffer, pos)
+    return (modify_value(result), new_pos)
+  return _SimpleDecoder(wire_type, InnerDecode)
+
+
+def _StructPackDecoder(wire_type, format):
+  """Return a constructor for a decoder for a fixed-width field.
+
+  Args:
+      wire_type:  The field's wire type.
+      format:  The format string to pass to struct.unpack().
+  """
+
+  value_size = struct.calcsize(format)
+  local_unpack = struct.unpack
+
+  # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but
+  # not enough to make a significant difference.
+
+  # Note that we expect someone up-stack to catch struct.error and convert
+  # it to _DecodeError -- this way we don't have to set up exception-
+  # handling blocks every time we parse one value.
+
+  def InnerDecode(buffer, pos):
+    new_pos = pos + value_size
+    result = local_unpack(format, buffer[pos:new_pos])[0]
+    return (result, new_pos)
+  return _SimpleDecoder(wire_type, InnerDecode)
+
+
+def _FloatDecoder():
+  """Returns a decoder for a float field.
+
+  This code works around a bug in struct.unpack for non-finite 32-bit
+  floating-point values.
+  """
+
+  local_unpack = struct.unpack
+  b = (lambda x:x) if _PY2 else lambda x:x.encode('latin1')  ##PY25
+
+  def InnerDecode(buffer, pos):
+    # We expect a 32-bit value in little-endian byte order.  Bit 1 is the sign
+    # bit, bits 2-9 represent the exponent, and bits 10-32 are the significand.
+    new_pos = pos + 4
+    float_bytes = buffer[pos:new_pos]
+
+    # If this value has all its exponent bits set, then it's non-finite.
+    # In Python 2.4, struct.unpack will convert it to a finite 64-bit value.
+    # To avoid that, we parse it specially.
+    if ((float_bytes[3:4] in b('\x7F\xFF'))  ##PY25
+##!PY25    if ((float_bytes[3:4] in b'\x7F\xFF')
+        and (float_bytes[2:3] >= b('\x80'))):  ##PY25
+##!PY25        and (float_bytes[2:3] >= b'\x80')):
+      # If at least one significand bit is set...
+      if float_bytes[0:3] != b('\x00\x00\x80'):  ##PY25
+##!PY25      if float_bytes[0:3] != b'\x00\x00\x80':
+        return (_NAN, new_pos)
+      # If sign bit is set...
+      if float_bytes[3:4] == b('\xFF'):  ##PY25
+##!PY25      if float_bytes[3:4] == b'\xFF':
+        return (_NEG_INF, new_pos)
+      return (_POS_INF, new_pos)
+
+    # Note that we expect someone up-stack to catch struct.error and convert
+    # it to _DecodeError -- this way we don't have to set up exception-
+    # handling blocks every time we parse one value.
+    result = local_unpack('<f', float_bytes)[0]
+    return (result, new_pos)
+  return _SimpleDecoder(wire_format.WIRETYPE_FIXED32, InnerDecode)
+
+
+def _DoubleDecoder():
+  """Returns a decoder for a double field.
+
+  This code works around a bug in struct.unpack for not-a-number.
+  """
+
+  local_unpack = struct.unpack
+  b = (lambda x:x) if _PY2 else lambda x:x.encode('latin1')  ##PY25
+
+  def InnerDecode(buffer, pos):
+    # We expect a 64-bit value in little-endian byte order.  Bit 1 is the sign
+    # bit, bits 2-12 represent the exponent, and bits 13-64 are the significand.
+    new_pos = pos + 8
+    double_bytes = buffer[pos:new_pos]
+
+    # If this value has all its exponent bits set and at least one significand
+    # bit set, it's not a number.  In Python 2.4, struct.unpack will treat it
+    # as inf or -inf.  To avoid that, we treat it specially.
+##!PY25    if ((double_bytes[7:8] in b'\x7F\xFF')
+##!PY25        and (double_bytes[6:7] >= b'\xF0')
+##!PY25        and (double_bytes[0:7] != b'\x00\x00\x00\x00\x00\x00\xF0')):
+    if ((double_bytes[7:8] in b('\x7F\xFF'))  ##PY25
+        and (double_bytes[6:7] >= b('\xF0'))  ##PY25
+        and (double_bytes[0:7] != b('\x00\x00\x00\x00\x00\x00\xF0'))):  ##PY25
+      return (_NAN, new_pos)
+
+    # Note that we expect someone up-stack to catch struct.error and convert
+    # it to _DecodeError -- this way we don't have to set up exception-
+    # handling blocks every time we parse one value.
+    result = local_unpack('<d', double_bytes)[0]
+    return (result, new_pos)
+  return _SimpleDecoder(wire_format.WIRETYPE_FIXED64, InnerDecode)
+
+
+def EnumDecoder(field_number, is_repeated, is_packed, key, new_default):
+  enum_type = key.enum_type
+  if is_packed:
+    local_DecodeVarint = _DecodeVarint
+    def DecodePackedField(buffer, pos, end, message, field_dict):
+      value = field_dict.get(key)
+      if value is None:
+        value = field_dict.setdefault(key, new_default(message))
+      (endpoint, pos) = local_DecodeVarint(buffer, pos)
+      endpoint += pos
+      if endpoint > end:
+        raise _DecodeError('Truncated message.')
+      while pos < endpoint:
+        value_start_pos = pos
+        (element, pos) = _DecodeSignedVarint32(buffer, pos)
+        if element in enum_type.values_by_number:
+          value.append(element)
+        else:
+          if not message._unknown_fields:
+            message._unknown_fields = []
+          tag_bytes = encoder.TagBytes(field_number,
+                                       wire_format.WIRETYPE_VARINT)
+          message._unknown_fields.append(
+              (tag_bytes, buffer[value_start_pos:pos]))
+      if pos > endpoint:
+        if element in enum_type.values_by_number:
+          del value[-1]   # Discard corrupt value.
+        else:
+          del message._unknown_fields[-1]
+        raise _DecodeError('Packed element was truncated.')
+      return pos
+    return DecodePackedField
+  elif is_repeated:
+    tag_bytes = encoder.TagBytes(field_number, wire_format.WIRETYPE_VARINT)
+    tag_len = len(tag_bytes)
+    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
+      value = field_dict.get(key)
+      if value is None:
+        value = field_dict.setdefault(key, new_default(message))
+      while 1:
+        (element, new_pos) = _DecodeSignedVarint32(buffer, pos)
+        if element in enum_type.values_by_number:
+          value.append(element)
+        else:
+          if not message._unknown_fields:
+            message._unknown_fields = []
+          message._unknown_fields.append(
+              (tag_bytes, buffer[pos:new_pos]))
+        # Predict that the next tag is another copy of the same repeated
+        # field.
+        pos = new_pos + tag_len
+        if buffer[new_pos:pos] != tag_bytes or new_pos >= end:
+          # Prediction failed.  Return.
+          if new_pos > end:
+            raise _DecodeError('Truncated message.')
+          return new_pos
+    return DecodeRepeatedField
+  else:
+    def DecodeField(buffer, pos, end, message, field_dict):
+      value_start_pos = pos
+      (enum_value, pos) = _DecodeSignedVarint32(buffer, pos)
+      if pos > end:
+        raise _DecodeError('Truncated message.')
+      if enum_value in enum_type.values_by_number:
+        field_dict[key] = enum_value
+      else:
+        if not message._unknown_fields:
+          message._unknown_fields = []
+        tag_bytes = encoder.TagBytes(field_number,
+                                     wire_format.WIRETYPE_VARINT)
+        message._unknown_fields.append(
+          (tag_bytes, buffer[value_start_pos:pos]))
+      return pos
+    return DecodeField
+
+
+# --------------------------------------------------------------------
+
+
+Int32Decoder = _SimpleDecoder(
+    wire_format.WIRETYPE_VARINT, _DecodeSignedVarint32)
+
+Int64Decoder = _SimpleDecoder(
+    wire_format.WIRETYPE_VARINT, _DecodeSignedVarint)
+
+UInt32Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint32)
+UInt64Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint)
+
+SInt32Decoder = _ModifiedDecoder(
+    wire_format.WIRETYPE_VARINT, _DecodeVarint32, wire_format.ZigZagDecode)
+SInt64Decoder = _ModifiedDecoder(
+    wire_format.WIRETYPE_VARINT, _DecodeVarint, wire_format.ZigZagDecode)
+
+# Note that Python conveniently guarantees that when using the '<' prefix on
+# formats, they will also have the same size across all platforms (as opposed
+# to without the prefix, where their sizes depend on the C compiler's basic
+# type sizes).
+Fixed32Decoder  = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, '<I')
+Fixed64Decoder  = _StructPackDecoder(wire_format.WIRETYPE_FIXED64, '<Q')
+SFixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, '<i')
+SFixed64Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED64, '<q')
+FloatDecoder = _FloatDecoder()
+DoubleDecoder = _DoubleDecoder()
+
+BoolDecoder = _ModifiedDecoder(
+    wire_format.WIRETYPE_VARINT, _DecodeVarint, bool)
+
+
+def StringDecoder(field_number, is_repeated, is_packed, key, new_default):
+  """Returns a decoder for a string field."""
+
+  local_DecodeVarint = _DecodeVarint
+  local_unicode = unicode
+
+  def _ConvertToUnicode(byte_str):
+    try:
+      return local_unicode(byte_str, 'utf-8')
+    except UnicodeDecodeError, e:
+      # add more information to the error message and re-raise it.
+      e.reason = '%s in field: %s' % (e, key.full_name)
+      raise
+
+  assert not is_packed
+  if is_repeated:
+    tag_bytes = encoder.TagBytes(field_number,
+                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
+    tag_len = len(tag_bytes)
+    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
+      value = field_dict.get(key)
+      if value is None:
+        value = field_dict.setdefault(key, new_default(message))
+      while 1:
+        (size, pos) = local_DecodeVarint(buffer, pos)
+        new_pos = pos + size
+        if new_pos > end:
+          raise _DecodeError('Truncated string.')
+        value.append(_ConvertToUnicode(buffer[pos:new_pos]))
+        # Predict that the next tag is another copy of the same repeated field.
+        pos = new_pos + tag_len
+        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
+          # Prediction failed.  Return.
+          return new_pos
+    return DecodeRepeatedField
+  else:
+    def DecodeField(buffer, pos, end, message, field_dict):
+      (size, pos) = local_DecodeVarint(buffer, pos)
+      new_pos = pos + size
+      if new_pos > end:
+        raise _DecodeError('Truncated string.')
+      field_dict[key] = _ConvertToUnicode(buffer[pos:new_pos])
+      return new_pos
+    return DecodeField
+
+
+def BytesDecoder(field_number, is_repeated, is_packed, key, new_default):
+  """Returns a decoder for a bytes field."""
+
+  local_DecodeVarint = _DecodeVarint
+
+  assert not is_packed
+  if is_repeated:
+    tag_bytes = encoder.TagBytes(field_number,
+                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
+    tag_len = len(tag_bytes)
+    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
+      value = field_dict.get(key)
+      if value is None:
+        value = field_dict.setdefault(key, new_default(message))
+      while 1:
+        (size, pos) = local_DecodeVarint(buffer, pos)
+        new_pos = pos + size
+        if new_pos > end:
+          raise _DecodeError('Truncated string.')
+        value.append(buffer[pos:new_pos])
+        # Predict that the next tag is another copy of the same repeated field.
+        pos = new_pos + tag_len
+        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
+          # Prediction failed.  Return.
+          return new_pos
+    return DecodeRepeatedField
+  else:
+    def DecodeField(buffer, pos, end, message, field_dict):
+      (size, pos) = local_DecodeVarint(buffer, pos)
+      new_pos = pos + size
+      if new_pos > end:
+        raise _DecodeError('Truncated string.')
+      field_dict[key] = buffer[pos:new_pos]
+      return new_pos
+    return DecodeField
+
+
+def GroupDecoder(field_number, is_repeated, is_packed, key, new_default):
+  """Returns a decoder for a group field."""
+
+  end_tag_bytes = encoder.TagBytes(field_number,
+                                   wire_format.WIRETYPE_END_GROUP)
+  end_tag_len = len(end_tag_bytes)
+
+  assert not is_packed
+  if is_repeated:
+    tag_bytes = encoder.TagBytes(field_number,
+                                 wire_format.WIRETYPE_START_GROUP)
+    tag_len = len(tag_bytes)
+    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
+      value = field_dict.get(key)
+      if value is None:
+        value = field_dict.setdefault(key, new_default(message))
+      while 1:
+        value = field_dict.get(key)
+        if value is None:
+          value = field_dict.setdefault(key, new_default(message))
+        # Read sub-message.
+        pos = value.add()._InternalParse(buffer, pos, end)
+        # Read end tag.
+        new_pos = pos+end_tag_len
+        if buffer[pos:new_pos] != end_tag_bytes or new_pos > end:
+          raise _DecodeError('Missing group end tag.')
+        # Predict that the next tag is another copy of the same repeated field.
+        pos = new_pos + tag_len
+        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
+          # Prediction failed.  Return.
+          return new_pos
+    return DecodeRepeatedField
+  else:
+    def DecodeField(buffer, pos, end, message, field_dict):
+      value = field_dict.get(key)
+      if value is None:
+        value = field_dict.setdefault(key, new_default(message))
+      # Read sub-message.
+      pos = value._InternalParse(buffer, pos, end)
+      # Read end tag.
+      new_pos = pos+end_tag_len
+      if buffer[pos:new_pos] != end_tag_bytes or new_pos > end:
+        raise _DecodeError('Missing group end tag.')
+      return new_pos
+    return DecodeField
+
+
+def MessageDecoder(field_number, is_repeated, is_packed, key, new_default):
+  """Returns a decoder for a message field."""
+
+  local_DecodeVarint = _DecodeVarint
+
+  assert not is_packed
+  if is_repeated:
+    tag_bytes = encoder.TagBytes(field_number,
+                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
+    tag_len = len(tag_bytes)
+    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
+      value = field_dict.get(key)
+      if value is None:
+        value = field_dict.setdefault(key, new_default(message))
+      while 1:
+        value = field_dict.get(key)
+        if value is None:
+          value = field_dict.setdefault(key, new_default(message))
+        # Read length.
+        (size, pos) = local_DecodeVarint(buffer, pos)
+        new_pos = pos + size
+        if new_pos > end:
+          raise _DecodeError('Truncated message.')
+        # Read sub-message.
+        if value.add()._InternalParse(buffer, pos, new_pos) != new_pos:
+          # The only reason _InternalParse would return early is if it
+          # encountered an end-group tag.
+          raise _DecodeError('Unexpected end-group tag.')
+        # Predict that the next tag is another copy of the same repeated field.
+        pos = new_pos + tag_len
+        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
+          # Prediction failed.  Return.
+          return new_pos
+    return DecodeRepeatedField
+  else:
+    def DecodeField(buffer, pos, end, message, field_dict):
+      value = field_dict.get(key)
+      if value is None:
+        value = field_dict.setdefault(key, new_default(message))
+      # Read length.
+      (size, pos) = local_DecodeVarint(buffer, pos)
+      new_pos = pos + size
+      if new_pos > end:
+        raise _DecodeError('Truncated message.')
+      # Read sub-message.
+      if value._InternalParse(buffer, pos, new_pos) != new_pos:
+        # The only reason _InternalParse would return early is if it encountered
+        # an end-group tag.
+        raise _DecodeError('Unexpected end-group tag.')
+      return new_pos
+    return DecodeField
+
+
+# --------------------------------------------------------------------
+
+MESSAGE_SET_ITEM_TAG = encoder.TagBytes(1, wire_format.WIRETYPE_START_GROUP)
+
+def MessageSetItemDecoder(extensions_by_number):
+  """Returns a decoder for a MessageSet item.
+
+  The parameter is the _extensions_by_number map for the message class.
+
+  The message set message looks like this:
+    message MessageSet {
+      repeated group Item = 1 {
+        required int32 type_id = 2;
+        required string message = 3;
+      }
+    }
+  """
+
+  type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT)
+  message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)
+  item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP)
+
+  local_ReadTag = ReadTag
+  local_DecodeVarint = _DecodeVarint
+  local_SkipField = SkipField
+
+  def DecodeItem(buffer, pos, end, message, field_dict):
+    message_set_item_start = pos
+    type_id = -1
+    message_start = -1
+    message_end = -1
+
+    # Technically, type_id and message can appear in any order, so we need
+    # a little loop here.
+    while 1:
+      (tag_bytes, pos) = local_ReadTag(buffer, pos)
+      if tag_bytes == type_id_tag_bytes:
+        (type_id, pos) = local_DecodeVarint(buffer, pos)
+      elif tag_bytes == message_tag_bytes:
+        (size, message_start) = local_DecodeVarint(buffer, pos)
+        pos = message_end = message_start + size
+      elif tag_bytes == item_end_tag_bytes:
+        break
+      else:
+        pos = SkipField(buffer, pos, end, tag_bytes)
+        if pos == -1:
+          raise _DecodeError('Missing group end tag.')
+
+    if pos > end:
+      raise _DecodeError('Truncated message.')
+
+    if type_id == -1:
+      raise _DecodeError('MessageSet item missing type_id.')
+    if message_start == -1:
+      raise _DecodeError('MessageSet item missing message.')
+
+    extension = extensions_by_number.get(type_id)
+    if extension is not None:
+      value = field_dict.get(extension)
+      if value is None:
+        value = field_dict.setdefault(
+            extension, extension.message_type._concrete_class())
+      if value._InternalParse(buffer, message_start,message_end) != message_end:
+        # The only reason _InternalParse would return early is if it encountered
+        # an end-group tag.
+        raise _DecodeError('Unexpected end-group tag.')
+    else:
+      if not message._unknown_fields:
+        message._unknown_fields = []
+      message._unknown_fields.append((MESSAGE_SET_ITEM_TAG,
+                                      buffer[message_set_item_start:pos]))
+
+    return pos
+
+  return DecodeItem
+
+# --------------------------------------------------------------------
+# Optimization is not as heavy here because calls to SkipField() are rare,
+# except for handling end-group tags.
+
+def _SkipVarint(buffer, pos, end):
+  """Skip a varint value.  Returns the new position."""
+  # Previously ord(buffer[pos]) raised IndexError when pos is out of range.
+  # With this code, ord(b'') raises TypeError.  Both are handled in
+  # python_message.py to generate a 'Truncated message' error.
+  while ord(buffer[pos:pos+1]) & 0x80:
+    pos += 1
+  pos += 1
+  if pos > end:
+    raise _DecodeError('Truncated message.')
+  return pos
+
+def _SkipFixed64(buffer, pos, end):
+  """Skip a fixed64 value.  Returns the new position."""
+
+  pos += 8
+  if pos > end:
+    raise _DecodeError('Truncated message.')
+  return pos
+
+def _SkipLengthDelimited(buffer, pos, end):
+  """Skip a length-delimited value.  Returns the new position."""
+
+  (size, pos) = _DecodeVarint(buffer, pos)
+  pos += size
+  if pos > end:
+    raise _DecodeError('Truncated message.')
+  return pos
+
+def _SkipGroup(buffer, pos, end):
+  """Skip sub-group.  Returns the new position."""
+
+  while 1:
+    (tag_bytes, pos) = ReadTag(buffer, pos)
+    new_pos = SkipField(buffer, pos, end, tag_bytes)
+    if new_pos == -1:
+      return pos
+    pos = new_pos
+
+def _EndGroup(buffer, pos, end):
+  """Skipping an END_GROUP tag returns -1 to tell the parent loop to break."""
+
+  return -1
+
+def _SkipFixed32(buffer, pos, end):
+  """Skip a fixed32 value.  Returns the new position."""
+
+  pos += 4
+  if pos > end:
+    raise _DecodeError('Truncated message.')
+  return pos
+
+def _RaiseInvalidWireType(buffer, pos, end):
+  """Skip function for unknown wire types.  Raises an exception."""
+
+  raise _DecodeError('Tag had invalid wire type.')
+
+def _FieldSkipper():
+  """Constructs the SkipField function."""
+
+  WIRETYPE_TO_SKIPPER = [
+      _SkipVarint,
+      _SkipFixed64,
+      _SkipLengthDelimited,
+      _SkipGroup,
+      _EndGroup,
+      _SkipFixed32,
+      _RaiseInvalidWireType,
+      _RaiseInvalidWireType,
+      ]
+
+  wiretype_mask = wire_format.TAG_TYPE_MASK
+
+  def SkipField(buffer, pos, end, tag_bytes):
+    """Skips a field with the specified tag.
+
+    |pos| should point to the byte immediately after the tag.
+
+    Returns:
+        The new position (after the tag value), or -1 if the tag is an end-group
+        tag (in which case the calling loop should break).
+    """
+
+    # The wire type is always in the first byte since varints are little-endian.
+    wire_type = ord(tag_bytes[0:1]) & wiretype_mask
+    return WIRETYPE_TO_SKIPPER[wire_type](buffer, pos, end)
+
+  return SkipField
+
+SkipField = _FieldSkipper()
diff --git a/third_party/protobuf26/internal/encoder.py b/third_party/protobuf26/internal/encoder.py
new file mode 100644
index 0000000..22b1841
--- /dev/null
+++ b/third_party/protobuf26/internal/encoder.py
@@ -0,0 +1,788 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc.  All rights reserved.
+# http://code.google.com/p/protobuf/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#PY25 compatible for GAE.
+#
+# Copyright 2009 Google Inc. All Rights Reserved.
+
+"""Code for encoding protocol message primitives.
+
+Contains the logic for encoding every logical protocol field type
+into one of the 5 physical wire types.
+
+This code is designed to push the Python interpreter's performance to the
+limits.
+
+The basic idea is that at startup time, for every field (i.e. every
+FieldDescriptor) we construct two functions:  a "sizer" and an "encoder".  The
+sizer takes a value of this field's type and computes its byte size.  The
+encoder takes a writer function and a value.  It encodes the value into byte
+strings and invokes the writer function to write those strings.  Typically the
+writer function is the write() method of a cStringIO.
+
+We try to do as much work as possible when constructing the writer and the
+sizer rather than when calling them.  In particular:
+* We copy any needed global functions to local variables, so that we do not need
+  to do costly global table lookups at runtime.
+* Similarly, we try to do any attribute lookups at startup time if possible.
+* Every field's tag is encoded to bytes at startup, since it can't change at
+  runtime.
+* Whatever component of the field size we can compute at startup, we do.
+* We *avoid* sharing code if doing so would make the code slower and not sharing
+  does not burden us too much.  For example, encoders for repeated fields do
+  not just call the encoders for singular fields in a loop because this would
+  add an extra function call overhead for every loop iteration; instead, we
+  manually inline the single-value encoder into the loop.
+* If a Python function lacks a return statement, Python actually generates
+  instructions to pop the result of the last statement off the stack, push
+  None onto the stack, and then return that.  If we really don't care what
+  value is returned, then we can save two instructions by returning the
+  result of the last statement.  It looks funny but it helps.
+* We assume that type and bounds checking has happened at a higher level.
+"""
+
+__author__ = 'kenton@google.com (Kenton Varda)'
+
+import struct
+import sys  ##PY25
+_PY2 = sys.version_info[0] < 3  ##PY25
+from protobuf26.internal import wire_format
+
+
+# This will overflow and thus become IEEE-754 "infinity".  We would use
+# "float('inf')" but it doesn't work on Windows pre-Python-2.6.
+_POS_INF = 1e10000
+_NEG_INF = -_POS_INF
+
+
+def _VarintSize(value):
+  """Compute the size of a varint value."""
+  # Each varint byte carries 7 payload bits, so the size is the number of
+  # 7-bit groups needed; an unsigned 64-bit value needs at most 10 bytes.
+  if value <= 0x7f: return 1
+  if value <= 0x3fff: return 2
+  if value <= 0x1fffff: return 3
+  if value <= 0xfffffff: return 4
+  if value <= 0x7ffffffff: return 5
+  if value <= 0x3ffffffffff: return 6
+  if value <= 0x1ffffffffffff: return 7
+  if value <= 0xffffffffffffff: return 8
+  if value <= 0x7fffffffffffffff: return 9
+  return 10
+
+
+def _SignedVarintSize(value):
+  """Compute the size of a signed varint value."""
+  # Negative values are sign-extended to 64 bits on the wire, so they
+  # always take the maximum 10 bytes.
+  if value < 0: return 10
+  if value <= 0x7f: return 1
+  if value <= 0x3fff: return 2
+  if value <= 0x1fffff: return 3
+  if value <= 0xfffffff: return 4
+  if value <= 0x7ffffffff: return 5
+  if value <= 0x3ffffffffff: return 6
+  if value <= 0x1ffffffffffff: return 7
+  if value <= 0xffffffffffffff: return 8
+  if value <= 0x7fffffffffffffff: return 9
+  return 10
+
+
+def _TagSize(field_number):
+  """Returns the number of bytes required to serialize a tag with this field
+  number."""
+  # Just pass in type 0, since the type won't affect the tag+type size.
+  # (The wire type only occupies the low 3 bits of the tag varint.)
+  return _VarintSize(wire_format.PackTag(field_number, 0))
+
+
+# --------------------------------------------------------------------
+# In this section we define some generic sizers.  Each of these functions
+# takes parameters specific to a particular field type, e.g. int32 or fixed64.
+# It returns another function which in turn takes parameters specific to a
+# particular field, e.g. the field number and whether it is repeated or packed.
+# Look at the next section to see how these are used.
+
+
+def _SimpleSizer(compute_value_size):
+  """A sizer which uses the function compute_value_size to compute the size of
+  each value.  Typically compute_value_size is _VarintSize."""
+
+  def SpecificSizer(field_number, is_repeated, is_packed):
+    tag_size = _TagSize(field_number)
+    if is_packed:
+      local_VarintSize = _VarintSize
+      def PackedFieldSize(value):
+        # Packed: one tag + a varint length prefix + the payload bytes.
+        result = 0
+        for element in value:
+          result += compute_value_size(element)
+        return result + local_VarintSize(result) + tag_size
+      return PackedFieldSize
+    elif is_repeated:
+      def RepeatedFieldSize(value):
+        # Repeated (unpacked): each element carries its own tag.
+        result = tag_size * len(value)
+        for element in value:
+          result += compute_value_size(element)
+        return result
+      return RepeatedFieldSize
+    else:
+      def FieldSize(value):
+        return tag_size + compute_value_size(value)
+      return FieldSize
+
+  return SpecificSizer
+
+
+def _ModifiedSizer(compute_value_size, modify_value):
+  """Like SimpleSizer, but modify_value is invoked on each value before it is
+  passed to compute_value_size.  modify_value is typically ZigZagEncode."""
+
+  def SpecificSizer(field_number, is_repeated, is_packed):
+    tag_size = _TagSize(field_number)
+    if is_packed:
+      local_VarintSize = _VarintSize
+      def PackedFieldSize(value):
+        # Packed: one tag + a varint length prefix + the payload bytes.
+        result = 0
+        for element in value:
+          result += compute_value_size(modify_value(element))
+        return result + local_VarintSize(result) + tag_size
+      return PackedFieldSize
+    elif is_repeated:
+      def RepeatedFieldSize(value):
+        # Repeated (unpacked): each element carries its own tag.
+        result = tag_size * len(value)
+        for element in value:
+          result += compute_value_size(modify_value(element))
+        return result
+      return RepeatedFieldSize
+    else:
+      def FieldSize(value):
+        return tag_size + compute_value_size(modify_value(value))
+      return FieldSize
+
+  return SpecificSizer
+
+
+def _FixedSizer(value_size):
+  """Like _SimpleSizer except for a fixed-size field.  The input is the size
+  of one value."""
+
+  def SpecificSizer(field_number, is_repeated, is_packed):
+    tag_size = _TagSize(field_number)
+    if is_packed:
+      local_VarintSize = _VarintSize
+      def PackedFieldSize(value):
+        # Payload size is exactly len * value_size, so no per-element loop.
+        result = len(value) * value_size
+        return result + local_VarintSize(result) + tag_size
+      return PackedFieldSize
+    elif is_repeated:
+      # Per-element cost is constant, precompute it once.
+      element_size = value_size + tag_size
+      def RepeatedFieldSize(value):
+        return len(value) * element_size
+      return RepeatedFieldSize
+    else:
+      # Singular fixed-width field: the size never depends on the value.
+      field_size = value_size + tag_size
+      def FieldSize(value):
+        return field_size
+      return FieldSize
+
+  return SpecificSizer
+
+
+# ====================================================================
+# Here we declare a sizer constructor for each field type.  Each "sizer
+# constructor" is a function that takes (field_number, is_repeated, is_packed)
+# as parameters and returns a sizer, which in turn takes a field value as
+# a parameter and returns its encoded size.
+
+
+# Signed ints are encoded as (sign-extended) varints.
+Int32Sizer = Int64Sizer = EnumSizer = _SimpleSizer(_SignedVarintSize)
+
+UInt32Sizer = UInt64Sizer = _SimpleSizer(_VarintSize)
+
+# sint32/sint64 are zigzag-encoded before the varint size is computed.
+SInt32Sizer = SInt64Sizer = _ModifiedSizer(
+    _SignedVarintSize, wire_format.ZigZagEncode)
+
+# Fixed-width types: 4 bytes for 32-bit/float, 8 for 64-bit/double.
+Fixed32Sizer = SFixed32Sizer = FloatSizer  = _FixedSizer(4)
+Fixed64Sizer = SFixed64Sizer = DoubleSizer = _FixedSizer(8)
+
+# A bool is a one-byte varint (0 or 1).
+BoolSizer = _FixedSizer(1)
+
+
+def StringSizer(field_number, is_repeated, is_packed):
+  """Returns a sizer for a string field."""
+
+  tag_size = _TagSize(field_number)
+  local_VarintSize = _VarintSize
+  local_len = len
+  assert not is_packed
+  if is_repeated:
+    def RepeatedFieldSize(value):
+      result = tag_size * len(value)
+      for element in value:
+        # Size is measured on the UTF-8 encoding, not the unicode length.
+        l = local_len(element.encode('utf-8'))
+        result += local_VarintSize(l) + l
+      return result
+    return RepeatedFieldSize
+  else:
+    def FieldSize(value):
+      l = local_len(value.encode('utf-8'))
+      return tag_size + local_VarintSize(l) + l
+    return FieldSize
+
+
+def BytesSizer(field_number, is_repeated, is_packed):
+  """Returns a sizer for a bytes field."""
+
+  tag_size = _TagSize(field_number)
+  local_VarintSize = _VarintSize
+  local_len = len
+  assert not is_packed
+  if is_repeated:
+    def RepeatedFieldSize(value):
+      result = tag_size * len(value)
+      for element in value:
+        # Raw bytes: length-prefixed, no encoding step needed.
+        l = local_len(element)
+        result += local_VarintSize(l) + l
+      return result
+    return RepeatedFieldSize
+  else:
+    def FieldSize(value):
+      l = local_len(value)
+      return tag_size + local_VarintSize(l) + l
+    return FieldSize
+
+
+def GroupSizer(field_number, is_repeated, is_packed):
+  """Returns a sizer for a group field."""
+
+  # Groups are bracketed by START_GROUP and END_GROUP tags, hence * 2.
+  tag_size = _TagSize(field_number) * 2
+  assert not is_packed
+  if is_repeated:
+    def RepeatedFieldSize(value):
+      result = tag_size * len(value)
+      for element in value:
+        result += element.ByteSize()
+      return result
+    return RepeatedFieldSize
+  else:
+    def FieldSize(value):
+      return tag_size + value.ByteSize()
+    return FieldSize
+
+
+def MessageSizer(field_number, is_repeated, is_packed):
+  """Returns a sizer for a message field."""
+
+  tag_size = _TagSize(field_number)
+  local_VarintSize = _VarintSize
+  assert not is_packed
+  if is_repeated:
+    def RepeatedFieldSize(value):
+      result = tag_size * len(value)
+      for element in value:
+        # Sub-messages are length-delimited: varint length + payload.
+        l = element.ByteSize()
+        result += local_VarintSize(l) + l
+      return result
+    return RepeatedFieldSize
+  else:
+    def FieldSize(value):
+      l = value.ByteSize()
+      return tag_size + local_VarintSize(l) + l
+    return FieldSize
+
+
+# --------------------------------------------------------------------
+# MessageSet is special.
+
+
+def MessageSetItemSizer(field_number):
+  """Returns a sizer for extensions of MessageSet.
+
+  The message set message looks like this:
+    message MessageSet {
+      repeated group Item = 1 {
+        required int32 type_id = 2;
+        required string message = 3;
+      }
+    }
+  """
+  # Constant overhead: Item group start+end tags, the type_id tag and its
+  # varint value (the extension's field number), and the message tag.
+  static_size = (_TagSize(1) * 2 + _TagSize(2) + _VarintSize(field_number) +
+                 _TagSize(3))
+  local_VarintSize = _VarintSize
+
+  def FieldSize(value):
+    l = value.ByteSize()
+    return static_size + local_VarintSize(l) + l
+
+  return FieldSize
+
+
+# ====================================================================
+# Encoders!
+
+
+def _VarintEncoder():
+  """Return an encoder for a basic varint value (does not include tag)."""
+
+  # On Python 2 a one-byte string is built with chr(); on Python 3, bytes().
+  local_chr = _PY2 and chr or (lambda x: bytes((x,)))  ##PY25
+##!PY25  local_chr = chr if bytes is str else lambda x: bytes((x,))
+  def EncodeVarint(write, value):
+    # Emit 7 bits per byte, least-significant group first; the 0x80
+    # continuation bit is set on every byte except the last.
+    bits = value & 0x7f
+    value >>= 7
+    while value:
+      write(local_chr(0x80|bits))
+      bits = value & 0x7f
+      value >>= 7
+    return write(local_chr(bits))
+
+  return EncodeVarint
+
+
+def _SignedVarintEncoder():
+  """Return an encoder for a basic signed varint value (does not include
+  tag)."""
+
+  # On Python 2 a one-byte string is built with chr(); on Python 3, bytes().
+  local_chr = _PY2 and chr or (lambda x: bytes((x,)))  ##PY25
+##!PY25  local_chr = chr if bytes is str else lambda x: bytes((x,))
+  def EncodeSignedVarint(write, value):
+    if value < 0:
+      # Sign-extend to the unsigned 64-bit two's-complement representation.
+      value += (1 << 64)
+    bits = value & 0x7f
+    value >>= 7
+    while value:
+      write(local_chr(0x80|bits))
+      bits = value & 0x7f
+      value >>= 7
+    return write(local_chr(bits))
+
+  return EncodeSignedVarint
+
+
+# Module-level encoder instances, built once at import time.
+_EncodeVarint = _VarintEncoder()
+_EncodeSignedVarint = _SignedVarintEncoder()
+
+
+def _VarintBytes(value):
+  """Encode the given integer as a varint and return the bytes.  This is only
+  called at startup time so it doesn't need to be fast."""
+
+  # Collect the bytes via the pieces list's append, then join them.
+  pieces = []
+  _EncodeVarint(pieces.append, value)
+  return "".encode("latin1").join(pieces)  ##PY25
+##!PY25  return b"".join(pieces)
+
+
+def TagBytes(field_number, wire_type):
+  """Encode the given tag and return the bytes.  Only called at startup."""
+
+  # A tag is (field_number << 3) | wire_type, serialized as a varint.
+  return _VarintBytes(wire_format.PackTag(field_number, wire_type))
+
+# --------------------------------------------------------------------
+# As with sizers (see above), we have a number of common encoder
+# implementations.
+
+
+def _SimpleEncoder(wire_type, encode_value, compute_value_size):
+  """Return a constructor for an encoder for fields of a particular type.
+
+  Args:
+      wire_type:  The field's wire type, for encoding tags.
+      encode_value:  A function which encodes an individual value, e.g.
+        _EncodeVarint().
+      compute_value_size:  A function which computes the size of an individual
+        value, e.g. _VarintSize().
+  """
+
+  def SpecificEncoder(field_number, is_repeated, is_packed):
+    if is_packed:
+      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
+      local_EncodeVarint = _EncodeVarint
+      def EncodePackedField(write, value):
+        write(tag_bytes)
+        # First pass computes the payload size for the length prefix;
+        # second pass writes the actual values.
+        size = 0
+        for element in value:
+          size += compute_value_size(element)
+        local_EncodeVarint(write, size)
+        for element in value:
+          encode_value(write, element)
+      return EncodePackedField
+    elif is_repeated:
+      tag_bytes = TagBytes(field_number, wire_type)
+      def EncodeRepeatedField(write, value):
+        for element in value:
+          write(tag_bytes)
+          encode_value(write, element)
+      return EncodeRepeatedField
+    else:
+      tag_bytes = TagBytes(field_number, wire_type)
+      def EncodeField(write, value):
+        write(tag_bytes)
+        return encode_value(write, value)
+      return EncodeField
+
+  return SpecificEncoder
+
+
+def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value):
+  """Like SimpleEncoder but additionally invokes modify_value on every value
+  before passing it to encode_value.  Usually modify_value is ZigZagEncode."""
+
+  def SpecificEncoder(field_number, is_repeated, is_packed):
+    if is_packed:
+      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
+      local_EncodeVarint = _EncodeVarint
+      def EncodePackedField(write, value):
+        write(tag_bytes)
+        # First pass computes the payload size for the length prefix;
+        # second pass writes the actual (modified) values.
+        size = 0
+        for element in value:
+          size += compute_value_size(modify_value(element))
+        local_EncodeVarint(write, size)
+        for element in value:
+          encode_value(write, modify_value(element))
+      return EncodePackedField
+    elif is_repeated:
+      tag_bytes = TagBytes(field_number, wire_type)
+      def EncodeRepeatedField(write, value):
+        for element in value:
+          write(tag_bytes)
+          encode_value(write, modify_value(element))
+      return EncodeRepeatedField
+    else:
+      tag_bytes = TagBytes(field_number, wire_type)
+      def EncodeField(write, value):
+        write(tag_bytes)
+        return encode_value(write, modify_value(value))
+      return EncodeField
+
+  return SpecificEncoder
+
+
+def _StructPackEncoder(wire_type, format):
+  """Return a constructor for an encoder for a fixed-width field.
+
+  Args:
+      wire_type:  The field's wire type, for encoding tags.
+      format:  The format string to pass to struct.pack().
+  """
+
+  value_size = struct.calcsize(format)
+
+  def SpecificEncoder(field_number, is_repeated, is_packed):
+    local_struct_pack = struct.pack
+    if is_packed:
+      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
+      local_EncodeVarint = _EncodeVarint
+      def EncodePackedField(write, value):
+        write(tag_bytes)
+        # Fixed-width payload, so the length prefix is len * value_size.
+        local_EncodeVarint(write, len(value) * value_size)
+        for element in value:
+          write(local_struct_pack(format, element))
+      return EncodePackedField
+    elif is_repeated:
+      tag_bytes = TagBytes(field_number, wire_type)
+      def EncodeRepeatedField(write, value):
+        for element in value:
+          write(tag_bytes)
+          write(local_struct_pack(format, element))
+      return EncodeRepeatedField
+    else:
+      tag_bytes = TagBytes(field_number, wire_type)
+      def EncodeField(write, value):
+        write(tag_bytes)
+        return write(local_struct_pack(format, value))
+      return EncodeField
+
+  return SpecificEncoder
+
+
+def _FloatingPointEncoder(wire_type, format):
+  """Return a constructor for an encoder for float fields.
+
+  This is like StructPackEncoder, but catches errors that may be due to
+  passing non-finite floating-point values to struct.pack, and makes a
+  second attempt to encode those values.
+
+  Args:
+      wire_type:  The field's wire type, for encoding tags.
+      format:  The format string to pass to struct.pack().
+  """
+
+  # b() turns the latin-1 string literals below into bytes on Python 3.
+  b = _PY2 and (lambda x:x) or (lambda x:x.encode('latin1'))  ##PY25
+  value_size = struct.calcsize(format)
+  if value_size == 4:
+    # Hand-coded IEEE-754 single-precision bit patterns for +inf/-inf/NaN.
+    def EncodeNonFiniteOrRaise(write, value):
+      # Remember that the serialized form uses little-endian byte order.
+      if value == _POS_INF:
+        write(b('\x00\x00\x80\x7F'))  ##PY25
+##!PY25        write(b'\x00\x00\x80\x7F')
+      elif value == _NEG_INF:
+        write(b('\x00\x00\x80\xFF'))  ##PY25
+##!PY25        write(b'\x00\x00\x80\xFF')
+      elif value != value:           # NaN
+        write(b('\x00\x00\xC0\x7F'))  ##PY25
+##!PY25        write(b'\x00\x00\xC0\x7F')
+      else:
+        # Not a non-finite value after all: re-raise the original error.
+        raise
+  elif value_size == 8:
+    # Hand-coded IEEE-754 double-precision bit patterns for +inf/-inf/NaN.
+    def EncodeNonFiniteOrRaise(write, value):
+      if value == _POS_INF:
+        write(b('\x00\x00\x00\x00\x00\x00\xF0\x7F'))  ##PY25
+##!PY25        write(b'\x00\x00\x00\x00\x00\x00\xF0\x7F')
+      elif value == _NEG_INF:
+        write(b('\x00\x00\x00\x00\x00\x00\xF0\xFF'))  ##PY25
+##!PY25        write(b'\x00\x00\x00\x00\x00\x00\xF0\xFF')
+      elif value != value:                         # NaN
+        write(b('\x00\x00\x00\x00\x00\x00\xF8\x7F'))  ##PY25
+##!PY25        write(b'\x00\x00\x00\x00\x00\x00\xF8\x7F')
+      else:
+        # Not a non-finite value after all: re-raise the original error.
+        raise
+  else:
+    raise ValueError('Can\'t encode floating-point values that are '
+                     '%d bytes long (only 4 or 8)' % value_size)
+
+  def SpecificEncoder(field_number, is_repeated, is_packed):
+    local_struct_pack = struct.pack
+    if is_packed:
+      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
+      local_EncodeVarint = _EncodeVarint
+      def EncodePackedField(write, value):
+        write(tag_bytes)
+        local_EncodeVarint(write, len(value) * value_size)
+        for element in value:
+          # This try/except block is going to be faster than any code that
+          # we could write to check whether element is finite.
+          try:
+            write(local_struct_pack(format, element))
+          except SystemError:
+            EncodeNonFiniteOrRaise(write, element)
+      return EncodePackedField
+    elif is_repeated:
+      tag_bytes = TagBytes(field_number, wire_type)
+      def EncodeRepeatedField(write, value):
+        for element in value:
+          write(tag_bytes)
+          try:
+            write(local_struct_pack(format, element))
+          except SystemError:
+            EncodeNonFiniteOrRaise(write, element)
+      return EncodeRepeatedField
+    else:
+      tag_bytes = TagBytes(field_number, wire_type)
+      def EncodeField(write, value):
+        write(tag_bytes)
+        try:
+          write(local_struct_pack(format, value))
+        except SystemError:
+          EncodeNonFiniteOrRaise(write, value)
+      return EncodeField
+
+  return SpecificEncoder
+
+
+# ====================================================================
+# Here we declare an encoder constructor for each field type.  These work
+# very similarly to sizer constructors, described earlier.
+
+
+# Signed ints are encoded as (sign-extended) varints.
+Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder(
+    wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize)
+
+UInt32Encoder = UInt64Encoder = _SimpleEncoder(
+    wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize)
+
+# sint32/sint64 are zigzag-encoded, then written as unsigned varints.
+SInt32Encoder = SInt64Encoder = _ModifiedEncoder(
+    wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize,
+    wire_format.ZigZagEncode)
+
+# Note that Python conveniently guarantees that when using the '<' prefix on
+# formats, they will also have the same size across all platforms (as opposed
+# to without the prefix, where their sizes depend on the C compiler's basic
+# type sizes).
+Fixed32Encoder  = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<I')
+Fixed64Encoder  = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<Q')
+SFixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<i')
+SFixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<q')
+FloatEncoder    = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED32, '<f')
+DoubleEncoder   = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED64, '<d')
+
+
+def BoolEncoder(field_number, is_repeated, is_packed):
+  """Returns an encoder for a boolean field."""
+
+  # A bool serializes to a single varint byte: 0x00 or 0x01.
+##!PY25  false_byte = b'\x00'
+##!PY25  true_byte = b'\x01'
+  false_byte = '\x00'.encode('latin1')  ##PY25
+  true_byte = '\x01'.encode('latin1')  ##PY25
+  if is_packed:
+    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
+    local_EncodeVarint = _EncodeVarint
+    def EncodePackedField(write, value):
+      write(tag_bytes)
+      # One byte per bool, so the length prefix is just len(value).
+      local_EncodeVarint(write, len(value))
+      for element in value:
+        if element:
+          write(true_byte)
+        else:
+          write(false_byte)
+    return EncodePackedField
+  elif is_repeated:
+    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
+    def EncodeRepeatedField(write, value):
+      for element in value:
+        write(tag_bytes)
+        if element:
+          write(true_byte)
+        else:
+          write(false_byte)
+    return EncodeRepeatedField
+  else:
+    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
+    def EncodeField(write, value):
+      write(tag_bytes)
+      if value:
+        return write(true_byte)
+      return write(false_byte)
+    return EncodeField
+
+
+def StringEncoder(field_number, is_repeated, is_packed):
+  """Returns an encoder for a string field."""
+
+  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
+  local_EncodeVarint = _EncodeVarint
+  local_len = len
+  assert not is_packed
+  if is_repeated:
+    def EncodeRepeatedField(write, value):
+      for element in value:
+        # Strings go on the wire as UTF-8: tag, byte length, then bytes.
+        encoded = element.encode('utf-8')
+        write(tag)
+        local_EncodeVarint(write, local_len(encoded))
+        write(encoded)
+    return EncodeRepeatedField
+  else:
+    def EncodeField(write, value):
+      encoded = value.encode('utf-8')
+      write(tag)
+      local_EncodeVarint(write, local_len(encoded))
+      return write(encoded)
+    return EncodeField
+
+
+def BytesEncoder(field_number, is_repeated, is_packed):
+  """Returns an encoder for a bytes field."""
+
+  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
+  local_EncodeVarint = _EncodeVarint
+  local_len = len
+  assert not is_packed
+  if is_repeated:
+    def EncodeRepeatedField(write, value):
+      for element in value:
+        # Raw bytes need no encoding step: tag, length, then the bytes.
+        write(tag)
+        local_EncodeVarint(write, local_len(element))
+        write(element)
+    return EncodeRepeatedField
+  else:
+    def EncodeField(write, value):
+      write(tag)
+      local_EncodeVarint(write, local_len(value))
+      return write(value)
+    return EncodeField
+
+
+def GroupEncoder(field_number, is_repeated, is_packed):
+  """Returns an encoder for a group field."""
+
+  # Groups are bracketed by START_GROUP and END_GROUP tags instead of a
+  # length prefix.
+  start_tag = TagBytes(field_number, wire_format.WIRETYPE_START_GROUP)
+  end_tag = TagBytes(field_number, wire_format.WIRETYPE_END_GROUP)
+  assert not is_packed
+  if is_repeated:
+    def EncodeRepeatedField(write, value):
+      for element in value:
+        write(start_tag)
+        element._InternalSerialize(write)
+        write(end_tag)
+    return EncodeRepeatedField
+  else:
+    def EncodeField(write, value):
+      write(start_tag)
+      value._InternalSerialize(write)
+      return write(end_tag)
+    return EncodeField
+
+
+def MessageEncoder(field_number, is_repeated, is_packed):
+  """Returns an encoder for a message field."""
+
+  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
+  local_EncodeVarint = _EncodeVarint
+  assert not is_packed
+  if is_repeated:
+    def EncodeRepeatedField(write, value):
+      for element in value:
+        # Sub-messages are length-delimited: tag, byte size, then payload.
+        write(tag)
+        local_EncodeVarint(write, element.ByteSize())
+        element._InternalSerialize(write)
+    return EncodeRepeatedField
+  else:
+    def EncodeField(write, value):
+      write(tag)
+      local_EncodeVarint(write, value.ByteSize())
+      return value._InternalSerialize(write)
+    return EncodeField
+
+
+# --------------------------------------------------------------------
+# As before, MessageSet is special.
+
+
+def MessageSetItemEncoder(field_number):
+  """Encoder for extensions of MessageSet.
+
+  The message set message looks like this:
+    message MessageSet {
+      repeated group Item = 1 {
+        required int32 type_id = 2;
+        required string message = 3;
+      }
+    }
+  """
+  # Constant prefix: Item group start tag, type_id tag + value (the
+  # extension's field number), and the message field's tag.
+  start_bytes = "".encode("latin1").join([  ##PY25
+##!PY25  start_bytes = b"".join([
+      TagBytes(1, wire_format.WIRETYPE_START_GROUP),
+      TagBytes(2, wire_format.WIRETYPE_VARINT),
+      _VarintBytes(field_number),
+      TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)])
+  end_bytes = TagBytes(1, wire_format.WIRETYPE_END_GROUP)
+  local_EncodeVarint = _EncodeVarint
+
+  def EncodeField(write, value):
+    write(start_bytes)
+    local_EncodeVarint(write, value.ByteSize())
+    value._InternalSerialize(write)
+    return write(end_bytes)
+
+  return EncodeField
diff --git a/third_party/protobuf26/internal/enum_type_wrapper.py b/third_party/protobuf26/internal/enum_type_wrapper.py
new file mode 100644
index 0000000..7b28645
--- /dev/null
+++ b/third_party/protobuf26/internal/enum_type_wrapper.py
@@ -0,0 +1,89 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc.  All rights reserved.
+# http://code.google.com/p/protobuf/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""A simple wrapper around enum types to expose utility functions.
+
+Instances are created as properties with the same name as the enum they wrap
+on proto classes.  For usage, see:
+  reflection_test.py
+"""
+
+__author__ = 'rabsatt@google.com (Kevin Rabsatt)'
+
+
+class EnumTypeWrapper(object):
+  """A utility for finding the names of enum values."""
+
+  # Class-level default; overwritten per instance in __init__.
+  DESCRIPTOR = None
+
+  def __init__(self, enum_type):
+    """Inits EnumTypeWrapper with an EnumDescriptor."""
+    self._enum_type = enum_type
+    self.DESCRIPTOR = enum_type;
+
+  def Name(self, number):
+    """Returns a string containing the name of an enum value.
+
+    Raises:
+      ValueError: if the number is not defined in the enum.
+    """
+    if number in self._enum_type.values_by_number:
+      return self._enum_type.values_by_number[number].name
+    raise ValueError('Enum %s has no name defined for value %d' % (
+        self._enum_type.name, number))
+
+  def Value(self, name):
+    """Returns the value corresponding to the given enum name.
+
+    Raises:
+      ValueError: if the name is not defined in the enum.
+    """
+    if name in self._enum_type.values_by_name:
+      return self._enum_type.values_by_name[name].number
+    raise ValueError('Enum %s has no value defined for name %s' % (
+        self._enum_type.name, name))
+
+  def keys(self):
+    """Return a list of the string names in the enum.
+
+    These are returned in the order they were defined in the .proto file.
+    """
+
+    return [value_descriptor.name
+            for value_descriptor in self._enum_type.values]
+
+  def values(self):
+    """Return a list of the integer values in the enum.
+
+    These are returned in the order they were defined in the .proto file.
+    """
+
+    return [value_descriptor.number
+            for value_descriptor in self._enum_type.values]
+
+  def items(self):
+    """Return a list of the (name, value) pairs of the enum.
+
+    These are returned in the order they were defined in the .proto file.
+    """
+    return [(value_descriptor.name, value_descriptor.number)
+            for value_descriptor in self._enum_type.values]
diff --git a/third_party/protobuf26/internal/message_listener.py b/third_party/protobuf26/internal/message_listener.py
new file mode 100644
index 0000000..1080234
--- /dev/null
+++ b/third_party/protobuf26/internal/message_listener.py
@@ -0,0 +1,78 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc.  All rights reserved.
+# http://code.google.com/p/protobuf/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Defines a listener interface for observing certain
+state transitions on Message objects.
+
+Also defines a null implementation of this interface.
+"""
+
+__author__ = 'robinson@google.com (Will Robinson)'
+
+
+class MessageListener(object):
+
+  """Listens for modifications made to a message.  Meant to be registered via
+  Message._SetListener().
+
+  Attributes:
+    dirty:  If True, then calling Modified() would be a no-op.  This can be
+            used to avoid these calls entirely in the common case.
+  """
+
+  def Modified(self):
+    """Called every time the message is modified in such a way that the parent
+    message may need to be updated.  This currently means either:
+    (a) The message was modified for the first time, so the parent message
+        should henceforth mark the message as present.
+    (b) The message's cached byte size became dirty -- i.e. the message was
+        modified for the first time after a previous call to ByteSize().
+        Therefore the parent should also mark its byte size as dirty.
+    Note that (a) implies (b), since new objects start out with a client cached
+    size (zero).  However, we document (a) explicitly because it is important.
+
+    Modified() will *only* be called in response to one of these two events --
+    not every time the sub-message is modified.
+
+    Note that if the listener's |dirty| attribute is true, then calling
+    Modified at the moment would be a no-op, so it can be skipped.  Performance-
+    sensitive callers should check this attribute directly before calling since
+    it will be true most of the time.
+    """
+
+    # Abstract method; concrete listener implementations must override.
+    raise NotImplementedError
+
+
+class NullMessageListener(object):
+
+  """No-op MessageListener implementation."""
+
+  def Modified(self):
+    # Intentionally does nothing: used where no parent needs notification.
+    pass
diff --git a/third_party/protobuf26/internal/python_message.py b/third_party/protobuf26/internal/python_message.py
new file mode 100644
index 0000000..bbd0ea5
--- /dev/null
+++ b/third_party/protobuf26/internal/python_message.py
@@ -0,0 +1,1247 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc.  All rights reserved.
+# http://code.google.com/p/protobuf/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Keep it Python2.5 compatible for GAE.
+#
+# Copyright 2007 Google Inc. All Rights Reserved.
+#
+# This code is meant to work on Python 2.4 and above only.
+#
+# TODO(robinson): Helpers for verbose, common checks like seeing if a
+# descriptor's cpp_type is CPPTYPE_MESSAGE.
+
+"""Contains a metaclass and helper functions used to create
+protocol message classes from Descriptor objects at runtime.
+
+Recall that a metaclass is the "type" of a class.
+(A class is to a metaclass what an instance is to a class.)
+
+In this case, we use the GeneratedProtocolMessageType metaclass
+to inject all the useful functionality into the classes
+output by the protocol compiler at compile-time.
+
+The upshot of all this is that the real implementation
+details for ALL pure-Python protocol buffers are *here in
+this file*.
+"""
+
+__author__ = 'robinson@google.com (Will Robinson)'
+
+import sys
+if sys.version_info[0] < 3:
+  try:
+    from cStringIO import StringIO as BytesIO
+  except ImportError:
+    from StringIO import StringIO as BytesIO
+  import copy_reg as copyreg
+else:
+  from io import BytesIO
+  import copyreg
+import struct
+import weakref
+
+# We use "as" to avoid name collisions with variables.
+from protobuf26.internal import containers
+from protobuf26.internal import decoder
+from protobuf26.internal import encoder
+from protobuf26.internal import enum_type_wrapper
+from protobuf26.internal import message_listener as message_listener_mod
+from protobuf26.internal import type_checkers
+from protobuf26.internal import wire_format
+from protobuf26 import descriptor as descriptor_mod
+from protobuf26 import message as message_mod
+from protobuf26 import text_format
+
+_FieldDescriptor = descriptor_mod.FieldDescriptor
+
+
+def NewMessage(bases, descriptor, dictionary):
+  """Prepares the class dictionary before the message class is created.
+
+  Injects nested-extension attributes and a __slots__ entry into
+  |dictionary|, then returns |bases| unchanged.
+  """
+  _AddClassAttributesForNestedExtensions(descriptor, dictionary)
+  _AddSlots(descriptor, dictionary)
+  return bases
+
+
+def InitMessage(descriptor, cls):
+  """Finishes construction of a generated message class.
+
+  Builds the decoder/extension registries, attaches per-field helpers, and
+  installs all the standard Message methods and properties on |cls|.
+  """
+  cls._decoders_by_tag = {}
+  cls._extensions_by_name = {}
+  cls._extensions_by_number = {}
+  if (descriptor.has_options and
+      descriptor.GetOptions().message_set_wire_format):
+    cls._decoders_by_tag[decoder.MESSAGE_SET_ITEM_TAG] = (
+        decoder.MessageSetItemDecoder(cls._extensions_by_number))
+
+  # Attach stuff to each FieldDescriptor for quick lookup later on.
+  for field in descriptor.fields:
+    _AttachFieldHelpers(cls, field)
+
+  _AddEnumValues(descriptor, cls)
+  _AddInitMethod(descriptor, cls)
+  _AddPropertiesForFields(descriptor, cls)
+  _AddPropertiesForExtensions(descriptor, cls)
+  _AddStaticMethods(cls)
+  _AddMessageMethods(descriptor, cls)
+  _AddPrivateHelperMethods(descriptor, cls)
+  # Register pickle support; state is round-tripped via obj.__getstate__().
+  copyreg.pickle(cls, lambda obj: (cls, (), obj.__getstate__()))
+
+
+# Stateless helpers for GeneratedProtocolMessageType below.
+# Outside clients should not access these directly.
+#
+# I opted not to make any of these methods on the metaclass, to make it more
+# clear that I'm not really using any state there and to keep clients from
+# thinking that they have direct access to these construction helpers.
+
+
+def _PropertyName(proto_field_name):
+  """Returns the name of the public property attribute which
+  clients can use to get and (in some cases) set the value
+  of a protocol message field.
+
+  Args:
+    proto_field_name: The protocol message field name, exactly
+      as it appears (or would appear) in a .proto file.
+
+  Returns:
+    The field name unchanged (see discussion below).
+  """
+  # TODO(robinson): Escape Python keywords (e.g., yield), and test this support.
+  # nnorwitz makes my day by writing:
+  # """
+  # FYI.  See the keyword module in the stdlib. This could be as simple as:
+  #
+  # if keyword.iskeyword(proto_field_name):
+  #   return proto_field_name + "_"
+  # return proto_field_name
+  # """
+  # Kenton says:  The above is a BAD IDEA.  People rely on being able to use
+  #   getattr() and setattr() to reflectively manipulate field values.  If we
+  #   rename the properties, then every such user has to also make sure to apply
+  #   the same transformation.  Note that currently if you name a field "yield",
+  #   you can still access it just fine using getattr/setattr -- it's not even
+  #   that cumbersome to do so.
+  # TODO(kenton):  Remove this method entirely if/when everyone agrees with my
+  #   position.
+  return proto_field_name
+
+
+def _VerifyExtensionHandle(message, extension_handle):
+  """Verify that the given extension handle is valid.
+
+  Raises:
+    KeyError: If the handle is not a FieldDescriptor, is not an extension,
+      lacks a containing_type, or does not extend |message|'s type.
+  """
+
+  if not isinstance(extension_handle, _FieldDescriptor):
+    raise KeyError('HasExtension() expects an extension handle, got: %s' %
+                   extension_handle)
+
+  if not extension_handle.is_extension:
+    raise KeyError('"%s" is not an extension.' % extension_handle.full_name)
+
+  if not extension_handle.containing_type:
+    raise KeyError('"%s" is missing a containing_type.'
+                   % extension_handle.full_name)
+
+  # Identity comparison: the handle must reference this exact descriptor.
+  if extension_handle.containing_type is not message.DESCRIPTOR:
+    raise KeyError('Extension "%s" extends message type "%s", but this '
+                   'message is of type "%s".' %
+                   (extension_handle.full_name,
+                    extension_handle.containing_type.full_name,
+                    message.DESCRIPTOR.full_name))
+
+
+def _AddSlots(message_descriptor, dictionary):
+  """Adds a __slots__ entry to dictionary, containing the names of all valid
+  attributes for this message type.
+
+  Args:
+    message_descriptor: A Descriptor instance describing this message type.
+    dictionary: Class dictionary to which we'll add a '__slots__' entry.
+  """
+  # '__weakref__' must be listed explicitly so slotted instances remain
+  # weak-referenceable (the weakref module is used elsewhere in this file).
+  dictionary['__slots__'] = ['_cached_byte_size',
+                             '_cached_byte_size_dirty',
+                             '_fields',
+                             '_unknown_fields',
+                             '_is_present_in_parent',
+                             '_listener',
+                             '_listener_for_children',
+                             '__weakref__',
+                             '_oneofs']
+
+
+def _IsMessageSetExtension(field):
+  """Returns True if |field| is a MessageSet item: an optional message-typed
+  extension declared inside the message it extends, whose containing type has
+  message_set_wire_format set."""
+  return (field.is_extension and
+          field.containing_type.has_options and
+          field.containing_type.GetOptions().message_set_wire_format and
+          field.type == _FieldDescriptor.TYPE_MESSAGE and
+          field.message_type == field.extension_scope and
+          field.label == _FieldDescriptor.LABEL_OPTIONAL)
+
+
+def _AttachFieldHelpers(cls, field_descriptor):
+  """Attaches encoder, sizer, and default-constructor helpers to
+  |field_descriptor|, and registers the field's decoder(s) keyed by tag
+  bytes in cls._decoders_by_tag."""
+  is_repeated = (field_descriptor.label == _FieldDescriptor.LABEL_REPEATED)
+  is_packed = (field_descriptor.has_options and
+               field_descriptor.GetOptions().packed)
+
+  if _IsMessageSetExtension(field_descriptor):
+    field_encoder = encoder.MessageSetItemEncoder(field_descriptor.number)
+    sizer = encoder.MessageSetItemSizer(field_descriptor.number)
+  else:
+    field_encoder = type_checkers.TYPE_TO_ENCODER[field_descriptor.type](
+        field_descriptor.number, is_repeated, is_packed)
+    sizer = type_checkers.TYPE_TO_SIZER[field_descriptor.type](
+        field_descriptor.number, is_repeated, is_packed)
+
+  field_descriptor._encoder = field_encoder
+  field_descriptor._sizer = sizer
+  field_descriptor._default_constructor = _DefaultValueConstructorForField(
+      field_descriptor)
+
+  def AddDecoder(wiretype, is_packed):
+    tag_bytes = encoder.TagBytes(field_descriptor.number, wiretype)
+    cls._decoders_by_tag[tag_bytes] = (
+        type_checkers.TYPE_TO_DECODER[field_descriptor.type](
+            field_descriptor.number, is_repeated, is_packed,
+            field_descriptor, field_descriptor._default_constructor))
+
+  AddDecoder(type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type],
+             False)
+
+  if is_repeated and wire_format.IsTypePackable(field_descriptor.type):
+    # To support wire compatibility of adding packed = true, add a decoder for
+    # packed values regardless of the field's options.
+    AddDecoder(wire_format.WIRETYPE_LENGTH_DELIMITED, True)
+
+
+def _AddClassAttributesForNestedExtensions(descriptor, dictionary):
+  """Copies each nested extension FieldDescriptor into the class dictionary
+  under the extension's name."""
+  # NOTE(review): iteritems() is Python-2-only, although the module header
+  # guards imports for Python 3 -- confirm intended support matrix.
+  extension_dict = descriptor.extensions_by_name
+  for extension_name, extension_field in extension_dict.iteritems():
+    assert extension_name not in dictionary
+    dictionary[extension_name] = extension_field
+
+
+def _AddEnumValues(descriptor, cls):
+  """Sets class-level attributes for all enum fields defined in this message.
+
+  Also exporting a class-level object that can name enum values.
+
+  Args:
+    descriptor: Descriptor object for this message type.
+    cls: Class we're constructing for this message type.
+  """
+  for enum_type in descriptor.enum_types:
+    setattr(cls, enum_type.name, enum_type_wrapper.EnumTypeWrapper(enum_type))
+    # Each enum value name is also hoisted onto the class as an int constant.
+    for enum_value in enum_type.values:
+      setattr(cls, enum_value.name, enum_value.number)
+
+
+def _DefaultValueConstructorForField(field):
+  """Returns a function which returns a default value for a field.
+
+  Args:
+    field: FieldDescriptor object for this field.
+
+  The returned function has one argument:
+    message: Message instance containing this field, or a weakref proxy
+      of same.
+
+  That function in turn returns a default value for this field.  The default
+    value may refer back to |message| via a weak reference.
+  """
+
+  if field.label == _FieldDescriptor.LABEL_REPEATED:
+    if field.has_default_value and field.default_value != []:
+      raise ValueError('Repeated field default value not empty list: %s' % (
+          field.default_value))
+    if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+      # We can't look at _concrete_class yet since it might not have
+      # been set.  (Depends on order in which we initialize the classes).
+      # NOTE(review): this local is unused -- the closure below reads
+      # field.message_type directly.
+      message_type = field.message_type
+      def MakeRepeatedMessageDefault(message):
+        return containers.RepeatedCompositeFieldContainer(
+            message._listener_for_children, field.message_type)
+      return MakeRepeatedMessageDefault
+    else:
+      type_checker = type_checkers.GetTypeChecker(field)
+      def MakeRepeatedScalarDefault(message):
+        return containers.RepeatedScalarFieldContainer(
+            message._listener_for_children, type_checker)
+      return MakeRepeatedScalarDefault
+
+  if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+    # _concrete_class may not yet be initialized.
+    message_type = field.message_type
+    def MakeSubMessageDefault(message):
+      result = message_type._concrete_class()
+      result._SetListener(message._listener_for_children)
+      return result
+    return MakeSubMessageDefault
+
+  def MakeScalarDefault(message):
+    # TODO(protobuf-team): This may be broken since there may not be
+    # default_value.  Combine with has_default_value somehow.
+    return field.default_value
+  return MakeScalarDefault
+
+
+def _AddInitMethod(message_descriptor, cls):
+  """Adds an __init__ method to cls."""
+  # NOTE(review): 'fields' is unused below; init() resolves field names via
+  # _GetFieldByName instead.
+  fields = message_descriptor.fields
+  def init(self, **kwargs):
+    self._cached_byte_size = 0
+    self._cached_byte_size_dirty = len(kwargs) > 0
+    self._fields = {}
+    # Contains a mapping from oneof field descriptors to the descriptor
+    # of the currently set field in that oneof field.
+    self._oneofs = {}
+
+    # _unknown_fields is () when empty for efficiency, and will be turned into
+    # a list if fields are added.
+    self._unknown_fields = ()
+    self._is_present_in_parent = False
+    self._listener = message_listener_mod.NullMessageListener()
+    self._listener_for_children = _Listener(self)
+    for field_name, field_value in kwargs.iteritems():
+      field = _GetFieldByName(message_descriptor, field_name)
+      if field is None:
+        raise TypeError("%s() got an unexpected keyword argument '%s'" %
+                        (message_descriptor.name, field_name))
+      if field.label == _FieldDescriptor.LABEL_REPEATED:
+        copy = field._default_constructor(self)
+        if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:  # Composite
+          for val in field_value:
+            copy.add().MergeFrom(val)
+        else:  # Scalar
+          copy.extend(field_value)
+        self._fields[field] = copy
+      elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+        copy = field._default_constructor(self)
+        copy.MergeFrom(field_value)
+        self._fields[field] = copy
+      else:
+        # Scalar singular field: route through the property setter so type
+        # checking and dirty-marking side effects apply.
+        setattr(self, field_name, field_value)
+
+  init.__module__ = None
+  init.__doc__ = None
+  cls.__init__ = init
+
+
+def _GetFieldByName(message_descriptor, field_name):
+  """Returns a field descriptor by field name.
+
+  Args:
+    message_descriptor: A Descriptor describing all fields in message.
+    field_name: The name of the field to retrieve.
+  Returns:
+    The field descriptor associated with the field name.
+  Raises:
+    ValueError: If the message has no field with this name.
+  """
+  try:
+    return message_descriptor.fields_by_name[field_name]
+  except KeyError:
+    raise ValueError('Protocol message has no "%s" field.' % field_name)
+
+
+def _AddPropertiesForFields(descriptor, cls):
+  """Adds properties for all fields in this protocol message type."""
+  for field in descriptor.fields:
+    _AddPropertiesForField(field, cls)
+
+  if descriptor.is_extendable:
+    # _ExtensionDict is just an adaptor with no state so we allocate a new one
+    # every time it is accessed.
+    cls.Extensions = property(lambda self: _ExtensionDict(self))
+
+
+def _AddPropertiesForField(field, cls):
+  """Adds a public property for a protocol message field.
+  Clients can use this property to get and (in the case
+  of non-repeated scalar fields) directly set the value
+  of a protocol message field.
+
+  Args:
+    field: A FieldDescriptor for this field.
+    cls: The class we're constructing.
+  """
+  # Catch it if we add other types that we should
+  # handle specially here.
+  assert _FieldDescriptor.MAX_CPPTYPE == 10
+
+  # Export the field number as e.g. cls.FOO_FIELD_NUMBER.
+  constant_name = field.name.upper() + "_FIELD_NUMBER"
+  setattr(cls, constant_name, field.number)
+
+  # Dispatch on field shape: repeated, singular message, or singular scalar.
+  if field.label == _FieldDescriptor.LABEL_REPEATED:
+    _AddPropertiesForRepeatedField(field, cls)
+  elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+    _AddPropertiesForNonRepeatedCompositeField(field, cls)
+  else:
+    _AddPropertiesForNonRepeatedScalarField(field, cls)
+
+
+def _AddPropertiesForRepeatedField(field, cls):
+  """Adds a public property for a "repeated" protocol message field.  Clients
+  can use this property to get the value of the field, which will be either a
+  _RepeatedScalarFieldContainer or _RepeatedCompositeFieldContainer (see
+  below).
+
+  Note that when clients add values to these containers, we perform
+  type-checking in the case of repeated scalar fields, and we also set any
+  necessary "has" bits as a side-effect.
+
+  Args:
+    field: A FieldDescriptor for this field.
+    cls: The class we're constructing.
+  """
+  proto_field_name = field.name
+  property_name = _PropertyName(proto_field_name)
+
+  def getter(self):
+    # Lazily create the container on first access.
+    field_value = self._fields.get(field)
+    if field_value is None:
+      # Construct a new object to represent this field.
+      field_value = field._default_constructor(self)
+
+      # Atomically check if another thread has preempted us and, if not, swap
+      # in the new object we just created.  If someone has preempted us, we
+      # take that object and discard ours.
+      # WARNING:  We are relying on setdefault() being atomic.  This is true
+      #   in CPython but we haven't investigated others.  This warning appears
+      #   in several other locations in this file.
+      field_value = self._fields.setdefault(field, field_value)
+    return field_value
+  getter.__module__ = None
+  getter.__doc__ = 'Getter for %s.' % proto_field_name
+
+  # We define a setter just so we can throw an exception with a more
+  # helpful error message.
+  def setter(self, new_value):
+    raise AttributeError('Assignment not allowed to repeated field '
+                         '"%s" in protocol message object.' % proto_field_name)
+
+  doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
+  setattr(cls, property_name, property(getter, setter, doc=doc))
+
+
+def _AddPropertiesForNonRepeatedScalarField(field, cls):
+  """Adds a public property for a nonrepeated, scalar protocol message field.
+  Clients can use this property to get and directly set the value of the field.
+  Note that when the client sets the value of a field by using this property,
+  all necessary "has" bits are set as a side-effect, and we also perform
+  type-checking.
+
+  Args:
+    field: A FieldDescriptor for this field.
+    cls: The class we're constructing.
+  """
+  proto_field_name = field.name
+  property_name = _PropertyName(proto_field_name)
+  type_checker = type_checkers.GetTypeChecker(field)
+  default_value = field.default_value
+  # NOTE(review): 'valid_values' is never used below.
+  valid_values = set()
+
+  def getter(self):
+    # TODO(protobuf-team): This may be broken since there may not be
+    # default_value.  Combine with has_default_value somehow.
+    return self._fields.get(field, default_value)
+  getter.__module__ = None
+  getter.__doc__ = 'Getter for %s.' % proto_field_name
+  def field_setter(self, new_value):
+    # pylint: disable=protected-access
+    self._fields[field] = type_checker.CheckValue(new_value)
+    # Check _cached_byte_size_dirty inline to improve performance, since scalar
+    # setters are called frequently.
+    if not self._cached_byte_size_dirty:
+      self._Modified()
+
+  # Fields in a oneof additionally record which member is currently set.
+  if field.containing_oneof is not None:
+    def setter(self, new_value):
+      field_setter(self, new_value)
+      self._UpdateOneofState(field)
+  else:
+    setter = field_setter
+
+  setter.__module__ = None
+  setter.__doc__ = 'Setter for %s.' % proto_field_name
+
+  # Add a property to encapsulate the getter/setter.
+  doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
+  setattr(cls, property_name, property(getter, setter, doc=doc))
+
+
+def _AddPropertiesForNonRepeatedCompositeField(field, cls):
+  """Adds a public property for a nonrepeated, composite protocol message field.
+  A composite field is a "group" or "message" field.
+
+  Clients can use this property to get the value of the field, but cannot
+  assign to the property directly.
+
+  Args:
+    field: A FieldDescriptor for this field.
+    cls: The class we're constructing.
+  """
+  # TODO(robinson): Remove duplication with similar method
+  # for non-repeated scalars.
+  proto_field_name = field.name
+  property_name = _PropertyName(proto_field_name)
+
+  # TODO(komarek): Can anyone explain to me why we cache the message_type this
+  # way, instead of referring to field.message_type inside of getter(self)?
+  # What if someone sets message_type later on (which makes for simpler
+  # dyanmic proto descriptor and class creation code).
+  message_type = field.message_type
+
+  def getter(self):
+    # Lazily create the sub-message on first access.
+    field_value = self._fields.get(field)
+    if field_value is None:
+      # Construct a new object to represent this field.
+      field_value = message_type._concrete_class()  # use field.message_type?
+      # Oneof members get a listener that also updates oneof state.
+      field_value._SetListener(
+          _OneofListener(self, field)
+          if field.containing_oneof is not None
+          else self._listener_for_children)
+
+      # Atomically check if another thread has preempted us and, if not, swap
+      # in the new object we just created.  If someone has preempted us, we
+      # take that object and discard ours.
+      # WARNING:  We are relying on setdefault() being atomic.  This is true
+      #   in CPython but we haven't investigated others.  This warning appears
+      #   in several other locations in this file.
+      field_value = self._fields.setdefault(field, field_value)
+    return field_value
+  getter.__module__ = None
+  getter.__doc__ = 'Getter for %s.' % proto_field_name
+
+  # We define a setter just so we can throw an exception with a more
+  # helpful error message.
+  def setter(self, new_value):
+    raise AttributeError('Assignment not allowed to composite field '
+                         '"%s" in protocol message object.' % proto_field_name)
+
+  # Add a property to encapsulate the getter.
+  doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
+  setattr(cls, property_name, property(getter, setter, doc=doc))
+
+
+def _AddPropertiesForExtensions(descriptor, cls):
+  """Adds class-level FOO_FIELD_NUMBER constants for all extensions declared
+  in this protocol message type."""
+  extension_dict = descriptor.extensions_by_name
+  for extension_name, extension_field in extension_dict.iteritems():
+    constant_name = extension_name.upper() + "_FIELD_NUMBER"
+    setattr(cls, constant_name, extension_field.number)
+
+
+def _AddStaticMethods(cls):
+  """Adds the RegisterExtension and FromString static methods to cls."""
+  # TODO(robinson): This probably needs to be thread-safe(?)
+  def RegisterExtension(extension_handle):
+    extension_handle.containing_type = cls.DESCRIPTOR
+    _AttachFieldHelpers(cls, extension_handle)
+
+    # Try to insert our extension, failing if an extension with the same number
+    # already exists.
+    actual_handle = cls._extensions_by_number.setdefault(
+        extension_handle.number, extension_handle)
+    if actual_handle is not extension_handle:
+      raise AssertionError(
+          'Extensions "%s" and "%s" both try to extend message type "%s" with '
+          'field number %d.' %
+          (extension_handle.full_name, actual_handle.full_name,
+           cls.DESCRIPTOR.full_name, extension_handle.number))
+
+    cls._extensions_by_name[extension_handle.full_name] = extension_handle
+
+    handle = extension_handle  # avoid line wrapping
+    if _IsMessageSetExtension(handle):
+      # MessageSet extension.  Also register under type name.
+      cls._extensions_by_name[
+          extension_handle.message_type.full_name] = extension_handle
+
+  cls.RegisterExtension = staticmethod(RegisterExtension)
+
+  def FromString(s):
+    # Parses a new message instance from serialized bytes.
+    message = cls()
+    message.MergeFromString(s)
+    return message
+  cls.FromString = staticmethod(FromString)
+
+
+def _IsPresent(item):
+  """Given a (FieldDescriptor, value) tuple from _fields, return true if the
+  value should be included in the list returned by ListFields()."""
+
+  if item[0].label == _FieldDescriptor.LABEL_REPEATED:
+    # Repeated fields count as present only when non-empty.
+    return bool(item[1])
+  elif item[0].cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+    # Sub-messages track their own presence bit.
+    return item[1]._is_present_in_parent
+  else:
+    # A scalar in _fields was explicitly set.
+    return True
+
+
+def _AddListFieldsMethod(message_descriptor, cls):
+  """Helper for _AddMessageMethods()."""
+
+  def ListFields(self):
+    # Present fields only, sorted by field number.
+    all_fields = [item for item in self._fields.iteritems() if _IsPresent(item)]
+    all_fields.sort(key = lambda item: item[0].number)
+    return all_fields
+
+  cls.ListFields = ListFields
+
+
+def _AddHasFieldMethod(message_descriptor, cls):
+  """Helper for _AddMessageMethods()."""
+
+  # Precompute the name -> descriptor map once per class; HasField() only
+  # works for singular fields and for oneofs.
+  singular_fields = {}
+  for field in message_descriptor.fields:
+    if field.label != _FieldDescriptor.LABEL_REPEATED:
+      singular_fields[field.name] = field
+  # Fields inside oneofs are never repeated (enforced by the compiler).
+  for field in message_descriptor.oneofs:
+    singular_fields[field.name] = field
+
+  def HasField(self, field_name):
+    try:
+      field = singular_fields[field_name]
+    except KeyError:
+      raise ValueError(
+          'Protocol message has no singular "%s" field.' % field_name)
+
+    if isinstance(field, descriptor_mod.OneofDescriptor):
+      # For a oneof, delegate to the currently-set member field (if any).
+      try:
+        return HasField(self, self._oneofs[field].name)
+      except KeyError:
+        return False
+    else:
+      if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+        value = self._fields.get(field)
+        return value is not None and value._is_present_in_parent
+      else:
+        return field in self._fields
+
+  cls.HasField = HasField
+
+
+def _AddClearFieldMethod(message_descriptor, cls):
+  """Helper for _AddMessageMethods()."""
+  def ClearField(self, field_name):
+    try:
+      field = message_descriptor.fields_by_name[field_name]
+    except KeyError:
+      # Not a plain field; maybe it names a oneof, in which case we clear
+      # whichever member is currently set.
+      try:
+        field = message_descriptor.oneofs_by_name[field_name]
+        if field in self._oneofs:
+          field = self._oneofs[field]
+        else:
+          return
+      except KeyError:
+        raise ValueError('Protocol message has no "%s" field.' % field_name)
+
+    if field in self._fields:
+      # Note:  If the field is a sub-message, its listener will still point
+      #   at us.  That's fine, because the worst that can happen is that it
+      #   will call _Modified() and invalidate our byte size.  Big deal.
+      del self._fields[field]
+
+      if self._oneofs.get(field.containing_oneof, None) is field:
+        del self._oneofs[field.containing_oneof]
+
+    # Always call _Modified() -- even if nothing was changed, this is
+    # a mutating method, and thus calling it should cause the field to become
+    # present in the parent message.
+    self._Modified()
+
+  cls.ClearField = ClearField
+
+
+def _AddClearExtensionMethod(cls):
+  """Helper for _AddMessageMethods()."""
+  def ClearExtension(self, extension_handle):
+    _VerifyExtensionHandle(self, extension_handle)
+
+    # Similar to ClearField(), above.
+    if extension_handle in self._fields:
+      del self._fields[extension_handle]
+    self._Modified()
+  cls.ClearExtension = ClearExtension
+
+
+def _AddClearMethod(message_descriptor, cls):
+  """Helper for _AddMessageMethods()."""
+  def Clear(self):
+    # Clear fields.
+    self._fields = {}
+    self._unknown_fields = ()
+    # NOTE(review): _oneofs is not reset here, unlike in __init__ --
+    # presumably stale oneof state is harmless once _fields is emptied;
+    # confirm against upstream.
+    self._Modified()
+  cls.Clear = Clear
+
+
+def _AddHasExtensionMethod(cls):
+  """Helper for _AddMessageMethods()."""
+  def HasExtension(self, extension_handle):
+    _VerifyExtensionHandle(self, extension_handle)
+    # Presence is undefined for repeated extensions, mirroring HasField().
+    if extension_handle.label == _FieldDescriptor.LABEL_REPEATED:
+      raise KeyError('"%s" is repeated.' % extension_handle.full_name)
+
+    if extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+      value = self._fields.get(extension_handle)
+      return value is not None and value._is_present_in_parent
+    else:
+      return extension_handle in self._fields
+  cls.HasExtension = HasExtension
+
+
+def _AddEqualsMethod(message_descriptor, cls):
+  """Helper for _AddMessageMethods()."""
+  # NOTE(review): no matching __ne__ is installed here; under Python 2,
+  # '!=' does not automatically delegate to __eq__ -- confirm it is defined
+  # elsewhere (e.g. on the Message base class).
+  def __eq__(self, other):
+    if (not isinstance(other, message_mod.Message) or
+        other.DESCRIPTOR != self.DESCRIPTOR):
+      return False
+
+    if self is other:
+      return True
+
+    if not self.ListFields() == other.ListFields():
+      return False
+
+    # Sort unknown fields because their order shouldn't affect equality test.
+    unknown_fields = list(self._unknown_fields)
+    unknown_fields.sort()
+    other_unknown_fields = list(other._unknown_fields)
+    other_unknown_fields.sort()
+
+    return unknown_fields == other_unknown_fields
+
+  cls.__eq__ = __eq__
+
+
+def _AddStrMethod(message_descriptor, cls):
+  """Helper for _AddMessageMethods()."""
+  def __str__(self):
+    return text_format.MessageToString(self)
+  cls.__str__ = __str__
+
+
+def _AddUnicodeMethod(unused_message_descriptor, cls):
+  """Helper for _AddMessageMethods()."""
+
+  def __unicode__(self):
+    return text_format.MessageToString(self, as_utf8=True).decode('utf-8')
+  cls.__unicode__ = __unicode__
+
+
+def _AddSetListenerMethod(cls):
+  """Helper for _AddMessageMethods()."""
+  def SetListener(self, listener):
+    if listener is None:
+      self._listener = message_listener_mod.NullMessageListener()
+    else:
+      self._listener = listener
+  cls._SetListener = SetListener
+
+
+def _BytesForNonRepeatedElement(value, field_number, field_type):
+  """Returns the number of bytes needed to serialize a non-repeated element.
+  The returned byte count includes space for tag information and any
+  other additional space associated with serializing value.
+
+  Args:
+    value: Value we're serializing.
+    field_number: Field number of this value.  (Since the field number
+      is stored as part of a varint-encoded tag, this has an impact
+      on the total bytes required to serialize the value).
+    field_type: The type of the field.  One of the TYPE_* constants
+      within FieldDescriptor.
+  """
+  try:
+    fn = type_checkers.TYPE_TO_BYTE_SIZE_FN[field_type]
+    return fn(field_number, value)
+  except KeyError:
+    raise message_mod.EncodeError('Unrecognized field type: %d' % field_type)
+
+
+def _AddByteSizeMethod(message_descriptor, cls):
+  """Helper for _AddMessageMethods()."""
+
+  def ByteSize(self):
+    if not self._cached_byte_size_dirty:
+      return self._cached_byte_size
+
+    size = 0
+    for field_descriptor, field_value in self.ListFields():
+      size += field_descriptor._sizer(field_value)
+
+    for tag_bytes, value_bytes in self._unknown_fields:
+      size += len(tag_bytes) + len(value_bytes)
+
+    self._cached_byte_size = size
+    self._cached_byte_size_dirty = False
+    self._listener_for_children.dirty = False
+    return size
+
+  cls.ByteSize = ByteSize
+
+
+def _AddSerializeToStringMethod(message_descriptor, cls):
+  """Helper for _AddMessageMethods()."""
+
+  def SerializeToString(self):
+    # Check if the message has all of its required fields set.
+    errors = []
+    if not self.IsInitialized():
+      raise message_mod.EncodeError(
+          'Message %s is missing required fields: %s' % (
+          self.DESCRIPTOR.full_name, ','.join(self.FindInitializationErrors())))
+    return self.SerializePartialToString()
+  cls.SerializeToString = SerializeToString
+
+
+def _AddSerializePartialToStringMethod(message_descriptor, cls):
+  """Helper for _AddMessageMethods()."""
+
+  def SerializePartialToString(self):
+    out = BytesIO()
+    self._InternalSerialize(out.write)
+    return out.getvalue()
+  cls.SerializePartialToString = SerializePartialToString
+
+  def InternalSerialize(self, write_bytes):
+    for field_descriptor, field_value in self.ListFields():
+      field_descriptor._encoder(write_bytes, field_value)
+    for tag_bytes, value_bytes in self._unknown_fields:
+      write_bytes(tag_bytes)
+      write_bytes(value_bytes)
+  cls._InternalSerialize = InternalSerialize
+
+
+def _AddMergeFromStringMethod(message_descriptor, cls):
+  """Helper for _AddMessageMethods()."""
+  def MergeFromString(self, serialized):
+    length = len(serialized)
+    try:
+      if self._InternalParse(serialized, 0, length) != length:
+        # The only reason _InternalParse would return early is if it
+        # encountered an end-group tag.
+        raise message_mod.DecodeError('Unexpected end-group tag.')
+    except (IndexError, TypeError):
+      # Now ord(buf[p:p+1]) == ord('') gets TypeError.
+      raise message_mod.DecodeError('Truncated message.')
+    except struct.error, e:
+      raise message_mod.DecodeError(e)
+    return length   # Return this for legacy reasons.
+  cls.MergeFromString = MergeFromString
+
+  local_ReadTag = decoder.ReadTag
+  local_SkipField = decoder.SkipField
+  decoders_by_tag = cls._decoders_by_tag
+
+  def InternalParse(self, buffer, pos, end):
+    self._Modified()
+    field_dict = self._fields
+    unknown_field_list = self._unknown_fields
+    while pos != end:
+      (tag_bytes, new_pos) = local_ReadTag(buffer, pos)
+      field_decoder = decoders_by_tag.get(tag_bytes)
+      if field_decoder is None:
+        value_start_pos = new_pos
+        new_pos = local_SkipField(buffer, new_pos, end, tag_bytes)
+        if new_pos == -1:
+          return pos
+        if not unknown_field_list:
+          unknown_field_list = self._unknown_fields = []
+        unknown_field_list.append((tag_bytes, buffer[value_start_pos:new_pos]))
+        pos = new_pos
+      else:
+        pos = field_decoder(buffer, new_pos, end, self, field_dict)
+    return pos
+  cls._InternalParse = InternalParse
+
+
+def _AddIsInitializedMethod(message_descriptor, cls):
+  """Adds the IsInitialized and FindInitializationError methods to the
+  protocol message class."""
+
+  required_fields = [field for field in message_descriptor.fields
+                           if field.label == _FieldDescriptor.LABEL_REQUIRED]
+
+  def IsInitialized(self, errors=None):
+    """Checks if all required fields of a message are set.
+
+    Args:
+      errors:  A list which, if provided, will be populated with the field
+               paths of all missing required fields.
+
+    Returns:
+      True iff the specified message has all required fields set.
+    """
+
+    # Performance is critical so we avoid HasField() and ListFields().
+
+    for field in required_fields:
+      if (field not in self._fields or
+          (field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE and
+           not self._fields[field]._is_present_in_parent)):
+        if errors is not None:
+          errors.extend(self.FindInitializationErrors())
+        return False
+
+    for field, value in list(self._fields.items()):  # dict can change size!
+      if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+        if field.label == _FieldDescriptor.LABEL_REPEATED:
+          for element in value:
+            if not element.IsInitialized():
+              if errors is not None:
+                errors.extend(self.FindInitializationErrors())
+              return False
+        elif value._is_present_in_parent and not value.IsInitialized():
+          if errors is not None:
+            errors.extend(self.FindInitializationErrors())
+          return False
+
+    return True
+
+  cls.IsInitialized = IsInitialized
+
+  def FindInitializationErrors(self):
+    """Finds required fields which are not initialized.
+
+    Returns:
+      A list of strings.  Each string is a path to an uninitialized field from
+      the top-level message, e.g. "foo.bar[5].baz".
+    """
+
+    errors = []  # simplify things
+
+    for field in required_fields:
+      if not self.HasField(field.name):
+        errors.append(field.name)
+
+    for field, value in self.ListFields():
+      if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+        if field.is_extension:
+          name = "(%s)" % field.full_name
+        else:
+          name = field.name
+
+        if field.label == _FieldDescriptor.LABEL_REPEATED:
+          for i in xrange(len(value)):
+            element = value[i]
+            prefix = "%s[%d]." % (name, i)
+            sub_errors = element.FindInitializationErrors()
+            errors += [ prefix + error for error in sub_errors ]
+        else:
+          prefix = name + "."
+          sub_errors = value.FindInitializationErrors()
+          errors += [ prefix + error for error in sub_errors ]
+
+    return errors
+
+  cls.FindInitializationErrors = FindInitializationErrors
+
+
+def _AddMergeFromMethod(cls):
+  LABEL_REPEATED = _FieldDescriptor.LABEL_REPEATED
+  CPPTYPE_MESSAGE = _FieldDescriptor.CPPTYPE_MESSAGE
+
+  def MergeFrom(self, msg):
+    if not isinstance(msg, cls):
+      raise TypeError(
+          "Parameter to MergeFrom() must be instance of same class: "
+          "expected %s got %s." % (cls.__name__, type(msg).__name__))
+
+    assert msg is not self
+    self._Modified()
+
+    fields = self._fields
+
+    for field, value in msg._fields.iteritems():
+      if field.label == LABEL_REPEATED:
+        field_value = fields.get(field)
+        if field_value is None:
+          # Construct a new object to represent this field.
+          field_value = field._default_constructor(self)
+          fields[field] = field_value
+        field_value.MergeFrom(value)
+      elif field.cpp_type == CPPTYPE_MESSAGE:
+        if value._is_present_in_parent:
+          field_value = fields.get(field)
+          if field_value is None:
+            # Construct a new object to represent this field.
+            field_value = field._default_constructor(self)
+            fields[field] = field_value
+          field_value.MergeFrom(value)
+      else:
+        self._fields[field] = value
+
+    if msg._unknown_fields:
+      if not self._unknown_fields:
+        self._unknown_fields = []
+      self._unknown_fields.extend(msg._unknown_fields)
+
+  cls.MergeFrom = MergeFrom
+
+
+def _AddWhichOneofMethod(message_descriptor, cls):
+  def WhichOneof(self, oneof_name):
+    """Returns the name of the currently set field inside a oneof, or None."""
+    try:
+      field = message_descriptor.oneofs_by_name[oneof_name]
+    except KeyError:
+      raise ValueError(
+          'Protocol message has no oneof "%s" field.' % oneof_name)
+
+    nested_field = self._oneofs.get(field, None)
+    if nested_field is not None and self.HasField(nested_field.name):
+      return nested_field.name
+    else:
+      return None
+
+  cls.WhichOneof = WhichOneof
+
+
+def _AddMessageMethods(message_descriptor, cls):
+  """Adds implementations of all Message methods to cls."""
+  _AddListFieldsMethod(message_descriptor, cls)
+  _AddHasFieldMethod(message_descriptor, cls)
+  _AddClearFieldMethod(message_descriptor, cls)
+  if message_descriptor.is_extendable:
+    _AddClearExtensionMethod(cls)
+    _AddHasExtensionMethod(cls)
+  _AddClearMethod(message_descriptor, cls)
+  _AddEqualsMethod(message_descriptor, cls)
+  _AddStrMethod(message_descriptor, cls)
+  _AddUnicodeMethod(message_descriptor, cls)
+  _AddSetListenerMethod(cls)
+  _AddByteSizeMethod(message_descriptor, cls)
+  _AddSerializeToStringMethod(message_descriptor, cls)
+  _AddSerializePartialToStringMethod(message_descriptor, cls)
+  _AddMergeFromStringMethod(message_descriptor, cls)
+  _AddIsInitializedMethod(message_descriptor, cls)
+  _AddMergeFromMethod(cls)
+  _AddWhichOneofMethod(message_descriptor, cls)
+
+def _AddPrivateHelperMethods(message_descriptor, cls):
+  """Adds implementation of private helper methods to cls."""
+
+  def Modified(self):
+    """Sets the _cached_byte_size_dirty bit to true,
+    and propagates this to our listener iff this was a state change.
+    """
+
+    # Note:  Some callers check _cached_byte_size_dirty before calling
+    #   _Modified() as an extra optimization.  So, if this method is ever
+    #   changed such that it does stuff even when _cached_byte_size_dirty is
+    #   already true, the callers need to be updated.
+    if not self._cached_byte_size_dirty:
+      self._cached_byte_size_dirty = True
+      self._listener_for_children.dirty = True
+      self._is_present_in_parent = True
+      self._listener.Modified()
+
+  def _UpdateOneofState(self, field):
+    """Sets field as the active field in its containing oneof.
+
+    Will also delete currently active field in the oneof, if it is different
+    from the argument. Does not mark the message as modified.
+    """
+    other_field = self._oneofs.setdefault(field.containing_oneof, field)
+    if other_field is not field:
+      del self._fields[other_field]
+      self._oneofs[field.containing_oneof] = field
+
+  cls._Modified = Modified
+  cls.SetInParent = Modified
+  cls._UpdateOneofState = _UpdateOneofState
+
+
+class _Listener(object):
+
+  """MessageListener implementation that a parent message registers with its
+  child message.
+
+  In order to support semantics like:
+
+    foo.bar.baz.qux = 23
+    assert foo.HasField('bar')
+
+  ...child objects must have back references to their parents.
+  This helper class is at the heart of this support.
+  """
+
+  def __init__(self, parent_message):
+    """Args:
+      parent_message: The message whose _Modified() method we should call when
+        we receive Modified() messages.
+    """
+    # This listener establishes a back reference from a child (contained) object
+    # to its parent (containing) object.  We make this a weak reference to avoid
+    # creating cyclic garbage when the client finishes with the 'parent' object
+    # in the tree.
+    if isinstance(parent_message, weakref.ProxyType):
+      self._parent_message_weakref = parent_message
+    else:
+      self._parent_message_weakref = weakref.proxy(parent_message)
+
+    # As an optimization, we also indicate directly on the listener whether
+    # or not the parent message is dirty.  This way we can avoid traversing
+    # up the tree in the common case.
+    self.dirty = False
+
+  def Modified(self):
+    if self.dirty:
+      return
+    try:
+      # Propagate the signal to our parents iff this is the first field set.
+      self._parent_message_weakref._Modified()
+    except ReferenceError:
+      # We can get here if a client has kept a reference to a child object,
+      # and is now setting a field on it, but the child's parent has been
+      # garbage-collected.  This is not an error.
+      pass
+
+
+class _OneofListener(_Listener):
+  """Special listener implementation for setting composite oneof fields."""
+
+  def __init__(self, parent_message, field):
+    """Args:
+      parent_message: The message whose _Modified() method we should call when
+        we receive Modified() messages.
+      field: The descriptor of the field being set in the parent message.
+    """
+    super(_OneofListener, self).__init__(parent_message)
+    self._field = field
+
+  def Modified(self):
+    """Also updates the state of the containing oneof in the parent message."""
+    try:
+      self._parent_message_weakref._UpdateOneofState(self._field)
+      super(_OneofListener, self).Modified()
+    except ReferenceError:
+      pass
+
+
+# TODO(robinson): Move elsewhere?  This file is getting pretty ridiculous...
+# TODO(robinson): Unify error handling of "unknown extension" crap.
+# TODO(robinson): Support iteritems()-style iteration over all
+# extensions with the "has" bits turned on?
+class _ExtensionDict(object):
+
+  """Dict-like container for supporting an indexable "Extensions"
+  field on proto instances.
+
+  Note that in all cases we expect extension handles to be
+  FieldDescriptors.
+  """
+
+  def __init__(self, extended_message):
+    """extended_message: Message instance for which we are the Extensions dict.
+    """
+
+    self._extended_message = extended_message
+
+  def __getitem__(self, extension_handle):
+    """Returns the current value of the given extension handle."""
+
+    _VerifyExtensionHandle(self._extended_message, extension_handle)
+
+    result = self._extended_message._fields.get(extension_handle)
+    if result is not None:
+      return result
+
+    if extension_handle.label == _FieldDescriptor.LABEL_REPEATED:
+      result = extension_handle._default_constructor(self._extended_message)
+    elif extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+      result = extension_handle.message_type._concrete_class()
+      try:
+        result._SetListener(self._extended_message._listener_for_children)
+      except ReferenceError:
+        pass
+    else:
+      # Singular scalar -- just return the default without inserting into the
+      # dict.
+      return extension_handle.default_value
+
+    # Atomically check if another thread has preempted us and, if not, swap
+    # in the new object we just created.  If someone has preempted us, we
+    # take that object and discard ours.
+    # WARNING:  We are relying on setdefault() being atomic.  This is true
+    #   in CPython but we haven't investigated others.  This warning appears
+    #   in several other locations in this file.
+    result = self._extended_message._fields.setdefault(
+        extension_handle, result)
+
+    return result
+
+  def __eq__(self, other):
+    if not isinstance(other, self.__class__):
+      return False
+
+    my_fields = self._extended_message.ListFields()
+    other_fields = other._extended_message.ListFields()
+
+    # Get rid of non-extension fields.
+    my_fields    = [ field for field in my_fields    if field.is_extension ]
+    other_fields = [ field for field in other_fields if field.is_extension ]
+
+    return my_fields == other_fields
+
+  def __ne__(self, other):
+    return not self == other
+
+  def __hash__(self):
+    raise TypeError('unhashable object')
+
+  # Note that this is only meaningful for non-repeated, scalar extension
+  # fields.  Note also that we may have to call _Modified() when we do
+  # successfully set a field this way, to set any necessary "has" bits in the
+  # ancestors of the extended message.
+  def __setitem__(self, extension_handle, value):
+    """If extension_handle specifies a non-repeated, scalar extension
+    field, sets the value of that field.
+    """
+
+    _VerifyExtensionHandle(self._extended_message, extension_handle)
+
+    if (extension_handle.label == _FieldDescriptor.LABEL_REPEATED or
+        extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE):
+      raise TypeError(
+          'Cannot assign to extension "%s" because it is a repeated or '
+          'composite type.' % extension_handle.full_name)
+
+    # It's slightly wasteful to lookup the type checker each time,
+    # but we expect this to be a vanishingly uncommon case anyway.
+    type_checker = type_checkers.GetTypeChecker(
+        extension_handle)
+    # pylint: disable=protected-access
+    self._extended_message._fields[extension_handle] = (
+      type_checker.CheckValue(value))
+    self._extended_message._Modified()
+
+  def _FindExtensionByName(self, name):
+    """Tries to find a known extension with the specified name.
+
+    Args:
+      name: Extension full name.
+
+    Returns:
+      Extension field descriptor.
+    """
+    return self._extended_message._extensions_by_name.get(name, None)
diff --git a/third_party/protobuf26/internal/type_checkers.py b/third_party/protobuf26/internal/type_checkers.py
new file mode 100644
index 0000000..779c09c
--- /dev/null
+++ b/third_party/protobuf26/internal/type_checkers.py
@@ -0,0 +1,328 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc.  All rights reserved.
+# http://code.google.com/p/protobuf/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#PY25 compatible for GAE.
+#
+# Copyright 2008 Google Inc. All Rights Reserved.
+
+"""Provides type checking routines.
+
+This module defines type checking utilities in the forms of dictionaries:
+
+VALUE_CHECKERS: A dictionary of field types and a value validation object.
+TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing
+  function.
+TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and serialization
+  function.
+FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field types and their
+  corresponding wire types.
+TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization
+  function.
+"""
+
+__author__ = 'robinson@google.com (Will Robinson)'
+
+import sys  ##PY25
+if sys.version < '2.6': bytes = str  ##PY25
+from protobuf26.internal import api_implementation
+from protobuf26.internal import decoder
+from protobuf26.internal import encoder
+from protobuf26.internal import wire_format
+from protobuf26 import descriptor
+
+_FieldDescriptor = descriptor.FieldDescriptor
+
+
+def GetTypeChecker(field):
+  """Returns a type checker for a message field of the specified types.
+
+  Args:
+    field: FieldDescriptor object for this field.
+
+  Returns:
+    An instance of TypeChecker which can be used to verify the types
+    of values assigned to a field of the specified type.
+  """
+  if (field.cpp_type == _FieldDescriptor.CPPTYPE_STRING and
+      field.type == _FieldDescriptor.TYPE_STRING):
+    return UnicodeValueChecker()
+  if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
+    return EnumValueChecker(field.enum_type)
+  return _VALUE_CHECKERS[field.cpp_type]
+
+
+# None of the typecheckers below make any attempt to guard against people
+# subclassing builtin types and doing weird things.  We're not trying to
+# protect against malicious clients here, just people accidentally shooting
+# themselves in the foot in obvious ways.
+
+class TypeChecker(object):
+
+  """Type checker used to catch type errors as early as possible
+  when the client is setting scalar fields in protocol messages.
+  """
+
+  def __init__(self, *acceptable_types):
+    self._acceptable_types = acceptable_types
+
+  def CheckValue(self, proposed_value):
+    """Type check the provided value and return it.
+
+    The returned value might have been normalized to another type.
+    """
+    if not isinstance(proposed_value, self._acceptable_types):
+      message = ('%.1024r has type %s, but expected one of: %s' %
+                 (proposed_value, type(proposed_value), self._acceptable_types))
+      raise TypeError(message)
+    return proposed_value
+
+
+# IntValueChecker and its subclasses perform integer type-checks
+# and bounds-checks.
+class IntValueChecker(object):
+
+  """Checker used for integer fields.  Performs type-check and range check."""
+
+  def CheckValue(self, proposed_value):
+    if not isinstance(proposed_value, (int, long)):
+      message = ('%.1024r has type %s, but expected one of: %s' %
+                 (proposed_value, type(proposed_value), (int, long)))
+      raise TypeError(message)
+    if not self._MIN <= proposed_value <= self._MAX:
+      raise ValueError('Value out of range: %d' % proposed_value)
+    # We force 32-bit values to int and 64-bit values to long to make
+    # alternate implementations where the distinction is more significant
+    # (e.g. the C++ implementation) simpler.
+    proposed_value = self._TYPE(proposed_value)
+    return proposed_value
+
+
+class EnumValueChecker(object):
+
+  """Checker used for enum fields.  Performs type-check and range check."""
+
+  def __init__(self, enum_type):
+    self._enum_type = enum_type
+
+  def CheckValue(self, proposed_value):
+    if not isinstance(proposed_value, (int, long)):
+      message = ('%.1024r has type %s, but expected one of: %s' %
+                 (proposed_value, type(proposed_value), (int, long)))
+      raise TypeError(message)
+    if proposed_value not in self._enum_type.values_by_number:
+      raise ValueError('Unknown enum value: %d' % proposed_value)
+    return proposed_value
+
+
+class UnicodeValueChecker(object):
+
+  """Checker used for string fields.
+
+  Always returns a unicode value, even if the input is of type str.
+  """
+
+  def CheckValue(self, proposed_value):
+    if not isinstance(proposed_value, (bytes, unicode)):
+      message = ('%.1024r has type %s, but expected one of: %s' %
+                 (proposed_value, type(proposed_value), (bytes, unicode)))
+      raise TypeError(message)
+
+    # If the value is of type 'bytes' make sure that it is in 7-bit ASCII
+    # encoding.
+    if isinstance(proposed_value, bytes):
+      try:
+        proposed_value = proposed_value.decode('ascii')
+      except UnicodeDecodeError:
+        raise ValueError('%.1024r has type bytes, but isn\'t in 7-bit ASCII '
+                         'encoding. Non-ASCII strings must be converted to '
+                         'unicode objects before being added.' %
+                         (proposed_value))
+    return proposed_value
+
+
+class Int32ValueChecker(IntValueChecker):
+  # We're sure to use ints instead of longs here since comparison may be more
+  # efficient.
+  _MIN = -2147483648
+  _MAX = 2147483647
+  _TYPE = int
+
+
+class Uint32ValueChecker(IntValueChecker):
+  _MIN = 0
+  _MAX = (1 << 32) - 1
+  _TYPE = int
+
+
+class Int64ValueChecker(IntValueChecker):
+  _MIN = -(1 << 63)
+  _MAX = (1 << 63) - 1
+  _TYPE = long
+
+
+class Uint64ValueChecker(IntValueChecker):
+  _MIN = 0
+  _MAX = (1 << 64) - 1
+  _TYPE = long
+
+
+# Type-checkers for all scalar CPPTYPEs.
+_VALUE_CHECKERS = {
+    _FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(),
+    _FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(),
+    _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(),
+    _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(),
+    _FieldDescriptor.CPPTYPE_DOUBLE: TypeChecker(
+        float, int, long),
+    _FieldDescriptor.CPPTYPE_FLOAT: TypeChecker(
+        float, int, long),
+    _FieldDescriptor.CPPTYPE_BOOL: TypeChecker(bool, int),
+    _FieldDescriptor.CPPTYPE_STRING: TypeChecker(bytes),
+    }
+
+
+# Map from field type to a function F, such that F(field_num, value)
+# gives the total byte size for a value of the given type.  This
+# byte size includes tag information and any other additional space
+# associated with serializing "value".
+TYPE_TO_BYTE_SIZE_FN = {
+    _FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize,
+    _FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize,
+    _FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize,
+    _FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize,
+    _FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize,
+    _FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize,
+    _FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize,
+    _FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize,
+    _FieldDescriptor.TYPE_STRING: wire_format.StringByteSize,
+    _FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize,
+    _FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize,
+    _FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize,
+    _FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize,
+    _FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize,
+    _FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize,
+    _FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize,
+    _FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize,
+    _FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize
+    }
+
+
+# Maps from field types to encoder constructors.
+TYPE_TO_ENCODER = {
+    _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleEncoder,
+    _FieldDescriptor.TYPE_FLOAT: encoder.FloatEncoder,
+    _FieldDescriptor.TYPE_INT64: encoder.Int64Encoder,
+    _FieldDescriptor.TYPE_UINT64: encoder.UInt64Encoder,
+    _FieldDescriptor.TYPE_INT32: encoder.Int32Encoder,
+    _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Encoder,
+    _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Encoder,
+    _FieldDescriptor.TYPE_BOOL: encoder.BoolEncoder,
+    _FieldDescriptor.TYPE_STRING: encoder.StringEncoder,
+    _FieldDescriptor.TYPE_GROUP: encoder.GroupEncoder,
+    _FieldDescriptor.TYPE_MESSAGE: encoder.MessageEncoder,
+    _FieldDescriptor.TYPE_BYTES: encoder.BytesEncoder,
+    _FieldDescriptor.TYPE_UINT32: encoder.UInt32Encoder,
+    _FieldDescriptor.TYPE_ENUM: encoder.EnumEncoder,
+    _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Encoder,
+    _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Encoder,
+    _FieldDescriptor.TYPE_SINT32: encoder.SInt32Encoder,
+    _FieldDescriptor.TYPE_SINT64: encoder.SInt64Encoder,
+    }
+
+
+# Maps from field types to sizer constructors.
+TYPE_TO_SIZER = {
+    _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleSizer,
+    _FieldDescriptor.TYPE_FLOAT: encoder.FloatSizer,
+    _FieldDescriptor.TYPE_INT64: encoder.Int64Sizer,
+    _FieldDescriptor.TYPE_UINT64: encoder.UInt64Sizer,
+    _FieldDescriptor.TYPE_INT32: encoder.Int32Sizer,
+    _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Sizer,
+    _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Sizer,
+    _FieldDescriptor.TYPE_BOOL: encoder.BoolSizer,
+    _FieldDescriptor.TYPE_STRING: encoder.StringSizer,
+    _FieldDescriptor.TYPE_GROUP: encoder.GroupSizer,
+    _FieldDescriptor.TYPE_MESSAGE: encoder.MessageSizer,
+    _FieldDescriptor.TYPE_BYTES: encoder.BytesSizer,
+    _FieldDescriptor.TYPE_UINT32: encoder.UInt32Sizer,
+    _FieldDescriptor.TYPE_ENUM: encoder.EnumSizer,
+    _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Sizer,
+    _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Sizer,
+    _FieldDescriptor.TYPE_SINT32: encoder.SInt32Sizer,
+    _FieldDescriptor.TYPE_SINT64: encoder.SInt64Sizer,
+    }
+
+
+# Maps from field type to a decoder constructor.
+TYPE_TO_DECODER = {
+    _FieldDescriptor.TYPE_DOUBLE: decoder.DoubleDecoder,
+    _FieldDescriptor.TYPE_FLOAT: decoder.FloatDecoder,
+    _FieldDescriptor.TYPE_INT64: decoder.Int64Decoder,
+    _FieldDescriptor.TYPE_UINT64: decoder.UInt64Decoder,
+    _FieldDescriptor.TYPE_INT32: decoder.Int32Decoder,
+    _FieldDescriptor.TYPE_FIXED64: decoder.Fixed64Decoder,
+    _FieldDescriptor.TYPE_FIXED32: decoder.Fixed32Decoder,
+    _FieldDescriptor.TYPE_BOOL: decoder.BoolDecoder,
+    _FieldDescriptor.TYPE_STRING: decoder.StringDecoder,
+    _FieldDescriptor.TYPE_GROUP: decoder.GroupDecoder,
+    _FieldDescriptor.TYPE_MESSAGE: decoder.MessageDecoder,
+    _FieldDescriptor.TYPE_BYTES: decoder.BytesDecoder,
+    _FieldDescriptor.TYPE_UINT32: decoder.UInt32Decoder,
+    _FieldDescriptor.TYPE_ENUM: decoder.EnumDecoder,
+    _FieldDescriptor.TYPE_SFIXED32: decoder.SFixed32Decoder,
+    _FieldDescriptor.TYPE_SFIXED64: decoder.SFixed64Decoder,
+    _FieldDescriptor.TYPE_SINT32: decoder.SInt32Decoder,
+    _FieldDescriptor.TYPE_SINT64: decoder.SInt64Decoder,
+    }
+
+# Maps from field type to expected wiretype.
+FIELD_TYPE_TO_WIRE_TYPE = {
+    _FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64,
+    _FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32,
+    _FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT,
+    _FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT,
+    _FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT,
+    _FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64,
+    _FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32,
+    _FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT,
+    _FieldDescriptor.TYPE_STRING:
+      wire_format.WIRETYPE_LENGTH_DELIMITED,
+    _FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP,
+    _FieldDescriptor.TYPE_MESSAGE:
+      wire_format.WIRETYPE_LENGTH_DELIMITED,
+    _FieldDescriptor.TYPE_BYTES:
+      wire_format.WIRETYPE_LENGTH_DELIMITED,
+    _FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT,
+    _FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT,
+    _FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32,
+    _FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64,
+    _FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT,
+    _FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT,
+    }
diff --git a/third_party/protobuf26/internal/wire_format.py b/third_party/protobuf26/internal/wire_format.py
new file mode 100644
index 0000000..29961fd
--- /dev/null
+++ b/third_party/protobuf26/internal/wire_format.py
@@ -0,0 +1,268 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc.  All rights reserved.
+# http://code.google.com/p/protobuf/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Constants and static functions to support protocol buffer wire format."""
+
+__author__ = 'robinson@google.com (Will Robinson)'
+
+import struct
+from protobuf26 import descriptor
+from protobuf26 import message
+
+
+TAG_TYPE_BITS = 3  # Number of bits used to hold type info in a proto tag.
+TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1  # 0x7
+
+# These numbers identify the wire type of a protocol buffer value.
+# We use the least-significant TAG_TYPE_BITS bits of the varint-encoded
+# tag-and-type to store one of these WIRETYPE_* constants.
+# These values must match WireType enum in google/protobuf/wire_format.h.
+WIRETYPE_VARINT = 0
+WIRETYPE_FIXED64 = 1
+WIRETYPE_LENGTH_DELIMITED = 2
+WIRETYPE_START_GROUP = 3
+WIRETYPE_END_GROUP = 4
+WIRETYPE_FIXED32 = 5
+_WIRETYPE_MAX = 5
+
+
+# Bounds for various integer types.
+INT32_MAX = int((1 << 31) - 1)
+INT32_MIN = int(-(1 << 31))
+UINT32_MAX = (1 << 32) - 1
+
+INT64_MAX = (1 << 63) - 1
+INT64_MIN = -(1 << 63)
+UINT64_MAX = (1 << 64) - 1
+
+# "struct" format strings that will encode/decode the specified formats.
+FORMAT_UINT32_LITTLE_ENDIAN = '<I'
+FORMAT_UINT64_LITTLE_ENDIAN = '<Q'
+FORMAT_FLOAT_LITTLE_ENDIAN = '<f'
+FORMAT_DOUBLE_LITTLE_ENDIAN = '<d'
+
+
+# We'll have to provide alternate implementations of AppendLittleEndian*() on
+# any architectures where these checks fail.
+if struct.calcsize(FORMAT_UINT32_LITTLE_ENDIAN) != 4:
+  raise AssertionError('Format "I" is not a 32-bit number.')
+if struct.calcsize(FORMAT_UINT64_LITTLE_ENDIAN) != 8:
+  raise AssertionError('Format "Q" is not a 64-bit number.')
+
+
+def PackTag(field_number, wire_type):
+  """Returns an unsigned 32-bit integer that encodes the field number and
+  wire type information in standard protocol message wire format.
+
+  Args:
+    field_number: Expected to be an integer in the range [1, 1 << 29)
+    wire_type: One of the WIRETYPE_* constants.
+  """
+  if not 0 <= wire_type <= _WIRETYPE_MAX:
+    raise message.EncodeError('Unknown wire type: %d' % wire_type)
+  return (field_number << TAG_TYPE_BITS) | wire_type
+
+
+def UnpackTag(tag):
+  """The inverse of PackTag().  Given an unsigned 32-bit number,
+  returns a (field_number, wire_type) tuple.
+  """
+  return (tag >> TAG_TYPE_BITS), (tag & TAG_TYPE_MASK)
+
+
+def ZigZagEncode(value):
+  """ZigZag Transform:  Encodes signed integers so that they can be
+  effectively used with varint encoding.  See wire_format.h for
+  more details.
+  """
+  if value >= 0:
+    return value << 1
+  return (value << 1) ^ (~0)
+
+
+def ZigZagDecode(value):
+  """Inverse of ZigZagEncode()."""
+  if not value & 0x1:
+    return value >> 1
+  return (value >> 1) ^ (~0)
+
+
+
+# The *ByteSize() functions below return the number of bytes required to
+# serialize "field number + type" information and then serialize the value.
+
+
+def Int32ByteSize(field_number, int32):
+  return Int64ByteSize(field_number, int32)
+
+
+def Int32ByteSizeNoTag(int32):
+  return _VarUInt64ByteSizeNoTag(0xffffffffffffffff & int32)
+
+
+def Int64ByteSize(field_number, int64):
+  # Have to convert to uint before calling UInt64ByteSize().
+  return UInt64ByteSize(field_number, 0xffffffffffffffff & int64)
+
+
+def UInt32ByteSize(field_number, uint32):
+  return UInt64ByteSize(field_number, uint32)
+
+
+def UInt64ByteSize(field_number, uint64):
+  return TagByteSize(field_number) + _VarUInt64ByteSizeNoTag(uint64)
+
+
+def SInt32ByteSize(field_number, int32):
+  return UInt32ByteSize(field_number, ZigZagEncode(int32))
+
+
+def SInt64ByteSize(field_number, int64):
+  return UInt64ByteSize(field_number, ZigZagEncode(int64))
+
+
+def Fixed32ByteSize(field_number, fixed32):
+  return TagByteSize(field_number) + 4
+
+
+def Fixed64ByteSize(field_number, fixed64):
+  return TagByteSize(field_number) + 8
+
+
+def SFixed32ByteSize(field_number, sfixed32):
+  return TagByteSize(field_number) + 4
+
+
+def SFixed64ByteSize(field_number, sfixed64):
+  return TagByteSize(field_number) + 8
+
+
+def FloatByteSize(field_number, flt):
+  return TagByteSize(field_number) + 4
+
+
+def DoubleByteSize(field_number, double):
+  return TagByteSize(field_number) + 8
+
+
+def BoolByteSize(field_number, b):
+  return TagByteSize(field_number) + 1
+
+
+def EnumByteSize(field_number, enum):
+  return UInt32ByteSize(field_number, enum)
+
+
+def StringByteSize(field_number, string):
+  return BytesByteSize(field_number, string.encode('utf-8'))
+
+
+def BytesByteSize(field_number, b):
+  return (TagByteSize(field_number)
+          + _VarUInt64ByteSizeNoTag(len(b))
+          + len(b))
+
+
+def GroupByteSize(field_number, message):
+  return (2 * TagByteSize(field_number)  # START and END group.
+          + message.ByteSize())
+
+
+def MessageByteSize(field_number, message):
+  return (TagByteSize(field_number)
+          + _VarUInt64ByteSizeNoTag(message.ByteSize())
+          + message.ByteSize())
+
+
+def MessageSetItemByteSize(field_number, msg):
+  # First compute the sizes of the tags.
+  # There are 2 tags for the beginning and ending of the repeated group, that
+  # is field number 1, one with field number 2 (type_id) and one with field
+  # number 3 (message).
+  total_size = (2 * TagByteSize(1) + TagByteSize(2) + TagByteSize(3))
+
+  # Add the number of bytes for type_id.
+  total_size += _VarUInt64ByteSizeNoTag(field_number)
+
+  message_size = msg.ByteSize()
+
+  # The number of bytes for encoding the length of the message.
+  total_size += _VarUInt64ByteSizeNoTag(message_size)
+
+  # The size of the message.
+  total_size += message_size
+  return total_size
+
+
+def TagByteSize(field_number):
+  """Returns the bytes required to serialize a tag with this field number."""
+  # Just pass in type 0, since the type won't affect the tag+type size.
+  return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0))
+
+
+# Private helper function for the *ByteSize() functions above.
+
+def _VarUInt64ByteSizeNoTag(uint64):
+  """Returns the number of bytes required to serialize a single varint
+  using boundary value comparisons. (unrolled loop optimization -WPierce)
+  uint64 must be unsigned.
+  """
+  if uint64 <= 0x7f: return 1
+  if uint64 <= 0x3fff: return 2
+  if uint64 <= 0x1fffff: return 3
+  if uint64 <= 0xfffffff: return 4
+  if uint64 <= 0x7ffffffff: return 5
+  if uint64 <= 0x3ffffffffff: return 6
+  if uint64 <= 0x1ffffffffffff: return 7
+  if uint64 <= 0xffffffffffffff: return 8
+  if uint64 <= 0x7fffffffffffffff: return 9
+  if uint64 > UINT64_MAX:
+    raise message.EncodeError('Value out of range: %d' % uint64)
+  return 10
+
+
+NON_PACKABLE_TYPES = (
+  descriptor.FieldDescriptor.TYPE_STRING,
+  descriptor.FieldDescriptor.TYPE_GROUP,
+  descriptor.FieldDescriptor.TYPE_MESSAGE,
+  descriptor.FieldDescriptor.TYPE_BYTES
+)
+
+
+def IsTypePackable(field_type):
+  """Return true iff packable = true is valid for fields of this type.
+
+  Args:
+    field_type: a FieldDescriptor::Type value.
+
+  Returns:
+    True iff fields of this type are packable.
+  """
+  return field_type not in NON_PACKABLE_TYPES
diff --git a/third_party/protobuf26/message.py b/third_party/protobuf26/message.py
new file mode 100644
index 0000000..37b0af1
--- /dev/null
+++ b/third_party/protobuf26/message.py
@@ -0,0 +1,284 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc.  All rights reserved.
+# http://code.google.com/p/protobuf/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# TODO(robinson): We should just make these methods all "pure-virtual" and move
+# all implementation out, into reflection.py for now.
+
+
+"""Contains an abstract base class for protocol messages."""
+
+__author__ = 'robinson@google.com (Will Robinson)'
+
+
+class Error(Exception): pass
+class DecodeError(Error): pass
+class EncodeError(Error): pass
+
+
+class Message(object):
+
+  """Abstract base class for protocol messages.
+
+  Protocol message classes are almost always generated by the protocol
+  compiler.  These generated types subclass Message and implement the methods
+  shown below.
+
+  TODO(robinson): Link to an HTML document here.
+
+  TODO(robinson): Document that instances of this class will also
+  have an Extensions attribute with __getitem__ and __setitem__.
+  Again, not sure how to best convey this.
+
+  TODO(robinson): Document that the class must also have a static
+    RegisterExtension(extension_field) method.
+    Not sure how to best express at this point.
+  """
+
+  # TODO(robinson): Document these fields and methods.
+
+  __slots__ = []
+
+  DESCRIPTOR = None
+
+  def __deepcopy__(self, memo=None):
+    clone = type(self)()
+    clone.MergeFrom(self)
+    return clone
+
+  def __eq__(self, other_msg):
+    """Recursively compares two messages by value and structure."""
+    raise NotImplementedError
+
+  def __ne__(self, other_msg):
+    # Can't just say self != other_msg, since that would infinitely recurse. :)
+    return not self == other_msg
+
+  def __hash__(self):
+    raise TypeError('unhashable object')
+
+  def __str__(self):
+    """Outputs a human-readable representation of the message."""
+    raise NotImplementedError
+
+  def __unicode__(self):
+    """Outputs a human-readable representation of the message."""
+    raise NotImplementedError
+
+  def MergeFrom(self, other_msg):
+    """Merges the contents of the specified message into current message.
+
+    This method merges the contents of the specified message into the current
+    message. Singular fields that are set in the specified message overwrite
+    the corresponding fields in the current message. Repeated fields are
+    appended. Singular sub-messages and groups are recursively merged.
+
+    Args:
+      other_msg: Message to merge into the current message.
+    """
+    raise NotImplementedError
+
+  def CopyFrom(self, other_msg):
+    """Copies the content of the specified message into the current message.
+
+    The method clears the current message and then merges the specified
+    message using MergeFrom.
+
+    Args:
+      other_msg: Message to copy into the current one.
+    """
+    if self is other_msg:
+      return
+    self.Clear()
+    self.MergeFrom(other_msg)
+
+  def Clear(self):
+    """Clears all data that was set in the message."""
+    raise NotImplementedError
+
+  def SetInParent(self):
+    """Mark this as present in the parent.
+
+    This normally happens automatically when you assign a field of a
+    sub-message, but sometimes you want to make the sub-message
+    present while keeping it empty.  If you find yourself using this,
+    you may want to reconsider your design."""
+    raise NotImplementedError
+
+  def IsInitialized(self):
+    """Checks if the message is initialized.
+
+    Returns:
+      The method returns True if the message is initialized (i.e. all of its
+      required fields are set).
+    """
+    raise NotImplementedError
+
+  # TODO(robinson): MergeFromString() should probably return None and be
+  # implemented in terms of a helper that returns the # of bytes read.  Our
+  # deserialization routines would use the helper when recursively
+  # deserializing, but the end user would almost always just want the no-return
+  # MergeFromString().
+
+  def MergeFromString(self, serialized):
+    """Merges serialized protocol buffer data into this message.
+
+    When we find a field in |serialized| that is already present
+    in this message:
+      - If it's a "repeated" field, we append to the end of our list.
+      - Else, if it's a scalar, we overwrite our field.
+      - Else, (it's a nonrepeated composite), we recursively merge
+        into the existing composite.
+
+    TODO(robinson): Document handling of unknown fields.
+
+    Args:
+      serialized: Any object that allows us to call buffer(serialized)
+        to access a string of bytes using the buffer interface.
+
+    TODO(robinson): When we switch to a helper, this will return None.
+
+    Returns:
+      The number of bytes read from |serialized|.
+      For non-group messages, this will always be len(serialized),
+      but for messages which are actually groups, this will
+      generally be less than len(serialized), since we must
+      stop when we reach an END_GROUP tag.  Note that if
+      we *do* stop because of an END_GROUP tag, the number
+      of bytes returned does not include the bytes
+      for the END_GROUP tag information.
+    """
+    raise NotImplementedError
+
+  def ParseFromString(self, serialized):
+    """Parse serialized protocol buffer data into this message.
+
+    Like MergeFromString(), except we clear the object first and
+    do not return the value that MergeFromString returns.
+    """
+    self.Clear()
+    self.MergeFromString(serialized)
+
+  def SerializeToString(self):
+    """Serializes the protocol message to a binary string.
+
+    Returns:
+      A binary string representation of the message if all of the required
+      fields in the message are set (i.e. the message is initialized).
+
+    Raises:
+      message.EncodeError if the message isn't initialized.
+    """
+    raise NotImplementedError
+
+  def SerializePartialToString(self):
+    """Serializes the protocol message to a binary string.
+
+    This method is similar to SerializeToString but doesn't check if the
+    message is initialized.
+
+    Returns:
+      A string representation of the partial message.
+    """
+    raise NotImplementedError
+
+  # TODO(robinson): Decide whether we like these better
+  # than auto-generated has_foo() and clear_foo() methods
+  # on the instances themselves.  This way is less consistent
+  # with C++, but it makes reflection-type access easier and
+  # reduces the number of magically autogenerated things.
+  #
+  # TODO(robinson): Be sure to document (and test) exactly
+  # which field names are accepted here.  Are we case-sensitive?
+  # What do we do with fields that share names with Python keywords
+  # like 'lambda' and 'yield'?
+  #
+  # nnorwitz says:
+  # """
+  # Typically (in python), an underscore is appended to names that are
+  # keywords. So they would become lambda_ or yield_.
+  # """
+  def ListFields(self):
+    """Returns a list of (FieldDescriptor, value) tuples for all
+    fields in the message which are not empty.  A singular field is non-empty
+    if HasField() would return true, and a repeated field is non-empty if
+    it contains at least one element.  The fields are ordered by field
+    number"""
+    raise NotImplementedError
+
+  def HasField(self, field_name):
+    """Checks if a certain field is set for the message. Note if the
+    field_name is not defined in the message descriptor, ValueError will be
+    raised."""
+    raise NotImplementedError
+
+  def ClearField(self, field_name):
+    raise NotImplementedError
+
+  def HasExtension(self, extension_handle):
+    raise NotImplementedError
+
+  def ClearExtension(self, extension_handle):
+    raise NotImplementedError
+
+  def ByteSize(self):
+    """Returns the serialized size of this message.
+    Recursively calls ByteSize() on all contained messages.
+    """
+    raise NotImplementedError
+
+  def _SetListener(self, message_listener):
+    """Internal method used by the protocol message implementation.
+    Clients should not call this directly.
+
+    Sets a listener that this message will call on certain state transitions.
+
+    The purpose of this method is to register back-edges from children to
+    parents at runtime, for the purpose of setting "has" bits and
+    byte-size-dirty bits in the parent and ancestor objects whenever a child or
+    descendant object is modified.
+
+    If the client wants to disconnect this Message from the object tree, she
+    explicitly sets callback to None.
+
+    If message_listener is None, unregisters any existing listener.  Otherwise,
+    message_listener must implement the MessageListener interface in
+    internal/message_listener.py, and we discard any listener registered
+    via a previous _SetListener() call.
+    """
+    raise NotImplementedError
+
+  def __getstate__(self):
+    """Support the pickle protocol."""
+    return dict(serialized=self.SerializePartialToString())
+
+  def __setstate__(self, state):
+    """Support the pickle protocol."""
+    self.__init__()
+    self.ParseFromString(state['serialized'])
diff --git a/third_party/protobuf26/message_factory.py b/third_party/protobuf26/message_factory.py
new file mode 100644
index 0000000..a8d63f7
--- /dev/null
+++ b/third_party/protobuf26/message_factory.py
@@ -0,0 +1,155 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc.  All rights reserved.
+# http://code.google.com/p/protobuf/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#PY25 compatible for GAE.
+#
+# Copyright 2012 Google Inc. All Rights Reserved.
+
+"""Provides a factory class for generating dynamic messages.
+
+The easiest way to use this class is if you have access to the FileDescriptor
+protos containing the messages you want to create you can just do the following:
+
+message_classes = message_factory.GetMessages(iterable_of_file_descriptors)
+my_proto_instance = message_classes['some.proto.package.MessageName']()
+"""
+
+__author__ = 'matthewtoia@google.com (Matt Toia)'
+
+import sys  ##PY25
+from protobuf26 import descriptor_database
+from protobuf26 import descriptor_pool
+from protobuf26 import message
+from protobuf26 import reflection
+
+
+class MessageFactory(object):
+  """Factory for creating Proto2 messages from descriptors in a pool."""
+
+  def __init__(self, pool=None):
+    """Initializes a new factory."""
+    self.pool = (pool or descriptor_pool.DescriptorPool(
+        descriptor_database.DescriptorDatabase()))
+
+    # local cache of all classes built from protobuf descriptors
+    self._classes = {}
+
+  def GetPrototype(self, descriptor):
+    """Builds a proto2 message class based on the passed in descriptor.
+
+    Passing a descriptor with a fully qualified name matching a previous
+    invocation will cause the same class to be returned.
+
+    Args:
+      descriptor: The descriptor to build from.
+
+    Returns:
+      A class describing the passed in descriptor.
+    """
+    if descriptor.full_name not in self._classes:
+      descriptor_name = descriptor.name
+      if sys.version_info[0] < 3:  ##PY25
+##!PY25      if str is bytes:  # PY2
+        descriptor_name = descriptor.name.encode('ascii', 'ignore')
+      result_class = reflection.GeneratedProtocolMessageType(
+          descriptor_name,
+          (message.Message,),
+          {'DESCRIPTOR': descriptor, '__module__': None})
+          # If module not set, it wrongly points to the reflection.py module.
+      self._classes[descriptor.full_name] = result_class
+      for field in descriptor.fields:
+        if field.message_type:
+          self.GetPrototype(field.message_type)
+      for extension in result_class.DESCRIPTOR.extensions:
+        if extension.containing_type.full_name not in self._classes:
+          self.GetPrototype(extension.containing_type)
+        extended_class = self._classes[extension.containing_type.full_name]
+        extended_class.RegisterExtension(extension)
+    return self._classes[descriptor.full_name]
+
+  def GetMessages(self, files):
+    """Gets all the messages from a specified file.
+
+    This will find and resolve dependencies, failing if the descriptor
+    pool cannot satisfy them.
+
+    Args:
+      files: The file names to extract messages from.
+
+    Returns:
+      A dictionary mapping proto names to the message classes. This will include
+      any dependent messages as well as any messages defined in the same file as
+      a specified message.
+    """
+    result = {}
+    for file_name in files:
+      file_desc = self.pool.FindFileByName(file_name)
+      for name, msg in file_desc.message_types_by_name.iteritems():
+        if file_desc.package:
+          full_name = '.'.join([file_desc.package, name])
+        else:
+          full_name = msg.name
+        result[full_name] = self.GetPrototype(
+            self.pool.FindMessageTypeByName(full_name))
+
+      # While the extension FieldDescriptors are created by the descriptor pool,
+      # the python classes created in the factory need them to be registered
+      # explicitly, which is done below.
+      #
+      # The call to RegisterExtension will specifically check if the
+      # extension was already registered on the object and either
+      # ignore the registration if the original was the same, or raise
+      # an error if they were different.
+
+      for name, extension in file_desc.extensions_by_name.iteritems():
+        if extension.containing_type.full_name not in self._classes:
+          self.GetPrototype(extension.containing_type)
+        extended_class = self._classes[extension.containing_type.full_name]
+        extended_class.RegisterExtension(extension)
+    return result
+
+
+_FACTORY = MessageFactory()
+
+
+def GetMessages(file_protos):
+  """Builds a dictionary of all the messages available in a set of files.
+
+  Args:
+    file_protos: A sequence of file protos to build messages out of.
+
+  Returns:
+    A dictionary mapping proto names to the message classes. This will include
+    any dependent messages as well as any messages defined in the same file as
+    a specified message.
+  """
+  for file_proto in file_protos:
+    _FACTORY.pool.Add(file_proto)
+  return _FACTORY.GetMessages([file_proto.name for file_proto in file_protos])
diff --git a/third_party/protobuf26/reflection.py b/third_party/protobuf26/reflection.py
new file mode 100644
index 0000000..673348c
--- /dev/null
+++ b/third_party/protobuf26/reflection.py
@@ -0,0 +1,205 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc.  All rights reserved.
+# http://code.google.com/p/protobuf/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# This code is meant to work on Python 2.4 and above only.
+
+"""Contains a metaclass and helper functions used to create
+protocol message classes from Descriptor objects at runtime.
+
+Recall that a metaclass is the "type" of a class.
+(A class is to a metaclass what an instance is to a class.)
+
+In this case, we use the GeneratedProtocolMessageType metaclass
+to inject all the useful functionality into the classes
+output by the protocol compiler at compile-time.
+
+The upshot of all this is that the real implementation
+details for ALL pure-Python protocol buffers are *here in
+this file*.
+"""
+
+__author__ = 'robinson@google.com (Will Robinson)'
+
+
+from protobuf26.internal import api_implementation
+from protobuf26 import descriptor as descriptor_mod
+from protobuf26 import message
+
+_FieldDescriptor = descriptor_mod.FieldDescriptor
+
+
+if api_implementation.Type() == 'cpp':
+  if api_implementation.Version() == 2:
+    from protobuf26.pyext import cpp_message
+    _NewMessage = cpp_message.NewMessage
+    _InitMessage = cpp_message.InitMessage
+  else:
+    from protobuf26.internal import cpp_message
+    _NewMessage = cpp_message.NewMessage
+    _InitMessage = cpp_message.InitMessage
+else:
+  from protobuf26.internal import python_message
+  _NewMessage = python_message.NewMessage
+  _InitMessage = python_message.InitMessage
+
+
+class GeneratedProtocolMessageType(type):
+
+  """Metaclass for protocol message classes created at runtime from Descriptors.
+
+  We add implementations for all methods described in the Message class.  We
+  also create properties to allow getting/setting all fields in the protocol
+  message.  Finally, we create slots to prevent users from accidentally
+  "setting" nonexistent fields in the protocol message, which then wouldn't get
+  serialized / deserialized properly.
+
+  The protocol compiler currently uses this metaclass to create protocol
+  message classes at runtime.  Clients can also manually create their own
+  classes at runtime, as in this example:
+
+  mydescriptor = Descriptor(.....)
+  class MyProtoClass(Message):
+    __metaclass__ = GeneratedProtocolMessageType
+    DESCRIPTOR = mydescriptor
+  myproto_instance = MyProtoClass()
+  myproto.foo_field = 23
+  ...
+
+  The above example will not work for nested types. If you wish to include them,
+  use reflection.MakeClass() instead of manually instantiating the class in
+  order to create the appropriate class structure.
+  """
+
+  # Must be consistent with the protocol-compiler code in
+  # proto2/compiler/internal/generator.*.
+  _DESCRIPTOR_KEY = 'DESCRIPTOR'
+
+  def __new__(cls, name, bases, dictionary):
+    """Custom allocation for runtime-generated class types.
+
+    We override __new__ because this is apparently the only place
+    where we can meaningfully set __slots__ on the class we're creating(?).
+    (The interplay between metaclasses and slots is not very well-documented).
+
+    Args:
+      name: Name of the class (ignored, but required by the
+        metaclass protocol).
+      bases: Base classes of the class we're constructing.
+        (Should be message.Message).  We ignore this field, but
+        it's required by the metaclass protocol
+      dictionary: The class dictionary of the class we're
+        constructing.  dictionary[_DESCRIPTOR_KEY] must contain
+        a Descriptor object describing this protocol message
+        type.
+
+    Returns:
+      Newly-allocated class.
+    """
+    descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]
+    bases = _NewMessage(bases, descriptor, dictionary)
+    superclass = super(GeneratedProtocolMessageType, cls)
+
+    new_class = superclass.__new__(cls, name, bases, dictionary)
+    setattr(descriptor, '_concrete_class', new_class)
+    return new_class
+
+  def __init__(cls, name, bases, dictionary):
+    """Here we perform the majority of our work on the class.
+    We add enum getters, an __init__ method, implementations
+    of all Message methods, and properties for all fields
+    in the protocol type.
+
+    Args:
+      name: Name of the class (ignored, but required by the
+        metaclass protocol).
+      bases: Base classes of the class we're constructing.
+        (Should be message.Message).  We ignore this field, but
+        it's required by the metaclass protocol
+      dictionary: The class dictionary of the class we're
+        constructing.  dictionary[_DESCRIPTOR_KEY] must contain
+        a Descriptor object describing this protocol message
+        type.
+    """
+    descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]
+    _InitMessage(descriptor, cls)
+    superclass = super(GeneratedProtocolMessageType, cls)
+    superclass.__init__(name, bases, dictionary)
+
+
+def ParseMessage(descriptor, byte_str):
+  """Generate a new Message instance from this Descriptor and a byte string.
+
+  Args:
+    descriptor: Protobuf Descriptor object
+    byte_str: Serialized protocol buffer byte string
+
+  Returns:
+    Newly created protobuf Message object.
+  """
+  result_class = MakeClass(descriptor)
+  new_msg = result_class()
+  new_msg.ParseFromString(byte_str)
+  return new_msg
+
+
+def MakeClass(descriptor):
+  """Construct a class object for a protobuf described by descriptor.
+
+  Composite descriptors are handled by defining the new class as a member of the
+  parent class, recursing as deep as necessary.
+  This is the dynamic equivalent to:
+
+  class Parent(message.Message):
+    __metaclass__ = GeneratedProtocolMessageType
+    DESCRIPTOR = descriptor
+    class Child(message.Message):
+      __metaclass__ = GeneratedProtocolMessageType
+      DESCRIPTOR = descriptor.nested_types[0]
+
+  Sample usage:
+    file_descriptor = descriptor_pb2.FileDescriptorProto()
+    file_descriptor.ParseFromString(proto2_string)
+    msg_descriptor = descriptor.MakeDescriptor(file_descriptor.message_type[0])
+    msg_class = reflection.MakeClass(msg_descriptor)
+    msg = msg_class()
+
+  Args:
+    descriptor: A descriptor.Descriptor object describing the protobuf.
+  Returns:
+    The Message class object described by the descriptor.
+  """
+  attributes = {}
+  for name, nested_type in descriptor.nested_types_by_name.items():
+    attributes[name] = MakeClass(nested_type)
+
+  attributes[GeneratedProtocolMessageType._DESCRIPTOR_KEY] = descriptor
+
+  return GeneratedProtocolMessageType(str(descriptor.name), (message.Message,),
+                                      attributes)
diff --git a/third_party/protobuf26/service.py b/third_party/protobuf26/service.py
new file mode 100644
index 0000000..180b70e
--- /dev/null
+++ b/third_party/protobuf26/service.py
@@ -0,0 +1,226 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc.  All rights reserved.
+# http://code.google.com/p/protobuf/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""DEPRECATED:  Declares the RPC service interfaces.
+
+This module declares the abstract interfaces underlying proto2 RPC
+services.  These are intended to be independent of any particular RPC
+implementation, so that proto2 services can be used on top of a variety
+of implementations.  Starting with version 2.3.0, RPC implementations should
+not try to build on these, but should instead provide code generator plugins
+which generate code specific to the particular RPC implementation.  This way
+the generated code can be more appropriate for the implementation in use
+and can avoid unnecessary layers of indirection.
+"""
+
+__author__ = 'petar@google.com (Petar Petrov)'
+
+
class RpcException(Exception):
  """Raised when a blocking RPC method call fails."""
+
+
class Service(object):

  """Abstract base interface for protocol-buffer-based RPC services.

  Services themselves are abstract classes (implemented either by servers or as
  stubs), but they subclass this base interface. The methods of this
  interface can be used to call the methods of the service without knowing
  its exact type at compile time (analogous to the Message interface).
  """

  def GetDescriptor(self=None):
    """Retrieves this service's descriptor.

    Generated service classes replace this with a staticmethod returning the
    real descriptor; the base implementation always raises.

    Bug fix: the original was declared with no parameters at all, so calling
    it on an instance raised TypeError ("takes 0 arguments, 1 given") instead
    of the intended NotImplementedError.  The ``self=None`` default keeps
    both call styles working: ``instance.GetDescriptor()`` and
    ``Service.GetDescriptor()``.

    Raises:
      NotImplementedError: always, on the abstract base.
    """
    raise NotImplementedError

  def CallMethod(self, method_descriptor, rpc_controller,
                 request, done):
    """Calls a method of the service specified by method_descriptor.

    If "done" is None then the call is blocking and the response
    message will be returned directly.  Otherwise the call is asynchronous
    and "done" will later be called with the response value.

    In the blocking case, RpcException will be raised on error.

    Preconditions:
    * method_descriptor.service == GetDescriptor
    * request is of the exact same classes as returned by
      GetRequestClass(method).
    * After the call has started, the request must not be modified.
    * "rpc_controller" is of the correct type for the RPC implementation being
      used by this Service.  For stubs, the "correct type" depends on the
      RpcChannel which the stub is using.

    Postconditions:
    * "done" will be called when the method is complete.  This may be
      before CallMethod() returns or it may be at some point in the future.
    * If the RPC failed, the response value passed to "done" will be None.
      Further details about the failure can be found by querying the
      RpcController.

    Raises:
      NotImplementedError: always, on the abstract base.
    """
    raise NotImplementedError

  def GetRequestClass(self, method_descriptor):
    """Returns the class of the request message for the specified method.

    CallMethod() requires that the request is of a particular subclass of
    Message. GetRequestClass() gets the default instance of this required
    type.

    Example:
      method = service.GetDescriptor().FindMethodByName("Foo")
      request = stub.GetRequestClass(method)()
      request.ParseFromString(input)
      service.CallMethod(method, request, callback)

    Raises:
      NotImplementedError: always, on the abstract base.
    """
    raise NotImplementedError

  def GetResponseClass(self, method_descriptor):
    """Returns the class of the response message for the specified method.

    This method isn't really needed, as the RpcChannel's CallMethod constructs
    the response protocol message. It's provided anyway in case it is useful
    for the caller to know the response type in advance.

    Raises:
      NotImplementedError: always, on the abstract base.
    """
    raise NotImplementedError
+
+
class RpcController(object):

  """Mediates a single RPC method call.

  The controller's job is to expose RPC-implementation-specific settings and
  to surface RPC-level errors.  The interface below is a deliberately small
  "least common denominator" that every implementation is expected to
  support; specific implementations may layer more advanced features
  (e.g. deadline propagation) on top.
  """

  # ---- Client-side interface ----

  def Reset(self):
    """Returns the controller to its initial state for reuse.

    Must not be invoked while an RPC is still in flight; afterwards the
    controller may be used for a fresh call.
    """
    raise NotImplementedError

  def Failed(self):
    """Reports whether the finished call ended in failure.

    Only meaningful once the call has completed; the set of possible failure
    causes is implementation-defined.  When this returns true, the contents
    of the response message are undefined.
    """
    raise NotImplementedError

  def ErrorText(self):
    """If Failed is true, returns a human-readable description of the error."""
    raise NotImplementedError

  def StartCancel(self):
    """Asks the RPC system to cancel the in-flight call.

    Cancellation is advisory: the implementation may cancel immediately,
    later, or not at all.  If the call is cancelled, the "done" callback
    still fires and the controller reports the call as failed.
    """
    raise NotImplementedError

  # ---- Server-side interface ----

  def SetFailed(self, reason):
    """Marks the call as failed with the given human-readable reason.

    Causes the client-side Failed() to return true; "reason" feeds into
    ErrorText().  Machine-readable failure details belong in the response
    protocol buffer instead of here.
    """
    raise NotImplementedError

  def IsCanceled(self):
    """Reports whether the client has cancelled this RPC.

    When true the server may stop working on a reply, but it must still
    invoke the final "done" callback.
    """
    raise NotImplementedError

  def NotifyOnCancel(self, callback):
    """Registers a callback fired exactly once when the RPC is cancelled.

    If the RPC completes without cancellation the callback runs after
    completion; if it was already cancelled when this is called, the
    callback runs immediately.

    NotifyOnCancel() must be called no more than once per request.
    """
    raise NotImplementedError
+
+
class RpcChannel(object):

  """Abstract interface for an RPC channel.

  An RpcChannel is a communication line to a (possibly remote) service whose
  methods it can invoke.  Callers normally do not touch the channel directly;
  they wrap it in a stub {@link Service} instead.  Example:

  Example:
    RpcChannel channel = rpcImpl.Channel("remotehost.example.com:1234")
    RpcController controller = rpcImpl.Controller()
    MyService service = MyService_Stub(channel)
    service.MyMethod(controller, request, callback)
  """

  def CallMethod(self, method_descriptor, rpc_controller,
                 request, response_class, done):
    """Invokes the remote method identified by the descriptor.

    Shaped like Service.CallMethod(), but with one relaxed requirement:
    the request object may be of any class, as long as its descriptor is
    method.input_type.
    """
    raise NotImplementedError
diff --git a/third_party/protobuf26/service_reflection.py b/third_party/protobuf26/service_reflection.py
new file mode 100644
index 0000000..851e83e
--- /dev/null
+++ b/third_party/protobuf26/service_reflection.py
@@ -0,0 +1,284 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc.  All rights reserved.
+# http://code.google.com/p/protobuf/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Contains metaclasses used to create protocol service and service stub
+classes from ServiceDescriptor objects at runtime.
+
+The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to
+inject all useful functionality into the classes output by the protocol
+compiler at compile-time.
+"""
+
+__author__ = 'petar@google.com (Petar Petrov)'
+
+
class GeneratedServiceType(type):

  """Metaclass for service classes created at runtime from ServiceDescriptors.

  Installs implementations of every method described by the Service class
  onto the class being constructed, plus properties for getting/setting all
  fields in the protocol message.

  The protocol compiler uses this metaclass to build service classes at
  runtime; clients can also build their own manually:

  mydescriptor = ServiceDescriptor(.....)
  class MyProtoService(service.Service):
    __metaclass__ = GeneratedServiceType
    DESCRIPTOR = mydescriptor
  myservice_instance = MyProtoService()
  ...
  """

  _DESCRIPTOR_KEY = 'DESCRIPTOR'

  def __init__(cls, name, bases, dictionary):
    """Creates a message service class.

    Args:
      name: Name of the class (ignored, but required by the metaclass
        protocol).
      bases: Base classes of the class being constructed.
      dictionary: The class dictionary of the class being constructed.
        dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object
        describing this protocol service type.
    """
    # A missing descriptor means this is a subclass of an already-built
    # service class; there is nothing to generate in that case.
    if GeneratedServiceType._DESCRIPTOR_KEY in dictionary:
      service_descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY]
      _ServiceBuilder(service_descriptor).BuildService(cls)
+
+
class GeneratedServiceStubType(GeneratedServiceType):

  """Metaclass for service stubs created at runtime from ServiceDescriptors.

  Same responsibilities as GeneratedServiceType, except that the classes it
  fills in are client-side stubs rather than service implementations.
  """

  _DESCRIPTOR_KEY = 'DESCRIPTOR'

  def __init__(cls, name, bases, dictionary):
    """Creates a message service stub class.

    Args:
      name: Name of the class (ignored, here).
      bases: Base classes of the class being constructed.
      dictionary: The class dictionary of the class being constructed.
        dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object
        describing this protocol service type.
    """
    # Let the base metaclass install the plain-service machinery first.
    super(GeneratedServiceStubType, cls).__init__(name, bases, dictionary)
    # No descriptor means this is a subclass of an existing stub; skip.
    descriptor_key = GeneratedServiceStubType._DESCRIPTOR_KEY
    if descriptor_key in dictionary:
      _ServiceStubBuilder(dictionary[descriptor_key]).BuildServiceStub(cls)
+
+
+class _ServiceBuilder(object):
+
+  """This class constructs a protocol service class using a service descriptor.
+
+  Given a service descriptor, this class constructs a class that represents
+  the specified service descriptor. One service builder instance constructs
+  exactly one service class. That means all instances of that class share the
+  same builder.
+  """
+
+  def __init__(self, service_descriptor):
+    """Initializes an instance of the service class builder.
+
+    Args:
+      service_descriptor: ServiceDescriptor to use when constructing the
+        service class.
+    """
+    self.descriptor = service_descriptor
+
+  def BuildService(self, cls):
+    """Constructs the service class.
+
+    Args:
+      cls: The class that will be constructed.
+    """
+
+    # CallMethod needs to operate with an instance of the Service class. This
+    # internal wrapper function exists only to be able to pass the service
+    # instance to the method that does the real CallMethod work.
+    def _WrapCallMethod(srvc, method_descriptor,
+                        rpc_controller, request, callback):
+      return self._CallMethod(srvc, method_descriptor,
+                       rpc_controller, request, callback)
+    self.cls = cls
+    cls.CallMethod = _WrapCallMethod
+    cls.GetDescriptor = staticmethod(lambda: self.descriptor)
+    cls.GetDescriptor.__doc__ = "Returns the service descriptor."
+    cls.GetRequestClass = self._GetRequestClass
+    cls.GetResponseClass = self._GetResponseClass
+    for method in self.descriptor.methods:
+      setattr(cls, method.name, self._GenerateNonImplementedMethod(method))
+
+  def _CallMethod(self, srvc, method_descriptor,
+                  rpc_controller, request, callback):
+    """Calls the method described by a given method descriptor.
+
+    Args:
+      srvc: Instance of the service for which this method is called.
+      method_descriptor: Descriptor that represent the method to call.
+      rpc_controller: RPC controller to use for this method's execution.
+      request: Request protocol message.
+      callback: A callback to invoke after the method has completed.
+    """
+    if method_descriptor.containing_service != self.descriptor:
+      raise RuntimeError(
+          'CallMethod() given method descriptor for wrong service type.')
+    method = getattr(srvc, method_descriptor.name)
+    return method(rpc_controller, request, callback)
+
+  def _GetRequestClass(self, method_descriptor):
+    """Returns the class of the request protocol message.
+
+    Args:
+      method_descriptor: Descriptor of the method for which to return the
+        request protocol message class.
+
+    Returns:
+      A class that represents the input protocol message of the specified
+      method.
+    """
+    if method_descriptor.containing_service != self.descriptor:
+      raise RuntimeError(
+          'GetRequestClass() given method descriptor for wrong service type.')
+    return method_descriptor.input_type._concrete_class
+
+  def _GetResponseClass(self, method_descriptor):
+    """Returns the class of the response protocol message.
+
+    Args:
+      method_descriptor: Descriptor of the method for which to return the
+        response protocol message class.
+
+    Returns:
+      A class that represents the output protocol message of the specified
+      method.
+    """
+    if method_descriptor.containing_service != self.descriptor:
+      raise RuntimeError(
+          'GetResponseClass() given method descriptor for wrong service type.')
+    return method_descriptor.output_type._concrete_class
+
+  def _GenerateNonImplementedMethod(self, method):
+    """Generates and returns a method that can be set for a service methods.
+
+    Args:
+      method: Descriptor of the service method for which a method is to be
+        generated.
+
+    Returns:
+      A method that can be added to the service class.
+    """
+    return lambda inst, rpc_controller, request, callback: (
+        self._NonImplementedMethod(method.name, rpc_controller, callback))
+
+  def _NonImplementedMethod(self, method_name, rpc_controller, callback):
+    """The body of all methods in the generated service class.
+
+    Args:
+      method_name: Name of the method being executed.
+      rpc_controller: RPC controller used to execute this method.
+      callback: A callback which will be invoked when the method finishes.
+    """
+    rpc_controller.SetFailed('Method %s not implemented.' % method_name)
+    callback(None)
+
+
+class _ServiceStubBuilder(object):
+
+  """Constructs a protocol service stub class using a service descriptor.
+
+  Given a service descriptor, this class constructs a suitable stub class.
+  A stub is just a type-safe wrapper around an RpcChannel which emulates a
+  local implementation of the service.
+
+  One service stub builder instance constructs exactly one class. It means all
+  instances of that class share the same service stub builder.
+  """
+
+  def __init__(self, service_descriptor):
+    """Initializes an instance of the service stub class builder.
+
+    Args:
+      service_descriptor: ServiceDescriptor to use when constructing the
+        stub class.
+    """
+    self.descriptor = service_descriptor
+
+  def BuildServiceStub(self, cls):
+    """Constructs the stub class.
+
+    Args:
+      cls: The class that will be constructed.
+    """
+
+    def _ServiceStubInit(stub, rpc_channel):
+      stub.rpc_channel = rpc_channel
+    self.cls = cls
+    cls.__init__ = _ServiceStubInit
+    for method in self.descriptor.methods:
+      setattr(cls, method.name, self._GenerateStubMethod(method))
+
+  def _GenerateStubMethod(self, method):
+    return (lambda inst, rpc_controller, request, callback=None:
+        self._StubMethod(inst, method, rpc_controller, request, callback))
+
+  def _StubMethod(self, stub, method_descriptor,
+                  rpc_controller, request, callback):
+    """The body of all service methods in the generated stub class.
+
+    Args:
+      stub: Stub instance.
+      method_descriptor: Descriptor of the invoked method.
+      rpc_controller: Rpc controller to execute the method.
+      request: Request protocol message.
+      callback: A callback to execute when the method finishes.
+    Returns:
+      Response message (in case of blocking call).
+    """
+    return stub.rpc_channel.CallMethod(
+        method_descriptor, rpc_controller, request,
+        method_descriptor.output_type._concrete_class, callback)
diff --git a/third_party/protobuf26/symbol_database.py b/third_party/protobuf26/symbol_database.py
new file mode 100644
index 0000000..bb586bd
--- /dev/null
+++ b/third_party/protobuf26/symbol_database.py
@@ -0,0 +1,185 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc.  All rights reserved.
+# http://code.google.com/p/protobuf/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""A database of Python protocol buffer generated symbols.
+
+SymbolDatabase makes it easy to create new instances of a registered type, given
+only the type's protocol buffer symbol name. Once all symbols are registered,
+they can be accessed using either the MessageFactory interface which
+SymbolDatabase exposes, or the DescriptorPool interface of the underlying
+pool.
+
+Example usage:
+
+  db = symbol_database.SymbolDatabase()
+
+  # Register symbols of interest, from one or multiple files.
+  db.RegisterFileDescriptor(my_proto_pb2.DESCRIPTOR)
+  db.RegisterMessage(my_proto_pb2.MyMessage)
+  db.RegisterEnumDescriptor(my_proto_pb2.MyEnum.DESCRIPTOR)
+
+  # The database can be used as a MessageFactory, to generate types based on
+  # their name:
+  types = db.GetMessages(['my_proto.proto'])
+  my_message_instance = types['MyMessage']()
+
+  # The database's underlying descriptor pool can be queried, so it's not
+  # necessary to know a type's filename to be able to generate it:
+  filename = db.pool.FindFileContainingSymbol('MyMessage')
+  my_message_instance = db.GetMessages([filename])['MyMessage']()
+
+  # This functionality is also provided directly via a convenience method:
+  my_message_instance = db.GetSymbol('MyMessage')()
+"""
+
+
+from protobuf26 import descriptor_pool
+
+
class SymbolDatabase(object):
  """A database of Python generated symbols.

  SymbolDatabase also models message_factory.MessageFactory.

  The symbol database can be used to keep a global registry of all protocol
  buffer types used within a program.
  """

  def __init__(self):
    """Constructor."""
    # Maps full proto names (e.g. 'pkg.Msg') to message classes.
    self._symbols = {}
    # Maps file names to {full proto name: message class} for GetMessages().
    self._symbols_by_file = {}
    self.pool = descriptor_pool.DescriptorPool()

  def RegisterMessage(self, message):
    """Registers the given message type in the local database.

    Args:
      message: a message.Message, to be registered.

    Returns:
      The provided message.
    """
    desc = message.DESCRIPTOR
    self._symbols[desc.full_name] = message
    # setdefault replaces the original membership-test-then-insert dance.
    self._symbols_by_file.setdefault(desc.file.name, {})[desc.full_name] = (
        message)
    self.pool.AddDescriptor(desc)
    return message

  def RegisterEnumDescriptor(self, enum_descriptor):
    """Registers the given enum descriptor in the local database.

    Args:
      enum_descriptor: a descriptor.EnumDescriptor.

    Returns:
      The provided descriptor.
    """
    self.pool.AddEnumDescriptor(enum_descriptor)
    return enum_descriptor

  def RegisterFileDescriptor(self, file_descriptor):
    """Registers the given file descriptor in the local database.

    Args:
      file_descriptor: a descriptor.FileDescriptor.

    Returns:
      The provided descriptor.
    """
    self.pool.AddFileDescriptor(file_descriptor)
    # Bug fix: the docstring (and the sibling Register* methods) promise the
    # descriptor back, but the original implementation returned None.
    return file_descriptor

  def GetSymbol(self, symbol):
    """Tries to find a symbol in the local database.

    Currently, this method only returns message.Message instances, however,
    it may be extended in future to support other symbol types.

    Args:
      symbol: A str, a protocol buffer symbol.

    Returns:
      A Python class corresponding to the symbol.

    Raises:
      KeyError: if the symbol could not be found.
    """
    return self._symbols[symbol]

  def GetPrototype(self, descriptor):
    """Builds a proto2 message class based on the passed in descriptor.

    Passing a descriptor with a fully qualified name matching a previous
    invocation will cause the same class to be returned.

    Args:
      descriptor: The descriptor to build from.

    Returns:
      A class describing the passed in descriptor.
    """
    return self.GetSymbol(descriptor.full_name)

  def GetMessages(self, files):
    """Gets all the messages from a specified file.

    This will find and resolve dependencies, failing if they are not
    registered in the symbol database.

    Args:
      files: The file names to extract messages from.

    Returns:
      A dictionary mapping proto names to the message classes. This will
      include any dependent messages as well as any messages defined in the
      same file as a specified message.

    Raises:
      KeyError: if a file could not be found.
    """
    result = {}
    for f in files:
      result.update(self._symbols_by_file[f])
    return result
+
# Process-wide singleton handed out by Default(); created at import time.
_DEFAULT = SymbolDatabase()


def Default():
  """Returns the default SymbolDatabase."""
  return _DEFAULT
diff --git a/third_party/protobuf26/text_encoding.py b/third_party/protobuf26/text_encoding.py
new file mode 100644
index 0000000..ed0aabf
--- /dev/null
+++ b/third_party/protobuf26/text_encoding.py
@@ -0,0 +1,110 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc.  All rights reserved.
+# http://code.google.com/p/protobuf/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#PY25 compatible for GAE.
+#
+"""Encoding related utilities."""
+
+import re
+import sys  ##PY25
+
# Lookup table for utf8
# NOTE(review): Python 2 only — relies on xrange and on chr(i) producing a
# one-byte str; confirm this vendored tree is never imported under Python 3.
# Maps every byte value 0..255 to its escaped text representation.
_cescape_utf8_to_str = [chr(i) for i in xrange(0, 256)]
_cescape_utf8_to_str[9] = r'\t'  # optional escape
_cescape_utf8_to_str[10] = r'\n'  # optional escape
_cescape_utf8_to_str[13] = r'\r'  # optional escape
_cescape_utf8_to_str[39] = r"\'"  # optional escape

_cescape_utf8_to_str[34] = r'\"'  # necessary escape
_cescape_utf8_to_str[92] = r'\\'  # necessary escape

# Lookup table for non-utf8, with necessary escapes at (o >= 127 or o < 32)
# Unlike the utf8 table above, non-printable bytes become octal escapes.
_cescape_byte_to_str = ([r'\%03o' % i for i in xrange(0, 32)] +
                        [chr(i) for i in xrange(32, 127)] +
                        [r'\%03o' % i for i in xrange(127, 256)])
_cescape_byte_to_str[9] = r'\t'  # optional escape
_cescape_byte_to_str[10] = r'\n'  # optional escape
_cescape_byte_to_str[13] = r'\r'  # optional escape
_cescape_byte_to_str[39] = r"\'"  # optional escape

_cescape_byte_to_str[34] = r'\"'  # necessary escape
_cescape_byte_to_str[92] = r'\\'  # necessary escape
+
+
def CEscape(text, as_utf8):
  """Escape a bytes string for use in an ascii protocol buffer.

  text.encode('string_escape') does not satisfy our needs: it encodes
  unprintable characters with two-digit hex escapes, while our C++
  unescaper accepts hex escapes of any length — so "\0011" would round-trip
  through 'string_escape' as "\\x011" and decode in C++ as the single char
  0x11.  We therefore escape via the precomputed per-byte lookup tables.

  Args:
    text: A byte string to be escaped
    as_utf8: Specifies if result should be returned in UTF-8 encoding
  Returns:
    Escaped string
  """
  # PY3 hack: make Ord work for str and bytes.
  # //platforms/networking/data uses unicode here, hence basestring.
  if isinstance(text, basestring):
    to_ordinal = ord
  else:
    to_ordinal = lambda byte: byte
  table = _cescape_utf8_to_str if as_utf8 else _cescape_byte_to_str
  return ''.join(table[to_ordinal(ch)] for ch in text)
+
+
# Matches a hex escape preceded by one or more backslashes; group(1) captures
# the backslash run so we can tell whether the escape itself is escaped.
_CUNESCAPE_HEX = re.compile(r'(\\+)x([0-9a-fA-F])(?![0-9a-fA-F])')
# Maps byte values: ASCII passes through, bytes >= 127 become octal escapes.
_cescape_highbit_to_str = ([chr(i) for i in range(0, 127)] +
                           [r'\%03o' % i for i in range(127, 256)])


def CUnescape(text):
  """Unescape a text string with C-style escape sequences to UTF-8 bytes."""

  def ReplaceHex(m):
    # Only replace the match if the number of leading back slashes is odd. i.e.
    # the slash itself is not escaped.
    if len(m.group(1)) & 1:
      # Pad single-digit hex escapes to two digits ('\xf' -> '\x0f').
      return m.group(1) + 'x0' + m.group(2)
    return m.group(0)

  # This is required because the 'string_escape' encoding doesn't
  # allow single-digit hex escapes (like '\xf').
  result = _CUNESCAPE_HEX.sub(ReplaceHex, text)

  # The ##PY25 / ##!PY25 markers below are rewritten by protobuf's build
  # tooling for the Python 2.5 / GAE variant — do not remove them.
  if sys.version_info[0] < 3:  ##PY25
##!PY25  if str is bytes:  # PY2
    # Python 2 path: 'string_escape' decodes C-style escapes in one shot.
    return result.decode('string_escape')
  # Python 3 path: octal-escape high bytes first so 'unicode_escape' can
  # decode them, then re-encode to get bytes of the proper type back.
  result = ''.join(_cescape_highbit_to_str[ord(c)] for c in result)
  return (result.encode('ascii')  # Make it bytes to allow decode.
          .decode('unicode_escape')
          # Make it bytes again to return the proper type.
          .encode('raw_unicode_escape'))
diff --git a/third_party/protobuf26/text_format.py b/third_party/protobuf26/text_format.py
new file mode 100644
index 0000000..6ef7cb9
--- /dev/null
+++ b/third_party/protobuf26/text_format.py
@@ -0,0 +1,873 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc.  All rights reserved.
+# http://code.google.com/p/protobuf/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#PY25 compatible for GAE.
+#
+# Copyright 2007 Google Inc. All Rights Reserved.
+
+"""Contains routines for printing protocol messages in text format."""
+
+__author__ = 'kenton@google.com (Kenton Varda)'
+
+import cStringIO
+import re
+
+from protobuf26.internal import type_checkers
+from protobuf26 import descriptor
+from protobuf26 import text_encoding
+
+__all__ = ['MessageToString', 'PrintMessage', 'PrintField',
+           'PrintFieldValue', 'Merge']
+
+
+_INTEGER_CHECKERS = (type_checkers.Uint32ValueChecker(),
+                     type_checkers.Int32ValueChecker(),
+                     type_checkers.Uint64ValueChecker(),
+                     type_checkers.Int64ValueChecker())
+_FLOAT_INFINITY = re.compile('-?inf(?:inity)?f?', re.IGNORECASE)
+_FLOAT_NAN = re.compile('nanf?', re.IGNORECASE)
+_FLOAT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_FLOAT,
+                          descriptor.FieldDescriptor.CPPTYPE_DOUBLE])
+
+
+class Error(Exception):
+  """Top-level module error for text_format."""
+
+
+class ParseError(Error):
+  """Thrown in case of ASCII parsing error."""
+
+
+def MessageToString(message, as_utf8=False, as_one_line=False,
+                    pointy_brackets=False, use_index_order=False,
+                    float_format=None):
+  """Convert protobuf message to text format.
+
+  Floating point values can be formatted compactly with 15 digits of
+  precision (which is the most that IEEE 754 "double" can guarantee)
+  using float_format='.15g'.
+
+  Args:
+    message: The protocol buffers message.
+    as_utf8: Produce text output in UTF8 format.
+    as_one_line: Don't introduce newlines between fields.
+    pointy_brackets: If True, use angle brackets instead of curly braces for
+      nesting.
+    use_index_order: If True, print fields of a proto message using the order
+      defined in source code instead of the field number. By default, use the
+      field number order.
+    float_format: If set, use this to specify floating point number formatting
+      (per the "Format Specification Mini-Language"); otherwise, str() is used.
+
+  Returns:
+    A string of the text formatted protocol buffer message.
+  """
+  out = cStringIO.StringIO()
+  PrintMessage(message, out, as_utf8=as_utf8, as_one_line=as_one_line,
+               pointy_brackets=pointy_brackets,
+               use_index_order=use_index_order,
+               float_format=float_format)
+  result = out.getvalue()
+  out.close()
+  if as_one_line:
+    return result.rstrip()
+  return result
+
+
+def PrintMessage(message, out, indent=0, as_utf8=False, as_one_line=False,
+                 pointy_brackets=False, use_index_order=False,
+                 float_format=None):
+  fields = message.ListFields()
+  if use_index_order:
+    fields.sort(key=lambda x: x[0].index)
+  for field, value in fields:
+    if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
+      for element in value:
+        PrintField(field, element, out, indent, as_utf8, as_one_line,
+                   pointy_brackets=pointy_brackets,
+                   float_format=float_format)
+    else:
+      PrintField(field, value, out, indent, as_utf8, as_one_line,
+                 pointy_brackets=pointy_brackets,
+                 float_format=float_format)
+
+
+def PrintField(field, value, out, indent=0, as_utf8=False, as_one_line=False,
+               pointy_brackets=False, float_format=None):
+  """Print a single field name/value pair.  For repeated fields, the value
+  should be a single element."""
+
+  out.write(' ' * indent)
+  if field.is_extension:
+    out.write('[')
+    if (field.containing_type.GetOptions().message_set_wire_format and
+        field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
+        field.message_type == field.extension_scope and
+        field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL):
+      out.write(field.message_type.full_name)
+    else:
+      out.write(field.full_name)
+    out.write(']')
+  elif field.type == descriptor.FieldDescriptor.TYPE_GROUP:
+    # For groups, use the capitalized name.
+    out.write(field.message_type.name)
+  else:
+    out.write(field.name)
+
+  if field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
+    # The colon is optional in this case, but our cross-language golden files
+    # don't include it.
+    out.write(': ')
+
+  PrintFieldValue(field, value, out, indent, as_utf8, as_one_line,
+                  pointy_brackets=pointy_brackets,
+                  float_format=float_format)
+  if as_one_line:
+    out.write(' ')
+  else:
+    out.write('\n')
+
+
+def PrintFieldValue(field, value, out, indent=0, as_utf8=False,
+                    as_one_line=False, pointy_brackets=False,
+                    float_format=None):
+  """Print a single field value (not including name).  For repeated fields,
+  the value should be a single element."""
+
+  if pointy_brackets:
+    openb = '<'
+    closeb = '>'
+  else:
+    openb = '{'
+    closeb = '}'
+
+  if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
+    if as_one_line:
+      out.write(' %s ' % openb)
+      PrintMessage(value, out, indent, as_utf8, as_one_line,
+                   pointy_brackets=pointy_brackets,
+                   float_format=float_format)
+      out.write(closeb)
+    else:
+      out.write(' %s\n' % openb)
+      PrintMessage(value, out, indent + 2, as_utf8, as_one_line,
+                   pointy_brackets=pointy_brackets,
+                   float_format=float_format)
+      out.write(' ' * indent + closeb)
+  elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
+    enum_value = field.enum_type.values_by_number.get(value, None)
+    if enum_value is not None:
+      out.write(enum_value.name)
+    else:
+      out.write(str(value))
+  elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
+    out.write('\"')
+    if isinstance(value, unicode):
+      out_value = value.encode('utf-8')
+    else:
+      out_value = value
+    if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
+      # We need to escape non-UTF8 chars in TYPE_BYTES field.
+      out_as_utf8 = False
+    else:
+      out_as_utf8 = as_utf8
+    out.write(text_encoding.CEscape(out_value, out_as_utf8))
+    out.write('\"')
+  elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
+    if value:
+      out.write('true')
+    else:
+      out.write('false')
+  elif field.cpp_type in _FLOAT_TYPES and float_format is not None:
+    out.write('{1:{0}}'.format(float_format, value))
+  else:
+    out.write(str(value))
+
+
+def _ParseOrMerge(lines, message, allow_multiple_scalars):
+  """Converts an ASCII representation of a protocol message into a message.
+
+  Args:
+    lines: Lines of a message's ASCII representation.
+    message: A protocol buffer message to merge into.
+    allow_multiple_scalars: Determines if repeated values for a non-repeated
+      field are permitted, e.g., the string "foo: 1 foo: 2" for a
+      required/optional field named "foo".
+
+  Raises:
+    ParseError: On ASCII parsing problems.
+  """
+  tokenizer = _Tokenizer(lines)
+  while not tokenizer.AtEnd():
+    _MergeField(tokenizer, message, allow_multiple_scalars)
+
+
+def Parse(text, message):
+  """Parses an ASCII representation of a protocol message into a message.
+
+  Args:
+    text: Message ASCII representation.
+    message: A protocol buffer message to merge into.
+
+  Returns:
+    The same message passed as argument.
+
+  Raises:
+    ParseError: On ASCII parsing problems.
+  """
+  if not isinstance(text, str): text = text.decode('utf-8')
+  return ParseLines(text.split('\n'), message)
+
+
+def Merge(text, message):
+  """Parses an ASCII representation of a protocol message into a message.
+
+  Like Parse(), but allows repeated values for a non-repeated field, and uses
+  the last one.
+
+  Args:
+    text: Message ASCII representation.
+    message: A protocol buffer message to merge into.
+
+  Returns:
+    The same message passed as argument.
+
+  Raises:
+    ParseError: On ASCII parsing problems.
+  """
+  return MergeLines(text.split('\n'), message)
+
+
+def ParseLines(lines, message):
+  """Parses an ASCII representation of a protocol message into a message.
+
+  Args:
+    lines: An iterable of lines of a message's ASCII representation.
+    message: A protocol buffer message to merge into.
+
+  Returns:
+    The same message passed as argument.
+
+  Raises:
+    ParseError: On ASCII parsing problems.
+  """
+  _ParseOrMerge(lines, message, False)
+  return message
+
+
+def MergeLines(lines, message):
+  """Parses an ASCII representation of a protocol message into a message.
+
+  Args:
+    lines: An iterable of lines of a message's ASCII representation.
+    message: A protocol buffer message to merge into.
+
+  Returns:
+    The same message passed as argument.
+
+  Raises:
+    ParseError: On ASCII parsing problems.
+  """
+  _ParseOrMerge(lines, message, True)
+  return message
+
+
+def _MergeField(tokenizer, message, allow_multiple_scalars):
+  """Merges a single protocol message field into a message.
+
+  Args:
+    tokenizer: A tokenizer to parse the field name and values.
+    message: A protocol message to record the data.
+    allow_multiple_scalars: Determines if repeated values for a non-repeated
+      field are permitted, e.g., the string "foo: 1 foo: 2" for a
+      required/optional field named "foo".
+
+  Raises:
+    ParseError: In case of ASCII parsing problems.
+  """
+  message_descriptor = message.DESCRIPTOR
+  if tokenizer.TryConsume('['):
+    name = [tokenizer.ConsumeIdentifier()]
+    while tokenizer.TryConsume('.'):
+      name.append(tokenizer.ConsumeIdentifier())
+    name = '.'.join(name)
+
+    if not message_descriptor.is_extendable:
+      raise tokenizer.ParseErrorPreviousToken(
+          'Message type "%s" does not have extensions.' %
+          message_descriptor.full_name)
+    # pylint: disable=protected-access
+    field = message.Extensions._FindExtensionByName(name)
+    # pylint: enable=protected-access
+    if not field:
+      raise tokenizer.ParseErrorPreviousToken(
+          'Extension "%s" not registered.' % name)
+    elif message_descriptor != field.containing_type:
+      raise tokenizer.ParseErrorPreviousToken(
+          'Extension "%s" does not extend message type "%s".' % (
+              name, message_descriptor.full_name))
+    tokenizer.Consume(']')
+  else:
+    name = tokenizer.ConsumeIdentifier()
+    field = message_descriptor.fields_by_name.get(name, None)
+
+    # Group names are expected to be capitalized as they appear in the
+    # .proto file, which actually matches their type names, not their field
+    # names.
+    if not field:
+      field = message_descriptor.fields_by_name.get(name.lower(), None)
+      if field and field.type != descriptor.FieldDescriptor.TYPE_GROUP:
+        field = None
+
+    if (field and field.type == descriptor.FieldDescriptor.TYPE_GROUP and
+        field.message_type.name != name):
+      field = None
+
+    if not field:
+      raise tokenizer.ParseErrorPreviousToken(
+          'Message type "%s" has no field named "%s".' % (
+              message_descriptor.full_name, name))
+
+  if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
+    tokenizer.TryConsume(':')
+
+    if tokenizer.TryConsume('<'):
+      end_token = '>'
+    else:
+      tokenizer.Consume('{')
+      end_token = '}'
+
+    if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
+      if field.is_extension:
+        sub_message = message.Extensions[field].add()
+      else:
+        sub_message = getattr(message, field.name).add()
+    else:
+      if field.is_extension:
+        sub_message = message.Extensions[field]
+      else:
+        sub_message = getattr(message, field.name)
+      sub_message.SetInParent()
+
+    while not tokenizer.TryConsume(end_token):
+      if tokenizer.AtEnd():
+        raise tokenizer.ParseErrorPreviousToken('Expected "%s".' % (end_token))
+      _MergeField(tokenizer, sub_message, allow_multiple_scalars)
+  else:
+    _MergeScalarField(tokenizer, message, field, allow_multiple_scalars)
+
+  # For historical reasons, fields may optionally be separated by commas or
+  # semicolons.
+  if not tokenizer.TryConsume(','):
+    tokenizer.TryConsume(';')
+
+
+def _MergeScalarField(tokenizer, message, field, allow_multiple_scalars):
+  """Merges a single protocol message scalar field into a message.
+
+  Args:
+    tokenizer: A tokenizer to parse the field value.
+    message: A protocol message to record the data.
+    field: The descriptor of the field to be merged.
+    allow_multiple_scalars: Determines if repeated values for a non-repeated
+      field are permitted, e.g., the string "foo: 1 foo: 2" for a
+      required/optional field named "foo".
+
+  Raises:
+    ParseError: In case of ASCII parsing problems.
+    RuntimeError: On runtime errors.
+  """
+  tokenizer.Consume(':')
+  value = None
+
+  if field.type in (descriptor.FieldDescriptor.TYPE_INT32,
+                    descriptor.FieldDescriptor.TYPE_SINT32,
+                    descriptor.FieldDescriptor.TYPE_SFIXED32):
+    value = tokenizer.ConsumeInt32()
+  elif field.type in (descriptor.FieldDescriptor.TYPE_INT64,
+                      descriptor.FieldDescriptor.TYPE_SINT64,
+                      descriptor.FieldDescriptor.TYPE_SFIXED64):
+    value = tokenizer.ConsumeInt64()
+  elif field.type in (descriptor.FieldDescriptor.TYPE_UINT32,
+                      descriptor.FieldDescriptor.TYPE_FIXED32):
+    value = tokenizer.ConsumeUint32()
+  elif field.type in (descriptor.FieldDescriptor.TYPE_UINT64,
+                      descriptor.FieldDescriptor.TYPE_FIXED64):
+    value = tokenizer.ConsumeUint64()
+  elif field.type in (descriptor.FieldDescriptor.TYPE_FLOAT,
+                      descriptor.FieldDescriptor.TYPE_DOUBLE):
+    value = tokenizer.ConsumeFloat()
+  elif field.type == descriptor.FieldDescriptor.TYPE_BOOL:
+    value = tokenizer.ConsumeBool()
+  elif field.type == descriptor.FieldDescriptor.TYPE_STRING:
+    value = tokenizer.ConsumeString()
+  elif field.type == descriptor.FieldDescriptor.TYPE_BYTES:
+    value = tokenizer.ConsumeByteString()
+  elif field.type == descriptor.FieldDescriptor.TYPE_ENUM:
+    value = tokenizer.ConsumeEnum(field)
+  else:
+    raise RuntimeError('Unknown field type %d' % field.type)
+
+  if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
+    if field.is_extension:
+      message.Extensions[field].append(value)
+    else:
+      getattr(message, field.name).append(value)
+  else:
+    if field.is_extension:
+      if not allow_multiple_scalars and message.HasExtension(field):
+        raise tokenizer.ParseErrorPreviousToken(
+            'Message type "%s" should not have multiple "%s" extensions.' %
+            (message.DESCRIPTOR.full_name, field.full_name))
+      else:
+        message.Extensions[field] = value
+    else:
+      if not allow_multiple_scalars and message.HasField(field.name):
+        raise tokenizer.ParseErrorPreviousToken(
+            'Message type "%s" should not have multiple "%s" fields.' %
+            (message.DESCRIPTOR.full_name, field.name))
+      else:
+        setattr(message, field.name, value)
+
+
+class _Tokenizer(object):
+  """Protocol buffer ASCII representation tokenizer.
+
+  This class handles the lower level string parsing by splitting it into
+  meaningful tokens.
+
+  It was directly ported from the Java protocol buffer API.
+  """
+
+  _WHITESPACE = re.compile('(\\s|(#.*$))+', re.MULTILINE)
+  _TOKEN = re.compile(
+      '[a-zA-Z_][0-9a-zA-Z_+-]*|'           # an identifier
+      '[0-9+-][0-9a-zA-Z_.+-]*|'            # a number
+      '\"([^\"\n\\\\]|\\\\.)*(\"|\\\\?$)|'  # a double-quoted string
+      '\'([^\'\n\\\\]|\\\\.)*(\'|\\\\?$)')  # a single-quoted string
+  _IDENTIFIER = re.compile(r'\w+')
+
+  def __init__(self, lines):
+    self._position = 0
+    self._line = -1
+    self._column = 0
+    self._token_start = None
+    self.token = ''
+    self._lines = iter(lines)
+    self._current_line = ''
+    self._previous_line = 0
+    self._previous_column = 0
+    self._more_lines = True
+    self._SkipWhitespace()
+    self.NextToken()
+
+  def AtEnd(self):
+    """Checks the end of the text was reached.
+
+    Returns:
+      True iff the end was reached.
+    """
+    return not self.token
+
+  def _PopLine(self):
+    while len(self._current_line) <= self._column:
+      try:
+        self._current_line = self._lines.next()
+      except StopIteration:
+        self._current_line = ''
+        self._more_lines = False
+        return
+      else:
+        self._line += 1
+        self._column = 0
+
+  def _SkipWhitespace(self):
+    while True:
+      self._PopLine()
+      match = self._WHITESPACE.match(self._current_line, self._column)
+      if not match:
+        break
+      length = len(match.group(0))
+      self._column += length
+
+  def TryConsume(self, token):
+    """Tries to consume a given piece of text.
+
+    Args:
+      token: Text to consume.
+
+    Returns:
+      True iff the text was consumed.
+    """
+    if self.token == token:
+      self.NextToken()
+      return True
+    return False
+
+  def Consume(self, token):
+    """Consumes a piece of text.
+
+    Args:
+      token: Text to consume.
+
+    Raises:
+      ParseError: If the text couldn't be consumed.
+    """
+    if not self.TryConsume(token):
+      raise self._ParseError('Expected "%s".' % token)
+
+  def ConsumeIdentifier(self):
+    """Consumes protocol message field identifier.
+
+    Returns:
+      Identifier string.
+
+    Raises:
+      ParseError: If an identifier couldn't be consumed.
+    """
+    result = self.token
+    if not self._IDENTIFIER.match(result):
+      raise self._ParseError('Expected identifier.')
+    self.NextToken()
+    return result
+
+  def ConsumeInt32(self):
+    """Consumes a signed 32bit integer number.
+
+    Returns:
+      The integer parsed.
+
+    Raises:
+      ParseError: If a signed 32bit integer couldn't be consumed.
+    """
+    try:
+      result = ParseInteger(self.token, is_signed=True, is_long=False)
+    except ValueError, e:
+      raise self._ParseError(str(e))
+    self.NextToken()
+    return result
+
+  def ConsumeUint32(self):
+    """Consumes an unsigned 32bit integer number.
+
+    Returns:
+      The integer parsed.
+
+    Raises:
+      ParseError: If an unsigned 32bit integer couldn't be consumed.
+    """
+    try:
+      result = ParseInteger(self.token, is_signed=False, is_long=False)
+    except ValueError, e:
+      raise self._ParseError(str(e))
+    self.NextToken()
+    return result
+
+  def ConsumeInt64(self):
+    """Consumes a signed 64bit integer number.
+
+    Returns:
+      The integer parsed.
+
+    Raises:
+      ParseError: If a signed 64bit integer couldn't be consumed.
+    """
+    try:
+      result = ParseInteger(self.token, is_signed=True, is_long=True)
+    except ValueError, e:
+      raise self._ParseError(str(e))
+    self.NextToken()
+    return result
+
+  def ConsumeUint64(self):
+    """Consumes an unsigned 64bit integer number.
+
+    Returns:
+      The integer parsed.
+
+    Raises:
+      ParseError: If an unsigned 64bit integer couldn't be consumed.
+    """
+    try:
+      result = ParseInteger(self.token, is_signed=False, is_long=True)
+    except ValueError, e:
+      raise self._ParseError(str(e))
+    self.NextToken()
+    return result
+
+  def ConsumeFloat(self):
+    """Consumes a floating point number.
+
+    Returns:
+      The number parsed.
+
+    Raises:
+      ParseError: If a floating point number couldn't be consumed.
+    """
+    try:
+      result = ParseFloat(self.token)
+    except ValueError, e:
+      raise self._ParseError(str(e))
+    self.NextToken()
+    return result
+
+  def ConsumeBool(self):
+    """Consumes a boolean value.
+
+    Returns:
+      The bool parsed.
+
+    Raises:
+      ParseError: If a boolean value couldn't be consumed.
+    """
+    try:
+      result = ParseBool(self.token)
+    except ValueError, e:
+      raise self._ParseError(str(e))
+    self.NextToken()
+    return result
+
+  def ConsumeString(self):
+    """Consumes a string value.
+
+    Returns:
+      The string parsed.
+
+    Raises:
+      ParseError: If a string value couldn't be consumed.
+    """
+    the_bytes = self.ConsumeByteString()
+    try:
+      return unicode(the_bytes, 'utf-8')
+    except UnicodeDecodeError, e:
+      raise self._StringParseError(e)
+
+  def ConsumeByteString(self):
+    """Consumes a byte array value.
+
+    Returns:
+      The array parsed (as a string).
+
+    Raises:
+      ParseError: If a byte array value couldn't be consumed.
+    """
+    the_list = [self._ConsumeSingleByteString()]
+    while self.token and self.token[0] in ('\'', '"'):
+      the_list.append(self._ConsumeSingleByteString())
+    return ''.encode('latin1').join(the_list)  ##PY25
+##!PY25    return b''.join(the_list)
+
+  def _ConsumeSingleByteString(self):
+    """Consume one token of a string literal.
+
+    String literals (whether bytes or text) can come in multiple adjacent
+    tokens which are automatically concatenated, like in C or Python.  This
+    method only consumes one token.
+    """
+    text = self.token
+    if len(text) < 1 or text[0] not in ('\'', '"'):
+      raise self._ParseError('Expected string.')
+
+    if len(text) < 2 or text[-1] != text[0]:
+      raise self._ParseError('String missing ending quote.')
+
+    try:
+      result = text_encoding.CUnescape(text[1:-1])
+    except ValueError, e:
+      raise self._ParseError(str(e))
+    self.NextToken()
+    return result
+
+  def ConsumeEnum(self, field):
+    try:
+      result = ParseEnum(field, self.token)
+    except ValueError, e:
+      raise self._ParseError(str(e))
+    self.NextToken()
+    return result
+
+  def ParseErrorPreviousToken(self, message):
+    """Creates and *returns* a ParseError for the previously read token.
+
+    Args:
+      message: A message to set for the exception.
+
+    Returns:
+      A ParseError instance.
+    """
+    return ParseError('%d:%d : %s' % (
+        self._previous_line + 1, self._previous_column + 1, message))
+
+  def _ParseError(self, message):
+    """Creates and *returns* a ParseError for the current token."""
+    return ParseError('%d:%d : %s' % (
+        self._line + 1, self._column + 1, message))
+
+  def _StringParseError(self, e):
+    return self._ParseError('Couldn\'t parse string: ' + str(e))
+
+  def NextToken(self):
+    """Reads the next meaningful token."""
+    self._previous_line = self._line
+    self._previous_column = self._column
+
+    self._column += len(self.token)
+    self._SkipWhitespace()
+
+    if not self._more_lines:
+      self.token = ''
+      return
+
+    match = self._TOKEN.match(self._current_line, self._column)
+    if match:
+      token = match.group(0)
+      self.token = token
+    else:
+      self.token = self._current_line[self._column]
+
+
+def ParseInteger(text, is_signed=False, is_long=False):
+  """Parses an integer.
+
+  Args:
+    text: The text to parse.
+    is_signed: True if a signed integer must be parsed.
+    is_long: True if a long integer must be parsed.
+
+  Returns:
+    The integer value.
+
+  Raises:
+    ValueError: Thrown iff the text is not a valid integer.
+  """
+  # Do the actual parsing. Exception handling is propagated to caller.
+  try:
+    # We force 32-bit values to int and 64-bit values to long to make
+    # alternate implementations where the distinction is more significant
+    # (e.g. the C++ implementation) simpler.
+    if is_long:
+      result = long(text, 0)
+    else:
+      result = int(text, 0)
+  except ValueError:
+    raise ValueError('Couldn\'t parse integer: %s' % text)
+
+  # Check if the integer is sane. Exceptions handled by callers.
+  checker = _INTEGER_CHECKERS[2 * int(is_long) + int(is_signed)]
+  checker.CheckValue(result)
+  return result
+
+
+def ParseFloat(text):
+  """Parse a floating point number.
+
+  Args:
+    text: Text to parse.
+
+  Returns:
+    The number parsed.
+
+  Raises:
+    ValueError: If a floating point number couldn't be parsed.
+  """
+  try:
+    # Assume Python compatible syntax.
+    return float(text)
+  except ValueError:
+    # Check alternative spellings.
+    if _FLOAT_INFINITY.match(text):
+      if text[0] == '-':
+        return float('-inf')
+      else:
+        return float('inf')
+    elif _FLOAT_NAN.match(text):
+      return float('nan')
+    else:
+      # assume '1.0f' format
+      try:
+        return float(text.rstrip('f'))
+      except ValueError:
+        raise ValueError('Couldn\'t parse float: %s' % text)
+
+
+def ParseBool(text):
+  """Parse a boolean value.
+
+  Args:
+    text: Text to parse.
+
+  Returns:
+    Boolean values parsed
+
+  Raises:
+    ValueError: If text is not a valid boolean.
+  """
+  if text in ('true', 't', '1'):
+    return True
+  elif text in ('false', 'f', '0'):
+    return False
+  else:
+    raise ValueError('Expected "true" or "false".')
+
+
+def ParseEnum(field, value):
+  """Parse an enum value.
+
+  The value can be specified by a number (the enum value), or by
+  a string literal (the enum name).
+
+  Args:
+    field: Enum field descriptor.
+    value: String value.
+
+  Returns:
+    Enum value number.
+
+  Raises:
+    ValueError: If the enum value could not be parsed.
+  """
+  enum_descriptor = field.enum_type
+  try:
+    number = int(value, 0)
+  except ValueError:
+    # Identifier.
+    enum_value = enum_descriptor.values_by_name.get(value, None)
+    if enum_value is None:
+      raise ValueError(
+          'Enum type "%s" has no value named %s.' % (
+              enum_descriptor.full_name, value))
+  else:
+    # Numeric value.
+    enum_value = enum_descriptor.values_by_number.get(number, None)
+    if enum_value is None:
+      raise ValueError(
+          'Enum type "%s" has no value with number %d.' % (
+              enum_descriptor.full_name, number))
+  return enum_value.number
diff --git a/third_party/pylint.py b/third_party/pylint.py
index 4aca4b6..bf1860b 100755
--- a/third_party/pylint.py
+++ b/third_party/pylint.py
@@ -16,7 +16,13 @@
 
 Copyright (c) 2012 The Chromium Authors. All rights reserved.
 """
+import os
 import sys
+
+# Add local modules to the search path.
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(
+    __file__)), 'logilab'))
+
 from pylint import lint
 
 args = sys.argv[1:]
diff --git a/third_party/pylint/README.chromium b/third_party/pylint/README.chromium
index c9dc46c..3e468c2 100644
--- a/third_party/pylint/README.chromium
+++ b/third_party/pylint/README.chromium
@@ -1,5 +1,5 @@
-URL: http://www.logilab.org/project/pylint
-Version: 0.25.1
+URL: http://www.pylint.org/
+Version: 1.4.1
 License: GPL
 License File: LICENSE.txt
 
@@ -7,4 +7,47 @@
 This directory contains the pylint module.
 
 Local Modifications:
-None
+- applied upstream fix https://bitbucket.org/logilab/pylint/commits/5df347467ee0
+- applied fix to work around bad interaction between sys.path manipulation in
+  pylint itself and multiprocessing's implementation on Windows (DIFF1)
+
+
+Diffs:
+DIFF1
+diff --git a/third_party/pylint/lint.py b/third_party/pylint/lint.py
+index e10ae56..082d8b3 100644
+--- a/third_party/pylint/lint.py
++++ b/third_party/pylint/lint.py
+@@ -671,7 +671,8 @@ class PyLinter(configuration.OptionsManagerMixIn,
+             files_or_modules = (files_or_modules,)
+
+         if self.config.jobs == 1:
+-            self._do_check(files_or_modules)
++            with fix_import_path(files_or_modules):
++                self._do_check(files_or_modules)
+         else:
+             # Hack that permits running pylint, on Windows, with -m switch
+             # and with --jobs, as in 'python -2 -m pylint .. --jobs'.
+@@ -1252,8 +1253,8 @@ group are mutually exclusive.'),
+
+         # insert current working directory to the python path to have a correct
+         # behaviour
+-        with fix_import_path(args):
+-            if self.linter.config.profile:
++        if self.linter.config.profile:
++            with fix_import_path(args):
+                 print('** profiled run', file=sys.stderr)
+                 import cProfile, pstats
+                 cProfile.runctx('linter.check(%r)' % args, globals(), locals(),
+@@ -1262,9 +1263,9 @@ group are mutually exclusive.'),
+                 data.strip_dirs()
+                 data.sort_stats('time', 'calls')
+                 data.print_stats(30)
+-            else:
+-                linter.check(args)
+-            linter.generate_reports()
++        else:
++            linter.check(args)
++        linter.generate_reports()
+         if exit:
+             sys.exit(self.linter.msg_status)
diff --git a/third_party/pylint/__init__.py b/third_party/pylint/__init__.py
index 0c4bd13..82e557d 100644
--- a/third_party/pylint/__init__.py
+++ b/third_party/pylint/__init__.py
@@ -1,3 +1,6 @@
+# Copyright (c) 2003-2012 LOGILAB S.A. (Paris, FRANCE).
+# http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
 # This program is free software; you can redistribute it and/or modify it under
 # the terms of the GNU General Public License as published by the Free Software
 # Foundation; either version 2 of the License, or (at your option) any later
@@ -9,8 +12,35 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
-""" Copyright (c) 2002-2008 LOGILAB S.A. (Paris, FRANCE).
-http://www.logilab.fr/ -- mailto:contact@logilab.fr  
-"""
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+import sys
 
+from .__pkginfo__ import version as __version__
+
+def run_pylint():
+    """run pylint"""
+    from pylint.lint import Run
+    Run(sys.argv[1:])
+
+def run_pylint_gui():
+    """run pylint-gui"""
+    try:
+        from pylint.gui import Run
+        Run(sys.argv[1:])
+    except ImportError:
+        sys.exit('tkinter is not available')
+
+def run_epylint():
+    """run pylint"""
+    from pylint.epylint import Run
+    Run()
+
+def run_pyreverse():
+    """run pyreverse"""
+    from pylint.pyreverse.main import Run
+    Run(sys.argv[1:])
+
+def run_symilar():
+    """run symilar"""
+    from pylint.checkers.similar import Run
+    Run(sys.argv[1:])
diff --git a/third_party/pylint/__main__.py b/third_party/pylint/__main__.py
new file mode 100644
index 0000000..7716361
--- /dev/null
+++ b/third_party/pylint/__main__.py
@@ -0,0 +1,3 @@
+#!/usr/bin/env python
+import pylint
+pylint.run_pylint()
diff --git a/third_party/pylint/__pkginfo__.py b/third_party/pylint/__pkginfo__.py
index 2a6ac3a..6ed331a 100644
--- a/third_party/pylint/__pkginfo__.py
+++ b/third_party/pylint/__pkginfo__.py
@@ -1,5 +1,5 @@
 # pylint: disable=W0622,C0103
-# Copyright (c) 2003-2011 LOGILAB S.A. (Paris, FRANCE).
+# Copyright (c) 2003-2014 LOGILAB S.A. (Paris, FRANCE).
 # http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This program is free software; you can redistribute it and/or modify it under
@@ -13,35 +13,36 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """pylint packaging information"""
+from __future__ import absolute_import
 
 modname = distname = 'pylint'
 
-numversion = (0, 25, 1)
+numversion = (1, 4, 1)
 version = '.'.join([str(num) for num in numversion])
 
-install_requires = ['logilab-common >= 0.53.0', 'logilab-astng >= 0.21.1']
+install_requires = ['logilab-common >= 0.53.0', 'astroid >= 1.3.3', 'six']
 
 license = 'GPL'
-copyright = 'Logilab S.A.'
 description = "python code static checker"
-web = "http://www.logilab.org/project/%s" % distname
-ftp = "ftp://ftp.logilab.org/pub/%s" % modname
-mailinglist = "mailto://python-projects@lists.logilab.org"
+web = 'http://www.pylint.org'
+mailinglist = "mailto://code-quality@python.org"
 author = 'Logilab'
 author_email = 'python-projects@lists.logilab.org'
 
-classifiers =  ['Development Status :: 4 - Beta',
-                'Environment :: Console',
-                'Intended Audience :: Developers',
-                'License :: OSI Approved :: GNU General Public License (GPL)',
-                'Operating System :: OS Independent',
-                'Programming Language :: Python',
-                'Topic :: Software Development :: Debuggers',
-                'Topic :: Software Development :: Quality Assurance',
-                'Topic :: Software Development :: Testing',
-                ]
+classifiers = ['Development Status :: 4 - Beta',
+               'Environment :: Console',
+               'Intended Audience :: Developers',
+               'License :: OSI Approved :: GNU General Public License (GPL)',
+               'Operating System :: OS Independent',
+               'Programming Language :: Python',
+               'Programming Language :: Python :: 2',
+               'Programming Language :: Python :: 3',
+               'Topic :: Software Development :: Debuggers',
+               'Topic :: Software Development :: Quality Assurance',
+               'Topic :: Software Development :: Testing'
+              ]
 
 
 long_desc = """\
@@ -66,3 +67,4 @@
            for filename in ('pylint', 'pylint-gui', "symilar", "epylint",
                             "pyreverse")]
 
+include_dirs = ['test']
diff --git a/third_party/pylint/checkers/__init__.py b/third_party/pylint/checkers/__init__.py
index 969066b..51adb4d 100644
--- a/third_party/pylint/checkers/__init__.py
+++ b/third_party/pylint/checkers/__init__.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
+# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
 # http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This program is free software; you can redistribute it and/or modify it under
@@ -12,7 +12,7 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """utilities methods and classes for checkers
 
 Base id of standard checkers (used in msg and report ids):
@@ -29,7 +29,10 @@
 11: typecheck
 12: logging
 13: string_format
-14-50: not yet used: reserved for future internal checkers.
+14: string_constant
+15: stdlib
+16: python3
+17-50: not yet used: reserved for future internal checkers.
 51-99: perhaps used: reserved for external checkers
 
 The raw_metrics checker has no number associated since it doesn't emit any
@@ -37,14 +40,16 @@
 
 """
 
+import sys
 import tokenize
-from os import listdir
-from os.path import dirname, join, isdir, splitext
+import warnings
 
-from logilab.astng.utils import ASTWalker
 from logilab.common.configuration import OptionsProviderMixIn
 
-from pylint.reporters import diff_string, EmptyReport
+from pylint.reporters import diff_string
+from pylint.utils import register_plugins
+from pylint.interfaces import UNDEFINED
+
 
 def table_lines_from_stats(stats, old_stats, columns):
     """get values listed in <columns> from <stats> and <old_stats>,
@@ -54,7 +59,7 @@
     lines = []
     for m_type in columns:
         new = stats[m_type]
-        format = str
+        format = str # pylint: disable=redefined-builtin
         if isinstance(new, float):
             format = lambda num: '%.3f' % num
         old = old_stats.get(m_type)
@@ -67,7 +72,7 @@
     return lines
 
 
-class BaseChecker(OptionsProviderMixIn, ASTWalker):
+class BaseChecker(OptionsProviderMixIn):
     """base class for checkers"""
     # checker name (you may reuse an existing one)
     name = None
@@ -79,27 +84,21 @@
     msgs = {}
     # reports issued by this checker
     reports = ()
+    # mark this checker as enabled or not.
+    enabled = True
 
     def __init__(self, linter=None):
         """checker instances should have the linter as argument
 
         linter is an object implementing ILinter
         """
-        ASTWalker.__init__(self, self)
         self.name = self.name.lower()
         OptionsProviderMixIn.__init__(self)
         self.linter = linter
-        # messages that are active for the current check
-        self.active_msgs = set()
 
-    def add_message(self, msg_id, line=None, node=None, args=None):
+    def add_message(self, msg_id, line=None, node=None, args=None, confidence=UNDEFINED):
         """add a message of a given type"""
-        self.linter.add_message(msg_id, line, node, args)
-
-    def package_dir(self):
-        """return the base directory for the analysed package"""
-        return dirname(self.linter.base_file)
-
+        self.linter.add_message(msg_id, line, node, args, confidence)
 
     # dummy methods implementing the IChecker interface
 
@@ -109,55 +108,17 @@
     def close(self):
         """called after visiting project (i.e set of modules)"""
 
-class BaseRawChecker(BaseChecker):
-    """base class for raw checkers"""
 
-    def process_module(self, node):
-        """process a module
-
-        the module's content is accessible via the stream object
-
-        stream must implement the readline method
-        """
-        stream = node.file_stream
-        stream.seek(0) # XXX may be removed with astng > 0.23
-        self.process_tokens(tokenize.generate_tokens(stream.readline))
+class BaseTokenChecker(BaseChecker):
+    """Base class for checkers that want to have access to the token stream."""
 
     def process_tokens(self, tokens):
-        """should be overridden by subclasses"""
+        """Should be overridden by subclasses."""
         raise NotImplementedError()
 
 
-PY_EXTS = ('.py', '.pyc', '.pyo', '.pyw', '.so', '.dll')
-
 def initialize(linter):
     """initialize linter with checkers in this package """
-    package_load(linter, __path__[0])
+    register_plugins(linter, __path__[0])
 
-def package_load(linter, directory):
-    """load all module and package in the given directory, looking for a
-    'register' function in each one, used to register pylint checkers
-    """
-    globs = globals()
-    imported = {}
-    for filename in listdir(directory):
-        basename, extension = splitext(filename)
-        if basename in imported or basename == '__pycache__':
-            continue
-        if extension in PY_EXTS and basename != '__init__' or (
-             not extension and basename != 'CVS' and
-             isdir(join(directory, basename))):
-            try:
-                module = __import__(basename, globs, globs, None)
-            except ValueError:
-                # empty module name (usually emacs auto-save files)
-                continue
-            except ImportError, exc:
-                import sys
-                print >> sys.stderr, "Problem importing module %s: %s" % (filename, exc)
-            else:
-                if hasattr(module, 'register'):
-                    module.register(linter)
-                    imported[basename] = 1
-
-__all__ = ('CheckerHandler', 'BaseChecker', 'initialize', 'package_load')
+__all__ = ('BaseChecker', 'initialize')
diff --git a/third_party/pylint/checkers/base.py b/third_party/pylint/checkers/base.py
index 2062ae2..750d661 100644
--- a/third_party/pylint/checkers/base.py
+++ b/third_party/pylint/checkers/base.py
@@ -1,6 +1,7 @@
-# Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
-# Copyright (c) 2009-2010 Arista Networks, Inc.
+# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
 # http://www.logilab.fr/ -- mailto:contact@logilab.fr
+# Copyright (c) 2009-2010 Arista Networks, Inc.
+#
 # This program is free software; you can redistribute it and/or modify it under
 # the terms of the GNU General Public License as published by the Free Software
 # Foundation; either version 2 of the License, or (at your option) any later
@@ -12,39 +13,98 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """basic checker for Python code"""
 
-
-from logilab import astng
-from logilab.common.ureports import Table
-from logilab.astng import are_exclusive
-
-from pylint.interfaces import IASTNGChecker
-from pylint.reporters import diff_string
-from pylint.checkers import BaseChecker, EmptyReport
-from pylint.checkers.utils import check_messages, clobber_in_except, is_inside_except
-
-
+import collections
+import itertools
+import sys
 import re
 
+import six
+from six.moves import zip  # pylint: disable=redefined-builtin
+
+from logilab.common.ureports import Table
+
+import astroid
+import astroid.bases
+from astroid import are_exclusive, InferenceError
+
+from pylint.interfaces import IAstroidChecker, INFERENCE, INFERENCE_FAILURE, HIGH
+from pylint.utils import EmptyReport
+from pylint.reporters import diff_string
+from pylint.checkers import BaseChecker
+from pylint.checkers.utils import (
+    check_messages,
+    clobber_in_except,
+    is_builtin_object,
+    is_inside_except,
+    overrides_a_method,
+    safe_infer,
+    get_argument_from_call,
+    has_known_bases,
+    NoSuchArgumentError,
+    is_import_error,
+    unimplemented_abstract_methods,
+    )
+
+
 # regex for class/function/variable/constant name
 CLASS_NAME_RGX = re.compile('[A-Z_][a-zA-Z0-9]+$')
 MOD_NAME_RGX = re.compile('(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$')
 CONST_NAME_RGX = re.compile('(([A-Z_][A-Z0-9_]*)|(__.*__))$')
 COMP_VAR_RGX = re.compile('[A-Za-z_][A-Za-z0-9_]*$')
 DEFAULT_NAME_RGX = re.compile('[a-z_][a-z0-9_]{2,30}$')
+CLASS_ATTRIBUTE_RGX = re.compile(r'([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$')
 # do not require a doc string on system methods
 NO_REQUIRED_DOC_RGX = re.compile('__.*__')
+REVERSED_METHODS = (('__getitem__', '__len__'),
+                    ('__reversed__', ))
+
+PY33 = sys.version_info >= (3, 3)
+PY3K = sys.version_info >= (3, 0)
+BAD_FUNCTIONS = ['map', 'filter']
+if sys.version_info < (3, 0):
+    BAD_FUNCTIONS.append('input')
+
+# Name categories that are always consistent with all naming conventions.
+EXEMPT_NAME_CATEGORIES = set(('exempt', 'ignore'))
+
+# A mapping from builtin-qname -> symbol, to be used when generating messages
+# about dangerous default values as arguments
+DEFAULT_ARGUMENT_SYMBOLS = dict(
+    zip(['.'.join([astroid.bases.BUILTINS, x]) for x in ('set', 'dict', 'list')],
+        ['set()', '{}', '[]'])
+)
 
 del re
 
+def _redefines_import(node):
+    """ Detect that the given node (AssName) is inside an
+    exception handler and redefines an import from the tryexcept body.
+    Returns True if the node redefines an import, False otherwise.
+    """
+    current = node
+    while current and not isinstance(current.parent, astroid.ExceptHandler):
+        current = current.parent
+    if not current or not is_import_error(current.parent):
+        return False
+    try_block = current.parent.parent
+    for import_node in try_block.nodes_of_class((astroid.From, astroid.Import)):
+        for name, alias in import_node.names:
+            if alias:
+                if alias == node.name:
+                    return True
+            elif name == node.name:
+                return True
+    return False
+
 def in_loop(node):
     """return True if the node is inside a kind of for loop"""
     parent = node.parent
     while parent is not None:
-        if isinstance(parent, (astng.For, astng.ListComp, astng.SetComp,
-                               astng.DictComp, astng.GenExpr)):
+        if isinstance(parent, (astroid.For, astroid.ListComp, astroid.SetComp,
+                               astroid.DictComp, astroid.GenExpr)):
             return True
         parent = parent.parent
     return False
@@ -61,6 +121,76 @@
             return True
     return False
 
+def _loop_exits_early(loop):
+    """Returns true if a loop has a break statement in its body."""
+    loop_nodes = (astroid.For, astroid.While)
+    # Loop over body explicitly to avoid matching break statements
+    # in orelse.
+    for child in loop.body:
+        if isinstance(child, loop_nodes):
+            # break statement may be in orelse of child loop.
+            # pylint: disable=superfluous-parens
+            for orelse in (child.orelse or ()):
+                for _ in orelse.nodes_of_class(astroid.Break, skip_klass=loop_nodes):
+                    return True
+            continue
+        for _ in child.nodes_of_class(astroid.Break, skip_klass=loop_nodes):
+            return True
+    return False
+
+def _is_multi_naming_match(match, node_type, confidence):
+    return (match is not None and
+            match.lastgroup is not None and
+            match.lastgroup not in EXEMPT_NAME_CATEGORIES
+            and (node_type != 'method' or confidence != INFERENCE_FAILURE))
+
+
+if sys.version_info < (3, 0):
+    PROPERTY_CLASSES = set(('__builtin__.property', 'abc.abstractproperty'))
+else:
+    PROPERTY_CLASSES = set(('builtins.property', 'abc.abstractproperty'))
+
+
+def _determine_function_name_type(node):
+    """Determine the name type whose regex the a function's name should match.
+
+    :param node: A function node.
+    :returns: One of ('function', 'method', 'attr')
+    """
+    if not node.is_method():
+        return 'function'
+    if node.decorators:
+        decorators = node.decorators.nodes
+    else:
+        decorators = []
+    for decorator in decorators:
+        # If the function is a property (decorated with @property
+        # or @abc.abstractproperty), the name type is 'attr'.
+        if (isinstance(decorator, astroid.Name) or
+                (isinstance(decorator, astroid.Getattr) and
+                 decorator.attrname == 'abstractproperty')):
+            infered = safe_infer(decorator)
+            if infered and infered.qname() in PROPERTY_CLASSES:
+                return 'attr'
+        # If the function is decorated using the prop_method.{setter,getter}
+        # form, treat it like an attribute as well.
+        elif (isinstance(decorator, astroid.Getattr) and
+              decorator.attrname in ('setter', 'deleter')):
+            return 'attr'
+    return 'method'
+
+
+
+def _has_abstract_methods(node):
+    """
+    Determine if the given `node` has abstract methods.
+
+    The methods should be made abstract by decorating them
+    with `abc` decorators.
+    """
+    return len(unimplemented_abstract_methods(node)) > 0
+
+
 def report_by_type_stats(sect, stats, old_stats):
     """make a report of
 
@@ -112,115 +242,193 @@
     """
     if node.decorators:
         for decorator in node.decorators.nodes:
-            if (isinstance(decorator, astng.Getattr) and
-                decorator.expr.name == node.name):
+            if (isinstance(decorator, astroid.Getattr) and
+                    getattr(decorator.expr, 'name', None) == node.name):
                 return True
     return False
 
 class _BasicChecker(BaseChecker):
-    __implements__ = IASTNGChecker
+    __implements__ = IAstroidChecker
     name = 'basic'
 
 class BasicErrorChecker(_BasicChecker):
     msgs = {
-    'E0100': ('__init__ method is a generator',
-              'Used when the special class method __init__ is turned into a '
-              'generator by a yield in its body.'),
-    'E0101': ('Explicit return in __init__',
-              'Used when the special class method __init__ has an explicit \
-              return value.'),
-    'E0102': ('%s already defined line %s',
-              'Used when a function / class / method is redefined.'),
-    'E0103': ('%r not properly in loop',
-              'Used when break or continue keywords are used outside a loop.'),
+        'E0100': ('__init__ method is a generator',
+                  'init-is-generator',
+                  'Used when the special class method __init__ is turned into a '
+                  'generator by a yield in its body.'),
+        'E0101': ('Explicit return in __init__',
+                  'return-in-init',
+                  'Used when the special class method __init__ has an explicit '
+                  'return value.'),
+        'E0102': ('%s already defined line %s',
+                  'function-redefined',
+                  'Used when a function / class / method is redefined.'),
+        'E0103': ('%r not properly in loop',
+                  'not-in-loop',
+                  'Used when break or continue keywords are used outside a loop.'),
+        'E0104': ('Return outside function',
+                  'return-outside-function',
+                  'Used when a "return" statement is found outside a function or '
+                  'method.'),
+        'E0105': ('Yield outside function',
+                  'yield-outside-function',
+                  'Used when a "yield" statement is found outside a function or '
+                  'method.'),
+        'E0106': ('Return with argument inside generator',
+                  'return-arg-in-generator',
+                  'Used when a "return" statement with an argument is found '
+                  'outside in a generator function or method (e.g. with some '
+                  '"yield" statements).',
+                  {'maxversion': (3, 3)}),
+        'E0107': ("Use of the non-existent %s operator",
+                  'nonexistent-operator',
+                  "Used when you attempt to use the C-style pre-increment or"
+                  "pre-decrement operator -- and ++, which doesn't exist in Python."),
+        'E0108': ('Duplicate argument name %s in function definition',
+                  'duplicate-argument-name',
+                  'Duplicate argument names in function definitions are syntax'
+                  ' errors.'),
+        'E0110': ('Abstract class %r with abstract methods instantiated',
+                  'abstract-class-instantiated',
+                  'Used when an abstract class with `abc.ABCMeta` as metaclass '
+                  'has abstract methods and is instantiated.'),
+        'W0120': ('Else clause on loop without a break statement',
+                  'useless-else-on-loop',
+                  'Loops should only have an else clause if they can exit early '
+                  'with a break statement, otherwise the statements under else '
+                  'should be on the same scope as the loop itself.'),
+        }
 
-    'E0104': ('Return outside function',
-              'Used when a "return" statement is found outside a function or '
-              'method.'),
-    'E0105': ('Yield outside function',
-              'Used when a "yield" statement is found outside a function or '
-              'method.'),
-    'E0106': ('Return with argument inside generator',
-              'Used when a "return" statement with an argument is found '
-              'outside in a generator function or method (e.g. with some '
-              '"yield" statements).'),
-    'E0107': ("Use of the non-existent %s operator",
-              "Used when you attempt to use the C-style pre-increment or"
-              "pre-decrement operator -- and ++, which doesn't exist in Python."),
-    }
-
-    def __init__(self, linter):
-        _BasicChecker.__init__(self, linter)
-
-    @check_messages('E0102')
+    @check_messages('function-redefined')
     def visit_class(self, node):
         self._check_redefinition('class', node)
 
-    @check_messages('E0100', 'E0101', 'E0102', 'E0106')
+    @check_messages('init-is-generator', 'return-in-init',
+                    'function-redefined', 'return-arg-in-generator',
+                    'duplicate-argument-name')
     def visit_function(self, node):
         if not redefined_by_decorator(node):
             self._check_redefinition(node.is_method() and 'method' or 'function', node)
         # checks for max returns, branch, return in __init__
-        returns = node.nodes_of_class(astng.Return,
-                                      skip_klass=(astng.Function, astng.Class))
+        returns = node.nodes_of_class(astroid.Return,
+                                      skip_klass=(astroid.Function, astroid.Class))
         if node.is_method() and node.name == '__init__':
             if node.is_generator():
-                self.add_message('E0100', node=node)
+                self.add_message('init-is-generator', node=node)
             else:
                 values = [r.value for r in returns]
-                if  [v for v in values if not (v is None or
-                    (isinstance(v, astng.Const) and v.value is None)
-                    or  (isinstance(v, astng.Name) and v.name == 'None'))]:
-                    self.add_message('E0101', node=node)
+                # Are we returning anything but None from constructors
+                if [v for v in values
+                        if not (v is None or
+                                (isinstance(v, astroid.Const) and v.value is None) or
+                                (isinstance(v, astroid.Name)  and v.name == 'None')
+                               )]:
+                    self.add_message('return-in-init', node=node)
         elif node.is_generator():
             # make sure we don't mix non-None returns and yields
-            for retnode in returns:
-                if isinstance(retnode.value, astng.Const) and \
-                       retnode.value.value is not None:
-                    self.add_message('E0106', node=node,
-                                     line=retnode.fromlineno)
+            if not PY33:
+                for retnode in returns:
+                    if isinstance(retnode.value, astroid.Const) and \
+                           retnode.value.value is not None:
+                        self.add_message('return-arg-in-generator', node=node,
+                                         line=retnode.fromlineno)
+        # Check for duplicate names
+        args = set()
+        for name in node.argnames():
+            if name in args:
+                self.add_message('duplicate-argument-name', node=node, args=(name,))
+            else:
+                args.add(name)
 
-    @check_messages('E0104')
+
+    @check_messages('return-outside-function')
     def visit_return(self, node):
-        if not isinstance(node.frame(), astng.Function):
-            self.add_message('E0104', node=node)
+        if not isinstance(node.frame(), astroid.Function):
+            self.add_message('return-outside-function', node=node)
 
-    @check_messages('E0105')
+    @check_messages('yield-outside-function')
     def visit_yield(self, node):
-        if not isinstance(node.frame(), astng.Function):
-            self.add_message('E0105', node=node)
+        if not isinstance(node.frame(), (astroid.Function, astroid.Lambda)):
+            self.add_message('yield-outside-function', node=node)
 
-    @check_messages('E0103')
+    @check_messages('not-in-loop')
     def visit_continue(self, node):
         self._check_in_loop(node, 'continue')
 
-    @check_messages('E0103')
+    @check_messages('not-in-loop')
     def visit_break(self, node):
         self._check_in_loop(node, 'break')
 
-    @check_messages('E0107')
+    @check_messages('useless-else-on-loop')
+    def visit_for(self, node):
+        self._check_else_on_loop(node)
+
+    @check_messages('useless-else-on-loop')
+    def visit_while(self, node):
+        self._check_else_on_loop(node)
+
+    @check_messages('nonexistent-operator')
     def visit_unaryop(self, node):
-        """check use of the non-existent ++ adn -- operator operator"""
+        """check use of the non-existent ++ and -- operator operator"""
         if ((node.op in '+-') and
-            isinstance(node.operand, astng.UnaryOp) and
-            (node.operand.op == node.op)):
-            self.add_message('E0107', node=node, args=node.op*2)
+                isinstance(node.operand, astroid.UnaryOp) and
+                (node.operand.op == node.op)):
+            self.add_message('nonexistent-operator', node=node, args=node.op*2)
+
+    @check_messages('abstract-class-instantiated')
+    def visit_callfunc(self, node):
+        """ Check instantiating abstract class with
+        abc.ABCMeta as metaclass.
+        """
+        try:
+            infered = next(node.func.infer())
+        except astroid.InferenceError:
+            return
+        if not isinstance(infered, astroid.Class):
+            return
+        # __init__ was called
+        metaclass = infered.metaclass()
+        abstract_methods = _has_abstract_methods(infered)
+        if metaclass is None:
+            # Python 3.4 has `abc.ABC`, which won't be detected
+            # by ClassNode.metaclass()
+            for ancestor in infered.ancestors():
+                if ancestor.qname() == 'abc.ABC' and abstract_methods:
+                    self.add_message('abstract-class-instantiated',
+                                     args=(infered.name, ),
+                                     node=node)
+                    break
+            return
+        if metaclass.qname() == 'abc.ABCMeta' and abstract_methods:
+            self.add_message('abstract-class-instantiated',
+                             args=(infered.name, ),
+                             node=node)
+
+    def _check_else_on_loop(self, node):
+        """Check that any loop with an else clause has a break statement."""
+        if node.orelse and not _loop_exits_early(node):
+            self.add_message('useless-else-on-loop', node=node,
+                             # This is not optimal, but the line previous
+                             # to the first statement in the else clause
+                             # will usually be the one that contains the else:.
+                             line=node.orelse[0].lineno - 1)
 
     def _check_in_loop(self, node, node_name):
         """check that a node is inside a for or while loop"""
         _node = node.parent
         while _node:
-            if isinstance(_node, (astng.For, astng.While)):
+            if isinstance(_node, (astroid.For, astroid.While)):
                 break
             _node = _node.parent
         else:
-            self.add_message('E0103', node=node, args=node_name)
+            self.add_message('not-in-loop', node=node, args=node_name)
 
     def _check_redefinition(self, redeftype, node):
         """check for redefinition of a function / method / class name"""
         defined_self = node.parent.frame()[node.name]
         if defined_self is not node and not are_exclusive(node, defined_self):
-            self.add_message('E0102', node=node,
+            self.add_message('function-redefined', node=node,
                              args=(redeftype, defined_self.fromlineno))
 
 
@@ -228,7 +436,6 @@
 class BasicChecker(_BasicChecker):
     """checks for :
     * doc strings
-    * modules / classes / functions / methods / arguments / variables name
     * number of arguments, local variables, branches, returns and statements in
 functions, methods
     * required module attributes
@@ -237,60 +444,87 @@
     * uses of the global statement
     """
 
-    __implements__ = IASTNGChecker
+    __implements__ = IAstroidChecker
 
     name = 'basic'
     msgs = {
-    'W0101': ('Unreachable code',
-              'Used when there is some code behind a "return" or "raise" \
-              statement, which will never be accessed.'),
-    'W0102': ('Dangerous default value %s as argument',
-              'Used when a mutable value as list or dictionary is detected in \
-              a default value for an argument.'),
-    'W0104': ('Statement seems to have no effect',
-              'Used when a statement doesn\'t have (or at least seems to) \
-              any effect.'),
-    'W0105': ('String statement has no effect',
-              'Used when a string is used as a statement (which of course \
-              has no effect). This is a particular case of W0104 with its \
-              own message so you can easily disable it if you\'re using \
-              those strings as documentation, instead of comments.'),
-    'W0106': ('Expression "%s" is assigned to nothing',
-              'Used when an expression that is not a function call is assigned\
-              to nothing. Probably something else was intended.'),
-    'W0108': ('Lambda may not be necessary',
-              'Used when the body of a lambda expression is a function call \
-              on the same argument list as the lambda itself; such lambda \
-              expressions are in all but a few cases replaceable with the \
-              function being called in the body of the lambda.'),
-    'W0109': ("Duplicate key %r in dictionary",
-              "Used when a dictionary expression binds the same key multiple \
-              times."),
-    'W0122': ('Use of the exec statement',
-              'Used when you use the "exec" statement, to discourage its \
-              usage. That doesn\'t mean you can not use it !'),
+        'W0101': ('Unreachable code',
+                  'unreachable',
+                  'Used when there is some code behind a "return" or "raise" '
+                  'statement, which will never be accessed.'),
+        'W0102': ('Dangerous default value %s as argument',
+                  'dangerous-default-value',
+                  'Used when a mutable value as list or dictionary is detected in '
+                  'a default value for an argument.'),
+        'W0104': ('Statement seems to have no effect',
+                  'pointless-statement',
+                  'Used when a statement doesn\'t have (or at least seems to) '
+                  'any effect.'),
+        'W0105': ('String statement has no effect',
+                  'pointless-string-statement',
+                  'Used when a string is used as a statement (which of course '
+                  'has no effect). This is a particular case of W0104 with its '
+                  'own message so you can easily disable it if you\'re using '
+                  'those strings as documentation, instead of comments.'),
+        'W0106': ('Expression "%s" is assigned to nothing',
+                  'expression-not-assigned',
+                  'Used when an expression that is not a function call is assigned '
+                  'to nothing. Probably something else was intended.'),
+        'W0108': ('Lambda may not be necessary',
+                  'unnecessary-lambda',
+                  'Used when the body of a lambda expression is a function call '
+                  'on the same argument list as the lambda itself; such lambda '
+                  'expressions are in all but a few cases replaceable with the '
+                  'function being called in the body of the lambda.'),
+        'W0109': ("Duplicate key %r in dictionary",
+                  'duplicate-key',
+                  'Used when a dictionary expression binds the same key multiple '
+                  'times.'),
+        'W0122': ('Use of exec',
+                  'exec-used',
+                  'Used when you use the "exec" statement (function for Python '
+                  '3), to discourage its usage. That doesn\'t '
+                  'mean you can not use it !'),
+        'W0123': ('Use of eval',
+                  'eval-used',
+                  'Used when you use the "eval" function, to discourage its '
+                  'usage. Consider using `ast.literal_eval` for safely evaluating '
+                  'strings containing Python expressions '
+                  'from untrusted sources. '),
+        'W0141': ('Used builtin function %r',
+                  'bad-builtin',
+                  'Used when a black listed builtin function is used (see the '
+                  'bad-function option). Usual black listed functions are the ones '
+                  'like map, or filter , where Python offers now some cleaner '
+                  'alternative like list comprehension.'),
+        'W0142': ('Used * or ** magic',
+                  'star-args',
+                  'Used when a function or method is called using `*args` or '
+                  '`**kwargs` to dispatch arguments. This doesn\'t improve '
+                  'readability and should be used with care.'),
+        'W0150': ("%s statement in finally block may swallow exception",
+                  'lost-exception',
+                  'Used when a break or a return statement is found inside the '
+                  'finally clause of a try...finally block: the exceptions raised '
+                  'in the try clause will be silently swallowed instead of being '
+                  're-raised.'),
+        'W0199': ('Assert called on a 2-uple. Did you mean \'assert x,y\'?',
+                  'assert-on-tuple',
+                  'A call of assert on a tuple will always evaluate to true if '
+                  'the tuple is not empty, and will always evaluate to false if '
+                  'it is.'),
+        'C0121': ('Missing required attribute "%s"', # W0103
+                  'missing-module-attribute',
+                  'Used when an attribute required for modules is missing.'),
 
-    'W0141': ('Used builtin function %r',
-              'Used when a black listed builtin function is used (see the '
-              'bad-function option). Usual black listed functions are the ones '
-              'like map, or filter , where Python offers now some cleaner '
-              'alternative like list comprehension.'),
-    'W0142': ('Used * or ** magic',
-              'Used when a function or method is called using `*args` or '
-              '`**kwargs` to dispatch arguments. This doesn\'t improve '
-              'readability and should be used with care.'),
-    'W0150': ("%s statement in finally block may swallow exception",
-              "Used when a break or a return statement is found inside the \
-              finally clause of a try...finally block: the exceptions raised \
-              in the try clause will be silently swallowed instead of being \
-              re-raised."),
-    'W0199': ('Assert called on a 2-uple. Did you mean \'assert x,y\'?',
-              'A call of assert on a tuple will always evaluate to true if '
-              'the tuple is not empty, and will always evaluate to false if '
-              'it is.'),
-
-    'C0121': ('Missing required attribute "%s"', # W0103
-              'Used when an attribute required for modules is missing.'),
+        'E0109': ('Missing argument to reversed()',
+                  'missing-reversed-argument',
+                  'Used when reversed() builtin didn\'t receive an argument.'),
+        'E0111': ('The first reversed() argument is not a sequence',
+                  'bad-reversed-sequence',
+                  'Used when the first argument to reversed() builtin '
+                  'isn\'t a sequence (does not implement __reversed__, '
+                  'nor __getitem__ and __len__'),
 
     }
 
@@ -299,15 +533,15 @@
                  'metavar' : '<attributes>',
                  'help' : 'Required attributes for module, separated by a '
                           'comma'}
-                ),
+               ),
                ('bad-functions',
-                {'default' : ('map', 'filter', 'apply', 'input'),
+                {'default' : BAD_FUNCTIONS,
                  'type' :'csv', 'metavar' : '<builtin function names>',
                  'help' : 'List of builtins function names that should not be '
                           'used, separated by a comma'}
-                ),
-               )
-    reports = ( ('RP0101', 'Statistics by type', report_by_type_stats), )
+               ),
+              )
+    reports = (('RP0101', 'Statistics by type', report_by_type_stats),)
 
     def __init__(self, linter):
         _BasicChecker.__init__(self, linter)
@@ -321,45 +555,60 @@
         self.stats = self.linter.add_stats(module=0, function=0,
                                            method=0, class_=0)
 
+    @check_messages('missing-module-attribute')
     def visit_module(self, node):
         """check module name, docstring and required arguments
         """
         self.stats['module'] += 1
         for attr in self.config.required_attributes:
             if attr not in node:
-                self.add_message('C0121', node=node, args=attr)
+                self.add_message('missing-module-attribute', node=node, args=attr)
 
-    def visit_class(self, node):
+    def visit_class(self, node): # pylint: disable=unused-argument
         """check module name, docstring and redefinition
         increment branch counter
         """
         self.stats['class'] += 1
 
-    @check_messages('W0104', 'W0105')
+    @check_messages('pointless-statement', 'pointless-string-statement',
+                    'expression-not-assigned')
     def visit_discard(self, node):
         """check for various kind of statements without effect"""
         expr = node.value
-        if isinstance(expr, astng.Const) and isinstance(expr.value,
-                                                        basestring):
+        if isinstance(expr, astroid.Const) and isinstance(expr.value,
+                                                          six.string_types):
             # treat string statement in a separated message
-            self.add_message('W0105', node=node)
+            # Handle PEP-257 attribute docstrings.
+            # An attribute docstring is defined as being a string right after
+            # an assignment at the module level, class level or __init__ level.
+            scope = expr.scope()
+            if isinstance(scope, (astroid.Class, astroid.Module, astroid.Function)):
+                if isinstance(scope, astroid.Function) and scope.name != '__init__':
+                    pass
+                else:
+                    sibling = expr.previous_sibling()
+                    if (sibling is not None and sibling.scope() is scope and
+                            isinstance(sibling, astroid.Assign)):
+                        return
+            self.add_message('pointless-string-statement', node=node)
             return
         # ignore if this is :
         # * a direct function call
         # * the unique child of a try/except body
         # * a yield (which are wrapped by a discard node in _ast XXX)
         # warn W0106 if we have any underlying function call (we can't predict
-        # side effects), else W0104
-        if (isinstance(expr, (astng.Yield, astng.CallFunc)) or
-            (isinstance(node.parent, astng.TryExcept) and
-             node.parent.body == [node])):
+        # side effects), else pointless-statement
+        if (isinstance(expr, (astroid.Yield, astroid.CallFunc)) or
+                (isinstance(node.parent, astroid.TryExcept) and
+                 node.parent.body == [node])):
             return
-        if any(expr.nodes_of_class(astng.CallFunc)):
-            self.add_message('W0106', node=node, args=expr.as_string())
+        if any(expr.nodes_of_class(astroid.CallFunc)):
+            self.add_message('expression-not-assigned', node=node,
+                             args=expr.as_string())
         else:
-            self.add_message('W0104', node=node)
+            self.add_message('pointless-statement', node=node)
 
-    @check_messages('W0108')
+    @check_messages('unnecessary-lambda')
     def visit_lambda(self, node):
         """check whether or not the lambda is suspicious
         """
@@ -374,11 +623,11 @@
             # of the lambda.
             return
         call = node.body
-        if not isinstance(call, astng.CallFunc):
+        if not isinstance(call, astroid.CallFunc):
             # The body of the lambda must be a function call expression
             # for the lambda to be unnecessary.
             return
-        # XXX are lambda still different with astng >= 0.18 ?
+        # XXX are lambda still different with astroid >= 0.18 ?
         # *args and **kwargs need to be treated specially, since they
         # are structured differently between the lambda and the function
         # call (in the lambda they appear in the args.args list and are
@@ -388,15 +637,15 @@
         ordinary_args = list(node.args.args)
         if node.args.kwarg:
             if (not call.kwargs
-                or not isinstance(call.kwargs, astng.Name)
-                or node.args.kwarg != call.kwargs.name):
+                    or not isinstance(call.kwargs, astroid.Name)
+                    or node.args.kwarg != call.kwargs.name):
                 return
         elif call.kwargs:
             return
         if node.args.vararg:
             if (not call.starargs
-                or not isinstance(call.starargs, astng.Name)
-                or node.args.vararg != call.starargs.name):
+                    or not isinstance(call.starargs, astroid.Name)
+                    or node.args.vararg != call.starargs.name):
                 return
         elif call.starargs:
             return
@@ -404,32 +653,65 @@
         # ordinary_args[i].name == call.args[i].name.
         if len(ordinary_args) != len(call.args):
             return
-        for i in xrange(len(ordinary_args)):
-            if not isinstance(call.args[i], astng.Name):
+        for i in range(len(ordinary_args)):
+            if not isinstance(call.args[i], astroid.Name):
                 return
             if node.args.args[i].name != call.args[i].name:
                 return
-        self.add_message('W0108', line=node.fromlineno, node=node)
+        if (isinstance(node.body.func, astroid.Getattr) and
+                isinstance(node.body.func.expr, astroid.CallFunc)):
+            # Chained call, the intermediate call might
+            # return something else (but we don't check that, yet).
+            return
+        self.add_message('unnecessary-lambda', line=node.fromlineno, node=node)
 
+    @check_messages('dangerous-default-value')
     def visit_function(self, node):
         """check function name, docstring, arguments, redefinition,
         variable names, max locals
         """
         self.stats[node.is_method() and 'method' or 'function'] += 1
+        self._check_dangerous_default(node)
+
+    def _check_dangerous_default(self, node):
         # check for dangerous default values as arguments
+        is_iterable = lambda n: isinstance(n, (astroid.List,
+                                               astroid.Set,
+                                               astroid.Dict))
         for default in node.args.defaults:
             try:
-                value = default.infer().next()
-            except astng.InferenceError:
+                value = next(default.infer())
+            except astroid.InferenceError:
                 continue
-            if isinstance(value, (astng.Dict, astng.List)):
-                if value is default:
-                    msg = default.as_string()
-                else:
-                    msg = '%s (%s)' % (default.as_string(), value.as_string())
-                self.add_message('W0102', node=node, args=(msg,))
 
-    @check_messages('W0101', 'W0150')
+            if (isinstance(value, astroid.Instance) and
+                    value.qname() in DEFAULT_ARGUMENT_SYMBOLS):
+
+                if value is default:
+                    msg = DEFAULT_ARGUMENT_SYMBOLS[value.qname()]
+                elif type(value) is astroid.Instance or is_iterable(value):
+                    # We are here in the following situation(s):
+                    #   * a dict/set/list/tuple call which wasn't inferred
+                    #     to a syntax node ({}, () etc.). This can happen
+                    #     when the arguments are invalid or unknown to
+                    #     the inference.
+                    #   * a variable from somewhere else, which turns out to be a list
+                    #     or a dict.
+                    if is_iterable(default):
+                        msg = value.pytype()
+                    elif isinstance(default, astroid.CallFunc):
+                        msg = '%s() (%s)' % (value.name, value.qname())
+                    else:
+                        msg = '%s (%s)' % (default.as_string(), value.qname())
+                else:
+                    # this argument is a name
+                    msg = '%s (%s)' % (default.as_string(),
+                                       DEFAULT_ARGUMENT_SYMBOLS[value.qname()])
+                self.add_message('dangerous-default-value',
+                                 node=node,
+                                 args=(msg, ))
+
+    @check_messages('unreachable', 'lost-exception')
     def visit_return(self, node):
         """1 - check is the node has a right sibling (if so, that's some
         unreachable code)
@@ -438,16 +720,16 @@
         """
         self._check_unreachable(node)
         # Is it inside final body of a try...finally bloc ?
-        self._check_not_in_finally(node, 'return', (astng.Function,))
+        self._check_not_in_finally(node, 'return', (astroid.Function,))
 
-    @check_messages('W0101')
+    @check_messages('unreachable')
     def visit_continue(self, node):
         """check is the node has a right sibling (if so, that's some unreachable
         code)
         """
         self._check_unreachable(node)
 
-    @check_messages('W0101', 'W0150')
+    @check_messages('unreachable', 'lost-exception')
     def visit_break(self, node):
         """1 - check is the node has a right sibling (if so, that's some
         unreachable code)
@@ -457,36 +739,44 @@
         # 1 - Is it right sibling ?
         self._check_unreachable(node)
         # 2 - Is it inside final body of a try...finally bloc ?
-        self._check_not_in_finally(node, 'break', (astng.For, astng.While,))
+        self._check_not_in_finally(node, 'break', (astroid.For, astroid.While,))
 
-    @check_messages('W0101')
+    @check_messages('unreachable')
     def visit_raise(self, node):
-        """check is the node has a right sibling (if so, that's some unreachable
+        """check if the node has a right sibling (if so, that's some unreachable
         code)
         """
         self._check_unreachable(node)
 
-    @check_messages('W0122')
+    @check_messages('exec-used')
     def visit_exec(self, node):
         """just print a warning on exec statements"""
-        self.add_message('W0122', node=node)
+        self.add_message('exec-used', node=node)
 
-    @check_messages('W0141', 'W0142')
+    @check_messages('bad-builtin', 'star-args', 'eval-used',
+                    'exec-used', 'missing-reversed-argument',
+                    'bad-reversed-sequence')
     def visit_callfunc(self, node):
         """visit a CallFunc node -> check if this is not a blacklisted builtin
         call and check for * or ** use
         """
-        if isinstance(node.func, astng.Name):
+        if isinstance(node.func, astroid.Name):
             name = node.func.name
             # ignore the name if it's not a builtin (i.e. not defined in the
             # locals nor globals scope)
             if not (name in node.frame() or
                     name in node.root()):
+                if name == 'exec':
+                    self.add_message('exec-used', node=node)
+                elif name == 'reversed':
+                    self._check_reversed(node)
+                elif name == 'eval':
+                    self.add_message('eval-used', node=node)
                 if name in self.config.bad_functions:
-                    self.add_message('W0141', node=node, args=name)
+                    self.add_message('bad-builtin', node=node, args=name)
         if node.starargs or node.kwargs:
             scope = node.scope()
-            if isinstance(scope, astng.Function):
+            if isinstance(scope, astroid.Function):
                 toprocess = [(n, vn) for (n, vn) in ((node.starargs, scope.args.vararg),
                                                      (node.kwargs, scope.args.kwarg)) if n]
                 if toprocess:
@@ -494,32 +784,32 @@
                         if getattr(cfnode, 'name', None) == fargname:
                             toprocess.remove((cfnode, fargname))
                     if not toprocess:
-                        return # W0142 can be skipped
-            self.add_message('W0142', node=node.func)
+                        return # star-args can be skipped
+            self.add_message('star-args', node=node.func)
 
-    @check_messages('W0199')
+    @check_messages('assert-on-tuple')
     def visit_assert(self, node):
         """check the use of an assert statement on a tuple."""
-        if node.fail is None and isinstance(node.test, astng.Tuple) and \
-           len(node.test.elts) == 2:
-             self.add_message('W0199', line=node.fromlineno, node=node)
+        if node.fail is None and isinstance(node.test, astroid.Tuple) and \
+                len(node.test.elts) == 2:
+            self.add_message('assert-on-tuple', node=node)
 
-    @check_messages('W0109')
+    @check_messages('duplicate-key')
     def visit_dict(self, node):
         """check duplicate key in dictionary"""
         keys = set()
-        for k, v in node.items:
-            if isinstance(k, astng.Const):
+        for k, _ in node.items:
+            if isinstance(k, astroid.Const):
                 key = k.value
                 if key in keys:
-                    self.add_message('W0109', node=node, args=key)
+                    self.add_message('duplicate-key', node=node, args=key)
                 keys.add(key)
 
     def visit_tryfinally(self, node):
         """update try...finally flag"""
         self._tryfinallys.append(node)
 
-    def leave_tryfinally(self, node):
+    def leave_tryfinally(self, node): # pylint: disable=unused-argument
         """update try...finally flag"""
         self._tryfinallys.pop()
 
@@ -527,7 +817,7 @@
         """check unreachable code"""
         unreach_stmt = node.next_sibling()
         if unreach_stmt is not None:
-            self.add_message('W0101', node=unreach_stmt)
+            self.add_message('unreachable', node=unreach_stmt)
 
     def _check_not_in_finally(self, node, node_name, breaker_classes=()):
         """check that a node is not inside a finally clause of a
@@ -542,91 +832,133 @@
         _node = node
         while _parent and not isinstance(_parent, breaker_classes):
             if hasattr(_parent, 'finalbody') and _node in _parent.finalbody:
-                self.add_message('W0150', node=node, args=node_name)
+                self.add_message('lost-exception', node=node, args=node_name)
                 return
             _node = _parent
             _parent = _node.parent
 
+    def _check_reversed(self, node):
+        """ check that the argument to `reversed` is a sequence """
+        try:
+            argument = safe_infer(get_argument_from_call(node, position=0))
+        except NoSuchArgumentError:
+            self.add_message('missing-reversed-argument', node=node)
+        else:
+            if argument is astroid.YES:
+                return
+            if argument is None:
+                # Nothing was inferred.
+                # Try to see if we have iter().
+                if isinstance(node.args[0], astroid.CallFunc):
+                    try:
+                        func = next(node.args[0].func.infer())
+                    except InferenceError:
+                        return
+                    if (getattr(func, 'name', None) == 'iter' and
+                            is_builtin_object(func)):
+                        self.add_message('bad-reversed-sequence', node=node)
+                return
 
+            if isinstance(argument, astroid.Instance):
+                if (argument._proxied.name == 'dict' and
+                        is_builtin_object(argument._proxied)):
+                    self.add_message('bad-reversed-sequence', node=node)
+                    return
+                elif any(ancestor.name == 'dict' and is_builtin_object(ancestor)
+                         for ancestor in argument._proxied.ancestors()):
+                    # mappings aren't accepted by reversed()
+                    self.add_message('bad-reversed-sequence', node=node)
+                    return
+
+                for methods in REVERSED_METHODS:
+                    for meth in methods:
+                        try:
+                            argument.getattr(meth)
+                        except astroid.NotFoundError:
+                            break
+                    else:
+                        break
+                else:
+                    # Check if it is a .deque. It doesn't seem that
+                    # we can retrieve special methods
+                    # from C implemented constructs.
+                    if argument._proxied.qname().endswith(".deque"):
+                        return
+                    self.add_message('bad-reversed-sequence', node=node)
+            elif not isinstance(argument, (astroid.List, astroid.Tuple)):
+                # everything else is not a proper sequence for reversed()
+                self.add_message('bad-reversed-sequence', node=node)
+
+_NAME_TYPES = {
+    'module': (MOD_NAME_RGX, 'module'),
+    'const': (CONST_NAME_RGX, 'constant'),
+    'class': (CLASS_NAME_RGX, 'class'),
+    'function': (DEFAULT_NAME_RGX, 'function'),
+    'method': (DEFAULT_NAME_RGX, 'method'),
+    'attr': (DEFAULT_NAME_RGX, 'attribute'),
+    'argument': (DEFAULT_NAME_RGX, 'argument'),
+    'variable': (DEFAULT_NAME_RGX, 'variable'),
+    'class_attribute': (CLASS_ATTRIBUTE_RGX, 'class attribute'),
+    'inlinevar': (COMP_VAR_RGX, 'inline iteration'),
+}
+
+def _create_naming_options():
+    name_options = []
+    for name_type, (rgx, human_readable_name) in six.iteritems(_NAME_TYPES):
+        name_type = name_type.replace('_', '-')
+        name_options.append((
+            '%s-rgx' % (name_type,),
+            {'default': rgx, 'type': 'regexp', 'metavar': '<regexp>',
+             'help': 'Regular expression matching correct %s names' % (human_readable_name,)}))
+        name_options.append((
+            '%s-name-hint' % (name_type,),
+            {'default': rgx.pattern, 'type': 'string', 'metavar': '<string>',
+             'help': 'Naming hint for %s names' % (human_readable_name,)}))
+    return tuple(name_options)
 
 class NameChecker(_BasicChecker):
     msgs = {
-    'C0102': ('Black listed name "%s"',
-              'Used when the name is listed in the black list (unauthorized \
-              names).'),
-    'C0103': ('Invalid name "%s" (should match %s)',
-              'Used when the name doesn\'t match the regular expression \
-              associated to its type (constant, variable, class...).'),
-
+        'C0102': ('Black listed name "%s"',
+                  'blacklisted-name',
+                  'Used when the name is listed in the black list (unauthorized '
+                  'names).'),
+        'C0103': ('Invalid %s name "%s"%s',
+                  'invalid-name',
+                  'Used when the name doesn\'t match the regular expression '
+                  'associated to its type (constant, variable, class...).'),
     }
-    options = (('module-rgx',
-                {'default' : MOD_NAME_RGX,
-                 'type' :'regexp', 'metavar' : '<regexp>',
-                 'help' : 'Regular expression which should only match correct '
-                          'module names'}
-                ),
-               ('const-rgx',
-                {'default' : CONST_NAME_RGX,
-                 'type' :'regexp', 'metavar' : '<regexp>',
-                 'help' : 'Regular expression which should only match correct '
-                          'module level names'}
-                ),
-               ('class-rgx',
-                {'default' : CLASS_NAME_RGX,
-                 'type' :'regexp', 'metavar' : '<regexp>',
-                 'help' : 'Regular expression which should only match correct '
-                          'class names'}
-                ),
-               ('function-rgx',
-                {'default' : DEFAULT_NAME_RGX,
-                 'type' :'regexp', 'metavar' : '<regexp>',
-                 'help' : 'Regular expression which should only match correct '
-                          'function names'}
-                ),
-               ('method-rgx',
-                {'default' : DEFAULT_NAME_RGX,
-                 'type' :'regexp', 'metavar' : '<regexp>',
-                 'help' : 'Regular expression which should only match correct '
-                          'method names'}
-                ),
-               ('attr-rgx',
-                {'default' : DEFAULT_NAME_RGX,
-                 'type' :'regexp', 'metavar' : '<regexp>',
-                 'help' : 'Regular expression which should only match correct '
-                          'instance attribute names'}
-                ),
-               ('argument-rgx',
-                {'default' : DEFAULT_NAME_RGX,
-                 'type' :'regexp', 'metavar' : '<regexp>',
-                 'help' : 'Regular expression which should only match correct '
-                          'argument names'}),
-               ('variable-rgx',
-                {'default' : DEFAULT_NAME_RGX,
-                 'type' :'regexp', 'metavar' : '<regexp>',
-                 'help' : 'Regular expression which should only match correct '
-                          'variable names'}
-                ),
-               ('inlinevar-rgx',
-                {'default' : COMP_VAR_RGX,
-                 'type' :'regexp', 'metavar' : '<regexp>',
-                 'help' : 'Regular expression which should only match correct '
-                          'list comprehension / generator expression variable \
-                          names'}
-                ),
-               # XXX use set
-               ('good-names',
+
+    options = (('good-names',
                 {'default' : ('i', 'j', 'k', 'ex', 'Run', '_'),
                  'type' :'csv', 'metavar' : '<names>',
                  'help' : 'Good variable names which should always be accepted,'
                           ' separated by a comma'}
-                ),
+               ),
                ('bad-names',
                 {'default' : ('foo', 'bar', 'baz', 'toto', 'tutu', 'tata'),
                  'type' :'csv', 'metavar' : '<names>',
                  'help' : 'Bad variable names which should always be refused, '
                           'separated by a comma'}
-                ),
-               )
+               ),
+               ('name-group',
+                {'default' : (),
+                 'type' :'csv', 'metavar' : '<name1:name2>',
+                 'help' : ('Colon-delimited sets of names that determine each'
+                           ' other\'s naming style when the name regexes'
+                           ' allow several styles.')}
+               ),
+               ('include-naming-hint',
+                {'default': False, 'type' : 'yn', 'metavar' : '<y_or_n>',
+                 'help': 'Include a hint for the correct naming format with invalid-name'}
+               ),
+              ) + _create_naming_options()
+
+
+    def __init__(self, linter):
+        _BasicChecker.__init__(self, linter)
+        self._name_category = {}
+        self._name_group = {}
+        self._bad_names = {}
 
     def open(self):
         self.stats = self.linter.add_stats(badname_module=0,
@@ -635,140 +967,275 @@
                                            badname_const=0,
                                            badname_variable=0,
                                            badname_inlinevar=0,
-                                           badname_argument=0)
+                                           badname_argument=0,
+                                           badname_class_attribute=0)
+        for group in self.config.name_group:
+            for name_type in group.split(':'):
+                self._name_group[name_type] = 'group_%s' % (group,)
 
-    @check_messages('C0102', 'C0103')
+    @check_messages('blacklisted-name', 'invalid-name')
     def visit_module(self, node):
         self._check_name('module', node.name.split('.')[-1], node)
+        self._bad_names = {}
 
-    @check_messages('C0102', 'C0103')
+    def leave_module(self, node): # pylint: disable=unused-argument
+        for all_groups in six.itervalues(self._bad_names):
+            if len(all_groups) < 2:
+                continue
+            groups = collections.defaultdict(list)
+            min_warnings = sys.maxsize
+            for group in six.itervalues(all_groups):
+                groups[len(group)].append(group)
+                min_warnings = min(len(group), min_warnings)
+            if len(groups[min_warnings]) > 1:
+                by_line = sorted(groups[min_warnings],
+                                 key=lambda group: min(warning[0].lineno for warning in group))
+                warnings = itertools.chain(*by_line[1:])
+            else:
+                warnings = groups[min_warnings][0]
+            for args in warnings:
+                self._raise_name_warning(*args)
+
+    @check_messages('blacklisted-name', 'invalid-name')
     def visit_class(self, node):
         self._check_name('class', node.name, node)
-        for attr, anodes in node.instance_attrs.items():
-            self._check_name('attr', attr, anodes[0])
+        for attr, anodes in six.iteritems(node.instance_attrs):
+            if not list(node.instance_attr_ancestors(attr)):
+                self._check_name('attr', attr, anodes[0])
 
-    @check_messages('C0102', 'C0103')
+    @check_messages('blacklisted-name', 'invalid-name')
     def visit_function(self, node):
-        self._check_name(node.is_method() and 'method' or 'function',
-                         node.name, node)
-        # check arguments name
+        # Do not emit any warnings if the method is just an implementation
+        # of a base class method.
+        confidence = HIGH
+        if node.is_method():
+            if overrides_a_method(node.parent.frame(), node.name):
+                return
+            confidence = (INFERENCE if has_known_bases(node.parent.frame())
+                          else INFERENCE_FAILURE)
+
+        self._check_name(_determine_function_name_type(node),
+                         node.name, node, confidence)
+        # Check argument names
         args = node.args.args
         if args is not None:
             self._recursive_check_names(args, node)
 
-    @check_messages('C0102', 'C0103')
+    @check_messages('blacklisted-name', 'invalid-name')
+    def visit_global(self, node):
+        for name in node.names:
+            self._check_name('const', name, node)
+
+    @check_messages('blacklisted-name', 'invalid-name')
     def visit_assname(self, node):
         """check module level assigned names"""
         frame = node.frame()
         ass_type = node.ass_type()
-        if isinstance(ass_type, (astng.Comprehension, astng.Comprehension)):
+        if isinstance(ass_type, astroid.Comprehension):
             self._check_name('inlinevar', node.name, node)
-        elif isinstance(frame, astng.Module):
-            if isinstance(ass_type, astng.Assign) and not in_loop(ass_type):
-                self._check_name('const', node.name, node)
-            elif isinstance(ass_type, astng.ExceptHandler):
+        elif isinstance(frame, astroid.Module):
+            if isinstance(ass_type, astroid.Assign) and not in_loop(ass_type):
+                if isinstance(safe_infer(ass_type.value), astroid.Class):
+                    self._check_name('class', node.name, node)
+                else:
+                    if not _redefines_import(node):
+                        # Don't emit if the name redefines an import
+                        # in an ImportError except handler.
+                        self._check_name('const', node.name, node)
+            elif isinstance(ass_type, astroid.ExceptHandler):
                 self._check_name('variable', node.name, node)
-        elif isinstance(frame, astng.Function):
+        elif isinstance(frame, astroid.Function):
             # global introduced variable aren't in the function locals
-            if node.name in frame:
-                self._check_name('variable', node.name, node)
+            if node.name in frame and node.name not in frame.argnames():
+                if not _redefines_import(node):
+                    self._check_name('variable', node.name, node)
+        elif isinstance(frame, astroid.Class):
+            if not list(frame.local_attr_ancestors(node.name)):
+                self._check_name('class_attribute', node.name, node)
 
     def _recursive_check_names(self, args, node):
         """check names in a possibly recursive list <arg>"""
         for arg in args:
-            if isinstance(arg, astng.AssName):
+            if isinstance(arg, astroid.AssName):
                 self._check_name('argument', arg.name, node)
             else:
                 self._recursive_check_names(arg.elts, node)
 
-    def _check_name(self, node_type, name, node):
+    def _find_name_group(self, node_type):
+        return self._name_group.get(node_type, node_type)
+
+    def _raise_name_warning(self, node, node_type, name, confidence):
+        type_label = _NAME_TYPES[node_type][1]
+        hint = ''
+        if self.config.include_naming_hint:
+            hint = ' (hint: %s)' % (getattr(self.config, node_type + '_name_hint'))
+        self.add_message('invalid-name', node=node, args=(type_label, name, hint),
+                         confidence=confidence)
+        self.stats['badname_' + node_type] += 1
+
+    def _check_name(self, node_type, name, node, confidence=HIGH):
         """check for a name using the type's regexp"""
         if is_inside_except(node):
             clobbering, _ = clobber_in_except(node)
             if clobbering:
                 return
-
         if name in self.config.good_names:
             return
         if name in self.config.bad_names:
             self.stats['badname_' + node_type] += 1
-            self.add_message('C0102', node=node, args=name)
+            self.add_message('blacklisted-name', node=node, args=name)
             return
         regexp = getattr(self.config, node_type + '_rgx')
-        if regexp.match(name) is None:
-            self.add_message('C0103', node=node, args=(name, regexp.pattern))
-            self.stats['badname_' + node_type] += 1
+        match = regexp.match(name)
+
+        if _is_multi_naming_match(match, node_type, confidence):
+            name_group = self._find_name_group(node_type)
+            bad_name_group = self._bad_names.setdefault(name_group, {})
+            warnings = bad_name_group.setdefault(match.lastgroup, [])
+            warnings.append((node, node_type, name, confidence))
+
+        if match is None:
+            self._raise_name_warning(node, node_type, name, confidence)
 
 
 class DocStringChecker(_BasicChecker):
     msgs = {
-    'C0111': ('Missing docstring', # W0131
-              'Used when a module, function, class or method has no docstring.\
-              Some special methods like __init__ doesn\'t necessary require a \
-              docstring.'),
-    'C0112': ('Empty docstring', # W0132
-              'Used when a module, function, class or method has an empty \
-              docstring (it would be too easy ;).'),
-    }
+        'C0111': ('Missing %s docstring', # W0131
+                  'missing-docstring',
+                  'Used when a module, function, class or method has no docstring.'
+                  'Some special methods like __init__ doesn\'t necessary require a '
+                  'docstring.'),
+        'C0112': ('Empty %s docstring', # W0132
+                  'empty-docstring',
+                  'Used when a module, function, class or method has an empty '
+                  'docstring (it would be too easy ;).'),
+        }
     options = (('no-docstring-rgx',
                 {'default' : NO_REQUIRED_DOC_RGX,
                  'type' : 'regexp', 'metavar' : '<regexp>',
                  'help' : 'Regular expression which should only match '
-                          'functions or classes name which do not require a '
-                          'docstring'}
-                ),
-               )
+                          'function or class names that do not require a '
+                          'docstring.'}
+               ),
+               ('docstring-min-length',
+                {'default' : -1,
+                 'type' : 'int', 'metavar' : '<int>',
+                 'help': ('Minimum line length for functions/classes that'
+                          ' require docstrings, shorter ones are exempt.')}
+               ),
+              )
+
 
     def open(self):
         self.stats = self.linter.add_stats(undocumented_module=0,
                                            undocumented_function=0,
                                            undocumented_method=0,
                                            undocumented_class=0)
-
+    @check_messages('missing-docstring', 'empty-docstring')
     def visit_module(self, node):
         self._check_docstring('module', node)
 
+    @check_messages('missing-docstring', 'empty-docstring')
     def visit_class(self, node):
         if self.config.no_docstring_rgx.match(node.name) is None:
             self._check_docstring('class', node)
 
+    @check_messages('missing-docstring', 'empty-docstring')
     def visit_function(self, node):
         if self.config.no_docstring_rgx.match(node.name) is None:
             ftype = node.is_method() and 'method' or 'function'
-            if isinstance(node.parent.frame(), astng.Class):
+            if isinstance(node.parent.frame(), astroid.Class):
                 overridden = False
+                confidence = (INFERENCE if has_known_bases(node.parent.frame())
+                              else INFERENCE_FAILURE)
                 # check if node is from a method overridden by its ancestor
                 for ancestor in node.parent.frame().ancestors():
                     if node.name in ancestor and \
-                       isinstance(ancestor[node.name], astng.Function):
+                       isinstance(ancestor[node.name], astroid.Function):
                         overridden = True
                         break
-                if not overridden:
-                    self._check_docstring(ftype, node)
+                self._check_docstring(ftype, node,
+                                      report_missing=not overridden,
+                                      confidence=confidence)
             else:
                 self._check_docstring(ftype, node)
 
-    def _check_docstring(self, node_type, node):
+    def _check_docstring(self, node_type, node, report_missing=True,
+                         confidence=HIGH):
         """check the node has a non empty docstring"""
         docstring = node.doc
         if docstring is None:
+            if not report_missing:
+                return
+            if node.body:
+                lines = node.body[-1].lineno - node.body[0].lineno + 1
+            else:
+                lines = 0
+            max_lines = self.config.docstring_min_length
+
+            if node_type != 'module' and max_lines > -1 and lines < max_lines:
+                return
             self.stats['undocumented_'+node_type] += 1
-            self.add_message('C0111', node=node)
+            if (node.body and isinstance(node.body[0], astroid.Discard) and
+                    isinstance(node.body[0].value, astroid.CallFunc)):
+                # Most likely a string with a format call. Let's see.
+                func = safe_infer(node.body[0].value.func)
+                if (isinstance(func, astroid.BoundMethod)
+                        and isinstance(func.bound, astroid.Instance)):
+                    # Strings in Python 3, others in Python 2.
+                    if PY3K and func.bound.name == 'str':
+                        return
+                    elif func.bound.name in ('str', 'unicode', 'bytes'):
+                        return
+            self.add_message('missing-docstring', node=node, args=(node_type,),
+                             confidence=confidence)
         elif not docstring.strip():
             self.stats['undocumented_'+node_type] += 1
-            self.add_message('C0112', node=node)
+            self.add_message('empty-docstring', node=node, args=(node_type,),
+                             confidence=confidence)
 
 
 class PassChecker(_BasicChecker):
-    """check is the pass statement is really necessary"""
+    """check if the pass statement is really necessary"""
     msgs = {'W0107': ('Unnecessary pass statement',
+                      'unnecessary-pass',
                       'Used when a "pass" statement that can be avoided is '
-                      'encountered.)'),
-            }
-
+                      'encountered.'),
+           }
+    @check_messages('unnecessary-pass')
     def visit_pass(self, node):
         if len(node.parent.child_sequence(node)) > 1:
-            self.add_message('W0107', node=node)
+            self.add_message('unnecessary-pass', node=node)
+
+
+class LambdaForComprehensionChecker(_BasicChecker):
+    """check for using a lambda where a comprehension would do.
+
+    See <http://www.artima.com/weblogs/viewpost.jsp?thread=98196>
+    where GvR says comprehensions would be clearer.
+    """
+
+    msgs = {'W0110': ('map/filter on lambda could be replaced by comprehension',
+                      'deprecated-lambda',
+                      'Used when a lambda is the first argument to "map" or '
+                      '"filter". It could be clearer as a list '
+                      'comprehension or generator expression.',
+                      {'maxversion': (3, 0)}),
+           }
+
+    @check_messages('deprecated-lambda')
+    def visit_callfunc(self, node):
+        """visit a CallFunc node, check if map or filter are called with a
+        lambda
+        """
+        if not node.args:
+            return
+        if not isinstance(node.args[0], astroid.Lambda):
+            return
+        infered = safe_infer(node.func)
+        if (is_builtin_object(infered)
+                and infered.name in ['map', 'filter']):
+            self.add_message('deprecated-lambda', node=node)
 
 
 def register(linter):
@@ -778,3 +1245,4 @@
     linter.register_checker(NameChecker(linter))
     linter.register_checker(DocStringChecker(linter))
     linter.register_checker(PassChecker(linter))
+    linter.register_checker(LambdaForComprehensionChecker(linter))
diff --git a/third_party/pylint/checkers/classes.py b/third_party/pylint/checkers/classes.py
index 60d20b6..1a10c35 100644
--- a/third_party/pylint/checkers/classes.py
+++ b/third_party/pylint/checkers/classes.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2003-2011 LOGILAB S.A. (Paris, FRANCE).
+# Copyright (c) 2003-2014 LOGILAB S.A. (Paris, FRANCE).
 # http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This program is free software; you can redistribute it and/or modify it under
@@ -12,17 +12,58 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """classes checker for Python code
 """
 from __future__ import generators
 
-from logilab import astng
-from logilab.astng import YES, Instance, are_exclusive
+import sys
+from collections import defaultdict
 
-from pylint.interfaces import IASTNGChecker
+import astroid
+from astroid import YES, Instance, are_exclusive, AssAttr, Class
+from astroid.bases import Generator, BUILTINS
+from astroid.inference import InferenceContext
+
+from pylint.interfaces import IAstroidChecker
 from pylint.checkers import BaseChecker
-from pylint.checkers.utils import PYMETHODS, overrides_a_method, check_messages
+from pylint.checkers.utils import (
+    PYMETHODS, overrides_a_method, check_messages, is_attr_private,
+    is_attr_protected, node_frame_class, safe_infer, is_builtin_object,
+    decorated_with_property, unimplemented_abstract_methods)
+import six
+
+if sys.version_info >= (3, 0):
+    NEXT_METHOD = '__next__'
+else:
+    NEXT_METHOD = 'next'
+ITER_METHODS = ('__iter__', '__getitem__')
+
+def _called_in_methods(func, klass, methods):
+    """ Check if the func was called in any of the given methods,
+    belonging to the *klass*. Returns True if so, False otherwise.
+    """
+    if not isinstance(func, astroid.Function):
+        return False
+    for method in methods:
+        try:
+            infered = klass.getattr(method)
+        except astroid.NotFoundError:
+            continue
+        for infer_method in infered:
+            for callfunc in infer_method.nodes_of_class(astroid.CallFunc):
+                try:
+                    bound = next(callfunc.func.infer())
+                except (astroid.InferenceError, StopIteration):
+                    continue
+                if not isinstance(bound, astroid.BoundMethod):
+                    continue
+                func_obj = bound._proxied
+                if isinstance(func_obj, astroid.UnboundMethod):
+                    func_obj = func_obj._proxied
+                if func_obj.name == func.name:
+                    return True
+    return False
 
 def class_is_abstract(node):
     """return true if the given class node should be considered as an abstract
@@ -34,82 +75,167 @@
                 return True
     return False
 
+def _is_attribute_property(name, klass):
+    """ Check if the given attribute *name* is a property
+    in the given *klass*.
+
+    It will look for `property` calls or for functions
+    with the given name, decorated by `property` or `property`
+    subclasses.
+    Returns ``True`` if the name is a property in the given klass,
+    ``False`` otherwise.
+    """
+
+    try:
+        attributes = klass.getattr(name)
+    except astroid.NotFoundError:
+        return False
+    property_name = "{0}.property".format(BUILTINS)
+    for attr in attributes:
+        try:
+            infered = next(attr.infer())
+        except astroid.InferenceError:
+            continue
+        if (isinstance(infered, astroid.Function) and
+                decorated_with_property(infered)):
+            return True
+        if infered.pytype() == property_name:
+            return True
+    return False
+
 
 MSGS = {
     'F0202': ('Unable to check methods signature (%s / %s)',
-              'Used when PyLint has been unable to check methods signature \
+              'method-check-failed',
+              'Used when Pylint has been unable to check methods signature \
               compatibility for an unexpected reason. Please report this kind \
               if you don\'t make sense of it.'),
 
-    'E0202': ('An attribute affected in %s line %s hide this method',
+    'E0202': ('An attribute defined in %s line %s hides this method',
+              'method-hidden',
               'Used when a class defines a method which is hidden by an '
               'instance attribute from an ancestor class or set by some '
               'client code.'),
     'E0203': ('Access to member %r before its definition line %s',
+              'access-member-before-definition',
               'Used when an instance member is accessed before it\'s actually\
               assigned.'),
     'W0201': ('Attribute %r defined outside __init__',
+              'attribute-defined-outside-init',
               'Used when an instance attribute is defined outside the __init__\
               method.'),
 
     'W0212': ('Access to a protected member %s of a client class', # E0214
+              'protected-access',
               'Used when a protected member (i.e. class member with a name \
               beginning with an underscore) is access outside the class or a \
               descendant of the class where it\'s defined.'),
 
     'E0211': ('Method has no argument',
+              'no-method-argument',
               'Used when a method which should have the bound instance as \
               first argument has no argument defined.'),
     'E0213': ('Method should have "self" as first argument',
+              'no-self-argument',
               'Used when a method has an attribute different the "self" as\
               first argument. This is considered as an error since this is\
               a so common convention that you shouldn\'t break it!'),
-    'C0202': ('Class method should have %s as first argument', # E0212
-              'Used when a class method has an attribute different than "cls"\
-              as first argument, to easily differentiate them from regular \
-              instance methods.'),
-    'C0203': ('Metaclass method should have "mcs" as first argument', # E0214
-              'Used when a metaclass method has an attribute different the \
-              "mcs" as first argument.'),
+    'C0202': ('Class method %s should have %s as first argument', # E0212
+              'bad-classmethod-argument',
+              'Used when a class method has a first argument named differently '
+              'than the value specified in valid-classmethod-first-arg option '
+              '(default to "cls"), recommended to easily differentiate them '
+              'from regular instance methods.'),
+    'C0203': ('Metaclass method %s should have %s as first argument', # E0214
+              'bad-mcs-method-argument',
+              'Used when a metaclass method has a first agument named '
+              'differently than the value specified in valid-classmethod-first'
+              '-arg option (default to "cls"), recommended to easily '
+              'differentiate them from regular instance methods.'),
+    'C0204': ('Metaclass class method %s should have %s as first argument',
+              'bad-mcs-classmethod-argument',
+              'Used when a metaclass class method has a first argument named '
+              'differently than the value specified in valid-metaclass-'
+              'classmethod-first-arg option (default to "mcs"), recommended to '
+              'easily differentiate them from regular instance methods.'),
 
     'W0211': ('Static method with %r as first argument',
-              'Used when a static method has "self" or "cls" as first argument.'
-              ),
+              'bad-staticmethod-argument',
+              'Used when a static method has "self" or a value specified in '
+              'valid-classmethod-first-arg option or '
+              'valid-metaclass-classmethod-first-arg option as first argument.'
+             ),
     'R0201': ('Method could be a function',
+              'no-self-use',
               'Used when a method doesn\'t use its bound instance, and so could\
               be written as a function.'
-              ),
+             ),
 
     'E0221': ('Interface resolved to %s is not a class',
+              'interface-is-not-class',
               'Used when a class claims to implement an interface which is not \
               a class.'),
     'E0222': ('Missing method %r from %s interface',
+              'missing-interface-method',
               'Used when a method declared in an interface is missing from a \
               class implementing this interface'),
-    'W0221': ('Arguments number differs from %s method',
+    'W0221': ('Arguments number differs from %s %r method',
+              'arguments-differ',
               'Used when a method has a different number of arguments than in \
               the implemented interface or in an overridden method.'),
-    'W0222': ('Signature differs from %s method',
+    'W0222': ('Signature differs from %s %r method',
+              'signature-differs',
               'Used when a method signature is different than in the \
               implemented interface or in an overridden method.'),
     'W0223': ('Method %r is abstract in class %r but is not overridden',
+              'abstract-method',
               'Used when an abstract method (i.e. raise NotImplementedError) is \
               not overridden in concrete class.'
-              ),
+             ),
     'F0220': ('failed to resolve interfaces implemented by %s (%s)', # W0224
-              'Used when a PyLint as failed to find interfaces implemented by \
+              'unresolved-interface',
+              'Used when a Pylint as failed to find interfaces implemented by \
                a class'),
 
 
     'W0231': ('__init__ method from base class %r is not called',
+              'super-init-not-called',
               'Used when an ancestor class method has an __init__ method \
               which is not called by a derived class.'),
     'W0232': ('Class has no __init__ method',
+              'no-init',
               'Used when a class has no __init__ method, neither its parent \
               classes.'),
     'W0233': ('__init__ method from a non direct base class %r is called',
+              'non-parent-init-called',
               'Used when an __init__ method is called on a class which is not \
               in the direct ancestors for the analysed class.'),
+    'W0234': ('__iter__ returns non-iterator',
+              'non-iterator-returned',
+              'Used when an __iter__ method returns something which is not an \
+               iterable (i.e. has no `%s` method)' % NEXT_METHOD),
+    'E0235': ('__exit__ must accept 3 arguments: type, value, traceback',
+              'bad-context-manager',
+              'Used when the __exit__ special method, belonging to a \
+               context manager, does not accept 3 arguments \
+               (type, value, traceback).'),
+    'E0236': ('Invalid object %r in __slots__, must contain '
+              'only non empty strings',
+              'invalid-slots-object',
+              'Used when an invalid (non-string) object occurs in __slots__.'),
+    'E0237': ('Assigning to attribute %r not defined in class slots',
+              'assigning-non-slot',
+              'Used when assigning to an attribute not defined '
+              'in the class slots.'),
+    'E0238': ('Invalid __slots__ object',
+              'invalid-slots',
+              'Used when an invalid __slots__ is found in class. '
+              'Only a string, an iterable or a sequence is permitted.'),
+    'E0239': ('Inheriting %r, which is not a class.',
+              'inherit-non-class',
+              'Used when a class inherits from something which is not a '
+              'class.'),
+
 
     }
 
@@ -124,7 +250,7 @@
     * unreachable code
     """
 
-    __implements__ = (IASTNGChecker,)
+    __implements__ = (IAstroidChecker,)
 
     # configuration section name
     name = 'classes'
@@ -134,38 +260,52 @@
     # configuration options
     options = (('ignore-iface-methods',
                 {'default' : (#zope interface
-        'isImplementedBy', 'deferred', 'extends', 'names',
-        'namesAndDescriptions', 'queryDescriptionFor',  'getBases',
-        'getDescriptionFor', 'getDoc', 'getName', 'getTaggedValue',
-        'getTaggedValueTags', 'isEqualOrExtendedBy', 'setTaggedValue',
-        'isImplementedByInstancesOf',
-        # twisted
-        'adaptWith',
-        # logilab.common interface
-        'is_implemented_by'),
+                    'isImplementedBy', 'deferred', 'extends', 'names',
+                    'namesAndDescriptions', 'queryDescriptionFor', 'getBases',
+                    'getDescriptionFor', 'getDoc', 'getName', 'getTaggedValue',
+                    'getTaggedValueTags', 'isEqualOrExtendedBy', 'setTaggedValue',
+                    'isImplementedByInstancesOf',
+                    # twisted
+                    'adaptWith',
+                    # logilab.common interface
+                    'is_implemented_by'),
                  'type' : 'csv',
                  'metavar' : '<method names>',
                  'help' : 'List of interface methods to ignore, \
 separated by a comma. This is used for instance to not check methods defines \
 in Zope\'s Interface base class.'}
-                ),
-
+               ),
                ('defining-attr-methods',
                 {'default' : ('__init__', '__new__', 'setUp'),
                  'type' : 'csv',
                  'metavar' : '<method names>',
                  'help' : 'List of method names used to declare (i.e. assign) \
 instance attributes.'}
-                ),
+               ),
                ('valid-classmethod-first-arg',
                 {'default' : ('cls',),
                  'type' : 'csv',
                  'metavar' : '<argument names>',
                  'help' : 'List of valid names for the first argument in \
 a class method.'}
-                ),
-
-               )
+               ),
+               ('valid-metaclass-classmethod-first-arg',
+                {'default' : ('mcs',),
+                 'type' : 'csv',
+                 'metavar' : '<argument names>',
+                 'help' : 'List of valid names for the first argument in \
+a metaclass class method.'}
+               ),
+               ('exclude-protected',
+                {
+                    'default': (
+                        # namedtuple public API.
+                        '_asdict', '_fields', '_replace', '_source', '_make'),
+                    'type': 'csv',
+                    'metavar': '<protected access exclusions>',
+                    'help': ('List of member names, which should be excluded '
+                             'from the protected access warning.')}
+               ))
 
     def __init__(self, linter=None):
         BaseChecker.__init__(self, linter)
@@ -176,17 +316,37 @@
     def visit_class(self, node):
         """init visit variable _accessed and check interfaces
         """
-        self._accessed.append({})
+        self._accessed.append(defaultdict(list))
         self._check_bases_classes(node)
         self._check_interfaces(node)
         # if not an interface, exception, metaclass
         if node.type == 'class':
             try:
                 node.local_attr('__init__')
-            except astng.NotFoundError:
-                self.add_message('W0232', args=node, node=node)
+            except astroid.NotFoundError:
+                self.add_message('no-init', args=node, node=node)
+        self._check_slots(node)
+        self._check_proper_bases(node)
 
-    @check_messages('E0203', 'W0201')
+    @check_messages('inherit-non-class')
+    def _check_proper_bases(self, node):
+        """
+        Detect that a class inherits something which is not
+        a class or a type.
+        """
+        for base in node.bases:
+            ancestor = safe_infer(base)
+            if ancestor in (YES, None):
+                continue
+            if (isinstance(ancestor, astroid.Instance) and
+                    ancestor.is_subtype_of('%s.type' % (BUILTINS,))):
+                continue
+            if not isinstance(ancestor, astroid.Class):
+                self.add_message('inherit-non-class',
+                                 args=base.as_string(), node=node)
+
+    @check_messages('access-member-before-definition',
+                    'attribute-defined-outside-init')
     def leave_class(self, cnode):
         """close a class node:
         check that instance attributes are defined in __init__ and check
@@ -197,36 +357,48 @@
         if cnode.type != 'metaclass':
             self._check_accessed_members(cnode, accessed)
         # checks attributes are defined in an allowed method such as __init__
-        if 'W0201' not in self.active_msgs:
+        if not self.linter.is_message_enabled('attribute-defined-outside-init'):
             return
         defining_methods = self.config.defining_attr_methods
-        for attr, nodes in cnode.instance_attrs.items():
+        current_module = cnode.root()
+        for attr, nodes in six.iteritems(cnode.instance_attrs):
+            # Skip nodes which are not in the current module: they may screw up
+            # the output, and handling them is not worth the trouble.
             nodes = [n for n in nodes if not
-                    isinstance(n.statement(), (astng.Delete, astng.AugAssign))]
+                     isinstance(n.statement(), (astroid.Delete, astroid.AugAssign))
+                     and n.root() is current_module]
             if not nodes:
                 continue # error detected by typechecking
-            attr_defined = False
+            # check if any method the attr is defined in is a defining method
-            for node in nodes:
-                if node.frame().name in defining_methods:
-                    attr_defined = True
-            if not attr_defined:
-                # check attribute is defined in a parent's __init__
-                for parent in cnode.instance_attr_ancestors(attr):
-                    attr_defined = False
-                    # check if any parent method attr is defined in is a defining method
-                    for node in parent.instance_attrs[attr]:
-                        if node.frame().name in defining_methods:
-                            attr_defined = True
-                    if attr_defined:
-                        # we're done :)
-                        break
-                else:
-                    # check attribute is defined as a class attribute
-                    try:
-                        cnode.local_attr(attr)
-                    except astng.NotFoundError:
-                        self.add_message('W0201', args=attr, node=node)
+            if any(node.frame().name in defining_methods
+                   for node in nodes):
+                continue
+
+            # check attribute is defined in a parent's __init__
+            for parent in cnode.instance_attr_ancestors(attr):
+                attr_defined = False
+                # check if any parent method the attr is defined in is a defining method
+                for node in parent.instance_attrs[attr]:
+                    if node.frame().name in defining_methods:
+                        attr_defined = True
+                if attr_defined:
+                    # we're done :)
+                    break
+            else:
+                # check attribute is defined as a class attribute
+                try:
+                    cnode.local_attr(attr)
+                except astroid.NotFoundError:
+                    for node in nodes:
+                        if node.frame().name not in defining_methods:
+                            # If the attribute was set by a callfunc in any
+                            # of the defining methods, then don't emit
+                            # the warning.
+                            if _called_in_methods(node.frame(), cnode,
+                                                  defining_methods):
+                                continue
+                            self.add_message('attribute-defined-outside-init',
+                                             args=attr, node=node)
 
     def visit_function(self, node):
         """check method arguments, overriding"""
@@ -242,26 +414,125 @@
             return
         # check signature if the method overloads inherited method
         for overridden in klass.local_attr_ancestors(node.name):
-            # get astng for the searched method
+            # get astroid for the searched method
             try:
                 meth_node = overridden[node.name]
             except KeyError:
                 # we have found the method but it's not in the local
                 # dictionary.
-                # This may happen with astng build from living objects
+                # This may happen with astroid build from living objects
                 continue
-            if not isinstance(meth_node, astng.Function):
+            if not isinstance(meth_node, astroid.Function):
                 continue
             self._check_signature(node, meth_node, 'overridden')
             break
-        # check if the method overload an attribute
+        if node.decorators:
+            for decorator in node.decorators.nodes:
+                if isinstance(decorator, astroid.Getattr) and \
+                        decorator.attrname in ('getter', 'setter', 'deleter'):
+                    # attribute assignment will call this method, not hiding it
+                    return
+                if isinstance(decorator, astroid.Name) and decorator.name == 'property':
+                    # attribute assignment will either call a setter or raise
+                    # an attribute error, so it is not hiding the function
+                    return
+        # check if the method is hidden by an attribute
         try:
             overridden = klass.instance_attr(node.name)[0] # XXX
-            args = (overridden.root().name, overridden.fromlineno)
-            self.add_message('E0202', args=args, node=node)
-        except astng.NotFoundError:
+            overridden_frame = overridden.frame()
+            if (isinstance(overridden_frame, astroid.Function)
+                    and overridden_frame.type == 'method'):
+                overridden_frame = overridden_frame.parent.frame()
+            if (isinstance(overridden_frame, Class)
+                    and klass.is_subtype_of(overridden_frame.qname())):
+                args = (overridden.root().name, overridden.fromlineno)
+                self.add_message('method-hidden', args=args, node=node)
+        except astroid.NotFoundError:
             pass
 
+        # check non-iterators in __iter__
+        if node.name == '__iter__':
+            self._check_iter(node)
+        elif node.name == '__exit__':
+            self._check_exit(node)
+
+    def _check_slots(self, node):
+        if '__slots__' not in node.locals:
+            return
+        for slots in node.igetattr('__slots__'):
+            # check if __slots__ is a valid type
+            for meth in ITER_METHODS:
+                try:
+                    slots.getattr(meth)
+                    break
+                except astroid.NotFoundError:
+                    continue
+            else:
+                self.add_message('invalid-slots', node=node)
+                continue
+
+            if isinstance(slots, astroid.Const):
+                # a string, ignore the following checks
+                continue
+            if not hasattr(slots, 'itered'):
+                # we can't obtain the values, maybe a .deque?
+                continue
+
+            if isinstance(slots, astroid.Dict):
+                values = [item[0] for item in slots.items]
+            else:
+                values = slots.itered()
+            if values is YES:
+                return
+
+            for elt in values:
+                try:
+                    self._check_slots_elt(elt)
+                except astroid.InferenceError:
+                    continue
+
+    def _check_slots_elt(self, elt):
+        for infered in elt.infer():
+            if infered is YES:
+                continue
+            if (not isinstance(infered, astroid.Const) or
+                    not isinstance(infered.value, six.string_types)):
+                self.add_message('invalid-slots-object',
+                                 args=infered.as_string(),
+                                 node=elt)
+                continue
+            if not infered.value:
+                self.add_message('invalid-slots-object',
+                                 args=infered.as_string(),
+                                 node=elt)
+
+    def _check_iter(self, node):
+        try:
+            infered = node.infer_call_result(node)
+        except astroid.InferenceError:
+            return
+
+        for infered_node in infered:
+            if (infered_node is YES
+                    or isinstance(infered_node, Generator)):
+                continue
+            if isinstance(infered_node, astroid.Instance):
+                try:
+                    infered_node.local_attr(NEXT_METHOD)
+                except astroid.NotFoundError:
+                    self.add_message('non-iterator-returned',
+                                     node=node)
+                    break
+
+    def _check_exit(self, node):
+        positional = sum(1 for arg in node.args.args if arg.name != 'self')
+        if positional < 3 and not node.args.vararg:
+            self.add_message('bad-context-manager',
+                             node=node)
+        elif positional > 3:
+            self.add_message('bad-context-manager',
+                             node=node)
+
     def leave_function(self, node):
         """on method node, check if this method couldn't be a function
 
@@ -272,15 +543,15 @@
         if node.is_method():
             if node.args.args is not None:
                 self._first_attrs.pop()
-            if 'R0201' not in self.active_msgs:
+            if not self.linter.is_message_enabled('no-self-use'):
                 return
             class_node = node.parent.frame()
             if (self._meth_could_be_func and node.type == 'method'
-                and not node.name in PYMETHODS
-                and not (node.is_abstract() or
-                         overrides_a_method(class_node, node.name))
-                and class_node.type != 'interface'):
-                self.add_message('R0201', node=node)
+                    and not node.name in PYMETHODS
+                    and not (node.is_abstract() or
+                             overrides_a_method(class_node, node.name))
+                    and class_node.type != 'interface'):
+                self.add_message('no-self-use', node=node)
 
     def visit_getattr(self, node):
         """check if the getattr is an access to a class member
@@ -289,32 +560,116 @@
         methods)
         """
         attrname = node.attrname
-        if self._first_attrs and isinstance(node.expr, astng.Name) and \
-               node.expr.name == self._first_attrs[-1]:
-            self._accessed[-1].setdefault(attrname, []).append(node)
+        # Check self
+        if self.is_first_attr(node):
+            self._accessed[-1][attrname].append(node)
             return
-        if 'W0212' not in self.active_msgs:
+        if not self.linter.is_message_enabled('protected-access'):
             return
-        if attrname[0] == '_' and not attrname == '_' and not (
-             attrname.startswith('__') and attrname.endswith('__')):
-            # XXX move this in a reusable function
-            klass = node.frame()
-            while klass is not None and not isinstance(klass, astng.Class):
-                if klass.parent is None:
-                    klass = None
-                else:
-                    klass = klass.parent.frame()
+
+        self._check_protected_attribute_access(node)
+
+    def visit_assattr(self, node):
+        if isinstance(node.ass_type(), astroid.AugAssign) and self.is_first_attr(node):
+            self._accessed[-1][node.attrname].append(node)
+        self._check_in_slots(node)
+
+    def _check_in_slots(self, node):
+        """ Check that the given assattr node
+        is defined in the class slots.
+        """
+        infered = safe_infer(node.expr)
+        if infered and isinstance(infered, Instance):
+            klass = infered._proxied
+            if '__slots__' not in klass.locals or not klass.newstyle:
+                return
+
+            slots = klass.slots()
+            if slots is None:
+                return
+            # If any ancestor doesn't use slots, the slots
+            # defined for this class are superfluous.
+            if any('__slots__' not in ancestor.locals and
+                   ancestor.name != 'object'
+                   for ancestor in klass.ancestors()):
+                return
+
+            if not any(slot.value == node.attrname for slot in slots):
+                # If we have a '__dict__' in slots, then
+                # assigning any name is valid.
+                if not any(slot.value == '__dict__' for slot in slots):
+                    if _is_attribute_property(node.attrname, klass):
+                        # Properties circumvent the slots mechanism,
+                        # so we should not emit a warning for them.
+                        return
+                    self.add_message('assigning-non-slot',
+                                     args=(node.attrname, ), node=node)
+
+    @check_messages('protected-access')
+    def visit_assign(self, assign_node):
+        node = assign_node.targets[0]
+        if not isinstance(node, AssAttr):
+            return
+
+        if self.is_first_attr(node):
+            return
+
+        self._check_protected_attribute_access(node)
+
+    def _check_protected_attribute_access(self, node):
+        '''Given an attribute access node (set or get), check if attribute
+        access is legitimate. Call _check_first_attr with node before calling
+        this method. Valid cases are:
+        * self._attr in a method or cls._attr in a classmethod. Checked by
+        _check_first_attr.
+        * Klass._attr inside "Klass" class.
+        * Klass2._attr inside "Klass" class when Klass2 is a base class of
+            Klass.
+        '''
+        attrname = node.attrname
+
+        if (is_attr_protected(attrname) and
+                attrname not in self.config.exclude_protected):
+
+            klass = node_frame_class(node)
+
             # XXX infer to be more safe and less dirty ??
             # in classes, check we are not getting a parent method
             # through the class object or through super
             callee = node.expr.as_string()
-            if klass is None or not (callee == klass.name or
-                callee in klass.basenames
-                or (isinstance(node.expr, astng.CallFunc)
-                    and isinstance(node.expr.func, astng.Name)
-                    and node.expr.func.name == 'super')):
-                self.add_message('W0212', node=node, args=attrname)
 
+            # We are not in a class, no remaining valid case
+            if klass is None:
+                self.add_message('protected-access', node=node, args=attrname)
+                return
+
+            # If the expression begins with a call to super, that's ok.
+            if isinstance(node.expr, astroid.CallFunc) and \
+               isinstance(node.expr.func, astroid.Name) and \
+               node.expr.func.name == 'super':
+                return
+
+            # We are in a class; one remaining valid case: Klass._attr inside
+            # Klass
+            if not (callee == klass.name or callee in klass.basenames):
+                # Detect property assignments in the body of the class.
+                # This is acceptable:
+                #
+                # class A:
+                #     b = property(lambda: self._b)
+
+                stmt = node.parent.statement()
+                try:
+                    if (isinstance(stmt, astroid.Assign) and
+                            (stmt in klass.body or klass.parent_of(stmt)) and
+                            isinstance(stmt.value, astroid.CallFunc) and
+                            isinstance(stmt.value.func, astroid.Name) and
+                            stmt.value.func.name == 'property' and
+                            is_builtin_object(next(stmt.value.func.infer(), None))):
+                        return
+                except astroid.InferenceError:
+                    pass
+                self.add_message('protected-access', node=node, args=attrname)
 
     def visit_name(self, node):
         """check if the name handle an access to a class member
@@ -327,19 +682,19 @@
     def _check_accessed_members(self, node, accessed):
         """check that accessed members are defined"""
         # XXX refactor, probably much simpler now that E0201 is in type checker
-        for attr, nodes in accessed.items():
+        for attr, nodes in six.iteritems(accessed):
             # deactivate "except doesn't do anything", that's expected
             # pylint: disable=W0704
-            # is it a class attribute ?
             try:
+                # is it a class attribute ?
                 node.local_attr(attr)
                 # yes, stop here
                 continue
-            except astng.NotFoundError:
+            except astroid.NotFoundError:
                 pass
             # is it an instance attribute of a parent class ?
             try:
-                node.instance_attr_ancestors(attr).next()
+                next(node.instance_attr_ancestors(attr))
                 # yes, stop here
                 continue
             except StopIteration:
@@ -347,9 +702,22 @@
             # is it an instance attribute ?
             try:
                 defstmts = node.instance_attr(attr)
-            except astng.NotFoundError:
+            except astroid.NotFoundError:
                 pass
             else:
+                # filter out augment assignment nodes
+                defstmts = [stmt for stmt in defstmts if stmt not in nodes]
+                if not defstmts:
+                    # only augment assignment for this node, no-member should be
+                    # triggered by the typecheck checker
+                    continue
+                # filter defstmts to only pick the first one when there are
+                # several assignments in the same scope
+                scope = defstmts[0].scope()
+                defstmts = [stmt for i, stmt in enumerate(defstmts)
+                            if i == 0 or stmt.scope() is not scope]
+                # if there are still more than one, don't attempt to be smarter
+                # than we can be
                 if len(defstmts) == 1:
                     defstmt = defstmts[0]
                     # check that if the node is accessed in the same method as
@@ -358,16 +726,19 @@
                     lno = defstmt.fromlineno
                     for _node in nodes:
                         if _node.frame() is frame and _node.fromlineno < lno \
-                           and not are_exclusive(_node.statement(), defstmt, ('AttributeError', 'Exception', 'BaseException')):
-                            self.add_message('E0203', node=_node,
-                                             args=(attr, lno))
+                           and not are_exclusive(_node.statement(), defstmt,
+                                                 ('AttributeError', 'Exception', 'BaseException')):
+                            self.add_message('access-member-before-definition',
+                                             node=_node, args=(attr, lno))
 
     def _check_first_arg_for_type(self, node, metaclass=0):
         """check the name of first argument, expect:
 
         * 'self' for a regular method
-        * 'cls' for a class method
-        * 'mcs' for a metaclass
+        * 'cls' for a class method or a metaclass regular method (actually
+          valid-classmethod-first-arg value)
+        * 'mcs' for a metaclass class method (actually
+          valid-metaclass-classmethod-first-arg)
         * not one of the above for a static method
         """
         # don't care about functions with unknown argument (builtins)
@@ -378,48 +749,79 @@
         first = self._first_attrs[-1]
         # static method
         if node.type == 'staticmethod':
-            if first_arg in ('self', 'cls', 'mcs'):
-                self.add_message('W0211', args=first, node=node)
+            if (first_arg == 'self' or
+                    first_arg in self.config.valid_classmethod_first_arg or
+                    first_arg in self.config.valid_metaclass_classmethod_first_arg):
+                self.add_message('bad-staticmethod-argument', args=first, node=node)
+                return
             self._first_attrs[-1] = None
         # class / regular method with no args
         elif not node.args.args:
-            self.add_message('E0211', node=node)
-        # metaclass method
+            self.add_message('no-method-argument', node=node)
+        # metaclass
         elif metaclass:
-            if first != 'mcs':
-                self.add_message('C0203', node=node)
-        # class method
-        elif node.type == 'classmethod':
-            if first not in self.config.valid_classmethod_first_arg:
-                if len(self.config.valid_classmethod_first_arg) == 1:
-                    valid = repr(self.config.valid_classmethod_first_arg[0])
-                else:
-                    valid = ', '.join(
-                      repr(v)
-                      for v in self.config.valid_classmethod_first_arg[:-1])
-                    valid = '%s or %r' % (
-                        valid, self.config.valid_classmethod_first_arg[-1])
-                self.add_message('C0202', args=valid, node=node)
-        # regular method without self as argument
-        elif first != 'self':
-            self.add_message('E0213', node=node)
+            # metaclass __new__ or classmethod
+            if node.type == 'classmethod':
+                self._check_first_arg_config(
+                    first,
+                    self.config.valid_metaclass_classmethod_first_arg, node,
+                    'bad-mcs-classmethod-argument', node.name)
+            # metaclass regular method
+            else:
+                self._check_first_arg_config(
+                    first,
+                    self.config.valid_classmethod_first_arg, node,
+                    'bad-mcs-method-argument',
+                    node.name)
+        # regular class
+        else:
+            # class method
+            if node.type == 'classmethod':
+                self._check_first_arg_config(
+                    first,
+                    self.config.valid_classmethod_first_arg, node,
+                    'bad-classmethod-argument',
+                    node.name)
+            # regular method without self as argument
+            elif first != 'self':
+                self.add_message('no-self-argument', node=node)
+
+    def _check_first_arg_config(self, first, config, node, message,
+                                method_name):
+        if first not in config:
+            if len(config) == 1:
+                valid = repr(config[0])
+            else:
+                valid = ', '.join(repr(v) for v in config[:-1])
+                valid = '%s or %r' % (valid, config[-1])
+            self.add_message(message, args=(method_name, valid), node=node)
 
     def _check_bases_classes(self, node):
         """check that the given class node implements abstract methods from
         base classes
         """
+        def is_abstract(method):
+            return method.is_abstract(pass_is_abstract=False)
+
         # check if this class abstract
         if class_is_abstract(node):
             return
-        for method in node.methods():
+
+        methods = sorted(
+            unimplemented_abstract_methods(node, is_abstract).items(),
+            key=lambda item: item[0],
+        )
+        for name, method in methods:
             owner = method.parent.frame()
             if owner is node:
                 continue
             # owner is not this class, it must be a parent class
             # check that the ancestor's method is not abstract
-            if method.is_abstract(pass_is_abstract=False):
-                self.add_message('W0223', node=node,
-                                 args=(method.name, owner.name))
+            if name in node.locals:
+                # it is redefined as an attribute or with a descriptor
+                continue
+            self.add_message('abstract-method', node=node,
+                             args=(name, owner.name))
 
     def _check_interfaces(self, node):
         """check that the given class node really implements declared
@@ -428,9 +830,9 @@
         e0221_hack = [False]
         def iface_handler(obj):
             """filter interface objects, it should be classes"""
-            if not isinstance(obj, astng.Class):
+            if not isinstance(obj, astroid.Class):
                 e0221_hack[0] = True
-                self.add_message('E0221', node=node,
+                self.add_message('interface-is-not-class', node=node,
                                  args=(obj.as_string(),))
                 return False
             return True
@@ -443,11 +845,12 @@
                         # don't check method beginning with an underscore,
                         # usually belonging to the interface implementation
                         continue
-                    # get class method astng
+                    # get class method astroid
                     try:
                         method = node_method(node, name)
-                    except astng.NotFoundError:
-                        self.add_message('E0222', args=(name, iface.name),
+                    except astroid.NotFoundError:
+                        self.add_message('missing-interface-method',
+                                         args=(name, iface.name),
                                          node=node)
                         continue
                     # ignore inherited methods
@@ -455,63 +858,77 @@
                         continue
                     # check signature
                     self._check_signature(method, imethod,
-                                         '%s interface' % iface.name)
-        except astng.InferenceError:
+                                          '%s interface' % iface.name)
+        except astroid.InferenceError:
             if e0221_hack[0]:
                 return
             implements = Instance(node).getattr('__implements__')[0]
             assignment = implements.parent
-            assert isinstance(assignment, astng.Assign)
+            assert isinstance(assignment, astroid.Assign)
             # assignment.expr can be a Name or a Tuple or whatever.
             # Use as_string() for the message
             # FIXME: in case of multiple interfaces, find which one could not
             #        be resolved
-            self.add_message('F0220', node=implements,
+            self.add_message('unresolved-interface', node=implements,
                              args=(node.name, assignment.value.as_string()))
 
     def _check_init(self, node):
         """check that the __init__ method call super or ancestors'__init__
         method
         """
-        if not set(('W0231', 'W0233')) & self.active_msgs:
+        if (not self.linter.is_message_enabled('super-init-not-called') and
+                not self.linter.is_message_enabled('non-parent-init-called')):
             return
         klass_node = node.parent.frame()
         to_call = _ancestors_to_call(klass_node)
         not_called_yet = dict(to_call)
-        for stmt in node.nodes_of_class(astng.CallFunc):
+        for stmt in node.nodes_of_class(astroid.CallFunc):
             expr = stmt.func
-            if not isinstance(expr, astng.Getattr) \
+            if not isinstance(expr, astroid.Getattr) \
                    or expr.attrname != '__init__':
                 continue
             # skip the test if using super
-            if isinstance(expr.expr, astng.CallFunc) and \
-               isinstance(expr.expr.func, astng.Name) and \
+            if isinstance(expr.expr, astroid.CallFunc) and \
+                   isinstance(expr.expr.func, astroid.Name) and \
                expr.expr.func.name == 'super':
                 return
             try:
-                klass = expr.expr.infer().next()
+                klass = next(expr.expr.infer())
                 if klass is YES:
                     continue
+                # The infered klass can be super(), which was
+                # assigned to a variable and the `__init__` was called later.
+                #
+                # base = super()
+                # base.__init__(...)
+
+                if (isinstance(klass, astroid.Instance) and
+                        isinstance(klass._proxied, astroid.Class) and
+                        is_builtin_object(klass._proxied) and
+                        klass._proxied.name == 'super'):
+                    return
                 try:
                     del not_called_yet[klass]
                 except KeyError:
                     if klass not in to_call:
-                        self.add_message('W0233', node=expr, args=klass.name)
-            except astng.InferenceError:
+                        self.add_message('non-parent-init-called',
+                                         node=expr, args=klass.name)
+            except astroid.InferenceError:
                 continue
-        for klass in not_called_yet.keys():
-            if klass.name == 'object':
+        for klass, method in six.iteritems(not_called_yet):
+            if klass.name == 'object' or method.parent.name == 'object':
                 continue
-            self.add_message('W0231', args=klass.name, node=node)
+            self.add_message('super-init-not-called', args=klass.name, node=node)
 
     def _check_signature(self, method1, refmethod, class_type):
         """check that the signature of the two given methods match
 
         class_type is in 'class', 'interface'
         """
-        if not (isinstance(method1, astng.Function)
-                and isinstance(refmethod, astng.Function)):
-            self.add_message('F0202', args=(method1, refmethod), node=method1)
+        if not (isinstance(method1, astroid.Function)
+                and isinstance(refmethod, astroid.Function)):
+            self.add_message('method-check-failed',
+                             args=(method1, refmethod), node=method1)
             return
         # don't care about functions with unknown argument (builtins)
         if method1.args.args is None or refmethod.args.args is None:
@@ -519,11 +936,23 @@
         # if we use *args, **kwargs, skip the below checks
         if method1.args.vararg or method1.args.kwarg:
             return
+        if is_attr_private(method1.name):
+            return
         if len(method1.args.args) != len(refmethod.args.args):
-            self.add_message('W0221', args=class_type, node=method1)
+            self.add_message('arguments-differ',
+                             args=(class_type, method1.name),
+                             node=method1)
         elif len(method1.args.defaults) < len(refmethod.args.defaults):
-            self.add_message('W0222', args=class_type, node=method1)
+            self.add_message('signature-differs',
+                             args=(class_type, method1.name),
+                             node=method1)
 
+    def is_first_attr(self, node):
+        """Check that attribute lookup name use first attribute variable name
+        (self for method, cls for classmethod and mcs for metaclass).
+        """
+        return self._first_attrs and isinstance(node.expr, astroid.Name) and \
+                   node.expr.name == self._first_attrs[-1]
 
 def _ancestors_to_call(klass_node, method='__init__'):
     """return a dictionary where keys are the list of base classes providing
@@ -532,21 +961,20 @@
     to_call = {}
     for base_node in klass_node.ancestors(recurs=False):
         try:
-            base_node.local_attr(method)
-            to_call[base_node] = 1
-        except astng.NotFoundError:
+            to_call[base_node] = next(base_node.igetattr(method))
+        except astroid.InferenceError:
             continue
     return to_call
 
 
 def node_method(node, method_name):
-    """get astng for <method_name> on the given class node, ensuring it
+    """get astroid for <method_name> on the given class node, ensuring it
     is a Function node
     """
     for n in node.local_attr(method_name):
-        if isinstance(n, astng.Function):
+        if isinstance(n, astroid.Function):
             return n
-    raise astng.NotFoundError(method_name)
+    raise astroid.NotFoundError(method_name)
 
 def register(linter):
     """required method to auto register this checker """
diff --git a/third_party/pylint/checkers/design_analysis.py b/third_party/pylint/checkers/design_analysis.py
index 0deb6c7..0a7a307 100644
--- a/third_party/pylint/checkers/design_analysis.py
+++ b/third_party/pylint/checkers/design_analysis.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2003-2006 LOGILAB S.A. (Paris, FRANCE).
+# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
 # http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This program is free software; you can redistribute it and/or modify it under
@@ -12,24 +12,22 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
-"""check for signs of poor design
-
-
- see http://intranet.logilab.fr/jpl/view?rql=Any%20X%20where%20X%20eid%201243
- FIXME: missing 13, 15, 16
-"""
-
-from logilab.astng import Function, If, InferenceError
-
-from pylint.interfaces import IASTNGChecker
-from pylint.checkers import BaseChecker
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+"""check for signs of poor design"""
 
 import re
+from collections import defaultdict
+
+from astroid import Function, If, InferenceError
+
+from pylint.interfaces import IAstroidChecker
+from pylint.checkers import BaseChecker
+from pylint.checkers.utils import check_messages
 
 # regexp for ignored argument name
 IGNORED_ARGUMENT_NAMES = re.compile('_.*')
 
+
 def class_is_abstract(klass):
     """return true if the given class node should be considered as an abstract
     class
@@ -43,49 +41,61 @@
 
 MSGS = {
     'R0901': ('Too many ancestors (%s/%s)',
+              'too-many-ancestors',
               'Used when class has too many parent classes, try to reduce \
-              this to get a more simple (and so easier to use) class.'),
+              this to get a simpler (and so easier to use) class.'),
     'R0902': ('Too many instance attributes (%s/%s)',
+              'too-many-instance-attributes',
               'Used when class has too many instance attributes, try to reduce \
-              this to get a more simple (and so easier to use) class.'),
+              this to get a simpler (and so easier to use) class.'),
     'R0903': ('Too few public methods (%s/%s)',
+              'too-few-public-methods',
               'Used when class has too few public methods, so be sure it\'s \
               really worth it.'),
     'R0904': ('Too many public methods (%s/%s)',
+              'too-many-public-methods',
               'Used when class has too many public methods, try to reduce \
-              this to get a more simple (and so easier to use) class.'),
-    
+              this to get a simpler (and so easier to use) class.'),
+
     'R0911': ('Too many return statements (%s/%s)',
+              'too-many-return-statements',
               'Used when a function or method has too many return statement, \
               making it hard to follow.'),
     'R0912': ('Too many branches (%s/%s)',
+              'too-many-branches',
               'Used when a function or method has too many branches, \
               making it hard to follow.'),
     'R0913': ('Too many arguments (%s/%s)',
+              'too-many-arguments',
               'Used when a function or method takes too many arguments.'),
     'R0914': ('Too many local variables (%s/%s)',
+              'too-many-locals',
               'Used when a function or method has too many local variables.'),
     'R0915': ('Too many statements (%s/%s)',
+              'too-many-statements',
               'Used when a function or method has too many statements. You \
               should then split it in smaller functions / methods.'),
-    
+
     'R0921': ('Abstract class not referenced',
+              'abstract-class-not-used',
               'Used when an abstract class is not used as ancestor anywhere.'),
     'R0922': ('Abstract class is only referenced %s times',
+              'abstract-class-little-used',
               'Used when an abstract class is used less than X times as \
               ancestor.'),
     'R0923': ('Interface not implemented',
+              'interface-not-implemented',
               'Used when an interface class is not implemented anywhere.'),
     }
 
 
 class MisdesignChecker(BaseChecker):
-    """checks for sign of poor/misdesign:                                      
-    * number of methods, attributes, local variables...                        
-    * size, complexity of functions, methods                                   
+    """checks for sign of poor/misdesign:
+    * number of methods, attributes, local variables...
+    * size, complexity of functions, methods
     """
-    
-    __implements__ = (IASTNGChecker,)
+
+    __implements__ = (IAstroidChecker,)
 
     # configuration section name
     name = 'design'
@@ -96,107 +106,111 @@
     options = (('max-args',
                 {'default' : 5, 'type' : 'int', 'metavar' : '<int>',
                  'help': 'Maximum number of arguments for function / method'}
-                ),
+               ),
                ('ignored-argument-names',
                 {'default' : IGNORED_ARGUMENT_NAMES,
                  'type' :'regexp', 'metavar' : '<regexp>',
                  'help' : 'Argument names that match this expression will be '
                           'ignored. Default to name with leading underscore'}
-                ),
+               ),
                ('max-locals',
                 {'default' : 15, 'type' : 'int', 'metavar' : '<int>',
                  'help': 'Maximum number of locals for function / method body'}
-                ),
+               ),
                ('max-returns',
                 {'default' : 6, 'type' : 'int', 'metavar' : '<int>',
                  'help': 'Maximum number of return / yield for function / '
                          'method body'}
-                ),
-               ('max-branchs',
+               ),
+               ('max-branches',
                 {'default' : 12, 'type' : 'int', 'metavar' : '<int>',
                  'help': 'Maximum number of branch for function / method body'}
-                ),
+               ),
                ('max-statements',
                 {'default' : 50, 'type' : 'int', 'metavar' : '<int>',
                  'help': 'Maximum number of statements in function / method '
                          'body'}
-                ),
+               ),
                ('max-parents',
                 {'default' : 7,
                  'type' : 'int',
                  'metavar' : '<num>',
                  'help' : 'Maximum number of parents for a class (see R0901).'}
-                ),
+               ),
                ('max-attributes',
                 {'default' : 7,
                  'type' : 'int',
                  'metavar' : '<num>',
                  'help' : 'Maximum number of attributes for a class \
 (see R0902).'}
-                ),
+               ),
                ('min-public-methods',
                 {'default' : 2,
                  'type' : 'int',
                  'metavar' : '<num>',
                  'help' : 'Minimum number of public methods for a class \
 (see R0903).'}
-                ),
+               ),
                ('max-public-methods',
                 {'default' : 20,
                  'type' : 'int',
                  'metavar' : '<num>',
                  'help' : 'Maximum number of public methods for a class \
 (see R0904).'}
-                ),
-               )
+               ),
+              )
 
     def __init__(self, linter=None):
         BaseChecker.__init__(self, linter)
         self.stats = None
         self._returns = None
-        self._branchs = None
+        self._branches = None
         self._used_abstracts = None
         self._used_ifaces = None
         self._abstracts = None
         self._ifaces = None
         self._stmts = 0
-        
+
     def open(self):
         """initialize visit variables"""
         self.stats = self.linter.add_stats()
         self._returns = []
-        self._branchs = []
+        self._branches = defaultdict(int)
         self._used_abstracts = {}
         self._used_ifaces = {}
         self._abstracts = []
         self._ifaces = []
 
+    # Check 'R0921', 'R0922', 'R0923'
     def close(self):
         """check that abstract/interface classes are used"""
         for abstract in self._abstracts:
             if not abstract in self._used_abstracts:
-                self.add_message('R0921', node=abstract)
+                self.add_message('abstract-class-not-used', node=abstract)
             elif self._used_abstracts[abstract] < 2:
-                self.add_message('R0922', node=abstract,
+                self.add_message('abstract-class-little-used', node=abstract,
                                  args=self._used_abstracts[abstract])
         for iface in self._ifaces:
             if not iface in self._used_ifaces:
-                self.add_message('R0923', node=iface)
-                
+                self.add_message('interface-not-implemented', node=iface)
+
+    @check_messages('too-many-ancestors', 'too-many-instance-attributes',
+                    'too-few-public-methods', 'too-many-public-methods',
+                    'abstract-class-not-used', 'abstract-class-little-used',
+                    'interface-not-implemented')
     def visit_class(self, node):
         """check size of inheritance hierarchy and number of instance attributes
         """
-        self._inc_branch()
         # Is the total inheritance hierarchy is 7 or less?
         nb_parents = len(list(node.ancestors()))
         if nb_parents > self.config.max_parents:
-            self.add_message('R0901', node=node,
+            self.add_message('too-many-ancestors', node=node,
                              args=(nb_parents, self.config.max_parents))
         # Does the class contain less than 20 attributes for
         # non-GUI classes (40 for GUI)?
         # FIXME detect gui classes
         if len(node.instance_attrs) > self.config.max_attributes:
-            self.add_message('R0902', node=node,
+            self.add_message('too-many-instance-attributes', node=node,
                              args=(len(node.instance_attrs),
                                    self.config.max_attributes))
         # update abstract / interface classes structures
@@ -212,23 +226,27 @@
             for iface in node.interfaces():
                 self._used_ifaces[iface] = 1
         except InferenceError:
-            # XXX log ? 
+            # XXX log ?
             pass
         for parent in node.ancestors():
             try:
                 self._used_abstracts[parent] += 1
             except KeyError:
                 self._used_abstracts[parent] = 1
-            
+
+    @check_messages('too-many-ancestors', 'too-many-instance-attributes',
+                    'too-few-public-methods', 'too-many-public-methods',
+                    'abstract-class-not-used', 'abstract-class-little-used',
+                    'interface-not-implemented')
     def leave_class(self, node):
         """check number of public methods"""
         nb_public_methods = 0
-        for method in node.methods():
+        for method in node.mymethods():
             if not method.name.startswith('_'):
                 nb_public_methods += 1
         # Does the class contain less than 20 public methods ?
         if nb_public_methods > self.config.max_public_methods:
-            self.add_message('R0904', node=node,
+            self.add_message('too-many-public-methods', node=node,
                              args=(nb_public_methods,
                                    self.config.max_public_methods))
         # stop here for exception, metaclass and interface classes
@@ -236,19 +254,19 @@
             return
         # Does the class contain more than 5 public methods ?
         if nb_public_methods < self.config.min_public_methods:
-            self.add_message('R0903', node=node,
+            self.add_message('too-few-public-methods', node=node,
                              args=(nb_public_methods,
                                    self.config.min_public_methods))
 
-        
+    @check_messages('too-many-return-statements', 'too-many-branches',
+                    'too-many-arguments', 'too-many-locals',
+                    'too-many-statements')
     def visit_function(self, node):
         """check function name, docstring, arguments, redefinition,
         variable names, max locals
         """
-        self._inc_branch()
         # init branch and returns counters
         self._returns.append(0)
-        self._branchs.append(0)
         # check number of arguments
         args = node.args.args
         if args is not None:
@@ -257,33 +275,36 @@
                  if self.config.ignored_argument_names.match(arg.name)])
             argnum = len(args) - ignored_args_num
             if  argnum > self.config.max_args:
-                self.add_message('R0913', node=node,
+                self.add_message('too-many-arguments', node=node,
                                  args=(len(args), self.config.max_args))
         else:
             ignored_args_num = 0
         # check number of local variables
         locnum = len(node.locals) - ignored_args_num
         if locnum > self.config.max_locals:
-            self.add_message('R0914', node=node,
+            self.add_message('too-many-locals', node=node,
                              args=(locnum, self.config.max_locals))
         # init statements counter
         self._stmts = 1
 
+    @check_messages('too-many-return-statements', 'too-many-branches',
+                    'too-many-arguments', 'too-many-locals',
+                    'too-many-statements')
     def leave_function(self, node):
         """most of the work is done here on close:
         checks for max returns, branch, return in __init__
         """
         returns = self._returns.pop()
         if returns > self.config.max_returns:
-            self.add_message('R0911', node=node,
+            self.add_message('too-many-return-statements', node=node,
                              args=(returns, self.config.max_returns))
-        branchs = self._branchs.pop()
-        if branchs > self.config.max_branchs:
-            self.add_message('R0912', node=node,
-                             args=(branchs, self.config.max_branchs))
+        branches = self._branches[node]
+        if branches > self.config.max_branches:
+            self.add_message('too-many-branches', node=node,
+                             args=(branches, self.config.max_branches))
         # check number of statements
         if self._stmts > self.config.max_statements:
-            self.add_message('R0915', node=node,
+            self.add_message('too-many-statements', node=node,
                              args=(self._stmts, self.config.max_statements))
 
     def visit_return(self, _):
@@ -291,7 +312,7 @@
         if not self._returns:
             return # return outside function, reported by the base checker
         self._returns[-1] += 1
-        
+
     def visit_default(self, node):
         """default visit method -> increments the statements counter if
         necessary
@@ -300,42 +321,40 @@
             self._stmts += 1
 
     def visit_tryexcept(self, node):
-        """increments the branchs counter"""
-        branchs = len(node.handlers)
+        """increments the branches counter"""
+        branches = len(node.handlers)
         if node.orelse:
-            branchs += 1
-        self._inc_branch(branchs)
-        self._stmts += branchs
-        
-    def visit_tryfinally(self, _):
-        """increments the branchs counter"""
-        self._inc_branch(2)
+            branches += 1
+        self._inc_branch(node, branches)
+        self._stmts += branches
+
+    def visit_tryfinally(self, node):
+        """increments the branches counter"""
+        self._inc_branch(node, 2)
         self._stmts += 2
-        
+
     def visit_if(self, node):
-        """increments the branchs counter"""
-        branchs = 1
+        """increments the branches counter"""
+        branches = 1
         # don't double count If nodes coming from some 'elif'
-        if node.orelse and (len(node.orelse)>1 or
+        if node.orelse and (len(node.orelse) > 1 or
                             not isinstance(node.orelse[0], If)):
-            branchs += 1
-        self._inc_branch(branchs)
-        self._stmts += branchs
-        
+            branches += 1
+        self._inc_branch(node, branches)
+        self._stmts += branches
+
     def visit_while(self, node):
-        """increments the branchs counter"""
-        branchs = 1
+        """increments the branches counter"""
+        branches = 1
         if node.orelse:
-            branchs += 1
-        self._inc_branch(branchs)
-        
+            branches += 1
+        self._inc_branch(node, branches)
+
     visit_for = visit_while
 
-    def _inc_branch(self, branchsnum=1):
-        """increments the branchs counter"""
-        branchs = self._branchs
-        for i in xrange(len(branchs)):
-            branchs[i] += branchsnum
+    def _inc_branch(self, node, branchesnum=1):
+        """increments the branches counter"""
+        self._branches[node.scope()] += branchesnum
 
     # FIXME: make a nice report...
 
diff --git a/third_party/pylint/checkers/exceptions.py b/third_party/pylint/checkers/exceptions.py
index 08f4334..88a8f22 100644
--- a/third_party/pylint/checkers/exceptions.py
+++ b/third_party/pylint/checkers/exceptions.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2003-2007 LOGILAB S.A. (Paris, FRANCE).
+# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
 # http://www.logilab.fr/ -- mailto:contact@logilab.fr
 # This program is free software; you can redistribute it and/or modify it under
 # the terms of the GNU General Public License as published by the Free Software
@@ -11,68 +11,110 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """exceptions handling (raising, catching, exceptions classes) checker
 """
 import sys
 
+import astroid
+from astroid import YES, Instance, unpack_infer, List, Tuple
 from logilab.common.compat import builtins
-BUILTINS_NAME = builtins.__name__
-from logilab import astng
-from logilab.astng import YES, Instance, unpack_infer
 
 from pylint.checkers import BaseChecker
-from pylint.checkers.utils import is_empty, is_raising
-from pylint.interfaces import IASTNGChecker
+from pylint.checkers.utils import (
+    is_empty,
+    is_raising,
+    check_messages,
+    inherit_from_std_ex,
+    EXCEPTIONS_MODULE,
+    has_known_bases,
+    safe_infer)
+from pylint.interfaces import IAstroidChecker, INFERENCE, INFERENCE_FAILURE
 
 
+def _annotated_unpack_infer(stmt, context=None):
+    """
+    Recursively generate nodes inferred by the given statement.
+    If the inferred value is a list or a tuple, recurse on the elements.
+    Returns an iterator which yields tuples in the format
+    ('original node', 'inferred node').
+    """
+    if isinstance(stmt, (List, Tuple)):
+        for elt in stmt.elts:
+            inferred = safe_infer(elt)
+            if inferred and inferred is not YES:
+                yield elt, inferred
+        return
+    for infered in stmt.infer(context):
+        if infered is YES:
+            continue
+        yield stmt, infered
+
+
+PY3K = sys.version_info >= (3, 0)
 OVERGENERAL_EXCEPTIONS = ('Exception',)
-
+BUILTINS_NAME = builtins.__name__
 MSGS = {
-    'E0701': (
-    'Bad except clauses order (%s)',
-    'Used when except clauses are not in the correct order (from the \
-    more specific to the more generic). If you don\'t fix the order, \
-    some exceptions may not be catched by the most specific handler.'),
-    'E0702': ('Raising %s while only classes, instances or string are allowed',
+    'E0701': ('Bad except clauses order (%s)',
+              'bad-except-order',
+              'Used when except clauses are not in the correct order (from the '
+              'more specific to the more generic). If you don\'t fix the order, '
+              'some exceptions may not be caught by the most specific handler.'),
+    'E0702': ('Raising %s while only classes or instances are allowed',
+              'raising-bad-type',
               'Used when something which is neither a class, an instance or a \
               string is raised (i.e. a `TypeError` will be raised).'),
+    'E0703': ('Exception context set to something which is not an '
+              'exception, nor None',
+              'bad-exception-context',
+              'Used when using the syntax "raise ... from ...", '
+              'where the exception context is not an exception, '
+              'nor None.',
+              {'minversion': (3, 0)}),
     'E0710': ('Raising a new style class which doesn\'t inherit from BaseException',
+              'raising-non-exception',
               'Used when a new style class which doesn\'t inherit from \
                BaseException is raised.'),
     'E0711': ('NotImplemented raised - should raise NotImplementedError',
+              'notimplemented-raised',
               'Used when NotImplemented is raised instead of \
               NotImplementedError'),
-    
-    'W0701': ('Raising a string exception',
-              'Used when a string exception is raised.'),
+    'E0712': ('Catching an exception which doesn\'t inherit from BaseException: %s',
+              'catching-non-exception',
+              'Used when a class which doesn\'t inherit from \
+               BaseException is used as an exception in an except clause.'),
     'W0702': ('No exception type(s) specified',
+              'bare-except',
               'Used when an except clause doesn\'t specify exceptions type to \
               catch.'),
     'W0703': ('Catching too general exception %s',
+              'broad-except',
               'Used when an except catches a too general exception, \
               possibly burying unrelated errors.'),
     'W0704': ('Except doesn\'t do anything',
+              'pointless-except',
               'Used when an except clause does nothing but "pass" and there is\
               no "else" clause.'),
     'W0710': ('Exception doesn\'t inherit from standard "Exception" class',
+              'nonstandard-exception',
               'Used when a custom exception class is raised but doesn\'t \
-              inherit from the builtin "Exception" class.'),
+              inherit from the builtin "Exception" class.',
+              {'maxversion': (3, 0)}),
+    'W0711': ('Exception to catch is the result of a binary "%s" operation',
+              'binary-op-exception',
+              'Used when the exception to catch is of the form \
+              "except A or B:".  If intending to catch multiple, \
+              rewrite as "except (A, B):"'),
     }
 
 
-if sys.version_info < (3, 0):
-    EXCEPTIONS_MODULE = "exceptions"
-else:
-    EXCEPTIONS_MODULE = "builtins"
-
 class ExceptionsChecker(BaseChecker):
-    """checks for                                                              
-    * excepts without exception filter                                         
+    """checks for
+    * excepts without exception filter
     * type of raise argument : string, Exceptions, other values
     """
-    
-    __implements__ = IASTNGChecker
+
+    __implements__ = IAstroidChecker
 
     name = 'exceptions'
     msgs = MSGS
@@ -83,114 +125,207 @@
                  'help' : 'Exceptions that will emit a warning '
                           'when being caught. Defaults to "%s"' % (
                               ', '.join(OVERGENERAL_EXCEPTIONS),)}
-                ),
-               )
+               ),
+              )
 
+    @check_messages('nonstandard-exception',
+                    'raising-bad-type', 'raising-non-exception',
+                    'notimplemented-raised', 'bad-exception-context')
     def visit_raise(self, node):
         """visit raise possibly inferring value"""
         # ignore empty raise
         if node.exc is None:
             return
+        if PY3K and node.cause:
+            self._check_bad_exception_context(node)
+
         expr = node.exc
         if self._check_raise_value(node, expr):
             return
         else:
             try:
-                value = unpack_infer(expr).next()
-            except astng.InferenceError:
+                value = next(unpack_infer(expr))
+            except astroid.InferenceError:
                 return
             self._check_raise_value(node, value)
 
+    def _check_bad_exception_context(self, node):
+        """Verify that the exception context is properly set.
+
+        An exception context can be only `None` or an exception.
+        """
+        cause = safe_infer(node.cause)
+        if cause in (YES, None):
+            return
+        if isinstance(cause, astroid.Const):
+            if cause.value is not None:
+                self.add_message('bad-exception-context',
+                                 node=node)
+        elif (not isinstance(cause, astroid.Class) and
+              not inherit_from_std_ex(cause)):
+            self.add_message('bad-exception-context',
+                             node=node)
+
     def _check_raise_value(self, node, expr):
         """check for bad values, string exception and class inheritance
         """
         value_found = True
-        if isinstance(expr, astng.Const):
+        if isinstance(expr, astroid.Const):
             value = expr.value
-            if isinstance(value, str):
-                self.add_message('W0701', node=node)
-            else:
-                self.add_message('E0702', node=node,
+            if not isinstance(value, str):
+                # raising-string will be emitted from python3 porting checker.
+                self.add_message('raising-bad-type', node=node,
                                  args=value.__class__.__name__)
-        elif (isinstance(expr, astng.Name) and \
-                 expr.name in ('None', 'True', 'False')) or \
-                 isinstance(expr, (astng.List, astng.Dict, astng.Tuple, 
-                                   astng.Module, astng.Function)):
-            self.add_message('E0702', node=node, args=expr.name)
-        elif ( (isinstance(expr, astng.Name) and expr.name == 'NotImplemented')
-               or (isinstance(expr, astng.CallFunc) and
-                   isinstance(expr.func, astng.Name) and
-                   expr.func.name == 'NotImplemented') ):
-            self.add_message('E0711', node=node)
-        elif isinstance(expr, astng.BinOp) and expr.op == '%':
-            self.add_message('W0701', node=node)
-        elif isinstance(expr, (Instance, astng.Class)):
+        elif ((isinstance(expr, astroid.Name) and
+               expr.name in ('None', 'True', 'False')) or
+              isinstance(expr, (astroid.List, astroid.Dict, astroid.Tuple,
+                                astroid.Module, astroid.Function))):
+            emit = True
+            if not PY3K and isinstance(expr, astroid.Tuple):
+                # On Python 2, using the following is not an error:
+                #    raise (ZeroDivisionError, None)
+                #    raise (ZeroDivisionError, )
+                # What's left to do is to check that the first
+                # argument is indeed an exception.
+                # Verifying the other arguments is not
+                # the scope of this check.
+                first = expr.elts[0]
+                inferred = safe_infer(first)
+                if isinstance(inferred, Instance):
+                    # pylint: disable=protected-access
+                    inferred = inferred._proxied
+                if (inferred is YES or
+                        isinstance(inferred, astroid.Class)
+                        and inherit_from_std_ex(inferred)):
+                    emit = False
+            if emit:
+                self.add_message('raising-bad-type',
+                                 node=node,
+                                 args=expr.name)
+        elif ((isinstance(expr, astroid.Name) and expr.name == 'NotImplemented')
+              or (isinstance(expr, astroid.CallFunc) and
+                  isinstance(expr.func, astroid.Name) and
+                  expr.func.name == 'NotImplemented')):
+            self.add_message('notimplemented-raised', node=node)
+        elif isinstance(expr, (Instance, astroid.Class)):
             if isinstance(expr, Instance):
+                # pylint: disable=protected-access
                 expr = expr._proxied
-            if (isinstance(expr, astng.Class) and
-                    not inherit_from_std_ex(expr) and
-                    expr.root().name != BUILTINS_NAME):
+            if (isinstance(expr, astroid.Class) and
+                    not inherit_from_std_ex(expr)):
                 if expr.newstyle:
-                    self.add_message('E0710', node=node)
+                    self.add_message('raising-non-exception', node=node)
                 else:
-                    self.add_message('W0710', node=node)
+                    if has_known_bases(expr):
+                        confidence = INFERENCE
+                    else:
+                        confidence = INFERENCE_FAILURE
+                    self.add_message(
+                        'nonstandard-exception', node=node,
+                        confidence=confidence)
             else:
                 value_found = False
         else:
             value_found = False
         return value_found
 
+    def _check_catching_non_exception(self, handler, exc, part):
+        if isinstance(exc, astroid.Tuple):
+            # Check if it is a tuple of exceptions.
+            inferred = [safe_infer(elt) for elt in exc.elts]
+            if any(node is astroid.YES for node in inferred):
+                # Don't emit if we don't know every component.
+                return
+            if all(node and inherit_from_std_ex(node)
+                   for node in inferred):
+                return
 
+        if not isinstance(exc, astroid.Class):
+            # Don't emit the warning if the infered stmt
+            # is None, but the exception handler is something else,
+            # maybe it was redefined.
+            if (isinstance(exc, astroid.Const) and
+                    exc.value is None):
+                if ((isinstance(handler.type, astroid.Const) and
+                     handler.type.value is None) or
+                        handler.type.parent_of(exc)):
+                    # If the exception handler catches None or
+                    # the exception component, which is None, is
+                    # defined by the entire exception handler, then
+                    # emit a warning.
+                    self.add_message('catching-non-exception',
+                                     node=handler.type,
+                                     args=(part.as_string(), ))
+            else:
+                self.add_message('catching-non-exception',
+                                 node=handler.type,
+                                 args=(part.as_string(), ))
+            return
+        if (not inherit_from_std_ex(exc) and
+                exc.root().name != BUILTINS_NAME):
+            if has_known_bases(exc):
+                self.add_message('catching-non-exception',
+                                 node=handler.type,
+                                 args=(exc.name, ))
+
+    @check_messages('bare-except', 'broad-except', 'pointless-except',
+                    'binary-op-exception', 'bad-except-order',
+                    'catching-non-exception')
     def visit_tryexcept(self, node):
         """check for empty except"""
         exceptions_classes = []
         nb_handlers = len(node.handlers)
-        for index, handler  in enumerate(node.handlers):
+        for index, handler in enumerate(node.handlers):
             # single except doing nothing but "pass" without else clause
-            if nb_handlers == 1 and is_empty(handler.body) and not node.orelse:
-                self.add_message('W0704', node=handler.type or handler.body[0])
+            if is_empty(handler.body) and not node.orelse:
+                self.add_message('pointless-except',
+                                 node=handler.type or handler.body[0])
             if handler.type is None:
-                if nb_handlers == 1 and not is_raising(handler.body):
-                    self.add_message('W0702', node=handler)
+                if not is_raising(handler.body):
+                    self.add_message('bare-except', node=handler)
                 # check if a "except:" is followed by some other
                 # except
-                elif index < (nb_handlers - 1):
+                if index < (nb_handlers - 1):
                     msg = 'empty except clause should always appear last'
-                    self.add_message('E0701', node=node, args=msg)
+                    self.add_message('bad-except-order', node=node, args=msg)
+
+            elif isinstance(handler.type, astroid.BoolOp):
+                self.add_message('binary-op-exception',
+                                 node=handler, args=handler.type.op)
             else:
                 try:
-                    excs = list(unpack_infer(handler.type))
-                except astng.InferenceError:
+                    excs = list(_annotated_unpack_infer(handler.type))
+                except astroid.InferenceError:
                     continue
-                for exc in excs:
-                    # XXX skip other non class nodes 
-                    if exc is YES or not isinstance(exc, astng.Class):
+                for part, exc in excs:
+                    if exc is YES:
                         continue
+                    if (isinstance(exc, astroid.Instance)
+                            and inherit_from_std_ex(exc)):
+                        # pylint: disable=protected-access
+                        exc = exc._proxied
+
+                    self._check_catching_non_exception(handler, exc, part)
+
+                    if not isinstance(exc, astroid.Class):
+                        continue
+
                     exc_ancestors = [anc for anc in exc.ancestors()
-                                     if isinstance(anc, astng.Class)]
+                                     if isinstance(anc, astroid.Class)]
                     for previous_exc in exceptions_classes:
                         if previous_exc in exc_ancestors:
                             msg = '%s is an ancestor class of %s' % (
                                 previous_exc.name, exc.name)
-                            self.add_message('E0701', node=handler.type, args=msg)
+                            self.add_message('bad-except-order',
+                                             node=handler.type, args=msg)
                     if (exc.name in self.config.overgeneral_exceptions
-                        and exc.root().name == EXCEPTIONS_MODULE
-                        and nb_handlers == 1 and not is_raising(handler.body)):
-                        self.add_message('W0703', args=exc.name, node=handler.type)
-                exceptions_classes += excs
+                            and exc.root().name == EXCEPTIONS_MODULE
+                            and not is_raising(handler.body)):
+                        self.add_message('broad-except',
+                                         args=exc.name, node=handler.type)
 
+                exceptions_classes += [exc for _, exc in excs]
 
-def inherit_from_std_ex(node):
-    """return true if the given class node is subclass of
-    exceptions.Exception
-    """
-    if node.name in ('Exception', 'BaseException') \
-            and node.root().name == EXCEPTIONS_MODULE:
-        return True
-    for parent in node.ancestors(recurs=False):
-        if inherit_from_std_ex(parent):
-            return True
-    return False
 
 def register(linter):
     """required method to auto register this checker"""
diff --git a/third_party/pylint/checkers/format.py b/third_party/pylint/checkers/format.py
index 0784e6a..94a9e8e 100644
--- a/third_party/pylint/checkers/format.py
+++ b/third_party/pylint/checkers/format.py
@@ -1,5 +1,5 @@
-# Copyright (c) 2003-2010 Sylvain Thenault (thenault@gmail.com).
-# Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
+# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
+#
 # This program is free software; you can redistribute it and/or modify it under
 # the terms of the GNU General Public License as published by the Free Software
 # Foundation; either version 2 of the License, or (at your option) any later
@@ -11,7 +11,7 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """Python code format's checker.
 
 By default try to follow Guido's style guide :
@@ -21,139 +21,387 @@
 Some parts of the process_token method is based from The Tab Nanny std module.
 """
 
-import re, sys
+import keyword
+import sys
 import tokenize
-if not hasattr(tokenize, 'NL'):
-    raise ValueError("tokenize.NL doesn't exist -- tokenize module too old")
+from functools import reduce # pylint: disable=redefined-builtin
 
-from logilab.common.textutils import pretty_match
-from logilab.astng import nodes
+import six
+from six.moves import zip, map, filter # pylint: disable=redefined-builtin
 
-from pylint.interfaces import IRawChecker, IASTNGChecker
-from pylint.checkers import BaseRawChecker
+from astroid import nodes
+
+from pylint.interfaces import ITokenChecker, IAstroidChecker, IRawChecker
+from pylint.checkers import BaseTokenChecker
 from pylint.checkers.utils import check_messages
+from pylint.utils import WarningScope, OPTION_RGX
+
+_CONTINUATION_BLOCK_OPENERS = ['elif', 'except', 'for', 'if', 'while', 'def', 'class']
+_KEYWORD_TOKENS = ['assert', 'del', 'elif', 'except', 'for', 'if', 'in', 'not',
+                   'raise', 'return', 'while', 'yield']
+if sys.version_info < (3, 0):
+    _KEYWORD_TOKENS.append('print')
+
+_SPACED_OPERATORS = ['==', '<', '>', '!=', '<>', '<=', '>=',
+                     '+=', '-=', '*=', '**=', '/=', '//=', '&=', '|=', '^=',
+                     '%=', '>>=', '<<=']
+_OPENING_BRACKETS = ['(', '[', '{']
+_CLOSING_BRACKETS = [')', ']', '}']
+_TAB_LENGTH = 8
+
+_EOL = frozenset([tokenize.NEWLINE, tokenize.NL, tokenize.COMMENT])
+_JUNK_TOKENS = (tokenize.COMMENT, tokenize.NL)
+
+# Whitespace checking policy constants
+_MUST = 0
+_MUST_NOT = 1
+_IGNORE = 2
+
+# Whitespace checking config constants
+_DICT_SEPARATOR = 'dict-separator'
+_TRAILING_COMMA = 'trailing-comma'
+_NO_SPACE_CHECK_CHOICES = [_TRAILING_COMMA, _DICT_SEPARATOR]
 
 MSGS = {
     'C0301': ('Line too long (%s/%s)',
+              'line-too-long',
               'Used when a line is longer than a given number of characters.'),
-    'C0302': ('Too many lines in module (%s)', # was W0302
+    'C0302': ('Too many lines in module (%s/%s)', # was W0302
+              'too-many-lines',
               'Used when a module has too much lines, reducing its readability.'
-              ),
-
+             ),
+    'C0303': ('Trailing whitespace',
+              'trailing-whitespace',
+              'Used when there is whitespace between the end of a line and the '
+              'newline.'),
+    'C0304': ('Final newline missing',
+              'missing-final-newline',
+              'Used when the last line in a file is missing a newline.'),
     'W0311': ('Bad indentation. Found %s %s, expected %s',
+              'bad-indentation',
               'Used when an unexpected number of indentation\'s tabulations or '
               'spaces has been found.'),
+    'C0330': ('Wrong %s indentation%s.\n%s%s',
+              'bad-continuation',
+              'TODO'),
     'W0312': ('Found indentation with %ss instead of %ss',
+              'mixed-indentation',
               'Used when there are some mixed tabs and spaces in a module.'),
     'W0301': ('Unnecessary semicolon', # was W0106
+              'unnecessary-semicolon',
               'Used when a statement is ended by a semi-colon (";"), which \
               isn\'t necessary (that\'s python, not C ;).'),
     'C0321': ('More than one statement on a single line',
-              'Used when more than on statement are found on the same line.'),
-    'C0322': ('Operator not preceded by a space\n%s',
-              'Used when one of the following operator (!= | <= | == | >= | < '
-              '| > | = | \+= | -= | \*= | /= | %) is not preceded by a space.'),
-    'C0323': ('Operator not followed by a space\n%s',
-              'Used when one of the following operator (!= | <= | == | >= | < '
-              '| > | = | \+= | -= | \*= | /= | %) is not followed by a space.'),
-    'C0324': ('Comma not followed by a space\n%s',
-              'Used when a comma (",") is not followed by a space.'),
-    }
-
-if sys.version_info < (3, 0):
-
-    MSGS.update({
-    'W0331': ('Use of the <> operator',
-              'Used when the deprecated "<>" operator is used instead \
-              of "!=".'),
-    'W0332': ('Use l as long integer identifier',
+              'multiple-statements',
+              'Used when more than on statement are found on the same line.',
+              {'scope': WarningScope.NODE}),
+    'C0325' : ('Unnecessary parens after %r keyword',
+               'superfluous-parens',
+               'Used when a single item in parentheses follows an if, for, or '
+               'other keyword.'),
+    'C0326': ('%s space %s %s %s\n%s',
+              'bad-whitespace',
+              ('Used when a wrong number of spaces is used around an operator, '
+               'bracket or block opener.'),
+              {'old_names': [('C0323', 'no-space-after-operator'),
+                             ('C0324', 'no-space-after-comma'),
+                             ('C0322', 'no-space-before-operator')]}),
+    'W0332': ('Use of "l" as long integer identifier',
+              'lowercase-l-suffix',
               'Used when a lower case "l" is used to mark a long integer. You '
               'should use a upper case "L" since the letter "l" looks too much '
-              'like the digit "1"'),
-    'W0333': ('Use of the `` operator',
-              'Used when the deprecated "``" (backtick) operator is used '
-              'instead  of the str() function.'),
-    })
-
-# simple quoted string rgx
-SQSTRING_RGX = r'"([^"\\]|\\.)*?"'
-# simple apostrophed rgx
-SASTRING_RGX = r"'([^'\\]|\\.)*?'"
-# triple quoted string rgx
-TQSTRING_RGX = r'"""([^"]|("(?!"")))*?(""")'
-# triple apostrophed string rgx # FIXME english please
-TASTRING_RGX = r"'''([^']|('(?!'')))*?(''')"
-
-# finally, the string regular expression
-STRING_RGX = re.compile('(%s)|(%s)|(%s)|(%s)' % (TQSTRING_RGX, TASTRING_RGX,
-                                                 SQSTRING_RGX, SASTRING_RGX),
-                        re.MULTILINE|re.DOTALL)
-
-COMMENT_RGX = re.compile("#.*$", re.M)
-
-OPERATORS = r'!=|<=|==|>=|<|>|=|\+=|-=|\*=|/=|%'
-
-OP_RGX_MATCH_1 = r'[^(]*(?<!\s|\^|<|>|=|\+|-|\*|/|!|%%|&|\|)(%s).*' % OPERATORS
-OP_RGX_SEARCH_1 = r'(?<!\s|\^|<|>|=|\+|-|\*|/|!|%%|&|\|)(%s)' % OPERATORS
-
-OP_RGX_MATCH_2 = r'[^(]*(%s)(?!\s|=|>|<).*' % OPERATORS
-OP_RGX_SEARCH_2 = r'(%s)(?!\s|=|>)' % OPERATORS
-
-BAD_CONSTRUCT_RGXS = (
-
-    (re.compile(OP_RGX_MATCH_1, re.M),
-     re.compile(OP_RGX_SEARCH_1, re.M),
-     'C0322'),
-
-    (re.compile(OP_RGX_MATCH_2, re.M),
-     re.compile(OP_RGX_SEARCH_2, re.M),
-     'C0323'),
-
-    (re.compile(r'.*,[^(\s|\]|}|\))].*', re.M), 
-     re.compile(r',[^\s)]', re.M),
-     'C0324'),
-    )
+              'like the digit "1"',
+              {'maxversion': (3, 0)}),
+    'C0327': ('Mixed line endings LF and CRLF',
+              'mixed-line-endings',
+              'Used when there are mixed (LF and CRLF) newline signs in a file.'),
+    'C0328': ('Unexpected line ending format. There is \'%s\' while it should be \'%s\'.',
+              'unexpected-line-ending-format',
+              'Used when there is different newline than expected.'),
+    }
 
 
-def get_string_coords(line):
-    """return a list of string positions (tuple (start, end)) in the line
-    """
-    result = []
-    for match in re.finditer(STRING_RGX, line):
-        result.append( (match.start(), match.end()) )
+def _underline_token(token):
+    length = token[3][1] - token[2][1]
+    offset = token[2][1]
+    return token[4] + (' ' * offset) + ('^' * length)
+
+
+def _column_distance(token1, token2):
+    if token1 == token2:
+        return 0
+    if token2[3] < token1[3]:
+        token1, token2 = token2, token1
+    if token1[3][0] != token2[2][0]:
+        return None
+    return token2[2][1] - token1[3][1]
+
+
+def _last_token_on_line_is(tokens, line_end, token):
+    return (line_end > 0 and tokens.token(line_end-1) == token or
+            line_end > 1 and tokens.token(line_end-2) == token
+            and tokens.type(line_end-1) == tokenize.COMMENT)
+
+
+def _token_followed_by_eol(tokens, position):
+    return (tokens.type(position+1) == tokenize.NL or
+            tokens.type(position+1) == tokenize.COMMENT and
+            tokens.type(position+2) == tokenize.NL)
+
+
+def _get_indent_length(line):
+    """Return the length of the indentation on the given token's line."""
+    result = 0
+    for char in line:
+        if char == ' ':
+            result += 1
+        elif char == '\t':
+            result += _TAB_LENGTH
+        else:
+            break
     return result
 
-def in_coords(match, string_coords):
-    """return true if the match is in the string coord"""
-    mstart = match.start()
-    for start, end in string_coords:
-        if mstart >= start and mstart < end:
-            return True
-    return False
 
-def check_line(line):
-    """check a line for a bad construction
-    if it founds one, return a message describing the problem
-    else return None
+def _get_indent_hint_line(bar_positions, bad_position):
+    """Return a line with |s for each of the positions in the given lists."""
+    if not bar_positions:
+        return ''
+    markers = [(pos, '|') for pos in bar_positions]
+    markers.append((bad_position, '^'))
+    markers.sort()
+    line = [' '] * (markers[-1][0] + 1)
+    for position, marker in markers:
+        line[position] = marker
+    return ''.join(line)
+
+
+class _ContinuedIndent(object):
+    __slots__ = ('valid_outdent_offsets',
+                 'valid_continuation_offsets',
+                 'context_type',
+                 'token',
+                 'position')
+
+    def __init__(self,
+                 context_type,
+                 token,
+                 position,
+                 valid_outdent_offsets,
+                 valid_continuation_offsets):
+        self.valid_outdent_offsets = valid_outdent_offsets
+        self.valid_continuation_offsets = valid_continuation_offsets
+        self.context_type = context_type
+        self.position = position
+        self.token = token
+
+
+# The contexts for hanging indents.
+# A hanging indented dictionary value after :
+HANGING_DICT_VALUE = 'dict-value'
+# Hanging indentation in an expression.
+HANGING = 'hanging'
+# Hanging indentation in a block header.
+HANGING_BLOCK = 'hanging-block'
+# Continued indentation inside an expression.
+CONTINUED = 'continued'
+# Continued indentation in a block header.
+CONTINUED_BLOCK = 'continued-block'
+
+SINGLE_LINE = 'single'
+WITH_BODY = 'multi'
+
+_CONTINUATION_MSG_PARTS = {
+    HANGING_DICT_VALUE: ('hanging', ' in dict value'),
+    HANGING: ('hanging', ''),
+    HANGING_BLOCK: ('hanging', ' before block'),
+    CONTINUED: ('continued', ''),
+    CONTINUED_BLOCK: ('continued', ' before block'),
+}
+
+
+def _Offsets(*args):
+    """Valid indentation offsets for a continued line."""
+    return dict((a, None) for a in args)
+
+
+def _BeforeBlockOffsets(single, with_body):
+    """Valid alternative indent offsets for continued lines before blocks.
+
+    :param single: Valid offset for statements on a single logical line.
+    :param with_body: Valid offset for statements on several lines.
     """
-    cleanstr = COMMENT_RGX.sub('', STRING_RGX.sub('', line))
-    for rgx_match, rgx_search, msg_id in BAD_CONSTRUCT_RGXS:
-        if rgx_match.match(cleanstr):
-            string_positions = get_string_coords(line)
-            for match in re.finditer(rgx_search, line):
-                if not in_coords(match, string_positions):
-                    return msg_id, pretty_match(match, line.rstrip())
+    return {single: SINGLE_LINE, with_body: WITH_BODY}
 
 
-class FormatChecker(BaseRawChecker):
+class TokenWrapper(object):
+    """A wrapper for readable access to token information."""
+
+    def __init__(self, tokens):
+        self._tokens = tokens
+
+    def token(self, idx):
+        return self._tokens[idx][1]
+
+    def type(self, idx):
+        return self._tokens[idx][0]
+
+    def start_line(self, idx):
+        return self._tokens[idx][2][0]
+
+    def start_col(self, idx):
+        return self._tokens[idx][2][1]
+
+    def line(self, idx):
+        return self._tokens[idx][4]
+
+
+class ContinuedLineState(object):
+    """Tracker for continued indentation inside a logical line."""
+
+    def __init__(self, tokens, config):
+        self._line_start = -1
+        self._cont_stack = []
+        self._is_block_opener = False
+        self.retained_warnings = []
+        self._config = config
+        self._tokens = TokenWrapper(tokens)
+
+    @property
+    def has_content(self):
+        return bool(self._cont_stack)
+
+    @property
+    def _block_indent_size(self):
+        return len(self._config.indent_string.replace('\t', ' ' * _TAB_LENGTH))
+
+    @property
+    def _continuation_size(self):
+        return self._config.indent_after_paren
+
+    def handle_line_start(self, pos):
+        """Record the first non-junk token at the start of a line."""
+        if self._line_start > -1:
+            return
+        self._is_block_opener = self._tokens.token(pos) in _CONTINUATION_BLOCK_OPENERS
+        self._line_start = pos
+
+    def next_physical_line(self):
+        """Prepares the tracker for a new physical line (NL)."""
+        self._line_start = -1
+        self._is_block_opener = False
+
+    def next_logical_line(self):
+        """Prepares the tracker for a new logical line (NEWLINE).
+
+        A new logical line only starts with block indentation.
+        """
+        self.next_physical_line()
+        self.retained_warnings = []
+        self._cont_stack = []
+
+    def add_block_warning(self, token_position, state, valid_offsets):
+        self.retained_warnings.append((token_position, state, valid_offsets))
+
+    def get_valid_offsets(self, idx):
+        """Returns the valid offsets for the token at the given position."""
+        # The closing brace on a dict or the 'for' in a dict comprehension may
+        # reset two indent levels because the dict value is ended implicitly
+        stack_top = -1
+        if self._tokens.token(idx) in ('}', 'for') and self._cont_stack[-1].token == ':':
+            stack_top = -2
+        indent = self._cont_stack[stack_top]
+        if self._tokens.token(idx) in _CLOSING_BRACKETS:
+            valid_offsets = indent.valid_outdent_offsets
+        else:
+            valid_offsets = indent.valid_continuation_offsets
+        return indent, valid_offsets.copy()
+
+    def _hanging_indent_after_bracket(self, bracket, position):
+        """Extracts indentation information for a hanging indent."""
+        indentation = _get_indent_length(self._tokens.line(position))
+        if self._is_block_opener and self._continuation_size == self._block_indent_size:
+            return _ContinuedIndent(
+                HANGING_BLOCK,
+                bracket,
+                position,
+                _Offsets(indentation + self._continuation_size, indentation),
+                _BeforeBlockOffsets(indentation + self._continuation_size,
+                                    indentation + self._continuation_size * 2))
+        elif bracket == ':':
+            # If the dict key was on the same line as the open brace, the new
+            # correct indent should be relative to the key instead of the
+            # current indent level
+            paren_align = self._cont_stack[-1].valid_outdent_offsets
+            next_align = self._cont_stack[-1].valid_continuation_offsets.copy()
+            next_align_keys = list(next_align.keys())
+            next_align[next_align_keys[0] + self._continuation_size] = True
+            # Note that the continuation of
+            # d = {
+            #       'a': 'b'
+            #            'c'
+            # }
+            # is handled by the special-casing for hanging continued string indents.
+            return _ContinuedIndent(HANGING_DICT_VALUE, bracket, position, paren_align, next_align)
+        else:
+            return _ContinuedIndent(
+                HANGING,
+                bracket,
+                position,
+                _Offsets(indentation, indentation + self._continuation_size),
+                _Offsets(indentation + self._continuation_size))
+
+    def _continuation_inside_bracket(self, bracket, pos):
+        """Extracts indentation information for a continued indent."""
+        indentation = _get_indent_length(self._tokens.line(pos))
+        if self._is_block_opener and self._tokens.start_col(pos+1) - indentation == self._block_indent_size:
+            return _ContinuedIndent(
+                CONTINUED_BLOCK,
+                bracket,
+                pos,
+                _Offsets(self._tokens.start_col(pos)),
+                _BeforeBlockOffsets(self._tokens.start_col(pos+1),
+                                    self._tokens.start_col(pos+1) + self._continuation_size))
+        else:
+            return _ContinuedIndent(
+                CONTINUED,
+                bracket,
+                pos,
+                _Offsets(self._tokens.start_col(pos)),
+                _Offsets(self._tokens.start_col(pos+1)))
+
+    def pop_token(self):
+        self._cont_stack.pop()
+
+    def push_token(self, token, position):
+        """Pushes a new token for continued indentation on the stack.
+
+        Tokens that can modify continued indentation offsets are:
+          * opening brackets
+          * 'lambda'
+          * : inside dictionaries
+
+        push_token relies on the caller to filter out those
+        interesting tokens.
+
+        :param token: The concrete token
+        :param position: The position of the token in the stream.
+        """
+        if _token_followed_by_eol(self._tokens, position):
+            self._cont_stack.append(
+                self._hanging_indent_after_bracket(token, position))
+        else:
+            self._cont_stack.append(
+                self._continuation_inside_bracket(token, position))
+
+
+class FormatChecker(BaseTokenChecker):
     """checks for :
     * unauthorized constructions
     * strict indentation
     * line length
-    * use of <> instead of !=
     """
 
-    __implements__ = (IRawChecker, IASTNGChecker)
+    __implements__ = (ITokenChecker, IAstroidChecker, IRawChecker)
 
     # configuration section name
     name = 'format'
@@ -162,40 +410,292 @@
     # configuration options
     # for available dict keys/values see the optik parser 'add_option' method
     options = (('max-line-length',
-                {'default' : 80, 'type' : "int", 'metavar' : '<int>',
+                {'default' : 100, 'type' : "int", 'metavar' : '<int>',
                  'help' : 'Maximum number of characters on a single line.'}),
+               ('ignore-long-lines',
+                {'type': 'regexp', 'metavar': '<regexp>',
+                 'default': r'^\s*(# )?<?https?://\S+>?$',
+                 'help': ('Regexp for a line that is allowed to be longer than '
+                          'the limit.')}),
+               ('single-line-if-stmt',
+                {'default': False, 'type' : 'yn', 'metavar' : '<y_or_n>',
+                 'help' : ('Allow the body of an if to be on the same '
+                           'line as the test if there is no else.')}),
+               ('no-space-check',
+                {'default': ','.join(_NO_SPACE_CHECK_CHOICES),
+                 'type': 'multiple_choice',
+                 'choices': _NO_SPACE_CHECK_CHOICES,
+                 'help': ('List of optional constructs for which whitespace '
+                          'checking is disabled')}),
                ('max-module-lines',
                 {'default' : 1000, 'type' : 'int', 'metavar' : '<int>',
                  'help': 'Maximum number of lines in a module'}
-                ),
+               ),
                ('indent-string',
                 {'default' : '    ', 'type' : "string", 'metavar' : '<string>',
-                 'help' : 'String used as indentation unit. This is usually \
-"    " (4 spaces) or "\\t" (1 tab).'}),
-               )
+                 'help' : 'String used as indentation unit. This is usually '
+                          '"    " (4 spaces) or "\\t" (1 tab).'}),
+               ('indent-after-paren',
+                {'type': 'int', 'metavar': '<int>', 'default': 4,
+                 'help': 'Number of spaces of indent required inside a hanging '
+                         ' or continued line.'}),
+               ('expected-line-ending-format',
+                {'type': 'choice', 'metavar': '<empty or LF or CRLF>', 'default': '',
+                 'choices': ['', 'LF', 'CRLF'],
+                 'help': 'Expected format of line ending, e.g. empty (any line ending), LF or CRLF.'}),
+              )
+
     def __init__(self, linter=None):
-        BaseRawChecker.__init__(self, linter)
+        BaseTokenChecker.__init__(self, linter)
         self._lines = None
         self._visited_lines = None
+        self._bracket_stack = [None]
 
-    def process_module(self, node):
-        """extracts encoding from the stream and decodes each line, so that
-        international text's length is properly calculated.
-        """
-        stream = node.file_stream
-        stream.seek(0) # XXX may be removed with astng > 0.23
-        readline = stream.readline
-        if sys.version_info < (3, 0):
-            if node.file_encoding is not None:
-                readline = lambda: stream.readline().decode(node.file_encoding, 'replace')
-        self.process_tokens(tokenize.generate_tokens(readline))
+    def _pop_token(self):
+        self._bracket_stack.pop()
+        self._current_line.pop_token()
 
-    def new_line(self, tok_type, line, line_num, junk):
+    def _push_token(self, token, idx):
+        self._bracket_stack.append(token)
+        self._current_line.push_token(token, idx)
+
+    def new_line(self, tokens, line_end, line_start):
         """a new line has been encountered, process it if necessary"""
-        if not tok_type in junk:
+        if _last_token_on_line_is(tokens, line_end, ';'):
+            self.add_message('unnecessary-semicolon', line=tokens.start_line(line_end))
+
+        line_num = tokens.start_line(line_start)
+        line = tokens.line(line_start)
+        if tokens.type(line_start) not in _JUNK_TOKENS:
             self._lines[line_num] = line.split('\n')[0]
         self.check_lines(line, line_num)
 
+    def process_module(self, module):
+        self._keywords_with_parens = set()
+        if 'print_function' in module.future_imports:
+            self._keywords_with_parens.add('print')
+
+    def _check_keyword_parentheses(self, tokens, start):
+        """Check that there are not unnecessary parens after a keyword.
+
+        Parens are unnecessary if there is exactly one balanced outer pair on a
+        line, and it is followed by a colon, and contains no commas (i.e. is not a
+        tuple).
+
+        Args:
+        tokens: list of Tokens; the entire list of Tokens.
+        start: int; the position of the keyword in the token list.
+        """
+        # If the next token is not a paren, we're fine.
+        if self._inside_brackets(':') and tokens[start][1] == 'for':
+            self._pop_token()
+        if tokens[start+1][1] != '(':
+            return
+
+        found_and_or = False
+        depth = 0
+        keyword_token = tokens[start][1]
+        line_num = tokens[start][2][0]
+
+        for i in range(start, len(tokens) - 1):
+            token = tokens[i]
+
+            # If we hit a newline, then assume any parens were for continuation.
+            if token[0] == tokenize.NL:
+                return
+
+            if token[1] == '(':
+                depth += 1
+            elif token[1] == ')':
+                depth -= 1
+                if not depth:
+                    # ')' can't happen after if (foo), since it would be a syntax error.
+                    if (tokens[i+1][1] in (':', ')', ']', '}', 'in') or
+                            tokens[i+1][0] in (tokenize.NEWLINE,
+                                               tokenize.ENDMARKER,
+                                               tokenize.COMMENT)):
+                        # The empty tuple () is always accepted.
+                        if i == start + 2:
+                            return
+                        if keyword_token == 'not':
+                            if not found_and_or:
+                                self.add_message('superfluous-parens', line=line_num,
+                                                 args=keyword_token)
+                        elif keyword_token in ('return', 'yield'):
+                            self.add_message('superfluous-parens', line=line_num,
+                                             args=keyword_token)
+                        elif keyword_token not in self._keywords_with_parens:
+                            if not (tokens[i+1][1] == 'in' and found_and_or):
+                                self.add_message('superfluous-parens', line=line_num,
+                                                 args=keyword_token)
+                    return
+            elif depth == 1:
+                # This is a tuple, which is always acceptable.
+                if token[1] == ',':
+                    return
+                # 'and' and 'or' are the only boolean operators with lower precedence
+                # than 'not', so parens are only required when they are found.
+                elif token[1] in ('and', 'or'):
+                    found_and_or = True
+                # A yield inside an expression must always be in parentheses,
+                # quit early without error.
+                elif token[1] == 'yield':
+                    return
+                # A generator expression always has a 'for' token in it, and
+                # the 'for' token is only legal inside parens when it is in a
+                # generator expression.  The parens are necessary here, so bail
+                # without an error.
+                elif token[1] == 'for':
+                    return
+
+    def _opening_bracket(self, tokens, i):
+        self._push_token(tokens[i][1], i)
+        # Special case: ignore slices
+        if tokens[i][1] == '[' and tokens[i+1][1] == ':':
+            return
+
+        if (i > 0 and (tokens[i-1][0] == tokenize.NAME and
+                       not (keyword.iskeyword(tokens[i-1][1]))
+                       or tokens[i-1][1] in _CLOSING_BRACKETS)):
+            self._check_space(tokens, i, (_MUST_NOT, _MUST_NOT))
+        else:
+            self._check_space(tokens, i, (_IGNORE, _MUST_NOT))
+
+    def _closing_bracket(self, tokens, i):
+        if self._inside_brackets(':'):
+            self._pop_token()
+        self._pop_token()
+        # Special case: ignore slices
+        if tokens[i-1][1] == ':' and tokens[i][1] == ']':
+            return
+        policy_before = _MUST_NOT
+        if tokens[i][1] in _CLOSING_BRACKETS and tokens[i-1][1] == ',':
+            if _TRAILING_COMMA in self.config.no_space_check:
+                policy_before = _IGNORE
+
+        self._check_space(tokens, i, (policy_before, _IGNORE))
+
+    def _check_equals_spacing(self, tokens, i):
+        """Check the spacing of a single equals sign."""
+        if self._inside_brackets('(') or self._inside_brackets('lambda'):
+            self._check_space(tokens, i, (_MUST_NOT, _MUST_NOT))
+        else:
+            self._check_space(tokens, i, (_MUST, _MUST))
+
+    def _open_lambda(self, tokens, i): # pylint:disable=unused-argument
+        self._push_token('lambda', i)
+
+    def _handle_colon(self, tokens, i):
+        # Special case: ignore slices
+        if self._inside_brackets('['):
+            return
+        if (self._inside_brackets('{') and
+                _DICT_SEPARATOR in self.config.no_space_check):
+            policy = (_IGNORE, _IGNORE)
+        else:
+            policy = (_MUST_NOT, _MUST)
+        self._check_space(tokens, i, policy)
+
+        if self._inside_brackets('lambda'):
+            self._pop_token()
+        elif self._inside_brackets('{'):
+            self._push_token(':', i)
+
+    def _handle_comma(self, tokens, i):
+        # Only require a following whitespace if this is
+        # not a hanging comma before a closing bracket.
+        if tokens[i+1][1] in _CLOSING_BRACKETS:
+            self._check_space(tokens, i, (_MUST_NOT, _IGNORE))
+        else:
+            self._check_space(tokens, i, (_MUST_NOT, _MUST))
+        if self._inside_brackets(':'):
+            self._pop_token()
+
+    def _check_surrounded_by_space(self, tokens, i):
+        """Check that a binary operator is surrounded by exactly one space."""
+        self._check_space(tokens, i, (_MUST, _MUST))
+
+    def _check_space(self, tokens, i, policies):
+        def _policy_string(policy):
+            if policy == _MUST:
+                return 'Exactly one', 'required'
+            else:
+                return 'No', 'allowed'
+
+        def _name_construct(token):
+            if token[1] == ',':
+                return 'comma'
+            elif token[1] == ':':
+                return ':'
+            elif token[1] in '()[]{}':
+                return 'bracket'
+            elif token[1] in ('<', '>', '<=', '>=', '!=', '=='):
+                return 'comparison'
+            else:
+                if self._inside_brackets('('):
+                    return 'keyword argument assignment'
+                else:
+                    return 'assignment'
+
+        good_space = [True, True]
+        token = tokens[i]
+        pairs = [(tokens[i-1], token), (token, tokens[i+1])]
+
+        for other_idx, (policy, token_pair) in enumerate(zip(policies, pairs)):
+            if token_pair[other_idx][0] in _EOL or policy == _IGNORE:
+                continue
+
+            distance = _column_distance(*token_pair)
+            if distance is None:
+                continue
+            good_space[other_idx] = (
+                (policy == _MUST and distance == 1) or
+                (policy == _MUST_NOT and distance == 0))
+
+        warnings = []
+        if not any(good_space) and policies[0] == policies[1]:
+            warnings.append((policies[0], 'around'))
+        else:
+            for ok, policy, position in zip(good_space, policies, ('before', 'after')):
+                if not ok:
+                    warnings.append((policy, position))
+        for policy, position in warnings:
+            construct = _name_construct(token)
+            count, state = _policy_string(policy)
+            self.add_message('bad-whitespace', line=token[2][0],
+                             args=(count, state, position, construct,
+                                   _underline_token(token)))
+
+    def _inside_brackets(self, left):
+        return self._bracket_stack[-1] == left
+
+    def _prepare_token_dispatcher(self):
+        raw = [
+            (_KEYWORD_TOKENS,
+             self._check_keyword_parentheses),
+
+            (_OPENING_BRACKETS, self._opening_bracket),
+
+            (_CLOSING_BRACKETS, self._closing_bracket),
+
+            (['='], self._check_equals_spacing),
+
+            (_SPACED_OPERATORS, self._check_surrounded_by_space),
+
+            ([','], self._handle_comma),
+
+            ([':'], self._handle_colon),
+
+            (['lambda'], self._open_lambda),
+
+            ]
+
+        dispatch = {}
+        for tokens, handler in raw:
+            for token in tokens:
+                dispatch[token] = handler
+        return dispatch
+
     def process_tokens(self, tokens):
         """process tokens and search for :
 
@@ -205,70 +705,150 @@
          _ optionally bad construct (if given, bad_construct must be a compiled
            regular expression).
         """
-        indent = tokenize.INDENT
-        dedent = tokenize.DEDENT
-        newline = tokenize.NEWLINE
-        junk = (tokenize.COMMENT, tokenize.NL)
+        self._bracket_stack = [None]
         indents = [0]
-        check_equal = 0
+        check_equal = False
         line_num = 0
-        previous = None
         self._lines = {}
         self._visited_lines = {}
-        for (tok_type, token, start, _, line) in tokens:
+        token_handlers = self._prepare_token_dispatcher()
+        self._last_line_ending = None
+
+        self._current_line = ContinuedLineState(tokens, self.config)
+        for idx, (tok_type, token, start, _, line) in enumerate(tokens):
             if start[0] != line_num:
-                if previous is not None and previous[0] == tokenize.OP and previous[1] == ';':
-                    self.add_message('W0301', line=previous[2])
-                previous = None
                 line_num = start[0]
-                self.new_line(tok_type, line, line_num, junk)
-            if tok_type not in (indent, dedent, newline) + junk:
-                previous = tok_type, token, start[0]
+                # A tokenizer oddity: if an indented line contains a multi-line
+                # docstring, the line member of the INDENT token does not contain
+                # the full line; therefore we check the next token on the line.
+                if tok_type == tokenize.INDENT:
+                    self.new_line(TokenWrapper(tokens), idx-1, idx+1)
+                else:
+                    self.new_line(TokenWrapper(tokens), idx-1, idx)
 
-            if tok_type == tokenize.OP:
-                if token == '<>':
-                    self.add_message('W0331', line=line_num)
-            elif tok_type == tokenize.NUMBER:
-                if token.endswith('l'):
-                    self.add_message('W0332', line=line_num)
-
-            elif tok_type == newline:
+            if tok_type == tokenize.NEWLINE:
                 # a program statement, or ENDMARKER, will eventually follow,
                 # after some (possibly empty) run of tokens of the form
                 #     (NL | COMMENT)* (INDENT | DEDENT+)?
                 # If an INDENT appears, setting check_equal is wrong, and will
                 # be undone when we see the INDENT.
-                check_equal = 1
-
-            elif tok_type == indent:
-                check_equal = 0
+                check_equal = True
+                self._process_retained_warnings(TokenWrapper(tokens), idx)
+                self._current_line.next_logical_line()
+                self._check_line_ending(token, line_num)
+            elif tok_type == tokenize.INDENT:
+                check_equal = False
                 self.check_indent_level(token, indents[-1]+1, line_num)
                 indents.append(indents[-1]+1)
-
-            elif tok_type == dedent:
+            elif tok_type == tokenize.DEDENT:
                 # there's nothing we need to check here!  what's important is
                 # that when the run of DEDENTs ends, the indentation of the
                 # program statement (or ENDMARKER) that triggered the run is
                 # equal to what's left at the top of the indents stack
-                check_equal = 1
+                check_equal = True
                 if len(indents) > 1:
                     del indents[-1]
-
-            elif check_equal and tok_type not in junk:
-                # this is the first "real token" following a NEWLINE, so it
+            elif tok_type == tokenize.NL:
+                self._check_continued_indentation(TokenWrapper(tokens), idx+1)
+                self._current_line.next_physical_line()
+            elif tok_type != tokenize.COMMENT:
+                self._current_line.handle_line_start(idx)
+                # This is the first concrete token following a NEWLINE, so it
                 # must be the first token of the next program statement, or an
                 # ENDMARKER; the "line" argument exposes the leading whitespace
                 # for this statement; in the case of ENDMARKER, line is an empty
                 # string, so will properly match the empty string with which the
                 # "indents" stack was seeded
-                check_equal = 0
-                self.check_indent_level(line, indents[-1], line_num)
+                if check_equal:
+                    check_equal = False
+                    self.check_indent_level(line, indents[-1], line_num)
+
+            if tok_type == tokenize.NUMBER and token.endswith('l'):
+                self.add_message('lowercase-l-suffix', line=line_num)
+
+            try:
+                handler = token_handlers[token]
+            except KeyError:
+                pass
+            else:
+                handler(tokens, idx)
 
         line_num -= 1 # to be ok with "wc -l"
         if line_num > self.config.max_module_lines:
-            self.add_message('C0302', args=line_num, line=1)
+            # Get the line where the too-many-lines (or its message id)
+            # was disabled or default to 1.
+            symbol = self.linter.msgs_store.check_message_id('too-many-lines')
+            names = (symbol.msgid, 'too-many-lines')
+            line = next(filter(None,
+                               map(self.linter._pragma_lineno.get, names)), 1)
+            self.add_message('too-many-lines',
+                             args=(line_num, self.config.max_module_lines),
+                             line=line)
 
-    @check_messages('C0321' ,'C03232', 'C0323', 'C0324')
+    def _check_line_ending(self, line_ending, line_num):
+        # check if line endings are mixed
+        if self._last_line_ending is not None:
+            if line_ending != self._last_line_ending:
+                self.add_message('mixed-line-endings', line=line_num)
+
+        self._last_line_ending = line_ending
+
+        # check if line ending is as expected
+        expected = self.config.expected_line_ending_format
+        if expected:
+            line_ending = reduce(lambda x, y: x + y if x != y else x, line_ending, "")  # reduce multiple \n\n\n\n to one \n
+            line_ending = 'LF' if line_ending == '\n' else 'CRLF'
+            if line_ending != expected:
+                self.add_message('unexpected-line-ending-format', args=(line_ending, expected), line=line_num)
+
+
+    def _process_retained_warnings(self, tokens, current_pos):
+        single_line_block_stmt = not _last_token_on_line_is(tokens, current_pos, ':')
+
+        for indent_pos, state, offsets in self._current_line.retained_warnings:
+            block_type = offsets[tokens.start_col(indent_pos)]
+            hints = dict((k, v) for k, v in six.iteritems(offsets)
+                         if v != block_type)
+            if single_line_block_stmt and block_type == WITH_BODY:
+                self._add_continuation_message(state, hints, tokens, indent_pos)
+            elif not single_line_block_stmt and block_type == SINGLE_LINE:
+                self._add_continuation_message(state, hints, tokens, indent_pos)
+
+    def _check_continued_indentation(self, tokens, next_idx):
+        def same_token_around_nl(token_type):
+            return (tokens.type(next_idx) == token_type and
+                    tokens.type(next_idx-2) == token_type)
+
+        # Do not issue any warnings if the next line is empty.
+        if not self._current_line.has_content or tokens.type(next_idx) == tokenize.NL:
+            return
+
+        state, valid_offsets = self._current_line.get_valid_offsets(next_idx)
+        # Special handling for hanging comments and strings. If the last line ended
+        # with a comment (string) and the new line contains only a comment, the line
+        # may also be indented to the start of the previous token.
+        if same_token_around_nl(tokenize.COMMENT) or same_token_around_nl(tokenize.STRING):
+            valid_offsets[tokens.start_col(next_idx-2)] = True
+
+        # We can only decide if the indentation of a continued line before opening
+        # a new block is valid once we know if the body of the block is on the
+        # same line as the block opener. Since the token processing is single-pass,
+        # emitting those warnings is delayed until the block opener is processed.
+        if (state.context_type in (HANGING_BLOCK, CONTINUED_BLOCK)
+                and tokens.start_col(next_idx) in valid_offsets):
+            self._current_line.add_block_warning(next_idx, state, valid_offsets)
+        elif tokens.start_col(next_idx) not in valid_offsets:
+            self._add_continuation_message(state, valid_offsets, tokens, next_idx)
+
+    def _add_continuation_message(self, state, offsets, tokens, position):
+        readable_type, readable_position = _CONTINUATION_MSG_PARTS[state.context_type]
+        hint_line = _get_indent_hint_line(offsets, tokens.start_col(position))
+        self.add_message(
+            'bad-continuation',
+            line=tokens.start_line(position),
+            args=(readable_type, readable_position, tokens.line(position), hint_line))
+
+    @check_messages('multiple-statements')
     def visit_default(self, node):
         """check the node line number and check it if not yet done"""
         if not node.is_statement:
@@ -279,16 +859,19 @@
         if prev_sibl is not None:
             prev_line = prev_sibl.fromlineno
         else:
-            prev_line = node.parent.statement().fromlineno
+            # The line on which a finally: occurs in a try/finally
+            # is not directly represented in the AST. We infer it
+            # by taking the last line of the body and adding 1, which
+            # should be the line of finally:
+            if (isinstance(node.parent, nodes.TryFinally)
+                    and node in node.parent.finalbody):
+                prev_line = node.parent.body[0].tolineno + 1
+            else:
+                prev_line = node.parent.statement().fromlineno
         line = node.fromlineno
         assert line, node
         if prev_line == line and self._visited_lines.get(line) != 2:
-            # py2.5 try: except: finally:
-            if not (isinstance(node, nodes.TryExcept)
-                    and isinstance(node.parent, nodes.TryFinally)
-                    and node.fromlineno == node.parent.fromlineno):
-                self.add_message('C0321', node=node)
-                self._visited_lines[line] = 2
+            self._check_multi_statement_line(node, line)
             return
         if line in self._visited_lines:
             return
@@ -298,31 +881,51 @@
             tolineno = node.tolineno
         assert tolineno, node
         lines = []
-        for line in xrange(line, tolineno + 1):
+        for line in range(line, tolineno + 1):
             self._visited_lines[line] = 1
             try:
                 lines.append(self._lines[line].rstrip())
             except KeyError:
                 lines.append('')
-        try:
-            msg_def = check_line('\n'.join(lines))
-            if msg_def:
-                self.add_message(msg_def[0], node=node, args=msg_def[1])
-        except KeyError:
-            # FIXME: internal error !
-            pass
 
-    @check_messages('W0333')
-    def visit_backquote(self, node):
-        self.add_message('W0333', node=node)
+    def _check_multi_statement_line(self, node, line):
+        """Check for lines containing multiple statements."""
+        # Do not warn about multiple nested context managers
+        # in with statements.
+        if isinstance(node, nodes.With):
+            return
+        # For try... except... finally..., the two nodes
+        # appear to be on the same line due to how the AST is built.
+        if (isinstance(node, nodes.TryExcept) and
+                isinstance(node.parent, nodes.TryFinally)):
+            return
+        if (isinstance(node.parent, nodes.If) and not node.parent.orelse
+                and self.config.single_line_if_stmt):
+            return
+        self.add_message('multiple-statements', node=node)
+        self._visited_lines[line] = 2
 
     def check_lines(self, lines, i):
         """check lines have less than a maximum number of characters
         """
         max_chars = self.config.max_line_length
-        for line in lines.splitlines():
-            if len(line) > max_chars:
-                self.add_message('C0301', line=i, args=(len(line), max_chars))
+        ignore_long_line = self.config.ignore_long_lines
+
+        for line in lines.splitlines(True):
+            if not line.endswith('\n'):
+                self.add_message('missing-final-newline', line=i)
+            else:
+                stripped_line = line.rstrip()
+                if line[len(stripped_line):] not in ('\n', '\r\n'):
+                    self.add_message('trailing-whitespace', line=i)
+                # Don't count excess whitespace in the line length.
+                line = stripped_line
+            mobj = OPTION_RGX.search(line)
+            if mobj and mobj.group(1).split('=', 1)[0].strip() == 'disable':
+                line = line.split('#')[0].rstrip()
+
+            if len(line) > max_chars and not ignore_long_line.search(line):
+                self.add_message('line-too-long', line=i, args=(len(line), max_chars))
             i += 1
 
     def check_indent_level(self, string, expected, line_num):
@@ -343,15 +946,15 @@
                     args = ('tab', 'space')
                 else:
                     args = ('space', 'tab')
-                self.add_message('W0312', args=args, line=line_num)
+                self.add_message('mixed-indentation', args=args, line=line_num)
                 return level
             suppl += string[0]
-            string = string [1:]
+            string = string[1:]
         if level != expected or suppl:
             i_type = 'spaces'
             if indent[0] == '\t':
                 i_type = 'tabs'
-            self.add_message('W0311', line=line_num,
+            self.add_message('bad-indentation', line=line_num,
                              args=(level * unit_size + len(suppl), i_type,
                                    expected * unit_size))
 
diff --git a/third_party/pylint/checkers/imports.py b/third_party/pylint/checkers/imports.py
index 7e6a4f8..1969eeb 100644
--- a/third_party/pylint/checkers/imports.py
+++ b/third_party/pylint/checkers/imports.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
+# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
 # http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This program is free software; you can redistribute it and/or modify it under
@@ -12,55 +12,63 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """imports checkers for Python code"""
 
+import sys
+from collections import defaultdict
+
+import six
+from six.moves import map # pylint: disable=redefined-builtin
+
 from logilab.common.graph import get_cycles, DotBackend
-from logilab.common.modutils import is_standard_module
 from logilab.common.ureports import VerbatimText, Paragraph
 
-from logilab import astng
-from logilab.astng import are_exclusive
+import astroid
+from astroid import are_exclusive
+from astroid.modutils import get_module_part, is_standard_module
 
-from pylint.interfaces import IASTNGChecker
-from pylint.checkers import BaseChecker, EmptyReport
+from pylint.interfaces import IAstroidChecker
+from pylint.utils import EmptyReport
+from pylint.checkers import BaseChecker
+from pylint.checkers.utils import check_messages, is_import_error
 
+def _except_import_error(node):
+    """
+    Check if the try-except node has an ImportError handler.
+    Return True if an ImportError handler was inferred, False otherwise.
+    """
+    if not isinstance(node, astroid.TryExcept):
+        return
+    return any(map(is_import_error, node.handlers))
 
 def get_first_import(node, context, name, base, level):
     """return the node where [base.]<name> is imported or None if not found
     """
+    fullname = '%s.%s' % (base, name) if base else name
+
     first = None
     found = False
-    for first in context.values():
-        if isinstance(first, astng.Import):
-            if name in [iname[0] for iname in first.names]:
+    for first in context.body:
+        if first is node:
+            continue
+        if first.scope() is node.scope() and first.fromlineno > node.fromlineno:
+            continue
+        if isinstance(first, astroid.Import):
+            if any(fullname == iname[0] for iname in first.names):
                 found = True
                 break
-        elif isinstance(first, astng.From):
-            if base == first.modname and level == first.level and \
-                   name in [iname[0] for iname in first.names]:
+        elif isinstance(first, astroid.From):
+            if level == first.level and any(
+                    fullname == '%s.%s' % (first.modname, iname[0])
+                    for iname in first.names):
                 found = True
                 break
-    if found and first is not node and not are_exclusive(first, node):
+    if found and not are_exclusive(first, node):
         return first
 
 # utilities to represents import dependencies as tree and dot graph ###########
 
-def filter_dependencies_info(dep_info, package_dir, mode='external'):
-    """filter external or internal dependencies from dep_info (return a
-    new dictionary containing the filtered modules only)
-    """
-    if mode == 'external':
-        filter_func = lambda x: not is_standard_module(x, (package_dir,))
-    else:
-        assert mode == 'internal'
-        filter_func = lambda x: is_standard_module(x, (package_dir,))
-    result = {}
-    for importee, importers in dep_info.items():
-        if filter_func(importee):
-            result[importee] = importers
-    return result
-
 def make_tree_defs(mod_files_list):
     """get a list of 2-uple (module, list_of_files_which_import_this_module),
     it will return a dictionary to represent this as a tree
@@ -86,7 +94,7 @@
             lines.append('%s %s' % (mod, files))
             sub_indent_str = '  '
         else:
-            lines.append('%s\-%s %s' % (indent_str, mod, files))
+            lines.append(r'%s\-%s %s' % (indent_str, mod, files))
             if i == len(nodes)-1:
                 sub_indent_str = '%s  ' % indent_str
             else:
@@ -100,16 +108,16 @@
     """write dependencies as a dot (graphviz) file
     """
     done = {}
-    printer = DotBackend(filename[:-4], rankdir = "LR")
+    printer = DotBackend(filename[:-4], rankdir='LR')
     printer.emit('URL="." node[shape="box"]')
-    for modname, dependencies in dep_info.items():
+    for modname, dependencies in sorted(six.iteritems(dep_info)):
         done[modname] = 1
         printer.emit_node(modname)
         for modname in dependencies:
             if modname not in done:
                 done[modname] = 1
                 printer.emit_node(modname)
-    for depmodname, dependencies in dep_info.items():
+    for depmodname, dependencies in sorted(six.iteritems(dep_info)):
         for modname in dependencies:
             printer.emit_edge(modname, depmodname)
     printer.generate(filename)
@@ -128,26 +136,36 @@
 
 MSGS = {
     'F0401': ('Unable to import %s',
+              'import-error',
               'Used when pylint has been unable to import a module.'),
     'R0401': ('Cyclic import (%s)',
+              'cyclic-import',
               'Used when a cyclic import between two or more modules is \
               detected.'),
 
     'W0401': ('Wildcard import %s',
+              'wildcard-import',
               'Used when `from module import *` is detected.'),
     'W0402': ('Uses of a deprecated module %r',
+              'deprecated-module',
               'Used a module marked as deprecated is imported.'),
     'W0403': ('Relative import %r, should be %r',
-              'Used when an import relative to the package directory is \
-              detected.'),
+              'relative-import',
+              'Used when an import relative to the package directory is '
+              'detected.',
+              {'maxversion': (3, 0)}),
     'W0404': ('Reimport %r (imported line %s)',
+              'reimported',
               'Used when a module is reimported multiple times.'),
     'W0406': ('Module import itself',
+              'import-self',
               'Used when a module is importing itself.'),
 
     'W0410': ('__future__ import is not the first non docstring statement',
+              'misplaced-future',
               'Python 2.5 and greater require __future__ import to be the \
-              first non docstring statement in the module.'),
+              first non docstring statement in the module.',
+              {'maxversion': (3, 0)}),
     }
 
 class ImportsChecker(BaseChecker):
@@ -158,43 +176,45 @@
     * uses of deprecated modules
     """
 
-    __implements__ = IASTNGChecker
+    __implements__ = IAstroidChecker
 
     name = 'imports'
     msgs = MSGS
     priority = -2
 
+    if sys.version_info < (3,):
+        deprecated_modules = ('regsub', 'TERMIOS', 'Bastion', 'rexec')
+    else:
+        deprecated_modules = ('stringprep', 'optparse')
     options = (('deprecated-modules',
-                {'default' : ('regsub', 'string', 'TERMIOS',
-                              'Bastion', 'rexec'),
+                {'default' : deprecated_modules,
                  'type' : 'csv',
                  'metavar' : '<modules>',
                  'help' : 'Deprecated modules which should not be used, \
 separated by a comma'}
-                ),
+               ),
                ('import-graph',
                 {'default' : '',
                  'type' : 'string',
                  'metavar' : '<file.dot>',
                  'help' : 'Create a graph of every (i.e. internal and \
 external) dependencies in the given file (report RP0402 must not be disabled)'}
-                ),
+               ),
                ('ext-import-graph',
                 {'default' : '',
                  'type' : 'string',
                  'metavar' : '<file.dot>',
                  'help' : 'Create a graph of external dependencies in the \
 given file (report RP0402 must not be disabled)'}
-                ),
+               ),
                ('int-import-graph',
                 {'default' : '',
                  'type' : 'string',
                  'metavar' : '<file.dot>',
                  'help' : 'Create a graph of internal dependencies in the \
 given file (report RP0402 must not be disabled)'}
-                ),
-
-               )
+               ),
+              )
 
     def __init__(self, linter=None):
         BaseChecker.__init__(self, linter)
@@ -205,27 +225,28 @@
                          self.report_external_dependencies),
                         ('RP0402', 'Modules dependencies graph',
                          self.report_dependencies_graph),
-                        )
+                       )
 
     def open(self):
         """called before visiting project (i.e set of modules)"""
         self.linter.add_stats(dependencies={})
         self.linter.add_stats(cycles=[])
         self.stats = self.linter.stats
-        self.import_graph = {}
+        self.import_graph = defaultdict(set)
 
     def close(self):
         """called before visiting project (i.e set of modules)"""
         # don't try to compute cycles if the associated message is disabled
-        if self.linter.is_message_enabled('R0401'):
-            for cycle in get_cycles(self.import_graph):
-                self.add_message('R0401', args=' -> '.join(cycle))
+        if self.linter.is_message_enabled('cyclic-import'):
+            vertices = list(self.import_graph)
+            for cycle in get_cycles(self.import_graph, vertices=vertices):
+                self.add_message('cyclic-import', args=' -> '.join(cycle))
 
     def visit_import(self, node):
         """triggered when an import statement is seen"""
         modnode = node.root()
         for name, _ in node.names:
-            importedmodnode = self.get_imported_module(modnode, node, name)
+            importedmodnode = self.get_imported_module(node, name)
             if importedmodnode is None:
                 continue
             self._check_relative_import(modnode, node, importedmodnode, name)
@@ -233,7 +254,9 @@
             self._check_deprecated_module(node, name)
             self._check_reimport(node, name)
 
-
+    # TODO This appears to be the list of all messages of the checker...
+    # @check_messages('W0410', 'W0401', 'W0403', 'W0402', 'W0404', 'W0406', 'F0401')
+    @check_messages(*(MSGS.keys()))
     def visit_from(self, node):
         """triggered when a from statement is seen"""
         basename = node.modname
@@ -242,39 +265,41 @@
             prev = node.previous_sibling()
             if prev:
                 # consecutive future statements are possible
-                if not (isinstance(prev, astng.From)
-                       and prev.modname == '__future__'):
-                    self.add_message('W0410', node=node)
+                if not (isinstance(prev, astroid.From)
+                        and prev.modname == '__future__'):
+                    self.add_message('misplaced-future', node=node)
             return
+        for name, _ in node.names:
+            if name == '*':
+                self.add_message('wildcard-import', args=basename, node=node)
         modnode = node.root()
-        importedmodnode = self.get_imported_module(modnode, node, basename)
+        importedmodnode = self.get_imported_module(node, basename)
         if importedmodnode is None:
             return
         self._check_relative_import(modnode, node, importedmodnode, basename)
         self._check_deprecated_module(node, basename)
         for name, _ in node.names:
-            if name == '*':
-                self.add_message('W0401', args=basename, node=node)
-                continue
-            self._add_imported_module(node, '%s.%s' % (importedmodnode.name, name))
-            self._check_reimport(node, name, basename, node.level)
+            if name != '*':
+                self._add_imported_module(node, '%s.%s' % (importedmodnode.name, name))
+                self._check_reimport(node, name, basename, node.level)
 
-    def get_imported_module(self, modnode, importnode, modname):
+    def get_imported_module(self, importnode, modname):
         try:
             return importnode.do_import_module(modname)
-        except astng.InferenceError, ex:
+        except astroid.InferenceError as ex:
             if str(ex) != modname:
                 args = '%r (%s)' % (modname, ex)
             else:
                 args = repr(modname)
-            self.add_message("F0401", args=args, node=importnode)
+            if not _except_import_error(importnode.parent):
+                self.add_message("import-error", args=args, node=importnode)
 
     def _check_relative_import(self, modnode, importnode, importedmodnode,
                                importedasname):
         """check relative import. node is either an Import or From node, modname
         the imported module name.
         """
-        if 'W0403' not in self.active_msgs:
+        if not self.linter.is_message_enabled('relative-import'):
             return
         if importedmodnode.file is None:
             return False # built-in module
@@ -284,52 +309,56 @@
             return False
         if importedmodnode.name != importedasname:
             # this must be a relative import...
-            self.add_message('W0403', args=(importedasname, importedmodnode.name),
+            self.add_message('relative-import',
+                             args=(importedasname, importedmodnode.name),
                              node=importnode)
 
     def _add_imported_module(self, node, importedmodname):
         """notify an imported module, used to analyze dependencies"""
+        try:
+            importedmodname = get_module_part(importedmodname)
+        except ImportError:
+            pass
         context_name = node.root().name
         if context_name == importedmodname:
             # module importing itself !
-            self.add_message('W0406', node=node)
+            self.add_message('import-self', node=node)
         elif not is_standard_module(importedmodname):
             # handle dependencies
             importedmodnames = self.stats['dependencies'].setdefault(
                 importedmodname, set())
             if not context_name in importedmodnames:
                 importedmodnames.add(context_name)
-            if is_standard_module( importedmodname, (self.package_dir(),) ):
-                # update import graph
-                mgraph = self.import_graph.setdefault(context_name, set())
-                if not importedmodname in mgraph:
-                    mgraph.add(importedmodname)
+            # update import graph
+            mgraph = self.import_graph[context_name]
+            if importedmodname not in mgraph:
+                mgraph.add(importedmodname)
 
     def _check_deprecated_module(self, node, mod_path):
         """check if the module is deprecated"""
         for mod_name in self.config.deprecated_modules:
             if mod_path == mod_name or mod_path.startswith(mod_name + '.'):
-                self.add_message('W0402', node=node, args=mod_path)
+                self.add_message('deprecated-module', node=node, args=mod_path)
 
-    def _check_reimport(self, node, name, basename=None, level=0):
+    def _check_reimport(self, node, name, basename=None, level=None):
         """check if the import is necessary (i.e. not already done)"""
-        if 'W0404' not in self.active_msgs:
+        if not self.linter.is_message_enabled('reimported'):
             return
         frame = node.frame()
         root = node.root()
         contexts = [(frame, level)]
         if root is not frame:
-            contexts.append((root, 0))
+            contexts.append((root, None))
         for context, level in contexts:
             first = get_first_import(node, context, name, basename, level)
             if first is not None:
-                self.add_message('W0404', node=node,
+                self.add_message('reimported', node=node,
                                  args=(name, first.fromlineno))
 
 
     def report_external_dependencies(self, sect, _, dummy):
         """return a verbatim layout for displaying dependencies"""
-        dep_info = make_tree_defs(self._external_dependencies_info().items())
+        dep_info = make_tree_defs(six.iteritems(self._external_dependencies_info()))
         if not dep_info:
             raise EmptyReport()
         tree_str = repr_tree_defs(dep_info)
@@ -359,8 +388,11 @@
         cache them
         """
         if self.__ext_dep_info is None:
-            self.__ext_dep_info = filter_dependencies_info(
-                self.stats['dependencies'], self.package_dir(), 'external')
+            package = self.linter.current_name
+            self.__ext_dep_info = result = {}
+            for importee, importers in six.iteritems(self.stats['dependencies']):
+                if not importee.startswith(package):
+                    result[importee] = importers
         return self.__ext_dep_info
 
     def _internal_dependencies_info(self):
@@ -368,8 +400,11 @@
         cache them
         """
         if self.__int_dep_info is None:
-            self.__int_dep_info = filter_dependencies_info(
-                self.stats['dependencies'], self.package_dir(), 'internal')
+            package = self.linter.current_name
+            self.__int_dep_info = result = {}
+            for importee, importers in six.iteritems(self.stats['dependencies']):
+                if importee.startswith(package):
+                    result[importee] = importers
         return self.__int_dep_info
 
 
diff --git a/third_party/pylint/checkers/logging.py b/third_party/pylint/checkers/logging.py
index 89899b6..897c1c7 100644
--- a/third_party/pylint/checkers/logging.py
+++ b/third_party/pylint/checkers/logging.py
@@ -10,36 +10,52 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """checker for use of Python logging
 """
 
-from logilab import astng
+import astroid
 from pylint import checkers
 from pylint import interfaces
 from pylint.checkers import utils
+from pylint.checkers.utils import check_messages
+
+import six
 
 
 MSGS = {
     'W1201': ('Specify string format arguments as logging function parameters',
-             'Used when a logging statement has a call form of '
-             '"logging.<logging method>(format_string % (format_args...))". '
-             'Such calls should leave string interpolation to the logging '
-             'method itself and be written '
-             '"logging.<logging method>(format_string, format_args...)" '
-             'so that the program may avoid incurring the cost of the '
-             'interpolation in those cases in which no message will be '
-             'logged. For more, see '
-             'http://www.python.org/dev/peps/pep-0282/.'),
+              'logging-not-lazy',
+              'Used when a logging statement has a call form of '
+              '"logging.<logging method>(format_string % (format_args...))". '
+              'Such calls should leave string interpolation to the logging '
+              'method itself and be written '
+              '"logging.<logging method>(format_string, format_args...)" '
+              'so that the program may avoid incurring the cost of the '
+              'interpolation in those cases in which no message will be '
+              'logged. For more, see '
+              'http://www.python.org/dev/peps/pep-0282/.'),
+    'W1202': ('Use % formatting in logging functions but pass the % '
+              'parameters as arguments',
+              'logging-format-interpolation',
+              'Used when a logging statement has a call form of '
+              '"logging.<logging method>(format_string.format(format_args...))"'
+              '. Such calls should use % formatting instead, but leave '
+              'interpolation to the logging function by passing the parameters '
+              'as arguments.'),
     'E1200': ('Unsupported logging format character %r (%#02x) at index %d',
+              'logging-unsupported-format',
               'Used when an unsupported format character is used in a logging\
               statement format string.'),
     'E1201': ('Logging format string ends in middle of conversion specifier',
+              'logging-format-truncated',
               'Used when a logging statement format string terminates before\
               the end of a conversion specifier.'),
     'E1205': ('Too many arguments for logging format string',
+              'logging-too-many-args',
               'Used when a logging format string is given too few arguments.'),
     'E1206': ('Not enough arguments for logging format string',
+              'logging-too-few-args',
               'Used when a logging format string is given too many arguments'),
     }
 
@@ -48,64 +64,137 @@
     'critical', 'debug', 'error', 'exception', 'fatal', 'info', 'warn',
     'warning'])
 
+def is_method_call(callfunc_node, types=(), methods=()):
+    """Determines if a CallFunc node represents a method call.
+
+    Args:
+      callfunc_node: The CallFunc AST node to check.
+      types: Optional sequence of caller type names to restrict check.
+      methods: Optional sequence of method names to restrict check.
+
+    Returns:
+      True, if the node represents a method call for the given type and
+      method names, False otherwise.
+    """
+    if not isinstance(callfunc_node, astroid.CallFunc):
+        return False
+    func = utils.safe_infer(callfunc_node.func)
+    return (isinstance(func, astroid.BoundMethod)
+            and isinstance(func.bound, astroid.Instance)
+            and (func.bound.name in types if types else True)
+            and (func.name in methods if methods else True))
+
+
 
 class LoggingChecker(checkers.BaseChecker):
     """Checks use of the logging module."""
 
-    __implements__ = interfaces.IASTNGChecker
+    __implements__ = interfaces.IAstroidChecker
     name = 'logging'
     msgs = MSGS
 
-    def visit_module(self, unused_node):
+    options = (('logging-modules',
+                {'default': ('logging',),
+                 'type': 'csv',
+                 'metavar': '<comma separated list>',
+                 'help': 'Logging modules to check that the string format '
+                         'arguments are in logging function parameter format'}
+               ),
+              )
+
+    def visit_module(self, node): # pylint: disable=unused-argument
         """Clears any state left in this checker from last module checked."""
         # The code being checked can just as easily "import logging as foo",
         # so it is necessary to process the imports and store in this field
         # what name the logging module is actually given.
-        self._logging_name = None
+        self._logging_names = set()
+        logging_mods = self.config.logging_modules
+
+        self._logging_modules = set(logging_mods)
+        self._from_imports = {}
+        for logging_mod in logging_mods:
+            parts = logging_mod.rsplit('.', 1)
+            if len(parts) > 1:
+                self._from_imports[parts[0]] = parts[1]
+
+    def visit_from(self, node):
+        """Checks to see if a module uses a non-Python logging module."""
+        try:
+            logging_name = self._from_imports[node.modname]
+            for module, as_name in node.names:
+                if module == logging_name:
+                    self._logging_names.add(as_name or module)
+        except KeyError:
+            pass
 
     def visit_import(self, node):
         """Checks to see if this module uses Python's built-in logging."""
         for module, as_name in node.names:
-            if module == 'logging':
-                if as_name:
-                    self._logging_name = as_name
-                else:
-                    self._logging_name = 'logging'
+            if module in self._logging_modules:
+                self._logging_names.add(as_name or module)
 
+    @check_messages(*(MSGS.keys()))
     def visit_callfunc(self, node):
-        """Checks calls to (simple forms of) logging methods."""
-        if (not isinstance(node.func, astng.Getattr)
-            or not isinstance(node.func.expr, astng.Name)
-            or node.func.expr.name != self._logging_name):
-            return
-        self._check_convenience_methods(node)
-        self._check_log_methods(node)
+        """Checks calls to logging methods."""
+        def is_logging_name():
+            return (isinstance(node.func, astroid.Getattr) and
+                    isinstance(node.func.expr, astroid.Name) and
+                    node.func.expr.name in self._logging_names)
 
-    def _check_convenience_methods(self, node):
-        """Checks calls to logging convenience methods (like logging.warn)."""
-        if node.func.attrname not in CHECKED_CONVENIENCE_FUNCTIONS:
-            return
-        if node.starargs or node.kwargs or not node.args:
-            # Either no args, star args, or double-star args. Beyond the
-            # scope of this checker.
-            return
-        if isinstance(node.args[0], astng.BinOp) and node.args[0].op == '%':
-            self.add_message('W1201', node=node)
-        elif isinstance(node.args[0], astng.Const):
-            self._check_format_string(node, 0)
+        def is_logger_class():
+            try:
+                for inferred in node.func.infer():
+                    if isinstance(inferred, astroid.BoundMethod):
+                        parent = inferred._proxied.parent
+                        if (isinstance(parent, astroid.Class) and
+                                (parent.qname() == 'logging.Logger' or
+                                 any(ancestor.qname() == 'logging.Logger'
+                                     for ancestor in parent.ancestors()))):
+                            return True, inferred._proxied.name
+            except astroid.exceptions.InferenceError:
+                pass
+            return False, None
 
-    def _check_log_methods(self, node):
+        if is_logging_name():
+            name = node.func.attrname
+        else:
+            result, name = is_logger_class()
+            if not result:
+                return
+        self._check_log_method(node, name)
+
+    def _check_log_method(self, node, name):
         """Checks calls to logging.log(level, format, *format_args)."""
-        if node.func.attrname != 'log':
+        if name == 'log':
+            if node.starargs or node.kwargs or len(node.args) < 2:
+                # Either a malformed call, star args, or double-star args. Beyond
+                # the scope of this checker.
+                return
+            format_pos = 1
+        elif name in CHECKED_CONVENIENCE_FUNCTIONS:
+            if node.starargs or node.kwargs or not node.args:
+                # Either no args, star args, or double-star args. Beyond the
+                # scope of this checker.
+                return
+            format_pos = 0
+        else:
             return
-        if node.starargs or node.kwargs or len(node.args) < 2:
-            # Either a malformed call, star args, or double-star args. Beyond
-            # the scope of this checker.
-            return
-        if isinstance(node.args[1], astng.BinOp) and node.args[1].op == '%':
-            self.add_message('W1201', node=node)
-        elif isinstance(node.args[1], astng.Const):
-            self._check_format_string(node, 1)
+
+        if isinstance(node.args[format_pos], astroid.BinOp) and node.args[format_pos].op == '%':
+            self.add_message('logging-not-lazy', node=node)
+        elif isinstance(node.args[format_pos], astroid.CallFunc):
+            self._check_call_func(node.args[format_pos])
+        elif isinstance(node.args[format_pos], astroid.Const):
+            self._check_format_string(node, format_pos)
+
+    def _check_call_func(self, callfunc_node):
+        """Checks that function call is not format_string.format().
+
+        Args:
+          callfunc_node: CallFunc AST node to be checked.
+        """
+        if is_method_call(callfunc_node, ('str', 'unicode'), ('format',)):
+            self.add_message('logging-format-interpolation', node=callfunc_node)
 
     def _check_format_string(self, node, format_arg):
         """Checks that format string tokens match the supplied arguments.
@@ -114,13 +203,13 @@
           node: AST node to be checked.
           format_arg: Index of the format string in the node arguments.
         """
-        num_args = self._count_supplied_tokens(node.args[format_arg + 1:])
+        num_args = _count_supplied_tokens(node.args[format_arg + 1:])
         if not num_args:
             # If no args were supplied, then all format strings are valid -
             # don't check any further.
             return
         format_string = node.args[format_arg].value
-        if not isinstance(format_string, basestring):
+        if not isinstance(format_string, six.string_types):
             # If the log format is constant non-string (e.g. logging.debug(5)),
             # ensure there are no arguments.
             required_num_args = 0
@@ -132,32 +221,34 @@
                     # Keyword checking on logging strings is complicated by
                     # special keywords - out of scope.
                     return
-            except utils.UnsupportedFormatCharacter, e:
-                c = format_string[e.index]
-                self.add_message('E1200', node=node, args=(c, ord(c), e.index))
+            except utils.UnsupportedFormatCharacter as ex:
+                char = format_string[ex.index]
+                self.add_message('logging-unsupported-format', node=node,
+                                 args=(char, ord(char), ex.index))
                 return
             except utils.IncompleteFormatString:
-                self.add_message('E1201', node=node)
+                self.add_message('logging-format-truncated', node=node)
                 return
         if num_args > required_num_args:
-            self.add_message('E1205', node=node)
+            self.add_message('logging-too-many-args', node=node)
         elif num_args < required_num_args:
-            self.add_message('E1206', node=node)
+            self.add_message('logging-too-few-args', node=node)
 
-    def _count_supplied_tokens(self, args):
-        """Counts the number of tokens in an args list.
 
-        The Python log functions allow for special keyword arguments: func,
-        exc_info and extra. To handle these cases correctly, we only count
-        arguments that aren't keywords.
+def _count_supplied_tokens(args):
+    """Counts the number of tokens in an args list.
 
-        Args:
-          args: List of AST nodes that are arguments for a log format string.
+    The Python log functions allow for special keyword arguments: func,
+    exc_info and extra. To handle these cases correctly, we only count
+    arguments that aren't keywords.
 
-        Returns:
-          Number of AST nodes that aren't keywords.
-        """
-        return sum(1 for arg in args if not isinstance(arg, astng.Keyword))
+    Args:
+      args: List of AST nodes that are arguments for a log format string.
+
+    Returns:
+      Number of AST nodes that aren't keywords.
+    """
+    return sum(1 for arg in args if not isinstance(arg, astroid.Keyword))
 
 
 def register(linter):
diff --git a/third_party/pylint/checkers/misc.py b/third_party/pylint/checkers/misc.py
index 7f09d40..7fbe70b 100644
--- a/third_party/pylint/checkers/misc.py
+++ b/third_party/pylint/checkers/misc.py
@@ -10,28 +10,37 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """ Copyright (c) 2000-2010 LOGILAB S.A. (Paris, FRANCE).
  http://www.logilab.fr/ -- mailto:contact@logilab.fr
 
 Check source code is ascii only or has an encoding declaration (PEP 263)
 """
 
-import re, sys
+import re
 
 from pylint.interfaces import IRawChecker
 from pylint.checkers import BaseChecker
+import six
 
 
 MSGS = {
     'W0511': ('%s',
+              'fixme',
               'Used when a warning note as FIXME or XXX is detected.'),
-    }
+    'W0512': ('Cannot decode using encoding "%s", unexpected byte at position %d',
+              'invalid-encoded-data',
+              'Used when a source line cannot be decoded using the specified '
+              'source file encoding.',
+              {'maxversion': (3, 0)}),
+}
+
 
 class EncodingChecker(BaseChecker):
+
     """checks for:
     * warning notes in the code like FIXME, XXX
-    * PEP 263: source code with non ascii character but no encoding declaration
+    * encoding issues.
     """
     __implements__ = IRawChecker
 
@@ -40,36 +49,54 @@
     msgs = MSGS
 
     options = (('notes',
-                {'type' : 'csv', 'metavar' : '<comma separated values>',
-                 'default' : ('FIXME', 'XXX', 'TODO'),
-                 'help' : 'List of note tags to take in consideration, \
-separated by a comma.'
-                 }),
-               )
+                {'type': 'csv', 'metavar': '<comma separated values>',
+                 'default': ('FIXME', 'XXX', 'TODO'),
+                 'help': ('List of note tags to take in consideration, '
+                          'separated by a comma.')}),)
 
-    def __init__(self, linter=None):
-        BaseChecker.__init__(self, linter)
+    def _check_note(self, notes, lineno, line):
+        # First, simply check if the notes are in the line at all. This is an
+        # optimisation to prevent using the regular expression on every line,
+        # but rather only on lines which may actually contain one of the notes.
+        # This prevents a pathological problem with lines that are hundreds
+        # of thousands of characters long.
+        for note in self.config.notes:
+            if note in line:
+                break
+        else:
+            return
 
-    def process_module(self, node):
-        """inspect the source file to found encoding problem or fixmes like
+        match = notes.search(line)
+        if not match:
+            return
+        self.add_message('fixme', args=line[match.start(1):-1], line=lineno)
+
+    def _check_encoding(self, lineno, line, file_encoding):
+        try:
+            return six.text_type(line, file_encoding)
+        except UnicodeDecodeError as ex:
+            self.add_message('invalid-encoded-data', line=lineno,
+                             args=(file_encoding, ex.args[2]))
+
+    def process_module(self, module):
+        """inspect the source file to find encoding problem or fixmes like
         notes
         """
-        stream = node.file_stream
-        stream.seek(0) # XXX may be removed with astng > 0.23
-        # warning notes in the code
-        notes = []
-        for note in self.config.notes:
-            notes.append(re.compile(note))
-        linenum = 1
-        for line in stream.readlines():
-            for note in notes:
-                match = note.search(line)
-                if match:
-                    self.add_message('W0511', args=line[match.start():-1],
-                                     line=linenum)
-                    break
-            linenum += 1
+        if self.config.notes:
+            notes = re.compile(
+                r'.*?#\s*(%s)(:*\s*.+)' % "|".join(self.config.notes))
+        else:
+            notes = None
+        if module.file_encoding:
+            encoding = module.file_encoding
+        else:
+            encoding = 'ascii'
 
+        with module.stream() as stream:
+            for lineno, line in enumerate(stream):
+                line = self._check_encoding(lineno + 1, line, encoding)
+                if line is not None and notes:
+                    self._check_note(notes, lineno + 1, line)
 
 
 def register(linter):
diff --git a/third_party/pylint/checkers/newstyle.py b/third_party/pylint/checkers/newstyle.py
index 7bb146d..f74e7f1 100644
--- a/third_party/pylint/checkers/newstyle.py
+++ b/third_party/pylint/checkers/newstyle.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2005-2006 LOGILAB S.A. (Paris, FRANCE).
+# Copyright (c) 2005-2014 LOGILAB S.A. (Paris, FRANCE).
 # http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This program is free software; you can redistribute it and/or modify it under
@@ -12,39 +12,61 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """check for new / old style related problems
 """
+import sys
 
-from logilab import astng
+import astroid
 
-from pylint.interfaces import IASTNGChecker
+from pylint.interfaces import IAstroidChecker, INFERENCE, INFERENCE_FAILURE, HIGH
 from pylint.checkers import BaseChecker
-from pylint.checkers.utils import check_messages
+from pylint.checkers.utils import (
+    check_messages,
+    has_known_bases,
+    node_frame_class,
+)
 
 MSGS = {
     'E1001': ('Use of __slots__ on an old style class',
-              'Used when an old style class uses the __slots__ attribute.'),
+              'slots-on-old-class',
+              'Used when an old style class uses the __slots__ attribute.',
+              {'maxversion': (3, 0)}),
     'E1002': ('Use of super on an old style class',
-              'Used when an old style class uses the super builtin.'),
-    'E1003': ('Bad first argument %r given to super class',
+              'super-on-old-class',
+              'Used when an old style class uses the super builtin.',
+              {'maxversion': (3, 0)}),
+    'E1003': ('Bad first argument %r given to super()',
+              'bad-super-call',
               'Used when another argument than the current class is given as \
               first argument of the super builtin.'),
+    'E1004': ('Missing argument to super()',
+              'missing-super-argument',
+              'Used when the super builtin didn\'t receive an \
+               argument.',
+              {'maxversion': (3, 0)}),
     'W1001': ('Use of "property" on an old style class',
-              'Used when PyLint detect the use of the builtin "property" \
+              'property-on-old-class',
+              'Used when Pylint detect the use of the builtin "property" \
               on an old style class while this is relying on new style \
-              classes features'),
+              classes features.',
+              {'maxversion': (3, 0)}),
+    'C1001': ('Old-style class defined.',
+              'old-style-class',
+              'Used when a class is defined that does not inherit from another'
+              'class and does not inherit explicitly from "object".',
+              {'maxversion': (3, 0)})
     }
 
 
 class NewStyleConflictChecker(BaseChecker):
     """checks for usage of new style capabilities on old style classes and
-    other new/old styles conflicts problems                                    
-    * use of property, __slots__, super                                        
-    * "super" usage                                                            
+    other new/old styles conflicts problems
+    * use of property, __slots__, super
+    * "super" usage
     """
-    
-    __implements__ = (IASTNGChecker,)
+
+    __implements__ = (IAstroidChecker,)
 
     # configuration section name
     name = 'newstyle'
@@ -54,53 +76,95 @@
     # configuration options
     options = ()
 
-    @check_messages('E1001')
+    @check_messages('slots-on-old-class', 'old-style-class')
     def visit_class(self, node):
-        """check __slots__ usage
-        """        
+        """ Check __slots__ in old style classes and old
+        style class definition.
+        """
         if '__slots__' in node and not node.newstyle:
-            self.add_message('E1001', node=node)
+            confidence = (INFERENCE if has_known_bases(node)
+                          else INFERENCE_FAILURE)
+            self.add_message('slots-on-old-class', node=node,
+                             confidence=confidence)
+        # The node type could be class, exception, metaclass, or
+        # interface.  Presumably, the non-class-type nodes would always
+        # have an explicit base class anyway.
+        if not node.bases and node.type == 'class' and not node.metaclass():
+            # We use confidence HIGH here because this message should only ever
+            # be emitted for classes at the root of the inheritance hierarchy.
+            self.add_message('old-style-class', node=node, confidence=HIGH)
 
-    @check_messages('W1001')
+    @check_messages('property-on-old-class')
     def visit_callfunc(self, node):
         """check property usage"""
         parent = node.parent.frame()
-        if (isinstance(parent, astng.Class) and
-            not parent.newstyle and
-            isinstance(node.func, astng.Name)):
+        if (isinstance(parent, astroid.Class) and
+                not parent.newstyle and
+                isinstance(node.func, astroid.Name)):
+            confidence = (INFERENCE if has_known_bases(parent)
+                          else INFERENCE_FAILURE)
             name = node.func.name
             if name == 'property':
-                self.add_message('W1001', node=node)
+                self.add_message('property-on-old-class', node=node,
+                                 confidence=confidence)
 
-    @check_messages('E1002', 'E1003')
+    @check_messages('super-on-old-class', 'bad-super-call', 'missing-super-argument')
     def visit_function(self, node):
         """check use of super"""
         # ignore actual functions or method within a new style class
         if not node.is_method():
             return
         klass = node.parent.frame()
-        for stmt in node.nodes_of_class(astng.CallFunc):
+        for stmt in node.nodes_of_class(astroid.CallFunc):
+            if node_frame_class(stmt) != node_frame_class(node):
+                # Don't look down in other scopes.
+                continue
             expr = stmt.func
-            if not isinstance(expr, astng.Getattr):
+            if not isinstance(expr, astroid.Getattr):
                 continue
             call = expr.expr
             # skip the test if using super
-            if isinstance(call, astng.CallFunc) and \
-               isinstance(call.func, astng.Name) and \
+            if isinstance(call, astroid.CallFunc) and \
+               isinstance(call.func, astroid.Name) and \
                call.func.name == 'super':
+                confidence = (INFERENCE if has_known_bases(klass)
+                              else INFERENCE_FAILURE)
                 if not klass.newstyle:
                     # super should not be used on an old style class
-                    self.add_message('E1002', node=node)
+                    self.add_message('super-on-old-class', node=node,
+                                     confidence=confidence)
                 else:
                     # super first arg should be the class
-                    try:
-                        supcls = (call.args and call.args[0].infer().next()
-                                  or None)
-                    except astng.InferenceError:
+                    if not call.args and sys.version_info[0] == 3:
+                        # unless Python 3
                         continue
+
+                    try:
+                        supcls = (call.args and next(call.args[0].infer())
+                                  or None)
+                    except astroid.InferenceError:
+                        continue
+
+                    if supcls is None:
+                        self.add_message('missing-super-argument', node=call,
+                                         confidence=confidence)
+                        continue
+
                     if klass is not supcls:
-                        supcls = getattr(supcls, 'name', supcls)
-                        self.add_message('E1003', node=node, args=supcls)
+                        name = None
+                        # if supcls is not YES, then supcls was inferred,
+                        # so use its name. Otherwise, try to look
+                        # for call.args[0].name
+                        if supcls is not astroid.YES:
+                            name = supcls.name
+                        else:
+                            if hasattr(call.args[0], 'name'):
+                                name = call.args[0].name
+                        if name is not None:
+                            self.add_message('bad-super-call',
+                                             node=call,
+                                             args=(name, ),
+                                             confidence=confidence)
 
 
 def register(linter):
diff --git a/third_party/pylint/checkers/python3.py b/third_party/pylint/checkers/python3.py
new file mode 100644
index 0000000..59c37bf
--- /dev/null
+++ b/third_party/pylint/checkers/python3.py
@@ -0,0 +1,476 @@
+# Copyright 2014 Google Inc.
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+"""Check Python 2 code for Python 2/3 source-compatible issues."""
+from __future__ import absolute_import
+
+import re
+import tokenize
+
+import astroid
+from pylint import checkers, interfaces
+from pylint.utils import WarningScope
+from pylint.checkers import utils
+
+
+_ZERO = re.compile("^0+$")
+
+def _is_old_octal(literal):
+    if _ZERO.match(literal):
+        return False
+    if re.match('0\d+', literal):
+        try:
+            int(literal, 8)
+        except ValueError:
+            return False
+        return True
+
+def _check_dict_node(node):
+    inferred_types = set()
+    try:
+        inferred = node.infer()
+        for inferred_node in inferred:
+            inferred_types.add(inferred_node)
+    except (astroid.InferenceError, astroid.UnresolvableName):
+        pass
+    return (not inferred_types
+            or any(isinstance(x, astroid.Dict) for x in inferred_types))
+
+
+class Python3Checker(checkers.BaseChecker):
+
+    __implements__ = interfaces.IAstroidChecker
+    enabled = False
+    name = 'python3'
+
+    msgs = {
+        # Errors for what will syntactically break in Python 3, warnings for
+        # everything else.
+        'E1601': ('print statement used',
+                  'print-statement',
+                  'Used when a print statement is used '
+                  '(`print` is a function in Python 3)',
+                  {'maxversion': (3, 0)}),
+        'E1602': ('Parameter unpacking specified',
+                  'parameter-unpacking',
+                  'Used when parameter unpacking is specified for a function'
+                  "(Python 3 doesn't allow it)",
+                  {'maxversion': (3, 0)}),
+        'E1603': ('Implicit unpacking of exceptions is not supported '
+                  'in Python 3',
+                  'unpacking-in-except',
+                  'Python3 will not allow implicit unpacking of '
+                  'exceptions in except clauses. '
+                  'See http://www.python.org/dev/peps/pep-3110/',
+                  {'maxversion': (3, 0),
+                   'old_names': [('W0712', 'unpacking-in-except')]}),
+        'E1604': ('Use raise ErrorClass(args) instead of '
+                  'raise ErrorClass, args.',
+                  'old-raise-syntax',
+                  "Used when the alternate raise syntax "
+                  "'raise foo, bar' is used "
+                  "instead of 'raise foo(bar)'.",
+                  {'maxversion': (3, 0),
+                   'old_names': [('W0121', 'old-raise-syntax')]}),
+        'E1605': ('Use of the `` operator',
+                  'backtick',
+                  'Used when the deprecated "``" (backtick) operator is used '
+                  'instead  of the str() function.',
+                  {'scope': WarningScope.NODE,
+                   'maxversion': (3, 0),
+                   'old_names': [('W0333', 'backtick')]}),
+        'W1601': ('apply built-in referenced',
+                  'apply-builtin',
+                  'Used when the apply built-in function is referenced '
+                  '(missing from Python 3)',
+                  {'maxversion': (3, 0)}),
+        'W1602': ('basestring built-in referenced',
+                  'basestring-builtin',
+                  'Used when the basestring built-in function is referenced '
+                  '(missing from Python 3)',
+                  {'maxversion': (3, 0)}),
+        'W1603': ('buffer built-in referenced',
+                  'buffer-builtin',
+                  'Used when the buffer built-in function is referenced '
+                  '(missing from Python 3)',
+                  {'maxversion': (3, 0)}),
+        'W1604': ('cmp built-in referenced',
+                  'cmp-builtin',
+                  'Used when the cmp built-in function is referenced '
+                  '(missing from Python 3)',
+                  {'maxversion': (3, 0)}),
+        'W1605': ('coerce built-in referenced',
+                  'coerce-builtin',
+                  'Used when the coerce built-in function is referenced '
+                  '(missing from Python 3)',
+                  {'maxversion': (3, 0)}),
+        'W1606': ('execfile built-in referenced',
+                  'execfile-builtin',
+                  'Used when the execfile built-in function is referenced '
+                  '(missing from Python 3)',
+                  {'maxversion': (3, 0)}),
+        'W1607': ('file built-in referenced',
+                  'file-builtin',
+                  'Used when the file built-in function is referenced '
+                  '(missing from Python 3)',
+                  {'maxversion': (3, 0)}),
+        'W1608': ('long built-in referenced',
+                  'long-builtin',
+                  'Used when the long built-in function is referenced '
+                  '(missing from Python 3)',
+                  {'maxversion': (3, 0)}),
+        'W1609': ('raw_input built-in referenced',
+                  'raw_input-builtin',
+                  'Used when the raw_input built-in function is referenced '
+                  '(missing from Python 3)',
+                  {'maxversion': (3, 0)}),
+        'W1610': ('reduce built-in referenced',
+                  'reduce-builtin',
+                  'Used when the reduce built-in function is referenced '
+                  '(missing from Python 3)',
+                  {'maxversion': (3, 0)}),
+        'W1611': ('StandardError built-in referenced',
+                  'standarderror-builtin',
+                  'Used when the StandardError built-in function is referenced '
+                  '(missing from Python 3)',
+                  {'maxversion': (3, 0)}),
+        'W1612': ('unicode built-in referenced',
+                  'unicode-builtin',
+                  'Used when the unicode built-in function is referenced '
+                  '(missing from Python 3)',
+                  {'maxversion': (3, 0)}),
+        'W1613': ('xrange built-in referenced',
+                  'xrange-builtin',
+                  'Used when the xrange built-in function is referenced '
+                  '(missing from Python 3)',
+                  {'maxversion': (3, 0)}),
+        'W1614': ('__coerce__ method defined',
+                  'coerce-method',
+                  'Used when a __coerce__ method is defined '
+                  '(method is not used by Python 3)',
+                  {'maxversion': (3, 0)}),
+        'W1615': ('__delslice__ method defined',
+                  'delslice-method',
+                  'Used when a __delslice__ method is defined '
+                  '(method is not used by Python 3)',
+                  {'maxversion': (3, 0)}),
+        'W1616': ('__getslice__ method defined',
+                  'getslice-method',
+                  'Used when a __getslice__ method is defined '
+                  '(method is not used by Python 3)',
+                  {'maxversion': (3, 0)}),
+        'W1617': ('__setslice__ method defined',
+                  'setslice-method',
+                  'Used when a __setslice__ method is defined '
+                  '(method is not used by Python 3)',
+                  {'maxversion': (3, 0)}),
+        'W1618': ('import missing `from __future__ import absolute_import`',
+                  'no-absolute-import',
+                  'Used when an import is not accompanied by '
+                  '`from __future__ import absolute_import`'
+                  ' (default behaviour in Python 3)',
+                  {'maxversion': (3, 0)}),
+        'W1619': ('division w/o __future__ statement',
+                  'old-division',
+                  'Used for non-floor division w/o a float literal or '
+                  '``from __future__ import division``'
+                  '(Python 3 returns a float for int division unconditionally)',
+                  {'maxversion': (3, 0)}),
+        'W1620': ('Calling a dict.iter*() method',
+                  'dict-iter-method',
+                  'Used for calls to dict.iterkeys(), itervalues() or iteritems() '
+                  '(Python 3 lacks these methods)',
+                  {'maxversion': (3, 0)}),
+        'W1621': ('Calling a dict.view*() method',
+                  'dict-view-method',
+                  'Used for calls to dict.viewkeys(), viewvalues() or viewitems() '
+                  '(Python 3 lacks these methods)',
+                  {'maxversion': (3, 0)}),
+        'W1622': ('Called a next() method on an object',
+                  'next-method-called',
+                  "Used when an object's next() method is called "
+                  '(Python 3 uses the next() built-in function)',
+                  {'maxversion': (3, 0)}),
+        'W1623': ("Assigning to a class' __metaclass__ attribute",
+                  'metaclass-assignment',
+                  "Used when a metaclass is specified by assigning to __metaclass__ "
+                  '(Python 3 specifies the metaclass as a class statement argument)',
+                  {'maxversion': (3, 0)}),
+        'W1624': ('Indexing exceptions will not work on Python 3',
+                  'indexing-exception',
+                  'Indexing exceptions will not work on Python 3. Use '
+                  '`exception.args[index]` instead.',
+                  {'maxversion': (3, 0),
+                   'old_names': [('W0713', 'indexing-exception')]}),
+        'W1625': ('Raising a string exception',
+                  'raising-string',
+                  'Used when a string exception is raised. This will not '
+                  'work on Python 3.',
+                  {'maxversion': (3, 0),
+                   'old_names': [('W0701', 'raising-string')]}),
+        'W1626': ('reload built-in referenced',
+                  'reload-builtin',
+                  'Used when the reload built-in function is referenced '
+                  '(missing from Python 3). You can use instead imp.reload '
+                  'or importlib.reload.',
+                  {'maxversion': (3, 0)}),
+        'W1627': ('__oct__ method defined',
+                  'oct-method',
+                  'Used when a __oct__ method is defined '
+                  '(method is not used by Python 3)',
+                  {'maxversion': (3, 0)}),
+        'W1628': ('__hex__ method defined',
+                  'hex-method',
+                  'Used when a __hex__ method is defined '
+                  '(method is not used by Python 3)',
+                  {'maxversion': (3, 0)}),
+        'W1629': ('__nonzero__ method defined',
+                  'nonzero-method',
+                  'Used when a __nonzero__ method is defined '
+                  '(method is not used by Python 3)',
+                  {'maxversion': (3, 0)}),
+        'W1630': ('__cmp__ method defined',
+                  'cmp-method',
+                  'Used when a __cmp__ method is defined '
+                  '(method is not used by Python 3)',
+                  {'maxversion': (3, 0)}),
+        'W1631': ('map is used as implicitly evaluated call',
+                  'implicit-map-evaluation',
+                  'Used when the map builtin is used as implicitly '
+                  'evaluated call, as in "map(func, args)" on a single line. '
+                  'This behaviour will not work in Python 3, where '
+                  'map is a generator and must be evaluated. '
+                  'Prefer a for-loop as alternative.',
+                  {'maxversion': (3, 0)}),
+        'W1632': ('input built-in referenced',
+                  'input-builtin',
+                  'Used when the input built-in is referenced '
+                  '(backwards-incompatible semantics in Python 3)',
+                  {'maxversion': (3, 0)}),
+        'W1633': ('round built-in referenced',
+                  'round-builtin',
+                  'Used when the round built-in is referenced '
+                  '(backwards-incompatible semantics in Python 3)',
+                  {'maxversion': (3, 0)}),
+    }
+
+    _bad_builtins = frozenset([
+        'apply',
+        'basestring',
+        'buffer',
+        'cmp',
+        'coerce',
+        'execfile',
+        'file',
+        'input',  # Not missing, but incompatible semantics
+        'long',
+        'raw_input',
+        'reduce',
+        'round',  # Not missing, but incompatible semantics
+        'StandardError',
+        'unicode',
+        'xrange',
+        'reload',
+    ])
+
+    _unused_magic_methods = frozenset([
+        '__coerce__',
+        '__delslice__',
+        '__getslice__',
+        '__setslice__',
+        '__oct__',
+        '__hex__',
+        '__nonzero__',
+        '__cmp__',
+    ])
+
+    def __init__(self, *args, **kwargs):
+        self._future_division = False
+        self._future_absolute_import = False
+        super(Python3Checker, self).__init__(*args, **kwargs)
+
+    def visit_function(self, node):
+        if node.is_method() and node.name in self._unused_magic_methods:
+            method_name = node.name
+            if node.name.startswith('__'):
+                method_name = node.name[2:-2]
+            self.add_message(method_name + '-method', node=node)
+
+    @utils.check_messages('parameter-unpacking')
+    def visit_arguments(self, node):
+        for arg in node.args:
+            if isinstance(arg, astroid.Tuple):
+                self.add_message('parameter-unpacking', node=arg)
+
+    @utils.check_messages('implicit-map-evaluation')
+    def visit_discard(self, node):
+        if (isinstance(node.value, astroid.CallFunc) and
+                isinstance(node.value.func, astroid.Name) and
+                node.value.func.name == 'map'):
+            module = node.value.func.lookup('map')[0]
+            if getattr(module, 'name', None) == '__builtin__':
+                self.add_message('implicit-map-evaluation', node=node)
+
+    def visit_name(self, node):
+        """Detect when a "bad" built-in is referenced."""
+        found_node = node.lookup(node.name)[0]
+        if getattr(found_node, 'name', None) == '__builtin__':
+            if node.name in self._bad_builtins:
+                message = node.name.lower() + '-builtin'
+                self.add_message(message, node=node)
+
+    @utils.check_messages('print-statement')
+    def visit_print(self, node):
+        self.add_message('print-statement', node=node)
+
+    @utils.check_messages('no-absolute-import')
+    def visit_from(self, node):
+        if node.modname == '__future__':
+            for name, _ in node.names:
+                if name == 'division':
+                    self._future_division = True
+                elif name == 'absolute_import':
+                    self._future_absolute_import = True
+        elif not self._future_absolute_import:
+            self.add_message('no-absolute-import', node=node)
+
+    @utils.check_messages('no-absolute-import')
+    def visit_import(self, node):
+        if not self._future_absolute_import:
+            self.add_message('no-absolute-import', node=node)
+
+    @utils.check_messages('metaclass-assignment')
+    def visit_class(self, node):
+        if '__metaclass__' in node.locals:
+            self.add_message('metaclass-assignment', node=node)
+
+    @utils.check_messages('old-division')
+    def visit_binop(self, node):
+        if not self._future_division and node.op == '/':
+            for arg in (node.left, node.right):
+                if isinstance(arg, astroid.Const) and isinstance(arg.value, float):
+                    break
+            else:
+                self.add_message('old-division', node=node)
+
+    @utils.check_messages('next-method-called',
+                          'dict-iter-method',
+                          'dict-view-method')
+    def visit_callfunc(self, node):
+        if not isinstance(node.func, astroid.Getattr):
+            return
+        if any([node.args, node.starargs, node.kwargs]):
+            return
+        if node.func.attrname == 'next':
+            self.add_message('next-method-called', node=node)
+        else:
+            if _check_dict_node(node.func.expr):
+                if node.func.attrname in ('iterkeys', 'itervalues', 'iteritems'):
+                    self.add_message('dict-iter-method', node=node)
+                elif node.func.attrname in ('viewkeys', 'viewvalues', 'viewitems'):
+                    self.add_message('dict-view-method', node=node)
+
+    @utils.check_messages('indexing-exception')
+    def visit_subscript(self, node):
+        """ Look for indexing exceptions. """
+        try:
+            for infered in node.value.infer():
+                if not isinstance(infered, astroid.Instance):
+                    continue
+                if utils.inherit_from_std_ex(infered):
+                    self.add_message('indexing-exception', node=node)
+        except astroid.InferenceError:
+            return
+
+    @utils.check_messages('unpacking-in-except')
+    def visit_excepthandler(self, node):
+        """Visit an except handler block and check for exception unpacking."""
+        if isinstance(node.name, (astroid.Tuple, astroid.List)):
+            self.add_message('unpacking-in-except', node=node)
+
+    @utils.check_messages('backtick')
+    def visit_backquote(self, node):
+        self.add_message('backtick', node=node)
+
+    @utils.check_messages('raising-string', 'old-raise-syntax')
+    def visit_raise(self, node):
+        """Visit a raise statement and check for raising
+        strings or old-raise-syntax.
+        """
+        if (node.exc is not None and
+                node.inst is not None and
+                node.tback is None):
+            self.add_message('old-raise-syntax', node=node)
+
+        # Ignore empty raise.
+        if node.exc is None:
+            return
+        expr = node.exc
+        if self._check_raise_value(node, expr):
+            return
+        else:
+            try:
+                value = next(astroid.unpack_infer(expr))
+            except astroid.InferenceError:
+                return
+            self._check_raise_value(node, value)
+
+    def _check_raise_value(self, node, expr):
+        if isinstance(expr, astroid.Const):
+            value = expr.value
+            if isinstance(value, str):
+                self.add_message('raising-string', node=node)
+                return True
+
+
+class Python3TokenChecker(checkers.BaseTokenChecker):
+    __implements__ = interfaces.ITokenChecker
+    name = 'python3'
+    enabled = False
+
+    msgs = {
+        'E1606': ('Use of long suffix',
+                  'long-suffix',
+                  'Used when "l" or "L" is used to mark a long integer. '
+                  'This will not work in Python 3, since `int` and `long` '
+                  'types have merged.',
+                  {'maxversion': (3, 0)}),
+        'E1607': ('Use of the <> operator',
+                  'old-ne-operator',
+                  'Used when the deprecated "<>" operator is used instead '
+                  'of "!=". This is removed in Python 3.',
+                  {'maxversion': (3, 0),
+                   'old_names': [('W0331', 'old-ne-operator')]}),
+        'E1608': ('Use of old octal literal',
+                  'old-octal-literal',
+                  'Usen when encountering the old octal syntax, '
+                  'removed in Python 3. To use the new syntax, '
+                  'prepend 0o on the number.',
+                  {'maxversion': (3, 0)}),
+    }
+
+    def process_tokens(self, tokens):
+        for idx, (tok_type, token, start, _, _) in enumerate(tokens):
+            if tok_type == tokenize.NUMBER:
+                if token.lower().endswith('l'):
+                    # This has a different semantic than lowercase-l-suffix.
+                    self.add_message('long-suffix', line=start[0])
+                elif _is_old_octal(token):
+                    self.add_message('old-octal-literal', line=start[0])
+            if tokens[idx][1] == '<>':
+                self.add_message('old-ne-operator', line=tokens[idx][2][0])
+
+
+def register(linter):
+    linter.register_checker(Python3Checker(linter))
+    linter.register_checker(Python3TokenChecker(linter))
diff --git a/third_party/pylint/checkers/raw_metrics.py b/third_party/pylint/checkers/raw_metrics.py
index 872ca7b..71fecf6 100644
--- a/third_party/pylint/checkers/raw_metrics.py
+++ b/third_party/pylint/checkers/raw_metrics.py
@@ -1,3 +1,6 @@
+# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
+# http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
 # This program is free software; you can redistribute it and/or modify it under
 # the terms of the GNU General Public License as published by the Free Software
 # Foundation; either version 2 of the License, or (at your option) any later
@@ -9,7 +12,7 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """ Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
  http://www.logilab.fr/ -- mailto:contact@logilab.fr
 
@@ -24,8 +27,9 @@
 
 from logilab.common.ureports import Table
 
-from pylint.interfaces import IRawChecker
-from pylint.checkers import BaseRawChecker, EmptyReport
+from pylint.interfaces import ITokenChecker
+from pylint.utils import EmptyReport
+from pylint.checkers import BaseTokenChecker
 from pylint.reporters import diff_string
 
 def report_raw_stats(sect, stats, old_stats):
@@ -50,28 +54,28 @@
     sect.append(Table(children=lines, cols=5, rheaders=1))
 
 
-class RawMetricsChecker(BaseRawChecker):
-    """does not check anything but gives some raw metrics :                    
-    * total number of lines                                                    
-    * total number of code lines                                               
-    * total number of docstring lines                                          
-    * total number of comments lines                                           
-    * total number of empty lines                                              
+class RawMetricsChecker(BaseTokenChecker):
+    """does not check anything but gives some raw metrics :
+    * total number of lines
+    * total number of code lines
+    * total number of docstring lines
+    * total number of comments lines
+    * total number of empty lines
     """
 
-    __implements__ = (IRawChecker,)
+    __implements__ = (ITokenChecker,)
 
     # configuration section name
     name = 'metrics'
     # configuration options
-    options = ( )
+    options = ()
     # messages
     msgs = {}
     # reports
-    reports = ( ('RP0701', 'Raw metrics', report_raw_stats), )
+    reports = (('RP0701', 'Raw metrics', report_raw_stats),)
 
     def __init__(self, linter):
-        BaseRawChecker.__init__(self, linter)
+        BaseTokenChecker.__init__(self, linter)
         self.stats = None
 
     def open(self):
diff --git a/third_party/pylint/checkers/similar.py b/third_party/pylint/checkers/similar.py
index 1e38ed6..9542077 100644
--- a/third_party/pylint/checkers/similar.py
+++ b/third_party/pylint/checkers/similar.py
@@ -1,5 +1,5 @@
 # pylint: disable=W0622
-# Copyright (c) 2004-2006 LOGILAB S.A. (Paris, FRANCE).
+# Copyright (c) 2004-2013 LOGILAB S.A. (Paris, FRANCE).
 # http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This program is free software; you can redistribute it and/or modify it under
@@ -13,37 +13,47 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """a similarities / code duplication command line tool and pylint checker
 """
-from __future__ import generators
-
+from __future__ import print_function
 import sys
-from itertools import izip
+from collections import defaultdict
 
 from logilab.common.ureports import Table
 
 from pylint.interfaces import IRawChecker
 from pylint.checkers import BaseChecker, table_lines_from_stats
 
+import six
+from six.moves import zip
 
-class Similar:
+
+class Similar(object):
     """finds copy-pasted lines of code in a project"""
 
     def __init__(self, min_lines=4, ignore_comments=False,
-                 ignore_docstrings=False):
+                 ignore_docstrings=False, ignore_imports=False):
         self.min_lines = min_lines
         self.ignore_comments = ignore_comments
         self.ignore_docstrings = ignore_docstrings
+        self.ignore_imports = ignore_imports
         self.linesets = []
 
-    def append_stream(self, streamid, stream):
+    def append_stream(self, streamid, stream, encoding=None):
         """append a file to search for similarities"""
-        stream.seek(0) # XXX may be removed with astng > 0.23
-        self.linesets.append(LineSet(streamid,
-                                     stream.readlines(),
-                                     self.ignore_comments,
-                                     self.ignore_docstrings))
+        if encoding is None:
+            readlines = stream.readlines
+        else:
+            readlines = lambda: [line.decode(encoding) for line in stream]
+        try:
+            self.linesets.append(LineSet(streamid,
+                                         readlines(),
+                                         self.ignore_comments,
+                                         self.ignore_docstrings,
+                                         self.ignore_imports))
+        except UnicodeDecodeError:
+            pass
 
     def run(self):
         """start looking for similarities and display results on stdout"""
@@ -51,20 +61,20 @@
 
     def _compute_sims(self):
         """compute similarities in appended files"""
-        no_duplicates = {}
+        no_duplicates = defaultdict(list)
         for num, lineset1, idx1, lineset2, idx2 in self._iter_sims():
-            duplicate = no_duplicates.setdefault(num, [])
+            duplicate = no_duplicates[num]
             for couples in duplicate:
                 if (lineset1, idx1) in couples or (lineset2, idx2) in couples:
-                    couples.add( (lineset1, idx1) )
-                    couples.add( (lineset2, idx2) )
+                    couples.add((lineset1, idx1))
+                    couples.add((lineset2, idx2))
                     break
             else:
-                duplicate.append( set([(lineset1, idx1), (lineset2, idx2)]) )
+                duplicate.append(set([(lineset1, idx1), (lineset2, idx2)]))
         sims = []
-        for num, ensembles in no_duplicates.iteritems():
+        for num, ensembles in six.iteritems(no_duplicates):
             for couples in ensembles:
-                sims.append( (num, couples) )
+                sims.append((num, couples))
         sims.sort()
         sims.reverse()
         return sims
@@ -73,19 +83,19 @@
         """display computed similarities on stdout"""
         nb_lignes_dupliquees = 0
         for num, couples in sims:
-            print
-            print num, "similar lines in", len(couples), "files"
+            print()
+            print(num, "similar lines in", len(couples), "files")
             couples = sorted(couples)
             for lineset, idx in couples:
-                print "==%s:%s" % (lineset.name, idx)
+                print("==%s:%s" % (lineset.name, idx))
             # pylint: disable=W0631
             for line in lineset._real_lines[idx:idx+num]:
-                print "  ", line,
+                print("  ", line.rstrip())
             nb_lignes_dupliquees += num * (len(couples)-1)
         nb_total_lignes = sum([len(lineset) for lineset in self.linesets])
-        print "TOTAL lines=%s duplicates=%s percent=%.2f" \
+        print("TOTAL lines=%s duplicates=%s percent=%.2f" \
             % (nb_total_lignes, nb_lignes_dupliquees,
-               nb_lignes_dupliquees*100. / nb_total_lignes)
+               nb_lignes_dupliquees*100. / nb_total_lignes))
 
     def _find_common(self, lineset1, lineset2):
         """find similarities in the two given linesets"""
@@ -97,10 +107,10 @@
         while index1 < len(lineset1):
             skip = 1
             num = 0
-            for index2 in find( lineset1[index1] ):
+            for index2 in find(lineset1[index1]):
                 non_blank = 0
                 for num, ((_, line1), (_, line2)) in enumerate(
-                    izip(lines1(index1), lines2(index2))):
+                        zip(lines1(index1), lines2(index2))):
                     if line1 != line2:
                         if non_blank > min_lines:
                             yield num, lineset1, index1, lineset2, index2
@@ -125,7 +135,11 @@
                 for sim in self._find_common(lineset, lineset2):
                     yield sim
 
-def stripped_lines(lines, ignore_comments, ignore_docstrings):
+def stripped_lines(lines, ignore_comments, ignore_docstrings, ignore_imports):
+    """return lines with leading/trailing whitespace and any ignored code
+    features removed
+    """
+
     strippedlines = []
     docstring = None
     for line in lines:
@@ -139,6 +153,9 @@
                 if line.endswith(docstring):
                     docstring = None
                 line = ''
+        if ignore_imports:
+            if line.startswith("import ") or line.startswith("from "):
+                line = ''
         if ignore_comments:
             # XXX should use regex in checkers/format to avoid cutting
             # at a "#" in a string
@@ -146,14 +163,16 @@
         strippedlines.append(line)
     return strippedlines
 
-class LineSet:
+
+class LineSet(object):
     """Holds and indexes all the lines of a single source file"""
     def __init__(self, name, lines, ignore_comments=False,
-                 ignore_docstrings=False):
+                 ignore_docstrings=False, ignore_imports=False):
         self.name = name
         self._real_lines = lines
         self._stripped_lines = stripped_lines(lines, ignore_comments,
-                                              ignore_docstrings)
+                                              ignore_docstrings,
+                                              ignore_imports)
         self._index = self._mk_index()
 
     def __str__(self):
@@ -191,14 +210,15 @@
 
     def _mk_index(self):
         """create the index for this set"""
-        index = {}
+        index = defaultdict(list)
         for line_no, line in enumerate(self._stripped_lines):
             if line:
-                index.setdefault(line, []).append( line_no )
+                index[line].append(line_no)
         return index
 
 
 MSGS = {'R0801': ('Similar lines in %s files\n%s',
+                  'duplicate-code',
                   'Indicates that a set of similar lines has been detected \
                   among multiple file. This usually means that the code should \
                   be refactored to avoid this duplication.')}
@@ -232,14 +252,18 @@
                ('ignore-comments',
                 {'default' : True, 'type' : 'yn', 'metavar' : '<y or n>',
                  'help': 'Ignore comments when computing similarities.'}
-                ),
+               ),
                ('ignore-docstrings',
                 {'default' : True, 'type' : 'yn', 'metavar' : '<y or n>',
                  'help': 'Ignore docstrings when computing similarities.'}
-                ),
-               )
+               ),
+               ('ignore-imports',
+                {'default' : False, 'type' : 'yn', 'metavar' : '<y or n>',
+                 'help': 'Ignore imports when computing similarities.'}
+               ),
+              )
     # reports
-    reports = ( ('R0801', 'Duplication', report_similarities), ) # XXX actually a Refactoring message
+    reports = (('RP0801', 'Duplication', report_similarities),)
 
     def __init__(self, linter=None):
         BaseChecker.__init__(self, linter)
@@ -259,6 +283,8 @@
             self.ignore_comments = self.config.ignore_comments
         elif optname == 'ignore-docstrings':
             self.ignore_docstrings = self.config.ignore_docstrings
+        elif optname == 'ignore-imports':
+            self.ignore_imports = self.config.ignore_imports
 
     def open(self):
         """init the checkers: reset linesets and statistics information"""
@@ -273,7 +299,10 @@
 
         stream must implement the readlines method
         """
-        self.append_stream(self.linter.current_name, node.file_stream)
+        with node.stream() as stream:
+            self.append_stream(self.linter.current_name,
+                               stream,
+                               node.file_encoding)
 
     def close(self):
         """compute and display similarities on closing (i.e. end of parsing)"""
@@ -300,21 +329,24 @@
 
 def usage(status=0):
     """display command line usage information"""
-    print "finds copy pasted blocks in a set of files"
-    print
-    print 'Usage: symilar [-d|--duplicates min_duplicated_lines] \
-[-i|--ignore-comments] file1...'
+    print("finds copy pasted blocks in a set of files")
+    print()
+    print('Usage: symilar [-d|--duplicates min_duplicated_lines] \
+[-i|--ignore-comments] [--ignore-docstrings] [--ignore-imports] file1...')
     sys.exit(status)
 
-def run(argv=None):
+def Run(argv=None):
     """standalone command line access point"""
     if argv is None:
         argv = sys.argv[1:]
     from getopt import getopt
     s_opts = 'hdi'
-    l_opts = ('help', 'duplicates=', 'ignore-comments')
+    l_opts = ('help', 'duplicates=', 'ignore-comments', 'ignore-imports',
+              'ignore-docstrings')
     min_lines = 4
     ignore_comments = False
+    ignore_docstrings = False
+    ignore_imports = False
     opts, args = getopt(argv, s_opts, l_opts)
     for opt, val in opts:
         if opt in ('-d', '--duplicates'):
@@ -323,12 +355,18 @@
             usage()
         elif opt in ('-i', '--ignore-comments'):
             ignore_comments = True
+        elif opt in ('--ignore-docstrings',):
+            ignore_docstrings = True
+        elif opt in ('--ignore-imports',):
+            ignore_imports = True
     if not args:
         usage(1)
-    sim = Similar(min_lines, ignore_comments)
+    sim = Similar(min_lines, ignore_comments, ignore_docstrings, ignore_imports)
     for filename in args:
-        sim.append_stream(filename, open(filename))
+        with open(filename) as stream:
+            sim.append_stream(filename, stream)
     sim.run()
+    sys.exit(0)
 
 if __name__ == '__main__':
-    run()
+    Run()
diff --git a/third_party/pylint/checkers/spelling.py b/third_party/pylint/checkers/spelling.py
new file mode 100644
index 0000000..6cc604a
--- /dev/null
+++ b/third_party/pylint/checkers/spelling.py
@@ -0,0 +1,241 @@
+# Copyright 2014 Michal Nowikowski.
+#
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+"""Checker for spelling errors in comments and docstrings.
+"""
+
+import sys
+import tokenize
+import string
+import re
+
+if sys.version_info[0] >= 3:
+    maketrans = str.maketrans
+else:
+    maketrans = string.maketrans
+
+from pylint.interfaces import ITokenChecker, IAstroidChecker
+from pylint.checkers import BaseTokenChecker
+from pylint.checkers.utils import check_messages
+
+try:
+    import enchant
+except ImportError:
+    enchant = None
+
+if enchant is not None:
+    br = enchant.Broker()
+    dicts = br.list_dicts()
+    dict_choices = [''] + [d[0] for d in dicts]
+    dicts = ["%s (%s)" % (d[0], d[1].name) for d in dicts]
+    dicts = ", ".join(dicts)
+    instr = ""
+else:
+    dicts = "none"
+    dict_choices = ['']
+    instr = " To make it working install python-enchant package."
+
+table = maketrans("", "")
+
+class SpellingChecker(BaseTokenChecker):
+    """Check spelling in comments and docstrings"""
+    __implements__ = (ITokenChecker, IAstroidChecker)
+    name = 'spelling'
+    msgs = {
+        'C0401': ('Wrong spelling of a word \'%s\' in a comment:\n%s\n'
+                  '%s\nDid you mean: \'%s\'?',
+                  'wrong-spelling-in-comment',
+                  'Used when a word in comment is not spelled correctly.'),
+        'C0402': ('Wrong spelling of a word \'%s\' in a docstring:\n%s\n'
+                  '%s\nDid you mean: \'%s\'?',
+                  'wrong-spelling-in-docstring',
+                  'Used when a word in docstring is not spelled correctly.'),
+        }
+    options = (('spelling-dict',
+                {'default' : '', 'type' : 'choice', 'metavar' : '<dict name>',
+                 'choices': dict_choices,
+                 'help' : 'Spelling dictionary name. '
+                          'Available dictionaries: %s.%s' % (dicts, instr)}),
+               ('spelling-ignore-words',
+                {'default' : '',
+                 'type' : 'string',
+                 'metavar' : '<comma separated words>',
+                 'help' : 'List of comma separated words that '
+                          'should not be checked.'}),
+               ('spelling-private-dict-file',
+                {'default' : '',
+                 'type' : 'string',
+                 'metavar' : '<path to file>',
+                 'help' : 'A path to a file that contains private '
+                          'dictionary; one word per line.'}),
+               ('spelling-store-unknown-words',
+                {'default' : 'n', 'type' : 'yn', 'metavar' : '<y_or_n>',
+                 'help' : 'Tells whether to store unknown words to '
+                          'indicated private dictionary in '
+                          '--spelling-private-dict-file option instead of '
+                          'raising a message.'}),
+              )
+
+    def open(self):
+        self.initialized = False
+        self.private_dict_file = None
+
+        if enchant is None:
+            return
+        dict_name = self.config.spelling_dict
+        if not dict_name:
+            return
+
+        self.ignore_list = [w.strip() for w in self.config.spelling_ignore_words.split(",")]
+        # "param" appears in docstring in param description and
+        # "pylint" appears in comments in pylint pragmas.
+        self.ignore_list.extend(["param", "pylint"])
+
+        if self.config.spelling_private_dict_file:
+            self.spelling_dict = enchant.DictWithPWL(
+                dict_name, self.config.spelling_private_dict_file)
+            self.private_dict_file = open(
+                self.config.spelling_private_dict_file, "a")
+        else:
+            self.spelling_dict = enchant.Dict(dict_name)
+
+        if self.config.spelling_store_unknown_words:
+            self.unknown_words = set()
+
+        # Prepare regex for stripping punctuation signs from text.
+        # ' and _ are treated in a special way.
+        puncts = string.punctuation.replace("'", "").replace("_", "")
+        self.punctuation_regex = re.compile('[%s]' % re.escape(puncts))
+        self.initialized = True
+
+    def close(self):
+        if self.private_dict_file:
+            self.private_dict_file.close()
+
+    def _check_spelling(self, msgid, line, line_num):
+        line2 = line.strip()
+        # Replace ['afadf with afadf (but preserve don't)
+        line2 = re.sub("'([^a-zA-Z]|$)", " ", line2)
+        # Replace afadf'] with afadf (but preserve don't)
+        line2 = re.sub("([^a-zA-Z]|^)'", " ", line2)
+        # Replace punctuation signs with space e.g. and/or -> and or
+        line2 = self.punctuation_regex.sub(' ', line2)
+
+        words = []
+        for word in line2.split():
+            # Skip words with digits.
+            if len(re.findall(r"\d", word)) > 0:
+                continue
+
+            # Skip words with mixed big and small letters,
+            # they are probably class names.
+            if (len(re.findall("[A-Z]", word)) > 0 and
+                    len(re.findall("[a-z]", word)) > 0 and
+                    len(word) > 2):
+                continue
+
+            # Skip words with _ - they are probably function parameter names.
+            if word.count('_') > 0:
+                continue
+
+            words.append(word)
+
+        # Go through words and check them.
+        for word in words:
+            # Skip words from ignore list.
+            if word in self.ignore_list:
+                continue
+
+            orig_word = word
+            word = word.lower()
+
+            # Strip starting u' from unicode literals and r' from raw strings.
+            if (word.startswith("u'") or
+                    word.startswith('u"') or
+                    word.startswith("r'") or
+                    word.startswith('r"')) and len(word) > 2:
+                word = word[2:]
+
+            # If it is a known word, then continue.
+            if self.spelling_dict.check(word):
+                continue
+
+            # Store word to private dict or raise a message.
+            if self.config.spelling_store_unknown_words:
+                if word not in self.unknown_words:
+                    self.private_dict_file.write("%s\n" % word)
+                    self.unknown_words.add(word)
+            else:
+                # Present up to 4 suggestions.
+                # TODO: add support for customising this.
+                suggestions = self.spelling_dict.suggest(word)[:4]
+
+                m = re.search(r"(\W|^)(%s)(\W|$)" % word, line.lower())
+                if m:
+                    # Start position of second group in regex.
+                    col = m.regs[2][0]
+                else:
+                    col = line.lower().index(word)
+                indicator = (" " * col) + ("^" * len(word))
+
+                self.add_message(msgid, line=line_num,
+                                 args=(orig_word, line,
+                                       indicator,
+                                       "' or '".join(suggestions)))
+
+    def process_tokens(self, tokens):
+        if not self.initialized:
+            return
+
+        # Process tokens and look for comments.
+        for (tok_type, token, (start_row, _), _, _) in tokens:
+            if tok_type == tokenize.COMMENT:
+                self._check_spelling('wrong-spelling-in-comment',
+                                     token, start_row)
+
+    @check_messages('wrong-spelling-in-docstring')
+    def visit_module(self, node):
+        if not self.initialized:
+            return
+        self._check_docstring(node)
+
+    @check_messages('wrong-spelling-in-docstring')
+    def visit_class(self, node):
+        if not self.initialized:
+            return
+        self._check_docstring(node)
+
+    @check_messages('wrong-spelling-in-docstring')
+    def visit_function(self, node):
+        if not self.initialized:
+            return
+        self._check_docstring(node)
+
+    def _check_docstring(self, node):
+        """check the node has any spelling errors"""
+        docstring = node.doc
+        if not docstring:
+            return
+
+        start_line = node.lineno + 1
+
+        # Go through lines of docstring
+        for idx, line in enumerate(docstring.splitlines()):
+            self._check_spelling('wrong-spelling-in-docstring',
+                                 line, start_line + idx)
+
+
+def register(linter):
+    """required method to auto register this checker """
+    linter.register_checker(SpellingChecker(linter))
diff --git a/third_party/pylint/checkers/stdlib.py b/third_party/pylint/checkers/stdlib.py
new file mode 100644
index 0000000..b6b8026
--- /dev/null
+++ b/third_party/pylint/checkers/stdlib.py
@@ -0,0 +1,173 @@
+# Copyright 2012 Google Inc.
+#
+# http://www.logilab.fr/ -- mailto:contact@logilab.fr
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+"""Checkers for various standard library functions."""
+
+import re
+import six
+import sys
+
+import astroid
+from astroid.bases import Instance
+
+from pylint.interfaces import IAstroidChecker
+from pylint.checkers import BaseChecker
+from pylint.checkers import utils
+
+
+if sys.version_info >= (3, 0):
+    OPEN_MODULE = '_io'
+else:
+    OPEN_MODULE = '__builtin__'
+
+
+def _check_mode_str(mode):
+    # check type
+    if not isinstance(mode, six.string_types):
+        return False
+    # check syntax
+    modes = set(mode)
+    _mode = "rwatb+U"
+    creating = False
+    if six.PY3:
+        _mode += "x"
+        creating = "x" in modes
+    if modes - set(_mode) or len(mode) > len(modes):
+        return False
+    # check logic
+    reading = "r" in modes
+    writing = "w" in modes
+    appending = "a" in modes
+    updating = "+" in modes
+    text = "t" in modes
+    binary = "b" in modes
+    if "U" in modes:
+        if writing or appending or creating and six.PY3:
+            return False
+        reading = True
+        if not six.PY3:
+            binary = True
+    if text and binary:
+        return False
+    total = reading + writing + appending + (creating if six.PY3 else 0)
+    if total > 1:
+        return False
+    if not (reading or writing or appending or creating and six.PY3):
+        return False
+    # other 2.x constraints
+    if not six.PY3:
+        if "U" in mode:
+            mode = mode.replace("U", "")
+            if "r" not in mode:
+                mode = "r" + mode
+        return mode[0] in ("r", "w", "a", "U")
+    return True
+
+
+class StdlibChecker(BaseChecker):
+    __implements__ = (IAstroidChecker,)
+    name = 'stdlib'
+
+    msgs = {
+        'W1501': ('"%s" is not a valid mode for open.',
+                  'bad-open-mode',
+                  'Python supports: r, w, a[, x] modes with b, +, '
+                  'and U (only with r) options. '
+                  'See http://docs.python.org/2/library/functions.html#open'),
+        'W1502': ('Using datetime.time in a boolean context.',
+                  'boolean-datetime',
+                  'Using datetetime.time in a boolean context can hide '
+                  'subtle bugs when the time they represent matches '
+                  'midnight UTC. This behaviour was fixed in Python 3.5. '
+                  'See http://bugs.python.org/issue13936 for reference.',
+                  {'maxversion': (3, 5)}),
+        'W1503': ('Redundant use of %s with constant '
+                  'value %r',
+                  'redundant-unittest-assert',
+                  'The first argument of assertTrue and assertFalse is'
+                  'a condition. If a constant is passed as parameter, that'
+                  'condition will be always true. In this case a warning '
+                  'should be emitted.')
+    }
+
+    @utils.check_messages('bad-open-mode', 'redundant-unittest-assert')
+    def visit_callfunc(self, node):
+        """Visit a CallFunc node."""
+        if hasattr(node, 'func'):
+            infer = utils.safe_infer(node.func)
+            if infer:
+                if infer.root().name == OPEN_MODULE:
+                    if getattr(node.func, 'name', None) in ('open', 'file'):
+                        self._check_open_mode(node)
+                if infer.root().name == 'unittest.case':
+                    self._check_redundant_assert(node, infer)
+
+    @utils.check_messages('boolean-datetime')
+    def visit_unaryop(self, node):
+        if node.op == 'not':
+            self._check_datetime(node.operand)
+
+    @utils.check_messages('boolean-datetime')
+    def visit_if(self, node):
+        self._check_datetime(node.test)
+
+    @utils.check_messages('boolean-datetime')
+    def visit_ifexp(self, node):
+        self._check_datetime(node.test)
+
+    @utils.check_messages('boolean-datetime')
+    def visit_boolop(self, node):
+        for value in node.values:
+            self._check_datetime(value)
+
+    def _check_redundant_assert(self, node, infer):
+        if (isinstance(infer, astroid.BoundMethod) and
+                node.args and isinstance(node.args[0], astroid.Const) and
+                infer.name in ['assertTrue', 'assertFalse']):
+            self.add_message('redundant-unittest-assert',
+                             args=(infer.name, node.args[0].value, ),
+                             node=node)
+
+    def _check_datetime(self, node):
+        """ Check that a datetime was infered.
+        If so, emit boolean-datetime warning.
+        """
+        try:
+            infered = next(node.infer())
+        except astroid.InferenceError:
+            return
+        if (isinstance(infered, Instance) and
+                infered.qname() == 'datetime.time'):
+            self.add_message('boolean-datetime', node=node)
+
+
+    def _check_open_mode(self, node):
+        """Check that the mode argument of an open or file call is valid."""
+        try:
+            mode_arg = utils.get_argument_from_call(node, position=1,
+                                                    keyword='mode')
+        except utils.NoSuchArgumentError:
+            return
+        if mode_arg:
+            mode_arg = utils.safe_infer(mode_arg)
+            if (isinstance(mode_arg, astroid.Const)
+                    and not _check_mode_str(mode_arg.value)):
+                self.add_message('bad-open-mode', node=node,
+                                 args=mode_arg.value)
+
+
+def register(linter):
+    """required method to auto register this checker """
+    linter.register_checker(StdlibChecker(linter))
diff --git a/third_party/pylint/checkers/string_format.py b/third_party/pylint/checkers/string_format.py
deleted file mode 100644
index c420a60..0000000
--- a/third_party/pylint/checkers/string_format.py
+++ /dev/null
@@ -1,157 +0,0 @@
-# Copyright (c) 2009-2010 Arista Networks, Inc. - James Lingard
-# Copyright (c) 2004-2010 LOGILAB S.A. (Paris, FRANCE).
-# http://www.logilab.fr/ -- mailto:contact@logilab.fr
-# This program is free software; you can redistribute it and/or modify it under
-# the terms of the GNU General Public License as published by the Free Software
-# Foundation; either version 2 of the License, or (at your option) any later
-# version.
-#
-# This program is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
-#
-# You should have received a copy of the GNU General Public License along with
-# this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
-
-
-"""Checker for string formatting operations.
-"""
-
-import string
-from logilab import astng
-from pylint.interfaces import IASTNGChecker
-from pylint.checkers import BaseChecker
-from pylint.checkers import utils
-
-
-MSGS = {
-    'E1300': ("Unsupported format character %r (%#02x) at index %d",
-              "Used when a unsupported format character is used in a format\
-              string."),
-    'E1301': ("Format string ends in middle of conversion specifier",
-              "Used when a format string terminates before the end of a \
-              conversion specifier."),
-    'E1302': ("Mixing named and unnamed conversion specifiers in format string",
-              "Used when a format string contains both named (e.g. '%(foo)d') \
-              and unnamed (e.g. '%d') conversion specifiers.  This is also \
-              used when a named conversion specifier contains * for the \
-              minimum field width and/or precision."),
-    'E1303': ("Expected mapping for format string, not %s",
-              "Used when a format string that uses named conversion specifiers \
-              is used with an argument that is not a mapping."),
-    'W1300': ("Format string dictionary key should be a string, not %s",
-              "Used when a format string that uses named conversion specifiers \
-              is used with a dictionary whose keys are not all strings."),
-    'W1301': ("Unused key %r in format string dictionary",
-              "Used when a format string that uses named conversion specifiers \
-              is used with a dictionary that conWtains keys not required by the \
-              format string."),
-    'E1304': ("Missing key %r in format string dictionary",
-              "Used when a format string that uses named conversion specifiers \
-              is used with a dictionary that doesn't contain all the keys \
-              required by the format string."),
-    'E1305': ("Too many arguments for format string",
-              "Used when a format string that uses unnamed conversion \
-              specifiers is given too few arguments."),
-    'E1306': ("Not enough arguments for format string",
-              "Used when a format string that uses unnamed conversion \
-              specifiers is given too many arguments"),
-    }
-
-OTHER_NODES = (astng.Const, astng.List, astng.Backquote,
-               astng.Lambda, astng.Function,
-               astng.ListComp, astng.SetComp, astng.GenExpr)
-
-class StringFormatChecker(BaseChecker):
-    """Checks string formatting operations to ensure that the format string
-    is valid and the arguments match the format string.
-    """
-
-    __implements__ = (IASTNGChecker,)
-    name = 'string_format'
-    msgs = MSGS
-
-    def visit_binop(self, node):
-        if node.op != '%':
-            return
-        left = node.left
-        args = node.right
-
-        if not (isinstance(left, astng.Const)
-            and isinstance(left.value, basestring)):
-            return
-        format_string = left.value
-        try:
-            required_keys, required_num_args = \
-                utils.parse_format_string(format_string)
-        except utils.UnsupportedFormatCharacter, e:
-            c = format_string[e.index]
-            self.add_message('E1300', node=node, args=(c, ord(c), e.index))
-            return
-        except utils.IncompleteFormatString:
-            self.add_message('E1301', node=node)
-            return
-        if required_keys and required_num_args:
-            # The format string uses both named and unnamed format
-            # specifiers.
-            self.add_message('E1302', node=node)
-        elif required_keys:
-            # The format string uses only named format specifiers.
-            # Check that the RHS of the % operator is a mapping object
-            # that contains precisely the set of keys required by the
-            # format string.
-            if isinstance(args, astng.Dict):
-                keys = set()
-                unknown_keys = False
-                for k, v in args.items:
-                    if isinstance(k, astng.Const):
-                        key = k.value
-                        if isinstance(key, basestring):
-                            keys.add(key)
-                        else:
-                            self.add_message('W1300', node=node, args=key)
-                    else:
-                        # One of the keys was something other than a
-                        # constant.  Since we can't tell what it is,
-                        # supress checks for missing keys in the
-                        # dictionary.
-                        unknown_keys = True
-                if not unknown_keys:
-                    for key in required_keys:
-                        if key not in keys:
-                            self.add_message('E1304', node=node, args=key)
-                for key in keys:
-                    if key not in required_keys:
-                        self.add_message('W1301', node=node, args=key)
-            elif isinstance(args, OTHER_NODES + (astng.Tuple,)):
-                type_name = type(args).__name__
-                self.add_message('E1303', node=node, args=type_name)
-            # else:
-                # The RHS of the format specifier is a name or
-                # expression.  It may be a mapping object, so
-                # there's nothing we can check.
-        else:
-            # The format string uses only unnamed format specifiers.
-            # Check that the number of arguments passed to the RHS of
-            # the % operator matches the number required by the format
-            # string.
-            if isinstance(args, astng.Tuple):
-                num_args = len(args.elts)
-            elif isinstance(args, OTHER_NODES + (astng.Dict, astng.DictComp)):
-                num_args = 1
-            else:
-                # The RHS of the format specifier is a name or
-                # expression.  It could be a tuple of unknown size, so
-                # there's nothing we can check.
-                num_args = None
-            if num_args is not None:
-                if num_args > required_num_args:
-                    self.add_message('E1305', node=node)
-                elif num_args < required_num_args:
-                    self.add_message('E1306', node=node)
-
-
-def register(linter):
-    """required method to auto register this checker """
-    linter.register_checker(StringFormatChecker(linter))
diff --git a/third_party/pylint/checkers/strings.py b/third_party/pylint/checkers/strings.py
new file mode 100644
index 0000000..e88085d
--- /dev/null
+++ b/third_party/pylint/checkers/strings.py
@@ -0,0 +1,615 @@
+# Copyright (c) 2009-2010 Arista Networks, Inc. - James Lingard
+# Copyright (c) 2004-2013 LOGILAB S.A. (Paris, FRANCE).
+# Copyright 2012 Google Inc.
+#
+# http://www.logilab.fr/ -- mailto:contact@logilab.fr
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+"""Checker for string formatting operations.
+"""
+
+import sys
+import tokenize
+import string
+import numbers
+
+import astroid
+
+from pylint.interfaces import ITokenChecker, IAstroidChecker, IRawChecker
+from pylint.checkers import BaseChecker, BaseTokenChecker
+from pylint.checkers import utils
+from pylint.checkers.utils import check_messages
+
+import six
+
+
+_PY3K = sys.version_info[:2] >= (3, 0)
+_PY27 = sys.version_info[:2] == (2, 7)
+
+MSGS = {
+    'E1300': ("Unsupported format character %r (%#02x) at index %d",
+              "bad-format-character",
+              "Used when an unsupported format character is used in a format\
+              string."),
+    'E1301': ("Format string ends in middle of conversion specifier",
+              "truncated-format-string",
+              "Used when a format string terminates before the end of a \
+              conversion specifier."),
+    'E1302': ("Mixing named and unnamed conversion specifiers in format string",
+              "mixed-format-string",
+              "Used when a format string contains both named (e.g. '%(foo)d') \
+              and unnamed (e.g. '%d') conversion specifiers.  This is also \
+              used when a named conversion specifier contains * for the \
+              minimum field width and/or precision."),
+    'E1303': ("Expected mapping for format string, not %s",
+              "format-needs-mapping",
+              "Used when a format string that uses named conversion specifiers \
+              is used with an argument that is not a mapping."),
+    'W1300': ("Format string dictionary key should be a string, not %s",
+              "bad-format-string-key",
+              "Used when a format string that uses named conversion specifiers \
+              is used with a dictionary whose keys are not all strings."),
+    'W1301': ("Unused key %r in format string dictionary",
+              "unused-format-string-key",
+              "Used when a format string that uses named conversion specifiers \
+              is used with a dictionary that contains keys not required by the \
+              format string."),
+    'E1304': ("Missing key %r in format string dictionary",
+              "missing-format-string-key",
+              "Used when a format string that uses named conversion specifiers \
+              is used with a dictionary that doesn't contain all the keys \
+              required by the format string."),
+    'E1305': ("Too many arguments for format string",
+              "too-many-format-args",
+              "Used when a format string that uses unnamed conversion \
+              specifiers is given too many arguments."),
+    'E1306': ("Not enough arguments for format string",
+              "too-few-format-args",
+              "Used when a format string that uses unnamed conversion \
+              specifiers is given too few arguments"),
+
+    'W1302': ("Invalid format string",
+              "bad-format-string",
+              "Used when a PEP 3101 format string is invalid.",
+              {'minversion': (2, 7)}),
+    'W1303': ("Missing keyword argument %r for format string",
+              "missing-format-argument-key",
+              "Used when a PEP 3101 format string that uses named fields "
+              "doesn't receive one or more required keywords.",
+              {'minversion': (2, 7)}),
+    'W1304': ("Unused format argument %r",
+              "unused-format-string-argument",
+              "Used when a PEP 3101 format string that uses named "
+              "fields is used with an argument that "
+              "is not required by the format string.",
+              {'minversion': (2, 7)}),
+    'W1305': ("Format string contains both automatic field numbering "
+              "and manual field specification",
+              "format-combined-specification",
+              "Used when a PEP 3101 format string contains both automatic "
+              "field numbering (e.g. '{}') and manual field "
+              "specification (e.g. '{0}').",
+              {'minversion': (2, 7)}),
+    'W1306': ("Missing format attribute %r in format specifier %r",
+              "missing-format-attribute",
+              "Used when a PEP 3101 format string uses an "
+              "attribute specifier ({0.length}), but the argument "
+              "passed for formatting doesn't have that attribute.",
+              {'minversion': (2, 7)}),
+    'W1307': ("Using invalid lookup key %r in format specifier %r",
+              "invalid-format-index",
+              "Used when a PEP 3101 format string uses a lookup specifier "
+              "({a[1]}), but the argument passed for formatting "
+              "doesn't contain or doesn't have that key as an attribute.",
+              {'minversion': (2, 7)})
+    }
+
+OTHER_NODES = (astroid.Const, astroid.List, astroid.Backquote,
+               astroid.Lambda, astroid.Function,
+               astroid.ListComp, astroid.SetComp, astroid.GenExpr)
+
+if _PY3K:
+    import _string
+
+    def split_format_field_names(format_string):
+        return _string.formatter_field_name_split(format_string)
+else:
+    def _field_iterator_convertor(iterator):
+        for is_attr, key in iterator:
+            if isinstance(key, numbers.Number):
+                yield is_attr, int(key)
+            else:
+                yield is_attr, key
+
+    def split_format_field_names(format_string):
+        keyname, fielditerator = format_string._formatter_field_name_split()
+        # it will return longs, instead of ints, which will complicate
+        # the output
+        return keyname, _field_iterator_convertor(fielditerator)
+
+
+def collect_string_fields(format_string):
+    """ Given a format string, return an iterator
+    of all the valid format fields. It handles nested fields
+    as well.
+    """
+
+    formatter = string.Formatter()
+    try:
+        parseiterator = formatter.parse(format_string)
+        for result in parseiterator:
+            if all(item is None for item in result[1:]):
+                # not a replacement format
+                continue
+            name = result[1]
+            nested = result[2]
+            yield name
+            if nested:
+                for field in collect_string_fields(nested):
+                    yield field
+    except ValueError:
+        # probably the format string is invalid
+        # should we check the argument of the ValueError?
+        raise utils.IncompleteFormatString(format_string)
+
+def parse_format_method_string(format_string):
+    """
+    Parses a PEP 3101 format string, returning a tuple of
+    (keys, num_args, manual_pos_arg),
+    where keys is the set of mapping keys in the format string, num_args
+    is the number of arguments required by the format string and
+    manual_pos_arg is the number of arguments passed with the position.
+    """
+    keys = []
+    num_args = 0
+    manual_pos_arg = set()
+    for name in collect_string_fields(format_string):
+        if name and str(name).isdigit():
+            manual_pos_arg.add(str(name))
+        elif name:
+            keyname, fielditerator = split_format_field_names(name)
+            if isinstance(keyname, numbers.Number):
+                # In Python 2 it will return long which will lead
+                # to different output between 2 and 3
+                manual_pos_arg.add(keyname)
+                keyname = int(keyname)
+            keys.append((keyname, list(fielditerator)))
+        else:
+            num_args += 1
+    return keys, num_args, len(manual_pos_arg)
+
+def get_args(callfunc):
+    """ Get the arguments from the given `CallFunc` node.
+    Return a tuple, where the first element is the
+    number of positional arguments and the second element
+    is the keyword arguments in a dict.
+    """
+    positional = 0
+    named = {}
+
+    for arg in callfunc.args:
+        if isinstance(arg, astroid.Keyword):
+            named[arg.arg] = utils.safe_infer(arg.value)
+        else:
+            positional += 1
+    return positional, named
+
+def get_access_path(key, parts):
+    """ Given a list of format specifiers, returns
+    the final access path (e.g. a.b.c[0][1]).
+    """
+    path = []
+    for is_attribute, specifier in parts:
+        if is_attribute:
+            path.append(".{}".format(specifier))
+        else:
+            path.append("[{!r}]".format(specifier))
+    return str(key) + "".join(path)
+
+
+class StringFormatChecker(BaseChecker):
+    """Checks string formatting operations to ensure that the format string
+    is valid and the arguments match the format string.
+    """
+
+    __implements__ = (IAstroidChecker,)
+    name = 'string'
+    msgs = MSGS
+
+    @check_messages(*(MSGS.keys()))
+    def visit_binop(self, node):
+        if node.op != '%':
+            return
+        left = node.left
+        args = node.right
+
+        if not (isinstance(left, astroid.Const)
+                and isinstance(left.value, six.string_types)):
+            return
+        format_string = left.value
+        try:
+            required_keys, required_num_args = \
+                utils.parse_format_string(format_string)
+        except utils.UnsupportedFormatCharacter as e:
+            c = format_string[e.index]
+            self.add_message('bad-format-character',
+                             node=node, args=(c, ord(c), e.index))
+            return
+        except utils.IncompleteFormatString:
+            self.add_message('truncated-format-string', node=node)
+            return
+        if required_keys and required_num_args:
+            # The format string uses both named and unnamed format
+            # specifiers.
+            self.add_message('mixed-format-string', node=node)
+        elif required_keys:
+            # The format string uses only named format specifiers.
+            # Check that the RHS of the % operator is a mapping object
+            # that contains precisely the set of keys required by the
+            # format string.
+            if isinstance(args, astroid.Dict):
+                keys = set()
+                unknown_keys = False
+                for k, _ in args.items:
+                    if isinstance(k, astroid.Const):
+                        key = k.value
+                        if isinstance(key, six.string_types):
+                            keys.add(key)
+                        else:
+                            self.add_message('bad-format-string-key',
+                                             node=node, args=key)
+                    else:
+                        # One of the keys was something other than a
+                        # constant.  Since we can't tell what it is,
+                        # suppress checks for missing keys in the
+                        # dictionary.
+                        unknown_keys = True
+                if not unknown_keys:
+                    for key in required_keys:
+                        if key not in keys:
+                            self.add_message('missing-format-string-key',
+                                             node=node, args=key)
+                for key in keys:
+                    if key not in required_keys:
+                        self.add_message('unused-format-string-key',
+                                         node=node, args=key)
+            elif isinstance(args, OTHER_NODES + (astroid.Tuple,)):
+                type_name = type(args).__name__
+                self.add_message('format-needs-mapping',
+                                 node=node, args=type_name)
+            # else:
+                # The RHS of the format specifier is a name or
+                # expression.  It may be a mapping object, so
+                # there's nothing we can check.
+        else:
+            # The format string uses only unnamed format specifiers.
+            # Check that the number of arguments passed to the RHS of
+            # the % operator matches the number required by the format
+            # string.
+            if isinstance(args, astroid.Tuple):
+                num_args = len(args.elts)
+            elif isinstance(args, OTHER_NODES + (astroid.Dict, astroid.DictComp)):
+                num_args = 1
+            else:
+                # The RHS of the format specifier is a name or
+                # expression.  It could be a tuple of unknown size, so
+                # there's nothing we can check.
+                num_args = None
+            if num_args is not None:
+                if num_args > required_num_args:
+                    self.add_message('too-many-format-args', node=node)
+                elif num_args < required_num_args:
+                    self.add_message('too-few-format-args', node=node)
+
+
+class StringMethodsChecker(BaseChecker):
+    __implements__ = (IAstroidChecker,)
+    name = 'string'
+    msgs = {
+        'E1310': ("Suspicious argument in %s.%s call",
+                  "bad-str-strip-call",
+                  "The argument to a str.{l,r,}strip call contains a"
+                  " duplicate character, "),
+        }
+
+    @check_messages(*(MSGS.keys()))
+    def visit_callfunc(self, node):
+        func = utils.safe_infer(node.func)
+        if (isinstance(func, astroid.BoundMethod)
+                and isinstance(func.bound, astroid.Instance)
+                and func.bound.name in ('str', 'unicode', 'bytes')):
+            if func.name in ('strip', 'lstrip', 'rstrip') and node.args:
+                arg = utils.safe_infer(node.args[0])
+                if not isinstance(arg, astroid.Const):
+                    return
+                if len(arg.value) != len(set(arg.value)):
+                    self.add_message('bad-str-strip-call', node=node,
+                                     args=(func.bound.name, func.name))
+            elif func.name == 'format':
+                if _PY27 or _PY3K:
+                    self._check_new_format(node, func)
+
+    def _check_new_format(self, node, func):
+        """ Check the new string formatting. """
+        # TODO: skip (for now) format nodes which don't have
+        #       an explicit string on the left side of the format operation.
+        #       We do this because our inference engine can't properly handle
+        #       redefinitions of the original string.
+        #       For more details, see issue 287.
+        #
+        # Note that there may not be any left side at all, if the format method
+        # has been assigned to another variable. See issue 351. For example:
+        #
+        #    fmt = 'some string {}'.format
+        #    fmt('arg')
+        if (isinstance(node.func, astroid.Getattr)
+                and not isinstance(node.func.expr, astroid.Const)):
+            return
+        try:
+            strnode = next(func.bound.infer())
+        except astroid.InferenceError:
+            return
+        if not isinstance(strnode, astroid.Const):
+            return
+        if node.starargs or node.kwargs:
+            # TODO: Don't complicate the logic, skip these for now.
+            return
+        try:
+            positional, named = get_args(node)
+        except astroid.InferenceError:
+            return
+        try:
+            fields, num_args, manual_pos = parse_format_method_string(strnode.value)
+        except utils.IncompleteFormatString:
+            self.add_message('bad-format-string', node=node)
+            return
+
+        named_fields = set(field[0] for field in fields
+                           if isinstance(field[0], six.string_types))
+        if num_args and manual_pos:
+            self.add_message('format-combined-specification',
+                             node=node)
+            return
+
+        check_args = False
+        # Consider "{[0]} {[1]}" as num_args.
+        num_args += sum(1 for field in named_fields
+                        if field == '')
+        if named_fields:
+            for field in named_fields:
+                if field not in named and field:
+                    self.add_message('missing-format-argument-key',
+                                     node=node,
+                                     args=(field, ))
+            for field in named:
+                if field not in named_fields:
+                    self.add_message('unused-format-string-argument',
+                                     node=node,
+                                     args=(field, ))
+            # num_args can be 0 if manual_pos is not.
+            num_args = num_args or manual_pos
+            if positional or num_args:
+                empty = any(True for field in named_fields
+                            if field == '')
+                if named or empty:
+                    # Verify the required number of positional arguments
+                    # only if the .format got at least one keyword argument.
+                    # This means that the format strings accepts both
+                    # positional and named fields and we should warn
+                    # when one of the them is missing or is extra.
+                    check_args = True
+        else:
+            check_args = True
+        if check_args:
+            # num_args can be 0 if manual_pos is not.
+            num_args = num_args or manual_pos
+            if positional > num_args:
+                self.add_message('too-many-format-args', node=node)
+            elif positional < num_args:
+                self.add_message('too-few-format-args', node=node)
+
+        self._check_new_format_specifiers(node, fields, named)
+
+    def _check_new_format_specifiers(self, node, fields, named):
+        """
+        Check attribute and index access in the format
+        string ("{0.a}" and "{0[a]}").
+        """
+        for key, specifiers in fields:
+            # Obtain the argument. If it can't be obtained
+            # or inferred, skip this check.
+            if key == '':
+                # {[0]} will have an unnamed argument, defaulting
+                # to 0. It will not be present in `named`, so use the value
+                # 0 for it.
+                key = 0
+            if isinstance(key, numbers.Number):
+                try:
+                    argname = utils.get_argument_from_call(node, key)
+                except utils.NoSuchArgumentError:
+                    continue
+            else:
+                if key not in named:
+                    continue
+                argname = named[key]
+            if argname in (astroid.YES, None):
+                continue
+            try:
+                argument = next(argname.infer())
+            except astroid.InferenceError:
+                continue
+            if not specifiers or argument is astroid.YES:
+                # No need to check this key if it doesn't
+                # use attribute / item access
+                continue
+            if argument.parent and isinstance(argument.parent, astroid.Arguments):
+                # Ignore any object coming from an argument,
+                # because we can't infer its value properly.
+                continue
+            previous = argument
+            parsed = []
+            for is_attribute, specifier in specifiers:
+                if previous is astroid.YES:
+                    break
+                parsed.append((is_attribute, specifier))
+                if is_attribute:
+                    try:
+                        previous = previous.getattr(specifier)[0]
+                    except astroid.NotFoundError:
+                        if (hasattr(previous, 'has_dynamic_getattr') and
+                                previous.has_dynamic_getattr()):
+                            # Don't warn if the object has a custom __getattr__
+                            break
+                        path = get_access_path(key, parsed)
+                        self.add_message('missing-format-attribute',
+                                         args=(specifier, path),
+                                         node=node)
+                        break
+                else:
+                    warn_error = False
+                    if hasattr(previous, 'getitem'):
+                        try:
+                            previous = previous.getitem(specifier)
+                        except (IndexError, TypeError):
+                            warn_error = True
+                    else:
+                        try:
+                            # Lookup __getitem__ in the current node,
+                            # but skip further checks, because we can't
+                            # retrieve the looked object
+                            previous.getattr('__getitem__')
+                            break
+                        except astroid.NotFoundError:
+                            warn_error = True
+                    if warn_error:
+                        path = get_access_path(key, parsed)
+                        self.add_message('invalid-format-index',
+                                         args=(specifier, path),
+                                         node=node)
+                        break
+
+                try:
+                    previous = next(previous.infer())
+                except astroid.InferenceError:
+                    # can't check further if we can't infer it
+                    break
+
+
+
+class StringConstantChecker(BaseTokenChecker):
+    """Check string literals"""
+    __implements__ = (ITokenChecker, IRawChecker)
+    name = 'string_constant'
+    msgs = {
+        'W1401': ('Anomalous backslash in string: \'%s\'. '
+                  'String constant might be missing an r prefix.',
+                  'anomalous-backslash-in-string',
+                  'Used when a backslash is in a literal string but not as an '
+                  'escape.'),
+        'W1402': ('Anomalous Unicode escape in byte string: \'%s\'. '
+                  'String constant might be missing an r or u prefix.',
+                  'anomalous-unicode-escape-in-string',
+                  'Used when an escape like \\u is encountered in a byte '
+                  'string where it has no effect.'),
+        }
+
+    # Characters that have a special meaning after a backslash in either
+    # Unicode or byte strings.
+    ESCAPE_CHARACTERS = 'abfnrtvx\n\r\t\\\'\"01234567'
+
+    # TODO(mbp): Octal characters are quite an edge case today; people may
+    # prefer a separate warning where they occur.  \0 should be allowed.
+
+    # Characters that have a special meaning after a backslash but only in
+    # Unicode strings.
+    UNICODE_ESCAPE_CHARACTERS = 'uUN'
+
+    def process_module(self, module):
+        self._unicode_literals = 'unicode_literals' in module.future_imports
+
+    def process_tokens(self, tokens):
+        for (tok_type, token, (start_row, _), _, _) in tokens:
+            if tok_type == tokenize.STRING:
+                # 'token' is the whole un-parsed token; we can look at the start
+                # of it to see whether it's a raw or unicode string etc.
+                self.process_string_token(token, start_row)
+
+    def process_string_token(self, token, start_row):
+        for i, c in enumerate(token):
+            if c in '\'\"':
+                quote_char = c
+                break
+        # pylint: disable=undefined-loop-variable
+        prefix = token[:i].lower() #  markers like u, b, r.
+        after_prefix = token[i:]
+        if after_prefix[:3] == after_prefix[-3:] == 3 * quote_char:
+            string_body = after_prefix[3:-3]
+        else:
+            string_body = after_prefix[1:-1]  # Chop off quotes
+        # No special checks on raw strings at the moment.
+        if 'r' not in prefix:
+            self.process_non_raw_string_token(prefix, string_body, start_row)
+
+    def process_non_raw_string_token(self, prefix, string_body, start_row):
+        """check for bad escapes in a non-raw string.
+
+        prefix: lowercase string of eg 'ur' string prefix markers.
+        string_body: the un-parsed body of the string, not including the quote
+        marks.
+        start_row: integer line number in the source.
+        """
+        # Walk through the string; if we see a backslash then escape the next
+        # character, and skip over it.  If we see a non-escaped character,
+        # alert, and continue.
+        #
+        # Accept a backslash when it escapes a backslash, or a quote, or
+        # end-of-line, or one of the letters that introduce a special escape
+        # sequence <http://docs.python.org/reference/lexical_analysis.html>
+        #
+        # TODO(mbp): Maybe give a separate warning about the rarely-used
+        # \a \b \v \f?
+        #
+        # TODO(mbp): We could give the column of the problem character, but
+        # add_message doesn't seem to have a way to pass it through at present.
+        i = 0
+        while True:
+            i = string_body.find('\\', i)
+            if i == -1:
+                break
+            # There must be a next character; having a backslash at the end
+            # of the string would be a SyntaxError.
+            next_char = string_body[i+1]
+            match = string_body[i:i+2]
+            if next_char in self.UNICODE_ESCAPE_CHARACTERS:
+                if 'u' in prefix:
+                    pass
+                elif (_PY3K or self._unicode_literals) and 'b' not in prefix:
+                    pass  # unicode by default
+                else:
+                    self.add_message('anomalous-unicode-escape-in-string',
+                                     line=start_row, args=(match, ))
+            elif next_char not in self.ESCAPE_CHARACTERS:
+                self.add_message('anomalous-backslash-in-string',
+                                 line=start_row, args=(match, ))
+            # Whether it was a valid escape or not, backslash followed by
+            # another character can always be consumed whole: the second
+            # character can never be the start of a new backslash escape.
+            i += 2
+
+
+
+def register(linter):
+    """required method to auto register this checker """
+    linter.register_checker(StringFormatChecker(linter))
+    linter.register_checker(StringMethodsChecker(linter))
+    linter.register_checker(StringConstantChecker(linter))
diff --git a/third_party/pylint/checkers/typecheck.py b/third_party/pylint/checkers/typecheck.py
index b0a591f..9f074ae 100644
--- a/third_party/pylint/checkers/typecheck.py
+++ b/third_party/pylint/checkers/typecheck.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2006-2010 LOGILAB S.A. (Paris, FRANCE).
+# Copyright (c) 2006-2013 LOGILAB S.A. (Paris, FRANCE).
 # http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This program is free software; you can redistribute it and/or modify it under
@@ -12,59 +12,121 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
-"""try to find more bugs in the code using astng inference capabilities
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+"""try to find more bugs in the code using astroid inference capabilities
 """
 
 import re
 import shlex
 
-from logilab import astng
-from logilab.astng import InferenceError, NotFoundError, YES, Instance
+import astroid
+from astroid import InferenceError, NotFoundError, YES, Instance
+from astroid.bases import BUILTINS
 
-from pylint.interfaces import IASTNGChecker
+from pylint.interfaces import IAstroidChecker, INFERENCE, INFERENCE_FAILURE
 from pylint.checkers import BaseChecker
-from pylint.checkers.utils import safe_infer, is_super, check_messages
+from pylint.checkers.utils import (
+    safe_infer, is_super,
+    check_messages, decorated_with_property)
 
 MSGS = {
     'E1101': ('%s %r has no %r member',
-              'Used when a variable is accessed for an unexistent member.'),
+              'no-member',
+              'Used when a variable is accessed for an unexistent member.',
+              {'old_names': [('E1103', 'maybe-no-member')]}),
     'E1102': ('%s is not callable',
+              'not-callable',
               'Used when an object being called has been inferred to a non \
               callable object'),
-    'E1103': ('%s %r has no %r member (but some types could not be inferred)',
-              'Used when a variable is accessed for an unexistent member, but \
-              astng was not able to interpret all possible types of this \
-              variable.'),
     'E1111': ('Assigning to function call which doesn\'t return',
+              'assignment-from-no-return',
               'Used when an assignment is done on a function call but the \
               inferred function doesn\'t return anything.'),
     'W1111': ('Assigning to function call which only returns None',
+              'assignment-from-none',
               'Used when an assignment is done on a function call but the \
               inferred function returns nothing but None.'),
 
-    'E1120': ('No value passed for parameter %s in function call',
+    'E1120': ('No value for argument %s in %s call',
+              'no-value-for-parameter',
               'Used when a function call passes too few arguments.'),
-    'E1121': ('Too many positional arguments for function call',
+    'E1121': ('Too many positional arguments for %s call',
+              'too-many-function-args',
               'Used when a function call passes too many positional \
               arguments.'),
-    'E1122': ('Duplicate keyword argument %r in function call',
-              'Used when a function call passes the same keyword argument \
-              multiple times.'),
-    'E1123': ('Passing unexpected keyword argument %r in function call',
+    'E1123': ('Unexpected keyword argument %r in %s call',
+              'unexpected-keyword-arg',
               'Used when a function call passes a keyword argument that \
               doesn\'t correspond to one of the function\'s parameter names.'),
-    'E1124': ('Multiple values passed for parameter %r in function call',
+    'E1124': ('Argument %r passed by position and keyword in %s call',
+              'redundant-keyword-arg',
               'Used when a function call would result in assigning multiple \
               values to a function parameter, one value from a positional \
               argument and one from a keyword argument.'),
+    'E1125': ('Missing mandatory keyword argument %r in %s call',
+              'missing-kwoa',
+              ('Used when a function call does not pass a mandatory'
+               ' keyword-only argument.'),
+              {'minversion': (3, 0)}),
+    'E1126': ('Sequence index is not an int, slice, or instance with __index__',
+              'invalid-sequence-index',
+              'Used when a sequence type is indexed with an invalid type. '
+              'Valid types are ints, slices, and objects with an __index__ '
+              'method.'),
+    'E1127': ('Slice index is not an int, None, or instance with __index__',
+              'invalid-slice-index',
+              'Used when a slice index is not an integer, None, or an object \
+               with an __index__ method.'),
     }
 
+# builtin sequence types in Python 2 and 3.
+SEQUENCE_TYPES = set(['str', 'unicode', 'list', 'tuple', 'bytearray',
+                      'xrange', 'range', 'bytes', 'memoryview'])
+
+def _determine_callable(callable_obj):
+    # Ordering is important, since BoundMethod is a subclass of UnboundMethod,
+    # and Function inherits Lambda.
+    if isinstance(callable_obj, astroid.BoundMethod):
+        # Bound methods have an extra implicit 'self' argument.
+        return callable_obj, 1, callable_obj.type
+    elif isinstance(callable_obj, astroid.UnboundMethod):
+        return callable_obj, 0, 'unbound method'
+    elif isinstance(callable_obj, astroid.Function):
+        return callable_obj, 0, callable_obj.type
+    elif isinstance(callable_obj, astroid.Lambda):
+        return callable_obj, 0, 'lambda'
+    elif isinstance(callable_obj, astroid.Class):
+        # Class instantiation, lookup __new__ instead.
+        # If we only find object.__new__, we can safely check __init__
+        # instead.
+        try:
+            # Use the last definition of __new__.
+            new = callable_obj.local_attr('__new__')[-1]
+        except astroid.NotFoundError:
+            new = None
+
+        if not new or new.parent.scope().name == 'object':
+            try:
+                # Use the last definition of __init__.
+                callable_obj = callable_obj.local_attr('__init__')[-1]
+            except astroid.NotFoundError:
+                # do nothing, covered by no-init.
+                raise ValueError
+        else:
+            callable_obj = new
+
+        if not isinstance(callable_obj, astroid.Function):
+            raise ValueError
+        # both have an extra implicit 'cls'/'self' argument.
+        return callable_obj, 1, 'constructor'
+    else:
+        raise ValueError
+
 class TypeChecker(BaseChecker):
     """try to find bugs in the code using type inference
     """
 
-    __implements__ = (IASTNGChecker,)
+    __implements__ = (IAstroidChecker,)
 
     # configuration section name
     name = 'typecheck'
@@ -77,31 +139,38 @@
                  'help' : 'Tells whether missing members accessed in mixin \
 class should be ignored. A mixin class is detected if its name ends with \
 "mixin" (case insensitive).'}
-                ),
-
+               ),
+               ('ignored-modules',
+                {'default': (),
+                 'type': 'csv',
+                 'metavar': '<module names>',
+                 'help': 'List of module names for which member attributes \
+should not be checked (useful for modules/projects where namespaces are \
+manipulated during runtime and thus existing member attributes cannot be \
+deduced by static analysis'},
+               ),
                ('ignored-classes',
                 {'default' : ('SQLObject',),
                  'type' : 'csv',
                  'metavar' : '<members names>',
                  'help' : 'List of classes names for which member attributes \
 should not be checked (useful for classes with attributes dynamically set).'}
-                 ),
+               ),
 
                ('zope',
                 {'default' : False, 'type' : 'yn', 'metavar': '<y_or_n>',
                  'help' : 'When zope mode is activated, add a predefined set \
 of Zope acquired attributes to generated-members.'}
-                ),
+               ),
                ('generated-members',
-                {'default' : (
-        'REQUEST', 'acl_users', 'aq_parent'),
+                {'default' : ('REQUEST', 'acl_users', 'aq_parent'),
                  'type' : 'string',
                  'metavar' : '<members names>',
                  'help' : 'List of members which are set dynamically and \
 missed by pylint inference system, and so shouldn\'t trigger E0201 when \
 accessed. Python regular expressions are accepted.'}
-                ),
-        )
+               ),
+              )
 
     def open(self):
         # do this in open since config not fully initialized in __init__
@@ -110,13 +179,13 @@
             self.generated_members.extend(('REQUEST', 'acl_users', 'aq_parent'))
 
     def visit_assattr(self, node):
-        if isinstance(node.ass_type(), astng.AugAssign):
+        if isinstance(node.ass_type(), astroid.AugAssign):
             self.visit_getattr(node)
 
     def visit_delattr(self, node):
         self.visit_getattr(node)
 
-    @check_messages('E1101', 'E1103')
+    @check_messages('no-member')
     def visit_getattr(self, node):
         """check that the accessed attribute exists
 
@@ -132,6 +201,7 @@
         if isinstance(self.config.generated_members, str):
             gen = shlex.shlex(self.config.generated_members)
             gen.whitespace += ','
+            gen.wordchars += '[]-+'
             self.config.generated_members = tuple(tok.strip('"') for tok in gen)
         for pattern in self.config.generated_members:
             # attribute is marked as generated, stop here
@@ -151,7 +221,7 @@
                 inference_failure = True
                 continue
             # skip None anyway
-            if isinstance(owner, astng.Const) and owner.value is None:
+            if isinstance(owner, astroid.Const) and owner.value is None:
                 continue
             # XXX "super" / metaclass call
             if is_super(owner) or getattr(owner, 'type', None) == 'metaclass':
@@ -163,20 +233,34 @@
                 continue
             try:
                 if not [n for n in owner.getattr(node.attrname)
-                        if not isinstance(n.statement(), astng.AugAssign)]:
+                        if not isinstance(n.statement(), astroid.AugAssign)]:
                     missingattr.add((owner, name))
                     continue
             except AttributeError:
                 # XXX method / function
                 continue
             except NotFoundError:
-                if isinstance(owner, astng.Function) and owner.decorators:
-                   continue
+                if isinstance(owner, astroid.Function) and owner.decorators:
+                    continue
                 if isinstance(owner, Instance) and owner.has_dynamic_getattr():
                     continue
-                # explicit skipping of optparse'Values class
-                if owner.name == 'Values' and owner.root().name == 'optparse':
+                # explicit skipping of module member access
+                if owner.root().name in self.config.ignored_modules:
                     continue
+                if isinstance(owner, astroid.Class):
+                    # Look up in the metaclass only if the owner is itself
+                    # a class.
+                    # TODO: getattr doesn't return by default members
+                    # from the metaclass, because handling various cases
+                    # of methods accessible from the metaclass itself
+                    # and/or subclasses only is too complicated for little to
+                    # no benefit.
+                    metaclass = owner.metaclass()
+                    try:
+                        if metaclass and metaclass.getattr(node.attrname):
+                            continue
+                    except NotFoundError:
+                        pass
                 missingattr.add((owner, name))
                 continue
             # stop on the first found
@@ -193,88 +277,120 @@
                 if actual in done:
                     continue
                 done.add(actual)
-                if inference_failure:
-                    msgid = 'E1103'
-                else:
-                    msgid = 'E1101'
-                self.add_message(msgid, node=node,
+                confidence = INFERENCE if not inference_failure else INFERENCE_FAILURE
+                self.add_message('no-member', node=node,
                                  args=(owner.display_type(), name,
-                                       node.attrname))
+                                       node.attrname),
+                                 confidence=confidence)
 
-
+    @check_messages('assignment-from-no-return', 'assignment-from-none')
     def visit_assign(self, node):
         """check that if assigning to a function call, the function is
         possibly returning something valuable
         """
-        if not isinstance(node.value, astng.CallFunc):
+        if not isinstance(node.value, astroid.CallFunc):
             return
         function_node = safe_infer(node.value.func)
         # skip class, generator and incomplete function definition
-        if not (isinstance(function_node, astng.Function) and
+        if not (isinstance(function_node, astroid.Function) and
                 function_node.root().fully_defined()):
             return
         if function_node.is_generator() \
                or function_node.is_abstract(pass_is_abstract=False):
             return
-        returns = list(function_node.nodes_of_class(astng.Return,
-                                                    skip_klass=astng.Function))
+        returns = list(function_node.nodes_of_class(astroid.Return,
+                                                    skip_klass=astroid.Function))
         if len(returns) == 0:
-            self.add_message('E1111', node=node)
+            self.add_message('assignment-from-no-return', node=node)
         else:
             for rnode in returns:
-                if not (isinstance(rnode.value, astng.Const)
-                        and rnode.value.value is None):
+                if not (isinstance(rnode.value, astroid.Const)
+                        and rnode.value.value is None
+                        or rnode.value is None):
                     break
             else:
-                self.add_message('W1111', node=node)
+                self.add_message('assignment-from-none', node=node)
 
+    def _check_uninferable_callfunc(self, node):
+        """
+        Check that the given uninferable CallFunc node does not
+        call an actual function.
+        """
+        if not isinstance(node.func, astroid.Getattr):
+            return
+
+        # Look for properties. First, obtain
+        # the lhs of the Getattr node and search the attribute
+        # there. If that attribute is a property or a subclass of properties,
+        # then most likely it's not callable.
+
+        # TODO: since astroid doesn't understand descriptors very well
+        # we will not handle them here, right now.
+
+        expr = node.func.expr
+        klass = safe_infer(expr)
+        if (klass is None or klass is astroid.YES or
+                not isinstance(klass, astroid.Instance)):
+            return
+
+        try:
+            attrs = klass._proxied.getattr(node.func.attrname)
+        except astroid.NotFoundError:
+            return
+
+        for attr in attrs:
+            if attr is astroid.YES:
+                continue
+            if not isinstance(attr, astroid.Function):
+                continue
+
+            # Decorated, see if it is decorated with a property.
+            # Also, check the returns and see if they are callable.
+            if decorated_with_property(attr):
+                if all(return_node.callable()
+                       for return_node in attr.infer_call_result(node)):
+                    continue
+                else:
+                    self.add_message('not-callable', node=node,
+                                     args=node.func.as_string())
+                    break
+
+    @check_messages(*(list(MSGS.keys())))
     def visit_callfunc(self, node):
         """check that called functions/methods are inferred to callable objects,
         and that the arguments passed to the function match the parameters in
         the inferred function's definition
         """
-
         # Build the set of keyword arguments, checking for duplicate keywords,
         # and count the positional arguments.
         keyword_args = set()
         num_positional_args = 0
         for arg in node.args:
-            if isinstance(arg, astng.Keyword):
-                keyword = arg.arg
-                if keyword in keyword_args:
-                    self.add_message('E1122', node=node, args=keyword)
-                keyword_args.add(keyword)
+            if isinstance(arg, astroid.Keyword):
+                keyword_args.add(arg.arg)
             else:
                 num_positional_args += 1
 
         called = safe_infer(node.func)
         # only function, generator and object defining __call__ are allowed
         if called is not None and not called.callable():
-            self.add_message('E1102', node=node, args=node.func.as_string())
+            self.add_message('not-callable', node=node,
+                             args=node.func.as_string())
 
-        # Note that BoundMethod is a subclass of UnboundMethod (huh?), so must
-        # come first in this 'if..else'.
-        if isinstance(called, astng.BoundMethod):
-            # Bound methods have an extra implicit 'self' argument.
-            num_positional_args += 1
-        elif isinstance(called, astng.UnboundMethod):
-            if called.decorators is not None:
-                for d in called.decorators.nodes:
-                    if isinstance(d, astng.Name) and (d.name == 'classmethod'):
-                        # Class methods have an extra implicit 'cls' argument.
-                        num_positional_args += 1
-                        break
-        elif (isinstance(called, astng.Function) or
-              isinstance(called, astng.Lambda)):
-            pass
-        else:
+        self._check_uninferable_callfunc(node)
+
+        try:
+            called, implicit_args, callable_name = _determine_callable(called)
+        except ValueError:
+            # Any error occurred during determining the function type, most of
+            # those errors are handled by different warnings.
             return
-
+        num_positional_args += implicit_args
         if called.args.args is None:
             # Built-in functions have no argument information.
             return
 
-        if len( called.argnames() ) != len( set( called.argnames() ) ):
+        if len(called.argnames()) != len(set(called.argnames())):
             # Duplicate parameter name (see E9801).  We can't really make sense
             # of the function call in this case, so just return.
             return
@@ -284,15 +400,15 @@
         parameters = []
         parameter_name_to_index = {}
         for i, arg in enumerate(called.args.args):
-            if isinstance(arg, astng.Tuple):
+            if isinstance(arg, astroid.Tuple):
                 name = None
                 # Don't store any parameter names within the tuple, since those
                 # are not assignable from keyword arguments.
             else:
-                if isinstance(arg, astng.Keyword):
+                if isinstance(arg, astroid.Keyword):
                     name = arg.arg
                 else:
-                    assert isinstance(arg, astng.AssName)
+                    assert isinstance(arg, astroid.AssName)
                     # This occurs with:
                     #    def f( (a), (b) ): pass
                     name = arg.name
@@ -303,6 +419,15 @@
                 defval = None
             parameters.append([(name, defval), False])
 
+        kwparams = {}
+        for i, arg in enumerate(called.args.kwonlyargs):
+            if isinstance(arg, astroid.Keyword):
+                name = arg.arg
+            else:
+                assert isinstance(arg, astroid.AssName)
+                name = arg.name
+            kwparams[name] = [called.args.kw_defaults[i], False]
+
         # Match the supplied arguments against the function parameters.
 
         # 1. Match the positional arguments.
@@ -315,7 +440,8 @@
                 break
             else:
                 # Too many positional arguments.
-                self.add_message('E1121', node=node)
+                self.add_message('too-many-function-args',
+                                 node=node, args=(callable_name,))
                 break
 
         # 2. Match the keyword arguments.
@@ -324,15 +450,24 @@
                 i = parameter_name_to_index[keyword]
                 if parameters[i][1]:
                     # Duplicate definition of function parameter.
-                    self.add_message('E1124', node=node, args=keyword)
+                    self.add_message('redundant-keyword-arg',
+                                     node=node, args=(keyword, callable_name))
                 else:
                     parameters[i][1] = True
+            elif keyword in kwparams:
+                if kwparams[keyword][1]:  # XXX is that even possible?
+                    # Duplicate definition of function parameter.
+                    self.add_message('redundant-keyword-arg', node=node,
+                                     args=(keyword, callable_name))
+                else:
+                    kwparams[keyword][1] = True
             elif called.args.kwarg is not None:
                 # The keyword argument gets assigned to the **kwargs parameter.
                 pass
             else:
                 # Unexpected keyword argument.
-                self.add_message('E1123', node=node, args=keyword)
+                self.add_message('unexpected-keyword-arg', node=node,
+                                 args=(keyword, callable_name))
 
         # 3. Match the *args, if any.  Note that Python actually processes
         #    *args _before_ any keyword arguments, but we wait until after
@@ -366,10 +501,126 @@
         for [(name, defval), assigned] in parameters:
             if (defval is None) and not assigned:
                 if name is None:
-                    display = '<tuple>'
+                    display_name = '<tuple>'
                 else:
                     display_name = repr(name)
-                self.add_message('E1120', node=node, args=display_name)
+                self.add_message('no-value-for-parameter', node=node,
+                                 args=(display_name, callable_name))
+
+        for name in kwparams:
+            defval, assigned = kwparams[name]
+            if defval is None and not assigned:
+                self.add_message('missing-kwoa', node=node,
+                                 args=(name, callable_name))
+
+    @check_messages('invalid-sequence-index')
+    def visit_extslice(self, node):
+        # Check extended slice objects as if they were used as a sequence
+        # index to check if the object being sliced can support them
+        return self.visit_index(node)
+
+    @check_messages('invalid-sequence-index')
+    def visit_index(self, node):
+        if not node.parent or not hasattr(node.parent, "value"):
+            return
+
+        # Look for index operations where the parent is a sequence type.
+        # If the types can be determined, only allow indices to be int,
+        # slice or instances with __index__.
+
+        parent_type = safe_infer(node.parent.value)
+        if not isinstance(parent_type, (astroid.Class, astroid.Instance)):
+            return
+
+        # Determine what method on the parent this index will use
+        # The parent of this node will be a Subscript, and the parent of that
+        # node determines if the Subscript is a get, set, or delete operation.
+        operation = node.parent.parent
+        if isinstance(operation, astroid.Assign):
+            methodname = '__setitem__'
+        elif isinstance(operation, astroid.Delete):
+            methodname = '__delitem__'
+        else:
+            methodname = '__getitem__'
+
+        # Check if this instance's __getitem__, __setitem__, or __delitem__, as
+        # appropriate to the statement, is implemented in a builtin sequence
+        # type. This way we catch subclasses of sequence types but skip classes
+        # that override __getitem__ and which may allow non-integer indices.
+        try:
+            methods = parent_type.getattr(methodname)
+            if methods is astroid.YES:
+                return
+            itemmethod = methods[0]
+        except (astroid.NotFoundError, IndexError):
+            return
+
+        if not isinstance(itemmethod, astroid.Function):
+            return
+        if itemmethod.root().name != BUILTINS:
+            return
+        if not itemmethod.parent:
+            return
+        if itemmethod.parent.name not in SEQUENCE_TYPES:
+            return
+
+        # For ExtSlice objects coming from visit_extslice, no further
+        # inference is necessary, since if we got this far the ExtSlice
+        # is an error.
+        if isinstance(node, astroid.ExtSlice):
+            index_type = node
+        else:
+            index_type = safe_infer(node)
+        if index_type is None or index_type is astroid.YES:
+            return
+
+        # Constants must be of type int
+        if isinstance(index_type, astroid.Const):
+            if isinstance(index_type.value, int):
+                return
+        # Instance values must be int, slice, or have an __index__ method
+        elif isinstance(index_type, astroid.Instance):
+            if index_type.pytype() in (BUILTINS + '.int', BUILTINS + '.slice'):
+                return
+            try:
+                index_type.getattr('__index__')
+                return
+            except astroid.NotFoundError:
+                pass
+
+        # Anything else is an error
+        self.add_message('invalid-sequence-index', node=node)
+
+    @check_messages('invalid-slice-index')
+    def visit_slice(self, node):
+        # Check the type of each part of the slice
+        for index in (node.lower, node.upper, node.step):
+            if index is None:
+                continue
+
+            index_type = safe_infer(index)
+            if index_type is None or index_type is astroid.YES:
+                continue
+
+            # Constants must of type int or None
+            if isinstance(index_type, astroid.Const):
+                if isinstance(index_type.value, (int, type(None))):
+                    continue
+            # Instance values must be of type int, None or an object
+            # with __index__
+            elif isinstance(index_type, astroid.Instance):
+                if index_type.pytype() in (BUILTINS + '.int',
+                                           BUILTINS + '.NoneType'):
+                    continue
+
+                try:
+                    index_type.getattr('__index__')
+                    return
+                except astroid.NotFoundError:
+                    pass
+
+            # Anything else is an error
+            self.add_message('invalid-slice-index', node=node)
 
 def register(linter):
     """required method to auto register this checker """
diff --git a/third_party/pylint/checkers/utils.py b/third_party/pylint/checkers/utils.py
index 2883c43..2cb01d5 100644
--- a/third_party/pylint/checkers/utils.py
+++ b/third_party/pylint/checkers/utils.py
@@ -1,6 +1,6 @@
 # pylint: disable=W0611
 #
-# Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
+# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
 # http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This program is free software; you can redistribute it and/or modify it under
@@ -14,20 +14,50 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """some functions that may be useful for various checkers
 """
 
+import re
+import sys
 import string
-from logilab import astng
-from logilab.common.compat import builtins
-BUILTINS_NAME = builtins.__name__
 
-COMP_NODE_TYPES = astng.ListComp, astng.SetComp, astng.DictComp, astng.GenExpr
+import astroid
+from astroid import scoped_nodes
+from logilab.common.compat import builtins
+
+BUILTINS_NAME = builtins.__name__
+COMP_NODE_TYPES = astroid.ListComp, astroid.SetComp, astroid.DictComp, astroid.GenExpr
+PY3K = sys.version_info[0] == 3
+
+if not PY3K:
+    EXCEPTIONS_MODULE = "exceptions"
+else:
+    EXCEPTIONS_MODULE = "builtins"
+ABC_METHODS = set(('abc.abstractproperty', 'abc.abstractmethod',
+                   'abc.abstractclassmethod', 'abc.abstractstaticmethod'))
+
+
+class NoSuchArgumentError(Exception):
+    pass
 
 def is_inside_except(node):
-    """Returns true if node is directly inside an exception handler"""
-    return isinstance(node.parent, astng.ExceptHandler)
+    """Returns true if node is inside the name of an except handler."""
+    current = node
+    while current and not isinstance(current.parent, astroid.ExceptHandler):
+        current = current.parent
+
+    return current and current is current.parent.name
+
+
+def get_all_elements(node):
+    """Recursively returns all atoms in nested lists and tuples."""
+    if isinstance(node, (astroid.Tuple, astroid.List)):
+        for child in node.elts:
+            for e in get_all_elements(child):
+                yield e
+    else:
+        yield node
 
 
 def clobber_in_except(node):
@@ -37,18 +67,18 @@
     Returns (True, args for W0623) if assignment clobbers an existing variable,
     (False, None) otherwise.
     """
-    if isinstance(node, astng.AssAttr):
-        return (True, (node.attrname, 'object %r' % (node.expr.name,)))
-    elif node is not None:
+    if isinstance(node, astroid.AssAttr):
+        return (True, (node.attrname, 'object %r' % (node.expr.as_string(),)))
+    elif isinstance(node, astroid.AssName):
         name = node.name
         if is_builtin(name):
             return (True, (name, 'builtins'))
         else:
-            scope, stmts = node.lookup(name)
-            if (stmts and 
-                not isinstance(stmts[0].ass_type(), 
-                               (astng.Assign, astng.AugAssign, astng.ExceptHandler))):
-                return (True, (name, 'outer scope (line %i)' % (stmts[0].lineno,)))
+            stmts = node.lookup(name)[1]
+            if (stmts and not isinstance(stmts[0].ass_type(),
+                                         (astroid.Assign, astroid.AugAssign,
+                                          astroid.ExceptHandler))):
+                return (True, (name, 'outer scope (line %s)' % stmts[0].fromlineno))
     return (False, None)
 
 
@@ -59,12 +89,14 @@
     """
     try:
         inferit = node.infer()
-        value = inferit.next()
-    except astng.InferenceError:
+        value = next(inferit)
+    except astroid.InferenceError:
         return
     try:
-        inferit.next()
+        next(inferit)
         return # None if there is ambiguity on the inferred node
+    except astroid.InferenceError:
+        return # there is some kind of ambiguity
     except StopIteration:
         return value
 
@@ -79,24 +111,28 @@
 def is_error(node):
     """return true if the function does nothing but raising an exception"""
     for child_node in node.get_children():
-        if isinstance(child_node, astng.Raise):
+        if isinstance(child_node, astroid.Raise):
             return True
         return False
 
 def is_raising(body):
     """return true if the given statement node raise an exception"""
     for node in body:
-        if isinstance(node, astng.Raise):
+        if isinstance(node, astroid.Raise):
             return True
     return False
 
 def is_empty(body):
     """return true if the given node does nothing but 'pass'"""
-    return len(body) == 1 and isinstance(body[0], astng.Pass)
+    return len(body) == 1 and isinstance(body[0], astroid.Pass)
 
-builtins =  __builtins__.copy()
+builtins = builtins.__dict__.copy()
 SPECIAL_BUILTINS = ('__builtins__',) # '__path__', '__file__')
 
+def is_builtin_object(node):
+    """Returns True if the given node is an object from the __builtin__ module."""
+    return node and node.root().name == BUILTINS_NAME
+
 def is_builtin(name): # was is_native_builtin
     """return true if <name> could be considered as a builtin defined by python
     """
@@ -115,35 +151,42 @@
     _node = var_node.parent
     while _node:
         if isinstance(_node, COMP_NODE_TYPES):
-            for ass_node in _node.nodes_of_class(astng.AssName):
+            for ass_node in _node.nodes_of_class(astroid.AssName):
                 if ass_node.name == varname:
                     return True
-        elif isinstance(_node, astng.For):
-            for ass_node in _node.target.nodes_of_class(astng.AssName):
+        elif isinstance(_node, astroid.For):
+            for ass_node in _node.target.nodes_of_class(astroid.AssName):
                 if ass_node.name == varname:
                     return True
-        elif isinstance(_node, astng.With):
-            if _node.vars is None:
-                # quickfix : case in which 'with' is used without 'as'
-                return False
-            if _node.vars.name == varname:
-                return True
-        elif isinstance(_node, (astng.Lambda, astng.Function)):
+        elif isinstance(_node, astroid.With):
+            for expr, ids in _node.items:
+                if expr.parent_of(var_node):
+                    break
+                if (ids and
+                        isinstance(ids, astroid.AssName) and
+                        ids.name == varname):
+                    return True
+        elif isinstance(_node, (astroid.Lambda, astroid.Function)):
             if _node.args.is_argument(varname):
                 return True
             if getattr(_node, 'name', None) == varname:
                 return True
             break
+        elif isinstance(_node, astroid.ExceptHandler):
+            if isinstance(_node.name, astroid.AssName):
+                ass_node = _node.name
+                if ass_node.name == varname:
+                    return True
         _node = _node.parent
     # possibly multiple statements on the same line using semi colon separator
     stmt = var_node.statement()
     _node = stmt.previous_sibling()
     lineno = stmt.fromlineno
     while _node and _node.fromlineno == lineno:
-        for ass_node in _node.nodes_of_class(astng.AssName):
+        for ass_node in _node.nodes_of_class(astroid.AssName):
             if ass_node.name == varname:
                 return True
-        for imp_node in _node.nodes_of_class( (astng.From, astng.Import)):
+        for imp_node in _node.nodes_of_class((astroid.From, astroid.Import)):
             if varname in [name[1] or name[0] for name in imp_node.names]:
                 return True
         _node = _node.previous_sibling()
@@ -154,9 +197,9 @@
     value
     """
     parent = node.scope()
-    if isinstance(parent, astng.Function):
+    if isinstance(parent, astroid.Function):
         for default_node in parent.args.defaults:
-            for default_name_node in default_node.nodes_of_class(astng.Name):
+            for default_name_node in default_node.nodes_of_class(astroid.Name):
                 if default_name_node is node:
                     return True
     return False
@@ -165,15 +208,18 @@
     """return true if the name is used in function decorator"""
     parent = node.parent
     while parent is not None:
-        if isinstance(parent, astng.Decorators):
+        if isinstance(parent, astroid.Decorators):
             return True
-        if parent.is_statement or isinstance(parent, astng.Lambda):
+        if (parent.is_statement or
+                isinstance(parent, astroid.Lambda) or
+                isinstance(parent, (scoped_nodes.ComprehensionScope,
+                                    scoped_nodes.ListComp))):
             break
         parent = parent.parent
     return False
 
 def is_ancestor_name(frame, node):
-    """return True if `frame` is a astng.Class node with `node` in the
+    """return True if `frame` is a astroid.Class node with `node` in the
     subtree of its bases attribute
     """
     try:
@@ -181,23 +227,23 @@
     except AttributeError:
         return False
     for base in bases:
-        if node in base.nodes_of_class(astng.Name):
+        if node in base.nodes_of_class(astroid.Name):
             return True
     return False
 
 def assign_parent(node):
     """return the higher parent which is not an AssName, Tuple or List node
     """
-    while node and isinstance(node, (astng.AssName,
-                                     astng.Tuple,
-                                     astng.List)):
+    while node and isinstance(node, (astroid.AssName,
+                                     astroid.Tuple,
+                                     astroid.List)):
         node = node.parent
     return node
 
 def overrides_an_abstract_method(class_node, name):
     """return True if pnode is a parent of node"""
     for ancestor in class_node.ancestors():
-        if name in ancestor and isinstance(ancestor[name], astng.Function) and \
+        if name in ancestor and isinstance(ancestor[name], astroid.Function) and \
                ancestor[name].is_abstract(pass_is_abstract=False):
             return True
     return False
@@ -205,7 +251,7 @@
 def overrides_a_method(class_node, name):
     """return True if <name> is a method overridden from an ancestor"""
     for ancestor in class_node.ancestors():
-        if name in ancestor and isinstance(ancestor[name], astng.Function):
+        if name in ancestor and isinstance(ancestor[name], astroid.Function):
             return True
     return False
 
@@ -229,7 +275,7 @@
                  '__or__', '__ior__', '__ror__',
                  '__xor__', '__ixor__', '__rxor__',
                  # XXX To be continued
-                 ))
+                ))
 
 def check_messages(*messages):
     """decorator to store messages that are handled by a checker method"""
@@ -265,52 +311,254 @@
         return (i, format_string[i])
     i = 0
     while i < len(format_string):
-        c = format_string[i]
-        if c == '%':
-            i, c = next_char(i)
+        char = format_string[i]
+        if char == '%':
+            i, char = next_char(i)
             # Parse the mapping key (optional).
             key = None
-            if c == '(':
+            if char == '(':
                 depth = 1
-                i, c = next_char(i)
+                i, char = next_char(i)
                 key_start = i
                 while depth != 0:
-                    if c == '(':
+                    if char == '(':
                         depth += 1
-                    elif c == ')':
+                    elif char == ')':
                         depth -= 1
-                    i, c = next_char(i)
+                    i, char = next_char(i)
                 key_end = i - 1
                 key = format_string[key_start:key_end]
 
             # Parse the conversion flags (optional).
-            while c in '#0- +':
-                i, c = next_char(i)
+            while char in '#0- +':
+                i, char = next_char(i)
             # Parse the minimum field width (optional).
-            if c == '*':
+            if char == '*':
                 num_args += 1
-                i, c = next_char(i)
+                i, char = next_char(i)
             else:
-                while c in string.digits:
-                    i, c = next_char(i)
+                while char in string.digits:
+                    i, char = next_char(i)
             # Parse the precision (optional).
-            if c == '.':
-                i, c = next_char(i)
-                if c == '*':
+            if char == '.':
+                i, char = next_char(i)
+                if char == '*':
                     num_args += 1
-                    i, c = next_char(i)
+                    i, char = next_char(i)
                 else:
-                    while c in string.digits:
-                        i, c = next_char(i)
+                    while char in string.digits:
+                        i, char = next_char(i)
             # Parse the length modifier (optional).
-            if c in 'hlL':
-                i, c = next_char(i)
+            if char in 'hlL':
+                i, char = next_char(i)
             # Parse the conversion type (mandatory).
-            if c not in 'diouxXeEfFgGcrs%':
+            if PY3K:
+                flags = 'diouxXeEfFgGcrs%a'
+            else:
+                flags = 'diouxXeEfFgGcrs%'
+            if char not in flags:
                 raise UnsupportedFormatCharacter(i)
             if key:
                 keys.add(key)
-            elif c != '%':
+            elif char != '%':
                 num_args += 1
         i += 1
     return keys, num_args
+
+
+def is_attr_protected(attrname):
+    """return True if attribute name is protected (starts with _ and some other
+    details), False otherwise.
+    """
+    return attrname[0] == '_' and not attrname == '_' and not (
+        attrname.startswith('__') and attrname.endswith('__'))
+
+def node_frame_class(node):
+    """return klass node for a method node (or a staticmethod or a
+    classmethod), return null otherwise
+    """
+    klass = node.frame()
+
+    while klass is not None and not isinstance(klass, astroid.Class):
+        if klass.parent is None:
+            klass = None
+        else:
+            klass = klass.parent.frame()
+
+    return klass
+
+def is_super_call(expr):
+    """return True if expression node is a function call and if function name
+    is super. Check before that you're in a method.
+    """
+    return (isinstance(expr, astroid.CallFunc) and
+            isinstance(expr.func, astroid.Name) and
+            expr.func.name == 'super')
+
+def is_attr_private(attrname):
+    """Check that attribute name is private (at least two leading underscores,
+    at most one trailing underscore)
+    """
+    regex = re.compile('^_{2,}.*[^_]+_?$')
+    return regex.match(attrname)
+
+def get_argument_from_call(callfunc_node, position=None, keyword=None):
+    """Returns the specified argument from a function call.
+
+    :param callfunc_node: Node representing a function call to check.
+    :param int position: position of the argument.
+    :param str keyword: the keyword of the argument.
+
+    :returns: The node representing the argument, None if the argument is not found.
+    :raises ValueError: if both position and keyword are None.
+    :raises NoSuchArgumentError: if no argument at the provided position or with
+    the provided keyword.
+    """
+    if position is None and keyword is None:
+        raise ValueError('Must specify at least one of: position or keyword.')
+    try:
+        if position is not None and not isinstance(callfunc_node.args[position], astroid.Keyword):
+            return callfunc_node.args[position]
+    except IndexError as error:
+        raise NoSuchArgumentError(error)
+    if keyword:
+        for arg in callfunc_node.args:
+            if isinstance(arg, astroid.Keyword) and arg.arg == keyword:
+                return arg.value
+    raise NoSuchArgumentError
+
+def inherit_from_std_ex(node):
+    """
+    Return true if the given class node is subclass of
+    exceptions.Exception.
+    """
+    if node.name in ('Exception', 'BaseException') \
+            and node.root().name == EXCEPTIONS_MODULE:
+        return True
+    return any(inherit_from_std_ex(parent)
+               for parent in node.ancestors(recurs=False))
+
+def is_import_error(handler):
+    """
+    Check if the given exception handler catches
+    ImportError.
+
+    :param handler: A node, representing an ExceptHandler node.
+    :returns: True if the handler catches ImportError, False otherwise.
+    """
+    names = None
+    if isinstance(handler.type, astroid.Tuple):
+        names = [name for name in handler.type.elts
+                 if isinstance(name, astroid.Name)]
+    elif isinstance(handler.type, astroid.Name):
+        names = [handler.type]
+    else:
+        # Don't try to infer that.
+        return
+    for name in names:
+        try:
+            for infered in name.infer():
+                if (isinstance(infered, astroid.Class) and
+                        inherit_from_std_ex(infered) and
+                        infered.name == 'ImportError'):
+                    return True
+        except astroid.InferenceError:
+            continue
+
+def has_known_bases(klass):
+    """Returns true if all base classes of a class could be inferred."""
+    try:
+        return klass._all_bases_known
+    except AttributeError:
+        pass
+    for base in klass.bases:
+        result = safe_infer(base)
+        # TODO: check for A->B->A->B pattern in class structure too?
+        if (not isinstance(result, astroid.Class) or
+                result is klass or
+                not has_known_bases(result)):
+            klass._all_bases_known = False
+            return False
+    klass._all_bases_known = True
+    return True
+
+def decorated_with_property(node):
+    """ Detect if the given function node is decorated with a property. """
+    if not node.decorators:
+        return False
+    for decorator in node.decorators.nodes:
+        if not isinstance(decorator, astroid.Name):
+            continue
+        try:
+            for infered in decorator.infer():
+                if isinstance(infered, astroid.Class):
+                    if (infered.root().name == BUILTINS_NAME and
+                            infered.name == 'property'):
+                        return True
+                    for ancestor in infered.ancestors():
+                        if (ancestor.name == 'property' and
+                                ancestor.root().name == BUILTINS_NAME):
+                            return True
+        except astroid.InferenceError:
+            pass
+
+
+def decorated_with_abc(func):
+    """Determine if the `func` node is decorated with `abc` decorators."""
+    if func.decorators:
+        for node in func.decorators.nodes:
+            try:
+                infered = next(node.infer())
+            except astroid.InferenceError:
+                continue
+            if infered and infered.qname() in ABC_METHODS:
+                return True
+
+
+def unimplemented_abstract_methods(node, is_abstract_cb=decorated_with_abc):
+    """
+    Get the unimplemented abstract methods for the given *node*.
+
+    A method can be considered abstract if the callback *is_abstract_cb*
+    returns a ``True`` value. The check defaults to verifying that
+    a method is decorated with abstract methods.
+    The function will work only for new-style classes. For old-style
+    classes, it will simply return an empty dictionary.
+    For the rest of them, it will return a dictionary of abstract method
+    names and their inferred objects.
+    """
+    visited = {}
+    try:
+        mro = reversed(node.mro())
+    except NotImplementedError:
+        # Old style class, it will not have a mro.
+        return {}
+    except astroid.ResolveError:
+        # Probably inconsistent hierarchy, don't try
+        # to figure this out here.
+        return {}
+    for ancestor in mro:
+        for obj in ancestor.values():
+            infered = obj
+            if isinstance(obj, astroid.AssName):
+                infered = safe_infer(obj)
+                if not infered:
+                    continue
+                if not isinstance(infered, astroid.Function):
+                    if obj.name in visited:
+                        del visited[obj.name]
+            if isinstance(infered, astroid.Function):
+                # It's critical to use the original name,
+                # since after inferring, an object can be something
+                # else than expected, as in the case of the
+                # following assignment.
+                #
+                # class A:
+                #     def keys(self): pass
+                #     __iter__ = keys
+                abstract = is_abstract_cb(infered)
+                if abstract:
+                    visited[obj.name] = infered
+                elif not abstract and obj.name in visited:
+                    del visited[obj.name]
+    return visited
diff --git a/third_party/pylint/checkers/variables.py b/third_party/pylint/checkers/variables.py
index 484a952..8f6f957 100644
--- a/third_party/pylint/checkers/variables.py
+++ b/third_party/pylint/checkers/variables.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2003-2011 LOGILAB S.A. (Paris, FRANCE).
+# Copyright (c) 2003-2014 LOGILAB S.A. (Paris, FRANCE).
 # http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This program is free software; you can redistribute it and/or modify it under
@@ -12,90 +12,266 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """variables checkers for Python code
 """
-
+import os
 import sys
+import re
 from copy import copy
 
-from logilab import astng
-from logilab.astng import are_exclusive, builtin_lookup, ASTNGBuildingException
+import astroid
+from astroid import are_exclusive, builtin_lookup
+from astroid import modutils
 
-from pylint.interfaces import IASTNGChecker
+from pylint.interfaces import IAstroidChecker, INFERENCE, INFERENCE_FAILURE, HIGH
+from pylint.utils import get_global_option
 from pylint.checkers import BaseChecker
-from pylint.checkers.utils import (PYMETHODS, is_ancestor_name, is_builtin,
-     is_defined_before, is_error, is_func_default, is_func_decorator,
-     assign_parent, check_messages, is_inside_except, clobber_in_except)
+from pylint.checkers.utils import (
+    PYMETHODS, is_ancestor_name, is_builtin,
+    is_defined_before, is_error, is_func_default, is_func_decorator,
+    assign_parent, check_messages, is_inside_except, clobber_in_except,
+    get_all_elements, has_known_bases)
+import six
 
+SPECIAL_OBJ = re.compile("^_{2}[a-z]+_{2}$")
+
+PY3K = sys.version_info >= (3, 0)
 
 def in_for_else_branch(parent, stmt):
-  """Returns True if stmt in inside the else branch for a parent For stmt."""
-  return (isinstance(parent, astng.For) and
-          any(else_stmt.parent_of(stmt) for else_stmt in parent.orelse))
+    """Returns True if stmt in inside the else branch for a parent For stmt."""
+    return (isinstance(parent, astroid.For) and
+            any(else_stmt.parent_of(stmt) for else_stmt in parent.orelse))
 
 def overridden_method(klass, name):
     """get overridden method if any"""
     try:
-        parent = klass.local_attr_ancestors(name).next()
+        parent = next(klass.local_attr_ancestors(name))
     except (StopIteration, KeyError):
         return None
     try:
         meth_node = parent[name]
     except KeyError:
         # We have found an ancestor defining <name> but it's not in the local
-        # dictionary. This may happen with astng built from living objects.
+        # dictionary. This may happen with astroid built from living objects.
         return None
-    if isinstance(meth_node, astng.Function):
+    if isinstance(meth_node, astroid.Function):
         return meth_node
     return None
 
+def _get_unpacking_extra_info(node, infered):
+    """return extra information to add to the message for unpacking-non-sequence
+    and unbalanced-tuple-unpacking errors
+    """
+    more = ''
+    infered_module = infered.root().name
+    if node.root().name == infered_module:
+        if node.lineno == infered.lineno:
+            more = ' %s' % infered.as_string()
+        elif infered.lineno:
+            more = ' defined at line %s' % infered.lineno
+    elif infered.lineno:
+        more = ' defined at line %s of %s' % (infered.lineno, infered_module)
+    return more
+
+def _detect_global_scope(node, frame, defframe):
+    """ Detect that the given frames share a global
+    scope.
+
+    Two frames share a global scope when neither
+    of them is hidden under a function scope, nor is
+    any of their parent scopes, up to the root scope.
+    In this case, depending on something defined later on
+    will not work, because it is still undefined.
+
+    Example:
+        class A:
+            # B has the same global scope as `C`, leading to a NameError.
+            class B(C): ...
+        class C: ...
+
+    """
+    def_scope = scope = None
+    if frame and frame.parent:
+        scope = frame.parent.scope()
+    if defframe and defframe.parent:
+        def_scope = defframe.parent.scope()
+    if isinstance(frame, astroid.Function):
+        # If the parent of the current node is a
+        # function, then it can be under its scope
+        # (defined in, which doesn't concern us) or
+        # the `->` part of annotations. The same goes
+        # for annotations of function arguments, they'll have
+        # their parent the Arguments node.
+        if not isinstance(node.parent,
+                          (astroid.Function, astroid.Arguments)):
+            return False
+    elif any(not isinstance(f, (astroid.Class, astroid.Module))
+             for f in (frame, defframe)):
+        # Not interested in other frames, since they are already
+        # not in a global scope.
+        return False
+
+    break_scopes = []
+    for s in (scope, def_scope):
+        # Look for parent scopes. If there is anything different
+        # than a module or a class scope, then the frames don't
+        # share a global scope.
+        parent_scope = s
+        while parent_scope:
+            if not isinstance(parent_scope, (astroid.Class, astroid.Module)):
+                break_scopes.append(parent_scope)
+                break
+            if parent_scope.parent:
+                parent_scope = parent_scope.parent.scope()
+            else:
+                break
+    if break_scopes and len(set(break_scopes)) != 1:
+        # Store different scopes than expected.
+        # If the stored scopes are, in fact, the very same, then it means
+        # that the two frames (frame and defframe) share the same scope,
+        # and we could apply our lineno analysis over them.
+        # For instance, this works when they are inside a function, the node
+        # that uses a definition and the definition itself.
+        return False
+    # At this point, we are certain that frame and defframe share a scope
+    # and the definition of the first depends on the second.
+    return frame.lineno < defframe.lineno
+
+def _fix_dot_imports(not_consumed):
+    """ Try to fix imports with multiple dots, by returning a dictionary
+    with the import names expanded. The function unflattens root imports,
+    like 'xml' (when we have both 'xml.etree' and 'xml.sax'), to 'xml.etree'
+    and 'xml.sax' respectively.
+    """
+    # TODO: this should be improved in issue astroid #46
+    names = {}
+    for name, stmts in six.iteritems(not_consumed):
+        if any(isinstance(stmt, astroid.AssName)
+               and isinstance(stmt.ass_type(), astroid.AugAssign)
+               for stmt in stmts):
+            continue
+        for stmt in stmts:
+            if not isinstance(stmt, (astroid.From, astroid.Import)):
+                continue
+            for imports in stmt.names:
+                second_name = None
+                if imports[0] == "*":
+                    # In case of wildcard imports,
+                    # pick the name from inside the imported module.
+                    second_name = name
+                else:
+                    if imports[0].find(".") > -1 or name in imports:
+                        # Most likely something like 'xml.etree',
+                        # which will appear in the .locals as 'xml'.
+                        # Only pick the name if it wasn't consumed.
+                        second_name = imports[0]
+                if second_name and second_name not in names:
+                    names[second_name] = stmt
+    return sorted(names.items(), key=lambda a: a[1].fromlineno)
+
+def _find_frame_imports(name, frame):
+    """
+    Detect imports in the frame, with the required
+    *name*. Such imports can be considered assignments.
+    Returns True if an import for the given name was found.
+    """
+    imports = frame.nodes_of_class((astroid.Import, astroid.From))
+    for import_node in imports:
+        for import_name, import_alias in import_node.names:
+            # If the import uses an alias, check only that.
+            # Otherwise, check only the import name.
+            if import_alias:
+                if import_alias == name:
+                    return True
+            elif import_name and import_name == name:
+                return True
+
 
 MSGS = {
     'E0601': ('Using variable %r before assignment',
+              'used-before-assignment',
               'Used when a local variable is accessed before it\'s \
               assignment.'),
     'E0602': ('Undefined variable %r',
+              'undefined-variable',
               'Used when an undefined variable is accessed.'),
+    'E0603': ('Undefined variable name %r in __all__',
+              'undefined-all-variable',
+              'Used when an undefined variable name is referenced in __all__.'),
+    'E0604': ('Invalid object %r in __all__, must contain only strings',
+              'invalid-all-object',
+              'Used when an invalid (non-string) object occurs in __all__.'),
     'E0611': ('No name %r in module %r',
+              'no-name-in-module',
               'Used when a name cannot be found in a module.'),
 
     'W0601': ('Global variable %r undefined at the module level',
+              'global-variable-undefined',
               'Used when a variable is defined through the "global" statement \
               but the variable is not defined in the module scope.'),
     'W0602': ('Using global for %r but no assignment is done',
+              'global-variable-not-assigned',
               'Used when a variable is defined through the "global" statement \
               but no assignment to this variable is done.'),
     'W0603': ('Using the global statement', # W0121
+              'global-statement',
               'Used when you use the "global" statement to update a global \
-              variable. PyLint just try to discourage this \
+              variable. Pylint just try to discourage this \
               usage. That doesn\'t mean you can not use it !'),
     'W0604': ('Using the global statement at the module level', # W0103
+              'global-at-module-level',
               'Used when you use the "global" statement at the module level \
               since it has no effect'),
-    'W0611': ('Unused import %s',
+    'W0611': ('Unused %s',
+              'unused-import',
               'Used when an imported module or variable is not used.'),
     'W0612': ('Unused variable %r',
+              'unused-variable',
               'Used when a variable is defined but not used.'),
     'W0613': ('Unused argument %r',
+              'unused-argument',
               'Used when a function or method argument is not used.'),
     'W0614': ('Unused import %s from wildcard import',
+              'unused-wildcard-import',
               'Used when an imported module or variable is not used from a \
               \'from X import *\' style import.'),
 
     'W0621': ('Redefining name %r from outer scope (line %s)',
+              'redefined-outer-name',
               'Used when a variable\'s name hide a name defined in the outer \
               scope.'),
     'W0622': ('Redefining built-in %r',
+              'redefined-builtin',
               'Used when a variable or function override a built-in.'),
     'W0623': ('Redefining name %r from %s in exception handler',
+              'redefine-in-handler',
               'Used when an exception handler assigns the exception \
                to an existing name'),
 
     'W0631': ('Using possibly undefined loop variable %r',
+              'undefined-loop-variable',
               'Used when an loop variable (i.e. defined by a for loop or \
               a list comprehension or a generator expression) is used outside \
               the loop.'),
+
+    'W0632': ('Possible unbalanced tuple unpacking with '
+              'sequence%s: '
+              'left side has %d label(s), right side has %d value(s)',
+              'unbalanced-tuple-unpacking',
+              'Used when there is an unbalanced tuple unpacking in assignment'),
+
+    'W0633': ('Attempting to unpack a non-sequence%s',
+              'unpacking-non-sequence',
+              'Used when something which is not '
+              'a sequence is used in an unpack assignment'),
+
+    'W0640': ('Cell variable %s defined in loop',
+              'cell-var-from-loop',
+              'A variable used in a closure is defined in a loop. '
+              'This will result in all closures using the same value for '
+              'the closed-over variable.'),
+
     }
 
 class VariablesChecker(BaseChecker):
@@ -104,67 +280,150 @@
     * undefined variables
     * redefinition of variable from builtins or from an outer scope
     * use of variable before assignment
+    * __all__ consistency
     """
 
-    __implements__ = IASTNGChecker
+    __implements__ = IAstroidChecker
 
     name = 'variables'
     msgs = MSGS
     priority = -1
-    options = (
-               ("init-import",
+    options = (("init-import",
                 {'default': 0, 'type' : 'yn', 'metavar' : '<y_or_n>',
                  'help' : 'Tells whether we should check for unused import in \
 __init__ files.'}),
                ("dummy-variables-rgx",
-                {'default': ('_|dummy'),
+                {'default': ('_$|dummy'),
                  'type' :'regexp', 'metavar' : '<regexp>',
-                 'help' : 'A regular expression matching the beginning of \
-                  the name of dummy variables (i.e. not used).'}),
+                 'help' : 'A regular expression matching the name of dummy \
+variables (i.e. expectedly not used).'}),
                ("additional-builtins",
                 {'default': (), 'type' : 'csv',
                  'metavar' : '<comma separated list>',
                  'help' : 'List of additional names supposed to be defined in \
 builtins. Remember that you should avoid to define new builtins when possible.'
-                 }),
+                }),
+               ("callbacks",
+                {'default' : ('cb_', '_cb'), 'type' : 'csv',
+                 'metavar' : '<callbacks>',
+                 'help' : 'List of strings which can identify a callback '
+                          'function by name. A callback name must start or '
+                          'end with one of those strings.'}
                )
+              )
     def __init__(self, linter=None):
         BaseChecker.__init__(self, linter)
         self._to_consume = None
         self._checking_mod_attr = None
-        self._vars = None
 
     def visit_module(self, node):
         """visit module : update consumption analysis variable
         checks globals doesn't overrides builtins
         """
         self._to_consume = [(copy(node.locals), {}, 'module')]
-        self._vars = []
-        for name, stmts in node.locals.items():
+        for name, stmts in six.iteritems(node.locals):
             if is_builtin(name) and not is_inside_except(stmts[0]):
                 # do not print Redefining builtin for additional builtins
-                self.add_message('W0622', args=name, node=stmts[0])
+                self.add_message('redefined-builtin', args=name, node=stmts[0])
 
-    @check_messages('W0611', 'W0614')
+    @check_messages('unused-import', 'unused-wildcard-import',
+                    'redefined-builtin', 'undefined-all-variable',
+                    'invalid-all-object')
     def leave_module(self, node):
         """leave module: check globals
         """
         assert len(self._to_consume) == 1
         not_consumed = self._to_consume.pop()[0]
+        # attempt to check for __all__ if defined
+        if '__all__' in node.locals:
+            assigned = next(node.igetattr('__all__'))
+            if assigned is not astroid.YES:
+                for elt in getattr(assigned, 'elts', ()):
+                    try:
+                        elt_name = next(elt.infer())
+                    except astroid.InferenceError:
+                        continue
+
+                    if not isinstance(elt_name, astroid.Const) \
+                             or not isinstance(elt_name.value, six.string_types):
+                        self.add_message('invalid-all-object',
+                                         args=elt.as_string(), node=elt)
+                        continue
+                    elt_name = elt_name.value
+                    # If elt is in not_consumed, remove it from not_consumed
+                    if elt_name in not_consumed:
+                        del not_consumed[elt_name]
+                        continue
+                    if elt_name not in node.locals:
+                        if not node.package:
+                            self.add_message('undefined-all-variable',
+                                             args=elt_name,
+                                             node=elt)
+                        else:
+                            basename = os.path.splitext(node.file)[0]
+                            if os.path.basename(basename) == '__init__':
+                                name = node.name + "." + elt_name
+                                try:
+                                    modutils.file_from_modpath(name.split("."))
+                                except ImportError:
+                                    self.add_message('undefined-all-variable',
+                                                     args=elt_name,
+                                                     node=elt)
+                                except SyntaxError:
+                                    # don't yield a syntax-error warning,
+                                    # because it will be later yielded
+                                    # when the file will be checked
+                                    pass
         # don't check unused imports in __init__ files
         if not self.config.init_import and node.package:
             return
-        for name, stmts in not_consumed.items():
-            stmt = stmts[0]
-            if isinstance(stmt, astng.Import):
-                self.add_message('W0611', args=name, node=stmt)
-            elif isinstance(stmt, astng.From) and stmt.modname != '__future__':
-                if stmt.names[0][0] == '*':
-                    self.add_message('W0614', args=name, node=stmt)
-                else:
-                    self.add_message('W0611', args=name, node=stmt)
+
+        self._check_imports(not_consumed)
+
+    def _check_imports(self, not_consumed):
+        local_names = _fix_dot_imports(not_consumed)
+        checked = set()
+        for name, stmt in local_names:
+            for imports in stmt.names:
+                real_name = imported_name = imports[0]
+                if imported_name == "*":
+                    real_name = name
+                as_name = imports[1]
+                if real_name in checked:
+                    continue
+                if name not in (real_name, as_name):
+                    continue
+                checked.add(real_name)
+
+                if (isinstance(stmt, astroid.Import) or
+                        (isinstance(stmt, astroid.From) and
+                         not stmt.modname)):
+                    if (isinstance(stmt, astroid.From) and
+                            SPECIAL_OBJ.search(imported_name)):
+                        # Filter special objects (__doc__, __all__) etc.,
+                        # because they can be imported for exporting.
+                        continue
+                    if as_name is None:
+                        msg = "import %s" % imported_name
+                    else:
+                        msg = "%s imported as %s" % (imported_name, as_name)
+                    self.add_message('unused-import', args=msg, node=stmt)
+                elif isinstance(stmt, astroid.From) and stmt.modname != '__future__':
+                    if SPECIAL_OBJ.search(imported_name):
+                        # Filter special objects (__doc__, __all__) etc.,
+                        # because they can be imported for exporting.
+                        continue
+                    if imported_name == '*':
+                        self.add_message('unused-wildcard-import',
+                                         args=name, node=stmt)
+                    else:
+                        if as_name is None:
+                            msg = "%s imported from %s" % (imported_name, stmt.modname)
+                        else:
+                            fields = (imported_name, stmt.modname, as_name)
+                            msg = "%s imported from %s as %s" % fields
+                        self.add_message('unused-import', args=msg, node=stmt)
         del self._to_consume
-        del self._vars
 
     def visit_class(self, node):
         """visit class: update consumption analysis variable
@@ -225,25 +484,27 @@
         """visit function: update consumption analysis variable and check locals
         """
         self._to_consume.append((copy(node.locals), {}, 'function'))
-        self._vars.append({})
-        if not set(('W0621', 'W0622')) & self.active_msgs:
+        if not (self.linter.is_message_enabled('redefined-outer-name') or
+                self.linter.is_message_enabled('redefined-builtin')):
             return
         globs = node.root().globals
         for name, stmt in node.items():
             if is_inside_except(stmt):
                 continue
-            if name in globs and not isinstance(stmt, astng.Global):
-                line = globs[name][0].lineno
-                self.add_message('W0621', args=(name, line), node=stmt)
+            if name in globs and not isinstance(stmt, astroid.Global):
+                line = globs[name][0].fromlineno
+                dummy_rgx = self.config.dummy_variables_rgx
+                if not dummy_rgx.match(name):
+                    self.add_message('redefined-outer-name', args=(name, line), node=stmt)
             elif is_builtin(name):
                 # do not print Redefining builtin for additional builtins
-                self.add_message('W0622', args=name, node=stmt)
+                self.add_message('redefined-builtin', args=name, node=stmt)
 
     def leave_function(self, node):
         """leave function: check function's locals are consumed"""
         not_consumed = self._to_consume.pop()[0]
-        self._vars.pop(0)
-        if not set(('W0612', 'W0613')) & self.active_msgs:
+        if not (self.linter.is_message_enabled('unused-variable') or
+                self.linter.is_message_enabled('unused-argument')):
             return
         # don't check arguments of function which are only raising an exception
         if is_error(node):
@@ -253,18 +514,46 @@
         klass = node.parent.frame()
         if is_method and (klass.type == 'interface' or node.is_abstract()):
             return
+        if is_method and isinstance(klass, astroid.Class):
+            confidence = INFERENCE if has_known_bases(klass) else INFERENCE_FAILURE
+        else:
+            confidence = HIGH
         authorized_rgx = self.config.dummy_variables_rgx
         called_overridden = False
         argnames = node.argnames()
-        for name, stmts in not_consumed.iteritems():
+        global_names = set()
+        nonlocal_names = set()
+        for global_stmt in node.nodes_of_class(astroid.Global):
+            global_names.update(set(global_stmt.names))
+        for nonlocal_stmt in node.nodes_of_class(astroid.Nonlocal):
+            nonlocal_names.update(set(nonlocal_stmt.names))
+
+        for name, stmts in six.iteritems(not_consumed):
             # ignore some special names specified by user configuration
             if authorized_rgx.match(name):
                 continue
             # ignore names imported by the global statement
             # FIXME: should only ignore them if it's assigned latter
             stmt = stmts[0]
-            if isinstance(stmt, astng.Global):
+            if isinstance(stmt, astroid.Global):
                 continue
+            if isinstance(stmt, (astroid.Import, astroid.From)):
+                # Detect imports, assigned to global statements.
+                if global_names:
+                    skip = False
+                    for import_name, import_alias in stmt.names:
+                        # If the import uses an alias, check only that.
+                        # Otherwise, check only the import name.
+                        if import_alias:
+                            if import_alias in global_names:
+                                skip = True
+                                break
+                        elif import_name in global_names:
+                            skip = True
+                            break
+                    if skip:
+                        continue
+
             # care about functions with unknown argument (builtins)
             if name in argnames:
                 if is_method:
@@ -279,26 +568,32 @@
                         continue
                     if node.name in PYMETHODS and node.name not in ('__init__', '__new__'):
                         continue
-                # don't check callback arguments XXX should be configurable
-                if node.name.startswith('cb_') or node.name.endswith('_cb'):
+                # don't check callback arguments
+                if any(node.name.startswith(cb) or node.name.endswith(cb)
+                       for cb in self.config.callbacks):
                     continue
-                self.add_message('W0613', args=name, node=stmt)
+                self.add_message('unused-argument', args=name, node=stmt,
+                                 confidence=confidence)
             else:
-                self.add_message('W0612', args=name, node=stmt)
+                if stmt.parent and isinstance(stmt.parent, astroid.Assign):
+                    if name in nonlocal_names:
+                        continue
+                self.add_message('unused-variable', args=name, node=stmt)
 
-    @check_messages('W0601', 'W0602', 'W0603', 'W0604', 'W0622')
+    @check_messages('global-variable-undefined', 'global-variable-not-assigned', 'global-statement',
+                    'global-at-module-level', 'redefined-builtin')
     def visit_global(self, node):
         """check names imported exists in the global scope"""
         frame = node.frame()
-        if isinstance(frame, astng.Module):
-            self.add_message('W0604', node=node)
+        if isinstance(frame, astroid.Module):
+            self.add_message('global-at-module-level', node=node)
             return
         module = frame.root()
         default_message = True
         for name in node.names:
             try:
                 assign_nodes = module.getattr(name)
-            except astng.NotFoundError:
+            except astroid.NotFoundError:
                 # unassigned global, skip
                 assign_nodes = []
             for anode in assign_nodes:
@@ -310,24 +605,52 @@
                     # same scope level assignment
                     break
             else:
-                # global but no assignment
-                self.add_message('W0602', args=name, node=node)
+                if not _find_frame_imports(name, frame):
+                    self.add_message('global-variable-not-assigned',
+                                     args=name, node=node)
                 default_message = False
             if not assign_nodes:
                 continue
             for anode in assign_nodes:
                 if anode.parent is None:
-                    self.add_message('W0622', args=name, node=node)
+                    self.add_message('redefined-builtin', args=name, node=node)
                     break
                 if anode.frame() is module:
                     # module level assignment
                     break
             else:
                 # global undefined at the module scope
-                self.add_message('W0601', args=name, node=node)
+                self.add_message('global-variable-undefined', args=name, node=node)
                 default_message = False
         if default_message:
-            self.add_message('W0603', node=node)
+            self.add_message('global-statement', node=node)
+
+    def _check_late_binding_closure(self, node, assignment_node):
+        def _is_direct_lambda_call():
+            return (isinstance(node_scope.parent, astroid.CallFunc)
+                    and node_scope.parent.func is node_scope)
+
+        node_scope = node.scope()
+        if not isinstance(node_scope, (astroid.Lambda, astroid.Function)):
+            return
+        if isinstance(node.parent, astroid.Arguments):
+            return
+
+        if isinstance(assignment_node, astroid.Comprehension):
+            if assignment_node.parent.parent_of(node.scope()):
+                self.add_message('cell-var-from-loop', node=node, args=node.name)
+        else:
+            assign_scope = assignment_node.scope()
+            maybe_for = assignment_node
+            while not isinstance(maybe_for, astroid.For):
+                if maybe_for is assign_scope:
+                    break
+                maybe_for = maybe_for.parent
+            else:
+                if (maybe_for.parent_of(node_scope)
+                        and not _is_direct_lambda_call()
+                        and not isinstance(node_scope.statement(), astroid.Return)):
+                    self.add_message('cell-var-from-loop', node=node, args=node.name)
 
     def _loopvar_name(self, node, name):
         # filter variables according to node's scope
@@ -337,7 +660,7 @@
         #astmts = [stmt for stmt in node.lookup(name)[1]
         #          if hasattr(stmt, 'ass_type')] and
         #          not stmt.statement().parent_of(node)]
-        if 'W0631' not in self.active_msgs:
+        if not self.linter.is_message_enabled('undefined-loop-variable'):
             return
         astmts = [stmt for stmt in node.lookup(name)[1]
                   if hasattr(stmt, 'ass_type')]
@@ -355,35 +678,38 @@
             _astmts = astmts[:1]
         for i, stmt in enumerate(astmts[1:]):
             if (astmts[i].statement().parent_of(stmt)
-                and not in_for_else_branch(astmts[i].statement(), stmt)):
+                    and not in_for_else_branch(astmts[i].statement(), stmt)):
                 continue
             _astmts.append(stmt)
         astmts = _astmts
         if len(astmts) == 1:
             ass = astmts[0].ass_type()
-            if isinstance(ass, (astng.For, astng.Comprehension, astng.GenExpr)) \
+            if isinstance(ass, (astroid.For, astroid.Comprehension, astroid.GenExpr)) \
                    and not ass.statement() is node.statement():
-                self.add_message('W0631', args=name, node=node)
+                self.add_message('undefined-loop-variable', args=name, node=node)
 
+    @check_messages('redefine-in-handler')
     def visit_excepthandler(self, node):
-        clobbering, args = clobber_in_except(node.name)
-        if clobbering:
-            self.add_message('W0623', args=args, node=node)
+        for name in get_all_elements(node.name):
+            clobbering, args = clobber_in_except(name)
+            if clobbering:
+                self.add_message('redefine-in-handler', args=args, node=name)
 
     def visit_assname(self, node):
-        if isinstance(node.ass_type(), astng.AugAssign):
+        if isinstance(node.ass_type(), astroid.AugAssign):
             self.visit_name(node)
 
     def visit_delname(self, node):
         self.visit_name(node)
 
+    @check_messages(*(MSGS.keys()))
     def visit_name(self, node):
         """check that a name is defined if the current scope and doesn't
         redefine a built-in
         """
         stmt = node.statement()
         if stmt.fromlineno is None:
-            # name node from a astng built from live code, skip
+            # name node from a astroid built from live code, skip
             assert not stmt.root().file.endswith('.py')
             return
         name = node.name
@@ -392,7 +718,7 @@
         # a decorator, then start from the parent frame of the function instead
         # of the function frame - and thus open an inner class scope
         if (is_func_default(node) or is_func_decorator(node)
-            or is_ancestor_name(frame, node)):
+                or is_ancestor_name(frame, node)):
             start_index = len(self._to_consume) - 2
         else:
             start_index = len(self._to_consume) - 1
@@ -406,12 +732,37 @@
             # names. The only exception is when the starting scope is a
             # comprehension and its direct outer scope is a class
             if scope_type == 'class' and i != start_index and not (
-                base_scope_type == 'comprehension' and i == start_index-1):
-                # XXX find a way to handle class scope in a smoother way
-                continue
+                    base_scope_type == 'comprehension' and i == start_index-1):
+                # Detect if we are in a local class scope, as an assignment.
+                # For example, the following is fair game.
+                #
+                # class A:
+                #    b = 1
+                #    c = lambda b=b: b * b
+                #
+                # class B:
+                #    tp = 1
+                #    def func(self, arg: tp):
+                #        ...
+
+                in_annotation = (
+                    PY3K and isinstance(frame, astroid.Function)
+                    and node.statement() is frame and
+                    (node in frame.args.annotations
+                     or node is frame.args.varargannotation
+                     or node is frame.args.kwargannotation))
+                if in_annotation:
+                    frame_locals = frame.parent.scope().locals
+                else:
+                    frame_locals = frame.locals
+                if not ((isinstance(frame, astroid.Class) or in_annotation)
+                        and name in frame_locals):
+                    continue
             # the name has already been consumed, only check it's not a loop
             # variable used outside the loop
             if name in consumed:
+                defnode = assign_parent(consumed[name][0])
+                self._check_late_binding_closure(node, defnode)
                 self._loopvar_name(node, name)
                 break
             # mark the name as consumed if it's defined in this scope
@@ -423,11 +774,12 @@
             # checks for use before assignment
             defnode = assign_parent(to_consume[name][0])
             if defnode is not None:
+                self._check_late_binding_closure(node, defnode)
                 defstmt = defnode.statement()
                 defframe = defstmt.frame()
                 maybee0601 = True
                 if not frame is defframe:
-                    maybee0601 = False
+                    maybee0601 = _detect_global_scope(node, frame, defframe)
                 elif defframe.parent is None:
                     # we are at the module level, check the name is not
                     # defined in builtins
@@ -438,52 +790,110 @@
                     # defined in global or builtin scope
                     if defframe.root().lookup(name)[1]:
                         maybee0601 = False
+                    else:
+                        # check if we have a nonlocal
+                        if name in defframe.locals:
+                            maybee0601 = not any(isinstance(child, astroid.Nonlocal)
+                                                 and name in child.names
+                                                 for child in defframe.get_children())
+
+                # Handle a couple of class scoping issues.
+                annotation_return = False
+                # The class reuses itself in the class scope.
+                recursive_klass = (frame is defframe and
+                                   defframe.parent_of(node) and
+                                   isinstance(defframe, astroid.Class) and
+                                   node.name == defframe.name)
+                if (self._to_consume[-1][-1] == 'lambda' and
+                        isinstance(frame, astroid.Class)
+                        and name in frame.locals):
+                    maybee0601 = True
+                elif (isinstance(defframe, astroid.Class) and
+                      isinstance(frame, astroid.Function)):
+                    # Special rule for function return annotations,
+                    # which uses the same name as the class where
+                    # the function lives.
+                    if (PY3K and node is frame.returns and
+                            defframe.parent_of(frame.returns)):
+                        maybee0601 = annotation_return = True
+
+                    if (maybee0601 and defframe.name in defframe.locals and
+                            defframe.locals[name][0].lineno < frame.lineno):
+                        # Detect class assignments with the same
+                        # name as the class. In this case, no warning
+                        # should be raised.
+                        maybee0601 = False
+                elif recursive_klass:
+                    maybee0601 = True
+                else:
+                    maybee0601 = maybee0601 and stmt.fromlineno <= defstmt.fromlineno
+
                 if (maybee0601
-                    and stmt.fromlineno <= defstmt.fromlineno
-                    and not is_defined_before(node)
-                    and not are_exclusive(stmt, defstmt, ('NameError', 'Exception', 'BaseException'))):
-                    if defstmt is stmt and isinstance(node, (astng.DelName,
-                                                             astng.AssName)):
-                        self.add_message('E0602', args=name, node=node)
+                        and not is_defined_before(node)
+                        and not are_exclusive(stmt, defstmt, ('NameError',
+                                                              'Exception',
+                                                              'BaseException'))):
+                    if recursive_klass or (defstmt is stmt and
+                                           isinstance(node, (astroid.DelName,
+                                                             astroid.AssName))):
+                        self.add_message('undefined-variable', args=name, node=node)
+                    elif annotation_return:
+                        self.add_message('undefined-variable', args=name, node=node)
                     elif self._to_consume[-1][-1] != 'lambda':
-                        # E0601 may *not* occurs in lambda scope
-                        self.add_message('E0601', args=name, node=node)
-            if not isinstance(node, astng.AssName): # Aug AssName
-                del to_consume[name]
-            else:
+                        # E0601 may *not* occur in lambda scope.
+                        self.add_message('used-before-assignment', args=name, node=node)
+                    elif self._to_consume[-1][-1] == 'lambda':
+                        # E0601 can occur in class-level scope in lambdas, as in
+                        # the following example:
+                        #   class A:
+                        #      x = lambda attr: f + attr
+                        #      f = 42
+                        if isinstance(frame, astroid.Class) and name in frame.locals:
+                            if isinstance(node.parent, astroid.Arguments):
+                                # Doing the following is fine:
+                                #   class A:
+                                #      x = 42
+                                #      y = lambda attr=x: attr
+                                if stmt.fromlineno <= defstmt.fromlineno:
+                                    self.add_message('used-before-assignment',
+                                                     args=name, node=node)
+                            else:
+                                self.add_message('undefined-variable',
+                                                 args=name, node=node)
+
+            if isinstance(node, astroid.AssName): # Aug AssName
                 del consumed[name]
+            else:
+                del to_consume[name]
             # check it's not a loop variable used outside the loop
             self._loopvar_name(node, name)
             break
         else:
             # we have not found the name, if it isn't a builtin, that's an
             # undefined name !
-            if not (name in astng.Module.scope_attrs or is_builtin(name)
+            if not (name in astroid.Module.scope_attrs or is_builtin(name)
                     or name in self.config.additional_builtins):
-                self.add_message('E0602', args=name, node=node)
+                self.add_message('undefined-variable', args=name, node=node)
 
-    @check_messages('E0611')
+    @check_messages('no-name-in-module')
     def visit_import(self, node):
         """check modules attribute accesses"""
         for name, _ in node.names:
             parts = name.split('.')
             try:
-                module = node.infer_name_module(parts[0]).next()
-            except astng.ResolveError:
+                module = next(node.infer_name_module(parts[0]))
+            except astroid.ResolveError:
                 continue
             self._check_module_attrs(node, module, parts[1:])
 
-    @check_messages('E0611')
+    @check_messages('no-name-in-module')
     def visit_from(self, node):
         """check modules attribute accesses"""
         name_parts = node.modname.split('.')
         level = getattr(node, 'level', None)
         try:
             module = node.root().import_module(name_parts[0], level=level)
-        except ASTNGBuildingException:
-            return
-        except Exception, exc:
-            print 'Unhandled exception in VariablesChecker:', exc
+        except Exception: # pylint: disable=broad-except
             return
         module = self._check_module_attrs(node, module, name_parts[1:])
         if not module:
@@ -493,34 +903,93 @@
                 continue
             self._check_module_attrs(node, module, name.split('.'))
 
+    @check_messages('unbalanced-tuple-unpacking', 'unpacking-non-sequence')
+    def visit_assign(self, node):
+        """Check unbalanced tuple unpacking for assignments
+        and unpacking non-sequences.
+        """
+        if not isinstance(node.targets[0], (astroid.Tuple, astroid.List)):
+            return
+
+        targets = node.targets[0].itered()
+        try:
+            for infered in node.value.infer():
+                self._check_unpacking(infered, node, targets)
+        except astroid.InferenceError:
+            return
+
+    def _check_unpacking(self, infered, node, targets):
+        """ Check for unbalanced tuple unpacking
+        and unpacking non sequences.
+        """
+        if infered is astroid.YES:
+            return
+        if (isinstance(infered.parent, astroid.Arguments) and
+                isinstance(node.value, astroid.Name) and
+                node.value.name == infered.parent.vararg):
+            # Variable-length argument, we can't determine the length.
+            return
+        if isinstance(infered, (astroid.Tuple, astroid.List)):
+            # attempt to check unpacking is properly balanced
+            values = infered.itered()
+            if len(targets) != len(values):
+                # Check if we have starred nodes.
+                if any(isinstance(target, astroid.Starred)
+                       for target in targets):
+                    return
+                self.add_message('unbalanced-tuple-unpacking', node=node,
+                                 args=(_get_unpacking_extra_info(node, infered),
+                                       len(targets),
+                                       len(values)))
+        # attempt to check unpacking may be possible (ie RHS is iterable)
+        elif isinstance(infered, astroid.Instance):
+            for meth in ('__iter__', '__getitem__'):
+                try:
+                    infered.getattr(meth)
+                    break
+                except astroid.NotFoundError:
+                    continue
+            else:
+                self.add_message('unpacking-non-sequence', node=node,
+                                 args=(_get_unpacking_extra_info(node, infered),))
+        else:
+            self.add_message('unpacking-non-sequence', node=node,
+                             args=(_get_unpacking_extra_info(node, infered),))
+
+
     def _check_module_attrs(self, node, module, module_names):
         """check that module_names (list of string) are accessible through the
         given module
         if the latest access name corresponds to a module, return it
         """
-        assert isinstance(module, astng.Module), module
+        assert isinstance(module, astroid.Module), module
+        ignored_modules = get_global_option(self, 'ignored-modules',
+                                            default=[])
         while module_names:
             name = module_names.pop(0)
             if name == '__dict__':
                 module = None
                 break
             try:
-                module = module.getattr(name)[0].infer().next()
-                if module is astng.YES:
+                module = next(module.getattr(name)[0].infer())
+                if module is astroid.YES:
                     return None
-            except astng.NotFoundError:
-                self.add_message('E0611', args=(name, module.name), node=node)
+            except astroid.NotFoundError:
+                if module.name in ignored_modules:
+                    return None
+                self.add_message('no-name-in-module',
+                                 args=(name, module.name), node=node)
                 return None
-            except astng.InferenceError:
+            except astroid.InferenceError:
                 return None
         if module_names:
             # FIXME: other message if name is not the latest part of
             # module_names ?
             modname = module and module.name or '__dict__'
-            self.add_message('E0611', node=node,
+            self.add_message('no-name-in-module', node=node,
                              args=('.'.join(module_names), modname))
             return None
-        if isinstance(module, astng.Module):
+        if isinstance(module, astroid.Module):
             return module
         return None
 
@@ -540,6 +1009,57 @@
         # do not check for not used locals here
         self._to_consume.pop()
 
+    def leave_module(self, node):
+        """ Update consumption analysis variable
+        for metaclasses.
+        """
+        module_locals = self._to_consume[0][0]
+        module_imports = self._to_consume[0][1]
+        consumed = {}
+
+        for klass in node.nodes_of_class(astroid.Class):
+            found = metaclass = name = None
+            if not klass._metaclass:
+                # Skip if this class doesn't use
+                # explictly a metaclass, but inherits it from ancestors
+                continue
+
+            metaclass = klass.metaclass()
+
+            # Look the name in the already found locals.
+            # If it's not found there, look in the module locals
+            # and in the imported modules.
+            if isinstance(klass._metaclass, astroid.Name):
+                name = klass._metaclass.name
+            elif metaclass:
+                # if it uses a `metaclass=module.Class`
+                name = metaclass.root().name
+
+            if name:
+                found = consumed.setdefault(
+                    name, module_locals.get(name, module_imports.get(name)))
+
+            if found is None and not metaclass:
+                name = None
+                if isinstance(klass._metaclass, astroid.Name):
+                    name = klass._metaclass.name
+                elif isinstance(klass._metaclass, astroid.Getattr):
+                    name = klass._metaclass.as_string()
+
+                if name is not None:
+                    if not (name in astroid.Module.scope_attrs or
+                            is_builtin(name) or
+                            name in self.config.additional_builtins or
+                            name in node.locals):
+                        self.add_message('undefined-variable',
+                                         node=klass,
+                                         args=(name, ))
+        # Pop the consumed items, in order to
+        # avoid having unused-import false positives
+        for name in consumed:
+            module_locals.pop(name, None)
+        super(VariablesChecker3k, self).leave_module(node)
+
 if sys.version_info >= (3, 0):
     VariablesChecker = VariablesChecker3k
 
diff --git a/third_party/pylint/config.py b/third_party/pylint/config.py
index 60b51ee..ebfe578 100644
--- a/third_party/pylint/config.py
+++ b/third_party/pylint/config.py
@@ -1,3 +1,4 @@
+# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
 # This program is free software; you can redistribute it and/or modify it under
 # the terms of the GNU General Public License as published by the Free Software
 # Foundation; either version 2 of the License, or (at your option) any later
@@ -9,14 +10,14 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
-""" Copyright (c) 2003-2006 LOGILAB S.A. (Paris, FRANCE).
- http://www.logilab.fr/ -- mailto:contact@logilab.fr
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+"""utilities for Pylint configuration :
 
-  utilities for PyLint configuration :
-   _ pylintrc
-   _ pylint.d (PYLINT_HOME)
+* pylintrc
+* pylint.d (PYLINTHOME)
 """
+from __future__ import with_statement
+from __future__ import print_function
 
 import pickle
 import os
@@ -34,12 +35,6 @@
     PYLINT_HOME = ".pylint.d"
 else:
     PYLINT_HOME = join(USER_HOME, '.pylint.d')
-        
-if not exists(PYLINT_HOME):
-    try:
-        os.mkdir(PYLINT_HOME)
-    except OSError:
-        print >> sys.stderr, 'Unable to create directory %s' % PYLINT_HOME
 
 def get_pdata_path(base_name, recurs):
     """return the path of the file which should contain old search data for the
@@ -47,31 +42,38 @@
     """
     base_name = base_name.replace(os.sep, '_')
     return join(PYLINT_HOME, "%s%s%s"%(base_name, recurs, '.stats'))
-    
+
 def load_results(base):
     """try to unpickle and return data from file if it exists and is not
     corrupted
-    
+
     return an empty dictionary if it doesn't exists
     """
-    data_file = get_pdata_path(base, 1)        
+    data_file = get_pdata_path(base, 1)
     try:
-        return pickle.load(open(data_file))
-    except:
+        with open(data_file, _PICK_LOAD) as stream:
+            return pickle.load(stream)
+    except Exception: # pylint: disable=broad-except
         return {}
 
 if sys.version_info < (3, 0):
-    _PICK_MOD = 'w'
+    _PICK_DUMP, _PICK_LOAD = 'w', 'r'
 else:
-    _PICK_MOD = 'wb'
+    _PICK_DUMP, _PICK_LOAD = 'wb', 'rb'
 
 def save_results(results, base):
     """pickle results"""
+    if not exists(PYLINT_HOME):
+        try:
+            os.mkdir(PYLINT_HOME)
+        except OSError:
+            print('Unable to create directory %s' % PYLINT_HOME, file=sys.stderr)
     data_file = get_pdata_path(base, 1)
     try:
-        pickle.dump(results, open(data_file, _PICK_MOD))
-    except (IOError, OSError), ex:
-        print >> sys.stderr, 'Unable to create file %s: %s' % (data_file, ex)
+        with open(data_file, _PICK_DUMP) as stream:
+            pickle.dump(results, stream)
+    except (IOError, OSError) as ex:
+        print('Unable to create file %s: %s' % (data_file, ex), file=sys.stderr)
 
 # location of the configuration file ##########################################
 
@@ -96,6 +98,8 @@
             pylintrc = ".pylintrc"
         else:
             pylintrc = join(user_home, '.pylintrc')
+            if not isfile(pylintrc):
+                pylintrc = join(user_home, '.config', 'pylintrc')
     if not isfile(pylintrc):
         if isfile('/etc/pylintrc'):
             pylintrc = '/etc/pylintrc'
@@ -106,14 +110,14 @@
 PYLINTRC = find_pylintrc()
 
 ENV_HELP = '''
-The following environment variables are used :                                 
-    * PYLINTHOME                                                               
-    path to the directory where data of persistent run will be stored. If not
-found, it defaults to ~/.pylint.d/ or .pylint.d (in the current working
-directory).
-    * PYLINTRC                                                                 
-    path to the configuration file. If not found, it will use the first        
-existent file in ~/.pylintrc, /etc/pylintrc.
+The following environment variables are used:                                   
+    * PYLINTHOME                                                                
+    Path to the directory where the persistent for the run will be stored. If 
+not found, it defaults to ~/.pylint.d/ or .pylint.d (in the current working 
+directory).                                                                     
+    * PYLINTRC                                                                  
+    Path to the configuration file. See the documentation for the method used
+to search for configuration file.
 ''' % globals()
 
 # evaluation messages #########################################################
diff --git a/third_party/pylint/epylint.py b/third_party/pylint/epylint.py
index f6b16e7..4fd683e 100644
--- a/third_party/pylint/epylint.py
+++ b/third_party/pylint/epylint.py
@@ -1,5 +1,19 @@
-#!/usr/bin/env python
 # -*- coding: utf-8; mode: python; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:fenc=utf-8:ft=python:et:sw=4:ts=4:sts=4
+# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
+# http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """Emacs and Flymake compatible Pylint.
 
 This script is for integration with emacs and is compatible with flymake mode.
@@ -15,7 +29,8 @@
         a/b/x.py
         a/c/y.py
 
-   - Then if y.py imports x as "from a.b import x" the following produces pylint errors
+   - Then if y.py imports x as "from a.b import x" the following produces pylint
+     errors
 
        cd a/c; pylint y.py
 
@@ -27,89 +42,90 @@
      we are checking we need to go out of it to avoid these false positives.
 
 
-You may also use py_run to run pylint with desired options and get back (or not) its output.
+You may also use py_run to run pylint with desired options and get back (or not)
+its output.
 """
+from __future__ import print_function
 
-import sys, os, re
+import sys, os
+import os.path as osp
 from subprocess import Popen, PIPE
 
+def _get_env():
+    '''Extracts the environment PYTHONPATH and appends the current sys.path to
+    those.'''
+    env = dict(os.environ)
+    env['PYTHONPATH'] = os.pathsep.join(sys.path)
+    return env
 
-def lint(filename):
+def lint(filename, options=None):
     """Pylint the given file.
 
-    When run from emacs we will be in the directory of a file, and passed its filename.
-    If this file is part of a package and is trying to import other modules from within
-    its own package or another package rooted in a directory below it, pylint will classify
-    it as a failed import.
+    When run from emacs we will be in the directory of a file, and passed its
+    filename.  If this file is part of a package and is trying to import other
+    modules from within its own package or another package rooted in a directory
+    below it, pylint will classify it as a failed import.
 
-    To get around this, we traverse down the directory tree to find the root of the package this
-    module is in.  We then invoke pylint from this directory.
+    To get around this, we traverse down the directory tree to find the root of
+    the package this module is in.  We then invoke pylint from this directory.
 
-    Finally, we must correct the filenames in the output generated by pylint so Emacs doesn't
-    become confused (it will expect just the original filename, while pylint may extend it with
-    extra directories if we've traversed down the tree)
+    Finally, we must correct the filenames in the output generated by pylint so
+    Emacs doesn't become confused (it will expect just the original filename,
+    while pylint may extend it with extra directories if we've traversed down
+    the tree)
     """
     # traverse downwards until we are out of a python package
-    fullPath = os.path.abspath(filename)
-    parentPath, childPath = os.path.dirname(fullPath), os.path.basename(fullPath)
+    full_path = osp.abspath(filename)
+    parent_path = osp.dirname(full_path)
+    child_path = osp.basename(full_path)
 
-    while parentPath != "/" and os.path.exists(os.path.join(parentPath, '__init__.py')):
-        childPath = os.path.join(os.path.basename(parentPath), childPath)
-        parentPath = os.path.dirname(parentPath)
+    while parent_path != "/" and osp.exists(osp.join(parent_path, '__init__.py')):
+        child_path = osp.join(osp.basename(parent_path), child_path)
+        parent_path = osp.dirname(parent_path)
 
     # Start pylint
-    process = Popen('pylint -f parseable -r n --disable=C,R,I "%s"' %
-                    childPath, shell=True, stdout=PIPE, stderr=PIPE,
-                    cwd=parentPath)
-    p = process.stdout
+    # Ensure we use the python and pylint associated with the running epylint
+    from pylint import lint as lint_mod
+    lint_path = lint_mod.__file__
+    options = options or ['--disable=C,R,I']
+    cmd = [sys.executable, lint_path] + options + [
+        '--msg-template', '{path}:{line}: {category} ({msg_id}, {symbol}, {obj}) {msg}',
+        '-r', 'n', child_path]
+    process = Popen(cmd, stdout=PIPE, cwd=parent_path, env=_get_env(),
+                    universal_newlines=True)
 
-    # The parseable line format is '%(path)s:%(line)s: [%(sigle)s%(obj)s] %(msg)s'
-    # NOTE: This would be cleaner if we added an Emacs reporter to pylint.reporters.text ..
-    regex = re.compile(r"\[(?P<type>[WE])(?P<remainder>.*?)\]")
-
-    def _replacement(mObj):
-        "Alter to include 'Error' or 'Warning'"
-        if mObj.group("type") == "W":
-            replacement = "Warning"
-        else:
-            replacement = "Error"
-        # replace as "Warning (W0511, funcName): Warning Text"
-        return "%s (%s%s):" % (replacement, mObj.group("type"), mObj.group("remainder"))
-
-    for line in p:
+    for line in process.stdout:
         # remove pylintrc warning
         if line.startswith("No config file found"):
             continue
-        line = regex.sub(_replacement, line, 1)
+
         # modify the file name thats output to reverse the path traversal we made
         parts = line.split(":")
-        if parts and parts[0] == childPath:
+        if parts and parts[0] == child_path:
             line = ":".join([filename] + parts[1:])
-        print line,
+        print(line, end=' ')
 
-    p.close()
-
-def Run():
-    lint(sys.argv[1])
+    process.wait()
+    return process.returncode
 
 
 def py_run(command_options='', return_std=False, stdout=None, stderr=None,
            script='epylint'):
-    """Run pylint from python (needs Python >= 2.4).
+    """Run pylint from python
 
     ``command_options`` is a string containing ``pylint`` command line options;
-    ``return_std`` (boolean) indicates return of created standart output
+    ``return_std`` (boolean) indicates return of created standard output
     and error (see below);
-    ``stdout`` and ``stderr`` are 'file-like' objects in which standart output
+    ``stdout`` and ``stderr`` are 'file-like' objects in which standard output
     could be written.
 
     Calling agent is responsible for stdout/err management (creation, close).
-    Default standart output and error are those from sys,
+    Default standard output and error are those from sys,
     or standalone ones (``subprocess.PIPE``) are used
     if they are not set and ``return_std``.
 
     If ``return_std`` is set to ``True``, this function returns a 2-uple
-    containing standart output and error related to created process,
+    containing standard output and error related to created process,
     as follows: ``(stdout, stderr)``.
 
     A trivial usage could be as follows:
@@ -118,14 +134,14 @@
         pylint 0.18.1,
             ...
 
-    To silently run Pylint on a module, and get its standart output and error:
+    To silently run Pylint on a module, and get its standard output and error:
         >>> (pylint_stdout, pylint_stderr) = py_run( 'module_name.py', True)
     """
     # Create command line to call pylint
     if os.name == 'nt':
         script += '.bat'
     command_line = script + ' ' + command_options
-    # Providing standart output and/or error if not set
+    # Providing standard output and/or error if not set
     if stdout is None:
         if return_std:
             stdout = PIPE
@@ -137,13 +153,24 @@
         else:
             stderr = sys.stderr
     # Call pylint in a subprocess
-    p = Popen(command_line, shell=True, stdout=stdout, stderr=stderr)
+    p = Popen(command_line, shell=True, stdout=stdout, stderr=stderr,
+              env=_get_env(), universal_newlines=True)
     p.wait()
-    # Return standart output and error
+    # Return standard output and error
     if return_std:
         return (p.stdout, p.stderr)
 
 
-if __name__ == '__main__':
-    lint(sys.argv[1])
+def Run():
+    if len(sys.argv) == 1:
+        print("Usage: %s <filename> [options]" % sys.argv[0])
+        sys.exit(1)
+    elif not osp.exists(sys.argv[1]):
+        print("%s does not exist" % sys.argv[1])
+        sys.exit(1)
+    else:
+        sys.exit(lint(sys.argv[1], sys.argv[2:]))
 
+
+if __name__ == '__main__':
+    Run()
diff --git a/third_party/pylint/gui.py b/third_party/pylint/gui.py
index 2d8e81e..9c9b138 100644
--- a/third_party/pylint/gui.py
+++ b/third_party/pylint/gui.py
@@ -1,15 +1,37 @@
+# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
+# http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """Tkinker gui for pylint"""
+from __future__ import print_function
 
 import os
 import sys
 import re
-import Queue
 from threading import Thread
-from Tkinter import (Tk, Frame, Listbox, Entry, Label, Button, Scrollbar,
-                     Checkbutton, Radiobutton, IntVar, StringVar)
-from Tkinter import (TOP, LEFT, RIGHT, BOTTOM, END, X, Y, BOTH, SUNKEN, W,
-                     HORIZONTAL, DISABLED, NORMAL, W, E)
-from tkFileDialog import askopenfilename, askdirectory
+
+import six
+
+from six.moves.tkinter import (
+    Tk, Frame, Listbox, Entry, Label, Button, Scrollbar,
+    Checkbutton, Radiobutton, IntVar, StringVar, PanedWindow,
+    TOP, LEFT, RIGHT, BOTTOM, END, X, Y, BOTH, SUNKEN, W,
+    HORIZONTAL, DISABLED, NORMAL, W,
+)
+from six.moves.tkinter_tkfiledialog import (
+    askopenfilename, askdirectory,
+)
 
 import pylint.lint
 from pylint.reporters.guireporter import GUIReporter
@@ -21,7 +43,15 @@
           '(W)':'black', '(E)':'darkred',
           '(F)':'red'}
 
-class BasicStream:
+
+def convert_to_string(msg):
+    """make a string representation of a message"""
+    module_object = msg.module
+    if msg.obj:
+        module_object += ".%s" % msg.obj
+    return "(%s) %s [%d]: %s" % (msg.C, module_object, msg.line, msg.msg)
+
+class BasicStream(object):
     '''
     used in gui reporter instead of writing to stdout, it is written to
     this stream and saved in contents
@@ -33,34 +63,37 @@
         self.contents = []
         self.outdict = {}
         self.currout = None
-        self.nextTitle = None
+        self.next_title = None
 
     def write(self, text):
         """write text to the stream"""
         if re.match('^--+$', text.strip()) or re.match('^==+$', text.strip()):
             if self.currout:
-                self.outdict[self.currout].remove(self.nextTitle)
+                self.outdict[self.currout].remove(self.next_title)
                 self.outdict[self.currout].pop()
-            self.currout = self.nextTitle
+            self.currout = self.next_title
             self.outdict[self.currout] = ['']
 
         if text.strip():
-            self.nextTitle = text.strip()
+            self.next_title = text.strip()
 
-        if text.startswith('\n'):
+        if text.startswith(os.linesep):
             self.contents.append('')
-            if self.currout: self.outdict[self.currout].append('')
-        self.contents[-1] += text.strip('\n')
-        if self.currout: self.outdict[self.currout][-1] += text.strip('\n')
-        if text.endswith('\n') and text.strip():
+            if self.currout:
+                self.outdict[self.currout].append('')
+        self.contents[-1] += text.strip(os.linesep)
+        if self.currout:
+            self.outdict[self.currout][-1] += text.strip(os.linesep)
+        if text.endswith(os.linesep) and text.strip():
             self.contents.append('')
-            if self.currout: self.outdict[self.currout].append('')
+            if self.currout:
+                self.outdict[self.currout].append('')
 
     def fix_contents(self):
         """finalize what the contents of the dict should look like before output"""
         for item in self.outdict:
-            numEmpty = self.outdict[item].count('')
-            for i in range(numEmpty):
+            num_empty = self.outdict[item].count('')
+            for _ in range(num_empty):
                 self.outdict[item].remove('')
             if self.outdict[item]:
                 self.outdict[item].pop(0)
@@ -71,7 +104,7 @@
         self.gui.tabs = self.outdict
         try:
             self.gui.rating.set(self.outdict['Global evaluation'][0])
-        except:
+        except KeyError:
             self.gui.rating.set('Error')
         self.gui.refresh_results_window()
 
@@ -79,10 +112,10 @@
         self.contents = []
         self.outdict = {}
         self.currout = None
-        self.nextTitle = None
+        self.next_title = None
 
 
-class LintGui:
+class LintGui(object):
     """Build and control a window to interact with pylint"""
 
     def __init__(self, root=None):
@@ -92,14 +125,15 @@
         #reporter
         self.reporter = None
         #message queue for output from reporter
-        self.msg_queue = Queue.Queue()
+        self.msg_queue = six.moves.queue.Queue()
         self.msgs = []
+        self.visible_msgs = []
         self.filenames = []
         self.rating = StringVar()
         self.tabs = {}
         self.report_stream = BasicStream(self)
         #gui objects
-        self.lbMessages = None
+        self.lb_messages = None
         self.showhistory = None
         self.results = None
         self.btnRun = None
@@ -116,48 +150,65 @@
 
     def init_gui(self):
         """init helper"""
+
+        window = PanedWindow(self.root, orient="vertical")
+        window.pack(side=TOP, fill=BOTH, expand=True)
+
+        top_pane = Frame(window)
+        window.add(top_pane)
+        mid_pane = Frame(window)
+        window.add(mid_pane)
+        bottom_pane = Frame(window)
+        window.add(bottom_pane)
+
         #setting up frames
-        top_frame = Frame(self.root)
-        mid_frame = Frame(self.root)
-        radio_frame = Frame(self.root)
-        res_frame = Frame(self.root)
-        msg_frame = Frame(self.root)
-        check_frame = Frame(self.root)
-        history_frame = Frame(self.root)
-        btn_frame = Frame(self.root)
-        rating_frame = Frame(self.root)
+        top_frame = Frame(top_pane)
+        mid_frame = Frame(top_pane)
+        history_frame = Frame(top_pane)
+        radio_frame = Frame(mid_pane)
+        rating_frame = Frame(mid_pane)
+        res_frame = Frame(mid_pane)
+        check_frame = Frame(bottom_pane)
+        msg_frame = Frame(bottom_pane)
+        btn_frame = Frame(bottom_pane)
         top_frame.pack(side=TOP, fill=X)
         mid_frame.pack(side=TOP, fill=X)
         history_frame.pack(side=TOP, fill=BOTH, expand=True)
-        radio_frame.pack(side=TOP, fill=BOTH, expand=True)
-        rating_frame.pack(side=TOP, fill=BOTH, expand=True)
+        radio_frame.pack(side=TOP, fill=X)
+        rating_frame.pack(side=TOP, fill=X)
         res_frame.pack(side=TOP, fill=BOTH, expand=True)
-        check_frame.pack(side=TOP, fill=BOTH, expand=True)
+        check_frame.pack(side=TOP, fill=X)
         msg_frame.pack(side=TOP, fill=BOTH, expand=True)
         btn_frame.pack(side=TOP, fill=X)
 
+        # Binding F5 application-wide to run lint
+        self.root.bind('<F5>', self.run_lint)
+
         #Message ListBox
         rightscrollbar = Scrollbar(msg_frame)
         rightscrollbar.pack(side=RIGHT, fill=Y)
         bottomscrollbar = Scrollbar(msg_frame, orient=HORIZONTAL)
         bottomscrollbar.pack(side=BOTTOM, fill=X)
-        self.lbMessages = Listbox(msg_frame,
-                  yscrollcommand=rightscrollbar.set,
-                  xscrollcommand=bottomscrollbar.set,
-                  bg="white")
-        self.lbMessages.pack(expand=True, fill=BOTH)
-        rightscrollbar.config(command=self.lbMessages.yview)
-        bottomscrollbar.config(command=self.lbMessages.xview)
+        self.lb_messages = Listbox(
+            msg_frame,
+            yscrollcommand=rightscrollbar.set,
+            xscrollcommand=bottomscrollbar.set,
+            bg="white")
+        self.lb_messages.bind("<Double-Button-1>", self.show_sourcefile)
+        self.lb_messages.pack(expand=True, fill=BOTH)
+        rightscrollbar.config(command=self.lb_messages.yview)
+        bottomscrollbar.config(command=self.lb_messages.xview)
 
         #History ListBoxes
         rightscrollbar2 = Scrollbar(history_frame)
         rightscrollbar2.pack(side=RIGHT, fill=Y)
         bottomscrollbar2 = Scrollbar(history_frame, orient=HORIZONTAL)
         bottomscrollbar2.pack(side=BOTTOM, fill=X)
-        self.showhistory = Listbox(history_frame,
-                    yscrollcommand=rightscrollbar2.set,
-                    xscrollcommand=bottomscrollbar2.set,
-                    bg="white")
+        self.showhistory = Listbox(
+            history_frame,
+            yscrollcommand=rightscrollbar2.set,
+            xscrollcommand=bottomscrollbar2.set,
+            bg="white")
         self.showhistory.pack(expand=True, fill=BOTH)
         rightscrollbar2.config(command=self.showhistory.yview)
         bottomscrollbar2.config(command=self.showhistory.xview)
@@ -168,36 +219,37 @@
         self.status = Label(self.root, text="", bd=1, relief=SUNKEN, anchor=W)
         self.status.pack(side=BOTTOM, fill=X)
 
-        #labels
-        self.lblRatingLabel = Label(rating_frame, text='Rating:')
-        self.lblRatingLabel.pack(side=LEFT)
-        self.lblRating = Label(rating_frame, textvariable=self.rating)
-        self.lblRating.pack(side=LEFT)
+        #labelbl_ratingls
+        lbl_rating_label = Label(rating_frame, text='Rating:')
+        lbl_rating_label.pack(side=LEFT)
+        lbl_rating = Label(rating_frame, textvariable=self.rating)
+        lbl_rating.pack(side=LEFT)
         Label(mid_frame, text='Recently Used:').pack(side=LEFT)
         Label(top_frame, text='Module or package').pack(side=LEFT)
 
         #file textbox
-        self.txtModule = Entry(top_frame, background='white')
-        self.txtModule.bind('<Return>', self.run_lint)
-        self.txtModule.pack(side=LEFT, expand=True, fill=X)
+        self.txt_module = Entry(top_frame, background='white')
+        self.txt_module.bind('<Return>', self.run_lint)
+        self.txt_module.pack(side=LEFT, expand=True, fill=X)
 
         #results box
         rightscrollbar = Scrollbar(res_frame)
         rightscrollbar.pack(side=RIGHT, fill=Y)
         bottomscrollbar = Scrollbar(res_frame, orient=HORIZONTAL)
         bottomscrollbar.pack(side=BOTTOM, fill=X)
-        self.results = Listbox(res_frame,
-                  yscrollcommand=rightscrollbar.set,
-                  xscrollcommand=bottomscrollbar.set,
-                  bg="white", font="Courier")
+        self.results = Listbox(
+            res_frame,
+            yscrollcommand=rightscrollbar.set,
+            xscrollcommand=bottomscrollbar.set,
+            bg="white", font="Courier")
         self.results.pack(expand=True, fill=BOTH, side=BOTTOM)
         rightscrollbar.config(command=self.results.yview)
         bottomscrollbar.config(command=self.results.xview)
 
         #buttons
         Button(top_frame, text='Open', command=self.file_open).pack(side=LEFT)
-        Button(top_frame, text='Open Package', 
-               command=(lambda : self.file_open(package=True))).pack(side=LEFT)
+        Button(top_frame, text='Open Package',
+               command=(lambda: self.file_open(package=True))).pack(side=LEFT)
 
         self.btnRun = Button(top_frame, text='Run', command=self.run_lint)
         self.btnRun.pack(side=LEFT)
@@ -238,45 +290,56 @@
         #check boxes
         self.box = StringVar()
         # XXX should be generated
-        report = Radiobutton(radio_frame, text="Report", variable=self.box,
-                             value="Report", command=self.refresh_results_window)
-        rawMet = Radiobutton(radio_frame, text="Raw metrics", variable=self.box,
-                             value="Raw metrics", command=self.refresh_results_window)
-        dup = Radiobutton(radio_frame, text="Duplication", variable=self.box,
-                          value="Duplication", command=self.refresh_results_window)
-        ext = Radiobutton(radio_frame, text="External dependencies",
-                          variable=self.box, value="External dependencies",
-                          command=self.refresh_results_window)
-        stat = Radiobutton(radio_frame, text="Statistics by type",
-                           variable=self.box, value="Statistics by type",
-                           command=self.refresh_results_window)
-        msgCat = Radiobutton(radio_frame, text="Messages by category",
-                             variable=self.box, value="Messages by category",
-                             command=self.refresh_results_window)
-        msg = Radiobutton(radio_frame, text="Messages", variable=self.box,
-                            value="Messages", command=self.refresh_results_window)
+        report = Radiobutton(
+            radio_frame, text="Report", variable=self.box,
+            value="Report", command=self.refresh_results_window)
+        raw_met = Radiobutton(
+            radio_frame, text="Raw metrics", variable=self.box,
+            value="Raw metrics", command=self.refresh_results_window)
+        dup = Radiobutton(
+            radio_frame, text="Duplication", variable=self.box,
+            value="Duplication", command=self.refresh_results_window)
+        ext = Radiobutton(
+            radio_frame, text="External dependencies",
+            variable=self.box, value="External dependencies",
+            command=self.refresh_results_window)
+        stat = Radiobutton(
+            radio_frame, text="Statistics by type",
+            variable=self.box, value="Statistics by type",
+            command=self.refresh_results_window)
+        msg_cat = Radiobutton(
+            radio_frame, text="Messages by category",
+            variable=self.box, value="Messages by category",
+            command=self.refresh_results_window)
+        msg = Radiobutton(
+            radio_frame, text="Messages", variable=self.box,
+            value="Messages", command=self.refresh_results_window)
+        source_file = Radiobutton(
+            radio_frame, text="Source File", variable=self.box,
+            value="Source File", command=self.refresh_results_window)
         report.select()
         report.grid(column=0, row=0, sticky=W)
-        rawMet.grid(column=1, row=0, sticky=W)
+        raw_met.grid(column=1, row=0, sticky=W)
         dup.grid(column=2, row=0, sticky=W)
-        msg.grid(column=3, row=0, sticky=E)
+        msg.grid(column=3, row=0, sticky=W)
         stat.grid(column=0, row=1, sticky=W)
-        msgCat.grid(column=1, row=1, sticky=W)
-        ext.grid(column=2, row=1, columnspan=2, sticky=W)
+        msg_cat.grid(column=1, row=1, sticky=W)
+        ext.grid(column=2, row=1, sticky=W)
+        source_file.grid(column=3, row=1, sticky=W)
 
         #dictionary for check boxes and associated error term
         self.msg_type_dict = {
-            'I' : lambda : self.information_box.get() == 1,
-            'C' : lambda : self.convention_box.get() == 1,
-            'R' : lambda : self.refactor_box.get() == 1,
-            'E' : lambda : self.error_box.get() == 1,
-            'W' : lambda : self.warning_box.get() == 1,
-            'F' : lambda : self.fatal_box.get() == 1
+            'I': lambda: self.information_box.get() == 1,
+            'C': lambda: self.convention_box.get() == 1,
+            'R': lambda: self.refactor_box.get() == 1,
+            'E': lambda: self.error_box.get() == 1,
+            'W': lambda: self.warning_box.get() == 1,
+            'F': lambda: self.fatal_box.get() == 1
         }
-        self.txtModule.focus_set()
+        self.txt_module.focus_set()
 
 
-    def select_recent_file(self, event):
+    def select_recent_file(self, event): # pylint: disable=unused-argument
         """adds the selected file in the history listbox to the Module box"""
         if not self.showhistory.size():
             return
@@ -284,19 +347,21 @@
         selected = self.showhistory.curselection()
         item = self.showhistory.get(selected)
         #update module
-        self.txtModule.delete(0, END)
-        self.txtModule.insert(0, item)
+        self.txt_module.delete(0, END)
+        self.txt_module.insert(0, item)
 
     def refresh_msg_window(self):
         """refresh the message window with current output"""
         #clear the window
-        self.lbMessages.delete(0, END)
+        self.lb_messages.delete(0, END)
+        self.visible_msgs = []
         for msg in self.msgs:
-            if (self.msg_type_dict.get(msg[0])()):
-                msg_str = self.convert_to_string(msg)
-                self.lbMessages.insert(END, msg_str)
+            if self.msg_type_dict.get(msg.C)():
+                self.visible_msgs.append(msg)
+                msg_str = convert_to_string(msg)
+                self.lb_messages.insert(END, msg_str)
                 fg_color = COLORS.get(msg_str[:3], 'black')
-                self.lbMessages.itemconfigure(END, fg=fg_color)
+                self.lb_messages.itemconfigure(END, fg=fg_color)
 
     def refresh_results_window(self):
         """refresh the results window with current output"""
@@ -305,16 +370,9 @@
         try:
             for res in self.tabs[self.box.get()]:
                 self.results.insert(END, res)
-        except:
+        except KeyError:
             pass
 
-    def convert_to_string(self, msg):
-        """make a string representation of a message"""
-        if (msg[2] != ""):
-            return "(" + msg[0] + ") " + msg[1] + "." + msg[2] + " [" + msg[3] + "]: " + msg[4]
-        else:
-            return "(" + msg[0] + ") " + msg[1] + " [" + msg[3] + "]: " + msg[4]
-
     def process_incoming(self):
         """process the incoming messages from running pylint"""
         while self.msg_queue.qsize():
@@ -328,13 +386,14 @@
                 self.msgs.append(msg)
 
                 #displaying msg if message type is selected in check box
-                if (self.msg_type_dict.get(msg[0])()):
-                    msg_str = self.convert_to_string(msg)
-                    self.lbMessages.insert(END, msg_str)
+                if self.msg_type_dict.get(msg.C)():
+                    self.visible_msgs.append(msg)
+                    msg_str = convert_to_string(msg)
+                    self.lb_messages.insert(END, msg_str)
                     fg_color = COLORS.get(msg_str[:3], 'black')
-                    self.lbMessages.itemconfigure(END, fg=fg_color)
+                    self.lb_messages.itemconfigure(END, fg=fg_color)
 
-            except Queue.Empty:
+            except six.moves.queue.Empty:
                 pass
         return True
 
@@ -354,27 +413,29 @@
         """quit the application"""
         self.root.quit()
 
-    def halt(self):
+    def halt(self): # pylint: disable=no-self-use
         """program halt placeholder"""
         return
 
     def file_open(self, package=False, _=None):
         """launch a file browser"""
         if not package:
-            filename = askopenfilename(parent=self.root, filetypes=[('pythonfiles', '*.py'),
-                                                    ('allfiles', '*')], title='Select Module')
+            filename = askopenfilename(parent=self.root,
+                                       filetypes=[('pythonfiles', '*.py'),
+                                                  ('allfiles', '*')],
+                                       title='Select Module')
         else:
             filename = askdirectory(title="Select A Folder", mustexist=1)
 
         if filename == ():
             return
 
-        self.txtModule.delete(0, END)
-        self.txtModule.insert(0, filename)
+        self.txt_module.delete(0, END)
+        self.txt_module.insert(0, filename)
 
     def update_filenames(self):
         """update the list of recent filenames"""
-        filename = self.txtModule.get()
+        filename = self.txt_module.get()
         if not filename:
             filename = os.getcwd()
         if filename+'\n' in self.filenames:
@@ -407,13 +468,14 @@
         self.update_filenames()
         self.root.configure(cursor='watch')
         self.reporter = GUIReporter(self, output=self.report_stream)
-        module = self.txtModule.get()
+        module = self.txt_module.get()
         if not module:
             module = os.getcwd()
 
         #cleaning up msgs and windows
         self.msgs = []
-        self.lbMessages.delete(0, END)
+        self.visible_msgs = []
+        self.lb_messages.delete(0, END)
         self.tabs = {}
         self.results.delete(0, END)
         self.btnRun.config(state=DISABLED)
@@ -432,21 +494,38 @@
 
         self.root.configure(cursor='')
 
+    def show_sourcefile(self, event=None):  # pylint: disable=unused-argument
+        selected = self.lb_messages.curselection()
+        if not selected:
+            return
+
+        msg = self.visible_msgs[int(selected[0])]
+        scroll = msg.line - 3
+        if scroll < 0:
+            scroll = 0
+
+        self.tabs["Source File"] = open(msg.path, "r").readlines()
+        self.box.set("Source File")
+        self.refresh_results_window()
+        self.results.yview(scroll)
+        self.results.select_set(msg.line - 1)
+
 
 def lint_thread(module, reporter, gui):
     """thread for pylint"""
     gui.status.text = "processing module(s)"
-    lint_obj = pylint.lint.Run(args=[module], reporter=reporter, exit=False)
+    pylint.lint.Run(args=[module], reporter=reporter, exit=False)
     gui.msg_queue.put("DONE")
 
 
 def Run(args):
     """launch pylint gui from args"""
     if args:
-        print 'USAGE: pylint-gui\n launch a simple pylint gui using Tk'
-        return
+        print('USAGE: pylint-gui\n launch a simple pylint gui using Tk')
+        sys.exit(1)
     gui = LintGui()
     gui.mainloop()
+    sys.exit(0)
 
 if __name__ == '__main__':
     Run(sys.argv[1:])
diff --git a/third_party/pylint/interfaces.py b/third_party/pylint/interfaces.py
index 3d7bdad..64f5a95 100644
--- a/third_party/pylint/interfaces.py
+++ b/third_party/pylint/interfaces.py
@@ -9,17 +9,23 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
-""" Copyright (c) 2002-2003 LOGILAB S.A. (Paris, FRANCE).
- http://www.logilab.fr/ -- mailto:contact@logilab.fr
-
-Interfaces for PyLint objects
-"""
-
-__revision__ = "$Id: interfaces.py,v 1.9 2004-04-24 12:14:53 syt Exp $"
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+"""Interfaces for Pylint objects"""
+from collections import namedtuple
 
 from logilab.common.interface import Interface
 
+Confidence = namedtuple('Confidence', ['name', 'description'])
+# Warning Certainties
+HIGH = Confidence('HIGH', 'No false positive possible.')
+INFERENCE = Confidence('INFERENCE', 'Warning based on inference result.')
+INFERENCE_FAILURE = Confidence('INFERENCE_FAILURE',
+                               'Warning based on inference with failures.')
+UNDEFINED = Confidence('UNDEFINED',
+                       'Warning without any associated confidence level.')
+
+CONFIDENCE_LEVELS = [HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED]
+
 
 class IChecker(Interface):
     """This is an base interface, not designed to be used elsewhere than for
@@ -32,53 +38,33 @@
     def close(self):
         """called after visiting project (i.e set of modules)"""
 
-##     def open_module(self):
-##         """called before visiting a module"""
-
-##     def close_module(self):
-##         """called after visiting a module"""
-
 
 class IRawChecker(IChecker):
     """interface for checker which need to parse the raw file
     """
 
-    def process_module(self, astng):
+    def process_module(self, astroid):
         """ process a module
 
-        the module's content is accessible via astng.file_stream
+        the module's content is accessible via astroid.stream
         """
 
 
-class IASTNGChecker(IChecker):
+class ITokenChecker(IChecker):
+    """Interface for checkers that need access to the token list."""
+    def process_tokens(self, tokens):
+        """Process a module.
+
+        tokens is a list of all source code tokens in the file.
+        """
+
+
+class IAstroidChecker(IChecker):
     """ interface for checker which prefers receive events according to
     statement type
     """
 
 
-class ILinter(Interface):
-    """interface for the linter class
-
-    the linter class will generate events to its registered checkers.
-    Each checker may interact with the linter instance using this API
-    """
-
-    def register_checker(self, checker):
-        """register a new checker class
-
-        checker is a class implementing IrawChecker or / and IASTNGChecker
-        """
-
-    def add_message(self, msg_id, line=None, node=None, args=None):
-        """add the message corresponding to the given id.
-
-        If provided, msg is expanded using args
-
-        astng checkers should provide the node argument,
-        raw checkers should provide the line argument.
-        """
-
-
 class IReporter(Interface):
     """ reporter collect messages and display results encapsulated in a layout
     """
@@ -95,4 +81,4 @@
         """
 
 
-__all__ = ('IRawChecker', 'IStatable', 'ILinter', 'IReporter')
+__all__ = ('IRawChecker', 'IAstroidChecker', 'ITokenChecker', 'IReporter')
diff --git a/third_party/pylint/lint.py b/third_party/pylint/lint.py
index 48ebdf2..082d8b3 100644
--- a/third_party/pylint/lint.py
+++ b/third_party/pylint/lint.py
@@ -1,5 +1,4 @@
-# Copyright (c) 2003-2010 Sylvain Thenault (thenault@gmail.com).
-# Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
+# Copyright (c) 2003-2014 LOGILAB S.A. (Paris, FRANCE).
 # http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This program is free software; you can redistribute it and/or modify it under
@@ -13,10 +12,10 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """ %prog [options] module_or_package
 
-  Check that a module satisfy a coding standard (and more !).
+  Check that a module satisfies a coding standard (and more !).
 
     %prog --help
 
@@ -26,127 +25,232 @@
 
   Display help messages about given message identifiers and exit.
 """
+from __future__ import print_function
 
-# import this first to avoid builtin namespace pollution
-from pylint.checkers import utils
-
-import sys
+import collections
+import contextlib
+import itertools
+import operator
 import os
-import re
+try:
+    import multiprocessing
+except ImportError:
+    multiprocessing = None
+import sys
 import tokenize
-from warnings import warn
+import warnings
 
-from logilab.common.configuration import UnsupportedAction, OptionsManagerMixIn
-from logilab.common.optik_ext import check_csv
-from logilab.common.modutils import load_module_from_name
-from logilab.common.interface import implements
-from logilab.common.textutils import splitstrip
-from logilab.common.ureports import Table, Text, Section
+import astroid
+from astroid.__pkginfo__ import version as astroid_version
+from astroid import modutils
+from logilab.common import configuration
+from logilab.common import optik_ext
+from logilab.common import interface
+from logilab.common import textutils
+from logilab.common import ureports
 from logilab.common.__pkginfo__ import version as common_version
+import six
 
-from logilab.astng import MANAGER, nodes, ASTNGBuildingException
-from logilab.astng.__pkginfo__ import version as astng_version
-
-from pylint.utils import PyLintASTWalker, UnknownMessage, MessagesHandlerMixIn,\
-     ReportsHandlerMixIn, MSG_TYPES, expand_modules
-from pylint.interfaces import ILinter, IRawChecker, IASTNGChecker
-from pylint.checkers import BaseRawChecker, EmptyReport, \
-     table_lines_from_stats
-from pylint.reporters.text import TextReporter, ParseableTextReporter, \
-     VSTextReporter, ColorizedTextReporter
-from pylint.reporters.html import HTMLReporter
+from pylint import checkers
+from pylint import interfaces
+from pylint import reporters
+from pylint import utils
 from pylint import config
-
 from pylint.__pkginfo__ import version
 
 
-OPTION_RGX = re.compile('\s*#*\s*pylint:(.*)')
-REPORTER_OPT_MAP = {'text': TextReporter,
-                    'parseable': ParseableTextReporter,
-                    'msvs': VSTextReporter,
-                    'colorized': ColorizedTextReporter,
-                    'html': HTMLReporter,}
+MANAGER = astroid.MANAGER
+
+def _get_new_args(message):
+    location = (
+        message.abspath,
+        message.path,
+        message.module,
+        message.obj,
+        message.line,
+        message.column,
+    )
+    return (
+        message.msg_id,
+        message.symbol,
+        location,
+        message.msg,
+        message.confidence,
+    )
+
+def _get_python_path(filepath):
+    dirname = os.path.realpath(os.path.expanduser(filepath))
+    if not os.path.isdir(dirname):
+        dirname = os.path.dirname(dirname)
+    while True:
+        if not os.path.exists(os.path.join(dirname, "__init__.py")):
+            return dirname
+        old_dirname = dirname
+        dirname = os.path.dirname(dirname)
+        if old_dirname == dirname:
+            return os.getcwd()
+
+
+def _merge_stats(stats):
+    merged = {}
+    for stat in stats:
+        for key, item in six.iteritems(stat):
+            if key not in merged:
+                merged[key] = item
+            else:
+                if isinstance(item, dict):
+                    merged[key].update(item)
+                else:
+                    merged[key] = merged[key] + item
+    return merged
 
 
 # Python Linter class #########################################################
 
 MSGS = {
     'F0001': ('%s',
+              'fatal',
               'Used when an error occurred preventing the analysis of a \
               module (unable to find it for instance).'),
     'F0002': ('%s: %s',
-              'Used when an unexpected error occurred while building the ASTNG \
-              representation. This is usually accompanied by a traceback. \
-              Please report such errors !'),
+              'astroid-error',
+              'Used when an unexpected error occurred while building the '
+              'Astroid  representation. This is usually accompanied by a '
+              'traceback. Please report such errors !'),
     'F0003': ('ignored builtin module %s',
-              'Used to indicate that the user asked to analyze a builtin module\
-              which has been skipped.'),
-    'F0004': ('unexpected inferred value %s',
-              'Used to indicate that some value of an unexpected type has been \
-              inferred.'),
+              'ignored-builtin-module',
+              'Used to indicate that the user asked to analyze a builtin '
+              'module which has been skipped.'),
     'F0010': ('error while code parsing: %s',
-              'Used when an exception occured while building the ASTNG \
-               representation which could be handled by astng.'),
-
+              'parse-error',
+              'Used when an exception occured while building the Astroid '
+              'representation which could be handled by astroid.'),
 
     'I0001': ('Unable to run raw checkers on built-in module %s',
-              'Used to inform that a built-in module has not been checked \
-              using the raw checkers.'),
+              'raw-checker-failed',
+              'Used to inform that a built-in module has not been checked '
+              'using the raw checkers.'),
 
     'I0010': ('Unable to consider inline option %r',
-              'Used when an inline option is either badly formatted or can\'t \
-              be used inside modules.'),
+              'bad-inline-option',
+              'Used when an inline option is either badly formatted or can\'t '
+              'be used inside modules.'),
 
-    'I0011': ('Locally disabling %s',
-              'Used when an inline option disables a message or a messages \
-              category.'),
-    'I0012': ('Locally enabling %s',
-              'Used when an inline option enables a message or a messages \
-              category.'),
+    'I0011': ('Locally disabling %s (%s)',
+              'locally-disabled',
+              'Used when an inline option disables a message or a messages '
+              'category.'),
+    'I0012': ('Locally enabling %s (%s)',
+              'locally-enabled',
+              'Used when an inline option enables a message or a messages '
+              'category.'),
     'I0013': ('Ignoring entire file',
+              'file-ignored',
               'Used to inform that the file will not be checked'),
-
+    'I0020': ('Suppressed %s (from line %d)',
+              'suppressed-message',
+              'A message was triggered on a line, but suppressed explicitly '
+              'by a disable= comment in the file. This message is not '
+              'generated for messages that are ignored due to configuration '
+              'settings.'),
+    'I0021': ('Useless suppression of %s',
+              'useless-suppression',
+              'Reported when a message is explicitly disabled for a line or '
+              'a block of code, but never triggered.'),
+    'I0022': ('Pragma "%s" is deprecated, use "%s" instead',
+              'deprecated-pragma',
+              'Some inline pylint options have been renamed or reworked, '
+              'only the most recent form should be used. '
+              'NOTE:skip-all is only available with pylint >= 0.26',
+              {'old_names': [('I0014', 'deprecated-disable-all')]}),
 
     'E0001': ('%s',
+              'syntax-error',
               'Used when a syntax error is raised for a module.'),
 
     'E0011': ('Unrecognized file option %r',
+              'unrecognized-inline-option',
               'Used when an unknown inline option is encountered.'),
     'E0012': ('Bad option value %r',
+              'bad-option-value',
               'Used when a bad value for an inline option is encountered.'),
     }
 
 
-class PyLinter(OptionsManagerMixIn, MessagesHandlerMixIn, ReportsHandlerMixIn,
-               BaseRawChecker):
+def _deprecated_option(shortname, opt_type):
+    def _warn_deprecated(option, optname, *args): # pylint: disable=unused-argument
+        sys.stderr.write('Warning: option %s is deprecated and ignored.\n' % (optname,))
+    return {'short': shortname, 'help': 'DEPRECATED', 'hide': True,
+            'type': opt_type, 'action': 'callback', 'callback': _warn_deprecated}
+
+
+if multiprocessing is not None:
+    class ChildLinter(multiprocessing.Process): # pylint: disable=no-member
+        def run(self):
+            tasks_queue, results_queue, self._config = self._args # pylint: disable=no-member
+
+            self._config["jobs"] = 1  # Child does not parallelize any further.
+
+            # Run linter for received files/modules.
+            for file_or_module in iter(tasks_queue.get, 'STOP'):
+                result = self._run_linter(file_or_module[0])
+                try:
+                    results_queue.put(result)
+                except Exception as ex:
+                    print("internal error with sending report for module %s" % file_or_module, file=sys.stderr)
+                    print(ex, file=sys.stderr)
+                    results_queue.put({})
+
+        def _run_linter(self, file_or_module):
+            linter = PyLinter()
+
+            # Register standard checkers.
+            linter.load_default_plugins()
+            # Load command line plugins.
+            # TODO linter.load_plugin_modules(self._plugins)
+
+            linter.load_configuration(**self._config)
+            linter.set_reporter(reporters.CollectingReporter())
+
+            # Run the checks.
+            linter.check(file_or_module)
+
+            msgs = [_get_new_args(m) for m in linter.reporter.messages]
+            return (file_or_module, linter.file_state.base_name, linter.current_name,
+                    msgs, linter.stats, linter.msg_status)
+
+
+class PyLinter(configuration.OptionsManagerMixIn,
+               utils.MessagesHandlerMixIn,
+               utils.ReportsHandlerMixIn,
+               checkers.BaseTokenChecker):
     """lint Python modules using external checkers.
 
     This is the main checker controlling the other ones and the reports
-    generation. It is itself both a raw checker and an astng checker in order
+    generation. It is itself both a raw checker and an astroid checker in order
     to:
     * handle message activation / deactivation at the module level
     * handle some basic but necessary stats'data (number of classes, methods...)
 
     IDE plugins developpers: you may have to call
-    `logilab.astng.builder.MANAGER.astng_cache.clear()` accross run if you want
+    `astroid.builder.MANAGER.astroid_cache.clear()` accross run if you want
     to ensure the latest code version is actually checked.
     """
 
-    __implements__ = (ILinter, IRawChecker)
+    __implements__ = (interfaces.ITokenChecker, )
 
     name = 'master'
     priority = 0
     level = 0
     msgs = MSGS
-    may_be_disabled = False
 
     @staticmethod
     def make_options():
         return (('ignore',
                  {'type' : 'csv', 'metavar' : '<file>[,<file>...]',
                   'dest' : 'black_list', 'default' : ('CVS',),
-                  'help' : 'Add files or directories to the blacklist. \
-They should be base names, not paths.'}),
+                  'help' : 'Add files or directories to the blacklist. '
+                           'They should be base names, not paths.'}),
                 ('persistent',
                  {'default': True, 'type' : 'yn', 'metavar' : '<y_or_n>',
                   'level': 1,
@@ -155,71 +259,120 @@
                 ('load-plugins',
                  {'type' : 'csv', 'metavar' : '<modules>', 'default' : (),
                   'level': 1,
-                  'help' : 'List of plugins (as comma separated values of \
-python modules names) to load, usually to register additional checkers.'}),
+                  'help' : 'List of plugins (as comma separated values of '
+                           'python modules names) to load, usually to register '
+                           'additional checkers.'}),
 
                 ('output-format',
-                 {'default': 'text', 'type': 'choice', 'metavar' : '<format>',
-                  'choices': REPORTER_OPT_MAP.keys(),
+                 {'default': 'text', 'type': 'string', 'metavar' : '<format>',
                   'short': 'f',
                   'group': 'Reports',
-                  'help' : 'Set the output format. Available formats are text,\
-                 parseable, colorized, msvs (visual studio) and html'}),
-
-                ('include-ids',
-                 {'type' : 'yn', 'metavar' : '<y_or_n>', 'default' : 0,
-                  'short': 'i',
-                  'group': 'Reports',
-                  'help' : 'Include message\'s id in output'}),
+                  'help' : 'Set the output format. Available formats are text,'
+                           ' parseable, colorized, msvs (visual studio) and html. You '
+                           'can also give a reporter class, eg mypackage.mymodule.'
+                           'MyReporterClass.'}),
 
                 ('files-output',
                  {'default': 0, 'type' : 'yn', 'metavar' : '<y_or_n>',
                   'group': 'Reports', 'level': 1,
-                  'help' : 'Put messages in a separate file for each module / \
-package specified on the command line instead of printing them on stdout. \
-Reports (if any) will be written in a file name "pylint_global.[txt|html]".'}),
+                  'help' : 'Put messages in a separate file for each module / '
+                           'package specified on the command line instead of printing '
+                           'them on stdout. Reports (if any) will be written in a file '
+                           'name "pylint_global.[txt|html]".'}),
 
                 ('reports',
                  {'default': 1, 'type' : 'yn', 'metavar' : '<y_or_n>',
                   'short': 'r',
                   'group': 'Reports',
-                  'help' : 'Tells whether to display a full report or only the\
- messages'}),
+                  'help' : 'Tells whether to display a full report or only the '
+                           'messages'}),
 
                 ('evaluation',
                  {'type' : 'string', 'metavar' : '<python_expression>',
                   'group': 'Reports', 'level': 1,
-                  'default': '10.0 - ((float(5 * error + warning + refactor + \
-convention) / statement) * 10)',
-                  'help' : 'Python expression which should return a note less \
-than 10 (10 is the highest note). You have access to the variables errors \
-warning, statement which respectively contain the number of errors / warnings\
- messages and the total number of statements analyzed. This is used by the \
- global evaluation report (RP0004).'}),
+                  'default': '10.0 - ((float(5 * error + warning + refactor + '
+                             'convention) / statement) * 10)',
+                  'help' : 'Python expression which should return a note less '
+                           'than 10 (10 is the highest note). You have access '
+                           'to the variables errors warning, statement which '
+                           'respectively contain the number of errors / '
+                           'warnings messages and the total number of '
+                           'statements analyzed. This is used by the global '
+                           'evaluation report (RP0004).'}),
 
                 ('comment',
                  {'default': 0, 'type' : 'yn', 'metavar' : '<y_or_n>',
                   'group': 'Reports', 'level': 1,
-                  'help' : 'Add a comment according to your evaluation note. \
-This is used by the global evaluation report (RP0004).'}),
+                  'help' : 'Add a comment according to your evaluation note. '
+                           'This is used by the global evaluation report (RP0004).'}),
+
+                ('confidence',
+                 {'type' : 'multiple_choice', 'metavar': '<levels>',
+                  'default': '',
+                  'choices': [c.name for c in interfaces.CONFIDENCE_LEVELS],
+                  'group': 'Messages control',
+                  'help' : 'Only show warnings with the listed confidence levels.'
+                           ' Leave empty to show all. Valid levels: %s' % (
+                               ', '.join(c.name for c in interfaces.CONFIDENCE_LEVELS),)}),
 
                 ('enable',
                  {'type' : 'csv', 'metavar': '<msg ids>',
                   'short': 'e',
                   'group': 'Messages control',
                   'help' : 'Enable the message, report, category or checker with the '
-                  'given id(s). You can either give multiple identifier '
-                  'separated by comma (,) or put this option multiple time.'}),
+                           'given id(s). You can either give multiple identifier '
+                           'separated by comma (,) or put this option multiple time. '
+                           'See also the "--disable" option for examples. '}),
 
                 ('disable',
                  {'type' : 'csv', 'metavar': '<msg ids>',
                   'short': 'd',
                   'group': 'Messages control',
                   'help' : 'Disable the message, report, category or checker '
-                  'with the given id(s). You can either give multiple identifier'
-                  ' separated by comma (,) or put this option multiple time '
-                  '(only on the command line, not in the configuration file '
-                  'where it should appear only once).'}),
+                           'with the given id(s). You can either give multiple identifiers'
+                           ' separated by comma (,) or put this option multiple times '
+                           '(only on the command line, not in the configuration file '
+                           'where it should appear only once).'
+                           'You can also use "--disable=all" to disable everything first '
+                           'and then reenable specific checks. For example, if you want '
+                           'to run only the similarities checker, you can use '
+                           '"--disable=all --enable=similarities". '
+                           'If you want to run only the classes checker, but have no '
+                           'Warning level messages displayed, use'
+                           '"--disable=all --enable=classes --disable=W"'}),
+
+                ('msg-template',
+                 {'type' : 'string', 'metavar': '<template>',
+                  'group': 'Reports',
+                  'help' : ('Template used to display messages. '
+                            'This is a python new-style format string '
+                            'used to format the message information. '
+                            'See doc for all details')
+                 }),
+
+                ('include-ids', _deprecated_option('i', 'yn')),
+                ('symbols', _deprecated_option('s', 'yn')),
+
+                ('jobs',
+                 {'type' : 'int', 'metavar': '<n-processes>',
+                  'short': 'j',
+                  'default': 1,
+                  'help' : '''Use multiple processes to speed up Pylint.''',
+                 }),
+
+                ('unsafe-load-any-extension',
+                 {'type': 'yn', 'metavar': '<yn>', 'default': False, 'hide': True,
+                  'help': ('Allow loading of arbitrary C extensions. Extensions'
+                           ' are imported into the active Python interpreter and'
+                           ' may run arbitrary code.')}),
+
+                ('extension-pkg-whitelist',
+                  {'type': 'csv', 'metavar': '<pkg[,pkg]>', 'default': [],
+                   'help': ('A comma-separated list of package or module names'
+                            ' from where C extensions may be loaded. Extensions are'
+                            ' loading into the active Python interpreter and may run'
+                            ' arbitrary code')}
+                  ),
                )
 
     option_groups = (
@@ -231,17 +384,21 @@
                  pylintrc=None):
         # some stuff has to be done before ancestors initialization...
         #
-        # checkers / reporter / astng manager
+        # messages store / checkers / reporter / astroid manager
+        self.msgs_store = utils.MessagesStore()
         self.reporter = None
-        self._checkers = {}
+        self._reporter_name = None
+        self._reporters = {}
+        self._checkers = collections.defaultdict(list)
+        self._pragma_lineno = {}
         self._ignore_file = False
         # visit variables
-        self.base_name = None
-        self.base_file = None
+        self.file_state = utils.FileState()
         self.current_name = None
         self.current_file = None
         self.stats = None
         # init options
+        self._external_opts = options
         self.options = options + PyLinter.make_options()
         self.option_groups = option_groups + PyLinter.option_groups
         self._options_methods = {
@@ -249,14 +406,15 @@
             'disable': self.disable}
         self._bw_options_methods = {'disable-msg': self.disable,
                                     'enable-msg': self.enable}
-        full_version = '%%prog %s, \nastng %s, common %s\nPython %s' % (
-            version, astng_version, common_version, sys.version)
-        OptionsManagerMixIn.__init__(self, usage=__doc__,
-                                     version=full_version,
-                                     config_file=pylintrc or config.PYLINTRC)
-        MessagesHandlerMixIn.__init__(self)
-        ReportsHandlerMixIn.__init__(self)
-        BaseRawChecker.__init__(self)
+        full_version = '%%prog %s, \nastroid %s, common %s\nPython %s' % (
+            version, astroid_version, common_version, sys.version)
+        configuration.OptionsManagerMixIn.__init__(
+            self, usage=__doc__,
+            version=full_version,
+            config_file=pylintrc or config.PYLINTRC)
+        utils.MessagesHandlerMixIn.__init__(self)
+        utils.ReportsHandlerMixIn.__init__(self)
+        checkers.BaseTokenChecker.__init__(self)
         # provided reports
         self.reports = (('RP0001', 'Messages by category',
                          report_total_messages_stats),
@@ -266,15 +424,20 @@
                          report_messages_stats),
                         ('RP0004', 'Global evaluation',
                          self.report_evaluation),
-                        )
+                       )
         self.register_checker(self)
-        self._dynamic_plugins = []
+        self._dynamic_plugins = set()
         self.load_provider_defaults()
-        self.set_reporter(reporter or TextReporter(sys.stdout))
+        if reporter:
+            self.set_reporter(reporter)
 
     def load_default_plugins(self):
-        from pylint import checkers
         checkers.initialize(self)
+        reporters.initialize(self)
+        # Make sure to load the default reporter, because
+        # the option has been set before the plugins had been loaded.
+        if not self.reporter:
+            self._load_reporter()
 
     def load_plugin_modules(self, modnames):
         """take a list of module names which are pylint plugins and load
@@ -283,10 +446,22 @@
         for modname in modnames:
             if modname in self._dynamic_plugins:
                 continue
-            self._dynamic_plugins.append(modname)
-            module = load_module_from_name(modname)
+            self._dynamic_plugins.add(modname)
+            module = modutils.load_module_from_name(modname)
             module.register(self)
 
+    def _load_reporter(self):
+        name = self._reporter_name.lower()
+        if name in self._reporters:
+            self.set_reporter(self._reporters[name]())
+        else:
+            qname = self._reporter_name
+            module = modutils.load_module_from_name(
+                modutils.get_module_part(qname))
+            class_name = qname.split('.')[-1]
+            reporter_class = getattr(module, class_name)
+            self.set_reporter(reporter_class())
+
     def set_reporter(self, reporter):
         """set the reporter used to display messages and reports"""
         self.reporter = reporter
@@ -296,46 +471,75 @@
         """overridden from configuration.OptionsProviderMixin to handle some
         special options
         """
-        if optname in self._options_methods or optname in self._bw_options_methods:
+        if optname in self._options_methods or \
+                optname in self._bw_options_methods:
             if value:
                 try:
                     meth = self._options_methods[optname]
                 except KeyError:
                     meth = self._bw_options_methods[optname]
-                    warn('%s is deprecated, replace it by %s' % (
-                        optname, optname.split('-')[0]), DeprecationWarning)
-                value = check_csv(None, optname, value)
+                    warnings.warn('%s is deprecated, replace it by %s' % (
+                                  optname, optname.split('-')[0]),
+                                  DeprecationWarning)
+                value = optik_ext.check_csv(None, optname, value)
                 if isinstance(value, (list, tuple)):
-                    for _id in value :
-                        meth(_id)
-                else :
+                    for _id in value:
+                        meth(_id, ignore_unknown=True)
+                else:
                     meth(value)
+                return # no need to call set_option, disable/enable methods do it
         elif optname == 'output-format':
-            self.set_reporter(REPORTER_OPT_MAP[value.lower()]())
+            self._reporter_name = value
+            # If the reporters are already available, load
+            # the reporter class.
+            if self._reporters:
+                self._load_reporter()
+
         try:
-            BaseRawChecker.set_option(self, optname, value, action, optdict)
-        except UnsupportedAction:
-            print >> sys.stderr, 'option %s can\'t be read from config file' % \
-                  optname
+            checkers.BaseTokenChecker.set_option(self, optname,
+                                                 value, action, optdict)
+        except configuration.UnsupportedAction:
+            print('option %s can\'t be read from config file' % \
+                  optname, file=sys.stderr)
+
+    def register_reporter(self, reporter_class):
+        self._reporters[reporter_class.name] = reporter_class
+
+    def report_order(self):
+        reports = sorted(self._reports, key=lambda x: getattr(x, 'name', ''))
+        try:
+            # Remove the current reporter and add it
+            # at the end of the list.
+            reports.pop(reports.index(self))
+        except ValueError:
+            pass
+        else:
+            reports.append(self)
+        return reports
 
     # checkers manipulation methods ############################################
 
     def register_checker(self, checker):
         """register a new checker
 
-        checker is an object implementing IRawChecker or / and IASTNGChecker
+        checker is an object implementing IRawChecker or / and IAstroidChecker
         """
         assert checker.priority <= 0, 'checker priority can\'t be >= 0'
-        self._checkers.setdefault(checker.name, []).append(checker)
+        self._checkers[checker.name].append(checker)
         for r_id, r_title, r_cb in checker.reports:
             self.register_report(r_id, r_title, r_cb, checker)
         self.register_options_provider(checker)
         if hasattr(checker, 'msgs'):
-            self.register_messages(checker)
+            self.msgs_store.register_messages(checker)
         checker.load_defaults()
 
+        # Register the checker, but disable all of its messages.
+        # TODO(cpopa): we should have a better API for this.
+        if not getattr(checker, 'enabled', True):
+            self.disable(checker.name)
+
     def disable_noerror_messages(self):
-        for msgcat, msgids in self._msgs_by_category.iteritems():
+        for msgcat, msgids in six.iteritems(self.msgs_store._msgs_by_category):
             if msgcat == 'E':
                 for msgid in msgids:
                     self.enable(msgid)
@@ -344,10 +548,10 @@
                     self.disable(msgid)
 
     def disable_reporters(self):
-       """disable all reporters"""
-       for reporters in self._reports.values():
-           for report_id, _title, _cb in reporters:
-               self.disable_report(report_id)
+        """disable all reporters"""
+        for reporters in six.itervalues(self._reports):
+            for report_id, _, _ in reporters:
+                self.disable_report(report_id)
 
     def error_mode(self):
         """error mode: enable only errors; no reports, no persistent"""
@@ -364,22 +568,25 @@
         """process tokens from the current module to search for module/block
         level options
         """
-        comment = tokenize.COMMENT
-        newline = tokenize.NEWLINE
-        for (tok_type, _, start, _, line) in tokens:
-            if tok_type not in (comment, newline):
+        control_pragmas = {'disable', 'enable'}
+        for (tok_type, content, start, _, _) in tokens:
+            if tok_type != tokenize.COMMENT:
                 continue
-            match = OPTION_RGX.search(line)
+            match = utils.OPTION_RGX.search(content)
             if match is None:
                 continue
-            if match.group(1).strip() == "disable-all":
-                self.add_message('I0013', line=start[0])
+            if match.group(1).strip() == "disable-all" or \
+                    match.group(1).strip() == 'skip-file':
+                if match.group(1).strip() == "disable-all":
+                    self.add_message('deprecated-pragma', line=start[0],
+                                     args=('disable-all', 'skip-file'))
+                self.add_message('file-ignored', line=start[0])
                 self._ignore_file = True
                 return
             try:
                 opt, value = match.group(1).split('=', 1)
             except ValueError:
-                self.add_message('I0010', args=match.group(1).strip(),
+                self.add_message('bad-inline-option', args=match.group(1).strip(),
                                  line=start[0])
                 continue
             opt = opt.strip()
@@ -388,67 +595,31 @@
                     meth = self._options_methods[opt]
                 except KeyError:
                     meth = self._bw_options_methods[opt]
-                    warn('%s is deprecated, replace it by %s (%s, line %s)' % (
-                        opt, opt.split('-')[0], self.current_file, line),
-                         DeprecationWarning)
-                for msgid in splitstrip(value):
-                    try:
-                        meth(msgid, 'module', start[0])
-                    except UnknownMessage:
-                        self.add_message('E0012', args=msgid, line=start[0])
-            else:
-                self.add_message('E0011', args=opt, line=start[0])
+                    # found a "(dis|en)able-msg" pragma deprecated suppression
+                    self.add_message('deprecated-pragma', line=start[0], args=(opt, opt.replace('-msg', '')))
+                for msgid in textutils.splitstrip(value):
+                    # Add the line where a control pragma was encountered.
+                    if opt in control_pragmas:
+                        self._pragma_lineno[msgid] = start[0]
 
-    def collect_block_lines(self, node, msg_state):
-        """walk ast to collect block level options line numbers"""
-        # recurse on children (depth first)
-        for child in node.get_children():
-            self.collect_block_lines(child, msg_state)
-        first = node.fromlineno
-        last = node.tolineno
-        # first child line number used to distinguish between disable
-        # which are the first child of scoped node with those defined later.
-        # For instance in the code below:
-        #
-        # 1.   def meth8(self):
-        # 2.        """test late disabling"""
-        # 3.        # pylint: disable=E1102
-        # 4.        print self.blip
-        # 5.        # pylint: disable=E1101
-        # 6.        print self.bla
-        #
-        # E1102 should be disabled from line 1 to 6 while E1101 from line 5 to 6
-        #
-        # this is necessary to disable locally messages applying to class /
-        # function using their fromlineno
-        if isinstance(node, (nodes.Module, nodes.Class, nodes.Function)) and node.body:
-            firstchildlineno = node.body[0].fromlineno
-        else:
-            firstchildlineno = last
-        for msgid, lines in msg_state.iteritems():
-            for lineno, state in lines.items():
-                if first <= lineno <= last:
-                    if lineno > firstchildlineno:
-                        state = True
-                    # set state for all lines for this block
-                    first, last = node.block_range(lineno)
-                    for line in xrange(first, last+1):
-                        # do not override existing entries
-                        if not line in self._module_msgs_state.get(msgid, ()):
-                            if line in lines: # state change in the same block
-                                state = lines[line]
-                            try:
-                                self._module_msgs_state[msgid][line] = state
-                            except KeyError:
-                                self._module_msgs_state[msgid] = {line: state}
-                    del lines[lineno]
+                    try:
+                        if (opt, msgid) == ('disable', 'all'):
+                            self.add_message('deprecated-pragma', line=start[0], args=('disable=all', 'skip-file'))
+                            self.add_message('file-ignored', line=start[0])
+                            self._ignore_file = True
+                            return
+                        meth(msgid, 'module', start[0])
+                    except utils.UnknownMessage:
+                        self.add_message('bad-option-value', args=msgid, line=start[0])
+            else:
+                self.add_message('unrecognized-inline-option', args=opt, line=start[0])
 
 
     # code checking methods ###################################################
 
     def get_checkers(self):
         """return all available checkers as a list"""
-        return [self] + [c for checkers in self._checkers.values()
+        return [self] + [c for checkers in six.itervalues(self._checkers)
                          for c in checkers if c is not self]
 
     def prepare_checkers(self):
@@ -458,50 +629,185 @@
         # get needed checkers
         neededcheckers = [self]
         for checker in self.get_checkers()[1:]:
+            # fatal errors should not trigger enable / disabling a checker
             messages = set(msg for msg in checker.msgs
-                           if self.is_message_enabled(msg))
+                           if msg[0] != 'F' and self.is_message_enabled(msg))
             if (messages or
-                any(self.report_is_enabled(r[0]) for r in checker.reports)):
+                    any(self.report_is_enabled(r[0]) for r in checker.reports)):
                 neededcheckers.append(checker)
-                checker.active_msgs = messages
+        # Sort checkers by priority
+        neededcheckers = sorted(neededcheckers,
+                                key=operator.attrgetter('priority'),
+                                reverse=True)
         return neededcheckers
 
+    def should_analyze_file(self, modname, path): # pylint: disable=unused-argument, no-self-use
+        """Returns whether or not a module should be checked.
+
+        This implementation returns True for all python source files, indicating
+        that all files should be linted.
+
+        Subclasses may override this method to indicate that modules satisfying
+        certain conditions should not be linted.
+
+        :param str modname: The name of the module to be checked.
+        :param str path: The full path to the source code of the module.
+        :returns: True if the module should be checked.
+        :rtype: bool
+        """
+        return path.endswith('.py')
+
     def check(self, files_or_modules):
         """main checking entry: check a list of files or modules from their
         name.
         """
-        self.reporter.include_ids = self.config.include_ids
+        # initialize msgs_state now that all messages have been registered into
+        # the store
+        for msg in self.msgs_store.messages:
+            if not msg.may_be_emitted():
+                self._msgs_state[msg.msgid] = False
+
         if not isinstance(files_or_modules, (list, tuple)):
             files_or_modules = (files_or_modules,)
-        walker = PyLintASTWalker(self)
+
+        if self.config.jobs == 1:
+            with fix_import_path(files_or_modules):
+                self._do_check(files_or_modules)
+        else:
+            # Hack that permits running pylint, on Windows, with -m switch
+            # and with --jobs, as in 'python -2 -m pylint .. --jobs'.
+            # For more details why this is needed,
+            # see Python issue http://bugs.python.org/issue10845.
+
+            mock_main = __name__ != '__main__' # -m switch
+            if mock_main:
+                sys.modules['__main__'] = sys.modules[__name__]
+            try:
+                self._parallel_check(files_or_modules)
+            finally:
+                if mock_main:
+                    sys.modules.pop('__main__')
+
+    def _parallel_task(self, files_or_modules):
+        # Prepare configuration for child linters.
+        filter_options = {'symbols', 'include-ids', 'long-help'}
+        filter_options.update([opt_name for opt_name, _ in self._external_opts])
+        config = {}
+        for opt_providers in six.itervalues(self._all_options):
+            for optname, optdict, val in opt_providers.options_and_values():
+                if optname not in filter_options:
+                    config[optname] = configuration.format_option_value(optdict, val)
+
+        childs = []
+        manager = multiprocessing.Manager()  # pylint: disable=no-member
+        tasks_queue = manager.Queue()  # pylint: disable=no-member
+        results_queue = manager.Queue()  # pylint: disable=no-member
+
+        for _ in range(self.config.jobs):
+            cl = ChildLinter(args=(tasks_queue, results_queue, config))
+            cl.start()  # pylint: disable=no-member
+            childs.append(cl)
+
+        # send files to child linters
+        for files_or_module in files_or_modules:
+            tasks_queue.put([files_or_module])
+
+        # collect results from child linters
+        failed = False
+        for _ in files_or_modules:
+            try:
+                result = results_queue.get()
+            except Exception as ex:
+                print("internal error while receiving results from child linter",
+                      file=sys.stderr)
+                print(ex, file=sys.stderr)
+                failed = True
+                break
+            yield result
+
+        # Stop child linters and wait for their completion.
+        for _ in range(self.config.jobs):
+            tasks_queue.put('STOP')
+        for cl in childs:
+            cl.join()
+
+        if failed:
+            print("Error occured, stopping the linter.", file=sys.stderr)
+            sys.exit(32)
+
+    def _parallel_check(self, files_or_modules):
+        # Reset stats.
+        self.open()
+
+        all_stats = []
+        for result in self._parallel_task(files_or_modules):
+            (
+                file_or_module,
+                self.file_state.base_name,
+                module,
+                messages,
+                stats,
+                msg_status
+            ) = result
+
+            if file_or_module == files_or_modules[-1]:
+                last_module = module
+
+            for msg in messages:
+                msg = utils.Message(*msg)
+                self.set_current_module(module)
+                self.reporter.handle_message(msg)
+
+            all_stats.append(stats)
+            self.msg_status |= msg_status
+
+        self.stats = _merge_stats(itertools.chain(all_stats, [self.stats]))
+        self.current_name = last_module
+
+        # Insert stats data to local checkers.
+        for checker in self.get_checkers():
+            if checker is not self:
+                checker.stats = self.stats
+
+    def _do_check(self, files_or_modules):
+        walker = utils.PyLintASTWalker(self)
         checkers = self.prepare_checkers()
-        rawcheckers = [c for c in checkers if implements(c, IRawChecker)
-                       and c is not self]
+        tokencheckers = [c for c in checkers
+                         if interface.implements(c, interfaces.ITokenChecker)
+                         and c is not self]
+        rawcheckers = [c for c in checkers
+                       if interface.implements(c, interfaces.IRawChecker)]
         # notify global begin
         for checker in checkers:
             checker.open()
-            if implements(checker, IASTNGChecker):
+            if interface.implements(checker, interfaces.IAstroidChecker):
                 walker.add_checker(checker)
         # build ast and check modules or packages
         for descr in self.expand_files(files_or_modules):
             modname, filepath = descr['name'], descr['path']
-            self.set_current_module(modname, filepath)
-            # get the module representation
-            astng = self.get_astng(filepath, modname)
-            if astng is None:
+            if not descr['isarg'] and not self.should_analyze_file(modname, filepath):
                 continue
-            self.base_name = descr['basename']
-            self.base_file = descr['basepath']
             if self.config.files_output:
                 reportfile = 'pylint_%s.%s' % (modname, self.reporter.extension)
                 self.reporter.set_output(open(reportfile, 'w'))
+            self.set_current_module(modname, filepath)
+            # get the module representation
+            ast_node = self.get_ast(filepath, modname)
+            if ast_node is None:
+                continue
+            # XXX to be correct we need to keep module_msgs_state for every
+            # analyzed module (the problem stands with localized messages which
+            # are only detected in the .close step)
+            self.file_state = utils.FileState(descr['basename'])
             self._ignore_file = False
             # fix the current file (if the source file was not available or
             # if it's actually a c extension)
-            self.current_file = astng.file
-            self.check_astng_module(astng, walker, rawcheckers)
+            self.current_file = ast_node.file # pylint: disable=maybe-no-member
+            self.check_astroid_module(ast_node, walker, rawcheckers, tokencheckers)
+            # warn about spurious inline messages handling
+            for msgid, line, args in self.file_state.iter_spurious_suppression_messages(self.msgs_store):
+                self.add_message(msgid, line, None, args)
         # notify global end
-        self.set_current_module('')
         self.stats['statement'] = walker.nbstatements
         checkers.reverse()
         for checker in checkers:
@@ -510,12 +816,12 @@
     def expand_files(self, modules):
         """get modules and errors from a list of modules and handle errors
         """
-        result, errors = expand_modules(modules, self.config.black_list)
+        result, errors = utils.expand_modules(modules, self.config.black_list)
         for error in errors:
             message = modname = error["mod"]
             key = error["key"]
             self.set_current_module(modname)
-            if key == "F0001":
+            if key == "fatal":
                 message = str(error["ex"]).replace(os.getcwd() + os.sep, '')
             self.add_message(key, args=message)
         return result
@@ -526,144 +832,164 @@
         """
         if not modname and filepath is None:
             return
+        self.reporter.on_set_current_module(modname, filepath)
         self.current_name = modname
         self.current_file = filepath or modname
         self.stats['by_module'][modname] = {}
         self.stats['by_module'][modname]['statement'] = 0
-        for msg_cat in MSG_TYPES.values():
+        for msg_cat in six.itervalues(utils.MSG_TYPES):
             self.stats['by_module'][modname][msg_cat] = 0
-        # XXX hack, to be correct we need to keep module_msgs_state
-        # for every analyzed module (the problem stands with localized
-        # messages which are only detected in the .close step)
-        if modname:
-            self._module_msgs_state = {}
-            self._module_msg_cats_state = {}
 
-    def get_astng(self, filepath, modname):
-        """return a astng representation for a module"""
+    def get_ast(self, filepath, modname):
+        """return a ast(roid) representation for a module"""
         try:
-            return MANAGER.astng_from_file(filepath, modname, source=True)
-        except SyntaxError, ex:
-            self.add_message('E0001', line=ex.lineno, args=ex.msg)
-        except ASTNGBuildingException, ex:
-            self.add_message('F0010', args=ex)
-        except Exception, ex:
+            return MANAGER.ast_from_file(filepath, modname, source=True)
+        except SyntaxError as ex:
+            self.add_message('syntax-error', line=ex.lineno, args=ex.msg)
+        except astroid.AstroidBuildingException as ex:
+            self.add_message('parse-error', args=ex)
+        except Exception as ex: # pylint: disable=broad-except
             import traceback
             traceback.print_exc()
-            self.add_message('F0002', args=(ex.__class__, ex))
+            self.add_message('astroid-error', args=(ex.__class__, ex))
 
-    def check_astng_module(self, astng, walker, rawcheckers):
-        """check a module from its astng representation, real work"""
-        # call raw checkers if possible
-        if not astng.pure_python:
-            self.add_message('I0001', args=astng.name)
+    def check_astroid_module(self, ast_node, walker,
+                             rawcheckers, tokencheckers):
+        """Check a module from its astroid representation."""
+        try:
+            tokens = utils.tokenize_module(ast_node)
+        except tokenize.TokenError as ex:
+            self.add_message('syntax-error', line=ex.args[1][0], args=ex.args[0])
+            return
+
+        if not ast_node.pure_python:
+            self.add_message('raw-checker-failed', args=ast_node.name)
         else:
-            #assert astng.file.endswith('.py')
-            # invoke IRawChecker interface on self to fetch module/block
+            #assert astroid.file.endswith('.py')
+            # invoke ITokenChecker interface on self to fetch module/block
             # level options
-            self.process_module(astng)
+            self.process_tokens(tokens)
             if self._ignore_file:
                 return False
             # walk ast to collect line numbers
-            orig_state = self._module_msgs_state.copy()
-            self._module_msgs_state = {}
-            self.collect_block_lines(astng, orig_state)
+            self.file_state.collect_block_lines(self.msgs_store, ast_node)
+            # run raw and tokens checkers
             for checker in rawcheckers:
-                checker.process_module(astng)
-        # generate events to astng checkers
-        walker.walk(astng)
+                checker.process_module(ast_node)
+            for checker in tokencheckers:
+                checker.process_tokens(tokens)
+        # generate events to astroid checkers
+        walker.walk(ast_node)
         return True
 
-    # IASTNGChecker interface #################################################
+    # IAstroidChecker interface #################################################
 
     def open(self):
         """initialize counters"""
-        self.stats = { 'by_module' : {},
-                       'by_msg' : {},
-                       }
-        for msg_cat in MSG_TYPES.values():
+        self.stats = {'by_module' : {},
+                      'by_msg' : {},
+                     }
+        MANAGER.always_load_extensions = self.config.unsafe_load_any_extension
+        MANAGER.extension_package_whitelist.update(
+            self.config.extension_pkg_whitelist)
+        for msg_cat in six.itervalues(utils.MSG_TYPES):
             self.stats[msg_cat] = 0
 
-    def close(self):
+    def generate_reports(self):
         """close the whole package /module, it's time to make reports !
 
         if persistent run, pickle results for later comparison
         """
-        if self.base_name is not None:
-            # load old results if any
-            old_stats = config.load_results(self.base_name)
+        if self.file_state.base_name is not None:
+            # load previous results if any
+            previous_stats = config.load_results(self.file_state.base_name)
+            # XXX code below needs refactoring to be more reporter agnostic
+            self.reporter.on_close(self.stats, previous_stats)
             if self.config.reports:
-                self.make_reports(self.stats, old_stats)
-            elif self.config.output_format == 'html':
-                self.reporter.display_results(Section())
+                sect = self.make_reports(self.stats, previous_stats)
+                if self.config.files_output:
+                    filename = 'pylint_global.' + self.reporter.extension
+                    self.reporter.set_output(open(filename, 'w'))
+            else:
+                sect = ureports.Section()
+            if self.config.reports or self.config.output_format == 'html':
+                self.reporter.display_results(sect)
             # save results if persistent run
             if self.config.persistent:
-                config.save_results(self.stats, self.base_name)
+                config.save_results(self.stats, self.file_state.base_name)
+        else:
+            if self.config.output_format == 'html':
+                # No output will be emitted for the html
+                # reporter if the file doesn't exist, so emit
+                # the results here.
+                self.reporter.display_results(ureports.Section())
+            self.reporter.on_close(self.stats, {})
 
     # specific reports ########################################################
 
-    def report_evaluation(self, sect, stats, old_stats):
+    def report_evaluation(self, sect, stats, previous_stats):
         """make the global evaluation report"""
         # check with at least check 1 statements (usually 0 when there is a
         # syntax error preventing pylint from further processing)
         if stats['statement'] == 0:
-            raise EmptyReport()
+            raise utils.EmptyReport()
         # get a global note for the code
         evaluation = self.config.evaluation
         try:
-            note = eval(evaluation, {}, self.stats)
-        except Exception, ex:
+            note = eval(evaluation, {}, self.stats) # pylint: disable=eval-used
+        except Exception as ex: # pylint: disable=broad-except
             msg = 'An exception occurred while rating: %s' % ex
         else:
             stats['global_note'] = note
             msg = 'Your code has been rated at %.2f/10' % note
-            if 'global_note' in old_stats:
-                msg += ' (previous run: %.2f/10)' % old_stats['global_note']
+            pnote = previous_stats.get('global_note')
+            if pnote is not None:
+                msg += ' (previous run: %.2f/10, %+.2f)' % (pnote, note - pnote)
             if self.config.comment:
                 msg = '%s\n%s' % (msg, config.get_note_message(note))
-        sect.append(Text(msg))
+        sect.append(ureports.Text(msg))
 
 # some reporting functions ####################################################
 
-def report_total_messages_stats(sect, stats, old_stats):
+def report_total_messages_stats(sect, stats, previous_stats):
     """make total errors / warnings report"""
     lines = ['type', 'number', 'previous', 'difference']
-    lines += table_lines_from_stats(stats, old_stats,
-                                    ('convention', 'refactor',
-                                     'warning', 'error'))
-    sect.append(Table(children=lines, cols=4, rheaders=1))
+    lines += checkers.table_lines_from_stats(stats, previous_stats,
+                                             ('convention', 'refactor',
+                                              'warning', 'error'))
+    sect.append(ureports.Table(children=lines, cols=4, rheaders=1))
 
 def report_messages_stats(sect, stats, _):
     """make messages type report"""
     if not stats['by_msg']:
         # don't print this report when we didn't detected any errors
-        raise EmptyReport()
-    in_order = sorted([(value, msg_id) 
-                       for msg_id, value in stats['by_msg'].items()
+        raise utils.EmptyReport()
+    in_order = sorted([(value, msg_id)
+                       for msg_id, value in six.iteritems(stats['by_msg'])
                        if not msg_id.startswith('I')])
     in_order.reverse()
     lines = ('message id', 'occurrences')
     for value, msg_id in in_order:
         lines += (msg_id, str(value))
-    sect.append(Table(children=lines, cols=2, rheaders=1))
+    sect.append(ureports.Table(children=lines, cols=2, rheaders=1))
 
 def report_messages_by_module_stats(sect, stats, _):
     """make errors / warnings by modules report"""
     if len(stats['by_module']) == 1:
         # don't print this report when we are analysing a single module
-        raise EmptyReport()
-    by_mod = {}
+        raise utils.EmptyReport()
+    by_mod = collections.defaultdict(dict)
     for m_type in ('fatal', 'error', 'warning', 'refactor', 'convention'):
         total = stats[m_type]
-        for module in stats['by_module'].keys():
+        for module in six.iterkeys(stats['by_module']):
             mod_total = stats['by_module'][module][m_type]
             if total == 0:
                 percent = 0
             else:
                 percent = float((mod_total)*100) / total
-            by_mod.setdefault(module, {})[m_type] = percent
+            by_mod[module][m_type] = percent
     sorted_result = []
-    for module, mod_info in by_mod.items():
+    for module, mod_info in six.iteritems(by_mod):
         sorted_result.append((mod_info['error'],
                               mod_info['warning'],
                               mod_info['refactor'],
@@ -673,25 +999,19 @@
     sorted_result.reverse()
     lines = ['module', 'error', 'warning', 'refactor', 'convention']
     for line in sorted_result:
-        if line[0] == 0 and line[1] == 0:
-            break
+        # Don't report clean modules.
+        if all(entry == 0 for entry in line[:-1]):
+            continue
         lines.append(line[-1])
         for val in line[:-1]:
             lines.append('%.2f' % val)
     if len(lines) == 5:
-        raise EmptyReport()
-    sect.append(Table(children=lines, cols=5, rheaders=1))
+        raise utils.EmptyReport()
+    sect.append(ureports.Table(children=lines, cols=5, rheaders=1))
 
 
 # utilities ###################################################################
 
-# this may help to import modules using gettext
-
-try:
-    __builtins__._ = str
-except AttributeError:
-    __builtins__['_'] = str
-
 
 class ArgumentPreprocessingError(Exception):
     """Raised if an error occurs during argument preprocessing."""
@@ -714,19 +1034,49 @@
                 option, val = arg[2:], None
             try:
                 cb, takearg = search_for[option]
+            except KeyError:
+                i += 1
+            else:
                 del args[i]
                 if takearg and val is None:
                     if i >= len(args) or args[i].startswith('-'):
-                        raise ArgumentPreprocessingError(arg)
+                        msg = 'Option %s expects a value' % option
+                        raise ArgumentPreprocessingError(msg)
                     val = args[i]
                     del args[i]
+                elif not takearg and val is not None:
+                    msg = "Option %s doesn't expect a value" % option
+                    raise ArgumentPreprocessingError(msg)
                 cb(option, val)
-            except KeyError:
-                i += 1
         else:
             i += 1
 
-class Run:
+
+@contextlib.contextmanager
+def fix_import_path(args):
+    """Prepare sys.path for running the linter checks.
+
+    Within this context, each of the given arguments is importable.
+    Paths are added to sys.path in corresponding order to the arguments.
+    We avoid adding duplicate directories to sys.path.
+    `sys.path` is reset to its original value upon exiting this context.
+    """
+    orig = list(sys.path)
+    changes = []
+    for arg in args:
+        path = _get_python_path(arg)
+        if path in changes:
+            continue
+        else:
+            changes.append(path)
+    sys.path[:] = changes + sys.path
+    try:
+        yield
+    finally:
+        sys.path[:] = orig
+
+
+class Run(object):
     """helper class to use as main for pylint :
 
     run(*sys.argv[1:])
@@ -743,11 +1093,12 @@
         try:
             preprocess_options(args, {
                 # option: (callback, takearg)
+                'init-hook':   (cb_init_hook, True),
                 'rcfile':       (self.cb_set_rcfile, True),
                 'load-plugins': (self.cb_add_plugins, True),
                 })
-        except ArgumentPreprocessingError, e:
-            print >> sys.stderr, 'Argument %s expects a value.' % (e.args[0],)
+        except ArgumentPreprocessingError as ex:
+            print(ex, file=sys.stderr)
             sys.exit(32)
 
         self.linter = linter = self.LinterClass((
@@ -757,17 +1108,18 @@
               'help' : 'Specify a configuration file.'}),
 
             ('init-hook',
-             {'action' : 'callback', 'type' : 'string', 'metavar': '<code>',
-              'callback' : cb_init_hook, 'level': 1,
-              'help' : 'Python code to execute, usually for sys.path \
-manipulation such as pygtk.require().'}),
+             {'action' : 'callback', 'callback' : lambda *args: 1,
+              'type' : 'string', 'metavar': '<code>',
+              'level': 1,
+              'help' : 'Python code to execute, usually for sys.path '
+                       'manipulation such as pygtk.require().'}),
 
             ('help-msg',
              {'action' : 'callback', 'type' : 'string', 'metavar': '<msg-id>',
               'callback' : self.cb_help_message,
               'group': 'Commands',
-              'help' : '''Display a help message for the given message id and \
-exit. The value may be a comma separated list of message ids.'''}),
+              'help' : 'Display a help message for the given message id and '
+                       'exit. The value may be a comma separated list of message ids.'}),
 
             ('list-msgs',
              {'action' : 'callback', 'metavar': '<msg-id>',
@@ -775,6 +1127,12 @@
               'group': 'Commands', 'level': 1,
               'help' : "Generate pylint's messages."}),
 
+            ('list-conf-levels',
+             {'action' : 'callback',
+              'callback' : cb_list_confidence_levels,
+              'group': 'Commands', 'level': 1,
+              'help' : "Generate pylint's confidence levels."}),
+
             ('full-documentation',
              {'action' : 'callback', 'metavar': '<msg-id>',
               'callback' : self.cb_full_documentation,
@@ -784,68 +1142,83 @@
             ('generate-rcfile',
              {'action' : 'callback', 'callback' : self.cb_generate_config,
               'group': 'Commands',
-              'help' : '''Generate a sample configuration file according to \
-the current configuration. You can put other options before this one to get \
-them in the generated configuration.'''}),
+              'help' : 'Generate a sample configuration file according to '
+                       'the current configuration. You can put other options '
+                       'before this one to get them in the generated '
+                       'configuration.'}),
 
             ('generate-man',
              {'action' : 'callback', 'callback' : self.cb_generate_manpage,
               'group': 'Commands',
-              'help' : "Generate pylint's man page.",'hide': True}),
+              'help' : "Generate pylint's man page.", 'hide': True}),
 
             ('errors-only',
              {'action' : 'callback', 'callback' : self.cb_error_mode,
               'short': 'E',
-              'help' : '''In error mode, checkers without error messages are \
-disabled and for others, only the ERROR messages are displayed, and no reports \
-are done by default'''}),
+              'help' : 'In error mode, checkers without error messages are '
+                       'disabled and for others, only the ERROR messages are '
+                       'displayed, and no reports are done by default'}),
+
+            ('py3k',
+             {'action' : 'callback', 'callback' : self.cb_python3_porting_mode,
+              'help' : 'In Python 3 porting mode, all checkers will be '
+                       'disabled and only messages emitted by the porting '
+                       'checker will be displayed'}),
 
             ('profile',
              {'type' : 'yn', 'metavar' : '<y_or_n>',
               'default': False, 'hide': True,
               'help' : 'Profiled execution.'}),
 
-            ), option_groups=self.option_groups,
-               reporter=reporter, pylintrc=self._rcfile)
+            ), option_groups=self.option_groups, pylintrc=self._rcfile)
         # register standard checkers
         linter.load_default_plugins()
         # load command line plugins
         linter.load_plugin_modules(self._plugins)
         # add some help section
         linter.add_help_section('Environment variables', config.ENV_HELP, level=1)
-        linter.add_help_section('Output', '''
-Using the default text output, the message format is :                          
-                                                                                
-        MESSAGE_TYPE: LINE_NUM:[OBJECT:] MESSAGE                                
-                                                                                
-There are 5 kind of message types :                                             
-    * (C) convention, for programming standard violation                        
-    * (R) refactor, for bad code smell                                          
-    * (W) warning, for python specific problems                                 
-    * (E) error, for probable bugs in the code                                  
-    * (F) fatal, if an error occurred which prevented pylint from doing further
-processing.
-        ''', level=1)
-        linter.add_help_section('Output status code', '''
-Pylint should leave with following status code:                                 
-    * 0 if everything went fine                                                 
-    * 1 if a fatal message was issued                                           
-    * 2 if an error message was issued                                          
-    * 4 if a warning message was issued                                         
-    * 8 if a refactor message was issued                                        
-    * 16 if a convention message was issued                                     
-    * 32 on usage error                                                         
-                                                                                
-status 1 to 16 will be bit-ORed so you can know which different categories has
-been issued by analysing pylint output status code
-        ''', level=1)
+        # pylint: disable=bad-continuation
+        linter.add_help_section('Output',
+'Using the default text output, the message format is :                          \n'
+'                                                                                \n'
+'        MESSAGE_TYPE: LINE_NUM:[OBJECT:] MESSAGE                                \n'
+'                                                                                \n'
+'There are 5 kind of message types :                                             \n'
+'    * (C) convention, for programming standard violation                        \n'
+'    * (R) refactor, for bad code smell                                          \n'
+'    * (W) warning, for python specific problems                                 \n'
+'    * (E) error, for probable bugs in the code                                  \n'
+'    * (F) fatal, if an error occurred which prevented pylint from doing further\n'
+'processing.\n'
+                                , level=1)
+        linter.add_help_section('Output status code',
+'Pylint should leave with following status code:                                 \n'
+'    * 0 if everything went fine                                                 \n'
+'    * 1 if a fatal message was issued                                           \n'
+'    * 2 if an error message was issued                                          \n'
+'    * 4 if a warning message was issued                                         \n'
+'    * 8 if a refactor message was issued                                        \n'
+'    * 16 if a convention message was issued                                     \n'
+'    * 32 on usage error                                                         \n'
+'                                                                                \n'
+'status 1 to 16 will be bit-ORed so you can know which different categories has\n'
+'been issued by analysing pylint output status code\n',
+                                level=1)
         # read configuration
-        linter.disable('W0704')
+        linter.disable('pointless-except')
+        linter.disable('suppressed-message')
+        linter.disable('useless-suppression')
         linter.read_config_file()
-        # is there some additional plugins in the file configuration, in
         config_parser = linter.cfgfile_parser
+        # run init hook, if present, before loading plugins
+        if config_parser.has_option('MASTER', 'init-hook'):
+            cb_init_hook('init-hook',
+                         textutils.unquote(config_parser.get('MASTER',
+                                                             'init-hook')))
+        # check if there are additional plugins in the file configuration
         if config_parser.has_option('MASTER', 'load-plugins'):
-            plugins = splitstrip(config_parser.get('MASTER', 'load-plugins'))
+            plugins = textutils.splitstrip(
+                config_parser.get('MASTER', 'load-plugins'))
             linter.load_plugin_modules(plugins)
         # now we can load file config and command line, plugins (which can
         # provide options) have been registered
@@ -857,37 +1230,52 @@
             linter.set_reporter(reporter)
         try:
             args = linter.load_command_line_configuration(args)
-        except SystemExit, exc:
+        except SystemExit as exc:
             if exc.code == 2: # bad options
                 exc.code = 32
             raise
         if not args:
-            print linter.help()
+            print(linter.help())
             sys.exit(32)
+
+        if linter.config.jobs < 0:
+            print("Jobs number (%d) should be greater than 0"
+                  % linter.config.jobs, file=sys.stderr)
+            sys.exit(32)
+        if linter.config.jobs > 1 or linter.config.jobs == 0:
+            if multiprocessing is None:
+                print("Multiprocessing library is missing, "
+                      "fallback to single process", file=sys.stderr)
+                linter.set_option("jobs", 1)
+            else:
+                if linter.config.jobs == 0:
+                    linter.config.jobs = multiprocessing.cpu_count()
+
         # insert current working directory to the python path to have a correct
         # behaviour
-        sys.path.insert(0, os.getcwd())
         if self.linter.config.profile:
-            print >> sys.stderr, '** profiled run'
-            import cProfile, pstats
-            cProfile.runctx('linter.check(%r)' % args, globals(), locals(), 'stones.prof' )
-            data = pstats.Stats('stones.prof')
-            data.strip_dirs()
-            data.sort_stats('time', 'calls')
-            data.print_stats(30)
+            with fix_import_path(args):
+                print('** profiled run', file=sys.stderr)
+                import cProfile, pstats
+                cProfile.runctx('linter.check(%r)' % args, globals(), locals(),
+                                'stones.prof')
+                data = pstats.Stats('stones.prof')
+                data.strip_dirs()
+                data.sort_stats('time', 'calls')
+                data.print_stats(30)
         else:
             linter.check(args)
-        sys.path.pop(0)
+        linter.generate_reports()
         if exit:
             sys.exit(self.linter.msg_status)
 
     def cb_set_rcfile(self, name, value):
-        """callback for option preprocessing (i.e. before optik parsing)"""
+        """callback for option preprocessing (i.e. before option parsing)"""
         self._rcfile = value
 
     def cb_add_plugins(self, name, value):
-        """callback for option preprocessing (i.e. before optik parsing)"""
-        self._plugins.extend(splitstrip(value))
+        """callback for option preprocessing (i.e. before option parsing)"""
+        self._plugins.extend(textutils.splitstrip(value))
 
     def cb_error_mode(self, *args, **kwargs):
         """error mode:
@@ -912,7 +1300,7 @@
 
     def cb_help_message(self, option, optname, value, parser):
         """optik callback for printing some help about a particular message"""
-        self.linter.help_message(splitstrip(value))
+        self.linter.msgs_store.help_message(textutils.splitstrip(value))
         sys.exit(0)
 
     def cb_full_documentation(self, option, optname, value, parser):
@@ -922,12 +1310,23 @@
 
     def cb_list_messages(self, option, optname, value, parser): # FIXME
         """optik callback for printing available messages"""
-        self.linter.list_messages()
+        self.linter.msgs_store.list_messages()
         sys.exit(0)
 
-def cb_init_hook(option, optname, value, parser):
+    def cb_python3_porting_mode(self, *args, **kwargs):
+        """Activate only the python3 porting checker."""
+        self.linter.disable('all')
+        self.linter.enable('python3')
+
+
+def cb_list_confidence_levels(option, optname, value, parser):
+    for level in interfaces.CONFIDENCE_LEVELS:
+        print('%-18s: %s' % level)
+    sys.exit(0)
+
+def cb_init_hook(optname, value):
     """exec arbitrary code to set sys.path for instance"""
-    exec value
+    exec(value) # pylint: disable=exec-used
 
 
 if __name__ == '__main__':
diff --git a/third_party/pylint/pyreverse/diadefslib.py b/third_party/pylint/pyreverse/diadefslib.py
index 68ca68c..e0dc8cf 100644
--- a/third_party/pylint/pyreverse/diadefslib.py
+++ b/third_party/pylint/pyreverse/diadefslib.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2000-2010 LOGILAB S.A. (Paris, FRANCE).
+# Copyright (c) 2000-2013 LOGILAB S.A. (Paris, FRANCE).
 # http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This program is free software; you can redistribute it and/or modify it under
@@ -12,22 +12,24 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """handle diagram generation options for class diagram or default diagrams
 """
 
 from logilab.common.compat import builtins
-BUILTINS_NAME = builtins.__name__
-from logilab import astng
-from logilab.astng.utils import LocalsVisitor
+
+import astroid
+from astroid.utils import LocalsVisitor
 
 from pylint.pyreverse.diagrams import PackageDiagram, ClassDiagram
 
+BUILTINS_NAME = builtins.__name__
+
 # diagram generators ##########################################################
 
-class DiaDefGenerator:
-    """handle diagram generation options
-    """
+class DiaDefGenerator(object):
+    """handle diagram generation options"""
+
     def __init__(self, linker, handler):
         """common Diagram Handler initialization"""
         self.config = handler.config
@@ -39,7 +41,7 @@
         """get title for objects"""
         title = node.name
         if self.module_names:
-            title =  '%s.%s' % (node.root().name, title)
+            title = '%s.%s' % (node.root().name, title)
         return title
 
     def _set_option(self, option):
@@ -97,12 +99,12 @@
         """return associated nodes of a class node"""
         if level == 0:
             return
-        for ass_nodes in klass_node.instance_attrs_type.values() + \
-                         klass_node.locals_type.values():
+        for ass_nodes in list(klass_node.instance_attrs_type.values()) + \
+                         list(klass_node.locals_type.values()):
             for ass_node in ass_nodes:
-                if isinstance(ass_node, astng.Instance):
+                if isinstance(ass_node, astroid.Instance):
                     ass_node = ass_node._proxied
-                if not (isinstance(ass_node, astng.Class) 
+                if not (isinstance(ass_node, astroid.Class)
                         and self.show_node(ass_node)):
                     continue
                 yield ass_node
@@ -132,7 +134,7 @@
         LocalsVisitor.__init__(self)
 
     def visit_project(self, node):
-        """visit an astng.Project node
+        """visit an astroid.Project node
 
         create a diagram definition for packages
         """
@@ -143,8 +145,8 @@
             self.pkgdiagram = None
         self.classdiagram = ClassDiagram('classes %s' % node.name, mode)
 
-    def leave_project(self, node):
-        """leave the astng.Project node
+    def leave_project(self, node): # pylint: disable=unused-argument
+        """leave the astroid.Project node
 
         return the generated diagram definition
         """
@@ -153,7 +155,7 @@
         return self.classdiagram,
 
     def visit_module(self, node):
-        """visit an astng.Module node
+        """visit an astroid.Module node
 
         add this class to the package diagram definition
         """
@@ -162,7 +164,7 @@
             self.pkgdiagram.add_object(node.name, node)
 
     def visit_class(self, node):
-        """visit an astng.Class node
+        """visit an astroid.Class node
 
         add this class to the class diagram definition
         """
@@ -170,7 +172,7 @@
         self.extract_classes(node, anc_level, ass_level)
 
     def visit_from(self, node):
-        """visit astng.From  and catch modules for package diagram
+        """visit astroid.From  and catch modules for package diagram
         """
         if self.pkgdiagram:
             self.pkgdiagram.add_from_depend(node, node.modname)
@@ -196,7 +198,7 @@
         else:
             module = project.modules[0]
             klass = klass.split('.')[-1]
-        klass = module.ilookup(klass).next()
+        klass = next(module.ilookup(klass))
 
         anc_level, ass_level = self._get_levels()
         self.extract_classes(klass, anc_level, ass_level)
@@ -204,7 +206,7 @@
 
 # diagram handler #############################################################
 
-class DiadefsHandler:
+class DiadefsHandler(object):
     """handle diagram definitions :
 
     get it from user (i.e. xml files) or generate them
@@ -215,8 +217,8 @@
 
     def get_diadefs(self, project, linker):
         """get the diagrams configuration data
-        :param linker: astng.inspector.Linker(IdGeneratorMixIn, LocalsVisitor)
-        :param project: astng.manager.Project        
+        :param linker: astroid.inspector.Linker(IdGeneratorMixIn, LocalsVisitor)
+        :param project: astroid.manager.Project
         """
 
         #  read and interpret diagram definitions (Diadefs)
diff --git a/third_party/pylint/pyreverse/diagrams.py b/third_party/pylint/pyreverse/diagrams.py
index 23d23ef..f0d7a92 100644
--- a/third_party/pylint/pyreverse/diagrams.py
+++ b/third_party/pylint/pyreverse/diagrams.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2004-2010 LOGILAB S.A. (Paris, FRANCE).
+# Copyright (c) 2004-2013 LOGILAB S.A. (Paris, FRANCE).
 # http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This program is free software; you can redistribute it and/or modify it under
@@ -12,24 +12,16 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """diagram objects
 """
 
-from logilab import astng
+import astroid
 from pylint.pyreverse.utils import is_interface, FilterMixIn
 
-def set_counter(value):
-    """Figure counter (re)set"""
-    Figure._UID_COUNT = value
-    
-class Figure:
+class Figure(object):
     """base class for counter handling"""
-    _UID_COUNT = 0
-    def __init__(self):
-        Figure._UID_COUNT += 1
-        self.fig_id = Figure._UID_COUNT
-        
+
 class Relationship(Figure):
     """a relation ship from an object in the diagram to another
     """
@@ -39,10 +31,10 @@
         self.to_object = to_object
         self.type = relation_type
         self.name = name
-        
-    
+
+
 class DiagramEntity(Figure):
-    """a diagram object, i.e. a label associated to an astng node
+    """a diagram object, i.e. a label associated to an astroid node
     """
     def __init__(self, title='No name', node=None):
         Figure.__init__(self)
@@ -62,7 +54,12 @@
         self._nodes = {}
         self.depends = []
 
-    def add_relationship(self, from_object, to_object, 
+    def get_relationships(self, role):
+        # sorted to get predictable (hence testable) results
+        return sorted(self.relationships.get(role, ()),
+                      key=lambda x: (x.from_object.fig_id, x.to_object.fig_id))
+
+    def add_relationship(self, from_object, to_object,
                          relation_type, name=None):
         """create a relation ship
         """
@@ -80,20 +77,23 @@
     def get_attrs(self, node):
         """return visible attributes, possibly with class name"""
         attrs = []
-        for node_name, ass_nodes in node.instance_attrs_type.items() + \
-                                node.locals_type.items():
+        for node_name, ass_nodes in list(node.instance_attrs_type.items()) + \
+                                list(node.locals_type.items()):
             if not self.show_attr(node_name):
                 continue
             names = self.class_names(ass_nodes)
             if names:
                 node_name = "%s : %s" % (node_name, ", ".join(names))
             attrs.append(node_name)
-        return attrs
+        return sorted(attrs)
 
     def get_methods(self, node):
         """return visible methods"""
-        return [m for m in node.values()
-                if isinstance(m, astng.Function) and self.show_attr(m.name)]
+        methods = [
+            m for m in node.values()
+            if isinstance(m, astroid.Function) and self.show_attr(m.name)
+        ]
+        return sorted(methods, key=lambda n: n.name)
 
     def add_object(self, title, node):
         """create a diagram object
@@ -107,9 +107,9 @@
         """return class names if needed in diagram"""
         names = []
         for ass_node in nodes:
-            if isinstance(ass_node, astng.Instance):
+            if isinstance(ass_node, astroid.Instance):
                 ass_node = ass_node._proxied
-            if isinstance(ass_node, astng.Class) \
+            if isinstance(ass_node, astroid.Class) \
                 and hasattr(ass_node, "name") and not self.has_node(ass_node):
                 if ass_node.name not in names:
                     ass_name = ass_node.name
@@ -125,15 +125,15 @@
         """return true if the given node is included in the diagram
         """
         return node in self._nodes
-        
+
     def object_from_node(self, node):
         """return the diagram object mapped to node
         """
         return self._nodes[node]
-            
+
     def classes(self):
         """return all class nodes in the diagram"""
-        return [o for o in self.objects if isinstance(o.node, astng.Class)]
+        return [o for o in self.objects if isinstance(o.node, astroid.Class)]
 
     def classe(self, name):
         """return a class by its name, raise KeyError if not found
@@ -142,7 +142,7 @@
             if klass.node.name == name:
                 return klass
         raise KeyError(name)
-    
+
     def extract_relationships(self):
         """extract relation ships between nodes in the diagram
         """
@@ -170,12 +170,12 @@
                 except KeyError:
                     continue
             # associations link
-            for name, values in node.instance_attrs_type.items() + \
-                                node.locals_type.items():
+            for name, values in list(node.instance_attrs_type.items()) + \
+                                list(node.locals_type.items()):
                 for value in values:
-                    if value is astng.YES:
+                    if value is astroid.YES:
                         continue
-                    if isinstance( value, astng.Instance):
+                    if isinstance(value, astroid.Instance):
                         value = value._proxied
                     try:
                         ass_obj = self.object_from_node(value)
@@ -188,10 +188,10 @@
     """package diagram handling
     """
     TYPE = 'package'
-    
+
     def modules(self):
         """return all module nodes in the diagram"""
-        return [o for o in self.objects if isinstance(o.node, astng.Module)]
+        return [o for o in self.objects if isinstance(o.node, astroid.Module)]
 
     def module(self, name):
         """return a module by its name, raise KeyError if not found
@@ -216,12 +216,12 @@
             if mod_name == "%s.%s" % (package.rsplit('.', 1)[0], name):
                 return mod
         raise KeyError(name)
-        
+
     def add_from_depend(self, node, from_module):
         """add dependencies created by from-imports
         """
         mod_name = node.root().name
-        obj = self.module( mod_name )
+        obj = self.module(mod_name)
         if from_module not in obj.node.depends:
             obj.node.depends.append(from_module)
 
diff --git a/third_party/pylint/pyreverse/main.py b/third_party/pylint/pyreverse/main.py
index da80bd6..408c172 100644
--- a/third_party/pylint/pyreverse/main.py
+++ b/third_party/pylint/pyreverse/main.py
@@ -1,4 +1,4 @@
-# # Copyright (c) 2000-2010 LOGILAB S.A. (Paris, FRANCE).
+# # Copyright (c) 2000-2013 LOGILAB S.A. (Paris, FRANCE).
 # http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This program is free software; you can redistribute it and/or modify it under
@@ -12,27 +12,28 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """
   %prog [options] <packages>
 
-  create UML diagrams for classes and modules in <packages> 
+  create UML diagrams for classes and modules in <packages>
 """
+from __future__ import print_function
 
 import sys, os
 from logilab.common.configuration import ConfigurationMixIn
-from logilab.astng.manager import ASTNGManager
-from logilab.astng.inspector import Linker
+from astroid.manager import AstroidManager
+from astroid.inspector import Linker
 
 from pylint.pyreverse.diadefslib import DiadefsHandler
 from pylint.pyreverse import writer
 from pylint.pyreverse.utils import insert_default_options
 
 OPTIONS = (
-("filter-mode",
-    dict(short='f', default='PUB_ONLY', dest='mode', type='string',
-    action='store', metavar='<mode>', 
-    help="""filter attributes and functions according to
+    ("filter-mode",
+     dict(short='f', default='PUB_ONLY', dest='mode', type='string',
+          action='store', metavar='<mode>',
+          help="""filter attributes and functions according to
     <mode>. Correct modes are :
                             'PUB_ONLY' filter all non public attributes
                                 [DEFAULT], equivalent to PRIVATE+SPECIAL_A
@@ -42,49 +43,48 @@
                             'OTHER' filter protected and private
                                 attributes""")),
 
-("class",
-dict(short='c', action="append", metavar="<class>", dest="classes", default=[],
-    help="create a class diagram with all classes related to <class>;\
+    ("class",
+     dict(short='c', action="append", metavar="<class>", dest="classes", default=[],
+          help="create a class diagram with all classes related to <class>;\
  this uses by default the options -ASmy")),
 
-("show-ancestors",
-dict(short="a",  action="store", metavar='<ancestor>', type='int',
-    help='show <ancestor> generations of ancestor classes not in <projects>')),
-("all-ancestors",
-dict(short="A", default=None,
-    help="show all ancestors off all classes in <projects>") ),
-("show-associated",
-dict(short='s', action="store", metavar='<ass_level>', type='int',
-    help='show <ass_level> levels of associated classes not in <projects>')),
-("all-associated",
-dict(short='S', default=None,
-    help='show recursively all associated off all associated classes')),
+    ("show-ancestors",
+     dict(short="a", action="store", metavar='<ancestor>', type='int',
+          help='show <ancestor> generations of ancestor classes not in <projects>')),
+    ("all-ancestors",
+     dict(short="A", default=None,
+          help="show all ancestors off all classes in <projects>")),
+    ("show-associated",
+     dict(short='s', action="store", metavar='<ass_level>', type='int',
+          help='show <ass_level> levels of associated classes not in <projects>')),
+    ("all-associated",
+     dict(short='S', default=None,
+          help='show recursively all associated off all associated classes')),
+    ("show-builtin",
+     dict(short="b", action="store_true", default=False,
+          help='include builtin objects in representation of classes')),
 
-("show-builtin",
-dict(short="b", action="store_true", default=False,
-    help='include builtin objects in representation of classes')),
-
-("module-names",
-dict(short="m", default=None, type='yn', metavar='[yn]',
-    help='include module name in representation of classes')),
-# TODO : generate dependencies like in pylint
-#("package-dependencies",
-#dict(short="M", action="store", metavar='<package_depth>', type='int',
-    #help='show <package_depth> module dependencies beyond modules in \
-#<projects> (for the package diagram)')),
-("only-classnames",
-dict(short='k', action="store_true", default=False,
-    help="don't show attributes and methods in the class boxes; \
+    ("module-names",
+     dict(short="m", default=None, type='yn', metavar='[yn]',
+          help='include module name in representation of classes')),
+    # TODO : generate dependencies like in pylint
+    # ("package-dependencies",
+    # dict(short="M", action="store", metavar='<package_depth>', type='int',
+    #     help='show <package_depth> module dependencies beyond modules in \
+    # <projects> (for the package diagram)')),
+    ("only-classnames",
+     dict(short='k', action="store_true", default=False,
+          help="don't show attributes and methods in the class boxes; \
 this disables -f values")),
-("output", dict(short="o", dest="output_format", action="store",
-                 default="dot", metavar="<format>",
-                help="create a *.<format> output file if format available.")),
+    ("output", dict(short="o", dest="output_format", action="store",
+                    default="dot", metavar="<format>",
+                    help="create a *.<format> output file if format available.")),
 )
 # FIXME : quiet mode
-#( ('quiet', 
+#( ('quiet',
                 #dict(help='run quietly', action='store_true', short='q')), )
 
-class PyreverseCommand(ConfigurationMixIn):
+class Run(ConfigurationMixIn):
     """base class providing common behaviour for pyreverse commands"""
 
     options = OPTIONS
@@ -92,16 +92,16 @@
     def __init__(self, args):
         ConfigurationMixIn.__init__(self, usage=__doc__)
         insert_default_options()
-        self.manager = ASTNGManager()
+        self.manager = AstroidManager()
         self.register_options_provider(self.manager)
         args = self.load_command_line_configuration()
-        self.run(args)
+        sys.exit(self.run(args))
 
     def run(self, args):
         """checking arguments and run project"""
         if not args:
-            print self.help()
-            return
+            print(self.help())
+            return 1
         # insert current working directory to the python path to recognize
         # dependencies to local modules even if cwd is not in the PYTHONPATH
         sys.path.insert(0, os.getcwd())
@@ -117,13 +117,8 @@
             writer.VCGWriter(self.config).write(diadefs)
         else:
             writer.DotWriter(self.config).write(diadefs)
+        return 0
 
 
-class Run:
-    """pyreverse main class"""
-    def __init__(self, args):
-        """run pyreverse"""
-        PyreverseCommand(args)
-
 if __name__ == '__main__':
     Run(sys.argv[1:])
diff --git a/third_party/pylint/pyreverse/utils.py b/third_party/pylint/pyreverse/utils.py
index ea8b67c..5d6d133 100644
--- a/third_party/pylint/pyreverse/utils.py
+++ b/third_party/pylint/pyreverse/utils.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2002-2010 LOGILAB S.A. (Paris, FRANCE).
+# Copyright (c) 2002-2013 LOGILAB S.A. (Paris, FRANCE).
 # http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This program is free software; you can redistribute it and/or modify it under
@@ -12,10 +12,11 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """
 generic classes/functions for pyreverse core/extensions
 """
+from __future__ import print_function
 
 import sys
 import re
@@ -50,7 +51,7 @@
 
 
 
-# astng utilities ###########################################################
+# astroid utilities ###########################################################
 
 SPECIAL = re.compile('^__[A-Za-z0-9]+[A-Za-z0-9_]*__$')
 PRIVATE = re.compile('^__[_A-Za-z0-9]*[A-Za-z0-9]+_?$')
@@ -106,10 +107,10 @@
     'SPECIAL'   : _SPECIAL,
     'OTHER'     : _PROTECTED + _PRIVATE,
 }
-VIS_MOD = {'special': _SPECIAL, 'protected': _PROTECTED, \
-            'private': _PRIVATE, 'public': 0 }
+VIS_MOD = {'special': _SPECIAL, 'protected': _PROTECTED,
+           'private': _PRIVATE, 'public': 0}
 
-class FilterMixIn:
+class FilterMixIn(object):
     """filter nodes according to a mode and nodes' visibility
     """
     def __init__(self, mode):
@@ -118,8 +119,8 @@
         for nummod in mode.split('+'):
             try:
                 __mode += MODES[nummod]
-            except KeyError, ex:
-                print >> sys.stderr, 'Unknown filter mode %s' % ex
+            except KeyError as ex:
+                print('Unknown filter mode %s' % ex, file=sys.stderr)
         self.__mode = __mode
 
 
@@ -127,5 +128,5 @@
         """return true if the node should be treated
         """
         visibility = get_visibility(getattr(node, 'name', node))
-        return not (self.__mode & VIS_MOD[visibility] )
+        return not self.__mode & VIS_MOD[visibility]
 
diff --git a/third_party/pylint/pyreverse/writer.py b/third_party/pylint/pyreverse/writer.py
index 6dbfc26..8628a8c 100644
--- a/third_party/pylint/pyreverse/writer.py
+++ b/third_party/pylint/pyreverse/writer.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright (c) 2008-2010 LOGILAB S.A. (Paris, FRANCE).
+# Copyright (c) 2008-2013 LOGILAB S.A. (Paris, FRANCE).
 # http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This program is free software; you can redistribute it and/or modify it under
@@ -13,17 +13,15 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
-"""
-Utilities for creating VCG and Dot diagrams.
-"""
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+"""Utilities for creating VCG and Dot diagrams"""
 
 from logilab.common.vcgutils import VCGPrinter
 from logilab.common.graph import DotBackend
 
 from pylint.pyreverse.utils import is_exception
 
-class DiagramWriter:
+class DiagramWriter(object):
     """base class for writing project diagrams
     """
     def __init__(self, config, styles):
@@ -46,30 +44,33 @@
 
     def write_packages(self, diagram):
         """write a package diagram"""
-        for obj in diagram.modules():
-            label = self.get_title(obj)
-            self.printer.emit_node(obj.fig_id, label=label, shape='box')
+        # sorted to get predictable (hence testable) results
+        for i, obj in enumerate(sorted(diagram.modules(), key=lambda x: x.title)):
+            self.printer.emit_node(i, label=self.get_title(obj), shape='box')
+            obj.fig_id = i
         # package dependencies
-        for rel in diagram.relationships.get('depends', ()):
+        for rel in diagram.get_relationships('depends'):
             self.printer.emit_edge(rel.from_object.fig_id, rel.to_object.fig_id,
-                              **self.pkg_edges)
+                                   **self.pkg_edges)
 
     def write_classes(self, diagram):
         """write a class diagram"""
-        for obj in diagram.objects:
-            self.printer.emit_node(obj.fig_id, **self.get_values(obj) )
+        # sorted to get predictable (hence testable) results
+        for i, obj in enumerate(sorted(diagram.objects, key=lambda x: x.title)):
+            self.printer.emit_node(i, **self.get_values(obj))
+            obj.fig_id = i
         # inheritance links
-        for rel in diagram.relationships.get('specialization', ()):
+        for rel in diagram.get_relationships('specialization'):
             self.printer.emit_edge(rel.from_object.fig_id, rel.to_object.fig_id,
-                              **self.inh_edges)
+                                   **self.inh_edges)
         # implementation links
-        for rel in diagram.relationships.get('implements', ()):
+        for rel in diagram.get_relationships('implements'):
             self.printer.emit_edge(rel.from_object.fig_id, rel.to_object.fig_id,
-                              **self.imp_edges)
+                                   **self.imp_edges)
         # generate associations
-        for rel in diagram.relationships.get('association', ()):
+        for rel in diagram.get_relationships('association'):
             self.printer.emit_edge(rel.from_object.fig_id, rel.to_object.fig_id,
-                              label=rel.name, **self.ass_edges)
+                                   label=rel.name, **self.ass_edges)
 
     def set_printer(self, file_name, basename):
         """set printer"""
@@ -93,11 +94,12 @@
     """
 
     def __init__(self, config):
-        styles = [dict(arrowtail='none', arrowhead="open"), 
-                  dict(arrowtail = "none", arrowhead='empty'), 
-                  dict(arrowtail="node", arrowhead='empty', style='dashed'),
+        styles = [dict(arrowtail='none', arrowhead="open"),
+                  dict(arrowtail='none', arrowhead='empty'),
+                  dict(arrowtail='node', arrowhead='empty', style='dashed'),
                   dict(fontcolor='green', arrowtail='none',
-                       arrowhead='diamond', style='solid') ]
+                       arrowhead='diamond', style='solid'),
+                 ]
         DiagramWriter.__init__(self, config, styles)
 
     def set_printer(self, file_name, basename):
@@ -113,20 +115,20 @@
 
     def get_values(self, obj):
         """get label and shape for classes.
-        
+
         The label contains all attributes and methods
         """
-        label =  obj.title
+        label = obj.title
         if obj.shape == 'interface':
-            label = "«interface»\\n%s" % label
+            label = u'«interface»\\n%s' % label
         if not self.config.only_classnames:
-            label = "%s|%s\l|" % (label,  r"\l".join(obj.attrs) )
+            label = r'%s|%s\l|' % (label, r'\l'.join(obj.attrs))
             for func in obj.methods:
                 label = r'%s%s()\l' % (label, func.name)
             label = '{%s}' % label
         if is_exception(obj.node):
-            return dict(fontcolor="red", label=label, shape="record")
-        return dict(label=label, shape="record")
+            return dict(fontcolor='red', label=label, shape='record')
+        return dict(label=label, shape='record')
 
     def close_graph(self):
         """print the dot graph into <file_name>"""
@@ -139,12 +141,13 @@
     def __init__(self, config):
         styles = [dict(arrowstyle='solid', backarrowstyle='none',
                        backarrowsize=0),
-                  dict(arrowstyle='solid', backarrowstyle='none', 
+                  dict(arrowstyle='solid', backarrowstyle='none',
                        backarrowsize=10),
                   dict(arrowstyle='solid', backarrowstyle='none',
                        linestyle='dotted', backarrowsize=10),
                   dict(arrowstyle='solid', backarrowstyle='none',
-                       textcolor='green') ]
+                       textcolor='green'),
+                 ]
         DiagramWriter.__init__(self, config, styles)
 
     def set_printer(self, file_name, basename):
@@ -163,7 +166,7 @@
 
     def get_values(self, obj):
         """get label and shape for classes.
-        
+
         The label contains all attributes and methods
         """
         if is_exception(obj.node):
@@ -179,7 +182,7 @@
             methods = [func.name for func in obj.methods]
             # box width for UML like diagram
             maxlen = max(len(name) for name in [obj.title] + methods + attrs)
-            line =  "_" * (maxlen + 2)
+            line = '_' * (maxlen + 2)
             label = r'%s\n\f%s' % (label, line)
             for attr in attrs:
                 label = r'%s\n\f08%s' % (label, attr)
diff --git a/third_party/pylint/reporters/__init__.py b/third_party/pylint/reporters/__init__.py
index 0582a6f..ea3281f 100644
--- a/third_party/pylint/reporters/__init__.py
+++ b/third_party/pylint/reporters/__init__.py
@@ -1,3 +1,4 @@
+# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
 # This program is free software; you can redistribute it and/or modify it under
 # the terms of the GNU General Public License as published by the Free Software
 # Foundation; either version 2 of the License, or (at your option) any later
@@ -9,17 +10,24 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
-"""utilities methods and classes for reporters
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+"""utilities methods and classes for reporters"""
+from __future__ import print_function
 
-Copyright (c) 2000-2003 LOGILAB S.A. (Paris, FRANCE).
-http://www.logilab.fr/ -- mailto:contact@logilab.fr
-"""
+import sys
+import locale
+import os
 
-import sys, locale
+
+from pylint import utils
 
 CMPS = ['=', '-', '+']
 
+# py3k has no more cmp builtin
+if sys.version_info >= (3, 0):
+    def cmp(a, b): # pylint: disable=redefined-builtin
+        return (a > b) - (a < b)
+
 def diff_string(old, new):
     """given a old and new int value, return a string representing the
     difference
@@ -29,21 +37,37 @@
     return diff_str
 
 
-class EmptyReport(Exception):
-    """raised when a report is empty and so should not be displayed"""
+class BaseReporter(object):
+    """base class for reporters
 
-class BaseReporter:
-    """base class for reporters"""
+    symbols: show short symbolic names for messages.
+    """
 
     extension = ''
 
     def __init__(self, output=None):
         self.linter = None
-        self.include_ids = None
+        # self.include_ids = None # Deprecated
+        # self.symbols = None # Deprecated
         self.section = 0
         self.out = None
         self.out_encoding = None
+        self.encode = None
         self.set_output(output)
+        # Build the path prefix to strip to get relative paths
+        self.path_strip_prefix = os.getcwd() + os.sep
+
+    def handle_message(self, msg):
+        """Handle a new message triggered on the current file.
+
+        Invokes the legacy add_message API by default."""
+        self.add_message(
+            msg.msg_id, (msg.abspath, msg.module, msg.obj, msg.line, msg.column),
+            msg.msg)
+
+    def add_message(self, msg_id, location, msg):
+        """Deprecated, do not use."""
+        raise NotImplementedError
 
     def set_output(self, output=None):
         """set output stream"""
@@ -59,17 +83,20 @@
             encoding = (getattr(self.out, 'encoding', None) or
                         locale.getdefaultlocale()[1] or
                         sys.getdefaultencoding())
-            return string.encode(encoding)
+            # errors=replace, we don't want to crash when attempting to show
+            # source code line that can't be encoded with the current locale
+            # settings
+            return string.encode(encoding, 'replace')
         self.encode = encode
 
     def writeln(self, string=''):
         """write a line in the output buffer"""
-        print >> self.out, self.encode(string)
+        print(self.encode(string), file=self.out)
 
     def display_results(self, layout):
         """display results encapsulated in the layout tree"""
         self.section = 0
-        if self.include_ids and hasattr(layout, 'report_id'):
+        if hasattr(layout, 'report_id'):
             layout.children[0].children[0].data += ' (%s)' % layout.report_id
         self._display(layout)
 
@@ -77,3 +104,30 @@
         """display the layout"""
         raise NotImplementedError()
 
+    # Event callbacks
+
+    def on_set_current_module(self, module, filepath):
+        """starting analyzis of a module"""
+        pass
+
+    def on_close(self, stats, previous_stats):
+        """global end of analyzis"""
+        pass
+
+
+class CollectingReporter(BaseReporter):
+    """collects messages"""
+
+    name = 'collector'
+
+    def __init__(self):
+        BaseReporter.__init__(self)
+        self.messages = []
+
+    def handle_message(self, msg):
+        self.messages.append(msg)
+
+
+def initialize(linter):
+    """initialize linter with reporters in this package """
+    utils.register_plugins(linter, __path__[0])
diff --git a/third_party/pylint/reporters/guireporter.py b/third_party/pylint/reporters/guireporter.py
index 13914ba..4ad4ebb 100644
--- a/third_party/pylint/reporters/guireporter.py
+++ b/third_party/pylint/reporters/guireporter.py
@@ -16,20 +16,11 @@
     def __init__(self, gui, output=sys.stdout):
         """init"""
         BaseReporter.__init__(self, output)
-        self.msgs = []
         self.gui = gui
 
-    def add_message(self, msg_id, location, msg):
+    def handle_message(self, msg):
         """manage message of different type and in the context of path"""
-        module, obj, line = location[1:]
-        if self.include_ids:
-            sigle = msg_id
-        else:
-            sigle = msg_id[0]
-
-        full_msg = [sigle, module, obj, str(line), msg]
-        self.msgs += [[sigle, module, obj, str(line)]]
-        self.gui.msg_queue.put(full_msg)
+        self.gui.msg_queue.put(msg)
 
     def _display(self, layout):
         """launch layouts display"""
diff --git a/third_party/pylint/reporters/html.py b/third_party/pylint/reporters/html.py
index 56efcd6..1c6c260 100644
--- a/third_party/pylint/reporters/html.py
+++ b/third_party/pylint/reporters/html.py
@@ -1,5 +1,4 @@
-# Copyright (c) 2003-2006 Sylvain Thenault (thenault@gmail.com).
-# Copyright (c) 2003-2011 LOGILAB S.A. (Paris, FRANCE).
+# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
 # This program is free software; you can redistribute it and/or modify it under
 # the terms of the GNU General Public License as published by the Free Software
 # Foundation; either version 2 of the License, or (at your option) any later
@@ -11,7 +10,7 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """HTML reporter"""
 
 import sys
@@ -27,20 +26,17 @@
     """report messages and layouts in HTML"""
 
     __implements__ = IReporter
+    name = 'html'
     extension = 'html'
 
     def __init__(self, output=sys.stdout):
         BaseReporter.__init__(self, output)
         self.msgs = []
 
-    def add_message(self, msg_id, location, msg):
+    def handle_message(self, msg):
         """manage message of different type and in the context of path"""
-        module, obj, line, col_offset = location[1:]
-        if self.include_ids:
-            sigle = msg_id
-        else:
-            sigle = msg_id[0]
-        self.msgs += [sigle, module, obj, str(line), str(col_offset), escape(msg)]
+        self.msgs += (msg.category, msg.module, msg.obj,
+                      str(msg.line), str(msg.column), escape(msg.msg))
 
     def set_output(self, output=None):
         """set output stream
@@ -67,3 +63,7 @@
             self.msgs = []
         HTMLWriter().format(layout, self.out)
 
+
+def register(linter):
+    """Register the reporter classes with the linter."""
+    linter.register_reporter(HTMLReporter)
diff --git a/third_party/pylint/reporters/json.py b/third_party/pylint/reporters/json.py
new file mode 100644
index 0000000..7dba52b
--- /dev/null
+++ b/third_party/pylint/reporters/json.py
@@ -0,0 +1,58 @@
+# Copyright (c) 2003-2014 LOGILAB S.A. (Paris, FRANCE).
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+"""JSON reporter"""
+from __future__ import absolute_import, print_function
+
+import json
+import sys
+from cgi import escape
+
+from pylint.interfaces import IReporter
+from pylint.reporters import BaseReporter
+
+
+class JSONReporter(BaseReporter):
+    """Report messages and layouts in JSON."""
+
+    __implements__ = IReporter
+    name = 'json'
+    extension = 'json'
+
+    def __init__(self, output=sys.stdout):
+        BaseReporter.__init__(self, output)
+        self.messages = []
+
+    def handle_message(self, message):
+        """Manage message of different type and in the context of path."""
+
+        self.messages.append({
+            'type': message.category,
+            'module': message.module,
+            'obj': message.obj,
+            'line': message.line,
+            'column': message.column,
+            'path': message.path,
+            'symbol': message.symbol,
+            'message': escape(message.msg or ''),
+        })
+
+    def _display(self, layout):
+        """Launch layouts display"""
+        if self.messages:
+            print(json.dumps(self.messages, indent=4), file=self.out)
+
+
+def register(linter):
+    """Register the reporter classes with the linter."""
+    linter.register_reporter(JSONReporter)
diff --git a/third_party/pylint/reporters/text.py b/third_party/pylint/reporters/text.py
index 032df6b..53c4a8d 100644
--- a/third_party/pylint/reporters/text.py
+++ b/third_party/pylint/reporters/text.py
@@ -1,5 +1,4 @@
-# Copyright (c) 2003-2007 Sylvain Thenault (thenault@gmail.com).
-# Copyright (c) 2003-2011 LOGILAB S.A. (Paris, FRANCE).
+# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
 # This program is free software; you can redistribute it and/or modify it under
 # the terms of the GNU General Public License as published by the Free Software
 # Foundation; either version 2 of the License, or (at your option) any later
@@ -11,60 +10,59 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """Plain text reporters:
 
 :text: the default one grouping messages by module
-:parseable:
-  standard parseable output with full module path on each message (for
-  editor integration)
 :colorized: an ANSI colorized text reporter
-
 """
+from __future__ import print_function
 
-import os
-import sys
+import warnings
 
 from logilab.common.ureports import TextWriter
 from logilab.common.textutils import colorize_ansi
 
 from pylint.interfaces import IReporter
 from pylint.reporters import BaseReporter
+import six
 
 TITLE_UNDERLINES = ['', '=', '-', '.']
 
 
 class TextReporter(BaseReporter):
-    """reports messages and layouts in plain text
-    """
+    """reports messages and layouts in plain text"""
 
     __implements__ = IReporter
+    name = 'text'
     extension = 'txt'
+    line_format = '{C}:{line:3d},{column:2d}: {msg} ({symbol})'
 
-    def __init__(self, output=sys.stdout):
+    def __init__(self, output=None):
         BaseReporter.__init__(self, output)
-        self._modules = {}
+        self._modules = set()
+        self._template = None
 
-    def add_message(self, msg_id, location, msg):
+    def on_set_current_module(self, module, filepath):
+        self._template = six.text_type(self.linter.config.msg_template or self.line_format)
+
+    def write_message(self, msg):
+        """Convenience method to write a formated message with class default template"""
+        self.writeln(msg.format(self._template))
+
+    def handle_message(self, msg):
         """manage message of different type and in the context of path"""
-        path, module, obj, line, col_offset = location
-        if module not in self._modules:
-            if module:
-                self.writeln('************* Module %s' % (path if path else module))
-                self._modules[module] = 1
+        if msg.module not in self._modules:
+            if msg.module:
+                self.writeln('************* Module %s' % msg.module)
+                self._modules.add(msg.module)
             else:
-                self.writeln('************* %s' % module)
-        if obj:
-            obj = ':%s' % obj
-        if self.include_ids:
-            sigle = msg_id
-        else:
-            sigle = msg_id[0]
-        self.writeln('%s:%3s,%s%s: %s' % (sigle, line, col_offset, obj, msg))
+                self.writeln('************* ')
+        self.write_message(msg)
 
     def _display(self, layout):
         """launch layouts display"""
-        print >> self.out
+        print(file=self.out)
         TextWriter().format(layout, self.out)
 
 
@@ -74,35 +72,26 @@
 
     <filename>:<linenum>:<msg>
     """
-    line_format = '%(path)s:%(line)s: [%(sigle)s%(obj)s] %(msg)s'
+    name = 'parseable'
+    line_format = '{path}:{line}: [{msg_id}({symbol}), {obj}] {msg}'
 
-    def __init__(self, output=sys.stdout, relative=True):
+    def __init__(self, output=None):
+        warnings.warn('%s output format is deprecated. This is equivalent '
+                      'to --msg-template=%s' % (self.name, self.line_format),
+                      DeprecationWarning)
         TextReporter.__init__(self, output)
-        if relative:
-            self._prefix = os.getcwd() + os.sep
-        else:
-            self._prefix = ''
 
-    def add_message(self, msg_id, location, msg):
-        """manage message of different type and in the context of path"""
-        path, _, obj, line, _ = location
-        if obj:
-            obj = ', %s' % obj
-        if self.include_ids:
-            sigle = msg_id
-        else:
-            sigle = msg_id[0]
-        if self._prefix:
-            path = path.replace(self._prefix, '')
-        self.writeln(self.line_format % locals())
 
 class VSTextReporter(ParseableTextReporter):
     """Visual studio text reporter"""
-    line_format = '%(path)s(%(line)s): [%(sigle)s%(obj)s] %(msg)s'
+    name = 'msvs'
+    line_format = '{path}({line}): [{msg_id}({symbol}){obj}] {msg}'
+
 
 class ColorizedTextReporter(TextReporter):
     """Simple TextReporter that colorizes text output"""
 
+    name = 'colorized'
     COLOR_MAPPING = {
         "I" : ("green", None),
         'C' : (None, "bold"),
@@ -113,12 +102,11 @@
         'S' : ("yellow", "inverse"), # S stands for module Separator
     }
 
-    def __init__(self, output=sys.stdout, color_mapping = None):
+    def __init__(self, output=None, color_mapping=None):
         TextReporter.__init__(self, output)
         self.color_mapping = color_mapping or \
                              dict(ColorizedTextReporter.COLOR_MAPPING)
 
-
     def _get_decoration(self, msg_id):
         """Returns the tuple color, style associated with msg_id as defined
         in self.color_mapping
@@ -128,28 +116,31 @@
         except KeyError:
             return None, None
 
-    def add_message(self, msg_id, location, msg):
+    def handle_message(self, msg):
         """manage message of different types, and colorize output
         using ansi escape codes
         """
-        module, obj, line, _ = location[1:]
-        if module not in self._modules:
+        if msg.module not in self._modules:
             color, style = self._get_decoration('S')
-            if module:
-                modsep = colorize_ansi('************* Module %s' % module,
+            if msg.module:
+                modsep = colorize_ansi('************* Module %s' % msg.module,
                                        color, style)
             else:
-                modsep = colorize_ansi('************* %s' % module,
+                modsep = colorize_ansi('************* %s' % msg.module,
                                        color, style)
             self.writeln(modsep)
-            self._modules[module] = 1
-        if obj:
-            obj = ':%s' % obj
-        if self.include_ids:
-            sigle = msg_id
-        else:
-            sigle = msg_id[0]
-        color, style = self._get_decoration(sigle)
-        msg = colorize_ansi(msg, color, style)
-        sigle = colorize_ansi(sigle, color, style)
-        self.writeln('%s:%3s%s: %s' % (sigle, line, obj, msg))
+            self._modules.add(msg.module)
+        color, style = self._get_decoration(msg.C)
+
+        msg = msg._replace(
+            **{attr: colorize_ansi(getattr(msg, attr), color, style)
+               for attr in ('msg', 'symbol', 'category', 'C')})
+        self.write_message(msg)
+
+
+def register(linter):
+    """Register the reporter classes with the linter."""
+    linter.register_reporter(TextReporter)
+    linter.register_reporter(ParseableTextReporter)
+    linter.register_reporter(VSTextReporter)
+    linter.register_reporter(ColorizedTextReporter)
diff --git a/third_party/pylint/testutils.py b/third_party/pylint/testutils.py
new file mode 100644
index 0000000..2f9af4d
--- /dev/null
+++ b/third_party/pylint/testutils.py
@@ -0,0 +1,412 @@
+# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
+# http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+"""functional/non regression tests for pylint"""
+from __future__ import print_function
+
+import collections
+import contextlib
+import functools
+import os
+import sys
+import re
+import unittest
+import tempfile
+import tokenize
+
+from glob import glob
+from os import linesep, getcwd, sep
+from os.path import abspath, basename, dirname, isdir, join, splitext
+
+from astroid import test_utils
+
+from pylint import checkers
+from pylint.utils import PyLintASTWalker
+from pylint.reporters import BaseReporter
+from pylint.interfaces import IReporter
+from pylint.lint import PyLinter
+
+import six
+from six.moves import StringIO
+
+
+# Utils
+
+SYS_VERS_STR = '%d%d%d' % sys.version_info[:3]
+TITLE_UNDERLINES = ['', '=', '-', '.']
+PREFIX = abspath(dirname(__file__))
+PY3K = sys.version_info[0] == 3
+
+def fix_path():
+    sys.path.insert(0, PREFIX)
+
+def get_tests_info(input_dir, msg_dir, prefix, suffix):
+    """get python input examples and output messages
+
+    We use following conventions for input files and messages:
+    for different inputs:
+        test for python  >= x.y    ->  input   =  <name>_pyxy.py
+        test for python  <  x.y    ->  input   =  <name>_py_xy.py
+    for one input and different messages:
+        message for python >=  x.y ->  message =  <name>_pyxy.txt
+        lower versions             ->  message with highest num
+    """
+    result = []
+    for fname in glob(join(input_dir, prefix + '*' + suffix)):
+        infile = basename(fname)
+        fbase = splitext(infile)[0]
+        # filter input files :
+        pyrestr = fbase.rsplit('_py', 1)[-1] # like _26 or 26
+        if pyrestr.isdigit(): # '24', '25'...
+            if SYS_VERS_STR < pyrestr:
+                continue
+        if pyrestr.startswith('_') and  pyrestr[1:].isdigit():
+            # skip test for higher python versions
+            if SYS_VERS_STR >= pyrestr[1:]:
+                continue
+        messages = glob(join(msg_dir, fbase + '*.txt'))
+        # the last one will be without ext, i.e. for all or upper versions:
+        if messages:
+            for outfile in sorted(messages, reverse=True):
+                py_rest = outfile.rsplit('_py', 1)[-1][:-4]
+                if py_rest.isdigit() and SYS_VERS_STR >= py_rest:
+                    break
+        else:
+            # This will provide an error message indicating the missing filename.
+            outfile = join(msg_dir, fbase + '.txt')
+        result.append((infile, outfile))
+    return result
+
+
+class TestReporter(BaseReporter):
+    """reporter storing plain text messages"""
+
+    __implements____ = IReporter
+
+    def __init__(self): # pylint: disable=super-init-not-called
+
+        self.message_ids = {}
+        self.reset()
+        self.path_strip_prefix = getcwd() + sep
+
+    def reset(self):
+        self.out = StringIO()
+        self.messages = []
+
+    def add_message(self, msg_id, location, msg):
+        """manage message of different type and in the context of path """
+        _, _, obj, line, _ = location
+        self.message_ids[msg_id] = 1
+        if obj:
+            obj = ':%s' % obj
+        sigle = msg_id[0]
+        if PY3K and linesep != '\n':
+            # 2to3 writes os.linesep instead of using
+            # the previously used line separators
+            msg = msg.replace('\r\n', '\n')
+        self.messages.append('%s:%3s%s: %s' % (sigle, line, obj, msg))
+
+    def finalize(self):
+        self.messages.sort()
+        for msg in self.messages:
+            print(msg, file=self.out)
+        result = self.out.getvalue()
+        self.reset()
+        return result
+
+    def display_results(self, layout):
+        """ignore layouts"""
+
+
+class Message(collections.namedtuple('Message',
+                                     ['msg_id', 'line', 'node', 'args'])):
+    def __new__(cls, msg_id, line=None, node=None, args=None):
+        return tuple.__new__(cls, (msg_id, line, node, args))
+
+
+class UnittestLinter(object):
+    """A fake linter class to capture checker messages."""
+    # pylint: disable=unused-argument, no-self-use
+
+    def __init__(self):
+        self._messages = []
+        self.stats = {}
+
+    def release_messages(self):
+        try:
+            return self._messages
+        finally:
+            self._messages = []
+
+    def add_message(self, msg_id, line=None, node=None, args=None,
+                    confidence=None):
+        self._messages.append(Message(msg_id, line, node, args))
+
+    def is_message_enabled(self, *unused_args):
+        return True
+
+    def add_stats(self, **kwargs):
+        for name, value in six.iteritems(kwargs):
+            self.stats[name] = value
+        return self.stats
+
+    @property
+    def options_providers(self):
+        return linter.options_providers
+
+def set_config(**kwargs):
+    """Decorator for setting config values on a checker."""
+    def _Wrapper(fun):
+        @functools.wraps(fun)
+        def _Forward(self):
+            for key, value in six.iteritems(kwargs):
+                setattr(self.checker.config, key, value)
+            if isinstance(self, CheckerTestCase):
+                # reopen checker in case, it may be interested in configuration change
+                self.checker.open()
+            fun(self)
+
+        return _Forward
+    return _Wrapper
+
+
+class CheckerTestCase(unittest.TestCase):
+    """A base testcase class for unittesting individual checker classes."""
+    CHECKER_CLASS = None
+    CONFIG = {}
+
+    def setUp(self):
+        self.linter = UnittestLinter()
+        self.checker = self.CHECKER_CLASS(self.linter) # pylint: disable=not-callable
+        for key, value in six.iteritems(self.CONFIG):
+            setattr(self.checker.config, key, value)
+        self.checker.open()
+
+    @contextlib.contextmanager
+    def assertNoMessages(self):
+        """Assert that no messages are added by the given method."""
+        with self.assertAddsMessages():
+            yield
+
+    @contextlib.contextmanager
+    def assertAddsMessages(self, *messages):
+        """Assert that exactly the given method adds the given messages.
+
+        The list of messages must exactly match *all* the messages added by the
+        method. Additionally, we check to see whether the args in each message can
+        actually be substituted into the message string.
+        """
+        yield
+        got = self.linter.release_messages()
+        msg = ('Expected messages did not match actual.\n'
+               'Expected:\n%s\nGot:\n%s' % ('\n'.join(repr(m) for m in messages),
+                                            '\n'.join(repr(m) for m in got)))
+        self.assertEqual(list(messages), got, msg)
+
+    def walk(self, node):
+        """recursive walk on the given node"""
+        walker = PyLintASTWalker(linter)
+        walker.add_checker(self.checker)
+        walker.walk(node)
+
+
+# Init
+test_reporter = TestReporter()
+linter = PyLinter()
+linter.set_reporter(test_reporter)
+linter.config.persistent = 0
+checkers.initialize(linter)
+linter.global_set_option('required-attributes', ('__revision__',))
+
+if linesep != '\n':
+    LINE_RGX = re.compile(linesep)
+    def ulines(string):
+        return LINE_RGX.sub('\n', string)
+else:
+    def ulines(string):
+        return string
+
+INFO_TEST_RGX = re.compile(r'^func_i\d\d\d\d$')
+
+def exception_str(self, ex): # pylint: disable=unused-argument
+    """function used to replace default __str__ method of exception instances"""
+    return 'in %s\n:: %s' % (ex.file, ', '.join(ex.args))
+
+# Test classes
+
+class LintTestUsingModule(unittest.TestCase):
+    INPUT_DIR = None
+    DEFAULT_PACKAGE = 'input'
+    package = DEFAULT_PACKAGE
+    linter = linter
+    module = None
+    depends = None
+    output = None
+    _TEST_TYPE = 'module'
+    maxDiff = None
+
+    def shortDescription(self):
+        values = {'mode' : self._TEST_TYPE,
+                  'input': self.module,
+                  'pkg':   self.package,
+                  'cls':   self.__class__.__name__}
+
+        if self.package == self.DEFAULT_PACKAGE:
+            msg = '%(mode)s test of input file "%(input)s" (%(cls)s)'
+        else:
+            msg = '%(mode)s test of input file "%(input)s" in "%(pkg)s" (%(cls)s)'
+        return msg % values
+
+    def test_functionality(self):
+        tocheck = [self.package+'.'+self.module]
+        if self.depends:
+            tocheck += [self.package+'.%s' % name.replace('.py', '')
+                        for name, _ in self.depends]
+        self._test(tocheck)
+
+    def _check_result(self, got):
+        self.assertMultiLineEqual(self._get_expected().strip()+'\n',
+                                  got.strip()+'\n')
+
+    def _test(self, tocheck):
+        if INFO_TEST_RGX.match(self.module):
+            self.linter.enable('I')
+        else:
+            self.linter.disable('I')
+        try:
+            self.linter.check(tocheck)
+        except Exception as ex:
+            # need finalization to restore a correct state
+            self.linter.reporter.finalize()
+            ex.file = tocheck
+            print(ex)
+            ex.__str__ = exception_str
+            raise
+        self._check_result(self.linter.reporter.finalize())
+
+    def _has_output(self):
+        return not self.module.startswith('func_noerror_')
+
+    def _get_expected(self):
+        if self._has_output() and self.output:
+            with open(self.output, 'U') as fobj:
+                return fobj.read().strip() + '\n'
+        else:
+            return ''
+
+class LintTestUsingFile(LintTestUsingModule):
+
+    _TEST_TYPE = 'file'
+
+    def test_functionality(self):
+        importable = join(self.INPUT_DIR, self.module)
+        # python also prefers packages over simple modules.
+        if not isdir(importable):
+            importable += '.py'
+        tocheck = [importable]
+        if self.depends:
+            tocheck += [join(self.INPUT_DIR, name) for name, _file in self.depends]
+        self._test(tocheck)
+
+class LintTestUpdate(LintTestUsingModule):
+
+    _TEST_TYPE = 'update'
+
+    def _check_result(self, got):
+        if self._has_output():
+            try:
+                expected = self._get_expected()
+            except IOError:
+                expected = ''
+            if got != expected:
+                with open(self.output, 'w') as fobj:
+                    fobj.write(got)
+
+# Callback
+
+def cb_test_gen(base_class):
+    def call(input_dir, msg_dir, module_file, messages_file, dependencies):
+        # pylint: disable=no-init
+        class LintTC(base_class):
+            module = module_file.replace('.py', '')
+            output = messages_file
+            depends = dependencies or None
+            INPUT_DIR = input_dir
+            MSG_DIR = msg_dir
+        return LintTC
+    return call
+
+# Main function
+
+def make_tests(input_dir, msg_dir, filter_rgx, callbacks):
+    """generate tests classes from test info
+
+    return the list of generated test classes
+    """
+    if filter_rgx:
+        is_to_run = re.compile(filter_rgx).search
+    else:
+        is_to_run = lambda x: 1
+    tests = []
+    for module_file, messages_file in (
+            get_tests_info(input_dir, msg_dir, 'func_', '')
+    ):
+        if not is_to_run(module_file) or module_file.endswith('.pyc'):
+            continue
+        base = module_file.replace('func_', '').replace('.py', '')
+
+        dependencies = get_tests_info(input_dir, msg_dir, base, '.py')
+
+        for callback in callbacks:
+            test = callback(input_dir, msg_dir, module_file, messages_file,
+                            dependencies)
+            if test:
+                tests.append(test)
+    return tests
+
+def tokenize_str(code):
+    return list(tokenize.generate_tokens(StringIO(code).readline))
+
+@contextlib.contextmanager
+def create_tempfile(content=None):
+    """Create a new temporary file.
+
+    If *content* parameter is given, then it will be written
+    in the temporary file, before passing it back.
+    This is a context manager and should be used with a *with* statement.
+    """
+    # Can't use tempfile.NamedTemporaryFile here
+    # because on Windows the file must be closed before writing to it,
+    # see http://bugs.python.org/issue14243
+    fd, tmp = tempfile.mkstemp()
+    if content:
+        if sys.version_info >= (3, 0):
+            # erff
+            os.write(fd, bytes(content, 'ascii'))
+        else:
+            os.write(fd, content)
+    try:
+        yield tmp
+    finally:
+        os.close(fd)
+        os.remove(tmp)
+
+@contextlib.contextmanager
+def create_file_backed_module(code):
+    """Create an astroid module for the given code, backed by a real file."""
+    with create_tempfile() as temp:
+        module = test_utils.build_module(code)
+        module.file = temp
+        yield module
diff --git a/third_party/pylint/utils.py b/third_party/pylint/utils.py
index 9c8e8f4..6685c4a 100644
--- a/third_party/pylint/utils.py
+++ b/third_party/pylint/utils.py
@@ -1,5 +1,4 @@
-# Copyright (c) 2003-2010 Sylvain Thenault (thenault@gmail.com).
-# Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
+# Copyright (c) 2003-2014 LOGILAB S.A. (Paris, FRANCE).
 # http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This program is free software; you can redistribute it and/or modify it under
@@ -13,29 +12,41 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """some various utilities and helper classes, most of them used in the
 main pylint class
 """
+from __future__ import print_function
 
+import collections
+import os
+import re
 import sys
-from os import linesep
+import tokenize
+import warnings
 from os.path import dirname, basename, splitext, exists, isdir, join, normpath
 
-from logilab.common.modutils import modpath_from_file, get_module_files, \
-                                    file_from_modpath
+import six
+from six.moves import zip  # pylint: disable=redefined-builtin
+
+from logilab.common.interface import implements
 from logilab.common.textutils import normalize_text
 from logilab.common.configuration import rest_format_section
 from logilab.common.ureports import Section
 
-from logilab.astng import nodes, Module
+from astroid import nodes, Module
+from astroid.modutils import modpath_from_file, get_module_files, \
+    file_from_modpath, load_module_from_file
 
-from pylint.checkers import EmptyReport
+from pylint.interfaces import IRawChecker, ITokenChecker, UNDEFINED
 
 
 class UnknownMessage(Exception):
     """raised when a unregistered message id is encountered"""
 
+class EmptyReport(Exception):
+    """raised when a report is empty and so should not be displayed"""
+
 
 MSG_TYPES = {
     'I' : 'info',
@@ -45,7 +56,7 @@
     'E' : 'error',
     'F' : 'fatal'
     }
-MSG_TYPES_LONG = dict([(v, k) for k, v in MSG_TYPES.iteritems()])
+MSG_TYPES_LONG = {v: k for k, v in six.iteritems(MSG_TYPES)}
 
 MSG_TYPES_STATUS = {
     'I' : 0,
@@ -57,17 +68,42 @@
     }
 
 _MSG_ORDER = 'EWRCIF'
+MSG_STATE_SCOPE_CONFIG = 0
+MSG_STATE_SCOPE_MODULE = 1
+MSG_STATE_CONFIDENCE = 2
 
-def sort_msgs(msgids):
-    """sort message identifiers according to their category first"""
-    msgs = {}
-    for msg in msgids:
-        msgs.setdefault(msg[0], []).append(msg)
-    result = []
-    for m_id in _MSG_ORDER:
-        if m_id in msgs:
-            result.extend( sorted(msgs[m_id]) )
-    return result
+OPTION_RGX = re.compile(r'\s*#.*\bpylint:(.*)')
+
+# The line/node distinction does not apply to fatal errors and reports.
+_SCOPE_EXEMPT = 'FR'
+
+class WarningScope(object):
+    LINE = 'line-based-msg'
+    NODE = 'node-based-msg'
+
+_MsgBase = collections.namedtuple(
+    '_MsgBase',
+    ['msg_id', 'symbol', 'msg', 'C', 'category', 'confidence',
+     'abspath', 'path', 'module', 'obj', 'line', 'column'])
+
+
+class Message(_MsgBase):
+    """This class represent a message to be issued by the reporters"""
+    def __new__(cls, msg_id, symbol, location, msg, confidence):
+        return _MsgBase.__new__(
+            cls, msg_id, symbol, msg, msg_id[0], MSG_TYPES[msg_id[0]],
+            confidence, *location)
+
+    def format(self, template):
+        """Format the message according to the given template.
+
+        The template format is the one of the format method :
+        cf. http://docs.python.org/2/library/string.html#formatstrings
+        """
+        # For some reason, _asdict on derived namedtuples does not work with
+        # Python 3.4. Needs some investigation.
+        return template.format(**dict(zip(self._fields, self)))
+
 
 def get_module_and_frameid(node):
     """return the module name and the frame id in the module"""
@@ -85,36 +121,505 @@
     obj.reverse()
     return module, '.'.join(obj)
 
-def category_id(id):
-    id = id.upper()
-    if id in MSG_TYPES:
-        return id
-    return MSG_TYPES_LONG.get(id)
+def category_id(cid):
+    cid = cid.upper()
+    if cid in MSG_TYPES:
+        return cid
+    return MSG_TYPES_LONG.get(cid)
 
 
-class Message:
-    def __init__(self, checker, msgid, msg, descr):
+def _decoding_readline(stream, module):
+    return lambda: stream.readline().decode(module.file_encoding,
+                                           'replace')
+
+
+def tokenize_module(module):
+    with module.stream() as stream:
+        readline = stream.readline
+        if sys.version_info < (3, 0):
+            if module.file_encoding is not None:
+                readline = _decoding_readline(stream, module)
+            return list(tokenize.generate_tokens(readline))
+        return list(tokenize.tokenize(readline))
+
+def build_message_def(checker, msgid, msg_tuple):
+    if implements(checker, (IRawChecker, ITokenChecker)):
+        default_scope = WarningScope.LINE
+    else:
+        default_scope = WarningScope.NODE
+    options = {}
+    if len(msg_tuple) > 3:
+        (msg, symbol, descr, options) = msg_tuple
+    elif len(msg_tuple) > 2:
+        (msg, symbol, descr) = msg_tuple[:3]
+    else:
+        # messages should have a symbol, but for backward compatibility
+        # they may not.
+        (msg, descr) = msg_tuple
+        warnings.warn("[pylint 0.26] description of message %s doesn't include "
+                      "a symbolic name" % msgid, DeprecationWarning)
+        symbol = None
+    options.setdefault('scope', default_scope)
+    return MessageDefinition(checker, msgid, msg, descr, symbol, **options)
+
+
+class MessageDefinition(object):
+    def __init__(self, checker, msgid, msg, descr, symbol, scope,
+                 minversion=None, maxversion=None, old_names=None):
+        self.checker = checker
         assert len(msgid) == 5, 'Invalid message id %s' % msgid
         assert msgid[0] in MSG_TYPES, \
                'Bad message type %s in %r' % (msgid[0], msgid)
         self.msgid = msgid
         self.msg = msg
         self.descr = descr
-        self.checker = checker
+        self.symbol = symbol
+        self.scope = scope
+        self.minversion = minversion
+        self.maxversion = maxversion
+        self.old_names = old_names or []
 
-class MessagesHandlerMixIn:
+    def may_be_emitted(self):
+        """return True if message may be emitted using the current interpreter"""
+        if self.minversion is not None and self.minversion > sys.version_info:
+            return False
+        if self.maxversion is not None and self.maxversion <= sys.version_info:
+            return False
+        return True
+
+    def format_help(self, checkerref=False):
+        """return the help string for the given message id"""
+        desc = self.descr
+        if checkerref:
+            desc += ' This message belongs to the %s checker.' % \
+                   self.checker.name
+        title = self.msg
+        if self.symbol:
+            msgid = '%s (%s)' % (self.symbol, self.msgid)
+        else:
+            msgid = self.msgid
+        if self.minversion or self.maxversion:
+            restr = []
+            if self.minversion:
+                restr.append('< %s' % '.'.join([str(n) for n in self.minversion]))
+            if self.maxversion:
+                restr.append('>= %s' % '.'.join([str(n) for n in self.maxversion]))
+            restr = ' or '.join(restr)
+            if checkerref:
+                desc += " It can't be emitted when using Python %s." % restr
+            else:
+                desc += " This message can't be emitted when using Python %s." % restr
+        desc = normalize_text(' '.join(desc.split()), indent='  ')
+        if title != '%s':
+            title = title.splitlines()[0]
+            return ':%s: *%s*\n%s' % (msgid, title, desc)
+        return ':%s:\n%s' % (msgid, desc)
+
+
+class MessagesHandlerMixIn(object):
     """a mix-in class containing all the messages related methods for the main
     lint class
     """
 
     def __init__(self):
-        # dictionary of registered messages
-        self._messages = {}
         self._msgs_state = {}
-        self._module_msgs_state = {} # None
-        self._msgs_by_category = {}
         self.msg_status = 0
 
+    def disable(self, msgid, scope='package', line=None, ignore_unknown=False):
+        """don't output message of the given id"""
+        assert scope in ('package', 'module')
+        # handle disable=all by disabling all categories
+        if msgid == 'all':
+            for msgid in MSG_TYPES:
+                self.disable(msgid, scope, line)
+            return
+        # msgid is a category?
+        catid = category_id(msgid)
+        if catid is not None:
+            for _msgid in self.msgs_store._msgs_by_category.get(catid):
+                self.disable(_msgid, scope, line)
+            return
+        # msgid is a checker name?
+        if msgid.lower() in self._checkers:
+            msgs_store = self.msgs_store
+            for checker in self._checkers[msgid.lower()]:
+                for _msgid in checker.msgs:
+                    if _msgid in msgs_store._alternative_names:
+                        self.disable(_msgid, scope, line)
+            return
+        # msgid is report id?
+        if msgid.lower().startswith('rp'):
+            self.disable_report(msgid)
+            return
+
+        try:
+            # msgid is a symbolic or numeric msgid.
+            msg = self.msgs_store.check_message_id(msgid)
+        except UnknownMessage:
+            if ignore_unknown:
+                return
+            raise
+
+        if scope == 'module':
+            self.file_state.set_msg_status(msg, line, False)
+            if msg.symbol != 'locally-disabled':
+                self.add_message('locally-disabled', line=line,
+                                 args=(msg.symbol, msg.msgid))
+
+        else:
+            msgs = self._msgs_state
+            msgs[msg.msgid] = False
+            # sync configuration object
+            self.config.disable = [mid for mid, val in six.iteritems(msgs)
+                                   if not val]
+
+    def enable(self, msgid, scope='package', line=None, ignore_unknown=False):
+        """reenable message of the given id"""
+        assert scope in ('package', 'module')
+        catid = category_id(msgid)
+        # msgid is a category?
+        if catid is not None:
+            for msgid in self.msgs_store._msgs_by_category.get(catid):
+                self.enable(msgid, scope, line)
+            return
+        # msgid is a checker name?
+        if msgid.lower() in self._checkers:
+            for checker in self._checkers[msgid.lower()]:
+                for msgid_ in checker.msgs:
+                    self.enable(msgid_, scope, line)
+            return
+        # msgid is report id?
+        if msgid.lower().startswith('rp'):
+            self.enable_report(msgid)
+            return
+
+        try:
+            # msgid is a symbolic or numeric msgid.
+            msg = self.msgs_store.check_message_id(msgid)
+        except UnknownMessage:
+            if ignore_unknown:
+                return
+            raise
+
+        if scope == 'module':
+            self.file_state.set_msg_status(msg, line, True)
+            self.add_message('locally-enabled', line=line, args=(msg.symbol, msg.msgid))
+        else:
+            msgs = self._msgs_state
+            msgs[msg.msgid] = True
+            # sync configuration object
+            self.config.enable = [mid for mid, val in six.iteritems(msgs) if val]
+
+    def get_message_state_scope(self, msgid, line=None, confidence=UNDEFINED):
+        """Returns the scope at which a message was enabled/disabled."""
+        if self.config.confidence and confidence.name not in self.config.confidence:
+            return MSG_STATE_CONFIDENCE
+        try:
+            if line in self.file_state._module_msgs_state[msgid]:
+                return MSG_STATE_SCOPE_MODULE
+        except (KeyError, TypeError):
+            return MSG_STATE_SCOPE_CONFIG
+
+    def is_message_enabled(self, msg_descr, line=None, confidence=None):
+        """return true if the message associated to the given message id is
+        enabled
+
+        msgid may be either a numeric or symbolic message id.
+        """
+        if self.config.confidence and confidence:
+            if confidence.name not in self.config.confidence:
+                return False
+        try:
+            msgid = self.msgs_store.check_message_id(msg_descr).msgid
+        except UnknownMessage:
+            # The linter checks for messages that are not registered
+            # due to version mismatch, just treat them as message IDs
+            # for now.
+            msgid = msg_descr
+        if line is None:
+            return self._msgs_state.get(msgid, True)
+        try:
+            return self.file_state._module_msgs_state[msgid][line]
+        except KeyError:
+            return self._msgs_state.get(msgid, True)
+
+    def add_message(self, msg_descr, line=None, node=None, args=None, confidence=UNDEFINED):
+        """Adds a message given by ID or name.
+
+        If provided, the message string is expanded using args
+
+        AST checkers must provide the node argument (but may optionally
+        provide line if the line number is different), raw and token checkers
+        must provide the line argument.
+        """
+        msg_info = self.msgs_store.check_message_id(msg_descr)
+        msgid = msg_info.msgid
+        # backward compatibility, message may not have a symbol
+        symbol = msg_info.symbol or msgid
+        # Fatal messages and reports are special, the node/scope distinction
+        # does not apply to them.
+        if msgid[0] not in _SCOPE_EXEMPT:
+            if msg_info.scope == WarningScope.LINE:
+                assert node is None and line is not None, (
+                    'Message %s must only provide line, got line=%s, node=%s' % (msgid, line, node))
+            elif msg_info.scope == WarningScope.NODE:
+                # Node-based warnings may provide an override line.
+                assert node is not None, 'Message %s must provide Node, got None'
+
+        if line is None and node is not None:
+            line = node.fromlineno
+        if hasattr(node, 'col_offset'):
+            col_offset = node.col_offset # XXX measured in bytes for utf-8, divide by two for chars?
+        else:
+            col_offset = None
+        # should this message be displayed
+        if not self.is_message_enabled(msgid, line, confidence):
+            self.file_state.handle_ignored_message(
+                self.get_message_state_scope(msgid, line, confidence),
+                msgid, line, node, args, confidence)
+            return
+        # update stats
+        msg_cat = MSG_TYPES[msgid[0]]
+        self.msg_status |= MSG_TYPES_STATUS[msgid[0]]
+        self.stats[msg_cat] += 1
+        self.stats['by_module'][self.current_name][msg_cat] += 1
+        try:
+            self.stats['by_msg'][symbol] += 1
+        except KeyError:
+            self.stats['by_msg'][symbol] = 1
+        # expand message ?
+        msg = msg_info.msg
+        if args:
+            msg %= args
+        # get module and object
+        if node is None:
+            module, obj = self.current_name, ''
+            abspath = self.current_file
+        else:
+            module, obj = get_module_and_frameid(node)
+            abspath = node.root().file
+        path = abspath.replace(self.reporter.path_strip_prefix, '')
+        # add the message
+        self.reporter.handle_message(
+            Message(msgid, symbol,
+                    (abspath, path, module, obj, line or 1, col_offset or 0), msg, confidence))
+
+    def print_full_documentation(self):
+        """output a full documentation in ReST format"""
+        print("Pylint global options and switches")
+        print("----------------------------------")
+        print("")
+        print("Pylint provides global options and switches.")
+        print("")
+
+        by_checker = {}
+        for checker in self.get_checkers():
+            if checker.name == 'master':
+                if checker.options:
+                    for section, options in checker.options_by_section():
+                        if section is None:
+                            title = 'General options'
+                        else:
+                            title = '%s options' % section.capitalize()
+                        print(title)
+                        print('~' * len(title))
+                        rest_format_section(sys.stdout, None, options)
+                        print("")
+            else:
+                try:
+                    by_checker[checker.name][0] += checker.options_and_values()
+                    by_checker[checker.name][1].update(checker.msgs)
+                    by_checker[checker.name][2] += checker.reports
+                except KeyError:
+                    by_checker[checker.name] = [list(checker.options_and_values()),
+                                                dict(checker.msgs),
+                                                list(checker.reports)]
+
+        print("Pylint checkers' options and switches")
+        print("-------------------------------------")
+        print("")
+        print("Pylint checkers can provide three set of features:")
+        print("")
+        print("* options that control their execution,")
+        print("* messages that they can raise,")
+        print("* reports that they can generate.")
+        print("")
+        print("Below is a list of all checkers and their features.")
+        print("")
+
+        for checker, (options, msgs, reports) in six.iteritems(by_checker):
+            title = '%s checker' % (checker.replace("_", " ").title())
+            print(title)
+            print('~' * len(title))
+            print("")
+            print("Verbatim name of the checker is ``%s``." % checker)
+            print("")
+            if options:
+                title = 'Options'
+                print(title)
+                print('^' * len(title))
+                rest_format_section(sys.stdout, None, options)
+                print("")
+            if msgs:
+                title = 'Messages'
+                print(title)
+                print('~' * len(title))
+                for msgid, msg in sorted(six.iteritems(msgs),
+                                         key=lambda kv: (_MSG_ORDER.index(kv[0][0]), kv[1])):
+                    msg = build_message_def(checker, msgid, msg)
+                    print(msg.format_help(checkerref=False))
+                print("")
+            if reports:
+                title = 'Reports'
+                print(title)
+                print('~' * len(title))
+                for report in reports:
+                    print(':%s: %s' % report[:2])
+                print("")
+            print("")
+
+
+class FileState(object):
+    """Hold internal state specific to the currently analyzed file"""
+
+    def __init__(self, modname=None):
+        self.base_name = modname
+        self._module_msgs_state = {}
+        self._raw_module_msgs_state = {}
+        self._ignored_msgs = collections.defaultdict(set)
+        self._suppression_mapping = {}
+
+    def collect_block_lines(self, msgs_store, module_node):
+        """Walk the AST to collect block level options line numbers."""
+        for msg, lines in six.iteritems(self._module_msgs_state):
+            self._raw_module_msgs_state[msg] = lines.copy()
+        orig_state = self._module_msgs_state.copy()
+        self._module_msgs_state = {}
+        self._suppression_mapping = {}
+        self._collect_block_lines(msgs_store, module_node, orig_state)
+
+    def _collect_block_lines(self, msgs_store, node, msg_state):
+        """Recursively walk (depth first) AST to collect block level options line
+        numbers.
+        """
+        for child in node.get_children():
+            self._collect_block_lines(msgs_store, child, msg_state)
+        first = node.fromlineno
+        last = node.tolineno
+        # first child line number used to distinguish between disable
+        # which are the first child of scoped node with those defined later.
+        # For instance in the code below:
+        #
+        # 1.   def meth8(self):
+        # 2.        """test late disabling"""
+        # 3.        # pylint: disable=E1102
+        # 4.        print self.blip
+        # 5.        # pylint: disable=E1101
+        # 6.        print self.bla
+        #
+        # E1102 should be disabled from line 1 to 6 while E1101 from line 5 to 6
+        #
+        # this is necessary to disable locally messages applying to class /
+        # function using their fromlineno
+        if isinstance(node, (nodes.Module, nodes.Class, nodes.Function)) and node.body:
+            firstchildlineno = node.body[0].fromlineno
+        else:
+            firstchildlineno = last
+        for msgid, lines in six.iteritems(msg_state):
+            for lineno, state in list(lines.items()):
+                original_lineno = lineno
+                if first <= lineno <= last:
+                    # Set state for all lines for this block, if the
+                    # warning is applied to nodes.
+                    if  msgs_store.check_message_id(msgid).scope == WarningScope.NODE:
+                        if lineno > firstchildlineno:
+                            state = True
+                        first_, last_ = node.block_range(lineno)
+                    else:
+                        first_ = lineno
+                        last_ = last
+                    for line in range(first_, last_+1):
+                        # do not override existing entries
+                        if not line in self._module_msgs_state.get(msgid, ()):
+                            if line in lines: # state change in the same block
+                                state = lines[line]
+                                original_lineno = line
+                            if not state:
+                                self._suppression_mapping[(msgid, line)] = original_lineno
+                            try:
+                                self._module_msgs_state[msgid][line] = state
+                            except KeyError:
+                                self._module_msgs_state[msgid] = {line: state}
+                    del lines[lineno]
+
+    def set_msg_status(self, msg, line, status):
+        """Set status (enable/disable) for a given message at a given line"""
+        assert line > 0
+        try:
+            self._module_msgs_state[msg.msgid][line] = status
+        except KeyError:
+            self._module_msgs_state[msg.msgid] = {line: status}
+
+    def handle_ignored_message(self, state_scope, msgid, line,
+                               node, args, confidence): # pylint: disable=unused-argument
+        """Report an ignored message.
+
+        state_scope is either MSG_STATE_SCOPE_MODULE or MSG_STATE_SCOPE_CONFIG,
+        depending on whether the message was disabled locally in the module,
+        or globally. The other arguments are the same as for add_message.
+        """
+        if state_scope == MSG_STATE_SCOPE_MODULE:
+            try:
+                orig_line = self._suppression_mapping[(msgid, line)]
+                self._ignored_msgs[(msgid, orig_line)].add(line)
+            except KeyError:
+                pass
+
+    def iter_spurious_suppression_messages(self, msgs_store):
+        for warning, lines in six.iteritems(self._raw_module_msgs_state):
+            for line, enable in six.iteritems(lines):
+                if not enable and (warning, line) not in self._ignored_msgs:
+                    yield 'useless-suppression', line, \
+                        (msgs_store.get_msg_display_string(warning),)
+        # don't use iteritems here, _ignored_msgs may be modified by add_message
+        for (warning, from_), lines in list(self._ignored_msgs.items()):
+            for line in lines:
+                yield 'suppressed-message', line, \
+                    (msgs_store.get_msg_display_string(warning), from_)
+
+
+class MessagesStore(object):
+    """The messages store knows information about every possible message but has
+    no particular state during analysis.
+    """
+
+    def __init__(self):
+        # Primary registry for all active messages (i.e. all messages
+        # that can be emitted by pylint for the underlying Python
+        # version). It contains the 1:1 mapping from symbolic names
+        # to message definition objects.
+        self._messages = {}
+        # Maps alternative names (numeric IDs, deprecated names) to
+        # message definitions. May contain several names for each definition
+        # object.
+        self._alternative_names = {}
+        self._msgs_by_category = collections.defaultdict(list)
+
+    @property
+    def messages(self):
+        """The list of all active messages."""
+        return six.itervalues(self._messages)
+
+    def add_renamed_message(self, old_id, old_symbol, new_symbol):
+        """Register the old ID and symbol for a warning that was renamed.
+
+        This allows users to keep using the old ID/symbol in suppressions.
+        """
+        msg = self.check_message_id(new_symbol)
+        msg.old_names.append((old_id, old_symbol))
+        self._alternative_names[old_id] = msg
+        self._alternative_names[old_symbol] = msg
+
     def register_messages(self, checker):
         """register a dictionary of messages
 
@@ -124,247 +629,82 @@
         message ids should be a string of len 4, where the two first characters
         are the checker id and the two last the message id in this checker
         """
-        msgs_dict = checker.msgs
         chkid = None
-        for msgid, (msg, msgdescr) in msgs_dict.items():
+        for msgid, msg_tuple in six.iteritems(checker.msgs):
+            msg = build_message_def(checker, msgid, msg_tuple)
+            assert msg.symbol not in self._messages, \
+                    'Message symbol %r is already defined' % msg.symbol
             # avoid duplicate / malformed ids
-            assert msgid not in self._messages, \
+            assert msg.msgid not in self._alternative_names, \
                    'Message id %r is already defined' % msgid
-            assert chkid is None or chkid == msgid[1:3], \
+            assert chkid is None or chkid == msg.msgid[1:3], \
                    'Inconsistent checker part in message id %r' % msgid
-            chkid = msgid[1:3]
-            self._messages[msgid] = Message(checker, msgid, msg, msgdescr)
-            self._msgs_by_category.setdefault(msgid[0], []).append(msgid)
-
-    def get_message_help(self, msgid, checkerref=False):
-        """return the help string for the given message id"""
-        msg = self.check_message_id(msgid)
-        desc = normalize_text(' '.join(msg.descr.split()), indent='  ')
-        if checkerref:
-            desc += ' This message belongs to the %s checker.' % \
-                   msg.checker.name
-        title = msg.msg
-        if title != '%s':
-            title = title.splitlines()[0]
-            return ':%s: *%s*\n%s' % (msg.msgid, title, desc)
-        return ':%s:\n%s' % (msg.msgid, desc)
-
-    def disable(self, msgid, scope='package', line=None):
-        """don't output message of the given id"""
-        assert scope in ('package', 'module')
-        # msgid is a category?
-        catid = category_id(msgid)
-        if catid is not None:
-            for msgid in self._msgs_by_category.get(catid):
-                self.disable(msgid, scope, line)
-            return
-        # msgid is a checker name?
-        if msgid.lower() in self._checkers:
-            for checker in self._checkers[msgid.lower()]:
-                for msgid in checker.msgs:
-                    self.disable(msgid, scope, line)
-            return
-        # msgid is report id?
-        if msgid.lower().startswith('rp'):
-            self.disable_report(msgid)
-            return
-        # msgid is a msgid.
-        msg = self.check_message_id(msgid)
-        if scope == 'module':
-            assert line > 0
-            try:
-                self._module_msgs_state[msg.msgid][line] = False
-            except KeyError:
-                self._module_msgs_state[msg.msgid] = {line: False}
-                if msgid != 'I0011':
-                    self.add_message('I0011', line=line, args=msg.msgid)
-
-        else:
-            msgs = self._msgs_state
-            msgs[msg.msgid] = False
-            # sync configuration object
-            self.config.disable_msg = [mid for mid, val in msgs.items()
-                                       if not val]
-
-    def enable(self, msgid, scope='package', line=None):
-        """reenable message of the given id"""
-        assert scope in ('package', 'module')
-        catid = category_id(msgid)
-        # msgid is a category?
-        if catid is not None:
-            for msgid in self._msgs_by_category.get(catid):
-                self.enable(msgid, scope, line)
-            return
-        # msgid is a checker name?
-        if msgid.lower() in self._checkers:
-            for checker in self._checkers[msgid.lower()]:
-                for msgid in checker.msgs:
-                    self.enable(msgid, scope, line)
-            return
-        # msgid is report id?
-        if msgid.lower().startswith('rp'):
-            self.enable_report(msgid)
-            return
-        # msgid is a msgid.
-        msg = self.check_message_id(msgid)
-        if scope == 'module':
-            assert line > 0
-            try:
-                self._module_msgs_state[msg.msgid][line] = True
-            except KeyError:
-                self._module_msgs_state[msg.msgid] = {line: True}
-                self.add_message('I0012', line=line, args=msg.msgid)
-        else:
-            msgs = self._msgs_state
-            msgs[msg.msgid] = True
-            # sync configuration object
-            self.config.enable = [mid for mid, val in msgs.items() if val]
+            chkid = msg.msgid[1:3]
+            self._messages[msg.symbol] = msg
+            self._alternative_names[msg.msgid] = msg
+            for old_id, old_symbol in msg.old_names:
+                self._alternative_names[old_id] = msg
+                self._alternative_names[old_symbol] = msg
+            self._msgs_by_category[msg.msgid[0]].append(msg.msgid)
 
     def check_message_id(self, msgid):
-        """raise UnknownMessage if the message id is not defined"""
-        msgid = msgid.upper()
-        try:
-            return self._messages[msgid]
-        except KeyError:
-            raise UnknownMessage('No such message id %s' % msgid)
+        """returns the Message object for this message.
 
-    def is_message_enabled(self, msgid, line=None):
-        """return true if the message associated to the given message id is
-        enabled
+        msgid may be either a numeric or symbolic id.
+
+        Raises UnknownMessage if the message id is not defined.
         """
-        if line is None:
-            return self._msgs_state.get(msgid, True)
-        try:
-            return self._module_msgs_state[msgid][line]
-        except (KeyError, TypeError):
-            return self._msgs_state.get(msgid, True)
+        if msgid[1:].isdigit():
+            msgid = msgid.upper()
+        for source in (self._alternative_names, self._messages):
+            try:
+                return source[msgid]
+            except KeyError:
+                pass
+        raise UnknownMessage('No such message id %s' % msgid)
 
-    def add_message(self, msgid, line=None, node=None, args=None):
-        """add the message corresponding to the given id.
+    def get_msg_display_string(self, msgid):
+        """Generates a user-consumable representation of a message.
 
-        If provided, msg is expanded using args
-
-        astng checkers should provide the node argument, raw checkers should
-        provide the line argument.
+        Can be just the message ID or the ID and the symbol.
         """
-        if line is None and node is not None:
-            line = node.fromlineno
-        if hasattr(node, 'col_offset'):
-            col_offset = node.col_offset # XXX measured in bytes for utf-8, divide by two for chars?
-        else:
-            col_offset = None
-        # should this message be displayed
-        if not self.is_message_enabled(msgid, line):
-            return
-        # update stats
-        msg_cat = MSG_TYPES[msgid[0]]
-        self.msg_status |= MSG_TYPES_STATUS[msgid[0]]
-        self.stats[msg_cat] += 1
-        self.stats['by_module'][self.current_name][msg_cat] += 1
-        try:
-            self.stats['by_msg'][msgid] += 1
-        except KeyError:
-            self.stats['by_msg'][msgid] = 1
-        msg = self._messages[msgid].msg
-        # expand message ?
-        if args:
-            msg %= args
-        # get module and object
-        if node is None:
-            module, obj = self.current_name, ''
-            path = self.current_file
-        else:
-            module, obj = get_module_and_frameid(node)
-            path = node.root().file
-        # add the message
-        self.reporter.add_message(msgid, (path, module, obj, line or 1, col_offset or 0), msg)
+        return repr(self.check_message_id(msgid).symbol)
 
     def help_message(self, msgids):
         """display help messages for the given message identifiers"""
         for msgid in msgids:
             try:
-                print self.get_message_help(msgid, True)
-                print
-            except UnknownMessage, ex:
-                print ex
-                print
+                print(self.check_message_id(msgid).format_help(checkerref=True))
+                print("")
+            except UnknownMessage as ex:
+                print(ex)
+                print("")
                 continue
 
-    def print_full_documentation(self):
-        """output a full documentation in ReST format"""
-        by_checker = {}
-        for checker in self.get_checkers():
-            if checker.name == 'master':
-                prefix = 'Main '
-                print "Options"
-                print '-------\n'
-                if checker.options:
-                    for section, options in checker.options_by_section():
-                        if section is None:
-                            title = 'General options'
-                        else:
-                            title = '%s options' % section.capitalize()
-                        print title
-                        print '~' * len(title)
-                        rest_format_section(sys.stdout, None, options)
-                        print
-            else:
-                try:
-                    by_checker[checker.name][0] += checker.options_and_values()
-                    by_checker[checker.name][1].update(checker.msgs)
-                    by_checker[checker.name][2] += checker.reports
-                except KeyError:
-                    by_checker[checker.name] = [list(checker.options_and_values()),
-                                                dict(checker.msgs),
-                                                list(checker.reports)]
-        for checker, (options, msgs, reports) in by_checker.items():
-            prefix = ''
-            title = '%s checker' % checker
-            print title
-            print '-' * len(title)
-            print
-            if options:
-                title = 'Options'
-                print title
-                print '~' * len(title)
-                rest_format_section(sys.stdout, None, options)
-                print
-            if msgs:
-                title = ('%smessages' % prefix).capitalize()
-                print title
-                print '~' * len(title)
-                for msgid in sort_msgs(msgs.keys()):
-                    print self.get_message_help(msgid, False)
-                print
-            if reports:
-                title = ('%sreports' % prefix).capitalize()
-                print title
-                print '~' * len(title)
-                for report in reports:
-                    print ':%s: %s' % report[:2]
-                print
-            print
-
     def list_messages(self):
         """output full messages list documentation in ReST format"""
-        msgids = []
-        for checker in self.get_checkers():
-            for msgid in checker.msgs.keys():
-                msgids.append(msgid)
-        msgids.sort()
-        for msgid in msgids:
-            print self.get_message_help(msgid, False)
-        print
+        msgs = sorted(six.itervalues(self._messages), key=lambda msg: msg.msgid)
+        for msg in msgs:
+            if not msg.may_be_emitted():
+                continue
+            print(msg.format_help(checkerref=False))
+        print("")
 
 
-class ReportsHandlerMixIn:
+class ReportsHandlerMixIn(object):
     """a mix-in class containing all the reports and stats manipulation
     related methods for the main lint class
     """
     def __init__(self):
-        self._reports = {}
+        self._reports = collections.defaultdict(list)
         self._reports_state = {}
 
+    def report_order(self):
+        """ Return a list of reports, sorted in the order
+        in which they must be called.
+        """
+        return list(self._reports)
+
     def register_report(self, reportid, r_title, r_cb, checker):
         """register a report
 
@@ -374,7 +714,7 @@
         checker is the checker defining the report
         """
         reportid = reportid.upper()
-        self._reports.setdefault(checker, []).append( (reportid, r_title, r_cb) )
+        self._reports[checker].append((reportid, r_title, r_cb))
 
     def enable_report(self, reportid):
         """disable the report of the given id"""
@@ -394,12 +734,9 @@
 
     def make_reports(self, stats, old_stats):
         """render registered reports"""
-        if self.config.files_output:
-            filename = 'pylint_global.' + self.reporter.extension
-            self.reporter.set_output(open(filename, 'w'))
         sect = Section('Report',
                        '%s statements analysed.'% (self.stats['statement']))
-        for checker in self._reports:
+        for checker in self.report_order():
             for reportid, r_title, r_cb in self._reports[checker]:
                 if not self.report_is_enabled(reportid):
                     continue
@@ -410,13 +747,13 @@
                     continue
                 report_sect.report_id = reportid
                 sect.append(report_sect)
-        self.reporter.display_results(sect)
+        return sect
 
     def add_stats(self, **kwargs):
         """add some stats entries to the statistic dictionary
         raise an AssertionError if there is a key conflict
         """
-        for key, value in kwargs.items():
+        for key, value in six.iteritems(kwargs):
             if key[-1] == '_':
                 key = key[:-1]
             assert key not in self.stats
@@ -447,24 +784,25 @@
             try:
                 filepath = file_from_modpath(modname.split('.'))
                 if filepath is None:
-                    errors.append( {'key' : 'F0003', 'mod': modname} )
+                    errors.append({'key' : 'ignored-builtin-module', 'mod': modname})
                     continue
-            except (ImportError, SyntaxError), ex:
+            except (ImportError, SyntaxError) as ex:
                 # FIXME p3k : the SyntaxError is a Python bug and should be
                 # removed as soon as possible http://bugs.python.org/issue10588
-                errors.append( {'key': 'F0001', 'mod': modname, 'ex': ex} )
+                errors.append({'key': 'fatal', 'mod': modname, 'ex': ex})
                 continue
         filepath = normpath(filepath)
-        result.append( {'path': filepath, 'name': modname,
-                        'basepath': filepath, 'basename': modname} )
+        result.append({'path': filepath, 'name': modname, 'isarg': True,
+                       'basepath': filepath, 'basename': modname})
         if not (modname.endswith('.__init__') or modname == '__init__') \
                 and '__init__.py' in filepath:
             for subfilepath in get_module_files(dirname(filepath), black_list):
                 if filepath == subfilepath:
                     continue
                 submodname = '.'.join(modpath_from_file(subfilepath))
-                result.append( {'path': subfilepath, 'name': submodname,
-                                'basepath': filepath, 'basename': modname} )
+                result.append({'path': subfilepath, 'name': submodname,
+                               'isarg': False,
+                               'basepath': filepath, 'basename': modname})
     return result, errors
 
 
@@ -473,10 +811,18 @@
     def __init__(self, linter):
         # callbacks per node types
         self.nbstatements = 1
-        self.visit_events = {}
-        self.leave_events = {}
+        self.visit_events = collections.defaultdict(list)
+        self.leave_events = collections.defaultdict(list)
         self.linter = linter
 
+    def _is_method_enabled(self, method):
+        if not hasattr(method, 'checks_msgs'):
+            return True
+        for msg_desc in method.checks_msgs:
+            if self.linter.is_message_enabled(msg_desc):
+                return True
+        return False
+
     def add_checker(self, checker):
         """walk to the checker's dir and collect visit and leave methods"""
         # XXX : should be possible to merge needed_checkers and add_checker
@@ -484,7 +830,6 @@
         lcids = set()
         visits = self.visit_events
         leaves = self.leave_events
-        msgs = self.linter._msgs_state
         for member in dir(checker):
             cid = member[6:]
             if cid == 'default':
@@ -492,40 +837,83 @@
             if member.startswith('visit_'):
                 v_meth = getattr(checker, member)
                 # don't use visit_methods with no activated message:
-                if hasattr(v_meth, 'checks_msgs'):
-                    if not any(msgs.get(m, True) for m in v_meth.checks_msgs):
-                        continue
-                visits.setdefault(cid, []).append(v_meth)
-                vcids.add(cid)
+                if self._is_method_enabled(v_meth):
+                    visits[cid].append(v_meth)
+                    vcids.add(cid)
             elif member.startswith('leave_'):
                 l_meth = getattr(checker, member)
                 # don't use leave_methods with no activated message:
-                if hasattr(l_meth, 'checks_msgs'):
-                    if not any(msgs.get(m, True) for m in l_meth.checks_msgs):
-                        continue
-                leaves.setdefault(cid, []).append(l_meth)
-                lcids.add(cid)
+                if self._is_method_enabled(l_meth):
+                    leaves[cid].append(l_meth)
+                    lcids.add(cid)
         visit_default = getattr(checker, 'visit_default', None)
         if visit_default:
             for cls in nodes.ALL_NODE_CLASSES:
                 cid = cls.__name__.lower()
                 if cid not in vcids:
-                    visits.setdefault(cid, []).append(visit_default)
+                    visits[cid].append(visit_default)
         # for now we have no "leave_default" method in Pylint
 
-    def walk(self, astng):
-        """call visit events of astng checkers for the given node, recurse on
+    def walk(self, astroid):
+        """call visit events of astroid checkers for the given node, recurse on
         its children, then leave events.
         """
-        cid = astng.__class__.__name__.lower()
-        if astng.is_statement:
+        cid = astroid.__class__.__name__.lower()
+        if astroid.is_statement:
             self.nbstatements += 1
         # generate events for this node on each checker
         for cb in self.visit_events.get(cid, ()):
-            cb(astng)
+            cb(astroid)
         # recurse on children
-        for child in astng.get_children():
+        for child in astroid.get_children():
             self.walk(child)
         for cb in self.leave_events.get(cid, ()):
-            cb(astng)
+            cb(astroid)
 
+
+PY_EXTS = ('.py', '.pyc', '.pyo', '.pyw', '.so', '.dll')
+
+def register_plugins(linter, directory):
+    """load all module and package in the given directory, looking for a
+    'register' function in each one, used to register pylint checkers
+    """
+    imported = {}
+    for filename in os.listdir(directory):
+        base, extension = splitext(filename)
+        if base in imported or base == '__pycache__':
+            continue
+        if extension in PY_EXTS and base != '__init__' or (
+                not extension and isdir(join(directory, base))):
+            try:
+                module = load_module_from_file(join(directory, filename))
+            except ValueError:
+                # empty module name (usually emacs auto-save files)
+                continue
+            except ImportError as exc:
+                print("Problem importing module %s: %s" % (filename, exc),
+                      file=sys.stderr)
+            else:
+                if hasattr(module, 'register'):
+                    module.register(linter)
+                    imported[base] = 1
+
+def get_global_option(checker, option, default=None):
+    """ Retrieve an option defined by the given *checker* or
+    by all known option providers.
+
+    It will look in the list of all options providers
+    until the given *option* will be found.
+    If the option wasn't found, the *default* value will be returned.
+    """
+    # First, try in the given checker's config.
+    # After that, look in the options providers.
+
+    try:
+        return getattr(checker.config, option.replace("-", "_"))
+    except AttributeError:
+        pass
+    for provider in checker.linter.options_providers:
+        for options in provider.options:
+            if options[0] == option:
+                return getattr(provider.config, option.replace("-", "_"))
+    return default
diff --git a/third_party/six/LICENSE.txt b/third_party/six/LICENSE.txt
new file mode 100644
index 0000000..d76e024
--- /dev/null
+++ b/third_party/six/LICENSE.txt
@@ -0,0 +1,18 @@
+Copyright (c) 2010-2014 Benjamin Peterson
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/third_party/six/README.chromium b/third_party/six/README.chromium
new file mode 100644
index 0000000..7573e6b
--- /dev/null
+++ b/third_party/six/README.chromium
@@ -0,0 +1,10 @@
+URL: https://pypi.python.org/pypi/six
+Version: 1.8.0
+License: MIT
+License File: LICENSE.txt
+
+Description:
+This directory contains the Python six module.
+
+Local Modifications:
+None
diff --git a/third_party/six/__init__.py b/third_party/six/__init__.py
new file mode 100644
index 0000000..21b0e80
--- /dev/null
+++ b/third_party/six/__init__.py
@@ -0,0 +1,762 @@
+"""Utilities for writing code that runs on Python 2 and 3"""
+
+# Copyright (c) 2010-2014 Benjamin Peterson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+from __future__ import absolute_import
+
+import functools
+import operator
+import sys
+import types
+
+__author__ = "Benjamin Peterson <benjamin@python.org>"
+__version__ = "1.8.0"
+
+
+# Useful for very coarse version differentiation.
+PY2 = sys.version_info[0] == 2
+PY3 = sys.version_info[0] == 3
+
+if PY3:
+    string_types = str,
+    integer_types = int,
+    class_types = type,
+    text_type = str
+    binary_type = bytes
+
+    MAXSIZE = sys.maxsize
+else:
+    string_types = basestring,
+    integer_types = (int, long)
+    class_types = (type, types.ClassType)
+    text_type = unicode
+    binary_type = str
+
+    if sys.platform.startswith("java"):
+        # Jython always uses 32 bits.
+        MAXSIZE = int((1 << 31) - 1)
+    else:
+        # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
+        class X(object):
+            def __len__(self):
+                return 1 << 31
+        try:
+            len(X())
+        except OverflowError:
+            # 32-bit
+            MAXSIZE = int((1 << 31) - 1)
+        else:
+            # 64-bit
+            MAXSIZE = int((1 << 63) - 1)
+        del X
+
+
+def _add_doc(func, doc):
+    """Add documentation to a function."""
+    func.__doc__ = doc
+
+
+def _import_module(name):
+    """Import module, returning the module after the last dot."""
+    __import__(name)
+    return sys.modules[name]
+
+
+class _LazyDescr(object):
+
+    def __init__(self, name):
+        self.name = name
+
+    def __get__(self, obj, tp):
+        result = self._resolve()
+        setattr(obj, self.name, result) # Invokes __set__.
+        # This is a bit ugly, but it avoids running this again.
+        delattr(obj.__class__, self.name)
+        return result
+
+
+class MovedModule(_LazyDescr):
+
+    def __init__(self, name, old, new=None):
+        super(MovedModule, self).__init__(name)
+        if PY3:
+            if new is None:
+                new = name
+            self.mod = new
+        else:
+            self.mod = old
+
+    def _resolve(self):
+        return _import_module(self.mod)
+
+    def __getattr__(self, attr):
+        _module = self._resolve()
+        value = getattr(_module, attr)
+        setattr(self, attr, value)
+        return value
+
+
+class _LazyModule(types.ModuleType):
+
+    def __init__(self, name):
+        super(_LazyModule, self).__init__(name)
+        self.__doc__ = self.__class__.__doc__
+
+    def __dir__(self):
+        attrs = ["__doc__", "__name__"]
+        attrs += [attr.name for attr in self._moved_attributes]
+        return attrs
+
+    # Subclasses should override this
+    _moved_attributes = []
+
+
+class MovedAttribute(_LazyDescr):
+
+    def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
+        super(MovedAttribute, self).__init__(name)
+        if PY3:
+            if new_mod is None:
+                new_mod = name
+            self.mod = new_mod
+            if new_attr is None:
+                if old_attr is None:
+                    new_attr = name
+                else:
+                    new_attr = old_attr
+            self.attr = new_attr
+        else:
+            self.mod = old_mod
+            if old_attr is None:
+                old_attr = name
+            self.attr = old_attr
+
+    def _resolve(self):
+        module = _import_module(self.mod)
+        return getattr(module, self.attr)
+
+
+class _SixMetaPathImporter(object):
+    """
+    A meta path importer to import six.moves and its submodules.
+
+    This class implements a PEP302 finder and loader. It should be compatible
+    with Python 2.5 and all existing versions of Python3
+    """
+    def __init__(self, six_module_name):
+        self.name = six_module_name
+        self.known_modules = {}
+
+    def _add_module(self, mod, *fullnames):
+        for fullname in fullnames:
+            self.known_modules[self.name + "." + fullname] = mod
+
+    def _get_module(self, fullname):
+        return self.known_modules[self.name + "." + fullname]
+
+    def find_module(self, fullname, path=None):
+        if fullname in self.known_modules:
+            return self
+        return None
+
+    def __get_module(self, fullname):
+        try:
+            return self.known_modules[fullname]
+        except KeyError:
+            raise ImportError("This loader does not know module " + fullname)
+
+    def load_module(self, fullname):
+        try:
+            # in case of a reload
+            return sys.modules[fullname]
+        except KeyError:
+            pass
+        mod = self.__get_module(fullname)
+        if isinstance(mod, MovedModule):
+            mod = mod._resolve()
+        else:
+            mod.__loader__ = self
+        sys.modules[fullname] = mod
+        return mod
+
+    def is_package(self, fullname):
+        """
+        Return true, if the named module is a package.
+
+        We need this method to get correct spec objects with
+        Python 3.4 (see PEP451)
+        """
+        return hasattr(self.__get_module(fullname), "__path__")
+
+    def get_code(self, fullname):
+        """Return None
+
+        Required, if is_package is implemented"""
+        self.__get_module(fullname)  # eventually raises ImportError
+        return None
+    get_source = get_code  # same as get_code
+
+_importer = _SixMetaPathImporter(__name__)
+
+
+class _MovedItems(_LazyModule):
+    """Lazy loading of moved objects"""
+    __path__ = []  # mark as package
+
+
+_moved_attributes = [
+    MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
+    MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
+    MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
+    MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
+    MovedAttribute("intern", "__builtin__", "sys"),
+    MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+    MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
+    MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
+    MovedAttribute("reduce", "__builtin__", "functools"),
+    MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
+    MovedAttribute("StringIO", "StringIO", "io"),
+    MovedAttribute("UserDict", "UserDict", "collections"),
+    MovedAttribute("UserList", "UserList", "collections"),
+    MovedAttribute("UserString", "UserString", "collections"),
+    MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
+    MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
+    MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
+
+    MovedModule("builtins", "__builtin__"),
+    MovedModule("configparser", "ConfigParser"),
+    MovedModule("copyreg", "copy_reg"),
+    MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
+    MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
+    MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
+    MovedModule("http_cookies", "Cookie", "http.cookies"),
+    MovedModule("html_entities", "htmlentitydefs", "html.entities"),
+    MovedModule("html_parser", "HTMLParser", "html.parser"),
+    MovedModule("http_client", "httplib", "http.client"),
+    MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
+    MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
+    MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
+    MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
+    MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
+    MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
+    MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
+    MovedModule("cPickle", "cPickle", "pickle"),
+    MovedModule("queue", "Queue"),
+    MovedModule("reprlib", "repr"),
+    MovedModule("socketserver", "SocketServer"),
+    MovedModule("_thread", "thread", "_thread"),
+    MovedModule("tkinter", "Tkinter"),
+    MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
+    MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
+    MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
+    MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
+    MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
+    MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
+    MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
+    MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
+    MovedModule("tkinter_colorchooser", "tkColorChooser",
+                "tkinter.colorchooser"),
+    MovedModule("tkinter_commondialog", "tkCommonDialog",
+                "tkinter.commondialog"),
+    MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
+    MovedModule("tkinter_font", "tkFont", "tkinter.font"),
+    MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
+    MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
+                "tkinter.simpledialog"),
+    MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
+    MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
+    MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
+    MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
+    MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
+    MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
+    MovedModule("winreg", "_winreg"),
+]
+for attr in _moved_attributes:
+    setattr(_MovedItems, attr.name, attr)
+    if isinstance(attr, MovedModule):
+        _importer._add_module(attr, "moves." + attr.name)
+del attr
+
+_MovedItems._moved_attributes = _moved_attributes
+
+moves = _MovedItems(__name__ + ".moves")
+_importer._add_module(moves, "moves")
+
+
+class Module_six_moves_urllib_parse(_LazyModule):
+    """Lazy loading of moved objects in six.moves.urllib_parse"""
+
+
+_urllib_parse_moved_attributes = [
+    MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
+    MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
+    MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
+    MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
+    MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
+    MovedAttribute("urljoin", "urlparse", "urllib.parse"),
+    MovedAttribute("urlparse", "urlparse", "urllib.parse"),
+    MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
+    MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
+    MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
+    MovedAttribute("quote", "urllib", "urllib.parse"),
+    MovedAttribute("quote_plus", "urllib", "urllib.parse"),
+    MovedAttribute("unquote", "urllib", "urllib.parse"),
+    MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
+    MovedAttribute("urlencode", "urllib", "urllib.parse"),
+    MovedAttribute("splitquery", "urllib", "urllib.parse"),
+    MovedAttribute("splittag", "urllib", "urllib.parse"),
+    MovedAttribute("splituser", "urllib", "urllib.parse"),
+    MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
+    MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
+    MovedAttribute("uses_params", "urlparse", "urllib.parse"),
+    MovedAttribute("uses_query", "urlparse", "urllib.parse"),
+    MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
+]
+for attr in _urllib_parse_moved_attributes:
+    setattr(Module_six_moves_urllib_parse, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
+                      "moves.urllib_parse", "moves.urllib.parse")
+
+
+class Module_six_moves_urllib_error(_LazyModule):
+    """Lazy loading of moved objects in six.moves.urllib_error"""
+
+
+_urllib_error_moved_attributes = [
+    MovedAttribute("URLError", "urllib2", "urllib.error"),
+    MovedAttribute("HTTPError", "urllib2", "urllib.error"),
+    MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
+]
+for attr in _urllib_error_moved_attributes:
+    setattr(Module_six_moves_urllib_error, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
+                      "moves.urllib_error", "moves.urllib.error")
+
+
+class Module_six_moves_urllib_request(_LazyModule):
+    """Lazy loading of moved objects in six.moves.urllib_request"""
+
+
+_urllib_request_moved_attributes = [
+    MovedAttribute("urlopen", "urllib2", "urllib.request"),
+    MovedAttribute("install_opener", "urllib2", "urllib.request"),
+    MovedAttribute("build_opener", "urllib2", "urllib.request"),
+    MovedAttribute("pathname2url", "urllib", "urllib.request"),
+    MovedAttribute("url2pathname", "urllib", "urllib.request"),
+    MovedAttribute("getproxies", "urllib", "urllib.request"),
+    MovedAttribute("Request", "urllib2", "urllib.request"),
+    MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
+    MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
+    MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
+    MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
+    MovedAttribute("FileHandler", "urllib2", "urllib.request"),
+    MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
+    MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
+    MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
+    MovedAttribute("urlretrieve", "urllib", "urllib.request"),
+    MovedAttribute("urlcleanup", "urllib", "urllib.request"),
+    MovedAttribute("URLopener", "urllib", "urllib.request"),
+    MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
+    MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
+]
+for attr in _urllib_request_moved_attributes:
+    setattr(Module_six_moves_urllib_request, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
+                      "moves.urllib_request", "moves.urllib.request")
+
+
+class Module_six_moves_urllib_response(_LazyModule):
+    """Lazy loading of moved objects in six.moves.urllib_response"""
+
+
+_urllib_response_moved_attributes = [
+    MovedAttribute("addbase", "urllib", "urllib.response"),
+    MovedAttribute("addclosehook", "urllib", "urllib.response"),
+    MovedAttribute("addinfo", "urllib", "urllib.response"),
+    MovedAttribute("addinfourl", "urllib", "urllib.response"),
+]
+for attr in _urllib_response_moved_attributes:
+    setattr(Module_six_moves_urllib_response, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
+                      "moves.urllib_response", "moves.urllib.response")
+
+
+class Module_six_moves_urllib_robotparser(_LazyModule):
+    """Lazy loading of moved objects in six.moves.urllib_robotparser"""
+
+
+_urllib_robotparser_moved_attributes = [
+    MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
+]
+for attr in _urllib_robotparser_moved_attributes:
+    setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
+                      "moves.urllib_robotparser", "moves.urllib.robotparser")
+
+
+class Module_six_moves_urllib(types.ModuleType):
+    """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
+    __path__ = []  # mark as package
+    parse = _importer._get_module("moves.urllib_parse")
+    error = _importer._get_module("moves.urllib_error")
+    request = _importer._get_module("moves.urllib_request")
+    response = _importer._get_module("moves.urllib_response")
+    robotparser = _importer._get_module("moves.urllib_robotparser")
+
+    def __dir__(self):
+        return ['parse', 'error', 'request', 'response', 'robotparser']
+
+_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
+                      "moves.urllib")
+
+
+def add_move(move):
+    """Add an item to six.moves."""
+    setattr(_MovedItems, move.name, move)
+
+
+def remove_move(name):
+    """Remove item from six.moves."""
+    try:
+        delattr(_MovedItems, name)
+    except AttributeError:
+        try:
+            del moves.__dict__[name]
+        except KeyError:
+            raise AttributeError("no such move, %r" % (name,))
+
+
+if PY3:
+    _meth_func = "__func__"
+    _meth_self = "__self__"
+
+    _func_closure = "__closure__"
+    _func_code = "__code__"
+    _func_defaults = "__defaults__"
+    _func_globals = "__globals__"
+else:
+    _meth_func = "im_func"
+    _meth_self = "im_self"
+
+    _func_closure = "func_closure"
+    _func_code = "func_code"
+    _func_defaults = "func_defaults"
+    _func_globals = "func_globals"
+
+
+try:
+    advance_iterator = next
+except NameError:
+    def advance_iterator(it):
+        return it.next()
+next = advance_iterator
+
+
+try:
+    callable = callable
+except NameError:
+    def callable(obj):
+        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
+
+
+if PY3:
+    def get_unbound_function(unbound):
+        return unbound
+
+    create_bound_method = types.MethodType
+
+    Iterator = object
+else:
+    def get_unbound_function(unbound):
+        return unbound.im_func
+
+    def create_bound_method(func, obj):
+        return types.MethodType(func, obj, obj.__class__)
+
+    class Iterator(object):
+
+        def next(self):
+            return type(self).__next__(self)
+
+    callable = callable
+_add_doc(get_unbound_function,
+         """Get the function out of a possibly unbound function""")
+
+
+get_method_function = operator.attrgetter(_meth_func)
+get_method_self = operator.attrgetter(_meth_self)
+get_function_closure = operator.attrgetter(_func_closure)
+get_function_code = operator.attrgetter(_func_code)
+get_function_defaults = operator.attrgetter(_func_defaults)
+get_function_globals = operator.attrgetter(_func_globals)
+
+
+if PY3:
+    def iterkeys(d, **kw):
+        return iter(d.keys(**kw))
+
+    def itervalues(d, **kw):
+        return iter(d.values(**kw))
+
+    def iteritems(d, **kw):
+        return iter(d.items(**kw))
+
+    def iterlists(d, **kw):
+        return iter(d.lists(**kw))
+else:
+    def iterkeys(d, **kw):
+        return iter(d.iterkeys(**kw))
+
+    def itervalues(d, **kw):
+        return iter(d.itervalues(**kw))
+
+    def iteritems(d, **kw):
+        return iter(d.iteritems(**kw))
+
+    def iterlists(d, **kw):
+        return iter(d.iterlists(**kw))
+
+_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
+_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
+_add_doc(iteritems,
+         "Return an iterator over the (key, value) pairs of a dictionary.")
+_add_doc(iterlists,
+         "Return an iterator over the (key, [values]) pairs of a dictionary.")
+
+
+if PY3:
+    def b(s):
+        return s.encode("latin-1")
+    def u(s):
+        return s
+    unichr = chr
+    if sys.version_info[1] <= 1:
+        def int2byte(i):
+            return bytes((i,))
+    else:
+        # This is about 2x faster than the implementation above on 3.2+
+        int2byte = operator.methodcaller("to_bytes", 1, "big")
+    byte2int = operator.itemgetter(0)
+    indexbytes = operator.getitem
+    iterbytes = iter
+    import io
+    StringIO = io.StringIO
+    BytesIO = io.BytesIO
+else:
+    def b(s):
+        return s
+    # Workaround for standalone backslash
+    def u(s):
+        return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
+    unichr = unichr
+    int2byte = chr
+    def byte2int(bs):
+        return ord(bs[0])
+    def indexbytes(buf, i):
+        return ord(buf[i])
+    def iterbytes(buf):
+        return (ord(byte) for byte in buf)
+    import StringIO
+    StringIO = BytesIO = StringIO.StringIO
+_add_doc(b, """Byte literal""")
+_add_doc(u, """Text literal""")
+
+
+if PY3:
+    exec_ = getattr(moves.builtins, "exec")
+
+
+    def reraise(tp, value, tb=None):
+        if value is None:
+            value = tp()
+        if value.__traceback__ is not tb:
+            raise value.with_traceback(tb)
+        raise value
+
+else:
+    def exec_(_code_, _globs_=None, _locs_=None):
+        """Execute code in a namespace."""
+        if _globs_ is None:
+            frame = sys._getframe(1)
+            _globs_ = frame.f_globals
+            if _locs_ is None:
+                _locs_ = frame.f_locals
+            del frame
+        elif _locs_ is None:
+            _locs_ = _globs_
+        exec("""exec _code_ in _globs_, _locs_""")
+
+
+    exec_("""def reraise(tp, value, tb=None):
+    raise tp, value, tb
+""")
+
+
+print_ = getattr(moves.builtins, "print", None)
+if print_ is None:
+    def print_(*args, **kwargs):
+        """The new-style print function for Python 2.4 and 2.5."""
+        fp = kwargs.pop("file", sys.stdout)
+        if fp is None:
+            return
+        def write(data):
+            if not isinstance(data, basestring):
+                data = str(data)
+            # If the file has an encoding, encode unicode with it.
+            if (isinstance(fp, file) and
+                isinstance(data, unicode) and
+                fp.encoding is not None):
+                errors = getattr(fp, "errors", None)
+                if errors is None:
+                    errors = "strict"
+                data = data.encode(fp.encoding, errors)
+            fp.write(data)
+        want_unicode = False
+        sep = kwargs.pop("sep", None)
+        if sep is not None:
+            if isinstance(sep, unicode):
+                want_unicode = True
+            elif not isinstance(sep, str):
+                raise TypeError("sep must be None or a string")
+        end = kwargs.pop("end", None)
+        if end is not None:
+            if isinstance(end, unicode):
+                want_unicode = True
+            elif not isinstance(end, str):
+                raise TypeError("end must be None or a string")
+        if kwargs:
+            raise TypeError("invalid keyword arguments to print()")
+        if not want_unicode:
+            for arg in args:
+                if isinstance(arg, unicode):
+                    want_unicode = True
+                    break
+        if want_unicode:
+            newline = unicode("\n")
+            space = unicode(" ")
+        else:
+            newline = "\n"
+            space = " "
+        if sep is None:
+            sep = space
+        if end is None:
+            end = newline
+        for i, arg in enumerate(args):
+            if i:
+                write(sep)
+            write(arg)
+        write(end)
+
+_add_doc(reraise, """Reraise an exception.""")
+
+if sys.version_info[0:2] < (3, 4):
+    def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
+              updated=functools.WRAPPER_UPDATES):
+        def wrapper(f):
+            f = functools.wraps(wrapped)(f)
+            f.__wrapped__ = wrapped
+            return f
+        return wrapper
+else:
+    wraps = functools.wraps
+
+def with_metaclass(meta, *bases):
+    """Create a base class with a metaclass."""
+    # This requires a bit of explanation: the basic idea is to make a dummy
+    # metaclass for one level of class instantiation that replaces itself with
+    # the actual metaclass.
+    class metaclass(meta):
+        def __new__(cls, name, this_bases, d):
+            return meta(name, bases, d)
+    return type.__new__(metaclass, 'temporary_class', (), {})
+
+
+def add_metaclass(metaclass):
+    """Class decorator for creating a class with a metaclass."""
+    def wrapper(cls):
+        orig_vars = cls.__dict__.copy()
+        slots = orig_vars.get('__slots__')
+        if slots is not None:
+            if isinstance(slots, str):
+                slots = [slots]
+            for slots_var in slots:
+                orig_vars.pop(slots_var)
+        orig_vars.pop('__dict__', None)
+        orig_vars.pop('__weakref__', None)
+        return metaclass(cls.__name__, cls.__bases__, orig_vars)
+    return wrapper
+
+# Complete the moves implementation.
+# This code is at the end of this module to speed up module loading.
+# Turn this module into a package.
+__path__ = []  # required for PEP 302 and PEP 451
+__package__ = __name__  # see PEP 366 @ReservedAssignment
+if globals().get("__spec__") is not None:
+    __spec__.submodule_search_locations = []  # PEP 451 @UndefinedVariable
+# Remove other six meta path importers, since they cause problems. This can
+# happen if six is removed from sys.modules and then reloaded. (Setuptools does
+# this for some reason.)
+if sys.meta_path:
+    for i, importer in enumerate(sys.meta_path):
+        # Here's some real nastiness: Another "instance" of the six module might
+        # be floating around. Therefore, we can't use isinstance() to check for
+        # the six meta path importer, since the other six instance will have
+        # inserted an importer with different class.
+        if (type(importer).__name__ == "_SixMetaPathImporter" and
+            importer.name == __name__):
+            del sys.meta_path[i]
+            break
+    del i, importer
+# Finally, add the importer to the meta path import hook.
+sys.meta_path.append(_importer)
diff --git a/third_party/upload.py b/third_party/upload.py
index fa4133c..002a0d6 100755
--- a/third_party/upload.py
+++ b/third_party/upload.py
@@ -34,7 +34,6 @@
 # This code is derived from appcfg.py in the App Engine SDK (open source),
 # and from ASPN recipe #146306.
 
-import BaseHTTPServer
 import ConfigParser
 import cookielib
 import errno
@@ -52,7 +51,6 @@
 import urllib
 import urllib2
 import urlparse
-import webbrowser
 
 from multiprocessing.pool import ThreadPool
 
@@ -69,9 +67,13 @@
 
 try:
   import keyring
-except ImportError:
+except:
   keyring = None
 
+# auth.py is a part of depot_tools.
+# TODO(vadimsh): Merge upload.py into depot_tools
+import auth
+
 # The logging verbosity:
 #  0: Errors only.
 #  1: Status messages.
@@ -102,22 +104,18 @@
 VCS_UNKNOWN = "Unknown"
 
 VCS = [
-{
-    'name': VCS_MERCURIAL,
-    'aliases': ['hg', 'mercurial'],
-}, {
-    'name': VCS_SUBVERSION,
-    'aliases': ['svn', 'subversion'],
-}, {
-    'name': VCS_PERFORCE,
-    'aliases': ['p4', 'perforce'],
-}, {
-    'name': VCS_GIT,
-    'aliases': ['git'],
-}, {
-    'name': VCS_CVS,
-    'aliases': ['cvs'],
-}]
+  {'name': VCS_MERCURIAL,
+   'aliases': ['hg', 'mercurial']},
+  {'name': VCS_SUBVERSION,
+   'aliases': ['svn', 'subversion'],},
+  {'name': VCS_PERFORCE,
+   'aliases': ['p4', 'perforce']},
+  {'name': VCS_GIT,
+   'aliases': ['git']},
+  {'name': VCS_CVS,
+   'aliases': ['cvs']},
+  ]
+
 
 VCS_SHORT_NAMES = []    # hg, svn, ...
 VCS_ABBREVIATIONS = {}  # alias: name, ...
@@ -126,48 +124,6 @@
   VCS_ABBREVIATIONS.update((alias, vcs['name']) for alias in vcs['aliases'])
 
 
-# OAuth 2.0-Related Constants
-LOCALHOST_IP = '127.0.0.1'
-DEFAULT_OAUTH2_PORT = 8001
-ACCESS_TOKEN_PARAM = 'access_token'
-ERROR_PARAM = 'error'
-OAUTH_DEFAULT_ERROR_MESSAGE = 'OAuth 2.0 error occurred.'
-OAUTH_PATH = '/get-access-token'
-OAUTH_PATH_PORT_TEMPLATE = OAUTH_PATH + '?port=%(port)d'
-AUTH_HANDLER_RESPONSE = """\
-<html>
-  <head>
-    <title>Authentication Status</title>
-    <script>
-    window.onload = function() {
-      window.close();
-    }
-    </script>
-  </head>
-  <body>
-    <p>The authentication flow has completed.</p>
-  </body>
-</html>
-"""
-# Borrowed from google-api-python-client
-OPEN_LOCAL_MESSAGE_TEMPLATE = """\
-Your browser has been opened to visit:
-
-    %s
-
-If your browser is on a different machine then exit and re-run
-upload.py with the command-line parameter
-
-  --no_oauth2_webbrowser
-"""
-NO_OPEN_LOCAL_MESSAGE_TEMPLATE = """\
-Go to the following link in your browser:
-
-    %s
-
-and copy the access token.
-"""
-
 # The result of parsing Subversion's [auto-props] setting.
 svn_auto_props_map = None
 
@@ -217,7 +173,7 @@
 
 def ErrorExit(msg):
   """Print an error message to stderr and exit."""
-  print >>sys.stderr, msg
+  print >> sys.stderr, msg
   sys.exit(1)
 
 
@@ -242,8 +198,8 @@
   """Provides a common interface for a simple RPC server."""
 
   def __init__(self, host, auth_function, host_override=None,
-               extra_headers=None, save_cookies=False,
-               account_type=AUTH_ACCOUNT_TYPE):
+               request_path_prefix=None, extra_headers=None,
+               save_cookies=False, account_type=AUTH_ACCOUNT_TYPE):
     """Creates a new AbstractRpcServer.
 
     Args:
@@ -252,6 +208,7 @@
         (email, password) tuple when called. Will be called if authentication
         is required.
       host_override: The host header to send to the server (defaults to host).
+      request_path_prefix: A string to prefix all URL paths with (e.g. 'bots/').
       extra_headers: A dict of extra headers to append to every request.
       save_cookies: If True, save the authentication cookies to local disk.
         If False, use an in-memory cookiejar instead.  Subclasses must
@@ -264,6 +221,7 @@
         not self.host.startswith("https://")):
       self.host = "http://" + self.host
     self.host_override = host_override
+    self.request_path_prefix = request_path_prefix or ''
     self.auth_function = auth_function
     self.authenticated = False
     self.extra_headers = extra_headers or {}
@@ -293,7 +251,7 @@
       req.add_header(key, value)
     return req
 
-  def _GetAuthToken(self, email, password):
+  def _GetAuthToken(self, email, password, internal=False):
     """Uses ClientLogin to authenticate the user, returning an auth token.
 
     Args:
@@ -311,8 +269,9 @@
     if self.host.endswith(".google.com"):
       # Needed for use inside Google.
       account_type = "HOSTED"
+    service = ('ClientLogin') if not internal else ('ClientAuth')
     req = self._CreateRequest(
-        url="https://www.google.com/accounts/ClientLogin",
+        url="https://www.google.com/accounts/%s" % (service,),
         data=urllib.urlencode({
             "Email": email,
             "Passwd": password,
@@ -360,7 +319,7 @@
                               response.headers, response.fp)
     self.authenticated = True
 
-  def _Authenticate(self):
+  def _Authenticate(self, force_refresh):
     """Authenticates the user.
 
     The authentication process works as follows:
@@ -377,44 +336,66 @@
     """
     for i in range(3):
       credentials = self.auth_function()
+
+      # Try external, then internal.
+      e = None
+      error_map = None
       try:
         auth_token = self._GetAuthToken(credentials[0], credentials[1])
-      except ClientLoginError, e:
-        print >>sys.stderr, ''
-        if e.reason == "BadAuthentication":
+      except urllib2.HTTPError:
+        try:
+          # Try internal endpoint.
+          error_map = {
+              "badauth": "BadAuthentication",
+              "cr": "CaptchaRequired",
+              "adel": "AccountDeleted",
+              "adis": "AccountDisabled",
+              "sdis": "ServiceDisabled",
+              "ire": "ServiceUnavailable",
+          }
+          auth_token = self._GetAuthToken(credentials[0], credentials[1],
+                                          internal=True)
+        except ClientLoginError, exc:
+          e = exc
+      if e:
+        print >> sys.stderr, ''
+        error_message = e.reason
+        if error_map:
+          error_message = error_map.get(error_message, error_message)
+        if error_message == "BadAuthentication":
           if e.info == "InvalidSecondFactor":
-            print >>sys.stderr, (
+            print >> sys.stderr, (
                 "Use an application-specific password instead "
                 "of your regular account password.\n"
                 "See http://www.google.com/"
                 "support/accounts/bin/answer.py?answer=185833")
           else:
-            print >>sys.stderr, "Invalid username or password."
-        elif e.reason == "CaptchaRequired":
-          print >>sys.stderr, (
+            print >> sys.stderr, "Invalid username or password."
+        elif error_message == "CaptchaRequired":
+          print >> sys.stderr, (
               "Please go to\n"
               "https://www.google.com/accounts/DisplayUnlockCaptcha\n"
               "and verify you are a human.  Then try again.\n"
               "If you are using a Google Apps account the URL is:\n"
               "https://www.google.com/a/yourdomain.com/UnlockCaptcha")
-        elif e.reason == "NotVerified":
-          print >>sys.stderr, "Account not verified."
-        elif e.reason == "TermsNotAgreed":
-          print >>sys.stderr, "User has not agreed to TOS."
-        elif e.reason == "AccountDeleted":
-          print >>sys.stderr, "The user account has been deleted."
-        elif e.reason == "AccountDisabled":
-          print >>sys.stderr, "The user account has been disabled."
+        elif error_message == "NotVerified":
+          print >> sys.stderr, "Account not verified."
+        elif error_message == "TermsNotAgreed":
+          print >> sys.stderr, "User has not agreed to TOS."
+        elif error_message == "AccountDeleted":
+          print >> sys.stderr, "The user account has been deleted."
+        elif error_message == "AccountDisabled":
+          print >> sys.stderr, "The user account has been disabled."
           break
-        elif e.reason == "ServiceDisabled":
-          print >>sys.stderr, ("The user's access to the service has been "
+        elif error_message == "ServiceDisabled":
+          print >> sys.stderr, ("The user's access to the service has been "
                                "disabled.")
-        elif e.reason == "ServiceUnavailable":
-          print >>sys.stderr, "The service is not available; try again later."
+        elif error_message == "ServiceUnavailable":
+          print >> sys.stderr, "The service is not available; try again later."
         else:
           # Unknown error.
-          raise
-        print >>sys.stderr, ''
+          raise e
+        print >> sys.stderr, ''
         continue
       self._GetAuthCookie(auth_token)
       return
@@ -443,16 +424,17 @@
     # TODO: Don't require authentication.  Let the server say
     # whether it is necessary.
     if not self.authenticated and self.auth_function:
-      self._Authenticate()
+      self._Authenticate(force_refresh=False)
 
     old_timeout = socket.getdefaulttimeout()
     socket.setdefaulttimeout(timeout)
+    auth_attempted = False
     try:
       tries = 0
       while True:
         tries += 1
         args = dict(kwargs)
-        url = "%s%s" % (self.host, request_path)
+        url = "%s%s%s" % (self.host, self.request_path_prefix, request_path)
         if args:
           url += "?" + urllib.urlencode(args)
         req = self._CreateRequest(url=url, data=payload)
@@ -461,17 +443,23 @@
           for header, value in extra_headers.items():
             req.add_header(header, value)
         try:
-          f = self.opener.open(req)
+          f = self.opener.open(req, timeout=70)
           response = f.read()
           f.close()
           return response
         except urllib2.HTTPError, e:
           if tries > 3:
             raise
-          elif e.code == 401 or e.code == 302:
+          elif e.code in (302, 401, 403):
             if not self.auth_function:
               raise
-            self._Authenticate()
+            # Already tried force refresh, didn't help -> give up with error.
+            if auth_attempted:
+              raise auth.AuthenticationError(
+                  'Access to %s is denied (server returned HTTP %d).'
+                  % (self.host, e.code))
+            self._Authenticate(force_refresh=True)
+            auth_attempted = True
           elif e.code == 301:
             # Handle permanent redirect manually.
             url = e.info()["location"]
@@ -490,15 +478,22 @@
 class HttpRpcServer(AbstractRpcServer):
   """Provides a simplified RPC-style interface for HTTP requests."""
 
-  def _Authenticate(self):
+  def _Authenticate(self, force_refresh):
     """Save the cookie jar after authentication."""
-    if isinstance(self.auth_function, OAuth2Creds):
-      access_token = self.auth_function()
-      if access_token is not None:
-        self.extra_headers['Authorization'] = 'OAuth %s' % (access_token,)
-        self.authenticated = True
+    if isinstance(self.auth_function, auth.Authenticator):
+      try:
+        access_token = self.auth_function.get_access_token(force_refresh)
+      except auth.LoginRequiredError:
+        # Attempt to make unauthenticated request first if there's no cached
+        # credentials. HttpRpcServer calls _Authenticate(force_refresh=True)
+        # again if unauthenticated request doesn't work.
+        if not force_refresh:
+          return
+        raise
+      self.extra_headers['Authorization'] = 'Bearer %s' % (
+          access_token.token,)
     else:
-      super(HttpRpcServer, self)._Authenticate()
+      super(HttpRpcServer, self)._Authenticate(force_refresh)
       if self.save_cookies:
         StatusUpdate("Saving authentication cookies to %s" % self.cookie_file)
         self.cookie_jar.save()
@@ -542,37 +537,37 @@
 
 
 class CondensedHelpFormatter(optparse.IndentedHelpFormatter):
-   """Frees more horizontal space by removing indentation from group
-      options and collapsing arguments between short and long, e.g.
-      '-o ARG, --opt=ARG' to -o --opt ARG"""
+  """Frees more horizontal space by removing indentation from group
+     options and collapsing arguments between short and long, e.g.
+     '-o ARG, --opt=ARG' to -o --opt ARG"""
 
-   def format_heading(self, heading):
-     return "%s:\n" % heading
+  def format_heading(self, heading):
+    return "%s:\n" % heading
 
-   def format_option(self, option):
-     self.dedent()
-     res = optparse.HelpFormatter.format_option(self, option)
-     self.indent()
-     return res
+  def format_option(self, option):
+    self.dedent()
+    res = optparse.HelpFormatter.format_option(self, option)
+    self.indent()
+    return res
 
-   def format_option_strings(self, option):
-     self.set_long_opt_delimiter(" ")
-     optstr = optparse.HelpFormatter.format_option_strings(self, option)
-     optlist = optstr.split(", ")
-     if len(optlist) > 1:
-       if option.takes_value():
-         # strip METAVAR from all but the last option
-         optlist = [x.split()[0] for x in optlist[:-1]] + optlist[-1:]
-       optstr = " ".join(optlist)
-     return optstr
+  def format_option_strings(self, option):
+    self.set_long_opt_delimiter(" ")
+    optstr = optparse.HelpFormatter.format_option_strings(self, option)
+    optlist = optstr.split(", ")
+    if len(optlist) > 1:
+      if option.takes_value():
+        # strip METAVAR from all but the last option
+        optlist = [x.split()[0] for x in optlist[:-1]] + optlist[-1:]
+      optstr = " ".join(optlist)
+    return optstr
 
 
 parser = optparse.OptionParser(
-    usage=("%prog [options] [-- diff_options] [path...]\n"
-           "See also: http://code.google.com/p/rietveld/wiki/UploadPyUsage"),
-    add_help_option=False,
-    formatter=CondensedHelpFormatter()
-)
+  usage=("%prog [options] [-- diff_options] [path...]\n"
+         "See also: http://code.google.com/p/rietveld/wiki/UploadPyUsage"),
+  add_help_option=False,
+  formatter=CondensedHelpFormatter()
+  )
 parser.add_option("-h", "--help", action="store_true",
                   help="Show this help message and exit.")
 parser.add_option("-y", "--assume_yes", action="store_true",
@@ -599,32 +594,11 @@
 group.add_option("-e", "--email", action="store", dest="email",
                  metavar="EMAIL", default=None,
                  help="The username to use. Will prompt if omitted.")
-group.add_option("-H", "--host", action="store", dest="host",
-                 metavar="HOST", default=None,
-                 help="Overrides the Host header sent with all RPCs.")
-group.add_option("--no_cookies", action="store_false",
-                 dest="save_cookies", default=True,
-                 help="Do not save authentication cookies to local disk.")
-group.add_option("--oauth2", action="store_true",
-                 dest="use_oauth2", default=False,
-                 help="Use OAuth 2.0 instead of a password.")
-group.add_option("--oauth2_port", action="store", type="int",
-                 dest="oauth2_port", default=DEFAULT_OAUTH2_PORT,
-                 help=("Port to use to handle OAuth 2.0 redirect. Must be an "
-                       "integer in the range 1024-49151, defaults to "
-                       "'%default'."))
-group.add_option("--no_oauth2_webbrowser", action="store_false",
-                 dest="open_oauth2_local_webbrowser", default=True,
-                 help="Don't open a browser window to get an access token.")
-group.add_option("--account_type", action="store", dest="account_type",
-                 metavar="TYPE", default=AUTH_ACCOUNT_TYPE,
-                 choices=["GOOGLE", "HOSTED"],
-                 help=("Override the default account type "
-                       "(defaults to '%default', "
-                       "valid choices are 'GOOGLE' and 'HOSTED')."))
 group.add_option("-j", "--number-parallel-uploads",
                  dest="num_upload_threads", default=8,
                  help="Number of uploads to do in parallel.")
+# Authentication
+auth.add_auth_options(parser)
 # Issue
 group = parser.add_option_group("Issue options")
 group.add_option("-t", "--title", action="store", dest="title",
@@ -654,6 +628,17 @@
                  help="Base URL path for files (listed as \"Base URL\" when "
                  "viewing issue).  If omitted, will be guessed automatically "
                  "for SVN repos and left blank for others.")
+group.add_option("--target_ref", action="store", dest="target_ref",
+                 default=None,
+                 help="The target ref that is transitively tracked by the "
+                 "local branch this patch comes from.")
+parser.add_option("--cq_dry_run", action="store_true",
+                  help="Send the patchset to do a CQ dry run right after "
+                       "upload.")
+parser.add_option("--depends_on_patchset", action="store",
+                  dest="depends_on_patchset",
+                  help="The uploaded patchset this patchset depends on. The "
+                       "value will be in this format- issue_num:patchset_num")
 group.add_option("--download_base", action="store_true",
                  dest="download_base", default=False,
                  help="Base files will be downloaded by the server "
@@ -680,8 +665,11 @@
 group = parser.add_option_group("Git-specific options")
 group.add_option("--git_similarity", action="store", dest="git_similarity",
                  metavar="SIM", type="int", default=50,
-                 help=("Set the minimum similarity index for detecting renames "
-                       "and copies. See `git diff -C`. (default 50)."))
+                 help=("Set the minimum similarity percentage for detecting "
+                       "renames and copies. See `git diff -C`. (default 50)."))
+group.add_option("--git_only_search_patch", action="store_false", default=True,
+                 dest='git_find_copies_harder',
+                 help="Removes --find-copies-harder when seaching for copies")
 group.add_option("--git_no_find_copies", action="store_false", default=True,
                  dest="git_find_copies",
                  help=("Prevents git from looking for copies (default off)."))
@@ -702,150 +690,6 @@
                  help=("Perforce user"))
 
 
-# OAuth 2.0 Methods and Helpers
-class ClientRedirectServer(BaseHTTPServer.HTTPServer):
-  """A server for redirects back to localhost from the associated server.
-
-  Waits for a single request and parses the query parameters for an access token
-  or an error and then stops serving.
-  """
-  access_token = None
-  error = None
-
-
-class ClientRedirectHandler(BaseHTTPServer.BaseHTTPRequestHandler):
-  """A handler for redirects back to localhost from the associated server.
-
-  Waits for a single request and parses the query parameters into the server's
-  access_token or error and then stops serving.
-  """
-
-  def SetResponseValue(self):
-    """Stores the access token or error from the request on the server.
-
-    Will only do this if exactly one query parameter was passed in to the
-    request and that query parameter used 'access_token' or 'error' as the key.
-    """
-    query_string = urlparse.urlparse(self.path).query
-    query_params = urlparse.parse_qs(query_string)
-
-    if len(query_params) == 1:
-      if query_params.has_key(ACCESS_TOKEN_PARAM):
-        access_token_list = query_params[ACCESS_TOKEN_PARAM]
-        if len(access_token_list) == 1:
-          self.server.access_token = access_token_list[0]
-      else:
-        error_list = query_params.get(ERROR_PARAM, [])
-        if len(error_list) == 1:
-          self.server.error = error_list[0]
-
-  def do_GET(self):
-    """Handle a GET request.
-
-    Parses and saves the query parameters and prints a message that the server
-    has completed its lone task (handling a redirect).
-
-    Note that we can't detect if an error occurred.
-    """
-    self.send_response(200)
-    self.send_header('Content-type', 'text/html')
-    self.end_headers()
-    self.SetResponseValue()
-    self.wfile.write(AUTH_HANDLER_RESPONSE)
-
-  def log_message(self, format, *args):
-    """Do not log messages to stdout while running as command line program."""
-    pass
-
-
-def OpenOAuth2ConsentPage(server=DEFAULT_REVIEW_SERVER,
-                          port=DEFAULT_OAUTH2_PORT):
-  """Opens the OAuth 2.0 consent page or prints instructions how to.
-
-  Uses the webbrowser module to open the OAuth server side page in a browser.
-
-  Args:
-    server: String containing the review server URL. Defaults to
-      DEFAULT_REVIEW_SERVER.
-    port: Integer, the port where the localhost server receiving the redirect
-      is serving. Defaults to DEFAULT_OAUTH2_PORT.
-
-  Returns:
-    A boolean indicating whether the page opened successfully.
-  """
-  path = OAUTH_PATH_PORT_TEMPLATE % {'port': port}
-  parsed_url = urlparse.urlparse(server)
-  scheme = parsed_url[0] or 'https'
-  if scheme != 'https':
-    ErrorExit('Using OAuth requires a review server with SSL enabled.')
-  # If no scheme was given on command line the server address ends up in
-  # parsed_url.path otherwise in netloc.
-  host = parsed_url[1] or parsed_url[2]
-  page = '%s://%s%s' % (scheme, host, path)
-  page_opened = webbrowser.open(page, new=1, autoraise=True)
-  if page_opened:
-    print OPEN_LOCAL_MESSAGE_TEMPLATE % (page,)
-  return page_opened
-
-
-def WaitForAccessToken(port=DEFAULT_OAUTH2_PORT):
-  """Spins up a simple HTTP Server to handle a single request.
-
-  Intended to handle a single redirect from the production server after the
-  user authenticated via OAuth 2.0 with the server.
-
-  Args:
-    port: Integer, the port where the localhost server receiving the redirect
-      is serving. Defaults to DEFAULT_OAUTH2_PORT.
-
-  Returns:
-    The access token passed to the localhost server, or None if no access token
-      was passed.
-  """
-  httpd = ClientRedirectServer((LOCALHOST_IP, port), ClientRedirectHandler)
-  # Wait to serve just one request before deferring control back
-  # to the caller of wait_for_refresh_token
-  httpd.handle_request()
-  if httpd.access_token is None:
-    ErrorExit(httpd.error or OAUTH_DEFAULT_ERROR_MESSAGE)
-  return httpd.access_token
-
-
-def GetAccessToken(server=DEFAULT_REVIEW_SERVER, port=DEFAULT_OAUTH2_PORT,
-                   open_local_webbrowser=True):
-  """Gets an Access Token for the current user.
-
-  Args:
-    server: String containing the review server URL. Defaults to
-      DEFAULT_REVIEW_SERVER.
-    port: Integer, the port where the localhost server receiving the redirect
-      is serving. Defaults to DEFAULT_OAUTH2_PORT.
-    open_local_webbrowser: Boolean, defaults to True. If set, opens a page in
-      the user's browser.
-
-  Returns:
-    A string access token that was sent to the local server. If the serving page
-      via WaitForAccessToken does not receive an access token, this method
-      returns None.
-  """
-  access_token = None
-  if open_local_webbrowser:
-    page_opened = OpenOAuth2ConsentPage(server=server, port=port)
-    if page_opened:
-      try:
-        access_token = WaitForAccessToken(port=port)
-      except socket.error, e:
-        print 'Can\'t start local webserver. Socket Error: %s\n' % (e.strerror,)
-
-  if access_token is None:
-    # TODO(dhermes): Offer to add to clipboard using xsel, xclip, pbcopy, etc.
-    page = 'https://%s%s' % (server, OAUTH_PATH)
-    print NO_OPEN_LOCAL_MESSAGE_TEMPLATE % (page,)
-    access_token = raw_input('Enter access token: ').strip()
-
-  return access_token
-
-
 class KeyringCreds(object):
   def __init__(self, server, host, email):
     self.server = server
@@ -891,46 +735,25 @@
     return (email, password)
 
 
-class OAuth2Creds(object):
-  """Simple object to hold server and port to be passed to GetAccessToken."""
-
-  def __init__(self, server, port, open_local_webbrowser=True):
-    self.server = server
-    self.port = port
-    self.open_local_webbrowser = open_local_webbrowser
-
-  def __call__(self):
-    """Uses stored server and port to retrieve OAuth 2.0 access token."""
-    return GetAccessToken(server=self.server, port=self.port,
-                          open_local_webbrowser=self.open_local_webbrowser)
-
-
-def GetRpcServer(server, email=None, host_override=None, save_cookies=True,
-                 account_type=AUTH_ACCOUNT_TYPE, use_oauth2=False,
-                 oauth2_port=DEFAULT_OAUTH2_PORT,
-                 open_oauth2_local_webbrowser=True):
+def GetRpcServer(server, auth_config=None, email=None):
   """Returns an instance of an AbstractRpcServer.
 
   Args:
     server: String containing the review server URL.
-    email: String containing user's email address.
-    host_override: If not None, string containing an alternate hostname to use
-      in the host header.
-    save_cookies: Whether authentication cookies should be saved to disk.
-    account_type: Account type for authentication, either 'GOOGLE'
-      or 'HOSTED'. Defaults to AUTH_ACCOUNT_TYPE.
-    use_oauth2: Boolean indicating whether OAuth 2.0 should be used for
-      authentication.
-    oauth2_port: Integer, the port where the localhost server receiving the
-      redirect is serving. Defaults to DEFAULT_OAUTH2_PORT.
-    open_oauth2_local_webbrowser: Boolean, defaults to True. If True and using
-      OAuth, this opens a page in the user's browser to obtain a token.
+    auth_config: auth.AuthConfig tuple with OAuth2 configuration.
+    email: String containing user's email address [deprecated].
 
   Returns:
     A new HttpRpcServer, on which RPC calls can be made.
   """
+  # If email is given as an empty string or no auth config is passed, then
+  # assume we want to make requests that do not need authentication. Bypass
+  # authentication by setting the auth_function to None.
+  if email == '' or not auth_config:
+    return HttpRpcServer(server, None)
+
   # If this is the dev_appserver, use fake authentication.
-  host = (host_override or server).lower()
+  host = server.lower()
   if re.match(r'(http://)?localhost([:/]|$)', host):
     if email is None:
       email = "test@example.com"
@@ -938,25 +761,38 @@
     server = HttpRpcServer(
         server,
         lambda: (email, "password"),
-        host_override=host_override,
         extra_headers={"Cookie":
                        'dev_appserver_login="%s:False"' % email},
-        save_cookies=save_cookies,
-        account_type=account_type)
+        save_cookies=auth_config.save_cookies,
+        account_type=AUTH_ACCOUNT_TYPE)
     # Don't try to talk to ClientLogin.
     server.authenticated = True
     return server
 
-  positional_args = [server]
-  if use_oauth2:
-    positional_args.append(
-        OAuth2Creds(server, oauth2_port, open_oauth2_local_webbrowser))
+  if auth_config.use_oauth2:
+    auth_func = auth.get_authenticator_for_host(server, auth_config)
   else:
-    positional_args.append(KeyringCreds(server, host, email).GetUserCredentials)
-  return HttpRpcServer(*positional_args,
-                       host_override=host_override,
-                       save_cookies=save_cookies,
-                       account_type=account_type)
+    auth_func = KeyringCreds(server, host, email).GetUserCredentials
+
+  # HACK(crbug.com/476690): Internal Rietveld is configured to require cookie
+  # auth for all paths except /bots/* (requests to /bots/* are authenticated
+  # with OAuth). /bots/* paths expose exact same API as /* (at least enough of
+  # it for depot_tools to work). So when using OAuth with internal Rietveld,
+  # silently prefix all requests with '/bots'.
+  request_path_prefix = ''
+  if auth_config.use_oauth2:
+    if not host.startswith(('http://', 'https://')):
+      host = 'https://' + host
+    parsed = urlparse.urlparse(host)
+    if parsed.netloc.endswith('.googleplex.com'):
+      request_path_prefix = '/bots'
+
+  return HttpRpcServer(
+      server,
+      auth_func,
+      request_path_prefix=request_path_prefix,
+      save_cookies=auth_config.save_cookies,
+      account_type=AUTH_ACCOUNT_TYPE)
 
 
 def EncodeMultipartFormData(fields, files):
@@ -972,7 +808,7 @@
   Source:
     http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
   """
-  BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
+  BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-%s-' % sum(hash(f) for f in files)
   CRLF = '\r\n'
   lines = []
   for (key, value) in fields:
@@ -1009,7 +845,7 @@
 def RunShellWithReturnCodeAndStderr(command, print_output=False,
                            universal_newlines=True,
                            env=os.environ):
-  """Executes a command and returns the output from stdout, stderr and the return code.
+  """Run a command and return output from stdout, stderr and the return code.
 
   Args:
     command: Command to execute.
@@ -1040,7 +876,7 @@
   p.wait()
   errout = p.stderr.read()
   if print_output and errout:
-    print >>sys.stderr, errout
+    print >> sys.stderr, errout
   p.stdout.close()
   p.stderr.close()
   return output, errout, p.returncode
@@ -1048,7 +884,7 @@
 def RunShellWithReturnCode(command, print_output=False,
                            universal_newlines=True,
                            env=os.environ):
-  """Executes a command and returns the output from stdout and the return code."""
+  """Run a command and return output from stdout and the return code."""
   out, err, retcode = RunShellWithReturnCodeAndStderr(command, print_output,
                            universal_newlines, env)
   return out, retcode
@@ -1223,7 +1059,8 @@
     mimetype =  mimetypes.guess_type(filename)[0]
     if not mimetype:
       return False
-    return mimetype.startswith("image/") and not mimetype.startswith("image/svg")
+    return (mimetype.startswith("image/") and
+            not mimetype.startswith("image/svg"))
 
   def IsBinaryData(self, data):
     """Returns true if data contains a null byte."""
@@ -1269,21 +1106,21 @@
     """
     url = self._GetInfo("URL")
     if url:
-        scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
-        guess = ""
-        # TODO(anatoli) - repository specific hacks should be handled by server
-        if netloc == "svn.python.org" and scheme == "svn+ssh":
-          path = "projects" + path
-          scheme = "http"
-          guess = "Python "
-        elif netloc.endswith(".googlecode.com"):
-          scheme = "http"
-          guess = "Google Code "
-        path = path + "/"
-        base = urlparse.urlunparse((scheme, netloc, path, params,
-                                    query, fragment))
-        LOGGER.info("Guessed %sbase = %s", guess, base)
-        return base
+      scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
+      guess = ""
+      # TODO(anatoli) - repository specific hacks should be handled by server
+      if netloc == "svn.python.org" and scheme == "svn+ssh":
+        path = "projects" + path
+        scheme = "http"
+        guess = "Python "
+      elif netloc.endswith(".googlecode.com"):
+        scheme = "http"
+        guess = "Google Code "
+      path = path + "/"
+      base = urlparse.urlunparse((scheme, netloc, path, params,
+                                  query, fragment))
+      LOGGER.info("Guessed %sbase = %s", guess, base)
+      return base
     if required:
       ErrorExit("Can't find URL in output from svn info")
     return None
@@ -1338,9 +1175,10 @@
     }
 
     def repl(m):
-       if m.group(2):
-         return "$%s::%s$" % (m.group(1), " " * len(m.group(3)))
-       return "$%s$" % m.group(1)
+      if m.group(2):
+        return "$%s::%s$" % (m.group(1), " " * len(m.group(3)))
+      return "$%s$" % m.group(1)
+
     keywords = [keyword
                 for name in keyword_str.split(" ")
                 for keyword in svn_keywords.get(name, [])]
@@ -1393,7 +1231,8 @@
         if returncode:
           # Directory might not yet exist at start revison
           # svn: Unable to find repository location for 'abc' in revision nnn
-          if re.match('^svn: Unable to find repository location for .+ in revision \d+', err):
+          if re.match('^svn: Unable to find repository location '
+                      'for .+ in revision \d+', err):
             old_files = ()
           else:
             ErrorExit("Failed to get status for %s:\n%s" % (filename, err))
@@ -1615,16 +1454,18 @@
     # append a diff (with rename detection), without deletes.
     cmd = [
         "git", "diff", "--no-color", "--no-ext-diff", "--full-index",
-        "--ignore-submodules",
+        "--ignore-submodules", "--src-prefix=a/", "--dst-prefix=b/",
     ]
     diff = RunShell(
         cmd + ["--no-renames", "--diff-filter=D"] + extra_args,
         env=env, silent_ok=True)
+    assert 0 <= self.options.git_similarity <= 100
     if self.options.git_find_copies:
-      similarity_options = ["--find-copies-harder", "-l100000",
-                            "-C%s" % self.options.git_similarity ]
+      similarity_options = ["-l100000", "-C%d%%" % self.options.git_similarity]
+      if self.options.git_find_copies_harder:
+        similarity_options.append("--find-copies-harder")
     else:
-      similarity_options = ["-M%s" % self.options.git_similarity ]
+      similarity_options = ["-M%d%%" % self.options.git_similarity ]
     diff += RunShell(
         cmd + ["--diff-filter=AMCRT"] + similarity_options + extra_args,
         env=env, silent_ok=True)
@@ -2085,7 +1926,7 @@
       line_count = len(diffData.file_body.splitlines())
       diffData.change_summary = "@@ -0,0 +1"
       if line_count > 1:
-          diffData.change_summary += ",%d" % line_count
+        diffData.change_summary += ",%d" % line_count
       diffData.change_summary += " @@"
       diffData.prefix = "+"
       return diffData
@@ -2564,16 +2405,9 @@
   files = vcs.GetBaseFiles(data)
   if verbosity >= 1:
     print "Upload server:", options.server, "(change with -s/--server)"
-  if options.use_oauth2:
-    options.save_cookies = False
-  rpc_server = GetRpcServer(options.server,
-                            options.email,
-                            options.host,
-                            options.save_cookies,
-                            options.account_type,
-                            options.use_oauth2,
-                            options.oauth2_port,
-                            options.open_oauth2_local_webbrowser)
+
+  auth_config = auth.extract_auth_config_from_options(options)
+  rpc_server = GetRpcServer(options.server, auth_config, options.email)
   form_fields = []
 
   repo_guid = vcs.GetGUID()
@@ -2601,6 +2435,13 @@
     form_fields.append(("cc", options.cc))
   if options.project:
     form_fields.append(("project", options.project))
+  if options.target_ref:
+    form_fields.append(("target_ref", options.target_ref))
+  if options.cq_dry_run:
+    form_fields.append(("cq_dry_run", "1"))
+    form_fields.append(("commit", "1"))
+  if options.depends_on_patchset:
+    form_fields.append(("depends_on_patchset", options.depends_on_patchset))
 
   # Process --message, --title and --file.
   message = options.message or ""
@@ -2708,6 +2549,9 @@
     print
     StatusUpdate("Interrupted.")
     sys.exit(1)
+  except auth.AuthenticationError as e:
+    print >> sys.stderr, e
+    sys.exit(1)
 
 
 if __name__ == "__main__":
diff --git a/trychange.py b/trychange.py
index 719275a..03e59b3 100755
--- a/trychange.py
+++ b/trychange.py
@@ -727,6 +727,7 @@
   Gerrit message format: starts with !tryjob, optionally followed by a tryjob
   definition in JSON format:
       buildNames: list of strings specifying build names.
+      build_properties: a dict of build properties.
   """
 
   logging.info('Sending by Gerrit')
@@ -752,9 +753,14 @@
   def FormatMessage():
     # Build job definition.
     job_def = {}
+    build_properties = {}
+    if options.testfilter:
+      build_properties['testfilter'] = options.testfilter
     builderNames = [builder for builder, _ in bot_spec]
     if builderNames:
       job_def['builderNames'] = builderNames
+    if build_properties:
+      job_def['build_properties'] = build_properties
 
     # Format message.
     msg = '!tryjob'
diff --git a/update_depot_tools b/update_depot_tools
index 72e1b99..81d9979 100755
--- a/update_depot_tools
+++ b/update_depot_tools
@@ -16,9 +16,13 @@
 OUTPUT="$(uname | grep 'MINGW')"
 MINGW=$?
 
-base_dir=$(dirname "$0")
-if [ -L "$base_dir" ]; then
-  base_dir=`cd "$base_dir" && pwd -P`
+if [ $MINGW = 0 ]; then
+  base_dir="${0%/*}"
+else
+  base_dir=$(dirname "$0")
+  if [ -L "$base_dir" ]; then
+    base_dir=`cd "$base_dir" && pwd -P`
+  fi
 fi
 
 # Don't try to use Cygwin tools.  Get real win32 tools using the batch script.
@@ -140,11 +144,20 @@
 then
   # Update the root directory to stay up-to-date with the latest depot_tools.
   BEFORE_REVISION=$(get_svn_revision)
+  if echo $* | grep -e --force > /dev/null; then
+    "$SVN" -q revert -R "$base_dir"
+  fi
   "$SVN" -q up "$base_dir"
   AFTER_REVISION=$(get_svn_revision)
   if [[ "$BEFORE_REVISION" != "$AFTER_REVISION" ]]; then
-    echo "Depot Tools has been updated to revision $AFTER_REVISION." 1>&2
+    if [ -z "$DEPOT_TOOLS_HIDE_UPDATED_MESSAGE" ]; then
+      echo "Depot Tools has been updated to revision $AFTER_REVISION." 1>&2
+    fi
   fi
 fi
 
 find "$base_dir" -iname "*.pyc" -exec rm {} \;
+
+# Initialize/update virtualenv.
+cd $base_dir
+python -u ./bootstrap/bootstrap.py --deps_file bootstrap/deps.pyl --quiet ENV
diff --git a/update_depot_tools.bat b/update_depot_tools.bat
index 36b8df8..9b2439c 100644
--- a/update_depot_tools.bat
+++ b/update_depot_tools.bat
@@ -27,6 +27,11 @@
 :: Now clear errorlevel so it can be set by other programs later.

 set errorlevel=

 

+:: Initialize/update virtualenv.

+cd /d "%DEPOT_TOOLS_DIR%."

+call python.bat -u bootstrap\bootstrap.py --deps_file bootstrap\deps.pyl --quiet ENV

+if errorlevel 1 goto :EOF

+

 :: Shall skip automatic update?

 IF "%DEPOT_TOOLS_UPDATE%" == "0" GOTO :EOF

 

@@ -38,7 +43,12 @@
 

 

 :SVN_UPDATE

-call svn up -q "%DEPOT_TOOLS_DIR%."

+FOR %%A IN (%*) DO (

+  IF "%%A" == "--force" (

+    call svn -q revert -R "%DEPOT_TOOLS_DIR%."

+  )

+)

+call svn -q up "%DEPOT_TOOLS_DIR%."

 goto :EOF

 

 

@@ -58,9 +68,9 @@
 call git fetch -q origin > NUL

 call git rebase -q origin/master > NUL

 if errorlevel 1 echo Failed to update depot_tools.

-

 goto :EOF

 

+

 :GIT_SVN_UPDATE

 cd /d "%DEPOT_TOOLS_DIR%."

 call git svn rebase -q -q

diff --git a/upload_to_google_storage.py b/upload_to_google_storage.py
index 35bc039..4cf9d1a 100755
--- a/upload_to_google_storage.py
+++ b/upload_to_google_storage.py
@@ -15,14 +15,10 @@
 import threading
 import time
 
-from download_from_google_storage import check_bucket_permissions
 from download_from_google_storage import get_sha1
 from download_from_google_storage import Gsutil
 from download_from_google_storage import printer_worker
-
-GSUTIL_DEFAULT_PATH = os.path.join(
-    os.path.dirname(os.path.abspath(__file__)),
-    'third_party', 'gsutil', 'gsutil')
+from download_from_google_storage import GSUTIL_DEFAULT_PATH
 
 USAGE_STRING = """%prog [options] target [target2 ...].
 Target is the file intended to be uploaded to Google Storage.
@@ -71,7 +67,7 @@
 
 def _upload_worker(
     thread_num, upload_queue, base_url, gsutil, md5_lock, force,
-    use_md5, stdout_queue, ret_codes):
+    use_md5, stdout_queue, ret_codes, gzip):
   while True:
     filename, sha1_sum = upload_queue.get()
     if not filename:
@@ -96,7 +92,11 @@
           continue
     stdout_queue.put('%d> Uploading %s...' % (
         thread_num, filename))
-    code, _, err = gsutil.check_call('cp', '-q', filename, file_url)
+    gsutil_args = ['cp']
+    if gzip:
+      gsutil_args.extend(['-z', gzip])
+    gsutil_args.extend([filename, file_url])
+    code, _, err = gsutil.check_call(*gsutil_args)
     if code != 0:
       ret_codes.put(
           (code,
@@ -133,7 +133,7 @@
 
 def upload_to_google_storage(
     input_filenames, base_url, gsutil, force,
-    use_md5, num_threads, skip_hashing):
+    use_md5, num_threads, skip_hashing, gzip):
   # We only want one MD5 calculation happening at a time to avoid HD thrashing.
   md5_lock = threading.Lock()
 
@@ -151,7 +151,7 @@
     t = threading.Thread(
         target=_upload_worker,
         args=[thread_num, upload_queue, base_url, gsutil, md5_lock,
-              force, use_md5, stdout_queue, ret_codes])
+              force, use_md5, stdout_queue, ret_codes, gzip])
     t.daemon = True
     t.start()
     all_threads.append(t)
@@ -207,7 +207,7 @@
   return max_ret_code
 
 
-def main(args):
+def main():
   parser = optparse.OptionParser(USAGE_STRING)
   parser.add_option('-b', '--bucket',
                     help='Google Storage bucket to upload to.')
@@ -227,6 +227,9 @@
                     help='Use \\0 instead of \\n when parsing '
                     'the file list from stdin.  This is useful if the input '
                     'is coming from "find ... -print0".')
+  parser.add_option('-z', '--gzip', metavar='ext',
+                    help='Gzip files which end in ext. '
+                         'ext is a comma-separated list')
   (options, args) = parser.parse_args()
 
   # Enumerate our inputs.
@@ -234,8 +237,7 @@
 
   # Make sure we can find a working instance of gsutil.
   if os.path.exists(GSUTIL_DEFAULT_PATH):
-    gsutil = Gsutil(GSUTIL_DEFAULT_PATH, boto_path=options.boto,
-                    bypass_prodaccess=True)
+    gsutil = Gsutil(GSUTIL_DEFAULT_PATH, boto_path=options.boto)
   else:
     gsutil = None
     for path in os.environ["PATH"].split(os.pathsep):
@@ -245,15 +247,16 @@
       parser.error('gsutil not found in %s, bad depot_tools checkout?' %
                    GSUTIL_DEFAULT_PATH)
 
-  # Check we have a valid bucket with valid permissions.
-  base_url, code = check_bucket_permissions(options.bucket, gsutil)
-  if code:
-    return code
+  base_url = 'gs://%s' % options.bucket
 
   return upload_to_google_storage(
       input_filenames, base_url, gsutil, options.force, options.use_md5,
-      options.num_threads, options.skip_hashing)
+      options.num_threads, options.skip_hashing, options.gzip)
 
 
 if __name__ == '__main__':
-  sys.exit(main(sys.argv))
+  try:
+    sys.exit(main())
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/win_toolchain/OWNERS b/win_toolchain/OWNERS
new file mode 100644
index 0000000..3f809e8
--- /dev/null
+++ b/win_toolchain/OWNERS
@@ -0,0 +1 @@
+scottmg@chromium.org
diff --git a/win_toolchain/README.md b/win_toolchain/README.md
new file mode 100644
index 0000000..5d21262
--- /dev/null
+++ b/win_toolchain/README.md
@@ -0,0 +1,74 @@
+Automatic updates of the Windows toolchain
+==========================================
+
+## On the consumer side, e.g. in Chromium src.git:
+
+- `vs_toolchain.py update` is called early during `DEPS`. `Update()` asks
+  depot\_tools to put in place a particular version of the toolchain (whatever
+  src will currently build with). src provides an output .json file, where
+  `Update()` saves relevant information about the toolchain, the paths, version
+  numbers, etc.
+- Later in `DEPS`, `build/gyp_chromium` uses
+  `vs_toolchain:SetEnvironmentAndGetRuntimeDllDirs()`, which loads the .json
+  file, and uses it to set a few `GYP_` variables and update the `PATH` to
+  include CRT runtime directories (see below).
+- Then, `gyp_chromium` runs gyp generation.
+- Finally, it uses `vs_toolchain` again to copy runtime dlls to the output
+  directories.
+
+The reason the logic was split between `depot_tools` and `src` was because at
+some point, the bots had insufficient hard drive space and if there were > 1
+build directories (say, if a build machine handled the Release and Debug builds
+for a given configuration) then the duplication of the toolchain in both trees
+would cause the bot to run out of disk space.
+
+## On the depot\_tools side:
+
+`get_toolchain_if_necessary.py` takes an output .json file (per above) and an
+input SHA1. It tries to confirm that the user is probably a Google employee (or
+a bot) to encourage them to use the automatic toolchain rather than using a
+system installed one. It then uses gsutil to download the zip corresponding to
+the hash. This requires authentication with @google.com credentials, so it walks
+the user through that process if necessary.
+
+(Previously in the VS2010 and early VS2013 timeframe, we also supported building
+with Express editions of VS. Along with `toolchain2013.py` this script dealt
+with all the complexity of acquiring the Express ISO, SDK bits, patches, etc.
+and applying them all in the correct sequence. However, Express no longer works,
+and Community is not too hard to install properly, so we just let the user do
+that. The primary benefit of having an automatically updated toolchain is that
+it works for bots, allows changes to the toolchain to be tryjob'd, reduces
+Infra/Labs work, and ensures that devs match bots.)
+
+For the above convoluted reason `get_toolchain_if_necessary` uses
+`toolchain2013.py` to extract the zip file, but the majority of the code in
+there is no longer used and what remains should be inlined into
+`get_toolchain_if_necessary` in the future.
+
+When the zip file is extracted, the mtimes of all the files, and the sha1 of the
+entire tree are saved to a local file. This allows future updates to compare
+whether the bits of the toolchain currently on disk are different than expected
+(the passed-in SHA1), and if so, replace it with a toolchain with the correct
+SHA1. This is probably a bit more complicated than necessary, and again dates
+back to when the toolchain was assembled from many pieces. It could probably
+just write a stamp file with the SHA1, or just a version number, and trust that
+on future runs.
+
+Finally, it copies the json file to the location that the caller requested (the
+json file is generated during the unzip/acquire process in `toolchain2013.py`).
+
+## Building a `<sha1>`.zip
+
+Ignoring the `toolchain2013.py` steps to acquire a toolchain automatically from
+bits for Express, the procedure is roughly:
+- Get a clean Windows VM,
+- Install Visual Studio 2013 with updates as you want it,
+- Install Windows 8.1 SDK,
+- Run `package_from_installed.py`,
+- Upload the resulting zip file to the chrome-wintoolchain GS bucket.
+
+That script first builds a zip file of the required pieces, including generating
+a batch file corresponding to `SetEnv.cmd` or `vcvarsall.bat`. It then extracts
+that zip to a temporary location and calculates the SHA1 in the same way that
+the `depot_tools` update procedure would do, so that it knows what to rename the
+zip file to.
diff --git a/win_toolchain/get_toolchain_if_necessary.py b/win_toolchain/get_toolchain_if_necessary.py
index 522a697..0a589ec 100755
--- a/win_toolchain/get_toolchain_if_necessary.py
+++ b/win_toolchain/get_toolchain_if_necessary.py
@@ -33,29 +33,20 @@
 import shutil
 import subprocess
 import sys
+import tempfile
 import time
+import zipfile
 
 
 BASEDIR = os.path.dirname(os.path.abspath(__file__))
 DEPOT_TOOLS_PATH = os.path.join(BASEDIR, '..')
 sys.path.append(DEPOT_TOOLS_PATH)
-import download_from_google_storage
-
-if sys.platform != 'cygwin':
-  import ctypes.wintypes
-  GetFileAttributes = ctypes.windll.kernel32.GetFileAttributesW
-  GetFileAttributes.argtypes = (ctypes.wintypes.LPWSTR,)
-  GetFileAttributes.restype = ctypes.wintypes.DWORD
-  FILE_ATTRIBUTE_HIDDEN = 0x2
-  FILE_ATTRIBUTE_SYSTEM = 0x4
-
-
-def IsHidden(file_path):
-  """Returns whether the given |file_path| has the 'system' or 'hidden'
-  attribute set."""
-  p = GetFileAttributes(file_path)
-  assert p != 0xffffffff
-  return bool(p & (FILE_ATTRIBUTE_HIDDEN | FILE_ATTRIBUTE_SYSTEM))
+try:
+  import download_from_google_storage
+except ImportError:
+  # Allow use of utility functions in this script from package_from_installed
+  # on bare VM that doesn't have a full depot_tools.
+  pass
 
 
 def GetFileList(root):
@@ -65,7 +56,7 @@
   file_list = []
   for base, _, files in os.walk(root):
     paths = [os.path.join(base, f) for f in files]
-    file_list.extend(x.lower() for x in paths if not IsHidden(x))
+    file_list.extend(x.lower() for x in paths)
   return sorted(file_list)
 
 
@@ -137,7 +128,7 @@
 def LooksLikeGoogler():
   """Checks for a USERDOMAIN environment variable of 'GOOGLE', which
   probably implies the current user is a Googler."""
-  return os.environ.get('USERDOMAIN').upper() == 'GOOGLE'
+  return os.environ.get('USERDOMAIN', '').upper() == 'GOOGLE'
 
 
 def CanAccessToolchainBucket():
@@ -166,7 +157,7 @@
   print 'and follow the instructions.'
   print
   print 'NOTE 1: Use your google.com credentials, not chromium.org.'
-  print 'NOTE 2: Just press Enter when asked for a "project-id".'
+  print 'NOTE 2: Enter 0 when asked for a "project-id".'
   print
   print '-----------------------------------------------------------------'
   print
@@ -186,6 +177,41 @@
     print
 
 
+def DownloadUsingGsutil(filename):
+  """Downloads the given file from Google Storage chrome-wintoolchain bucket."""
+  temp_dir = tempfile.mkdtemp()
+  assert os.path.basename(filename) == filename
+  target_path = os.path.join(temp_dir, filename)
+  gsutil = download_from_google_storage.Gsutil(
+      download_from_google_storage.GSUTIL_DEFAULT_PATH, boto_path=None)
+  code = gsutil.call('cp', 'gs://chrome-wintoolchain/' + filename, target_path)
+  if code != 0:
+    sys.exit('gsutil failed')
+  return temp_dir, target_path
+
+
+def RmDir(path):
+  """Deletes path and all the files it contains."""
+  if sys.platform != 'win32':
+    shutil.rmtree(path, ignore_errors=True)
+  else:
+    # shutil.rmtree() doesn't delete read-only files on Windows.
+    subprocess.check_call('rmdir /s/q "%s"' % path, shell=True)
+
+
+def DoTreeMirror(target_dir, tree_sha1):
+  """In order to save temporary space on bots that do not have enough space to
+  download ISOs, unpack them, and copy to the target location, the whole tree
+  is uploaded as a zip to internal storage, and then mirrored here."""
+  temp_dir, local_zip = DownloadUsingGsutil(tree_sha1 + '.zip')
+  sys.stdout.write('Extracting %s...\n' % local_zip)
+  sys.stdout.flush()
+  with zipfile.ZipFile(local_zip, 'r', zipfile.ZIP_DEFLATED, True) as zf:
+    zf.extractall(target_dir)
+  if temp_dir:
+    RmDir(temp_dir)
+
+
 def main():
   if not sys.platform.startswith(('cygwin', 'win32')):
     return 0
@@ -205,6 +231,7 @@
       cmd.extend(['--output-json', winpath(options.output_json)])
     cmd.extend(args)
     sys.exit(subprocess.call(cmd))
+  assert sys.platform != 'cygwin'
 
   # We assume that the Pro hash is the first one.
   desired_hashes = args
@@ -215,7 +242,13 @@
   # the downloader script is.
   os.chdir(os.path.normpath(os.path.join(BASEDIR)))
   toolchain_dir = '.'
-  target_dir = os.path.normpath(os.path.join(toolchain_dir, 'vs2013_files'))
+  if os.environ.get('GYP_MSVS_VERSION') == '2015':
+    target_dir = os.path.normpath(os.path.join(toolchain_dir, 'vs_files'))
+  else:
+    target_dir = os.path.normpath(os.path.join(toolchain_dir, 'vs2013_files'))
+  abs_target_dir = os.path.abspath(target_dir)
+
+  got_new_toolchain = False
 
   # If the current hash doesn't match what we want in the file, nuke and pave.
   # Typically this script is only run when the .sha1 one file is updated, but
@@ -224,33 +257,60 @@
   current_hash = CalculateHash(target_dir)
   if current_hash not in desired_hashes:
     should_use_gs = False
-    if (HaveSrcInternalAccess() or 
-        LooksLikeGoogler() or 
+    if (HaveSrcInternalAccess() or
+        LooksLikeGoogler() or
         CanAccessToolchainBucket()):
       should_use_gs = True
       if not CanAccessToolchainBucket():
         RequestGsAuthentication()
-    print('Windows toolchain out of date or doesn\'t exist, updating (%s)...' %
-          ('Pro' if should_use_gs else 'Express'))
+    if not should_use_gs:
+      print('Please follow the instructions at '
+            'http://www.chromium.org/developers/how-tos/'
+            'build-instructions-windows')
+      return 1
+    print('Windows toolchain out of date or doesn\'t exist, updating (Pro)...')
     print('  current_hash: %s' % current_hash)
     print('  desired_hashes: %s' % ', '.join(desired_hashes))
     sys.stdout.flush()
     DelayBeforeRemoving(target_dir)
-    # This stays resident and will make the rmdir below fail.
-    with open(os.devnull, 'wb') as nul:
-      subprocess.call(['taskkill', '/f', '/im', 'mspdbsrv.exe'],
-                      stdin=nul, stdout=nul, stderr=nul)
+    if sys.platform == 'win32':
+      # This stays resident and will make the rmdir below fail.
+      with open(os.devnull, 'wb') as nul:
+        subprocess.call(['taskkill', '/f', '/im', 'mspdbsrv.exe'],
+                        stdin=nul, stdout=nul, stderr=nul)
     if os.path.isdir(target_dir):
-      subprocess.check_call('rmdir /s/q "%s"' % target_dir, shell=True)
-    args = [sys.executable,
-            'toolchain2013.py',
-            '--targetdir', target_dir,
-            '--sha1', desired_hashes[0]]
-    if should_use_gs:
-      args.append('--use-gs')
-    else:
-      args.append('--express')
-    subprocess.check_call(args)
+      RmDir(target_dir)
+
+    DoTreeMirror(target_dir, desired_hashes[0])
+
+    got_new_toolchain = True
+
+  win_sdk = os.path.join(abs_target_dir, 'win_sdk')
+  try:
+    with open(os.path.join(target_dir, 'VS_VERSION'), 'rb') as f:
+      vs_version = f.read().strip()
+  except IOError:
+    # Older toolchains didn't have the VS_VERSION file, and used 'win8sdk'
+    # instead of just 'win_sdk'.
+    vs_version = '2013'
+    win_sdk = os.path.join(abs_target_dir, 'win8sdk')
+
+  data = {
+      'path': abs_target_dir,
+      'version': vs_version,
+      'win_sdk': win_sdk,
+      # Added for backwards compatibility with old toolchain packages.
+      'win8sdk': win_sdk,
+      'wdk': os.path.join(abs_target_dir, 'wdk'),
+      'runtime_dirs': [
+        os.path.join(abs_target_dir, 'sys64'),
+        os.path.join(abs_target_dir, 'sys32'),
+      ],
+  }
+  with open(os.path.join(target_dir, '..', 'data.json'), 'w') as f:
+    json.dump(data, f)
+
+  if got_new_toolchain:
     current_hash = CalculateHash(target_dir)
     if current_hash not in desired_hashes:
       print >> sys.stderr, (
diff --git a/win_toolchain/package_from_installed.py b/win_toolchain/package_from_installed.py
index 0c4ee2a..7f065b6 100644
--- a/win_toolchain/package_from_installed.py
+++ b/win_toolchain/package_from_installed.py
@@ -8,7 +8,7 @@
 
 It assumes default install locations for tools, in particular:
 - C:\Program Files (x86)\Microsoft Visual Studio 12.0\...
-- C:\Program Files (x86)\Windows Kits\8.0\...
+- C:\Program Files (x86)\Windows Kits\8.1\...
 
 1. Start from a fresh Win7 VM image.
 2. Install VS Pro. Deselect everything except MFC.
@@ -28,15 +28,16 @@
 import zipfile
 
 import get_toolchain_if_necessary
-import toolchain2013  # pylint: disable=F0401
+
+
+VS_VERSION = None
 
 
 def BuildFileList():
   result = []
 
   # Subset of VS corresponding roughly to VC.
-  vs_path = r'C:\Program Files (x86)\Microsoft Visual Studio 12.0'
-  for path in [ 
+  paths = [
       'DIA SDK/bin',
       'DIA SDK/idl',
       'DIA SDK/include',
@@ -47,15 +48,39 @@
       'VC/include',
       'VC/lib',
       'VC/redist',
-      ('VC/redist/x86/Microsoft.VC120.CRT', 'sys32'),
-      ('VC/redist/x86/Microsoft.VC120.MFC', 'sys32'),
-      ('VC/redist/Debug_NonRedist/x86/Microsoft.VC120.DebugCRT', 'sys32'),
-      ('VC/redist/Debug_NonRedist/x86/Microsoft.VC120.DebugMFC', 'sys32'),
-      ('VC/redist/x64/Microsoft.VC120.CRT', 'sys64'),
-      ('VC/redist/x64/Microsoft.VC120.MFC', 'sys64'),
-      ('VC/redist/Debug_NonRedist/x64/Microsoft.VC120.DebugCRT', 'sys64'),
-      ('VC/redist/Debug_NonRedist/x64/Microsoft.VC120.DebugMFC', 'sys64'),
-      ]:
+  ]
+
+  if VS_VERSION == '2013':
+    paths += [
+        ('VC/redist/x86/Microsoft.VC120.CRT', 'sys32'),
+        ('VC/redist/x86/Microsoft.VC120.MFC', 'sys32'),
+        ('VC/redist/Debug_NonRedist/x86/Microsoft.VC120.DebugCRT', 'sys32'),
+        ('VC/redist/Debug_NonRedist/x86/Microsoft.VC120.DebugMFC', 'sys32'),
+        ('VC/redist/x64/Microsoft.VC120.CRT', 'sys64'),
+        ('VC/redist/x64/Microsoft.VC120.MFC', 'sys64'),
+        ('VC/redist/Debug_NonRedist/x64/Microsoft.VC120.DebugCRT', 'sys64'),
+        ('VC/redist/Debug_NonRedist/x64/Microsoft.VC120.DebugMFC', 'sys64'),
+    ]
+  elif VS_VERSION == '2015':
+    paths += [
+        ('VC/redist/x86/Microsoft.VC140.CRT', 'sys32'),
+        ('VC/redist/x86/Microsoft.VC140.MFC', 'sys32'),
+        ('VC/redist/debug_nonredist/x86/Microsoft.VC140.DebugCRT', 'sys32'),
+        ('VC/redist/debug_nonredist/x86/Microsoft.VC140.DebugMFC', 'sys32'),
+        ('VC/redist/x64/Microsoft.VC140.CRT', 'sys64'),
+        ('VC/redist/x64/Microsoft.VC140.MFC', 'sys64'),
+        ('VC/redist/debug_nonredist/x64/Microsoft.VC140.DebugCRT', 'sys64'),
+        ('VC/redist/debug_nonredist/x64/Microsoft.VC140.DebugMFC', 'sys64'),
+    ]
+  else:
+    raise ValueError('VS_VERSION %s' % VS_VERSION)
+
+  if VS_VERSION == '2013':
+    vs_path = r'C:\Program Files (x86)\Microsoft Visual Studio 12.0'
+  else:
+    vs_path = r'C:\Program Files (x86)\Microsoft Visual Studio 14.0'
+
+  for path in paths:
     src = path[0] if isinstance(path, tuple) else path
     combined = os.path.join(vs_path, src)
     assert os.path.exists(combined) and os.path.isdir(combined)
@@ -68,8 +93,8 @@
         else:
           assert final_from.startswith(vs_path)
           dest = final_from[len(vs_path) + 1:]
-          if dest.lower().endswith('\\xtree'):
-            # Patch for C4702 in xtree. http://crbug.com/346399.
+          if VS_VERSION == '2013' and dest.lower().endswith('\\xtree'):
+            # Patch for C4702 in xtree on VS2013. http://crbug.com/346399.
             (handle, patched) = tempfile.mkstemp()
             with open(final_from, 'rb') as unpatched_f:
               unpatched_contents = unpatched_f.read()
@@ -81,25 +106,117 @@
             result.append((final_from, dest))
 
   # Just copy the whole SDK.
-  sdk_path = r'C:\Program Files (x86)\Windows Kits\8.0'
+  sdk_path = r'C:\Program Files (x86)\Windows Kits\8.1'
   for root, _, files in os.walk(sdk_path):
     for f in files:
       combined = os.path.normpath(os.path.join(root, f))
-      to = os.path.join('win8sdk', combined[len(sdk_path) + 1:])
+      to = os.path.join('win_sdk', combined[len(sdk_path) + 1:])
       result.append((combined, to))
 
+  if VS_VERSION == '2015':
+    for ucrt_path in (
+        (r'C:\Program Files (x86)\Windows Kits\10\Include', 'Include'),
+        (r'C:\Program Files (x86)\Windows Kits\10\Lib', 'Lib'),
+        (r'C:\Program Files (x86)\Windows Kits\10\Source', 'Source')):
+      src, target = ucrt_path
+      for root, _, files in os.walk(src):
+        for f in files:
+          combined = os.path.normpath(os.path.join(root, f))
+          to = os.path.join('ucrt', target, combined[len(src) + 1:])
+          result.append((combined, to))
+
+    system_crt_files = [
+        'api-ms-win-core-file-l1-2-0.dll',
+        'api-ms-win-core-file-l2-1-0.dll',
+        'api-ms-win-core-localization-l1-2-0.dll',
+        'api-ms-win-core-processthreads-l1-1-1.dll',
+        'api-ms-win-core-synch-l1-2-0.dll',
+        'api-ms-win-core-timezone-l1-1-0.dll',
+        'api-ms-win-core-xstate-l2-1-0.dll',
+        'api-ms-win-crt-conio-l1-1-0.dll',
+        'api-ms-win-crt-convert-l1-1-0.dll',
+        'api-ms-win-crt-environment-l1-1-0.dll',
+        'api-ms-win-crt-filesystem-l1-1-0.dll',
+        'api-ms-win-crt-heap-l1-1-0.dll',
+        'api-ms-win-crt-locale-l1-1-0.dll',
+        'api-ms-win-crt-math-l1-1-0.dll',
+        'api-ms-win-crt-multibyte-l1-1-0.dll',
+        'api-ms-win-crt-private-l1-1-0.dll',
+        'api-ms-win-crt-process-l1-1-0.dll',
+        'api-ms-win-crt-runtime-l1-1-0.dll',
+        'api-ms-win-crt-stdio-l1-1-0.dll',
+        'api-ms-win-crt-string-l1-1-0.dll',
+        'api-ms-win-crt-time-l1-1-0.dll',
+        'api-ms-win-crt-utility-l1-1-0.dll',
+        'api-ms-win-eventing-provider-l1-1-0.dll',
+        'ucrtbase.dll',
+        'ucrtbased.dll',
+    ]
+    for system_crt_file in system_crt_files:
+        result.append((os.path.join(r'C:\Windows\SysWOW64', system_crt_file),
+                       os.path.join('sys32', system_crt_file)))
+        result.append((os.path.join(r'C:\Windows\Sysnative', system_crt_file),
+                       os.path.join('sys64', system_crt_file)))
+
   # Generically drop all arm stuff that we don't need.
-  return [(f, t) for f, t in result if 'arm\\' not in f.lower()]
+  return [(f, t) for f, t in result if 'arm\\' not in f.lower() and
+                                       'arm64\\' not in f.lower()]
+
+
+def GenerateSetEnvCmd(target_dir):
+  """Generate a batch file that gyp expects to exist to set up the compiler
+  environment.
+
+  This is normally generated by a full install of the SDK, but we
+  do it here manually since we do not do a full install."""
+  with open(os.path.join(
+        target_dir, r'win_sdk\bin\SetEnv.cmd'), 'w') as f:
+    f.write('@echo off\n'
+            ':: Generated by win_toolchain\\package_from_installed.py.\n'
+            # Common to x86 and x64
+            'set PATH=%~dp0..\\..\\Common7\\IDE;%PATH%\n'
+            'set INCLUDE=%~dp0..\\..\\win_sdk\\Include\\um;'
+               '%~dp0..\\..\\win_sdk\\Include\\shared;'
+               '%~dp0..\\..\\win_sdk\\Include\\winrt;'
+               '%~dp0..\\..\\ucrt\\Include\\10.0.10056.0\\ucrt;'
+               '%~dp0..\\..\\VC\\include;'
+               '%~dp0..\\..\\VC\\atlmfc\\include\n'
+            'if "%1"=="/x64" goto x64\n')
+
+    # x86. Always use amd64_x86 cross, not x86 on x86.
+    f.write('set PATH=%~dp0..\\..\\win_sdk\\bin\\x86;'
+              '%~dp0..\\..\\VC\\bin\\amd64_x86;'
+              '%~dp0..\\..\\VC\\bin\\amd64;'  # Needed for mspdb1x0.dll.
+              '%PATH%\n')
+    f.write('set LIB=%~dp0..\\..\\VC\\lib;'
+               '%~dp0..\\..\\win_sdk\\Lib\\winv6.3\\um\\x86;'
+               '%~dp0..\\..\\ucrt\\Lib\\10.0.10056.0\\ucrt\\x86;'
+               '%~dp0..\\..\\VC\\atlmfc\\lib\n'
+            'goto :EOF\n')
+
+    # x64.
+    f.write(':x64\n'
+            'set PATH=%~dp0..\\..\\win_sdk\\bin\\x64;'
+                '%~dp0..\\..\\VC\\bin\\amd64;'
+                '%PATH%\n')
+    f.write('set LIB=%~dp0..\\..\\VC\\lib\\amd64;'
+               '%~dp0..\\..\\win_sdk\\Lib\\winv6.3\\um\\x64;'
+               '%~dp0..\\..\\ucrt\\Lib\\10.0.10056.0\\ucrt\\x64;'
+               '%~dp0..\\..\\VC\\atlmfc\\lib\\amd64\n')
 
 
 def AddEnvSetup(files):
   """We need to generate this file in the same way that the "from pieces"
   script does, so pull that in here."""
   tempdir = tempfile.mkdtemp()
-  os.makedirs(os.path.join(tempdir, 'win8sdk', 'bin'))
-  toolchain2013.GenerateSetEnvCmd(tempdir, True)
-  files.append((os.path.join(tempdir, 'win8sdk', 'bin', 'SetEnv.cmd'),
-                'win8sdk\\bin\\SetEnv.cmd'))
+  os.makedirs(os.path.join(tempdir, 'win_sdk', 'bin'))
+  GenerateSetEnvCmd(tempdir)
+  files.append((os.path.join(tempdir, 'win_sdk', 'bin', 'SetEnv.cmd'),
+                'win_sdk\\bin\\SetEnv.cmd'))
+  vs_version_file = os.path.join(tempdir, 'VS_VERSION')
+  with open(vs_version_file, 'wb') as version:
+    print >>version, VS_VERSION
+  files.append((vs_version_file, 'VS_VERSION'))
 
 
 def RenameToSha1(output):
@@ -109,7 +226,7 @@
   tempdir = tempfile.mkdtemp()
   old_dir = os.getcwd()
   os.chdir(tempdir)
-  rel_dir = 'vs2013_files'
+  rel_dir = 'vs_files'
   with zipfile.ZipFile(
       os.path.join(old_dir, output), 'r', zipfile.ZIP_DEFLATED, True) as zf:
     zf.extractall(rel_dir)
@@ -123,6 +240,13 @@
 
 
 def main():
+  if len(sys.argv) != 2 or sys.argv[1] not in ('2013', '2015'):
+    print 'Usage: package_from_installed.py 2013|2015'
+    return 1
+
+  global VS_VERSION
+  VS_VERSION = sys.argv[1]
+
   print 'Building file list...'
   files = BuildFileList()
 
diff --git a/win_toolchain/toolchain2013.py b/win_toolchain/toolchain2013.py
index 288985e..35dc1fc 100755
--- a/win_toolchain/toolchain2013.py
+++ b/win_toolchain/toolchain2013.py
@@ -150,7 +150,7 @@
   # TODO(scottmg): Do this (and exe) manually with python code.
   # Note that at the beginning of main() we set the working directory to 7z's
   # location so that 7z can find its codec dll.
-  RunOrDie('7z x "%s" -y "-o%s" >nul' % (iso_path, target_path))
+  RunOrDie('7z x "%s" -y "-o%s"' % (iso_path, target_path))
   return target_path
 
 
@@ -344,7 +344,7 @@
       'Program Files\\Microsoft Visual Studio 12.0\\DIA SDK\\': 'DIA SDK\\',
       'System64\\': 'sys64\\',
       'System\\': 'sys32\\',
-      'Windows Kits\\8.0\\': 'win8sdk\\',
+      'Windows Kits\\8.1\\': 'win8sdk\\',
       'WinDDK\\7600.16385.win7_wdk.100208-1538\\': 'wdk\\',
   }
   matches = []
@@ -398,7 +398,7 @@
       f.write('set PATH=%~dp0..\\..\\win8sdk\\bin\\x86;'
                 '%~dp0..\\..\\VC\\bin;%PATH%\n')
     f.write('set LIB=%~dp0..\\..\\VC\\lib;'
-               '%~dp0..\\..\\win8sdk\\Lib\\win8\\um\\x86;'
+               '%~dp0..\\..\\win8sdk\\Lib\\winv6.3\\um\\x86;'
                '%~dp0..\\..\\VC\\atlmfc\\lib\n'
             'goto :EOF\n')
 
@@ -420,7 +420,7 @@
                  '%~dp0..\\..\\VC\\bin\\amd64;'
                  '%PATH%\n')
     f.write('set LIB=%~dp0..\\..\\VC\\lib\\amd64;'
-               '%~dp0..\\..\\win8sdk\\Lib\\win8\\um\\x64;'
+               '%~dp0..\\..\\win8sdk\\Lib\\winv6.3\\um\\x64;'
                '%~dp0..\\..\\VC\\atlmfc\\lib\\amd64\n')
 
 
@@ -431,7 +431,7 @@
   local_zip = DownloadUsingGsutil(tree_sha1 + '.zip')
   sys.stdout.write('Extracting %s...\n' % local_zip)
   sys.stdout.flush()
-  RunOrDie('7z x "%s" -y "-o%s" >nul' % (local_zip, target_dir))
+  RunOrDie('7z x "%s" -y "-o%s"' % (local_zip, target_dir))
 
 
 def main():