Import Cobalt 19.master.0.205881
diff --git a/src/net/tools/testserver/BUILD.gn b/src/net/tools/testserver/BUILD.gn
new file mode 100644
index 0000000..443d1c2
--- /dev/null
+++ b/src/net/tools/testserver/BUILD.gn
@@ -0,0 +1,9 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/python.gni")
+
+python_library("testserver_py") {
+  pydeps_file = "testserver.pydeps"
+}
diff --git a/src/net/tools/testserver/asn1.py b/src/net/tools/testserver/asn1.py
new file mode 100644
index 0000000..c0e0398
--- /dev/null
+++ b/src/net/tools/testserver/asn1.py
@@ -0,0 +1,165 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file implements very minimal ASN.1 DER serialization.
+
+import types
+
+
+def ToDER(obj):
+  '''ToDER converts the given object into DER encoding'''
+  if type(obj) == types.NoneType:
+    # None turns into NULL
+    return TagAndLength(5, 0)
+  if type(obj) == types.StringType:
+    # Strings are PRINTABLESTRING
+    return TagAndLength(19, len(obj)) + obj
+  if type(obj) == types.BooleanType:
+    val = "\x00"
+    if obj:
+      val = "\xff"
+    return TagAndLength(1, 1) + val
+  if type(obj) == types.IntType or type(obj) == types.LongType:
+    big_endian = []
+    val = obj
+    while val != 0:
+      big_endian.append(val & 0xff)
+      val >>= 8
+
+    if len(big_endian) == 0 or big_endian[-1] >= 128:
+      big_endian.append(0)
+
+    big_endian.reverse()
+    return TagAndLength(2, len(big_endian)) + ToBytes(big_endian)
+
+  return obj.ToDER()
+
+
+def ToBytes(array_of_bytes):
+  '''ToBytes converts the array of byte values into a binary string'''
+  return ''.join([chr(x) for x in array_of_bytes])
+
+
+def TagAndLength(tag, length):
+  der = [tag]
+  if length < 128:
+    der.append(length)
+  elif length < 256:
+    der.append(0x81)
+    der.append(length)
+  elif length < 65536:
+    der.append(0x82)
+    der.append(length >> 8)
+    der.append(length & 0xff)
+  else:
+    assert False
+
+  return ToBytes(der)
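+# Illustrative examples of TagAndLength above (not part of the original file):
+#   TagAndLength(0x30, 5)   == '\x30\x05'      (short-form length)
+#   TagAndLength(0x04, 200) == '\x04\x81\xc8'  (long-form, one length byte)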
+
+
+class Raw(object):
+  '''Raw contains raw DER encoded bytes that are used verbatim'''
+  def __init__(self, der):
+    self.der = der
+
+  def ToDER(self):
+    return self.der
+
+
+class Explicit(object):
+  '''Explicit prepends an explicit tag'''
+  def __init__(self, tag, child):
+    self.tag = tag
+    self.child = child
+
+  def ToDER(self):
+    der = ToDER(self.child)
+    tag = self.tag
+    tag |= 0x80 # context-specific class
+    tag |= 0x20 # constructed
+    return TagAndLength(tag, len(der)) + der
+
+
+class ENUMERATED(object):
+  def __init__(self, value):
+    self.value = value
+
+  def ToDER(self):
+    return TagAndLength(10, 1) + chr(self.value)
+
+
+class SEQUENCE(object):
+  def __init__(self, children):
+    self.children = children
+
+  def ToDER(self):
+    der = ''.join([ToDER(x) for x in self.children])
+    return TagAndLength(0x30, len(der)) + der
+
+
+class SET(object):
+  def __init__(self, children):
+    self.children = children
+
+  def ToDER(self):
+    der = ''.join([ToDER(x) for x in self.children])
+    return TagAndLength(0x31, len(der)) + der
+
+
+class OCTETSTRING(object):
+  def __init__(self, val):
+    self.val = val
+
+  def ToDER(self):
+    return TagAndLength(4, len(self.val)) + self.val
+
+
+class OID(object):
+  def __init__(self, parts):
+    self.parts = parts
+
+  def ToDER(self):
+    if len(self.parts) < 2 or self.parts[0] > 6 or self.parts[1] >= 40:
+      assert False
+
+    der = [self.parts[0]*40 + self.parts[1]]
+    for x in self.parts[2:]:
+      if x == 0:
+        der.append(0)
+      else:
+        octets = []
+        while x != 0:
+          v = x & 0x7f
+          if len(octets) > 0:
+            v |= 0x80
+          octets.append(v)
+          x >>= 7
+        octets.reverse()
+        der = der + octets
+
+    return TagAndLength(6, len(der)) + ToBytes(der)
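+
+# Illustrative example of OID.ToDER above (not part of the original file):
+#   OID([1, 2, 840, 113549, 1, 1, 11]).ToDER().encode('hex')
+#   == '06092a864886f70d01010b'  (sha256WithRSAEncryption)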
+
+
+class UTCTime(object):
+  def __init__(self, time_str):
+    self.time_str = time_str
+
+  def ToDER(self):
+    return TagAndLength(23, len(self.time_str)) + self.time_str
+
+
+class GeneralizedTime(object):
+  def __init__(self, time_str):
+    self.time_str = time_str
+
+  def ToDER(self):
+    return TagAndLength(24, len(self.time_str)) + self.time_str
+
+
+class BitString(object):
+  def __init__(self, bits):
+    self.bits = bits
+
+  def ToDER(self):
+    return TagAndLength(3, 1 + len(self.bits)) + "\x00" + self.bits
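+
+
+# A minimal sketch of how the helpers above compose (illustrative, not part of
+# the original file); every value follows from the functions defined above:
+#
+#   >>> ToDER(SEQUENCE([1, None])).encode('hex')
+#   '30050201010500'
+#
+# i.e. a SEQUENCE (0x30, length 5) containing INTEGER 1 and NULL.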
diff --git a/src/net/tools/testserver/backoff_server.py b/src/net/tools/testserver/backoff_server.py
new file mode 100755
index 0000000..ca2c57c
--- /dev/null
+++ b/src/net/tools/testserver/backoff_server.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This is a simple HTTP server for manually testing exponential
+back-off functionality in Chrome.
+"""
+
+
+import BaseHTTPServer
+import sys
+import urlparse
+
+
+AJAX_TEST_PAGE = '''
+<html>
+<head>
+<script>
+
+function reportResult(txt) {
+  var element = document.createElement('p');
+  element.innerHTML = txt;
+  document.body.appendChild(element);
+}
+
+function fetch() {
+  var response_code = document.getElementById('response_code');
+
+  xmlhttp = new XMLHttpRequest();
+  xmlhttp.open("GET",
+               "http://%s:%d/%s?code=" + response_code.value,
+               true);
+  xmlhttp.onreadystatechange = function() {
+    reportResult(
+        'readyState=' + xmlhttp.readyState + ', status=' + xmlhttp.status);
+  }
+  try {
+    xmlhttp.send(null);
+  } catch (e) {
+    reportResult('Exception: ' + e);
+  }
+}
+
+</script>
+</head>
+<body>
+<form action="javascript:fetch()">
+  Response code to get: <input id="response_code" type="text" value="503">
+  <input type="submit">
+</form>
+</body>
+</html>'''
+
+
+class RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+  keep_running = True
+  local_ip = ''
+  port = 0
+
+  def do_GET(self):
+    if self.path == '/quitquitquit':
+      self.send_response(200)
+      self.send_header('Content-Type', 'text/plain')
+      self.end_headers()
+      self.wfile.write('QUITTING')
+      RequestHandler.keep_running = False
+      return
+
+    if self.path.startswith('/ajax/'):
+      self.send_response(200)
+      self.send_header('Content-Type', 'text/html')
+      self.end_headers()
+      self.wfile.write(AJAX_TEST_PAGE % (self.local_ip,
+                                         self.port,
+                                         self.path[6:]))
+      return
+
+    params = urlparse.parse_qs(urlparse.urlparse(self.path).query)
+
+    if not params or not 'code' in params or params['code'][0] == '200':
+      self.send_response(200)
+      self.send_header('Content-Type', 'text/plain')
+      self.end_headers()
+      self.wfile.write('OK')
+    else:
+      status_code = int(params['code'][0])
+      self.send_response(status_code)
+      self.end_headers()
+      self.wfile.write('Error %d' % int(status_code))
+
+
+def main():
+  if len(sys.argv) != 3:
+    print "Usage: %s LOCAL_IP PORT" % sys.argv[0]
+    sys.exit(1)
+  RequestHandler.local_ip = sys.argv[1]
+  port = int(sys.argv[2])
+  RequestHandler.port = port
+  print "To stop the server, go to http://localhost:%d/quitquitquit" % port
+  httpd = BaseHTTPServer.HTTPServer(('', port), RequestHandler)
+  while RequestHandler.keep_running:
+    httpd.handle_request()
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/net/tools/testserver/dist/_socket.pyd b/src/net/tools/testserver/dist/_socket.pyd
new file mode 100644
index 0000000..5ae91b7
--- /dev/null
+++ b/src/net/tools/testserver/dist/_socket.pyd
Binary files differ
diff --git a/src/net/tools/testserver/dist/_ssl.pyd b/src/net/tools/testserver/dist/_ssl.pyd
new file mode 100644
index 0000000..6a9b73c
--- /dev/null
+++ b/src/net/tools/testserver/dist/_ssl.pyd
Binary files differ
diff --git a/src/net/tools/testserver/echo_message.py b/src/net/tools/testserver/echo_message.py
new file mode 100644
index 0000000..b2f7b04
--- /dev/null
+++ b/src/net/tools/testserver/echo_message.py
@@ -0,0 +1,385 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides utility functions for TCP/UDP echo servers and clients.
+
+This program has classes and functions to encode, decode, calculate checksum
+and verify the "echo request" and "echo response" messages. "echo request"
+message is an echo message sent from the client to the server. "echo response"
+message is a response from the server to the "echo request" message from the
+client.
+
+The format of "echo request" message is
+<version><checksum><payload_size><payload>. <version> is the version number
+of the "echo request" protocol. <checksum> is the checksum of the <payload>.
+<payload_size> is the size of the <payload>. <payload> is the echo message.
+
+The format of "echo response" message is
+<version><checksum><payload_size><key><encoded_payload>.<version>,
+<checksum> and <payload_size> are same as what is in the "echo request" message.
+<encoded_payload> is encoded version of the <payload>. <key> is a randomly
+generated key that is used to encode/decode the <payload>.
+"""
+
+__author__ = 'rtenneti@google.com (Raman Tenneti)'
+
+
+from itertools import cycle
+from itertools import izip
+import random
+
+
+class EchoHeader(object):
+  """Class to keep header info of the EchoRequest and EchoResponse messages.
+
+  This class knows how to parse the checksum and payload_size from the
+  "echo request" and "echo response" messages. It holds the checksum and
+  payload_size of the "echo request" and "echo response" messages.
+  """
+
+  # This specifies the version.
+  VERSION_STRING = '01'
+
+  # This specifies the starting position of the checksum and length of the
+  # checksum. Maximum value for the checksum is less than (2 ** 31 - 1).
+  CHECKSUM_START = 2
+  CHECKSUM_LENGTH = 10
+  CHECKSUM_FORMAT = '%010d'
+  CHECKSUM_END = CHECKSUM_START + CHECKSUM_LENGTH
+
+  # This specifies the starting position of the <payload_size> and length of the
+  # <payload_size>. Maximum number of bytes that can be sent in the <payload> is
+  # 9,999,999.
+  PAYLOAD_SIZE_START = CHECKSUM_END
+  PAYLOAD_SIZE_LENGTH = 7
+  PAYLOAD_SIZE_FORMAT = '%07d'
+  PAYLOAD_SIZE_END = PAYLOAD_SIZE_START + PAYLOAD_SIZE_LENGTH
+
+  def __init__(self, checksum=0, payload_size=0):
+    """Initializes the checksum and payload_size of self (EchoHeader).
+
+    Args:
+      checksum: (int)
+        The checksum of the payload.
+      payload_size: (int)
+        The size of the payload.
+    """
+    self.checksum = checksum
+    self.payload_size = payload_size
+
+  def ParseAndInitialize(self, echo_message):
+    """Parses the echo_message and initializes self with the parsed data.
+
+    This method extracts checksum, and payload_size from the echo_message
+    (echo_message could be either echo_request or echo_response messages) and
+    initializes self (EchoHeader) with checksum and payload_size.
+
+    Args:
+      echo_message: (string)
+        The string representation of EchoRequest or EchoResponse objects.
+    Raises:
+      ValueError: Invalid data
+    """
+    if not echo_message or len(echo_message) < EchoHeader.PAYLOAD_SIZE_END:
+      raise ValueError('Invalid data:%s' % echo_message)
+    self.checksum = int(echo_message[
+        EchoHeader.CHECKSUM_START:EchoHeader.CHECKSUM_END])
+    self.payload_size = int(echo_message[
+        EchoHeader.PAYLOAD_SIZE_START:EchoHeader.PAYLOAD_SIZE_END])
+
+  def InitializeFromPayload(self, payload):
+    """Initializes the EchoHeader object with the payload.
+
+    It calculates checksum for the payload and initializes self (EchoHeader)
+    with the calculated checksum and size of the payload.
+
+    This method is used by the client code during testing.
+
+    Args:
+      payload: (string)
+        The payload is the echo string (like 'hello').
+    Raises:
+      ValueError: Invalid data
+    """
+    if not payload:
+      raise ValueError('Invalid data:%s' % payload)
+    self.payload_size = len(payload)
+    self.checksum = Checksum(payload, self.payload_size)
+
+  def __str__(self):
+    """String representation of the self (EchoHeader).
+
+    Returns:
+      A string representation of self (EchoHeader).
+    """
+    checksum_string = EchoHeader.CHECKSUM_FORMAT % self.checksum
+    payload_size_string = EchoHeader.PAYLOAD_SIZE_FORMAT % self.payload_size
+    return EchoHeader.VERSION_STRING + checksum_string + payload_size_string
+
+
+class EchoRequest(EchoHeader):
+  """Class holds data specific to the "echo request" message.
+
+  This class holds the payload extracted from the "echo request" message.
+  """
+
+  # This specifies the starting position of the <payload>.
+  PAYLOAD_START = EchoHeader.PAYLOAD_SIZE_END
+
+  def __init__(self):
+    """Initializes EchoRequest object."""
+    EchoHeader.__init__(self)
+    self.payload = ''
+
+  def ParseAndInitialize(self, echo_request_data):
+    """Parses and Initializes the EchoRequest object from the echo_request_data.
+
+    This method extracts the header information (checksum and payload_size) and
+    payload from echo_request_data.
+
+    Args:
+      echo_request_data: (string)
+        The string representation of EchoRequest object.
+    Raises:
+      ValueError: Invalid data
+    """
+    EchoHeader.ParseAndInitialize(self, echo_request_data)
+    if len(echo_request_data) <= EchoRequest.PAYLOAD_START:
+      raise ValueError('Invalid data:%s' % echo_request_data)
+    self.payload = echo_request_data[EchoRequest.PAYLOAD_START:]
+
+  def InitializeFromPayload(self, payload):
+    """Initializes the EchoRequest object with payload.
+
+    It calculates checksum for the payload and initializes self (EchoRequest)
+    object.
+
+    Args:
+      payload: (string)
+        The payload string for which "echo request" needs to be constructed.
+    """
+    EchoHeader.InitializeFromPayload(self, payload)
+    self.payload = payload
+
+  def __str__(self):
+    """String representation of the self (EchoRequest).
+
+    Returns:
+      A string representation of self (EchoRequest).
+    """
+    return EchoHeader.__str__(self) + self.payload
+
+
+class EchoResponse(EchoHeader):
+  """Class holds data specific to the "echo response" message.
+
+  This class knows how to parse the "echo response" message. This class holds
+  key, encoded_payload and decoded_payload of the "echo response" message.
+  """
+
+  # This specifies the starting position of the |key_| and length of the |key_|.
+  # Minimum and maximum values for the |key_| are given by KEY_MIN_VALUE and
+  # KEY_MAX_VALUE below; the key is always formatted as six digits.
+  KEY_START = EchoHeader.PAYLOAD_SIZE_END
+  KEY_LENGTH = 6
+  KEY_FORMAT = '%06d'
+  KEY_END = KEY_START + KEY_LENGTH
+  KEY_MIN_VALUE = 0
+  KEY_MAX_VALUE = 999999
+
+  # This specifies the starting position of the <encoded_payload> and length
+  # of the <encoded_payload>.
+  ENCODED_PAYLOAD_START = KEY_END
+
+  def __init__(self, key='', encoded_payload='', decoded_payload=''):
+    """Initializes the EchoResponse object."""
+    EchoHeader.__init__(self)
+    self.key = key
+    self.encoded_payload = encoded_payload
+    self.decoded_payload = decoded_payload
+
+  def ParseAndInitialize(self, echo_response_data=None):
+    """Parses and Initializes the EchoResponse object from echo_response_data.
+
+    This method calls EchoHeader to extract header information from the
+    echo_response_data and it then extracts key and encoded_payload from the
+    echo_response_data. It holds the decoded payload of the encoded_payload.
+
+    Args:
+      echo_response_data: (string)
+        The string representation of EchoResponse object.
+    Raises:
+      ValueError: Invalid echo_request_data
+    """
+    EchoHeader.ParseAndInitialize(self, echo_response_data)
+    if len(echo_response_data) <= EchoResponse.ENCODED_PAYLOAD_START:
+      raise ValueError('Invalid echo_response_data:%s' % echo_response_data)
+    self.key = echo_response_data[EchoResponse.KEY_START:EchoResponse.KEY_END]
+    self.encoded_payload = echo_response_data[
+        EchoResponse.ENCODED_PAYLOAD_START:]
+    self.decoded_payload = Crypt(self.encoded_payload, self.key)
+
+  def InitializeFromEchoRequest(self, echo_request):
+    """Initializes EchoResponse with the data from the echo_request object.
+
+    It gets the checksum, payload_size and payload from the echo_request object
+    and then encodes the payload with a random key. It also saves the payload
+    as decoded_payload.
+
+    Args:
+      echo_request: (EchoRequest)
+        The EchoRequest object which has "echo request" message.
+    """
+    self.checksum = echo_request.checksum
+    self.payload_size = echo_request.payload_size
+    self.key = (EchoResponse.KEY_FORMAT %
+                random.randrange(EchoResponse.KEY_MIN_VALUE,
+                                 EchoResponse.KEY_MAX_VALUE))
+    self.encoded_payload = Crypt(echo_request.payload, self.key)
+    self.decoded_payload = echo_request.payload
+
+  def __str__(self):
+    """String representation of the self (EchoResponse).
+
+    Returns:
+      A string representation of self (EchoResponse).
+    """
+    return EchoHeader.__str__(self) + self.key + self.encoded_payload
+
+
+def Crypt(payload, key):
+  """Encodes/decodes the payload with the key and returns encoded payload.
+
+  This method loops through the payload and XORs each byte with the key.
+
+  Args:
+    payload: (string)
+      The string to be encoded/decoded.
+    key: (string)
+      The key used to encode/decode the payload.
+
+  Returns:
+    An encoded/decoded string.
+  """
+  return ''.join(chr(ord(x) ^ ord(y)) for (x, y) in izip(payload, cycle(key)))
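+
+# Illustrative example of Crypt above (not part of the original file). Because
+# XOR is its own inverse, applying Crypt twice with the same key restores the
+# payload:
+#
+#   >>> Crypt('hello', '123456')
+#   'YW_XZ'
+#   >>> Crypt('YW_XZ', '123456')
+#   'hello'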
+
+
+def Checksum(payload, payload_size):
+  """Calculates the checksum of the payload.
+
+  Args:
+    payload: (string)
+      The payload string for which checksum needs to be calculated.
+    payload_size: (int)
+      The number of bytes in the payload.
+
+  Returns:
+    The checksum of the payload.
+  """
+  checksum = 0
+  length = min(payload_size, len(payload))
+  for i in range(0, length):
+    checksum += ord(payload[i])
+  return checksum
+
+
+def GetEchoRequestData(payload):
+  """Constructs an "echo request" message from the payload.
+
+  It builds an EchoRequest object from the payload and then returns a string
+  representation of the EchoRequest object.
+
+  This is used by the TCP/UDP echo clients to build the "echo request" message.
+
+  Args:
+    payload: (string)
+      The payload string for which "echo request" needs to be constructed.
+
+  Returns:
+    A string representation of the EchoRequest object.
+  Raises:
+    ValueError: Invalid payload
+  """
+  try:
+    echo_request = EchoRequest()
+    echo_request.InitializeFromPayload(payload)
+    return str(echo_request)
+  except (IndexError, ValueError):
+    raise ValueError('Invalid payload:%s' % payload)
+
+
+def GetEchoResponseData(echo_request_data):
+  """Verifies the echo_request_data and returns "echo response" message.
+
+  It builds the EchoRequest object from the echo_request_data and then verifies
+  that the checksum of the EchoRequest is the same as the calculated checksum
+  of the payload. If the checksums don't match, it returns None. If the
+  checksums match, it builds an EchoResponse object from the EchoRequest
+  object and returns the string representation of the EchoResponse object.
+
+  This is used by the TCP/UDP echo servers.
+
+  Args:
+    echo_request_data: (string)
+      The "echo request" string that the clients send to the echo servers.
+
+  Returns:
+    A string representation of the EchoResponse object. It returns None if the
+    echo_request_data is not valid.
+  Raises:
+    ValueError: Invalid echo_request_data
+  """
+  try:
+    if not echo_request_data:
+      raise ValueError('Invalid payload:%s' % echo_request_data)
+
+    echo_request = EchoRequest()
+    echo_request.ParseAndInitialize(echo_request_data)
+
+    if Checksum(echo_request.payload,
+                echo_request.payload_size) != echo_request.checksum:
+      return None
+
+    echo_response = EchoResponse()
+    echo_response.InitializeFromEchoRequest(echo_request)
+
+    return str(echo_response)
+  except (IndexError, ValueError):
+    raise ValueError('Invalid payload:%s' % echo_request_data)
+
+
+def DecodeAndVerify(echo_request_data, echo_response_data):
+  """Decodes and verifies the echo_response_data.
+
+  It builds EchoRequest and EchoResponse objects from the echo_request_data and
+  echo_response_data. It returns True if the EchoResponse's payload and
+  checksum match EchoRequest's.
+
+  This is used by the TCP/UDP echo clients for testing purposes.
+
+  Args:
+    echo_request_data: (string)
+      The request clients sent to echo servers.
+    echo_response_data: (string)
+      The response clients received from the echo servers.
+
+  Returns:
+    True if echo_request_data and echo_response_data match.
+  Raises:
+    ValueError: Invalid echo_request_data or Invalid echo_response
+  """
+
+  try:
+    echo_request = EchoRequest()
+    echo_request.ParseAndInitialize(echo_request_data)
+  except (IndexError, ValueError):
+    raise ValueError('Invalid echo_request:%s' % echo_request_data)
+
+  try:
+    echo_response = EchoResponse()
+    echo_response.ParseAndInitialize(echo_response_data)
+  except (IndexError, ValueError):
+    raise ValueError('Invalid echo_response:%s' % echo_response_data)
+
+  return (echo_request.checksum == echo_response.checksum and
+          echo_request.payload == echo_response.decoded_payload)
diff --git a/src/net/tools/testserver/minica.py b/src/net/tools/testserver/minica.py
new file mode 100644
index 0000000..95d0287
--- /dev/null
+++ b/src/net/tools/testserver/minica.py
@@ -0,0 +1,569 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import asn1
+import datetime
+import hashlib
+import itertools
+import os
+import time
+
+GENERALIZED_TIME_FORMAT = "%Y%m%d%H%M%SZ"
+
+OCSP_STATE_GOOD = 1
+OCSP_STATE_REVOKED = 2
+OCSP_STATE_INVALID_RESPONSE = 3
+OCSP_STATE_UNAUTHORIZED = 4
+OCSP_STATE_UNKNOWN = 5
+OCSP_STATE_TRY_LATER = 6
+OCSP_STATE_INVALID_RESPONSE_DATA = 7
+OCSP_STATE_MISMATCHED_SERIAL = 8
+
+OCSP_DATE_VALID = 1
+OCSP_DATE_OLD = 2
+OCSP_DATE_EARLY = 3
+OCSP_DATE_LONG = 4
+OCSP_DATE_LONGER = 5
+
+OCSP_PRODUCED_VALID = 1
+OCSP_PRODUCED_BEFORE_CERT = 2
+OCSP_PRODUCED_AFTER_CERT = 3
+
+# This file implements very minimal certificate and OCSP generation. It's
+# designed to test revocation checking.
+
+def RandomNumber(length_in_bytes):
+  '''RandomNumber returns a random number of length 8*|length_in_bytes| bits'''
+  rand = os.urandom(length_in_bytes)
+  n = 0
+  for x in rand:
+    n <<= 8
+    n |= ord(x)
+  return n
+
+
+def ModExp(n, e, p):
+  '''ModExp returns n^e mod p'''
+  r = 1
+  while e != 0:
+    if e & 1:
+      r = (r*n) % p
+    e >>= 1
+    n = (n*n) % p
+  return r
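+
+# Illustrative example of ModExp above (not part of the original file):
+#   ModExp(4, 13, 497) == 445  (i.e. 4 ** 13 % 497)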
+
+# PKCS1v15_SHA256_PREFIX is the ASN.1 prefix for a SHA256 signature.
+PKCS1v15_SHA256_PREFIX = '3031300d060960864801650304020105000420'.decode('hex')
+
+class RSA(object):
+  def __init__(self, modulus, e, d):
+    self.m = modulus
+    self.e = e
+    self.d = d
+
+    self.modlen = 0
+    m = modulus
+    while m != 0:
+      self.modlen += 1
+      m >>= 8
+
+  def Sign(self, message):
+    digest = hashlib.sha256(message).digest()
+    prefix = PKCS1v15_SHA256_PREFIX
+
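+    # Build the EMSA-PKCS1-v1_5 encoded message
+    #   0x00 || 0x01 || 0xff .. 0xff || 0x00 || DigestInfo prefix || digest
+    # padded to the modulus length, then sign it below as em^d mod modulus.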
+    em = ['\xff'] * (self.modlen - 1 - len(prefix) - len(digest))
+    em[0] = '\x00'
+    em[1] = '\x01'
+    em += "\x00" + prefix + digest
+
+    n = 0
+    for x in em:
+      n <<= 8
+      n |= ord(x)
+
+    s = ModExp(n, self.d, self.m)
+    out = []
+    while s != 0:
+      out.append(s & 0xff)
+      s >>= 8
+    out.reverse()
+    return '\x00' * (self.modlen - len(out)) + asn1.ToBytes(out)
+
+  def ToDER(self):
+    return asn1.ToDER(asn1.SEQUENCE([self.m, self.e]))
+
+
+def Name(cn):
+  return asn1.SEQUENCE([
+    asn1.SET([
+      asn1.SEQUENCE([
+        COMMON_NAME, cn,
+      ])
+    ])
+  ])
+
+
+# The private key and root certificate name are hard coded here:
+
+# This is the root private key
+ROOT_KEY = RSA(0x00c1541fac63d3b969aa231a02cb2e0d9ee7b26724f136c121b2c28bdae5caa87733cc407ad83842ef20ec67d941b448a1ce3557cf5ddebf3c9bde8f36f253ee73e670d1c4c6631d1ddc0e39cbde09b833f66347ea379c3fa891d61a0ca005b38b0b2cad1058e3589c9f30600be81e4ff4ac220972c17b74f92f03d72b496f643543d0b27a5227f1efee13c138888b23cb101877b3b4dc091f0b3bb6fc3c792187b05ab38e97862f8af6156bcbfbb824385132c6741e6c65cfcd5f13142421a210b95185884c4866f3ea644dfb8006133d14e72a4704f3e700cf827ca5ffd2ef74c2ab6a5259ffff40f0f7f607891388f917fc9fc9e65742df1bfa0b322140bb65,
+              65537,
+              0x00980f2db66ef249e4954074a5fbdf663135363a3071554ac4d19079661bd5b179c890ffaa5fc4a8c8e3116e81104fd7cd049f2a48dd2165332bb9fad511f6f817cb09b3c45cf1fa25d13e9331099c8578c173c74dae9dc3e83784ba0a7216e9e8144af8786221b741c167d033ad47a245e4da04aa710a44aff5cdc480b48adbba3575d1315555690f081f9f69691e801e34c21240bcd3df9573ec5f9aa290c5ed19404fb911ab28b7680e0be086487273db72da6621f24d8c66197a5f1b7687efe1d9e3b6655af2891d4540482e1246ff5f62ce61b8b5dcb2c66ade6bb41e0bf071445fb8544aa0a489780f770a6f1031ee19347641794f4ad17354d579a9d061)
+
+# Root certificate CN
+ROOT_CN = "Testing CA"
+
+# All certificates are issued under this policy OID, in the Google arc:
+CERT_POLICY_OID = asn1.OID([1, 3, 6, 1, 4, 1, 11129, 2, 4, 1])
+
+# These result in the following root certificate:
+# -----BEGIN CERTIFICATE-----
+# MIIC1DCCAbygAwIBAgIBATANBgkqhkiG9w0BAQsFADAVMRMwEQYDVQQDEwpUZXN0
+# aW5nIENBMB4XDTEwMDEwMTA2MDAwMFoXDTMyMTIwMTA2MDAwMFowFTETMBEGA1UE
+# AxMKVGVzdGluZyBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMFU
+# H6xj07lpqiMaAssuDZ7nsmck8TbBIbLCi9rlyqh3M8xAetg4Qu8g7GfZQbRIoc41
+# V89d3r88m96PNvJT7nPmcNHExmMdHdwOOcveCbgz9mNH6jecP6iR1hoMoAWziwss
+# rRBY41icnzBgC+geT/SsIglywXt0+S8D1ytJb2Q1Q9CyelIn8e/uE8E4iIsjyxAY
+# d7O03AkfCzu2/Dx5IYewWrOOl4YvivYVa8v7uCQ4UTLGdB5sZc/NXxMUJCGiELlR
+# hYhMSGbz6mRN+4AGEz0U5ypHBPPnAM+CfKX/0u90wqtqUln//0Dw9/YHiROI+Rf8
+# n8nmV0LfG/oLMiFAu2UCAwEAAaMvMC0wEgYDVR0TAQH/BAgwBgEB/wIBATAXBgNV
+# HSAEEDAOMAwGCisGAQQB1nkCBAEwDQYJKoZIhvcNAQELBQADggEBADNrvoAyqAVm
+# bydPBBfLRqyH4DXt2vuMVmnSdnWnOxYiEezGmNSNiO1k1ZFBwVSsd+JHrT24lax9
+# kvU1yQDW//PBu3ijfZOCaIUleQiGXHMGfV4MjzgYbxpvHOvEUC6IXmYCsIEwcZgK
+# lrwnfJQ3MVU4hOgGTlOTWYPtCwvTsBObNRLdIs+ifMQiWmzPBlM8XeX4e5acDjTb
+# emcN4szU3EcgmCA0LvBIRI4F6NWpaIJl2WnLyMUDyKq4vjpRJOZkNwAC+525duDr
+# JFE4PKR2Lh53nJQIJv6mcTZQkX1mmw0yzqWxcGCoHACma3TgSwOHryvSopL+t26+
+# ZlQvP2ygwqY=
+# -----END CERTIFICATE-----
+
+# If you update any of the above, you can generate a new root by running this
+# file as a script.
+
+INTERMEDIATE_KEY = RSA(0x00c661afcc659f88855a83ade8fb792dc13d0cf388b17bece9149cf0b8556d27b19101d081fb2a842d13a2ac95d8308ddd66783843ecc5806513959eb6b30dd69b2845d97e10d0bbbf653d686dc8828935022cc96f9e030b567157257d3d6526734080bb9727cee0d30f4209d5820e1d662f358fc789c0e9366d84f89adf1beb8d843f74e6f325876ac35d5c11691fcb296967c06edf69450c16bb2314c14599fe90725d5ec90f2db6698afae72bba0cfbf77967c7e8b49f2172f9381827c27ab7f9471c62bd8da4a6c657966ec1385cf41d739449835888f30d64971619dcd380408cd74f25c3be19833a92620c9cf710da67e15ac8cef69bc7e4e5e7f813c1ed,
+                       65537,
+                       0x77c5e2edf52d2cafd6c649e9b06aa9455226cfa26805fa337f4e81c7c94bedfb3721715208e2d28aa4a042b2f5a3db03212ad44dae564ffeb6a44efedf7c2b65e21aca056301a3591b36c82600394fbdc16268fc0adaabadb5207871f4ef6d17888a30b84240955cd889768681cf23d0de0fe88f008c8841643e341acd397e2d1104a23242e566088b7617c26ae8b48a85b6c9b7dc64ef1fa5e9b124ff8c1659a82d8225f28a820cc6ca07beff0354364c631a9142309fea1d8b054f6e00e23c54b493a21fcbe89a646b39d1acba5bc2ace9bba0252671d42a15202f3afccc912114d6c20eb3131e74289f2c744c5b39e7d3780fe21402ab1c3ae65854fee401)
+
+# Intermediate certificate CN prefix (a random serial number is added to the
+# CN in order to avoid caching issues).
+INTERMEDIATE_CN_PREFIX = "Testing Intermediate CA"
+
+LEAF_KEY = RSA(0x00cd12d317b39cfbb160fb1dc9c9f0dc8fef3604dda4d8c557392ce1d616483713f78216cadbefd1c76ea0f3bbbe410e24b233b1b73583922b09314e249b2cfde1be0995e13f160fb630c10d447750da20ffaa4880006717feaa3e4db602e4f511b5cc312f770f44b037784effec62640f948aa189c3769f03bdd0e22a36ecfa5951f5577de195a4fba33c879b657968b79138fd7ab389a9968522f7389c6052be1ff78bc168d3ea961e132a044eba33ac07ead95367c7b815e91eca924d914fd0d811349b8bf500707ba71a43a2901a545f34e1792e72654f6649fab9716f4ba17379ee8042186bbba9b9bac416a60474cc60686f0e6e4b01259cc3cb5873edf9,
+               65537,
+               0x009c23e81bd4c30314743dded9646b82d408937db2f0afa7d9988be6cba59d886a287aa13605ad9c7117776efc94885de76cd3554da46e301d9a5b331f4613449edb9ddac36cd0345848d8c46c4bd880acbd5cfee48ee9efe813e16a33da124fd213348c8292494ac84d03ca4aabc5e25fc67ea32e0c6845fc884b01d8988768b8b931c41de49708dbcd5fcb61823f9a1f7507c6f364be4cb5a8cf24af4925997030dd4f67a0c9c6813401cc8b2f5d1971ee0022770239b7042fde8228c33942e9c0a0b18854cb1b5542be928338ab33ac936bbba174e55457007b16f36011dbb8f4258abe64e42b1cfa79803d30170b7ecf3e7c595d42003fff72591e07acd9cd)
+
+LEAF_KEY_PEM = '''-----BEGIN RSA PRIVATE KEY-----
+MIIEpQIBAAKCAQEAzRLTF7Oc+7Fg+x3JyfDcj+82BN2k2MVXOSzh1hZINxP3ghbK
+2+/Rx26g87u+QQ4ksjOxtzWDkisJMU4kmyz94b4JleE/Fg+2MMENRHdQ2iD/qkiA
+AGcX/qo+TbYC5PURtcwxL3cPRLA3eE7/7GJkD5SKoYnDdp8DvdDiKjbs+llR9Vd9
+4ZWk+6M8h5tleWi3kTj9erOJqZaFIvc4nGBSvh/3i8Fo0+qWHhMqBE66M6wH6tlT
+Z8e4FekeypJNkU/Q2BE0m4v1AHB7pxpDopAaVF804XkucmVPZkn6uXFvS6Fzee6A
+Qhhru6m5usQWpgR0zGBobw5uSwElnMPLWHPt+QIDAQABAoIBAQCcI+gb1MMDFHQ9
+3tlka4LUCJN9svCvp9mYi+bLpZ2Iaih6oTYFrZxxF3du/JSIXeds01VNpG4wHZpb
+Mx9GE0Se253aw2zQNFhI2MRsS9iArL1c/uSO6e/oE+FqM9oST9ITNIyCkklKyE0D
+ykqrxeJfxn6jLgxoRfyISwHYmIdouLkxxB3klwjbzV/LYYI/mh91B8bzZL5MtajP
+JK9JJZlwMN1PZ6DJxoE0AcyLL10Zce4AIncCObcEL96CKMM5QunAoLGIVMsbVUK+
+koM4qzOsk2u7oXTlVFcAexbzYBHbuPQlir5k5Csc+nmAPTAXC37PPnxZXUIAP/9y
+WR4HrNnNAoGBAPmOqTe7ntto6rDEsU1cKOJFKIZ7UVcSByyz8aLrvj1Rb2mkrNJU
+SdTqJvtqrvDXgO0HuGtFOzsZrRV9+XRPd2P0mP0uhfRiYGWT8hnILGyI2+7zlC/w
+HDtLEefelhtdOVKgUaLQXptSn7aGalUHghZKWjRNT5ah+U85MoI2ZkDbAoGBANJe
+KvrBBPSFLj+x2rsMhG+ksK0I6tivapVvSTtDV3ME1DvA/4BIMV/nIZyoH4AYI72c
+m/vD66+eCqh75cq5BzbVD63tR+ZRi/VdT1HJcl2IFXynk6eaBw8v7gpQyx6t3iSK
+lx/dIdpLt1BQuR4qI6x1wYp7Utn98soEkiFXzgq7AoGBAJTLBYPQXvgNBxlcPSaV
+016Nw4rjTe0vN43kwCbWjjf7LQV9BPnm/Zpv/cwboLDCnQE2gDOdNKKZPYS59pjt
+pI65UNpr+bxrR3RpEIlku2/+7br8ChfG/t4vdT6djTxFih8ErYf42t+bFNT8Mbv+
+3QYzULMsgU6bxo0A2meezbrPAoGBAK/IxmtQXP6iRxosWRUSCZxs5sFAgVVdh1el
+bXEa/Xj8IQhpZlbgfHmh3oFULzZPdZYcxm7jsQ7HpipRlZwHbtLPyNFSRFFd9PCr
+7vrttSYY77OBKC3V1G5JY8S07HYPXV/1ewDCPGZ3/I8dVQKyvap/n6FDGeFUhctv
+dFhuUZq/AoGAWLXlbcIl1cvOhfFJ5owohJhzh9oW9tlCtjV5/dlix2RaE5CtDZWS
+oMm4sQu9HiA8jLDP1MEEMRFPrPXdrZnxnSqVd1DgabSegD1/ZCb1QlWwQWkk5QU+
+wotPOMI33L50kZqUaDP+1XSL0Dyfo/pYpm4tYy/5QmP6WKXCtFUXybI=
+-----END RSA PRIVATE KEY-----
+'''
+
+# Various OIDs
+
+AIA_OCSP = asn1.OID([1, 3, 6, 1, 5, 5, 7, 48, 1])
+AIA_CA_ISSUERS = asn1.OID([1, 3, 6, 1, 5, 5, 7, 48, 2])
+AUTHORITY_INFORMATION_ACCESS = asn1.OID([1, 3, 6, 1, 5, 5, 7, 1, 1])
+BASIC_CONSTRAINTS = asn1.OID([2, 5, 29, 19])
+CERT_POLICIES = asn1.OID([2, 5, 29, 32])
+COMMON_NAME = asn1.OID([2, 5, 4, 3])
+COUNTRY = asn1.OID([2, 5, 4, 6])
+HASH_SHA1 = asn1.OID([1, 3, 14, 3, 2, 26])
+OCSP_TYPE_BASIC = asn1.OID([1, 3, 6, 1, 5, 5, 7, 48, 1, 1])
+ORGANIZATION = asn1.OID([2, 5, 4, 10])
+PUBLIC_KEY_RSA = asn1.OID([1, 2, 840, 113549, 1, 1, 1])
+SHA256_WITH_RSA_ENCRYPTION = asn1.OID([1, 2, 840, 113549, 1, 1, 11])
+SUBJECT_ALTERNATIVE_NAME = asn1.OID([2, 5, 29, 17])
+
+def MakeCertificate(
+    issuer_cn, subject_cn, serial, pubkey, privkey, ocsp_url = None,
+    ca_issuers_url = None, is_ca=False, path_len=None, ip_sans=None,
+    dns_sans=None):
+  '''MakeCertificate returns a DER encoded certificate, signed by privkey.'''
+  extensions = asn1.SEQUENCE([])
+
+  if is_ca:
+    # Root certificate.
+    c = None
+    o = None
+    extensions.children.append(
+      asn1.SEQUENCE([
+        BASIC_CONSTRAINTS,
+        True,
+        asn1.OCTETSTRING(asn1.ToDER(asn1.SEQUENCE([
+            True, # IsCA
+        ] + ([path_len] if path_len is not None else []) # Path len
+        ))),
+      ]))
+  if ip_sans is not None or dns_sans is not None:
+    sans = []
+    if dns_sans is not None:
+      for dns_name in dns_sans:
+        sans.append(
+          asn1.Raw(asn1.TagAndLength(0x82, len(dns_name)) + dns_name))
+    if ip_sans is not None:
+      for ip_addr in ip_sans:
+        sans.append(
+          asn1.Raw(asn1.TagAndLength(0x87, len(ip_addr)) + ip_addr))
+    extensions.children.append(
+      asn1.SEQUENCE([
+        SUBJECT_ALTERNATIVE_NAME,
+        # There is implicitly a critical=False here. Since false is the
+        # default, encoding the value would be invalid DER.
+        asn1.OCTETSTRING(asn1.ToDER(asn1.SEQUENCE(sans)))
+      ]))
+
+  if ocsp_url is not None or ca_issuers_url is not None:
+    aia_entries = []
+    if ocsp_url is not None:
+      aia_entries.append(
+          asn1.SEQUENCE([
+            AIA_OCSP,
+            asn1.Raw(asn1.TagAndLength(0x86, len(ocsp_url)) + ocsp_url),
+          ]))
+    if ca_issuers_url is not None:
+      aia_entries.append(
+          asn1.SEQUENCE([
+            AIA_CA_ISSUERS,
+            asn1.Raw(asn1.TagAndLength(0x86,
+                                       len(ca_issuers_url)) + ca_issuers_url),
+            ]))
+    extensions.children.append(
+      asn1.SEQUENCE([
+        AUTHORITY_INFORMATION_ACCESS,
+        # There is implicitly a critical=False here. Since false is the default,
+        # encoding the value would be invalid DER.
+        asn1.OCTETSTRING(asn1.ToDER(asn1.SEQUENCE(aia_entries))),
+        ]))
+
+  extensions.children.append(
+    asn1.SEQUENCE([
+      CERT_POLICIES,
+      # There is implicitly a critical=False here. Since false is the default,
+      # encoding the value would be invalid DER.
+      asn1.OCTETSTRING(asn1.ToDER(asn1.SEQUENCE([
+        asn1.SEQUENCE([ # PolicyInformation
+          CERT_POLICY_OID,
+        ]),
+      ]))),
+    ])
+  )
+
+  tbsCert = asn1.ToDER(asn1.SEQUENCE([
+      asn1.Explicit(0, 2), # Version
+      serial,
+      asn1.SEQUENCE([SHA256_WITH_RSA_ENCRYPTION, None]), # SignatureAlgorithm
+      Name(cn = issuer_cn), # Issuer
+      asn1.SEQUENCE([ # Validity
+        asn1.UTCTime("100101060000Z"), # NotBefore
+        asn1.UTCTime("321201060000Z"), # NotAfter
+      ]),
+      Name(cn = subject_cn), # Subject
+      asn1.SEQUENCE([ # SubjectPublicKeyInfo
+        asn1.SEQUENCE([ # Algorithm
+          PUBLIC_KEY_RSA,
+          None,
+        ]),
+        asn1.BitString(asn1.ToDER(pubkey)),
+      ]),
+      asn1.Explicit(3, extensions),
+    ]))
+
+  return asn1.ToDER(asn1.SEQUENCE([
+    asn1.Raw(tbsCert),
+    asn1.SEQUENCE([
+      SHA256_WITH_RSA_ENCRYPTION,
+      None,
+    ]),
+    asn1.BitString(privkey.Sign(tbsCert)),
+  ]))
+
+def MakeOCSPSingleResponse(
+    issuer_name_hash, issuer_key_hash, serial, ocsp_state, ocsp_date):
+  cert_status = None
+  if ocsp_state == OCSP_STATE_REVOKED:
+    cert_status = asn1.Explicit(1, asn1.GeneralizedTime("20100101060000Z"))
+  elif ocsp_state == OCSP_STATE_UNKNOWN:
+    cert_status = asn1.Raw(asn1.TagAndLength(0x80 | 2, 0))
+  elif ocsp_state == OCSP_STATE_GOOD:
+    cert_status = asn1.Raw(asn1.TagAndLength(0x80 | 0, 0))
+  elif ocsp_state == OCSP_STATE_MISMATCHED_SERIAL:
+    cert_status = asn1.Raw(asn1.TagAndLength(0x80 | 0, 0))
+    serial -= 1
+  else:
+    raise ValueError('Bad OCSP state: ' + str(ocsp_state))
+
+  now = datetime.datetime.fromtimestamp(time.mktime(time.gmtime()))
+  if ocsp_date == OCSP_DATE_VALID:
+    thisUpdate = now - datetime.timedelta(days=1)
+    nextUpdate = thisUpdate + datetime.timedelta(weeks=1)
+  elif ocsp_date == OCSP_DATE_OLD:
+    thisUpdate = now - datetime.timedelta(days=1, weeks=1)
+    nextUpdate = thisUpdate + datetime.timedelta(weeks=1)
+  elif ocsp_date == OCSP_DATE_EARLY:
+    thisUpdate = now + datetime.timedelta(days=1)
+    nextUpdate = thisUpdate + datetime.timedelta(weeks=1)
+  elif ocsp_date == OCSP_DATE_LONG:
+    thisUpdate = now - datetime.timedelta(days=365)
+    nextUpdate = thisUpdate + datetime.timedelta(days=366)
+  elif ocsp_date == OCSP_DATE_LONGER:
+    thisUpdate = now - datetime.timedelta(days=367)
+    nextUpdate = thisUpdate + datetime.timedelta(days=368)
+  else:
+    raise ValueError('Bad OCSP date: ' + str(ocsp_date))
+
+  return asn1.SEQUENCE([ # SingleResponse
+    asn1.SEQUENCE([ # CertID
+      asn1.SEQUENCE([ # hashAlgorithm
+        HASH_SHA1,
+        None,
+      ]),
+      issuer_name_hash,
+      issuer_key_hash,
+      serial,
+    ]),
+    cert_status,
+    asn1.GeneralizedTime( # thisUpdate
+      thisUpdate.strftime(GENERALIZED_TIME_FORMAT)
+    ),
+    asn1.Explicit( # nextUpdate
+      0,
+      asn1.GeneralizedTime(nextUpdate.strftime(GENERALIZED_TIME_FORMAT))
+    ),
+  ])
+
+def MakeOCSPResponse(
+    issuer_cn, issuer_key, serial, ocsp_states, ocsp_dates, ocsp_produced):
+  if ocsp_states[0] == OCSP_STATE_UNAUTHORIZED:
+    return unauthorizedDER
+  elif ocsp_states[0] == OCSP_STATE_INVALID_RESPONSE:
+    return '3'
+  elif ocsp_states[0] == OCSP_STATE_TRY_LATER:
+    resp = asn1.SEQUENCE([
+      asn1.ENUMERATED(3),
+    ])
+    return asn1.ToDER(resp)
+  elif ocsp_states[0] == OCSP_STATE_INVALID_RESPONSE_DATA:
+    invalid_data = asn1.ToDER(asn1.OCTETSTRING('not ocsp data'))
+    basic_resp = asn1.SEQUENCE([
+      asn1.Raw(invalid_data),
+      asn1.SEQUENCE([
+        SHA256_WITH_RSA_ENCRYPTION,
+        None,
+      ]),
+      asn1.BitString(ROOT_KEY.Sign(invalid_data)),
+    ])
+    resp = asn1.SEQUENCE([
+      asn1.ENUMERATED(0),
+      asn1.Explicit(0, asn1.SEQUENCE([
+        OCSP_TYPE_BASIC,
+        asn1.OCTETSTRING(asn1.ToDER(basic_resp)),
+      ])),
+    ])
+    return asn1.ToDER(resp)
+
+  # https://tools.ietf.org/html/rfc2560
+  issuer_name_hash = asn1.OCTETSTRING(
+      hashlib.sha1(asn1.ToDER(Name(cn = issuer_cn))).digest())
+
+  issuer_key_hash = asn1.OCTETSTRING(
+      hashlib.sha1(asn1.ToDER(issuer_key)).digest())
+
+  now = datetime.datetime.fromtimestamp(time.mktime(time.gmtime()))
+  if ocsp_produced == OCSP_PRODUCED_VALID:
+    producedAt = now - datetime.timedelta(days=1)
+  elif ocsp_produced == OCSP_PRODUCED_BEFORE_CERT:
+    producedAt = datetime.datetime.strptime(
+        "19100101050000Z", GENERALIZED_TIME_FORMAT)
+  elif ocsp_produced == OCSP_PRODUCED_AFTER_CERT:
+    producedAt = datetime.datetime.strptime(
+        "20321201070000Z", GENERALIZED_TIME_FORMAT)
+  else:
+    raise ValueError('Bad OCSP produced: ' + str(ocsp_produced))
+
+  single_responses = [
+      MakeOCSPSingleResponse(issuer_name_hash, issuer_key_hash, serial,
+          ocsp_state, ocsp_date)
+      for ocsp_state, ocsp_date in itertools.izip(ocsp_states, ocsp_dates)
+  ]
+
+  basic_resp_data_der = asn1.ToDER(asn1.SEQUENCE([
+    asn1.Explicit(2, issuer_key_hash),
+    asn1.GeneralizedTime(producedAt.strftime(GENERALIZED_TIME_FORMAT)),
+    asn1.SEQUENCE(single_responses),
+  ]))
+
+  basic_resp = asn1.SEQUENCE([
+    asn1.Raw(basic_resp_data_der),
+    asn1.SEQUENCE([
+      SHA256_WITH_RSA_ENCRYPTION,
+      None,
+    ]),
+    asn1.BitString(issuer_key.Sign(basic_resp_data_der)),
+  ])
+
+  resp = asn1.SEQUENCE([
+    asn1.ENUMERATED(0),
+    asn1.Explicit(0, asn1.SEQUENCE([
+      OCSP_TYPE_BASIC,
+      asn1.OCTETSTRING(asn1.ToDER(basic_resp)),
+    ]))
+  ])
+
+  return asn1.ToDER(resp)
+
+
+def DERToPEM(der):
+  pem = '-----BEGIN CERTIFICATE-----\n'
+  pem += der.encode('base64')
+  pem += '-----END CERTIFICATE-----\n'
+  return pem
+
+# unauthorizedDER is an OCSPResponse with a status of 6:
+# SEQUENCE { ENUM(6) }
+unauthorizedDER = '30030a0106'.decode('hex')
+
+def GenerateCertKeyAndOCSP(subject = "127.0.0.1",
+                           ocsp_url = "http://127.0.0.1",
+                           ocsp_states = None,
+                           ocsp_dates = None,
+                           ocsp_produced = OCSP_PRODUCED_VALID,
+                           ocsp_intermediate_url = None,
+                           ocsp_intermediate_states = None,
+                           ocsp_intermediate_dates = None,
+                           ocsp_intermediate_produced = OCSP_PRODUCED_VALID,
+                           ip_sans = ["\x7F\x00\x00\x01"],
+                           dns_sans = None,
+                           serial = 0):
+  '''GenerateCertKeyAndOCSP returns a (cert_and_key_pem,
+                                       (ocsp_der, ocsp_intermediate_der)) where:
+       * cert_and_key_pem contains a certificate and private key in PEM format
+         with the given subject common name and OCSP URL.
+         It also contains the intermediate certificate PEM if
+         ocsp_intermediate_url is not None.
+       * ocsp_der contains a DER encoded OCSP response or None if ocsp_url is
+         None
+       * ocsp_intermediate_der contains a DER encoded OCSP response for the
+         intermediate or None if ocsp_intermediate_url is None'''
+
+  if ocsp_states is None:
+    ocsp_states = [OCSP_STATE_GOOD]
+  if ocsp_dates is None:
+    ocsp_dates = [OCSP_DATE_VALID]
+
+  issuer_cn = ROOT_CN
+  issuer_key = ROOT_KEY
+  intermediate_pem = ''
+  intermediate_ocsp_der = None
+
+  if ocsp_intermediate_url is not None:
+    ocsp_intermediate_url = bytes(ocsp_intermediate_url)
+    if ocsp_intermediate_states is None:
+      ocsp_intermediate_states = [OCSP_STATE_GOOD]
+    if ocsp_intermediate_dates is None:
+      ocsp_intermediate_dates = [OCSP_DATE_VALID]
+    intermediate_serial = RandomNumber(16)
+    intermediate_cn = "%s %X" % (INTERMEDIATE_CN_PREFIX, intermediate_serial)
+    intermediate_cert_der = MakeCertificate(ROOT_CN, intermediate_cn,
+                                            intermediate_serial,
+                                            INTERMEDIATE_KEY, ROOT_KEY,
+                                            ocsp_intermediate_url,
+                                            is_ca=True)
+    intermediate_pem = DERToPEM(intermediate_cert_der)
+    issuer_cn = intermediate_cn
+    issuer_key = INTERMEDIATE_KEY
+    intermediate_ocsp_der = MakeOCSPResponse(
+        ROOT_CN, ROOT_KEY, intermediate_serial, ocsp_intermediate_states,
+        ocsp_intermediate_dates, ocsp_intermediate_produced)
+
+  if serial == 0:
+    serial = RandomNumber(16)
+  if ocsp_url is not None:
+    ocsp_url = bytes(ocsp_url)
+  cert_der = MakeCertificate(issuer_cn, bytes(subject), serial, LEAF_KEY,
+                             issuer_key, ocsp_url, ip_sans=ip_sans,
+                             dns_sans=dns_sans)
+  cert_pem = DERToPEM(cert_der)
+
+  ocsp_der = None
+  if ocsp_url is not None:
+    ocsp_der = MakeOCSPResponse(
+        issuer_cn, issuer_key, serial, ocsp_states, ocsp_dates, ocsp_produced)
+
+  return cert_pem + LEAF_KEY_PEM + intermediate_pem, (ocsp_der,
+                                                      intermediate_ocsp_der)
+
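+# Illustrative sketch of calling GenerateCertKeyAndOCSP above (not part of the
+# original file). With the default arguments it yields a leaf for 127.0.0.1:
+#
+#   pem, (ocsp_der, intermediate_ocsp_der) = GenerateCertKeyAndOCSP()
+#
+# pem holds the leaf certificate followed by LEAF_KEY_PEM; ocsp_der is a DER
+# encoded OCSP response, and intermediate_ocsp_der is None because no
+# intermediate OCSP URL was requested.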
+
+def GenerateCertKeyAndIntermediate(subject,
+                                   ca_issuers_url,
+                                   ip_sans=None,
+                                   dns_sans=None,
+                                   serial=0):
+  '''Returns a (cert_and_key_pem, intermediate_cert_der) where:
+       * cert_and_key_pem contains a certificate and private key in PEM format
+         with the given subject common name and caIssuers URL.
+       * intermediate_cert_der contains a DER encoded certificate that signed
+         cert_and_key_pem and was signed by ocsp-test-root.pem.'''
+  if serial == 0:
+    serial = RandomNumber(16)
+
+  intermediate_serial = RandomNumber(16)
+  intermediate_cn = "%s %X" % (INTERMEDIATE_CN_PREFIX, intermediate_serial)
+
+  target_cert_der = MakeCertificate(intermediate_cn, bytes(subject), serial,
+                                    LEAF_KEY, INTERMEDIATE_KEY,
+                                    ip_sans=ip_sans, dns_sans=dns_sans,
+                                    ca_issuers_url=bytes(ca_issuers_url))
+  target_cert_pem = DERToPEM(target_cert_der)
+
+  intermediate_cert_der = MakeCertificate(ROOT_CN, intermediate_cn,
+                                          intermediate_serial,
+                                          INTERMEDIATE_KEY, ROOT_KEY,
+                                          is_ca=True)
+
+  return target_cert_pem + LEAF_KEY_PEM, intermediate_cert_der
+
+
+if __name__ == '__main__':
+  def bin_to_array(s):
+    return ' '.join(['0x%02x,'%ord(c) for c in s])
+
+  import sys
+  sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..',
+                               '..', 'data', 'ssl', 'scripts'))
+  import crlsetutil
+
+  der_root = MakeCertificate(ROOT_CN, ROOT_CN, 1, ROOT_KEY, ROOT_KEY,
+                             is_ca=True, path_len=1)
+  print 'ocsp-test-root.pem:'
+  print DERToPEM(der_root)
+
+  print
+  print 'kOCSPTestCertFingerprint:'
+  print bin_to_array(hashlib.sha1(der_root).digest())
+
+  print
+  print 'kOCSPTestCertSPKI:'
+  print bin_to_array(crlsetutil.der_cert_to_spki_hash(der_root))
diff --git a/src/net/tools/testserver/run_testserver.cc b/src/net/tools/testserver/run_testserver.cc
new file mode 100644
index 0000000..e12f323
--- /dev/null
+++ b/src/net/tools/testserver/run_testserver.cc
@@ -0,0 +1,125 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdio.h>
+
+#include "base/at_exit.h"
+#include "base/command_line.h"
+#include "base/files/file_path.h"
+#include "base/logging.h"
+#include "base/message_loop/message_loop.h"
+#include "base/run_loop.h"
+#include "base/strings/utf_string_conversions.h"
+#include "base/test/test_timeouts.h"
+#include "net/test/spawned_test_server/spawned_test_server.h"
+#include "starboard/types.h"
+
+static void PrintUsage() {
+  printf("run_testserver --doc-root=relpath\n"
+         "               [--http|--https|--ws|--wss|--ftp]\n"
+         "               [--ssl-cert=ok|mismatched-name|expired]\n");
+  printf("(NOTE: relpath should be relative to the 'src' directory.\n");
+}
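+
+// Example invocation (illustrative; the doc root path is a placeholder):
+//   run_testserver --doc-root=some/relative/path --https --ssl-cert=ok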
+
+int main(int argc, const char* argv[]) {
+  base::AtExitManager at_exit_manager;
+  base::MessageLoopForIO message_loop;
+
+  // Process command line
+  base::CommandLine::Init(argc, argv);
+  base::CommandLine* command_line = base::CommandLine::ForCurrentProcess();
+
+  logging::LoggingSettings settings;
+  settings.logging_dest = logging::LOG_TO_ALL;
+  settings.log_file = FILE_PATH_LITERAL("testserver.log");
+  if (!logging::InitLogging(settings)) {
+    printf("Error: could not initialize logging. Exiting.\n");
+    return -1;
+  }
+
+  TestTimeouts::Initialize();
+
+  if (command_line->GetSwitches().empty() ||
+      command_line->HasSwitch("help")) {
+    PrintUsage();
+    return -1;
+  }
+
+  net::SpawnedTestServer::Type server_type;
+  if (command_line->HasSwitch("http")) {
+    server_type = net::SpawnedTestServer::TYPE_HTTP;
+  } else if (command_line->HasSwitch("https")) {
+    server_type = net::SpawnedTestServer::TYPE_HTTPS;
+  } else if (command_line->HasSwitch("ws")) {
+    server_type = net::SpawnedTestServer::TYPE_WS;
+  } else if (command_line->HasSwitch("wss")) {
+    server_type = net::SpawnedTestServer::TYPE_WSS;
+  } else if (command_line->HasSwitch("ftp")) {
+    server_type = net::SpawnedTestServer::TYPE_FTP;
+  } else {
+    // If no scheme switch is specified, select http or https scheme.
+    // TODO(toyoshim): Remove this estimation.
+    if (command_line->HasSwitch("ssl-cert"))
+      server_type = net::SpawnedTestServer::TYPE_HTTPS;
+    else
+      server_type = net::SpawnedTestServer::TYPE_HTTP;
+  }
+
+  net::SpawnedTestServer::SSLOptions ssl_options;
+  if (command_line->HasSwitch("ssl-cert")) {
+    if (!net::SpawnedTestServer::UsingSSL(server_type)) {
+      printf("Error: --ssl-cert is specified on non-secure scheme\n");
+      PrintUsage();
+      return -1;
+    }
+    std::string cert_option = command_line->GetSwitchValueASCII("ssl-cert");
+    if (cert_option == "ok") {
+      ssl_options.server_certificate =
+          net::SpawnedTestServer::SSLOptions::CERT_OK;
+    } else if (cert_option == "mismatched-name") {
+      ssl_options.server_certificate =
+          net::SpawnedTestServer::SSLOptions::CERT_MISMATCHED_NAME;
+    } else if (cert_option == "expired") {
+      ssl_options.server_certificate =
+          net::SpawnedTestServer::SSLOptions::CERT_EXPIRED;
+    } else {
+      printf("Error: --ssl-cert has invalid value %s\n", cert_option.c_str());
+      PrintUsage();
+      return -1;
+    }
+  }
+
+  base::FilePath doc_root = command_line->GetSwitchValuePath("doc-root");
+  if (doc_root.empty()) {
+    printf("Error: --doc-root must be specified\n");
+    PrintUsage();
+    return -1;
+  }
+
+  std::unique_ptr<net::SpawnedTestServer> test_server;
+  if (net::SpawnedTestServer::UsingSSL(server_type)) {
+    test_server.reset(
+        new net::SpawnedTestServer(server_type, ssl_options, doc_root));
+  } else {
+    test_server.reset(new net::SpawnedTestServer(server_type, doc_root));
+  }
+
+  if (!test_server->Start()) {
+    printf("Error: failed to start test server. Exiting.\n");
+    return -1;
+  }
+
+  if (!base::DirectoryExists(test_server->document_root())) {
+    printf("Error: invalid doc root: \"%s\" does not exist!\n",
+        base::UTF16ToUTF8(
+            test_server->document_root().LossyDisplayName()).c_str());
+    return -1;
+  }
+
+  printf("testserver running at %s (type ctrl+c to exit)\n",
+         test_server->host_port_pair().ToString().c_str());
+
+  base::RunLoop().Run();
+  return 0;
+}
diff --git a/src/net/tools/testserver/testserver.py b/src/net/tools/testserver/testserver.py
new file mode 100755
index 0000000..648082e
--- /dev/null
+++ b/src/net/tools/testserver/testserver.py
@@ -0,0 +1,2390 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This is a simple HTTP/FTP/TCP/UDP/PROXY/BASIC_AUTH_PROXY/WEBSOCKET server
+used for testing Chrome.
+
+It supports several test URLs, as specified by the handlers in TestPageHandler.
+By default, it listens on an ephemeral port and sends the port number back to
+the originating process over a pipe. The originating process can specify an
+explicit port if necessary.
+It can use https if you specify the flag --https=CERT where CERT is the path
+to a pem file containing the certificate and private key that should be used.
+"""
+
+import base64
+import BaseHTTPServer
+import cgi
+import hashlib
+import logging
+import minica
+import os
+import json
+import random
+import re
+import select
+import socket
+import SocketServer
+import ssl
+import struct
+import sys
+import threading
+import time
+import urllib
+import urlparse
+import zlib
+
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+ROOT_DIR = os.path.dirname(os.path.dirname(os.path.dirname(BASE_DIR)))
+
+# Insert at the beginning of the path; we want to use our copies of the library
+# unconditionally, since they contain modifications that are not in the
+# versions that might be obtained from e.g. PyPI.
+sys.path.insert(0, os.path.join(ROOT_DIR, 'third_party', 'pywebsocket', 'src'))
+sys.path.insert(0, os.path.join(ROOT_DIR, 'third_party', 'tlslite'))
+
+import mod_pywebsocket.standalone
+from mod_pywebsocket.standalone import WebSocketServer
+# Provide the ssl module to mod_pywebsocket.standalone manually.
+mod_pywebsocket.standalone.ssl = ssl
+
+import pyftpdlib.ftpserver
+
+import tlslite
+import tlslite.api
+
+import echo_message
+import testserver_base
+
+SERVER_HTTP = 0
+SERVER_FTP = 1
+SERVER_TCP_ECHO = 2
+SERVER_UDP_ECHO = 3
+SERVER_BASIC_AUTH_PROXY = 4
+SERVER_WEBSOCKET = 5
+SERVER_PROXY = 6
+
+# Default request queue size for WebSocketServer.
+_DEFAULT_REQUEST_QUEUE_SIZE = 128
+
+OCSP_STATES_NO_SINGLE_RESPONSE = {
+  minica.OCSP_STATE_INVALID_RESPONSE,
+  minica.OCSP_STATE_UNAUTHORIZED,
+  minica.OCSP_STATE_TRY_LATER,
+  minica.OCSP_STATE_INVALID_RESPONSE_DATA,
+}
+
+class WebSocketOptions:
+  """Holds options for WebSocketServer."""
+
+  def __init__(self, host, port, data_dir):
+    self.request_queue_size = _DEFAULT_REQUEST_QUEUE_SIZE
+    self.server_host = host
+    self.port = port
+    self.websock_handlers = data_dir
+    self.scan_dir = None
+    self.allow_handlers_outside_root_dir = False
+    self.websock_handlers_map_file = None
+    self.cgi_directories = []
+    self.is_executable_method = None
+    self.allow_draft75 = False
+    self.strict = True
+
+    self.use_tls = False
+    self.private_key = None
+    self.certificate = None
+    self.tls_client_auth = False
+    self.tls_client_ca = None
+    self.tls_module = 'ssl'
+    self.use_basic_auth = False
+    self.basic_auth_credential = 'Basic ' + base64.b64encode('test:test')
+
+
+class RecordingSSLSessionCache(object):
+  """RecordingSSLSessionCache acts as a TLS session cache and maintains a log of
+  lookups and inserts in order to test session cache behaviours."""
+
+  def __init__(self):
+    self.log = []
+
+  def __getitem__(self, sessionID):
+    self.log.append(('lookup', sessionID))
+    raise KeyError()
+
+  def __setitem__(self, sessionID, session):
+    self.log.append(('insert', sessionID))
+
+
+class HTTPServer(testserver_base.ClientRestrictingServerMixIn,
+                 testserver_base.BrokenPipeHandlerMixIn,
+                 testserver_base.StoppableHTTPServer):
+  """This is a specialization of StoppableHTTPServer that adds client
+  verification."""
+
+  pass
+
+class ThreadingHTTPServer(SocketServer.ThreadingMixIn,
+                          HTTPServer):
+  """This variant of HTTPServer creates a new thread for every connection. It
+  should only be used with handlers that are known to be threadsafe."""
+
+  pass
+
+class OCSPServer(testserver_base.ClientRestrictingServerMixIn,
+                 testserver_base.BrokenPipeHandlerMixIn,
+                 BaseHTTPServer.HTTPServer):
+  """This is a specialization of HTTPServer that serves an
+  OCSP response"""
+
+  def serve_forever_on_thread(self):
+    self.thread = threading.Thread(target = self.serve_forever,
+                                   name = "OCSPServerThread")
+    self.thread.start()
+
+  def stop_serving(self):
+    self.shutdown()
+    self.thread.join()
+
+
+class HTTPSServer(tlslite.api.TLSSocketServerMixIn,
+                  testserver_base.ClientRestrictingServerMixIn,
+                  testserver_base.BrokenPipeHandlerMixIn,
+                  testserver_base.StoppableHTTPServer):
+  """This is a specialization of StoppableHTTPServer that add https support and
+  client verification."""
+
+  def __init__(self, server_address, request_hander_class, pem_cert_and_key,
+               ssl_client_auth, ssl_client_cas, ssl_client_cert_types,
+               ssl_bulk_ciphers, ssl_key_exchanges, alpn_protocols,
+               npn_protocols, record_resume_info, tls_intolerant,
+               tls_intolerance_type, signed_cert_timestamps,
+               fallback_scsv_enabled, ocsp_response,
+               alert_after_handshake, disable_channel_id, disable_ems):
+    self.cert_chain = tlslite.api.X509CertChain()
+    self.cert_chain.parsePemList(pem_cert_and_key)
+    # Force using only python implementation - otherwise behavior is different
+    # depending on whether m2crypto Python module is present (error is thrown
+    # when it is). m2crypto uses a C (based on OpenSSL) implementation under
+    # the hood.
+    self.private_key = tlslite.api.parsePEMKey(pem_cert_and_key,
+                                               private=True,
+                                               implementations=['python'])
+    self.ssl_client_auth = ssl_client_auth
+    self.ssl_client_cas = []
+    self.ssl_client_cert_types = []
+    self.npn_protocols = npn_protocols
+    self.signed_cert_timestamps = signed_cert_timestamps
+    self.fallback_scsv_enabled = fallback_scsv_enabled
+    self.ocsp_response = ocsp_response
+
+    if ssl_client_auth:
+      for ca_file in ssl_client_cas:
+        s = open(ca_file).read()
+        x509 = tlslite.api.X509()
+        x509.parse(s)
+        self.ssl_client_cas.append(x509.subject)
+
+      for cert_type in ssl_client_cert_types:
+        self.ssl_client_cert_types.append({
+            "rsa_sign": tlslite.api.ClientCertificateType.rsa_sign,
+            "ecdsa_sign": tlslite.api.ClientCertificateType.ecdsa_sign,
+            }[cert_type])
+
+    self.ssl_handshake_settings = tlslite.api.HandshakeSettings()
+    # Enable SSLv3 for testing purposes.
+    self.ssl_handshake_settings.minVersion = (3, 0)
+    if ssl_bulk_ciphers is not None:
+      self.ssl_handshake_settings.cipherNames = ssl_bulk_ciphers
+    if ssl_key_exchanges is not None:
+      self.ssl_handshake_settings.keyExchangeNames = ssl_key_exchanges
+    if tls_intolerant != 0:
+      self.ssl_handshake_settings.tlsIntolerant = (3, tls_intolerant)
+      self.ssl_handshake_settings.tlsIntoleranceType = tls_intolerance_type
+    if alert_after_handshake:
+      self.ssl_handshake_settings.alertAfterHandshake = True
+    if disable_channel_id:
+      self.ssl_handshake_settings.enableChannelID = False
+    if disable_ems:
+      self.ssl_handshake_settings.enableExtendedMasterSecret = False
+    self.ssl_handshake_settings.alpnProtos = alpn_protocols
+
+    if record_resume_info:
+      # If record_resume_info is true then we'll replace the session cache with
+      # an object that records the lookups and inserts that it sees.
+      self.session_cache = RecordingSSLSessionCache()
+    else:
+      self.session_cache = tlslite.api.SessionCache()
+    testserver_base.StoppableHTTPServer.__init__(self,
+                                                 server_address,
+                                                 request_handler_class)
+
+  def handshake(self, tlsConnection):
+    """Creates the SSL connection."""
+
+    try:
+      self.tlsConnection = tlsConnection
+      tlsConnection.handshakeServer(certChain=self.cert_chain,
+                                    privateKey=self.private_key,
+                                    sessionCache=self.session_cache,
+                                    reqCert=self.ssl_client_auth,
+                                    settings=self.ssl_handshake_settings,
+                                    reqCAs=self.ssl_client_cas,
+                                    reqCertTypes=self.ssl_client_cert_types,
+                                    nextProtos=self.npn_protocols,
+                                    signedCertTimestamps=
+                                    self.signed_cert_timestamps,
+                                    fallbackSCSV=self.fallback_scsv_enabled,
+                                    ocspResponse = self.ocsp_response)
+      tlsConnection.ignoreAbruptClose = True
+      return True
+    except tlslite.api.TLSAbruptCloseError:
+      # Ignore abrupt close.
+      return True
+    except tlslite.api.TLSError, error:
+      print "Handshake failure:", str(error)
+      return False
+
+
+class FTPServer(testserver_base.ClientRestrictingServerMixIn,
+                pyftpdlib.ftpserver.FTPServer):
+  """This is a specialization of FTPServer that adds client verification."""
+
+  pass
+
+
+class TCPEchoServer(testserver_base.ClientRestrictingServerMixIn,
+                    SocketServer.TCPServer):
+  """A TCP echo server that echoes back what it has received."""
+
+  def server_bind(self):
+    """Override server_bind to store the server name."""
+
+    SocketServer.TCPServer.server_bind(self)
+    host, port = self.socket.getsockname()[:2]
+    self.server_name = socket.getfqdn(host)
+    self.server_port = port
+
+  def serve_forever(self):
+    self.stop = False
+    self.nonce_time = None
+    while not self.stop:
+      self.handle_request()
+    self.socket.close()
+
+
+class UDPEchoServer(testserver_base.ClientRestrictingServerMixIn,
+                    SocketServer.UDPServer):
+  """A UDP echo server that echoes back what it has received."""
+
+  def server_bind(self):
+    """Override server_bind to store the server name."""
+
+    SocketServer.UDPServer.server_bind(self)
+    host, port = self.socket.getsockname()[:2]
+    self.server_name = socket.getfqdn(host)
+    self.server_port = port
+
+  def serve_forever(self):
+    self.stop = False
+    self.nonce_time = None
+    while not self.stop:
+      self.handle_request()
+    self.socket.close()
+
+
+class TestPageHandler(testserver_base.BasePageHandler):
+  # Class variables to allow for persistent state between page handler
+  # invocations.
+  rst_limits = {}
+  fail_precondition = {}
+
+  def __init__(self, request, client_address, socket_server):
+    connect_handlers = [
+      self.RedirectConnectHandler,
+      self.ServerAuthConnectHandler,
+      self.DefaultConnectResponseHandler]
+    get_handlers = [
+      self.NoCacheMaxAgeTimeHandler,
+      self.NoCacheTimeHandler,
+      self.CacheTimeHandler,
+      self.CacheExpiresHandler,
+      self.CacheProxyRevalidateHandler,
+      self.CachePrivateHandler,
+      self.CachePublicHandler,
+      self.CacheSMaxAgeHandler,
+      self.CacheMustRevalidateHandler,
+      self.CacheMustRevalidateMaxAgeHandler,
+      self.CacheNoStoreHandler,
+      self.CacheNoStoreMaxAgeHandler,
+      self.CacheNoTransformHandler,
+      self.DownloadHandler,
+      self.DownloadFinishHandler,
+      self.EchoHeader,
+      self.EchoHeaderCache,
+      self.EchoAllHandler,
+      self.ZipFileHandler,
+      self.FileHandler,
+      self.SetCookieHandler,
+      self.SetManyCookiesHandler,
+      self.ExpectAndSetCookieHandler,
+      self.SetHeaderHandler,
+      self.AuthBasicHandler,
+      self.AuthDigestHandler,
+      self.SlowServerHandler,
+      self.ChunkedServerHandler,
+      self.NoContentHandler,
+      self.ServerRedirectHandler,
+      self.CrossSiteRedirectHandler,
+      self.ClientRedirectHandler,
+      self.GetSSLSessionCacheHandler,
+      self.SSLManySmallRecords,
+      self.GetChannelID,
+      self.GetClientCert,
+      self.ClientCipherListHandler,
+      self.CloseSocketHandler,
+      self.DefaultResponseHandler]
+    post_handlers = [
+      self.EchoTitleHandler,
+      self.EchoHandler,
+      self.PostOnlyFileHandler,
+      self.EchoMultipartPostHandler] + get_handlers
+    put_handlers = [
+      self.EchoTitleHandler,
+      self.EchoHandler] + get_handlers
+    head_handlers = [
+      self.FileHandler,
+      self.DefaultResponseHandler]
+
+    self._mime_types = {
+      'crx' : 'application/x-chrome-extension',
+      'exe' : 'application/octet-stream',
+      'gif': 'image/gif',
+      'jpeg' : 'image/jpeg',
+      'jpg' : 'image/jpeg',
+      'js' : 'application/javascript',
+      'json': 'application/json',
+      'pdf' : 'application/pdf',
+      'txt' : 'text/plain',
+      'wav' : 'audio/wav',
+      'xml' : 'text/xml'
+    }
+    self._default_mime_type = 'text/html'
+
+    testserver_base.BasePageHandler.__init__(self, request, client_address,
+                                             socket_server, connect_handlers,
+                                             get_handlers, head_handlers,
+                                             post_handlers, put_handlers)
+
+  def GetMIMETypeFromName(self, file_name):
+    """Returns the mime type for the specified file_name. So far it only looks
+    at the file extension."""
+
+    (_shortname, extension) = os.path.splitext(file_name.split("?")[0])
+    if len(extension) == 0:
+      # no extension.
+      return self._default_mime_type
+
+    # extension starts with a dot, so we need to remove it
+    return self._mime_types.get(extension[1:], self._default_mime_type)
+
+  def NoCacheMaxAgeTimeHandler(self):
+    """This request handler yields a page with the title set to the current
+    system time, and no caching requested."""
+
+    if not self._ShouldHandleRequest("/nocachetime/maxage"):
+      return False
+
+    self.send_response(200)
+    self.send_header('Cache-Control', 'max-age=0')
+    self.send_header('Content-Type', 'text/html')
+    self.end_headers()
+
+    self.wfile.write('<html><head><title>%s</title></head></html>' %
+                     time.time())
+
+    return True
+
+  def NoCacheTimeHandler(self):
+    """This request handler yields a page with the title set to the current
+    system time, and no caching requested."""
+
+    if not self._ShouldHandleRequest("/nocachetime"):
+      return False
+
+    self.send_response(200)
+    self.send_header('Cache-Control', 'no-cache')
+    self.send_header('Content-Type', 'text/html')
+    self.end_headers()
+
+    self.wfile.write('<html><head><title>%s</title></head></html>' %
+                     time.time())
+
+    return True
+
+  def CacheTimeHandler(self):
+    """This request handler yields a page with the title set to the current
+    system time, and allows caching for one minute."""
+
+    if not self._ShouldHandleRequest("/cachetime"):
+      return False
+
+    self.send_response(200)
+    self.send_header('Cache-Control', 'max-age=60')
+    self.send_header('Content-Type', 'text/html')
+    self.end_headers()
+
+    self.wfile.write('<html><head><title>%s</title></head></html>' %
+                     time.time())
+
+    return True
+
+  def CacheExpiresHandler(self):
+    """This request handler yields a page with the title set to the current
+    system time, and sets the page to expire on 1 Jan 2099."""
+
+    if not self._ShouldHandleRequest("/cache/expires"):
+      return False
+
+    self.send_response(200)
+    self.send_header('Expires', 'Thu, 1 Jan 2099 00:00:00 GMT')
+    self.send_header('Content-Type', 'text/html')
+    self.end_headers()
+
+    self.wfile.write('<html><head><title>%s</title></head></html>' %
+                     time.time())
+
+    return True
+
+  def CacheProxyRevalidateHandler(self):
+    """This request handler yields a page with the title set to the current
+    system time, and allows caching for 60 seconds"""
+
+    if not self._ShouldHandleRequest("/cache/proxy-revalidate"):
+      return False
+
+    self.send_response(200)
+    self.send_header('Content-Type', 'text/html')
+    self.send_header('Cache-Control', 'max-age=60, proxy-revalidate')
+    self.end_headers()
+
+    self.wfile.write('<html><head><title>%s</title></head></html>' %
+                     time.time())
+
+    return True
+
+  def CachePrivateHandler(self):
+    """This request handler yields a page with the title set to the current
+    system time, and allows caching for 3 seconds."""
+
+    if not self._ShouldHandleRequest("/cache/private"):
+      return False
+
+    self.send_response(200)
+    self.send_header('Content-Type', 'text/html')
+    self.send_header('Cache-Control', 'max-age=3, private')
+    self.end_headers()
+
+    self.wfile.write('<html><head><title>%s</title></head></html>' %
+                     time.time())
+
+    return True
+
+  def CachePublicHandler(self):
+    """This request handler yields a page with the title set to the current
+    system time, and allows caching for 3 seconds."""
+
+    if not self._ShouldHandleRequest("/cache/public"):
+      return False
+
+    self.send_response(200)
+    self.send_header('Content-Type', 'text/html')
+    self.send_header('Cache-Control', 'max-age=3, public')
+    self.end_headers()
+
+    self.wfile.write('<html><head><title>%s</title></head></html>' %
+                     time.time())
+
+    return True
+
+  def CacheSMaxAgeHandler(self):
+    """This request handler yields a page with the title set to the current
+    system time, and does not allow for caching."""
+
+    if not self._ShouldHandleRequest("/cache/s-maxage"):
+      return False
+
+    self.send_response(200)
+    self.send_header('Content-Type', 'text/html')
+    self.send_header('Cache-Control', 'public, s-maxage = 60, max-age = 0')
+    self.end_headers()
+
+    self.wfile.write('<html><head><title>%s</title></head></html>' %
+                     time.time())
+
+    return True
+
+  def CacheMustRevalidateHandler(self):
+    """This request handler yields a page with the title set to the current
+    system time, and does not allow caching."""
+
+    if not self._ShouldHandleRequest("/cache/must-revalidate"):
+      return False
+
+    self.send_response(200)
+    self.send_header('Content-Type', 'text/html')
+    self.send_header('Cache-Control', 'must-revalidate')
+    self.end_headers()
+
+    self.wfile.write('<html><head><title>%s</title></head></html>' %
+                     time.time())
+
+    return True
+
+  def CacheMustRevalidateMaxAgeHandler(self):
+    """This request handler yields a page with the title set to the current
+    system time, and does not allow caching even though a max-age of 60
+    seconds is specified."""
+
+    if not self._ShouldHandleRequest("/cache/must-revalidate/max-age"):
+      return False
+
+    self.send_response(200)
+    self.send_header('Content-Type', 'text/html')
+    self.send_header('Cache-Control', 'max-age=60, must-revalidate')
+    self.end_headers()
+
+    self.wfile.write('<html><head><title>%s</title></head></html>' %
+                     time.time())
+
+    return True
+
+  def CacheNoStoreHandler(self):
+    """This request handler yields a page with the title set to the current
+    system time, and does not allow the page to be stored."""
+
+    if not self._ShouldHandleRequest("/cache/no-store"):
+      return False
+
+    self.send_response(200)
+    self.send_header('Content-Type', 'text/html')
+    self.send_header('Cache-Control', 'no-store')
+    self.end_headers()
+
+    self.wfile.write('<html><head><title>%s</title></head></html>' %
+                     time.time())
+
+    return True
+
+  def CacheNoStoreMaxAgeHandler(self):
+    """This request handler yields a page with the title set to the current
+    system time, and does not allow the page to be stored even though max-age
+    of 60 seconds is specified."""
+
+    if not self._ShouldHandleRequest("/cache/no-store/max-age"):
+      return False
+
+    self.send_response(200)
+    self.send_header('Content-Type', 'text/html')
+    self.send_header('Cache-Control', 'max-age=60, no-store')
+    self.end_headers()
+
+    self.wfile.write('<html><head><title>%s</title></head></html>' %
+                     time.time())
+
+    return True
+
+
+  def CacheNoTransformHandler(self):
+    """This request handler yields a page with the title set to the current
+    system time, and does not allow the content to be transformed during
+    user-agent caching."""
+
+    if not self._ShouldHandleRequest("/cache/no-transform"):
+      return False
+
+    self.send_response(200)
+    self.send_header('Content-Type', 'text/html')
+    self.send_header('Cache-Control', 'no-transform')
+    self.end_headers()
+
+    self.wfile.write('<html><head><title>%s</title></head></html>' %
+                     time.time())
+
+    return True
+
+  def EchoHeader(self):
+    """This handler echoes back the value of a specific request header."""
+
+    return self.EchoHeaderHelper("/echoheader")
+
+  def EchoHeaderCache(self):
+    """This function echoes back the value of a specific request header while
+    allowing caching for 10 hours."""
+
+    return self.EchoHeaderHelper("/echoheadercache")
+
+  def EchoHeaderHelper(self, echo_header):
+    """This function echoes back the value of the request header passed in."""
+
+    if not self._ShouldHandleRequest(echo_header):
+      return False
+
+    query_char = self.path.find('?')
+    header_name = ''
+    if query_char != -1:
+      header_name = self.path[query_char+1:]
+
+    self.send_response(200)
+    self.send_header('Content-Type', 'text/plain')
+    if echo_header == '/echoheadercache':
+      self.send_header('Cache-control', 'max-age=60000')
+    else:
+      self.send_header('Cache-control', 'no-cache')
+    # Insert a Vary header to properly indicate that the cacheability of this
+    # response is subject to the value of the request header being echoed.
+    if len(header_name) > 0:
+      self.send_header('Vary', header_name)
+    self.end_headers()
+
+    if len(header_name) > 0:
+      self.wfile.write(self.headers.getheader(header_name))
+
+    return True
+
+  def ReadRequestBody(self):
+    """This function reads the body of the current HTTP request, handling
+    both plain and chunked transfer encoded requests."""
+
+    if self.headers.getheader('transfer-encoding') != 'chunked':
+      length = int(self.headers.getheader('content-length'))
+      return self.rfile.read(length)
+
+    # Read the request body as chunks.
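+    # Each chunk is "<length in hex>\r\n<data>\r\n"; a zero-length chunk
+    # followed by a final CRLF terminates the body (trailers are not handled).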
+    body = ""
+    while True:
+      line = self.rfile.readline()
+      length = int(line, 16)
+      if length == 0:
+        self.rfile.readline()
+        break
+      body += self.rfile.read(length)
+      self.rfile.read(2)
+    return body
+
+  def EchoHandler(self):
+    """This handler just echoes back the payload of the request, for testing
+    form submission."""
+
+    if not self._ShouldHandleRequest("/echo"):
+      return False
+
+    _, _, _, _, query, _ = urlparse.urlparse(self.path)
+    query_params = cgi.parse_qs(query, True)
+    if 'status' in query_params:
+      self.send_response(int(query_params['status'][0]))
+    else:
+      self.send_response(200)
+    self.send_header('Content-Type', 'text/html')
+    self.end_headers()
+    self.wfile.write(self.ReadRequestBody())
+    return True
+
+  def EchoTitleHandler(self):
+    """This handler is like Echo, but sets the page title to the request."""
+
+    if not self._ShouldHandleRequest("/echotitle"):
+      return False
+
+    self.send_response(200)
+    self.send_header('Content-Type', 'text/html')
+    self.end_headers()
+    request = self.ReadRequestBody()
+    self.wfile.write('<html><head><title>')
+    self.wfile.write(request)
+    self.wfile.write('</title></head></html>')
+    return True
+
+  def EchoAllHandler(self):
+    """This handler yields a (more) human-readable page listing information
+    about the request header & contents."""
+
+    if not self._ShouldHandleRequest("/echoall"):
+      return False
+
+    self.send_response(200)
+    self.send_header('Content-Type', 'text/html')
+    self.end_headers()
+    self.wfile.write('<html><head><style>'
+      'pre { border: 1px solid black; margin: 5px; padding: 5px }'
+      '</style></head><body>'
+      '<div style="float: right">'
+      '<a href="/echo">back to referring page</a></div>'
+      '<h1>Request Body:</h1><pre>')
+
+    if self.command == 'POST' or self.command == 'PUT':
+      qs = self.ReadRequestBody()
+      params = cgi.parse_qs(qs, keep_blank_values=1)
+
+      for param in params:
+        self.wfile.write('%s=%s\n' % (param, params[param][0]))
+
+    self.wfile.write('</pre>')
+
+    self.wfile.write('<h1>Request Headers:</h1><pre>%s</pre>' % self.headers)
+
+    self.wfile.write('</body></html>')
+    return True
+
+  def EchoMultipartPostHandler(self):
+    """This handler echoes received multipart post data as json format."""
+
+    if not (self._ShouldHandleRequest("/echomultipartpost") or
+            self._ShouldHandleRequest("/searchbyimage")):
+      return False
+
+    content_type, parameters = cgi.parse_header(
+        self.headers.getheader('content-type'))
+    if content_type == 'multipart/form-data':
+      post_multipart = cgi.parse_multipart(self.rfile, parameters)
+    elif content_type == 'application/x-www-form-urlencoded':
+      raise Exception('POST by application/x-www-form-urlencoded is '
+                      'not implemented.')
+    else:
+      post_multipart = {}
+
+    # Since the data can be binary, we encode them by base64.
+    post_multipart_base64_encoded = {}
+    for field, values in post_multipart.items():
+      post_multipart_base64_encoded[field] = [base64.b64encode(value)
+                                              for value in values]
+
+    result = {'POST_multipart' : post_multipart_base64_encoded}
+
+    self.send_response(200)
+    self.send_header("Content-type", "text/plain")
+    self.end_headers()
+    self.wfile.write(json.dumps(result, indent=2, sort_keys=False))
+    return True
+
+  def DownloadHandler(self):
+    """This handler sends a downloadable file with or without reporting
+    the size (6K)."""
+
+    if self.path.startswith("/download-unknown-size"):
+      send_length = False
+    elif self.path.startswith("/download-known-size"):
+      send_length = True
+    else:
+      return False
+
+    #
+    # The test which uses this functionality is attempting to send
+    # small chunks of data to the client.  Use a fairly large buffer
+    # so that we'll fill chrome's IO buffer enough to force it to
+    # actually write the data.
+    # See also the comments in the client-side of this test in
+    # download_uitest.cc
+    #
+    size_chunk1 = 35*1024
+    size_chunk2 = 10*1024
+
+    self.send_response(200)
+    self.send_header('Content-Type', 'application/octet-stream')
+    self.send_header('Cache-Control', 'max-age=0')
+    if send_length:
+      self.send_header('Content-Length', size_chunk1 + size_chunk2)
+    self.end_headers()
+
+    # First chunk of data:
+    self.wfile.write("*" * size_chunk1)
+    self.wfile.flush()
+
+    # handle requests until one of them clears this flag.
+    self.server.wait_for_download = True
+    while self.server.wait_for_download:
+      self.server.handle_request()
+
+    # Second chunk of data:
+    self.wfile.write("*" * size_chunk2)
+    return True
+
+  def DownloadFinishHandler(self):
+    """This handler just tells the server to finish the current download."""
+
+    if not self._ShouldHandleRequest("/download-finish"):
+      return False
+
+    self.server.wait_for_download = False
+    self.send_response(200)
+    self.send_header('Content-Type', 'text/html')
+    self.send_header('Cache-Control', 'max-age=0')
+    self.end_headers()
+    return True
+
+  def _ReplaceFileData(self, data, query_parameters):
+    """Replaces matching substrings in a file.
+
+    If the 'replace_text' URL query parameter is present, it is expected to be
+    of the form base64(old_text):base64(new_text), using URL-safe base64. Any
+    old_text strings in the file are replaced with new_text. Multiple
+    'replace_text' parameters may be specified.
+
+    If the parameters are not present, |data| is returned.
+    """
+
+    query_dict = cgi.parse_qs(query_parameters)
+    replace_text_values = query_dict.get('replace_text', [])
+    for replace_text_value in replace_text_values:
+      replace_text_args = replace_text_value.split(':')
+      if len(replace_text_args) != 2:
+        raise ValueError(
+          'replace_text must be of form old_text:new_text. Actual value: %s' %
+          replace_text_value)
+      old_text_b64, new_text_b64 = replace_text_args
+      old_text = base64.urlsafe_b64decode(old_text_b64)
+      new_text = base64.urlsafe_b64decode(new_text_b64)
+      data = data.replace(old_text, new_text)
+    return data
+
+  def ZipFileHandler(self):
+    """This handler sends the contents of the requested file in compressed form.
+    A query parameter specifies what Content-Length to report:
+    C - the compressed size (OK),
+    U - the uncompressed size (Non-standard, but handled),
+    S - less than compressed (OK because we keep going),
+    M - larger than compressed but less than uncompressed (an error),
+    L - larger than uncompressed (an error)
+    Example: compressedfiles/Picture_1.doc?C
+    """
+
+    prefix = "/compressedfiles/"
+    if not self.path.startswith(prefix):
+      return False
+
+    # Consume a request body if present.
+    if self.command == 'POST' or self.command == 'PUT' :
+      self.ReadRequestBody()
+
+    _, _, url_path, _, query, _ = urlparse.urlparse(self.path)
+
+    if not query in ('C', 'U', 'S', 'M', 'L'):
+      return False
+
+    sub_path = url_path[len(prefix):]
+    entries = sub_path.split('/')
+    file_path = os.path.join(self.server.data_dir, *entries)
+    if os.path.isdir(file_path):
+      file_path = os.path.join(file_path, 'index.html')
+
+    if not os.path.isfile(file_path):
+      print "File not found " + sub_path + " full path:" + file_path
+      self.send_error(404)
+      return True
+
+    f = open(file_path, "rb")
+    data = f.read()
+    uncompressed_len = len(data)
+    f.close()
+
+    # Compress the data.
+    data = zlib.compress(data)
+    compressed_len = len(data)
+
+    content_length = compressed_len
+    if query == 'U':
+      content_length = uncompressed_len
+    elif query == 'S':
+      content_length = compressed_len / 2
+    elif query == 'M':
+      content_length = (compressed_len + uncompressed_len) / 2
+    elif query == 'L':
+      content_length = compressed_len + uncompressed_len
+
+    self.send_response(200)
+    self.send_header('Content-Type', 'application/msword')
+    self.send_header('Content-encoding', 'deflate')
+    self.send_header('Connection', 'close')
+    self.send_header('Content-Length', content_length)
+    self.send_header('ETag', '\'' + file_path + '\'')
+    self.end_headers()
+
+    self.wfile.write(data)
+
+    return True
+
+  def FileHandler(self):
+    """This handler sends the contents of the requested file.  Wow, it's like
+    a real webserver!"""
+
+    prefix = self.server.file_root_url
+    if not self.path.startswith(prefix):
+      return False
+    return self._FileHandlerHelper(prefix)
+
+  def PostOnlyFileHandler(self):
+    """This handler sends the contents of the requested file on a POST."""
+
+    prefix = urlparse.urljoin(self.server.file_root_url, 'post/')
+    if not self.path.startswith(prefix):
+      return False
+    return self._FileHandlerHelper(prefix)
+
+  def _FileHandlerHelper(self, prefix):
+    request_body = ''
+    if self.command == 'POST' or self.command == 'PUT':
+      # Consume a request body if present.
+      request_body = self.ReadRequestBody()
+
+    _, _, url_path, _, query, _ = urlparse.urlparse(self.path)
+    query_dict = cgi.parse_qs(query)
+
+    expected_body = query_dict.get('expected_body', [])
+    if expected_body and request_body not in expected_body:
+      self.send_response(404)
+      self.end_headers()
+      self.wfile.write('')
+      return True
+
+    expected_headers = query_dict.get('expected_headers', [])
+    for expected_header in expected_headers:
+      header_name, expected_value = expected_header.split(':')
+      if self.headers.getheader(header_name) != expected_value:
+        self.send_response(404)
+        self.end_headers()
+        self.wfile.write('')
+        return True
+
+    sub_path = url_path[len(prefix):]
+    entries = sub_path.split('/')
+    file_path = os.path.join(self.server.data_dir, *entries)
+    if os.path.isdir(file_path):
+      file_path = os.path.join(file_path, 'index.html')
+
+    if not os.path.isfile(file_path):
+      print "File not found " + sub_path + " full path:" + file_path
+      self.send_error(404)
+      return True
+
+    f = open(file_path, "rb")
+    data = f.read()
+    f.close()
+
+    data = self._ReplaceFileData(data, query)
+
+    old_protocol_version = self.protocol_version
+
+    # If file.mock-http-headers exists, it contains the headers we
+    # should send.  Read them in and parse them.
+    headers_path = file_path + '.mock-http-headers'
+    if os.path.isfile(headers_path):
+      f = open(headers_path, "r")
+
+      # "HTTP/1.1 200 OK"
+      response = f.readline()
+      http_major, http_minor, status_code = re.findall(
+          r'HTTP/(\d+)\.(\d+) (\d+)', response)[0]
+      self.protocol_version = "HTTP/%s.%s" % (http_major, http_minor)
+      self.send_response(int(status_code))
+
+      for line in f:
+        header_values = re.findall(r'(\S+):\s*(.*)', line)
+        if len(header_values) > 0:
+          # "name: value"
+          name, value = header_values[0]
+          self.send_header(name, value)
+      f.close()
+    else:
+      # Could be more generic once we support mime-type sniffing, but for
+      # now we need to set it explicitly.
+
+      range_header = self.headers.get('Range')
+      if range_header and range_header.startswith('bytes='):
+        # Note this doesn't handle all valid Range header values (e.g.
+        # left-open-ended ones), just enough for what we needed so far.
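+        # The header has the form "Range: bytes=<start>-<end>" (end optional).
+        # We reply with 206 and "Content-Range: bytes <start>-<end>/<total>".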
+        range_header = range_header[6:].split('-')
+        start = int(range_header[0])
+        if range_header[1]:
+          end = int(range_header[1])
+        else:
+          end = len(data) - 1
+
+        self.send_response(206)
+        content_range = ('bytes ' + str(start) + '-' + str(end) + '/' +
+                         str(len(data)))
+        self.send_header('Content-Range', content_range)
+        data = data[start: end + 1]
+      else:
+        self.send_response(200)
+
+      self.send_header('Content-Type', self.GetMIMETypeFromName(file_path))
+      self.send_header('Accept-Ranges', 'bytes')
+      self.send_header('Content-Length', len(data))
+      self.send_header('ETag', '\'' + file_path + '\'')
+    self.end_headers()
+
+    if (self.command != 'HEAD'):
+      self.wfile.write(data)
+
+    self.protocol_version = old_protocol_version
+    return True
+
+  def SetCookieHandler(self):
+    """This handler just sets a cookie, for testing cookie handling."""
+
+    if not self._ShouldHandleRequest("/set-cookie"):
+      return False
+
+    query_char = self.path.find('?')
+    if query_char != -1:
+      cookie_values = self.path[query_char + 1:].split('&')
+    else:
+      cookie_values = ("",)
+    self.send_response(200)
+    self.send_header('Content-Type', 'text/html')
+    for cookie_value in cookie_values:
+      self.send_header('Set-Cookie', '%s' % cookie_value)
+    self.end_headers()
+    for cookie_value in cookie_values:
+      self.wfile.write('%s' % cookie_value)
+    return True
+
+  def SetManyCookiesHandler(self):
+    """This handler just sets a given number of cookies, for testing handling
+       of large numbers of cookies."""
+
+    if not self._ShouldHandleRequest("/set-many-cookies"):
+      return False
+
+    query_char = self.path.find('?')
+    if query_char != -1:
+      num_cookies = int(self.path[query_char + 1:])
+    else:
+      num_cookies = 0
+    self.send_response(200)
+    self.send_header('Content-Type', 'text/html')
+    for _i in range(0, num_cookies):
+      self.send_header('Set-Cookie', 'a=')
+    self.end_headers()
+    self.wfile.write('%d cookies were sent' % num_cookies)
+    return True
+
+  def ExpectAndSetCookieHandler(self):
+    """Expects some cookies to be sent, and if they are, sets more cookies.
+
+    The expect parameter specifies a required cookie.  May be specified multiple
+    times.
+    The set parameter specifies a cookie to set if all required cookies are
+    present.  May be specified multiple times.
+    The data parameter specifies the response body data to be returned."""
+
+    if not self._ShouldHandleRequest("/expect-and-set-cookie"):
+      return False
+
+    _, _, _, _, query, _ = urlparse.urlparse(self.path)
+    query_dict = cgi.parse_qs(query)
+    cookies = set()
+    if 'Cookie' in self.headers:
+      cookie_header = self.headers.getheader('Cookie')
+      cookies.update([s.strip() for s in cookie_header.split(';')])
+    got_all_expected_cookies = True
+    for expected_cookie in query_dict.get('expect', []):
+      if expected_cookie not in cookies:
+        got_all_expected_cookies = False
+    self.send_response(200)
+    self.send_header('Content-Type', 'text/html')
+    if got_all_expected_cookies:
+      for cookie_value in query_dict.get('set', []):
+        self.send_header('Set-Cookie', '%s' % cookie_value)
+    self.end_headers()
+    for data_value in query_dict.get('data', []):
+      self.wfile.write(data_value)
+    return True
+
+  def SetHeaderHandler(self):
+    """This handler sets a response header. Parameters are in the
+    key%3A%20value&key2%3A%20value2 format."""
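+    # Illustrative example (the header name is arbitrary):
+    # /set-header?Cache-Control%3A%20no-store sends "Cache-Control: no-store".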
+
+    if not self._ShouldHandleRequest("/set-header"):
+      return False
+
+    query_char = self.path.find('?')
+    if query_char != -1:
+      headers_values = self.path[query_char + 1:].split('&')
+    else:
+      headers_values = ("",)
+    self.send_response(200)
+    self.send_header('Content-Type', 'text/html')
+    for header_value in headers_values:
+      header_value = urllib.unquote(header_value)
+      (key, value) = header_value.split(': ', 1)
+      self.send_header(key, value)
+    self.end_headers()
+    for header_value in headers_values:
+      self.wfile.write('%s' % header_value)
+    return True
+
+  def AuthBasicHandler(self):
+    """This handler tests 'Basic' authentication.  It just sends a page with
+    title 'user/pass' if you succeed."""
+
+    if not self._ShouldHandleRequest("/auth-basic"):
+      return False
+
+    username = userpass = password = b64str = ""
+    expected_password = 'secret'
+    realm = 'testrealm'
+    set_cookie_if_challenged = False
+
+    _, _, url_path, _, query, _ = urlparse.urlparse(self.path)
+    query_params = cgi.parse_qs(query, True)
+    if 'set-cookie-if-challenged' in query_params:
+      set_cookie_if_challenged = True
+    if 'password' in query_params:
+      expected_password = query_params['password'][0]
+    if 'realm' in query_params:
+      realm = query_params['realm'][0]
+
+    auth = self.headers.getheader('authorization')
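+    # The header has the form "Authorization: Basic base64(username:password)".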
+    try:
+      if not auth:
+        raise Exception('no auth')
+      b64str = re.findall(r'Basic (\S+)', auth)[0]
+      userpass = base64.b64decode(b64str)
+      username, password = re.findall(r'([^:]+):(\S+)', userpass)[0]
+      if password != expected_password:
+        raise Exception('wrong password')
+    except Exception, e:
+      # Authentication failed.
+      self.send_response(401)
+      self.send_header('WWW-Authenticate', 'Basic realm="%s"' % realm)
+      self.send_header('Content-Type', 'text/html')
+      if set_cookie_if_challenged:
+        self.send_header('Set-Cookie', 'got_challenged=true')
+      self.end_headers()
+      self.wfile.write('<html><head>')
+      self.wfile.write('<title>Denied: %s</title>' % e)
+      self.wfile.write('</head><body>')
+      self.wfile.write('auth=%s<p>' % auth)
+      self.wfile.write('b64str=%s<p>' % b64str)
+      self.wfile.write('username: %s<p>' % username)
+      self.wfile.write('userpass: %s<p>' % userpass)
+      self.wfile.write('password: %s<p>' % password)
+      self.wfile.write('You sent:<br>%s<p>' % self.headers)
+      self.wfile.write('</body></html>')
+      return True
+
+    # Authentication successful.  (Return a cacheable response to allow for
+    # testing cached pages that require authentication.)
+    old_protocol_version = self.protocol_version
+    self.protocol_version = "HTTP/1.1"
+
+    if_none_match = self.headers.getheader('if-none-match')
+    if if_none_match == "abc":
+      self.send_response(304)
+      self.end_headers()
+    elif url_path.endswith(".gif"):
+      # Using chrome/test/data/google/logo.gif as the test image
+      test_image_path = ['google', 'logo.gif']
+      gif_path = os.path.join(self.server.data_dir, *test_image_path)
+      if not os.path.isfile(gif_path):
+        self.send_error(404)
+        self.protocol_version = old_protocol_version
+        return True
+
+      f = open(gif_path, "rb")
+      data = f.read()
+      f.close()
+
+      self.send_response(200)
+      self.send_header('Content-Type', 'image/gif')
+      self.send_header('Cache-control', 'max-age=60000')
+      self.send_header('Etag', 'abc')
+      self.end_headers()
+      self.wfile.write(data)
+    else:
+      self.send_response(200)
+      self.send_header('Content-Type', 'text/html')
+      self.send_header('Cache-control', 'max-age=60000')
+      self.send_header('Etag', 'abc')
+      self.end_headers()
+      self.wfile.write('<html><head>')
+      self.wfile.write('<title>%s/%s</title>' % (username, password))
+      self.wfile.write('</head><body>')
+      self.wfile.write('auth=%s<p>' % auth)
+      self.wfile.write('You sent:<br>%s<p>' % self.headers)
+      self.wfile.write('</body></html>')
+
+    self.protocol_version = old_protocol_version
+    return True
+
+  def GetNonce(self, force_reset=False):
+    """Returns a nonce that's stable per request path for the server's lifetime.
+    This is a fake implementation. A real implementation would only use a given
+    nonce a single time (hence the name n-once). However, for the purposes of
+    unittesting, we don't care about the security of the nonce.
+
+    Args:
+      force_reset: Iff set, the nonce will be changed. Useful for testing the
+          "stale" response.
+    """
+
+    if force_reset or not self.server.nonce_time:
+      self.server.nonce_time = time.time()
+    return hashlib.md5('privatekey%s%d' %
+                       (self.path, self.server.nonce_time)).hexdigest()
+
+  def AuthDigestHandler(self):
+    """This handler tests 'Digest' authentication.
+
+    It just sends a page with title 'user/pass' if you succeed.
+
+    A stale response is sent iff "stale" is present in the request path.
+    """
+
+    if not self._ShouldHandleRequest("/auth-digest"):
+      return False
+
+    stale = 'stale' in self.path
+    nonce = self.GetNonce(force_reset=stale)
+    opaque = hashlib.md5('opaque').hexdigest()
+    password = 'secret'
+    realm = 'testrealm'
+
+    auth = self.headers.getheader('authorization')
+    pairs = {}
+    try:
+      if not auth:
+        raise Exception('no auth')
+      if not auth.startswith('Digest'):
+        raise Exception('not digest')
+      # Pull out all the name="value" pairs as a dictionary.
+      pairs = dict(re.findall(r'(\b[^ ,=]+)="?([^",]+)"?', auth))
+
+      # Make sure it's all valid.
+      if pairs['nonce'] != nonce:
+        raise Exception('wrong nonce')
+      if pairs['opaque'] != opaque:
+        raise Exception('wrong opaque')
+
+      # Check the 'response' value and make sure it matches our magic hash.
+      # See http://www.ietf.org/rfc/rfc2617.txt
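+      # Per RFC 2617: HA1 = MD5(username:realm:password) and
+      # HA2 = MD5(method:uri). With qop the expected response is
+      # MD5(HA1:nonce:nc:cnonce:qop:HA2); otherwise MD5(HA1:nonce:HA2).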
+      hash_a1 = hashlib.md5(
+          ':'.join([pairs['username'], realm, password])).hexdigest()
+      hash_a2 = hashlib.md5(':'.join([self.command, pairs['uri']])).hexdigest()
+      if 'qop' in pairs and 'nc' in pairs and 'cnonce' in pairs:
+        response = hashlib.md5(':'.join([hash_a1, nonce, pairs['nc'],
+            pairs['cnonce'], pairs['qop'], hash_a2])).hexdigest()
+      else:
+        response = hashlib.md5(':'.join([hash_a1, nonce, hash_a2])).hexdigest()
+
+      if pairs['response'] != response:
+        raise Exception('wrong password')
+    except Exception, e:
+      # Authentication failed.
+      self.send_response(401)
+      hdr = ('Digest '
+             'realm="%s", '
+             'domain="/", '
+             'qop="auth", '
+             'algorithm=MD5, '
+             'nonce="%s", '
+             'opaque="%s"') % (realm, nonce, opaque)
+      if stale:
+        hdr += ', stale="TRUE"'
+      self.send_header('WWW-Authenticate', hdr)
+      self.send_header('Content-Type', 'text/html')
+      self.end_headers()
+      self.wfile.write('<html><head>')
+      self.wfile.write('<title>Denied: %s</title>' % e)
+      self.wfile.write('</head><body>')
+      self.wfile.write('auth=%s<p>' % auth)
+      self.wfile.write('pairs=%s<p>' % pairs)
+      self.wfile.write('You sent:<br>%s<p>' % self.headers)
+      self.wfile.write('We are replying:<br>%s<p>' % hdr)
+      self.wfile.write('</body></html>')
+      return True
+
+    # Authentication successful.
+    self.send_response(200)
+    self.send_header('Content-Type', 'text/html')
+    self.end_headers()
+    self.wfile.write('<html><head>')
+    self.wfile.write('<title>%s/%s</title>' % (pairs['username'], password))
+    self.wfile.write('</head><body>')
+    self.wfile.write('auth=%s<p>' % auth)
+    self.wfile.write('pairs=%s<p>' % pairs)
+    self.wfile.write('</body></html>')
+
+    return True
+
+  def SlowServerHandler(self):
+    """Wait for the user suggested time before responding. The syntax is
+    /slow?0.5 to wait for half a second."""
+
+    if not self._ShouldHandleRequest("/slow"):
+      return False
+    query_char = self.path.find('?')
+    wait_sec = 1.0
+    if query_char >= 0:
+      try:
+        wait_sec = float(self.path[query_char + 1:])
+      except ValueError:
+        pass
+    time.sleep(wait_sec)
+    self.send_response(200)
+    self.send_header('Content-Type', 'text/plain')
+    self.end_headers()
+    self.wfile.write("waited %.1f seconds" % wait_sec)
+    return True
+
+  def ChunkedServerHandler(self):
+    """Send chunked response. Allows to specify chunks parameters:
+     - waitBeforeHeaders - ms to wait before sending headers
+     - waitBetweenChunks - ms to wait between chunks
+     - chunkSize - size of each chunk in bytes
+     - chunksNumber - number of chunks
+    Example: /chunked?waitBeforeHeaders=1000&chunkSize=5&chunksNumber=5
+    waits one second, then sends headers and five chunks five bytes each."""
+
+    if not self._ShouldHandleRequest("/chunked"):
+      return False
+    query_char = self.path.find('?')
+    chunkedSettings = {'waitBeforeHeaders' : 0,
+                       'waitBetweenChunks' : 0,
+                       'chunkSize' : 5,
+                       'chunksNumber' : 5}
+    if query_char >= 0:
+      params = self.path[query_char + 1:].split('&')
+      for param in params:
+        keyValue = param.split('=')
+        if len(keyValue) == 2:
+          try:
+            chunkedSettings[keyValue[0]] = int(keyValue[1])
+          except ValueError:
+            pass
+    time.sleep(0.001 * chunkedSettings['waitBeforeHeaders'])
+    self.protocol_version = 'HTTP/1.1' # Needed for chunked encoding
+    self.send_response(200)
+    self.send_header('Content-Type', 'text/plain')
+    self.send_header('Connection', 'close')
+    self.send_header('Transfer-Encoding', 'chunked')
+    self.end_headers()
+    # Chunked encoding: sending all chunks, then final zero-length chunk and
+    # then final CRLF.
+    for i in range(0, chunkedSettings['chunksNumber']):
+      if i > 0:
+        time.sleep(0.001 * chunkedSettings['waitBetweenChunks'])
+      self.sendChunkHelp('*' * chunkedSettings['chunkSize'])
+      self.wfile.flush() # Keep in mind that we start flushing only after 1kb.
+    self.sendChunkHelp('')
+    return True
+
+  def NoContentHandler(self):
+    """Returns a 204 No Content response."""
+
+    if not self._ShouldHandleRequest("/nocontent"):
+      return False
+    self.send_response(204)
+    self.end_headers()
+    return True
+
+  def ServerRedirectHandler(self):
+    """Sends a server redirect to the given URL. The syntax is
+    '/server-redirect?http://foo.bar/asdf' to redirect to
+    'http://foo.bar/asdf'"""
+
+    test_name = "/server-redirect"
+    if not self._ShouldHandleRequest(test_name):
+      return False
+
+    query_char = self.path.find('?')
+    if query_char < 0 or len(self.path) <= query_char + 1:
+      self.sendRedirectHelp(test_name)
+      return True
+    dest = urllib.unquote(self.path[query_char + 1:])
+
+    self.send_response(301)  # moved permanently
+    self.send_header('Location', dest)
+    self.send_header('Content-Type', 'text/html')
+    self.end_headers()
+    self.wfile.write('<html><head>')
+    self.wfile.write('</head><body>Redirecting to %s</body></html>' % dest)
+
+    return True
+
+  def CrossSiteRedirectHandler(self):
+    """Sends a server redirect to the given site. The syntax is
+    '/cross-site/hostname/...' to redirect to //hostname/...
+    It is used to navigate between different Sites, causing
+    cross-site/cross-process navigations in the browser."""
+
+    test_name = "/cross-site"
+    if not self._ShouldHandleRequest(test_name):
+      return False
+
+    params = urllib.unquote(self.path[(len(test_name) + 1):])
+    slash = params.find('/')
+    if slash < 0:
+      self.sendRedirectHelp(test_name)
+      return True
+
+    host = params[:slash]
+    path = params[(slash+1):]
+    dest = "//%s:%s/%s" % (host, str(self.server.server_port), path)
+
+    self.send_response(301)  # moved permanently
+    self.send_header('Location', dest)
+    self.send_header('Content-Type', 'text/html')
+    self.end_headers()
+    self.wfile.write('<html><head>')
+    self.wfile.write('</head><body>Redirecting to %s</body></html>' % dest)
+
+    return True
+
+  def ClientRedirectHandler(self):
+    """Sends a client redirect to the given URL. The syntax is
+    '/client-redirect?http://foo.bar/asdf' to redirect to
+    'http://foo.bar/asdf'"""
+
+    test_name = "/client-redirect"
+    if not self._ShouldHandleRequest(test_name):
+      return False
+
+    query_char = self.path.find('?')
+    if query_char < 0 or len(self.path) <= query_char + 1:
+      self.sendRedirectHelp(test_name)
+      return True
+    dest = urllib.unquote(self.path[query_char + 1:])
+
+    self.send_response(200)
+    self.send_header('Content-Type', 'text/html')
+    self.end_headers()
+    self.wfile.write('<html><head>')
+    self.wfile.write('<meta http-equiv="refresh" content="0;url=%s">' % dest)
+    self.wfile.write('</head><body>Redirecting to %s</body></html>' % dest)
+
+    return True
+
+  def GetSSLSessionCacheHandler(self):
+    """Send a reply containing a log of the session cache operations."""
+
+    if not self._ShouldHandleRequest('/ssl-session-cache'):
+      return False
+
+    self.send_response(200)
+    self.send_header('Content-Type', 'text/plain')
+    self.end_headers()
+    try:
+      log = self.server.session_cache.log
+    except AttributeError:
+      self.wfile.write('Pass --https-record-resume in order to use' +
+                       ' this request')
+      return True
+
+    for (action, sessionID) in log:
+      self.wfile.write('%s\t%s\n' % (action, bytes(sessionID).encode('hex')))
+    return True
+
+  def SSLManySmallRecords(self):
+    """Sends a reply consisting of a variety of small writes. These will be
+    translated into a series of small SSL records when used over an HTTPS
+    server."""
+
+    if not self._ShouldHandleRequest('/ssl-many-small-records'):
+      return False
+
+    self.send_response(200)
+    self.send_header('Content-Type', 'text/plain')
+    self.end_headers()
+
+    # Write ~26K of data, in 1350 byte chunks
+    for i in xrange(20):
+      self.wfile.write('*' * 1350)
+      self.wfile.flush()
+    return True
+
+  def GetChannelID(self):
+    """Send a reply containing the hashed ChannelID that the client provided."""
+
+    if not self._ShouldHandleRequest('/channel-id'):
+      return False
+
+    self.send_response(200)
+    self.send_header('Content-Type', 'text/plain')
+    self.end_headers()
+    channel_id = bytes(self.server.tlsConnection.channel_id)
+    self.wfile.write(hashlib.sha256(channel_id).digest().encode('base64'))
+    return True
+
+  def GetClientCert(self):
+    """Send a reply whether a client certificate was provided."""
+
+    if not self._ShouldHandleRequest('/client-cert'):
+      return False
+
+    self.send_response(200)
+    self.send_header('Content-Type', 'text/plain')
+    self.end_headers()
+
+    cert_chain = self.server.tlsConnection.session.clientCertChain
+    if cert_chain != None:
+      self.wfile.write('got client cert with fingerprint: ' +
+                       cert_chain.getFingerprint())
+    else:
+      self.wfile.write('got no client cert')
+    return True
+
+  def ClientCipherListHandler(self):
+    """Send a reply containing the cipher suite list that the client
+    provided. Each cipher suite value is serialized in decimal, followed by a
+    newline."""
+
+    if not self._ShouldHandleRequest('/client-cipher-list'):
+      return False
+
+    self.send_response(200)
+    self.send_header('Content-Type', 'text/plain')
+    self.end_headers()
+
+    cipher_suites = self.server.tlsConnection.clientHello.cipher_suites
+    self.wfile.write('\n'.join(str(c) for c in cipher_suites))
+    return True
+
+  def CloseSocketHandler(self):
+    """Closes the socket without sending anything."""
+
+    if not self._ShouldHandleRequest('/close-socket'):
+      return False
+
+    self.wfile.close()
+    return True
+
+  def DefaultResponseHandler(self):
+    """This is the catch-all response handler for requests that aren't handled
+    by one of the special handlers above.
+    Note that we specify the Content-Length since, without it, the HTTPS
+    connection is not closed properly (and the browser keeps expecting data)."""
+
+    contents = "Default response given for path: " + self.path
+    self.send_response(200)
+    self.send_header('Content-Type', 'text/html')
+    self.send_header('Content-Length', len(contents))
+    self.end_headers()
+    if (self.command != 'HEAD'):
+      self.wfile.write(contents)
+    return True
+
+  def RedirectConnectHandler(self):
+    """Sends a redirect to the CONNECT request for www.redirect.com. This
+    response is not specified by the RFC, so the browser should not follow
+    the redirect."""
+
+    if (self.path.find("www.redirect.com") < 0):
+      return False
+
+    dest = "http://www.destination.com/foo.js"
+
+    self.send_response(302)  # moved temporarily
+    self.send_header('Location', dest)
+    self.send_header('Connection', 'close')
+    self.end_headers()
+    return True
+
+  def ServerAuthConnectHandler(self):
+    """Sends a 401 to the CONNECT request for www.server-auth.com. This
+    response doesn't make sense because the proxy server cannot request
+    server authentication."""
+
+    if (self.path.find("www.server-auth.com") < 0):
+      return False
+
+    challenge = 'Basic realm="WallyWorld"'
+
+    self.send_response(401)  # unauthorized
+    self.send_header('WWW-Authenticate', challenge)
+    self.send_header('Connection', 'close')
+    self.end_headers()
+    return True
+
+  def DefaultConnectResponseHandler(self):
+    """This is the catch-all response handler for CONNECT requests that aren't
+    handled by one of the special handlers above.  Real Web servers respond
+    with 400 to CONNECT requests."""
+
+    contents = "Your client has issued a malformed or illegal request."
+    self.send_response(400)  # bad request
+    self.send_header('Content-Type', 'text/html')
+    self.send_header('Content-Length', len(contents))
+    self.end_headers()
+    self.wfile.write(contents)
+    return True
+
+  # called by the redirect handling function when there is no parameter
+  def sendRedirectHelp(self, redirect_name):
+    self.send_response(200)
+    self.send_header('Content-Type', 'text/html')
+    self.end_headers()
+    self.wfile.write('<html><body><h1>Error: no redirect destination</h1>')
+    self.wfile.write('Use <pre>%s?http://dest...</pre>' % redirect_name)
+    self.wfile.write('</body></html>')
+
+  # called by chunked handling function
+  def sendChunkHelp(self, chunk):
+    # Each chunk consists of: chunk size (hex), CRLF, chunk body, CRLF
+    self.wfile.write('%X\r\n' % len(chunk))
+    self.wfile.write(chunk)
+    self.wfile.write('\r\n')
+
+
+class OCSPHandler(testserver_base.BasePageHandler):
+  def __init__(self, request, client_address, socket_server):
+    handlers = [self.OCSPResponse, self.CaIssuersResponse]
+    self.ocsp_response = socket_server.ocsp_response
+    self.ocsp_response_intermediate = socket_server.ocsp_response_intermediate
+    self.ca_issuers_response = socket_server.ca_issuers_response
+    testserver_base.BasePageHandler.__init__(self, request, client_address,
+                                             socket_server, [], handlers, [],
+                                             handlers, [])
+
+  def OCSPResponse(self):
+    if self._ShouldHandleRequest("/ocsp"):
+      response = self.ocsp_response
+    elif self._ShouldHandleRequest("/ocsp_intermediate"):
+      response = self.ocsp_response_intermediate
+    else:
+      return False
+    print 'handling ocsp request'
+    self.send_response(200)
+    self.send_header('Content-Type', 'application/ocsp-response')
+    self.send_header('Content-Length', str(len(response)))
+    self.end_headers()
+
+    self.wfile.write(response)
+
+  def CaIssuersResponse(self):
+    if not self._ShouldHandleRequest("/ca_issuers"):
+      return False
+    print 'handling ca_issuers request'
+    self.send_response(200)
+    self.send_header('Content-Type', 'application/pkix-cert')
+    self.send_header('Content-Length', str(len(self.ca_issuers_response)))
+    self.end_headers()
+
+    self.wfile.write(self.ca_issuers_response)
+
+
+class TCPEchoHandler(SocketServer.BaseRequestHandler):
+  """The RequestHandler class for TCP echo server.
+
+  It is instantiated once per connection to the server, and overrides the
+  handle() method to implement communication to the client.
+  """
+
+  def handle(self):
+    """Handles the request from the client and constructs a response."""
+
+    data = self.request.recv(65536).strip()
+    # Verify the "echo request" message received from the client. Send back
+    # "echo response" message if "echo request" message is valid.
+    try:
+      return_data = echo_message.GetEchoResponseData(data)
+      if not return_data:
+        return
+    except ValueError:
+      return
+
+    self.request.send(return_data)
+
+
+class UDPEchoHandler(SocketServer.BaseRequestHandler):
+  """The RequestHandler class for UDP echo server.
+
+  It is instantiated once per connection to the server, and overrides the
+  handle() method to implement communication to the client.
+  """
+
+  def handle(self):
+    """Handles the request from the client and constructs a response."""
+
+    data = self.request[0].strip()
+    request_socket = self.request[1]
+    # Verify the "echo request" message received from the client. Send back
+    # "echo response" message if "echo request" message is valid.
+    try:
+      return_data = echo_message.GetEchoResponseData(data)
+      if not return_data:
+        return
+    except ValueError:
+      return
+    request_socket.sendto(return_data, self.client_address)
+
+
+class ProxyRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+  """A request handler that behaves as a proxy server. Only CONNECT, GET and
+  HEAD methods are supported.
+  """
+
+  redirect_connect_to_localhost = False
+
+  def _start_read_write(self, sock):
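+    """Pumps bytes between the client connection and |sock| in both directions,
+    using select() to wait for readable data, until either side closes."""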
+    sock.setblocking(0)
+    self.request.setblocking(0)
+    rlist = [self.request, sock]
+    while True:
+      ready_sockets, _unused, errors = select.select(rlist, [], [])
+      if errors:
+        self.send_response(500)
+        self.end_headers()
+        return
+      for s in ready_sockets:
+        received = s.recv(1024)
+        if len(received) == 0:
+          return
+        if s == self.request:
+          other = sock
+        else:
+          other = self.request
+        # This will lose data if the kernel write buffer fills up.
+        # TODO(ricea): Correctly use the return value to track how much was
+        # written and buffer the rest. Use select to determine when the socket
+        # becomes writable again.
+        other.send(received)
+
+  def _do_common_method(self):
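+    """Forwards a GET or HEAD request to the origin server named in the request
+    URL, then relays the response bytes back to the client."""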
+    url = urlparse.urlparse(self.path)
+    port = url.port
+    if not port:
+      if url.scheme == 'http':
+        port = 80
+      elif url.scheme == 'https':
+        port = 443
+    if not url.hostname or not port:
+      self.send_response(400)
+      self.end_headers()
+      return
+
+    if len(url.path) == 0:
+      path = '/'
+    else:
+      path = url.path
+    if len(url.query) > 0:
+      path = '%s?%s' % (url.path, url.query)
+
+    sock = None
+    try:
+      sock = socket.create_connection((url.hostname, port))
+      sock.send('%s %s %s\r\n' % (
+          self.command, path, self.protocol_version))
+      for header in self.headers.headers:
+        header = header.strip()
+        if (header.lower().startswith('connection') or
+            header.lower().startswith('proxy')):
+          continue
+        sock.send('%s\r\n' % header)
+      sock.send('\r\n')
+      # This is wrong: it will pass through connection-level headers and
+      # misbehave on connection reuse. The only reason it works at all is that
+      # our test servers have never supported connection reuse.
+      # TODO(ricea): Use a proper HTTP client library instead.
+      self._start_read_write(sock)
+    except Exception:
+      logging.exception('failure in common method: %s %s', self.command, path)
+      self.send_response(500)
+      self.end_headers()
+    finally:
+      if sock is not None:
+        sock.close()
+
+  def do_CONNECT(self):
+    try:
+      pos = self.path.rfind(':')
+      host = self.path[:pos]
+      port = int(self.path[pos+1:])
+    except Exception:
+      self.send_response(400)
+      self.end_headers()
+      return
+
+    if ProxyRequestHandler.redirect_connect_to_localhost:
+      host = "127.0.0.1"
+
+    sock = None
+    try:
+      sock = socket.create_connection((host, port))
+      self.send_response(200, 'Connection established')
+      self.end_headers()
+      self._start_read_write(sock)
+    except Exception:
+      logging.exception('failure in CONNECT: %s', self.path)
+      self.send_response(500)
+      self.end_headers()
+    finally:
+      if sock is not None:
+        sock.close()
+
+  def do_GET(self):
+    self._do_common_method()
+
+  def do_HEAD(self):
+    self._do_common_method()
+
+
+class BasicAuthProxyRequestHandler(ProxyRequestHandler):
+  """A request handler that behaves as a proxy server which requires
+  basic authentication.
+  """
+
+  _AUTH_CREDENTIAL = 'Basic Zm9vOmJhcg==' # foo:bar
+
+  def parse_request(self):
+    """Overrides parse_request to check credential."""
+
+    if not ProxyRequestHandler.parse_request(self):
+      return False
+
+    auth = self.headers.getheader('Proxy-Authorization')
+    if auth != self._AUTH_CREDENTIAL:
+      self.send_response(407)
+      self.send_header('Proxy-Authenticate', 'Basic realm="MyRealm1"')
+      self.end_headers()
+      return False
+
+    return True
+
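+
+# Illustrative note for BasicAuthProxyRequestHandler above: the expected
+# credential is simply "foo:bar" in base64. A client would construct the
+# matching header roughly like this (standalone Python 2 snippet):
+#
+#   import base64
+#   header_value = 'Basic ' + base64.b64encode('foo:bar')
+#   assert header_value == 'Basic Zm9vOmJhcg=='
+#   # Send header_value as the Proxy-Authorization request header.
+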
+
+class ServerRunner(testserver_base.TestServerRunner):
+  """TestServerRunner for the net test servers."""
+
+  def __init__(self):
+    super(ServerRunner, self).__init__()
+    self.__ocsp_server = None
+
+  def __make_data_dir(self):
+    if self.options.data_dir:
+      if not os.path.isdir(self.options.data_dir):
+        raise testserver_base.OptionError('specified data dir not found: ' +
+            self.options.data_dir + ' exiting...')
+      my_data_dir = self.options.data_dir
+    else:
+      # Create the default path to our data dir, relative to the exe dir.
+      my_data_dir = os.path.join(BASE_DIR, "..", "..", "..", "..",
+                                 "test", "data")
+
+      # TODO(ibrar): Must use the Find* function defined in google\tools,
+      # i.e. my_data_dir = FindUpward(my_data_dir, "test", "data")
+
+    return my_data_dir
+
+  def __parse_ocsp_options(self, states_option, date_option, produced_option):
+    if states_option is None:
+      return None, None, None
+
+    ocsp_states = list()
+    for ocsp_state_arg in states_option.split(':'):
+      if ocsp_state_arg == 'ok':
+        ocsp_state = minica.OCSP_STATE_GOOD
+      elif ocsp_state_arg == 'revoked':
+        ocsp_state = minica.OCSP_STATE_REVOKED
+      elif ocsp_state_arg == 'invalid':
+        ocsp_state = minica.OCSP_STATE_INVALID_RESPONSE
+      elif ocsp_state_arg == 'unauthorized':
+        ocsp_state = minica.OCSP_STATE_UNAUTHORIZED
+      elif ocsp_state_arg == 'unknown':
+        ocsp_state = minica.OCSP_STATE_UNKNOWN
+      elif ocsp_state_arg == 'later':
+        ocsp_state = minica.OCSP_STATE_TRY_LATER
+      elif ocsp_state_arg == 'invalid_data':
+        ocsp_state = minica.OCSP_STATE_INVALID_RESPONSE_DATA
+      elif ocsp_state_arg == 'mismatched_serial':
+        ocsp_state = minica.OCSP_STATE_MISMATCHED_SERIAL
+      else:
+        raise testserver_base.OptionError('unknown OCSP status: ' +
+            ocsp_state_arg)
+      ocsp_states.append(ocsp_state)
+
+    if len(ocsp_states) > 1:
+      if set(ocsp_states) & OCSP_STATES_NO_SINGLE_RESPONSE:
+        raise testserver_base.OptionError('Multiple OCSP responses '
+            'incompatible with states ' + str(ocsp_states))
+
+    ocsp_dates = list()
+    for ocsp_date_arg in date_option.split(':'):
+      if ocsp_date_arg == 'valid':
+        ocsp_date = minica.OCSP_DATE_VALID
+      elif ocsp_date_arg == 'old':
+        ocsp_date = minica.OCSP_DATE_OLD
+      elif ocsp_date_arg == 'early':
+        ocsp_date = minica.OCSP_DATE_EARLY
+      elif ocsp_date_arg == 'long':
+        ocsp_date = minica.OCSP_DATE_LONG
+      elif ocsp_date_arg == 'longer':
+        ocsp_date = minica.OCSP_DATE_LONGER
+      else:
+        raise testserver_base.OptionError('unknown OCSP date: ' +
+            ocsp_date_arg)
+      ocsp_dates.append(ocsp_date)
+
+    if len(ocsp_states) != len(ocsp_dates):
+      raise testserver_base.OptionError('mismatched ocsp and ocsp-date '
+          'count')
+
+    ocsp_produced = None
+    if produced_option == 'valid':
+      ocsp_produced = minica.OCSP_PRODUCED_VALID
+    elif produced_option == 'before':
+      ocsp_produced = minica.OCSP_PRODUCED_BEFORE_CERT
+    elif produced_option == 'after':
+      ocsp_produced = minica.OCSP_PRODUCED_AFTER_CERT
+    else:
+      raise testserver_base.OptionError('unknown OCSP produced: ' +
+          produced_option)
+
+    return ocsp_states, ocsp_dates, ocsp_produced
+
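+  # Illustrative example for __parse_ocsp_options above: the state and date
+  # arguments are ":"-separated lists of equal length, e.g. the flags
+  # "--ocsp=ok:revoked --ocsp-date=valid:old --ocsp-produced=valid" parse to
+  #
+  #   states   == [minica.OCSP_STATE_GOOD, minica.OCSP_STATE_REVOKED]
+  #   dates    == [minica.OCSP_DATE_VALID, minica.OCSP_DATE_OLD]
+  #   produced == minica.OCSP_PRODUCED_VALID
+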
+  def create_server(self, server_data):
+    port = self.options.port
+    host = self.options.host
+
+    logging.basicConfig()
+
+    # Work around a bug in Mac OS 10.6. Spawning a WebSockets server
+    # will result in a call to |getaddrinfo|, which fails with "nodename
+    # nor servname provided" for localhost:0 on 10.6.
+    # TODO(ricea): Remove this if no longer needed.
+    if self.options.server_type == SERVER_WEBSOCKET and \
+       host == "localhost" and \
+       port == 0:
+      host = "127.0.0.1"
+
+    # Construct the subjectAltNames for any ad-hoc generated certificates.
+    # As host can be either a DNS name or IP address, attempt to determine
+    # which it is, so it can be placed in the appropriate SAN.
+    dns_sans = None
+    ip_sans = None
+    ip = None
+    try:
+      ip = socket.inet_aton(host)
+      ip_sans = [ip]
+    except socket.error:
+      pass
+    if ip is None:
+      dns_sans = [host]
+
+    if self.options.server_type == SERVER_HTTP:
+      if self.options.https:
+        pem_cert_and_key = None
+        ocsp_der = None
+        if self.options.cert_and_key_file:
+          if not os.path.isfile(self.options.cert_and_key_file):
+            raise testserver_base.OptionError(
+                'specified server cert file not found: ' +
+                self.options.cert_and_key_file + ' exiting...')
+          pem_cert_and_key = file(self.options.cert_and_key_file, 'r').read()
+        elif self.options.aia_intermediate:
+          self.__ocsp_server = OCSPServer((host, 0), OCSPHandler)
+          print ('AIA server started on %s:%d...' %
+              (host, self.__ocsp_server.server_port))
+
+          ocsp_server_port = self.__ocsp_server.server_port
+          if self.options.ocsp_proxy_port_number != 0:
+            ocsp_server_port = self.options.ocsp_proxy_port_number
+            server_data['ocsp_port'] = self.__ocsp_server.server_port
+
+          (pem_cert_and_key, intermediate_cert_der) = \
+              minica.GenerateCertKeyAndIntermediate(
+                  subject = self.options.cert_common_name,
+                  ip_sans=ip_sans, dns_sans=dns_sans,
+                  ca_issuers_url =
+                      ("http://%s:%d/ca_issuers" % (host, ocsp_server_port)),
+                  serial = self.options.cert_serial)
+
+          self.__ocsp_server.ocsp_response = None
+          self.__ocsp_server.ocsp_response_intermediate = None
+          self.__ocsp_server.ca_issuers_response = intermediate_cert_der
+        else:
+          # generate a new certificate and run an OCSP server for it.
+          self.__ocsp_server = OCSPServer((host, 0), OCSPHandler)
+          print ('OCSP server started on %s:%d...' %
+              (host, self.__ocsp_server.server_port))
+
+          ocsp_states, ocsp_dates, ocsp_produced = self.__parse_ocsp_options(
+                  self.options.ocsp,
+                  self.options.ocsp_date,
+                  self.options.ocsp_produced)
+
+          (ocsp_intermediate_states, ocsp_intermediate_dates,
+           ocsp_intermediate_produced) = self.__parse_ocsp_options(
+                  self.options.ocsp_intermediate,
+                  self.options.ocsp_intermediate_date,
+                  self.options.ocsp_intermediate_produced)
+
+          ocsp_server_port = self.__ocsp_server.server_port
+          if self.options.ocsp_proxy_port_number != 0:
+            ocsp_server_port = self.options.ocsp_proxy_port_number
+            server_data['ocsp_port'] = self.__ocsp_server.server_port
+
+          pem_cert_and_key, (ocsp_der,
+           ocsp_intermediate_der) = minica.GenerateCertKeyAndOCSP(
+              subject = self.options.cert_common_name,
+              ip_sans = ip_sans,
+              dns_sans = dns_sans,
+              ocsp_url = ("http://%s:%d/ocsp" % (host, ocsp_server_port)),
+              ocsp_states = ocsp_states,
+              ocsp_dates = ocsp_dates,
+              ocsp_produced = ocsp_produced,
+              ocsp_intermediate_url = (
+                  "http://%s:%d/ocsp_intermediate" % (host, ocsp_server_port)
+                  if ocsp_intermediate_states else None),
+              ocsp_intermediate_states = ocsp_intermediate_states,
+              ocsp_intermediate_dates = ocsp_intermediate_dates,
+              ocsp_intermediate_produced = ocsp_intermediate_produced,
+              serial = self.options.cert_serial)
+
+          if self.options.ocsp_server_unavailable:
+            # SEQUENCE containing ENUMERATED with value 3 (tryLater):
+            # 30 03 (SEQUENCE, length 3) 0a 01 03 (ENUMERATED, length 1,
+            # value 3).
+            self.__ocsp_server.ocsp_response_intermediate = \
+                self.__ocsp_server.ocsp_response = '30030a0103'.decode('hex')
+          else:
+            self.__ocsp_server.ocsp_response = ocsp_der
+            self.__ocsp_server.ocsp_response_intermediate = \
+                ocsp_intermediate_der
+          self.__ocsp_server.ca_issuers_response = None
+
+        for ca_cert in self.options.ssl_client_ca:
+          if not os.path.isfile(ca_cert):
+            raise testserver_base.OptionError(
+                'specified trusted client CA file not found: ' + ca_cert +
+                ' exiting...')
+
+        stapled_ocsp_response = None
+        if self.options.staple_ocsp_response:
+          # TODO(mattm): Staple the intermediate response too (if applicable,
+          # and if chrome ever supports it).
+          stapled_ocsp_response = ocsp_der
+
+        server = HTTPSServer((host, port), TestPageHandler, pem_cert_and_key,
+                             self.options.ssl_client_auth,
+                             self.options.ssl_client_ca,
+                             self.options.ssl_client_cert_type,
+                             self.options.ssl_bulk_cipher,
+                             self.options.ssl_key_exchange,
+                             self.options.alpn_protocols,
+                             self.options.npn_protocols,
+                             self.options.record_resume,
+                             self.options.tls_intolerant,
+                             self.options.tls_intolerance_type,
+                             self.options.signed_cert_timestamps_tls_ext.decode(
+                                 "base64"),
+                             self.options.fallback_scsv,
+                             stapled_ocsp_response,
+                             self.options.alert_after_handshake,
+                             self.options.disable_channel_id,
+                             self.options.disable_extended_master_secret)
+        print 'HTTPS server started on https://%s:%d...' % \
+            (host, server.server_port)
+      else:
+        server = HTTPServer((host, port), TestPageHandler)
+        print 'HTTP server started on http://%s:%d...' % \
+            (host, server.server_port)
+
+      server.data_dir = self.__make_data_dir()
+      server.file_root_url = self.options.file_root_url
+      server_data['port'] = server.server_port
+    elif self.options.server_type == SERVER_WEBSOCKET:
+      # TODO(toyoshim): Remove the following os.chdir. It is currently
+      # required for the server to work correctly; this should be fixed on
+      # the pywebsocket side.
+      os.chdir(self.__make_data_dir())
+      websocket_options = WebSocketOptions(host, port, '.')
+      scheme = "ws"
+      if self.options.cert_and_key_file:
+        scheme = "wss"
+        websocket_options.use_tls = True
+        key_path = os.path.join(ROOT_DIR, self.options.cert_and_key_file)
+        if not os.path.isfile(key_path):
+          raise testserver_base.OptionError(
+              'specified server cert file not found: ' +
+              self.options.cert_and_key_file + ' exiting...')
+        websocket_options.private_key = key_path
+        websocket_options.certificate = key_path
+
+      if self.options.ssl_client_auth:
+        websocket_options.tls_client_cert_optional = False
+        websocket_options.tls_client_auth = True
+        if len(self.options.ssl_client_ca) != 1:
+          raise testserver_base.OptionError(
+              'one trusted client CA file should be specified')
+        if not os.path.isfile(self.options.ssl_client_ca[0]):
+          raise testserver_base.OptionError(
+              'specified trusted client CA file not found: ' +
+              self.options.ssl_client_ca[0] + ' exiting...')
+        websocket_options.tls_client_ca = self.options.ssl_client_ca[0]
+      print 'Trying to start websocket server on %s://%s:%d...' % \
+          (scheme, websocket_options.server_host, websocket_options.port)
+      server = WebSocketServer(websocket_options)
+      print 'WebSocket server started on %s://%s:%d...' % \
+          (scheme, host, server.server_port)
+      server_data['port'] = server.server_port
+      websocket_options.use_basic_auth = self.options.ws_basic_auth
+    elif self.options.server_type == SERVER_TCP_ECHO:
+      # Used for generating the key (randomly) that encodes the "echo request"
+      # message.
+      random.seed()
+      server = TCPEchoServer((host, port), TCPEchoHandler)
+      print 'Echo TCP server started on port %d...' % server.server_port
+      server_data['port'] = server.server_port
+    elif self.options.server_type == SERVER_UDP_ECHO:
+      # Used for generating the key (randomly) that encodes the "echo request"
+      # message.
+      random.seed()
+      server = UDPEchoServer((host, port), UDPEchoHandler)
+      print 'Echo UDP server started on port %d...' % server.server_port
+      server_data['port'] = server.server_port
+    elif self.options.server_type == SERVER_PROXY:
+      ProxyRequestHandler.redirect_connect_to_localhost = \
+          self.options.redirect_connect_to_localhost
+      server = ThreadingHTTPServer((host, port), ProxyRequestHandler)
+      print 'Proxy server started on port %d...' % server.server_port
+      server_data['port'] = server.server_port
+    elif self.options.server_type == SERVER_BASIC_AUTH_PROXY:
+      ProxyRequestHandler.redirect_connect_to_localhost = \
+          self.options.redirect_connect_to_localhost
+      server = ThreadingHTTPServer((host, port), BasicAuthProxyRequestHandler)
+      print 'BasicAuthProxy server started on port %d...' % server.server_port
+      server_data['port'] = server.server_port
+    elif self.options.server_type == SERVER_FTP:
+      my_data_dir = self.__make_data_dir()
+
+      # Instantiate a dummy authorizer for managing 'virtual' users
+      authorizer = pyftpdlib.ftpserver.DummyAuthorizer()
+
+      # Define a new user having full r/w permissions
+      authorizer.add_user('chrome', 'chrome', my_data_dir, perm='elradfmw')
+
+      # Define a read-only anonymous user unless disabled
+      if not self.options.no_anonymous_ftp_user:
+        authorizer.add_anonymous(my_data_dir)
+
+      # Instantiate FTP handler class
+      ftp_handler = pyftpdlib.ftpserver.FTPHandler
+      ftp_handler.authorizer = authorizer
+
+      # Define a customized banner (string returned when client connects)
+      ftp_handler.banner = ("pyftpdlib %s based ftpd ready." %
+                            pyftpdlib.ftpserver.__ver__)
+
+      # Instantiate FTP server class and listen to address:port
+      server = pyftpdlib.ftpserver.FTPServer((host, port), ftp_handler)
+      server_data['port'] = server.socket.getsockname()[1]
+      print 'FTP server started on port %d...' % server_data['port']
+    else:
+      raise testserver_base.OptionError('unknown server type: %s' %
+          self.options.server_type)
+
+    return server
+
+  def run_server(self):
+    if self.__ocsp_server:
+      self.__ocsp_server.serve_forever_on_thread()
+
+    testserver_base.TestServerRunner.run_server(self)
+
+    if self.__ocsp_server:
+      self.__ocsp_server.stop_serving()
+
+  def add_options(self):
+    testserver_base.TestServerRunner.add_options(self)
+    self.option_parser.add_option('-f', '--ftp', action='store_const',
+                                  const=SERVER_FTP, default=SERVER_HTTP,
+                                  dest='server_type',
+                                  help='start up an FTP server.')
+    self.option_parser.add_option('--tcp-echo', action='store_const',
+                                  const=SERVER_TCP_ECHO, default=SERVER_HTTP,
+                                  dest='server_type',
+                                  help='start up a tcp echo server.')
+    self.option_parser.add_option('--udp-echo', action='store_const',
+                                  const=SERVER_UDP_ECHO, default=SERVER_HTTP,
+                                  dest='server_type',
+                                  help='start up a udp echo server.')
+    self.option_parser.add_option('--proxy', action='store_const',
+                                  const=SERVER_PROXY,
+                                  default=SERVER_HTTP, dest='server_type',
+                                  help='start up a proxy server.')
+    self.option_parser.add_option('--basic-auth-proxy', action='store_const',
+                                  const=SERVER_BASIC_AUTH_PROXY,
+                                  default=SERVER_HTTP, dest='server_type',
+                                  help='start up a proxy server which requires '
+                                  'basic authentication.')
+    self.option_parser.add_option('--websocket', action='store_const',
+                                  const=SERVER_WEBSOCKET, default=SERVER_HTTP,
+                                  dest='server_type',
+                                  help='start up a WebSocket server.')
+    self.option_parser.add_option('--https', action='store_true',
+                                  dest='https', help='Specify that https '
+                                  'should be used.')
+    self.option_parser.add_option('--cert-and-key-file',
+                                  dest='cert_and_key_file', help='specify the '
+                                  'path to the file containing the certificate '
+                                  'and private key for the server in PEM '
+                                  'format')
+    self.option_parser.add_option('--aia-intermediate', action='store_true',
+                                  dest='aia_intermediate',
+                                  help='generate a certificate chain that '
+                                  'requires AIA cert fetching, and run a '
+                                  'server to respond to the AIA request.')
+    self.option_parser.add_option('--ocsp', dest='ocsp', default='ok',
+                                  help='The type of OCSP response generated '
+                                  'for the automatically generated '
+                                  'certificate. One of [ok,revoked,invalid,'
+                                  'unauthorized,unknown,later,invalid_data,'
+                                  'mismatched_serial]. Multiple values may '
+                                  'be joined with ":".')
+    self.option_parser.add_option('--ocsp-date', dest='ocsp_date',
+                                  default='valid', help='The validity of the '
+                                  'range between thisUpdate and nextUpdate')
+    self.option_parser.add_option('--ocsp-produced', dest='ocsp_produced',
+                                  default='valid', help='producedAt relative '
+                                  'to certificate expiry')
+    self.option_parser.add_option('--ocsp-intermediate',
+                                  dest='ocsp_intermediate', default=None,
+                                  help='If specified, the automatically '
+                                  'generated chain will include an '
+                                  'intermediate certificate with this type '
+                                  'of OCSP response (see docs for --ocsp)')
+    self.option_parser.add_option('--ocsp-intermediate-date',
+                                  dest='ocsp_intermediate_date',
+                                  default='valid', help='The validity of the '
+                                  'range between thisUpdate and nextUpdate')
+    self.option_parser.add_option('--ocsp-intermediate-produced',
+                                  dest='ocsp_intermediate_produced',
+                                  default='valid', help='producedAt relative '
+                                  'to certificate expiry')
+    self.option_parser.add_option('--cert-serial', dest='cert_serial',
+                                  default=0, type=int,
+                                  help='If non-zero then the generated '
+                                  'certificate will have this serial number')
+    self.option_parser.add_option('--cert-common-name', dest='cert_common_name',
+                                  default="127.0.0.1",
+                                  help='The generated certificate will have '
+                                  'this common name')
+    self.option_parser.add_option('--tls-intolerant', dest='tls_intolerant',
+                                  default='0', type='int',
+                                  help='If nonzero, certain TLS connections '
+                                  'will be aborted in order to test version '
+                                  'fallback. 1 means all TLS versions will be '
+                                  'aborted. 2 means TLS 1.1 or higher will be '
+                                  'aborted. 3 means TLS 1.2 or higher will be '
+                                  'aborted. 4 means TLS 1.3 or higher will be '
+                                  'aborted.')
+    self.option_parser.add_option('--tls-intolerance-type',
+                                  dest='tls_intolerance_type',
+                                  default="alert",
+                                  help='Controls how the server reacts to a '
+                                  'TLS version it is intolerant to. Valid '
+                                  'values are "alert", "close", and "reset".')
+    self.option_parser.add_option('--signed-cert-timestamps-tls-ext',
+                                  dest='signed_cert_timestamps_tls_ext',
+                                  default='',
+                                  help='Base64 encoded SCT list. If set, '
+                                  'server will respond with a '
+                                  'signed_certificate_timestamp TLS extension '
+                                  'whenever the client supports it.')
+    self.option_parser.add_option('--fallback-scsv', dest='fallback_scsv',
+                                  default=False, const=True,
+                                  action='store_const',
+                                  help='If given, TLS_FALLBACK_SCSV support '
+                                  'will be enabled. This causes the server to '
+                                  'reject fallback connections from compatible '
+                                  'clients (e.g. Chrome).')
+    self.option_parser.add_option('--staple-ocsp-response',
+                                  dest='staple_ocsp_response',
+                                  default=False, action='store_true',
+                                  help='If set, server will staple the OCSP '
+                                  'response whenever OCSP is on and the client '
+                                  'supports OCSP stapling.')
+    self.option_parser.add_option('--https-record-resume',
+                                  dest='record_resume', const=True,
+                                  default=False, action='store_const',
+                                  help='Record resumption cache events rather '
+                                  'than resuming as normal. Allows the use of '
+                                  'the /ssl-session-cache request')
+    self.option_parser.add_option('--ssl-client-auth', action='store_true',
+                                  help='Require SSL client auth on every '
+                                  'connection.')
+    self.option_parser.add_option('--ssl-client-ca', action='append',
+                                  default=[], help='Specify that the client '
+                                  'certificate request should include the CA '
+                                  'named in the subject of the DER-encoded '
+                                  'certificate contained in the specified '
+                                  'file. This option may appear multiple '
+                                  'times, indicating multiple CA names should '
+                                  'be sent in the request.')
+    self.option_parser.add_option('--ssl-client-cert-type', action='append',
+                                  default=[], help='Specify that the client '
+                                  'certificate request should include the '
+                                  'specified certificate_type value. This '
+                                  'option may appear multiple times, '
+                                  'indicating multiple values should be send '
+                                  'in the request. Valid values are '
+                                  '"rsa_sign", "dss_sign", and "ecdsa_sign". '
+                                  'If omitted, "rsa_sign" will be used.')
+    self.option_parser.add_option('--ssl-bulk-cipher', action='append',
+                                  help='Specify the bulk encryption '
+                                  'algorithm(s) that will be accepted by the '
+                                  'SSL server. Valid values are "aes128gcm", '
+                                  '"aes256", "aes128", "3des", "rc4". If '
+                                  'omitted, all algorithms will be used. This '
+                                  'option may appear multiple times, '
+                                  'indicating multiple algorithms should be '
+                                  'enabled.')
+    self.option_parser.add_option('--ssl-key-exchange', action='append',
+                                  help='Specify the key exchange algorithm(s) '
+                                  'that will be accepted by the SSL server. '
+                                  'Valid values are "rsa", "dhe_rsa", '
+                                  '"ecdhe_rsa". If omitted, all algorithms '
+                                  'will be used. This option may appear '
+                                  'multiple times, indicating multiple '
+                                  'algorithms should be enabled.')
+    self.option_parser.add_option('--alpn-protocols', action='append',
+                                  help='Specify the list of ALPN protocols.  '
+                                  'The server will not send an ALPN response '
+                                  'if this list does not overlap with the '
+                                  'list of protocols the client advertises.')
+    self.option_parser.add_option('--npn-protocols', action='append',
+                                  help='Specify the list of protocols sent in '
+                                  'an NPN response.  The server will not '
+                                  'support NPN if the list is empty.')
+    self.option_parser.add_option('--file-root-url', default='/files/',
+                                  help='Specify a root URL for files served.')
+    # TODO(ricea): Generalize this to support basic auth for HTTP too.
+    self.option_parser.add_option('--ws-basic-auth', action='store_true',
+                                  dest='ws_basic_auth',
+                                  help='Enable basic-auth for WebSocket')
+    self.option_parser.add_option('--ocsp-server-unavailable',
+                                  dest='ocsp_server_unavailable',
+                                  default=False, action='store_true',
+                                  help='If set, the OCSP server will return '
+                                  'a tryLater status rather than the actual '
+                                  'OCSP response.')
+    self.option_parser.add_option('--ocsp-proxy-port-number', default=0,
+                                  type='int', dest='ocsp_proxy_port_number',
+                                  help='Port allocated for OCSP proxy '
+                                  'when connection is proxied.')
+    self.option_parser.add_option('--alert-after-handshake',
+                                  dest='alert_after_handshake',
+                                  default=False, action='store_true',
+                                  help='If set, the server will send a fatal '
+                                  'alert immediately after the handshake.')
+    self.option_parser.add_option('--no-anonymous-ftp-user',
+                                  dest='no_anonymous_ftp_user',
+                                  default=False, action='store_true',
+                                  help='If set, the FTP server will not create '
+                                  'an anonymous user.')
+    self.option_parser.add_option('--disable-channel-id', action='store_true')
+    self.option_parser.add_option('--disable-extended-master-secret',
+                                  action='store_true')
+    self.option_parser.add_option('--redirect-connect-to-localhost',
+                                  dest='redirect_connect_to_localhost',
+                                  default=False, action='store_true',
+                                  help='If set, the Proxy server will connect '
+                                  'to localhost instead of the requested URL '
+                                  'on CONNECT requests')
+
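+# Example invocations (illustrative; all flags are defined in add_options
+# above, and the data-dir path is a placeholder):
+#
+#   python testserver.py --port=0 --data-dir=/path/to/test/data
+#   python testserver.py --https --ocsp=revoked --ocsp-date=valid \
+#       --staple-ocsp-response --port=0
+#   python testserver.py --proxy --redirect-connect-to-localhost --port=8888
+#
+# With --port=0 the OS picks an ephemeral port; the chosen port is reported
+# back to the parent process via the --startup-pipe mechanism defined in
+# testserver_base.py when that flag is given.
+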
+
+if __name__ == '__main__':
+  sys.exit(ServerRunner().main())
diff --git a/src/net/tools/testserver/testserver.pydeps b/src/net/tools/testserver/testserver.pydeps
new file mode 100644
index 0000000..ce20245
--- /dev/null
+++ b/src/net/tools/testserver/testserver.pydeps
@@ -0,0 +1,83 @@
+# Generated by running:
+#   build/print_python_deps.py --root net/tools/testserver --output net/tools/testserver/testserver.pydeps --whitelist third_party/tlslite/tlslite/utils net/tools/testserver/testserver.py
+../../../third_party/pywebsocket/src/mod_pywebsocket/__init__.py
+../../../third_party/pywebsocket/src/mod_pywebsocket/_stream_base.py
+../../../third_party/pywebsocket/src/mod_pywebsocket/_stream_hixie75.py
+../../../third_party/pywebsocket/src/mod_pywebsocket/_stream_hybi.py
+../../../third_party/pywebsocket/src/mod_pywebsocket/common.py
+../../../third_party/pywebsocket/src/mod_pywebsocket/dispatch.py
+../../../third_party/pywebsocket/src/mod_pywebsocket/extensions.py
+../../../third_party/pywebsocket/src/mod_pywebsocket/handshake/__init__.py
+../../../third_party/pywebsocket/src/mod_pywebsocket/handshake/_base.py
+../../../third_party/pywebsocket/src/mod_pywebsocket/handshake/hybi.py
+../../../third_party/pywebsocket/src/mod_pywebsocket/handshake/hybi00.py
+../../../third_party/pywebsocket/src/mod_pywebsocket/http_header_util.py
+../../../third_party/pywebsocket/src/mod_pywebsocket/memorizingfile.py
+../../../third_party/pywebsocket/src/mod_pywebsocket/msgutil.py
+../../../third_party/pywebsocket/src/mod_pywebsocket/mux.py
+../../../third_party/pywebsocket/src/mod_pywebsocket/standalone.py
+../../../third_party/pywebsocket/src/mod_pywebsocket/stream.py
+../../../third_party/pywebsocket/src/mod_pywebsocket/util.py
+../../../third_party/pywebsocket/src/mod_pywebsocket/xhr_benchmark_handler.py
+../../../third_party/tlslite/tlslite/__init__.py
+../../../third_party/tlslite/tlslite/api.py
+../../../third_party/tlslite/tlslite/basedb.py
+../../../third_party/tlslite/tlslite/checker.py
+../../../third_party/tlslite/tlslite/constants.py
+../../../third_party/tlslite/tlslite/errors.py
+../../../third_party/tlslite/tlslite/handshakesettings.py
+../../../third_party/tlslite/tlslite/integration/__init__.py
+../../../third_party/tlslite/tlslite/integration/asyncstatemachine.py
+../../../third_party/tlslite/tlslite/integration/clienthelper.py
+../../../third_party/tlslite/tlslite/integration/httptlsconnection.py
+../../../third_party/tlslite/tlslite/integration/imap4_tls.py
+../../../third_party/tlslite/tlslite/integration/pop3_tls.py
+../../../third_party/tlslite/tlslite/integration/smtp_tls.py
+../../../third_party/tlslite/tlslite/integration/tlsasyncdispatchermixin.py
+../../../third_party/tlslite/tlslite/integration/tlssocketservermixin.py
+../../../third_party/tlslite/tlslite/integration/xmlrpcserver.py
+../../../third_party/tlslite/tlslite/integration/xmlrpctransport.py
+../../../third_party/tlslite/tlslite/mathtls.py
+../../../third_party/tlslite/tlslite/messages.py
+../../../third_party/tlslite/tlslite/session.py
+../../../third_party/tlslite/tlslite/sessioncache.py
+../../../third_party/tlslite/tlslite/tlsconnection.py
+../../../third_party/tlslite/tlslite/tlsrecordlayer.py
+../../../third_party/tlslite/tlslite/utils/__init__.py
+../../../third_party/tlslite/tlslite/utils/aes.py
+../../../third_party/tlslite/tlslite/utils/aesgcm.py
+../../../third_party/tlslite/tlslite/utils/asn1parser.py
+../../../third_party/tlslite/tlslite/utils/cipherfactory.py
+../../../third_party/tlslite/tlslite/utils/codec.py
+../../../third_party/tlslite/tlslite/utils/compat.py
+../../../third_party/tlslite/tlslite/utils/cryptomath.py
+../../../third_party/tlslite/tlslite/utils/datefuncs.py
+../../../third_party/tlslite/tlslite/utils/keyfactory.py
+../../../third_party/tlslite/tlslite/utils/openssl_aes.py
+../../../third_party/tlslite/tlslite/utils/openssl_rc4.py
+../../../third_party/tlslite/tlslite/utils/openssl_rsakey.py
+../../../third_party/tlslite/tlslite/utils/openssl_tripledes.py
+../../../third_party/tlslite/tlslite/utils/p256.py
+../../../third_party/tlslite/tlslite/utils/pem.py
+../../../third_party/tlslite/tlslite/utils/pycrypto_aes.py
+../../../third_party/tlslite/tlslite/utils/pycrypto_aesgcm.py
+../../../third_party/tlslite/tlslite/utils/pycrypto_rc4.py
+../../../third_party/tlslite/tlslite/utils/pycrypto_rsakey.py
+../../../third_party/tlslite/tlslite/utils/pycrypto_tripledes.py
+../../../third_party/tlslite/tlslite/utils/python_aes.py
+../../../third_party/tlslite/tlslite/utils/python_aesgcm.py
+../../../third_party/tlslite/tlslite/utils/python_rc4.py
+../../../third_party/tlslite/tlslite/utils/python_rsakey.py
+../../../third_party/tlslite/tlslite/utils/rc4.py
+../../../third_party/tlslite/tlslite/utils/rijndael.py
+../../../third_party/tlslite/tlslite/utils/rsakey.py
+../../../third_party/tlslite/tlslite/utils/tackwrapper.py
+../../../third_party/tlslite/tlslite/utils/tripledes.py
+../../../third_party/tlslite/tlslite/verifierdb.py
+../../../third_party/tlslite/tlslite/x509.py
+../../../third_party/tlslite/tlslite/x509certchain.py
+asn1.py
+echo_message.py
+minica.py
+testserver.py
+testserver_base.py
diff --git a/src/net/tools/testserver/testserver_base.py b/src/net/tools/testserver/testserver_base.py
new file mode 100644
index 0000000..7be34a3
--- /dev/null
+++ b/src/net/tools/testserver/testserver_base.py
@@ -0,0 +1,273 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import BaseHTTPServer
+import errno
+import json
+import optparse
+import os
+import re
+import socket
+import SocketServer
+import struct
+import sys
+import warnings
+
+import tlslite.errors
+
+# Ignore deprecation warnings, they make our output more cluttered.
+warnings.filterwarnings("ignore", category=DeprecationWarning)
+
+if sys.platform == 'win32':
+  import msvcrt
+
+# Using debug() seems to cause hangs on XP: see http://crbug.com/64515.
+debug_output = sys.stderr
+def debug(string):
+  debug_output.write(string + "\n")
+  debug_output.flush()
+
+
+class Error(Exception):
+  """Error class for this module."""
+
+
+class OptionError(Error):
+  """Error for bad command line options."""
+
+
+class FileMultiplexer(object):
+  def __init__(self, fd1, fd2):
+    self.__fd1 = fd1
+    self.__fd2 = fd2
+
+  def __del__(self):
+    if self.__fd1 != sys.stdout and self.__fd1 != sys.stderr:
+      self.__fd1.close()
+    if self.__fd2 != sys.stdout and self.__fd2 != sys.stderr:
+      self.__fd2.close()
+
+  def write(self, text):
+    self.__fd1.write(text)
+    self.__fd2.write(text)
+
+  def flush(self):
+    self.__fd1.flush()
+    self.__fd2.flush()
+
+
+class ClientRestrictingServerMixIn:
+  """Implements verify_request to limit connections to our configured IP
+  address."""
+
+  def verify_request(self, _request, client_address):
+    return client_address[0] == self.server_address[0]
+
+
+class BrokenPipeHandlerMixIn:
+  """Allows the server to deal with "broken pipe" errors (which happen if the
+  browser quits with outstanding requests, like for the favicon). This mix-in
+  requires the class to derive from SocketServer.BaseServer and not override its
+  handle_error() method. """
+
+  def handle_error(self, request, client_address):
+    value = sys.exc_info()[1]
+    if isinstance(value, tlslite.errors.TLSClosedConnectionError):
+      print "testserver.py: Closed connection"
+      return
+    if isinstance(value, socket.error):
+      err = value.args[0]
+      if sys.platform in ('win32', 'cygwin'):
+        # "An established connection was aborted by the software in your host."
+        pipe_err = 10053
+      else:
+        pipe_err = errno.EPIPE
+      if err == pipe_err:
+        print "testserver.py: Broken pipe"
+        return
+      if err == errno.ECONNRESET:
+        print "testserver.py: Connection reset by peer"
+        return
+    SocketServer.BaseServer.handle_error(self, request, client_address)
+
+
+class StoppableHTTPServer(BaseHTTPServer.HTTPServer):
+  """This is a specialization of BaseHTTPServer to allow it
+  to be exited cleanly (by setting its "stop" member to True)."""
+
+  def serve_forever(self):
+    self.stop = False
+    self.nonce_time = None
+    while not self.stop:
+      self.handle_request()
+    self.socket.close()
+
+
+def MultiplexerHack(std_fd, log_fd):
+  """Creates a FileMultiplexer that will write to both specified files.
+
+  When running on Windows XP bots, stdout and stderr will be invalid file
+  handles, so log_fd will be returned directly.  (This does not occur if you
+  run the test suite directly from a console, but only if the output of the
+  test executable is redirected.)
+  """
+  if std_fd.fileno() <= 0:
+    return log_fd
+  return FileMultiplexer(std_fd, log_fd)
+
+
+class BasePageHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+
+  def __init__(self, request, client_address, socket_server,
+               connect_handlers, get_handlers, head_handlers, post_handlers,
+               put_handlers):
+    self._connect_handlers = connect_handlers
+    self._get_handlers = get_handlers
+    self._head_handlers = head_handlers
+    self._post_handlers = post_handlers
+    self._put_handlers = put_handlers
+    BaseHTTPServer.BaseHTTPRequestHandler.__init__(
+      self, request, client_address, socket_server)
+
+  def log_request(self, *args, **kwargs):
+    # Disable request logging to declutter test log output.
+    pass
+
+  def _ShouldHandleRequest(self, handler_name):
+    """Determines if the path can be handled by the handler.
+
+    The handler matches if the path begins with the handler name, optionally
+    followed by "?..." or "/...". For example, a handler named "/echo" matches
+    "/echo", "/echo?x=1" and "/echo/foo", but not "/echotest".
+    """
+
+    pattern = re.compile('%s($|\?|/).*' % handler_name)
+    return pattern.match(self.path)
+
+  def do_CONNECT(self):
+    for handler in self._connect_handlers:
+      if handler():
+        return
+
+  def do_GET(self):
+    for handler in self._get_handlers:
+      if handler():
+        return
+
+  def do_HEAD(self):
+    for handler in self._head_handlers:
+      if handler():
+        return
+
+  def do_POST(self):
+    for handler in self._post_handlers:
+      if handler():
+        return
+
+  def do_PUT(self):
+    for handler in self._put_handlers:
+      if handler():
+        return
+
+
+class TestServerRunner(object):
+  """Runs a test server and communicates with the controlling C++ test code.
+
+  Subclasses should override the create_server method to create their server
+  object, and the add_options method to add their own options.
+  """
+
+  def __init__(self):
+    self.option_parser = optparse.OptionParser()
+    self.add_options()
+
+  def main(self):
+    self.options, self.args = self.option_parser.parse_args()
+
+    logfile = open(self.options.log_file, 'w')
+
+    # http://crbug.com/248796: Error logs streamed to the normal sys.stderr
+    # will be written to the HTTP response payload when the remote test server
+    # is used. For this reason, some tests like
+    # ResourceFetcherTests.ResourceFetcher404 were failing on Android, where
+    # the remote test server is used. To fix them, we use sys.stdout as
+    # sys.stderr when the remote test server is in use.
+    if self.options.on_remote_server:
+      sys.stderr = sys.stdout
+
+    sys.stderr = MultiplexerHack(sys.stderr, logfile)
+    if self.options.log_to_console:
+      sys.stdout = MultiplexerHack(sys.stdout, logfile)
+    else:
+      sys.stdout = logfile
+
+    server_data = {
+      'host': self.options.host,
+    }
+    self.server = self.create_server(server_data)
+    self._notify_startup_complete(server_data)
+    self.run_server()
+
+  def create_server(self, server_data):
+    """Creates a server object and returns it.
+
+    Must populate server_data['port'], and can set additional server_data
+    elements if desired."""
+    raise NotImplementedError()
+
+  def run_server(self):
+    try:
+      self.server.serve_forever()
+    except KeyboardInterrupt:
+      print 'shutting down server'
+      self.server.stop = True
+
+  def add_options(self):
+    self.option_parser.add_option('--startup-pipe', type='int',
+                                  dest='startup_pipe',
+                                  help='File handle of pipe to parent process')
+    self.option_parser.add_option('--log-to-console', action='store_const',
+                                  const=True, default=False,
+                                  dest='log_to_console',
+                                  help='Enables or disables sys.stdout logging '
+                                  'to the console.')
+    self.option_parser.add_option('--log-file', default='testserver.log',
+                                  dest='log_file',
+                                  help='The name of the server log file.')
+    self.option_parser.add_option('--port', default=0, type='int',
+                                  help='Port used by the server. If '
+                                  'unspecified, the server will listen on an '
+                                  'ephemeral port.')
+    self.option_parser.add_option('--host', default='127.0.0.1',
+                                  dest='host',
+                                  help='Hostname or IP upon which the server '
+                                  'will listen. Client connections will also '
+                                  'only be allowed from this address.')
+    self.option_parser.add_option('--data-dir', dest='data_dir',
+                                  help='Directory from which to read the '
+                                  'files.')
+    self.option_parser.add_option('--on-remote-server', action='store_const',
+                                  const=True, default=False,
+                                  dest='on_remote_server',
+                                  help='Whether remote server is being used or '
+                                  'not.')
+
+  def _notify_startup_complete(self, server_data):
+    # Notify the parent that we've started. (BaseServer subclasses
+    # bind their sockets on construction.)
+    if self.options.startup_pipe is not None:
+      server_data_json = json.dumps(server_data)
+      server_data_len = len(server_data_json)
+      print 'sending server_data: %s (%d bytes)' % (
+        server_data_json, server_data_len)
+      if sys.platform == 'win32':
+        fd = msvcrt.open_osfhandle(self.options.startup_pipe, 0)
+      else:
+        fd = self.options.startup_pipe
+      startup_pipe = os.fdopen(fd, "w")
+      # First write the data length as an unsigned 4-byte value.  This
+      # is _not_ using network byte ordering since the other end of the
+      # pipe is on the same machine.
+      startup_pipe.write(struct.pack('=L', server_data_len))
+      startup_pipe.write(server_data_json)
+      startup_pipe.close()
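+
+
+def read_server_data_example(pipe_fd):
+  """Illustrative sketch only (not used by this module): how a parent process
+  could read the startup message written by _notify_startup_complete above.
+  The function name and the raw file-descriptor argument are assumptions made
+  for this example."""
+  pipe = os.fdopen(pipe_fd, 'r')
+  try:
+    # The length prefix is a 4-byte unsigned int in native byte order ('=L'),
+    # matching the struct.pack() call above; the JSON payload follows.
+    (length,) = struct.unpack('=L', pipe.read(4))
+    return json.loads(pipe.read(length))
+  finally:
+    pipe.close()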