Import Cobalt 19.master.0.203780

Includes the following patches:
  https://cobalt-review.googlesource.com/c/cobalt/+/5210
    by errong.leng@samsung.com
  https://cobalt-review.googlesource.com/c/cobalt/+/5270
    by linus.wang@samsung.com
diff --git a/src/third_party/web_platform_tests/webaudio/README.md b/src/third_party/web_platform_tests/webaudio/README.md
new file mode 100644
index 0000000..e1afecd
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/README.md
@@ -0,0 +1,5 @@
+Our test suite is currently tracking the [editor's draft](https://dvcs.w3.org/hg/audio/raw-file/tip/webaudio/specification.html) of the Web Audio API.
+
+The tests are arranged in subdirectories, corresponding to different
+sections of the spec. So, for example, tests for the `DelayNode` are
+in `the-audio-api/the-delaynode-interface`.
diff --git a/src/third_party/web_platform_tests/webaudio/js/buffer-loader.js b/src/third_party/web_platform_tests/webaudio/js/buffer-loader.js
new file mode 100644
index 0000000..453dc4a
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/js/buffer-loader.js
@@ -0,0 +1,44 @@
+/* Taken from
+   https://raw.github.com/WebKit/webkit/master/LayoutTests/webaudio/resources/buffer-loader.js */
+
+function BufferLoader(context, urlList, callback) {
+  this.context = context;
+  this.urlList = urlList;
+  this.onload = callback;
+  this.bufferList = new Array();
+  this.loadCount = 0;
+}
+
+BufferLoader.prototype.loadBuffer = function(url, index) {
+  // Load buffer asynchronously
+  var request = new XMLHttpRequest();
+  request.open("GET", url, true);
+  request.responseType = "arraybuffer";
+
+  var loader = this;
+
+  request.onload = function() {
+    loader.context.decodeAudioData(request.response, decodeSuccessCallback, decodeErrorCallback);
+  };
+
+  request.onerror = function() {
+    alert('BufferLoader: XHR error');
+  };
+
+  var decodeSuccessCallback = function(buffer) {
+    loader.bufferList[index] = buffer;
+    if (++loader.loadCount == loader.urlList.length)
+      loader.onload(loader.bufferList);
+  };
+
+  var decodeErrorCallback = function() {
+    alert('decodeErrorCallback: decode error');
+  };
+
+  request.send();
+}
+
+BufferLoader.prototype.load = function() {
+  for (var i = 0; i < this.urlList.length; ++i)
+    this.loadBuffer(this.urlList[i], i);
+}
diff --git a/src/third_party/web_platform_tests/webaudio/js/helpers.js b/src/third_party/web_platform_tests/webaudio/js/helpers.js
new file mode 100644
index 0000000..9e4ee6f
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/js/helpers.js
@@ -0,0 +1,33 @@
+function assert_array_approx_equals(actual, expected, epsilon, description)
+{
+  assert_true(actual.length === expected.length,
+              (description + ": lengths differ, expected " + expected.length + " got " + actual.length))
+
+  for (var i=0; i < actual.length; i++) {
+    assert_approx_equals(actual[i], expected[i], epsilon, (description + ": element " + i))
+  }
+}
+
+/*
+  Returns an array (typed or not), of the passed array with leading and trailing
+  zero-valued elements removed
+ */
+function trimEmptyElements(array) {
+  var start = 0;
+  var end = array.length;
+  
+  while (start < array.length) {
+    if (array[start] !== 0) {
+      break;
+    }
+    start++;
+  }
+
+  while (end > 0) {
+    end--;
+    if (array[end] !== 0) {
+      break;
+    }
+  }
+  return array.subarray(start, end);
+}
diff --git a/src/third_party/web_platform_tests/webaudio/js/lodash.js b/src/third_party/web_platform_tests/webaudio/js/lodash.js
new file mode 100644
index 0000000..3813d2a
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/js/lodash.js
@@ -0,0 +1,4282 @@
+/*!
+ * Lo-Dash 0.10.0 <http://lodash.com>
+ * (c) 2012 John-David Dalton <http://allyoucanleet.com/>
+ * Based on Underscore.js 1.4.2 <http://underscorejs.org>
+ * (c) 2009-2012 Jeremy Ashkenas, DocumentCloud Inc.
+ * Available under MIT license <http://lodash.com/license>
+ */
+;(function(window, undefined) {
+
+  /** Detect free variable `exports` */
+  var freeExports = typeof exports == 'object' && exports;
+
+  /** Detect free variable `global` and use it as `window` */
+  var freeGlobal = typeof global == 'object' && global;
+  if (freeGlobal.global === freeGlobal) {
+    window = freeGlobal;
+  }
+
+  /** Used for array and object method references */
+  var arrayRef = [],
+      // avoid a Closure Compiler bug by creatively creating an object
+      objectRef = new function(){};
+
+  /** Used to generate unique IDs */
+  var idCounter = 0;
+
+  /** Used internally to indicate various things */
+  var indicatorObject = objectRef;
+
+  /** Used by `cachedContains` as the default size when optimizations are enabled for large arrays */
+  var largeArraySize = 30;
+
+  /** Used to restore the original `_` reference in `noConflict` */
+  var oldDash = window._;
+
+  /** Used to detect template delimiter values that require a with-statement */
+  var reComplexDelimiter = /[-?+=!~*%&^<>|{(\/]|\[\D|\b(?:delete|in|instanceof|new|typeof|void)\b/;
+
+  /** Used to match HTML entities */
+  var reEscapedHtml = /&(?:amp|lt|gt|quot|#x27);/g;
+
+  /** Used to match empty string literals in compiled template source */
+  var reEmptyStringLeading = /\b__p \+= '';/g,
+      reEmptyStringMiddle = /\b(__p \+=) '' \+/g,
+      reEmptyStringTrailing = /(__e\(.*?\)|\b__t\)) \+\n'';/g;
+
+  /** Used to match regexp flags from their coerced string values */
+  var reFlags = /\w*$/;
+
+  /** Used to insert the data object variable into compiled template source */
+  var reInsertVariable = /(?:__e|__t = )\(\s*(?![\d\s"']|this\.)/g;
+
+  /** Used to detect if a method is native */
+  var reNative = RegExp('^' +
+    (objectRef.valueOf + '')
+      .replace(/[.*+?^=!:${}()|[\]\/\\]/g, '\\$&')
+      .replace(/valueOf|for [^\]]+/g, '.+?') + '$'
+  );
+
+  /**
+   * Used to match ES6 template delimiters
+   * http://people.mozilla.org/~jorendorff/es6-draft.html#sec-7.8.6
+   */
+  var reEsTemplate = /\$\{((?:(?=\\?)\\?[\s\S])*?)}/g;
+
+  /** Used to match "interpolate" template delimiters */
+  var reInterpolate = /<%=([\s\S]+?)%>/g;
+
+  /** Used to ensure capturing order of template delimiters */
+  var reNoMatch = /($^)/;
+
+  /** Used to match HTML characters */
+  var reUnescapedHtml = /[&<>"']/g;
+
+  /** Used to match unescaped characters in compiled string literals */
+  var reUnescapedString = /['\n\r\t\u2028\u2029\\]/g;
+
+  /** Used to fix the JScript [[DontEnum]] bug */
+  var shadowed = [
+    'constructor', 'hasOwnProperty', 'isPrototypeOf', 'propertyIsEnumerable',
+    'toLocaleString', 'toString', 'valueOf'
+  ];
+
+  /** Used to make template sourceURLs easier to identify */
+  var templateCounter = 0;
+
+  /** Native method shortcuts */
+  var ceil = Math.ceil,
+      concat = arrayRef.concat,
+      floor = Math.floor,
+      getPrototypeOf = reNative.test(getPrototypeOf = Object.getPrototypeOf) && getPrototypeOf,
+      hasOwnProperty = objectRef.hasOwnProperty,
+      push = arrayRef.push,
+      propertyIsEnumerable = objectRef.propertyIsEnumerable,
+      slice = arrayRef.slice,
+      toString = objectRef.toString;
+
+  /* Native method shortcuts for methods with the same name as other `lodash` methods */
+  var nativeBind = reNative.test(nativeBind = slice.bind) && nativeBind,
+      nativeIsArray = reNative.test(nativeIsArray = Array.isArray) && nativeIsArray,
+      nativeIsFinite = window.isFinite,
+      nativeIsNaN = window.isNaN,
+      nativeKeys = reNative.test(nativeKeys = Object.keys) && nativeKeys,
+      nativeMax = Math.max,
+      nativeMin = Math.min,
+      nativeRandom = Math.random;
+
+  /** `Object#toString` result shortcuts */
+  var argsClass = '[object Arguments]',
+      arrayClass = '[object Array]',
+      boolClass = '[object Boolean]',
+      dateClass = '[object Date]',
+      funcClass = '[object Function]',
+      numberClass = '[object Number]',
+      objectClass = '[object Object]',
+      regexpClass = '[object RegExp]',
+      stringClass = '[object String]';
+
+  /**
+   * Detect the JScript [[DontEnum]] bug:
+   *
+   * In IE < 9 an objects own properties, shadowing non-enumerable ones, are
+   * made non-enumerable as well.
+   */
+  var hasDontEnumBug;
+
+  /** Detect if own properties are iterated after inherited properties (IE < 9) */
+  var iteratesOwnLast;
+
+  /**
+   * Detect if `Array#shift` and `Array#splice` augment array-like objects
+   * incorrectly:
+   *
+   * Firefox < 10, IE compatibility mode, and IE < 9 have buggy Array `shift()`
+   * and `splice()` functions that fail to remove the last element, `value[0]`,
+   * of array-like objects even though the `length` property is set to `0`.
+   * The `shift()` method is buggy in IE 8 compatibility mode, while `splice()`
+   * is buggy regardless of mode in IE < 9 and buggy in compatibility mode in IE 9.
+   */
+  var hasObjectSpliceBug = (hasObjectSpliceBug = { '0': 1, 'length': 1 },
+    arrayRef.splice.call(hasObjectSpliceBug, 0, 1), hasObjectSpliceBug[0]);
+
+  /** Detect if an `arguments` object's indexes are non-enumerable (IE < 9) */
+  var noArgsEnum = true;
+
+  (function() {
+    var props = [];
+    function ctor() { this.x = 1; }
+    ctor.prototype = { 'valueOf': 1, 'y': 1 };
+    for (var prop in new ctor) { props.push(prop); }
+    for (prop in arguments) { noArgsEnum = !prop; }
+
+    hasDontEnumBug = !/valueOf/.test(props);
+    iteratesOwnLast = props[0] != 'x';
+  }(1));
+
+  /** Detect if an `arguments` object's [[Class]] is unresolvable (Firefox < 4, IE < 9) */
+  var noArgsClass = !isArguments(arguments);
+
+  /** Detect if `Array#slice` cannot be used to convert strings to arrays (Opera < 10.52) */
+  var noArraySliceOnStrings = slice.call('x')[0] != 'x';
+
+  /**
+   * Detect lack of support for accessing string characters by index:
+   *
+   * IE < 8 can't access characters by index and IE 8 can only access
+   * characters by index on string literals.
+   */
+  var noCharByIndex = ('x'[0] + Object('x')[0]) != 'xx';
+
+  /**
+   * Detect if a node's [[Class]] is unresolvable (IE < 9)
+   * and that the JS engine won't error when attempting to coerce an object to
+   * a string without a `toString` property value of `typeof` "function".
+   */
+  try {
+    var noNodeClass = ({ 'toString': 0 } + '', toString.call(window.document || 0) == objectClass);
+  } catch(e) { }
+
+  /* Detect if `Function#bind` exists and is inferred to be fast (all but V8) */
+  var isBindFast = nativeBind && /\n|Opera/.test(nativeBind + toString.call(window.opera));
+
+  /* Detect if `Object.keys` exists and is inferred to be fast (IE, Opera, V8) */
+  var isKeysFast = nativeKeys && /^.+$|true/.test(nativeKeys + !!window.attachEvent);
+
+  /**
+   * Detect if sourceURL syntax is usable without erroring:
+   *
+   * The JS engine in Adobe products, like InDesign, will throw a syntax error
+   * when it encounters a single line comment beginning with the `@` symbol.
+   *
+   * The JS engine in Narwhal will generate the function `function anonymous(){//}`
+   * and throw a syntax error.
+   *
+   * Avoid comments beginning `@` symbols in IE because they are part of its
+   * non-standard conditional compilation support.
+   * http://msdn.microsoft.com/en-us/library/121hztk3(v=vs.94).aspx
+   */
+  try {
+    var useSourceURL = (Function('//@')(), !window.attachEvent);
+  } catch(e) { }
+
+  /** Used to identify object classifications that `_.clone` supports */
+  var cloneableClasses = {};
+  cloneableClasses[argsClass] = cloneableClasses[funcClass] = false;
+  cloneableClasses[arrayClass] = cloneableClasses[boolClass] = cloneableClasses[dateClass] =
+  cloneableClasses[numberClass] = cloneableClasses[objectClass] = cloneableClasses[regexpClass] =
+  cloneableClasses[stringClass] = true;
+
+  /** Used to determine if values are of the language type Object */
+  var objectTypes = {
+    'boolean': false,
+    'function': true,
+    'object': true,
+    'number': false,
+    'string': false,
+    'undefined': false
+  };
+
+  /** Used to escape characters for inclusion in compiled string literals */
+  var stringEscapes = {
+    '\\': '\\',
+    "'": "'",
+    '\n': 'n',
+    '\r': 'r',
+    '\t': 't',
+    '\u2028': 'u2028',
+    '\u2029': 'u2029'
+  };
+
+  /*--------------------------------------------------------------------------*/
+
+  /**
+   * The `lodash` function.
+   *
+   * @name _
+   * @constructor
+   * @category Chaining
+   * @param {Mixed} value The value to wrap in a `lodash` instance.
+   * @returns {Object} Returns a `lodash` instance.
+   */
+  function lodash(value) {
+    // exit early if already wrapped
+    if (value && value.__wrapped__) {
+      return value;
+    }
+    // allow invoking `lodash` without the `new` operator
+    if (!(this instanceof lodash)) {
+      return new lodash(value);
+    }
+    this.__wrapped__ = value;
+  }
+
+  /**
+   * By default, the template delimiters used by Lo-Dash are similar to those in
+   * embedded Ruby (ERB). Change the following template settings to use alternative
+   * delimiters.
+   *
+   * @static
+   * @memberOf _
+   * @type Object
+   */
+  lodash.templateSettings = {
+
+    /**
+     * Used to detect `data` property values to be HTML-escaped.
+     *
+     * @static
+     * @memberOf _.templateSettings
+     * @type RegExp
+     */
+    'escape': /<%-([\s\S]+?)%>/g,
+
+    /**
+     * Used to detect code to be evaluated.
+     *
+     * @static
+     * @memberOf _.templateSettings
+     * @type RegExp
+     */
+    'evaluate': /<%([\s\S]+?)%>/g,
+
+    /**
+     * Used to detect `data` property values to inject.
+     *
+     * @static
+     * @memberOf _.templateSettings
+     * @type RegExp
+     */
+    'interpolate': reInterpolate,
+
+    /**
+     * Used to reference the data object in the template text.
+     *
+     * @static
+     * @memberOf _.templateSettings
+     * @type String
+     */
+    'variable': ''
+  };
+
+  /*--------------------------------------------------------------------------*/
+
+  /**
+   * The template used to create iterator functions.
+   *
+   * @private
+   * @param {Object} data The data object used to populate the text.
+   * @returns {String} Returns the interpolated text.
+   */
+  var iteratorTemplate = template(
+    // conditional strict mode
+    '<% if (obj.useStrict) { %>\'use strict\';\n<% } %>' +
+
+    // the `iteratee` may be reassigned by the `top` snippet
+    'var index, value, iteratee = <%= firstArg %>, ' +
+    // assign the `result` variable an initial value
+    'result = <%= firstArg %>;\n' +
+    // exit early if the first argument is falsey
+    'if (!<%= firstArg %>) return result;\n' +
+    // add code before the iteration branches
+    '<%= top %>;\n' +
+
+    // array-like iteration:
+    '<% if (arrayLoop) { %>' +
+    'var length = iteratee.length; index = -1;\n' +
+    'if (typeof length == \'number\') {' +
+
+    // add support for accessing string characters by index if needed
+    '  <% if (noCharByIndex) { %>\n' +
+    '  if (isString(iteratee)) {\n' +
+    '    iteratee = iteratee.split(\'\')\n' +
+    '  }' +
+    '  <% } %>\n' +
+
+    // iterate over the array-like value
+    '  while (++index < length) {\n' +
+    '    value = iteratee[index];\n' +
+    '    <%= arrayLoop %>\n' +
+    '  }\n' +
+    '}\n' +
+    'else {' +
+
+    // object iteration:
+    // add support for iterating over `arguments` objects if needed
+    '  <%  } else if (noArgsEnum) { %>\n' +
+    '  var length = iteratee.length; index = -1;\n' +
+    '  if (length && isArguments(iteratee)) {\n' +
+    '    while (++index < length) {\n' +
+    '      value = iteratee[index += \'\'];\n' +
+    '      <%= objectLoop %>\n' +
+    '    }\n' +
+    '  } else {' +
+    '  <% } %>' +
+
+    // Firefox < 3.6, Opera > 9.50 - Opera < 11.60, and Safari < 5.1
+    // (if the prototype or a property on the prototype has been set)
+    // incorrectly sets a function's `prototype` property [[Enumerable]]
+    // value to `true`. Because of this Lo-Dash standardizes on skipping
+    // the `prototype` property of functions regardless of its
+    // [[Enumerable]] value.
+    '  <% if (!hasDontEnumBug) { %>\n' +
+    '  var skipProto = typeof iteratee == \'function\' && \n' +
+    '    propertyIsEnumerable.call(iteratee, \'prototype\');\n' +
+    '  <% } %>' +
+
+    // iterate own properties using `Object.keys` if it's fast
+    '  <% if (isKeysFast && useHas) { %>\n' +
+    '  var ownIndex = -1,\n' +
+    '      ownProps = objectTypes[typeof iteratee] ? nativeKeys(iteratee) : [],\n' +
+    '      length = ownProps.length;\n\n' +
+    '  while (++ownIndex < length) {\n' +
+    '    index = ownProps[ownIndex];\n' +
+    '    <% if (!hasDontEnumBug) { %>if (!(skipProto && index == \'prototype\')) {\n  <% } %>' +
+    '    value = iteratee[index];\n' +
+    '    <%= objectLoop %>\n' +
+    '    <% if (!hasDontEnumBug) { %>}\n<% } %>' +
+    '  }' +
+
+    // else using a for-in loop
+    '  <% } else { %>\n' +
+    '  for (index in iteratee) {<%' +
+    '    if (!hasDontEnumBug || useHas) { %>\n    if (<%' +
+    '      if (!hasDontEnumBug) { %>!(skipProto && index == \'prototype\')<% }' +
+    '      if (!hasDontEnumBug && useHas) { %> && <% }' +
+    '      if (useHas) { %>hasOwnProperty.call(iteratee, index)<% }' +
+    '    %>) {' +
+    '    <% } %>\n' +
+    '    value = iteratee[index];\n' +
+    '    <%= objectLoop %>;' +
+    '    <% if (!hasDontEnumBug || useHas) { %>\n    }<% } %>\n' +
+    '  }' +
+    '  <% } %>' +
+
+    // Because IE < 9 can't set the `[[Enumerable]]` attribute of an
+    // existing property and the `constructor` property of a prototype
+    // defaults to non-enumerable, Lo-Dash skips the `constructor`
+    // property when it infers it's iterating over a `prototype` object.
+    '  <% if (hasDontEnumBug) { %>\n\n' +
+    '  var ctor = iteratee.constructor;\n' +
+    '    <% for (var k = 0; k < 7; k++) { %>\n' +
+    '  index = \'<%= shadowed[k] %>\';\n' +
+    '  if (<%' +
+    '      if (shadowed[k] == \'constructor\') {' +
+    '        %>!(ctor && ctor.prototype === iteratee) && <%' +
+    '      } %>hasOwnProperty.call(iteratee, index)) {\n' +
+    '    value = iteratee[index];\n' +
+    '    <%= objectLoop %>\n' +
+    '  }' +
+    '    <% } %>' +
+    '  <% } %>' +
+    '  <% if (arrayLoop || noArgsEnum) { %>\n}<% } %>\n' +
+
+    // add code to the bottom of the iteration function
+    '<%= bottom %>;\n' +
+    // finally, return the `result`
+    'return result'
+  );
+
+  /** Reusable iterator options for `assign` and `defaults` */
+  var assignIteratorOptions = {
+    'args': 'object, source, guard',
+    'top':
+      'for (var argsIndex = 1, argsLength = typeof guard == \'number\' ? 2 : arguments.length; argsIndex < argsLength; argsIndex++) {\n' +
+      '  if ((iteratee = arguments[argsIndex])) {',
+    'objectLoop': 'result[index] = value',
+    'bottom': '  }\n}'
+  };
+
+  /**
+   * Reusable iterator options shared by `forEach`, `forIn`, and `forOwn`.
+   */
+  var forEachIteratorOptions = {
+    'args': 'collection, callback, thisArg',
+    'top': 'callback = createCallback(callback, thisArg)',
+    'arrayLoop': 'if (callback(value, index, collection) === false) return result',
+    'objectLoop': 'if (callback(value, index, collection) === false) return result'
+  };
+
+  /** Reusable iterator options for `forIn` and `forOwn` */
+  var forOwnIteratorOptions = {
+    'arrayLoop': null
+  };
+
+  /*--------------------------------------------------------------------------*/
+
+  /**
+   * Creates a function optimized to search large arrays for a given `value`,
+   * starting at `fromIndex`, using strict equality for comparisons, i.e. `===`.
+   *
+   * @private
+   * @param {Array} array The array to search.
+   * @param {Mixed} value The value to search for.
+   * @param {Number} [fromIndex=0] The index to search from.
+   * @param {Number} [largeSize=30] The length at which an array is considered large.
+   * @returns {Boolean} Returns `true` if `value` is found, else `false`.
+   */
+  function cachedContains(array, fromIndex, largeSize) {
+    fromIndex || (fromIndex = 0);
+
+    var length = array.length,
+        isLarge = (length - fromIndex) >= (largeSize || largeArraySize);
+
+    if (isLarge) {
+      var cache = {},
+          index = fromIndex - 1;
+
+      while (++index < length) {
+        // manually coerce `value` to a string because `hasOwnProperty`, in some
+        // older versions of Firefox, coerces objects incorrectly
+        var key = array[index] + '';
+        (hasOwnProperty.call(cache, key) ? cache[key] : (cache[key] = [])).push(array[index]);
+      }
+    }
+    return function(value) {
+      if (isLarge) {
+        var key = value + '';
+        return hasOwnProperty.call(cache, key) && indexOf(cache[key], value) > -1;
+      }
+      return indexOf(array, value, fromIndex) > -1;
+    }
+  }
+
+  /**
+   * Used by `_.max` and `_.min` as the default `callback` when a given
+   * `collection` is a string value.
+   *
+   * @private
+   * @param {String} value The character to inspect.
+   * @returns {Number} Returns the code unit of given character.
+   */
+  function charAtCallback(value) {
+    return value.charCodeAt(0);
+  }
+
+  /**
+   * Used by `sortBy` to compare transformed `collection` values, stable sorting
+   * them in ascending order.
+   *
+   * @private
+   * @param {Object} a The object to compare to `b`.
+   * @param {Object} b The object to compare to `a`.
+   * @returns {Number} Returns the sort order indicator of `1` or `-1`.
+   */
+  function compareAscending(a, b) {
+    var ai = a.index,
+        bi = b.index;
+
+    a = a.criteria;
+    b = b.criteria;
+
+    // ensure a stable sort in V8 and other engines
+    // http://code.google.com/p/v8/issues/detail?id=90
+    if (a !== b) {
+      if (a > b || a === undefined) {
+        return 1;
+      }
+      if (a < b || b === undefined) {
+        return -1;
+      }
+    }
+    return ai < bi ? -1 : 1;
+  }
+
+  /**
+   * Creates a function that, when called, invokes `func` with the `this`
+   * binding of `thisArg` and prepends any `partialArgs` to the arguments passed
+   * to the bound function.
+   *
+   * @private
+   * @param {Function|String} func The function to bind or the method name.
+   * @param {Mixed} [thisArg] The `this` binding of `func`.
+   * @param {Array} partialArgs An array of arguments to be partially applied.
+   * @returns {Function} Returns the new bound function.
+   */
+  function createBound(func, thisArg, partialArgs) {
+    var isFunc = isFunction(func),
+        isPartial = !partialArgs,
+        key = thisArg;
+
+    // juggle arguments
+    if (isPartial) {
+      partialArgs = thisArg;
+    }
+    if (!isFunc) {
+      thisArg = func;
+    }
+
+    function bound() {
+      // `Function#bind` spec
+      // http://es5.github.com/#x15.3.4.5
+      var args = arguments,
+          thisBinding = isPartial ? this : thisArg;
+
+      if (!isFunc) {
+        func = thisArg[key];
+      }
+      if (partialArgs.length) {
+        args = args.length
+          ? partialArgs.concat(slice.call(args))
+          : partialArgs;
+      }
+      if (this instanceof bound) {
+        // get `func` instance if `bound` is invoked in a `new` expression
+        noop.prototype = func.prototype;
+        thisBinding = new noop;
+
+        // mimic the constructor's `return` behavior
+        // http://es5.github.com/#x13.2.2
+        var result = func.apply(thisBinding, args);
+        return isObject(result)
+          ? result
+          : thisBinding
+      }
+      return func.apply(thisBinding, args);
+    }
+    return bound;
+  }
+
+  /**
+   * Produces an iteration callback bound to an optional `thisArg`. If `func` is
+   * a property name, the callback will return the property value for a given element.
+   *
+   * @private
+   * @param {Function|String} [func=identity|property] The function called per
+   * iteration or property name to query.
+   * @param {Mixed} [thisArg] The `this` binding of `callback`.
+   * @returns {Function} Returns a callback function.
+   */
+  function createCallback(func, thisArg) {
+    if (!func) {
+      return identity;
+    }
+    if (typeof func != 'function') {
+      return function(object) {
+        return object[func];
+      };
+    }
+    if (thisArg !== undefined) {
+      return function(value, index, object) {
+        return func.call(thisArg, value, index, object);
+      };
+    }
+    return func;
+  }
+
+  /**
+   * Creates compiled iteration functions.
+   *
+   * @private
+   * @param {Object} [options1, options2, ...] The compile options object(s).
+   *  useHas - A boolean to specify using `hasOwnProperty` checks in the object loop.
+   *  args - A string of comma separated arguments the iteration function will accept.
+   *  top - A string of code to execute before the iteration branches.
+   *  arrayLoop - A string of code to execute in the array loop.
+   *  objectLoop - A string of code to execute in the object loop.
+   *  bottom - A string of code to execute after the iteration branches.
+   *
+   * @returns {Function} Returns the compiled function.
+   */
+  function createIterator() {
+    var data = {
+      'arrayLoop': '',
+      'bottom': '',
+      'hasDontEnumBug': hasDontEnumBug,
+      'isKeysFast': isKeysFast,
+      'objectLoop': '',
+      'noArgsEnum': noArgsEnum,
+      'noCharByIndex': noCharByIndex,
+      'shadowed': shadowed,
+      'top': '',
+      'useHas': true
+    };
+
+    // merge options into a template data object
+    for (var object, index = 0; object = arguments[index]; index++) {
+      for (var key in object) {
+        data[key] = object[key];
+      }
+    }
+    var args = data.args;
+    data.firstArg = /^[^,]+/.exec(args)[0];
+
+    // create the function factory
+    var factory = Function(
+        'createCallback, hasOwnProperty, isArguments, isString, objectTypes, ' +
+        'nativeKeys, propertyIsEnumerable',
+      'return function(' + args + ') {\n' + iteratorTemplate(data) + '\n}'
+    );
+    // return the compiled function
+    return factory(
+      createCallback, hasOwnProperty, isArguments, isString, objectTypes,
+      nativeKeys, propertyIsEnumerable
+    );
+  }
+
+  /**
+   * Used by `template` to escape characters for inclusion in compiled
+   * string literals.
+   *
+   * @private
+   * @param {String} match The matched character to escape.
+   * @returns {String} Returns the escaped character.
+   */
+  function escapeStringChar(match) {
+    return '\\' + stringEscapes[match];
+  }
+
+  /**
+   * Used by `escape` to convert characters to HTML entities.
+   *
+   * @private
+   * @param {String} match The matched character to escape.
+   * @returns {String} Returns the escaped character.
+   */
+  function escapeHtmlChar(match) {
+    return htmlEscapes[match];
+  }
+
+  /**
+   * A no-operation function.
+   *
+   * @private
+   */
+  function noop() {
+    // no operation performed
+  }
+
+  /**
+   * Used by `unescape` to convert HTML entities to characters.
+   *
+   * @private
+   * @param {String} match The matched character to unescape.
+   * @returns {String} Returns the unescaped character.
+   */
+  function unescapeHtmlChar(match) {
+    return htmlUnescapes[match];
+  }
+
+  /*--------------------------------------------------------------------------*/
+
+  /**
+   * Assigns own enumerable properties of source object(s) to the `destination`
+   * object. Subsequent sources will overwrite property assignments of previous
+   * sources.
+   *
+   * @static
+   * @memberOf _
+   * @alias extend
+   * @category Objects
+   * @param {Object} object The destination object.
+   * @param {Object} [source1, source2, ...] The source objects.
+   * @returns {Object} Returns the destination object.
+   * @example
+   *
+   * _.assign({ 'name': 'moe' }, { 'age': 40 });
+   * // => { 'name': 'moe', 'age': 40 }
+   */
+  var assign = createIterator(assignIteratorOptions);
+
+  /**
+   * Checks if `value` is an `arguments` object.
+   *
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Mixed} value The value to check.
+   * @returns {Boolean} Returns `true` if the `value` is an `arguments` object, else `false`.
+   * @example
+   *
+   * (function() { return _.isArguments(arguments); })(1, 2, 3);
+   * // => true
+   *
+   * _.isArguments([1, 2, 3]);
+   * // => false
+   */
+  function isArguments(value) {
+    return toString.call(value) == argsClass;
+  }
+  // fallback for browsers that can't detect `arguments` objects by [[Class]]
+  if (noArgsClass) {
+    isArguments = function(value) {
+      return value ? hasOwnProperty.call(value, 'callee') : false;
+    };
+  }
+
+  /**
+   * Iterates over `object`'s own and inherited enumerable properties, executing
+   * the `callback` for each property. The `callback` is bound to `thisArg` and
+   * invoked with three arguments; (value, key, object). Callbacks may exit iteration
+   * early by explicitly returning `false`.
+   *
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Object} object The object to iterate over.
+   * @param {Function} callback The function called per iteration.
+   * @param {Mixed} [thisArg] The `this` binding of `callback`.
+   * @returns {Object} Returns `object`.
+   * @example
+   *
+   * function Dog(name) {
+   *   this.name = name;
+   * }
+   *
+   * Dog.prototype.bark = function() {
+   *   alert('Woof, woof!');
+   * };
+   *
+   * _.forIn(new Dog('Dagny'), function(value, key) {
+   *   alert(key);
+   * });
+   * // => alerts 'name' and 'bark' (order is not guaranteed)
+   */
+  var forIn = createIterator(forEachIteratorOptions, forOwnIteratorOptions, {
+    'useHas': false
+  });
+
+  /**
+   * Iterates over an object's own enumerable properties, executing the `callback`
+   * for each property. The `callback` is bound to `thisArg` and invoked with three
+   * arguments; (value, key, object). Callbacks may exit iteration early by explicitly
+   * returning `false`.
+   *
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Object} object The object to iterate over.
+   * @param {Function} callback The function called per iteration.
+   * @param {Mixed} [thisArg] The `this` binding of `callback`.
+   * @returns {Object} Returns `object`.
+   * @example
+   *
+   * _.forOwn({ '0': 'zero', '1': 'one', 'length': 2 }, function(num, key) {
+   *   alert(key);
+   * });
+   * // => alerts '0', '1', and 'length' (order is not guaranteed)
+   */
+  var forOwn = createIterator(forEachIteratorOptions, forOwnIteratorOptions);
+
+  /**
+   * A fallback implementation of `isPlainObject` that checks if a given `value`
+   * is an object created by the `Object` constructor, assuming objects created
+   * by the `Object` constructor have no inherited enumerable properties and that
+   * there are no `Object.prototype` extensions.
+   *
+   * @private
+   * @param {Mixed} value The value to check.
+   * @returns {Boolean} Returns `true` if `value` is a plain object, else `false`.
+   */
+  function shimIsPlainObject(value) {
+    // avoid non-objects and false positives for `arguments` objects
+    var result = false;
+    if (!(value && typeof value == 'object') || isArguments(value)) {
+      return result;
+    }
+    // IE < 9 presents DOM nodes as `Object` objects except they have `toString`
+    // methods that are `typeof` "string" and still can coerce nodes to strings.
+    // Also check that the constructor is `Object` (i.e. `Object instanceof Object`)
+    var ctor = value.constructor;
+    if ((!noNodeClass || !(typeof value.toString != 'function' && typeof (value + '') == 'string')) &&
+        (!isFunction(ctor) || ctor instanceof ctor)) {
+      // IE < 9 iterates inherited properties before own properties. If the first
+      // iterated property is an object's own property then there are no inherited
+      // enumerable properties.
+      if (iteratesOwnLast) {
+        forIn(value, function(value, key, object) {
+          result = !hasOwnProperty.call(object, key);
+          return false;
+        });
+        return result === false;
+      }
+      // In most environments an object's own properties are iterated before
+      // its inherited properties. If the last iterated property is an object's
+      // own property then there are no inherited enumerable properties.
+      forIn(value, function(value, key) {
+        result = key;
+      });
+      return result === false || hasOwnProperty.call(value, result);
+    }
+    return result;
+  }
+
+  /**
+   * A fallback implementation of `Object.keys` that produces an array of the
+   * given object's own enumerable property names.
+   *
+   * @private
+   * @param {Object} object The object to inspect.
+   * @returns {Array} Returns a new array of property names.
+   */
+  function shimKeys(object) {
+    var result = [];
+    forOwn(object, function(value, key) {
+      result.push(key);
+    });
+    return result;
+  }
+
+  /**
+   * Used to convert characters to HTML entities:
+   *
+   * Though the `>` character is escaped for symmetry, characters like `>` and `/`
+   * don't require escaping in HTML and have no special meaning unless they're part
+   * of a tag or an unquoted attribute value.
+   * http://mathiasbynens.be/notes/ambiguous-ampersands (under "semi-related fun fact")
+   */
+  var htmlEscapes = {
+    '&': '&amp;',
+    '<': '&lt;',
+    '>': '&gt;',
+    '"': '&quot;',
+    "'": '&#x27;'
+  };
+
+  /** Used to convert HTML entities to characters */
+  var htmlUnescapes = invert(htmlEscapes);
+
+  /*--------------------------------------------------------------------------*/
+
+  /**
+   * Creates a clone of `value`. If `deep` is `true`, all nested objects will
+   * also be cloned otherwise they will be assigned by reference. Functions, DOM
+   * nodes, `arguments` objects, and objects created by constructors other than
+   * `Object` are **not** cloned.
+   *
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Mixed} value The value to clone.
+   * @param {Boolean} deep A flag to indicate a deep clone.
+   * @param- {Object} [guard] Internally used to allow this method to work with
+   *  others like `_.map` without using their callback `index` argument for `deep`.
+   * @param- {Array} [stackA=[]] Internally used to track traversed source objects.
+   * @param- {Array} [stackB=[]] Internally used to associate clones with their
+   *  source counterparts.
+   * @returns {Mixed} Returns the cloned `value`.
+   * @example
+   *
+   * var stooges = [
+   *   { 'name': 'moe', 'age': 40 },
+   *   { 'name': 'larry', 'age': 50 },
+   *   { 'name': 'curly', 'age': 60 }
+   * ];
+   *
+   * _.clone({ 'name': 'moe' });
+   * // => { 'name': 'moe' }
+   *
+   * var shallow = _.clone(stooges);
+   * shallow[0] === stooges[0];
+   * // => true
+   *
+   * var deep = _.clone(stooges, true);
+   * shallow[0] === stooges[0];
+   * // => false
+   */
+  function clone(value, deep, guard, stackA, stackB) {
+    if (value == null) {
+      return value;
+    }
+    if (guard) {
+      deep = false;
+    }
+    // inspect [[Class]]
+    var isObj = isObject(value);
+    if (isObj) {
+      // don't clone `arguments` objects, functions, or non-object Objects
+      var className = toString.call(value);
+      if (!cloneableClasses[className] || (noArgsClass && isArguments(value))) {
+        return value;
+      }
+      var isArr = className == arrayClass;
+      isObj = isArr || (className == objectClass ? isPlainObject(value) : isObj);
+    }
+    // shallow clone
+    if (!isObj || !deep) {
+      // don't clone functions
+      return isObj
+        ? (isArr ? slice.call(value) : assign({}, value))
+        : value;
+    }
+
+    var ctor = value.constructor;
+    switch (className) {
+      case boolClass:
+      case dateClass:
+        return new ctor(+value);
+
+      case numberClass:
+      case stringClass:
+        return new ctor(value);
+
+      case regexpClass:
+        return ctor(value.source, reFlags.exec(value));
+    }
+    // check for circular references and return corresponding clone
+    stackA || (stackA = []);
+    stackB || (stackB = []);
+
+    var length = stackA.length;
+    while (length--) {
+      if (stackA[length] == value) {
+        return stackB[length];
+      }
+    }
+    // init cloned object
+    var result = isArr ? ctor(value.length) : {};
+
+    // add the source value to the stack of traversed objects
+    // and associate it with its clone
+    stackA.push(value);
+    stackB.push(result);
+
+    // recursively populate clone (susceptible to call stack limits)
+    (isArr ? forEach : forOwn)(value, function(objValue, key) {
+      result[key] = clone(objValue, deep, null, stackA, stackB);
+    });
+
+    return result;
+  }
+
+  /**
+   * Assigns own enumerable properties of source object(s) to the `destination`
+   * object for all `destination` properties that resolve to `null`/`undefined`.
+   * Once a property is set, additional defaults of the same property will be
+   * ignored.
+   *
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Object} object The destination object.
+   * @param {Object} [default1, default2, ...] The default objects.
+   * @returns {Object} Returns the destination object.
+   * @example
+   *
+   * var iceCream = { 'flavor': 'chocolate' };
+   * _.defaults(iceCream, { 'flavor': 'vanilla', 'sprinkles': 'rainbow' });
+   * // => { 'flavor': 'chocolate', 'sprinkles': 'rainbow' }
+   */
+  var defaults = createIterator(assignIteratorOptions, {
+    'objectLoop': 'if (result[index] == null) ' + assignIteratorOptions.objectLoop
+  });
+
+  /**
+   * Creates a sorted array of all enumerable properties, own and inherited,
+   * of `object` that have function values.
+   *
+   * @static
+   * @memberOf _
+   * @alias methods
+   * @category Objects
+   * @param {Object} object The object to inspect.
+   * @returns {Array} Returns a new array of property names that have function values.
+   * @example
+   *
+   * _.functions(_);
+   * // => ['all', 'any', 'bind', 'bindAll', 'clone', 'compact', 'compose', ...]
+   */
+  function functions(object) {
+    var result = [];
+    forIn(object, function(value, key) {
+      if (isFunction(value)) {
+        result.push(key);
+      }
+    });
+    return result.sort();
+  }
+
+  /**
+   * Checks if the specified object `property` exists and is a direct property,
+   * instead of an inherited property.
+   *
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Object} object The object to check.
+   * @param {String} property The property to check for.
+   * @returns {Boolean} Returns `true` if key is a direct property, else `false`.
+   * @example
+   *
+   * _.has({ 'a': 1, 'b': 2, 'c': 3 }, 'b');
+   * // => true
+   */
+  function has(object, property) {
+    return object ? hasOwnProperty.call(object, property) : false;
+  }
+
+  /**
+   * Creates an object composed of the inverted keys and values of the given `object`.
+   *
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Object} object The object to invert.
+   * @returns {Object} Returns the created inverted object.
+   * @example
+   *
+   *  _.invert({ 'first': 'Moe', 'second': 'Larry', 'third': 'Curly' });
+   * // => { 'Moe': 'first', 'Larry': 'second', 'Curly': 'third' } (order is not guaranteed)
+   */
+  function invert(object) {
+    var result = {};
+    forOwn(object, function(value, key) {
+      result[value] = key;
+    });
+    return result;
+  }
+
+  /**
+   * Checks if `value` is an array.
+   *
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Mixed} value The value to check.
+   * @returns {Boolean} Returns `true` if the `value` is an array, else `false`.
+   * @example
+   *
+   * (function() { return _.isArray(arguments); })();
+   * // => false
+   *
+   * _.isArray([1, 2, 3]);
+   * // => true
+   */
+  var isArray = nativeIsArray || function(value) {
+    return toString.call(value) == arrayClass;
+  };
+
+  /**
+   * Checks if `value` is a boolean (`true` or `false`) value.
+   *
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Mixed} value The value to check.
+   * @returns {Boolean} Returns `true` if the `value` is a boolean value, else `false`.
+   * @example
+   *
+   * _.isBoolean(null);
+   * // => false
+   */
+  function isBoolean(value) {
+    return value === true || value === false || toString.call(value) == boolClass;
+  }
+
+  /**
+   * Checks if `value` is a date.
+   *
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Mixed} value The value to check.
+   * @returns {Boolean} Returns `true` if the `value` is a date, else `false`.
+   * @example
+   *
+   * _.isDate(new Date);
+   * // => true
+   */
+  function isDate(value) {
+    return toString.call(value) == dateClass;
+  }
+
+  /**
+   * Checks if `value` is a DOM element.
+   *
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Mixed} value The value to check.
+   * @returns {Boolean} Returns `true` if the `value` is a DOM element, else `false`.
+   * @example
+   *
+   * _.isElement(document.body);
+   * // => true
+   */
+  function isElement(value) {
+    return value ? value.nodeType === 1 : false;
+  }
+
+  /**
+   * Checks if `value` is empty. Arrays, strings, or `arguments` objects with a
+   * length of `0` and objects with no own enumerable properties are considered
+   * "empty".
+   *
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Array|Object|String} value The value to inspect.
+   * @returns {Boolean} Returns `true` if the `value` is empty, else `false`.
+   * @example
+   *
+   * _.isEmpty([1, 2, 3]);
+   * // => false
+   *
+   * _.isEmpty({});
+   * // => true
+   *
+   * _.isEmpty('');
+   * // => true
+   */
+  function isEmpty(value) {
+    var result = true;
+    if (!value) {
+      return result;
+    }
+    var className = toString.call(value),
+        length = value.length;
+
+    if ((className == arrayClass || className == stringClass ||
+        className == argsClass || (noArgsClass && isArguments(value))) ||
+        (className == objectClass && typeof length == 'number' && isFunction(value.splice))) {
+      return !length;
+    }
+    forOwn(value, function() {
+      return (result = false);
+    });
+    return result;
+  }
+
+  /**
+   * Performs a deep comparison between two values to determine if they are
+   * equivalent to each other.
+   *
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Mixed} a The value to compare.
+   * @param {Mixed} b The other value to compare.
+   * @param- {Object} [stackA=[]] Internally used to track traversed `a` objects.
+   * @param- {Object} [stackB=[]] Internally used to track traversed `b` objects.
+   * @returns {Boolean} Returns `true` if the values are equivalent, else `false`.
+   * @example
+   *
+   * var moe = { 'name': 'moe', 'luckyNumbers': [13, 27, 34] };
+   * var clone = { 'name': 'moe', 'luckyNumbers': [13, 27, 34] };
+   *
+   * moe == clone;
+   * // => false
+   *
+   * _.isEqual(moe, clone);
+   * // => true
+   */
+  function isEqual(a, b, stackA, stackB) {
+    // exit early for identical values
+    if (a === b) {
+      // treat `+0` vs. `-0` as not equal
+      return a !== 0 || (1 / a == 1 / b);
+    }
+    // a strict comparison is necessary because `null == undefined`
+    if (a == null || b == null) {
+      return a === b;
+    }
+    // compare [[Class]] names
+    var className = toString.call(a);
+    if (className != toString.call(b)) {
+      return false;
+    }
+    switch (className) {
+      case boolClass:
+      case dateClass:
+        // coerce dates and booleans to numbers, dates to milliseconds and booleans
+        // to `1` or `0`, treating invalid dates coerced to `NaN` as not equal
+        return +a == +b;
+
+      case numberClass:
+        // treat `NaN` vs. `NaN` as equal
+        return a != +a
+          ? b != +b
+          // but treat `+0` vs. `-0` as not equal
+          : (a == 0 ? (1 / a == 1 / b) : a == +b);
+
+      case regexpClass:
+      case stringClass:
+        // coerce regexes to strings (http://es5.github.com/#x15.10.6.4)
+        // treat string primitives and their corresponding object instances as equal
+        return a == b + '';
+    }
+    // exit early, in older browsers, if `a` is array-like but not `b`
+    var isArr = className == arrayClass || className == argsClass;
+    if (noArgsClass && !isArr && (isArr = isArguments(a)) && !isArguments(b)) {
+      return false;
+    }
+    if (!isArr) {
+      // unwrap any `lodash` wrapped values
+      if (a.__wrapped__ || b.__wrapped__) {
+        return isEqual(a.__wrapped__ || a, b.__wrapped__ || b);
+      }
+      // exit for functions and DOM nodes
+      if (className != objectClass || (noNodeClass && (
+          (typeof a.toString != 'function' && typeof (a + '') == 'string') ||
+          (typeof b.toString != 'function' && typeof (b + '') == 'string')))) {
+        return false;
+      }
+      var ctorA = a.constructor,
+          ctorB = b.constructor;
+
+      // non `Object` object instances with different constructors are not equal
+      if (ctorA != ctorB && !(
+            isFunction(ctorA) && ctorA instanceof ctorA &&
+            isFunction(ctorB) && ctorB instanceof ctorB
+          )) {
+        return false;
+      }
+    }
+    // assume cyclic structures are equal
+    // the algorithm for detecting cyclic structures is adapted from ES 5.1
+    // section 15.12.3, abstract operation `JO` (http://es5.github.com/#x15.12.3)
+    stackA || (stackA = []);
+    stackB || (stackB = []);
+
+    var length = stackA.length;
+    while (length--) {
+      if (stackA[length] == a) {
+        return stackB[length] == b;
+      }
+    }
+
+    var index = -1,
+        result = true,
+        size = 0;
+
+    // add `a` and `b` to the stack of traversed objects
+    stackA.push(a);
+    stackB.push(b);
+
+    // recursively compare objects and arrays (susceptible to call stack limits)
+    if (isArr) {
+      // compare lengths to determine if a deep comparison is necessary
+      size = a.length;
+      result = size == b.length;
+
+      if (result) {
+        // deep compare the contents, ignoring non-numeric properties
+        while (size--) {
+          if (!(result = isEqual(a[size], b[size], stackA, stackB))) {
+            break;
+          }
+        }
+      }
+      return result;
+    }
+    // deep compare objects
+    for (var key in a) {
+      if (hasOwnProperty.call(a, key)) {
+        // count the number of properties.
+        size++;
+        // deep compare each property value.
+        if (!(hasOwnProperty.call(b, key) && isEqual(a[key], b[key], stackA, stackB))) {
+          return false;
+        }
+      }
+    }
+    // ensure both objects have the same number of properties
+    for (key in b) {
+      // The JS engine in Adobe products, like InDesign, has a bug that causes
+      // `!size--` to throw an error so it must be wrapped in parentheses.
+      // https://github.com/documentcloud/underscore/issues/355
+      if (hasOwnProperty.call(b, key) && !(size--)) {
+        // `size` will be `-1` if `b` has more properties than `a`
+        return false;
+      }
+    }
+    // handle JScript [[DontEnum]] bug
+    if (hasDontEnumBug) {
+      while (++index < 7) {
+        key = shadowed[index];
+        if (hasOwnProperty.call(a, key) &&
+            !(hasOwnProperty.call(b, key) && isEqual(a[key], b[key], stackA, stackB))) {
+          return false;
+        }
+      }
+    }
+    return true;
+  }
+
+  /**
+   * Checks if `value` is, or can be coerced to, a finite number.
+   *
+   * Note: This is not the same as native `isFinite`, which will return true for
+   * booleans and empty strings. See http://es5.github.com/#x15.1.2.5.
+   *
+   * @deprecated
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Mixed} value The value to check.
+   * @returns {Boolean} Returns `true` if the `value` is a finite number, else `false`.
+   * @example
+   *
+   * _.isFinite(-101);
+   * // => true
+   *
+   * _.isFinite('10');
+   * // => true
+   *
+   * _.isFinite(true);
+   * // => false
+   *
+   * _.isFinite('');
+   * // => false
+   *
+   * _.isFinite(Infinity);
+   * // => false
+   */
+  function isFinite(value) {
+    return nativeIsFinite(value) && !nativeIsNaN(parseFloat(value));
+  }
+
+  /**
+   * Checks if `value` is a function.
+   *
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Mixed} value The value to check.
+   * @returns {Boolean} Returns `true` if the `value` is a function, else `false`.
+   * @example
+   *
+   * _.isFunction(_);
+   * // => true
+   */
+  function isFunction(value) {
+    return typeof value == 'function';
+  }
+  // fallback for older versions of Chrome and Safari
+  if (isFunction(/x/)) {
+    isFunction = function(value) {
+      return toString.call(value) == funcClass;
+    };
+  }
+
+  /**
+   * Checks if `value` is the language type of Object.
+   * (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`)
+   *
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Mixed} value The value to check.
+   * @returns {Boolean} Returns `true` if the `value` is an object, else `false`.
+   * @example
+   *
+   * _.isObject({});
+   * // => true
+   *
+   * _.isObject([1, 2, 3]);
+   * // => true
+   *
+   * _.isObject(1);
+   * // => false
+   */
+  function isObject(value) {
+    // check if the value is the ECMAScript language type of Object
+    // http://es5.github.com/#x8
+    // and avoid a V8 bug
+    // http://code.google.com/p/v8/issues/detail?id=2291
+    return value ? objectTypes[typeof value] : false;
+  }
+
+  /**
+   * Checks if `value` is `NaN`.
+   *
+   * Note: This is not the same as native `isNaN`, which will return true for
+   * `undefined` and other values. See http://es5.github.com/#x15.1.2.4.
+   *
+   * @deprecated
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Mixed} value The value to check.
+   * @returns {Boolean} Returns `true` if the `value` is `NaN`, else `false`.
+   * @example
+   *
+   * _.isNaN(NaN);
+   * // => true
+   *
+   * _.isNaN(new Number(NaN));
+   * // => true
+   *
+   * isNaN(undefined);
+   * // => true
+   *
+   * _.isNaN(undefined);
+   * // => false
+   */
+  function isNaN(value) {
+    // `NaN` as a primitive is the only value that is not equal to itself
+    // (perform the [[Class]] check first to avoid errors with some host objects in IE)
+    return toString.call(value) == numberClass && value != +value
+  }
+
+  /**
+   * Checks if `value` is `null`.
+   *
+   * @deprecated
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Mixed} value The value to check.
+   * @returns {Boolean} Returns `true` if the `value` is `null`, else `false`.
+   * @example
+   *
+   * _.isNull(null);
+   * // => true
+   *
+   * _.isNull(undefined);
+   * // => false
+   */
+  function isNull(value) {
+    return value === null;
+  }
+
+  /**
+   * Checks if `value` is a number.
+   *
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Mixed} value The value to check.
+   * @returns {Boolean} Returns `true` if the `value` is a number, else `false`.
+   * @example
+   *
+   * _.isNumber(8.4 * 5);
+   * // => true
+   */
+  function isNumber(value) {
+    return toString.call(value) == numberClass;
+  }
+
+  /**
+   * Checks if a given `value` is an object created by the `Object` constructor.
+   *
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Mixed} value The value to check.
+   * @returns {Boolean} Returns `true` if `value` is a plain object, else `false`.
+   * @example
+   *
+   * function Stooge(name, age) {
+   *   this.name = name;
+   *   this.age = age;
+   * }
+   *
+   * _.isPlainObject(new Stooge('moe', 40));
+   * // => false
+   *
+   * _.isPlainObject([1, 2, 3]);
+   * // => false
+   *
+   * _.isPlainObject({ 'name': 'moe', 'age': 40 });
+   * // => true
+   */
+  var isPlainObject = !getPrototypeOf ? shimIsPlainObject : function(value) {
+    if (!(value && typeof value == 'object')) {
+      return false;
+    }
+    var valueOf = value.valueOf,
+        objProto = typeof valueOf == 'function' && (objProto = getPrototypeOf(valueOf)) && getPrototypeOf(objProto);
+
+    return objProto
+      ? value == objProto || (getPrototypeOf(value) == objProto && !isArguments(value))
+      : shimIsPlainObject(value);
+  };
+
+  /**
+   * Checks if `value` is a regular expression.
+   *
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Mixed} value The value to check.
+   * @returns {Boolean} Returns `true` if the `value` is a regular expression, else `false`.
+   * @example
+   *
+   * _.isRegExp(/moe/);
+   * // => true
+   */
+  function isRegExp(value) {
+    return toString.call(value) == regexpClass;
+  }
+
+  /**
+   * Checks if `value` is a string.
+   *
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Mixed} value The value to check.
+   * @returns {Boolean} Returns `true` if the `value` is a string, else `false`.
+   * @example
+   *
+   * _.isString('moe');
+   * // => true
+   */
+  function isString(value) {
+    return toString.call(value) == stringClass;
+  }
+
+  /**
+   * Checks if `value` is `undefined`.
+   *
+   * @deprecated
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Mixed} value The value to check.
+   * @returns {Boolean} Returns `true` if the `value` is `undefined`, else `false`.
+   * @example
+   *
+   * _.isUndefined(void 0);
+   * // => true
+   */
+  function isUndefined(value) {
+    return value === undefined;
+  }
+
+  /**
+   * Creates an array composed of the own enumerable property names of `object`.
+   *
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Object} object The object to inspect.
+   * @returns {Array} Returns a new array of property names.
+   * @example
+   *
+   * _.keys({ 'one': 1, 'two': 2, 'three': 3 });
+   * // => ['one', 'two', 'three'] (order is not guaranteed)
+   */
+  var keys = !nativeKeys ? shimKeys : function(object) {
+    // avoid iterating over the `prototype` property
+    return typeof object == 'function' && propertyIsEnumerable.call(object, 'prototype')
+      ? shimKeys(object)
+      : (isObject(object) ? nativeKeys(object) : []);
+  };
+
+  /**
+   * Merges enumerable properties of the source object(s) into the `destination`
+   * object. Subsequent sources will overwrite property assignments of previous
+   * sources.
+   *
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Object} object The destination object.
+   * @param {Object} [source1, source2, ...] The source objects.
+   * @param- {Object} [indicator] Internally used to indicate that the `stack`
+   *  argument is an array of traversed objects instead of another source object.
+   * @param- {Array} [stackA=[]] Internally used to track traversed source objects.
+   * @param- {Array} [stackB=[]] Internally used to associate values with their
+   *  source counterparts.
+   * @returns {Object} Returns the destination object.
+   * @example
+   *
+   * var stooges = [
+   *   { 'name': 'moe' },
+   *   { 'name': 'larry' }
+   * ];
+   *
+   * var ages = [
+   *   { 'age': 40 },
+   *   { 'age': 50 }
+   * ];
+   *
+   * _.merge(stooges, ages);
+   * // => [{ 'name': 'moe', 'age': 40 }, { 'name': 'larry', 'age': 50 }]
+   */
+  function merge(object, source, indicator) {
+    var args = arguments,
+        index = 0,
+        length = 2,
+        stackA = args[3],
+        stackB = args[4];
+
+    if (indicator !== indicatorObject) {
+      stackA = [];
+      stackB = [];
+
+      // work with `_.reduce` by only using its callback `accumulator` and `value` arguments
+      if (typeof indicator != 'number') {
+        length = args.length;
+      }
+    }
+    while (++index < length) {
+      forOwn(args[index], function(source, key) {
+        var found, isArr, value;
+        if (source && ((isArr = isArray(source)) || isPlainObject(source))) {
+          // avoid merging previously merged cyclic sources
+          var stackLength = stackA.length;
+          while (stackLength--) {
+            found = stackA[stackLength] == source;
+            if (found) {
+              break;
+            }
+          }
+          if (found) {
+            object[key] = stackB[stackLength];
+          }
+          else {
+            // add `source` and associated `value` to the stack of traversed objects
+            stackA.push(source);
+            stackB.push(value = (value = object[key], isArr)
+              ? (isArray(value) ? value : [])
+              : (isPlainObject(value) ? value : {})
+            );
+            // recursively merge objects and arrays (susceptible to call stack limits)
+            object[key] = merge(value, source, indicatorObject, stackA, stackB);
+          }
+        } else if (source != null) {
+          object[key] = source;
+        }
+      });
+    }
+    return object;
+  }
+
+  /**
+   * Creates a shallow clone of `object` excluding the specified properties.
+   * Property names may be specified as individual arguments or as arrays of
+   * property names. If `callback` is passed, it will be executed for each property
+   * in the `object`, omitting the properties `callback` returns truthy for. The
+   * `callback` is bound to `thisArg` and invoked with three arguments; (value, key, object).
+   *
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Object} object The source object.
+   * @param {Function|String} callback|[prop1, prop2, ...] The properties to omit
+   *  or the function called per iteration.
+   * @param {Mixed} [thisArg] The `this` binding of `callback`.
+   * @returns {Object} Returns an object without the omitted properties.
+   * @example
+   *
+   * _.omit({ 'name': 'moe', 'age': 40, 'userid': 'moe1' }, 'userid');
+   * // => { 'name': 'moe', 'age': 40 }
+   *
+   * _.omit({ 'name': 'moe', '_hint': 'knucklehead', '_seed': '96c4eb' }, function(value, key) {
+   *   return key.charAt(0) == '_';
+   * });
+   * // => { 'name': 'moe' }
+   */
+  function omit(object, callback, thisArg) {
+    var isFunc = typeof callback == 'function',
+        result = {};
+
+    if (isFunc) {
+      callback = createCallback(callback, thisArg);
+    } else {
+      var props = concat.apply(arrayRef, arguments);
+    }
+    forIn(object, function(value, key, object) {
+      if (isFunc
+            ? !callback(value, key, object)
+            : indexOf(props, key, 1) < 0
+          ) {
+        result[key] = value;
+      }
+    });
+    return result;
+  }
+
+  /**
+   * Creates a two dimensional array of the given object's key-value pairs,
+   * i.e. `[[key1, value1], [key2, value2]]`.
+   *
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Object} object The object to inspect.
+   * @returns {Array} Returns new array of key-value pairs.
+   * @example
+   *
+   * _.pairs({ 'moe': 30, 'larry': 40, 'curly': 50 });
+   * // => [['moe', 30], ['larry', 40], ['curly', 50]] (order is not guaranteed)
+   */
+  function pairs(object) {
+    var result = [];
+    forOwn(object, function(value, key) {
+      result.push([key, value]);
+    });
+    return result;
+  }
+
+  /**
+   * Creates a shallow clone of `object` composed of the specified properties.
+   * Property names may be specified as individual arguments or as arrays of
+   * property names. If `callback` is passed, it will be executed for each property
+   * in the `object`, picking the properties `callback` returns truthy for. The
+   * `callback` is bound to `thisArg` and invoked with three arguments; (value, key, object).
+   *
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Object} object The source object.
+   * @param {Function|String} callback|[prop1, prop2, ...] The properties to pick
+   *  or the function called per iteration.
+   * @param {Mixed} [thisArg] The `this` binding of `callback`.
+   * @returns {Object} Returns an object composed of the picked properties.
+   * @example
+   *
+   * _.pick({ 'name': 'moe', 'age': 40, 'userid': 'moe1' }, 'name', 'age');
+   * // => { 'name': 'moe', 'age': 40 }
+   *
+   * _.pick({ 'name': 'moe', '_hint': 'knucklehead', '_seed': '96c4eb' }, function(value, key) {
+   *   return key.charAt(0) != '_';
+   * });
+   * // => { 'name': 'moe' }
+   */
+  function pick(object, callback, thisArg) {
+    var result = {};
+    if (typeof callback != 'function') {
+      var index = 0,
+          props = concat.apply(arrayRef, arguments),
+          length = props.length;
+
+      while (++index < length) {
+        var key = props[index];
+        if (key in object) {
+          result[key] = object[key];
+        }
+      }
+    } else {
+      callback = createCallback(callback, thisArg);
+      forIn(object, function(value, key, object) {
+        if (callback(value, key, object)) {
+          result[key] = value;
+        }
+      });
+    }
+    return result;
+  }
+
+  /**
+   * Creates an array composed of the own enumerable property values of `object`.
+   *
+   * @static
+   * @memberOf _
+   * @category Objects
+   * @param {Object} object The object to inspect.
+   * @returns {Array} Returns a new array of property values.
+   * @example
+   *
+   * _.values({ 'one': 1, 'two': 2, 'three': 3 });
+   * // => [1, 2, 3]
+   */
+  function values(object) {
+    var result = [];
+    forOwn(object, function(value) {
+      result.push(value);
+    });
+    return result;
+  }
+
+  /*--------------------------------------------------------------------------*/
+
+  /**
+   * Checks if a given `target` element is present in a `collection` using strict
+   * equality for comparisons, i.e. `===`. If `fromIndex` is negative, it is used
+   * as the offset from the end of the collection.
+   *
+   * @static
+   * @memberOf _
+   * @alias include
+   * @category Collections
+   * @param {Array|Object|String} collection The collection to iterate over.
+   * @param {Mixed} target The value to check for.
+   * @param {Number} [fromIndex=0] The index to search from.
+   * @returns {Boolean} Returns `true` if the `target` element is found, else `false`.
+   * @example
+   *
+   * _.contains([1, 2, 3], 1);
+   * // => true
+   *
+   * _.contains([1, 2, 3], 1, 2);
+   * // => false
+   *
+   * _.contains({ 'name': 'moe', 'age': 40 }, 'moe');
+   * // => true
+   *
+   * _.contains('curly', 'ur');
+   * // => true
+   */
+  function contains(collection, target, fromIndex) {
+    var index = -1,
+        length = collection ? collection.length : 0,
+        result = false;
+
+    fromIndex = (fromIndex < 0 ? nativeMax(0, length + fromIndex) : fromIndex) || 0;
+    if (typeof length == 'number') {
+      result = (isString(collection)
+        ? collection.indexOf(target, fromIndex)
+        : indexOf(collection, target, fromIndex)
+      ) > -1;
+    } else {
+      forEach(collection, function(value) {
+        if (++index >= fromIndex) {
+          return !(result = value === target);
+        }
+      });
+    }
+    return result;
+  }
+
+  /**
+   * Creates an object composed of keys returned from running each element of
+   * `collection` through a `callback`. The corresponding value of each key is
+   * the number of times the key was returned by `callback`. The `callback` is
+   * bound to `thisArg` and invoked with three arguments; (value, index|key, collection).
+   * The `callback` argument may also be the name of a property to count by (e.g. 'length').
+   *
+   * @static
+   * @memberOf _
+   * @category Collections
+   * @param {Array|Object|String} collection The collection to iterate over.
+   * @param {Function|String} callback|property The function called per iteration
+   *  or property name to count by.
+   * @param {Mixed} [thisArg] The `this` binding of `callback`.
+   * @returns {Object} Returns the composed aggregate object.
+   * @example
+   *
+   * _.countBy([4.3, 6.1, 6.4], function(num) { return Math.floor(num); });
+   * // => { '4': 1, '6': 2 }
+   *
+   * _.countBy([4.3, 6.1, 6.4], function(num) { return this.floor(num); }, Math);
+   * // => { '4': 1, '6': 2 }
+   *
+   * _.countBy(['one', 'two', 'three'], 'length');
+   * // => { '3': 2, '5': 1 }
+   */
+  function countBy(collection, callback, thisArg) {
+    var result = {};
+    callback = createCallback(callback, thisArg);
+    forEach(collection, function(value, key, collection) {
+      key = callback(value, key, collection);
+      (hasOwnProperty.call(result, key) ? result[key]++ : result[key] = 1);
+    });
+    return result;
+  }
+
+  /**
+   * Checks if the `callback` returns a truthy value for **all** elements of a
+   * `collection`. The `callback` is bound to `thisArg` and invoked with three
+   * arguments; (value, index|key, collection).
+   *
+   * @static
+   * @memberOf _
+   * @alias all
+   * @category Collections
+   * @param {Array|Object|String} collection The collection to iterate over.
+   * @param {Function} [callback=identity] The function called per iteration.
+   * @param {Mixed} [thisArg] The `this` binding of `callback`.
+   * @returns {Boolean} Returns `true` if all elements pass the callback check,
+   *  else `false`.
+   * @example
+   *
+   * _.every([true, 1, null, 'yes'], Boolean);
+   * // => false
+   */
+  function every(collection, callback, thisArg) {
+    var result = true;
+    callback = createCallback(callback, thisArg);
+
+    if (isArray(collection)) {
+      var index = -1,
+          length = collection.length;
+
+      while (++index < length) {
+        if (!(result = !!callback(collection[index], index, collection))) {
+          break;
+        }
+      }
+    } else {
+      forEach(collection, function(value, index, collection) {
+        return (result = !!callback(value, index, collection));
+      });
+    }
+    return result;
+  }
+
+  /**
+   * Examines each element in a `collection`, returning an array of all elements
+   * the `callback` returns truthy for. The `callback` is bound to `thisArg` and
+   * invoked with three arguments; (value, index|key, collection).
+   *
+   * @static
+   * @memberOf _
+   * @alias select
+   * @category Collections
+   * @param {Array|Object|String} collection The collection to iterate over.
+   * @param {Function} [callback=identity] The function called per iteration.
+   * @param {Mixed} [thisArg] The `this` binding of `callback`.
+   * @returns {Array} Returns a new array of elements that passed the callback check.
+   * @example
+   *
+   * var evens = _.filter([1, 2, 3, 4, 5, 6], function(num) { return num % 2 == 0; });
+   * // => [2, 4, 6]
+   */
+  function filter(collection, callback, thisArg) {
+    var result = [];
+    callback = createCallback(callback, thisArg);
+
+    if (isArray(collection)) {
+      var index = -1,
+          length = collection.length;
+
+      while (++index < length) {
+        var value = collection[index];
+        if (callback(value, index, collection)) {
+          result.push(value);
+        }
+      }
+    } else {
+      forEach(collection, function(value, index, collection) {
+        if (callback(value, index, collection)) {
+          result.push(value);
+        }
+      });
+    }
+    return result;
+  }
+
+  /**
+   * Examines each element in a `collection`, returning the first one the `callback`
+   * returns truthy for. The function returns as soon as it finds an acceptable
+   * element, and does not iterate over the entire `collection`. The `callback` is
+   * bound to `thisArg` and invoked with three arguments; (value, index|key, collection).
+   *
+   * @static
+   * @memberOf _
+   * @alias detect
+   * @category Collections
+   * @param {Array|Object|String} collection The collection to iterate over.
+   * @param {Function} callback The function called per iteration.
+   * @param {Mixed} [thisArg] The `this` binding of `callback`.
+   * @returns {Mixed} Returns the element that passed the callback check,
+   *  else `undefined`.
+   * @example
+   *
+   * var even = _.find([1, 2, 3, 4, 5, 6], function(num) { return num % 2 == 0; });
+   * // => 2
+   */
+  function find(collection, callback, thisArg) {
+    var result;
+    callback = createCallback(callback, thisArg);
+    forEach(collection, function(value, index, collection) {
+      if (callback(value, index, collection)) {
+        result = value;
+        return false;
+      }
+    });
+    return result;
+  }
+
+  /**
+   * Iterates over a `collection`, executing the `callback` for each element in
+   * the `collection`. The `callback` is bound to `thisArg` and invoked with three
+   * arguments; (value, index|key, collection). Callbacks may exit iteration early
+   * by explicitly returning `false`.
+   *
+   * @static
+   * @memberOf _
+   * @alias each
+   * @category Collections
+   * @param {Array|Object|String} collection The collection to iterate over.
+   * @param {Function} callback The function called per iteration.
+   * @param {Mixed} [thisArg] The `this` binding of `callback`.
+   * @returns {Array|Object|String} Returns `collection`.
+   * @example
+   *
+   * _([1, 2, 3]).forEach(alert).join(',');
+   * // => alerts each number and returns '1,2,3'
+   *
+   * _.forEach({ 'one': 1, 'two': 2, 'three': 3 }, alert);
+   * // => alerts each number (order is not guaranteed)
+   */
+  // compiled by `createIterator` from `forEachIteratorOptions`, which supplies
+  // the array/object branches and the early-exit contract described above
+  var forEach = createIterator(forEachIteratorOptions);
+
+  /**
+   * Creates an object composed of keys returned from running each element of
+   * `collection` through a `callback`. The corresponding value of each key is an
+   * array of elements passed to `callback` that returned the key. The `callback`
+   * is bound to `thisArg` and invoked with three arguments; (value, index|key, collection).
+   * The `callback` argument may also be the name of a property to group by (e.g. 'length').
+   *
+   * @static
+   * @memberOf _
+   * @category Collections
+   * @param {Array|Object|String} collection The collection to iterate over.
+   * @param {Function|String} callback|property The function called per iteration
+   *  or property name to group by.
+   * @param {Mixed} [thisArg] The `this` binding of `callback`.
+   * @returns {Object} Returns the composed aggregate object.
+   * @example
+   *
+   * _.groupBy([4.2, 6.1, 6.4], function(num) { return Math.floor(num); });
+   * // => { '4': [4.2], '6': [6.1, 6.4] }
+   *
+   * _.groupBy([4.2, 6.1, 6.4], function(num) { return this.floor(num); }, Math);
+   * // => { '4': [4.2], '6': [6.1, 6.4] }
+   *
+   * _.groupBy(['one', 'two', 'three'], 'length');
+   * // => { '3': ['one', 'two'], '5': ['three'] }
+   */
+  function groupBy(collection, callback, thisArg) {
+    var result = {};
+    callback = createCallback(callback, thisArg);
+    forEach(collection, function(value, key, collection) {
+      key = callback(value, key, collection);
+      (hasOwnProperty.call(result, key) ? result[key] : result[key] = []).push(value);
+    });
+    return result;
+  }
+
+  /**
+   * Invokes the method named by `methodName` on each element in the `collection`,
+   * returning an array of the results of each invoked method. Additional arguments
+   * will be passed to each invoked method. If `methodName` is a function it will
+   * be invoked for, and `this` bound to, each element in the `collection`.
+   *
+   * @static
+   * @memberOf _
+   * @category Collections
+   * @param {Array|Object|String} collection The collection to iterate over.
+   * @param {Function|String} methodName The name of the method to invoke or
+   *  the function invoked per iteration.
+   * @param {Mixed} [arg1, arg2, ...] Arguments to invoke the method with.
+   * @returns {Array} Returns a new array of the results of each invoked method.
+   * @example
+   *
+   * _.invoke([[5, 1, 7], [3, 2, 1]], 'sort');
+   * // => [[1, 5, 7], [1, 2, 3]]
+   *
+   * _.invoke([123, 456], String.prototype.split, '');
+   * // => [['1', '2', '3'], ['4', '5', '6']]
+   */
+  function invoke(collection, methodName) {
+    var args = slice.call(arguments, 2),
+        isFunc = typeof methodName == 'function',
+        result = [];
+
+    forEach(collection, function(value) {
+      result.push((isFunc ? methodName : value[methodName]).apply(value, args));
+    });
+    return result;
+  }
+
+  /**
+   * Creates an array of values by running each element in the `collection`
+   * through a `callback`. The `callback` is bound to `thisArg` and invoked with
+   * three arguments; (value, index|key, collection).
+   *
+   * @static
+   * @memberOf _
+   * @alias collect
+   * @category Collections
+   * @param {Array|Object|String} collection The collection to iterate over.
+   * @param {Function} [callback=identity] The function called per iteration.
+   * @param {Mixed} [thisArg] The `this` binding of `callback`.
+   * @returns {Array} Returns a new array of the results of each `callback` execution.
+   * @example
+   *
+   * _.map([1, 2, 3], function(num) { return num * 3; });
+   * // => [3, 6, 9]
+   *
+   * _.map({ 'one': 1, 'two': 2, 'three': 3 }, function(num) { return num * 3; });
+   * // => [3, 6, 9] (order is not guaranteed)
+   */
+  function map(collection, callback, thisArg) {
+    var index = -1,
+        length = collection ? collection.length : 0,
+        // preallocate for array-likes; plain objects start from an empty array
+        result = Array(typeof length == 'number' ? length : 0);
+
+    callback = createCallback(callback, thisArg);
+    if (isArray(collection)) {
+      while (++index < length) {
+        result[index] = callback(collection[index], index, collection);
+      }
+    } else {
+      // non-arrays iterate by key, so keep a separate numeric write index
+      forEach(collection, function(value, key, collection) {
+        result[++index] = callback(value, key, collection);
+      });
+    }
+    return result;
+  }
+
+  /**
+   * Retrieves the maximum value of an `array`. If `callback` is passed,
+   * it will be executed for each value in the `array` to generate the
+   * criterion by which the value is ranked. The `callback` is bound to
+   * `thisArg` and invoked with three arguments; (value, index, collection).
+   *
+   * @static
+   * @memberOf _
+   * @category Collections
+   * @param {Array|Object|String} collection The collection to iterate over.
+   * @param {Function} [callback] The function called per iteration.
+   * @param {Mixed} [thisArg] The `this` binding of `callback`.
+   * @returns {Mixed} Returns the maximum value.
+   * @example
+   *
+   * var stooges = [
+   *   { 'name': 'moe', 'age': 40 },
+   *   { 'name': 'larry', 'age': 50 },
+   *   { 'name': 'curly', 'age': 60 }
+   * ];
+   *
+   * _.max(stooges, function(stooge) { return stooge.age; });
+   * // => { 'name': 'curly', 'age': 60 };
+   */
+  function max(collection, callback, thisArg) {
+    // `result` starts at `-Infinity`, which is also what an empty
+    // collection yields
+    var computed = -Infinity,
+        index = -1,
+        length = collection ? collection.length : 0,
+        result = computed;
+
+    if (callback || !isArray(collection)) {
+      // strings with no callback rank elements via `charAtCallback`;
+      // everything else funnels through `createCallback`
+      callback = !callback && isString(collection)
+        ? charAtCallback
+        : createCallback(callback, thisArg);
+
+      forEach(collection, function(value, index, collection) {
+        var current = callback(value, index, collection);
+        if (current > computed) {
+          // track the ranking criterion and the original element separately
+          computed = current;
+          result = value;
+        }
+      });
+    } else {
+      // fast path: plain array, no callback
+      while (++index < length) {
+        if (collection[index] > result) {
+          result = collection[index];
+        }
+      }
+    }
+    return result;
+  }
+
+  /**
+   * Retrieves the minimum value of an `array`. If `callback` is passed,
+   * it will be executed for each value in the `array` to generate the
+   * criterion by which the value is ranked. The `callback` is bound to `thisArg`
+   * and invoked with three arguments; (value, index, collection).
+   *
+   * @static
+   * @memberOf _
+   * @category Collections
+   * @param {Array|Object|String} collection The collection to iterate over.
+   * @param {Function} [callback] The function called per iteration.
+   * @param {Mixed} [thisArg] The `this` binding of `callback`.
+   * @returns {Mixed} Returns the minimum value.
+   * @example
+   *
+   * _.min([10, 5, 100, 2, 1000]);
+   * // => 2
+   */
+  function min(collection, callback, thisArg) {
+    // `result` starts at `Infinity`, which is also what an empty
+    // collection yields (mirror image of `max`)
+    var computed = Infinity,
+        index = -1,
+        length = collection ? collection.length : 0,
+        result = computed;
+
+    if (callback || !isArray(collection)) {
+      // strings with no callback rank elements via `charAtCallback`;
+      // everything else funnels through `createCallback`
+      callback = !callback && isString(collection)
+        ? charAtCallback
+        : createCallback(callback, thisArg);
+
+      forEach(collection, function(value, index, collection) {
+        var current = callback(value, index, collection);
+        if (current < computed) {
+          // track the ranking criterion and the original element separately
+          computed = current;
+          result = value;
+        }
+      });
+    } else {
+      // fast path: plain array, no callback
+      while (++index < length) {
+        if (collection[index] < result) {
+          result = collection[index];
+        }
+      }
+    }
+    return result;
+  }
+
+  /**
+   * Retrieves the value of a specified property from all elements in
+   * the `collection`.
+   *
+   * @static
+   * @memberOf _
+   * @category Collections
+   * @param {Array|Object|String} collection The collection to iterate over.
+   * @param {String} property The property to pluck.
+   * @returns {Array} Returns a new array of property values.
+   * @example
+   *
+   * var stooges = [
+   *   { 'name': 'moe', 'age': 40 },
+   *   { 'name': 'larry', 'age': 50 },
+   *   { 'name': 'curly', 'age': 60 }
+   * ];
+   *
+   * _.pluck(stooges, 'name');
+   * // => ['moe', 'larry', 'curly']
+   */
+  function pluck(collection, property) {
+    var result = [];
+    forEach(collection, function(value) {
+      result.push(value[property]);
+    });
+    return result;
+  }
+
+  /**
+   * Boils down a `collection` to a single value. The initial state of the
+   * reduction is `accumulator` and each successive step of it should be returned
+   * by the `callback`. The `callback` is bound to `thisArg` and invoked with 4
+   * arguments; for arrays they are (accumulator, value, index|key, collection).
+   *
+   * @static
+   * @memberOf _
+   * @alias foldl, inject
+   * @category Collections
+   * @param {Array|Object|String} collection The collection to iterate over.
+   * @param {Function} callback The function called per iteration.
+   * @param {Mixed} [accumulator] Initial value of the accumulator.
+   * @param {Mixed} [thisArg] The `this` binding of `callback`.
+   * @returns {Mixed} Returns the accumulated value.
+   * @example
+   *
+   * var sum = _.reduce([1, 2, 3], function(memo, num) { return memo + num; });
+   * // => 6
+   */
+  function reduce(collection, callback, accumulator, thisArg) {
+    var noaccum = arguments.length < 3;
+    callback = createCallback(callback, thisArg);
+    forEach(collection, function(value, index, collection) {
+      accumulator = noaccum
+        ? (noaccum = false, value)
+        : callback(accumulator, value, index, collection)
+    });
+    return accumulator;
+  }
+
+  /**
+   * The right-associative version of `_.reduce`.
+   *
+   * @static
+   * @memberOf _
+   * @alias foldr
+   * @category Collections
+   * @param {Array|Object|String} collection The collection to iterate over.
+   * @param {Function} callback The function called per iteration.
+   * @param {Mixed} [accumulator] Initial value of the accumulator.
+   * @param {Mixed} [thisArg] The `this` binding of `callback`.
+   * @returns {Mixed} Returns the accumulated value.
+   * @example
+   *
+   * var list = [[0, 1], [2, 3], [4, 5]];
+   * var flat = _.reduceRight(list, function(a, b) { return a.concat(b); }, []);
+   * // => [4, 5, 2, 3, 0, 1]
+   */
+  function reduceRight(collection, callback, accumulator, thisArg) {
+    var iteratee = collection,
+        length = collection ? collection.length : 0,
+        noaccum = arguments.length < 3;
+
+    if (typeof length != 'number') {
+      // plain object: snapshot the keys so they can be consumed in reverse
+      var props = keys(collection);
+      length = props.length;
+    } else if (noCharByIndex && isString(collection)) {
+      // NOTE(review): `noCharByIndex` presumably flags environments where
+      // strings can't be indexed like arrays — iterate a char array instead;
+      // confirm against the flag's definition
+      iteratee = collection.split('');
+    }
+    forEach(collection, function(value, index, collection) {
+      // `forEach` walks forward; decrementing `length` maps each step onto
+      // the mirrored position counted from the end
+      index = props ? props[--length] : --length;
+      accumulator = noaccum
+        ? (noaccum = false, iteratee[index])
+        : callback.call(thisArg, accumulator, iteratee[index], index, collection);
+    });
+    return accumulator;
+  }
+
+  /**
+   * The opposite of `_.filter`, this method returns the values of a
+   * `collection` that `callback` does **not** return truthy for.
+   *
+   * @static
+   * @memberOf _
+   * @category Collections
+   * @param {Array|Object|String} collection The collection to iterate over.
+   * @param {Function} [callback=identity] The function called per iteration.
+   * @param {Mixed} [thisArg] The `this` binding of `callback`.
+   * @returns {Array} Returns a new array of elements that did **not** pass the
+   *  callback check.
+   * @example
+   *
+   * var odds = _.reject([1, 2, 3, 4, 5, 6], function(num) { return num % 2 == 0; });
+   * // => [1, 3, 5]
+   */
+  function reject(collection, callback, thisArg) {
+    callback = createCallback(callback, thisArg);
+    return filter(collection, function(value, index, collection) {
+      return !callback(value, index, collection);
+    });
+  }
+
+  /**
+   * Creates an array of shuffled `array` values, using a version of the
+   * Fisher-Yates shuffle. See http://en.wikipedia.org/wiki/Fisher-Yates_shuffle.
+   *
+   * @static
+   * @memberOf _
+   * @category Collections
+   * @param {Array|Object|String} collection The collection to shuffle.
+   * @returns {Array} Returns a new shuffled collection.
+   * @example
+   *
+   * _.shuffle([1, 2, 3, 4, 5, 6]);
+   * // => [4, 1, 6, 3, 5, 2]
+   */
+  function shuffle(collection) {
+    var index = -1,
+        result = Array(collection ? collection.length : 0);
+
+    forEach(collection, function(value) {
+      // "inside-out" Fisher-Yates: pick a slot in [0, index], move its
+      // current occupant to the new end, and drop the new value in its place
+      var rand = floor(nativeRandom() * (++index + 1));
+      result[index] = result[rand];
+      result[rand] = value;
+    });
+    return result;
+  }
+
+  /**
+   * Gets the size of the `collection` by returning `collection.length` for arrays
+   * and array-like objects or the number of own enumerable properties for objects.
+   *
+   * @static
+   * @memberOf _
+   * @category Collections
+   * @param {Array|Object|String} collection The collection to inspect.
+   * @returns {Number} Returns `collection.length` or number of own enumerable properties.
+   * @example
+   *
+   * _.size([1, 2]);
+   * // => 2
+   *
+   * _.size({ 'one': 1, 'two': 2, 'three': 3 });
+   * // => 3
+   *
+   * _.size('curly');
+   * // => 5
+   */
+  function size(collection) {
+    var length = collection ? collection.length : 0;
+    return typeof length == 'number' ? length : keys(collection).length;
+  }
+
+  /**
+   * Checks if the `callback` returns a truthy value for **any** element of a
+   * `collection`. The function returns as soon as it finds passing value, and
+   * does not iterate over the entire `collection`. The `callback` is bound to
+   * `thisArg` and invoked with three arguments; (value, index|key, collection).
+   *
+   * @static
+   * @memberOf _
+   * @alias any
+   * @category Collections
+   * @param {Array|Object|String} collection The collection to iterate over.
+   * @param {Function} [callback=identity] The function called per iteration.
+   * @param {Mixed} [thisArg] The `this` binding of `callback`.
+   * @returns {Boolean} Returns `true` if any element passes the callback check,
+   *  else `false`.
+   * @example
+   *
+   * _.some([null, 0, 'yes', false], Boolean);
+   * // => true
+   */
+  function some(collection, callback, thisArg) {
+    var result;
+    callback = createCallback(callback, thisArg);
+
+    if (isArray(collection)) {
+      var index = -1,
+          length = collection.length;
+
+      while (++index < length) {
+        if ((result = callback(collection[index], index, collection))) {
+          break;
+        }
+      }
+    } else {
+      forEach(collection, function(value, index, collection) {
+        return !(result = callback(value, index, collection));
+      });
+    }
+    return !!result;
+  }
+
+  /**
+   * Creates an array, stable sorted in ascending order by the results of
+   * running each element of `collection` through a `callback`. The `callback`
+   * is bound to `thisArg` and invoked with three arguments; (value, index|key, collection).
+   * The `callback` argument may also be the name of a property to sort by (e.g. 'length').
+   *
+   * @static
+   * @memberOf _
+   * @category Collections
+   * @param {Array|Object|String} collection The collection to iterate over.
+   * @param {Function|String} callback|property The function called per iteration
+   *  or property name to sort by.
+   * @param {Mixed} [thisArg] The `this` binding of `callback`.
+   * @returns {Array} Returns a new array of sorted elements.
+   * @example
+   *
+   * _.sortBy([1, 2, 3], function(num) { return Math.sin(num); });
+   * // => [3, 1, 2]
+   *
+   * _.sortBy([1, 2, 3], function(num) { return this.sin(num); }, Math);
+   * // => [3, 1, 2]
+   *
+   * _.sortBy(['larry', 'brendan', 'moe'], 'length');
+   * // => ['moe', 'larry', 'brendan']
+   */
+  function sortBy(collection, callback, thisArg) {
+    var result = [];
+    callback = createCallback(callback, thisArg);
+    forEach(collection, function(value, index, collection) {
+      result.push({
+        'criteria': callback(value, index, collection),
+        'index': index,
+        'value': value
+      });
+    });
+
+    var length = result.length;
+    result.sort(compareAscending);
+    while (length--) {
+      result[length] = result[length].value;
+    }
+    return result;
+  }
+
+  /**
+   * Converts the `collection`, to an array.
+   *
+   * @static
+   * @memberOf _
+   * @category Collections
+   * @param {Array|Object|String} collection The collection to convert.
+   * @returns {Array} Returns the new converted array.
+   * @example
+   *
+   * (function() { return _.toArray(arguments).slice(1); })(1, 2, 3, 4);
+   * // => [2, 3, 4]
+   */
+  function toArray(collection) {
+    if (collection && typeof collection.length == 'number') {
+      // NOTE(review): `noArraySliceOnStrings` presumably flags environments
+      // where `Array#slice` can't be applied to strings, so strings are
+      // split instead — confirm against the flag's definition
+      return (noArraySliceOnStrings ? isString(collection) : typeof collection == 'string')
+        ? collection.split('')
+        : slice.call(collection);
+    }
+    // no numeric `length`: treat as a plain object and return its values
+    return values(collection);
+  }
+
+  /**
+   * Examines each element in a `collection`, returning an array of all elements
+   * that contain the given `properties`.
+   *
+   * @static
+   * @memberOf _
+   * @category Collections
+   * @param {Array|Object|String} collection The collection to iterate over.
+   * @param {Object} properties The object of property values to filter by.
+   * @returns {Array} Returns a new array of elements that contain the given `properties`.
+   * @example
+   *
+   * var stooges = [
+   *   { 'name': 'moe', 'age': 40 },
+   *   { 'name': 'larry', 'age': 50 },
+   *   { 'name': 'curly', 'age': 60 }
+   * ];
+   *
+   * _.where(stooges, { 'age': 40 });
+   * // => [{ 'name': 'moe', 'age': 40 }]
+   */
+  function where(collection, properties) {
+    var props = keys(properties);
+    return filter(collection, function(object) {
+      var length = props.length;
+      while (length--) {
+        var result = object[props[length]] === properties[props[length]];
+        if (!result) {
+          break;
+        }
+      }
+      return !!result;
+    });
+  }
+
+  /*--------------------------------------------------------------------------*/
+
+  /**
+   * Creates an array with all falsey values of `array` removed. The values
+   * `false`, `null`, `0`, `""`, `undefined` and `NaN` are all falsey.
+   *
+   * @static
+   * @memberOf _
+   * @category Arrays
+   * @param {Array} array The array to compact.
+   * @returns {Array} Returns a new filtered array.
+   * @example
+   *
+   * _.compact([0, 1, false, 2, '', 3]);
+   * // => [1, 2, 3]
+   */
+  function compact(array) {
+    var index = -1,
+        length = array ? array.length : 0,
+        result = [];
+
+    while (++index < length) {
+      var value = array[index];
+      if (value) {
+        result.push(value);
+      }
+    }
+    return result;
+  }
+
+  /**
+   * Creates an array of `array` elements not present in the other arrays
+   * using strict equality for comparisons, i.e. `===`.
+   *
+   * @static
+   * @memberOf _
+   * @category Arrays
+   * @param {Array} array The array to process.
+   * @param {Array} [array1, array2, ...] Arrays to check.
+   * @returns {Array} Returns a new array of `array` elements not present in the
+   *  other arrays.
+   * @example
+   *
+   * _.difference([1, 2, 3, 4, 5], [5, 2, 10]);
+   * // => [1, 3, 4]
+   */
+  function difference(array) {
+    var index = -1,
+        length = array ? array.length : 0,
+        // `arguments` includes `array` itself, so `flattened` begins with
+        // `array`'s own elements...
+        flattened = concat.apply(arrayRef, arguments),
+        // ...which the `length` argument presumably tells `cachedContains`
+        // to skip (NOTE(review): confirm against `cachedContains`)
+        contains = cachedContains(flattened, length),
+        result = [];
+
+    while (++index < length) {
+      var value = array[index];
+      if (!contains(value)) {
+        result.push(value);
+      }
+    }
+    return result;
+  }
+
+  /**
+   * Gets the first element of the `array`. Pass `n` to return the first `n`
+   * elements of the `array`.
+   *
+   * @static
+   * @memberOf _
+   * @alias head, take
+   * @category Arrays
+   * @param {Array} array The array to query.
+   * @param {Number} [n] The number of elements to return.
+   * @param- {Object} [guard] Internally used to allow this method to work with
+   *  others like `_.map` without using their callback `index` argument for `n`.
+   * @returns {Mixed} Returns the first element or an array of the first `n`
+   *  elements of `array`.
+   * @example
+   *
+   * _.first([5, 4, 3, 2, 1]);
+   * // => 5
+   */
+  function first(array, n, guard) {
+    if (array) {
+      return (n == null || guard) ? array[0] : slice.call(array, 0, n);
+    }
+  }
+
+  /**
+   * Flattens a nested array (the nesting can be to any depth). If `shallow` is
+   * truthy, `array` will only be flattened a single level.
+   *
+   * @static
+   * @memberOf _
+   * @category Arrays
+   * @param {Array} array The array to compact.
+   * @param {Boolean} shallow A flag to indicate only flattening a single level.
+   * @returns {Array} Returns a new flattened array.
+   * @example
+   *
+   * _.flatten([1, [2], [3, [[4]]]]);
+   * // => [1, 2, 3, 4];
+   *
+   * _.flatten([1, [2], [3, [[4]]]], true);
+   * // => [1, 2, 3, [[4]]];
+   */
+  function flatten(array, shallow) {
+    var index = -1,
+        length = array ? array.length : 0,
+        result = [];
+
+    while (++index < length) {
+      var value = array[index];
+
+      // recursively flatten arrays (susceptible to call stack limits)
+      if (isArray(value)) {
+        push.apply(result, shallow ? value : flatten(value));
+      } else {
+        result.push(value);
+      }
+    }
+    return result;
+  }
+
+  /**
+   * Gets the index at which the first occurrence of `value` is found using
+   * strict equality for comparisons, i.e. `===`. If the `array` is already
+   * sorted, passing `true` for `fromIndex` will run a faster binary search.
+   *
+   * @static
+   * @memberOf _
+   * @category Arrays
+   * @param {Array} array The array to search.
+   * @param {Mixed} value The value to search for.
+   * @param {Boolean|Number} [fromIndex=0] The index to search from or `true` to
+   *  perform a binary search on a sorted `array`.
+   * @returns {Number} Returns the index of the matched value or `-1`.
+   * @example
+   *
+   * _.indexOf([1, 2, 3, 1, 2, 3], 2);
+   * // => 1
+   *
+   * _.indexOf([1, 2, 3, 1, 2, 3], 2, 3);
+   * // => 4
+   *
+   * _.indexOf([1, 1, 2, 2, 3, 3], 2, true);
+   * // => 2
+   */
+  function indexOf(array, value, fromIndex) {
+    var index = -1,
+        length = array ? array.length : 0;
+
+    if (typeof fromIndex == 'number') {
+      // negative `fromIndex` counts back from the end; start one position
+      // before the target so the `++index` below lands on it
+      index = (fromIndex < 0 ? nativeMax(0, length + fromIndex) : fromIndex || 0) - 1;
+    } else if (fromIndex) {
+      // truthy non-number `fromIndex` requests a binary search (sorted input)
+      index = sortedIndex(array, value);
+      return array[index] === value ? index : -1;
+    }
+    while (++index < length) {
+      if (array[index] === value) {
+        return index;
+      }
+    }
+    return -1;
+  }
+
+  /**
+   * Gets all but the last element of `array`. Pass `n` to exclude the last `n`
+   * elements from the result.
+   *
+   * @static
+   * @memberOf _
+   * @category Arrays
+   * @param {Array} array The array to query.
+   * @param {Number} [n=1] The number of elements to exclude.
+   * @param- {Object} [guard] Internally used to allow this method to work with
+   *  others like `_.map` without using their callback `index` argument for `n`.
+   * @returns {Array} Returns all but the last element or `n` elements of `array`.
+   * @example
+   *
+   * _.initial([3, 2, 1]);
+   * // => [3, 2]
+   */
+  function initial(array, n, guard) {
+    return array
+      ? slice.call(array, 0, -((n == null || guard) ? 1 : n))
+      : [];
+  }
+
+  /**
+   * Computes the intersection of all the passed-in arrays using strict equality
+   * for comparisons, i.e. `===`.
+   *
+   * @static
+   * @memberOf _
+   * @category Arrays
+   * @param {Array} [array1, array2, ...] Arrays to process.
+   * @returns {Array} Returns a new array of unique elements, in order, that are
+   *  present in **all** of the arrays.
+   * @example
+   *
+   * _.intersection([1, 2, 3], [101, 2, 1, 10], [2, 1]);
+   * // => [1, 2]
+   */
+  function intersection(array) {
+    var args = arguments,
+        argsLength = args.length,
+        // memoizes one `cachedContains` tester per argument index
+        cache = {},
+        result = [];
+
+    forEach(array, function(value) {
+      // skip values already accepted, which also dedupes the result
+      if (indexOf(result, value) < 0) {
+        var length = argsLength;
+        // `--length` stops before index 0, so `array` itself is not re-tested
+        while (--length) {
+          if (!(cache[length] || (cache[length] = cachedContains(args[length])))(value)) {
+            return;
+          }
+        }
+        result.push(value);
+      }
+    });
+    return result;
+  }
+
+  /**
+   * Gets the last element of the `array`. Pass `n` to return the last `n`
+   * elements of the `array`.
+   *
+   * @static
+   * @memberOf _
+   * @category Arrays
+   * @param {Array} array The array to query.
+   * @param {Number} [n] The number of elements to return.
+   * @param- {Object} [guard] Internally used to allow this method to work with
+   *  others like `_.map` without using their callback `index` argument for `n`.
+   * @returns {Mixed} Returns the last element or an array of the last `n`
+   *  elements of `array`.
+   * @example
+   *
+   * _.last([3, 2, 1]);
+   * // => 1
+   */
+  function last(array, n, guard) {
+    if (array) {
+      var length = array.length;
+      return (n == null || guard) ? array[length - 1] : slice.call(array, -n || length);
+    }
+  }
+
+  /**
+   * Gets the index at which the last occurrence of `value` is found using strict
+   * equality for comparisons, i.e. `===`. If `fromIndex` is negative, it is used
+   * as the offset from the end of the collection.
+   *
+   * @static
+   * @memberOf _
+   * @category Arrays
+   * @param {Array} array The array to search.
+   * @param {Mixed} value The value to search for.
+   * @param {Number} [fromIndex=array.length-1] The index to search from.
+   * @returns {Number} Returns the index of the matched value or `-1`.
+   * @example
+   *
+   * _.lastIndexOf([1, 2, 3, 1, 2, 3], 2);
+   * // => 4
+   *
+   * _.lastIndexOf([1, 2, 3, 1, 2, 3], 2, 3);
+   * // => 1
+   */
+  function lastIndexOf(array, value, fromIndex) {
+    var index = array ? array.length : 0;
+    if (typeof fromIndex == 'number') {
+      // clamp `fromIndex` into [0, length - 1] (negatives count from the
+      // end); the `+ 1` offsets the pre-decrement in the loop below
+      index = (fromIndex < 0 ? nativeMax(0, index + fromIndex) : nativeMin(fromIndex, index - 1)) + 1;
+    }
+    while (index--) {
+      if (array[index] === value) {
+        return index;
+      }
+    }
+    return -1;
+  }
+
+  /**
+   * Creates an object composed from arrays of `keys` and `values`. Pass either
+   * a single two dimensional array, i.e. `[[key1, value1], [key2, value2]]`, or
+   * two arrays, one of `keys` and one of corresponding `values`.
+   *
+   * @static
+   * @memberOf _
+   * @category Arrays
+   * @param {Array} keys The array of keys.
+   * @param {Array} [values=[]] The array of values.
+   * @returns {Object} Returns an object composed of the given keys and
+   *  corresponding values.
+   * @example
+   *
+   * _.object(['moe', 'larry', 'curly'], [30, 40, 50]);
+   * // => { 'moe': 30, 'larry': 40, 'curly': 50 }
+   */
+  function object(keys, values) {
+    var index = -1,
+        length = keys ? keys.length : 0,
+        result = {};
+
+    while (++index < length) {
+      var key = keys[index];
+      if (values) {
+        result[key] = values[index];
+      } else {
+        result[key[0]] = key[1];
+      }
+    }
+    return result;
+  }
+
+  /**
+   * Creates an array of numbers (positive and/or negative) progressing from
+   * `start` up to but not including `stop`. This method is a port of Python's
+   * `range()` function. See http://docs.python.org/library/functions.html#range.
+   *
+   * @static
+   * @memberOf _
+   * @category Arrays
+   * @param {Number} [start=0] The start of the range.
+   * @param {Number} end The end of the range.
+   * @param {Number} [step=1] The value to increment or decrement by.
+   * @returns {Array} Returns a new range array.
+   * @example
+   *
+   * _.range(10);
+   * // => [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
+   *
+   * _.range(1, 11);
+   * // => [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
+   *
+   * _.range(0, 30, 5);
+   * // => [0, 5, 10, 15, 20, 25]
+   *
+   * _.range(0, -10, -1);
+   * // => [0, -1, -2, -3, -4, -5, -6, -7, -8, -9]
+   *
+   * _.range(0);
+   * // => []
+   */
+  function range(start, end, step) {
+    start = +start || 0;
+    step = +step || 1;
+
+    if (end == null) {
+      end = start;
+      start = 0;
+    }
+    // use `Array(length)` so V8 will avoid the slower "dictionary" mode
+    // http://www.youtube.com/watch?v=XAqIpGU8ZZk#t=16m27s
+    var index = -1,
+        length = nativeMax(0, ceil((end - start) / step)),
+        result = Array(length);
+
+    while (++index < length) {
+      result[index] = start;
+      start += step;
+    }
+    return result;
+  }
+
+  /**
+   * The opposite of `_.initial`, this method gets all but the first value of
+   * `array`. Pass `n` to exclude the first `n` values from the result.
+   *
+   * @static
+   * @memberOf _
+   * @alias drop, tail
+   * @category Arrays
+   * @param {Array} array The array to query.
+   * @param {Number} [n=1] The number of elements to exclude.
+   * @param- {Object} [guard] Internally used to allow this method to work with
+   *  others like `_.map` without using their callback `index` argument for `n`.
+   * @returns {Array} Returns all but the first value or `n` values of `array`.
+   * @example
+   *
+   * _.rest([3, 2, 1]);
+   * // => [2, 1]
+   */
+  function rest(array, n, guard) {
+    return array
+      ? slice.call(array, (n == null || guard) ? 1 : n)
+      : [];
+  }
+
+  /**
+   * Uses a binary search to determine the smallest index at which the `value`
+   * should be inserted into `array` in order to maintain the sort order of the
+   * sorted `array`. If `callback` is passed, it will be executed for `value` and
+   * each element in `array` to compute their sort ranking. The `callback` is
+   * bound to `thisArg` and invoked with one argument; (value). The `callback`
+   * argument may also be the name of a property to order by.
+   *
+   * @static
+   * @memberOf _
+   * @category Arrays
+   * @param {Array} array The array to iterate over.
+   * @param {Mixed} value The value to evaluate.
+   * @param {Function|String} [callback=identity|property] The function called
+   *  per iteration or property name to order by.
+   * @param {Mixed} [thisArg] The `this` binding of `callback`.
+   * @returns {Number} Returns the index at which the value should be inserted
+   *  into `array`.
+   * @example
+   *
+   * _.sortedIndex([20, 30, 50], 40);
+   * // => 2
+   *
+   * _.sortedIndex([{ 'x': 20 }, { 'x': 30 }, { 'x': 50 }], { 'x': 40 }, 'x');
+   * // => 2
+   *
+   * var dict = {
+   *   'wordToNumber': { 'twenty': 20, 'thirty': 30, 'fourty': 40, 'fifty': 50 }
+   * };
+   *
+   * _.sortedIndex(['twenty', 'thirty', 'fifty'], 'fourty', function(word) {
+   *   return dict.wordToNumber[word];
+   * });
+   * // => 2
+   *
+   * _.sortedIndex(['twenty', 'thirty', 'fifty'], 'fourty', function(word) {
+   *   return this.wordToNumber[word];
+   * }, dict);
+   * // => 2
+   */
+  function sortedIndex(array, value, callback, thisArg) {
+    var low = 0,
+        high = array ? array.length : low;
+
+    // explicitly reference `identity` for better engine inlining
+    callback = callback ? createCallback(callback, thisArg) : identity;
+    value = callback(value);
+    while (low < high) {
+      var mid = (low + high) >>> 1;
+      callback(array[mid]) < value
+        ? low = mid + 1
+        : high = mid;
+    }
+    return low;
+  }
+
+  /**
+   * Computes the union of the passed-in arrays using strict equality for
+   * comparisons, i.e. `===`.
+   *
+   * @static
+   * @memberOf _
+   * @category Arrays
+   * @param {Array} [array1, array2, ...] Arrays to process.
+   * @returns {Array} Returns a new array of unique values, in order, that are
+   *  present in one or more of the arrays.
+   * @example
+   *
+   * _.union([1, 2, 3], [101, 2, 1, 10], [2, 1]);
+   * // => [1, 2, 3, 101, 10]
+   */
  function union() {
    // flatten all argument arrays into one (concat on the shared array
    // reference) and let `uniq` drop the duplicate values
    return uniq(concat.apply(arrayRef, arguments));
  }
+
+  /**
+   * Creates a duplicate-value-free version of the `array` using strict equality
+   * for comparisons, i.e. `===`. If the `array` is already sorted, passing `true`
+   * for `isSorted` will run a faster algorithm. If `callback` is passed, each
+   * element of `array` is passed through a `callback` before uniqueness is computed.
+   * The `callback` is bound to `thisArg` and invoked with three arguments; (value, index, array).
+   *
+   * @static
+   * @memberOf _
+   * @alias unique
+   * @category Arrays
+   * @param {Array} array The array to process.
+   * @param {Boolean} [isSorted=false] A flag to indicate that the `array` is already sorted.
+   * @param {Function} [callback=identity] The function called per iteration.
+   * @param {Mixed} [thisArg] The `this` binding of `callback`.
+   * @returns {Array} Returns a duplicate-value-free array.
+   * @example
+   *
+   * _.uniq([1, 2, 1, 3, 1]);
+   * // => [1, 2, 3]
+   *
+   * _.uniq([1, 1, 2, 2, 3], true);
+   * // => [1, 2, 3]
+   *
+   * _.uniq([1, 2, 1.5, 3, 2.5], function(num) { return Math.floor(num); });
+   * // => [1, 2, 3]
+   *
+   * _.uniq([1, 2, 1.5, 3, 2.5], function(num) { return this.floor(num); }, Math);
+   * // => [1, 2, 3]
+   */
  function uniq(array, isSorted, callback, thisArg) {
    var index = -1,
        length = array ? array.length : 0,
        result = [],
        // `seen` tracks computed values already accepted; it aliases `result`
        // until a callback or the large-array cache requires a separate list
        seen = result;

    // juggle arguments: supports uniq(array, callback, thisArg)
    if (typeof isSorted == 'function') {
      thisArg = callback;
      callback = isSorted;
      isSorted = false;
    }
    // init value cache for large arrays (threshold of 74 elements)
    var isLarge = !isSorted && length > 74;
    if (isLarge) {
      var cache = {};
    }
    if (callback) {
      // track computed values separately from the raw result values
      seen = [];
      callback = createCallback(callback, thisArg);
    }
    while (++index < length) {
      var value = array[index],
          computed = callback ? callback(value, index, array) : value;

      if (isLarge) {
        // bucket by stringified value so membership checks stay cheap;
        // manually coerce `computed` to a string because `hasOwnProperty`, in
        // some older versions of Firefox, coerces objects incorrectly
        seen = hasOwnProperty.call(cache, computed + '') ? cache[computed] : (cache[computed] = []);
      }
      // sorted arrays only need to compare against the previous value;
      // otherwise scan the `seen` list
      if (isSorted
            ? !index || seen[seen.length - 1] !== computed
            : indexOf(seen, computed) < 0
          ) {
        if (callback || isLarge) {
          seen.push(computed);
        }
        result.push(value);
      }
    }
    return result;
  }
+
+  /**
+   * Creates an array with all occurrences of the passed values removed using
+   * strict equality for comparisons, i.e. `===`.
+   *
+   * @static
+   * @memberOf _
+   * @category Arrays
+   * @param {Array} array The array to filter.
+   * @param {Mixed} [value1, value2, ...] Values to remove.
+   * @returns {Array} Returns a new filtered array.
+   * @example
+   *
+   * _.without([1, 2, 1, 0, 3, 1, 4], 0, 1);
+   * // => [2, 3, 4]
+   */
+  function without(array) {
+    var index = -1,
+        length = array ? array.length : 0,
+        contains = cachedContains(arguments, 1, 20),
+        result = [];
+
+    while (++index < length) {
+      var value = array[index];
+      if (!contains(value)) {
+        result.push(value);
+      }
+    }
+    return result;
+  }
+
+  /**
+   * Groups the elements of each array at their corresponding indexes. Useful for
+   * separate data sources that are coordinated through matching array indexes.
+   * For a matrix of nested arrays, `_.zip.apply(...)` can transpose the matrix
+   * in a similar fashion.
+   *
+   * @static
+   * @memberOf _
+   * @category Arrays
+   * @param {Array} [array1, array2, ...] Arrays to process.
+   * @returns {Array} Returns a new array of grouped elements.
+   * @example
+   *
+   * _.zip(['moe', 'larry', 'curly'], [30, 40, 50], [true, false, false]);
+   * // => [['moe', 30, true], ['larry', 40, false], ['curly', 50, false]]
+   */
+  function zip(array) {
+    var index = -1,
+        length = array ? max(pluck(arguments, 'length')) : 0,
+        result = Array(length);
+
+    while (++index < length) {
+      result[index] = pluck(arguments, index);
+    }
+    return result;
+  }
+
+  /*--------------------------------------------------------------------------*/
+
+  /**
+   * Creates a function that is restricted to executing `func` only after it is
+   * called `n` times. The `func` is executed with the `this` binding of the
+   * created function.
+   *
+   * @static
+   * @memberOf _
+   * @category Functions
+   * @param {Number} n The number of times the function must be called before
+   * it is executed.
+   * @param {Function} func The function to restrict.
+   * @returns {Function} Returns the new restricted function.
+   * @example
+   *
+   * var renderNotes = _.after(notes.length, render);
+   * _.forEach(notes, function(note) {
+   *   note.asyncSave({ 'success': renderNotes });
+   * });
+   * // `renderNotes` is run once, after all notes have saved
+   */
+  function after(n, func) {
+    if (n < 1) {
+      return func();
+    }
+    return function() {
+      if (--n < 1) {
+        return func.apply(this, arguments);
+      }
+    };
+  }
+
+  /**
+   * Creates a function that, when called, invokes `func` with the `this`
+   * binding of `thisArg` and prepends any additional `bind` arguments to those
+   * passed to the bound function.
+   *
+   * @static
+   * @memberOf _
+   * @category Functions
+   * @param {Function} func The function to bind.
+   * @param {Mixed} [thisArg] The `this` binding of `func`.
+   * @param {Mixed} [arg1, arg2, ...] Arguments to be partially applied.
+   * @returns {Function} Returns the new bound function.
+   * @example
+   *
+   * var func = function(greeting) {
+   *   return greeting + ' ' + this.name;
+   * };
+   *
+   * func = _.bind(func, { 'name': 'moe' }, 'hi');
+   * func();
+   * // => 'hi moe'
+   */
  function bind(func, thisArg) {
    // use `Function#bind` if it exists and is fast
    // (in V8 `Function#bind` is slower except when partially applied);
    // `nativeBind.call.apply(...)` forwards every argument, including the
    // partials, straight to the native implementation
    return isBindFast || (nativeBind && arguments.length > 2)
      ? nativeBind.call.apply(nativeBind, arguments)
      : createBound(func, thisArg, slice.call(arguments, 2));
  }
+
+  /**
+   * Binds methods on `object` to `object`, overwriting the existing method.
+   * If no method names are provided, all the function properties of `object`
+   * will be bound.
+   *
+   * @static
+   * @memberOf _
+   * @category Functions
+   * @param {Object} object The object to bind and assign the bound methods to.
+   * @param {String} [methodName1, methodName2, ...] Method names on the object to bind.
+   * @returns {Object} Returns `object`.
+   * @example
+   *
+   * var buttonView = {
+   *  'label': 'lodash',
+   *  'onClick': function() { alert('clicked: ' + this.label); }
+   * };
+   *
+   * _.bindAll(buttonView);
+   * jQuery('#lodash_button').on('click', buttonView.onClick);
+   * // => When the button is clicked, `this.label` will have the correct value
+   */
+  function bindAll(object) {
+    var funcs = arguments,
+        index = funcs.length > 1 ? 0 : (funcs = functions(object), -1),
+        length = funcs.length;
+
+    while (++index < length) {
+      var key = funcs[index];
+      object[key] = bind(object[key], object);
+    }
+    return object;
+  }
+
+  /**
+   * Creates a function that, when called, invokes the method at `object[key]`
+   * and prepends any additional `bindKey` arguments to those passed to the bound
+   * function. This method differs from `_.bind` by allowing bound functions to
+   * reference methods that will be redefined or don't yet exist.
+   * See http://michaux.ca/articles/lazy-function-definition-pattern.
+   *
+   * @static
+   * @memberOf _
+   * @category Functions
+   * @param {Object} object The object the method belongs to.
+   * @param {String} key The key of the method.
+   * @param {Mixed} [arg1, arg2, ...] Arguments to be partially applied.
+   * @returns {Function} Returns the new bound function.
+   * @example
+   *
+   * var object = {
+   *   'name': 'moe',
+   *   'greet': function(greeting) {
+   *     return greeting + ' ' + this.name;
+   *   }
+   * };
+   *
+   * var func = _.bindKey(object, 'greet', 'hi');
+   * func();
+   * // => 'hi moe'
+   *
+   * object.greet = function(greeting) {
+   *   return greeting + ', ' + this.name + '!';
+   * };
+   *
+   * func();
+   * // => 'hi, moe!'
+   */
  function bindKey(object, key) {
    // pass `object`/`key` (rather than a function reference) so the method is
    // looked up at call time, allowing it to be redefined after binding
    return createBound(object, key, slice.call(arguments, 2));
  }
+
+  /**
+   * Creates a function that is the composition of the passed functions,
+   * where each function consumes the return value of the function that follows.
+   * In math terms, composing the functions `f()`, `g()`, and `h()` produces `f(g(h()))`.
+   * Each function is executed with the `this` binding of the composed function.
+   *
+   * @static
+   * @memberOf _
+   * @category Functions
+   * @param {Function} [func1, func2, ...] Functions to compose.
+   * @returns {Function} Returns the new composed function.
+   * @example
+   *
+   * var greet = function(name) { return 'hi: ' + name; };
+   * var exclaim = function(statement) { return statement + '!'; };
+   * var welcome = _.compose(exclaim, greet);
+   * welcome('moe');
+   * // => 'hi: moe!'
+   */
+  function compose() {
+    var funcs = arguments;
+    return function() {
+      var args = arguments,
+          length = funcs.length;
+
+      while (length--) {
+        args = [funcs[length].apply(this, args)];
+      }
+      return args[0];
+    };
+  }
+
+  /**
+   * Creates a function that will delay the execution of `func` until after
+   * `wait` milliseconds have elapsed since the last time it was invoked. Pass
+   * `true` for `immediate` to cause debounce to invoke `func` on the leading,
+   * instead of the trailing, edge of the `wait` timeout. Subsequent calls to
+   * the debounced function will return the result of the last `func` call.
+   *
+   * @static
+   * @memberOf _
+   * @category Functions
+   * @param {Function} func The function to debounce.
+   * @param {Number} wait The number of milliseconds to delay.
+   * @param {Boolean} immediate A flag to indicate execution is on the leading
+   *  edge of the timeout.
+   * @returns {Function} Returns the new debounced function.
+   * @example
+   *
+   * var lazyLayout = _.debounce(calculateLayout, 300);
+   * jQuery(window).on('resize', lazyLayout);
+   */
  function debounce(func, wait, immediate) {
    var args,
        result,
        thisArg,
        timeoutId;

    // trailing-edge timer callback: fires `wait` ms after the latest call
    function delayed() {
      timeoutId = null;
      if (!immediate) {
        result = func.apply(thisArg, args);
      }
    }
    return function() {
      // a leading-edge call happens only when no timer is currently pending
      var isImmediate = immediate && !timeoutId;
      args = arguments;
      thisArg = this;

      // restart the timer on every call so `func` runs only once calls stop
      clearTimeout(timeoutId);
      timeoutId = setTimeout(delayed, wait);

      if (isImmediate) {
        result = func.apply(thisArg, args);
      }
      // returns the result of the most recent actual `func` invocation
      return result;
    };
  }
+
+  /**
+   * Executes the `func` function after `wait` milliseconds. Additional arguments
+   * will be passed to `func` when it is invoked.
+   *
+   * @static
+   * @memberOf _
+   * @category Functions
+   * @param {Function} func The function to delay.
+   * @param {Number} wait The number of milliseconds to delay execution.
+   * @param {Mixed} [arg1, arg2, ...] Arguments to invoke the function with.
+   * @returns {Number} Returns the `setTimeout` timeout id.
+   * @example
+   *
+   * var log = _.bind(console.log, console);
+   * _.delay(log, 1000, 'logged later');
+   * // => 'logged later' (Appears after one second.)
+   */
  function delay(func, wait) {
    // capture any extra arguments to apply when the timer fires
    var args = slice.call(arguments, 2);
    return setTimeout(function() { func.apply(undefined, args); }, wait);
  }
+
+  /**
+   * Defers executing the `func` function until the current call stack has cleared.
+   * Additional arguments will be passed to `func` when it is invoked.
+   *
+   * @static
+   * @memberOf _
+   * @category Functions
+   * @param {Function} func The function to defer.
+   * @param {Mixed} [arg1, arg2, ...] Arguments to invoke the function with.
+   * @returns {Number} Returns the `setTimeout` timeout id.
+   * @example
+   *
+   * _.defer(function() { alert('deferred'); });
+   * // returns from the function before `alert` is called
+   */
  function defer(func) {
    // capture any extra arguments to apply when the timer fires
    var args = slice.call(arguments, 1);
    // a 1ms timeout lets the current call stack unwind before `func` runs
    return setTimeout(function() { func.apply(undefined, args); }, 1);
  }
+
+  /**
+   * Creates a function that memoizes the result of `func`. If `resolver` is
+   * passed, it will be used to determine the cache key for storing the result
+   * based on the arguments passed to the memoized function. By default, the first
+   * argument passed to the memoized function is used as the cache key. The `func`
+   * is executed with the `this` binding of the memoized function.
+   *
+   * @static
+   * @memberOf _
+   * @category Functions
+   * @param {Function} func The function to have its output memoized.
+   * @param {Function} [resolver] A function used to resolve the cache key.
+   * @returns {Function} Returns the new memoizing function.
+   * @example
+   *
+   * var fibonacci = _.memoize(function(n) {
+   *   return n < 2 ? n : fibonacci(n - 1) + fibonacci(n - 2);
+   * });
+   */
+  function memoize(func, resolver) {
+    var cache = {};
+    return function() {
+      var key = resolver ? resolver.apply(this, arguments) : arguments[0];
+      return hasOwnProperty.call(cache, key)
+        ? cache[key]
+        : (cache[key] = func.apply(this, arguments));
+    };
+  }
+
+  /**
+   * Creates a function that is restricted to execute `func` once. Repeat calls to
+   * the function will return the value of the first call. The `func` is executed
+   * with the `this` binding of the created function.
+   *
+   * @static
+   * @memberOf _
+   * @category Functions
+   * @param {Function} func The function to restrict.
+   * @returns {Function} Returns the new restricted function.
+   * @example
+   *
+   * var initialize = _.once(createApplication);
+   * initialize();
+   * initialize();
+   * // Application is only created once.
+   */
+  function once(func) {
+    var result,
+        ran = false;
+
+    return function() {
+      if (ran) {
+        return result;
+      }
+      ran = true;
+      result = func.apply(this, arguments);
+
+      // clear the `func` variable so the function may be garbage collected
+      func = null;
+      return result;
+    };
+  }
+
+  /**
+   * Creates a function that, when called, invokes `func` with any additional
+   * `partial` arguments prepended to those passed to the new function. This
+   * method is similar to `bind`, except it does **not** alter the `this` binding.
+   *
+   * @static
+   * @memberOf _
+   * @category Functions
+   * @param {Function} func The function to partially apply arguments to.
+   * @param {Mixed} [arg1, arg2, ...] Arguments to be partially applied.
+   * @returns {Function} Returns the new partially applied function.
+   * @example
+   *
+   * var greet = function(greeting, name) { return greeting + ': ' + name; };
+   * var hi = _.partial(greet, 'hi');
+   * hi('moe');
+   * // => 'hi: moe'
+   */
  function partial(func) {
    // like `bind` but with no `thisArg`: only the leading arguments are fixed
    return createBound(func, slice.call(arguments, 1));
  }
+
+  /**
+   * Creates a function that, when executed, will only call the `func`
+   * function at most once per every `wait` milliseconds. If the throttled
+   * function is invoked more than once during the `wait` timeout, `func` will
+   * also be called on the trailing edge of the timeout. Subsequent calls to the
+   * throttled function will return the result of the last `func` call.
+   *
+   * @static
+   * @memberOf _
+   * @category Functions
+   * @param {Function} func The function to throttle.
+   * @param {Number} wait The number of milliseconds to throttle executions to.
+   * @returns {Function} Returns the new throttled function.
+   * @example
+   *
+   * var throttled = _.throttle(updatePosition, 100);
+   * jQuery(window).on('scroll', throttled);
+   */
  function throttle(func, wait) {
    var args,
        result,
        thisArg,
        timeoutId,
        lastCalled = 0;

    // trailing-edge callback: runs when calls arrived inside the wait window
    function trailingCall() {
      lastCalled = new Date;
      timeoutId = null;
      result = func.apply(thisArg, args);
    }
    return function() {
      var now = new Date,
          remaining = wait - (now - lastCalled);

      args = arguments;
      thisArg = this;

      if (remaining <= 0) {
        // window expired: invoke immediately and start a new window
        clearTimeout(timeoutId);
        lastCalled = now;
        result = func.apply(thisArg, args);
      }
      else if (!timeoutId) {
        // inside the window: schedule one trailing call at the window's end
        timeoutId = setTimeout(trailingCall, remaining);
      }
      // returns the result of the most recent actual `func` invocation
      return result;
    };
  }
+
+  /**
+   * Creates a function that passes `value` to the `wrapper` function as its
+   * first argument. Additional arguments passed to the function are appended
+   * to those passed to the `wrapper` function. The `wrapper` is executed with
+   * the `this` binding of the created function.
+   *
+   * @static
+   * @memberOf _
+   * @category Functions
+   * @param {Mixed} value The value to wrap.
+   * @param {Function} wrapper The wrapper function.
+   * @returns {Function} Returns the new function.
+   * @example
+   *
+   * var hello = function(name) { return 'hello ' + name; };
+   * hello = _.wrap(hello, function(func) {
+   *   return 'before, ' + func('moe') + ', after';
+   * });
+   * hello();
+   * // => 'before, hello moe, after'
+   */
  function wrap(value, wrapper) {
    return function() {
      // invoke `wrapper` with `value` first, then the caller's own arguments
      var args = [value];
      push.apply(args, arguments);
      return wrapper.apply(this, args);
    };
  }
+
+  /*--------------------------------------------------------------------------*/
+
+  /**
+   * Converts the characters `&`, `<`, `>`, `"`, and `'` in `string` to their
+   * corresponding HTML entities.
+   *
+   * @static
+   * @memberOf _
+   * @category Utilities
+   * @param {String} string The string to escape.
+   * @returns {String} Returns the escaped string.
+   * @example
+   *
+   * _.escape('Moe, Larry & Curly');
+   * // => "Moe, Larry &amp; Curly"
+   */
  function escape(string) {
    // null/undefined normalize to ''; everything else is coerced to a string
    // before the characters matched by `reUnescapedHtml` are replaced
    return string == null ? '' : (string + '').replace(reUnescapedHtml, escapeHtmlChar);
  }
+
+  /**
+   * This function returns the first argument passed to it.
+   *
+   * Note: It is used throughout Lo-Dash as a default callback.
+   *
+   * @static
+   * @memberOf _
+   * @category Utilities
+   * @param {Mixed} value Any value.
+   * @returns {Mixed} Returns `value`.
+   * @example
+   *
+   * var moe = { 'name': 'moe' };
+   * moe === _.identity(moe);
+   * // => true
+   */
  function identity(value) {
    // intentional pass-through; used elsewhere as the default callback
    return value;
  }
+
+  /**
+   * Adds functions properties of `object` to the `lodash` function and chainable
+   * wrapper.
+   *
+   * @static
+   * @memberOf _
+   * @category Utilities
+   * @param {Object} object The object of function properties to add to `lodash`.
+   * @example
+   *
+   * _.mixin({
+   *   'capitalize': function(string) {
+   *     return string.charAt(0).toUpperCase() + string.slice(1).toLowerCase();
+   *   }
+   * });
+   *
+   * _.capitalize('larry');
+   * // => 'Larry'
+   *
+   * _('curly').capitalize();
+   * // => 'Curly'
+   */
  function mixin(object) {
    forEach(functions(object), function(methodName) {
      // expose the function as a static method on `lodash`
      var func = lodash[methodName] = object[methodName];

      // and as a wrapper method that receives the wrapped value first
      lodash.prototype[methodName] = function() {
        var args = [this.__wrapped__];
        push.apply(args, arguments);

        var result = func.apply(lodash, args);
        if (this.__chain__) {
          // chaining is enabled: re-wrap the result and keep the chain flag
          result = new lodash(result);
          result.__chain__ = true;
        }
        return result;
      };
    });
  }
+
+  /**
+   * Reverts the '_' variable to its previous value and returns a reference to
+   * the `lodash` function.
+   *
+   * @static
+   * @memberOf _
+   * @category Utilities
+   * @returns {Function} Returns the `lodash` function.
+   * @example
+   *
+   * var lodash = _.noConflict();
+   */
  function noConflict() {
    // restore whatever `_` pointed at before this library claimed it
    window._ = oldDash;
    return this;
  }
+
+  /**
+   * Produces a random number between `min` and `max` (inclusive). If only one
+   * argument is passed, a number between `0` and the given number will be returned.
+   *
+   * @static
+   * @memberOf _
+   * @category Utilities
+   * @param {Number} [min=0] The minimum possible value.
+   * @param {Number} [max=1] The maximum possible value.
+   * @returns {Number} Returns a random number.
+   * @example
+   *
+   * _.random(0, 5);
+   * // => a number between 0 and 5
+   *
+   * _.random(5);
+   * // => also a number between 0 and 5
+   */
  function random(min, max) {
    // no bounds at all: default to a number between 0 and 1 inclusive
    if (min == null && max == null) {
      max = 1;
    }
    min = +min || 0;
    if (max == null) {
      // single-argument form: the argument is the upper bound
      max = min;
      min = 0;
    }
    // `+ 1` inside the multiplier makes the upper bound inclusive
    return min + floor(nativeRandom() * ((+max || 0) - min + 1));
  }
+
+  /**
+   * Resolves the value of `property` on `object`. If `property` is a function
+   * it will be invoked and its result returned, else the property value is
+   * returned. If `object` is falsey, then `null` is returned.
+   *
+   * @deprecated
+   * @static
+   * @memberOf _
+   * @category Utilities
+   * @param {Object} object The object to inspect.
+   * @param {String} property The property to get the value of.
+   * @returns {Mixed} Returns the resolved value.
+   * @example
+   *
+   * var object = {
+   *   'cheese': 'crumpets',
+   *   'stuff': function() {
+   *     return 'nonsense';
+   *   }
+   * };
+   *
+   * _.result(object, 'cheese');
+   * // => 'crumpets'
+   *
+   * _.result(object, 'stuff');
+   * // => 'nonsense'
+   */
  function result(object, property) {
    // based on Backbone's private `getValue` function
    // https://github.com/documentcloud/backbone/blob/0.9.2/backbone.js#L1419-1424
    var value = object ? object[property] : null;
    // invoke function-valued properties; return anything else unchanged
    return isFunction(value) ? object[property]() : value;
  }
+
+  /**
+   * A micro-templating method that handles arbitrary delimiters, preserves
+   * whitespace, and correctly escapes quotes within interpolated code.
+   *
+   * Note: In the development build `_.template` utilizes sourceURLs for easier
+   * debugging. See http://www.html5rocks.com/en/tutorials/developertools/sourcemaps/#toc-sourceurl
+   *
+   * Note: Lo-Dash may be used in Chrome extensions by either creating a `lodash csp`
+   * build and avoiding `_.template` use, or loading Lo-Dash in a sandboxed page.
+   * See http://developer.chrome.com/trunk/extensions/sandboxingEval.html
+   *
+   * @static
+   * @memberOf _
+   * @category Utilities
+   * @param {String} text The template text.
+   * @param {Object} data The data object used to populate the text.
+   * @param {Object} options The options object.
+   *  escape - The "escape" delimiter regexp.
+   *  evaluate - The "evaluate" delimiter regexp.
+   *  interpolate - The "interpolate" delimiter regexp.
+   *  sourceURL - The sourceURL of the template's compiled source.
+   *  variable - The data object variable name.
+   *
+   * @returns {Function|String} Returns a compiled function when no `data` object
+   *  is given, else it returns the interpolated text.
+   * @example
+   *
+   * // using a compiled template
+   * var compiled = _.template('hello <%= name %>');
+   * compiled({ 'name': 'moe' });
+   * // => 'hello moe'
+   *
+   * var list = '<% _.forEach(people, function(name) { %><li><%= name %></li><% }); %>';
+   * _.template(list, { 'people': ['moe', 'larry', 'curly'] });
+   * // => '<li>moe</li><li>larry</li><li>curly</li>'
+   *
+   * // using the "escape" delimiter to escape HTML in data property values
+   * _.template('<b><%- value %></b>', { 'value': '<script>' });
+   * // => '<b>&lt;script&gt;</b>'
+   *
+   * // using the ES6 delimiter as an alternative to the default "interpolate" delimiter
+   * _.template('hello ${ name }', { 'name': 'curly' });
+   * // => 'hello curly'
+   *
+   * // using the internal `print` function in "evaluate" delimiters
+   * _.template('<% print("hello " + epithet); %>!', { 'epithet': 'stooge' });
+   * // => 'hello stooge!'
+   *
+   * // using custom template delimiters
+   * _.templateSettings = {
+   *   'interpolate': /{{([\s\S]+?)}}/g
+   * };
+   *
+   * _.template('hello {{ name }}!', { 'name': 'mustache' });
+   * // => 'hello mustache!'
+   *
+   * // using the `sourceURL` option to specify a custom sourceURL for the template
+   * var compiled = _.template('hello <%= name %>', null, { 'sourceURL': '/basic/greeting.jst' });
+   * compiled(data);
+   * // => find the source of "greeting.jst" under the Sources tab or Resources panel of the web inspector
+   *
+   * // using the `variable` option to ensure a with-statement isn't used in the compiled template
+   * var compiled = _.template('hello <%= data.name %>!', null, { 'variable': 'data' });
+   * compiled.source;
+   * // => function(data) {
+   *   var __t, __p = '', __e = _.escape;
+   *   __p += 'hello ' + ((__t = ( data.name )) == null ? '' : __t) + '!';
+   *   return __p;
+   * }
+   *
+   * // using the `source` property to inline compiled templates for meaningful
+   * // line numbers in error messages and a stack trace
+   * fs.writeFileSync(path.join(cwd, 'jst.js'), '\
+   *   var JST = {\
+   *     "main": ' + _.template(mainText).source + '\
+   *   };\
+   * ');
+   */
+  function template(text, data, options) {
+    // based on John Resig's `tmpl` implementation
+    // http://ejohn.org/blog/javascript-micro-templating/
+    // and Laura Doktorova's doT.js
+    // https://github.com/olado/doT
+    text || (text = '');
+    options || (options = {});
+
+    var isEvaluating,
+        result,
+        settings = lodash.templateSettings,
+        index = 0,
+        interpolate = options.interpolate || settings.interpolate || reNoMatch,
+        source = "__p += '",
+        variable = options.variable || settings.variable,
+        hasVariable = variable;
+
+    // compile regexp to match each delimiter
+    var reDelimiters = RegExp(
+      (options.escape || settings.escape || reNoMatch).source + '|' +
+      interpolate.source + '|' +
+      (interpolate === reInterpolate ? reEsTemplate : reNoMatch).source + '|' +
+      (options.evaluate || settings.evaluate || reNoMatch).source + '|$'
+    , 'g');
+
+    text.replace(reDelimiters, function(match, escapeValue, interpolateValue, esTemplateValue, evaluateValue, offset) {
+      interpolateValue || (interpolateValue = esTemplateValue);
+
+      // escape characters that cannot be included in string literals
+      source += text.slice(index, offset).replace(reUnescapedString, escapeStringChar);
+
+      // replace delimiters with snippets
+      source +=
+        escapeValue ? "' +\n__e(" + escapeValue + ") +\n'" :
+        evaluateValue ? "';\n" + evaluateValue + ";\n__p += '" :
+        interpolateValue ? "' +\n((__t = (" + interpolateValue + ")) == null ? '' : __t) +\n'" : '';
+
+      isEvaluating || (isEvaluating = evaluateValue || reComplexDelimiter.test(escapeValue || interpolateValue));
+      index = offset + match.length;
+    });
+
+    source += "';\n";
+
+    // if `variable` is not specified and the template contains "evaluate"
+    // delimiters, wrap a with-statement around the generated code to add the
+    // data object to the top of the scope chain
+    if (!hasVariable) {
+      variable = 'obj';
+      if (isEvaluating) {
+        source = 'with (' + variable + ') {\n' + source + '\n}\n';
+      }
+      else {
+        // avoid a with-statement by prepending data object references to property names
+        var reDoubleVariable = RegExp('(\\(\\s*)' + variable + '\\.' + variable + '\\b', 'g');
+        source = source
+          .replace(reInsertVariable, '$&' + variable + '.')
+          .replace(reDoubleVariable, '$1__d');
+      }
+    }
+
+    // cleanup code by stripping empty strings
+    source = (isEvaluating ? source.replace(reEmptyStringLeading, '') : source)
+      .replace(reEmptyStringMiddle, '$1')
+      .replace(reEmptyStringTrailing, '$1;');
+
+    // frame code as the function body
+    source = 'function(' + variable + ') {\n' +
+      (hasVariable ? '' : variable + ' || (' + variable + ' = {});\n') +
+      'var __t, __p = \'\', __e = _.escape' +
+      (isEvaluating
+        ? ', __j = Array.prototype.join;\n' +
+          'function print() { __p += __j.call(arguments, \'\') }\n'
+        : (hasVariable ? '' : ', __d = ' + variable + '.' + variable + ' || ' + variable) + ';\n'
+      ) +
+      source +
+      'return __p\n}';
+
+    // use a sourceURL for easier debugging
+    // http://www.html5rocks.com/en/tutorials/developertools/sourcemaps/#toc-sourceurl
+    var sourceURL = useSourceURL
+      ? '\n//@ sourceURL=' + (options.sourceURL || '/lodash/template/source[' + (templateCounter++) + ']')
+      : '';
+
+    try {
+      result = Function('_', 'return ' + source + sourceURL)(lodash);
+    } catch(e) {
+      e.source = source;
+      throw e;
+    }
+
+    if (data) {
+      return result(data);
+    }
+    // provide the compiled function's source via its `toString` method, in
+    // supported environments, or the `source` property as a convenience for
+    // inlining compiled templates during the build process
+    result.source = source;
+    return result;
+  }
+
+  /**
+   * Executes the `callback` function `n` times, returning an array of the results
+   * of each `callback` execution. The `callback` is bound to `thisArg` and invoked
+   * with one argument; (index).
+   *
+   * @static
+   * @memberOf _
+   * @category Utilities
+   * @param {Number} n The number of times to execute the callback.
+   * @param {Function} callback The function called per iteration.
+   * @param {Mixed} [thisArg] The `this` binding of `callback`.
+   * @returns {Array} Returns a new array of the results of each `callback` execution.
+   * @example
+   *
+   * var diceRolls = _.times(3, _.partial(_.random, 1, 6));
+   * // => [3, 6, 4]
+   *
+   * _.times(3, function(n) { mage.castSpell(n); });
+   * // => calls `mage.castSpell(n)` three times, passing `n` of `0`, `1`, and `2` respectively
+   *
+   * _.times(3, function(n) { this.cast(n); }, mage);
+   * // => also calls `mage.castSpell(n)` three times
+   */
+  function times(n, callback, thisArg) {
+    n = +n || 0;
+    var index = -1,
+        result = Array(n);
+
+    while (++index < n) {
+      result[index] = callback.call(thisArg, index);
+    }
+    return result;
+  }
+
+  /**
+   * The opposite of `_.escape`, this method converts the HTML entities
+   * `&amp;`, `&lt;`, `&gt;`, `&quot;`, and `&#x27;` in `string` to their
+   * corresponding characters.
+   *
+   * @static
+   * @memberOf _
+   * @category Utilities
+   * @param {String} string The string to unescape.
+   * @returns {String} Returns the unescaped string.
+   * @example
+   *
+   * _.unescape('Moe, Larry &amp; Curly');
+   * // => "Moe, Larry & Curly"
+   */
+  function unescape(string) {
+    return string == null ? '' : (string + '').replace(reEscapedHtml, unescapeHtmlChar);
+  }
+
+  /**
+   * Generates a unique id. If `prefix` is passed, the id will be appended to it.
+   *
+   * @static
+   * @memberOf _
+   * @category Utilities
+   * @param {String} [prefix] The value to prefix the id with.
+   * @returns {Number|String} Returns a numeric id if no prefix is passed, else
+   *  a string id may be returned.
+   * @example
+   *
+   * _.uniqueId('contact_');
+   * // => 'contact_104'
+   */
+  function uniqueId(prefix) {
+    var id = idCounter++;
+    return prefix ? prefix + id : id;
+  }
+
+  /*--------------------------------------------------------------------------*/
+
+  /**
+   * Wraps the value in a `lodash` wrapper object.
+   *
+   * @static
+   * @memberOf _
+   * @category Chaining
+   * @param {Mixed} value The value to wrap.
+   * @returns {Object} Returns the wrapper object.
+   * @example
+   *
+   * var stooges = [
+   *   { 'name': 'moe', 'age': 40 },
+   *   { 'name': 'larry', 'age': 50 },
+   *   { 'name': 'curly', 'age': 60 }
+   * ];
+   *
+   * var youngest = _.chain(stooges)
+   *     .sortBy(function(stooge) { return stooge.age; })
+   *     .map(function(stooge) { return stooge.name + ' is ' + stooge.age; })
+   *     .first()
+   *     .value();
+   * // => 'moe is 40'
+   */
+  function chain(value) {
+    value = new lodash(value);
+    value.__chain__ = true;
+    return value;
+  }
+
+  /**
+   * Invokes `interceptor` with the `value` as the first argument, and then
+   * returns `value`. The purpose of this method is to "tap into" a method chain,
+   * in order to perform operations on intermediate results within the chain.
+   *
+   * @static
+   * @memberOf _
+   * @category Chaining
+   * @param {Mixed} value The value to pass to `interceptor`.
+   * @param {Function} interceptor The function to invoke.
+   * @returns {Mixed} Returns `value`.
+   * @example
+   *
+   * _.chain([1, 2, 3, 200])
+   *  .filter(function(num) { return num % 2 == 0; })
+   *  .tap(alert)
+   *  .map(function(num) { return num * num })
+   *  .value();
+   * // => // [2, 200] (alerted)
+   * // => [4, 40000]
+   */
+  function tap(value, interceptor) {
+    interceptor(value);
+    return value;
+  }
+
+  /**
+   * Enables method chaining on the wrapper object.
+   *
+   * @name chain
+   * @deprecated
+   * @memberOf _
+   * @category Chaining
+   * @returns {Mixed} Returns the wrapper object.
+   * @example
+   *
+   * _([1, 2, 3]).value();
+   * // => [1, 2, 3]
+   */
+  function wrapperChain() {
+    this.__chain__ = true;
+    return this;
+  }
+
+  /**
+   * Extracts the wrapped value.
+   *
+   * @name value
+   * @memberOf _
+   * @category Chaining
+   * @returns {Mixed} Returns the wrapped value.
+   * @example
+   *
+   * _([1, 2, 3]).value();
+   * // => [1, 2, 3]
+   */
+  function wrapperValue() {
+    return this.__wrapped__;
+  }
+
+  /*--------------------------------------------------------------------------*/
+
+  /**
+   * The semantic version number.
+   *
+   * @static
+   * @memberOf _
+   * @type String
+   */
+  lodash.VERSION = '0.10.0';
+
+  // assign static methods
+  lodash.assign = assign;
+  lodash.after = after;
+  lodash.bind = bind;
+  lodash.bindAll = bindAll;
+  lodash.bindKey = bindKey;
+  lodash.chain = chain;
+  lodash.clone = clone;
+  lodash.compact = compact;
+  lodash.compose = compose;
+  lodash.contains = contains;
+  lodash.countBy = countBy;
+  lodash.debounce = debounce;
+  lodash.defaults = defaults;
+  lodash.defer = defer;
+  lodash.delay = delay;
+  lodash.difference = difference;
+  lodash.escape = escape;
+  lodash.every = every;
+  lodash.filter = filter;
+  lodash.find = find;
+  lodash.first = first;
+  lodash.flatten = flatten;
+  lodash.forEach = forEach;
+  lodash.forIn = forIn;
+  lodash.forOwn = forOwn;
+  lodash.functions = functions;
+  lodash.groupBy = groupBy;
+  lodash.has = has;
+  lodash.identity = identity;
+  lodash.indexOf = indexOf;
+  lodash.initial = initial;
+  lodash.intersection = intersection;
+  lodash.invert = invert;
+  lodash.invoke = invoke;
+  lodash.isArguments = isArguments;
+  lodash.isArray = isArray;
+  lodash.isBoolean = isBoolean;
+  lodash.isDate = isDate;
+  lodash.isElement = isElement;
+  lodash.isEmpty = isEmpty;
+  lodash.isEqual = isEqual;
+  lodash.isFinite = isFinite;
+  lodash.isFunction = isFunction;
+  lodash.isNaN = isNaN;
+  lodash.isNull = isNull;
+  lodash.isNumber = isNumber;
+  lodash.isObject = isObject;
+  lodash.isPlainObject = isPlainObject;
+  lodash.isRegExp = isRegExp;
+  lodash.isString = isString;
+  lodash.isUndefined = isUndefined;
+  lodash.keys = keys;
+  lodash.last = last;
+  lodash.lastIndexOf = lastIndexOf;
+  lodash.map = map;
+  lodash.max = max;
+  lodash.memoize = memoize;
+  lodash.merge = merge;
+  lodash.min = min;
+  lodash.mixin = mixin;
+  lodash.noConflict = noConflict;
+  lodash.object = object;
+  lodash.omit = omit;
+  lodash.once = once;
+  lodash.pairs = pairs;
+  lodash.partial = partial;
+  lodash.pick = pick;
+  lodash.pluck = pluck;
+  lodash.random = random;
+  lodash.range = range;
+  lodash.reduce = reduce;
+  lodash.reduceRight = reduceRight;
+  lodash.reject = reject;
+  lodash.rest = rest;
+  lodash.result = result;
+  lodash.shuffle = shuffle;
+  lodash.size = size;
+  lodash.some = some;
+  lodash.sortBy = sortBy;
+  lodash.sortedIndex = sortedIndex;
+  lodash.tap = tap;
+  lodash.template = template;
+  lodash.throttle = throttle;
+  lodash.times = times;
+  lodash.toArray = toArray;
+  lodash.unescape = unescape;
+  lodash.union = union;
+  lodash.uniq = uniq;
+  lodash.uniqueId = uniqueId;
+  lodash.values = values;
+  lodash.where = where;
+  lodash.without = without;
+  lodash.wrap = wrap;
+  lodash.zip = zip;
+
+  // assign aliases
+  lodash.all = every;
+  lodash.any = some;
+  lodash.collect = map;
+  lodash.detect = find;
+  lodash.drop = rest;
+  lodash.each = forEach;
+  lodash.extend = assign;
+  lodash.foldl = reduce;
+  lodash.foldr = reduceRight;
+  lodash.head = first;
+  lodash.include = contains;
+  lodash.inject = reduce;
+  lodash.methods = functions;
+  lodash.select = filter;
+  lodash.tail = rest;
+  lodash.take = first;
+  lodash.unique = uniq;
+
+  // add pseudo private property to be used and removed during the build process
+  lodash._iteratorTemplate = iteratorTemplate;
+
+  /*--------------------------------------------------------------------------*/
+
+  // add all static functions to `lodash.prototype`
+  mixin(lodash);
+
+  // add `lodash.prototype.chain` after calling `mixin()` to avoid overwriting
+  // it with the wrapped `lodash.chain`
+  lodash.prototype.chain = wrapperChain;
+  lodash.prototype.value = wrapperValue;
+
+  // add all mutator Array functions to the wrapper.
+  forEach(['pop', 'push', 'reverse', 'shift', 'sort', 'splice', 'unshift'], function(methodName) {
+    var func = arrayRef[methodName];
+
+    lodash.prototype[methodName] = function() {
+      var value = this.__wrapped__;
+      func.apply(value, arguments);
+
+      // avoid array-like object bugs with `Array#shift` and `Array#splice` in
+      // Firefox < 10 and IE < 9
+      if (hasObjectSpliceBug && value.length === 0) {
+        delete value[0];
+      }
+      if (this.__chain__) {
+        value = new lodash(value);
+        value.__chain__ = true;
+      }
+      return value;
+    };
+  });
+
+  // add all accessor Array functions to the wrapper.
+  forEach(['concat', 'join', 'slice'], function(methodName) {
+    var func = arrayRef[methodName];
+
+    lodash.prototype[methodName] = function() {
+      var value = this.__wrapped__,
+          result = func.apply(value, arguments);
+
+      if (this.__chain__) {
+        result = new lodash(result);
+        result.__chain__ = true;
+      }
+      return result;
+    };
+  });
+
+  /*--------------------------------------------------------------------------*/
+
+  // expose Lo-Dash
+  // some AMD build optimizers, like r.js, check for specific condition patterns like the following:
+  if (typeof define == 'function' && typeof define.amd == 'object' && define.amd) {
+    // Expose Lo-Dash to the global object even when an AMD loader is present in
+    // case Lo-Dash was injected by a third-party script and not intended to be
+    // loaded as a module. The global assignment can be reverted in the Lo-Dash
+    // module via its `noConflict()` method.
+    window._ = lodash;
+
+    // define as an anonymous module so, through path mapping, it can be
+    // referenced as the "underscore" module
+    define(function() {
+      return lodash;
+    });
+  }
+  // check for `exports` after `define` in case a build optimizer adds an `exports` object
+  else if (freeExports) {
+    // in Node.js or RingoJS v0.8.0+
+    if (typeof module == 'object' && module && module.exports == freeExports) {
+      (module.exports = lodash)._ = lodash;
+    }
+    // in Narwhal or RingoJS v0.7.0-
+    else {
+      freeExports._ = lodash;
+    }
+  }
+  else {
+    // in a browser or Rhino
+    window._ = lodash;
+  }
+}(this));
diff --git a/src/third_party/web_platform_tests/webaudio/js/vendor-prefixes.js b/src/third_party/web_platform_tests/webaudio/js/vendor-prefixes.js
new file mode 100644
index 0000000..287d692
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/js/vendor-prefixes.js
@@ -0,0 +1,2 @@
+window.AudioContext = window.AudioContext || window.webkitAudioContext;
+window.OfflineAudioContext = window.OfflineAudioContext || window.webkitOfflineAudioContext;
diff --git a/src/third_party/web_platform_tests/webaudio/refresh_idl.rb b/src/third_party/web_platform_tests/webaudio/refresh_idl.rb
new file mode 100755
index 0000000..a078475
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/refresh_idl.rb
@@ -0,0 +1,57 @@
+#!/usr/bin/env ruby
+require 'nokogiri'
+
+def base_dir
+  File.dirname(__FILE__)
+end
+
+def output_directory
+  File.join(base_dir, 'idl')
+end
+
+def specification
+  file = File.open(File.join(base_dir, 'specification.html'))
+  doc = Nokogiri::XML(file)
+  file.close
+  doc
+end
+
+def write_node_inner_text_to_file(filename, node)
+  File.open(filename, 'w') { |file| file.write(node.inner_text.strip) }
+  puts "Wrote: #{filename}"
+end
+
+def load_idl(id)
+  file = File.join(output_directory, id)
+  return false if !File.exist?(file)
+  File.read(file)
+end
+
+# Parse the specification writing each block of idl to its own file
+specification.css(".idl-code").each do |idl_block|
+  id = idl_block["id"]
+  write_node_inner_text_to_file(File.join(output_directory, id), idl_block) if id
+end
+
+# Update the idl in the pre blocks for each idl test
+idl_test_files = [
+  File.join(base_dir, 'the-audio-api', 'the-gainnode-interface', 'idl-test.html'),
+  File.join(base_dir, 'the-audio-api', 'the-audiodestinationnode-interface', 'idl-test.html'),
+  File.join(base_dir, 'the-audio-api', 'the-delaynode-interface', 'idl-test.html'),
+  File.join(base_dir, 'the-audio-api', 'the-audiobuffer-interface', 'idl-test.html'),
+]
+
+idl_test_files.each do |fn|
+  file = File.open(fn)
+  doc = Nokogiri::HTML(file)
+  file.close
+
+  doc.css('pre').each do |node|
+    node_id = node["id"]
+    if idl = load_idl(node_id)
+      node.content = idl
+    end
+  end
+
+  File.open(fn, 'w') { |file| file.write(doc.to_html)}
+end
diff --git a/src/third_party/web_platform_tests/webaudio/resources/sin_440Hz_-6dBFS_1s.wav b/src/third_party/web_platform_tests/webaudio/resources/sin_440Hz_-6dBFS_1s.wav
new file mode 100644
index 0000000..f660c3c
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/resources/sin_440Hz_-6dBFS_1s.wav
Binary files differ
diff --git a/src/third_party/web_platform_tests/webaudio/specification.html b/src/third_party/web_platform_tests/webaudio/specification.html
new file mode 100644
index 0000000..3178c5e
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/specification.html
@@ -0,0 +1,5911 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+      "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head>
+  <meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
+  <title>Web Audio API</title>
+  <meta name="revision"
+  content="$Id: Overview.html,v 1.4 2012/07/30 11:44:57 tmichel Exp $" />
+  <link rel="stylesheet" href="style.css" type="text/css" />
+  <!--
+          <script src="section-links.js" type="application/ecmascript"></script> 
+          <script src="dfn.js" type="application/ecmascript"></script> 
+          -->
+  <!--[if IE]>
+          <style type='text/css'>
+            .ignore {
+              -ms-filter:"progid:DXImageTransform.Microsoft.Alpha(Opacity=50)";
+              filter: alpha(opacity=50);
+            }
+          </style>
+          <![endif]-->
+  <link rel="stylesheet" href="//www.w3.org/StyleSheets/TR/W3C-ED"
+  type="text/css" />
+</head>
+
+<body>
+
+<div class="head">
+<p><a href="http://www.w3.org/"><img width="72" height="48" alt="W3C"
+src="http://www.w3.org/Icons/w3c_home" /></a> </p>
+
+<h1 id="title" class="title">Web Audio API </h1>
+
+<h2 id="w3c-date-document"><acronym
+title="World Wide Web Consortium">W3C</acronym> Editor's Draft
+</h2>
+<dl>
+  <dt>This version: </dt>
+    <dd><a
+    href="https://dvcs.w3.org/hg/audio/raw-file/tip/webaudio/specification.html">https://dvcs.w3.org/hg/audio/raw-file/tip/webaudio/specification.html</a>
+    </dd>
+  <dt>Latest published version: </dt>
+    <dd><a
+      href="http://www.w3.org/TR/webaudio/">http://www.w3.org/TR/webaudio/</a>
+    </dd>
+  <dt>Previous version: </dt>
+     <dd><a
+      href="http://www.w3.org/TR/2012/WD-webaudio-20120315/">http://www.w3.org/TR/2012/WD-webaudio-20120315/</a>
+    </dd>
+</dl>
+
+<dl>
+  <dt>Editor: </dt>
+    <dd>Chris Rogers, Google &lt;crogers@google.com&gt;</dd>
+</dl>
+
+<p class="copyright"><a
+href="http://www.w3.org/Consortium/Legal/ipr-notice#Copyright">Copyright</a> ©
+2012 <a href="http://www.w3.org/"><acronym
+title="World Wide Web Consortium">W3C</acronym></a><sup>®</sup> (<a
+href="http://www.csail.mit.edu/"><acronym
+title="Massachusetts Institute of Technology">MIT</acronym></a>, <a
+href="http://www.ercim.eu/"><acronym
+title="European Research Consortium for Informatics and Mathematics">ERCIM</acronym></a>,
+<a href="http://www.keio.ac.jp/">Keio</a>), All Rights Reserved. W3C <a
+href="http://www.w3.org/Consortium/Legal/ipr-notice#Legal_Disclaimer">liability</a>,
+<a
+href="http://www.w3.org/Consortium/Legal/ipr-notice#W3C_Trademarks">trademark</a>
+and <a href="http://www.w3.org/Consortium/Legal/copyright-documents">document
+use</a> rules apply.</p>
+<hr />
+</div>
+
+<div id="abstract-section" class="section">
+<h2 id="abstract">Abstract</h2>
+
+<p>This specification describes a high-level JavaScript <acronym
+title="Application Programming Interface">API</acronym> for processing and
+synthesizing audio in web applications. The primary paradigm is of an audio
+routing graph, where a number of <a
+href="#AudioNode-section"><code>AudioNode</code></a> objects are connected
+together to define the overall audio rendering. The actual processing will
+primarily take place in the underlying implementation (typically optimized
+Assembly / C / C++ code), but <a href="#JavaScriptProcessing-section">direct
+JavaScript processing and synthesis</a> is also supported. </p>
+
+<p>The <a href="#introduction">introductory</a> section covers the motivation
+behind this specification.</p>
+
+<p>This API is designed to be used in conjunction with other APIs and elements
+on the web platform, notably: XMLHttpRequest
+(using the <code>responseType</code> and <code>response</code> attributes). For
+games and interactive applications, it is anticipated to be used with the
+<code>canvas</code> 2D and WebGL 3D graphics APIs. </p>
+</div>
+
+<div id="sotd-section" class="section">
+<h2 id="sotd">Status of this Document</h2>
+
+
+<p><em>This section describes the status of this document at the time of its
+publication. Other documents may supersede this document. A list of current W3C
+publications and the latest revision of this technical report can be found in
+the <a href="http://www.w3.org/TR/">W3C technical reports index</a> at
+http://www.w3.org/TR/. </em></p>
+
+<p>This is the Editor's Draft of the <cite>Web Audio API</cite>
+specification. It has been produced by the <a
+href="http://www.w3.org/2011/audio/"><b>W3C Audio Working Group</b></a> , which
+is part of the W3C WebApps Activity.</p>
+
+<p></p>
+
+<p>Please send comments about this document to &lt;<a
+href="mailto:public-audio@w3.org">public-audio@w3.org</a>&gt; (<a
+href="http://lists.w3.org/Archives/Public/public-audio/">public archives</a> of
+the W3C audio mailing list). Web content and browser developers are encouraged
+to review this draft. </p>
+
+<p>Publication as a Working Draft does not imply endorsement by the W3C
+Membership. This is a draft document and may be updated, replaced or obsoleted
+by other documents at any time. It is inappropriate to cite this document as
+other than work in progress.</p>
+
+<p> This document was produced by a group operating under the <a href="http://www.w3.org/Consortium/Patent-Policy-20040205/">5 February 2004 W3C Patent Policy</a>. W3C maintains a <a rel="disclosure" href="http://www.w3.org/2004/01/pp-impl/46884/status">public list of any patent disclosures</a> made in connection with the deliverables of the group; that page also includes instructions for disclosing a patent. An individual who has actual knowledge of a patent which the individual believes contains <a href="http://www.w3.org/Consortium/Patent-Policy-20040205/#def-essential">Essential Claim(s)</a> must disclose the information in accordance with <a href="http://www.w3.org/Consortium/Patent-Policy-20040205/#sec-Disclosure">section 6 of the W3C Patent Policy</a>. </p>
+</div>
+
+<div id="toc">
+<h2 id="L13522">Table of Contents</h2>
+
+<div class="toc">
+<ul>
+  <li><a href="#introduction">1. Introduction</a>
+    <ul>
+      <li><a href="#Features">1.1. Features</a></li>
+      <li><a href="#ModularRouting">1.2. Modular Routing</a></li>
+      <li><a href="#APIOverview">1.3. API Overview</a></li>
+    </ul>
+  </li>
+  <li><a href="#conformance">2. Conformance</a></li>
+  <li><a href="#API-section">4. The Audio API</a>
+    <ul>
+      <li><a href="#AudioContext-section">4.1. The AudioContext Interface</a> 
+        <ul>
+          <li><a href="#attributes-AudioContext">4.1.1. Attributes</a></li>
+          <li><a href="#methodsandparams-AudioContext">4.1.2. Methods and
+            Parameters</a></li>
+          <li><a href="#lifetime-AudioContext">4.1.3. Lifetime</a></li>
+        </ul>
+      </li>
+      <li><a href="#OfflineAudioContext-section">4.1b. The OfflineAudioContext Interface</a> 
+      </li>
+
+      <li><a href="#AudioNode-section">4.2. The AudioNode Interface</a> 
+        <ul>
+          <li><a href="#attributes-AudioNode">4.2.1. Attributes</a></li>
+          <li><a href="#methodsandparams-AudioNode">4.2.2. Methods and
+            Parameters</a></li>
+          <li><a href="#lifetime-AudioNode">4.2.3. Lifetime</a></li>
+        </ul>
+      </li>
+      <li><a href="#AudioDestinationNode">4.4. The AudioDestinationNode
+        Interface</a> 
+        <ul>
+          <li><a href="#attributes-AudioDestinationNode">4.4.1. Attributes</a></li>
+        </ul>
+      </li>
+      <li><a href="#AudioParam">4.5. The AudioParam Interface</a> 
+        <ul>
+          <li><a href="#attributes-AudioParam">4.5.1. Attributes</a></li>
+          <li><a href="#methodsandparams-AudioParam">4.5.2. Methods and
+            Parameters</a></li>
+          <li><a href="#computedValue-AudioParam-section">4.5.3. Computation of Value</a></li>
+          <li><a href="#example1-AudioParam-section">4.5.4. AudioParam Automation Example</a></li>
+        </ul>
+      </li>
+      <li><a href="#GainNode">4.7. The GainNode Interface</a> 
+        <ul>
+          <li><a href="#attributes-GainNode">4.7.1. Attributes</a></li>
+        </ul>
+      </li>
+      <li><a href="#DelayNode">4.8. The DelayNode Interface</a> 
+        <ul>
+          <li><a href="#attributes-GainNode_2">4.8.1. Attributes</a></li>
+        </ul>
+      </li>
+      <li><a href="#AudioBuffer">4.9. The AudioBuffer Interface</a> 
+        <ul>
+          <li><a href="#attributes-AudioBuffer">4.9.1. Attributes</a></li>
+          <li><a href="#methodsandparams-AudioBuffer">4.9.2. Methods and
+            Parameters</a></li>
+        </ul>
+      </li>
+      <li><a href="#AudioBufferSourceNode">4.10. The AudioBufferSourceNode
+        Interface</a> 
+        <ul>
+          <li><a href="#attributes-AudioBufferSourceNode">4.10.1.
+          Attributes</a></li>
+          <li><a href="#methodsandparams-AudioBufferSourceNode">4.10.2. Methods and
+            Parameters</a></li>
+        </ul>
+      </li>
+      <li><a href="#MediaElementAudioSourceNode">4.11. The
+        MediaElementAudioSourceNode Interface</a></li>
+      <li><a href="#ScriptProcessorNode">4.12. The ScriptProcessorNode
+        Interface</a> 
+        <ul>
+          <li><a href="#attributes-ScriptProcessorNode">4.12.1. Attributes</a></li>
+        </ul>
+      </li>
+      <li><a href="#AudioProcessingEvent">4.13. The AudioProcessingEvent
+        Interface</a> 
+        <ul>
+          <li><a href="#attributes-AudioProcessingEvent">4.13.1. Attributes</a></li>
+        </ul>
+      </li>
+      <li><a href="#PannerNode">4.14. The PannerNode Interface</a> 
+        <ul>
+          <li><a href="#attributes-PannerNode_attributes">4.14.2.
+            Attributes</a></li>
+          <li><a href="#Methods_and_Parameters">4.14.3. Methods and
+          Parameters</a></li>
+        </ul>
+      </li>
+      <li><a href="#AudioListener">4.15. The AudioListener Interface</a> 
+        <ul>
+          <li><a href="#attributes-AudioListener">4.15.1. Attributes</a></li>
+          <li><a href="#L15842">4.15.2. Methods and Parameters</a></li>
+        </ul>
+      </li>
+      <li><a href="#ConvolverNode">4.16. The ConvolverNode Interface</a> 
+        <ul>
+          <li><a href="#attributes-ConvolverNode">4.16.1. Attributes</a></li>
+        </ul>
+      </li>
+      <li><a href="#AnalyserNode">4.17. The AnalyserNode
+        Interface</a> 
+        <ul>
+          <li><a href="#attributes-ConvolverNode_2">4.17.1. Attributes</a></li>
+          <li><a href="#methods-and-parameters">4.17.2. Methods and
+          Parameters</a></li>
+        </ul>
+      </li>
+      <li><a href="#ChannelSplitterNode">4.18. The ChannelSplitterNode
+        Interface</a> 
+        <ul>
+          <li><a href="#example-1">Example:</a></li>
+        </ul>
+      </li>
+      <li><a href="#ChannelMergerNode">4.19. The ChannelMergerNode Interface</a> 
+        <ul>
+          <li><a href="#example-2">Example:</a></li>
+        </ul>
+      </li>
+      <li><a href="#DynamicsCompressorNode">4.20. The DynamicsCompressorNode
+        Interface</a> 
+        <ul>
+          <li><a href="#attributes-DynamicsCompressorNode">4.20.1.
+          Attributes</a></li>
+        </ul>
+      </li>
+      <li><a href="#BiquadFilterNode">4.21. The BiquadFilterNode Interface</a> 
+        <ul>
+          <li><a href="#BiquadFilterNode-description">4.21.1 Lowpass</a></li>
+          <li><a href="#HIGHPASS">4.21.2 Highpass</a></li>
+          <li><a href="#BANDPASS">4.21.3 Bandpass</a></li>
+          <li><a href="#LOWSHELF">4.21.4 Lowshelf</a></li>
+          <li><a href="#L16352">4.21.5 Highshelf</a></li>
+          <li><a href="#PEAKING">4.21.6 Peaking</a></li>
+          <li><a href="#NOTCH">4.21.7 Notch</a></li>
+          <li><a href="#ALLPASS">4.21.8 Allpass</a></li>
+          <li><a href="#Methods">4.21.9. Methods</a></li>
+        </ul>
+      </li>
+      <li><a href="#WaveShaperNode">4.22. The WaveShaperNode Interface</a> 
+        <ul>
+          <li><a href="#attributes-WaveShaperNode">4.22.1.
+            Attributes</a></li>
+        </ul>
+      </li>
+      <li><a href="#OscillatorNode">4.23. The OscillatorNode Interface</a> 
+        <ul>
+          <li><a href="#attributes-OscillatorNode">4.23.1.
+            Attributes</a></li>
+            <li><a href="#methodsandparams-OscillatorNode-section">4.23.2. Methods and
+            Parameters</a></li>
+        </ul>
+      </li>
+      <li><a href="#PeriodicWave">4.24. The PeriodicWave Interface</a> 
+      </li>
+      <li><a href="#MediaStreamAudioSourceNode">4.25. The
+        MediaStreamAudioSourceNode Interface</a></li>
+      <li><a href="#MediaStreamAudioDestinationNode">4.26. The
+        MediaStreamAudioDestinationNode Interface</a></li>
+    </ul>
+  </li>
+  <li><a href="#MixerGainStructure">6. Mixer Gain Structure</a> 
+    <ul>
+      <li><a href="#background">Background</a></li>
+      <li><a href="#SummingJunction">Summing Inputs</a></li>
+      <li><a href="#gain-Control">Gain Control</a></li>
+      <li><a href="#Example-mixer-with-send-busses">Example: Mixer with Send
+        Busses</a></li>
+    </ul>
+  </li>
+  <li><a href="#DynamicLifetime">7. Dynamic Lifetime</a> 
+    <ul>
+      <li><a href="#DynamicLifetime-background">Background</a></li>
+      <li><a href="#Example-DynamicLifetime">Example</a></li>
+    </ul>
+  </li>
+  <li><a href="#UpMix">9. Channel up-mixing and down-mixing</a> 
+    <ul>
+      <li><a href="#ChannelLayouts">9.1. Speaker Channel Layouts</a>
+      <ul>
+        <li><a href="#ChannelOrdering">9.1.1. Channel Ordering</a></li>
+        <li><a href="#UpMix-sub">9.1.2. Up Mixing</a></li>
+        <li><a href="#down-mix">9.1.3. Down Mixing</a></li>
+      </ul>
+      </li>
+
+      <li><a href="#ChannelRules-section">9.2. Channel Rules Examples</a>
+
+    </ul>
+  </li>
+  <li><a href="#Spatialization">11. Spatialization / Panning </a> 
+    <ul>
+      <li><a href="#Spatialization-background">Background</a></li>
+      <li><a href="#Spatialization-panning-algorithm">Panning Algorithm</a></li>
+      <li><a href="#Spatialization-distance-effects">Distance Effects</a></li>
+      <li><a href="#Spatialization-sound-cones">Sound Cones</a></li>
+      <li><a href="#Spatialization-doppler-shift">Doppler Shift</a></li>
+    </ul>
+  </li>
+  <li><a href="#Convolution">12. Linear Effects using Convolution</a> 
+    <ul>
+      <li><a href="#Convolution-background">Background</a></li>
+      <li><a href="#Convolution-motivation">Motivation for use as a
+      Standard</a></li>
+      <li><a href="#Convolution-implementation-guide">Implementation Guide</a></li>
+      <li><a href="#Convolution-reverb-effect">Reverb Effect (with
+        matrixing)</a></li>
+      <li><a href="#recording-impulse-responses">Recording Impulse
+      Responses</a></li>
+      <li><a href="#tools">Tools</a></li>
+      <li><a href="#recording-setup">Recording Setup</a></li>
+      <li><a href="#warehouse">The Warehouse Space</a></li>
+    </ul>
+  </li>
+  <li><a href="#JavaScriptProcessing">13. JavaScript Synthesis and
+    Processing</a> 
+    <ul>
+      <li><a href="#custom-DSP-effects">Custom DSP Effects</a></li>
+      <li><a href="#educational-applications">Educational Applications</a></li>
+      <li><a href="#javaScript-performance">JavaScript Performance</a></li>
+    </ul>
+  </li>
+  <li><a href="#Performance">15. Performance Considerations</a> 
+    <ul>
+      <li><a href="#Latency">15.1. Latency: What it is and Why it's
+        Important</a></li>
+      <li><a href="#audio-glitching">15.2. Audio Glitching</a></li>
+      <li><a href="#hardware-scalability">15.3. Hardware Scalability</a> 
+        <ul>
+          <li><a href="#CPU-monitoring">15.3.1. CPU monitoring</a></li>
+          <li><a href="#Voice-dropping">15.3.2. Voice Dropping</a></li>
+          <li><a href="#Simplification-of-Effects-Processing">15.3.3.
+            Simplification of Effects Processing</a></li>
+          <li><a href="#Sample-rate">15.3.4. Sample Rate</a></li>
+          <li><a href="#pre-flighting">15.3.5. Pre-flighting</a></li>
+          <li><a href="#Authoring-for-different-user-agents">15.3.6. Authoring
+            for different user agents</a></li>
+          <li><a href="#Scalability-of-Direct-JavaScript-Synthesis">15.3.7.
+            Scalability of Direct JavaScript Synthesis / Processing</a></li>
+        </ul>
+      </li>
+      <li><a href="#JavaScriptPerformance">15.4. JavaScript Issues with
+        real-time Processing and Synthesis: </a></li>
+    </ul>
+  </li>
+  <li><a href="#ExampleApplications">16. Example Applications</a> 
+    <ul>
+      <li><a href="#basic-sound-playback">Basic Sound Playback</a></li>
+      <li><a href="#threeD-environmentse-and-games">3D Environments and
+        Games</a></li>
+      <li><a href="#musical-applications">Musical Applications</a></li>
+      <li><a href="#music-visualizers">Music Visualizers</a></li>
+      <li><a href="#educational-applications_2">Educational
+      Applications</a></li>
+      <li><a href="#artistic-audio-exploration">Artistic Audio
+      Exploration</a></li>
+    </ul>
+  </li>
+  <li><a href="#SecurityConsiderations">17. Security Considerations</a></li>
+  <li><a href="#PrivacyConsiderations">18. Privacy Considerations</a></li>
+  <li><a href="#requirements">19. Requirements and Use Cases</a></li>
+  <li><a href="#OldNames">20. Old Names</a></li>
+  <li><a href="#L17310">A. References</a> 
+    <ul>
+      <li><a href="#Normative-references">A.1 Normative references</a></li>
+      <li><a href="#Informative-references">A.2 Informative references</a></li>
+    </ul>
+  </li>
+  <li><a href="#L17335">B. Acknowledgements</a></li>
+  <li><a href="#ChangeLog">C. Web Audio API Change Log</a></li>
+</ul>
+</div>
+</div>
+
+<div id="sections">
+
+<div id="div-introduction" class="section">
+<h2 id="introduction">1. Introduction</h2>
+
+<p class="norm">This section is informative.</p>
+
+<p>Audio on the web has been fairly primitive up to this point and until very
+recently has had to be delivered through plugins such as Flash and QuickTime.
+The introduction of the <code>audio</code> element in HTML5 is very important,
+allowing for basic streaming audio playback. But, it is not powerful enough to
+handle more complex audio applications. For sophisticated web-based games or
+interactive applications, another solution is required. It is a goal of this
+specification to include the capabilities found in modern game audio engines as
+well as some of the mixing, processing, and filtering tasks that are found in
+modern desktop audio production applications. </p>
+
+<p>The APIs have been designed with a wide variety of <a
+href="#ExampleApplications-section">use cases</a> in mind. Ideally, it should
+be able to support <i>any</i> use case which could reasonably be implemented
+with an optimized C++ engine controlled via JavaScript and run in a browser.
+That said, modern desktop audio software can have very advanced capabilities,
+some of which would be difficult or impossible to build with this system.
+Apple's Logic Audio is one such application which has support for external MIDI
+controllers, arbitrary plugin audio effects and synthesizers, highly optimized
+direct-to-disk audio file reading/writing, tightly integrated time-stretching,
+and so on. Nevertheless, the proposed system will be quite capable of
+supporting a large range of reasonably complex games and interactive
+applications, including musical ones. And it can be a very good complement to
+the more advanced graphics features offered by WebGL. The API has been designed
+so that more advanced capabilities can be added at a later time. </p>
+
+<div id="Features-section" class="section">
+<h2 id="Features">1.1. Features</h2>
+</div>
+
+<p>The API supports these primary features: </p>
+<ul>
+  <li><a href="#ModularRouting-section">Modular routing</a> for simple or
+    complex mixing/effect architectures, including <a
+    href="#MixerGainStructure-section">multiple sends and submixes</a>.</li>
+  <li><a href="#AudioParam">Sample-accurate scheduled sound
+    playback</a> with low <a href="#Latency-section">latency</a> for musical
+    applications requiring a very high degree of rhythmic precision such as
+    drum machines and sequencers. This also includes the possibility of <a
+    href="#DynamicLifetime-section">dynamic creation</a> of effects. </li>
+  <li>Automation of audio parameters for envelopes, fade-ins / fade-outs,
+    granular effects, filter sweeps, LFOs etc. </li>
+  <li>Flexible handling of channels in an audio stream, allowing them to be split and merged.</li>
+
+  <li>Processing of audio sources from an <code>audio</code> or
+    <code>video</code> <a href="#MediaElementAudioSourceNode">media
+    element</a>. </li>
+  
+    <li>Processing live audio input using a <a href="#MediaStreamAudioSourceNode">MediaStream</a>
+    from getUserMedia().  
+     </li>
+    
+  <li>Integration with WebRTC  
+  <ul>  
+  
+
+   <li>Processing audio received from a remote peer using a <a href="#MediaStreamAudioSourceNode">MediaStream</a>.
+    </li>
+
+  <li>Sending a generated or processed audio stream to a remote peer using a <a href="#MediaStreamAudioDestinationNode">MediaStream</a>.
+     </li>
+
+     </ul>
+     </li>
+
+  <li>Audio stream synthesis and processing <a
+    href="#JavaScriptProcessing-section">directly in JavaScript</a>. </li>
+  <li><a href="#Spatialization-section">Spatialized audio</a> supporting a wide
+    range of 3D games and immersive environments: 
+    <ul>
+      <li>Panning models: equal-power, HRTF, pass-through </li>
+      <li>Distance Attenuation </li>
+      <li>Sound Cones </li>
+      <li>Obstruction / Occlusion </li>
+      <li>Doppler Shift </li>
+      <li>Source / Listener based</li>
+    </ul>
+  </li>
+  <li>A <a href="#Convolution-section">convolution engine</a> for a wide range
+    of linear effects, especially very high-quality room effects. Here are some
+    examples of possible effects: 
+    <ul>
+      <li>Small / large room </li>
+      <li>Cathedral </li>
+      <li>Concert hall </li>
+      <li>Cave </li>
+      <li>Tunnel </li>
+      <li>Hallway </li>
+      <li>Forest </li>
+      <li>Amphitheater </li>
+      <li>Sound of a distant room through a doorway </li>
+      <li>Extreme filters</li>
+      <li>Strange backwards effects</li>
+      <li>Extreme comb filter effects </li>
+    </ul>
+  </li>
+  <li>Dynamics compression for overall control and sweetening of the mix </li>
+  <li>Efficient <a href="#AnalyserNode">real-time time-domain and
+    frequency analysis / music visualizer support</a></li>
+  <li>Efficient biquad filters for lowpass, highpass, and other common filters.
+  </li>
+  <li>A Waveshaping effect for distortion and other non-linear effects</li>
+  <li>Oscillators</li>
+
+</ul>
+
+<div id="ModularRouting-section">
+<h2 id="ModularRouting">1.2. Modular Routing</h2>
+
+<p>Modular routing allows arbitrary connections between different <a
+href="#AudioNode-section"><code>AudioNode</code></a> objects. Each node can
+have <dfn>inputs</dfn> and/or <dfn>outputs</dfn>. A <dfn>source node</dfn> has no inputs
+and a single output. A <dfn>destination node</dfn>  has
+one input and no outputs, the most common example being <a
+href="#AudioDestinationNode-section"><code>AudioDestinationNode</code></a> the final destination to the audio
+hardware. Other nodes such as filters can be placed between the source and destination nodes.
+The developer doesn't have to worry about low-level stream format details
+when two objects are connected together; <a href="#UpMix-section">the right
+thing just happens</a>. For example, if a mono audio stream is connected to a
+stereo input it should just mix to left and right channels <a
+href="#UpMix-section">appropriately</a>. </p>
+
+<p>In the simplest case, a single source can be routed directly to the output.
+All routing occurs within an <a
+href="#AudioContext-section"><code>AudioContext</code></a> containing a single
+<a href="#AudioDestinationNode-section"><code>AudioDestinationNode</code></a>:
+</p>
+<img alt="modular routing" src="images/modular-routing1.png" /> 
+
+<p>Illustrating this simple routing, here's a simple example playing a single
+sound: </p>
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">ECMAScript</span> </div>
+
+<div class="blockContent">
+<pre class="code"><code class="es-code"> 
+
+var context = new AudioContext();
+
+function playSound() {
+    var source = context.createBufferSource();
+    source.buffer = dogBarkingBuffer;
+    source.connect(context.destination);
+    source.start(0);
+}
+                    </code></pre>
+</div>
+</div>
+
+<p>Here's a more complex example with three sources and a convolution reverb
+send with a dynamics compressor at the final output stage: </p>
+<img alt="modular routing2" src="images/modular-routing2.png" /> 
+
+<div class="example">
+
+<div class="exampleHeader">
+Example</div>
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">ECMAScript</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="es-code"> 
+
+var context = 0;
+var compressor = 0;
+var reverb = 0;
+
+var source1 = 0;
+var source2 = 0;
+var source3 = 0;
+
+var lowpassFilter = 0;
+var waveShaper = 0;
+var panner = 0;
+
+var dry1 = 0;
+var dry2 = 0;
+var dry3 = 0;
+
+var wet1 = 0;
+var wet2 = 0;
+var wet3 = 0;
+
+var masterDry = 0;
+var masterWet = 0;
+
+function setupRoutingGraph () {
+    context = new AudioContext();
+
+    // Create the effects nodes.
+    lowpassFilter = context.createBiquadFilter();
+    waveShaper = context.createWaveShaper();
+    panner = context.createPanner();
+    compressor = context.createDynamicsCompressor();
+    reverb = context.createConvolver();
+
+    // Create master wet and dry.
+    masterDry = context.createGain();
+    masterWet = context.createGain();
+
+    // Connect final compressor to final destination.
+    compressor.connect(context.destination);
+
+    // Connect master dry and wet to compressor.
+    masterDry.connect(compressor);
+    masterWet.connect(compressor);
+
+    // Connect reverb to master wet.
+    reverb.connect(masterWet);
+
+    // Create a few sources.
+    source1 = context.createBufferSource();
+    source2 = context.createBufferSource();
+    source3 = context.createOscillator();
+
+    source1.buffer = manTalkingBuffer;
+    source2.buffer = footstepsBuffer;
+    source3.frequency.value = 440;
+
+    // Connect source1
+    dry1 = context.createGain();
+    wet1 = context.createGain();
+    source1.connect(lowpassFilter);
+    lowpassFilter.connect(dry1);
+    lowpassFilter.connect(wet1);
+    dry1.connect(masterDry);
+    wet1.connect(reverb);
+
+    // Connect source2
+    dry2 = context.createGain();
+    wet2 = context.createGain();
+    source2.connect(waveShaper);
+    waveShaper.connect(dry2);
+    waveShaper.connect(wet2);
+    dry2.connect(masterDry);
+    wet2.connect(reverb);
+
+    // Connect source3
+    dry3 = context.createGain();
+    wet3 = context.createGain();
+    source3.connect(panner);
+    panner.connect(dry3);
+    panner.connect(wet3);
+    dry3.connect(masterDry);
+    wet3.connect(reverb);
+    
+    // Start the sources now.
+    source1.start(0);
+    source2.start(0);
+    source3.start(0);
+}
+ </code></pre>
+</div>
+</div>
+</div>
+</div>
+
+</div>
+
+<div id="APIOverview-section" class="section">
+<h2 id="APIOverview">1.3. API Overview</h2>
+</div>
+
+<p>The interfaces defined are: </p>
+<ul>
+  <li>An <a class="dfnref" href="#AudioContext-section">AudioContext</a>
+    interface, which contains an audio signal graph representing connections
+    between AudioNodes. </li>
+  <li>An <a class="dfnref" href="#AudioNode-section">AudioNode</a> interface,
+    which represents audio sources, audio outputs, and intermediate processing
+    modules. AudioNodes can be dynamically connected together in a <a
+    href="#ModularRouting-section">modular fashion</a>. <code>AudioNodes</code>
+    exist in the context of an <code>AudioContext</code> </li>
+  <li>An <a class="dfnref"
+    href="#AudioDestinationNode-section">AudioDestinationNode</a> interface, an
+    AudioNode subclass representing the final destination for all rendered
+    audio. </li>
+  <li>An <a class="dfnref" href="#AudioBuffer-section">AudioBuffer</a>
+    interface, for working with memory-resident audio assets. These can
+    represent one-shot sounds, or longer audio clips. </li>
+  <li>An <a class="dfnref"
+    href="#AudioBufferSourceNode-section">AudioBufferSourceNode</a> interface,
+    an AudioNode which generates audio from an AudioBuffer. </li>
+  <li>A <a class="dfnref"
+    href="#MediaElementAudioSourceNode-section">MediaElementAudioSourceNode</a>
+    interface, an AudioNode which is the audio source from an
+    <code>audio</code>, <code>video</code>, or other media element. </li>
+  <li>A <a class="dfnref"
+    href="#MediaStreamAudioSourceNode-section">MediaStreamAudioSourceNode</a>
+    interface, an AudioNode which is the audio source from a
+    MediaStream such as live audio input, or from a remote peer. </li>
+  <li>A <a class="dfnref"
+    href="#MediaStreamAudioDestinationNode-section">MediaStreamAudioDestinationNode</a>
+    interface, an AudioNode which is the audio destination to a
+    MediaStream sent to a remote peer. </li>
+  <li>A <a class="dfnref"
+    href="#ScriptProcessorNode-section">ScriptProcessorNode</a> interface, an
+    AudioNode for generating or processing audio directly in JavaScript. </li>
+  <li>An <a class="dfnref"
+    href="#AudioProcessingEvent-section">AudioProcessingEvent</a> interface,
+    which is an event type used with <code>ScriptProcessorNode</code> objects.
+  </li>
+  <li>An <a class="dfnref" href="#AudioParam-section">AudioParam</a> interface,
+    for controlling an individual aspect of an AudioNode's functioning, such as
+    volume. </li>
+  <li>A <a class="dfnref" href="#GainNode-section">GainNode</a>
+    interface, for explicit gain control. Because inputs to AudioNodes support
+    multiple connections (as a unity-gain summing junction), mixers can be <a
+    href="#MixerGainStructure-section">easily built</a> with GainNodes.
+  </li>
+  <li>A <a class="dfnref" href="#BiquadFilterNode-section">BiquadFilterNode</a>
+    interface, an AudioNode for common low-order filters such as: 
+    <ul>
+      <li>Low Pass</li>
+      <li>High Pass </li>
+      <li>Band Pass </li>
+      <li>Low Shelf </li>
+      <li>High Shelf </li>
+      <li>Peaking </li>
+      <li>Notch </li>
+      <li>Allpass </li>
+    </ul>
+  </li>
+  <li>A <a class="dfnref" href="#DelayNode-section">DelayNode</a> interface, an
+    AudioNode which applies a dynamically adjustable variable delay. </li>
+  <li>A <a class="dfnref" href="#PannerNode-section">PannerNode</a>
+    interface, for spatializing / positioning audio in 3D space. </li>
+  <li>An <a class="dfnref" href="#AudioListener-section">AudioListener</a>
+    interface, which works with a <code>PannerNode</code> for
+    spatialization. </li>
+  <li>A <a class="dfnref" href="#ConvolverNode-section">ConvolverNode</a>
+    interface, an AudioNode for applying a <a
+    href="#Convolution-section">real-time linear effect</a> (such as the sound
+    of a concert hall). </li>
+  <li>A <a class="dfnref"
+    href="#AnalyserNode-section">AnalyserNode</a> interface,
+    for use with music visualizers, or other visualization applications. </li>
+  <li>A <a class="dfnref"
+    href="#ChannelSplitterNode-section">ChannelSplitterNode</a> interface,
+    for accessing the individual channels of an audio stream in the routing
+    graph. </li>
+  <li>A <a class="dfnref"
+    href="#ChannelMergerNode-section">ChannelMergerNode</a> interface, for
+    combining channels from multiple audio streams into a single audio stream.
+  </li>
+  <li>A <a
+    href="#DynamicsCompressorNode-section">DynamicsCompressorNode</a> interface, an 
+    AudioNode for dynamics compression. </li>
+  <li>A <a class="dfnref" href="#dfn-WaveShaperNode">WaveShaperNode</a>
+    interface, an AudioNode which applies a non-linear waveshaping effect for
+    distortion and other more subtle warming effects. </li>
+  <li>An <a class="dfnref" href="#dfn-OscillatorNode">OscillatorNode</a>
+    interface, an audio source generating a periodic waveform. </li>
+</ul>
+</div>
+
+<div id="conformance-section" class="section">
+<h2 id="conformance">2. Conformance</h2>
+
+<p>Everything in this specification is normative except for examples and
+sections marked as being informative. </p>
+
+<p>The keywords “<span class="rfc2119">MUST</span>”, “<span
+class="rfc2119">MUST NOT</span>”, “<span
+class="rfc2119">REQUIRED</span>”, “<span class="rfc2119">SHALL</span>”,
+“<span class="rfc2119">SHALL NOT</span>”, “<span
+class="rfc2119">RECOMMENDED</span>”, “<span class="rfc2119">MAY</span>”
+and “<span class="rfc2119">OPTIONAL</span>” in this document are to be
+interpreted as described in <cite><a href="http://www.ietf.org/rfc/rfc2119">Key
+words for use in RFCs to Indicate Requirement Levels</a></cite> <a
+href="#RFC2119">[RFC2119]</a>. </p>
+
+<p>The following conformance classes are defined by this specification: </p>
+<dl>
+  <dt><dfn id="dfn-conforming-implementation">conforming
+  implementation</dfn></dt>
+    <dd><p>A user agent is considered to be a <a class="dfnref"
+      href="#dfn-conforming-implementation">conforming implementation</a> if it
+      satisfies all of the <span class="rfc2119">MUST</span>-, <span
+      class="rfc2119">REQUIRED</span>- and <span
+      class="rfc2119">SHALL</span>-level criteria in this specification that
+      apply to implementations. </p>
+    </dd>
+</dl>
+</div>
+
+<div id="terminology-section" class="section">
+
+<div id="API-section-section" class="section">
+<h2 id="API-section">4. The Audio API</h2>
+</div>
+
+<div id="AudioContext-section-section" class="section">
+<h2 id="AudioContext-section">4.1. The AudioContext Interface</h2>
+
+<p>This interface represents a set of <a
+href="#AudioNode-section"><code>AudioNode</code></a> objects and their
+connections. It allows for arbitrary routing of signals to the <a
+href="#AudioDestinationNode-section"><code>AudioDestinationNode</code></a>
+(what the user ultimately hears). Nodes are created from the context and are
+then <a href="#ModularRouting-section">connected</a> together. In most use
+cases, only a single AudioContext is used per document.</p>
+
+<br>
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">Web IDL</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="idl-code" id="audio-context-idl">
+
+callback DecodeSuccessCallback = void (AudioBuffer decodedData);
+callback DecodeErrorCallback = void ();
+
+[Constructor]
+interface <dfn id="dfn-AudioContext">AudioContext</dfn> : EventTarget {
+
+    readonly attribute AudioDestinationNode destination;
+    readonly attribute float sampleRate;
+    readonly attribute double currentTime;
+    readonly attribute AudioListener listener;
+
+    AudioBuffer createBuffer(unsigned long numberOfChannels, unsigned long length, float sampleRate);
+
+    void decodeAudioData(ArrayBuffer audioData,
+                         DecodeSuccessCallback successCallback,
+                         optional DecodeErrorCallback errorCallback);
+
+
+    <span class="comment">// AudioNode creation </span>
+    AudioBufferSourceNode createBufferSource();
+
+    MediaElementAudioSourceNode createMediaElementSource(HTMLMediaElement mediaElement);
+
+    MediaStreamAudioSourceNode createMediaStreamSource(MediaStream mediaStream);
+    MediaStreamAudioDestinationNode createMediaStreamDestination();
+
+    ScriptProcessorNode createScriptProcessor(optional unsigned long bufferSize = 0,
+                                              optional unsigned long numberOfInputChannels = 2,
+                                              optional unsigned long numberOfOutputChannels = 2);
+
+    AnalyserNode createAnalyser();
+    GainNode createGain();
+    DelayNode createDelay(optional double maxDelayTime = 1.0);
+    BiquadFilterNode createBiquadFilter();
+    WaveShaperNode createWaveShaper();
+    PannerNode createPanner();
+    ConvolverNode createConvolver();
+
+    ChannelSplitterNode createChannelSplitter(optional unsigned long numberOfOutputs = 6);
+    ChannelMergerNode createChannelMerger(optional unsigned long numberOfInputs = 6);
+
+    DynamicsCompressorNode createDynamicsCompressor();
+
+    OscillatorNode createOscillator();
+    PeriodicWave createPeriodicWave(Float32Array real, Float32Array imag);
+
+};
+</code></pre>
+</div>
+</div>
+
+<div id="attributes-AudioContext-section" class="section">
+<h3 id="attributes-AudioContext">4.1.1. Attributes</h3>
+<dl>
+  <dt id="dfn-destination"><code>destination</code></dt>
+    <dd><p>An <a
+      href="#AudioDestinationNode-section"><code>AudioDestinationNode</code></a>
+      with a single input representing the final destination for all audio.
+      Usually this will represent the actual audio hardware.
+      All AudioNodes actively rendering
+      audio will directly or indirectly connect to <code>destination</code>.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-sampleRate"><code>sampleRate</code></dt>
+    <dd><p>The sample rate (in sample-frames per second) at which the
+      AudioContext handles audio. It is assumed that all AudioNodes in the
+      context run at this rate. In making this assumption, sample-rate
+      converters or "varispeed" processors are not supported in real-time
+      processing.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-currentTime"><code>currentTime</code></dt>
+    <dd><p>This is a time in seconds which starts at zero when the context is
+      created and increases in real-time. All scheduled times are relative to
+      it. This is not a "transport" time which can be started, paused, and
+      re-positioned. It is always moving forward. A GarageBand-like timeline
+      transport system can be very easily built on top of this (in JavaScript).
+      This time corresponds to an ever-increasing hardware timestamp. </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-listener"><code>listener</code></dt>
+    <dd><p>An <a href="#AudioListener-section"><code>AudioListener</code></a>
+      which is used for 3D <a
+      href="#Spatialization-section">spatialization</a>.</p>
+    </dd>
+</dl>
+</div>
+
+<div id="methodsandparams-AudioContext-section" class="section">
+<h3 id="methodsandparams-AudioContext">4.1.2. Methods and Parameters</h3>
+<dl>
+  <dt id="dfn-createBuffer">The <code>createBuffer</code> method</dt>
+    <dd><p>Creates an AudioBuffer of the given size. The audio data in the
+      buffer will be zero-initialized (silent).  A NOT_SUPPORTED_ERR exception will be thrown if
+      the <code>numberOfChannels</code> or <code>sampleRate</code> are out-of-bounds,
+      or if length is 0.</p>
+      <p>The <dfn id="dfn-numberOfChannels">numberOfChannels</dfn> parameter
+      determines how many channels the buffer will have.  An implementation must support at least 32 channels. </p>
+      <p>The <dfn id="dfn-length">length</dfn> parameter determines the size of
+      the buffer in sample-frames. </p>
+      <p>The <dfn id="dfn-sampleRate_2">sampleRate</dfn> parameter describes
+      the sample-rate of the linear PCM audio data in the buffer in
+      sample-frames per second.  An implementation must support sample-rates in at least the range 22050 to 96000.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-decodeAudioData">The <code>decodeAudioData</code> method</dt>
+    <dd><p>Asynchronously decodes the audio file data contained in the
+      ArrayBuffer. The ArrayBuffer can, for example, be loaded from an XMLHttpRequest's
+      <code>response</code> attribute after setting the <code>responseType</code> to "arraybuffer".
+       Audio file data can be in any of the
+      formats supported by the <code>audio</code> element. </p>
+      <p><dfn id="dfn-audioData">audioData</dfn> is an ArrayBuffer containing
+      audio file data.</p>
+      <p><dfn id="dfn-successCallback">successCallback</dfn> is a callback
+      function which will be invoked when the decoding is finished. The single
+      argument to this callback is an AudioBuffer representing the decoded PCM
+      audio data.</p>
+      <p><dfn id="dfn-errorCallback">errorCallback</dfn> is a callback function
+      which will be invoked if there is an error decoding the audio file
+      data.</p>
+      
+      <p>
+      The following steps must be performed:
+      </p>
+      <ol>
+      
+      <li>Temporarily neuter the <dfn>audioData</dfn> ArrayBuffer in such a way that JavaScript code may not
+      access or modify the data.</li>
+      <li>Queue a decoding operation to be performed on another thread.</li>
+      <li>The decoding thread will attempt to decode the encoded <dfn>audioData</dfn> into linear PCM.
+      If a decoding error is encountered due to the audio format not being recognized or supported, or
+      because of corrupted/unexpected/inconsistent data then the <dfn>audioData</dfn> neutered state
+      will be restored to normal and the <dfn>errorCallback</dfn> will be
+      scheduled to run on the main thread's event loop and these steps will be terminated.</li>
+      <li>The decoding thread will take the result, representing the decoded linear PCM audio data,
+      and resample it to the sample-rate of the AudioContext if it is different from the sample-rate
+      of <dfn>audioData</dfn>.  The final result (after possibly sample-rate converting) will be stored
+      in an AudioBuffer.
+      </li>
+      <li>The <dfn>audioData</dfn> neutered state will be restored to normal
+      </li>
+      <li>
+      The <dfn>successCallback</dfn> function will be scheduled to run on the main thread's event loop
+      given the AudioBuffer from step (4) as an argument.
+      </li>
+      </ol>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-createBufferSource">The <code>createBufferSource</code>
+  method</dt>
+    <dd><p>Creates an <a
+      href="#AudioBufferSourceNode-section"><code>AudioBufferSourceNode</code></a>.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-createMediaElementSource">The <code>createMediaElementSource</code>
+  method</dt>
+    <dd><p>Creates a <a
+      href="#MediaElementAudioSourceNode-section"><code>MediaElementAudioSourceNode</code></a> given an HTMLMediaElement.
+      As a consequence of calling this method, audio playback from the HTMLMediaElement will be re-routed
+      into the processing graph of the AudioContext.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-createMediaStreamSource">The <code>createMediaStreamSource</code>
+  method</dt>
+    <dd><p>Creates a <a
+      href="#MediaStreamAudioSourceNode-section"><code>MediaStreamAudioSourceNode</code></a> given a MediaStream.
+      As a consequence of calling this method, audio playback from the MediaStream will be re-routed
+      into the processing graph of the AudioContext.</p>
+    </dd>
+</dl>
+
+<dl>
+  <dt id="dfn-createMediaStreamDestination">The <code>createMediaStreamDestination</code>
+  method</dt>
+    <dd><p>Creates a <a
+      href="#MediaStreamAudioDestinationNode-section"><code>MediaStreamAudioDestinationNode</code></a>.
+      </p>
+    </dd>
+</dl>
+
+<dl>
+  <dt id="dfn-createScriptProcessor">The <code>createScriptProcessor</code>
+  method</dt>
+    <dd><p>Creates a <a
+      href="#ScriptProcessorNode"><code>ScriptProcessorNode</code></a> for
+      direct audio processing using JavaScript.  An INDEX_SIZE_ERR exception MUST be thrown if <code>bufferSize</code> or <code>numberOfInputChannels</code> or <code>numberOfOutputChannels</code>
+      are outside the valid range. </p>
+      <p>The <dfn id="dfn-bufferSize">bufferSize</dfn> parameter determines the
+      buffer size in units of sample-frames.  If it's not passed in, or if the
+      value is 0, then the implementation will choose the best buffer size for
+      the given environment, which will be a constant power of 2 throughout the lifetime
+      of the node. Otherwise if the author explicitly specifies the bufferSize,
+      it must be one of the following values: 256, 512, 1024, 2048, 4096, 8192,
+      16384. This value controls how
+      frequently the <code>audioprocess</code> event is dispatched and
+      how many sample-frames need to be processed each call. Lower values for
+      <code>bufferSize</code> will result in a lower (better) <a
+      href="#Latency-section">latency</a>. Higher values will be necessary to
+      avoid audio breakup and <a href="#Glitching-section">glitches</a>.
+      It is recommended for authors to not specify this buffer size and allow
+      the implementation to pick a good buffer size to balance between latency
+      and audio quality.
+      </p>
+      <p>The <dfn id="dfn-numberOfInputChannels">numberOfInputChannels</dfn> parameter (defaults to 2)
+      determines the number of channels for this node's input.  Values of up to 32 must be supported. </p>
+      <p>The <dfn id="dfn-numberOfOutputChannels">numberOfOutputChannels</dfn> parameter (defaults to 2)
+      determines the number of channels for this node's output.  Values of up to 32 must be supported.</p>
+      <p>It is invalid for both <code>numberOfInputChannels</code> and
+      <code>numberOfOutputChannels</code> to be zero. </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-createAnalyser">The <code>createAnalyser</code> method</dt>
+    <dd><p>Creates a <a
+      href="#AnalyserNode-section"><code>AnalyserNode</code></a>.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-createGain">The <code>createGain</code> method</dt>
+    <dd><p>Creates a <a
+      href="#GainNode-section"><code>GainNode</code></a>.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-createDelay">The <code>createDelay</code> method</dt>
+    <dd><p>Creates a <a href="#DelayNode-section"><code>DelayNode</code></a>
+      representing a variable delay line. The initial default delay time will
+      be 0 seconds.</p>
+      <p>The <dfn id="dfn-maxDelayTime">maxDelayTime</dfn> parameter is
+      optional and specifies the maximum delay time in seconds allowed for the delay line.  If specified, this value MUST be
+      greater than zero and less than three minutes or a NOT_SUPPORTED_ERR exception will be thrown.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-createBiquadFilter">The <code>createBiquadFilter</code>
+  method</dt>
+    <dd><p>Creates a <a
+      href="#BiquadFilterNode-section"><code>BiquadFilterNode</code></a>
+      representing a second order filter which can be configured as one of
+      several common filter types.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-createWaveShaper">The <code>createWaveShaper</code>
+  method</dt>
+    <dd><p>Creates a <a
+      href="#WaveShaperNode-section"><code>WaveShaperNode</code></a>
+      representing a non-linear distortion.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-createPanner">The <code>createPanner</code> method</dt>
+    <dd><p>Creates a <a
+      href="#PannerNode-section"><code>PannerNode</code></a>.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-createConvolver">The <code>createConvolver</code> method</dt>
+    <dd><p>Creates a <a
+      href="#ConvolverNode-section"><code>ConvolverNode</code></a>.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-createChannelSplitter">The <code>createChannelSplitter</code>
+  method</dt>
+    <dd><p>Creates a <a
+      href="#ChannelSplitterNode-section"><code>ChannelSplitterNode</code></a>
+      representing a channel splitter.  An exception will be thrown for invalid parameter values.</p>
+      <p>The <dfn id="dfn-numberOfOutputs">numberOfOutputs</dfn> parameter
+      determines the number of outputs.  Values of up to 32 must be supported.  If not specified, then 6 will be used. </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-createChannelMerger">The <code>createChannelMerger</code>
+  method</dt>
+    <dd><p>Creates a <a
+      href="#ChannelMergerNode-section"><code>ChannelMergerNode</code></a>
+      representing a channel merger.  An exception will be thrown for invalid parameter values.</p>
+      <p>The <dfn id="dfn-numberOfInputs">numberOfInputs</dfn> parameter
+      determines the number of inputs.  Values of up to 32 must be supported.  If not specified, then 6 will be used. </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-createDynamicsCompressor">The
+  <code>createDynamicsCompressor</code> method</dt>
+    <dd><p>Creates a <a
+      href="#DynamicsCompressorNode-section"><code>DynamicsCompressorNode</code></a>.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-createOscillator">The
+  <code>createOscillator</code> method</dt>
+    <dd><p>Creates an <a
+      href="#OscillatorNode-section"><code>OscillatorNode</code></a>.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-createPeriodicWave">The
+  <code>createPeriodicWave</code> method</dt>
+    <dd><p>Creates a <a
+      href="#PeriodicWave-section"><code>PeriodicWave</code></a> representing a waveform containing arbitrary harmonic content.
+      The <code>real</code> and <code>imag</code> parameters must be of type <code>Float32Array</code> of equal
+      lengths greater than zero and less than or equal to 4096 or an exception will be thrown.
+      These parameters specify the Fourier coefficients of a
+      <a href="http://en.wikipedia.org/wiki/Fourier_series">Fourier series</a> representing the partials of a periodic waveform.
+      The created PeriodicWave will be used with an <a href="#OscillatorNode-section"><code>OscillatorNode</code></a>
+      and will represent a <em>normalized</em> time-domain waveform having maximum absolute peak value of 1.
+      Another way of saying this is that the generated waveform of an <a href="#OscillatorNode-section"><code>OscillatorNode</code></a>
+      will have maximum peak value at 0dBFS. Conveniently, this corresponds to the full-range of the signal values used by the Web Audio API.
+      Because the PeriodicWave will be normalized on creation, the <code>real</code> and <code>imag</code> parameters
+      represent <em>relative</em> values.
+      </p>
+      <p>The <dfn id="dfn-real">real</dfn> parameter represents an array of <code>cosine</code> terms (traditionally the A terms).
+      In audio terminology, the first element (index 0) is the DC-offset of the periodic waveform and is usually set to zero.
+      The second element (index 1) represents the fundamental frequency.  The third element represents the first overtone, and so on.</p>
+      <p>The <dfn id="dfn-imag">imag</dfn> parameter represents an array of <code>sine</code> terms (traditionally the B terms).
+      The first element (index 0) should be set to zero (and will be ignored) since this term does not exist in the Fourier series.
+      The second element (index 1) represents the fundamental frequency.  The third element represents the first overtone, and so on.</p>
+    </dd>
+</dl>
+</div>
+</div>
+
+<h3 id="lifetime-AudioContext">4.1.3. Lifetime</h3>
+<p class="norm">This section is informative.</p>
+
+<p>
+Once created, an <code>AudioContext</code> will continue to play sound until it has no more sound to play, or
+the page goes away.
+</p>
+
+<div id="OfflineAudioContext-section-section" class="section">
+<h2 id="OfflineAudioContext-section">4.1b. The OfflineAudioContext Interface</h2>
+<p>
+OfflineAudioContext is a particular type of AudioContext for rendering/mixing-down (potentially) faster than real-time.
+It does not render to the audio hardware, but instead renders as quickly as possible, calling a completion event handler
+with the result provided as an AudioBuffer.
+</p>
+
+
+<p>
+</p>
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">Web IDL</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="idl-code" id="offline-audio-context-idl">
+[Constructor(unsigned long numberOfChannels, unsigned long length, float sampleRate)]
+interface <dfn id="dfn-OfflineAudioContext">OfflineAudioContext</dfn> : AudioContext {
+
+    void startRendering();
+    
+    attribute EventHandler oncomplete;
+
+};
+</code></pre>
+</div>
+</div>
+
+
+<div id="attributes-OfflineAudioContext-section" class="section">
+<h3 id="attributes-OfflineAudioContext">4.1b.1. Attributes</h3>
+<dl>
+  <dt id="dfn-oncomplete"><code>oncomplete</code></dt>
+    <dd><p>An EventHandler of type <a href="#OfflineAudioCompletionEvent-section">OfflineAudioCompletionEvent</a>.</p>
+    </dd>
+</dl>
+</div>
+
+
+<div id="methodsandparams-OfflineAudioContext-section" class="section">
+<h3 id="methodsandparams-OfflineAudioContext">4.1b.2. Methods and Parameters</h3>
+<dl>
+  <dt id="dfn-startRendering">The <code>startRendering</code>
+  method</dt>
+    <dd><p>Given the current connections and scheduled changes, starts rendering audio.  The
+    <code>oncomplete</code> handler will be called once the rendering has finished.
+    This method must only be called one time or an exception will be thrown.</p>
+    </dd>
+</dl>
+</div>
+
+
+<div id="OfflineAudioCompletionEvent-section" class="section">
+<h2 id="OfflineAudioCompletionEvent">4.1c. The OfflineAudioCompletionEvent Interface</h2>
+
+<p>This is an <code>Event</code> object which is dispatched to <a
+href="#OfflineAudioContext-section"><code>OfflineAudioContext</code></a>. </p>
+
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">Web IDL</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="idl-code" id="offline-audio-completion-event-idl">
+
+interface <dfn id="dfn-OfflineAudioCompletionEvent">OfflineAudioCompletionEvent</dfn> : Event {
+
+    readonly attribute AudioBuffer renderedBuffer;
+
+};
+</code></pre>
+</div>
+</div>
+
+<div id="attributes-OfflineAudioCompletionEvent-section" class="section">
+<h3 id="attributes-OfflineAudioCompletionEvent">4.1c.1. Attributes</h3>
+<dl>
+  <dt id="dfn-renderedBuffer"><code>renderedBuffer</code></dt>
+    <dd><p>An AudioBuffer containing the rendered audio data once an OfflineAudioContext has finished rendering.
+    It will have a number of channels equal to the <code>numberOfChannels</code> parameter
+    of the OfflineAudioContext constructor.</p>
+    </dd>
+</dl>
+</div>
+</div>
+
+
+<div id="AudioNode-section-section" class="section">
+<h2 id="AudioNode-section">4.2. The AudioNode Interface</h2>
+
+<p>AudioNodes are the building blocks of an <a
+href="#AudioContext-section"><code>AudioContext</code></a>. This interface
+represents audio sources, the audio destination, and intermediate processing
+modules. These modules can be connected together to form <a
+href="#ModularRouting-section">processing graphs</a> for rendering audio to the
+audio hardware. Each node can have <dfn>inputs</dfn> and/or <dfn>outputs</dfn>.
+A <dfn>source node</dfn> has no inputs
+and a single output. An <a
+href="#AudioDestinationNode-section"><code>AudioDestinationNode</code></a> has
+one input and no outputs and represents the final destination to the audio
+hardware. Most processing nodes such as filters will have one input and one
+output.  Each type of <code>AudioNode</code> differs in the details of how it processes or synthesizes audio.  But, in general, an <code>AudioNode</code>
+will process its inputs (if it has any), and generate audio for its outputs (if it has any).
+ </p>
+ 
+<p>
+Each <dfn>output</dfn> has one or more <dfn>channels</dfn>.  The exact number of channels depends on the details of the specific AudioNode.
+</p>
+
+<p>
+An output may connect to one or more <code>AudioNode</code> inputs, thus <em>fan-out</em> is supported.  An input initially has no connections,
+but may be connected from one
+or more <code>AudioNode</code> outputs, thus <em>fan-in</em> is supported.  When the <code>connect()</code> method is called to connect
+an output of an AudioNode to an input of an AudioNode, we call that a <dfn>connection</dfn> to the input.
+</p>
+
+<p>
+Each AudioNode <dfn>input</dfn> has a specific number of channels at any given time.  This number can change depending on the <dfn>connection(s)</dfn>
+made to the input.  If the input has no connections then it has one channel which is silent.
+</p>
+
+<p>
+For each <dfn>input</dfn>, an <code>AudioNode</code> performs a mixing (usually an up-mixing) of all connections to that input.
+
+Please see <a href="#MixerGainStructure-section">Mixer Gain Structure</a> for more informative details, and the <a href="#UpMix-section">Channel up-mixing and down-mixing</a>
+  section for normative requirements.
+
+</p>
+
+<p>
+For performance reasons, practical implementations will need to use block processing, with each <code>AudioNode</code> processing a
+fixed number of sample-frames of size <em>block-size</em>.  In order to get uniform behavior across implementations, we will define this
+value explicitly.  <em>block-size</em> is defined to be 128 sample-frames which corresponds to roughly 3ms at a sample-rate of 44.1kHz.
+</p>
+
+<p>
+AudioNodes are <em>EventTarget</em>s, as described in <cite><a href="http://dom.spec.whatwg.org/">DOM</a></cite>
+<a href="#DOM">[DOM]</a>.  This means that it is possible to dispatch events to AudioNodes the same
+way that other EventTargets accept events.
+</p>
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">Web IDL</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="idl-code" id="audio-node-idl">
+
+enum <dfn>ChannelCountMode</dfn> {
+    "max",
+    "clamped-max",
+    "explicit"
+};
+
+enum <dfn>ChannelInterpretation</dfn> {
+    "speakers",
+    "discrete"
+};
+
+interface <dfn id="dfn-AudioNode">AudioNode</dfn> : EventTarget {
+
+    void connect(AudioNode destination, optional unsigned long output = 0, optional unsigned long input = 0);
+    void connect(AudioParam destination, optional unsigned long output = 0);
+    void disconnect(optional unsigned long output = 0);
+
+    readonly attribute AudioContext context;
+    readonly attribute unsigned long numberOfInputs;
+    readonly attribute unsigned long numberOfOutputs;
+
+    // Channel up-mixing and down-mixing rules for all inputs.
+    attribute unsigned long channelCount;
+    attribute ChannelCountMode channelCountMode;
+    attribute ChannelInterpretation channelInterpretation;
+
+};
+</code></pre>
+</div>
+</div>
+
+<div id="attributes-AudioNode-section" class="section">
+<h3 id="attributes-AudioNode">4.2.1. Attributes</h3>
+<dl>
+  <dt id="dfn-context"><code>context</code></dt>
+    <dd><p>The AudioContext which owns this AudioNode.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-numberOfInputs_2"><code>numberOfInputs</code></dt>
+    <dd><p>The number of inputs feeding into the AudioNode. For <dfn>source nodes</dfn>,
+    this will be 0.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-numberOfOutputs_2"><code>numberOfOutputs</code></dt>
+    <dd><p>The number of outputs coming out of the AudioNode. This will be 0
+      for an AudioDestinationNode.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-channelCount"><code>channelCount</code></dt>
+    <dd><p>The number of channels used when up-mixing and down-mixing connections to any inputs to the node.  The default value is 2
+       except for specific nodes where its value is specially determined.
+       This attribute has no effect for nodes with no inputs.
+       If this value is set to zero, the implementation MUST raise the
+       NOT_SUPPORTED_ERR exception.</p>
+      <p>See the <a href="#UpMix-section">Channel up-mixing and down-mixing</a>
+        section for more information on this attribute.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-channelCountMode"><code>channelCountMode</code></dt>
+    <dd><p>Determines how channels will be counted when up-mixing and down-mixing connections to any inputs to the node.
+      This attribute has no effect for nodes with no inputs.</p>
+      <p>See the <a href="#UpMix-section">Channel up-mixing and down-mixing</a>
+        section for more information on this attribute.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-channelInterpretation"><code>channelInterpretation</code></dt>
+    <dd><p>Determines how individual channels will be treated when up-mixing and down-mixing connections to any inputs to the node.
+    This attribute has no effect for nodes with no inputs.</p>
+      <p>See the <a href="#UpMix-section">Channel up-mixing and down-mixing</a>
+        section for more information on this attribute.</p>
+    </dd>
+</dl>
+</div>
+
+<div id="methodsandparams-AudioNode-section" class="section">
+<h3 id="methodsandparams-AudioNode">4.2.2. Methods and Parameters</h3>
+<dl>
+  <dt id="dfn-connect-AudioNode">The <code>connect</code> to AudioNode method</dt>
+    <dd><p>Connects the AudioNode to another AudioNode.</p>
+      <p>The <dfn id="dfn-destination_2">destination</dfn> parameter is the
+      AudioNode to connect to.</p>
+      <p>The <dfn id="dfn-output_2">output</dfn> parameter is an index
+      describing which output of the AudioNode from which to connect. An
+      out-of-bound value throws an exception.</p>
+      <p>The <dfn id="dfn-input_2">input</dfn> parameter is an index describing
+      which input of the destination AudioNode to connect to. An out-of-bound
+      value throws an exception. </p>
+      <p>It is possible to connect an AudioNode output to more than one input
+      with multiple calls to connect(). Thus, "fan-out" is supported. </p>
+      <p>
+      It is possible to connect an AudioNode to another AudioNode which creates a <em>cycle</em>.
+      In other words, an AudioNode may connect to another AudioNode, which in turn connects back
+      to the first AudioNode.  This is allowed only if there is at least one
+      <a class="dfnref" href="#DelayNode-section">DelayNode</a> in the <em>cycle</em> or an exception will
+      be thrown.
+      </p>
+      
+      <p>
+      There can only be one connection between a given output of one specific node and a given input of another specific node.
+      Multiple connections with the same termini are ignored.  For example: 
+      </p>
+
+      <pre>
+      nodeA.connect(nodeB);
+      nodeA.connect(nodeB);
+
+      will have the same effect as
+
+      nodeA.connect(nodeB);
+      </pre>
+      
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-connect-AudioParam">The <code>connect</code> to AudioParam method</dt>
+    <dd><p>Connects the AudioNode to an AudioParam, controlling the parameter
+      value with an audio-rate signal.
+      </p>
+
+      <p>The <dfn id="dfn-destination_3">destination</dfn> parameter is the
+      AudioParam to connect to.</p>
+      <p>The <dfn id="dfn-output_3-destination">output</dfn> parameter is an index
+      describing which output of the AudioNode from which to connect. An
+      out-of-bound value throws an exception.</p>      
+
+      <p>It is possible to connect an AudioNode output to more than one AudioParam
+      with multiple calls to connect(). Thus, "fan-out" is supported. </p>
+      <p>It is possible to connect more than one AudioNode output to a single AudioParam
+      with multiple calls to connect().  Thus, "fan-in" is supported. </p>
+      <p>An AudioParam will take the rendered audio data from any AudioNode output connected to it and <a href="#down-mix">convert it to mono</a> by down-mixing if it is not
+      already mono, then mix it together with other such outputs and finally will mix with the <em>intrinsic</em>
+      parameter value (the value the AudioParam would normally have without any audio connections), including any timeline changes
+      scheduled for the parameter. </p>
+
+      <p>
+      There can only be one connection between a given output of one specific node and a specific AudioParam.
+      Multiple connections with the same termini are ignored.  For example: 
+      </p>
+
+      <pre>
+      nodeA.connect(param);
+      nodeA.connect(param);
+
+      will have the same effect as
+
+      nodeA.connect(param);
+      </pre>
+
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-disconnect">The <code>disconnect</code> method</dt>
+    <dd><p>Disconnects an AudioNode's output.</p>
+      <p>The <dfn id="dfn-output_3-disconnect">output</dfn> parameter is an index
+      describing which output of the AudioNode to disconnect.  An out-of-bound
+      value throws an exception.</p>
+    </dd>
+</dl>
+</div>
+</div>
+
+<h3 id="lifetime-AudioNode">4.2.3. Lifetime</h3>
+
+<p class="norm">This section is informative.</p>
+
+<p>An implementation may choose any method to avoid unnecessary resource usage and unbounded memory growth of unused/finished
+nodes.  The following is a description to help guide the general expectation of how node lifetime would be managed.
+</p>
+
+<p>
+An <code>AudioNode</code> will live as long as there are any references to it.  There are several types of references:
+</p>
+
+<ol>
+<li>A <em>normal</em> JavaScript reference obeying normal garbage collection rules. </li>
+<li>A <em>playing</em> reference for both <code>AudioBufferSourceNodes</code> and <code>OscillatorNodes</code>.
+These nodes maintain a <em>playing</em>
+reference to themselves while they are currently playing.</li>
+<li>A <em>connection</em> reference which occurs if another <code>AudioNode</code> is connected to it.   </li>
+<li>A <em>tail-time</em> reference which an <code>AudioNode</code> maintains on itself as long as it has
+any internal processing state which has not yet been emitted.  For example, a <code>ConvolverNode</code> has
+a tail which continues to play even after receiving silent input (think about clapping your hands in a large concert
+  hall and continuing to hear the sound reverberate throughout the hall).  Some <code>AudioNodes</code> have this
+  property.  Please see details for specific nodes.</li>
+</ol>
+
+<p>
+Any <code>AudioNodes</code> which are connected in a cycle <em>and</em> are directly or indirectly connected to the
+<code>AudioDestinationNode</code> of the <code>AudioContext</code> will stay alive as long as the <code>AudioContext</code> is alive.
+</p>
+
+<p>
+When an <code>AudioNode</code> has no references it will be deleted.  But before it is deleted, it will disconnect itself
+from any other <code>AudioNodes</code> which it is connected to.  In this way it releases all connection references (3) it has to other nodes.
+</p>
+
+<p>
+Regardless of any of the above references, it can be assumed that the <code>AudioNode</code> will be deleted when its <code>AudioContext</code> is deleted.
+</p>
+
+
+<div id="AudioDestinationNode-section" class="section">
+<h2 id="AudioDestinationNode">4.4. The AudioDestinationNode Interface</h2>
+
+<p>This is an <a href="#AudioNode-section"><code>AudioNode</code></a>
+representing the final audio destination and is what the user will ultimately
+hear. It can often be considered as an audio output device which is connected to
+speakers. All rendered audio to be heard will be routed to this node, a
+"terminal" node in the AudioContext's routing graph. There is only a single
+AudioDestinationNode per AudioContext, provided through the
+<code>destination</code> attribute of <a
+href="#AudioContext-section"><code>AudioContext</code></a>. </p>
+<pre>
+      numberOfInputs  : 1
+      numberOfOutputs : 0
+
+      channelCount = 2;
+      channelCountMode = "explicit";
+      channelInterpretation = "speakers";
+</pre>
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">Web IDL</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="idl-code" id="audio-destination-node-idl">
+
+interface <dfn id="dfn-AudioDestinationNode">AudioDestinationNode</dfn> : AudioNode {
+
+    readonly attribute unsigned long maxChannelCount;
+
+};
+</code></pre>
+</div>
+</div>
+
+<div id="attributes-AudioDestinationNode-section" class="section">
+<h3 id="attributes-AudioDestinationNode">4.4.1. Attributes</h3>
+<dl>
+  <dt id="dfn-maxChannelCount"><code>maxChannelCount</code></dt>
+    <dd><p>The maximum number of channels that the <code>channelCount</code> attribute can be set to.
+    An <code>AudioDestinationNode</code> representing the audio hardware end-point (the normal case) can potentially output more than
+    2 channels of audio if the audio hardware is multi-channel.  <code>maxChannelCount</code> is the maximum number of channels that
+    this hardware is capable of supporting.  If this value is 0, then this indicates that <code>channelCount</code> may not be
+    changed.  This will be the case for an <code>AudioDestinationNode</code> in an <code>OfflineAudioContext</code> and also for
+    basic implementations with hardware support for stereo output only.</p>
+
+    <p><code>channelCount</code> defaults to 2 for a destination in a normal AudioContext, and may be set to any non-zero value less than or equal
+    to <code>maxChannelCount</code>.  An exception will be thrown if this value is not within the valid range.  Giving a concrete example, if
+    the audio hardware supports 8-channel output, then we may set <code>channelCount</code> to 8, and render 8-channels of output.
+    </p>
+    
+    <p>
+    For an AudioDestinationNode in an OfflineAudioContext, the <code>channelCount</code> is determined when the offline context is created and this value
+    may not be changed.
+    </p>
+    
+    </dd>
+</dl>
+
+</div>
+</div>
+
+<div id="AudioParam-section" class="section">
+<h2 id="AudioParam">4.5. The AudioParam Interface</h2>
+
+<p>AudioParam controls an individual aspect of an <a
+href="#AudioNode-section"><code>AudioNode</code></a>'s functioning, such as
+volume. The parameter can be set immediately to a particular value using the
+"value" attribute. Or, value changes can be scheduled to happen at
+very precise times (in the coordinate system of AudioContext.currentTime), for envelopes, volume fades, LFOs, filter sweeps, grain
+windows, etc. In this way, arbitrary timeline-based automation curves can be
+set on any AudioParam.  Additionally, audio signals from the outputs of <code>AudioNodes</code> can be connected
+to an <code>AudioParam</code>, summing with the <em>intrinsic</em> parameter value.
+</p>
+
+<p>
+Some synthesis and processing <code>AudioNodes</code> have <code>AudioParams</code> as attributes whose values must
+ be taken into account on a per-audio-sample basis.
+For other <code>AudioParams</code>, sample-accuracy is not important and the value changes can be sampled more coarsely.
+Each individual <code>AudioParam</code> will specify that it is either an <em>a-rate</em> parameter
+which means that its values must be taken into account on a per-audio-sample basis, or it is a <em>k-rate</em> parameter.  
+</p>
+
+<p>
+Implementations must use block processing, with each <code>AudioNode</code>
+processing 128 sample-frames in each block.
+</p>
+
+<p>
+For each 128 sample-frame block, the value of a <em>k-rate</em> parameter must
+be sampled at the time of the very first sample-frame, and that value must be
+used for the entire block.  <em>a-rate</em> parameters must be sampled for each
+sample-frame of the block.
+</p>
+
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">Web IDL</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="idl-code" id="audio-param-idl">
+
+interface <dfn id="dfn-AudioParam">AudioParam</dfn> {
+
+    attribute float value;
+    readonly attribute float defaultValue;
+
+    <span class="comment">// Parameter automation. </span>
+    void setValueAtTime(float value, double startTime);
+    void linearRampToValueAtTime(float value, double endTime);
+    void exponentialRampToValueAtTime(float value, double endTime);
+
+    <span class="comment">// Exponentially approach the target value with a rate having the given time constant. </span>
+    void setTargetAtTime(float target, double startTime, double timeConstant);
+
+    <span class="comment">// Sets an array of arbitrary parameter values starting at time for the given duration. </span>
+    <span class="comment">// The number of values will be scaled to fit into the desired duration. </span>
+    void setValueCurveAtTime(Float32Array values, double startTime, double duration);
+
+    <span class="comment">// Cancels all scheduled parameter changes with times greater than or equal to startTime. </span>
+    void cancelScheduledValues(double startTime);
+
+};
+</code></pre>
+</div>
+</div>
+
+
+
+<div id="attributes-AudioParam-section" class="section">
+<h3 id="attributes-AudioParam">4.5.1. Attributes</h3>
+
+<dl>
+  <dt id="dfn-value"><code>value</code></dt>
+    <dd><p>The parameter's floating-point value.  This attribute is initialized to the
+      <code>defaultValue</code>.  If a value is set during a time when there are any automation events scheduled then
+      it will be ignored and no exception will be thrown.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-defaultValue"><code>defaultValue</code></dt>
+    <dd><p>Initial value for the <code>value</code> attribute.</p>
+    </dd>
+</dl>
+</div>
+
+<div id="methodsandparams-AudioParam-section" class="section">
+<h3 id="methodsandparams-AudioParam">4.5.2. Methods and Parameters</h3>
+
+<p>
+An <code>AudioParam</code> maintains a time-ordered event list which is initially empty.  The times are in
+the time coordinate system of AudioContext.currentTime.  The events define a mapping from time to value.  The following methods
+can change the event list by adding a new event into the list of a type specific to the method.  Each event
+has a time associated with it, and the events will always be kept in time-order in the list.  These
+methods will be called <em>automation</em> methods:</p>
+
+<ul>
+<li>setValueAtTime() - <em>SetValue</em></li>
+<li>linearRampToValueAtTime() - <em>LinearRampToValue</em></li>
+<li>exponentialRampToValueAtTime() - <em>ExponentialRampToValue</em></li>
+<li>setTargetAtTime() - <em>SetTarget</em></li>
+<li>setValueCurveAtTime() - <em>SetValueCurve</em></li>
+</ul>
+
+<p>
+The following rules will apply when calling these methods:
+</p>
+<ul>
+<li>If one of these events is added at a time where there is already an event of the exact same type, then the new event will replace the old
+one.</li>
+<li>If one of these events is added at a time where there is already one or more events of a different type, then it will be
+placed in the list after them, but before events whose times are after the event. </li>
+<li>If setValueCurveAtTime() is called for time T and duration D and there are any events having a time greater than T, but less than
+T + D, then an exception will be thrown.  In other words, it's not ok to schedule a value curve during a time period containing other events.</li>
+<li>Similarly an exception will be thrown if any <em>automation</em> method is called at a time which is inside of the time interval
+of a <em>SetValueCurve</em> event at time T and duration D.</li>
+</ul>
+<p>
+</p>
+
+<dl>
+  <dt id="dfn-setValueAtTime">The <code>setValueAtTime</code> method</dt>
+    <dd><p>Schedules a parameter value change at the given time.</p>
+      <p>The <dfn id="dfn-value_2">value</dfn> parameter is the value the
+      parameter will change to at the given time.</p>
+      <p>The <dfn id="dfn-startTime_2">startTime</dfn> parameter is the time in the same time coordinate system as AudioContext.currentTime.</p>
+      <p>
+      If there are no more events after this <em>SetValue</em> event, then for t >= startTime,  v(t) = value.  In other words, the value will remain constant.
+      </p>
+      <p>
+      If the next event (having time T1) after this <em>SetValue</em> event is not of type <em>LinearRampToValue</em> or <em>ExponentialRampToValue</em>,
+      then, for t: startTime &lt;= t &lt; T1,  v(t) = value.
+      In other words, the value will remain constant during this time interval, allowing the creation of "step" functions.
+      </p>
+      <p>
+      If the next event after this <em>SetValue</em> event is of type <em>LinearRampToValue</em> or <em>ExponentialRampToValue</em> then please
+      see details below.
+      </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-linearRampToValueAtTime">The <code>linearRampToValueAtTime</code>
+  method</dt>
+    <dd><p>Schedules a linear continuous change in parameter value from the
+      previous scheduled parameter value to the given value.</p>
+      <p>The <dfn id="dfn-value_3">value</dfn> parameter is the value the
+      parameter will linearly ramp to at the given time.</p>
+      <p>The <dfn id="dfn-endTime_3">endTime</dfn> parameter is the time in the same time coordinate system as AudioContext.currentTime.</p>
+
+      <p>
+      The value during the time interval T0 &lt;= t &lt; T1 (where T0 is the time of the previous event and T1 is the endTime parameter passed into this method)
+      will be calculated as:
+      </p>
+      <pre>
+      v(t) = V0 + (V1 - V0) * ((t - T0) / (T1 - T0))
+      </pre>
+      <p>
+      Where V0 is the value at the time T0 and V1 is the value parameter passed into this method.
+      </p>
+      <p>
+      If there are no more events after this LinearRampToValue event then for t >= T1, v(t) = V1
+      </p>
+
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-exponentialRampToValueAtTime">The
+  <code>exponentialRampToValueAtTime</code> method</dt>
+    <dd><p>Schedules an exponential continuous change in parameter value from
+      the previous scheduled parameter value to the given value. Parameters
+      representing filter frequencies and playback rate are best changed
+      exponentially because of the way humans perceive sound. </p>
+      <p>The <dfn id="dfn-value_4">value</dfn> parameter is the value the
+      parameter will exponentially ramp to at the given time.  An exception will be thrown if this value is less than
+      or equal to 0, or if the value at the time of the previous event is less than or equal to 0.</p>
+      <p>The <dfn id="dfn-endTime_4">endTime</dfn> parameter is the time in the same time coordinate system as AudioContext.currentTime.</p>
+      <p>
+      The value during the time interval T0 &lt;= t &lt; T1 (where T0 is the time of the previous event and T1 is the endTime parameter passed into this method)
+      will be calculated as:
+      </p>
+      <pre>
+      v(t) = V0 * (V1 / V0) ^ ((t - T0) / (T1 - T0))
+      </pre>
+      <p>
+      Where V0 is the value at the time T0 and V1 is the value parameter passed into this method.
+      </p>
+      <p>
+      If there are no more events after this ExponentialRampToValue event then for t >= T1, v(t) = V1
+      </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-setTargetAtTime">The <code>setTargetAtTime</code>
+  method</dt>
+    <dd><p>Start exponentially approaching the target value at the given time
+      with a rate having the given time constant. Among other uses, this is
+      useful for implementing the "decay" and "release" portions of an ADSR
+      envelope. Please note that the parameter value does not immediately
+      change to the target value at the given time, but instead gradually
+      changes to the target value.</p>
+      <p>The <dfn id="dfn-target">target</dfn> parameter is the value
+      the parameter will <em>start</em> changing to at the given time.</p>
+      <p>The <dfn id="dfn-startTime">startTime</dfn> parameter is the time in the same time coordinate system as AudioContext.currentTime.</p>
+      <p>The <dfn id="dfn-timeConstant">timeConstant</dfn> parameter is the
+      time-constant value of first-order filter (exponential) approach to the
+      target value. The larger this value is, the slower the transition will
+      be.</p>
+      <p>
+      More precisely, <em>timeConstant</em> is the time it takes a first-order linear continuous time-invariant system
+      to reach the value 1 - 1/e (around 63.2%) given a step input response (transition from 0 to 1 value).
+      </p>
+      <p>
+      During the time interval: <em>T0</em> &lt;= t &lt; <em>T1</em>, where T0 is the <em>startTime</em> parameter and T1 represents the time of the event following this
+      event (or <em>infinity</em> if there are no following events):
+      </p>
+      <pre>
+      v(t) = V1 + (V0 - V1) * exp(-(t - T0) / <em>timeConstant</em>)
+      </pre>
+      <p>
+      Where V0 is the initial value (the .value attribute) at T0 (the <em>startTime</em> parameter) and V1 is equal to the <em>target</em>
+      parameter.
+      </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-setValueCurveAtTime">The <code>setValueCurveAtTime</code>
+  method</dt>
+    <dd><p>Sets an array of arbitrary parameter values starting at the given
+      time for the given duration. The number of values will be scaled to fit
+      into the desired duration. </p>
+      <p>The <dfn id="dfn-values">values</dfn> parameter is a Float32Array
+      representing a parameter value curve. These values will apply starting at
+      the given time and lasting for the given duration. </p>
+      <p>The <dfn id="dfn-startTime_5">startTime</dfn> parameter is the time in the same time coordinate system as AudioContext.currentTime.</p>
+      <p>The <dfn id="dfn-duration_5">duration</dfn> parameter is the
+      amount of time in seconds (after the <em>startTime</em> parameter) where values will be calculated according to the <em>values</em> parameter.</p>
+      <p>
+      During the time interval: <em>startTime</em> &lt;= t &lt; <em>startTime</em> + <em>duration</em>, values will be calculated:
+      </p>
+      <pre>
+      v(t) = values[N * (t - startTime) / duration], where <em>N</em> is the length of the <em>values</em> array.
+      </pre>
+      <p>
+      After the end of the curve time interval (t >= <em>startTime</em> + <em>duration</em>), the value will remain constant at the final curve value,
+      until there is another automation event (if any).
+      </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-cancelScheduledValues">The <code>cancelScheduledValues</code>
+  method</dt>
+    <dd><p>Cancels all scheduled parameter changes with times greater than or
+      equal to startTime.</p>
+      <p>The <dfn>startTime</dfn> parameter is the starting
+      time at and after which any previously scheduled parameter changes will
+      be cancelled.  It is a time in the same time coordinate system as AudioContext.currentTime.</p>
+    </dd>
+</dl>
+</div>
+</div>
+
+
+
+<div id="computedValue-AudioParam-section" class="section">
+<h3>4.5.3. Computation of Value</h3>
+
+<p>
+<dfn>computedValue</dfn> is the final value controlling the audio DSP and is computed by the audio rendering thread during each rendering time quantum.
+  It must be internally computed as follows:
+</p>
+
+<ol>
+<li>An <em>intrinsic</em> parameter value will be calculated at each time, which is either the value set directly to the .value attribute,
+or, if there are any scheduled parameter changes (automation events) with times before or at this time,
+the value as calculated from these events.  If the .value attribute
+is set after any automation events have been scheduled, then these events will be removed.  When read, the .value attribute
+always returns the <em>intrinsic</em> value for the current time.  If automation events are removed from a given time range, then the
+<em>intrinsic</em> value will remain unchanged and stay at its previous value until either the .value attribute is directly set, or automation events are added
+for the time range.
+</li>
+
+<li>
+An AudioParam will take the rendered audio data from any AudioNode output connected to it and <a href="#down-mix">convert it to mono</a> by down-mixing if it is not
+already mono, then mix it together with other such outputs.  If there are no AudioNodes connected to it, then this value is 0, having no
+effect on the <em>computedValue</em>.
+</li>
+
+<li>
+The <em>computedValue</em> is the sum of the <em>intrinsic</em> value and the value calculated from (2).
+</li>
+
+</ol>
+
+</div>
+
+
+<div id="example1-AudioParam-section" class="section">
+<h3 id="example1-AudioParam">4.5.4. AudioParam Automation Example</h3>
+
+
+
+<div class="example">
+
+<div class="exampleHeader">
+Example</div>
+<img alt="AudioParam automation" src="images/audioparam-automation1.png" /> 
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">ECMAScript</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="es-code"> 
+var t0 = 0;
+var t1 = 0.1;
+var t2 = 0.2;
+var t3 = 0.3;
+var t4 = 0.4;
+var t5 = 0.6;
+var t6 = 0.7;
+var t7 = 1.0;
+
+var curveLength = 44100;
+var curve = new Float32Array(curveLength);
+for (var i = 0; i &lt; curveLength; ++i)
+    curve[i] = Math.sin(Math.PI * i / curveLength);
+
+param.setValueAtTime(0.2, t0);
+param.setValueAtTime(0.3, t1);
+param.setValueAtTime(0.4, t2);
+param.linearRampToValueAtTime(1, t3);
+param.linearRampToValueAtTime(0.15, t4);
+param.exponentialRampToValueAtTime(0.75, t5);
+param.exponentialRampToValueAtTime(0.05, t6);
+param.setValueCurveAtTime(curve, t6, t7 - t6);
+</code></pre>
+</div>
+</div>
+</div>
+</div>
+
+<div id="GainNode-section" class="section">
+<h2 id="GainNode">4.7. The GainNode Interface</h2>
+
+<p>Changing the gain of an audio signal is a fundamental operation in audio
+applications.  The <code>GainNode</code> is one of the building blocks for creating <a
+href="#MixerGainStructure-section">mixers</a>.
+This interface is an AudioNode with a single input and single
+output: </p>
+<pre>
+    numberOfInputs  : 1
+    numberOfOutputs : 1
+
+    channelCountMode = "max";
+    channelInterpretation = "speakers";
+</pre>
+
+<p>It multiplies the input audio signal by the (possibly time-varying) <code>gain</code> attribute, copying the result to the output.
+ By default, it will take the input and pass it through to the output unchanged, which represents a constant gain change
+ of 1. 
+</p>
+
+<p>
+As with other <code>AudioParams</code>, the <code>gain</code> parameter represents a mapping from time
+(in the coordinate system of AudioContext.currentTime) to floating-point value.
+
+Every PCM audio sample in the input is multiplied by the <code>gain</code> parameter's value for the specific time
+corresponding to that audio sample.  This multiplied value represents the PCM audio sample for the output.
+</p>
+
+<p>
+The number of channels of the output will always equal the number of channels of the input, with each channel
+of the input being multiplied by the <code>gain</code> values and being copied into the corresponding channel
+of the output.
+</p>
+
+<p>
+ The implementation must make
+gain changes to the audio stream smoothly, without introducing noticeable
+clicks or glitches. This process is called "de-zippering". </p>
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">Web IDL</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="idl-code" id="gain-node-idl">
+
+interface <dfn id="dfn-GainNode">GainNode</dfn> : AudioNode {
+
+    readonly attribute AudioParam gain;
+
+};
+</code></pre>
+</div>
+</div>
+
+<div id="attributes-GainNode-section" class="section">
+<h3 id="attributes-GainNode">4.7.1. Attributes</h3>
+<dl>
+  <dt id="dfn-gain"><code>gain</code></dt>
+    <dd><p>Represents the amount of gain to apply. Its
+      default <code>value</code> is 1 (no gain change). The nominal <code>minValue</code> is 0, but may be
+      set negative for phase inversion.  The nominal <code>maxValue</code> is 1, but higher values are allowed (no
+      exception thrown). This parameter is <em>a-rate</em>.</p>
+    </dd>
+</dl>
+</div>
+</div>
+
+<div id="DelayNode-section" class="section">
+<h2 id="DelayNode">4.8. The DelayNode Interface</h2>
+
+<p>A delay-line is a fundamental building block in audio applications. This
+interface is an AudioNode with a single input and single output: </p>
+<pre>
+    numberOfInputs  : 1
+    numberOfOutputs : 1
+
+    channelCountMode = "max";
+    channelInterpretation = "speakers";
+</pre>
+
+<p>
+The number of channels of the output always equals the number of channels of the input.
+</p>
+
+<p>It delays the incoming audio signal by a certain amount. The default
+amount is 0 seconds (no delay). When the delay time is changed, the
+implementation must make the transition smoothly, without introducing
+noticeable clicks or glitches to the audio stream. </p>
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">Web IDL</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="idl-code" id="delay-node-idl">
+
+interface <dfn id="dfn-DelayNode">DelayNode</dfn> : AudioNode {
+
+    readonly attribute AudioParam delayTime;
+
+};
+</code></pre>
+</div>
+</div>
+
+<div id="attributes-GainNode-section_2" class="section">
+<h3 id="attributes-GainNode_2">4.8.1. Attributes</h3>
+<dl>
+  <dt id="dfn-delayTime_2"><code>delayTime</code></dt>
+    <dd><p>An AudioParam object representing the amount of delay (in seconds)
+      to apply. The default value (<code>delayTime.value</code>) is 0 (no
+      delay). The minimum value is 0 and the maximum value is determined by the <em>maxDelayTime</em>
+      argument to the <code>AudioContext</code> method <code>createDelay</code>.  This parameter is <em>a-rate</em>.</p>
+    </dd>
+</dl>
+</div>
+</div>
+
+<div id="AudioBuffer-section" class="section">
+<h2 id="AudioBuffer">4.9. The AudioBuffer Interface</h2>
+
+<p>This interface represents a memory-resident audio asset (for one-shot sounds
+and other short audio clips). Its format is non-interleaved IEEE 32-bit linear PCM with a
+nominal range of -1 -&gt; +1. It can contain one or more channels.  Typically, it would be expected that the length
+of the PCM data would be fairly short (usually somewhat less than a minute).
+For longer sounds, such as music soundtracks, streaming should be used with the
+<code>audio</code> element and <code>MediaElementAudioSourceNode</code>. </p>
+
+<p>
+An AudioBuffer may be used by one or more AudioContexts.
+</p>
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">Web IDL</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="idl-code" id="audio-buffer-idl">
+
+interface <dfn id="dfn-AudioBuffer">AudioBuffer</dfn> {
+
+    readonly attribute float sampleRate;
+    readonly attribute long length;
+
+    <span class="comment">// in seconds </span>
+    readonly attribute double duration;
+
+    readonly attribute long numberOfChannels;
+
+    Float32Array getChannelData(unsigned long channel);
+
+};
+</code></pre>
+</div>
+</div>
+
+<div id="attributes-AudioBuffer-section" class="section">
+<h3 id="attributes-AudioBuffer">4.9.1. Attributes</h3>
+<dl>
+  <dt id="dfn-sampleRate_AudioBuffer"><code>sampleRate</code></dt>
+    <dd><p>The sample-rate for the PCM audio data in samples per second.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-length_AudioBuffer"><code>length</code></dt>
+    <dd><p>Length of the PCM audio data in sample-frames.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-duration_AudioBuffer"><code>duration</code></dt>
+    <dd><p>Duration of the PCM audio data in seconds.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-numberOfChannels_AudioBuffer"><code>numberOfChannels</code></dt>
+    <dd><p>The number of discrete audio channels.</p>
+    </dd>
+</dl>
+</div>
+
+<div id="methodsandparams-AudioBuffer-section" class="section">
+<h3 id="methodsandparams-AudioBuffer">4.9.2. Methods and Parameters</h3>
+<dl>
+  <dt id="dfn-getChannelData">The <code>getChannelData</code> method</dt>
+    <dd><p>Returns the <code>Float32Array</code> representing the PCM audio data for the specific channel.</p>
+      <p>The <dfn id="dfn-channel">channel</dfn> parameter is an index
+      representing the particular channel to get data for.  An index value of 0 represents
+      the first channel.  This  index value MUST be less than <code>numberOfChannels</code>
+      or an exception will be thrown.</p>
+    </dd>
+</dl>
+</div>
+</div>
+
+<div id="AudioBufferSourceNode-section" class="section">
+<h2 id="AudioBufferSourceNode">4.10. The AudioBufferSourceNode Interface</h2>
+
+<p>This interface represents an audio source from an in-memory audio asset in
+an <code>AudioBuffer</code>. It is useful for playing short audio assets
+which require a high degree of scheduling flexibility (can playback in
+rhythmically perfect ways). The start() method is used to schedule when 
+sound playback will happen.  The playback will stop automatically when
+the buffer's audio data has been completely
+played (if the <code>loop</code> attribute is false), or when the stop()
+method has been called and the specified time has been reached. Please see more
+details in the start() and stop() description.  start() and stop() may not be issued
+multiple times for a given
+AudioBufferSourceNode. </p>
+<pre>    numberOfInputs  : 0
+    numberOfOutputs : 1
+    </pre>
+
+<p>
+The number of channels of the output always equals the number of channels of the AudioBuffer
+assigned to the .buffer attribute, or is one channel of silence if .buffer is NULL.
+</p>
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">Web IDL</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="idl-code" id="audio-buffer-source-node-idl">
+
+interface <dfn id="dfn-AudioBufferSourceNode">AudioBufferSourceNode</dfn> : AudioNode {
+
+    attribute AudioBuffer? buffer;
+
+    readonly attribute AudioParam playbackRate;
+
+    attribute boolean loop;
+    attribute double loopStart;
+    attribute double loopEnd;
+
+    void start(optional double when = 0, optional double offset = 0, optional double duration);
+    void stop(optional double when = 0);
+
+    attribute EventHandler onended;
+
+};
+</code></pre>
+</div>
+</div>
+
+<div id="attributes-AudioBufferSourceNode-section" class="section">
+<h3 id="attributes-AudioBufferSourceNode">4.10.1. Attributes</h3>
+<dl>
+  <dt id="dfn-buffer_AudioBufferSourceNode"><code>buffer</code></dt>
+    <dd><p>Represents the audio asset to be played. </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-playbackRate_AudioBufferSourceNode"><code>playbackRate</code></dt>
+    <dd><p>The speed at which to render the audio stream. The default
+      playbackRate.value is 1.  This parameter is <em>a-rate</em>.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-loop_AudioBufferSourceNode"><code>loop</code></dt>
+    <dd><p>Indicates if the audio data should play in a loop.  The default value is false. </p>
+    </dd>
+</dl>
+
+<dl>
+  <dt id="dfn-loopStart_AudioBufferSourceNode"><code>loopStart</code></dt>
+    <dd><p>An optional value in seconds where looping should begin if the <code>loop</code> attribute is true.
+    Its default value is 0, and it may usefully be set to any value between 0 and the duration of the buffer.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-loopEnd_AudioBufferSourceNode"><code>loopEnd</code></dt>
+    <dd><p>An optional value in seconds where looping should end if the <code>loop</code> attribute is true.
+    Its default value is 0, and it may usefully be set to any value between 0 and the duration of the buffer.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-onended_AudioBufferSourceNode"><code>onended</code></dt>
+    <dd><p>A property used to set the <code>EventHandler</code> (described in <cite><a
+      href="http://www.whatwg.org/specs/web-apps/current-work/#eventhandler">HTML</a></cite>)
+      for the ended event that is dispatched to <a
+      href="#AudioBufferSourceNode-section"><code>AudioBufferSourceNode</code></a>
+      node types.  When the playback of the buffer for an <code>AudioBufferSourceNode</code>
+      is finished, an event of type <code>Event</code> (described in <cite><a
+      href="http://www.whatwg.org/specs/web-apps/current-work/#event">HTML</a></cite>)
+      will be dispatched to the event handler. </p>
+    </dd>
+</dl>
+
+
+</div>
+</div>
+
+<div id="methodsandparams-AudioBufferSourceNode-section" class="section">
+<h3 id="methodsandparams-AudioBufferSourceNode">4.10.2. Methods and
+Parameters</h3>
+<dl>
+  <dt id="dfn-start">The <code>start</code> method</dt>
+    <dd><p>Schedules a sound to playback at an exact time.</p>
+      <p>The <dfn id="dfn-when">when</dfn> parameter describes at what time (in
+      seconds) the sound should start playing. It is in the same
+      time coordinate system as AudioContext.currentTime. If 0 is passed in for
+      this value or if the value is less than <b>currentTime</b>, then the
+      sound will start playing immediately.  <code>start</code> may only be called one time
+      and must be called before <code>stop</code> is called or an exception will be thrown.</p>
+      <p>The <dfn id="dfn-offset">offset</dfn> parameter describes
+      the offset time in the buffer (in seconds) where playback will begin. If 0 is passed
+      in for this value, then playback will start from the beginning of the buffer.</p>
+      <p>The <dfn id="dfn-duration">duration</dfn> parameter
+      describes the duration of the portion (in seconds) to be played.  If this parameter is not passed,
+      the duration will be equal to the total duration of the AudioBuffer minus the <code>offset</code> parameter.
+      Thus if neither <code>offset</code> nor <code>duration</code> are specified then the implied duration is
+      the total duration of the AudioBuffer.
+      </p>
+
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-stop">The <code>stop</code> method</dt>
+    <dd><p>Schedules a sound to stop playback at an exact time.</p>
+      <p>The <dfn id="dfn-when_AudioBufferSourceNode_2">when</dfn> parameter
+      describes at what time (in seconds) the sound should stop playing.
+      It is in the same time coordinate system as AudioContext.currentTime.
+      If 0 is passed in for this value or if the value is less than
+      <b>currentTime</b>, then the sound will stop playing immediately.
+      <code>stop</code> must only be called one time and only after a call to <code>start</code>,
+      or an exception will be thrown.</p>
+    </dd>
+</dl>
+</div>
+
+<div id="looping-AudioBufferSourceNode-section" class="section">
+<h3 id="looping-AudioBufferSourceNode">4.10.3. Looping</h3>
+<p>
+If the <code>loop</code> attribute is true when <code>start()</code> is called, then playback will continue indefinitely
+until <code>stop()</code> is called and the stop time is reached.  We'll call this "loop" mode.  Playback always starts at the point in the buffer indicated
+by the <code>offset</code> argument of <code>start()</code>, and in <em>loop</em> mode will continue playing until it reaches the <em>actualLoopEnd</em> position
+in the buffer (or the end of the buffer), at which point it will wrap back around to the <em>actualLoopStart</em> position in the buffer, and continue
+playing according to this pattern.
+</p>
+
+<p>
+In <em>loop</em> mode then the <em>actual</em> loop points are calculated as follows from the <code>loopStart</code> and <code>loopEnd</code> attributes:
+</p>
+
+<blockquote>
+<pre>
+    if ((loopStart || loopEnd) &amp;&amp; loopStart >= 0 &amp;&amp; loopEnd > 0 &amp;&amp; loopStart &lt; loopEnd) {
+        actualLoopStart = loopStart;
+        actualLoopEnd = min(loopEnd, buffer.length);
+    } else {
+        actualLoopStart = 0;
+        actualLoopEnd = buffer.length;
+    }
+</pre>
+</blockquote>
+
+<p>
+Note that the default values for <code>loopStart</code> and <code>loopEnd</code> are both 0, which indicates that looping should occur from the very start
+to the very end of the buffer.
+</p>
+
+<p>
+Please note that as a low-level implementation detail, the AudioBuffer is at a specific sample-rate (usually the same as the AudioContext sample-rate), and
+that the loop times (in seconds) must be converted to the appropriate sample-frame positions in the buffer according to this sample-rate.
+</p>
+
+</div>
+
+<div id="MediaElementAudioSourceNode-section" class="section">
+<h2 id="MediaElementAudioSourceNode">4.11. The MediaElementAudioSourceNode
+Interface</h2>
+
+<p>This interface represents an audio source from an <code>audio</code> or
+<code>video</code> element. </p>
+<pre>    numberOfInputs  : 0
+    numberOfOutputs : 1
+    </pre>
+
+<p>
+The number of channels of the output corresponds to the number of channels of the media referenced by the HTMLMediaElement.
+Thus, changes to the media element's .src attribute can change the number of channels output by this node.
+If the .src attribute is not set, then the number of channels output will be one silent channel.
+</p>
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">Web IDL</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="idl-code" id="media-element-audio-source-node-idl">
+
+interface <dfn id="dfn-MediaElementAudioSourceNode">MediaElementAudioSourceNode</dfn> : AudioNode {
+
+};
+</code></pre>
+</div>
+</div>
+</div>
+
+<p>A MediaElementAudioSourceNode
+is created given an HTMLMediaElement using the AudioContext <a href="#dfn-createMediaElementSource">createMediaElementSource()</a> method. </p>
+
+<p>
+The number of channels of the single output equals the number of channels of the audio referenced by
+the HTMLMediaElement passed in as the argument to createMediaElementSource(), or is 1 if the HTMLMediaElement
+has no audio.
+</p>
+
+<p>
+The HTMLMediaElement must behave in an identical fashion after the MediaElementAudioSourceNode has
+been created, <em>except</em> that the rendered audio will no longer be heard directly, but instead will be heard
+as a consequence of the MediaElementAudioSourceNode being connected through the routing graph.  Thus pausing, seeking,
+volume, <code>.src</code> attribute changes, and other aspects of the HTMLMediaElement must behave as they normally would
+if <em>not</em> used with a MediaElementAudioSourceNode.
+</p>
+
+<div class="example">
+
+<div class="exampleHeader">
+Example</div>
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">ECMAScript</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="es-code"> 
+var mediaElement = document.getElementById('mediaElementID');
+var sourceNode = context.createMediaElementSource(mediaElement);
+sourceNode.connect(filterNode);
+ </code></pre>
+</div>
+</div>
+</div>
+</div>
+
+
+<div id="ScriptProcessorNode-section" class="section">
+<h2 id="ScriptProcessorNode">4.12. The ScriptProcessorNode Interface</h2>
+
+<p>This interface is an AudioNode which can generate, process, or analyse audio
+directly using JavaScript. </p>
+<pre>
+    numberOfInputs  : 1
+    numberOfOutputs : 1
+
+    channelCount = numberOfInputChannels;
+    channelCountMode = "explicit";
+    channelInterpretation = "speakers";
+</pre>
+
+<p>The ScriptProcessorNode is constructed with a <code>bufferSize</code> which
+must be one of the following values: 256, 512, 1024, 2048, 4096, 8192, 16384.
+This value controls how frequently the <code>audioprocess</code> event
+is dispatched and how many sample-frames need to be processed each call.
+Lower numbers for <code>bufferSize</code> will result in a lower (better) <a
+href="#Latency-section">latency</a>. Higher numbers will be necessary to avoid
+audio breakup and <a href="#Glitching-section">glitches</a>.
+This value will be picked by the implementation if the bufferSize argument
+to <code>createScriptProcessor</code> is not passed in, or is set to 0.</p>
+
+<p><code>numberOfInputChannels</code> and <code>numberOfOutputChannels</code>
+determine the number of input and output channels. It is invalid for both
+<code>numberOfInputChannels</code> and <code>numberOfOutputChannels</code> to
+be zero. </p>
+<pre>    var node = context.createScriptProcessor(bufferSize, numberOfInputChannels, numberOfOutputChannels);
+    </pre>
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">Web IDL</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="idl-code" id="script-processor-node-idl">
+
+interface <dfn id="dfn-ScriptProcessorNode">ScriptProcessorNode</dfn> : AudioNode {
+
+    attribute EventHandler onaudioprocess;
+
+    readonly attribute long bufferSize;
+
+};
+</code></pre>
+</div>
+</div>
+
+<div id="attributes-ScriptProcessorNode-section" class="section">
+<h3 id="attributes-ScriptProcessorNode">4.12.1. Attributes</h3>
+<dl>
+  <dt id="dfn-onaudioprocess"><code>onaudioprocess</code></dt>
+    <dd><p>A property used to set the <code>EventHandler</code> (described in <cite><a
+      href="http://www.whatwg.org/specs/web-apps/current-work/#eventhandler">HTML</a></cite>)
+      for the audioprocess event that is dispatched to <a
+      href="#ScriptProcessorNode-section"><code>ScriptProcessorNode</code></a>
+      node types. An event of type <a
+      href="#AudioProcessingEvent-section"><code>AudioProcessingEvent</code></a>
+      will be dispatched to the event handler. </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-bufferSize_ScriptProcessorNode"><code>bufferSize</code></dt>
+    <dd><p>The size of the buffer (in sample-frames) which needs to be
+      processed each time <code>onaudioprocess</code> is called. Legal values
+      are (256, 512, 1024, 2048, 4096, 8192, 16384). </p>
+    </dd>
+</dl>
+</div>
+</div>
+
+<div id="AudioProcessingEvent-section" class="section">
+<h2 id="AudioProcessingEvent">4.13. The AudioProcessingEvent Interface</h2>
+
+<p>This is an <code>Event</code> object which is dispatched to <a
+href="#ScriptProcessorNode-section"><code>ScriptProcessorNode</code></a> nodes. </p>
+
+<p>The event handler processes audio from the input (if any) by accessing the
+audio data from the <code>inputBuffer</code> attribute. The audio data which is
+the result of the processing (or the synthesized data if there are no inputs)
+is then placed into the <code>outputBuffer</code>. </p>
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">Web IDL</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="idl-code" id="audio-processing-event-idl">
+
+interface <dfn id="dfn-AudioProcessingEvent">AudioProcessingEvent</dfn> : Event {
+
+    readonly attribute double playbackTime;
+    readonly attribute AudioBuffer inputBuffer;
+    readonly attribute AudioBuffer outputBuffer;
+
+};
+</code></pre>
+</div>
+</div>
+
+<div id="attributes-AudioProcessingEvent-section" class="section">
+<h3 id="attributes-AudioProcessingEvent">4.13.1. Attributes</h3>
+<dl>
+  <dt id="dfn-playbackTime"><code>playbackTime</code></dt>
+    <dd><p>The time when the audio will be played in the same time coordinate system as AudioContext.currentTime.
+      <code>playbackTime</code> allows for very tight synchronization between
+      processing directly in JavaScript with the other events in the context's
+      rendering graph. </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-inputBuffer"><code>inputBuffer</code></dt>
+    <dd><p>An AudioBuffer containing the input audio data.  It will have a number of channels equal to the <code>numberOfInputChannels</code> parameter
+    of the createScriptProcessor() method.  This AudioBuffer is only valid while in the scope of the <code>onaudioprocess</code>
+    function.  Its values will be meaningless outside of this scope.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-outputBuffer"><code>outputBuffer</code></dt>
+    <dd><p>An AudioBuffer where the output audio data should be written.  It will have a number of channels equal to the
+    <code>numberOfOutputChannels</code> parameter of the createScriptProcessor() method.
+      Script code within the scope of the <code>onaudioprocess</code> function is expected to modify the
+      <code>Float32Array</code> arrays representing channel data in this AudioBuffer.
+      Any script modifications to this AudioBuffer outside of this scope will not produce any audible effects.</p>
+    </dd>
+</dl>
+</div>
+</div>
+
+<div id="PannerNode-section" class="section">
+<h2 id="PannerNode">4.14. The PannerNode Interface</h2>
+
+<p>This interface represents a processing node which <a
+href="#Spatialization-section">positions / spatializes</a> an incoming audio
+stream in three-dimensional space. The spatialization is in relation to the <a
+href="#AudioContext-section"><code>AudioContext</code></a>'s <a
+href="#AudioListener-section"><code>AudioListener</code></a>
+(<code>listener</code> attribute). </p>
+
+<pre>
+    numberOfInputs  : 1
+    numberOfOutputs : 1
+
+    channelCount = 2;
+    channelCountMode = "clamped-max";
+    channelInterpretation = "speakers";
+</pre>
+
+<p>
+The audio stream from the input will be either mono or stereo, depending on the connection(s) to the input.
+</p>
+
+<p>
+The output of this node is hard-coded to stereo (2 channels) and <em>currently</em> cannot be configured.
+</p>
+
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">Web IDL</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="idl-code" id="panner-node-idl">
+
+enum <dfn>PanningModelType</dfn> {
+  "equalpower",
+  "HRTF"
+};
+
+enum <dfn>DistanceModelType</dfn> {
+  "linear",
+  "inverse",
+  "exponential"
+};
+
+interface <dfn id="dfn-PannerNode">PannerNode</dfn> : AudioNode {
+
+    <span class="comment">// Default for stereo is HRTF </span>
+    attribute PanningModelType panningModel;
+
+    <span class="comment">// Uses a 3D cartesian coordinate system </span>
+    void setPosition(double x, double y, double z);
+    void setOrientation(double x, double y, double z);
+    void setVelocity(double x, double y, double z);
+
+    <span class="comment">// Distance model and attributes </span>
+    attribute DistanceModelType distanceModel;
+    attribute double refDistance;
+    attribute double maxDistance;
+    attribute double rolloffFactor;
+
+    <span class="comment">// Directional sound cone </span>
+    attribute double coneInnerAngle;
+    attribute double coneOuterAngle;
+    attribute double coneOuterGain;
+
+};
+</code></pre>
+</div>
+</div>
+</div>
+
+<div id="attributes-PannerNode_attributes-section" class="section">
+<h3 id="attributes-PannerNode_attributes">4.14.2. Attributes</h3>
+<dl>
+  <dt id="dfn-panningModel"><code>panningModel</code></dt>
+    <dd><p>Determines which spatialization algorithm will be used to position
+      the audio in 3D space. The default is "HRTF". </p>
+
+      <dl>
+        <dt id="dfn-EQUALPOWER"><code>"equalpower"</code></dt>
+          <dd><p>A simple and efficient spatialization algorithm using equal-power
+            panning. </p>
+          </dd>
+      </dl>
+      <dl>
+        <dt id="dfn-HRTF"><code>"HRTF"</code></dt>
+          <dd><p>A higher quality spatialization algorithm using a convolution with
+            measured impulse responses from human subjects. This panning method
+            renders stereo output. </p>
+          </dd>
+      </dl>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-distanceModel"><code>distanceModel</code></dt>
+    <dd><p>Determines which algorithm will be used to reduce the volume of an
+      audio source as it moves away from the listener.  The default is "inverse".
+</p>
+
+<dl>
+  <dt id="dfn-LINEAR_DISTANCE"><code>"linear"</code></dt>
+    <dd><p>A linear distance model which calculates <em>distanceGain</em> according to: </p>
+    <pre>
+1 - rolloffFactor * (distance - refDistance) / (maxDistance - refDistance)
+    </pre>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-INVERSE_DISTANCE"><code>"inverse"</code></dt>
+  <dd><p>An inverse distance model which calculates <em>distanceGain</em> according to: </p>
+    <pre>
+refDistance / (refDistance + rolloffFactor * (distance - refDistance))
+    </pre>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-EXPONENTIAL_DISTANCE"><code>"exponential"</code></dt>
+  <dd><p>An exponential distance model which calculates <em>distanceGain</em> according to: </p>
+  <pre>
+pow(distance / refDistance, -rolloffFactor)
+  </pre>
+    </dd>
+</dl>
+
+
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-refDistance"><code>refDistance</code></dt>
+    <dd><p>A reference distance for reducing volume as the source moves further from
+      the listener.  The default value is 1. </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-maxDistance"><code>maxDistance</code></dt>
+    <dd><p>The maximum distance between source and listener, after which the
+      volume will not be reduced any further.  The default value is 10000. </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-rolloffFactor"><code>rolloffFactor</code></dt>
+    <dd><p>Describes how quickly the volume is reduced as the source moves away
+      from the listener.  The default value is 1. </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-coneInnerAngle"><code>coneInnerAngle</code></dt>
+    <dd><p>A parameter for directional audio sources, this is an angle, inside
+      of which there will be no volume reduction.  The default value is 360. </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-coneOuterAngle"><code>coneOuterAngle</code></dt>
+    <dd><p>A parameter for directional audio sources, this is an angle, outside
+      of which the volume will be reduced to a constant value of
+      <b>coneOuterGain</b>.  The default value is 360. </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-coneOuterGain"><code>coneOuterGain</code></dt>
+    <dd><p>A parameter for directional audio sources, this is the amount of
+      volume reduction outside of the <b>coneOuterAngle</b>.  The default value is 0. </p>
+    </dd>
+</dl>
+</div>
+
+<h3 id="Methods_and_Parameters">4.14.3. Methods and Parameters</h3>
+<dl>
+  <dt id="dfn-setPosition">The <code>setPosition</code> method</dt>
+    <dd><p>Sets the position of the audio source relative to the
+      <b>listener</b> attribute. A 3D cartesian coordinate system is used.</p>
+      <p>The <dfn id="dfn-x">x, y, z</dfn> parameters represent the coordinates
+      in 3D space. </p>
+      <p>The default value is (0,0,0)
+      </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-setOrientation">The <code>setOrientation</code> method</dt>
+    <dd><p>Describes which direction the audio source is pointing in the 3D
+      cartesian coordinate space. Depending on how directional the sound is
+      (controlled by the <b>cone</b> attributes), a sound pointing away from
+      the listener can be very quiet or completely silent.</p>
+      <p>The <dfn id="dfn-x_2">x, y, z</dfn> parameters represent a direction
+      vector in 3D space. </p>
+      <p>The default value is (1,0,0)
+      </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-setVelocity">The <code>setVelocity</code> method</dt>
+    <dd><p>Sets the velocity vector of the audio source. This vector controls
+      both the direction of travel and the speed in 3D space. This velocity
+      relative to the listener's velocity is used to determine how much doppler
+      shift (pitch change) to apply.  The units used for this vector are <em>meters / second</em>
+      and are independent of the units used for position and orientation vectors.</p>
+      <p>The <dfn id="dfn-x_3">x, y, z</dfn> parameters describe a direction
+      vector indicating direction of travel and intensity. </p>
+      <p>The default value is (0,0,0)
+      </p>
+    </dd>
+</dl>
+
+<div id="AudioListener-section" class="section">
+<h2 id="AudioListener">4.15. The AudioListener Interface</h2>
+
+<p>This interface represents the position and orientation of the person
+listening to the audio scene. All <a
+href="#PannerNode-section"><code>PannerNode</code></a> objects
+spatialize in relation to the AudioContext's <code>listener</code>. See <a
+href="#Spatialization-section">this</a> section for more details about
+spatialization. </p>
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">Web IDL</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="idl-code" id="audio-listener-idl">
+
+interface <dfn id="dfn-AudioListener">AudioListener</dfn> {
+
+    attribute double dopplerFactor;
+    attribute double speedOfSound;
+
+    <span class="comment">// Uses a 3D cartesian coordinate system </span>
+    void setPosition(double x, double y, double z);
+    void setOrientation(double x, double y, double z, double xUp, double yUp, double zUp);
+    void setVelocity(double x, double y, double z);
+
+};
+</code></pre>
+</div>
+</div>
+</div>
+
+<div id="attributes-AudioListener-section" class="section">
+<h3 id="attributes-AudioListener">4.15.1. Attributes</h3>
+<dl>
+  <dt id="dfn-dopplerFactor"><code>dopplerFactor</code></dt>
+    <dd><p>A constant used to determine the amount of pitch shift to use when
+      rendering a doppler effect.  The default value is 1. </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-speedOfSound"><code>speedOfSound</code></dt>
+    <dd><p>The speed of sound used for calculating doppler shift. The default
+      value is 343.3. </p>
+    </dd>
+</dl>
+</div>
+
+<h3 id="L15842">4.15.2. Methods and Parameters</h3>
+<dl>
+  <dt id="dfn-setPosition_2">The <code>setPosition</code> method</dt>
+    <dd><p>Sets the position of the listener in a 3D cartesian coordinate
+      space. <code>PannerNode</code> objects use this position relative to
+      individual audio sources for spatialization.</p>
+      <p>The <dfn id="dfn-x_AudioListener">x, y, z</dfn> parameters represent
+      the coordinates in 3D space. </p>
+      <p>The default value is (0,0,0)
+      </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-setOrientation_2">The <code>setOrientation</code> method</dt>
+    <dd><p>Describes which direction the listener is pointing in the 3D
+      cartesian coordinate space. Both a <b>front</b> vector and an <b>up</b>
+      vector are provided.  In simple human terms, the <b>front</b> vector represents which
+      direction the person's nose is pointing.  The <b>up</b> vector represents the
+      direction the top of a person's head is pointing.  These values are expected to
+      be linearly independent (at right angles to each other).  For normative requirements
+      of how these values are to be interpreted, see the
+      <a href="#Spatialization-section">spatialization section</a>.
+      </p>
+      <p>The <dfn id="dfn-x_setOrientation">x, y, z</dfn> parameters represent
+      a <b>front</b> direction vector in 3D space, with the default value being (0,0,-1) </p>
+      <p>The <dfn id="dfn-x_setOrientation_2">xUp, yUp, zUp</dfn> parameters
+      represent an <b>up</b> direction vector in 3D space, with the default value being (0,1,0)  </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-setVelocity_4">The <code>setVelocity</code> method</dt>
+    <dd><p>Sets the velocity vector of the listener. This vector controls both
+      the direction of travel and the speed in 3D space. This velocity relative to
+      an audio source's velocity is used to determine how much doppler shift
+      (pitch change) to apply.  The units used for this vector are <em>meters / second</em>
+        and are independent of the units used for position and orientation vectors.</p>
+      <p>The <dfn id="dfn-x_setVelocity_5">x, y, z</dfn> parameters describe a
+      direction vector indicating direction of travel and intensity. </p>
+      <p>The default value is (0,0,0)
+      </p>
+    </dd>
+</dl>
+
+<div id="ConvolverNode-section" class="section">
+<h2 id="ConvolverNode">4.16. The ConvolverNode Interface</h2>
+
+<p>This interface represents a processing node which applies a <a
+href="#Convolution-section">linear convolution effect</a> given an impulse
+response.  Normative requirements for multi-channel convolution matrixing are described
+<a href="#Convolution-reverb-effect">here</a>. </p>
+<pre>
+    numberOfInputs  : 1
+    numberOfOutputs : 1
+
+    channelCount = 2;
+    channelCountMode = "clamped-max";
+    channelInterpretation = "speakers";
+</pre>
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">Web IDL</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="idl-code" id="convolver-node-idl">
+
+interface <dfn id="dfn-ConvolverNode">ConvolverNode</dfn> : AudioNode {
+
+    attribute AudioBuffer? buffer;
+    attribute boolean normalize;
+
+};
+</code></pre>
+</div>
+</div>
+</div>
+
+<div id="attributes-ConvolverNode-section" class="section">
+<h3 id="attributes-ConvolverNode">4.16.1. Attributes</h3>
+<dl>
+  <dt id="dfn-buffer_ConvolverNode"><code>buffer</code></dt>
+    <dd><p>A mono, stereo, or 4-channel <code>AudioBuffer</code> containing the (possibly multi-channel) impulse response
+      used by the ConvolverNode.  This <code>AudioBuffer</code> must be of the same sample-rate as the AudioContext or an exception will
+      be thrown.  At the time when this attribute is set, the <em>buffer</em> and the state of the <em>normalize</em>
+      attribute will be used to configure the ConvolverNode with this impulse response having the given normalization.
+      The initial value of this attribute is null.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-normalize"><code>normalize</code></dt>
+    <dd><p>Controls whether the impulse response from the buffer will be scaled
+      by an equal-power normalization when the <code>buffer</code> attribute
+      is set. Its default value is <code>true</code> in order to achieve a more
+      uniform output level from the convolver when loaded with diverse impulse
+      responses. If <code>normalize</code> is set to <code>false</code>, then
+      the convolution will be rendered with no pre-processing/scaling of the
+      impulse response.  Changes to this value do not take effect until the next time
+      the <em>buffer</em> attribute is set. </p>
+      
+    </dd>
+</dl>
+
+      <p>
+      If the <em>normalize</em> attribute is false when the <em>buffer</em> attribute is set then the
+      ConvolverNode will perform a linear convolution given the exact impulse response contained within the <em>buffer</em>.
+      </p>
+      <p>
+      Otherwise, if the <em>normalize</em> attribute is true when the <em>buffer</em> attribute is set then the
+      ConvolverNode will first perform a scaled RMS-power analysis of the audio data contained within <em>buffer</em> to calculate a
+      <em>normalizationScale</em> given this algorithm:
+      </p>
+
+
+      <div class="block">
+
+      <div class="blockTitleDiv">
+
+      <div class="blockContent">
+      <pre class="code"><code class="es-code"> 
+
+float calculateNormalizationScale(buffer)
+{
+    const float GainCalibration = 0.00125;
+    const float GainCalibrationSampleRate = 44100;
+    const float MinPower = 0.000125;
+  
+    // Normalize by RMS power.
+    size_t numberOfChannels = buffer->numberOfChannels();
+    size_t length = buffer->length();
+
+    float power = 0;
+
+    for (size_t i = 0; i &lt; numberOfChannels; ++i) {
+        float* sourceP = buffer->channel(i)->data();
+        float channelPower = 0;
+
+        int n = length;
+        while (n--) {
+            float sample = *sourceP++;
+            channelPower += sample * sample;
+        }
+
+        power += channelPower;
+    }
+
+    power = sqrt(power / (numberOfChannels * length));
+
+    // Protect against accidental overload.
+    if (isinf(power) || isnan(power) || power &lt; MinPower)
+        power = MinPower;
+
+    float scale = 1 / power;
+
+    // Calibrate to make perceived volume same as unprocessed.
+    scale *= GainCalibration;
+
+    // Scale depends on sample-rate.
+    if (buffer->sampleRate())
+        scale *= GainCalibrationSampleRate / buffer->sampleRate();
+
+    // True-stereo compensation.
+    if (buffer->numberOfChannels() == 4)
+        scale *= 0.5;
+
+    return scale;
+}
+          </code></pre>
+
+      </div>
+      </div>
+      </div>
+
+<p>
+During processing, the ConvolverNode will then take this calculated <em>normalizationScale</em> value and multiply it by the result of the linear convolution
+resulting from processing the input with the impulse response (represented by the <em>buffer</em>) to produce the
+final output.  Or any mathematically equivalent operation may be used, such as pre-multiplying the
+input by <em>normalizationScale</em>, or pre-multiplying a version of the impulse-response by <em>normalizationScale</em>.
+</p>
+
+</div>
+
+<div id="AnalyserNode-section" class="section">
+<h2 id="AnalyserNode">4.17. The AnalyserNode Interface</h2>
+
+<p>This interface represents a node which is able to provide real-time
+frequency and time-domain <a href="#AnalyserNode">analysis</a>
+information. The audio stream will be passed un-processed from input to output.
+</p>
+<pre>
+    numberOfInputs  : 1
+    numberOfOutputs : 1    <em>Note that this output may be left unconnected.</em>
+
+    channelCount = 1;
+    channelCountMode = "explicit";
+    channelInterpretation = "speakers";
+</pre>
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">Web IDL</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="idl-code" id="analyser-node-idl">
+
+interface <dfn id="dfn-AnalyserNode">AnalyserNode</dfn> : AudioNode {
+
+    <span class="comment">// Real-time frequency-domain data </span>
+    void getFloatFrequencyData(Float32Array array);
+    void getByteFrequencyData(Uint8Array array);
+
+    <span class="comment">// Real-time waveform data </span>
+    void getByteTimeDomainData(Uint8Array array);
+
+    attribute unsigned long fftSize;
+    readonly attribute unsigned long frequencyBinCount;
+
+    attribute double minDecibels;
+    attribute double maxDecibels;
+
+    attribute double smoothingTimeConstant;
+
+};
+</code></pre>
+</div>
+</div>
+</div>
+
+<div id="attributes-ConvolverNode-section_2" class="section">
+<h3 id="attributes-ConvolverNode_2">4.17.1. Attributes</h3>
+<dl>
+  <dt id="dfn-fftSize"><code>fftSize</code></dt>
+    <dd><p>The size of the FFT used for frequency-domain analysis. This must be
+      a non-zero power of two in the range 32 to 2048, otherwise an INDEX_SIZE_ERR exception MUST be thrown.
+      The default value is 2048.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-frequencyBinCount"><code>frequencyBinCount</code></dt>
+    <dd><p>Half the FFT size. </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-minDecibels"><code>minDecibels</code></dt>
+    <dd><p>The minimum power value in the scaling range for the FFT analysis
+      data for conversion to unsigned byte values.
+      The default value is -100.
+      If the value of this attribute is set to a value greater than or equal to <code>maxDecibels</code>,
+      an INDEX_SIZE_ERR exception MUST be thrown.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-maxDecibels"><code>maxDecibels</code></dt>
+    <dd><p>The maximum power value in the scaling range for the FFT analysis
+      data for conversion to unsigned byte values.
+      The default value is -30.
+      If the value of this attribute is set to a value less than or equal to <code>minDecibels</code>,
+      an INDEX_SIZE_ERR exception MUST be thrown.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-smoothingTimeConstant"><code>smoothingTimeConstant</code></dt>
+    <dd><p>A value from 0 -&gt; 1 where 0 represents no time averaging
+      with the last analysis frame.
+      The default value is 0.8.
+      If the value of this attribute is set to a value less than 0 or more than 1,
+      an INDEX_SIZE_ERR exception MUST be thrown.</p>
+    </dd>
+</dl>
+</div>
+
+<h3 id="methods-and-parameters">4.17.2. Methods and Parameters</h3>
+<dl>
+  <dt id="dfn-getFloatFrequencyData">The <code>getFloatFrequencyData</code>
+  method</dt>
+    <dd><p>Copies the current frequency data into the passed floating-point
+      array. If the array has fewer elements than the frequencyBinCount, the
+      excess elements will be dropped. If the array has more elements than
+      the frequencyBinCount, the excess elements will be ignored.</p>
+      <p>The <dfn id="dfn-array">array</dfn> parameter is where
+      frequency-domain analysis data will be copied. </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-getByteFrequencyData">The <code>getByteFrequencyData</code>
+  method</dt>
+    <dd><p>Copies the current frequency data into the passed unsigned byte
+      array. If the array has fewer elements than the frequencyBinCount, the
+      excess elements will be dropped. If the array has more elements than
+      the frequencyBinCount, the excess elements will be ignored.</p>
+      <p>The <dfn id="dfn-array_2">array</dfn> parameter is where
+      frequency-domain analysis data will be copied. </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-getByteTimeDomainData">The <code>getByteTimeDomainData</code>
+  method</dt>
+    <dd><p>Copies the current time-domain (waveform) data into the passed
+      unsigned byte array. If the array has fewer elements than the
+      fftSize, the excess elements will be dropped. If the array has more
+      elements than fftSize, the excess elements will be ignored.</p>
+      <p>The <dfn id="dfn-array_3">array</dfn> parameter is where time-domain
+      analysis data will be copied. </p>
+    </dd>
+</dl>
+
+<div id="ChannelSplitterNode-section" class="section">
+<h2 id="ChannelSplitterNode">4.18. The ChannelSplitterNode Interface</h2>
+
+<p>The <code>ChannelSplitterNode</code> is for use in more advanced
+applications and would often be used in conjunction with <a
+href="#ChannelMergerNode-section"><code>ChannelMergerNode</code></a>. </p>
+<pre>
+    numberOfInputs  : 1
+    numberOfOutputs : Variable N (defaults to 6) // number of "active" (non-silent) outputs is determined by number of channels in the input
+
+    channelCountMode = "max";
+    channelInterpretation = "speakers";
+</pre>
+
+<p>This interface represents an AudioNode for accessing the individual channels
+of an audio stream in the routing graph. It has a single input, and a number of
+"active" outputs which equals the number of channels in the input audio stream.
+For example, if a stereo input is connected to a
+<code>ChannelSplitterNode</code> then the number of active outputs will be two
+(one from the left channel and one from the right). There are always a total
+number of N outputs (determined by the <code>numberOfOutputs</code> parameter to the AudioContext method <code>createChannelSplitter()</code>).
+ The default number is 6 if this value is not provided. Any outputs
+which are not "active" will output silence and would typically not be connected
+to anything. </p>
+
+<h3 id="example-1">Example:</h3>
+<img alt="channel splitter" src="images/channel-splitter.png" /> 
+
+<p>Please note that in this example, the splitter does <b>not</b> interpret the channel identities (such as left, right, etc.), but
+simply splits out channels in the order that they are input.</p>
+
+<p>One application for <code>ChannelSplitterNode</code> is for doing "matrix
+mixing" where individual gain control of each channel is desired. </p>
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">Web IDL</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="idl-code" id="channel-splitter-node-idl">
+
+interface <dfn id="dfn-ChannelSplitterNode">ChannelSplitterNode</dfn> : AudioNode {
+
+};
+</code></pre>
+</div>
+</div>
+</div>
+
+<div id="ChannelMergerNode-section" class="section">
+<h2 id="ChannelMergerNode">4.19. The ChannelMergerNode Interface</h2>
+
+<p>The <code>ChannelMergerNode</code> is for use in more advanced applications
+and would often be used in conjunction with <a
+href="#ChannelSplitterNode-section"><code>ChannelSplitterNode</code></a>. </p>
+<pre>
+    numberOfInputs  : Variable N (defaults to 6)  // number of connected inputs may be less than this
+    numberOfOutputs : 1
+
+    channelCountMode = "max";
+    channelInterpretation = "speakers";
+</pre>
+
+<p>This interface represents an AudioNode for combining channels from multiple
+audio streams into a single audio stream. It has a variable number of inputs (defaulting to 6), but not all of them
+need be connected. There is a single output whose audio stream has a number of
+channels equal to the sum of the numbers of channels of all the connected
+inputs. For example, if a <code>ChannelMergerNode</code> has two connected
+inputs (both stereo), then the output will be four channels, the first two from
+the first input and the second two from the second input. In another example
+with two connected inputs (both mono), the output will be two channels
+(stereo), with the left channel coming from the first input and the right
+channel coming from the second input. </p>
+
+<h3 id="example-2">Example:</h3>
+<img alt="channel merger" src="images/channel-merger.png" /> 
+
+<p>Please note that in this example, the merger does <b>not</b> interpret the channel identities (such as left, right, etc.), but
+simply combines channels in the order that they are input.</p>
+
+
+<p>Be aware that it is possible to connect a <code>ChannelMergerNode</code>
+in such a way that it outputs an audio stream with a large number of channels
+greater than the maximum supported by the audio hardware. In this case where such an output is connected
+to the AudioContext.destination (the audio hardware), then the extra channels will be ignored.
+Thus, the <code>ChannelMergerNode</code> should be used in situations where the number
+of channels is well understood. </p>
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">Web IDL</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="idl-code" id="channel-merger-node-idl">
+
+interface <dfn id="dfn-ChannelMergerNode">ChannelMergerNode</dfn> : AudioNode {
+
+};
+</code></pre>
+</div>
+</div>
+</div>
+
+<div id="DynamicsCompressorNode-section" class="section">
+<h2 id="DynamicsCompressorNode">4.20. The DynamicsCompressorNode Interface</h2>
+
+<p>DynamicsCompressorNode is an AudioNode processor implementing a dynamics
+compression effect. </p>
+
+<p>Dynamics compression is very commonly used in musical production and game
+audio. It lowers the volume of the loudest parts of the signal and raises the
+volume of the softest parts. Overall, a louder, richer, and fuller sound can be
+achieved. It is especially important in games and musical applications where
+large numbers of individual sounds are played simultaneously to control the
+overall signal level and help avoid clipping (distorting) the audio output to
+the speakers. </p>
+<pre>    
+    numberOfInputs  : 1
+    numberOfOutputs : 1
+
+    channelCount = 2;
+    channelCountMode = "explicit";
+    channelInterpretation = "speakers";
+</pre>
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">Web IDL</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="idl-code" id="dynamics-compressor-node-idl">
+
+interface <dfn id="dfn-DynamicsCompressorNode">DynamicsCompressorNode</dfn> : AudioNode {
+
+    readonly attribute AudioParam threshold; // in Decibels
+    readonly attribute AudioParam knee; // in Decibels
+    readonly attribute AudioParam ratio; // unit-less
+    readonly attribute AudioParam reduction; // in Decibels
+    readonly attribute AudioParam attack; // in Seconds
+    readonly attribute AudioParam release; // in Seconds
+
+};
+</code>
+</pre>
+</div>
+</div>
+
+<div id="attributes-DynamicsCompressorNode-section" class="section">
+<h3 id="attributes-DynamicsCompressorNode">4.20.1. Attributes</h3>
+<p>
+All parameters are <em>k-rate</em>
+</p>
+
+<dl>
+  <dt id="dfn-threshold"><code>threshold</code></dt>
+    <dd><p>The decibel value above which the compression will start taking
+      effect.  Its default value is -24, with a nominal range of -100 to 0. </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-knee"><code>knee</code></dt>
+    <dd><p>A decibel value representing the range above the threshold where the
+      curve smoothly transitions to the "ratio" portion. Its default value is 30, with a nominal range of 0 to 40. </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-ratio"><code>ratio</code></dt>
+    <dd><p>The amount of dB change in input for a 1 dB change in output. Its default value is 12, with a nominal range of 1 to 20. </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-reduction"><code>reduction</code></dt>
+    <dd><p>A read-only decibel value for metering purposes, representing the
+      current amount of gain reduction that the compressor is applying to the
+      signal. If fed no signal the value will be 0 (no gain reduction).  The nominal range is -20 to 0. </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-attack"><code>attack</code></dt>
+    <dd><p>The amount of time (in seconds) to reduce the gain by 10dB.   Its default value is 0.003, with a nominal range of 0 to 1. </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-release"><code>release</code></dt>
+    <dd><p>The amount of time (in seconds) to increase the gain by 10dB.  Its default value is 0.250, with a nominal range of 0 to 1. </p>
+    </dd>
+</dl>
+</div>
+</div>
+
+<div id="BiquadFilterNode-section" class="section">
+<h2 id="BiquadFilterNode">4.21. The BiquadFilterNode Interface</h2>
+
+<p>BiquadFilterNode is an AudioNode processor implementing very common
+low-order filters. </p>
+
+<p>Low-order filters are the building blocks of basic tone controls (bass, mid,
+treble), graphic equalizers, and more advanced filters. Multiple
+BiquadFilterNode filters can be combined to form more complex filters. The
+filter parameters such as "frequency" can be changed over time for filter
+sweeps, etc. Each BiquadFilterNode can be configured as one of a number of
+common filter types as shown in the IDL below.  The default filter type
+is "lowpass".</p>
+<pre>    
+    numberOfInputs  : 1
+    numberOfOutputs : 1
+
+    channelCountMode = "max";
+    channelInterpretation = "speakers";
+</pre>
+<p>
+The number of channels of the output always equals the number of channels of the input.
+</p>
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">Web IDL</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="idl-code" id="biquad-filter-node-idl">
+
+enum <dfn>BiquadFilterType</dfn> {
+  "lowpass",
+  "highpass",
+  "bandpass",
+  "lowshelf",
+  "highshelf",
+  "peaking",
+  "notch",
+  "allpass"
+};
+
+interface <dfn id="dfn-BiquadFilterNode">BiquadFilterNode</dfn> : AudioNode {
+
+    attribute BiquadFilterType type;
+    readonly attribute AudioParam frequency; // in Hertz
+    readonly attribute AudioParam detune; // in Cents
+    readonly attribute AudioParam Q; // Quality factor
+    readonly attribute AudioParam gain; // in Decibels
+
+    void getFrequencyResponse(Float32Array frequencyHz,
+                              Float32Array magResponse,
+                              Float32Array phaseResponse);
+
+};
+</code></pre>
+</div>
+</div>
+</div>
+
+<p>The filter types are briefly described below. We note that all of these
+filters are very commonly used in audio processing. In terms of implementation,
+they have all been derived from standard analog filter prototypes. For more
+technical details, we refer the reader to the excellent <a
+href="http://www.musicdsp.org/files/Audio-EQ-Cookbook.txt">reference</a> by
+Robert Bristow-Johnson.</p>
+
+<p>
+All parameters are <em>k-rate</em> with the following default parameter values:
+</p>
+
+<blockquote>
+<dl>
+  <dt>frequency</dt>
+    <dd>350Hz, with a nominal range of 10 to the Nyquist frequency (half the sample-rate).
+    </dd>
+  <dt><a href="http://en.wikipedia.org/wiki/Q_factor">Q</a></dt>
+    <dd>1, with a nominal range of 0.0001 to 1000.</dd>
+  <dt>gain</dt>
+    <dd>0, with a nominal range of -40 to 40.</dd>
+</dl>
+</blockquote>
+
+
+
+<div id="BiquadFilterNode-description-section" class="section">
+<h3 id="BiquadFilterNode-description">4.21.1 "lowpass"</h3>
+
+<p>A <a href="http://en.wikipedia.org/wiki/Low-pass_filter">lowpass filter</a>
+allows frequencies below the cutoff frequency to pass through and attenuates
+frequencies above the cutoff. It implements a standard second-order
+resonant lowpass filter with 12dB/octave rolloff.</p>
+
+<blockquote>
+  <dl>
+    <dt>frequency</dt>
+      <dd>The cutoff frequency</dd>
+    <dt>Q</dt>
+      <dd>Controls how peaked the response will be at the cutoff frequency. A
+        large value makes the response more peaked.  Please note that for this filter type, this
+        value is not a traditional Q, but is a resonance value in decibels.</dd>
+    <dt>gain</dt>
+      <dd>Not used in this filter type</dd>
+  </dl>
+</blockquote>
+
+<h3 id="HIGHPASS">4.21.2 "highpass"</h3>
+
+<p>A <a href="http://en.wikipedia.org/wiki/High-pass_filter">highpass
+filter</a> is the opposite of a lowpass filter. Frequencies above the cutoff
+frequency are passed through, but frequencies below the cutoff are attenuated.
+It implements a standard second-order resonant highpass filter with
+12dB/octave rolloff.</p>
+
+<blockquote>
+  <dl>
+    <dt>frequency</dt>
+      <dd>The cutoff frequency below which the frequencies are attenuated</dd>
+    <dt>Q</dt>
+      <dd>Controls how peaked the response will be at the cutoff frequency. A
+        large value makes the response more peaked. Please note that for this filter type, this
+        value is not a traditional Q, but is a resonance value in decibels.</dd>
+    <dt>gain</dt>
+      <dd>Not used in this filter type</dd>
+  </dl>
+</blockquote>
+
+<h3 id="BANDPASS">4.21.3 "bandpass"</h3>
+
+<p>A <a href="http://en.wikipedia.org/wiki/Band-pass_filter">bandpass
+filter</a> allows a range of frequencies to pass through and attenuates the
+frequencies below and above this frequency range. It implements a
+second-order bandpass filter.</p>
+
+<blockquote>
+  <dl>
+    <dt>frequency</dt>
+      <dd>The center of the frequency band</dd>
+    <dt><a href="http://en.wikipedia.org/wiki/Q_factor">Q</a></dt>
+      <dd>Controls the width of the band. The width becomes narrower as the Q
+        value increases.</dd>
+    <dt>gain</dt>
+      <dd>Not used in this filter type</dd>
+  </dl>
+</blockquote>
+
+<h3 id="LOWSHELF">4.21.4 "lowshelf"</h3>
+
+<p>The lowshelf filter allows all frequencies through, but adds a boost (or
+attenuation) to the lower frequencies. It implements a second-order
+lowshelf filter.</p>
+
+<blockquote>
+  <dl>
+    <dt>frequency</dt>
+      <dd>The upper limit of the frequencies where the boost (or attenuation) is
+        applied.</dd>
+    <dt><a href="http://en.wikipedia.org/wiki/Q_factor">Q</a></dt>
+      <dd>Not used in this filter type.</dd>
+    <dt>gain</dt>
+      <dd>The boost, in dB, to be applied. If the value is negative, the
+        frequencies are attenuated.</dd>
+  </dl>
+</blockquote>
+
+<h3 id="L16352">4.21.5 "highshelf"</h3>
+
+<p>The highshelf filter is the opposite of the lowshelf filter and allows all
+frequencies through, but adds a boost to the higher frequencies. It
+implements a second-order highshelf filter.</p>
+
+<blockquote>
+  <dl>
+    <dt>frequency</dt>
+      <dd>The lower limit of the frequencies where the boost (or attenuation) is
+        applied.</dd>
+    <dt><a href="http://en.wikipedia.org/wiki/Q_factor">Q</a></dt>
+      <dd>Not used in this filter type.</dd>
+    <dt>gain</dt>
+      <dd>The boost, in dB, to be applied. If the value is negative, the
+        frequencies are attenuated.</dd>
+  </dl>
+</blockquote>
+
+<h3 id="PEAKING">4.21.6 "peaking"</h3>
+
+<p>The peaking filter allows all frequencies through, but adds a boost (or
+attenuation) to a range of frequencies. </p>
+
+<blockquote>
+  <dl>
+    <dt>frequency</dt>
+      <dd>The center frequency of where the boost is applied.</dd>
+    <dt><a href="http://en.wikipedia.org/wiki/Q_factor">Q</a></dt>
+      <dd>Controls the width of the band of frequencies that are boosted. A
+        large value implies a narrow width.</dd>
+    <dt>gain</dt>
+      <dd>The boost, in dB, to be applied. If the value is negative, the
+        frequencies are attenuated.</dd>
+  </dl>
+</blockquote>
+
+<h3 id="NOTCH">4.21.7 "notch"</h3>
+
+<p>The notch filter (also known as a <a
+href="http://en.wikipedia.org/wiki/Band-stop_filter">band-stop or
+band-rejection filter</a>) is the opposite of a bandpass filter. It allows all
+frequencies through, except for a set of frequencies.</p>
+
+<blockquote>
+  <dl>
+    <dt>frequency</dt>
+      <dd>The center frequency of where the notch is applied.</dd>
+    <dt><a href="http://en.wikipedia.org/wiki/Q_factor">Q</a></dt>
+      <dd>Controls the width of the band of frequencies that are attenuated. A
+        large value implies a narrow width.</dd>
+    <dt>gain</dt>
+      <dd>Not used in this filter type.</dd>
+  </dl>
+</blockquote>
+
+<h3 id="ALLPASS">4.21.8 "allpass"</h3>
+
+<p>An <a
+href="http://en.wikipedia.org/wiki/All-pass_filter#Digital_Implementation">allpass
+filter</a> allows all frequencies through, but changes the phase relationship
+between the various frequencies. It implements a second-order allpass
+filter.</p>
+
+<blockquote>
+  <dl>
+    <dt>frequency</dt>
+      <dd>The frequency where the center of the phase transition occurs. Viewed
+        another way, this is the frequency with maximal <a
+        href="http://en.wikipedia.org/wiki/Group_delay">group delay</a>.</dd>
+    <dt><a href="http://en.wikipedia.org/wiki/Q_factor">Q</a></dt>
+      <dd>Controls how sharp the phase transition is at the center frequency. A
+        larger value implies a sharper transition and a larger group delay.</dd>
+    <dt>gain</dt>
+      <dd>Not used in this filter type.</dd>
+  </dl>
+</blockquote>
+
+<h3 id="Methods">4.21.9. Methods</h3>
+<dl>
+  <dt id="dfn-getFrequencyResponse">The <code>getFrequencyResponse</code>
+  method</dt>
+    <dd><p>Given the current filter parameter settings, calculates the
+      frequency response for the specified frequencies. </p>
+      <p>The <dfn id="dfn-frequencyHz">frequencyHz</dfn> parameter specifies an
+      array of frequencies at which the response values will be calculated.</p>
+      <p>The <dfn id="dfn-magResponse">magResponse</dfn> parameter specifies an
+      output array receiving the linear magnitude response values.</p>
+      <p>The <dfn id="dfn-phaseResponse">phaseResponse</dfn> parameter
+      specifies an output array receiving the phase response values in
+      radians.</p>
+    </dd>
+</dl>
+</div>
+
+<div id="WaveShaperNode-section" class="section">
+<h2 id="WaveShaperNode">4.22. The WaveShaperNode Interface</h2>
+
+<p>WaveShaperNode is an AudioNode processor implementing non-linear distortion
+effects. </p>
+
+<p>Non-linear waveshaping distortion is commonly used for both subtle
+non-linear warming, or more obvious distortion effects. Arbitrary non-linear
+shaping curves may be specified.</p>
+<pre>
+    numberOfInputs  : 1
+    numberOfOutputs : 1
+
+    channelCountMode = "max";
+    channelInterpretation = "speakers";
+</pre>
+
+<p>
+The number of channels of the output always equals the number of channels of the input.
+</p>
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">Web IDL</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="idl-code" id="wave-shaper-node-idl">
+
+enum <dfn>OverSampleType</dfn> {
+    "none",
+    "2x",
+    "4x"
+};
+
+interface <dfn id="dfn-WaveShaperNode">WaveShaperNode</dfn> : AudioNode {
+
+    attribute Float32Array? curve;
+    attribute OverSampleType oversample;
+
+};
+</code></pre>
+</div>
+</div>
+
+<div id="attributes-WaveShaperNode-section" class="section">
+<h3 id="attributes-WaveShaperNode">4.22.1. Attributes</h3>
+<dl>
+  <dt id="dfn-curve"><code>curve</code></dt>
+    <dd><p>The shaping curve used for the waveshaping effect. The input signal
+      is nominally within the range -1 -&gt; +1. Each input sample within this
+      range will index into the shaping curve with a signal level of zero
+      corresponding to the center value of the curve array. Any sample value
+      less than -1 will correspond to the first value in the curve array. Any
+      sample value greater than +1 will correspond to the last value in
+      the curve array.  The implementation must perform linear interpolation between
+      adjacent points in the curve.  Initially the curve attribute is null, which means that
+      the WaveShaperNode will pass its input to its output without modification.</p>
+    </dd>
+</dl>
+
+<dl>
+  <dt id="dfn-oversample"><code>oversample</code></dt>
+    <dd><p>Specifies what type of oversampling (if any) should be used when applying the shaping curve.    
+    The default value is "none", meaning the curve will be applied directly to the input samples.
+    A value of "2x" or "4x" can improve the quality of the processing by avoiding some aliasing, with
+    the "4x" value yielding the highest quality.  For some applications, it's better to use no oversampling
+    in order to get a very precise shaping curve.
+    </p>
+    <p>
+    A value of "2x" or "4x" means that the following steps must be performed:
+    <ol>
+    <li>Up-sample the input samples to 2x or 4x the sample-rate of the AudioContext.  Thus for each
+    processing block of 128 samples, generate 256 (for 2x) or 512 (for 4x) samples.</li>
+    <li>Apply the shaping curve.</li>
+    <li>Down-sample the result back to the sample-rate of the AudioContext.  Thus taking the 256 (or 512) processed samples, generating 128 as
+    the final result.
+    </ol>
+    The exact up-sampling and down-sampling filters are not specified, and can be tuned for sound quality (low aliasing, etc.), low latency, and performance.
+    </p>
+    </dd>
+</dl>
+</div>
+</div>
+
+<div id="OscillatorNode-section" class="section">
+<h2 id="OscillatorNode">4.23. The OscillatorNode Interface</h2>
+
+<p>OscillatorNode represents an audio source generating a periodic waveform. It can be set to
+a few commonly used waveforms. Additionally, it can be set to an arbitrary periodic
+waveform through the use of a <a href="#PeriodicWave-section"><code>PeriodicWave</code></a> object. </p>
+
+<p>Oscillators are common foundational building blocks in audio synthesis.  An OscillatorNode will start emitting sound at the time
+specified by the <code>start()</code> method. </p>
+
+<p>
+Mathematically speaking, a <em>continuous-time</em> periodic waveform can have very high (or infinitely high) frequency information when considered
+in the frequency domain.  When this waveform is sampled as a discrete-time digital audio signal at a particular sample-rate,
+then care must be taken to discard (filter out) the high-frequency information higher than the <em>Nyquist</em> frequency (half the sample-rate)
+before converting the waveform to a digital form.  If this is not done, then <em>aliasing</em> of higher frequencies (than the Nyquist frequency) will fold
+back as mirror images into frequencies lower than the Nyquist frequency.  In many cases this will cause audibly objectionable artifacts.
+This is a basic and well understood principle of audio DSP.
+</p>
+
+<p>
+There are several practical approaches that an implementation may take to avoid this aliasing.
+But regardless of approach, the <em>idealized</em> discrete-time digital audio signal is well defined mathematically.
+The trade-off for the implementation is a matter of implementation cost (in terms of CPU usage) versus fidelity to
+achieving this ideal.
+</p>
+
+<p>
+It is expected that an implementation will take some care in achieving this ideal, but it is reasonable to consider lower-quality,
+less-costly approaches on lower-end hardware.
+</p>
+
+<p>
+Both .frequency and .detune are <em>a-rate</em> parameters and are used together to determine a <em>computedFrequency</em> value:
+</p>
+
+<pre>
+computedFrequency(t) = frequency(t) * pow(2, detune(t) / 1200)
+</pre>
+
+<p>
+The OscillatorNode's instantaneous phase at each time is the time integral of <em>computedFrequency</em>.
+</p>
+
+<pre>    numberOfInputs  : 0
+    numberOfOutputs : 1 (mono output)
+    </pre>
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">Web IDL</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="idl-code" id="oscillator-node-idl">
+
+enum <dfn>OscillatorType</dfn> {
+  "sine",
+  "square",
+  "sawtooth",
+  "triangle",
+  "custom"
+};
+
+interface <dfn id="dfn-OscillatorNode">OscillatorNode</dfn> : AudioNode {
+
+    attribute OscillatorType type;
+
+    readonly attribute AudioParam frequency; // in Hertz
+    readonly attribute AudioParam detune; // in Cents
+
+    void start(double when);
+    void stop(double when);
+    void setPeriodicWave(PeriodicWave periodicWave);
+
+    attribute EventHandler onended;
+
+};
+</code></pre>
+</div>
+</div>
+
+<div id="attributes-OscillatorNode-section" class="section">
+<h3 id="attributes-OscillatorNode">4.23.1. Attributes</h3>
+<dl>
+  <dt id="dfn-type"><code>type</code></dt>
+    <dd><p>The shape of the periodic waveform.  It may directly be set to any of the type constant values except for "custom".
+    The <a href="#dfn-setPeriodicWave"><code>setPeriodicWave()</code></a> method can be used to set a custom waveform, which results in this attribute
+    being set to "custom".  The default value is "sine". </p>
+    </dd>
+</dl>
+
+<dl>
+  <dt id="dfn-frequency"><code>frequency</code></dt>
+    <dd><p>The frequency (in Hertz) of the periodic waveform. This parameter is <em>a-rate</em> </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-detune"><code>detune</code></dt>
+    <dd><p>A detuning value (in Cents) which will offset the <code>frequency</code> by the given amount.
+     This parameter is <em>a-rate</em> </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-onended"><code>onended</code></dt>
+    <dd><p>A property used to set the <code>EventHandler</code> (described in <cite><a
+      href="http://www.whatwg.org/specs/web-apps/current-work/#eventhandler">HTML</a></cite>)
+      for the ended event that is dispatched to <a
+      href="#OscillatorNode-section"><code>OscillatorNode</code></a>
+      node types.  When the playback of the buffer for an <code>OscillatorNode</code>
+      is finished, an event of type <code>Event</code> (described in <cite><a
+      href="http://www.whatwg.org/specs/web-apps/current-work/#event">HTML</a></cite>)
+      will be dispatched to the event handler. </p>
+    </dd>
+</dl>
+</div>
+</div>
+
+<div id="methodsandparams-OscillatorNode-section" class="section">
+<h3 id="methodsandparams-OscillatorNode">4.23.2. Methods and Parameters</h3>
+<dl>
+  <dt id="dfn-setPeriodicWave">The <code>setPeriodicWave</code>
+  method</dt>
+    <dd><p>Sets an arbitrary custom periodic waveform given a <a href="#PeriodicWave-section"><code>PeriodicWave</code></a>.</p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-start-AudioBufferSourceNode">The <code>start</code>
+  method</dt>
+    <dd><p>defined as in <a href="#AudioBufferSourceNode-section"><code>AudioBufferSourceNode</code></a>. </p>
+    </dd>
+</dl>
+<dl>
+  <dt id="dfn-stop-AudioBufferSourceNode">The <code>stop</code>
+  method</dt>
+    <dd><p>defined as in <a href="#AudioBufferSourceNode-section"><code>AudioBufferSourceNode</code></a>. </p>
+    </dd>
+</dl>
+</div>
+
+
+<div id="PeriodicWave-section" class="section">
+<h2 id="PeriodicWave">4.24. The PeriodicWave Interface</h2>
+
+<p>PeriodicWave represents an arbitrary periodic waveform to be used with an <a href="#OscillatorNode-section"><code>OscillatorNode</code></a>.
+Please see <a href="#dfn-createPeriodicWave">createPeriodicWave()</a> and <a href="#dfn-setPeriodicWave">setPeriodicWave()</a> for more details. </p>
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">Web IDL</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="idl-code" id="wavetable-idl">
+
+interface <dfn id="dfn-PeriodicWave">PeriodicWave</dfn> {
+
+};
+</code></pre>
+</div>
+</div>
+</div>
+
+<div id="MediaStreamAudioSourceNode-section" class="section">
+<h2 id="MediaStreamAudioSourceNode">4.25. The MediaStreamAudioSourceNode
+Interface</h2>
+
+<p>This interface represents an audio source from a <code>MediaStream</code>.
+The first <code>AudioMediaStreamTrack</code> from the <code>MediaStream</code> will be
+used as a source of audio.</p>
+<pre>    numberOfInputs  : 0
+    numberOfOutputs : 1
+</pre>
+
+  <p>
+  The number of channels of the output corresponds to the number of channels of the <code>AudioMediaStreamTrack</code>.
+  If there is no valid audio track, then the number of channels output will be one silent channel.
+  </p>
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">Web IDL</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="idl-code" id="media-stream-audio-source-node-idl">
+
+interface <dfn id="dfn-MediaStreamAudioSourceNode">MediaStreamAudioSourceNode</dfn> : AudioNode {
+
+};
+</code></pre>
+</div>
+</div>
+</div>
+
+<div id="MediaStreamAudioDestinationNode-section" class="section">
+<h2 id="MediaStreamAudioDestinationNode">4.26. The MediaStreamAudioDestinationNode
+Interface</h2>
+
+<p>This interface is an audio destination representing a <code>MediaStream</code> with a single <code>AudioMediaStreamTrack</code>.
+This MediaStream is created when the node is created and is accessible via the <dfn>stream</dfn> attribute.
+This stream can be used in a similar way as a MediaStream obtained via getUserMedia(), and
+can, for example, be sent to a remote peer using the RTCPeerConnection addStream() method.
+</p>
+<pre>
+    numberOfInputs  : 1
+    numberOfOutputs : 0
+
+    channelCount = 2;
+    channelCountMode = "explicit";
+    channelInterpretation = "speakers";
+</pre>
+
+<p>
+The number of channels of the input is by default 2 (stereo).  Any connections to the input
+are up-mixed/down-mixed to the number of channels of the input.
+</p>
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">Web IDL</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="idl-code" id="media-stream-audio-destination-node-idl">
+
+interface <dfn id="dfn-MediaStreamAudioDestinationNode">MediaStreamAudioDestinationNode</dfn> : AudioNode {
+
+    readonly attribute MediaStream stream;
+
+};
+</code></pre>
+</div>
+</div>
+
+<div id="attributes-MediaStreamAudioDestinationNode-section" class="section">
+<h3 id="attributes-MediaStreamAudioDestinationNode">4.26.1. Attributes</h3>
+<dl>
+  <dt id="dfn-stream"><code>stream</code></dt>
+    <dd><p>A MediaStream containing a single AudioMediaStreamTrack with the same number of channels
+    as the node itself.</p>
+    </dd>
+</dl>
+</div>
+
+</div>
+
+<div id="MixerGainStructure-section" class="section">
+<h2 id="MixerGainStructure">6. Mixer Gain Structure</h2>
+
+<p class="norm">This section is informative.</p>
+
+<h3 id="background">Background</h3>
+
+<p>One of the most important considerations when dealing with audio processing
+graphs is how to adjust the gain (volume) at various points. For example, in a
+standard mixing board model, each input bus has pre-gain, post-gain, and
+send-gains. Submix and master out busses also have gain control. The gain
+control described here can be used to implement standard mixing boards as well
+as other architectures. </p>
+
+<div id="SummingJunction-section" class="section">
+<h3 id="SummingJunction">Summing Inputs</h3>
+</div>
+
+<p>The inputs to <a href="#AudioNode-section"><code>AudioNodes</code></a> have
+the ability to accept connections from multiple outputs. The input then acts as
+a unity gain summing junction with each output signal being added with the
+others: </p>
+<img alt="unity gain summing junction"
+src="images/unity-gain-summing-junction.png" /> 
+
+<p>In cases where the channel layouts of the outputs do not match, a mix (usually up-mix) will occur according to the <a
+href="#UpMix-section">mixing rules</a>.
+</p>
+
+<h3 id="gain-Control">Gain Control</h3>
+
+<p>But many times, it's important to be able to control the gain for each of
+the output signals. The <a
+href="#GainNode-section"><code>GainNode</code></a> gives this
+control: </p>
+<img alt="mixer architecture new" src="images/mixer-architecture-new.png" /> 
+
+<p>Using these two concepts of unity gain summing junctions and GainNodes,
+it's possible to construct simple or complex mixing scenarios. </p>
+
+<h3 id="Example-mixer-with-send-busses">Example: Mixer with Send Busses</h3>
+
+<p>In a routing scenario involving multiple sends and submixes, explicit
+control is needed over the volume or "gain" of each connection to a mixer. Such
+routing topologies are very common and exist in even the simplest of electronic
+gear sitting around in a basic recording studio. </p>
+
+<p>Here's an example with two send mixers and a main mixer. Although possible,
+for simplicity's sake, pre-gain control and insert effects are not illustrated:
+</p>
+<img alt="mixer gain structure" src="images/mixer-gain-structure.png" /> 
+
+<p>This diagram is using a shorthand notation where "send 1", "send 2", and
+"main bus" are actually inputs to AudioNodes, but here are represented as
+summing busses, where the intersections g2_1, g3_1, etc. represent the "gain"
+or volume for the given source on the given mixer. In order to expose this
+gain, a <a href="#dfn-GainNode"><code>GainNode</code></a> is used:
+</p>
+
+<p>Here's how the above diagram could be constructed in JavaScript: </p>
+
+<div class="example">
+
+<div class="exampleHeader">
+Example</div>
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">ECMAScript</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="es-code"> 
+
+var context = 0;
+var compressor = 0;
+var reverb = 0;
+var delay = 0;
+var s1 = 0;
+var s2 = 0;
+
+var source1 = 0;
+var source2 = 0;
+var g1_1 = 0;
+var g2_1 = 0;
+var g3_1 = 0;
+var g1_2 = 0;
+var g2_2 = 0;
+var g3_2 = 0;
+
+<span class="comment">// Setup routing graph </span>
+function setupRoutingGraph() {
+    context = new AudioContext();
+
+    compressor = context.createDynamicsCompressor();
+
+    <span class="comment">// Send1 effect </span>
+    reverb = context.createConvolver();
+    <span class="comment">// Convolver impulse response may be set here or later </span>
+
+    <span class="comment">// Send2 effect </span>
+    delay = context.createDelay();
+
+    <span class="comment">// Connect final compressor to final destination </span>
+    compressor.connect(context.destination);
+
+    <span class="comment">// Connect sends 1 &amp; 2 through effects to main mixer </span>
+    s1 = context.createGain();
+    reverb.connect(s1);
+    s1.connect(compressor);
+    
+    s2 = context.createGain();
+    delay.connect(s2);
+    s2.connect(compressor);
+
+    <span class="comment">// Create a couple of sources </span>
+    source1 = context.createBufferSource();
+    source2 = context.createBufferSource();
+    source1.buffer = manTalkingBuffer;
+    source2.buffer = footstepsBuffer;
+
+    <span class="comment">// Connect source1 </span>
+    g1_1 = context.createGain();
+    g2_1 = context.createGain();
+    g3_1 = context.createGain();
+    source1.connect(g1_1);
+    source1.connect(g2_1);
+    source1.connect(g3_1);
+    g1_1.connect(compressor);
+    g2_1.connect(reverb);
+    g3_1.connect(delay);
+
+    <span class="comment">// Connect source2 </span>
+    g1_2 = context.createGain();
+    g2_2 = context.createGain();
+    g3_2 = context.createGain();
+    source2.connect(g1_2);
+    source2.connect(g2_2);
+    source2.connect(g3_2);
+    g1_2.connect(compressor);
+    g2_2.connect(reverb);
+    g3_2.connect(delay);
+
+    <span class="comment">// We now have explicit control over all the volumes g1_1, g2_1, ..., s1, s2 </span>
+    g2_1.gain.value = 0.2; <span class="comment"> // For example, set source1 reverb gain </span>
+
+    <span class="comment"> // Because g2_1.gain is an "AudioParam", </span>
+    <span class="comment"> // an automation curve could also be attached to it. </span>
+    <span class="comment"> // A "mixing board" UI could be created in canvas or WebGL controlling these gains. </span>
+}
+
+ </code></pre>
+</div>
+</div>
+</div>
+</div>
+<br />
+
+
+<div id="DynamicLifetime-section">
+<h2 id="DynamicLifetime">7. Dynamic Lifetime</h2>
+
+<h3 id="DynamicLifetime-background">Background</h3>
+
+<p class="norm">This section is informative. Please see <a href="#lifetime-AudioContext">AudioContext lifetime</a>
+and <a href="#lifetime-AudioNode">AudioNode lifetime</a>  for normative requirements
+</p>
+
+<p>In addition to allowing the creation of static routing configurations, it
+should also be possible to do custom effect routing on dynamically allocated
+voices which have a limited lifetime. For the purposes of this discussion,
+let's call these short-lived voices "notes". Many audio applications
+incorporate the ideas of notes, examples being drum machines, sequencers, and
+3D games with many one-shot sounds being triggered according to game play. </p>
+
+<p>In a traditional software synthesizer, notes are dynamically allocated and
+released from a pool of available resources. The note is allocated when a MIDI
+note-on message is received. It is released when the note has finished playing
+either due to it having reached the end of its sample-data (if non-looping), it
+having reached a sustain phase of its envelope which is zero, or due to a MIDI
+note-off message putting it into the release phase of its envelope. In the MIDI
+note-off case, the note is not released immediately, but only when the release
+envelope phase has finished. At any given time, there can be a large number of
+notes playing but the set of notes is constantly changing as new notes are
+added into the routing graph, and old ones are released. </p>
+
+<p>The audio system automatically deals with tearing-down the part of the
+routing graph for individual "note" events. A "note" is represented by an
+<code>AudioBufferSourceNode</code>, which can be directly connected to other
+processing nodes. When the note has finished playing, the context will
+automatically release the reference to the <code>AudioBufferSourceNode</code>,
+which in turn will release references to any nodes it is connected to, and so
+on. The nodes will automatically get disconnected from the graph and will be
+deleted when they have no more references. Nodes in the graph which are
+long-lived and shared between dynamic voices can be managed explicitly.
+Although it sounds complicated, this all happens automatically with no extra
+JavaScript handling required. </p>
+
+<h3 id="Example-DynamicLifetime">Example</h3>
+
+<div class="example">
+
+<div class="exampleHeader">
+Example</div>
+<img alt="dynamic allocation" src="images/dynamic-allocation.png" /> 
+
+<p>The low-pass filter, panner, and second gain nodes are directly connected
+from the one-shot sound. So when it has finished playing the context will
+automatically release them (everything within the dotted line). If there are no
+longer any JavaScript references to the one-shot sound and connected nodes,
+then they will be immediately removed from the graph and deleted. The streaming
+source has a global reference and will remain connected until it is explicitly
+disconnected. Here's how it might look in JavaScript: </p>
+
+<div class="block">
+
+<div class="blockTitleDiv">
+<span class="blockTitle">ECMAScript</span></div>
+
+<div class="blockContent">
+<pre class="code"><code class="es-code"> 
+
+var context = 0;
+var compressor = 0;
+var gainNode1 = 0;
+var streamingAudioSource = 0;
+
+<span class="comment">// Initial setup of the "long-lived" part of the routing graph </span> 
+function setupAudioContext() {
+    context = new AudioContext();
+
+    compressor = context.createDynamicsCompressor();
+    gainNode1 = context.createGain();
+
+    // Create a streaming audio source.
+    var audioElement = document.getElementById('audioTagID');
+    streamingAudioSource = context.createMediaElementSource(audioElement);
+    streamingAudioSource.connect(gainNode1);
+
+    gainNode1.connect(compressor);
+    compressor.connect(context.destination);
+}
+
+<span class="comment">// Later in response to some user action (typically mouse or key event) </span>
+<span class="comment">// a one-shot sound can be played. </span>
+function playSound() {
+    var oneShotSound = context.createBufferSource();
+    oneShotSound.buffer = dogBarkingBuffer;
+
+    <span class="comment">// Create a filter, panner, and gain node. </span>
+    var lowpass = context.createBiquadFilter();
+    var panner = context.createPanner();
+    var gainNode2 = context.createGain();
+
+    <span class="comment">// Make connections </span>
+    oneShotSound.connect(lowpass);
+    lowpass.connect(panner);
+    panner.connect(gainNode2);
+    gainNode2.connect(compressor);
+
+    <span class="comment">// Play 0.75 seconds from now (to play immediately pass in 0)</span>
+    oneShotSound.start(context.currentTime + 0.75);
+}
+</code></pre>
+</div>
+</div>
+</div>
+</div>
+
+
+
+<div id="UpMix-section" class="section">
+<h2 id="UpMix">9. Channel up-mixing and down-mixing</h2>
+
+<p class="norm">This section is normative.</p>
+
+<img src="images/unity-gain-summing-junction.png">
+
+<p>
+<a href="#MixerGainStructure-section">Mixer Gain Structure</a>
+describes how an <dfn>input</dfn> to an AudioNode can be connected from one or more <dfn>outputs</dfn>
+of an AudioNode.  Each of these connections from an output represents a stream with
+a specific non-zero number of channels.  An input has <em>mixing rules</em> for combining the channels
+from all of the connections to it.  As a simple example, if an input is connected from a mono output and
+a stereo output, then the mono connection will usually be up-mixed to stereo and summed with
+the stereo connection.  But, of course, it's important to define the exact <em>mixing rules</em> for
+every input to every AudioNode.  The default mixing rules for all of the inputs have been chosen so that
+things "just work" without worrying too much about the details, especially in the very common
+case of mono and stereo streams.  But the rules can be changed for advanced use cases, especially
+multi-channel.
+</p>
+
+<p>
+To define some terms, <em>up-mixing</em> refers to the process of taking a stream with a smaller
+number of channels and converting it to a stream with a larger number of channels.  <em>down-mixing</em>
+refers to the process of taking a stream with a larger number of channels and converting it to a stream
+with a smaller number of channels.
+</p>
+
+<p>
+An AudioNode input uses three basic pieces of information to determine how to mix all the outputs
+connected to it.  As part of this process it computes an internal value <dfn>computedNumberOfChannels</dfn>
+ representing the actual number of channels of the input at any given time:
+</p>
+
+<p>
+The AudioNode attributes involved in channel up-mixing and down-mixing rules are defined
+<a href="#attributes-AudioNode-section">above</a>.  The following is a more precise specification
+on what each of them mean.
+</p>
+
+<ul>
+<li><dfn>channelCount</dfn> is used to help compute <dfn>computedNumberOfChannels</dfn>.</li>
+
+<li><dfn>channelCountMode</dfn> determines how <dfn>computedNumberOfChannels</dfn> will be computed.
+Once this number is computed, all of the connections will be up or down-mixed to that many channels.  For most nodes,
+the default value is "max".
+<ul>
+<li>“max”: <dfn>computedNumberOfChannels</dfn> is computed as the maximum of the number of channels of all connections.
+In this mode <dfn>channelCount</dfn> is ignored.</li>
+<li>“clamped-max”: same as “max” up to a limit of the <dfn>channelCount</dfn></li>
+<li>“explicit”: <dfn>computedNumberOfChannels</dfn> is the exact value as specified in <dfn>channelCount</dfn></li>
+</ul>
+
+</li>
+
+<li><dfn>channelInterpretation</dfn> determines how the individual channels will be treated.
+For example, will they be treated as speakers having a specific layout, or will they
+be treated as simple discrete channels?  This value influences exactly how the up and down mixing is
+performed.  The default value is "speakers".
+
+<ul>
+<li>“speakers”: use <a href="#ChannelLayouts">up-down-mix equations for mono/stereo/quad/5.1</a>.
+In cases where the number of channels does not match any of these basic speaker layouts, revert
+to "discrete".
+</li>
+<li>“discrete”: up-mix by filling channels until they run out then zero out remaining channels.
+                  down-mix by filling as many channels as possible, then dropping remaining channels</li>
+</ul>
+
+</li>
+
+</ul>
+
+<p>
+For each input of an AudioNode, an implementation must:
+</p>
+
+<ol>
+<li>Compute <dfn>computedNumberOfChannels</dfn>.</li>
+<li>For each connection to the input:
+<ul>
+<li> up-mix or down-mix the connection to <dfn>computedNumberOfChannels</dfn> according to <dfn>channelInterpretation</dfn>.</li>
+<li> Mix it together with all of the other mixed streams (from other connections). This is a straight-forward mixing together of each of the corresponding channels from each
+connection.</li>
+</ul>
+</li>
+</ol>
+
+
+
+
+<div id="ChannelLayouts-section" class="section">
+<h3 id="ChannelLayouts">9.1. Speaker Channel Layouts</h3>
+
+<p class="norm">This section is normative.</p>
+
+<p>
+When <dfn>channelInterpretation</dfn> is "speakers" then the up-mixing and down-mixing
+is defined for specific channel layouts.
+</p>
+
+<p>It's important to define the channel ordering (and define some
+abbreviations) for these speaker layouts.</p>
+
+<p>
+For now, only the cases for mono, stereo, quad, and 5.1 are considered. Later other channel
+layouts can be defined. 
+</p>
+
+<h4 id ="ChannelOrdering">9.1.1. Channel ordering</h4>
+
+<pre>  Mono
+    0: M: mono
+    
+  Stereo
+    0: L: left
+    1: R: right
+  </pre>
+
+<pre>  Quad
+    0: L:  left
+    1: R:  right
+    2: SL: surround left
+    3: SR: surround right
+
+  5.1
+    0: L:   left
+    1: R:   right
+    2: C:   center
+    3: LFE: subwoofer
+    4: SL:  surround left
+    5: SR:  surround right
+  </pre>
+</div>
+
+<h4 id="UpMix-sub">9.1.2. Up Mixing speaker layouts</h4>
+
+<pre>Mono up-mix:
+    
+    1 -&gt; 2 : up-mix from mono to stereo
+        output.L = input;
+        output.R = input;
+
+    1 -&gt; 4 : up-mix from mono to quad
+        output.L = input;
+        output.R = input;
+        output.SL = 0;
+        output.SR = 0;
+
+    1 -&gt; 5.1 : up-mix from mono to 5.1
+        output.L = 0;
+        output.R = 0;
+        output.C = input; // put in center channel
+        output.LFE = 0;
+        output.SL = 0;
+        output.SR = 0;
+
+Stereo up-mix:
+
+    2 -&gt; 4 : up-mix from stereo to quad
+        output.L = input.L;
+        output.R = input.R;
+        output.SL = 0;
+        output.SR = 0;
+
+    2 -&gt; 5.1 : up-mix from stereo to 5.1
+        output.L = input.L;
+        output.R = input.R;
+        output.C = 0;
+        output.LFE = 0;
+        output.SL = 0;
+        output.SR = 0;
+
+Quad up-mix:
+
+    4 -&gt; 5.1 : up-mix from quad to 5.1
+        output.L = input.L;
+        output.R = input.R;
+        output.C = 0;
+        output.LFE = 0;
+        output.SL = input.SL;
+        output.SR = input.SR;</pre>
+
+<h4 id="down-mix">9.1.3. Down Mixing speaker layouts</h4>
+
+<p>A down-mix will be necessary, for example, if processing 5.1 source
+material, but playing back stereo. </p>
+<pre>  
+Mono down-mix:
+
+    2 -&gt; 1 : stereo to mono
+        output = 0.5 * (input.L + input.R);
+
+    4 -&gt; 1 : quad to mono
+        output = 0.25 * (input.L + input.R + input.SL + input.SR);
+
+    5.1 -&gt; 1 : 5.1 to mono
+        output = 0.7071 * (input.L + input.R) + input.C + 0.5 * (input.SL + input.SR)
+
+
+Stereo down-mix:
+
+    4 -&gt; 2 : quad to stereo
+        output.L = 0.5 * (input.L + input.SL);
+        output.R = 0.5 * (input.R + input.SR);
+
+    5.1 -&gt; 2 : 5.1 to stereo
+        output.L = input.L + 0.7071 * (input.C + input.SL)
+        output.R = input.R + 0.7071 * (input.C + input.SR)
+
+Quad down-mix:
+
+    5.1 -&gt; 4 : 5.1 to quad
+        output.L = input.L + 0.7071 * input.C
+        output.R = input.R + 0.7071 * input.C
+        output.SL = input.SL
+        output.SR = input.SR
+
+</pre>
+</div>
+
+<h3 id="ChannelRules-section">9.2. Channel Rules Examples</h3>
+
+<p class="norm">This section is informative.</p>
+
+<div class="block">
+<div class="blockTitleDiv">
+<div class="blockContent">
+<pre class="code"><code class="idl-code"> 
+// Set gain node to explicit 2-channels (stereo).
+gain.channelCount = 2;
+gain.channelCountMode = "explicit";
+gain.channelInterpretation = "speakers";
+
+// Set "hardware output" to 4-channels for DJ-app with two stereo output busses.
+context.destination.channelCount = 4;
+context.destination.channelCountMode = "explicit";
+context.destination.channelInterpretation = "discrete";
+
+// Set "hardware output" to 8-channels for custom multi-channel speaker array
+// with custom matrix mixing.
+context.destination.channelCount = 8;
+context.destination.channelCountMode = "explicit";
+context.destination.channelInterpretation = "discrete";
+
+// Set "hardware output" to 5.1 to play an HTMLAudioElement.
+context.destination.channelCount = 6;
+context.destination.channelCountMode = "explicit";
+context.destination.channelInterpretation = "speakers";
+
+// Explicitly down-mix to mono.
+gain.channelCount = 1;
+gain.channelCountMode = "explicit";
+gain.channelInterpretation = "speakers";
+</code></pre>
+</div>
+</div>
+</div>
+
+
+<div id="Spatialization-section" class="section">
+<h2 id="Spatialization">11. Spatialization / Panning </h2>
+
+<h3 id="Spatialization-background">Background</h3>
+
+<p>A common feature requirement for modern 3D games is the ability to
+dynamically spatialize and move multiple audio sources in 3D space. Game audio
+engines such as OpenAL, FMOD, Creative's EAX, Microsoft's XACT Audio, etc. have
+this ability. </p>
+
+<p>Using a <code>PannerNode</code>, an audio stream can be spatialized or
+positioned in space relative to an <code>AudioListener</code>. An <a
+href="#AudioContext-section"><code>AudioContext</code></a> will contain a
+single <code>AudioListener</code>. Both panners and listeners have a position
+in 3D space using a right-handed cartesian coordinate system.
+The units used in the coordinate system are not defined, and do not need to be
+because the effects calculated with these coordinates are independent/invariant
+of any particular units such as meters or feet.  <code>PannerNode</code>
+objects (representing the source stream) have an <code>orientation</code>
+vector representing in which direction the sound is projecting. Additionally,
+they have a <code>sound cone</code> representing how directional the sound is.
+For example, the sound could be omnidirectional, in which case it would be
+heard anywhere regardless of its orientation, or it can be more directional and
+heard only if it is facing the listener. <code>AudioListener</code> objects
+(representing a person's ears) have an <code>orientation</code> and
+<code>up</code> vector representing in which direction the person is facing.
+Because both the source stream and the listener can be moving, they both have a
+<code>velocity</code> vector representing both the speed and direction of
+movement. Taken together, these two velocities can be used to generate a
+doppler shift effect which changes the pitch. </p>
+
+<p>
+During rendering, the <code>PannerNode</code> calculates an <em>azimuth</em>
+and <em>elevation</em>.  These values are used internally by the implementation in
+order to render the spatialization effect.  See the <a href="#Spatialization-panning-algorithm">Panning Algorithm</a> section
+for details of how these values are used.
+</p>
+
+<p>
+The following algorithm must be used to calculate the <em>azimuth</em>
+and <em>elevation</em>:
+</p>
+
+<div class="block">
+<div class="blockTitleDiv">
+<div class="blockContent">
+<pre class="code"><code class="es-code"> 
+// Calculate the source-listener vector.
+vec3 sourceListener = source.position - listener.position;
+
+if (sourceListener.isZero()) {
+    // Handle degenerate case if source and listener are at the same point.
+    azimuth = 0;
+    elevation = 0;
+    return;
+}
+
+sourceListener.normalize();
+
+// Align axes.
+vec3 listenerFront = listener.orientation;
+vec3 listenerUp = listener.up;
+vec3 listenerRight = listenerFront.cross(listenerUp);
+listenerRight.normalize();
+
+vec3 listenerFrontNorm = listenerFront;
+listenerFrontNorm.normalize();
+
+vec3 up = listenerRight.cross(listenerFrontNorm);
+
+float upProjection = sourceListener.dot(up);
+
+vec3 projectedSource = sourceListener - upProjection * up;
+projectedSource.normalize();
+
+azimuth = 180 * acos(projectedSource.dot(listenerRight)) / PI;
+
+// Source in front or behind the listener.
+double frontBack = projectedSource.dot(listenerFrontNorm);
+if (frontBack &lt; 0)
+    azimuth = 360 - azimuth;
+
+// Make azimuth relative to "front" and not "right" listener vector.
+if ((azimuth >= 0) &amp;&amp; (azimuth &lt;= 270))
+    azimuth = 90 - azimuth;
+else
+    azimuth = 450 - azimuth;
+
+elevation = 90 - 180 * acos(sourceListener.dot(up)) / PI;
+
+if (elevation > 90)
+    elevation = 180 - elevation;
+else if (elevation &lt; -90)
+    elevation = -180 - elevation;
+</code></pre>
+</div>
+</div>
+</div>
+
+<h3 id="Spatialization-panning-algorithm">Panning Algorithm</h3>
+
+<p>
+<em>mono->stereo</em> and <em>stereo->stereo</em>  panning must be supported.
+<em>mono->stereo</em> processing is used when all connections to the input are mono.
+Otherwise <em>stereo->stereo</em> processing is used.</p>
+
+<p>The following algorithms must be implemented: </p>
+<ul>
+  <li>Equal-power (Vector-based) panning 
+    <p>This is a simple and relatively inexpensive algorithm which provides
+    basic, but reasonable results.  It is commonly used when panning musical sources.
+    </p>
+    The <em>elevation</em> value is ignored in this panning algorithm.
+
+    <p>
+    The following steps are used for processing:
+    </p>
+    
+    <ol>
+
+    <li>
+    <p>
+    The <em>azimuth</em> value is first constrained to be within the range -90 &lt;= <em>azimuth</em> &lt;= +90 according to:
+    </p>
+    <pre>
+    // Clamp azimuth to allowed range of -180 -> +180.
+    azimuth = max(-180, azimuth);
+    azimuth = min(180, azimuth);
+
+    // Now wrap to range -90 -> +90.
+    if (azimuth &lt; -90)
+        azimuth = -180 - azimuth;
+    else if (azimuth > 90)
+        azimuth = 180 - azimuth;
+    </pre>
+    </li>
+    
+    <li>
+    <p>
+    A 0 -> 1 normalized value <em>x</em> is calculated from <em>azimuth</em> for <em>mono->stereo</em> as:
+    </p>
+    <pre>
+    x = (azimuth + 90) / 180    
+    </pre>
+
+    <p>
+    Or for <em>stereo->stereo</em> as:
+    </p>
+    <pre>
+    if (azimuth &lt;= 0) { // from -90 -> 0
+        // inputL -> outputL and "equal-power pan" inputR as in mono case
+        // by transforming the "azimuth" value from -90 -> 0 degrees into the range -90 -> +90.
+        x = (azimuth + 90) / 90;
+    } else { // from 0 -> +90
+        // inputR -> outputR and "equal-power pan" inputL as in mono case
+        // by transforming the "azimuth" value from 0 -> +90 degrees into the range -90 -> +90.
+        x = azimuth / 90;
+    }
+    </pre>
+    </li>
+    
+    <li>
+    <p>
+    Left and right gain values are then calculated:
+    </p>
+    <pre>
+    gainL = cos(0.5 * PI * x);
+    gainR = sin(0.5 * PI * x);    
+    </pre>
+    </li>
+    
+    <li>
+    <p>For <em>mono->stereo</em>, the output is calculated as:</p>
+    <pre>
+    outputL = input * gainL
+    outputR = input * gainR
+    </pre>
+    <p>Else for <em>stereo->stereo</em>, the output is calculated as:</p>
+    <pre>
+    if (azimuth &lt;= 0) { // from -90 -> 0
+        outputL = inputL + inputR * gainL;
+        outputR = inputR * gainR;
+    } else { // from 0 -> +90
+        outputL = inputL * gainL;
+        outputR = inputR + inputL * gainR;
+    }
+    </pre>
+    </li>
+    
+    </ol>
+    
+    
+    
+  </li>
+  <li><a
+    href="http://en.wikipedia.org/wiki/Head-related_transfer_function">HRTF</a>
+    panning (stereo only) 
+    <p>This requires a set of HRTF impulse responses recorded at a variety of
+    azimuths and elevations. There are a small number of open/free impulse
+    responses available. The implementation requires a highly optimized
+    convolution function. It is somewhat more costly than "equal-power", but
+    provides a more spatialized sound. </p>
+    <img alt="HRTF panner" src="images/HRTF_panner.png" /></li>
+</ul>
+
+<h3 id="Spatialization-distance-effects">Distance Effects</h3>
+<p>
+Sounds which are closer are louder, while sounds further away are quieter.
+Exactly <em>how</em> a sound's volume changes according to distance from the listener
+depends on the <em>distanceModel</em> attribute.
+</p>
+
+
+<p>
+During audio rendering, a <em>distance</em> value will be calculated based on the panner and listener positions according to:
+</p>
+<pre>
+v = panner.position - listener.position
+</pre>
+<pre>
+distance = sqrt(dot(v, v))
+</pre>
+
+<p>
+<em>distance</em> will then be used to calculate <em>distanceGain</em> which depends
+on the <em>distanceModel</em> attribute.  See the <a href="#dfn-distanceModel">distanceModel</a> section for details of
+how this is calculated for each distance model.
+</p>
+<p>As part of its processing, the <code>PannerNode</code> scales/multiplies the input audio signal by <em>distanceGain</em>
+to make distant sounds quieter and nearer ones louder.
+</p>
+
+
+
+
+<h3 id="Spatialization-sound-cones">Sound Cones</h3>
+
+<p>The listener and each sound source have an orientation vector describing
+which way they are facing. Each sound source's sound projection characteristics
+are described by an inner and outer "cone" describing the sound intensity as a
+function of the source/listener angle from the source's orientation vector.
+Thus, a sound source pointing directly at the listener will be louder than if
+it is pointed off-axis. Sound sources can also be omni-directional. </p>
+
+<p>
+The following algorithm must be used to calculate the gain contribution due
+to the cone effect, given the source (the <code>PannerNode</code>) and the listener:
+</p>
+
+<div class="block">
+<div class="blockTitleDiv">
+<div class="blockContent">
+<pre class="code"><code class="idl-code"> 
+if (source.orientation.isZero() || ((source.coneInnerAngle == 360) &amp;&amp; (source.coneOuterAngle == 360)))
+    return 1; // no cone specified - unity gain
+
+// Normalized source-listener vector
+vec3 sourceToListener = listener.position - source.position;
+sourceToListener.normalize();
+
+vec3 normalizedSourceOrientation = source.orientation;
+normalizedSourceOrientation.normalize();
+
+// Angle between the source orientation vector and the source-listener vector
+double dotProduct = sourceToListener.dot(normalizedSourceOrientation);
+double angle = 180 * acos(dotProduct) / PI;
+double absAngle = fabs(angle);
+
+// Divide by 2 here since API is entire angle (not half-angle)
+double absInnerAngle = fabs(source.coneInnerAngle) / 2;
+double absOuterAngle = fabs(source.coneOuterAngle) / 2;
+double gain = 1;
+
+if (absAngle &lt;= absInnerAngle)
+    // No attenuation
+    gain = 1;
+else if (absAngle &gt;= absOuterAngle)
+    // Max attenuation
+    gain = source.coneOuterGain;
+else {
+    // Between inner and outer cones
+    // inner -> outer, x goes from 0 -> 1
+    double x = (absAngle - absInnerAngle) / (absOuterAngle - absInnerAngle);
+    gain = (1 - x) + source.coneOuterGain * x;
+}
+
+return gain;
+</code></pre>
+</div>
+</div>
+</div>
+
+<h3 id="Spatialization-doppler-shift">Doppler Shift</h3>
+<ul>
+  <li>Introduces a pitch shift which can realistically simulate moving
+  sources.</li>
+  <li>Depends on: source / listener velocity vectors, speed of sound, doppler
+    factor.</li>
+</ul>
+
+<p>
+The following algorithm must be used to calculate the doppler shift value which is used
+as an additional playback rate scalar for all AudioBufferSourceNodes connecting directly or
+indirectly to the PannerNode:
+</p>
+
+<div class="block">
+<div class="blockTitleDiv">
+<div class="blockContent">
+<pre class="code"><code class="idl-code"> 
+double dopplerShift = 1; // Initialize to default value
+double dopplerFactor = listener.dopplerFactor;
+
+if (dopplerFactor > 0) {
+    double speedOfSound = listener.speedOfSound;
+
+    // Don't bother if both source and listener have no velocity.
+    if (!source.velocity.isZero() || !listener.velocity.isZero()) {
+        // Calculate the source to listener vector.
+        vec3 sourceToListener = source.position - listener.position;
+
+        double sourceListenerMagnitude = sourceToListener.length();
+
+        double listenerProjection = sourceToListener.dot(listener.velocity) / sourceListenerMagnitude;
+        double sourceProjection = sourceToListener.dot(source.velocity) / sourceListenerMagnitude;
+
+        listenerProjection = -listenerProjection;
+        sourceProjection = -sourceProjection;
+
+        double scaledSpeedOfSound = speedOfSound / dopplerFactor;
+        listenerProjection = min(listenerProjection, scaledSpeedOfSound);
+        sourceProjection = min(sourceProjection, scaledSpeedOfSound);
+
+        dopplerShift = ((speedOfSound - dopplerFactor * listenerProjection) / (speedOfSound - dopplerFactor * sourceProjection));
+        fixNANs(dopplerShift); // Avoid illegal values
+
+        // Limit the pitch shifting to 4 octaves up and 3 octaves down.
+        dopplerShift = min(dopplerShift, 16);
+        dopplerShift = max(dopplerShift, 0.125);
+    }
+}
+</code></pre>
+</div>
+</div>
+</div>
+
+
+
+
+</div>
+
+<div id="Convolution-section" class="section">
+<h2 id="Convolution">12. Linear Effects using Convolution</h2>
+
+<h3 id="Convolution-background">Background</h3>
+
+<p><a href="http://en.wikipedia.org/wiki/Convolution">Convolution</a> is a
+mathematical process which can be applied to an audio signal to achieve many
+interesting high-quality linear effects. Very often, the effect is used to
+simulate an acoustic space such as a concert hall, cathedral, or outdoor
+amphitheater. It can also be used for complex filter effects, like a muffled
+sound coming from inside a closet, sound underwater, sound coming through a
+telephone, or playing through a vintage speaker cabinet. This technique is very
+commonly used in major motion picture and music production and is considered to
+be extremely versatile and of high quality. </p>
+
+<p>Each unique effect is defined by an <code>impulse response</code>. An
+impulse response can be represented as an audio file and <a
+href="#recording-impulse-responses">can be recorded</a> from a real acoustic
+space such as a cave, or can be synthetically generated through a great variety
+of techniques. </p>
+
+<h3 id="Convolution-motivation">Motivation for use as a Standard</h3>
+
+<p>A key feature of many game audio engines (OpenAL, FMOD, Creative's EAX,
+Microsoft's XACT Audio, etc.) is a reverberation effect for simulating the
+sound of being in an acoustic space. But the code used to generate the effect
+has generally been custom and algorithmic (generally using a hand-tweaked set
+of delay lines and allpass filters which feedback into each other). In nearly
+all cases, not only is the implementation custom, but the code is proprietary
+and closed-source, each company adding its own "black magic" to achieve its
+unique quality. Each implementation being custom with a different set of
+parameters makes it impossible to achieve a uniform desired effect. And the
+code being proprietary makes it impossible to adopt a single one of the
+implementations as a standard. Additionally, algorithmic reverberation effects
+are limited to a relatively narrow range of different effects, regardless of
+how the parameters are tweaked. </p>
+
+<p>A convolution effect solves these problems by using a very precisely defined
+mathematical algorithm as the basis of its processing. An impulse response
+represents an exact sound effect to be applied to an audio stream and is easily
+represented by an audio file which can be referenced by URL. The range of
+possible effects is enormous. </p>
+
+<h3 id="Convolution-implementation-guide">Implementation Guide</h3>
+<p>
+Linear convolution can be implemented efficiently.
+Here are some <a href="https://dvcs.w3.org/hg/audio/raw-file/tip/webaudio/convolution.html">notes</a>
+describing how it can be practically implemented.
+</p>
+
+<h3 id="Convolution-reverb-effect">Reverb Effect (with matrixing)</h3>
+
+<p class="norm">This section is normative.</p>
+
+<p>
+In the general case the source
+has N input channels, the impulse response has K channels, and the playback
+system has M output channels. Thus it's a matter of how to matrix these
+channels to achieve the final result.
+</p>
+
+<p>
+The subset of N, M, K below must be implemented (note that the first image in the diagram is just illustrating
+the general case and is not normative, while the following images are normative).
+Without loss of generality, developers desiring more complex and arbitrary matrixing can use multiple <code>ConvolverNode</code>
+objects in conjunction with a <code>ChannelMergerNode</code>.
+</p>
+
+
+<p>Single channel convolution operates on a mono audio input, using a mono
+impulse response, and generating a mono output. But to achieve a more spacious sound, 2 channel audio
+inputs and 1, 2, or 4 channel impulse responses will be considered. The following diagram illustrates the
+common cases for stereo playback where N and M are 1 or 2 and K is 1, 2, or 4.
+</p>
+<img alt="reverb matrixing" src="images/reverb-matrixing.png" /> 
+
+<h3 id="recording-impulse-responses">Recording Impulse Responses</h3>
+
+<p class="norm">This section is informative.</p>
+<img alt="impulse response" src="images/impulse-response.png" /> <br />
+<br />
+
+
+<p>The most <a
+href="http://pcfarina.eng.unipr.it/Public/Papers/226-AES122.pdf">modern</a> and
+accurate way to record the impulse response of a real acoustic space is to use
+a long exponential sine sweep. The test-tone can be as long as 20 or 30
+seconds, or longer. <br />
+Several recordings of the test tone played through a speaker can be made with
+microphones placed and oriented at various positions in the room. It's
+important to document speaker placement/orientation, the types of microphones,
+their settings, placement, and orientations for each recording taken. </p>
+
+<p>Post-processing is required for each of these recordings by performing an
+inverse-convolution with the test tone, yielding the impulse response of the
+room with the corresponding microphone placement. These impulse responses are
+then ready to be loaded into the convolution reverb engine to re-create the
+sound of being in the room. </p>
+
+<h3 id="tools">Tools</h3>
+
+<p>Two command-line tools have been written: <br />
+<code>generate_testtones</code> generates an exponential sine-sweep test-tone
+and its inverse. Another tool <code>convolve</code> was written for
+post-processing. With these tools, anybody with recording equipment can record
+their own impulse responses. To test the tools in practice, several recordings
+were made in a warehouse space with interesting acoustics. These were later
+post-processed with the command-line tools. </p>
+<pre>% generate_testtones -h
+Usage: generate_testtone
+	[-o /Path/To/File/To/Create] Two files will be created: .tone and .inverse
+	[-rate &lt;sample rate&gt;] sample rate of the generated test tones
+	[-duration &lt;duration&gt;] The duration, in seconds, of the generated files
+	[-min_freq &lt;min_freq&gt;] The minimum frequency, in hertz, for the sine sweep
+
+% convolve -h
+Usage: convolve input_file impulse_response_file output_file</pre>
+<br />
+
+
+<h3 id="recording-setup">Recording Setup</h3>
+<img alt="recording setup" src="images/recording-setup.png" /> <br />
+<br />
+Audio Interface: Metric Halo Mobile I/O 2882 <br />
+<br />
+<br />
+<br />
+<img alt="microphones speaker" src="images/microphones-speaker.png" /> <br />
+<br />
+<img alt="microphone" src="images/microphone.png" /> <img alt="speaker"
+src="images/speaker.png" /> <br />
+<br />
+Microphones: AKG 414s, Speaker: Mackie HR824 <br />
+<br />
+<br />
+
+
+<h3 id="warehouse">The Warehouse Space</h3>
+<img alt="warehouse" src="images/warehouse.png" /> <br />
+<br />
+</div>
+
+<div id="JavaScriptProcessing-section" class="section">
+<h2 id="JavaScriptProcessing">13. JavaScript Synthesis and Processing</h2>
+
+<p class="norm">This section is informative.</p>
+
+<p>The Mozilla project has conducted <a
+href="https://wiki.mozilla.org/Audio_Data_API">Experiments</a> to synthesize
+and process audio directly in JavaScript. This approach is interesting for a
+certain class of audio processing and they have produced a number of impressive
+demos. This specification includes a means of synthesizing and processing
+directly using JavaScript by using a special subtype of <a
+href="#AudioNode-section"><code>AudioNode</code></a> called <a
+href="#ScriptProcessorNode-section"><code>ScriptProcessorNode</code></a>. </p>
+
+<p>Here are some interesting examples where direct JavaScript processing can be
+useful: </p>
+
+<h3 id="custom-DSP-effects">Custom DSP Effects</h3>
+
+<p>Unusual and interesting custom audio processing can be done directly in JS.
+It's also a good test-bed for prototyping new algorithms. This is an extremely
+rich area. </p>
+
+<h3 id="educational-applications">Educational Applications</h3>
+
+<p>JS processing is ideal for illustrating concepts in computer music synthesis
+and processing, such as showing the de-composition of a square wave into its
+harmonic components, FM synthesis techniques, etc. </p>
+
+<h3 id="javaScript-performance">JavaScript Performance</h3>
+
+<p>JavaScript has a variety of <a
+href="#JavaScriptPerformance-section">performance issues</a> so it is not
+suitable for all types of audio processing. The approach proposed in this
+document includes the ability to perform computationally intensive aspects of
+the audio processing (too expensive for JavaScript to compute in real-time)
+such as multi-source 3D spatialization and convolution in optimized C++ code.
+Both direct JavaScript processing and C++ optimized code can be combined due to
+the API's <a href="#ModularRouting-section">modular approach</a>. </p>
+
+<div id="Performance-section" class="section">
+<h2 id="Performance">15. Performance Considerations</h2>
+
+<div id="Latency-section" class="section">
+<h3 id="Latency">15.1. Latency: What it is and Why it's Important</h3>
+</div>
+<img alt="latency" src="images/latency.png" /> 
+
+<p>For web applications, the time delay between mouse and keyboard events
+(keydown, mousedown, etc.) and a sound being heard is important. </p>
+
+<p>This time delay is called latency and is caused by several factors (input
+device latency, internal buffering latency, DSP processing latency, output
+device latency, distance of user's ears from speakers, etc.), and is
+cumulative. The larger this latency is, the less satisfying the user's
+experience is going to be. In the extreme, it can make musical production or
+game-play impossible. At moderate levels it can affect timing and give the
+impression of sounds lagging behind or the game being non-responsive. For
+musical applications the timing problems affect rhythm. For gaming, the timing
+problems affect precision of gameplay. For interactive applications, it
+generally cheapens the user's experience much in the same way that very low
+animation frame-rates do. Depending on the application, a reasonable latency
+can be from as low as 3-6 milliseconds to 25-50 milliseconds. </p>
+
+<div id="Glitching-section" class="section">
+<h3 id="audio-glitching">15.2. Audio Glitching</h3>
+</div>
+
+<p>Audio glitches are caused by an interruption of the normal continuous audio
+stream, resulting in loud clicks and pops. It is considered to be a
+catastrophic failure of a multi-media system and must be avoided. It can be
+caused by problems with the threads responsible for delivering the audio stream
+to the hardware, such as scheduling latencies caused by threads not having the
+proper priority and time-constraints. It can also be caused by the audio DSP
+trying to do more work than is possible in real-time given the CPU's speed. </p>
+
+<h3 id="hardware-scalability">15.3. Hardware Scalability</h3>
+
+<p>The system should gracefully degrade to allow audio processing under
+resource constrained conditions without dropping audio frames. </p>
+
+<p>First of all, it should be clear that regardless of the platform, the audio
+processing load should never be enough to completely lock up the machine.
+Second, the audio rendering needs to produce a clean, un-interrupted audio
+stream without audible <a href="#Glitching-section">glitches</a>. </p>
+
+<p>The system should be able to run on a range of hardware, from mobile phones
+and tablet devices to laptop and desktop computers. But the more limited
+compute resources on a phone device make it necessary to consider techniques to
+scale back and reduce the complexity of the audio rendering. For example,
+voice-dropping algorithms can be implemented to reduce the total number of
+notes playing at any given time. </p>
+
+<p>Here's a list of some techniques which can be used to limit CPU usage: </p>
+
+<h4 id="CPU-monitoring">15.3.1. CPU monitoring</h4>
+
+<p>In order to avoid audio breakup, CPU usage must remain below 100%. </p>
+
+<p>The relative CPU usage can be dynamically measured for each AudioNode (and
+chains of connected nodes) as a percentage of the rendering time quantum. In a
+single-threaded implementation, overall CPU usage must remain below 100%. The
+measured usage may be used internally in the implementation for dynamic
+adjustments to the rendering. It may also be exposed through a
+<code>cpuUsage</code> attribute of <code>AudioNode</code> for use by
+JavaScript. </p>
+
+<p>In cases where the measured CPU usage is near 100% (or whatever threshold is
+considered too high), then an attempt to add additional <code>AudioNodes</code>
+into the rendering graph can trigger voice-dropping. </p>
+
+<h4 id="Voice-dropping">15.3.2. Voice Dropping</h4>
+
+<p>Voice-dropping is a technique which limits the number of voices (notes)
+playing at the same time to keep CPU usage within a reasonable range. There can
+either be an upper threshold on the total number of voices allowed at any given
+time, or CPU usage can be dynamically monitored and voices dropped when CPU
+usage exceeds a threshold. Or a combination of these two techniques can be
+applied. When CPU usage is monitored for each voice, it can be measured all the
+way from a source node through any effect processing nodes which apply
+uniquely to that voice. </p>
+
+<p>When a voice is "dropped", it needs to happen in such a way that it doesn't
+introduce audible clicks or pops into the rendered audio stream. One way to
+achieve this is to quickly fade-out the rendered audio for that voice before
+completely removing it from the rendering graph. </p>
+
+<p>When it is determined that one or more voices must be dropped, there are
+various strategies for picking which voice(s) to drop out of the total ensemble
+of voices currently playing. Here are some of the factors which can be used in
+combination to help with this decision: </p>
+<ul>
+  <li>Older voices, which have been playing the longest can be dropped instead
+    of more recent voices. </li>
+  <li>Quieter voices, which are contributing less to the overall mix may be
+    dropped instead of louder ones. </li>
+  <li>Voices which are consuming relatively more CPU resources may be dropped
+    instead of less "expensive" voices.</li>
+  <li>An AudioNode can have a <code>priority</code> attribute to help determine
+    the relative importance of the voices.</li>
+</ul>
+
+<h4 id="Simplification-of-Effects-Processing">15.3.3. Simplification of Effects
+Processing</h4>
+
+<p>Most of the effects described in this document are relatively inexpensive
+and will likely be able to run even on the slower mobile devices. However, the
+<a href="#ConvolverNode-section">convolution effect</a> can be configured with
+a variety of impulse responses, some of which will likely be too heavy for
+mobile devices. Generally speaking, CPU usage scales with the length of the
+impulse response and the number of channels it has. Thus, it is reasonable to
+consider that impulse responses which exceed a certain length will not be
+allowed to run. The exact limit can be determined based on the speed of the
+device. Instead of outright rejecting convolution with these long responses, it
+may be interesting to consider truncating the impulse responses to the maximum
+allowed length and/or reducing the number of channels of the impulse response.
+</p>
+
+<p>In addition to the convolution effect, the <a
+href="#PannerNode-section"><code>PannerNode</code></a> may also be
+expensive if using the HRTF panning model. For slower devices, a cheaper
+algorithm such as EQUALPOWER can be used to conserve compute resources. </p>
+
+<h4 id="Sample-rate">15.3.4. Sample Rate</h4>
+
+<p>For very slow devices, it may be worth considering running the rendering at
+a lower sample-rate than normal. For example, the sample-rate can be reduced
+from 44.1KHz to 22.05KHz. This decision must be made when the
+<code>AudioContext</code> is created, because changing the sample-rate
+on-the-fly can be difficult to implement and will result in audible glitching
+when the transition is made. </p>
+
+<h4 id="pre-flighting">15.3.5. Pre-flighting</h4>
+
+<p>It should be possible to invoke some kind of "pre-flighting" code (through
+JavaScript) to roughly determine the power of the machine. The JavaScript code
+can then use this information to scale back any more intensive processing it
+may normally run on a more powerful machine. Also, the underlying
+implementation may be able to factor in this information in the voice-dropping
+algorithm. </p>
+
+<p><span class="ednote">TODO: add specification and more detail here </span></p>
+
+<h4 id="Authoring-for-different-user-agents">15.3.6. Authoring for different
+user agents</h4>
+JavaScript code can use information about user-agent to scale back any more
+intensive processing it may normally run on a more powerful machine. 
+
+<h4 id="Scalability-of-Direct-JavaScript-Synthesis">15.3.7. Scalability of
+Direct JavaScript Synthesis / Processing</h4>
+
+<p>Any audio DSP / processing code done directly in JavaScript should also be
+concerned about scalability. To the extent possible, the JavaScript code itself
+needs to monitor CPU usage and scale back any more ambitious processing when
+run on less powerful devices. If it's an "all or nothing" type of processing,
+then user-agent check or pre-flighting should be done to avoid generating an
+audio stream with audio breakup. </p>
+
+<div id="JavaScriptPerformance-section" class="section">
+<h3 id="JavaScriptPerformance">15.4. JavaScript Issues with real-time
+Processing and Synthesis: </h3>
+</div>
+While processing audio in JavaScript, it is extremely challenging to get
+reliable, glitch-free audio while achieving a reasonably low-latency,
+especially under heavy processor load. 
+<ul>
+  <li>JavaScript is very much slower than heavily optimized C++ code and is not
+    able to take advantage of SSE optimizations and multi-threading which is
+    critical for getting good performance on today's processors. Optimized
+    native code can be on the order of twenty times faster for processing FFTs
+    as compared with JavaScript. It is not efficient enough for heavy-duty
+    processing of audio such as convolution and 3D spatialization of large
+    numbers of audio sources. </li>
+  <li>setInterval() and XHR handling will steal time from the audio processing.
+    In a reasonably complex game, some JavaScript resources will be needed for
+    game physics and graphics. This creates challenges because audio rendering
+    is deadline driven (to avoid glitches and get low enough latency).</li>
+  <li>JavaScript does not run in a real-time processing thread and thus can be
+    pre-empted by many other threads running on the system.</li>
+  <li>Garbage Collection (and autorelease pools on Mac OS X) can cause
+    unpredictable delay on a JavaScript thread. </li>
+  <li>Multiple JavaScript contexts can be running on the main thread, stealing
+    time from the context doing the processing. </li>
+  <li>Other code (other than JavaScript) such as page rendering runs on the
+    main thread. </li>
+  <li>Locks can be taken and memory is allocated on the JavaScript thread. This
+    can cause additional thread preemption. </li>
+</ul>
+The problems are even more difficult with today's generation of mobile devices
+which have processors with relatively poor performance and power consumption /
+battery-life issues. <br />
+<br />
+
+
+<div id="ExampleApplications-section" class="section">
+<h2 id="ExampleApplications">16. Example Applications</h2>
+
+<p class="norm">This section is informative.</p>
+
+<p>Please see the <a
+href="http://chromium.googlecode.com/svn/trunk/samples/audio/index.html">demo</a>
+page for working examples. </p>
+
+<p>Here are some of the types of applications a web audio system should be able
+to support: </p>
+
+<h3 id="basic-sound-playback">Basic Sound Playback</h3>
+
+<p>Simple and <a href="#Latency-section"><strong>low-latency</strong></a>
+playback of sound effects in response to simple user actions such as mouse
+click, roll-over, key press. </p>
+<br />
+
+
+<h3 id="threeD-environmentse-and-games">3D Environments and Games</h3>
+<img alt="quake" src="http://payload48.cargocollective.com/1/2/66805/3278334/redteam_680.jpg" />
+<br />
+<br />
+
+
+<p>Electronic Arts has produced an impressive immersive game called
+ <a href="http://sophie-lu.com/Strike-Fortress-EA">Strike Fortress</a>,
+taking advantage of 3D spatialization and convolution for room simulation.</p>
+
+<img alt="beach demo" src="images/beach-demo.png" />
+
+<p>3D environments with audio are common in games made for desktop applications
+and game consoles. Imagine a 3D island environment with spatialized audio,
+seagulls flying overhead, the waves crashing against the shore, the crackling
+of the fire, the creaking of the bridge, and the rustling of the trees in the
+wind. The sounds can be positioned naturally as one moves through the scene.
+Even going underwater, low-pass filters can be tweaked for just the right
+underwater sound. </p>
+<br />
+<br />
+<img alt="box2d" src="images/box2d.png" /> <img alt="8-ball"
+src="images/8-ball.png" /> <br />
+<br />
+
+
+<p><a href="http://box2d.org/">Box2D</a> is an interesting open-source
+library for 2D game physics. It has various implementations, including one
+based on Canvas 2D. A demo has been created with dynamic sound effects for each
+of the object collisions, taking into account the velocities vectors and
+positions to spatialize the sound events, and modulate audio effect parameters
+such as filter cutoff. </p>
+
+<p>A virtual pool game with multi-sampled sound effects has also been created.
+</p>
+<br />
+
+
+<h3 id="musical-applications">Musical Applications</h3>
+<img alt="garageband" src="images/garage-band.png" /> <img
+alt="shiny drum machine" src="images/shiny-drum-machine.png" /> <img
+alt="tonecraft" src="images/tonecraft.png" /> <br />
+<br />
+Many music composition and production applications are possible. Applications
+requiring tight scheduling of audio events can be implemented and can be both
+educational and entertaining. Drum machines, digital DJ applications, and even
+timeline-based digital music production software with some of the features of
+<a href="http://en.wikipedia.org/wiki/GarageBand">GarageBand</a> can be
+written. <br />
+<br />
+
+
+<h3 id="music-visualizers">Music Visualizers</h3>
+<img alt="music visualizer" src="images/music-visualizer.png" /> <br />
+<br />
+When combined with WebGL GLSL shaders, realtime analysis data can be presented
+in entertaining ways. These can be as advanced as any found in iTunes. <br />
+<br />
+
+
+<h3 id="educational-applications_2">Educational Applications</h3>
+<img alt="javascript processing" src="images/javascript-processing.png" /> 
+
+<p>A variety of educational applications can be written, illustrating concepts
+in music theory and computer music synthesis and processing. </p>
+<br />
+
+
+<h3 id="artistic-audio-exploration">Artistic Audio Exploration</h3>
+
+<p>There are many creative possibilities for artistic sonic environments for
+installation pieces. </p>
+<br />
+</div>
+
+<div id="SecurityConsiderations-section" class="section">
+<h2 id="SecurityConsiderations">17. Security Considerations</h2>
+
+<p>This section is <em>informative.</em> </p>
+</div>
+
+<div id="PrivacyConsiderations-section" class="section">
+<h2 id="PrivacyConsiderations">18. Privacy Considerations</h2>
+
+<p>This section is <em>informative</em>. When giving various information on
+available AudioNodes, the Web Audio API potentially exposes information on
+characteristic features of the client (such as audio hardware sample-rate) to
+any page that makes use of the AudioNode interface. Additionally, timing
+information can be collected through the RealtimeAnalyzerNode or
+ScriptProcessorNode interface. The information could subsequently be used to
+create a fingerprint of the client. </p>
+
+<p>Currently audio input is not specified in this document, but it will involve
+gaining access to the client machine's audio input or microphone. This will
+require asking the user for permission in an appropriate way, probably via the
+<a href="http://developers.whatwg.org/">getUserMedia()
+API</a>. </p>
+</div>
+
+<div id="requirements-section" class="section">
+<h2 id="requirements">19. Requirements and Use Cases</h2>
+
+<p>Please see <a href="#ExampleApplications-section">Example Applications</a>
+</p>
+</div>
+
+<div id="oldnames-section" class="section">
+<h2 id="OldNames">20. Old Names</h2>
+
+<p class="norm">This section is informative.</p>
+
+<p>Some method and attribute names have been improved during API review.
+The new names are described in the main body of this specification in the
+description for each node type, etc.  Here's a description of the older names
+to help content authors migrate to the latest spec.  Note that the partial
+interfaces are not normative and are only descriptive: 
+</p>
+<blockquote>
+<pre>
+
+partial interface <dfn>AudioBufferSourceNode</dfn> {
+    // Same as start()
+    void noteOn(double when);
+    void noteGrainOn(double when, double grainOffset, double grainDuration);
+    
+    // Same as stop()
+    void noteOff(double when);
+};
+
+partial interface <dfn>AudioContext</dfn> {
+    // Same as createGain()
+    GainNode createGainNode();
+    
+    // Same as createDelay()
+    DelayNode createDelayNode(optional double maxDelayTime = 1.0);
+
+    // Same as createScriptProcessor()
+    ScriptProcessorNode createJavaScriptNode(optional unsigned long bufferSize = 0,
+                                             optional unsigned long numberOfInputChannels = 2,
+                                             optional unsigned long numberOfOutputChannels = 2);
+};
+
+partial interface <dfn>OscillatorNode</dfn> {
+    // Same as start()
+    void noteOn(double when);
+    
+    // Same as stop()
+    void noteOff(double when);
+};
+
+partial interface <dfn>AudioParam</dfn> {
+    // Same as setTargetAtTime()
+    void setTargetValueAtTime(float target, double startTime, double timeConstant);
+};
+
+</pre>
+</blockquote>
+
+<p>Some attributes taking constant values have changed during API review.
+The old way used integer values, while the new way uses Web IDL string values.
+</p>
+
+<blockquote>
+<pre>
+// PannerNode constants for the .panningModel attribute
+
+// Old way
+const unsigned short EQUALPOWER = 0;
+const unsigned short HRTF = 1;
+
+// New way
+enum <dfn>PanningModelType</dfn> {
+  "equalpower",
+  "HRTF"
+};
+</pre>
+</blockquote>
+
+<blockquote>
+<pre>
+// PannerNode constants for the .distanceModel attribute
+
+// Old way
+const unsigned short LINEAR_DISTANCE = 0;
+const unsigned short INVERSE_DISTANCE = 1;
+const unsigned short EXPONENTIAL_DISTANCE = 2;
+
+// New way
+enum <dfn>DistanceModelType</dfn> {
+  "linear",
+  "inverse",
+  "exponential"
+};
+</pre>
+</blockquote>
+
+
+
+<blockquote>
+<pre>
+// BiquadFilterNode constants for the .type attribute
+
+// Old way
+const unsigned short LOWPASS = 0;
+const unsigned short HIGHPASS = 1;
+const unsigned short BANDPASS = 2;
+const unsigned short LOWSHELF = 3;
+const unsigned short HIGHSHELF = 4;
+const unsigned short PEAKING = 5;
+const unsigned short NOTCH = 6;
+const unsigned short ALLPASS = 7;
+
+// New way
+enum <dfn>BiquadFilterType</dfn> {
+  "lowpass",
+  "highpass",
+  "bandpass",
+  "lowshelf",
+  "highshelf",
+  "peaking",
+  "notch",
+  "allpass"
+};
+</pre>
+</blockquote>
+
+<blockquote>
+<pre>
+// OscillatorNode constants for the .type attribute
+
+// Old way
+const unsigned short SINE = 0;
+const unsigned short SQUARE = 1;
+const unsigned short SAWTOOTH = 2;
+const unsigned short TRIANGLE = 3;
+const unsigned short CUSTOM = 4;
+
+// New way
+enum <dfn>OscillatorType</dfn> {
+  "sine",
+  "square",
+  "sawtooth",
+  "triangle",
+  "custom"
+};
+</pre>
+</blockquote>
+
+
+
+</div>
+
+</div>
+</div>
+
+<div class="appendix section" id="references">
+<h2 id="L17310">A. References</h2>
+
+<div class="section" id="normative-references">
+<h3 id="Normative-references">A.1 Normative references</h3>
+<dl>
+  <dt id="DOM">[DOM] </dt>
+  <dd><a href="http://dom.spec.whatwg.org/">DOM</a>,
+      A. van Kesteren, A. Gregor, Ms2ger. WHATWG.</dd>
+  <dt id="HTML">[HTML] </dt>
+  <dd><a href="http://www.whatwg.org/specs/web-apps/current-work/multipage/">HTML</a>,
+      I. Hickson. WHATWG.</dd>
+  <dt id="RFC2119">[RFC2119] </dt>
+    <dd>S. Bradner. <a
+      href="http://www.ietf.org/rfc/rfc2119.txt"><cite><span>Key words for use
+      in RFCs to Indicate Requirement Levels.</span></cite></a> Internet RFC
+      2119. URL: <a
+      href="http://www.ietf.org/rfc/rfc2119.txt">http://www.ietf.org/rfc/rfc2119.txt</a>
+    </dd>
+</dl>
+</div>
+
+<div class="section" id="informative-references">
+<h3 id="Informative-references">A.2 Informative references</h3>
+
+<p>No informative references.</p>
+</div>
+</div>
+
+<div class="section" id="acknowledgements">
+<h2 id="L17335">B. Acknowledgements</h2>
+
+<p>Special thanks to the W3C <a href="http://www.w3.org/2011/audio/">Audio
+Working Group</a>. Members of the Working Group are (at the time of writing,
+and by alphabetical order): <br />
+Berkovitz, Joe (public Invited expert);Cardoso, Gabriel (INRIA);Carlson, Eric
+(Apple, Inc.);Gregan, Matthew (Mozilla Foundation);Jägenstedt, Philip (Opera
+Software);Kalliokoski, Jussi (public Invited expert);Lowis, Chris (British
+Broadcasting Corporation);MacDonald, Alistair (W3C Invited Experts);Michel,
+Thierry (W3C/ERCIM);Noble, Jer (Apple, Inc.);O'Callahan, Robert(Mozilla
+Foundation);Paradis, Matthew (British Broadcasting Corporation);Raman, T.V.
+(Google, Inc.);Rogers, Chris (Google, Inc.);Schepers, Doug (W3C/MIT);Shires,
+Glen (Google, Inc.);Smith, Michael (W3C/Keio);Thereaux, Olivier (British
+Broadcasting Corporation);Wei, James (Intel Corporation);Wilson, Chris (Google,
+Inc.); </p>
+</div>
+
+<div class="section" id="ChangeLog-section">
+<h2 id="ChangeLog">C. Web Audio API Change Log</h2>
+<pre>
+user:        crogers
+date:        Sun Dec 09 17:13:56 2012 -0800
+summary:     Basic description of OfflineAudioContext
+
+user:        crogers
+date:        Tue Dec 04 15:59:30 2012 -0800
+summary:     minor correction to wording for minValue and maxValue
+
+user:        crogers
+date:        Tue Dec 04 15:49:29 2012 -0800
+summary:     Bug 20161: Make decodeAudioData neuter its array buffer argument when it begins decoding a buffer, and bring it back to normal when the decoding is finished
+
+user:        crogers
+date:        Tue Dec 04 15:35:17 2012 -0800
+summary:     Bug 20039: Refine description of audio decoding
+
+user:        crogers
+date:        Tue Dec 04 15:23:07 2012 -0800
+summary:     elaborate on decoding steps for AudioContext createBuffer() and decodeAudioData()
+
+user:        crogers
+date:        Tue Dec 04 14:56:19 2012 -0800
+summary:     Bug 19770: Note that if the last event for an AudioParam is a setCurveValue event, the computed value after that event will be equal to the latest curve value
+
+user:        crogers
+date:        Tue Dec 04 14:48:04 2012 -0800
+summary:     Bug 19769: Note that before the first automation event, the computed AudioParam value will be AudioParam.value
+
+user:        crogers
+date:        Tue Dec 04 14:40:51 2012 -0800
+summary:     Bug 19768: Explicitly mention that the initial value of AudioParam.value will be defaultValue
+
+user:        crogers
+date:        Tue Dec 04 14:35:59 2012 -0800
+summary:     Bug 19767: Explicitly mention that the 2nd component of AudioParam.computedValue will be 0 if there are no AudioNodes connected to it
+
+user:        crogers
+date:        Tue Dec 04 14:30:08 2012 -0800
+summary:     Bug 19764: Note in the spec that AudioParam.minValue/maxValue are merely informational
+
+user:        crogers
+date:        Mon Dec 03 18:03:13 2012 -0800
+summary:     Convert integer constants to Web IDL enum string constants
+
+user:        crogers
+date:        Mon Dec 03 15:19:22 2012 -0800
+summary:     Bug 17411: (AudioPannerNodeUnits): AudioPannerNode units are underspecified
+
+user:        Ehsan Akhgari (Mozilla)
+date:        Thu Nov 29 15:59:38 2012 -0500
+summary:     Change the Web IDL description of decodeAudioData arguments
+
+user:        crogers
+date:        Wed Nov 14 13:24:01 2012 -0800
+summary:     Bug 17393: (UseDoubles): float/double inconsistency
+
+user:        crogers
+date:        Wed Nov 14 13:16:57 2012 -0800
+summary:     Bug 17356: (AudioListenerOrientation): AudioListener.setOrientation vectors
+
+user:        crogers
+date:        Wed Nov 14 12:56:06 2012 -0800
+summary:     Bug 19957: PannerNode.coneGain is unused
+
+user:        crogers
+date:        Wed Nov 14 12:40:46 2012 -0800
+summary:     Bug 17412: AudioPannerNodeVectorNormalization): AudioPannerNode orientation normalization unspecified
+
+user:        crogers
+date:        Wed Nov 14 12:16:41 2012 -0800
+summary:     Bug 17411: (AudioPannerNodeUnits): AudioPannerNode units are underspecified
+
+user:        crogers
+date:        Tue Nov 13 16:14:22 2012 -0800
+summary:     be more explicit about maxDelayTime units
+
+user:        crogers
+date:        Tue Nov 13 16:02:50 2012 -0800
+summary:     Bug 19766: Clarify that reading AudioParam.computedValue will return the latest computed value for the latest audio quantum
+
+user:        crogers
+date:        Tue Nov 13 15:47:25 2012 -0800
+summary:     Bug 19872: Should specify the defaults for PannerNode's position, ...
+
+user:        crogers
+date:        Tue Nov 13 15:27:53 2012 -0800
+summary:     Bug 17390: (Joe Berkovitz): Loop start/stop points
+
+user:        croger
+date:        Tue Nov 13 14:49:20 2012 -0800
+summary:     Bug 19765: Note that setting AudioParam.value will be ignored when any automation events have been set on the object
+
+user:        crogers
+date:        Tue Nov 13 14:39:07 2012 -0800
+summary:     Bug 19873: Clarify PannerNode.listener
+
+user:        crogers
+date:        Tue Nov 13 13:35:21 2012 -0800
+summary:     Bug 19900: Clarify the default values for the AudioParam attributes of BiquadFilterNode
+
+user:        crogers
+date:        Tue Nov 13 13:06:38 2012 -0800
+summary:     Bug 19884: Specify the default value and ranges for the DynamicsCompressorNode AudioParam members
+
+user:        crogers
+date:        Tue Nov 13 12:57:02 2012 -0800
+summary:     Bug 19910: Disallow AudioContext.createDelay(max) where max &lt;= 0
+
+user:        crogers
+date:        Mon Nov 12 12:02:18 2012 -0800
+summary:     Add example code for more complex example
+
+user:        Ehsan Akhgari (Mozilla)
+date:        Thu Nov 01 11:32:39 2012 -0400
+summary:     Specify the default value for the AudioContext.createDelay() optional argument in Web IDL
+
+user:        Ehsan Akhgari (Mozilla)
+date:        Tue Oct 30 20:29:48 2012 -0400
+summary:     Mark the AudioParam members as readonly
+
+user:        Ehsan Akhgari (Mozilla)
+date:        Tue Oct 30 20:24:52 2012 -0400
+summary:     Make GainNode and DelayNode valid Web IDL
+
+user:        crogers
+date:        Mon Oct 29 14:29:23 2012 -0700
+summary:     consolidate AudioBufferSourceNode start() method
+
+user:        crogers
+date:        Fri Oct 19 15:15:28 2012 -0700
+summary:     Bug 18332: Node creation method naming inconsistencies
+
+user:        crogers
+date:        Mon Oct 15 17:22:54 2012 -0700
+summary:     Bug 17407: Interface naming inconsistency
+
+user:        crogers
+date:        Tue Oct 09 17:21:19 2012 -0700
+summary:     Bug 17369: Oscillator.detune attribute not defined
+
+user:        crogers
+date:        Tue Oct 09 16:08:50 2012 -0700
+summary:     Bug 17346: HTMLMediaElement integration
+
+user:        crogers
+date:        Tue Oct 09 15:20:50 2012 -0700
+summary:     Bug 17354: AudioListener default position, orientation and velocity
+
+user:        crogers
+date:        Tue Oct 09 15:02:04 2012 -0700
+summary:     Bug 17795: Behavior of multiple connections to same node needs to be explicitly defined
+
+user:        crogers
+date:        Mon Oct 08 13:18:45 2012 -0700
+summary:     Add missing AudioContext.createWaveShaper() method
+
+user:        crogers
+date:        Fri Oct 05 18:13:44 2012 -0700
+summary:     Bug 17399: AudioParam sampling is undefined
+
+user:        crogers
+date:        Fri Oct 05 17:41:52 2012 -0700
+summary:     Bug 17386: Realtime Analysis empty section
+
+user:        crogers
+date:        Fri Oct 05 17:38:14 2012 -0700
+summary:     minor tweak to down-mix section
+
+user:        crogers
+date:        Fri Oct 05 17:35:05 2012 -0700
+summary:     Bug 17380: Channel down mixing incomplete
+
+user:        crogers
+date:        Fri Oct 05 15:40:57 2012 -0700
+summary:     Bug 17375: MixerGainStructure should be marked as informative
+
+user:        crogers
+date:        Fri Oct 05 14:29:20 2012 -0700
+summary:     Bug 17381: (EventScheduling): Event Scheduling ('Need more detail here')
+
+user:        crogers
+date:        Fri Oct 05 13:12:46 2012 -0700
+summary:     Fix 18663: Need a method to get a readonly reading of the combined value when using AudioParam automation curve
+
+user:        crogers
+date:        Fri Oct 05 12:48:36 2012 -0700
+summary:     Fix 18662: Setting audioparam value while there is an automation curve will cancel that automation curve and set value immediately
+
+user:        crogers
+date:        Fri Oct 05 12:26:28 2012 -0700
+summary:     Fix 18661: Use startTime / endTime parameter names for AudioParam automation methods
+
+user:        crogers
+date:        Wed Oct 03 12:26:39 2012 -0700
+summary:     Specify default value for .distanceModel
+
+user:        crogers
+date:        Tue Oct 02 12:33:36 2012 -0700
+summary:     Fix Issues 17338 and 17337: AudioGain interface is not needed (Part 2)
+
+user:        crogers
+date:        Tue Oct 02 12:28:55 2012 -0700
+summary:     Fix Issues 17338 and 17337: AudioGain interface is not needed
+
+user:        Ehsan Akhgari (Mozilla)
+date:        Wed Sep 26 18:22:36 2012 -0400
+summary:     Make AudioBufferSourceNode.buffer nullable
+
+user:        crogers
+date:        Tue Sep 25 12:56:14 2012 -0700
+summary:     noteOn/noteOff changed to start/stop -- added deprecation notes
+
+user:        Ehsan Akhgari (Mozilla)
+date:        Fri Aug 24 18:27:29 2012 -0400
+summary:     Make the AudioContext object have a constructor
+
+user:        Ehsan Akhgari (Mozilla)
+date:        Fri Aug 24 15:54:10 2012 -0400
+summary:     Denote IDL definitions as Web IDL
+
+user:        Ehsan Akhgari (Mozilla)
+date:        Fri Aug 24 15:04:37 2012 -0400
+summary:     Use `long` instead of `int`, since the int type doesn't exist in Web IDL
+
+user:        Ehsan Akhgari (Mozilla)
+date:        Fri Aug 24 15:02:43 2012 -0400
+summary:     Add a missing attribute keyword in AudioProcessingEvent
+
+user:        Ehsan Akhgari (Mozilla)
+date:        Tue Aug 21 15:36:48 2012 -0400
+summary:     Remove the 'raises' notation from the IDLs
+
+user:        crogers
+date:        Thu Aug 16 16:30:55 2012 -0700
+summary:     Issue 17398: Add more detailed information about how AudioParam value is calculated
+
+user:        crogers
+date:        Thu Aug 16 15:21:38 2012 -0700
+summary:     another try with the style sheet
+
+user:        crogers
+date:        Thu Aug 16 14:53:54 2012 -0700
+summary:     use local style sheet to avoid https errors
+
+user:        Ehsan Akhgari (Mozilla)
+date:        Wed Aug 15 23:05:49 2012 -0400
+summary:     Replace the white-space based indentation of Web IDL code with a CSS-based one
+
+user:        Ehsan Akhgari (Mozilla)
+date:        Wed Aug 15 22:56:03 2012 -0400
+summary:     Remove more useless trailing whitespaces
+
+user:        Ehsan Akhgari (Mozilla)
+date:        Wed Aug 15 22:47:21 2012 -0400
+summary:     Remove the optional 'in' keyword from the Web IDL method declarations
+
+user:        Ehsan Akhgari (Mozilla)
+date:        Wed Aug 15 22:42:03 2012 -0400
+summary:     Add trailing semicolons for Web IDL interface declarations
+
+user:        Ehsan Akhgari (Mozilla)
+date:        Wed Aug 15 22:37:32 2012 -0400
+summary:     Remove useless trailing spaces
+
+user:        Ehsan Akhgari (Mozilla)
+date:        Wed Aug 15 22:35:33 2012 -0400
+summary:     Use the correct Web IDL notation for the AudioBufferCallback callback type
+
+user:        Ehsan Akhgari (Mozilla)
+date:        Wed Aug 15 22:28:37 2012 -0400
+summary:     Remove the extra semicolon in the IDL file for AudioContext
+
+user:        Ehsan Akhgari (Mozilla)
+date:        Wed Aug 15 22:24:02 2012 -0400
+summary:     Replace the old [Optional] IDL tag with the Web IDL optional keyword
+
+user:        Ehsan Akhgari (Mozilla)
+date:        Tue Aug 14 10:18:19 2012 -0400
+summary:     Empty changeset to test my commit access
+
+date:        Mon Aug 13 13:26:52 2012 -0700
+* Integrate Thierry Michel's 3rd public working draft edits
+
+date:        Tue Jun 26 15:56:31 2012 -0700
+* add MediaStreamAudioSourceNode
+
+date:        Mon Jun 18 13:26:21 2012 -0700
+* minor formatting fix
+
+date:        Mon Jun 18 13:19:34 2012 -0700
+* Add details for azimuth/elevation calculation
+
+date:        Fri Jun 15 17:35:27 2012 -0700
+* Add equal-power-panning details
+
+date:        Thu Jun 14 17:31:16 2012 -0700
+* Add equations for distance models
+
+date:        Wed Jun 13 17:40:49 2012 -0700
+* Bug 17334: Add precise equations for AudioParam.setTargetValueAtTime()
+
+date:        Fri Jun 08 17:44:26 2012 -0700
+* fix small typo
+
+date:        Fri Jun 08 16:54:04 2012 -0700
+* Bug 17413: AudioBuffers' relationship to AudioContext
+
+date:        Fri Jun 08 16:05:45 2012 -0700
+* Bug 17359: Add much more detail about ConvolverNode
+
+date:        Fri Jun 08 12:59:29 2012 -0700
+* minor formatting fix
+
+date:        Fri Jun 08 12:57:11 2012 -0700
+* Bug 17335: Add much more technical detail to setValueCurveAtTime()
+
+date:        Wed Jun 06 16:34:43 2012 -0700
+*Add much more detail about parameter automation, including an example
+
+date:        Mon Jun 04 17:25:08 2012 -0700
+* ISSUE-85: OscillatorNode folding considerations
+
+date:        Mon Jun 04 17:02:20 2012 -0700
+* ISSUE-45: AudioGain scale underdefined
+
+date:        Mon Jun 04 16:40:43 2012 -0700
+* ISSUE-41: AudioNode as input to AudioParam underdefined
+
+date:        Mon Jun 04 16:14:48 2012 -0700
+* ISSUE-20: Relationship to currentTime
+
+date:        Mon Jun 04 15:48:49 2012 -0700
+* ISSUE-94: Dynamic Lifetime
+
+date:        Mon Jun 04 13:59:31 2012 -0700
+* ISSUE-42: add more detail about AudioParam sampling and block processing
+
+date:        Mon Jun 04 12:28:48 2012 -0700
+* fix typo - minor edits
+
+date:        Thu May 24 18:01:20 2012 -0700
+* ISSUE-69: add implementors guide for linear convolution
+
+date:        Thu May 24 17:35:45 2012 -0700
+* ISSUE-49: better define AudioBuffer audio data access
+
+date:        Thu May 24 17:15:29 2012 -0700
+* fix small typo
+
+date:        Thu May 24 17:13:34 2012 -0700
+* ISSUE-24: define circular routing behavior
+
+date:        Thu May 24 16:35:24 2012 -0700
+* ISSUE-42: specify a-rate or k-rate for each AudioParam
+
+date:        Fri May 18 17:01:36 2012 -0700
+* ISSUE-53: noteOn and noteOff interaction
+
+date:        Fri May 18 16:33:29 2012 -0700
+* ISSUE-34: Remove .name attribute from AudioParam
+
+date:        Fri May 18 16:27:19 2012 -0700
+* ISSUE-33: Add maxNumberOfChannels attribute to AudioDestinationNode
+
+date:        Fri May 18 15:50:08 2012 -0700
+* ISSUE-19: added more info about AudioBuffer - IEEE 32-bit
+
+date:        Fri May 18 15:37:27 2012 -0700
+* ISSUE-29: remove reference to webkitAudioContext
+
+date:        Fri Apr 27 12:36:54 2012 -0700
+* fix two small typos reported by James Wei
+
+date:        Tue Apr 24 12:27:11 2012 -0700
+* small cleanup to ChannelSplitterNode and ChannelMergerNode
+
+date:        Tue Apr 17 11:35:56 2012 -0700
+* small fix to createWaveTable()
+
+date:        Tue Apr 13 2012
+* Cleanup AudioNode connect() and disconnect() method descriptions.
+* Add AudioNode connect() to AudioParam method.
+
+date:        Tue Apr 13 2012
+* Add OscillatorNode and WaveTable
+* Define default values for optional arguments in createJavaScriptNode(), createChannelSplitter(), createChannelMerger()
+* Define default filter type for BiquadFilterNode as LOWPASS
+
+date:        Tue Apr 11 2012
+* add AudioContext .activeSourceCount attribute
+* createBuffer() methods can throw exceptions
+* add AudioContext method createMediaElementSource()
+* update AudioContext methods createJavaScriptNode() (clean up description of parameters)
+* update AudioContext method createChannelSplitter() (add numberOfOutputs parameter)
+* update AudioContext method createChannelMerger() (add numberOfInputs parameter)
+* update description of out-of-bounds AudioParam values (exception will not be thrown)
+* remove AudioBuffer .gain attribute
+* remove AudioBufferSourceNode .gain attribute
+* remove AudioListener .gain attribute
+* add AudioBufferSourceNode .playbackState attribute and state constants
+* AnalyserNode no longer requires its output be connected to anything
+* update ChannelMergerNode section describing numberOfOutputs (defaults to 6 but settable in constructor)
+* update ChannelSplitterNode section describing numberOfInputs (defaults to 6 but settable in constructor)
+* add note in Spatialization sections about potential to get arbitrary convolution matrixing
+
+date:        Tue Apr 10 2012
+* Rebased editor's draft document based on edits from Thierry Michel (from 2nd public working draft).
+
+date:        Tue Mar 13 12:13:41 2012 -0100
+*  fixed all the HTML errors
+*  added ids to all Headings
+*  added alt attribute to all img
+*  fix broken anchors
+*  added a new status of this document section
+*  added mandatory spec headers
+*  generated a new table of content
+*  added a Reference section
+*  added an Acknowledgments section
+*  added a Web Audio API Change Log 
+
+date:        Fri Mar 09 15:12:42 2012 -0800
+* add optional maxDelayTime argument to createDelay()
+* add more detail about playback state to AudioBufferSourceNode
+* upgrade noteOn(), noteGrainOn(), noteOff() times to double from float
+
+date:        Mon Feb 06 16:52:39 2012 -0800
+* Cleanup ScriptProcessorNode section
+* Add distance model constants for PannerNode according to the OpenAL spec
+* Add .normalize attribute to ConvolverNode
+* Add getFrequencyResponse() method to BiquadFilterNode
+* Tighten up the up-mix equations
+
+date:        Fri Nov 04 15:40:58 2011 -0700
+summary:     Add more technical detail to BiquadFilterNode description (contributed by Raymond Toy)
+
+date:        Sat Oct 15 19:08:15 2011 -0700
+summary:     small edits to the introduction
+
+date:        Sat Oct 15 19:00:15 2011 -0700
+summary:     initial commit
+
+date:        Tue Sep 13 12:49:11 2011 -0700
+summary:     add convolution reverb design document
+
+date:        Mon Aug 29 17:05:58 2011 -0700
+summary:     document the decodeAudioData() method
+
+date:        Mon Aug 22 14:36:33 2011 -0700
+summary:     fix broken MediaElementAudioSourceNode link
+
+date:        Mon Aug 22 14:33:57 2011 -0700
+summary:     refine section describing integration with HTMLMediaElement
+
+date:        Mon Aug 01 12:05:53 2011 -0700
+summary:     add Privacy section
+
+date:        Mon Jul 18 17:53:50 2011 -0700
+summary:     small update - tweak musical applications thumbnail images
+
+date:        Mon Jul 18 17:23:00 2011 -0700
+summary:     initial commit of Web Audio API specification</pre>
+</div>
+</body>
+</html>
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-analysernode-interface/.gitkeep b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-analysernode-interface/.gitkeep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-analysernode-interface/.gitkeep
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audiobuffer-interface/.gitkeep b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audiobuffer-interface/.gitkeep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audiobuffer-interface/.gitkeep
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audiobuffer-interface/idl-test.html b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audiobuffer-interface/idl-test.html
new file mode 100644
index 0000000..72ed88d
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audiobuffer-interface/idl-test.html
@@ -0,0 +1,107 @@
+<!DOCTYPE html>
+<html class="a">
+<head>
+<title>AudioBuffer IDL Test</title>
+<script src="/resources/testharness.js"></script><script src="/resources/testharnessreport.js"></script><script src="/resources/idlharness.js"></script><script src="/resources/WebIDLParser.js"></script><script src="/webaudio/js/lodash.js"></script><script src="/webaudio/js/vendor-prefixes.js"></script><script src="/webaudio/js/helpers.js"></script><style type="text/css">
+    #event-target-idl,
+    #audio-context-idl
+    { visibility:hidden; height: 0px;}
+  </style>
+</head>
+<body class="a">
+
+   <pre id="event-target-idl">interface EventTarget {
+  void addEventListener(DOMString type, EventListener? callback, optional boolean capture = false);
+  void removeEventListener(DOMString type, EventListener? callback, optional boolean capture = false);
+  boolean dispatchEvent(Event event);
+};
+
+/*
+callback interface EventListener {
+  void handleEvent(Event event);
+};
+*/
+// Callback interfaces are not supported yet, but that's ok
+interface EventListener {};
+</pre>
+
+   <pre id="audio-context-idl">callback DecodeSuccessCallback = void (AudioBuffer decodedData);
+callback DecodeErrorCallback = void ();
+
+[Constructor]
+interface AudioContext : EventTarget {
+
+    readonly attribute AudioDestinationNode destination;
+    readonly attribute float sampleRate;
+    readonly attribute double currentTime;
+    readonly attribute AudioListener listener;
+
+    AudioBuffer createBuffer(unsigned long numberOfChannels, unsigned long length, float sampleRate);
+
+    void decodeAudioData(ArrayBuffer audioData,
+                         DecodeSuccessCallback successCallback,
+                         optional DecodeErrorCallback errorCallback);
+
+
+    // AudioNode creation
+    AudioBufferSourceNode createBufferSource();
+
+    MediaElementAudioSourceNode createMediaElementSource(HTMLMediaElement mediaElement);
+
+    MediaStreamAudioSourceNode createMediaStreamSource(MediaStream mediaStream);
+    MediaStreamAudioDestinationNode createMediaStreamDestination();
+
+    ScriptProcessorNode createScriptProcessor(optional unsigned long bufferSize = 0,
+                                              optional unsigned long numberOfInputChannels = 2,
+                                              optional unsigned long numberOfOutputChannels = 2);
+
+    AnalyserNode createAnalyser();
+    GainNode createGain();
+    DelayNode createDelay(optional double maxDelayTime = 1.0);
+    BiquadFilterNode createBiquadFilter();
+    WaveShaperNode createWaveShaper();
+    PannerNode createPanner();
+    ConvolverNode createConvolver();
+
+    ChannelSplitterNode createChannelSplitter(optional unsigned long numberOfOutputs = 6);
+    ChannelMergerNode createChannelMerger(optional unsigned long numberOfInputs = 6);
+
+    DynamicsCompressorNode createDynamicsCompressor();
+
+    OscillatorNode createOscillator();
+    PeriodicWave createPeriodicWave(Float32Array real, Float32Array imag);
+
+};</pre>
+
+   <pre id="audio-buffer-idl">interface AudioBuffer {
+
+    readonly attribute float sampleRate;
+    readonly attribute long length;
+
+    // in seconds
+    readonly attribute double duration;
+
+    readonly attribute long numberOfChannels;
+
+    Float32Array getChannelData(unsigned long channel);
+
+};</pre>
+
+  <div id="log"></div>
+
+  <script>
+(function() {
+  var idl_array = new IdlArray();
+  idl_array.add_untested_idls(document.getElementById("event-target-idl").textContent);
+  idl_array.add_untested_idls(document.getElementById("audio-context-idl").textContent);
+  idl_array.add_idls(document.getElementById("audio-buffer-idl").textContent);
+
+  // For these tests the value of the arguments is unimportant.
+  audio_buffer = (new AudioContext).createBuffer(numberOfChannels = 1, length = 256, sampleRate = 44100);
+
+  idl_array.add_objects({AudioBuffer: ["audio_buffer"]});
+  idl_array.test();
+})();
+  </script>
+</body>
+</html>
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audiobuffersourcenode-interface/.gitkeep b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audiobuffersourcenode-interface/.gitkeep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audiobuffersourcenode-interface/.gitkeep
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audiocontext-interface/.gitkeep b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audiocontext-interface/.gitkeep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audiocontext-interface/.gitkeep
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audiodestinationnode-interface/.gitkeep b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audiodestinationnode-interface/.gitkeep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audiodestinationnode-interface/.gitkeep
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audiodestinationnode-interface/idl-test.html b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audiodestinationnode-interface/idl-test.html
new file mode 100644
index 0000000..257b18d
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audiodestinationnode-interface/idl-test.html
@@ -0,0 +1,128 @@
+<!DOCTYPE html>
+<html class="a">
+<head>
+<title>AudioDestinationNode IDL Test</title>
+<script src="/resources/testharness.js"></script><script src="/resources/testharnessreport.js"></script><script src="/resources/idlharness.js"></script><script src="/resources/WebIDLParser.js"></script><script src="/webaudio/js/lodash.js"></script><script src="/webaudio/js/vendor-prefixes.js"></script><script src="/webaudio/js/helpers.js"></script><style type="text/css">
+    #event-target-idl,
+    #audio-context-idl,
+    #audio-node-idl
+    { visibility:hidden; height: 0px;}
+  </style>
+</head>
+<body class="a">
+
+   <pre id="event-target-idl">interface EventTarget {
+  void addEventListener(DOMString type, EventListener? callback, optional boolean capture = false);
+  void removeEventListener(DOMString type, EventListener? callback, optional boolean capture = false);
+  boolean dispatchEvent(Event event);
+};
+
+/*
+callback interface EventListener {
+  void handleEvent(Event event);
+};
+*/
+// Callback interfaces are not supported yet, but that's ok
+interface EventListener {};
+</pre>
+
+   <pre id="audio-context-idl">callback DecodeSuccessCallback = void (AudioBuffer decodedData);
+callback DecodeErrorCallback = void ();
+
+[Constructor]
+interface AudioContext : EventTarget {
+
+    readonly attribute AudioDestinationNode destination;
+    readonly attribute float sampleRate;
+    readonly attribute double currentTime;
+    readonly attribute AudioListener listener;
+
+    AudioBuffer createBuffer(unsigned long numberOfChannels, unsigned long length, float sampleRate);
+
+    void decodeAudioData(ArrayBuffer audioData,
+                         DecodeSuccessCallback successCallback,
+                         optional DecodeErrorCallback errorCallback);
+
+
+    // AudioNode creation
+    AudioBufferSourceNode createBufferSource();
+
+    MediaElementAudioSourceNode createMediaElementSource(HTMLMediaElement mediaElement);
+
+    MediaStreamAudioSourceNode createMediaStreamSource(MediaStream mediaStream);
+    MediaStreamAudioDestinationNode createMediaStreamDestination();
+
+    ScriptProcessorNode createScriptProcessor(optional unsigned long bufferSize = 0,
+                                              optional unsigned long numberOfInputChannels = 2,
+                                              optional unsigned long numberOfOutputChannels = 2);
+
+    AnalyserNode createAnalyser();
+    GainNode createGain();
+    DelayNode createDelay(optional double maxDelayTime = 1.0);
+    BiquadFilterNode createBiquadFilter();
+    WaveShaperNode createWaveShaper();
+    PannerNode createPanner();
+    ConvolverNode createConvolver();
+
+    ChannelSplitterNode createChannelSplitter(optional unsigned long numberOfOutputs = 6);
+    ChannelMergerNode createChannelMerger(optional unsigned long numberOfInputs = 6);
+
+    DynamicsCompressorNode createDynamicsCompressor();
+
+    OscillatorNode createOscillator();
+    PeriodicWave createPeriodicWave(Float32Array real, Float32Array imag);
+
+};</pre>
+
+   <pre id="audio-node-idl">enum ChannelCountMode {
+    "max",
+    "clamped-max",
+    "explicit"
+};
+
+enum ChannelInterpretation {
+    "speakers",
+    "discrete"
+};
+
+interface AudioNode : EventTarget {
+
+    void connect(AudioNode destination, optional unsigned long output = 0, optional unsigned long input = 0);
+    void connect(AudioParam destination, optional unsigned long output = 0);
+    void disconnect(optional unsigned long output = 0);
+
+    readonly attribute AudioContext context;
+    readonly attribute unsigned long numberOfInputs;
+    readonly attribute unsigned long numberOfOutputs;
+
+    // Channel up-mixing and down-mixing rules for all inputs.
+    attribute unsigned long channelCount;
+    attribute ChannelCountMode channelCountMode;
+    attribute ChannelInterpretation channelInterpretation;
+
+};</pre>
+
+   <pre id="audio-destination-node-idl">interface AudioDestinationNode : AudioNode {
+
+    readonly attribute unsigned long maxChannelCount;
+
+};</pre>
+
+  <div id="log"></div>
+
+  <script>
+(function() {
+  var idl_array = new IdlArray();
+  idl_array.add_untested_idls(document.getElementById("event-target-idl").textContent);
+  idl_array.add_untested_idls(document.getElementById("audio-context-idl").textContent);
+  idl_array.add_untested_idls(document.getElementById("audio-node-idl").textContent);
+  idl_array.add_idls(document.getElementById("audio-destination-node-idl").textContent);
+
+  audio_destination_node = (new AudioContext).destination;
+
+  idl_array.add_objects({AudioDestinationNode: ["audio_destination_node"]});
+  idl_array.test();
+})();
+  </script>
+</body>
+</html>
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audiolistener-interface/.gitkeep b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audiolistener-interface/.gitkeep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audiolistener-interface/.gitkeep
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audionode-interface/.gitkeep b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audionode-interface/.gitkeep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audionode-interface/.gitkeep
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audionode-interface/audionode-connect-return-value.html b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audionode-interface/audionode-connect-return-value.html
new file mode 100644
index 0000000..3af44fb
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audionode-interface/audionode-connect-return-value.html
@@ -0,0 +1,15 @@
+<!DOCTYPE html>
+<title>Test the return value of connect when connecting two AudioNodes</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script>
+test(function(t) {
+  var context = new OfflineAudioContext(1, 1, 44100);
+  var g1 = context.createGain();
+  var g2 = context.createGain();
+  var rv = g1.connect(g2);
+  assert_equals(rv, g2);
+  var rv = g1.connect(g2);
+  assert_equals(rv, g2);
+}, "connect should return the node connected to.");
+</script>
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audioparam-interface/.gitkeep b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audioparam-interface/.gitkeep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audioparam-interface/.gitkeep
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audioparam-interface/retrospective-setValueAtTime.html b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audioparam-interface/retrospective-setValueAtTime.html
new file mode 100644
index 0000000..dde8c27
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audioparam-interface/retrospective-setValueAtTime.html
@@ -0,0 +1,71 @@
+<!DOCTYPE html>
+<title>Test setValueAtTime with startTime in the past</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script>
+function do_test(t, context) {
+  var source = context.createBufferSource();
+  source.buffer =
+    function() {
+      var buffer = context.createBuffer(1, 1, context.sampleRate);
+      buffer.getChannelData(0)[0] = 1.0;
+      return buffer;
+    }();
+  source.loop = true;
+  source.start();
+
+  // Use a ramp of slope 1/sample to measure time.
+  // The end value is the extent of exact precision in single precision float.
+  const rampEnd = Math.pow(2, 24);
+  const rampEndSeconds = rampEnd / context.sampleRate;
+  var test = context.createGain();
+  test.gain.setValueAtTime(0.0, 0.0);
+  test.gain.linearRampToValueAtTime(rampEnd, rampEndSeconds);
+
+  // With a different starting point on the same line, the result should be
+  // the same.  |currentTime| may include double precision floating point
+  // rounding errors, so round to nearest integer sample to ignore these.
+  var scheduledSample = Math.round(context.currentTime * context.sampleRate);
+  assert_equals(scheduledSample % 128, 0,
+                "currentTime advances in blocks of 128 samples");
+  var reference = context.createGain();
+  reference.gain.setValueAtTime(scheduledSample, context.currentTime);
+  reference.gain.linearRampToValueAtTime(rampEnd, rampEndSeconds);
+
+  source.connect(test);
+  source.connect(reference);
+
+  var merger = context.createChannelMerger();
+  test.connect(merger, 0, 0);
+  reference.connect(merger, 0, 1);
+
+  var processor = context.createScriptProcessor(0, 2, 0);
+  merger.connect(processor);
+  processor.onaudioprocess =
+    t.step_func_done((e) => {
+      source.stop();
+      processor.onaudioprocess = null;
+
+      var testValue = e.inputBuffer.getChannelData(0)[0];
+      var referenceValue = e.inputBuffer.getChannelData(1)[0];
+
+      assert_equals(testValue, referenceValue,
+                    "ramp value matches expected");
+      assert_greater_than_equal(testValue, scheduledSample,
+                                "time does not retreat");
+      assert_equals(testValue % 128, 0,
+                    "ScriptProcessor blocks align on 128-sample blocks");
+    });
+}
+
+async_test(function(t) {
+  var context = new AudioContext;
+  (function waitForTimeAdvance() {
+    if (context.currentTime == 0) {
+      t.step_timeout(waitForTimeAdvance, 0);
+    } else {
+      do_test(t, context);
+    }
+  })();
+});
+</script>
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audioprocessingevent-interface/.gitkeep b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audioprocessingevent-interface/.gitkeep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-audioprocessingevent-interface/.gitkeep
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-biquadfilternode-interface/.gitkeep b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-biquadfilternode-interface/.gitkeep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-biquadfilternode-interface/.gitkeep
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-channelmergernode-interface/.gitkeep b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-channelmergernode-interface/.gitkeep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-channelmergernode-interface/.gitkeep
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-channelsplitternode-interface/.gitkeep b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-channelsplitternode-interface/.gitkeep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-channelsplitternode-interface/.gitkeep
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-convolvernode-interface/.gitkeep b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-convolvernode-interface/.gitkeep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-convolvernode-interface/.gitkeep
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-delaynode-interface/.gitkeep b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-delaynode-interface/.gitkeep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-delaynode-interface/.gitkeep
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-delaynode-interface/idl-test.html b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-delaynode-interface/idl-test.html
new file mode 100644
index 0000000..4587e39
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-delaynode-interface/idl-test.html
@@ -0,0 +1,152 @@
+<!DOCTYPE html>
+<html class="a">
+<head>
+<title>DelayNode IDL Test</title>
+<script src="/resources/testharness.js"></script><script src="/resources/testharnessreport.js"></script><script src="/resources/idlharness.js"></script><script src="/resources/WebIDLParser.js"></script><script src="/webaudio/js/lodash.js"></script><script src="/webaudio/js/vendor-prefixes.js"></script><script src="/webaudio/js/helpers.js"></script><style type="text/css">
+    #event-target-idl,
+    #audio-context-idl,
+    #audio-node-idl,
+    #audio-param-idl
+    { visibility:hidden; height: 0px;}
+  </style>
+</head>
+<body class="a">
+
+   <pre id="event-target-idl">interface EventTarget {
+  void addEventListener(DOMString type, EventListener? callback, optional boolean capture = false);
+  void removeEventListener(DOMString type, EventListener? callback, optional boolean capture = false);
+  boolean dispatchEvent(Event event);
+};
+
+/*
+callback interface EventListener {
+  void handleEvent(Event event);
+};
+*/
+// Callback interfaces are not supported yet, but that's ok
+interface EventListener {};
+</pre>
+
+   <pre id="audio-context-idl">callback DecodeSuccessCallback = void (AudioBuffer decodedData);
+callback DecodeErrorCallback = void ();
+
+[Constructor]
+interface AudioContext : EventTarget {
+
+    readonly attribute AudioDestinationNode destination;
+    readonly attribute float sampleRate;
+    readonly attribute double currentTime;
+    readonly attribute AudioListener listener;
+
+    AudioBuffer createBuffer(unsigned long numberOfChannels, unsigned long length, float sampleRate);
+
+    void decodeAudioData(ArrayBuffer audioData,
+                         DecodeSuccessCallback successCallback,
+                         optional DecodeErrorCallback errorCallback);
+
+
+    // AudioNode creation
+    AudioBufferSourceNode createBufferSource();
+
+    MediaElementAudioSourceNode createMediaElementSource(HTMLMediaElement mediaElement);
+
+    MediaStreamAudioSourceNode createMediaStreamSource(MediaStream mediaStream);
+    MediaStreamAudioDestinationNode createMediaStreamDestination();
+
+    ScriptProcessorNode createScriptProcessor(optional unsigned long bufferSize = 0,
+                                              optional unsigned long numberOfInputChannels = 2,
+                                              optional unsigned long numberOfOutputChannels = 2);
+
+    AnalyserNode createAnalyser();
+    GainNode createGain();
+    DelayNode createDelay(optional double maxDelayTime = 1.0);
+    BiquadFilterNode createBiquadFilter();
+    WaveShaperNode createWaveShaper();
+    PannerNode createPanner();
+    ConvolverNode createConvolver();
+
+    ChannelSplitterNode createChannelSplitter(optional unsigned long numberOfOutputs = 6);
+    ChannelMergerNode createChannelMerger(optional unsigned long numberOfInputs = 6);
+
+    DynamicsCompressorNode createDynamicsCompressor();
+
+    OscillatorNode createOscillator();
+    PeriodicWave createPeriodicWave(Float32Array real, Float32Array imag);
+
+};</pre>
+
+   <pre id="audio-node-idl">enum ChannelCountMode {
+    "max",
+    "clamped-max",
+    "explicit"
+};
+
+enum ChannelInterpretation {
+    "speakers",
+    "discrete"
+};
+
+interface AudioNode : EventTarget {
+
+    void connect(AudioNode destination, optional unsigned long output = 0, optional unsigned long input = 0);
+    void connect(AudioParam destination, optional unsigned long output = 0);
+    void disconnect(optional unsigned long output = 0);
+
+    readonly attribute AudioContext context;
+    readonly attribute unsigned long numberOfInputs;
+    readonly attribute unsigned long numberOfOutputs;
+
+    // Channel up-mixing and down-mixing rules for all inputs.
+    attribute unsigned long channelCount;
+    attribute ChannelCountMode channelCountMode;
+    attribute ChannelInterpretation channelInterpretation;
+
+};</pre>
+
+   <pre id="audio-param-idl">interface AudioParam {
+
+    attribute float value;
+    readonly attribute float defaultValue;
+
+    // Parameter automation.
+    void setValueAtTime(float value, double startTime);
+    void linearRampToValueAtTime(float value, double endTime);
+    void exponentialRampToValueAtTime(float value, double endTime);
+
+    // Exponentially approach the target value with a rate having the given time constant.
+    void setTargetAtTime(float target, double startTime, double timeConstant);
+
+    // Sets an array of arbitrary parameter values starting at time for the given duration.
+    // The number of values will be scaled to fit into the desired duration.
+    void setValueCurveAtTime(Float32Array values, double startTime, double duration);
+
+    // Cancels all scheduled parameter changes with times greater than or equal to startTime.
+    void cancelScheduledValues(double startTime);
+
+};</pre>
+
+<pre id="delay-node-idl">interface DelayNode : AudioNode {
+
+    readonly attribute AudioParam delayTime;
+
+};</pre>
+
+  <div id="log"></div>
+
+  <script>
+(function() {
+  var idl_array = new IdlArray();
+  idl_array.add_untested_idls(document.getElementById("event-target-idl").textContent);
+  idl_array.add_untested_idls(document.getElementById("audio-context-idl").textContent);
+  idl_array.add_untested_idls(document.getElementById("audio-node-idl").textContent);
+  idl_array.add_untested_idls(document.getElementById("audio-param-idl").textContent);
+  idl_array.add_idls(document.getElementById("delay-node-idl").textContent);
+
+  delay_node = (new AudioContext).createDelay();
+
+  idl_array.add_objects({DelayNode: ["delay_node"]});
+  idl_array.test();
+})();
+  </script>
+</body>
+</html>
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-dynamicscompressornode-interface/.gitkeep b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-dynamicscompressornode-interface/.gitkeep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-dynamicscompressornode-interface/.gitkeep
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-gainnode-interface/.gitkeep b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-gainnode-interface/.gitkeep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-gainnode-interface/.gitkeep
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-gainnode-interface/gain-expected.wav b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-gainnode-interface/gain-expected.wav
new file mode 100644
index 0000000..b445bd8
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-gainnode-interface/gain-expected.wav
Binary files differ
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-gainnode-interface/idl-test.html b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-gainnode-interface/idl-test.html
new file mode 100644
index 0000000..dea13b1
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-gainnode-interface/idl-test.html
@@ -0,0 +1,152 @@
+<!DOCTYPE html>
+<html class="a">
+<head>
+<title>GainNode IDL Test</title>
+<script src="/resources/testharness.js"></script><script src="/resources/testharnessreport.js"></script><script src="/resources/idlharness.js"></script><script src="/resources/WebIDLParser.js"></script><script src="/webaudio/js/lodash.js"></script><script src="/webaudio/js/vendor-prefixes.js"></script><script src="/webaudio/js/helpers.js"></script><style type="text/css">
+    #event-target-idl,
+    #audio-context-idl,
+    #audio-node-idl,
+    #audio-param-idl
+    { visibility:hidden; height: 0px;}
+  </style>
+</head>
+<body class="a">
+
+   <pre id="event-target-idl">interface EventTarget {
+  void addEventListener(DOMString type, EventListener? callback, optional boolean capture = false);
+  void removeEventListener(DOMString type, EventListener? callback, optional boolean capture = false);
+  boolean dispatchEvent(Event event);
+};
+
+/*
+callback interface EventListener {
+  void handleEvent(Event event);
+};
+*/
+// Callback interfaces are not supported yet, but that's ok
+interface EventListener {};
+</pre>
+
+   <pre id="audio-context-idl">callback DecodeSuccessCallback = void (AudioBuffer decodedData);
+callback DecodeErrorCallback = void ();
+
+[Constructor]
+interface AudioContext : EventTarget {
+
+    readonly attribute AudioDestinationNode destination;
+    readonly attribute float sampleRate;
+    readonly attribute double currentTime;
+    readonly attribute AudioListener listener;
+
+    AudioBuffer createBuffer(unsigned long numberOfChannels, unsigned long length, float sampleRate);
+
+    void decodeAudioData(ArrayBuffer audioData,
+                         DecodeSuccessCallback successCallback,
+                         optional DecodeErrorCallback errorCallback);
+
+
+    // AudioNode creation
+    AudioBufferSourceNode createBufferSource();
+
+    MediaElementAudioSourceNode createMediaElementSource(HTMLMediaElement mediaElement);
+
+    MediaStreamAudioSourceNode createMediaStreamSource(MediaStream mediaStream);
+    MediaStreamAudioDestinationNode createMediaStreamDestination();
+
+    ScriptProcessorNode createScriptProcessor(optional unsigned long bufferSize = 0,
+                                              optional unsigned long numberOfInputChannels = 2,
+                                              optional unsigned long numberOfOutputChannels = 2);
+
+    AnalyserNode createAnalyser();
+    GainNode createGain();
+    DelayNode createDelay(optional double maxDelayTime = 1.0);
+    BiquadFilterNode createBiquadFilter();
+    WaveShaperNode createWaveShaper();
+    PannerNode createPanner();
+    ConvolverNode createConvolver();
+
+    ChannelSplitterNode createChannelSplitter(optional unsigned long numberOfOutputs = 6);
+    ChannelMergerNode createChannelMerger(optional unsigned long numberOfInputs = 6);
+
+    DynamicsCompressorNode createDynamicsCompressor();
+
+    OscillatorNode createOscillator();
+    PeriodicWave createPeriodicWave(Float32Array real, Float32Array imag);
+
+};</pre>
+
+   <pre id="audio-node-idl">enum ChannelCountMode {
+    "max",
+    "clamped-max",
+    "explicit"
+};
+
+enum ChannelInterpretation {
+    "speakers",
+    "discrete"
+};
+
+interface AudioNode : EventTarget {
+
+    void connect(AudioNode destination, optional unsigned long output = 0, optional unsigned long input = 0);
+    void connect(AudioParam destination, optional unsigned long output = 0);
+    void disconnect(optional unsigned long output = 0);
+
+    readonly attribute AudioContext context;
+    readonly attribute unsigned long numberOfInputs;
+    readonly attribute unsigned long numberOfOutputs;
+
+    // Channel up-mixing and down-mixing rules for all inputs.
+    attribute unsigned long channelCount;
+    attribute ChannelCountMode channelCountMode;
+    attribute ChannelInterpretation channelInterpretation;
+
+};</pre>
+
+   <pre id="audio-param-idl">interface AudioParam {
+
+    attribute float value;
+    readonly attribute float defaultValue;
+
+    // Parameter automation.
+    void setValueAtTime(float value, double startTime);
+    void linearRampToValueAtTime(float value, double endTime);
+    void exponentialRampToValueAtTime(float value, double endTime);
+
+    // Exponentially approach the target value with a rate having the given time constant.
+    void setTargetAtTime(float target, double startTime, double timeConstant);
+
+    // Sets an array of arbitrary parameter values starting at time for the given duration.
+    // The number of values will be scaled to fit into the desired duration.
+    void setValueCurveAtTime(Float32Array values, double startTime, double duration);
+
+    // Cancels all scheduled parameter changes with times greater than or equal to startTime.
+    void cancelScheduledValues(double startTime);
+
+};</pre>
+
+<pre id="gain-node-idl">interface GainNode : AudioNode {
+
+    readonly attribute AudioParam gain;
+
+};</pre>
+
+  <div id="log"></div>
+
+  <script>
+(function() {
+  var idl_array = new IdlArray();
+  idl_array.add_untested_idls(document.getElementById("event-target-idl").textContent);
+  idl_array.add_untested_idls(document.getElementById("audio-context-idl").textContent);
+  idl_array.add_untested_idls(document.getElementById("audio-node-idl").textContent);
+  idl_array.add_untested_idls(document.getElementById("audio-param-idl").textContent);
+  idl_array.add_idls(document.getElementById("gain-node-idl").textContent);
+
+  gain_node = (new AudioContext).createGain();
+
+  idl_array.add_objects({GainNode: ["gain_node"]});
+  idl_array.test();
+})();
+  </script>
+</body>
+</html>
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-gainnode-interface/test-gainnode.html b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-gainnode-interface/test-gainnode.html
new file mode 100644
index 0000000..4f92fbb
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-gainnode-interface/test-gainnode.html
@@ -0,0 +1,121 @@
+<!doctype html>
+
+<!--
+Tests that GainNode is properly scaling the gain.
+We'll render 11 notes, starting at a gain of 1.0, decreasing in gain by 0.1.
+The 11th note will be of gain 0.0, so it should be silent (at the end of the rendered output).
+
+Based on a test from the WebKit test suite
+(https://github.com/WebKit/webkit/blob/master/LayoutTests/webaudio/gain.html)
+-->
+
+<html class="a">
+ <head>
+  <title>GainNode interface</title>
+  <script src="/resources/testharness.js"></script>
+  <script src="/resources/testharnessreport.js"></script>
+  <script src="/webaudio/js/lodash.js"></script>
+  <script src="/webaudio/js/vendor-prefixes.js"></script>
+  <script src="/webaudio/js/helpers.js"></script>
+  <script src="/webaudio/js/buffer-loader.js"></script>
+ </head>
+ <body class="a">
+  <div id="log"></div>
+  <script>
+var gainNodeTest = async_test("GainNode");
+
+var sampleRate = 44100.0;
+var bufferDurationSeconds = 0.125;
+var numberOfNotes = 11;
+var noteSpacing = bufferDurationSeconds + 0.020; // leave 20ms of silence between each "note"
+var lengthInSeconds = numberOfNotes * noteSpacing;
+
+var context = 0;
+var expectedBuffer = 0;
+var actualBuffer = 0;
+var sinWaveBuffer = 0;
+
+function createSinWaveBuffer(lengthInSeconds, frequency) {
+  var audioBuffer = context.createBuffer(2, lengthInSeconds * sampleRate, sampleRate);
+
+  var n = audioBuffer.length;
+  var channelL = audioBuffer.getChannelData(0);
+  var channelR = audioBuffer.getChannelData(1);
+
+  for (var i = 0; i < n; ++i) {
+    channelL[i] = Math.sin(frequency * 2.0*Math.PI * i / sampleRate);
+    channelR[i] = channelL[i];
+  }
+
+  return audioBuffer;
+}
+
+function playNote(time, gain) {
+  var source = context.createBufferSource();
+  source.buffer = sinWaveBuffer;
+
+  var gainNode = context.createGain();
+  gainNode.gain.value = gain;
+
+  source.connect(gainNode);
+  gainNode.connect(context.destination);
+
+  source.start(time);
+}
+
+function loadExpectedBuffer(event) {
+  actualBuffer = event.renderedBuffer;
+
+  bufferLoader = new BufferLoader(
+    context,
+    ['/webaudio/the-audio-api/the-gainnode-interface/gain-expected.wav'],
+    bufferLoadCompleted
+  );
+  bufferLoader.load();
+};
+
+function bufferLoadCompleted(buffer) {
+  compareExpectedWithActualBuffer(buffer);
+};
+
+setup( function() {
+  // Create offline audio context.
+  context = new OfflineAudioContext(2, sampleRate * lengthInSeconds, sampleRate);
+
+  // Create a buffer for a short "note".
+  sinWaveBuffer = createSinWaveBuffer(bufferDurationSeconds, 880.0);
+
+  // Render 11 notes, starting at a gain of 1.0, decreasing in gain by 0.1.
+  // The last note will be of gain 0.0, so shouldn't be perceptible in the rendered output.
+  for (var i = 0; i < numberOfNotes; ++i) {
+    var time = i * noteSpacing;
+    var gain = 1.0 - i / (numberOfNotes - 1);
+    playNote(time, gain);
+  }
+
+  context.oncomplete = loadExpectedBuffer;
+  context.startRendering();
+}, {timeout: 10000});
+
+function compareExpectedWithActualBuffer(expected) {
+  var expectedBuffer = expected[0];
+
+  gainNodeTest.step(function() {
+    assert_array_approx_equals(expectedBuffer.getChannelData(0),
+                               actualBuffer.getChannelData(0),
+                               1e-4,
+                               "comparing expected and rendered buffers (channel 0)");
+  });
+
+  gainNodeTest.step(function() {
+    assert_array_approx_equals(expectedBuffer.getChannelData(1),
+                               actualBuffer.getChannelData(1),
+                               1e-4,
+                               "comparing expected and rendered buffers (channel 1)");
+  });
+
+  gainNodeTest.done();
+};
+  </script>
+ </body>
+</html>
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-mediaelementaudiosourcenode-interface/.gitkeep b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-mediaelementaudiosourcenode-interface/.gitkeep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-mediaelementaudiosourcenode-interface/.gitkeep
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-mediaelementaudiosourcenode-interface/mediaElementAudioSourceToScriptProcessorTest.html b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-mediaelementaudiosourcenode-interface/mediaElementAudioSourceToScriptProcessorTest.html
new file mode 100644
index 0000000..69427b4
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-mediaelementaudiosourcenode-interface/mediaElementAudioSourceToScriptProcessorTest.html
@@ -0,0 +1,124 @@
+<!doctype html>
+
+<!--
+Tests that a create MediaElementSourceNode that is passed through
+a script processor passes the stream data.
+The script processor saves the input buffers it gets to a temporary
+array, and after the playback has stopped, the contents are compared
+to those of a loaded AudioBuffer with the same source.
+
+Somewhat similar to a test from Mozilla:
+(http://mxr.mozilla.org/mozilla-central/source/content/media/webaudio/test/test_mediaElementAudioSourceNode.html?force=1)
+-->
+
+<html class="a">
+ <head>
+  <title>MediaElementAudioSource interface test (to scriptProcessor)</title>
+  <script src="/resources/testharness.js"></script>
+  <script src="/resources/testharnessreport.js"></script>
+  <script src="/webaudio/js/lodash.js"></script>
+  <script src="/webaudio/js/vendor-prefixes.js"></script>
+  <script src="/webaudio/js/helpers.js"></script>
+  <script src="/webaudio/js/buffer-loader.js"></script>
+ </head>
+ <body class="a">
+  <div id="log"></div>
+  <script>
+ var elementSourceTest = async_test("Element Source tests completed");
+
+ var src = '/webaudio/resources/sin_440Hz_-6dBFS_1s.wav';
+ var BUFFER_SIZE = 2048;
+ var context = null;
+ var actualBufferArrayC0 = new Float32Array(0);
+ var actualBufferArrayC1 = new Float32Array(0);
+ var audio = null, source = null, processor = null
+
+ function loadExpectedBuffer(event) {
+   bufferLoader = new BufferLoader(
+     context,
+     [src],
+     bufferLoadCompleted
+   );
+   bufferLoader.load();
+ };
+
+ function bufferLoadCompleted(buffer) {
+   runTests(buffer);
+ };
+
+ function concatTypedArray(arr1, arr2) {
+   var result = new Float32Array(arr1.length + arr2.length);
+   result.set(arr1);
+   result.set(arr2, arr1.length);
+   return result;
+ }
+
+ // Create Audio context
+ context = new AudioContext();
+
+ // Create an audio element, and a media element source
+ audio = document.createElement('audio');
+ audio.src = src;
+ source = context.createMediaElementSource(audio);
+
+function processListener (e) {
+  actualBufferArrayC0 = concatTypedArray(actualBufferArrayC0, e.inputBuffer.getChannelData(0));
+  actualBufferArrayC1 = concatTypedArray(actualBufferArrayC1, e.inputBuffer.getChannelData(1));
+}
+
+ // Create a processor node to copy the input to the actual buffer
+ processor = context.createScriptProcessor(BUFFER_SIZE);
+ source.connect(processor);
+ processor.connect(context.destination);
+ processor.addEventListener('audioprocess', processListener);
+
+ // When media playback has ended, load the expected buffer for comparison
+ audio.addEventListener("ended", function(e) {
+   // Setting a timeout since we need audioProcess event to run for all samples
+   window.setTimeout(loadExpectedBuffer, 50);
+ });
+
+ audio.play();
+
+ function runTests(expected) {
+   source.disconnect();
+   processor.disconnect();
+
+   // firefox seems to process events after disconnect
+   processor.removeEventListener('audioprocess', processListener)
+
+   var expectedBuffer = expected[0];
+
+   // Trim the actual elements because we don't have a fine-grained
+   // control over the start and end time of recording the data.
+   var actualTrimmedC0 = trimEmptyElements(actualBufferArrayC0);
+   var actualTrimmedC1 = trimEmptyElements(actualBufferArrayC1);
+   var expectedLength = trimEmptyElements(expectedBuffer.getChannelData(0)).length;
+
+   // Test that there is some data.
+   test(function() {
+     assert_greater_than(actualTrimmedC0.length, 0,
+                         "processed data array (C0) length greater than 0");
+     assert_greater_than(actualTrimmedC1.length, 0,
+                         "processed data array (C1) length greater than 0");
+   }, "Channel 0 processed some data");
+
+   // Test the actual contents of the 1st and second channel.
+   test(function() {
+     assert_array_approx_equals(
+       actualTrimmedC0,
+       trimEmptyElements(expectedBuffer.getChannelData(0)),
+       1e-4,
+       "comparing expected and rendered buffers (channel 0)");
+     assert_array_approx_equals(
+       actualTrimmedC1,
+       trimEmptyElements(expectedBuffer.getChannelData(1)),
+       1e-4,
+       "comparing expected and rendered buffers (channel 1)");
+   }, "All data processed correctly");
+
+   elementSourceTest.done();
+ };
+  </script>
+ </body>
+</html>
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-mediastreamaudiodestinationnode-interface/.gitkeep b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-mediastreamaudiodestinationnode-interface/.gitkeep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-mediastreamaudiodestinationnode-interface/.gitkeep
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-mediastreamaudiosourcenode-interface/.gitkeep b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-mediastreamaudiosourcenode-interface/.gitkeep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-mediastreamaudiosourcenode-interface/.gitkeep
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-offlineaudiocontext-interface/.gitkeep b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-offlineaudiocontext-interface/.gitkeep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-offlineaudiocontext-interface/.gitkeep
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-offlineaudiocontext-interface/current-time-block-size.html b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-offlineaudiocontext-interface/current-time-block-size.html
new file mode 100644
index 0000000..ee976f7
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-offlineaudiocontext-interface/current-time-block-size.html
@@ -0,0 +1,17 @@
+<!DOCTYPE html>
+<title>Test currentTime at completion of OfflineAudioContext rendering</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script>
+promise_test(function() {
+  // sampleRate is a power of two so that time can be represented exactly
+  // in double currentTime.
+  var context = new OfflineAudioContext(1, 1, 65536);
+  return context.startRendering().
+    then(function(buffer) {
+      assert_equals(buffer.length, 1, "buffer length");
+      assert_equals(context.currentTime, 128 / context.sampleRate,
+                    "currentTime at completion");
+    });
+});
+</script>
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-oscillatornode-interface/.gitkeep b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-oscillatornode-interface/.gitkeep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-oscillatornode-interface/.gitkeep
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-pannernode-interface/.gitkeep b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-pannernode-interface/.gitkeep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-pannernode-interface/.gitkeep
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-periodicwave-interface/.gitkeep b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-periodicwave-interface/.gitkeep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-periodicwave-interface/.gitkeep
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-scriptprocessornode-interface/.gitkeep b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-scriptprocessornode-interface/.gitkeep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-scriptprocessornode-interface/.gitkeep
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-waveshapernode-interface/.gitkeep b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-waveshapernode-interface/.gitkeep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-waveshapernode-interface/.gitkeep
diff --git a/src/third_party/web_platform_tests/webaudio/the-audio-api/the-waveshapernode-interface/curve-tests.html b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-waveshapernode-interface/curve-tests.html
new file mode 100644
index 0000000..32ace55
--- /dev/null
+++ b/src/third_party/web_platform_tests/webaudio/the-audio-api/the-waveshapernode-interface/curve-tests.html
@@ -0,0 +1,212 @@
+<!doctype html>
+<html>
+<head>
+	<title>WaveShaperNode interface - Curve tests | WebAudio</title>
+
+	<script type="text/javascript" src="../../../resources/testharness.js"></script>
+	<script type="text/javascript" src="../../../resources/testharnessreport.js"></script>
+	<script type="text/javascript" src="../../js/vendor-prefixes.js"></script>
+</head>
+<body>
+	<div id="log">
+	</div>
+
+	<script type="text/javascript">
+		var sampleRate=44100.0;
+		var tolerance=0.01;
+
+		/*
+		Testing that -1, 0 and +1 map correctly to curve (with 1:1 correlation)
+		=======================================================================
+		From the specification:
+			The input signal is nominally within the range -1 -> +1.
+			Each input sample within this range will index into the shaping curve with a signal level of zero corresponding
+				to the center value of the curve array.
+		*/
+		(function() {
+			var threeElementCurve=[2.0, -3.0, 4.0];
+			var inputData=[-1.0, 0, 1.0];
+			var expectedData=[2.0, -3.0, 4.0];
+			executeTest(threeElementCurve, inputData, expectedData, "Testing that -1, 0 and +1 map correctly to curve (with 1:1 correlation)");
+		})();
+
+		/*
+		Testing interpolation (where inputs don't correlate directly to curve elements)
+		===============================================================================
+		From the specification:
+			The implementation must perform linear interpolation between adjacent points in the curve.
+		*/
+		(function() {
+			var threeElementCurve=[2.0, -3.0, 4.0];
+			var inputData=[-0.5, +0.5, +0.75];
+			var expectedData=[-0.5, +0.5, +2.25];
+			executeTest(threeElementCurve, inputData, expectedData, "Testing interpolation (where inputs don't correlate directly to curve elements)");
+		})();
+
+		/*
+		Testing out-of-range inputs (should be mapped to the first/last elements of the curve)
+		======================================================================================
+		From the specification:
+			Any sample value less than -1 will correspond to the first value in the curve array.
+			Any sample value greater than +1 will correspond to the last value in the curve array.
+		*/
+		(function() {
+			var threeElementCurve=[2.0, -3.0, 4.0];
+			var inputData=[-1.5, +1.5];
+			var expectedData=[2.0, 4.0];
+			executeTest(threeElementCurve, inputData, expectedData, "Testing out-of-range inputs (should be mapped to the first/last elements of the curve)");
+		})();
+
+		/*
+		Testing a 2-element curve (does not have a middle element)
+		==========================================================
+		From the specification:
+			Each input sample within this range will index into the shaping curve with a signal level of zero corresponding
+				to the center value of the curve array.
+			The implementation must perform linear interpolation between adjacent points in the curve.
+		*/
+		(function() {
+			var twoElementCurve=[2.0, -2.0];
+			var inputData=[-1.0, 0, 1.0];
+			var expectedData=[2.0, 0.0, -2.0];
+			executeTest(twoElementCurve, inputData, expectedData, "Testing a 2-element curve (does not have a middle element)");
+		})();
+
+		/*
+		Testing a 4-element curve (does not have a middle element)
+		==========================================================
+		From the specification:
+			Each input sample within this range will index into the shaping curve with a signal level of zero corresponding
+				to the center value of the curve array.
+			The implementation must perform linear interpolation between adjacent points in the curve.
+		*/
+		(function() {
+			var fourElementCurve=[1.0, 2.0, 4.0, 7.0];
+			var inputData=[-1.0, 0, 1.0];
+			var expectedData=[1.0, 3.0, 7.0];
+			executeTest(fourElementCurve, inputData, expectedData, "Testing a 4-element curve (does not have a middle element)");
+		})();
+
+		/*
+		Testing a huge curve
+		====================
+		From the specification:
+			Each input sample within this range will index into the shaping curve with a signal level of zero corresponding
+				to the center value of the curve array.
+		*/
+		(function() {
+			var bigCurve=[];
+			for(var i=0;i<=60000;i++) { bigCurve.push(i/3.5435); }
+			var inputData=[-1.0, 0, 1.0];
+			var expectedData=[bigCurve[0], bigCurve[30000], bigCurve[60000]];
+			executeTest(bigCurve, inputData, expectedData, "Testing a huge curve");
+		})();
+
+		/*
+		Testing single-element curve (boundary condition)
+		=================================================
+		From the specification:
+			Each input sample within this range will index into the shaping curve with a signal level of zero corresponding
+				to the center value of the curve array.
+			Any sample value less than -1 will correspond to the first value in the curve array.
+			Any sample value greater than +1 will correspond to the last value in the curve array.
+			The implementation must perform linear interpolation between adjacent points in the curve.
+		Note:
+			I found a post on the W3C audio mailing list (from one of the Chris's) that suggested it would be feasible
+				to use the WaveShaperNode to create constant values.
+		*/
+		(function() {
+			var oneElementCurve=[1.0];
+			var inputData=[-1.0, 0, 1.0, -2.0, 2.0];
+			var expectedData=[1.0, 1.0, 1.0, 1.0, 1.0];
+			executeTest(oneElementCurve, inputData, expectedData, "Testing single-element curve (boundary condition)");
+		})();
+
+		/*
+		Testing null curve (should return input values)
+		===============================================
+		From the specification:
+			Initially the curve attribute is null, which means that the WaveShaperNode will pass its input to its output
+				without modification.
+		*/
+		(function() {
+			var inputData=[-1.0, 0, 1.0, 2.0];
+			var expectedData=[-1.0, 0.0, 1.0, 2.0];
+			executeTest(null, inputData, expectedData, "Testing null curve (should return input values)");
+		})();
+
+		/*
+		Testing zero-element curve (unspecified result)
+		===============================================
+		From the specification:
+			Unspecified result (I assume it will be treated in the same way as a null curve).
+		Note:
+			Mozilla test_waveShaperNoCurve.html indicates they expect same results as a null curve.
+		*/
+		(function() {
+			var zeroElementCurve=[];
+			var inputData=[-1.0, 0, 1.0, 2.0];
+			var expectedData=[-1.0, 0.0, 1.0, 2.0];
+			executeTest(zeroElementCurve, inputData, expectedData, "Testing zero-element curve (unspecified result)");
+		})();
+
+
+		/**
+		* Function that does the actual testing (using an asynchronous test).
+		* @param {?Array.<number>} curveData - Array containing values for the WaveShaper curve.
+		* @param {!Array.<number>} inputData - Array containing values for the input stream.
+		* @param {!Array.<number>} expectedData - Array containing expected results for each of the corresponding inputs.
+		* @param {!string} testName - Name of the test case.
+		*/
+		function executeTest(curveData, inputData, expectedData, testName) {
+			var stTest=async_test("WaveShaperNode - "+testName);
+
+			// Create offline audio context.
+			var ac=new OfflineAudioContext(1, inputData.length, sampleRate);
+
+			// Create the WaveShaper and its curve.
+			var waveShaper=ac.createWaveShaper();
+			if(curveData!=null) {
+				var curve=new Float32Array(curveData.length);
+				for(var i=0;i<curveData.length;i++) { curve[i]=curveData[i]; }
+				waveShaper.curve=curve;
+			}
+			waveShaper.connect(ac.destination);
+
+			// Create buffer containing the input values.
+			var inputBuffer=ac.createBuffer(1, Math.max(inputData.length, 2), sampleRate);
+			var d=inputBuffer.getChannelData(0);
+			for(var i=0;i<inputData.length;i++) { d[i]=inputData[i]; }
+
+			// Play the input buffer through the WaveShaper.
+			var src=ac.createBufferSource();
+			src.buffer=inputBuffer;
+			src.connect(waveShaper);
+			src.start();
+
+			// Test the outputs match the expected values.
+			ac.oncomplete=function(ev) {
+				var d=ev.renderedBuffer.getChannelData(0);
+
+				stTest.step(function() {
+					for(var i=0;i<expectedData.length;i++) {
+						var curveText="null";
+						if(curve!=null) {
+							if(curveData.length<20) {
+								curveText=curveData.join(",");
+							} else {
+								curveText="TooBigToDisplay ("+(curveData.length-1)+" elements)";
+							}
+						}
+						var comment="Input="+inputData[i]+", Curve=["+curveText+"] >>> ";
+						assert_approx_equals(d[i], expectedData[i], tolerance, comment);
+					}
+				});
+
+				stTest.done();
+			};
+			ac.startRendering();
+		}
+	</script>
+</body>
+</html>