Import Cobalt 21.master.0.286167
Includes the following patches:
https://cobalt-review.googlesource.com/c/cobalt/+/5930
by zapati.ahn@samsung.com
diff --git a/src/.pre-commit-config.yaml b/src/.pre-commit-config.yaml
deleted file mode 100644
index 440ebc1..0000000
--- a/src/.pre-commit-config.yaml
+++ /dev/null
@@ -1,54 +0,0 @@
-# See https://pre-commit.com for more information
-# See https://pre-commit.com/hooks.html for more hooks
-default_stages: [commit]
-repos:
-- repo: local
- hooks:
- - id: cpplint
- name: cpplint
- entry: cpplint
- language: system
- types: [c++]
- args: [--verbose=4, --quiet]
- exclude: '.*tests?.(cc|h)$'
- stages: [push]
- - id: cpplint_test
- name: cpplint_test
- entry: cpplint
- language: system
- types: [c++]
- args: [--verbose=5, --quiet]
- files: '.*tests?.(cc|h)$'
- stages: [push]
- - id: yapf
- name: yapf
- entry: yapf
- language: system
- types: [python]
- args: [--style=yapf, -i]
- - id: pylint
- name: pylint
- entry: pylint
- language: system
- types: [python]
- args: [-d W0201]
- stages: [push]
- - id: clang-format
- name: clang-format
- entry: third_party/precommit-hooks/clang-format_wrapper.py
- language: python
- types: [c++]
- args: [-i, -style=file]
- - id: google-java-format
- name: google-java-format
- entry: third_party/precommit-hooks/google-java-format_wrapper.py
- language: python
- types: [java]
- args: [-i]
- - id: gcheckstyle
- name: gcheckstyle
- entry: third_party/precommit-hooks/gcheckstyle_wrapper.py
- language: python
- types: [java]
- verbose: true
- stages: [push]
diff --git a/src/base/trace_event/trace_event_memory_overhead.h b/src/base/trace_event/trace_event_memory_overhead.h
index 2687e93..7016f0f 100644
--- a/src/base/trace_event/trace_event_memory_overhead.h
+++ b/src/base/trace_event/trace_event_memory_overhead.h
@@ -6,6 +6,7 @@
#define BASE_TRACE_EVENT_TRACE_EVENT_MEMORY_OVERHEAD_H_
#include <unordered_map>
+#include <string>
#include "base/base_export.h"
#include "base/macros.h"
diff --git a/src/build/protoc.gypi b/src/build/protoc.gypi
index 89e3e16..622c853 100644
--- a/src/build/protoc.gypi
+++ b/src/build/protoc.gypi
@@ -98,7 +98,7 @@
'<(cc_dir)/<(RULE_INPUT_ROOT).pb.h',
],
'action': [
- 'python',
+ 'python2',
'<(protoc_wrapper)',
'--include',
'<(cc_include)',
@@ -116,7 +116,6 @@
'--cpp_out', '<(cc_generator_options)<(cc_dir)',
'--python_out', '<(py_dir)',
],
- 'msvs_cygwin_shell': 0,
'message': 'Generating C++ and Python code from <(RULE_INPUT_PATH)',
'process_outputs_as_sources': 1,
},
diff --git a/src/cobalt/bindings/bindings.gypi b/src/cobalt/bindings/bindings.gypi
index e89948c..3b33a64 100644
--- a/src/cobalt/bindings/bindings.gypi
+++ b/src/cobalt/bindings/bindings.gypi
@@ -274,7 +274,7 @@
'-e h -d <(generated_source_output_dir) -b <(DEPTH) <(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT))'
],
'action': [
- 'python',
+ 'python2',
'<(idl_compiler_script)',
'--cache-dir',
'<(bindings_scripts_output_dir)',
@@ -363,7 +363,7 @@
'-e cc -d <(generated_source_output_dir) -b <(DEPTH) <(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT))'
],
'action': [
- 'python',
+ 'python2',
'<(idl_compiler_script)',
'--cache-dir',
'<(bindings_scripts_output_dir)',
@@ -437,7 +437,7 @@
'<(generated_type_conversion_header_file)',
],
'action': [
- 'python',
+ 'python2',
'<(conversion_header_generator_script)',
'--cache-dir',
'<(bindings_scripts_output_dir)',
@@ -539,7 +539,7 @@
'<(bindings_scripts_output_dir)/parsetab.pickle',
],
'action': [
- 'python',
+ 'python2',
'<(bindings_scripts_dir)/blink_idl_parser.py',
'<(bindings_scripts_output_dir)',
],
@@ -562,7 +562,7 @@
'<(bindings_scripts_output_dir)/cached_jinja_templates.stamp', # Dummy to track dependency
],
'action': [
- 'python',
+ 'python2',
'<(DEPTH)/cobalt/bindings/code_generator_cobalt.py',
'<(bindings_scripts_output_dir)',
'<(bindings_templates_dir)',
diff --git a/src/cobalt/bindings/run_cobalt_bindings_tests.bat b/src/cobalt/bindings/run_cobalt_bindings_tests.bat
old mode 100644
new mode 100755
index 515918f..56080aa
--- a/src/cobalt/bindings/run_cobalt_bindings_tests.bat
+++ b/src/cobalt/bindings/run_cobalt_bindings_tests.bat
@@ -14,4 +14,4 @@
@rem limitations under the License.
@rem
-@python run_cobalt_bindings_tests.py mozjs-45 %*
+@run_cobalt_bindings_tests.py mozjs-45 %*
diff --git a/src/cobalt/bindings/run_cobalt_bindings_tests.py b/src/cobalt/bindings/run_cobalt_bindings_tests.py
old mode 100644
new mode 100755
index a9e3be7..eb0fe24
--- a/src/cobalt/bindings/run_cobalt_bindings_tests.py
+++ b/src/cobalt/bindings/run_cobalt_bindings_tests.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/python2
# Copyright 2014 The Cobalt Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -25,12 +25,12 @@
import argparse
import os
import sys
+from webkitpy.bindings.bindings_tests import run_bindings_tests
import _env # pylint: disable=unused-import
from cobalt.bindings.idl_compiler_cobalt import IdlCompilerCobalt
from cobalt.bindings.mozjs45.code_generator_mozjs45 import CodeGeneratorMozjs45
from cobalt.bindings.v8c.code_generator_v8c import CodeGeneratorV8c
-from webkitpy.bindings.bindings_tests import run_bindings_tests
def main(argv):
diff --git a/src/cobalt/bindings/run_cobalt_bindings_tests.sh b/src/cobalt/bindings/run_cobalt_bindings_tests.sh
index 1309ce4..6f65c9e 100755
--- a/src/cobalt/bindings/run_cobalt_bindings_tests.sh
+++ b/src/cobalt/bindings/run_cobalt_bindings_tests.sh
@@ -1,3 +1,4 @@
+#!/bin/bash
#
# Copyright 2016 The Cobalt Authors. All Rights Reserved.
#
@@ -14,10 +15,8 @@
# limitations under the License.
#
-#!/bin/bash
-
# Ensure we are in the bindings directory.
cd "$(dirname "${BASH_SOURCE[0]}")"
-python run_cobalt_bindings_tests.py mozjs45 "$@"
-python run_cobalt_bindings_tests.py v8c "$@"
+./run_cobalt_bindings_tests.py mozjs45 "$@"
+./run_cobalt_bindings_tests.py v8c "$@"
diff --git a/src/cobalt/browser/application.cc b/src/cobalt/browser/application.cc
index 27edd11..b9cb809 100644
--- a/src/cobalt/browser/application.cc
+++ b/src/cobalt/browser/application.cc
@@ -18,6 +18,7 @@
#include "cobalt/browser/application.h"
+#include <map>
#include <memory>
#include <string>
#include <vector>
@@ -255,6 +256,15 @@
return initial_url;
}
+bool ValidateSplashScreen(const base::Optional<GURL>& url) {
+ if (url->is_valid() &&
+ (url->SchemeIsFile() || url->SchemeIs("h5vcc-embedded"))) {
+ return true;
+ }
+ LOG(FATAL) << "Ignoring invalid fallback splash screen: " << url->spec();
+ return false;
+}
+
base::Optional<GURL> GetFallbackSplashScreenURL() {
base::CommandLine* command_line = base::CommandLine::ForCurrentProcess();
std::string fallback_splash_screen_string;
@@ -270,15 +280,51 @@
}
base::Optional<GURL> fallback_splash_screen_url =
GURL(fallback_splash_screen_string);
- if (!fallback_splash_screen_url->is_valid() ||
- !(fallback_splash_screen_url->SchemeIsFile() ||
- fallback_splash_screen_url->SchemeIs("h5vcc-embedded"))) {
- LOG(FATAL) << "Ignoring invalid fallback splash screen: "
- << fallback_splash_screen_string;
- }
+ ValidateSplashScreen(fallback_splash_screen_url);
return fallback_splash_screen_url;
}
+// Parses the fallback_splash_screen_topics command line parameter
+// and maps topics to full file url locations, if valid.
+void ParseFallbackSplashScreenTopics(
+ const base::Optional<GURL>& default_fallback_splash_screen_url,
+ std::map<std::string, GURL>* fallback_splash_screen_topic_map) {
+ base::CommandLine* command_line = base::CommandLine::ForCurrentProcess();
+ std::string topics;
+ if (command_line->HasSwitch(switches::kFallbackSplashScreenTopics)) {
+ topics = command_line->GetSwitchValueASCII(
+ switches::kFallbackSplashScreenTopics);
+ } else {
+ topics = configuration::Configuration::GetInstance()
+ ->CobaltFallbackSplashScreenTopics();
+ }
+
+ // Note: values in topics_map may be either file paths or filenames.
+ std::map<std::string, std::string> topics_map;
+ BrowserModule::GetParamMap(topics, topics_map);
+ for (auto iterator = topics_map.begin(); iterator != topics_map.end();
+ iterator++) {
+ std::string topic = iterator->first;
+ std::string location = iterator->second;
+ base::Optional<GURL> topic_fallback_url = GURL(location);
+
+ // If not a valid url, check whether it is a valid filename in the
+ // same directory as the default fallback url.
+ if (!topic_fallback_url->is_valid()) {
+ if (default_fallback_splash_screen_url) {
+ topic_fallback_url = GURL(
+ default_fallback_splash_screen_url->GetWithoutFilename().spec() +
+ location);
+ } else {
+ break;
+ }
+ }
+ if (ValidateSplashScreen(topic_fallback_url)) {
+ (*fallback_splash_screen_topic_map)[topic] = topic_fallback_url.value();
+ }
+ }
+}
+
base::TimeDelta GetTimedTraceDuration() {
#if defined(ENABLE_DEBUG_COMMAND_LINE_SWITCHES)
base::CommandLine* command_line = base::CommandLine::ForCurrentProcess();
@@ -570,6 +616,9 @@
options.build_auto_mem_settings = memory_settings::GetDefaultBuildSettings();
options.fallback_splash_screen_url = fallback_splash_screen_url;
+ ParseFallbackSplashScreenTopics(fallback_splash_screen_url,
+ &options.fallback_splash_screen_topic_map);
+
if (command_line->HasSwitch(browser::switches::kFPSPrint)) {
options.renderer_module_options.enable_fps_stdout = true;
}
diff --git a/src/cobalt/browser/browser_module.cc b/src/cobalt/browser/browser_module.cc
index 69c959b..ace1e98 100644
--- a/src/cobalt/browser/browser_module.cc
+++ b/src/cobalt/browser/browser_module.cc
@@ -15,6 +15,7 @@
#include "cobalt/browser/browser_module.h"
#include <algorithm>
+#include <map>
#include <memory>
#include <vector>
@@ -31,6 +32,7 @@
#include "base/time/time.h"
#include "base/trace_event/trace_event.h"
#include "cobalt/base/cobalt_paths.h"
+#include "cobalt/base/init_cobalt.h"
#include "cobalt/base/source_location.h"
#include "cobalt/base/tokens.h"
#include "cobalt/browser/on_screen_keyboard_starboard_bridge.h"
@@ -453,6 +455,7 @@
base::Unretained(this))));
}
+ // Set the fallback splash screen url to the default fallback url.
fallback_splash_screen_url_ = options.fallback_splash_screen_url;
// Synchronously construct our WebModule object.
@@ -564,7 +567,9 @@
DestroySplashScreen(base::TimeDelta());
if (options_.enable_splash_screen_on_reloads ||
main_web_module_generation_ == 1) {
- splash_screen_cache_->SetUrl(url);
+ base::Optional<std::string> topic = SetSplashScreenTopicFallback(url);
+ splash_screen_cache_->SetUrl(url, topic);
+
if (fallback_splash_screen_url_ ||
splash_screen_cache_->IsSplashScreenCached()) {
splash_screen_.reset(new SplashScreen(
@@ -1977,5 +1982,64 @@
return system_window_->GetSbWindow();
}
+base::Optional<std::string> BrowserModule::SetSplashScreenTopicFallback(
+ const GURL& url) {
+ std::map<std::string, std::string> url_param_map;
+ // If this is the initial startup, use topic within deeplink, if specified.
+ if (main_web_module_generation_ == 1) {
+ GetParamMap(GetInitialDeepLink(), url_param_map);
+ }
+ // If this is not the initial startup, there was no deeplink specified, or
+ // the deeplink did not have a topic, check the current url for a topic.
+ if (url_param_map["topic"].empty()) {
+ GetParamMap(url.query(), url_param_map);
+ }
+ std::string splash_topic = url_param_map["topic"];
+ // If a topic was found, check whether a fallback url was specified.
+ if (!splash_topic.empty()) {
+ GURL splash_url = options_.fallback_splash_screen_topic_map[splash_topic];
+ if (!splash_url.spec().empty()) {
+ // Update fallback splash screen url to topic-specific URL.
+ fallback_splash_screen_url_ = splash_url;
+ }
+ return base::Optional<std::string>(splash_topic);
+ }
+ return base::Optional<std::string>();
+}
+
+void BrowserModule::GetParamMap(const std::string& url,
+ std::map<std::string, std::string>& map) {
+ bool next_is_option = true;
+ bool next_is_value = false;
+ std::string option = "";
+ base::StringTokenizer tokenizer(url, "&=");
+ tokenizer.set_options(base::StringTokenizer::RETURN_DELIMS);
+
+ while (tokenizer.GetNext()) {
+ if (tokenizer.token_is_delim()) {
+ switch (*tokenizer.token_begin()) {
+ case '&':
+ next_is_option = true;
+ break;
+ case '=':
+ next_is_value = true;
+ break;
+ }
+ } else {
+ std::string token = tokenizer.token();
+ if (next_is_value && !option.empty()) {
+ // Overwrite previous value when an option appears more than once.
+ map[option] = token;
+ }
+ option = "";
+ if (next_is_option) {
+ option = token;
+ }
+ next_is_option = false;
+ next_is_value = false;
+ }
+ }
+}
+
} // namespace browser
} // namespace cobalt
diff --git a/src/cobalt/browser/browser_module.h b/src/cobalt/browser/browser_module.h
index 2d9b43f..0f6648a 100644
--- a/src/cobalt/browser/browser_module.h
+++ b/src/cobalt/browser/browser_module.h
@@ -15,6 +15,7 @@
#ifndef COBALT_BROWSER_BROWSER_MODULE_H_
#define COBALT_BROWSER_BROWSER_MODULE_H_
+#include <map>
#include <memory>
#include <string>
#include <vector>
@@ -104,6 +105,7 @@
memory_settings::AutoMemSettings command_line_auto_mem_settings;
memory_settings::AutoMemSettings build_auto_mem_settings;
base::Optional<GURL> fallback_splash_screen_url;
+ std::map<std::string, GURL> fallback_splash_screen_topic_map;
base::Optional<cssom::ViewportSize> requested_viewport_size;
bool enable_splash_screen_on_reloads;
bool enable_on_screen_keyboard = true;
@@ -216,6 +218,11 @@
bool IsWebModuleLoaded() { return web_module_loaded_.IsSignaled(); }
+ // Parses the url and defines a mapping of parameter values of the form
+ // &option=value&foo=bar.
+ static void GetParamMap(const std::string& url,
+ std::map<std::string, std::string>& map);
+
private:
#if SB_HAS(CORE_DUMP_HANDLER_SUPPORT)
static void CoreDumpHandler(void* browser_module_as_void);
@@ -457,6 +464,10 @@
// applied according to the current time.
scoped_refptr<render_tree::Node> GetLastSubmissionAnimated();
+ // Sets the fallback splash screen url to a topic-specific URL, if applicable.
+ // Returns the topic used, or an empty Optional if a topic isn't found.
+ base::Optional<std::string> SetSplashScreenTopicFallback(const GURL& url);
+
// TODO:
// WeakPtr usage here can be avoided if BrowserModule has a thread to
// own where it can ensure that its tasks are all resolved when it is
diff --git a/src/cobalt/browser/splash_screen_cache.cc b/src/cobalt/browser/splash_screen_cache.cc
index 5cf8299..58b9caf 100644
--- a/src/cobalt/browser/splash_screen_cache.cc
+++ b/src/cobalt/browser/splash_screen_cache.cc
@@ -18,6 +18,7 @@
#include <string>
#include <vector>
+#include "base/base64.h"
#include "base/hash.h"
#include "base/optional.h"
#include "base/strings/string_util.h"
@@ -59,9 +60,14 @@
base::AutoLock lock(lock_);
}
-bool SplashScreenCache::CacheSplashScreen(const std::string& content) const {
+bool SplashScreenCache::CacheSplashScreen(
+ const std::string& content,
+ const base::Optional<std::string>& topic) const {
base::AutoLock lock(lock_);
- base::Optional<std::string> key = GetKeyForStartUrl(url_);
+ // Cache the content so that it's retrievable for the topic specified in the
+ // rel attribute. This topic may or may not match the topic specified for this
+ // particular startup, tracked with "topic_".
+ base::Optional<std::string> key = GetKeyForStartConfig(url_, topic);
if (!key) {
return false;
}
@@ -95,7 +101,7 @@
kSbFileMaxPath)) {
return false;
}
- base::Optional<std::string> key = GetKeyForStartUrl(url_);
+ base::Optional<std::string> key = GetKeyForStartConfig(url_, topic_);
if (!key) return false;
std::string full_path =
std::string(path.data()) + kSbFileSepString + key.value();
@@ -130,9 +136,8 @@
return result_size;
}
-// static
-base::Optional<std::string> SplashScreenCache::GetKeyForStartUrl(
- const GURL& url) {
+base::Optional<std::string> SplashScreenCache::GetKeyForStartConfig(
+ const GURL& url, const base::Optional<std::string>& topic) const {
base::Optional<std::string> encoded_url = base::GetApplicationKey(url);
if (!encoded_url) {
return base::nullopt;
@@ -146,26 +151,36 @@
}
std::string subpath = "";
- std::string subcomponent = kSbFileSepString + std::string("splash_screen");
- if (SbStringConcat(path.data(), subcomponent.c_str(), kSbFileMaxPath) >=
- static_cast<int>(kSbFileMaxPath)) {
+ if (!AddPathDirectory(std::string("splash_screen"), path, subpath)) {
return base::nullopt;
}
- subpath += "splash_screen";
- subcomponent = kSbFileSepString + *encoded_url;
- if (SbStringConcat(path.data(), subcomponent.c_str(), kSbFileMaxPath) >=
- static_cast<int>(kSbFileMaxPath)) {
+ if (!AddPathDirectory(*encoded_url, path, subpath)) {
return base::nullopt;
}
- subpath += subcomponent;
- subcomponent = kSbFileSepString + std::string("splash.html");
- if (SbStringConcat(path.data(), subcomponent.c_str(), kSbFileMaxPath) >
- static_cast<int>(kSbFileMaxPath)) {
+ if (topic && !topic.value().empty()) {
+ std::string encoded_topic;
+ base::Base64Encode(topic.value(), &encoded_topic);
+ if (!AddPathDirectory(encoded_topic, path, subpath)) {
+ return base::nullopt;
+ }
+ }
+ if (!AddPathDirectory(std::string("splash.html"), path, subpath)) {
return base::nullopt;
}
- subpath += subcomponent;
- return subpath;
+ return subpath.erase(0, 1); // Remove leading separator
+}
+
+bool SplashScreenCache::AddPathDirectory(const std::string& directory,
+ std::vector<char>& path,
+ std::string& subpath) const {
+ std::string subcomponent = kSbFileSepString + directory;
+ if (SbStringConcat(path.data(), subcomponent.c_str(), kSbFileMaxPath) >=
+ static_cast<int>(kSbFileMaxPath)) {
+ return false;
+ }
+ subpath += subcomponent;
+ return true;
}
} // namespace browser
diff --git a/src/cobalt/browser/splash_screen_cache.h b/src/cobalt/browser/splash_screen_cache.h
index 336d124..b10e8cd 100644
--- a/src/cobalt/browser/splash_screen_cache.h
+++ b/src/cobalt/browser/splash_screen_cache.h
@@ -17,6 +17,7 @@
#include <memory>
#include <string>
+#include <vector>
#include "base/optional.h"
#include "base/synchronization/lock.h"
@@ -37,7 +38,8 @@
SplashScreenCache();
// Cache the splash screen.
- bool CacheSplashScreen(const std::string& content) const;
+ bool CacheSplashScreen(const std::string& content,
+ const base::Optional<std::string>& topic) const;
// Read the cached the splash screen.
int ReadCachedSplashScreen(const std::string& key,
@@ -47,18 +49,26 @@
bool IsSplashScreenCached() const;
// Set the URL of the currently requested splash screen.
- void SetUrl(const GURL& url) { url_ = url; }
+ void SetUrl(const GURL& url, const base::Optional<std::string>& topic) {
+ url_ = url;
+ topic_ = topic;
+ }
// Get the cache location of the currently requested splash screen.
GURL GetCachedSplashScreenUrl() {
- base::Optional<std::string> key = GetKeyForStartUrl(url_);
+ base::Optional<std::string> key = GetKeyForStartConfig(url_, topic_);
return GURL(loader::kCacheScheme + ("://" + *key));
}
private:
- // Get the key that corresponds to a starting URL. Optionally create
- // subdirectories along the path.
- static base::Optional<std::string> GetKeyForStartUrl(const GURL& url);
+ // Get the key that corresponds to the starting URL and (optional) topic.
+ base::Optional<std::string> GetKeyForStartConfig(
+ const GURL& url, const base::Optional<std::string>& topic) const;
+
+ // Adds the directory to the path and subpath if the new path does not exceed
+ // maximum length. Returns true if successful.
+ bool AddPathDirectory(const std::string& directory, std::vector<char>& path,
+ std::string& subpath) const;
// Lock to protect access to the cache file.
mutable base::Lock lock_;
@@ -66,6 +76,8 @@
mutable uint32_t last_page_hash_;
// Latest url that was navigated to.
GURL url_;
+ // Splash topic associated with startup.
+ base::Optional<std::string> topic_;
};
} // namespace browser
diff --git a/src/cobalt/browser/switches.cc b/src/cobalt/browser/switches.cc
index 915b762..64653d5 100644
--- a/src/cobalt/browser/switches.cc
+++ b/src/cobalt/browser/switches.cc
@@ -390,6 +390,17 @@
"no value is set, the URL in gyp_configuration.gypi or base.gypi will be "
"used.";
+const char kFallbackSplashScreenTopics[] = "fallback_splash_screen_topics";
+const char kFallbackSplashScreenTopicsHelp[] =
+ "Setting this switch defines a mapping of URL 'topics' to splash screen "
+ "URLs or filenames that Cobalt will use in the absence of a web cache, "
+ "(for example, music=music_splash_screen.html&foo=file:///bar.html). If a "
+ "URL is given it should match the format of 'fallback_splash_screen_url'. "
+ "A given filename should exist in the same directory as "
+ "'fallback_splash_screen_url'. If no fallback url exists for the topic of "
+ "the URL used to launch Cobalt, then the value of "
+ "'fallback_splash_screen_url' will be used.";
+
const char kVersion[] = "version";
const char kVersionHelp[] = "Prints the current version of Cobalt";
diff --git a/src/cobalt/browser/switches.h b/src/cobalt/browser/switches.h
index f70c0f8..67c03f7 100644
--- a/src/cobalt/browser/switches.h
+++ b/src/cobalt/browser/switches.h
@@ -151,6 +151,8 @@
extern const char kSoftwareSurfaceCacheSizeInBytesHelp[];
extern const char kFallbackSplashScreenURL[];
extern const char kFallbackSplashScreenURLHelp[];
+extern const char kFallbackSplashScreenTopics[];
+extern const char kFallbackSplashScreenTopicsHelp[];
extern const char kVersion[];
extern const char kVersionHelp[];
extern const char kViewport[];
diff --git a/src/cobalt/browser/web_module.cc b/src/cobalt/browser/web_module.cc
index d852db5..9748ca1 100644
--- a/src/cobalt/browser/web_module.cc
+++ b/src/cobalt/browser/web_module.cc
@@ -87,17 +87,19 @@
const int kDOMMaxElementDepth = 32;
void CacheUrlContent(SplashScreenCache* splash_screen_cache,
- const std::string& content) {
- splash_screen_cache->SplashScreenCache::CacheSplashScreen(content);
+ const std::string& content,
+ const base::Optional<std::string>& topic) {
+ splash_screen_cache->SplashScreenCache::CacheSplashScreen(content, topic);
}
-base::Callback<void(const std::string&)> CacheUrlContentCallback(
- SplashScreenCache* splash_screen_cache) {
+base::Callback<void(const std::string&, const base::Optional<std::string>&)>
+CacheUrlContentCallback(SplashScreenCache* splash_screen_cache) {
// This callback takes in first the url, then the content string.
if (splash_screen_cache) {
return base::Bind(CacheUrlContent, base::Unretained(splash_screen_cache));
} else {
- return base::Callback<void(const std::string&)>();
+ return base::Callback<void(const std::string&,
+ const base::Optional<std::string>&)>();
}
}
diff --git a/src/cobalt/build/all.gyp b/src/cobalt/build/all.gyp
index 79ece6f..c2db0e4 100644
--- a/src/cobalt/build/all.gyp
+++ b/src/cobalt/build/all.gyp
@@ -89,6 +89,7 @@
'<(DEPTH)/third_party/zlib/zlib.gyp:zip_unittests_deploy',
'<(DEPTH)/net/net.gyp:net_unittests_deploy',
'<(DEPTH)/sql/sql.gyp:sql_unittests_deploy',
+ '<(DEPTH)/starboard/common/common_test.gyp:common_test_deploy',
'<(DEPTH)/starboard/elf_loader/elf_loader.gyp:elf_loader_test_deploy',
'<(DEPTH)/starboard/loader_app/loader_app.gyp:loader_app_tests_deploy',
'<(DEPTH)/starboard/nplb/nplb_evergreen_compat_tests/nplb_evergreen_compat_tests.gyp:nplb_evergreen_compat_tests_deploy',
diff --git a/src/cobalt/build/build.id b/src/cobalt/build/build.id
index ab60a96..396f316 100644
--- a/src/cobalt/build/build.id
+++ b/src/cobalt/build/build.id
@@ -1 +1 @@
-283707
\ No newline at end of file
+286167
\ No newline at end of file
diff --git a/src/cobalt/build/cobalt_build_id.gyp b/src/cobalt/build/cobalt_build_id.gyp
index 68d0c7f..23a96cc 100644
--- a/src/cobalt/build/cobalt_build_id.gyp
+++ b/src/cobalt/build/cobalt_build_id.gyp
@@ -19,7 +19,7 @@
'<(output_path)',
],
'action': [
- 'python',
+ 'python2',
'<(build_id_py_path)',
'<(output_path)',
'<(cobalt_version)',
diff --git a/src/cobalt/build/cobalt_configuration.py b/src/cobalt/build/cobalt_configuration.py
index 4e8d6bc..2cd04f6 100644
--- a/src/cobalt/build/cobalt_configuration.py
+++ b/src/cobalt/build/cobalt_configuration.py
@@ -114,7 +114,15 @@
# XMLHttpRequest: send() - Redirects (basics) (307).
# Disabled because of: Flaky.
- 'xhr/WebPlatformTest.Run/XMLHttpRequest_send_redirect_htm'
+ 'xhr/WebPlatformTest.Run/XMLHttpRequest_send_redirect_htm',
+
+ # Disabled because of: Flaky on buildbot across multiple buildconfigs.
+ # Non-reproducible with local runs.
+ ('xhr/WebPlatformTest.Run/'
+ 'XMLHttpRequest_send_entity_body_get_head_async_htm'),
+ 'xhr/WebPlatformTest.Run/XMLHttpRequest_status_error_htm',
+ 'xhr/WebPlatformTest.Run/XMLHttpRequest_response_json_htm',
+ 'xhr/WebPlatformTest.Run/XMLHttpRequest_send_redirect_to_non_cors_htm',
]
return filters
diff --git a/src/cobalt/build/copy_web_data.gypi b/src/cobalt/build/copy_web_data.gypi
index f849b7b..787a7ea 100644
--- a/src/cobalt/build/copy_web_data.gypi
+++ b/src/cobalt/build/copy_web_data.gypi
@@ -57,7 +57,7 @@
'<!@pymod_do_main(starboard.build.copy_data -o <(sb_static_contents_output_data_dir)/web/<(content_web_output_subdir) --outputs <(content_web_input_files))',
],
'action': [
- 'python',
+ 'python2',
'<(DEPTH)/starboard/build/copy_data.py',
'-o', '<(sb_static_contents_output_data_dir)/web/<(content_web_output_subdir)',
'<@(content_web_input_files)',
diff --git a/src/cobalt/build/gyp_cobalt b/src/cobalt/build/gyp_cobalt
index 9a69561..f73c7be 100755
--- a/src/cobalt/build/gyp_cobalt
+++ b/src/cobalt/build/gyp_cobalt
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python2
# Copyright 2014 The Cobalt Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -27,7 +27,9 @@
from cobalt.tools import paths
from starboard.build.gyp_runner import GypRunner
from starboard.tools import build
+from starboard.tools import command_line
from starboard.tools import config
+from starboard.tools import log_level
from starboard.tools import platform
@@ -64,9 +66,9 @@
parser.add_argument('--check', action='store_true',
help='Check format of gyp files.')
- parser.add_argument('-v', '--verbose', dest='verbose_count',
- default=0, action='count',
- help='Verbose level (multiple times for more).')
+ parser.add_argument('-v', '--verbose', action='store_true',
+ help='Enables verbose logging. For more control over the '
+ "logging level use '--log_level' instead.")
parser.add_argument('platform', choices=platform.GetAll(),
metavar='platform',
help='Target platform. Supported platforms are: %s.' % (
@@ -75,25 +77,13 @@
default=None,
help='GYP build file. Uses all.gyp if nothing is given.')
+ command_line.AddLoggingArguments(parser, default='warning')
+
options = parser.parse_args(argv)
options.application = cobalt_configuration.APPLICATION_NAME
return options
-def _SetupLogging():
- logging_level = logging.WARNING
- logging_format = '%(message)s'
- logging.basicConfig(level=logging_level, format=logging_format)
-
-
-def _SetLogLevel(verbose_count):
- logging_level = logging.WARNING
- if verbose_count == 1:
- logging_level = logging.INFO
- elif verbose_count >= 2:
- logging_level = logging.DEBUG
- logging.getLogger().setLevel(logging_level)
-
def _GetBuildConfigs(build_configs):
if build_configs:
return build_configs
@@ -105,9 +95,9 @@
return config.GetAll()
def main(argv):
- _SetupLogging()
options = _ParseCommandLineArguments(argv)
- _SetLogLevel(options.verbose_count)
+
+ log_level.InitializeLogging(options)
if os.environ.get('GYP_DEFINES'):
logging.error('GYP_DEFINES environment variable is not supported.')
diff --git a/src/cobalt/build/gyp_cobalt.bat b/src/cobalt/build/gyp_cobalt.bat
index 0d70fd3..bc80bfe 100644
--- a/src/cobalt/build/gyp_cobalt.bat
+++ b/src/cobalt/build/gyp_cobalt.bat
@@ -23,7 +23,6 @@
setlocal
set SCRIPT_DIR=%~dp0
-set SCRIPT_DIR_UNIX=%SCRIPT_DIR:\=/%
:: Locate depot_tool by searching for the location of git.bat.
for %%F in (git.bat) do set DEPOT_TOOLS_DIR=%%~dp$PATH:F
@@ -46,11 +45,11 @@
:: Full path to the git directory.
set GIT_BIN_DIR=%DEPOT_TOOLS_DIR%%GIT_BIN_DIR%\bin\
-:: Convert back slashes into Unix-style forward slashes.
-set ARGS=%*
-set ARGS_UNIX=%ARGS:\=/%
+:: Put git from depot_tools in path
+set PATH=%PATH%;%GIT_BIN_DIR%
-echo Running gyp_cobalt using git bash, Ctrl+C may not work well...
-%GIT_BIN_DIR%bash.exe -lc "python %SCRIPT_DIR_UNIX%gyp_cobalt %ARGS_UNIX%"
+set ARGS=%*
+
+python2 %SCRIPT_DIR%gyp_cobalt %ARGS%
:EOF
diff --git a/src/cobalt/configuration/configuration.cc b/src/cobalt/configuration/configuration.cc
index 9c5087c..a5e9aad 100644
--- a/src/cobalt/configuration/configuration.cc
+++ b/src/cobalt/configuration/configuration.cc
@@ -155,6 +155,13 @@
#endif
}
+const char* Configuration::CobaltFallbackSplashScreenTopics() {
+ if (configuration_api_ && configuration_api_->version >= 2) {
+ return configuration_api_->CobaltFallbackSplashScreenTopics();
+ }
+ return "";
+}
+
bool Configuration::CobaltEnableQuic() {
if (configuration_api_) {
#if defined(COBALT_ENABLE_QUIC)
diff --git a/src/cobalt/configuration/configuration.h b/src/cobalt/configuration/configuration.h
index 6ea8ba5..6913ea1 100644
--- a/src/cobalt/configuration/configuration.h
+++ b/src/cobalt/configuration/configuration.h
@@ -59,6 +59,7 @@
bool CobaltGcZeal();
const char* CobaltRasterizerType();
bool CobaltEnableJit();
+ const char* CobaltFallbackSplashScreenTopics();
private:
Configuration();
diff --git a/src/cobalt/content/fonts/fonts.gyp b/src/cobalt/content/fonts/fonts.gyp
index 36c4562..a47c417 100644
--- a/src/cobalt/content/fonts/fonts.gyp
+++ b/src/cobalt/content/fonts/fonts.gyp
@@ -151,7 +151,7 @@
'<(sb_static_contents_output_data_dir)/fonts/fonts.xml',
],
'action': [
- 'python', 'scripts/filter_fonts.py',
+ 'python2', 'scripts/filter_fonts.py',
'-i', '<(source_font_config_dir)/fonts.xml',
'-o', '<(sb_static_contents_output_data_dir)/fonts/fonts.xml',
'<@(package_categories)',
diff --git a/src/cobalt/css_parser/css_parser.gyp b/src/cobalt/css_parser/css_parser.gyp
index d498ca5..d07f4c8 100644
--- a/src/cobalt/css_parser/css_parser.gyp
+++ b/src/cobalt/css_parser/css_parser.gyp
@@ -51,9 +51,6 @@
{
'rule_name': 'bison',
'extension': 'y',
- # Don't run through Cygwin on Windows since we want to use a custom
- # Bison executable.
- 'msvs_cygwin_shell': 0,
'outputs': [
# Tokens and types, included by scanner.
'<(SHARED_INTERMEDIATE_DIR)/<(_module_dir)/<(RULE_INPUT_ROOT)_generated.h',
diff --git a/src/cobalt/cssom/cssom.gyp b/src/cobalt/cssom/cssom.gyp
index b8278f9..59ea816 100644
--- a/src/cobalt/cssom/cssom.gyp
+++ b/src/cobalt/cssom/cssom.gyp
@@ -286,9 +286,8 @@
'outputs': [
'<(output_path)',
],
- 'action': ['python', '<(script_path)', 'CSSOMEmbeddedResources', '<(output_path)', '<(input_directory)'],
+ 'action': ['python2', '<(script_path)', 'CSSOMEmbeddedResources', '<(output_path)', '<(input_directory)'],
'message': 'Embedding cssom resources in "<(input_directory)" into header file, "<(output_path)".',
- 'msvs_cygwin_shell': 1,
},
],
'direct_dependent_settings': {
diff --git a/src/cobalt/debug/remote/devtools/METADATA b/src/cobalt/debug/remote/devtools/METADATA
index 76331fa..38373bd 100644
--- a/src/cobalt/debug/remote/devtools/METADATA
+++ b/src/cobalt/debug/remote/devtools/METADATA
@@ -9,7 +9,7 @@
}
url {
type: GIT
- value: "https://github.com/ChromeDevTools/devtools-frontend"
+ value: "https://chromium.googlesource.com/devtools/devtools-frontend"
}
version: "757e0e1e1ffc4a0d36d005d120de5f73c1b910e0"
last_upgrade_date {
diff --git a/src/cobalt/debug/remote/devtools/blink/devtools_protocol/devtools_protocol.gyp b/src/cobalt/debug/remote/devtools/blink/devtools_protocol/devtools_protocol.gyp
index 4c183a3..ed32bf9 100644
--- a/src/cobalt/debug/remote/devtools/blink/devtools_protocol/devtools_protocol.gyp
+++ b/src/cobalt/debug/remote/devtools/blink/devtools_protocol/devtools_protocol.gyp
@@ -37,7 +37,7 @@
'inputs': [ '<(script_path)', '<@(input_files)' ],
'outputs': [ '<(stamp_file)' ],
'action': [
- 'python',
+ 'python2',
'<(script_path)',
'--stamp',
'<(stamp_file)',
@@ -65,7 +65,7 @@
'inputs': [ '<(script_path)', '<@(input_files)' ],
'outputs': [ '<(output_file)' ],
'action': [
- 'python',
+ 'python2',
'<(script_path)',
'<@(input_files)',
'<(output_file)',
diff --git a/src/cobalt/debug/remote/devtools/devtools.gyp b/src/cobalt/debug/remote/devtools/devtools.gyp
index da2d82b..bb16c96 100644
--- a/src/cobalt/debug/remote/devtools/devtools.gyp
+++ b/src/cobalt/debug/remote/devtools/devtools.gyp
@@ -2046,7 +2046,7 @@
],
'outputs': [ '<(output_file)' ],
'action': [
- 'python',
+ 'python2',
'<(script_path)',
'<@(grd_files)',
'--relative_path_dirs',
@@ -2072,7 +2072,7 @@
'inputs': [ '<(script_path)', '<@(input_files)' ],
'outputs': [ '<(output_file)' ],
'action': [
- 'python',
+ 'python2',
'<(script_path)',
'<(output_file)',
'<@(input_files)',
@@ -2093,7 +2093,7 @@
'inputs': [ '<(script_path)', '<@(input_files)' ],
'outputs': [ '<(output_file)' ],
'action': [
- 'python',
+ 'python2',
'<(script_path)',
'<@(input_files)',
'<(output_file)',
@@ -2114,7 +2114,7 @@
'inputs': [ '<(script_path)', '<@(input_files)' ],
'outputs': [ '<(output_file)' ],
'action': [
- 'python',
+ 'python2',
'<(script_path)',
'<@(input_files)',
'<(output_file)',
@@ -2138,7 +2138,7 @@
'inputs': [ '<(script_path)', '<@(input_files)' ],
'outputs': [ '<(output_file)' ],
'action': [
- 'python',
+ 'python2',
'<(script_path)',
'<@(input_files)',
'--output_js_dir',
@@ -2185,7 +2185,7 @@
'<@(generated_worker_bundles)',
],
'action': [
- 'python',
+ 'python2',
'<(script_path)',
'<@(devtools_applications)',
'--input_path',
@@ -2218,7 +2218,7 @@
'<@(copied_devtools_modules)',
],
'action': [
- 'python',
+ 'python2',
'<(script_path)',
'--rsp',
'<(rsp_file)',
@@ -2248,7 +2248,7 @@
'<(stamp_file)',
],
'action': [
- 'python',
+ 'python2',
'<(script_path)',
'--input_path',
'<(input_path)',
diff --git a/src/cobalt/doc/lifecycle.md b/src/cobalt/doc/lifecycle.md
index a75d2fb..b9b9373 100644
--- a/src/cobalt/doc/lifecycle.md
+++ b/src/cobalt/doc/lifecycle.md
@@ -110,8 +110,8 @@
``` c++
class MyApplication : public shared::starboard::QueueApplication {
// [ ... ]
- bool IsStartImmediate() SB_OVERRIDE;
- bool IsPreloadImmediate() SB_OVERRIDE;
+ bool IsStartImmediate() override;
+ bool IsPreloadImmediate() override;
// [ ... ]
}
```
@@ -132,6 +132,6 @@
To support the `--preload` command-line argument:
``` c++
- bool IsStartImmediate() SB_OVERRIDE { return !HasPreloadSwitch(); }
- bool IsPreloadImmediate() SB_OVERRIDE { return HasPreloadSwitch(); }
+ bool IsStartImmediate() override { return !HasPreloadSwitch(); }
+ bool IsPreloadImmediate() override { return HasPreloadSwitch(); }
```
diff --git a/src/cobalt/dom/dom_test.gyp b/src/cobalt/dom/dom_test.gyp
index dbe96a0..53e43cc 100644
--- a/src/cobalt/dom/dom_test.gyp
+++ b/src/cobalt/dom/dom_test.gyp
@@ -44,6 +44,7 @@
'font_cache_test.cc',
'html_element_factory_test.cc',
'html_element_test.cc',
+ 'html_link_element_test.cc',
'intersection_observer_test.cc',
'keyboard_event_test.cc',
'local_storage_database_test.cc',
diff --git a/src/cobalt/dom/html_link_element.cc b/src/cobalt/dom/html_link_element.cc
index 8244909..b7150de 100644
--- a/src/cobalt/dom/html_link_element.cc
+++ b/src/cobalt/dom/html_link_element.cc
@@ -20,6 +20,7 @@
#include <vector>
#include "base/bind.h"
+#include "base/strings/string_tokenizer.h"
#include "base/trace_event/trace_event.h"
#include "cobalt/cssom/css_parser.h"
#include "cobalt/cssom/css_style_sheet.h"
@@ -34,10 +35,34 @@
namespace dom {
namespace {
+bool IsValidRelChar(char const& c) {
+ return (isalnum(c) || c == '_' || c == '\\' || c == '-');
+}
+
+bool IsValidSplashScreenFormat(const std::string& rel) {
+ base::StringTokenizer tokenizer(rel, "_");
+ tokenizer.set_options(base::StringTokenizer::RETURN_DELIMS);
+ bool is_valid_format = true;
+ while (tokenizer.GetNext()) {
+ std::string token = tokenizer.token();
+ if (SbStringCompareAll(token.c_str(), "splashscreen") == 0) {
+ is_valid_format = true;
+ } else {
+ for (char const& c : token) {
+ if (!IsValidRelChar(c)) {
+ return false;
+ }
+ }
+ is_valid_format = false;
+ }
+ }
+ return is_valid_format;
+}
+
CspDelegate::ResourceType GetCspResourceTypeForRel(const std::string& rel) {
if (rel == "stylesheet") {
return CspDelegate::kStyle;
- } else if (rel == "splashscreen") {
+ } else if (IsValidSplashScreenFormat(rel)) {
return CspDelegate::kLocation;
} else {
NOTIMPLEMENTED();
@@ -71,13 +96,15 @@
const char HTMLLinkElement::kTagName[] = "link";
// static
const std::vector<std::string> HTMLLinkElement::kSupportedRelValues = {
- "stylesheet", "splashscreen"};
+ "stylesheet"};
void HTMLLinkElement::OnInsertedIntoDocument() {
HTMLElement::OnInsertedIntoDocument();
if (std::find(kSupportedRelValues.begin(), kSupportedRelValues.end(),
rel()) != kSupportedRelValues.end()) {
Obtain();
+ } else if (IsValidSplashScreenFormat(rel())) {
+ Obtain();
} else {
LOG(WARNING) << "<link> has unsupported rel value: " << rel() << ".";
}
@@ -202,7 +229,7 @@
Document* document = node_document();
if (rel() == "stylesheet") {
OnStylesheetLoaded(document, *content);
- } else if (rel() == "splashscreen") {
+ } else if (IsValidSplashScreenFormat(rel())) {
OnSplashscreenLoaded(document, *content);
} else {
NOTIMPLEMENTED();
@@ -257,7 +284,13 @@
void HTMLLinkElement::OnSplashscreenLoaded(Document* document,
const std::string& content) {
scoped_refptr<Window> window = document->window();
- window->CacheSplashScreen(content);
+ std::string link = rel();
+ size_t last_underscore = link.find_last_of("_");
+ base::Optional<std::string> topic;
+ if (last_underscore != std::string::npos) {
+ topic = link.substr(0, last_underscore);
+ }
+ window->CacheSplashScreen(content, topic);
}
void HTMLLinkElement::OnStylesheetLoaded(Document* document,
diff --git a/src/cobalt/dom/html_link_element.h b/src/cobalt/dom/html_link_element.h
index ba4ee01..e38abea 100644
--- a/src/cobalt/dom/html_link_element.h
+++ b/src/cobalt/dom/html_link_element.h
@@ -67,13 +67,14 @@
DEFINE_WRAPPABLE_TYPE(HTMLLinkElement);
- private:
+ protected:
~HTMLLinkElement() override {}
+ private:
void ResolveAndSetAbsoluteURL();
// From the spec: HTMLLinkElement.
- void Obtain();
+ virtual void Obtain();
void OnContentProduced(const loader::Origin& last_url_origin,
std::unique_ptr<std::string> content);
diff --git a/src/cobalt/dom/html_link_element_test.cc b/src/cobalt/dom/html_link_element_test.cc
new file mode 100644
index 0000000..380711e
--- /dev/null
+++ b/src/cobalt/dom/html_link_element_test.cc
@@ -0,0 +1,114 @@
+// Copyright 2020 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "cobalt/dom/html_link_element.h"
+#include "base/message_loop/message_loop.h"
+#include "cobalt/cssom/testing/mock_css_parser.h"
+#include "cobalt/dom/document.h"
+#include "cobalt/dom/dom_stat_tracker.h"
+#include "cobalt/dom/testing/stub_environment_settings.h"
+#include "cobalt/dom/window.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using ::testing::NiceMock;
+
+namespace cobalt {
+namespace dom {
+
+class HTMLLinkElementMock : public HTMLLinkElement {
+ public:
+ explicit HTMLLinkElementMock(Document* document)
+ : HTMLLinkElement(document) {}
+ void Obtain() { obtained_ = true; }
+ bool obtained_ = false;
+ bool obtained() { return obtained_; }
+};
+
+class DocumentMock : public Document {
+ public:
+ explicit DocumentMock(HTMLElementContext* context) : Document(context) {}
+ scoped_refptr<HTMLLinkElementMock> CreateElement(
+ const std::string& local_name) {
+ return scoped_refptr<HTMLLinkElementMock>(new HTMLLinkElementMock(this));
+ }
+};
+
+class HtmlLinkElementTest : public ::testing::Test {
+ protected:
+ HtmlLinkElementTest()
+ : dom_stat_tracker_(new DomStatTracker("HtmlLinkElementTest")),
+ html_element_context_(&environment_settings_, NULL, NULL, &css_parser_,
+ NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
+ NULL, NULL, NULL, NULL, dom_stat_tracker_.get(),
+ "", base::kApplicationStateStarted, NULL),
+ document_(new DocumentMock(&html_element_context_)),
+ message_loop_(base::MessageLoop::TYPE_DEFAULT) {}
+
+ scoped_refptr<DocumentMock> document() { return document_; }
+ scoped_refptr<HTMLLinkElementMock> CreateDocumentWithLinkElement(
+ std::string rel = "");
+
+ private:
+ std::unique_ptr<DomStatTracker> dom_stat_tracker_;
+ testing::StubEnvironmentSettings environment_settings_;
+ NiceMock<cssom::testing::MockCSSParser> css_parser_;
+ HTMLElementContext html_element_context_;
+ scoped_refptr<DocumentMock> document_;
+ base::MessageLoop message_loop_;
+};
+
+scoped_refptr<HTMLLinkElementMock>
+HtmlLinkElementTest::CreateDocumentWithLinkElement(std::string rel) {
+ scoped_refptr<HTMLLinkElementMock> element_ =
+ document_->CreateElement("link");
+ if (!rel.empty()) {
+ element_->SetAttribute("rel", rel);
+ }
+ document_->AppendChild(element_);
+ return element_;
+}
+
+TEST_F(HtmlLinkElementTest, StylesheetRelAttribute) {
+ scoped_refptr<HTMLLinkElementMock> el =
+ CreateDocumentWithLinkElement("stylesheet");
+ EXPECT_TRUE(el->obtained());
+}
+
+TEST_F(HtmlLinkElementTest, SplashScreenRelAttribute) {
+ scoped_refptr<HTMLLinkElementMock> el =
+ CreateDocumentWithLinkElement("splashscreen");
+ EXPECT_TRUE(el->obtained());
+
+ el = CreateDocumentWithLinkElement("music_splashscreen");
+ EXPECT_TRUE(el->obtained());
+
+ el = CreateDocumentWithLinkElement("music-_\\2_splashscreen");
+ EXPECT_TRUE(el->obtained());
+}
+
+TEST_F(HtmlLinkElementTest, BadSplashScreenRelAttribute) {
+ scoped_refptr<HTMLLinkElementMock> el =
+ CreateDocumentWithLinkElement("bad*_splashscreen");
+ EXPECT_FALSE(el->obtained());
+
+ el = CreateDocumentWithLinkElement("badsplashscreen");
+ EXPECT_FALSE(el->obtained());
+
+ el = CreateDocumentWithLinkElement("splashscreen_bad");
+ EXPECT_FALSE(el->obtained());
+}
+
+} // namespace dom
+} // namespace cobalt
diff --git a/src/cobalt/dom/window.cc b/src/cobalt/dom/window.cc
index a667e9e..4f89140 100644
--- a/src/cobalt/dom/window.cc
+++ b/src/cobalt/dom/window.cc
@@ -705,12 +705,13 @@
tracer->Trace(on_screen_keyboard_);
}
-void Window::CacheSplashScreen(const std::string& content) {
+void Window::CacheSplashScreen(const std::string& content,
+ const base::Optional<std::string>& topic) {
if (splash_screen_cache_callback_.is_null()) {
return;
}
DLOG(INFO) << "Caching splash screen for URL " << location()->url();
- splash_screen_cache_callback_.Run(content);
+ splash_screen_cache_callback_.Run(content, topic);
}
const scoped_refptr<OnScreenKeyboard>& Window::on_screen_keyboard() const {
diff --git a/src/cobalt/dom/window.h b/src/cobalt/dom/window.h
index 3b3976f..de6fce2 100644
--- a/src/cobalt/dom/window.h
+++ b/src/cobalt/dom/window.h
@@ -121,7 +121,9 @@
// close() was called.
typedef base::Callback<void(base::TimeDelta)> CloseCallback;
typedef UrlRegistry<MediaSource> MediaSourceRegistry;
- typedef base::Callback<void(const std::string&)> CacheCallback;
+ typedef base::Callback<void(const std::string&,
+ const base::Optional<std::string>&)>
+ CacheCallback;
enum ClockType {
kClockTypeTestRunner,
@@ -378,7 +380,8 @@
void OnDocumentRootElementUnableToProvideOffsetDimensions();
// Cache the passed in splash screen content for the window.location URL.
- void CacheSplashScreen(const std::string& content);
+ void CacheSplashScreen(const std::string& content,
+ const base::Optional<std::string>& topic);
const scoped_refptr<loader::CORSPreflightCache> get_preflight_cache() {
return preflight_cache_;
diff --git a/src/cobalt/extension/configuration.h b/src/cobalt/extension/configuration.h
index 9d379c7..7c12817 100644
--- a/src/cobalt/extension/configuration.h
+++ b/src/cobalt/extension/configuration.h
@@ -219,6 +219,11 @@
// See "cobalt/doc/performance_tuning.md" for more information on when this
// should be used.
bool (*CobaltEnableJit)();
+
+ // The fields below this point were added in version 2 or later.
+
+ // A mapping of splash screen topics to fallback URLs.
+ const char* (*CobaltFallbackSplashScreenTopics)();
} CobaltExtensionConfigurationApi;
#ifdef __cplusplus
diff --git a/src/cobalt/extension/extension_test.cc b/src/cobalt/extension/extension_test.cc
index 04c0642..38b7c83 100644
--- a/src/cobalt/extension/extension_test.cc
+++ b/src/cobalt/extension/extension_test.cc
@@ -128,7 +128,7 @@
}
EXPECT_STREQ(extension_api->name, kExtensionName);
- EXPECT_TRUE(extension_api->version == 1);
+ EXPECT_TRUE(extension_api->version == 1 || extension_api->version == 2);
EXPECT_TRUE(extension_api->CobaltUserOnExitStrategy != NULL);
EXPECT_TRUE(extension_api->CobaltRenderDirtyRegionOnly != NULL);
EXPECT_TRUE(extension_api->CobaltEglSwapInterval != NULL);
@@ -149,6 +149,9 @@
EXPECT_TRUE(extension_api->CobaltReduceCpuMemoryBy != NULL);
EXPECT_TRUE(extension_api->CobaltReduceGpuMemoryBy != NULL);
EXPECT_TRUE(extension_api->CobaltGcZeal != NULL);
+ if (extension_api->version >= 2) {
+ EXPECT_TRUE(extension_api->CobaltFallbackSplashScreenTopics != NULL);
+ }
const ExtensionApi* second_extension_api =
static_cast<const ExtensionApi*>(SbSystemGetExtension(kExtensionName));
diff --git a/src/cobalt/extension/media_session.h b/src/cobalt/extension/media_session.h
index dccbd42..eb91f7c 100644
--- a/src/cobalt/extension/media_session.h
+++ b/src/cobalt/extension/media_session.h
@@ -89,16 +89,6 @@
CobaltExtensionMediaMetadata* metadata;
double actual_playback_rate;
SbTimeMonotonic current_playback_position;
-
- // Callback to MediaSessionClient::UpdatePlatformPlaybackState for when the
- // platform updates state.
- CobaltExtensionMediaSessionUpdatePlatformPlaybackStateCallback
- update_platform_playback_state_callback;
-
- // Callback to MediaSessionClient::InvokeAction for when the platform handles
- // a new media action.
- void* callback_context;
- CobaltExtensionMediaSessionInvokeActionCallback invoke_action_callback;
} CobaltExtensionMediaSessionState;
typedef struct CobaltExtensionMediaSessionApi {
@@ -114,6 +104,17 @@
void (*OnMediaSessionStateChanged)(
CobaltExtensionMediaSessionState session_state);
+ // Register MediaSessionClient callbacks when the platform creates a new
+ // MediaSessionClient.
+ void (*RegisterMediaSessionCallbacks)(
+ void* callback_context,
+ CobaltExtensionMediaSessionInvokeActionCallback invoke_action_callback,
+ CobaltExtensionMediaSessionUpdatePlatformPlaybackStateCallback
+ update_platform_playback_state_callback);
+
+ // Destroy the platform's MediaSessionClient after Cobalt's
+ // MediaSessionClient has been destroyed.
+ void (*DestroyMediaSessionClientCallback) ();
} CobaltExtensionMediaSessionApi;
inline void CobaltExtensionMediaSessionActionDetailsInit(
diff --git a/src/cobalt/layout/box_generator.cc b/src/cobalt/layout/box_generator.cc
index c622e99..2bc8571 100644
--- a/src/cobalt/layout/box_generator.cc
+++ b/src/cobalt/layout/box_generator.cc
@@ -1057,12 +1057,17 @@
container_box_before_split->SetUiNavItem(html_element->GetUiNavItem());
boxes_.push_back(container_box_before_split);
- BoxIntersectionObserverModule::IntersectionObserverRootVector roots =
- html_element->GetLayoutIntersectionObserverRoots();
- BoxIntersectionObserverModule::IntersectionObserverTargetVector targets =
- html_element->GetLayoutIntersectionObserverTargets();
- container_box_before_split->AddIntersectionObserverRootsAndTargets(
- std::move(roots), std::move(targets));
+ // We already handle the case where the Intersection Observer root is the
+ // viewport with the initial containing block in layout.
+ if (html_element !=
+ html_element->node_document()->document_element()->AsHTMLElement()) {
+ BoxIntersectionObserverModule::IntersectionObserverRootVector roots =
+ html_element->GetLayoutIntersectionObserverRoots();
+ BoxIntersectionObserverModule::IntersectionObserverTargetVector targets =
+ html_element->GetLayoutIntersectionObserverTargets();
+ container_box_before_split->AddIntersectionObserverRootsAndTargets(
+ std::move(roots), std::move(targets));
+ }
AppendPseudoElementToLine(html_element, dom::kBeforePseudoElementType);
diff --git a/src/cobalt/layout/intersection_observer_target.cc b/src/cobalt/layout/intersection_observer_target.cc
index 26d863d..e114025 100644
--- a/src/cobalt/layout/intersection_observer_target.cc
+++ b/src/cobalt/layout/intersection_observer_target.cc
@@ -27,108 +27,22 @@
namespace cobalt {
namespace layout {
+namespace {
-void IntersectionObserverTarget::UpdateIntersectionObservationsForTarget(
- ContainerBox* target_box) {
- TRACE_EVENT0(
- "cobalt::layout",
- "IntersectionObserverTarget::UpdateIntersectionObservationsForTarget()");
- // Walk up the containing block chain looking for the box referencing the
- // IntersectionObserverRoot corresponding to this IntersectionObserverTarget.
- // Skip further processing for the target if it is not a descendant of the
- // root in the containing block chain.
- const ContainerBox* root_box = target_box->GetContainingBlock();
- while (!root_box->ContainsIntersectionObserverRoot(
- intersection_observer_root_)) {
- if (!root_box->parent()) {
- return;
- }
- root_box = root_box->GetContainingBlock();
- }
-
- // Let targetRect be target's bounding border box.
- RectLayoutUnit target_transformed_border_box(
- target_box->GetTransformedBoxFromRoot(
- target_box->GetBorderBoxFromMarginBox()));
- math::RectF target_rect =
- math::RectF(target_transformed_border_box.x().toFloat(),
- target_transformed_border_box.y().toFloat(),
- target_transformed_border_box.width().toFloat(),
- target_transformed_border_box.height().toFloat());
-
- // Let intersectionRect be the result of running the compute the intersection
- // algorithm on target.
- math::RectF root_bounds = GetRootBounds(
- root_box, intersection_observer_root_->root_margin_property_value());
- math::RectF intersection_rect = ComputeIntersectionBetweenTargetAndRoot(
- root_box, root_bounds, target_rect, target_box);
-
- // Let targetArea be targetRect's area.
- float target_area = target_rect.size().GetArea();
-
- // Let intersectionArea be intersectionRect's area.
- float intersection_area = intersection_rect.size().GetArea();
-
- // Let isIntersecting be true if targetRect and rootBounds intersect or are
- // edge-adjacent, even if the intersection has zero area (because rootBounds
- // or targetRect have zero area); otherwise, let isIntersecting be false.
- bool is_intersecting =
- intersection_rect.width() != 0 || intersection_rect.height() != 0;
-
- // If targetArea is non-zero, let intersectionRatio be intersectionArea
- // divided by targetArea. Otherwise, let intersectionRatio be 1 if
- // isIntersecting is true, or 0 if isIntersecting is false.
- float intersection_ratio = target_area > 0 ? intersection_area / target_area
- : is_intersecting ? 1.0f : 0.0f;
-
- // Let thresholdIndex be the index of the first entry in observer.thresholds
- // whose value is greater than intersectionRatio, or the length of
- // observer.thresholds if intersectionRatio is greater than or equal to the
- // last entry in observer.thresholds.
- const std::vector<double>& thresholds =
- intersection_observer_root_->thresholds_vector();
- size_t threshold_index;
- for (threshold_index = 0; threshold_index < thresholds.size();
- ++threshold_index) {
- if (thresholds.at(threshold_index) > intersection_ratio) {
- // isIntersecting is false if intersectionRatio is less than all
- // thresholds, sorted ascending. Not in spec but follows Chrome behavior.
- if (threshold_index == 0) {
- is_intersecting = false;
- }
- break;
- }
- }
-
- // If thresholdIndex does not equal previousThresholdIndex or if
- // isIntersecting does not equal previousIsIntersecting, queue an
- // IntersectionObserverEntry, passing in observer, time, rootBounds,
- // boundingClientRect, intersectionRect, isIntersecting, and target.
- if (static_cast<int32>(threshold_index) != previous_threshold_index_ ||
- is_intersecting != previous_is_intersecting_) {
- on_intersection_callback_.Run(root_bounds, target_rect, intersection_rect,
- is_intersecting, intersection_ratio);
- }
-
- // Update the previousThresholdIndex and previousIsIntersecting properties.
- previous_threshold_index_ = static_cast<int32>(threshold_index);
- previous_is_intersecting_ = is_intersecting;
+int32 GetUsedLengthOfRootMarginPropertyValue(
+ const scoped_refptr<cssom::PropertyValue>& length_property_value,
+ LayoutUnit percentage_base) {
+ UsedLengthValueProvider used_length_provider(percentage_base);
+ length_property_value->Accept(&used_length_provider);
+ // Not explicitly stated in web spec, but has been observed that Chrome
+ // truncates root margin decimal values.
+ return static_cast<int32>(
+ used_length_provider.used_length().value_or(LayoutUnit(0.0f)).toFloat());
}
-bool IntersectionObserverTarget::IsInContainingBlockChain(
- const ContainerBox* potential_containing_block,
- const ContainerBox* target_box) {
- const ContainerBox* containing_block = target_box->GetContainingBlock();
- while (containing_block != potential_containing_block) {
- if (!containing_block->parent()) {
- return false;
- }
- containing_block = containing_block->GetContainingBlock();
- }
- return true;
-}
-
-math::RectF IntersectionObserverTarget::GetRootBounds(
+// Rules for determining the root intersection rectangle bounds.
+// https://www.w3.org/TR/intersection-observer/#intersectionobserver-root-intersection-rectangle
+math::RectF GetRootBounds(
const ContainerBox* root_box,
scoped_refptr<cssom::PropertyListValue> root_margin_property_value) {
math::RectF root_bounds_without_margins;
@@ -175,18 +89,27 @@
return root_bounds;
}
-int32 IntersectionObserverTarget::GetUsedLengthOfRootMarginPropertyValue(
- const scoped_refptr<cssom::PropertyValue>& length_property_value,
- LayoutUnit percentage_base) {
- UsedLengthValueProvider used_length_provider(percentage_base);
- length_property_value->Accept(&used_length_provider);
- // Not explicitly stated in web spec, but has been observed that Chrome
- // truncates root margin decimal values.
- return static_cast<int32>(
- used_length_provider.used_length().value_or(LayoutUnit(0.0f)).toFloat());
+// Similar to the IntersectRects function in math::RectF, but handles edge
+// adjacent intersections as valid intersections (instead of returning a
+// rectangle with zero dimensions)
+math::RectF IntersectIntersectionObserverRects(const math::RectF& a,
+ const math::RectF& b) {
+ float rx = std::max(a.x(), b.x());
+ float ry = std::max(a.y(), b.y());
+ float rr = std::min(a.right(), b.right());
+ float rb = std::min(a.bottom(), b.bottom());
+
+ if (rx > rr || ry > rb) {
+ return math::RectF(0.0f, 0.0f, 0.0f, 0.0f);
+ }
+
+ return math::RectF(rx, ry, rr - rx, rb - ry);
}
-math::RectF IntersectionObserverTarget::ComputeIntersectionBetweenTargetAndRoot(
+// Compute the intersection between a target and the observer's intersection
+// root.
+// https://www.w3.org/TR/intersection-observer/#calculate-intersection-rect-algo
+math::RectF ComputeIntersectionBetweenTargetAndRoot(
const ContainerBox* root_box, const math::RectF& root_bounds,
const math::RectF& target_rect, const ContainerBox* target_box) {
// Let intersectionRect be target's bounding border box.
@@ -280,18 +203,95 @@
return intersection_rect;
}
-math::RectF IntersectionObserverTarget::IntersectIntersectionObserverRects(
- const math::RectF& a, const math::RectF& b) {
- float rx = std::max(a.x(), b.x());
- float ry = std::max(a.y(), b.y());
- float rr = std::min(a.right(), b.right());
- float rb = std::min(a.bottom(), b.bottom());
+} // namespace
- if (rx > rr || ry > rb) {
- return math::RectF(0.0f, 0.0f, 0.0f, 0.0f);
+void IntersectionObserverTarget::UpdateIntersectionObservationsForTarget(
+ ContainerBox* target_box) {
+ TRACE_EVENT0(
+ "cobalt::layout",
+ "IntersectionObserverTarget::UpdateIntersectionObservationsForTarget()");
+ // Walk up the containing block chain looking for the box referencing the
+ // IntersectionObserverRoot corresponding to this IntersectionObserverTarget.
+ // Skip further processing for the target if it is not a descendant of the
+ // root in the containing block chain.
+ const ContainerBox* root_box = target_box->GetContainingBlock();
+ while (!root_box->ContainsIntersectionObserverRoot(
+ intersection_observer_root_)) {
+ if (!root_box->parent()) {
+ return;
+ }
+ root_box = root_box->GetContainingBlock();
}
- return math::RectF(rx, ry, rr - rx, rb - ry);
+ // Let targetRect be target's bounding border box.
+ RectLayoutUnit target_transformed_border_box(
+ target_box->GetTransformedBoxFromRoot(
+ target_box->GetBorderBoxFromMarginBox()));
+ const math::RectF target_rect =
+ math::RectF(target_transformed_border_box.x().toFloat(),
+ target_transformed_border_box.y().toFloat(),
+ target_transformed_border_box.width().toFloat(),
+ target_transformed_border_box.height().toFloat());
+
+ // Let intersectionRect be the result of running the compute the intersection
+ // algorithm on target.
+ const math::RectF root_bounds = GetRootBounds(
+ root_box, intersection_observer_root_->root_margin_property_value());
+ const math::RectF intersection_rect = ComputeIntersectionBetweenTargetAndRoot(
+ root_box, root_bounds, target_rect, target_box);
+
+ // Let targetArea be targetRect's area.
+ float target_area = target_rect.size().GetArea();
+
+ // Let intersectionArea be intersectionRect's area.
+ float intersection_area = intersection_rect.size().GetArea();
+
+ // Let isIntersecting be true if targetRect and rootBounds intersect or are
+ // edge-adjacent, even if the intersection has zero area (because rootBounds
+ // or targetRect have zero area); otherwise, let isIntersecting be false.
+ bool is_intersecting =
+ intersection_rect.width() != 0 || intersection_rect.height() != 0 ||
+ (target_rect.width() == 0 && target_rect.height() == 0 &&
+ root_bounds.Contains(target_rect));
+
+ // If targetArea is non-zero, let intersectionRatio be intersectionArea
+ // divided by targetArea. Otherwise, let intersectionRatio be 1 if
+ // isIntersecting is true, or 0 if isIntersecting is false.
+ float intersection_ratio = target_area > 0 ? intersection_area / target_area
+ : is_intersecting ? 1.0f : 0.0f;
+
+ // Let thresholdIndex be the index of the first entry in observer.thresholds
+ // whose value is greater than intersectionRatio, or the length of
+ // observer.thresholds if intersectionRatio is greater than or equal to the
+ // last entry in observer.thresholds.
+ const std::vector<double>& thresholds =
+ intersection_observer_root_->thresholds_vector();
+ size_t threshold_index;
+ for (threshold_index = 0; threshold_index < thresholds.size();
+ ++threshold_index) {
+ if (thresholds.at(threshold_index) > intersection_ratio) {
+ // isIntersecting is false if intersectionRatio is less than all
+ // thresholds, sorted ascending. Not in spec but follows Chrome behavior.
+ if (threshold_index == 0) {
+ is_intersecting = false;
+ }
+ break;
+ }
+ }
+
+ // If thresholdIndex does not equal previousThresholdIndex or if
+ // isIntersecting does not equal previousIsIntersecting, queue an
+ // IntersectionObserverEntry, passing in observer, time, rootBounds,
+ // boundingClientRect, intersectionRect, isIntersecting, and target.
+ if (static_cast<int32>(threshold_index) != previous_threshold_index_ ||
+ is_intersecting != previous_is_intersecting_) {
+ on_intersection_callback_.Run(root_bounds, target_rect, intersection_rect,
+ is_intersecting, intersection_ratio);
+ }
+
+ // Update the previousThresholdIndex and previousIsIntersecting properties.
+ previous_threshold_index_ = static_cast<int32>(threshold_index);
+ previous_is_intersecting_ = is_intersecting;
}
} // namespace layout
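For reference, the intersectionRatio / thresholdIndex steps spelled out in the comments above condense into a small standalone C++ sketch; the struct and function names below are placeholders rather than Cobalt types.

```
#include <cstddef>
#include <vector>

// Placeholder result type; the real code stores these values in member fields.
struct RatioAndThreshold {
  float intersection_ratio;
  size_t threshold_index;
  bool is_intersecting;
};

RatioAndThreshold ComputeRatioAndThresholdIndex(
    float target_area, float intersection_area, bool is_intersecting,
    const std::vector<double>& thresholds /* sorted ascending */) {
  // If targetArea is non-zero, the ratio is intersectionArea / targetArea;
  // otherwise it is 1 or 0 depending on isIntersecting.
  float ratio = target_area > 0.0f ? intersection_area / target_area
                                   : (is_intersecting ? 1.0f : 0.0f);

  // thresholdIndex is the index of the first threshold greater than the
  // ratio, or thresholds.size() if the ratio reaches the last threshold.
  size_t index = 0;
  while (index < thresholds.size() && thresholds[index] <= ratio) {
    ++index;
  }

  // Chrome-compatible tweak mirrored from the loop above: a ratio below every
  // threshold is reported as not intersecting.
  if (index == 0 && !thresholds.empty()) {
    is_intersecting = false;
  }
  return {ratio, index, is_intersecting};
}
```

An entry is then queued only when the index or the intersecting flag differs from the previous observation, which is the comparison against previous_threshold_index_ and previous_is_intersecting_ above.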
diff --git a/src/cobalt/layout/intersection_observer_target.h b/src/cobalt/layout/intersection_observer_target.h
index a40d41d..23a8671 100644
--- a/src/cobalt/layout/intersection_observer_target.h
+++ b/src/cobalt/layout/intersection_observer_target.h
@@ -75,34 +75,6 @@
}
private:
- // Walk up the containing block chain, as described in
- // http://www.w3.org/TR/CSS2/visudet.html#containing-block-details
- bool IsInContainingBlockChain(const ContainerBox* potential_containing_block,
- const ContainerBox* target_box);
-
- int32 GetUsedLengthOfRootMarginPropertyValue(
- const scoped_refptr<cssom::PropertyValue>& length_property_value,
- LayoutUnit percentage_base);
-
- // Rules for determining the root intersection rectangle bounds.
- // https://www.w3.org/TR/intersection-observer/#intersectionobserver-root-intersection-rectangle
- math::RectF GetRootBounds(
- const ContainerBox* root_box,
- scoped_refptr<cssom::PropertyListValue> root_margin_property_value);
-
- // Compute the intersection between a target and the observer's intersection
- // root.
- // https://www.w3.org/TR/intersection-observer/#calculate-intersection-rect-algo
- math::RectF ComputeIntersectionBetweenTargetAndRoot(
- const ContainerBox* root_box, const math::RectF& root_bounds,
- const math::RectF& target_rect, const ContainerBox* target_box);
-
- // Similar to the IntersectRects function in math::RectF, but handles edge
- // adjacent intersections as valid intersections (instead of returning a
- // rectangle with zero dimensions)
- math::RectF IntersectIntersectionObserverRects(const math::RectF& a,
- const math::RectF& b);
-
OnIntersectionCallback on_intersection_callback_;
scoped_refptr<IntersectionObserverRoot> intersection_observer_root_;
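The helper declarations removed from the header above now live in the anonymous namespace of the .cc file. As a quick illustration of why IntersectIntersectionObserverRects treats edge-adjacent rectangles as intersecting, here is a self-contained sketch; the Rect type is a stand-in for math::RectF.

```
#include <algorithm>
#include <iostream>

struct Rect {
  float x, y, w, h;
  float right() const { return x + w; }
  float bottom() const { return y + h; }
};

// Same shape as the helper above: a zero-width or zero-height overlap is kept
// instead of being collapsed into an empty rectangle.
Rect IntersectKeepingEdges(const Rect& a, const Rect& b) {
  float rx = std::max(a.x, b.x);
  float ry = std::max(a.y, b.y);
  float rr = std::min(a.right(), b.right());
  float rb = std::min(a.bottom(), b.bottom());
  if (rx > rr || ry > rb) return {0.f, 0.f, 0.f, 0.f};  // truly disjoint
  return {rx, ry, rr - rx, rb - ry};  // zero area if only edge-adjacent
}

int main() {
  // Two 10x10 boxes sharing the vertical edge at x == 10.
  Rect box_a{0.f, 0.f, 10.f, 10.f};
  Rect box_b{10.f, 0.f, 10.f, 10.f};
  Rect r = IntersectKeepingEdges(box_a, box_b);
  // Prints "10 0 0 10": a degenerate but valid intersection along the edge.
  std::cout << r.x << " " << r.y << " " << r.w << " " << r.h << "\n";
  return 0;
}
```

A plain rectangle-intersection helper would return an empty rect here, and the edge-adjacent case would then incorrectly read as "not intersecting".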
diff --git a/src/cobalt/layout_tests/testdata/web-platform-tests/cobalt_special/web_platform_tests.txt b/src/cobalt/layout_tests/testdata/web-platform-tests/cobalt_special/web_platform_tests.txt
index 25ffd6c..d6e13da 100644
--- a/src/cobalt/layout_tests/testdata/web-platform-tests/cobalt_special/web_platform_tests.txt
+++ b/src/cobalt/layout_tests/testdata/web-platform-tests/cobalt_special/web_platform_tests.txt
@@ -1,4 +1,5 @@
# Cobalt's special tests that borrows WPT infrastructures.
origin-clean.htm,PASS
-preflight-cache-2.htm,PASS
\ No newline at end of file
+preflight-cache-2.htm,PASS
+xhr_content_length.htm,PASS
diff --git a/src/cobalt/layout_tests/testdata/web-platform-tests/intersection-observer/web_platform_tests.txt b/src/cobalt/layout_tests/testdata/web-platform-tests/intersection-observer/web_platform_tests.txt
index b830268..e543994 100644
--- a/src/cobalt/layout_tests/testdata/web-platform-tests/intersection-observer/web_platform_tests.txt
+++ b/src/cobalt/layout_tests/testdata/web-platform-tests/intersection-observer/web_platform_tests.txt
@@ -7,33 +7,33 @@
# Empty rootMargin should evaluate to default, not cause error
empty-root-margin.html,DISABLE
initial-observation-with-threshold.html,PASS
+# Unsupported: functions that measure the width of the space between adjacent inline elements
+inline-client-rect.html,DISABLE
inline-with-block-child-client-rect.html,PASS
isIntersecting-change-events.html,PASS
+# overflow: scroll results in incorrectly clipped intersection rect
+isIntersecting-threshold.html,DISABLE
+multiple-targets.html,PASS
+multiple-thresholds.html,PASS
# rootMargin default should be "0px 0px 0px 0px", not "0px"
observer-attributes.html,DISABLE
# WPT testharness needs to be rebased
observer-exceptions.html,DISABLE
+observer-without-js-reference.html,PASS
#Deleting an element does not trigger an intersection
remove-element.html,DISABLE
root-margin-root-element.html,PASS
+# Root margin calculations have rounding errors
+root-margin.html,DISABLE
# Setting IO target equal to document.documentElement crashes Cobalt
root-margin-rounding.html,DISABLE
rtl-clipped-root.html,PASS
+same-document-no-root.html,PASS
same-document-root.html,PASS
+same-document-zero-size-target.html,PASS
+text-target.html,PASS
zero-area-element-hidden.html,PASS
-#Zero-area target does not trigger an intersection
-zero-area-element-visible.html,DISABLE
-
-#No root specified - intersections with viewport incorrectly reported
-inline-client-rect.html,DISABLE
-isIntersecting-threshold.html,DISABLE
-multiple-targets.html,DISABLE
-multiple-thresholds.html,DISABLE
-observer-without-js-reference.html,DISABLE
-root-margin.html,DISABLE
-same-document-no-root.html,DISABLE
-same-document-zero-size-target.html,DISABLE
-text-target.html,DISABLE
+zero-area-element-visible.html,PASS
#IntersectionObserverV2 not implemented
v2/blur-filter.html,DISABLE
diff --git a/src/cobalt/loader/loader.gyp b/src/cobalt/loader/loader.gyp
index f4b8774..2b61cf7 100644
--- a/src/cobalt/loader/loader.gyp
+++ b/src/cobalt/loader/loader.gyp
@@ -218,7 +218,7 @@
'outputs': [
'<(output_path)',
],
- 'action': ['python', '<(script_path)', 'LoaderEmbeddedResources', '<(output_path)', '<(input_directory)'],
+ 'action': ['python2', '<(script_path)', 'LoaderEmbeddedResources', '<(output_path)', '<(input_directory)'],
'message': 'Embedding layout resources in "<(input_directory)" into header file, "<(output_path)".',
},
],
diff --git a/src/cobalt/media_session/media_session_client.cc b/src/cobalt/media_session/media_session_client.cc
index 4959580..9243556 100644
--- a/src/cobalt/media_session/media_session_client.cc
+++ b/src/cobalt/media_session/media_session_client.cc
@@ -81,6 +81,9 @@
extension_->version < 1) {
LOG(WARNING) << "Wrong MediaSession extension supplied";
extension_ = nullptr;
+ } else {
+ extension_->RegisterMediaSessionCallbacks(
+ this, &InvokeActionCallback, &UpdatePlatformPlaybackStateCallback);
}
}
#endif
@@ -90,6 +93,12 @@
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
// Prevent any outstanding MediaSession::OnChanged tasks from calling this.
media_session_->media_session_client_ = nullptr;
+
+ // Destroy the platform's MediaSessionClient, if it exists.
+ if (extension_ != NULL &&
+ extension_->DestroyMediaSessionClientCallback != NULL) {
+ extension_->DestroyMediaSessionClientCallback();
+ }
}
void MediaSessionClient::SetMediaPlayerFactory(
@@ -317,11 +326,6 @@
artwork_size};
ext_state.metadata = &ext_metadata;
- ext_state.update_platform_playback_state_callback =
- &UpdatePlatformPlaybackStateCallback;
- ext_state.invoke_action_callback = &InvokeActionCallback;
- ext_state.callback_context = this;
-
extension_->OnMediaSessionStateChanged(ext_state);
}
}
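With this change the callbacks are registered with the extension once, when the client is created, and torn down in the destructor, instead of being re-passed on every state change. A simplified C++ sketch of that lifetime pattern; the struct below is an illustrative stand-in, not the real CobaltExtensionMediaSessionApi layout.

```
// Illustrative stand-ins, not the real CobaltExtension typedefs.
typedef void (*InvokeActionFn)(int action, void* context);

struct PlatformMediaSessionApi {
  void (*RegisterMediaSessionCallbacks)(void* context, InvokeActionFn invoke);
  void (*DestroyMediaSessionClientCallback)();  // may be null
};

class SessionClient {
 public:
  explicit SessionClient(const PlatformMediaSessionApi* api) : api_(api) {
    // Register once at construction instead of on every state change.
    if (api_ && api_->RegisterMediaSessionCallbacks) {
      api_->RegisterMediaSessionCallbacks(this, &SessionClient::InvokeThunk);
    }
  }
  ~SessionClient() {
    // Mirrors the destructor hunk: let the platform drop its reference.
    if (api_ && api_->DestroyMediaSessionClientCallback) {
      api_->DestroyMediaSessionClientCallback();
    }
  }

 private:
  static void InvokeThunk(int action, void* context) {
    static_cast<SessionClient*>(context)->OnAction(action);
  }
  void OnAction(int /*action*/) {}

  const PlatformMediaSessionApi* api_;
};
```

This removes the per-update callback_context plumbing that the later android_media_session_client.cc hunks delete.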
diff --git a/src/cobalt/renderer/glimp_shaders/glsl/shaders.gypi b/src/cobalt/renderer/glimp_shaders/glsl/shaders.gypi
index 5e37306..a8801f8 100644
--- a/src/cobalt/renderer/glimp_shaders/glsl/shaders.gypi
+++ b/src/cobalt/renderer/glimp_shaders/glsl/shaders.gypi
@@ -21,8 +21,11 @@
{
'variables': {
'glsl_shaders_dir': '<(DEPTH)/cobalt/renderer/glimp_shaders/glsl',
+ 'glsl_shaders_0': [
+ '<!@pymod_do_main(starboard.build.gyp_functions file_glob <(DEPTH)/cobalt/renderer/glimp_shaders/glsl/ *.glsl)'
+ ],
'glsl_shaders': [
- '<!@(ls -1 <(DEPTH)/cobalt/renderer/glimp_shaders/glsl/*.glsl |xargs -n 1 basename)',
+ '<!@pymod_do_main(starboard.build.gyp_functions basename <@(glsl_shaders_0) )',
],
}
}
diff --git a/src/cobalt/renderer/rasterizer/egl/shaders/shaders.gyp b/src/cobalt/renderer/rasterizer/egl/shaders/shaders.gyp
index 02e1486..4c7a349 100644
--- a/src/cobalt/renderer/rasterizer/egl/shaders/shaders.gyp
+++ b/src/cobalt/renderer/rasterizer/egl/shaders/shaders.gyp
@@ -75,7 +75,7 @@
'<(shader_impl_source)',
],
'action': [
- 'python',
+ 'python2',
'<(generate_class_script)',
'<(shader_impl_header)',
'<(shader_impl_source)',
diff --git a/src/cobalt/script/mozjs-45/mozjs-45.gyp b/src/cobalt/script/mozjs-45/mozjs-45.gyp
index bb44af8..792261e 100644
--- a/src/cobalt/script/mozjs-45/mozjs-45.gyp
+++ b/src/cobalt/script/mozjs-45/mozjs-45.gyp
@@ -140,7 +140,7 @@
'outputs': [
'<(output_path)',
],
- 'action': ['python', '<(script_path)', 'MozjsEmbeddedResources', '<(output_path)', '<@(_sources)' ],
+ 'action': ['python2', '<(script_path)', 'MozjsEmbeddedResources', '<(output_path)', '<@(_sources)' ],
'message': 'Embedding mozjs resources in into header file, "<(output_path)".',
},
],
diff --git a/src/cobalt/script/v8c/v8c.gyp b/src/cobalt/script/v8c/v8c.gyp
index e9c31d8..ac6b815 100644
--- a/src/cobalt/script/v8c/v8c.gyp
+++ b/src/cobalt/script/v8c/v8c.gyp
@@ -148,7 +148,7 @@
'outputs': [
'<(output_path)',
],
- 'action': ['python', '<(script_path)', 'V8cEmbeddedResources', '<(output_path)', '<@(_sources)' ],
+ 'action': ['python2', '<(script_path)', 'V8cEmbeddedResources', '<(output_path)', '<@(_sources)' ],
'message': 'Embedding v8c resources in into header file, "<(output_path)".',
},
],
@@ -186,7 +186,7 @@
'outputs': [
'<(dummy_output_path)',
],
- 'action': ['python', '<(touch_script_path)', '<(touch_file_path)', '<(dummy_output_path)',
+ 'action': ['python2', '<(touch_script_path)', '<(touch_file_path)', '<(dummy_output_path)',
],
'message': 'Updating V8 snapshot creation time.',
},
diff --git a/src/cobalt/site/docs/development/setup-linux.md b/src/cobalt/site/docs/development/setup-linux.md
index 3c4d49f..4fff20e 100644
--- a/src/cobalt/site/docs/development/setup-linux.md
+++ b/src/cobalt/site/docs/development/setup-linux.md
@@ -32,12 +32,14 @@
Cobalt on Linux:
```
- $ sudo apt install -qqy --no-install-recommends pkgconf ninja-build bison \
- yasm binutils clang libgles2-mesa-dev mesa-common-dev libpulse-dev \
- libavresample-dev libasound2-dev libxrender-dev libxcomposite-dev
+ $ sudo apt install -qqy --no-install-recommends pkgconf ninja-build \
+ bison yasm binutils clang libgles2-mesa-dev mesa-common-dev \
+ libpulse-dev libavresample-dev libasound2-dev libxrender-dev \
+ libxcomposite-dev
```
1. Install Node.js via `nvm`:
+
```
$ export NVM_DIR=~/.nvm
$ export NODE_VERSION=12.17.0
diff --git a/src/cobalt/site/docs/reference/starboard/modules/configuration.md b/src/cobalt/site/docs/reference/starboard/modules/configuration.md
index 362c24e..fff4d9a 100644
--- a/src/cobalt/site/docs/reference/starboard/modules/configuration.md
+++ b/src/cobalt/site/docs/reference/starboard/modules/configuration.md
@@ -116,11 +116,6 @@
Macro to annotate a function as noreturn, which signals to the compiler that the
function cannot return.
-### SB_OVERRIDE ###
-
-Declares a function as overriding a virtual function on compilers that support
-it.
-
### SB_PREFERRED_RGBA_BYTE_ORDER_RGBA ###
An enumeration of values for the kSbPreferredByteOrder configuration variable.
diff --git a/src/cobalt/ui_navigation/nav_item.cc b/src/cobalt/ui_navigation/nav_item.cc
index 2cca3ec..7ddc7fa 100644
--- a/src/cobalt/ui_navigation/nav_item.cc
+++ b/src/cobalt/ui_navigation/nav_item.cc
@@ -105,7 +105,13 @@
void NavItem::Focus() {
starboard::ScopedSpinLock lock(&g_pending_updates_lock);
if (enabled_) {
- g_pending_focus = nav_item_;
+ if (g_pending_updates->empty()) {
+ // Immediately update focus if nothing else is queued for update.
+ g_pending_focus = kNativeItemInvalid;
+ GetInterface().set_focus(nav_item_);
+ } else {
+ g_pending_focus = nav_item_;
+ }
}
}
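The Focus() change applies focus immediately when no navigation updates are queued and only records a pending focus otherwise. A minimal sketch of that pattern, with std::mutex standing in for the spin lock and placeholder types for the UI navigation interface:

```
#include <functional>
#include <mutex>
#include <queue>

class FocusDispatcher {
 public:
  void Focus(int item, const std::function<void(int)>& set_focus) {
    std::lock_guard<std::mutex> lock(mutex_);
    if (pending_updates_.empty()) {
      // Nothing queued ahead of this request: apply the focus right away and
      // clear any previously recorded pending focus.
      pending_focus_ = kNoPendingFocus;
      set_focus(item);
    } else {
      // Updates are still queued; remember the request so focus is applied
      // only after the queue has been flushed.
      pending_focus_ = item;
    }
  }

 private:
  static constexpr int kNoPendingFocus = -1;
  std::mutex mutex_;
  std::queue<std::function<void()>> pending_updates_;
  int pending_focus_ = kNoPendingFocus;
};
```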
diff --git a/src/cobalt/updater/configurator.cc b/src/cobalt/updater/configurator.cc
index 210f088..aacee6a 100644
--- a/src/cobalt/updater/configurator.cc
+++ b/src/cobalt/updater/configurator.cc
@@ -1,4 +1,4 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
+// Copyright 2020 The Cobalt Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
@@ -33,10 +33,10 @@
const std::set<std::string> valid_channels = {"dev"};
const std::string kDefaultUpdaterChannel = "dev";
#elif defined(COBALT_BUILD_TYPE_QA)
-// Find more information about these test channels in the Evergreen test plan.
const std::set<std::string> valid_channels = {
+ // Default channel for qa builds
"qa",
- // A normal test channel that serves a valid update
+ // Test an update with higher version than qa channel
"test",
// Test an update with mismatched sabi
"tmsabi",
@@ -47,14 +47,31 @@
// Test an update that fails verification
"tfailv",
// Test a series of continuous updates with two channels
- "tseries1",
- "tseries2",
- // Test an update that's larger than the available storage on the device
- "tistore",
+ "tseries1", "tseries2",
};
const std::string kDefaultUpdaterChannel = "qa";
#elif defined(COBALT_BUILD_TYPE_GOLD)
-const std::set<std::string> valid_channels = {"prod", "dogfood"};
+const std::set<std::string> valid_channels = {
+ // Default channel for gold builds
+ "prod",
+ // Channel for dogfooders
+ "dogfood",
+ // Default channel for qa builds. A gold build can switch to this channel to
+ // get an official qa build.
+ "qa",
+ // Test an update with higher version than prod channel
+ "test",
+ // Test an update with mismatched sabi
+ "tmsabi",
+ // Test an update that does nothing
+ "tnoop",
+ // Test an update that crashes
+ "tcrash",
+ // Test an update that fails verification
+ "tfailv",
+ // Test a series of continuous updates with two channels
+ "tseries1", "tseries2",
+};
const std::string kDefaultUpdaterChannel = "prod";
#endif
@@ -146,8 +163,8 @@
base::flat_map<std::string, std::string> params;
params.insert(std::make_pair("SABI", SB_SABI_JSON_ID));
params.insert(std::make_pair("sbversion", std::to_string(SB_API_VERSION)));
- params.insert(std::make_pair(
- "jsengine", script::GetJavaScriptEngineNameAndVersion()));
+ params.insert(
+ std::make_pair("jsengine", script::GetJavaScriptEngineNameAndVersion()));
params.insert(std::make_pair(
"updaterchannelchanged",
SbAtomicNoBarrier_Load(&is_channel_changed_) == 1 ? "True" : "False"));
diff --git a/src/cobalt/xhr/url_fetcher_buffer_writer.cc b/src/cobalt/xhr/url_fetcher_buffer_writer.cc
index 3b43b8a..a6eec44 100644
--- a/src/cobalt/xhr/url_fetcher_buffer_writer.cc
+++ b/src/cobalt/xhr/url_fetcher_buffer_writer.cc
@@ -26,6 +26,8 @@
// Allocate 64KB if the total size is unknown to avoid allocating small buffer
// too many times.
const int64_t kDefaultPreAllocateSizeInBytes = 64 * 1024;
+// Cap the pre-allocation size to guard against erroneous size estimates.
+const int64_t kMaxPreAllocateSizeInBytes = 10 * 1024 * 1024;
void ReleaseMemory(std::string* str) {
DCHECK(str);
@@ -158,6 +160,10 @@
if (capacity < 0) {
capacity = kDefaultPreAllocateSizeInBytes;
+ } else if (capacity > kMaxPreAllocateSizeInBytes) {
+ LOG(WARNING) << "Allocation of " << capacity << " bytes is capped to "
+ << kMaxPreAllocateSizeInBytes;
+ capacity = kMaxPreAllocateSizeInBytes;
} else {
capacity_known_ = true;
}
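The capacity handling above amounts to a small policy: an unknown size gets the 64 KB default, an oversized estimate is clamped to the new 10 MB cap with a warning, and only in-range estimates are trusted. A standalone sketch, with std::cerr standing in for LOG(WARNING):

```
#include <cstdint>
#include <iostream>

const int64_t kDefaultPreAllocateSizeInBytes = 64 * 1024;
const int64_t kMaxPreAllocateSizeInBytes = 10 * 1024 * 1024;

int64_t ChoosePreAllocationSize(int64_t reported_capacity,
                                bool* capacity_known) {
  *capacity_known = false;
  if (reported_capacity < 0) {
    // Total size unknown: start with a modest default buffer.
    return kDefaultPreAllocateSizeInBytes;
  }
  if (reported_capacity > kMaxPreAllocateSizeInBytes) {
    // Guard against erroneous size estimates.
    std::cerr << "Allocation of " << reported_capacity
              << " bytes is capped to " << kMaxPreAllocateSizeInBytes << "\n";
    return kMaxPreAllocateSizeInBytes;
  }
  *capacity_known = true;
  return reported_capacity;
}
```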
diff --git a/src/glimp/stub/egl/display_impl.h b/src/glimp/stub/egl/display_impl.h
index 9dbf897..4d67036 100644
--- a/src/glimp/stub/egl/display_impl.h
+++ b/src/glimp/stub/egl/display_impl.h
@@ -32,25 +32,25 @@
DisplayImplStub();
~DisplayImplStub();
- DisplayImpl::VersionInfo GetVersionInfo() SB_OVERRIDE;
+ DisplayImpl::VersionInfo GetVersionInfo() override;
- const ConfigSet& GetSupportedConfigs() const SB_OVERRIDE {
+ const ConfigSet& GetSupportedConfigs() const override {
return supported_configs_;
}
- nb::scoped_ptr<SurfaceImpl> CreateWindowSurface(const Config* config,
- EGLNativeWindowType win,
- const AttribMap& attributes)
- SB_OVERRIDE;
+ nb::scoped_ptr<SurfaceImpl> CreateWindowSurface(
+ const Config* config,
+ EGLNativeWindowType win,
+ const AttribMap& attributes) override;
- nb::scoped_ptr<SurfaceImpl> CreatePbufferSurface(const Config* config,
- const AttribMap& attributes)
- SB_OVERRIDE;
+ nb::scoped_ptr<SurfaceImpl> CreatePbufferSurface(
+ const Config* config,
+ const AttribMap& attributes) override;
nb::scoped_ptr<gles::ContextImpl> CreateContext(const Config* config,
- int gles_version) SB_OVERRIDE;
+ int gles_version) override;
- bool SetSwapInterval(int interval) SB_OVERRIDE { return true; }
+ bool SetSwapInterval(int interval) override { return true; }
private:
void InitializeSupportedConfigs();
diff --git a/src/glimp/stub/egl/pbuffer_surface_impl.h b/src/glimp/stub/egl/pbuffer_surface_impl.h
index 63b6075..b24d364 100644
--- a/src/glimp/stub/egl/pbuffer_surface_impl.h
+++ b/src/glimp/stub/egl/pbuffer_surface_impl.h
@@ -30,8 +30,8 @@
PbufferSurfaceImplStub(int width, int height);
virtual ~PbufferSurfaceImplStub();
- int GetWidth() const SB_OVERRIDE { return width_; }
- int GetHeight() const SB_OVERRIDE { return height_; }
+ int GetWidth() const override { return width_; }
+ int GetHeight() const override { return height_; }
private:
int width_;
diff --git a/src/glimp/stub/egl/window_surface_impl.h b/src/glimp/stub/egl/window_surface_impl.h
index 09ca539..8b910a6 100644
--- a/src/glimp/stub/egl/window_surface_impl.h
+++ b/src/glimp/stub/egl/window_surface_impl.h
@@ -28,8 +28,8 @@
WindowSurfaceImplStub();
virtual ~WindowSurfaceImplStub();
- int GetWidth() const SB_OVERRIDE { return width_; }
- int GetHeight() const SB_OVERRIDE { return height_; }
+ int GetWidth() const override { return width_; }
+ int GetHeight() const override { return height_; }
private:
int width_;
diff --git a/src/glimp/stub/gles/buffer_impl.h b/src/glimp/stub/gles/buffer_impl.h
index 5f39163..d166561 100644
--- a/src/glimp/stub/gles/buffer_impl.h
+++ b/src/glimp/stub/gles/buffer_impl.h
@@ -26,13 +26,13 @@
class BufferImplStub : public BufferImpl {
public:
BufferImplStub();
- ~BufferImplStub() SB_OVERRIDE {}
+ ~BufferImplStub() override {}
- bool Allocate(Usage usage, size_t size) SB_OVERRIDE;
- bool SetData(intptr_t offset, size_t size, const void* data) SB_OVERRIDE;
+ bool Allocate(Usage usage, size_t size) override;
+ bool SetData(intptr_t offset, size_t size, const void* data) override;
- void* Map() SB_OVERRIDE;
- bool Unmap() SB_OVERRIDE;
+ void* Map() override;
+ bool Unmap() override;
private:
};
diff --git a/src/glimp/stub/gles/context_impl.h b/src/glimp/stub/gles/context_impl.h
index b46a303..ef8bd77 100644
--- a/src/glimp/stub/gles/context_impl.h
+++ b/src/glimp/stub/gles/context_impl.h
@@ -32,47 +32,47 @@
ContextImplStub();
virtual ~ContextImplStub() {}
- ContextImpl::ExtensionList GetExtensions() const SB_OVERRIDE;
- int GetMaxVertexAttribs() const SB_OVERRIDE;
- int GetMaxFragmentTextureUnits() const SB_OVERRIDE;
- int GetMaxCombinedTextureImageUnits() const SB_OVERRIDE;
- int GetMaxTextureSize() const SB_OVERRIDE;
- int GetMaxRenderbufferSize() const SB_OVERRIDE;
- int GetMaxFragmentUniformVectors() const SB_OVERRIDE;
- int GetMaxVertexTextureImageUnits() const SB_OVERRIDE { return 0; }
+ ContextImpl::ExtensionList GetExtensions() const override;
+ int GetMaxVertexAttribs() const override;
+ int GetMaxFragmentTextureUnits() const override;
+ int GetMaxCombinedTextureImageUnits() const override;
+ int GetMaxTextureSize() const override;
+ int GetMaxRenderbufferSize() const override;
+ int GetMaxFragmentUniformVectors() const override;
+ int GetMaxVertexTextureImageUnits() const override { return 0; }
- nb::scoped_ptr<ProgramImpl> CreateProgram() SB_OVERRIDE;
+ nb::scoped_ptr<ProgramImpl> CreateProgram() override;
- nb::scoped_ptr<ShaderImpl> CreateVertexShader() SB_OVERRIDE;
- nb::scoped_ptr<ShaderImpl> CreateFragmentShader() SB_OVERRIDE;
+ nb::scoped_ptr<ShaderImpl> CreateVertexShader() override;
+ nb::scoped_ptr<ShaderImpl> CreateFragmentShader() override;
- nb::scoped_ptr<BufferImpl> CreateBuffer() SB_OVERRIDE;
+ nb::scoped_ptr<BufferImpl> CreateBuffer() override;
- nb::scoped_ptr<TextureImpl> CreateTexture() SB_OVERRIDE;
+ nb::scoped_ptr<TextureImpl> CreateTexture() override;
- void Flush() SB_OVERRIDE;
- void Finish() SB_OVERRIDE;
+ void Flush() override;
+ void Finish() override;
void Clear(bool clear_color,
bool clear_depth,
bool clear_stencil,
const DrawState& draw_state,
- DrawStateDirtyFlags* dirty_flags) SB_OVERRIDE;
+ DrawStateDirtyFlags* dirty_flags) override;
void DrawArrays(DrawMode mode,
int first_vertex,
int num_vertices,
const DrawState& draw_state,
- DrawStateDirtyFlags* dirty_flags) SB_OVERRIDE;
+ DrawStateDirtyFlags* dirty_flags) override;
void DrawElements(DrawMode mode,
int num_vertices,
IndexDataType index_data_type,
intptr_t index_offset_in_bytes,
const DrawState& draw_state,
- DrawStateDirtyFlags* dirty_flags) SB_OVERRIDE;
+ DrawStateDirtyFlags* dirty_flags) override;
- void SwapBuffers(egl::Surface* surface) SB_OVERRIDE;
+ void SwapBuffers(egl::Surface* surface) override;
private:
};
diff --git a/src/glimp/stub/gles/program_impl.h b/src/glimp/stub/gles/program_impl.h
index 28576f6..64d5aba 100644
--- a/src/glimp/stub/gles/program_impl.h
+++ b/src/glimp/stub/gles/program_impl.h
@@ -33,15 +33,15 @@
static const int kMaxUniformsPerShader = 16;
ProgramImplStub();
- ~ProgramImplStub() SB_OVERRIDE {}
+ ~ProgramImplStub() override {}
ProgramImpl::LinkResults Link(
const nb::scoped_refptr<Shader>& vertex_shader,
- const nb::scoped_refptr<Shader>& fragment_shader) SB_OVERRIDE;
+ const nb::scoped_refptr<Shader>& fragment_shader) override;
- bool BindAttribLocation(unsigned int index, const char* name) SB_OVERRIDE;
+ bool BindAttribLocation(unsigned int index, const char* name) override;
- int GetUniformLocation(const char* name) SB_OVERRIDE;
+ int GetUniformLocation(const char* name) override;
// Returns the location of the shader attribute that was previously bound
// to |index| in a call to BindAttribLocation().
diff --git a/src/glimp/stub/gles/shader_impl.h b/src/glimp/stub/gles/shader_impl.h
index 296a3e4..9976834 100644
--- a/src/glimp/stub/gles/shader_impl.h
+++ b/src/glimp/stub/gles/shader_impl.h
@@ -36,9 +36,9 @@
};
explicit ShaderImplStub(Type type);
- ~ShaderImplStub() SB_OVERRIDE {}
+ ~ShaderImplStub() override {}
- ShaderImpl::CompileResults Compile(const std::string& source) SB_OVERRIDE;
+ ShaderImpl::CompileResults Compile(const std::string& source) override;
private:
Type type_;
diff --git a/src/glimp/stub/gles/texture_impl.h b/src/glimp/stub/gles/texture_impl.h
index c0a2f76..e37a82b 100644
--- a/src/glimp/stub/gles/texture_impl.h
+++ b/src/glimp/stub/gles/texture_impl.h
@@ -31,30 +31,32 @@
class TextureImplStub : public TextureImpl {
public:
TextureImplStub();
- ~TextureImplStub() SB_OVERRIDE {}
+ ~TextureImplStub() override {}
- void Initialize(int level, PixelFormat pixel_format, int width, int height)
- SB_OVERRIDE;
+ void Initialize(int level,
+ PixelFormat pixel_format,
+ int width,
+ int height) override;
bool UpdateData(int level,
const nb::Rect<int>& window,
int pitch_in_bytes,
- const void* pixels) SB_OVERRIDE;
+ const void* pixels) override;
void UpdateDataFromBuffer(
int level,
const nb::Rect<int>& window,
int pitch_in_bytes,
const nb::scoped_refptr<Buffer>& pixel_unpack_buffer,
- uintptr_t buffer_offset) SB_OVERRIDE;
+ uintptr_t buffer_offset) override;
- void BindToEGLSurface(egl::Surface* surface) SB_OVERRIDE;
+ void BindToEGLSurface(egl::Surface* surface) override;
void ReadPixelsAsRGBA8(const nb::Rect<int>& window,
int pitch_in_bytes,
- void* pixels) SB_OVERRIDE;
+ void* pixels) override;
- bool CanBeAttachedToFramebuffer() const SB_OVERRIDE;
+ bool CanBeAttachedToFramebuffer() const override;
private:
};
diff --git a/src/net/net.gyp b/src/net/net.gyp
index d140fd9..f57652f 100644
--- a/src/net/net.gyp
+++ b/src/net/net.gyp
@@ -1823,7 +1823,7 @@
'<(output_dir)/<(RULE_INPUT_ROOT)-inc.cc',
],
'action': [
- 'python',
+ 'python2',
'<(script_path)',
'<(data_dir)/<(RULE_INPUT_ROOT).gperf',
'<(output_dir)/<(RULE_INPUT_ROOT)-inc.cc',
diff --git a/src/starboard/android/apk/app/src/app/AndroidManifest.xml b/src/starboard/android/apk/app/src/app/AndroidManifest.xml
index 7337495..945df54 100644
--- a/src/starboard/android/apk/app/src/app/AndroidManifest.xml
+++ b/src/starboard/android/apk/app/src/app/AndroidManifest.xml
@@ -29,6 +29,8 @@
<uses-permission android:name="android.permission.INTERNET"/>
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS"/>
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
+ <!-- This is needed when targeting API 28+ to use foreground services -->
+ <uses-permission android:name="android.permission.FOREGROUND_SERVICE" />
<application
android:name="dev.cobalt.app.CobaltApplication"
@@ -65,6 +67,11 @@
</intent-filter>
</activity>
+ <service android:name="dev.cobalt.coat.MediaPlaybackService"
+ android:enabled="true"
+ android:exported="false"
+ android:foregroundServiceType="mediaPlayback"/>
+
</application>
</manifest>
diff --git a/src/starboard/android/apk/app/src/app/java/dev/cobalt/app/MainActivity.java b/src/starboard/android/apk/app/src/app/java/dev/cobalt/app/MainActivity.java
index 3b37bf1..961fee0 100644
--- a/src/starboard/android/apk/app/src/app/java/dev/cobalt/app/MainActivity.java
+++ b/src/starboard/android/apk/app/src/app/java/dev/cobalt/app/MainActivity.java
@@ -15,6 +15,7 @@
package dev.cobalt.app;
import android.app.Activity;
+import android.app.Service;
import dev.cobalt.account.UserAuthorizerImpl;
import dev.cobalt.coat.CobaltActivity;
import dev.cobalt.coat.StarboardBridge;
@@ -31,6 +32,7 @@
@Override
protected StarboardBridge createStarboardBridge(String[] args, String startDeepLink) {
Holder<Activity> activityHolder = new Holder<>();
+ Holder<Service> serviceHolder = new Holder<>();
Runnable stopRequester =
new Runnable() {
@Override
@@ -43,6 +45,7 @@
return new StarboardBridge(
getApplicationContext(),
activityHolder,
+ serviceHolder,
userAuthorizer,
args,
startDeepLink);
diff --git a/src/starboard/android/apk/app/src/main/java/dev/cobalt/coat/CobaltActivity.java b/src/starboard/android/apk/app/src/main/java/dev/cobalt/coat/CobaltActivity.java
index 9a609c0..ac9be3c 100644
--- a/src/starboard/android/apk/app/src/main/java/dev/cobalt/coat/CobaltActivity.java
+++ b/src/starboard/android/apk/app/src/main/java/dev/cobalt/coat/CobaltActivity.java
@@ -113,11 +113,13 @@
}
getStarboardBridge().onActivityStart(this, keyboardEditor);
+ getStarboardBridge().stopMediaPlaybackService();
super.onStart();
}
@Override
protected void onStop() {
+ getStarboardBridge().startMediaPlaybackService();
getStarboardBridge().onActivityStop(this);
super.onStop();
diff --git a/src/starboard/android/apk/app/src/main/java/dev/cobalt/coat/MediaPlaybackService.java b/src/starboard/android/apk/app/src/main/java/dev/cobalt/coat/MediaPlaybackService.java
new file mode 100644
index 0000000..2fb2c95
--- /dev/null
+++ b/src/starboard/android/apk/app/src/main/java/dev/cobalt/coat/MediaPlaybackService.java
@@ -0,0 +1,145 @@
+// Copyright 2020 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package dev.cobalt.coat;
+
+import static dev.cobalt.media.Log.TAG;
+
+import android.app.Notification;
+import android.app.NotificationChannel;
+import android.app.NotificationManager;
+import android.app.Service;
+import android.content.Context;
+import android.content.Intent;
+import android.os.Build;
+import android.os.Build.VERSION;
+import android.os.IBinder;
+import androidx.annotation.RequiresApi;
+import androidx.core.app.NotificationCompat;
+import dev.cobalt.util.Log;
+import dev.cobalt.util.UsedByNative;
+
+public class MediaPlaybackService extends Service {
+
+private static final int NOTIFICATION_ID = 1234;
+private static final String NOTIFICAITON_CHANNEL_ID = "default";
+private static final String NOTIFICATION_CHANNEL_NAME = "Default channel";
+private Context context;
+
+@Override
+public void onCreate() {
+ super.onCreate();
+ Log.i(TAG, "Creating a Media playback foreground service.");
+ getStarboardBridge().onServiceStart(this);
+ context = getApplicationContext();
+}
+
+@Override
+public int onStartCommand(Intent intent, int flags, int startId) {
+ Log.i(TAG, "Cold start - Starting the serivce.");
+ startService();
+ // We don't want the system to recreate a service for us.
+ return START_NOT_STICKY;
+}
+
+@Override
+public IBinder onBind(Intent intent) {
+ // Do not support binding.
+ return null;
+}
+
+@Override
+public void onDestroy() {
+ getStarboardBridge().onServiceDestroy(this);
+ context = null;
+ super.onDestroy();
+ Log.i(TAG, "Destorying the Media playback service.");
+}
+
+public void startService() {
+ createChannel();
+ startForeground(NOTIFICATION_ID, buildNotification());
+}
+
+public void stopService() {
+ deleteChannel();
+ hideNotification();
+ stopForeground(true);
+ stopSelf();
+}
+
+private void hideNotification() {
+ Log.i(TAG, "Hiding notification after stopped the serivce");
+ NotificationManager notificationManager =
+ (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE);
+ notificationManager.cancel(NOTIFICATION_ID);
+}
+
+private void createChannel() {
+ if (Build.VERSION.SDK_INT >= 26) {
+ createChannelInternalV26();
+ }
+}
+
+@RequiresApi(26)
+private void createChannelInternalV26() {
+ NotificationManager notificationManager =
+ (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE);
+ NotificationChannel channel =
+ new NotificationChannel(
+ NOTIFICAITON_CHANNEL_ID,
+ NOTIFICATION_CHANNEL_NAME,
+ notificationManager.IMPORTANCE_DEFAULT);
+ channel.setDescription("Channel for showing persistent notification");
+ try {
+ notificationManager.createNotificationChannel(channel);
+ } catch (IllegalArgumentException e) {
+    // Ignore the failure; the service can still run without its own channel.
+ }
+}
+
+public void deleteChannel() {
+ if (Build.VERSION.SDK_INT >= 26) {
+ deleteChannelInternalV26();
+ }
+}
+
+@RequiresApi(26)
+private void deleteChannelInternalV26() {
+ NotificationManager notificationManager =
+ (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE);
+ notificationManager.deleteNotificationChannel(NOTIFICAITON_CHANNEL_ID);
+}
+
+Notification buildNotification() {
+  NotificationCompat.Builder builder =
+      new NotificationCompat.Builder(context, NOTIFICAITON_CHANNEL_ID)
+          .setShowWhen(false)
+          .setPriority(NotificationCompat.PRIORITY_MIN)
+          .setSmallIcon(android.R.drawable.stat_sys_warning)
+          .setContentTitle("Media playback service")
+          .setContentText("Media playback service is running");
+
+ if (VERSION.SDK_INT >= 26) {
+ builder.setChannelId(NOTIFICAITON_CHANNEL_ID);
+ }
+ return builder.build();
+}
+
+@UsedByNative
+protected StarboardBridge getStarboardBridge() {
+ return ((StarboardBridge.HostApplication) getApplication()).getStarboardBridge();
+}
+
+}
diff --git a/src/starboard/android/apk/app/src/main/java/dev/cobalt/coat/StarboardBridge.java b/src/starboard/android/apk/app/src/main/java/dev/cobalt/coat/StarboardBridge.java
index 9cee635..8c01db9 100644
--- a/src/starboard/android/apk/app/src/main/java/dev/cobalt/coat/StarboardBridge.java
+++ b/src/starboard/android/apk/app/src/main/java/dev/cobalt/coat/StarboardBridge.java
@@ -19,6 +19,7 @@
import static dev.cobalt.util.Log.TAG;
import android.app.Activity;
+import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
@@ -74,6 +75,7 @@
private final Context appContext;
private final Holder<Activity> activityHolder;
+ private final Holder<Service> serviceHolder;
private final String[] args;
private final String startDeepLink;
private final Runnable stopRequester =
@@ -92,6 +94,7 @@
public StarboardBridge(
Context appContext,
Holder<Activity> activityHolder,
+ Holder<Service> serviceHolder,
UserAuthorizer userAuthorizer,
String[] args,
String startDeepLink) {
@@ -102,6 +105,7 @@
this.appContext = appContext;
this.activityHolder = activityHolder;
+ this.serviceHolder = serviceHolder;
this.args = args;
this.startDeepLink = startDeepLink;
this.sysConfigChangeReceiver = new CobaltSystemConfigChangeReceiver(appContext, stopRequester);
@@ -140,6 +144,36 @@
}
}
+ protected void onServiceStart(Service service) {
+ serviceHolder.set(service);
+ }
+
+ protected void onServiceDestroy(Service service) {
+ if (serviceHolder.get() == service) {
+ serviceHolder.set(null);
+ }
+ }
+
+ protected void startMediaPlaybackService() {
+ Service service = serviceHolder.get();
+ if (service == null) {
+ Log.i(TAG, "Cold start - Instantiating a MediaPlaybackSerivce.");
+ Intent intent = new Intent(appContext, MediaPlaybackService.class);
+ appContext.startService(intent);
+ } else {
+ Log.i(TAG, "Warm start - Restarting the serivce.");
+ ((MediaPlaybackService) service).startService();
+ }
+ }
+
+ protected void stopMediaPlaybackService() {
+ Service service = serviceHolder.get();
+ if (service != null) {
+ Log.i(TAG, "Stopping the Media playback serivce.");
+ ((MediaPlaybackService) service).stopService();
+ }
+ }
+
@SuppressWarnings("unused")
@UsedByNative
protected void beforeStartOrResume() {
diff --git a/src/starboard/android/apk/app/src/main/java/dev/cobalt/media/MediaCodecBridge.java b/src/starboard/android/apk/app/src/main/java/dev/cobalt/media/MediaCodecBridge.java
index faa8742..d0585e9 100644
--- a/src/starboard/android/apk/app/src/main/java/dev/cobalt/media/MediaCodecBridge.java
+++ b/src/starboard/android/apk/app/src/main/java/dev/cobalt/media/MediaCodecBridge.java
@@ -592,8 +592,25 @@
mediaFormat.setByteBuffer(MediaFormat.KEY_HDR_STATIC_INFO, colorInfo.hdrStaticInfo);
}
- int maxWidth = findVideoDecoderResult.videoCapabilities.getSupportedWidths().getUpper();
- int maxHeight = findVideoDecoderResult.videoCapabilities.getSupportedHeights().getUpper();
+ VideoCapabilities videoCapabilities = findVideoDecoderResult.videoCapabilities;
+ int maxWidth = videoCapabilities.getSupportedWidths().getUpper();
+ int maxHeight = videoCapabilities.getSupportedHeights().getUpper();
+ if (!videoCapabilities.isSizeSupported(maxWidth, maxHeight)) {
+ if (maxHeight >= 4320 && videoCapabilities.isSizeSupported(7680, 4320)) {
+ maxWidth = 7680;
+ maxHeight = 4320;
+ } else if (maxHeight >= 2160 && videoCapabilities.isSizeSupported(3840, 2160)) {
+ maxWidth = 3840;
+ maxHeight = 2160;
+ } else if (maxHeight >= 1080 && videoCapabilities.isSizeSupported(1920, 1080)) {
+ maxWidth = 1920;
+ maxHeight = 1080;
+ } else {
+ Log.e(TAG, "Failed to find a compatible resolution");
+ maxWidth = 1920;
+ maxHeight = 1080;
+ }
+ }
if (!bridge.configureVideo(
mediaFormat,
surface,
@@ -915,8 +932,9 @@
// adapt up to 8k at any point. We thus request 8k buffers up front,
// unless the decoder claims to not be able to do 8k, in which case
// we're ok, since we would've rejected a 8k stream when canPlayType
- // was called, and then use those decoder values instead.
- if (Build.VERSION.SDK_INT > 22) {
+ // was called, and then use those decoder values instead. We only
+ // support 8k for API level 29 and above.
+ if (Build.VERSION.SDK_INT > 28) {
format.setInteger(MediaFormat.KEY_MAX_WIDTH, Math.min(7680, maxSupportedWidth));
format.setInteger(MediaFormat.KEY_MAX_HEIGHT, Math.min(4320, maxSupportedHeight));
} else {
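The MediaCodecBridge change above stops trusting the advertised maximum width/height pair unless the decoder confirms it, and otherwise falls back to the largest standard resolution it does support (8K, then 4K, then 1080p). The same selection logic as a language-neutral C++ sketch; the predicate is a stand-in for VideoCapabilities.isSizeSupported().

```
#include <functional>
#include <utility>
#include <vector>

std::pair<int, int> PickSupportedSize(
    int max_width, int max_height,
    const std::function<bool(int, int)>& is_size_supported) {
  if (is_size_supported(max_width, max_height)) {
    return {max_width, max_height};
  }
  // Fall back to the largest standard resolution the decoder accepts.
  const std::vector<std::pair<int, int>> standard_sizes = {
      {7680, 4320}, {3840, 2160}, {1920, 1080}};
  for (const auto& size : standard_sizes) {
    if (max_height >= size.second &&
        is_size_supported(size.first, size.second)) {
      return size;
    }
  }
  // Same last resort as the Java code: assume 1080p.
  return {1920, 1080};
}
```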
diff --git a/src/starboard/android/apk/app/src/main/java/dev/cobalt/media/MediaCodecUtil.java b/src/starboard/android/apk/app/src/main/java/dev/cobalt/media/MediaCodecUtil.java
index 40fcde0..71e6857 100644
--- a/src/starboard/android/apk/app/src/main/java/dev/cobalt/media/MediaCodecUtil.java
+++ b/src/starboard/android/apk/app/src/main/java/dev/cobalt/media/MediaCodecUtil.java
@@ -573,21 +573,33 @@
}
VideoCapabilities videoCapabilities = codecCapabilities.getVideoCapabilities();
- if (frameWidth != 0 && !videoCapabilities.getSupportedWidths().contains(frameWidth)) {
- Log.v(
- TAG,
- String.format(
- "Rejecting %s, reason: supported widths %s does not contain %d",
- name, videoCapabilities.getSupportedWidths().toString(), frameWidth));
- continue;
- }
- if (frameHeight != 0 && !videoCapabilities.getSupportedHeights().contains(frameHeight)) {
- Log.v(
- TAG,
- String.format(
- "Rejecting %s, reason: supported heights %s does not contain %d",
- name, videoCapabilities.getSupportedHeights().toString(), frameHeight));
- continue;
+ if (frameWidth != 0 && frameHeight != 0) {
+      if (!videoCapabilities.isSizeSupported(frameWidth, frameHeight)) {
+        Log.v(
+            TAG,
+            String.format(
+                "Rejecting %s, reason: width %d is not compatible with height %d",
+                name, frameWidth, frameHeight));
+        continue;
+ }
+ } else if (frameWidth != 0) {
+ if (!videoCapabilities.getSupportedWidths().contains(frameWidth)) {
+ Log.v(
+ TAG,
+ String.format(
+ "Rejecting %s, reason: supported widths %s does not contain %d",
+ name, videoCapabilities.getSupportedWidths().toString(), frameWidth));
+ continue;
+ }
+ } else if (frameHeight != 0) {
+ if (!videoCapabilities.getSupportedHeights().contains(frameHeight)) {
+ Log.v(
+ TAG,
+ String.format(
+ "Rejecting %s, reason: supported heights %s does not contain %d",
+ name, videoCapabilities.getSupportedHeights().toString(), frameHeight));
+ continue;
+ }
}
if (bitrate != 0 && !videoCapabilities.getBitrateRange().contains(bitrate)) {
Log.v(
@@ -597,13 +609,31 @@
name, videoCapabilities.getBitrateRange().toString(), bitrate));
continue;
}
- if (fps != 0 && !videoCapabilities.getSupportedFrameRates().contains(fps)) {
- Log.v(
- TAG,
- String.format(
- "Rejecting %s, reason: supported frame rates %s does not contain %d",
- name, videoCapabilities.getSupportedFrameRates().toString(), fps));
- continue;
+ if (fps != 0) {
+ if (frameHeight != 0 && frameWidth != 0) {
+ if (!videoCapabilities.areSizeAndRateSupported(frameWidth, frameHeight, fps)) {
+ Log.v(
+ TAG,
+ String.format(
+ "Rejecting %s, reason: supported frame rates %s does not contain %d",
+ name,
+ videoCapabilities
+ .getSupportedFrameRatesFor(frameWidth, frameHeight)
+ .toString(),
+ fps));
+ continue;
+ }
+ } else {
+ // At least one of frameHeight or frameWidth is 0
+ if (!videoCapabilities.getSupportedFrameRates().contains(fps)) {
+ Log.v(
+ TAG,
+ String.format(
+ "Rejecting %s, reason: supported frame rates %s does not contain %d",
+ name, videoCapabilities.getSupportedFrameRates().toString(), fps));
+ continue;
+ }
+ }
}
String resultName =
(secure && !name.endsWith(SECURE_DECODER_SUFFIX))
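The MediaCodecUtil change asks the decoder about the exact size (and, when known, the size/frame-rate combination) whenever both dimensions are available, and only falls back to the per-dimension range checks otherwise. Condensed into an illustrative C++ sketch, where the predicates stand in for the VideoCapabilities queries:

```
#include <functional>

struct CapabilityQueries {
  std::function<bool(int, int)> is_size_supported;
  std::function<bool(int, int, int)> are_size_and_rate_supported;
  std::function<bool(int)> width_in_range;
  std::function<bool(int)> height_in_range;
  std::function<bool(int)> rate_in_range;
};

bool DecoderSupports(const CapabilityQueries& caps, int width, int height,
                     int fps) {
  if (width != 0 && height != 0) {
    // Both dimensions known: check the exact size, and the frame rate for
    // that size if one was requested.
    if (!caps.is_size_supported(width, height)) return false;
    if (fps != 0 && !caps.are_size_and_rate_supported(width, height, fps))
      return false;
    return true;
  }
  // At most one dimension known: fall back to the advertised ranges.
  if (width != 0 && !caps.width_in_range(width)) return false;
  if (height != 0 && !caps.height_in_range(height)) return false;
  if (fps != 0 && !caps.rate_in_range(fps)) return false;
  return true;
}
```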
diff --git a/src/starboard/android/shared/android_media_session_client.cc b/src/starboard/android/shared/android_media_session_client.cc
index b822ec1..afa436c 100644
--- a/src/starboard/android/shared/android_media_session_client.cc
+++ b/src/starboard/android/shared/android_media_session_client.cc
@@ -24,6 +24,7 @@
namespace starboard {
namespace android {
namespace shared {
+namespace {
using ::starboard::android::shared::JniEnvExt;
using ::starboard::android::shared::ScopedLocalJavaRef;
@@ -146,9 +147,9 @@
// In practice, only one MediaSessionClient will become active at a time.
// Protected by "mutex"
CobaltExtensionMediaSessionUpdatePlatformPlaybackStateCallback
- update_platform_playback_state_callback;
-CobaltExtensionMediaSessionInvokeActionCallback invoke_action_callback;
-void* callback_context;
+ g_update_platform_playback_state_callback;
+CobaltExtensionMediaSessionInvokeActionCallback g_invoke_action_callback;
+void* g_callback_context;
void OnceInit() {
SbMutexCreate(&mutex);
@@ -158,7 +159,7 @@
SbOnce(&once_flag, OnceInit);
SbMutexAcquire(&mutex);
- if (invoke_action_callback != NULL && callback_context != NULL) {
+ if (g_invoke_action_callback != NULL && g_callback_context != NULL) {
CobaltExtensionMediaSessionActionDetails details = {};
CobaltExtensionMediaSessionActionDetailsInit(
&details, PlaybackStateActionToMediaSessionAction(action));
@@ -166,12 +167,14 @@
if (details.action == kCobaltExtensionMediaSessionActionSeekto) {
details.seek_time = seek_ms / 1000.0;
}
- invoke_action_callback(details, callback_context);
+ g_invoke_action_callback(details, g_callback_context);
}
SbMutexRelease(&mutex);
}
+} // namespace
+
void UpdateActiveSessionPlatformPlaybackState(PlaybackState state) {
SbOnce(&once_flag, OnceInit);
SbMutexAcquire(&mutex);
@@ -179,10 +182,10 @@
CobaltExtensionMediaSessionPlaybackState media_session_state =
PlaybackStateToCobaltExtensionPlaybackState(state);
- if (update_platform_playback_state_callback != NULL &&
- callback_context != NULL) {
- update_platform_playback_state_callback(media_session_state,
- callback_context);
+ if (g_update_platform_playback_state_callback != NULL &&
+ g_callback_context != NULL) {
+ g_update_platform_playback_state_callback(media_session_state,
+ g_callback_context);
}
SbMutexRelease(&mutex);
@@ -194,18 +197,14 @@
jint playback_state = CobaltExtensionPlaybackStateToPlaybackState(
session_state.actual_playback_state);
- void* media_session_client = session_state.callback_context;
SbOnce(&once_flag, OnceInit);
SbMutexAcquire(&mutex);
- if (playback_state != kNone) {
- callback_context = media_session_client;
- update_platform_playback_state_callback =
- session_state.update_platform_playback_state_callback;
- invoke_action_callback = session_state.invoke_action_callback;
- } else if (callback_context == media_session_client) {
- callback_context = NULL;
+
+ if (playback_state == kNone && g_callback_context != NULL) {
+ g_callback_context = NULL;
}
+
SbMutexRelease(&mutex);
jlong playback_state_actions = MediaSessionActionsToPlaybackStateActions(
@@ -277,8 +276,25 @@
j_artist.Get(), j_album.Get(), j_artwork.Get(), durationInMilliseconds);
}
+void RegisterMediaSessionCallbacks(
+ void* callback_context,
+ CobaltExtensionMediaSessionInvokeActionCallback invoke_action_callback,
+ CobaltExtensionMediaSessionUpdatePlatformPlaybackStateCallback
+ update_platform_playback_state_callback) {
+ SbOnce(&once_flag, OnceInit);
+ SbMutexAcquire(&mutex);
+
+ g_callback_context = callback_context;
+ g_invoke_action_callback = invoke_action_callback;
+ g_update_platform_playback_state_callback =
+ update_platform_playback_state_callback;
+
+ SbMutexRelease(&mutex);
+}
+
const CobaltExtensionMediaSessionApi kMediaSessionApi = {
- kCobaltExtensionMediaSessionName, 1, &OnMediaSessionStateChanged};
+ kCobaltExtensionMediaSessionName, 1, &OnMediaSessionStateChanged,
+ &RegisterMediaSessionCallbacks, NULL};
const void* GetMediaSessionApi() {
return &kMediaSessionApi;
diff --git a/src/starboard/android/shared/android_media_session_client.h b/src/starboard/android/shared/android_media_session_client.h
index 83fd0cc..3f86778 100644
--- a/src/starboard/android/shared/android_media_session_client.h
+++ b/src/starboard/android/shared/android_media_session_client.h
@@ -29,6 +29,12 @@
void OnMediaSessionStateChanged(
const CobaltExtensionMediaSessionState session_state);
+void RegisterMediaSessionCallbacks(
+ void* callback_context,
+ CobaltExtensionMediaSessionInvokeActionCallback invoke_action_callback,
+ CobaltExtensionMediaSessionUpdatePlatformPlaybackStateCallback
+ update_platform_playback_state_callback);
+
const void* GetMediaSessionApi();
} // namespace shared
} // namespace android
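On the platform side, the registered callbacks are kept in file-scope globals written under a lazily-initialized lock. A simplified sketch of that pattern, using std::mutex / std::call_once as stand-ins for SbMutex / SbOnce and a single illustrative callback type:

```
#include <mutex>

typedef void (*InvokeActionCallback)(int action, void* context);

namespace {
std::once_flag g_once_flag;
std::mutex* g_mutex = nullptr;
void* g_callback_context = nullptr;
InvokeActionCallback g_invoke_action_callback = nullptr;

void OnceInit() { g_mutex = new std::mutex; }
}  // namespace

void RegisterCallbacks(void* context, InvokeActionCallback invoke_action) {
  std::call_once(g_once_flag, OnceInit);
  std::lock_guard<std::mutex> lock(*g_mutex);
  g_callback_context = context;
  g_invoke_action_callback = invoke_action;
}

void InvokeAction(int action) {
  std::call_once(g_once_flag, OnceInit);
  std::lock_guard<std::mutex> lock(*g_mutex);
  if (g_invoke_action_callback != nullptr && g_callback_context != nullptr) {
    g_invoke_action_callback(action, g_callback_context);
  }
}
```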
diff --git a/src/starboard/android/shared/application_android.cc b/src/starboard/android/shared/application_android.cc
index 1352a26..06adff2 100644
--- a/src/starboard/android/shared/application_android.cc
+++ b/src/starboard/android/shared/application_android.cc
@@ -243,6 +243,8 @@
// have a window.
env->CallStarboardVoidMethodOrAbort("beforeStartOrResume", "()V");
DispatchStart();
+ } else if (state() == kStateConcealed) {
+ DispatchAndDelete(new Event(kSbEventTypeReveal, NULL, NULL));
} else {
// Now that we got a window back, change the command for the switch
// below to sync up with the current activity lifecycle.
@@ -257,7 +259,7 @@
// Cobalt can't keep running without a window, even if the Activity
// hasn't stopped yet. DispatchAndDelete() will inject events as needed
// if we're not already paused.
- DispatchAndDelete(new Event(kSbEventTypeSuspend, NULL, NULL));
+ DispatchAndDelete(new Event(kSbEventTypeConceal, NULL, NULL));
if (window_) {
window_->native_window = NULL;
}
@@ -322,21 +324,20 @@
if (native_window_) {
switch (sync_state) {
case AndroidCommand::kStart:
- DispatchAndDelete(new Event(kSbEventTypeResume, NULL, NULL));
+ DispatchAndDelete(new Event(kSbEventTypeReveal, NULL, NULL));
break;
case AndroidCommand::kResume:
- DispatchAndDelete(new Event(kSbEventTypeUnpause, NULL, NULL));
+ DispatchAndDelete(new Event(kSbEventTypeFocus, NULL, NULL));
break;
case AndroidCommand::kPause:
- DispatchAndDelete(new Event(kSbEventTypePause, NULL, NULL));
+ DispatchAndDelete(new Event(kSbEventTypeBlur, NULL, NULL));
break;
case AndroidCommand::kStop:
- if (state() != kStateSuspended) {
- // We usually suspend when losing the window above, but if the window
+ if (state() != kStateConcealed) {
+ // We usually conceal when losing the window above, but if the window
// wasn't destroyed (e.g. when Daydream starts) then we still have to
- // suspend when the Activity is stopped.
- env->CallStarboardVoidMethodOrAbort("beforeSuspend", "()V");
- DispatchAndDelete(new Event(kSbEventTypeSuspend, NULL, NULL));
+ // conceal when the Activity is stopped.
+ DispatchAndDelete(new Event(kSbEventTypeConceal, NULL, NULL));
}
break;
default:
@@ -348,14 +349,6 @@
void ApplicationAndroid::SendAndroidCommand(AndroidCommand::CommandType type,
void* data) {
SB_LOG(INFO) << "Send Android command: " << AndroidCommandName(type);
- if (type == AndroidCommand::kNativeWindowDestroyed) {
- // When this command is processed it will suspend Cobalt, so make the JNI
- // call to StarboardBridge.beforeSuspend() early while still here on the
- // Android main thread. This lets the MediaSession get released now without
- // having to wait to bounce between threads.
- JniEnvExt* env = JniEnvExt::Get();
- env->CallStarboardVoidMethod("beforeSuspend", "()V");
- }
AndroidCommand cmd {type, data};
ScopedLock lock(android_command_mutex_);
write(android_command_writefd_, &cmd, sizeof(cmd));
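With SB_HAS_CONCEALED_STATE enabled, the Android lifecycle commands are mapped onto the reveal/focus/blur/conceal events instead of the old resume/unpause/pause/suspend set. The mapping from the switch above, condensed into an illustrative sketch with simplified enums:

```
enum class AndroidCommand { kStart, kResume, kPause, kStop };
enum class SbEventType { kReveal, kFocus, kBlur, kConceal, kNone };

SbEventType MapCommandToEvent(AndroidCommand cmd, bool already_concealed) {
  switch (cmd) {
    case AndroidCommand::kStart:
      return SbEventType::kReveal;
    case AndroidCommand::kResume:
      return SbEventType::kFocus;
    case AndroidCommand::kPause:
      return SbEventType::kBlur;
    case AndroidCommand::kStop:
      // Only conceal if we did not already conceal when the native window was
      // destroyed (e.g. Daydream keeps the window alive through onStop()).
      return already_concealed ? SbEventType::kNone : SbEventType::kConceal;
  }
  return SbEventType::kNone;
}
```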
diff --git a/src/third_party/mozjs-45/js/src/tests/lib/__init__.py b/src/starboard/android/shared/cobalt/__init__.py
similarity index 100%
rename from src/third_party/mozjs-45/js/src/tests/lib/__init__.py
rename to src/starboard/android/shared/cobalt/__init__.py
diff --git a/src/starboard/android/shared/configuration.cc b/src/starboard/android/shared/configuration.cc
index 6f05a18..0de9388 100644
--- a/src/starboard/android/shared/configuration.cc
+++ b/src/starboard/android/shared/configuration.cc
@@ -43,7 +43,7 @@
const CobaltExtensionConfigurationApi kConfigurationApi = {
kCobaltExtensionConfigurationName,
- 1,
+ 2,
&CobaltUserOnExitStrategy,
&common::CobaltRenderDirtyRegionOnlyDefault,
&CobaltEglSwapInterval,
@@ -66,6 +66,7 @@
&common::CobaltGcZealDefault,
&common::CobaltRasterizerTypeDefault,
&common::CobaltEnableJitDefault,
+ &common::CobaltFallbackSplashScreenTopicsDefault,
};
} // namespace
diff --git a/src/starboard/android/shared/configuration_public.h b/src/starboard/android/shared/configuration_public.h
index 371a413..e428200 100644
--- a/src/starboard/android/shared/configuration_public.h
+++ b/src/starboard/android/shared/configuration_public.h
@@ -169,4 +169,7 @@
#error "Android builds need a GCC-like compiler (for the moment)."
#endif
+// Enable SB_HAS_CONCEALED_STATE support.
+#define SB_HAS_CONCEALED_STATE 1
+
#endif // STARBOARD_ANDROID_SHARED_CONFIGURATION_PUBLIC_H_
diff --git a/src/starboard/android/shared/gyp_configuration.py b/src/starboard/android/shared/gyp_configuration.py
index 04d5372..3474905 100644
--- a/src/starboard/android/shared/gyp_configuration.py
+++ b/src/starboard/android/shared/gyp_configuration.py
@@ -146,7 +146,8 @@
if not self._target_toolchain:
tool_prefix = os.path.join(sdk_utils.GetNdkPath(), 'toolchains', 'llvm',
'prebuilt', 'linux-x86_64', 'bin', '')
- cc_path = self.build_accelerator + ' ' + tool_prefix + _ABI_TOOL_NAMES[self.android_abi][0]
+ cc_path = self.build_accelerator + ' ' + tool_prefix + _ABI_TOOL_NAMES[
+ self.android_abi][0]
cxx_path = cc_path + '++'
ar_path = tool_prefix + _ABI_TOOL_NAMES[self.android_abi][1]
clang_flags = [
@@ -276,31 +277,6 @@
# A map of failing or crashing tests per target.
__FILTERED_TESTS = { # pylint: disable=invalid-name
'player_filter_tests': [
- # Filter flaky failed tests temporarily.
- 'VideoDecoderTests/VideoDecoderTest.SingleInput/0',
- 'VideoDecoderTests/VideoDecoderTest.SingleInput/2',
- 'VideoDecoderTests/VideoDecoderTest.SingleInput/4',
- 'VideoDecoderTests/VideoDecoderTest.SingleInput/6',
- 'VideoDecoderTests/VideoDecoderTest.ResetBeforeInput/0',
- 'VideoDecoderTests/VideoDecoderTest.ResetBeforeInput/2',
- 'VideoDecoderTests/VideoDecoderTest.ResetBeforeInput/4',
- 'VideoDecoderTests/VideoDecoderTest.ResetBeforeInput/6',
- 'VideoDecoderTests/VideoDecoderTest.MultipleResets/0',
- 'VideoDecoderTests/VideoDecoderTest.MultipleResets/2',
- 'VideoDecoderTests/VideoDecoderTest.MultipleResets/4',
- 'VideoDecoderTests/VideoDecoderTest.MultipleResets/6',
- 'VideoDecoderTests/VideoDecoderTest.MultipleInputs/0',
- 'VideoDecoderTests/VideoDecoderTest.MultipleInputs/2',
- 'VideoDecoderTests/VideoDecoderTest.MultipleInputs/4',
- 'VideoDecoderTests/VideoDecoderTest.MultipleInputs/6',
- 'VideoDecoderTests/VideoDecoderTest.Preroll/0',
- 'VideoDecoderTests/VideoDecoderTest.Preroll/2',
- 'VideoDecoderTests/VideoDecoderTest.Preroll/4',
- 'VideoDecoderTests/VideoDecoderTest.Preroll/6',
- 'VideoDecoderTests/VideoDecoderTest.DecodeFullGOP/0',
- 'VideoDecoderTests/VideoDecoderTest.DecodeFullGOP/2',
- 'VideoDecoderTests/VideoDecoderTest.DecodeFullGOP/4',
- 'VideoDecoderTests/VideoDecoderTest.DecodeFullGOP/6',
# GetMaxNumberOfCachedFrames() on Android is device dependent,
# and Android doesn't provide an API to get it. So, this function
diff --git a/src/starboard/android/shared/launcher.py b/src/starboard/android/shared/launcher.py
new file mode 100644
index 0000000..566e595
--- /dev/null
+++ b/src/starboard/android/shared/launcher.py
@@ -0,0 +1,434 @@
+#
+# Copyright 2017 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Android implementation of Starboard launcher abstraction."""
+
+import os
+import re
+import socket
+import subprocess
+import sys
+import threading
+import time
+import Queue
+
+import _env # pylint: disable=unused-import,g-bad-import-order
+
+from starboard.android.shared import sdk_utils
+from starboard.tools import abstract_launcher
+
+_APP_PACKAGE_NAME = 'dev.cobalt.coat'
+
+_APP_START_INTENT = 'dev.cobalt.coat/dev.cobalt.app.MainActivity'
+
+# Matches an "adb shell am monitor" error line.
+_RE_ADB_AM_MONITOR_ERROR = re.compile(r'\*\* ERROR')
+
+# String added to queue to indicate process has crashed
+_QUEUE_CODE_CRASHED = 'crashed'
+
+# How long to keep logging after a crash in order to emit the stack trace.
+_CRASH_LOG_SECONDS = 1.0
+
+_RUNTIME_PERMISSIONS = [
+ 'android.permission.GET_ACCOUNTS',
+ 'android.permission.RECORD_AUDIO',
+]
+
+
+def TargetOsPathJoin(*path_elements):
+ """os.path.join for the target (Android)."""
+ return '/'.join(path_elements)
+
+
+def CleanLine(line):
+ """Removes trailing carriages returns from ADB output."""
+ return line.replace('\r', '')
+
+
+class StepTimer(object):
+ """Class for timing how long install/run steps take."""
+
+ def __init__(self, step_name):
+ self.step_name = step_name
+ self.start_time = time.time()
+ self.end_time = None
+
+ def Stop(self):
+ if self.start_time is None:
+ sys.stderr.write('Cannot stop timer; not started\n')
+ else:
+ self.end_time = time.time()
+ total_time = self.end_time - self.start_time
+ sys.stderr.write('Step \"{}\" took {} seconds.\n'.format(
+ self.step_name, total_time))
+
+
+class AdbCommandBuilder(object):
+ """Builder for 'adb' commands."""
+
+ def __init__(self, adb, device_id=None):
+ self.adb = adb
+ self.device_id = device_id
+
+ def Build(self, *args):
+ """Builds an 'adb' commandline with the given args."""
+ result = [self.adb]
+ if self.device_id:
+ result.append('-s')
+ result.append(self.device_id)
+ result += list(args)
+ return result
+
+
+class AdbAmMonitorWatcher(object):
+ """Watches an "adb shell am monitor" process to detect crashes."""
+
+ def __init__(self, launcher, done_queue):
+ self.launcher = launcher
+ self.process = launcher._PopenAdb(
+ 'shell', 'am', 'monitor', stdout=subprocess.PIPE)
+ if abstract_launcher.ARG_DRYRUN in launcher.launcher_args:
+ self.thread = None
+ return
+    self.done_queue = done_queue
+    self.thread = threading.Thread(target=self._Run)
+    self.thread.start()
+
+ def Shutdown(self):
+ self.process.kill()
+ if self.thread:
+ self.thread.join()
+
+ def _Run(self):
+ while True:
+ line = CleanLine(self.process.stdout.readline())
+ if not line:
+ return
+ if re.search(_RE_ADB_AM_MONITOR_ERROR, line):
+ self.done_queue.put(_QUEUE_CODE_CRASHED)
+ # This log line will wake up the main thread
+ self.launcher.CallAdb('shell', 'log', '-t', 'starboard',
+ 'am monitor detected crash')
+
+
+class Launcher(abstract_launcher.AbstractLauncher):
+ """Run an application on Android."""
+
+ def __init__(self, platform, target_name, config, device_id, **kwargs):
+
+ super(Launcher, self).__init__(platform, target_name, config, device_id,
+ **kwargs)
+
+ if abstract_launcher.ARG_SYSTOOLS in self.launcher_args:
+ # Use default adb binary from path.
+ self.adb = 'adb'
+ else:
+ self.adb = os.path.join(sdk_utils.GetSdkPath(), 'platform-tools', 'adb')
+
+ self.adb_builder = AdbCommandBuilder(self.adb)
+
+ if not self.device_id:
+ self.device_id = self._IdentifyDevice()
+ else:
+ self._ConnectIfNecessary()
+
+ self.adb_builder.device_id = self.device_id
+
+ # Verify connection and dump target build fingerprint.
+ self._CheckCallAdb('shell', 'getprop', 'ro.build.fingerprint')
+
+ out_directory = os.path.split(self.GetTargetPath())[0]
+ self.apk_path = os.path.join(out_directory, '{}.apk'.format(target_name))
+ if not os.path.exists(self.apk_path):
+ raise Exception("Can't find APK {}".format(self.apk_path))
+
+ # This flag is set when the main Run() loop exits. If Kill() is called
+ # after this flag is set, it will not do anything.
+ self.killed = threading.Event()
+
+ # Keep track of the port used by ADB forward in order to remove it later
+ # on.
+ self.local_port = None
+
+ def _IsValidIPv4Address(self, address):
+ """Returns True if address is a valid IPv4 address, False otherwise."""
+ try:
+ # inet_aton throws an exception if the address is not a valid IPv4
+ # address. However addresses such as '127.1' might still be considered
+ # valid, hence the check for 3 '.' in the address.
+ # pylint: disable=g-socket-inet-aton
+ if socket.inet_aton(address) and address.count('.') == 3:
+ return True
+ except Exception: # pylint: disable=broad-except
+ pass
+ return False
+
+ def _GetAdbDevices(self):
+ """Returns a list of names of connected devices, or empty list if none."""
+
+ # Does not use the ADBCommandBuilder class because this command should be
+ # run without targeting a specific device.
+ p = self._PopenAdb('devices', stdout=subprocess.PIPE)
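+ # Skip the "List of devices attached" header and the trailing blank line.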
+ result = p.stdout.readlines()[1:-1]
+ p.wait()
+
+ names = []
+ for device in result:
+ name_info = device.split('\t')
+ # Some devices may not have authorization for USB debugging.
+ try:
+ if 'unauthorized' not in name_info[1]:
+ names.append(name_info[0])
+ # This can happen when a line looks like a device entry even though no
+ # devices are actually connected.
+ except IndexError:
+ continue
+ return names
+
+ def _IdentifyDevice(self):
+ """Picks a device to be used to run the executable.
+
+ In the event that no device_id is provided, but multiple
+ devices are connected, this method chooses the first device
+ listed.
+
+ Returns:
+ The name of an attached device, or None if no devices are present.
+ """
+ device_name = None
+
+ devices = self._GetAdbDevices()
+ if devices:
+ device_name = devices[0]
+
+ return device_name
+
+ def _ConnectIfNecessary(self):
+ """Run ADB connect if needed for devices connected over IP."""
+ if not self._IsValidIPv4Address(self.device_id):
+ return
+ for device in self._GetAdbDevices():
+ # Devices returned by _GetAdbDevices might include a port number, so we
+ # cannot simply check whether self.device_id is in the returned list.
+ if self.device_id in device:
+ return
+
+ # Device isn't connected. Run ADB connect.
+ # Does not use the ADBCommandBuilder class because this command should be
+ # run without targeting a specific device.
+ p = self._PopenAdb(
+ 'connect',
+ '{}:5555'.format(self.device_id),
+ stderr=subprocess.STDOUT,
+ stdout=subprocess.PIPE,
+ )
+ result = p.stdout.readlines()[0]
+ p.wait()
+
+ if 'connected to' not in result:
+ sys.stderr.write('Failed to connect to {}\n'.format(self.device_id))
+ sys.stderr.write('connect command exited with code {} '
+ 'and returned: {}'.format(p.returncode, result))
+
+ def _Call(self, *args):
+ sys.stderr.write('{}\n'.format(' '.join(args)))
+ if abstract_launcher.ARG_DRYRUN not in self.launcher_args:
+ subprocess.call(args, close_fds=True)
+
+ def CallAdb(self, *in_args):
+ args = self.adb_builder.Build(*in_args)
+ self._Call(*args)
+
+ def _CheckCall(self, *args):
+ sys.stderr.write('{}\n'.format(' '.join(args)))
+ if abstract_launcher.ARG_DRYRUN not in self.launcher_args:
+ subprocess.check_call(args, close_fds=True)
+
+ def _CheckCallAdb(self, *in_args):
+ args = self.adb_builder.Build(*in_args)
+ self._CheckCall(*args)
+
+ def _PopenAdb(self, *args, **kwargs):
+ build_args = self.adb_builder.Build(*args)
+ sys.stderr.write('{}\n'.format(' '.join(build_args)))
+ if abstract_launcher.ARG_DRYRUN in self.launcher_args:
+ return subprocess.Popen(['echo', 'dry-run'])
+ return subprocess.Popen(build_args, close_fds=True, **kwargs)
+
+ def Run(self):
+ # The return code for binaries run on Android is read from a log line
+ # emitted in android_main.cc. This return_code variable is assigned the
+ # value read from that line, or left at 1 in the event of a crash or early
+ # exit.
+ return_code = 1
+
+ # Setup for running executable
+ self._CheckCallAdb('wait-for-device')
+ self._Shutdown()
+
+ # Clear logcat
+ self._CheckCallAdb('logcat', '-c')
+
+ # Install the APK, unless "noinstall" was specified.
+ if abstract_launcher.ARG_NOINSTALL not in self.launcher_args:
+ install_timer = StepTimer('install')
+ self._CheckCallAdb('install', '-r', self.apk_path)
+ install_timer.Stop()
+
+ # Send the wakeup key to ensure daydream isn't running; otherwise, Activity
+ # Manager may get stuck in a loop running the test over and over again.
+ self._CheckCallAdb('shell', 'input', 'keyevent', 'KEYCODE_WAKEUP')
+
+ # Grant runtime permissions to avoid prompts during testing.
+ if abstract_launcher.ARG_NOINSTALL not in self.launcher_args:
+ for permission in _RUNTIME_PERMISSIONS:
+ self._CheckCallAdb('shell', 'pm', 'grant', _APP_PACKAGE_NAME,
+ permission)
+
+ done_queue = Queue.Queue()
+ am_monitor = AdbAmMonitorWatcher(self, done_queue)
+
+ # Increases the size of the logcat buffer. Without this, the log buffer
+ # will not flush quickly enough and output will be cut off.
+ self._CheckCallAdb('logcat', '-G', '2M')
+
+ # Ctrl + C will kill this process
+ logcat_process = self._PopenAdb(
+ 'logcat',
+ '-v',
+ 'raw',
+ '-s',
+ '*:F',
+ 'DEBUG:*',
+ 'System.err:*',
+ 'starboard:*',
+ 'starboard_media:*',
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+
+ # Actually running executable
+ run_timer = StepTimer('running executable')
+ # Initialized here so the finally block below can always read it.
+ app_crashed = False
+ try:
+ args = ['shell', 'am', 'start']
+ command_line_params = [
+ '--android_log_sleep_time=1000',
+ '--disable_sign_in',
+ ]
+ for param in self.target_command_line_params:
+ if param.startswith('--link='):
+ # Android deeplinks go in the Intent data
+ link = param.split('=')[1]
+ args += ['-d', "'{}'".format(link)]
+ else:
+ command_line_params.append(param)
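+ # Pass the remaining command-line params to the app in a string-array
+ # intent extra named "args".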
+ args += ['--esa', 'args', "'{}'".format(','.join(command_line_params))]
+ args += [_APP_START_INTENT]
+
+ self._CheckCallAdb(*args)
+
+ run_loop = abstract_launcher.ARG_DRYRUN not in self.launcher_args
+
+ while run_loop:
+ if not done_queue.empty():
+ done_queue_code = done_queue.get_nowait()
+ if done_queue_code == _QUEUE_CODE_CRASHED:
+ app_crashed = True
+ threading.Timer(_CRASH_LOG_SECONDS, logcat_process.kill).start()
+
+ # Note we cannot use "for line in logcat_process.stdout" because
+ # that uses a large buffer which will cause us to deadlock.
+ line = CleanLine(logcat_process.stdout.readline())
+
+ # Some crashes are not caught by the am_monitor thread, but they do
+ # produce the following string in logcat before they exit.
+ if 'beginning of crash' in line:
+ app_crashed = True
+ threading.Timer(_CRASH_LOG_SECONDS, logcat_process.kill).start()
+
+ if not line: # Logcat exited, or was killed
+ break
+ else:
+ self._WriteLine(line)
+ # Don't break until we see the below text in logcat, which should be
+ # written when the Starboard application event loop finishes.
+ if '***Application Stopped***' in line:
+ try:
+ return_code = int(line.split(' ')[-1])
+ except ValueError: # Error message was printed to stdout
+ pass
+ logcat_process.kill()
+ break
+
+ finally:
+ if app_crashed:
+ self._WriteLine('***Application Crashed***\n')
+ # Set return code to mimic segfault code on Linux
+ return_code = 11
+ else:
+ self._Shutdown()
+ if self.local_port is not None:
+ self.CallAdb('forward', '--remove', 'tcp:{}'.format(self.local_port))
+ am_monitor.Shutdown()
+ self.killed.set()
+ run_timer.Stop()
+ if logcat_process.poll() is None:
+ # This could happen when using SIGINT to kill the launcher
+ # (e.g. when using starboard/tools/example/app_launcher_client.py).
+ sys.stderr.write('Logcat process is still running. Killing it now.\n')
+ logcat_process.kill()
+
+ return return_code
+
+ def _Shutdown(self):
+ self.CallAdb('shell', 'am', 'force-stop', _APP_PACKAGE_NAME)
+
+ def SupportsDeepLink(self):
+ return True
+
+ def SendDeepLink(self, link):
+ shell_cmd = 'am start -d "{}" {}'.format(link, _APP_START_INTENT)
+ args = ['shell', shell_cmd]
+ self._CheckCallAdb(*args)
+ return True
+
+ def Kill(self):
+ if not self.killed.is_set():
+ sys.stderr.write('***Killing Launcher***\n')
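+ # Write a fake "***Application Stopped***" line to the "starboard" log tag
+ # so the logcat loop in Run() sees it and exits with return code 1.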
+ self._CheckCallAdb('shell', 'log', '-t', 'starboard',
+ '***Application Stopped*** 1')
+ self._Shutdown()
+ else:
+ sys.stderr.write('Cannot kill launcher: already dead.\n')
+
+ def _WriteLine(self, line):
+ """Write log output to stdout."""
+ self.output_file.write(line)
+ self.output_file.flush()
+
+ def GetHostAndPortGivenPort(self, port):
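+ """Forwards a new local port to the given device port.
+
+ Returns the local (host, port) pair to use when connecting to the device.
+ """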
+ forward_p = self._PopenAdb(
+ 'forward', 'tcp:0', 'tcp:{}'.format(port), stdout=subprocess.PIPE)
+ forward_p.wait()
+
+ self.local_port = CleanLine(forward_p.stdout.readline()).rstrip('\n')
+ sys.stderr.write('ADB forward local port {} '
+ '=> device port {}\n'.format(self.local_port, port))
+ # pylint: disable=g-socket-gethostbyname
+ return socket.gethostbyname('localhost'), self.local_port
+
+ def GetDeviceIp(self):
+ """Gets the device IP. TODO: Implement."""
+ return None
diff --git a/src/starboard/android/shared/microphone_impl.cc b/src/starboard/android/shared/microphone_impl.cc
index 71ac6f4..43314b6 100644
--- a/src/starboard/android/shared/microphone_impl.cc
+++ b/src/starboard/android/shared/microphone_impl.cc
@@ -47,11 +47,11 @@
class SbMicrophoneImpl : public SbMicrophonePrivate {
public:
SbMicrophoneImpl();
- ~SbMicrophoneImpl() SB_OVERRIDE;
+ ~SbMicrophoneImpl() override;
- bool Open() SB_OVERRIDE;
- bool Close() SB_OVERRIDE;
- int Read(void* out_audio_data, int audio_data_size) SB_OVERRIDE;
+ bool Open() override;
+ bool Close() override;
+ int Read(void* out_audio_data, int audio_data_size) override;
void SetPermission(bool is_granted);
static bool IsMicrophoneDisconnected();
diff --git a/src/third_party/mozjs-45/js/src/tests/lib/__init__.py b/src/starboard/android/x86/__init__.py
similarity index 100%
copy from src/third_party/mozjs-45/js/src/tests/lib/__init__.py
copy to src/starboard/android/x86/__init__.py
diff --git a/src/third_party/mozjs-45/js/src/tests/lib/__init__.py b/src/starboard/android/x86/cobalt/__init__.py
similarity index 100%
copy from src/third_party/mozjs-45/js/src/tests/lib/__init__.py
copy to src/starboard/android/x86/cobalt/__init__.py
diff --git a/src/starboard/android/x86/cobalt/configuration.py b/src/starboard/android/x86/cobalt/configuration.py
new file mode 100644
index 0000000..5e11fd8
--- /dev/null
+++ b/src/starboard/android/x86/cobalt/configuration.py
@@ -0,0 +1,61 @@
+# Copyright 2017-2020 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Starboard Android x86 Cobalt configuration."""
+
+from starboard.android.shared.cobalt import configuration
+from starboard.tools.testing import test_filter
+
+
+class CobaltAndroidX86Configuration(configuration.CobaltAndroidConfiguration):
+ """Starboard Android x86 Cobalt configuration."""
+
+ def GetTestFilters(self):
+ filters = super(CobaltAndroidX86Configuration, self).GetTestFilters()
+ for target, tests in self.__FILTERED_TESTS.iteritems():
+ filters.extend(test_filter.TestFilter(target, test) for test in tests)
+ return filters
+
+ # A map of failing or crashing tests per target
+ __FILTERED_TESTS = { # pylint: disable=invalid-name
+ 'graphics_system_test': [
+ test_filter.FILTER_ALL
+ ],
+ 'layout_tests': [ # Old Android versions don't have matching fonts
+ 'CSS3FontsLayoutTests/Layout.Test'
+ '/5_2_use_first_available_listed_font_family',
+ 'CSS3FontsLayoutTests/Layout.Test'
+ '/5_2_use_specified_font_family_if_available',
+ 'CSS3FontsLayoutTests/Layout.Test'
+ '/5_2_use_system_fallback_if_no_matching_family_is_found*',
+ 'CSS3FontsLayoutTests/Layout.Test'
+ '/synthetic_bolding_should_not_occur_on_bold_font',
+ 'CSS3FontsLayoutTests/Layout.Test'
+ '/synthetic_bolding_should_occur_on_non_bold_font',
+ ],
+ 'nb_test': [
+ 'BidirectionalFitReuseAllocatorTest.FallbackBlockMerge',
+ 'BidirectionalFitReuseAllocatorTest.FreeBlockMergingLeft',
+ 'BidirectionalFitReuseAllocatorTest.FreeBlockMergingRight',
+ 'FirstFitReuseAllocatorTest.FallbackBlockMerge',
+ 'FirstFitReuseAllocatorTest.FreeBlockMergingLeft',
+ 'FirstFitReuseAllocatorTest.FreeBlockMergingRight',
+ ],
+ 'net_unittests': [ # Net tests are very unstable on Android L
+ test_filter.FILTER_ALL
+ ],
+ 'renderer_test': [
+ 'PixelTest.YUV422UYVYImageScaledUpSupport',
+ 'PixelTest.YUV422UYVYImageScaledAndTranslated',
+ ],
+ }
diff --git a/src/starboard/android/x86/gyp_configuration.py b/src/starboard/android/x86/gyp_configuration.py
index 80134e6..2caad96 100644
--- a/src/starboard/android/x86/gyp_configuration.py
+++ b/src/starboard/android/x86/gyp_configuration.py
@@ -1,4 +1,4 @@
-# Copyright 2016 The Cobalt Authors. All Rights Reserved.
+# Copyright 2016-2020 The Cobalt Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,10 +14,42 @@
"""Starboard Android x86 platform build configuration."""
from starboard.android.shared import gyp_configuration as shared_configuration
+from starboard.tools.testing import test_filter
def CreatePlatformConfig():
- return shared_configuration.AndroidConfiguration(
+ return Androidx86Configuration(
'android-x86',
'x86',
sabi_json_path='starboard/sabi/x86/sabi-v{sb_api_version}.json')
+
+
+class Androidx86Configuration(shared_configuration.AndroidConfiguration):
+
+ def GetTestFilters(self):
+ filters = super(Androidx86Configuration, self).GetTestFilters()
+ for target, tests in self.__FILTERED_TESTS.iteritems():
+ filters.extend(test_filter.TestFilter(target, test) for test in tests)
+ return filters
+
+ # A map of failing or crashing tests per target
+ __FILTERED_TESTS = { # pylint: disable=invalid-name
+ 'nplb': [
+ 'SbAccessibilityTest.CallSetCaptionsEnabled',
+ 'SbAccessibilityTest.GetCaptionSettingsReturnIsValid',
+ 'SbAudioSinkTest.*',
+ 'SbMediaCanPlayMimeAndKeySystem.*',
+ 'SbMicrophoneCloseTest.*',
+ 'SbMicrophoneOpenTest.*',
+ 'SbMicrophoneReadTest.*',
+ 'SbPlayerWriteSampleTests/SbPlayerWriteSampleTest.*',
+ 'SbSocketAddressTypes/SbSocketGetInterfaceAddressTest'
+ '.SunnyDaySourceForDestination/*',
+ 'SbMediaSetAudioWriteDurationTests/SbMediaSetAudioWriteDurationTest'
+ '.WriteContinuedLimitedInput/*',
+ ],
+ 'player_filter_tests': [
+ 'VideoDecoderTests/*',
+ 'AudioDecoderTests/*',
+ ],
+ }
diff --git a/src/starboard/build/base_configuration.gypi b/src/starboard/build/base_configuration.gypi
index a723315..95bafc6 100644
--- a/src/starboard/build/base_configuration.gypi
+++ b/src/starboard/build/base_configuration.gypi
@@ -266,10 +266,6 @@
'cflags_cc_host': [ '<@(compiler_flags_cc_host)', ],
'ldflags_host': [ '<@(linker_flags_host)' ],
- # Location of Cygwin which is used by the build system when running on a
- # Windows platform.
- 'msvs_cygwin_dirs': ['<(DEPTH)/third_party/cygwin'],
-
# Allows any source file to include files relative to the source tree.
'include_dirs': [ '<(DEPTH)' ],
'libraries': [ '<@(platform_libraries)' ],
diff --git a/src/starboard/build/collect_deploy_content.gypi b/src/starboard/build/collect_deploy_content.gypi
index 0ab0db8..6fc2840 100644
--- a/src/starboard/build/collect_deploy_content.gypi
+++ b/src/starboard/build/collect_deploy_content.gypi
@@ -66,7 +66,7 @@
],
'outputs': [ '<(content_deploy_stamp_file)' ],
'action': [
- 'python',
+ 'python2',
'<(DEPTH)/starboard/build/collect_deploy_content.py',
'-i', '<(input_dir)',
'-o', '<(output_dir)',
diff --git a/src/starboard/build/collect_deploy_content.py b/src/starboard/build/collect_deploy_content.py
index 35176bb..bca32a5 100755
--- a/src/starboard/build/collect_deploy_content.py
+++ b/src/starboard/build/collect_deploy_content.py
@@ -21,6 +21,7 @@
import _env # pylint: disable=unused-import
import starboard.tools.port_symlink as port_symlink
+from starboard.tools import log_level
# The name of an environment variable that when set to |'1'|, signals to us that
# we should log all output directories that we have populated.
@@ -88,10 +89,9 @@
options = parser.parse_args(argv[1:])
if os.environ.get(_SHOULD_LOG_ENV_KEY, None) == '1':
- log_level = logging.INFO
+ log_level.InitializeLoggingWithLevel(logging.INFO)
else:
- log_level = logging.WARNING
- logging.basicConfig(level=log_level, format='COLLECT CONTENT: %(message)s')
+ log_level.InitializeLoggingWithLevel(logging.WARNING)
logging.info('max_depth: %s', options.max_depth)
logging.info('< %s', options.input_dir)
diff --git a/src/starboard/build/convert_i18n_data.gypi b/src/starboard/build/convert_i18n_data.gypi
index 6ae88fe..a4f9313 100644
--- a/src/starboard/build/convert_i18n_data.gypi
+++ b/src/starboard/build/convert_i18n_data.gypi
@@ -43,7 +43,7 @@
'<!@pymod_do_main(starboard.build.convert_i18n_data -o <@(output_dir) --outputs <@(xlb_files))',
],
'action': [
- 'python',
+ 'python2',
'<(DEPTH)/starboard/build/convert_i18n_data.py',
'-o', '<@(output_dir)',
'<@(xlb_files)',
diff --git a/src/starboard/build/copy_test_data.gypi b/src/starboard/build/copy_test_data.gypi
index 6c92bcc..7432f8b 100644
--- a/src/starboard/build/copy_test_data.gypi
+++ b/src/starboard/build/copy_test_data.gypi
@@ -58,7 +58,7 @@
'<!@pymod_do_main(starboard.build.copy_data -o <(sb_static_contents_output_data_dir)/test/<(content_test_output_subdir) --outputs <(content_test_input_files))',
],
'action': [
- 'python',
+ 'python2',
'<(DEPTH)/starboard/build/copy_data.py',
'-o', '<(sb_static_contents_output_data_dir)/test/<(content_test_output_subdir)',
'<@(content_test_input_files)',
diff --git a/src/starboard/build/deploy.gypi b/src/starboard/build/deploy.gypi
index 450e280..b7cec90 100644
--- a/src/starboard/build/deploy.gypi
+++ b/src/starboard/build/deploy.gypi
@@ -82,7 +82,7 @@
'<(target_deploy_stamp_file)',
],
'action': [
- 'python',
+ 'python2',
'<(make_dirs)',
'--clean',
'--stamp=<(target_deploy_stamp_file)',
diff --git a/src/starboard/build/gyp b/src/starboard/build/gyp
index 74f1dfe..8866c17 100755
--- a/src/starboard/build/gyp
+++ b/src/starboard/build/gyp
@@ -24,7 +24,9 @@
import _env # pylint: disable=unused-import
from starboard.build.gyp_runner import GypRunner
from starboard.tools import build
+from starboard.tools import command_line
from starboard.tools import config
+from starboard.tools import log_level
from starboard.tools import paths
from starboard.tools import platform
@@ -71,9 +73,9 @@
parser.add_argument('--check', action='store_true',
help='Check format of gyp files.')
- parser.add_argument('-v', '--verbose', dest='verbose_count',
- default=0, action='count',
- help='Verbose level (multiple times for more).')
+ parser.add_argument('-v', '--verbose', action='store_true',
+ help='Enables verbose logging. For more control over the '
+ "logging level use '--log_level' instead.")
parser.add_argument('-a', '--application',
metavar='application',
default='cobalt',
@@ -83,28 +85,15 @@
default=None,
help='Root GYP build file. Default: starboard_all.gyp')
+ command_line.AddLoggingArguments(parser, default='warning')
+
return parser.parse_args(argv)
-def _SetupLogging():
- logging_level = logging.WARNING
- logging_format = '%(message)s'
- logging.basicConfig(level=logging_level, format=logging_format)
-
-
-def _SetLogLevel(verbose_count):
- logging_level = logging.WARNING
- if verbose_count == 1:
- logging_level = logging.INFO
- elif verbose_count >= 2:
- logging_level = logging.DEBUG
- logging.getLogger().setLevel(logging_level)
-
-
def main(argv):
- _SetupLogging()
options = _ParseCommandLineArguments(argv)
- _SetLogLevel(options.verbose_count)
+
+ log_level.InitializeLogging(options)
if os.environ.get('GYP_DEFINES'):
logging.error('GYP_DEFINES environment variable is not supported.')
diff --git a/src/starboard/build/gyp_functions.py b/src/starboard/build/gyp_functions.py
index f617d36..e6f466d 100644
--- a/src/starboard/build/gyp_functions.py
+++ b/src/starboard/build/gyp_functions.py
@@ -82,6 +82,22 @@
ret += f.replace(os.sep, '/') + ' '
return ret.strip()
+ def basename(self):
+ """Basename of list of files"""
+ parser = ExtensionCommandParser([])
+ parser.add_argument('input_list', nargs='*')
+ args = parser.parse_args(self.argv)
+ ret = [os.path.basename(x) for x in args.input_list]
+ return ' '.join(ret)
+
+ def replace_in_list(self):
+ """String replace in a list of arguments"""
+ parser = ExtensionCommandParser(['old', 'new'])
+ parser.add_argument('input_list', nargs='*')
+ args = parser.parse_args(self.argv)
+ inp = args.input_list
+ return ' '.join([x.replace(args.old, args.new) for x in inp])
+
def file_glob_sub(self):
"""Glob files, but return filenames with string replace from->to applied."""
args = ExtensionCommandParser(
@@ -139,7 +155,7 @@
return prog.replace(os.sep, '/')
previous_dir = root_dir
root_dir = os.path.dirname(root_dir)
- logging.error('Failed to find program.')
+ logging.error('Failed to find program "{}".'.format(args.program))
return None
def getenv(self):
diff --git a/src/starboard/common/common_test.gyp b/src/starboard/common/common_test.gyp
new file mode 100644
index 0000000..0fcd931
--- /dev/null
+++ b/src/starboard/common/common_test.gyp
@@ -0,0 +1,43 @@
+# Copyright 2020 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'common_test',
+ 'type': '<(gtest_target_type)',
+ 'sources': [
+ 'socket_test.cc',
+ '<(DEPTH)/starboard/common/test_main.cc',
+ ],
+ 'dependencies': [
+ '<(DEPTH)/starboard/common/common.gyp:common',
+ '<(DEPTH)/starboard/starboard.gyp:starboard',
+ '<(DEPTH)/testing/gmock.gyp:gmock',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ ],
+ },
+ {
+ 'target_name': 'common_test_deploy',
+ 'type': 'none',
+ 'dependencies': [
+ 'common_test',
+ ],
+ 'variables': {
+ 'executable_name': 'common_test',
+ },
+ 'includes': [ '<(DEPTH)/starboard/build/deploy.gypi' ],
+ },
+ ],
+}
diff --git a/src/starboard/common/configuration_defaults.cc b/src/starboard/common/configuration_defaults.cc
index 6dbeb7b..d603e4f 100644
--- a/src/starboard/common/configuration_defaults.cc
+++ b/src/starboard/common/configuration_defaults.cc
@@ -33,6 +33,10 @@
return "none";
}
+const char* CobaltFallbackSplashScreenTopicsDefault() {
+ return "";
+}
+
bool CobaltEnableQuicDefault() {
return true;
}
diff --git a/src/starboard/common/configuration_defaults.h b/src/starboard/common/configuration_defaults.h
index c79c210..06114f9 100644
--- a/src/starboard/common/configuration_defaults.h
+++ b/src/starboard/common/configuration_defaults.h
@@ -26,6 +26,8 @@
const char* CobaltFallbackSplashScreenUrlDefault();
+const char* CobaltFallbackSplashScreenTopicsDefault();
+
bool CobaltEnableQuicDefault();
int CobaltSkiaCacheSizeInBytesDefault();
diff --git a/src/starboard/common/socket.cc b/src/starboard/common/socket.cc
index 1ce8b42..ae86116 100644
--- a/src/starboard/common/socket.cc
+++ b/src/starboard/common/socket.cc
@@ -12,13 +12,45 @@
// See the License for the specific language governing permissions and
// limitations under the License.
+#include "starboard/common/socket.h"
+
#include <iomanip>
-#include "starboard/common/socket.h"
+#include "starboard/common/log.h"
#include "starboard/configuration.h"
namespace starboard {
+SbSocketAddress GetUnspecifiedAddress(SbSocketAddressType address_type,
+ int port) {
+ SbSocketAddress address = {};
+ address.type = address_type;
+ address.port = port;
+ return address;
+}
+
+bool GetLocalhostAddress(SbSocketAddressType address_type,
+ int port,
+ SbSocketAddress* address) {
+ if (address_type != kSbSocketAddressTypeIpv4 &&
+ address_type != kSbSocketAddressTypeIpv6) {
+ SB_LOG(ERROR) << __FUNCTION__ << ": unknown address type: " << address_type;
+ return false;
+ }
+ *address = GetUnspecifiedAddress(address_type, port);
+ switch (address_type) {
+ case kSbSocketAddressTypeIpv4:
+ address->address[0] = 127;
+ address->address[3] = 1;
+ break;
+ case kSbSocketAddressTypeIpv6:
+ address->address[15] = 1;
+ break;
+ }
+
+ return true;
+}
+
Socket::Socket(SbSocketAddressType address_type, SbSocketProtocol protocol)
: socket_(SbSocketCreate(address_type, protocol)) {}
diff --git a/src/starboard/common/socket.h b/src/starboard/common/socket.h
index aec5bd4..8efaada 100644
--- a/src/starboard/common/socket.h
+++ b/src/starboard/common/socket.h
@@ -20,13 +20,23 @@
#ifndef STARBOARD_COMMON_SOCKET_H_
#define STARBOARD_COMMON_SOCKET_H_
-#include <iostream>
+#include <ostream>
#include "starboard/socket.h"
#include "starboard/types.h"
namespace starboard {
+// Returns an unspecified IP address with the given port.
+SbSocketAddress GetUnspecifiedAddress(SbSocketAddressType address_type,
+ int port);
+
+// Gets a localhost IP address with the given port.
+// Returns true on success.
+bool GetLocalhostAddress(SbSocketAddressType address_type,
+ int port,
+ SbSocketAddress* address);
+
class Socket {
public:
Socket(SbSocketAddressType address_type, SbSocketProtocol protocol);
diff --git a/src/starboard/common/socket_test.cc b/src/starboard/common/socket_test.cc
new file mode 100644
index 0000000..7d416e2
--- /dev/null
+++ b/src/starboard/common/socket_test.cc
@@ -0,0 +1,61 @@
+// Copyright 2020 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "starboard/common/socket.h"
+
+#include "starboard/common/log.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace starboard {
+namespace {
+
+TEST(SocketTest, TestGetUnspecifiedAddress) {
+ SbSocketAddress address =
+ GetUnspecifiedAddress(kSbSocketAddressTypeIpv4, 1010);
+ EXPECT_EQ(1010, address.port);
+ EXPECT_EQ(kSbSocketAddressTypeIpv4, address.type);
+}
+
+TEST(SocketTest, TestGetLocalhostAddressIpv4) {
+ SbSocketAddress address = {};
+ bool result = GetLocalhostAddress(kSbSocketAddressTypeIpv4, 2020, &address);
+ ASSERT_TRUE(result);
+ EXPECT_EQ(2020, address.port);
+ EXPECT_EQ(kSbSocketAddressTypeIpv4, address.type);
+ EXPECT_EQ(127, address.address[0]);
+ EXPECT_EQ(0, address.address[1]);
+ EXPECT_EQ(0, address.address[2]);
+ EXPECT_EQ(1, address.address[3]);
+}
+
+TEST(SocketTest, TestGetLocalhostAddressIpv6) {
+ SbSocketAddress address = {};
+ bool result = GetLocalhostAddress(kSbSocketAddressTypeIpv6, 3030, &address);
+ ASSERT_TRUE(result);
+ EXPECT_EQ(3030, address.port);
+ EXPECT_EQ(kSbSocketAddressTypeIpv6, address.type);
+ for (int i = 0; i < 15; i++) {
+ EXPECT_EQ(0, address.address[i]);
+ }
+ EXPECT_EQ(1, address.address[15]);
+}
+
+TEST(SocketTest, TestGetLocalhostAddressInvalidType) {
+ SbSocketAddress address = {};
+ bool result =
+ GetLocalhostAddress(static_cast<SbSocketAddressType>(2), 4040, &address);
+ ASSERT_FALSE(result);
+}
+} // namespace
+} // namespace starboard
diff --git a/src/starboard/configuration.h b/src/starboard/configuration.h
index e6ba5e4..e82f471 100644
--- a/src/starboard/configuration.h
+++ b/src/starboard/configuration.h
@@ -74,6 +74,9 @@
// Iteration on UI navigation API.
#define SB_UI_NAVIGATION2_VERSION SB_EXPERIMENTAL_API_VERSION
+// Deprecated the SB_OVERRIDE macro.
+#define SB_OVERRIDE_DEPRECATED_VERSION SB_EXPERIMENTAL_API_VERSION
+
// --- Release Candidate Feature Defines -------------------------------------
// --- Common Detected Features ----------------------------------------------
@@ -232,6 +235,7 @@
// Declares a function as overriding a virtual function on compilers that
// support it.
+#if SB_API_VERSION < SB_OVERRIDE_DEPRECATED_VERSION
#if !defined(SB_OVERRIDE)
#if defined(COMPILER_MSVC)
#define SB_OVERRIDE override
@@ -241,6 +245,10 @@
#define SB_OVERRIDE
#endif
#endif // SB_OVERRIDE
+#else
+#define SB_OVERRIDE \
+ #error "The SB_OVERRIDE macro is deprecated. Please use \"override\" instead."
+#endif // SB_API_VERSION < SB_OVERRIDE_DEPRECATED_VERSION
// Declare numeric literals of signed 64-bit type.
#if !defined(SB_INT64_C)
@@ -914,14 +922,13 @@
#error "Your platform must define SB_HAS_SPEECH_RECOGNIZER."
#endif // !defined(SB_HAS_SPEECH_RECOGNIZER)
#endif // SB_API_VERSION < SB_SPEECH_RECOGNIZER_IS_REQUIRED && SB_API_VERSION
- // >= 5
+// >= 5
#if SB_API_VERSION < 12 && SB_API_VERSION >= 8
#if !defined(SB_HAS_ON_SCREEN_KEYBOARD)
#error "Your platform must define SB_HAS_ON_SCREEN_KEYBOARD."
#endif // !defined(SB_HAS_ON_SCREEN_KEYBOARD)
-#endif // SB_API_VERSION < 12 &&
- // SB_API_VERSION >= 8
+#endif // SB_API_VERSION < 12 && SB_API_VERSION >= 8
#if SB_HAS(ON_SCREEN_KEYBOARD) && (SB_API_VERSION < 8)
#error "SB_HAS_ON_SCREEN_KEYBOARD not supported in this API version."
diff --git a/src/starboard/evergreen/shared/platform_deploy.gypi b/src/starboard/evergreen/shared/platform_deploy.gypi
index 3ad944a..e1a3d29 100644
--- a/src/starboard/evergreen/shared/platform_deploy.gypi
+++ b/src/starboard/evergreen/shared/platform_deploy.gypi
@@ -30,7 +30,7 @@
],
'outputs': [ '<(deploy_executable_file)' ],
'action': [
- 'python',
+ 'python2',
'<(DEPTH)/starboard/tools/port_symlink.py',
'-f',
'-r',
@@ -41,4 +41,3 @@
},
],
}
-
diff --git a/src/starboard/evergreen/testing/README.md b/src/starboard/evergreen/testing/README.md
new file mode 100644
index 0000000..4d5b3b4
--- /dev/null
+++ b/src/starboard/evergreen/testing/README.md
@@ -0,0 +1,171 @@
+Evergreen Test Automation
+=============
+
+The Evergreen test automation framework reduces the overhead of running the
+test cases required to test Evergreen-specific functionality. These tests must
+be run both internally and by partners, so making them simple and fast to run
+benefits everyone.
+
+Structure
+=============
+
+The Evergreen test automation framework is made up of a few distinct sets of
+files.
+
+**Core Scripts**
+
+These files are responsible for finding all of the tests, executing them, and
+outputting the results.
+
+* `run_all_tests.sh`
+* `setup.sh`
+* `pprint.sh`
+
+**Shared Scripts**
+
+These files contain code that is either non-trivial or repeated throughout the
+tests, and is shared across all platforms.
+
+* `shared/app_key.sh`
+* `shared/drain_file.sh`
+* `shared/init_logging.sh`
+* `shared/installation_slot.sh`
+* `shared/wait_and_watch.sh`
+
+**Platform-Specific Scripts**
+
+These files contain code that is either non-trivial or repeated throughout the
+tests, and is platform specific.
+
+* `<PLATFORM>/clean_up.sh`
+* `<PLATFORM>/clear_storage.sh`
+* `<PLATFORM>/create_file.sh`
+* `<PLATFORM>/delete_file.sh`
+* `<PLATFORM>/deploy_cobalt.sh`
+* `<PLATFORM>/run_command.sh`
+* `<PLATFORM>/setup.sh`
+* `<PLATFORM>/start_cobalt.sh`
+* `<PLATFORM>/stop_cobalt.sh`
+
+**Test HTML**
+
+These files are responsible for changing the channels when tests are running.
+
+* `tests/empty.html`
+* `tests/test.html`
+* `tests/tseries.html`
+
+**Test Cases**
+
+These files are responsible for the test logic, and each file corresponds to a
+single Evergreen test case.
+
+* `tests/abort_update_if_already_updating_test.sh`
+* `tests/alternative_content_test.sh`
+* `tests/continuous_updates_test.sh`
+* `tests/crashing_binary_test.sh`
+* `tests/disabled_updater_test.sh`
+* `tests/load_slot_being_updated_test.sh`
+* `tests/mismatched_architecture_test.sh`
+* `tests/noop_binary_test.sh`
+* `tests/out_of_storage_test.sh`
+* `tests/quick_roll_forward_test.sh`
+* `tests/racing_updaters_test.sh`
+* `tests/update_fails_verification_test.sh`
+* `tests/update_works_for_only_one_app_test.sh`
+* `tests/valid_slot_overwritten_test.sh`
+* `tests/verify_qa_channel_update_test.sh`
+
+How To Run
+=============
+
+Before beginning, check whether your target platform has its own README.md and,
+if so, follow the steps specified there. Otherwise, there are two primary
+methods of running the Evergreen tests.
+
+**Python Helper Script**
+
+The Python helper script at `cobalt/tools/buildbot/run_evergreen_tests.py`
+simplifies the process of running the automated tests, and relies on the
+existing abstract launcher infrastructure.
+
+For this example we will use the following:
+
+* `linux-x64x11` and `qa` for the target platform and configuration.
+* `evergreen-x64` and `qa` for the Evergreen platform and configuration.
+
+Then the following command can be used to run the tests.
+
+```
+python cobalt/tools/buildbot/run_evergreen_tests.py \
+ -p evergreen-x64 -c qa -P linux-x64x11 -C qa
+```
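+
+In this command, `-p`/`-c` correspond to the Evergreen platform and
+configuration, while `-P`/`-C` correspond to the underlying target platform and
+configuration listed above.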
+
+**Directly**
+
+Directly running the scripts requires more setup than the helper script above.
+We will be using the same platforms and configurations as the steps above.
+
+First, a directory tree containing the required binaries and content needs to
+be created, with the Evergreen binary and its content located under the loader
+binary's content directory:
+
+```
+ linux-x64x11_qa
+ +-- deploy
+ +-- loader_app
+ +-- loader_app <-- loader binary
+ +-- content <-- loader content
+ +-- app
+ +-- cobalt
+ +-- content <-- cobalt content
+ +-- lib
+ +-- libcobalt.so <-- cobalt binary
+```
+
+Note: This directory structure is the same as what would be generated by
+ `starboard/evergreen/shared/launcher.py`.
+
+Next, set the environment variable `OUT` equal to the root of the directory tree
+created above.
+
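+For example, assuming the directory tree above was created at
+`~/cobalt/out/linux-x64x11_qa` (a hypothetical location; use the actual root of
+the tree you created):
+
+```
+# Hypothetical path; point this at the root of the directory tree above.
+export OUT="${HOME}/cobalt/out/linux-x64x11_qa"
+```
+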
+Then the following command can be used to run the tests.
+
+```
+ ./run_all_tests.sh linux
+```
+
+Tips
+=============
+
+The tests take between 15 and 30 minutes to complete and generate a significant
+amount of log output. They can be run in the background with the logs
+redirected using `./run_all_tests.sh linux &> results &`.
+
+With all of the logs from the test script redirected to `results`, you can
+easily check the status by running `grep -E "RUN|PASSED|FAILED" results`.
+
+Notes
+=============
+
+Evergreen uses "drain" files to ensure only one application downloads an update
+at a time. To fake update contention, some tests create this file manually:
+
+* `tests/abort_update_if_already_updating_test.sh`
+* `tests/load_slot_being_updated_test.sh`
+* `tests/racing_updaters_test.sh`
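+
+A minimal sketch of how these tests fake an in-progress update, using the
+`get_temporary_drain_file_path` and `create_file` helpers from this framework
+(`TEST_NAME` is set by the individual test):
+
+```
+# Derive a valid drain file path from a previous run's log, then create the
+# file so the app under test sees an update already in progress.
+FILENAME="$(get_temporary_drain_file_path "${TEST_NAME}.0.log")"
+create_file "${FILENAME}"
+```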
+
+Evergreen uses "app key" files to keep track of per-application state of an
+update. To fake changes to the per-application state of an update, some tests
+create, delete, or modify these files manually:
+
+* `tests/update_works_for_only_one_app_test.sh`
+* `tests/valid_slot_overwritten_test.sh`
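+
+The sketch below is based on `valid_slot_overwritten_test.sh`, which deletes a
+good app key file from the current installation slot (`STORAGE_DIR`, `SLOT`,
+and `TAIL` are provided by the framework and the test's earlier steps):
+
+```
+# Find a good app key file in the current slot and delete it.
+delete_file "$(run_command "find ${STORAGE_DIR}/installation_${SLOT} -name app_key_*.good | ${TAIL} -1")"
+```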
+
+To validate Evergreen behavior when there is not enough storage for an update, a
+temporary filesystem is used, only 10MiB in size. One test creates a symbolic
+link from the storage path to this filesystem, faking an "out of storage"
+situation:
+
+* `tests/out_of_storage_test.sh`
diff --git a/src/starboard/evergreen/testing/pprint.sh b/src/starboard/evergreen/testing/pprint.sh
index c44ce31..2716a5c 100755
--- a/src/starboard/evergreen/testing/pprint.sh
+++ b/src/starboard/evergreen/testing/pprint.sh
@@ -21,6 +21,7 @@
if [[ "${OSTYPE}" = "*darwin*" ]]; then
arg=""
fi
+
# This command uses ANSI escape codes to attempt to output colored text. For
# more information see https://en.wikipedia.org/wiki/ANSI_escape_code.
echo "$arg" "\033[0;${1}m${2}\033[0m" >&2
diff --git a/src/starboard/evergreen/testing/run_all_tests.sh b/src/starboard/evergreen/testing/run_all_tests.sh
index d0602fc..ecbff3f 100755
--- a/src/starboard/evergreen/testing/run_all_tests.sh
+++ b/src/starboard/evergreen/testing/run_all_tests.sh
@@ -14,8 +14,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-# Driver script that sets up the testing environment, collects all of the tests
-# to run, runs them, and outputs the results.
+# Driver script used to find all of the tests, run them, and output the
+# results. This script, like all of the other scripts, assumes the availability
+# of "find", "grep", "ln", "mv", and "rm".
DIR="$(dirname "${0}")"
diff --git a/src/starboard/evergreen/testing/setup.sh b/src/starboard/evergreen/testing/setup.sh
index a9a707a..22a22c4 100755
--- a/src/starboard/evergreen/testing/setup.sh
+++ b/src/starboard/evergreen/testing/setup.sh
@@ -13,6 +13,9 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+#
+# Driver script used to perform the setup required for all of the tests.
+# Platform-specific setup is delegated to the platform-specific setup scripts.
if [[ ! -f "${DIR}/pprint.sh" ]]; then
echo "The script 'pprint.sh' is required"
@@ -31,13 +34,14 @@
PLATFORMS=("linux" "raspi")
if [[ ! "${PLATFORMS[@]}" =~ "${1}" ]] && [[ ! -d "${DIR}/${1}" ]]; then
- error "The platform provided must be one of the following: " "${PLATFORMS[@]}"
+ error "The platform provided must be one of the following: ${PLATFORMS[*]}"
exit 1
fi
# List of all required scripts.
SCRIPTS=("${DIR}/shared/app_key.sh" \
"${DIR}/shared/drain_file.sh" \
+ "${DIR}/shared/init_logging.sh" \
"${DIR}/shared/installation_slot.sh" \
"${DIR}/shared/wait_and_watch.sh" \
@@ -63,19 +67,3 @@
source $script "${DIR}/${1}"
done
-# The /tmp/ directory is used for temporarily storing logs.
-if [[ ! -d "/tmp/" ]]; then
- error "The '/tmp/' directory is missing"
- exit 1
-fi
-
-# A path in the temporary directory to write test log files to.
-LOG_PATH="/tmp/youtube_test_logs/$(date +%s%3N)"
-
-mkdir -p "${LOG_PATH}" &> /dev/null
-
-if [[ ! -d "${LOG_PATH}" ]]; then
- error "Failed to create directory at '${LOG_PATH}'"
- exit 1
-fi
-
diff --git a/src/starboard/evergreen/testing/shared/app_key.sh b/src/starboard/evergreen/testing/shared/app_key.sh
index 506fe5d..21e5b8e 100755
--- a/src/starboard/evergreen/testing/shared/app_key.sh
+++ b/src/starboard/evergreen/testing/shared/app_key.sh
@@ -14,6 +14,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+# Searches the provided log file for the path to the bad app key file.
+#
+# Globals:
+# LOG_PATH
+# TAIL
+#
+# Args:
+# Path to a log file.
+#
+# Returns:
+# Path to the bad app key file, otherwise "".
function get_bad_app_key_file_path() {
if [[ $# -ne 1 ]]; then
error " get_bad_app_key_file_path only accepts a single argument"
diff --git a/src/starboard/evergreen/testing/shared/drain_file.sh b/src/starboard/evergreen/testing/shared/drain_file.sh
index 2e66aa1..412114b 100755
--- a/src/starboard/evergreen/testing/shared/drain_file.sh
+++ b/src/starboard/evergreen/testing/shared/drain_file.sh
@@ -14,6 +14,18 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+# Creates a path to a valid drain file based on the drain file created in the
+# provided log file.
+#
+# Globals:
+# LOG_PATH
+# TAIL
+#
+# Args:
+# Path to a log file.
+#
+# Returns:
+# Path to a valid drain file, otherwise "".
function get_temporary_drain_file_path() {
if [[ $# -ne 1 ]]; then
error " get_temporary_drain_file_path only accepts a single argument"
diff --git a/src/starboard/evergreen/testing/shared/init_logging.sh b/src/starboard/evergreen/testing/shared/init_logging.sh
new file mode 100755
index 0000000..d343c8f
--- /dev/null
+++ b/src/starboard/evergreen/testing/shared/init_logging.sh
@@ -0,0 +1,30 @@
+#!/bin/bash
+#
+# Copyright 2020 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+if [[ ! -d "/tmp/" ]]; then
+ error "The '/tmp/' directory is required for log files"
+ exit 1
+fi
+
+LOG_PATH="/tmp/youtube_test_logs/$(date +%s%3N)"
+
+mkdir -p "${LOG_PATH}" &> /dev/null
+
+if [[ ! -d "${LOG_PATH}" ]]; then
+ error "Failed to create directory at '${LOG_PATH}'"
+ exit 1
+fi
+
diff --git a/src/starboard/evergreen/testing/shared/installation_slot.sh b/src/starboard/evergreen/testing/shared/installation_slot.sh
index 0258be3..ac52433 100755
--- a/src/starboard/evergreen/testing/shared/installation_slot.sh
+++ b/src/starboard/evergreen/testing/shared/installation_slot.sh
@@ -14,6 +14,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+# Searches the provided log file for the current installation slot.
+#
+# Globals:
+# LOG_PATH
+# TAIL
+#
+# Args:
+# Path to a log file.
+#
+# Returns:
+# The current installation slot number, otherwise "".
function get_current_installation_slot() {
if [[ $# -ne 1 ]]; then
error " get_current_installation_slot only accepts a single argument"
diff --git a/src/starboard/evergreen/testing/shared/wait_and_watch.sh b/src/starboard/evergreen/testing/shared/wait_and_watch.sh
old mode 100644
new mode 100755
index f7c39da..cf799df
--- a/src/starboard/evergreen/testing/shared/wait_and_watch.sh
+++ b/src/starboard/evergreen/testing/shared/wait_and_watch.sh
@@ -14,6 +14,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+# Polls a file for the provided pattern.
+#
+# Globals:
+# TIMEOUT
+# WAITED
+#
+# Args:
+# A pattern to check for, a path to a file to check.
+#
+# Returns:
+# 1 if the pattern was found, otherwise 0.
function wait_and_watch() {
if [[ $# -ne 2 ]]; then
error " wait_and_watch requires a pattern and a path"
diff --git a/src/starboard/evergreen/testing/tests/abort_update_if_already_updating_test.sh b/src/starboard/evergreen/testing/tests/abort_update_if_already_updating_test.sh
index d83646e..7610aba 100755
--- a/src/starboard/evergreen/testing/tests/abort_update_if_already_updating_test.sh
+++ b/src/starboard/evergreen/testing/tests/abort_update_if_already_updating_test.sh
@@ -34,6 +34,11 @@
FILENAME="$(get_temporary_drain_file_path "${TEST_NAME}.0.log")"
+ if [[ -z "${FILENAME}" ]]; then
+ error "Failed to evaluate a temporary drain file path"
+ return 1
+ fi
+
clear_storage
create_file "${FILENAME}"
diff --git a/src/starboard/evergreen/testing/tests/load_slot_being_updated_test.sh b/src/starboard/evergreen/testing/tests/load_slot_being_updated_test.sh
index 14a8a9c..fdb73b0 100755
--- a/src/starboard/evergreen/testing/tests/load_slot_being_updated_test.sh
+++ b/src/starboard/evergreen/testing/tests/load_slot_being_updated_test.sh
@@ -34,6 +34,11 @@
FILENAME=$(get_temporary_drain_file_path "${TEST_NAME}.0.log")
+ if [[ -z "${FILENAME}" ]]; then
+ error "Failed to evaluate a temporary drain file path"
+ return 1
+ fi
+
create_file "${FILENAME}"
start_cobalt "file:///tests/${TEST_FILE}?channel=test" "${TEST_NAME}.1.log" "Active slot draining"
diff --git a/src/starboard/evergreen/testing/tests/racing_updaters_test.sh b/src/starboard/evergreen/testing/tests/racing_updaters_test.sh
index 1b839af..2865095 100755
--- a/src/starboard/evergreen/testing/tests/racing_updaters_test.sh
+++ b/src/starboard/evergreen/testing/tests/racing_updaters_test.sh
@@ -53,6 +53,11 @@
FILENAME="$(get_temporary_drain_file_path "${TEST_NAME}.0.log")"
+ if [[ -z "${FILENAME}" ]]; then
+ error "Failed to evaluate a temporary drain file path"
+ return 1
+ fi
+
clear_storage
wait_and_force_race_condition "Created drain file" "${LOG_PATH}/${TEST_NAME}.1.log" "${FILENAME}" &
diff --git a/src/starboard/evergreen/testing/tests/update_works_for_only_one_app_test.sh b/src/starboard/evergreen/testing/tests/update_works_for_only_one_app_test.sh
index b09bdb4..65e5ad0 100755
--- a/src/starboard/evergreen/testing/tests/update_works_for_only_one_app_test.sh
+++ b/src/starboard/evergreen/testing/tests/update_works_for_only_one_app_test.sh
@@ -48,6 +48,11 @@
FILENAME="$(get_bad_app_key_file_path "${TEST_NAME}.2.log")"
+ if [[ -z "${FILENAME}" ]]; then
+ error "Failed to find the bad app key file path"
+ return 1
+ fi
+
create_file "${FILENAME}"
start_cobalt "file:///tests/empty.html" "${TEST_NAME}.3.log" "RevertBack current_installation="
diff --git a/src/starboard/evergreen/testing/tests/valid_slot_overwritten_test.sh b/src/starboard/evergreen/testing/tests/valid_slot_overwritten_test.sh
index 6c66c86..5fe2228 100755
--- a/src/starboard/evergreen/testing/tests/valid_slot_overwritten_test.sh
+++ b/src/starboard/evergreen/testing/tests/valid_slot_overwritten_test.sh
@@ -41,6 +41,11 @@
SLOT="$(get_current_installation_slot "${TEST_NAME}.1.log")"
+ if [[ -z "${SLOT}" ]]; then
+ error "Failed to evaluate the current installation slot"
+ return 1
+ fi
+
# Warning: do not wrap '$TAIL' with double quotes or else it will not actually
# resolve to the correct command.
delete_file "$(run_command "find ${STORAGE_DIR}/installation_${SLOT} -name app_key_*.good | ${TAIL} -1")"
diff --git a/src/starboard/linux/shared/configuration.cc b/src/starboard/linux/shared/configuration.cc
index 4b0f775..bb7371e 100644
--- a/src/starboard/linux/shared/configuration.cc
+++ b/src/starboard/linux/shared/configuration.cc
@@ -32,7 +32,7 @@
const CobaltExtensionConfigurationApi kConfigurationApi = {
kCobaltExtensionConfigurationName,
- 1,
+ 2,
&common::CobaltUserOnExitStrategyDefault,
&common::CobaltRenderDirtyRegionOnlyDefault,
&CobaltEglSwapInterval,
@@ -55,6 +55,7 @@
&common::CobaltGcZealDefault,
&common::CobaltRasterizerTypeDefault,
&common::CobaltEnableJitDefault,
+ &common::CobaltFallbackSplashScreenTopicsDefault,
};
} // namespace
diff --git a/src/starboard/linux/x64x11/gczeal/configuration.cc b/src/starboard/linux/x64x11/gczeal/configuration.cc
index 6d80740..5a88df1 100644
--- a/src/starboard/linux/x64x11/gczeal/configuration.cc
+++ b/src/starboard/linux/x64x11/gczeal/configuration.cc
@@ -38,7 +38,7 @@
const CobaltExtensionConfigurationApi kConfigurationApi = {
kCobaltExtensionConfigurationName,
- 1,
+ 2,
&common::CobaltUserOnExitStrategyDefault,
&common::CobaltRenderDirtyRegionOnlyDefault,
&CobaltEglSwapInterval,
@@ -61,6 +61,7 @@
&CobaltGcZeal,
&common::CobaltRasterizerTypeDefault,
&common::CobaltEnableJitDefault,
+ &common::CobaltFallbackSplashScreenTopicsDefault,
};
} // namespace
diff --git a/src/starboard/linux/x64x11/skia/configuration.cc b/src/starboard/linux/x64x11/skia/configuration.cc
index 3b2e26a..0c57f18 100644
--- a/src/starboard/linux/x64x11/skia/configuration.cc
+++ b/src/starboard/linux/x64x11/skia/configuration.cc
@@ -38,7 +38,7 @@
const CobaltExtensionConfigurationApi kConfigurationApi = {
kCobaltExtensionConfigurationName,
- 1,
+ 2,
&common::CobaltUserOnExitStrategyDefault,
&common::CobaltRenderDirtyRegionOnlyDefault,
&CobaltEglSwapInterval,
@@ -61,6 +61,7 @@
&common::CobaltGcZealDefault,
&CobaltRasterizerType,
&common::CobaltEnableJitDefault,
+ &common::CobaltFallbackSplashScreenTopicsDefault,
};
} // namespace
diff --git a/src/starboard/nplb/media_can_play_mime_and_key_system_test.cc b/src/starboard/nplb/media_can_play_mime_and_key_system_test.cc
index 20eaa0c..86faf88 100644
--- a/src/starboard/nplb/media_can_play_mime_and_key_system_test.cc
+++ b/src/starboard/nplb/media_can_play_mime_and_key_system_test.cc
@@ -94,6 +94,18 @@
"video/mp4; codecs=\"avc1.4d4015\"; width=1920; height=99999;", "");
ASSERT_EQ(result, kSbMediaSupportTypeNotSupported);
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"avc1.4d4015\"; width=1920; height=-1080;", "");
+ ASSERT_EQ(result, kSbMediaSupportTypeNotSupported);
+
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"avc1.4d4015\"; width=-1920; height=1080;", "");
+ ASSERT_EQ(result, kSbMediaSupportTypeNotSupported);
+
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"avc1.4d4015\"; width=-1920; height=-1080;", "");
+ ASSERT_EQ(result, kSbMediaSupportTypeNotSupported);
+
// Invalid bitrate
result = SbMediaCanPlayMimeAndKeySystem(
"video/mp4; codecs=\"avc1.4d4015\"; width=1920; height=1080; "
@@ -101,6 +113,25 @@
"");
ASSERT_EQ(result, kSbMediaSupportTypeNotSupported);
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"avc1.4d4015\"; width=1920; height=1080; "
+ "bitrate=-20000",
+ "");
+ ASSERT_EQ(result, kSbMediaSupportTypeNotSupported);
+
+ // Invalid framerate
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"avc1.4d4015\"; width=1920; height=1080; "
+ "framerate=-30",
+ "");
+ ASSERT_EQ(result, kSbMediaSupportTypeNotSupported);
+
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"avc1.4d4015\"; width=1920; height=1080; "
+ "framerate=-25",
+ "");
+ ASSERT_EQ(result, kSbMediaSupportTypeNotSupported);
+
// Invalid eotf
result = SbMediaCanPlayMimeAndKeySystem(
"video/webm; codecs=\"vp09.02.10.10\"; width=1920; height=1080; "
@@ -138,8 +169,15 @@
}
TEST(SbMediaCanPlayMimeAndKeySystem, MinimumSupport) {
- // H.264 Main Profile Level 4.2
+ // H.264 High Profile Level 4.2
SbMediaSupportType result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"avc1.64402a\"; width=1920; height=1080; "
+ "framerate=30; bitrate=20000",
+ "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+
+ // H.264 Main Profile Level 4.2
+ result = SbMediaCanPlayMimeAndKeySystem(
"video/mp4; codecs=\"avc1.4d402a\"; width=1920; height=1080; "
"framerate=30;",
"");
@@ -151,6 +189,18 @@
"");
ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"avc1.4d402a\"; width=0; height=0; "
+ "framerate=0; bitrate=0",
+ "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"avc1.4d402a\"; width=-0; height=-0; "
+ "framerate=-0; bitrate=-0",
+ "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+
// H.264 Main Profile Level 2.1
result = SbMediaCanPlayMimeAndKeySystem(
"video/mp4; codecs=\"avc1.4d4015\"; width=432; height=240; "
@@ -158,6 +208,16 @@
"");
ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+ // AV1 Main Profile 1080p
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"av01.0.05M.08\"; width=1920; height=1080; "
+ "framerate=30; bitrate=20000",
+ "");
+
+ // VP9 1080p
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/webm; codecs=\"vp9\"; width=1920; height=1080; framerate=60", "");
+
// AAC-LC
result = SbMediaCanPlayMimeAndKeySystem(
"audio/mp4; codecs=\"mp4a.40.2\"; channels=2; bitrate=256;", "");
@@ -503,6 +563,229 @@
<< "\n\tAC-3: " << ac3_support << "\n\tE-AC-3: " << eac3_support;
}
+TEST(SbMediaCanPlayMimeAndKeySystem, ValidateQueriesUnderPeakCapability) {
+ // H.264 High Profile Level 4.2 1080p 25 fps
+ SbMediaSupportType result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"avc1.64402a\"; width=1920; height=1080; "
+ "framerate=25",
+ "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+
+ // H.264 High Profile Level 4.2 1080p 24 fps
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"avc1.64402a\"; width=1920; height=1080; "
+ "framerate=24",
+ "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+
+ // H.264 High Profile Level 4.2 1920x818
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"avc1.64402a\"; width=1920; height=818; "
+ "framerate=30",
+ "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+
+ // H.264 High Profile Level 4.2 720p
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"avc1.64402a\"; width=1280; height=720; "
+ "framerate=30",
+ "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+
+ // H.264 High Profile Level 4.2 480p
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"avc1.64402a\"; width=640; height=480; "
+ "framerate=30",
+ "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+
+ // H.264 High Profile Level 4.2 360p
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"avc1.64402a\"; width=480; height=360; "
+ "framerate=30",
+ "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+
+ // H.264 High Profile Level 4.2 240p
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"avc1.64402a\"; width=352; height=240; "
+ "framerate=30",
+ "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+
+ // H.264 High Profile Level 4.2 144p
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"avc1.64402a\"; width=256; height=144; "
+ "framerate=30",
+ "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+
+ // AV1 Main Profile Level 6.0 8K 10 bit HDR
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"av01.0.17M.10.0.110.09.16.09.0\"; width=7680; "
+ "height=4320; framerate=30",
+ "");
+
+ if (result == kSbMediaSupportTypeProbably) {
+ // AV1 Main Profile Level 6.0 8K 8 bit SDR
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"av01.0.16M.08\"; width=7680; height=4320; "
+ "framerate=30",
+ "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+ }
+
+ // AV1 Main Profile 4K
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"av01.0.13M.10.0.110.09.16.09.0\"; width=3840; "
+ "height=2160; framerate=30",
+ "");
+
+ if (result == kSbMediaSupportTypeProbably) {
+ // AV1 Main Profile 1440p
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"av01.0.12M.10.0.110.09.16.09.0\"; width=2560; "
+ "height=1440; framerate=30",
+ "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+ }
+ // AV1 Main Profile 1080p
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"av01.0.09M.08.0.110.09.16.09.0\"; width=1920; "
+ "height=1080; framerate=30",
+ "");
+
+ if (result == kSbMediaSupportTypeProbably) {
+ // AV1 Main Profile 1080p 25 fps
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"av01.0.09M.08.0.110.09.16.09.0\"; width=1920; "
+ "height=1080; framerate=25",
+ "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+
+ // AV1 Main Profile 1080p 24 fps
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"av01.0.09M.08.0.110.09.16.09.0\"; width=1920; "
+ "height=1080; framerate=24",
+ "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+
+ // AV1 Main Profile 1920x818
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"av01.0.09M.08.0.110.09.16.09.0\"; width=1920; "
+ "height=818; framerate=30",
+ "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+
+ // AV1 Main Profile 720p
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"av01.0.08M.10.0.110.09.16.09.0\"; width=1280; "
+ "height=720; framerate=30",
+ "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+
+ // AV1 Main Profile 480p
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"av01.0.04M.10.0.110.09.16.09.0\"; width=854; "
+ "height=480; framerate=30",
+ "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+
+ // AV1 Main Profile 360p
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"av01.0.01M.10.0.110.09.16.09.0\"; width=640; "
+ "height=360; framerate=30",
+ "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+
+ // AV1 Main Profile 240p
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"av01.0.00M.10.0.110.09.16.09.0\"; width=426; "
+ "height=240; framerate=30",
+ "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+
+ // AV1 Main Profile 144p
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/mp4; codecs=\"av01.0.00M.10.0.110.09.16.09.0\"; width=256; "
+ "height=144; framerate=30",
+ "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+ }
+
+ // Vp9 8K
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/webm; codecs=\"vp9\"; width=7680; height=4320; framerate=30", "");
+
+ if (result == kSbMediaSupportTypeProbably) {
+ // Vp9 4K
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/webm; codecs=\"vp9\"; width=3840; height=2160; framerate=30",
+ "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+ }
+
+ // Vp9 4K
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/webm; codecs=\"vp9\"; width=3840; height=2160; framerate=30", "");
+
+ if (result == kSbMediaSupportTypeProbably) {
+ // Vp9 1440p
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/webm; codecs=\"vp9\"; width=2560; height=1440; framerate=30",
+ "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+ }
+
+ // Vp9 1080p
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/webm; codecs=\"vp9\"; width=1920; height=1080; framerate=30", "");
+
+ if (result == kSbMediaSupportTypeProbably) {
+ // Vp9 1080p 25 fps
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/webm; codecs=\"vp9\"; width=1920; height=1080; framerate=25",
+ "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+
+ // Vp9 1080p 24 fps
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/webm; codecs=\"vp9\"; width=1920; height=1080; framerate=24",
+ "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+
+ // Vp9 1920x818
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/webm; codecs=\"vp9\"; width=1920; height=818; framerate=30", "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+
+ // Vp9 720p
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/webm; codecs=\"vp9\"; width=1280; height=720; framerate=30", "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+
+ // Vp9 480p
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/webm; codecs=\"vp9\"; width=854; height=480; framerate=30", "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+
+ // Vp9 360p
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/webm; codecs=\"vp9\"; width=640; height=360; framerate=30", "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+
+ // Vp9 240p
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/webm; codecs=\"vp9\"; width=426; height=240; framerate=30", "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+
+ // Vp9 144p
+ result = SbMediaCanPlayMimeAndKeySystem(
+ "video/webm; codecs=\"vp9\"; width=256; height=144; framerate=30", "");
+ ASSERT_EQ(result, kSbMediaSupportTypeProbably);
+ }
+}
+
} // namespace
} // namespace nplb
} // namespace starboard
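
The NPLB cases above exercise SbMediaCanPlayMimeAndKeySystem() directly, encoding codec, resolution and frame rate in the MIME string and passing an empty key-system string to mean playback without DRM; support at a lower resolution is only asserted when the next tier up already reported kSbMediaSupportTypeProbably. A minimal sketch of the same query outside the test harness follows; the helper name is illustrative and not part of this patch.

#include "starboard/media.h"

// Minimal sketch: probe VP9 1080p30 support the same way the NPLB cases
// above do. An empty key-system string means no DRM is required.
bool CanPlayVp9FullHd() {
  SbMediaSupportType support = SbMediaCanPlayMimeAndKeySystem(
      "video/webm; codecs=\"vp9\"; width=1920; height=1080; framerate=30", "");
  // kSbMediaSupportTypeProbably is the strongest positive answer; a "maybe"
  // result only means the implementation cannot rule playback out.
  return support == kSbMediaSupportTypeProbably;
}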
diff --git a/src/starboard/nplb/nplb.gyp b/src/starboard/nplb/nplb.gyp
index ab22a51..c280c78 100644
--- a/src/starboard/nplb/nplb.gyp
+++ b/src/starboard/nplb/nplb.gyp
@@ -318,6 +318,7 @@
],
'dependencies': [
'<@(cobalt_platform_dependencies)',
+ '<(DEPTH)/starboard/common/common.gyp:common',
'<(DEPTH)/starboard/shared/starboard/media/media.gyp:media_util',
'<(DEPTH)/starboard/shared/starboard/player/player.gyp:player_copy_test_data',
'<(DEPTH)/starboard/shared/starboard/player/player.gyp:video_dmp',
diff --git a/src/starboard/nplb/socket_helpers.cc b/src/starboard/nplb/socket_helpers.cc
index c239a80..b9b33fc 100644
--- a/src/starboard/nplb/socket_helpers.cc
+++ b/src/starboard/nplb/socket_helpers.cc
@@ -84,32 +84,6 @@
return false;
}
-SbSocketAddress GetLocalhostAddress(SbSocketAddressType address_type,
- int port) {
- SbSocketAddress address = GetUnspecifiedAddress(address_type, port);
- switch (address_type) {
- case kSbSocketAddressTypeIpv4: {
- address.address[0] = 127;
- address.address[3] = 1;
- return address;
- }
- case kSbSocketAddressTypeIpv6: {
- address.address[15] = 1;
- return address;
- }
- }
- ADD_FAILURE() << "GetLocalhostAddress for unknown address type";
- return address;
-}
-
-SbSocketAddress GetUnspecifiedAddress(SbSocketAddressType address_type,
- int port) {
- SbSocketAddress address = {0};
- address.type = address_type;
- address.port = port;
- return address;
-}
-
SbSocket CreateServerTcpSocket(SbSocketAddressType address_type) {
SbSocket server_socket = SbSocketCreate(address_type, kSbSocketProtocolTcp);
if (!SbSocketIsValid(server_socket)) {
@@ -220,7 +194,12 @@
}
// Connect to localhost:<port>.
- SbSocketAddress address = GetLocalhostAddress(address_type, port);
+ SbSocketAddress address = {};
+ bool success = GetLocalhostAddress(address_type, port, &address);
+ if (!success) {
+ ADD_FAILURE() << "GetLocalhostAddress failed";
+ return kSbSocketInvalid;
+ }
// This connect will probably return pending, but we'll assume it will connect
// eventually.
@@ -245,7 +224,12 @@
}
// Connect to localhost:<port>.
- SbSocketAddress address = GetLocalhostAddress(address_type, port);
+ SbSocketAddress address = {};
+ bool success = GetLocalhostAddress(address_type, port, &address);
+ if (!success) {
+ ADD_FAILURE() << "GetLocalhostAddress failed";
+ return scoped_ptr<Socket>().Pass();
+ }
// This connect will probably return pending, but we'll assume it will connect
// eventually.
diff --git a/src/starboard/nplb/socket_helpers.h b/src/starboard/nplb/socket_helpers.h
index 8171b0e..600eb3a 100644
--- a/src/starboard/nplb/socket_helpers.h
+++ b/src/starboard/nplb/socket_helpers.h
@@ -40,13 +40,6 @@
// This will always return the same port number.
int GetPortNumberForTests();
-// Returns an IP localhost address with the given port.
-SbSocketAddress GetLocalhostAddress(SbSocketAddressType address_type, int port);
-
-// Returns an IP unspecified address with the given port.
-SbSocketAddress GetUnspecifiedAddress(SbSocketAddressType address_type,
- int port);
-
// Creates a TCP/IP server socket (sets Reuse Address option).
SbSocket CreateServerTcpSocket(SbSocketAddressType address_type);
scoped_ptr<Socket> CreateServerTcpSocketWrapped(
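
Here and in link_receiver.cc below, the locally defined GetLocalhostAddress()/GetUnspecifiedAddress() helpers are dropped in favor of a shared implementation (which is why nplb.gyp gains the starboard/common dependency above). The shared helper reports failure through its bool return value and fills an out parameter instead of returning a possibly half-initialized address. A minimal sketch of the new call pattern follows, assuming a declaration of the shape shown; the real declaration lives under starboard/common, is namespace-qualified, and is not part of this hunk, and ConnectToLoopback is an illustrative helper only.

#include "starboard/socket.h"

// Assumed shape of the shared helper (not shown in this patch).
bool GetLocalhostAddress(SbSocketAddressType address_type,
                         int port,
                         SbSocketAddress* address);

// Sketch of the new call pattern: failure is reported explicitly instead of
// being signalled by a zeroed address for an unknown address type.
SbSocket ConnectToLoopback(SbSocketAddressType address_type, int port) {
  SbSocketAddress address = {};
  if (!GetLocalhostAddress(address_type, port, &address)) {
    return kSbSocketInvalid;
  }
  SbSocket socket = SbSocketCreate(address_type, kSbSocketProtocolTcp);
  if (!SbSocketIsValid(socket)) {
    return kSbSocketInvalid;
  }
  SbSocketConnect(socket, &address);  // Usually returns pending at first.
  return socket;
}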
diff --git a/src/starboard/raspi/2/skia/configuration.cc b/src/starboard/raspi/2/skia/configuration.cc
index 58a6f11..925b92b 100644
--- a/src/starboard/raspi/2/skia/configuration.cc
+++ b/src/starboard/raspi/2/skia/configuration.cc
@@ -38,7 +38,7 @@
const CobaltExtensionConfigurationApi kConfigurationApi = {
kCobaltExtensionConfigurationName,
- 1,
+ 2,
&common::CobaltUserOnExitStrategyDefault,
&common::CobaltRenderDirtyRegionOnlyDefault,
&common::CobaltEglSwapIntervalDefault,
@@ -61,6 +61,7 @@
&common::CobaltGcZealDefault,
&CobaltRasterizerType,
&common::CobaltEnableJitDefault,
+ &common::CobaltFallbackSplashScreenTopicsDefault,
};
} // namespace
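
This hunk, like the matching raspi/shared and stub changes below, bumps the CobaltExtensionConfigurationApi version from 1 to 2 and appends &common::CobaltFallbackSplashScreenTopicsDefault as the new final entry. The sketch below shows how a caller might consume the new entry; it assumes the accessor takes no arguments and returns a const char*, mirroring the neighboring defaults, and the member and field names used here are assumptions where they are not visible in this patch.

#include "cobalt/extension/configuration.h"
#include "starboard/system.h"

// Hedged sketch: fetch the configuration extension and touch the version-2
// entry only when the platform actually provides it. The accessor's shape
// (no arguments, const char* return) is assumed, not taken from this patch.
const char* GetFallbackSplashScreenTopics() {
  auto* api = static_cast<const CobaltExtensionConfigurationApi*>(
      SbSystemGetExtension(kCobaltExtensionConfigurationName));
  if (!api || api->version < 2) {
    return "";  // Entry only exists from version 2 onward.
  }
  return api->CobaltFallbackSplashScreenTopics();
}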
diff --git a/src/starboard/raspi/shared/configuration.cc b/src/starboard/raspi/shared/configuration.cc
index b3a9f0b..74ab33e 100644
--- a/src/starboard/raspi/shared/configuration.cc
+++ b/src/starboard/raspi/shared/configuration.cc
@@ -32,7 +32,7 @@
}
const CobaltExtensionConfigurationApi kConfigurationApi = {
kCobaltExtensionConfigurationName,
- 1,
+ 2,
&common::CobaltUserOnExitStrategyDefault,
&common::CobaltRenderDirtyRegionOnlyDefault,
&common::CobaltEglSwapIntervalDefault,
@@ -55,6 +55,7 @@
&common::CobaltGcZealDefault,
&common::CobaltRasterizerTypeDefault,
&common::CobaltEnableJitDefault,
+ &common::CobaltFallbackSplashScreenTopicsDefault,
};
} // namespace
diff --git a/src/starboard/shared/starboard/link_receiver.cc b/src/starboard/shared/starboard/link_receiver.cc
index d5ddadd..a7e8208 100644
--- a/src/starboard/shared/starboard/link_receiver.cc
+++ b/src/starboard/shared/starboard/link_receiver.cc
@@ -34,36 +34,6 @@
namespace starboard {
namespace {
-// Returns an address that means bind to any interface on the given |port|. When
-// |port| is zero, it means the system should choose the port.
-SbSocketAddress GetUnspecifiedAddress(SbSocketAddressType address_type,
- int port) {
- SbSocketAddress address = {0};
- address.type = address_type;
- address.port = port;
- return address;
-}
-
-// Returns an address that means bind to the loopback interface on the given
-// |port|. When |port| is zero, it means the system should choose the port.
-SbSocketAddress GetLocalhostAddress(SbSocketAddressType address_type,
- int port) {
- SbSocketAddress address = GetUnspecifiedAddress(address_type, port);
- switch (address_type) {
- case kSbSocketAddressTypeIpv4: {
- address.address[0] = 127;
- address.address[3] = 1;
- return address;
- }
- case kSbSocketAddressTypeIpv6: {
- address.address[15] = 1;
- return address;
- }
- }
- SB_LOG(ERROR) << __FUNCTION__ << ": unknown address type: " << address_type;
- return address;
-}
-
// Creates a socket that is appropriate for binding and listening, but is not
// bound and hasn't started listening yet.
scoped_ptr<Socket> CreateServerSocket(SbSocketAddressType address_type) {
@@ -91,7 +61,12 @@
return scoped_ptr<Socket>().Pass();
}
- SbSocketAddress address = GetLocalhostAddress(address_type, port);
+ SbSocketAddress address = {};
+ bool success = GetLocalhostAddress(address_type, port, &address);
+ if (!success) {
+ SB_LOG(ERROR) << "GetLocalhostAddress failed";
+ return scoped_ptr<Socket>().Pass();
+ }
SbSocketError result = socket->Bind(&address);
if (result != kSbSocketOk) {
SB_LOG(ERROR) << __FUNCTION__ << ": "
diff --git a/src/starboard/shared/starboard/media/media_util.cc b/src/starboard/shared/starboard/media/media_util.cc
index 3269d04..7dab6c0 100644
--- a/src/starboard/shared/starboard/media/media_util.cc
+++ b/src/starboard/shared/starboard/media/media_util.cc
@@ -160,6 +160,10 @@
int bitrate = mime_type.GetParamIntValue("bitrate", kDefaultBitRate);
+ if (width < 0 || height < 0 || fps < 0 || bitrate < 0) {
+ return false;
+ }
+
#if SB_HAS(MEDIA_IS_VIDEO_SUPPORTED_REFINEMENT)
if (!SbMediaIsVideoSupported(video_codec,
#if SB_API_VERSION >= 12
@@ -170,7 +174,7 @@
fps, decode_to_texture_required)) {
return false;
}
-#else // SB_HAS(MEDIA_IS_VIDEO_SUPPORTED_REFINEMENT)
+#else // SB_HAS(MEDIA_IS_VIDEO_SUPPORTED_REFINEMENT)
if (!SbMediaIsVideoSupported(video_codec, width, height, bitrate, fps,
decode_to_texture_required)) {
return false;
@@ -470,7 +474,7 @@
#if SB_API_VERSION < 11
case kSbMediaVideoCodecVp10:
return "vp10";
-#else // SB_API_VERSION < 11
+#else // SB_API_VERSION < 11
case kSbMediaVideoCodecAv1:
return "av1";
#endif // SB_API_VERSION < 11
@@ -722,7 +726,7 @@
os << sample_info.frame_width << 'x' << sample_info.frame_height << ' ';
#if SB_API_VERSION >= 11
os << '(' << sample_info.color_metadata << ')';
-#else // SB_API_VERSION >= 11
+#else // SB_API_VERSION >= 11
os << '(' << *sample_info.color_metadata << ')';
#endif // SB_API_VERSION >= 11
return os;
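
Besides re-aligning trailing comments, the media_util.cc change adds an early rejection of MIME attributes that parse to negative integers before they reach SbMediaIsVideoSupported(). The standalone sketch below restates that guard; the helper name and the idea of calling it from a MIME-probing path are illustrative only.

// Minimal restatement of the new guard: attributes parsed from a MIME string
// such as "width=-1920" must be non-negative before they are forwarded to
// SbMediaIsVideoSupported(). The helper name is not part of this patch.
bool AreVideoParametersSane(int width, int height, int fps, int bitrate) {
  return width >= 0 && height >= 0 && fps >= 0 && bitrate >= 0;
}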
diff --git a/src/starboard/shared/starboard/player/filter/testing/video_decoder_test_fixture.cc b/src/starboard/shared/starboard/player/filter/testing/video_decoder_test_fixture.cc
index 46dd43c..f670b69 100644
--- a/src/starboard/shared/starboard/player/filter/testing/video_decoder_test_fixture.cc
+++ b/src/starboard/shared/starboard/player/filter/testing/video_decoder_test_fixture.cc
@@ -136,12 +136,14 @@
event_queue_.push_back(Event(kBufferFull, frame));
} else {
event_queue_.push_back(Event(kError, frame));
+ SB_LOG(WARNING) << "OnDecoderStatusUpdate received unknown state.";
}
}
void VideoDecoderTestFixture::OnError() {
ScopedLock scoped_lock(mutex_);
event_queue_.push_back(Event(kError, NULL));
+ SB_LOG(WARNING) << "Video decoder received error.";
}
#if SB_HAS(GLES2)
@@ -185,6 +187,7 @@
SbThreadSleep(kSbTimeMillisecond);
} while (SbTimeGetMonotonicNow() - start < timeout);
event->status = kTimeout;
+ SB_LOG(WARNING) << "WaitForNextEvent() timeout.";
}
bool VideoDecoderTestFixture::HasPendingEvents() {
diff --git a/src/starboard/stub/configuration.cc b/src/starboard/stub/configuration.cc
index 6af5309..1f9a6dd 100644
--- a/src/starboard/stub/configuration.cc
+++ b/src/starboard/stub/configuration.cc
@@ -28,7 +28,7 @@
const CobaltExtensionConfigurationApi kConfigurationApi = {
kCobaltExtensionConfigurationName,
- 1,
+ 2,
&common::CobaltUserOnExitStrategyDefault,
&common::CobaltRenderDirtyRegionOnlyDefault,
&common::CobaltEglSwapIntervalDefault,
@@ -51,6 +51,7 @@
&common::CobaltGcZealDefault,
&CobaltRasterizerType,
&common::CobaltEnableJitDefault,
+ &common::CobaltFallbackSplashScreenTopicsDefault,
};
} // namespace
diff --git a/src/starboard/tools/abstract_launcher.py b/src/starboard/tools/abstract_launcher.py
index 201e2f0..ad6764f 100644
--- a/src/starboard/tools/abstract_launcher.py
+++ b/src/starboard/tools/abstract_launcher.py
@@ -23,6 +23,10 @@
from starboard.tools import build
from starboard.tools import paths
+ARG_NOINSTALL = "noinstall"
+ARG_SYSTOOLS = "systools"
+ARG_DRYRUN = "dryrun"
+
def _GetLauncherForPlatform(platform_name):
"""Gets the module containing a platform's concrete launcher implementation.
@@ -125,6 +129,11 @@
env_variables = {}
self.env_variables = env_variables
+ launcher_args = kwargs.get("launcher_args", None)
+ if launcher_args is None:
+ launcher_args = []
+ self.launcher_args = launcher_args
+
# Launchers that need different startup timeout times should reassign
# this variable during initialization.
self.startup_timeout_seconds = 2 * 60
diff --git a/src/starboard/tools/app_launcher_packager.py b/src/starboard/tools/app_launcher_packager.py
index e2fad3a..902d8e0 100644
--- a/src/starboard/tools/app_launcher_packager.py
+++ b/src/starboard/tools/app_launcher_packager.py
@@ -33,6 +33,8 @@
sys.path.append(THIRD_PARTY_ROOT)
# pylint: disable=g-import-not-at-top,g-bad-import-order
import jinja2
+from starboard.tools import command_line
+from starboard.tools import log_level
from starboard.tools import port_symlink
import starboard.tools.platform
@@ -237,8 +239,8 @@
def main(command_args):
- logging.basicConfig(level=logging.INFO)
parser = argparse.ArgumentParser()
+ command_line.AddLoggingArguments(parser, default='warning')
dest_group = parser.add_mutually_exclusive_group(required=True)
dest_group.add_argument(
'-d',
@@ -256,12 +258,12 @@
action='store_true',
help='List to stdout the application resources relative to the current '
'directory.')
- parser.add_argument(
- '-v', '--verbose', action='store_true', help='Verbose logging output.')
+ parser.add_argument('-v', '--verbose', action='store_true',
+ help='Enables verbose logging. For more control over the '
+ "logging level use '--log_level' instead.")
args = parser.parse_args(command_args)
- if not args.verbose:
- logging.disable(logging.INFO)
+ log_level.InitializeLogging(args)
if args.destination_root:
CopyAppLauncherTools(REPOSITORY_ROOT, args.destination_root)
diff --git a/src/starboard/tools/command_line.py b/src/starboard/tools/command_line.py
index ddb4b32..afeb1a3 100644
--- a/src/starboard/tools/command_line.py
+++ b/src/starboard/tools/command_line.py
@@ -23,11 +23,18 @@
import starboard.tools.platform
-def AddLoggingArguments(arg_parser):
+def AddLoggingArguments(arg_parser, default='info'):
+ """Adds the logging level configuration argument.
+
+ Args:
+ arg_parser: The argument parser to use for initialization.
+ default: The default logging level to use. Valid values are: 'info',
+ 'debug', 'warning', 'error', and 'critical'.
+ """
arg_parser.add_argument(
'--log_level',
- choices=['debug', 'warning', 'error', 'critical'],
- default='info',
+ choices=['info', 'debug', 'warning', 'error', 'critical'],
+ default=default,
help='The minimum level a log statement must be to be output. This value '
"is used to initialize the 'logging' module log level.")
diff --git a/src/starboard/tools/create_derived_build.py b/src/starboard/tools/create_derived_build.py
index 85fc388..f54baac 100644
--- a/src/starboard/tools/create_derived_build.py
+++ b/src/starboard/tools/create_derived_build.py
@@ -32,6 +32,7 @@
import textwrap
import _env # pylint: disable=unused-import
+from starboard.tools import log_level
from starboard.tools import paths
import starboard.tools.environment as environment
@@ -151,11 +152,7 @@
def main():
- logging.basicConfig(
- level=logging.INFO,
- format=('[%(filename)s:%(lineno)s - %(asctime)s %(levelname)-8s] '
- '%(message)s'),
- datefmt='%m-%d %H:%M')
+ log_level.InitializeLoggingWithLevel(logging.INFO)
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
diff --git a/src/starboard/tools/testing/test_runner.py b/src/starboard/tools/testing/test_runner.py
index e6ca771..562bb25 100755
--- a/src/starboard/tools/testing/test_runner.py
+++ b/src/starboard/tools/testing/test_runner.py
@@ -218,7 +218,8 @@
application_name=None,
dry_run=False,
xml_output_dir=None,
- log_xml_results=False):
+ log_xml_results=False,
+ launcher_args=None):
self.platform = platform
self.config = config
self.loader_platform = loader_platform
@@ -227,6 +228,7 @@
self.target_params = target_params
self.out_directory = out_directory
self.loader_out_directory = loader_out_directory
+ self.launcher_args = launcher_args
if not self.out_directory:
self.out_directory = paths.BuildOutputDirectory(self.platform,
self.config)
@@ -336,6 +338,7 @@
return final_targets
def _GetTestFilters(self):
+ """Get test filters for a given platform and configuration."""
filters = self._platform_config.GetTestFilters()
app_filters = self._app_config.GetTestFilters()
if app_filters:
@@ -348,10 +351,10 @@
loader_platform_config = build.GetPlatformConfig(self.loader_platform)
loader_app_config = loader_platform_config.GetApplicationConfiguration(
self.application_name)
- for filter in (loader_platform_config.GetTestFilters() +
- loader_app_config.GetTestFilters()):
- if filter not in filters:
- filters.append(filter)
+ for filter_ in (loader_platform_config.GetTestFilters() +
+ loader_app_config.GetTestFilters()):
+ if filter_ not in filters:
+ filters.append(filter_)
return filters
def _GetAllTestEnvVariables(self):
@@ -430,7 +433,8 @@
env_variables=env,
loader_platform=self.loader_platform,
loader_config=self.loader_config,
- loader_out_directory=self.loader_out_directory)
+ loader_out_directory=self.loader_out_directory,
+ launcher_args=self.launcher_args)
test_reader = TestLineReader(read_pipe)
test_launcher = TestLauncher(launcher)
@@ -547,10 +551,10 @@
total_flaky_failed_count = 0
total_filtered_count = 0
- print # Explicit print for empty formatting line.
+ print() # Explicit print for empty formatting line.
logging.info("TEST RUN COMPLETE.")
if results:
- print # Explicit print for empty formatting line.
+ print() # Explicit print for empty formatting line.
# If the number of run tests from a test binary cannot be
# determined, assume an error occurred while running it.
@@ -594,7 +598,7 @@
# Sometimes the returned test "name" includes information about the
# parameter that was passed to it. This needs to be stripped off.
retry_result = self._RunTest(target_name, test_case.split(",")[0])
- print # Explicit print for empty formatting line.
+ print() # Explicit print for empty formatting line.
if retry_result[2] == 1:
flaky_passed_tests.append(test_case)
logging.info("%s succeeded on run #%d!\n", test_case, retry + 2)
@@ -707,11 +711,12 @@
# tests so we need to build it separately.
if self.loader_platform:
build_tests.BuildTargets(
- [_LOADER_TARGET, _CRASHPAD_TARGET], self.loader_out_directory, self.dry_run,
- extra_flags + [os.getenv('TEST_RUNNER_PLATFORM_BUILD_FLAGS', '')])
+ [_LOADER_TARGET, _CRASHPAD_TARGET], self.loader_out_directory,
+ self.dry_run,
+ extra_flags + [os.getenv("TEST_RUNNER_PLATFORM_BUILD_FLAGS", "")])
build_tests.BuildTargets(
self.test_targets, self.out_directory, self.dry_run,
- extra_flags + [os.getenv('TEST_RUNNER_BUILD_FLAGS', '')])
+ extra_flags + [os.getenv("TEST_RUNNER_BUILD_FLAGS", "")])
except subprocess.CalledProcessError as e:
result = False
@@ -827,6 +832,13 @@
action="store_true",
help="If set, results will be logged in xml format after all tests are"
" complete. --xml_output_dir will be ignored.")
+ arg_parser.add_argument(
+ "-w",
+ "--launcher_args",
+ help="Pass space-separated arguments to control launcher behaviour. "
+ "Arguments are plaform specific and may not be implemented for all "
+ "platforms. Common arguments are:\n\t'noinstall' - skip install steps "
+ "before running the test\n\t'systools' - use system-installed tools.")
args = arg_parser.parse_args()
if (args.loader_platform and not args.loader_config or
@@ -840,12 +852,19 @@
if args.target_params:
target_params = args.target_params.split(" ")
+ launcher_args = []
+ if args.launcher_args:
+ launcher_args = args.launcher_args.split(" ")
+
+ if args.dry_run:
+ launcher_args.append(abstract_launcher.ARG_DRYRUN)
+
runner = TestRunner(args.platform, args.config, args.loader_platform,
args.loader_config, args.device_id, args.target_name,
target_params, args.out_directory,
args.loader_out_directory, args.platform_tests_only,
args.application_name, args.dry_run, args.xml_output_dir,
- args.log_xml_results)
+ args.log_xml_results, launcher_args)
def Abort(signum, frame):
del signum, frame # Unused.
diff --git a/src/starboard/tools/tools.gyp b/src/starboard/tools/tools.gyp
index 16652cb..ce18897 100644
--- a/src/starboard/tools/tools.gyp
+++ b/src/starboard/tools/tools.gyp
@@ -35,7 +35,7 @@
'<(app_launcher_zip_file)',
],
'action': [
- 'python',
+ 'python2',
'<(app_launcher_packager_path)',
'-z',
'<(app_launcher_zip_file)',
diff --git a/src/third_party/angle/include/platform/Platform.h b/src/third_party/angle/include/platform/Platform.h
index 09505a3..1035f8e 100644
--- a/src/third_party/angle/include/platform/Platform.h
+++ b/src/third_party/angle/include/platform/Platform.h
@@ -236,11 +236,11 @@
using ProgramKeyType = std::array<uint8_t, 20>;
using CacheProgramFunc = void (*)(PlatformMethods *platform,
const ProgramKeyType &key,
- size_t programSize,
+ std::size_t programSize,
const uint8_t *programBytes);
inline void DefaultCacheProgram(PlatformMethods *platform,
const ProgramKeyType &key,
- size_t programSize,
+ std::size_t programSize,
const uint8_t *programBytes)
{}
diff --git a/src/third_party/angle/src/vulkan_support/vulkan.gypi b/src/third_party/angle/src/vulkan_support/vulkan.gypi
index 779d0da..2c821ba 100644
--- a/src/third_party/angle/src/vulkan_support/vulkan.gypi
+++ b/src/third_party/angle/src/vulkan_support/vulkan.gypi
@@ -442,7 +442,7 @@
],
'action':
[
- 'python', '<(spirv_tools_path)/utils/generate_grammar_tables.py',
+ 'python2', '<(spirv_tools_path)/utils/generate_grammar_tables.py',
'--spirv-core-grammar=<(spirv_headers_path)/include/spirv/1.0/spirv.core.grammar.json',
'--extinst-glsl-grammar=<(spirv_headers_path)/include/spirv/1.0/extinst.glsl.std.450.grammar.json',
'--extinst-opencl-grammar=<(spirv_tools_path)/source/extinst-1.0.opencl.std.grammar.json',
@@ -469,7 +469,7 @@
],
'action':
[
- 'python', '<(spirv_tools_path)/utils/generate_grammar_tables.py',
+ 'python2', '<(spirv_tools_path)/utils/generate_grammar_tables.py',
'--spirv-core-grammar=<(spirv_headers_path)/include/spirv/1.1/spirv.core.grammar.json',
'--core-insts-output=<(angle_gen_path)/vulkan/core.insts-1.1.inc',
'--operand-kinds-output=<(angle_gen_path)/vulkan/operand.kinds-1.1.inc',
@@ -491,7 +491,7 @@
],
'action':
[
- 'python', '<(spirv_tools_path)/utils/generate_registry_tables.py',
+ 'python2', '<(spirv_tools_path)/utils/generate_registry_tables.py',
'--xml=<(spirv_headers_path)/include/spirv/spir-v.xml',
'--generator-output=<(angle_gen_path)/vulkan/generators.inc',
],
@@ -651,7 +651,7 @@
],
'action':
[
- 'python', '<(vulkan_layers_path)/scripts/lvl_genvk.py',
+ 'python2', '<(vulkan_layers_path)/scripts/lvl_genvk.py',
'-o', '<(angle_gen_path)/vulkan',
'-registry', '<(vulkan_layers_path)/scripts/vk.xml',
'vk_enum_string_helper.h', '-quiet',
@@ -676,7 +676,7 @@
],
'action':
[
- 'python', '<(vulkan_layers_path)/scripts/lvl_genvk.py',
+ 'python2', '<(vulkan_layers_path)/scripts/lvl_genvk.py',
'-o', '<(angle_gen_path)/vulkan',
'-registry', '<(vulkan_layers_path)/scripts/vk.xml',
'vk_struct_size_helper.h', '-quiet',
@@ -701,7 +701,7 @@
],
'action':
[
- 'python', '<(vulkan_layers_path)/scripts/lvl_genvk.py',
+ 'python2', '<(vulkan_layers_path)/scripts/lvl_genvk.py',
'-o', '<(angle_gen_path)/vulkan',
'-registry', '<(vulkan_layers_path)/scripts/vk.xml',
'vk_struct_size_helper.c', '-quiet',
@@ -726,7 +726,7 @@
],
'action':
[
- 'python', '<(vulkan_layers_path)/scripts/lvl_genvk.py',
+ 'python2', '<(vulkan_layers_path)/scripts/lvl_genvk.py',
'-o', '<(angle_gen_path)/vulkan',
'-registry', '<(vulkan_layers_path)/scripts/vk.xml',
'vk_safe_struct.h', '-quiet',
@@ -751,7 +751,7 @@
],
'action':
[
- 'python', '<(vulkan_layers_path)/scripts/lvl_genvk.py',
+ 'python2', '<(vulkan_layers_path)/scripts/lvl_genvk.py',
'-o', '<(angle_gen_path)/vulkan',
'-registry', '<(vulkan_layers_path)/scripts/vk.xml',
'vk_safe_struct.cpp', '-quiet',
@@ -776,7 +776,7 @@
],
'action':
[
- 'python', '<(vulkan_layers_path)/scripts/lvl_genvk.py', '-o', '<(angle_gen_path)/vulkan',
+ 'python2', '<(vulkan_layers_path)/scripts/lvl_genvk.py', '-o', '<(angle_gen_path)/vulkan',
'-registry', '<(vulkan_layers_path)/scripts/vk.xml', 'vk_layer_dispatch_table.h', '-quiet',
],
},
@@ -799,7 +799,7 @@
],
'action':
[
- 'python', '<(vulkan_layers_path)/scripts/lvl_genvk.py', '-o', '<(angle_gen_path)/vulkan',
+ 'python2', '<(vulkan_layers_path)/scripts/lvl_genvk.py', '-o', '<(angle_gen_path)/vulkan',
'-registry', '<(vulkan_layers_path)/scripts/vk.xml', 'vk_dispatch_table_helper.h', '-quiet',
],
},
@@ -822,7 +822,7 @@
],
'action':
[
- 'python', '<(vulkan_layers_path)/scripts/lvl_genvk.py', '-o', '<(angle_gen_path)/vulkan',
+ 'python2', '<(vulkan_layers_path)/scripts/lvl_genvk.py', '-o', '<(angle_gen_path)/vulkan',
'-registry', '<(vulkan_layers_path)/scripts/vk.xml', 'vk_loader_extensions.h', '-quiet',
],
},
@@ -845,7 +845,7 @@
],
'action':
[
- 'python', '<(vulkan_layers_path)/scripts/lvl_genvk.py', '-o', '<(angle_gen_path)/vulkan',
+ 'python2', '<(vulkan_layers_path)/scripts/lvl_genvk.py', '-o', '<(angle_gen_path)/vulkan',
'-registry', '<(vulkan_layers_path)/scripts/vk.xml', 'vk_loader_extensions.c', '-quiet',
],
},
@@ -871,7 +871,7 @@
],
'action':
[
- 'python', '<(angle_path)/scripts/generate_vulkan_layers_json.py',
+ 'python2', '<(angle_path)/scripts/generate_vulkan_layers_json.py',
'<(vulkan_layers_path)/layers/windows', '<(PRODUCT_DIR)/<(vulkan_json)',
],
}],
@@ -883,7 +883,7 @@
],
'action':
[
- 'python', '<(angle_path)/scripts/generate_vulkan_layers_json.py',
+ 'python2', '<(angle_path)/scripts/generate_vulkan_layers_json.py',
'<(vulkan_layers_path)/layers/linux', '<(PRODUCT_DIR)/<(vulkan_json)',
],
}],
@@ -1011,7 +1011,7 @@
'action':
[
# TODO(jmadill): Use correct platform path
- 'python', '<(angle_path)/scripts/generate_vulkan_header.py', '<(PRODUCT_DIR)/<(vulkan_json)',
+ 'python2', '<(angle_path)/scripts/generate_vulkan_header.py', '<(PRODUCT_DIR)/<(vulkan_json)',
'<(angle_gen_path)/vulkan/angle_loader.h', '<(PRODUCT_DIR)',
],
},
@@ -1023,7 +1023,7 @@
'outputs': [ '<(angle_gen_path)/vulkan/vulkan_loader_order_deps.stamp' ],
'action':
[
- 'python', '<(angle_path)/gyp/touch_stamp.py',
+ 'python2', '<(angle_path)/gyp/touch_stamp.py',
'<(angle_gen_path)/vulkan/vulkan_loader_order_deps.stamp',
]
},
@@ -1052,7 +1052,7 @@
'outputs': [ '<(angle_gen_path)/vulkan/layer_core_validation_order_deps.stamp' ],
'action':
[
- 'python', '<(angle_path)/gyp/touch_stamp.py',
+ 'python2', '<(angle_path)/gyp/touch_stamp.py',
'<(angle_gen_path)/vulkan/layer_core_validation_order_deps.stamp',
]
},
@@ -1090,7 +1090,7 @@
'outputs': [ '<(angle_gen_path)/vulkan/layer_swapchain_order_deps.stamp' ],
'action':
[
- 'python', '<(angle_path)/gyp/touch_stamp.py',
+ 'python2', '<(angle_path)/gyp/touch_stamp.py',
'<(angle_gen_path)/vulkan/layer_swapchain_order_deps.stamp',
]
},
@@ -1128,7 +1128,7 @@
'outputs': [ '<(angle_gen_path)/vulkan/layer_object_tracker_order_deps.stamp' ],
'action':
[
- 'python', '<(angle_path)/gyp/touch_stamp.py',
+ 'python2', '<(angle_path)/gyp/touch_stamp.py',
'<(angle_gen_path)/vulkan/layer_object_tracker_order_deps.stamp',
]
},
@@ -1176,7 +1176,7 @@
'outputs': [ '<(angle_gen_path)/vulkan/layer_unique_objects_order_deps.stamp' ],
'action':
[
- 'python', '<(angle_path)/gyp/touch_stamp.py',
+ 'python2', '<(angle_path)/gyp/touch_stamp.py',
'<(angle_gen_path)/vulkan/layer_unique_objects_order_deps.stamp',
]
},
@@ -1198,7 +1198,7 @@
],
'action':
[
- 'python', '<(vulkan_layers_path)/scripts/lvl_genvk.py', '-o', '<(angle_gen_path)/vulkan',
+ 'python2', '<(vulkan_layers_path)/scripts/lvl_genvk.py', '-o', '<(angle_gen_path)/vulkan',
'-registry', '<(vulkan_layers_path)/scripts/vk.xml', 'unique_objects_wrappers.h', '-quiet',
],
},
@@ -1236,7 +1236,7 @@
'outputs': [ '<(angle_gen_path)/vulkan/layer_threading_order_deps.stamp' ],
'action':
[
- 'python', '<(angle_path)/gyp/touch_stamp.py',
+ 'python2', '<(angle_path)/gyp/touch_stamp.py',
'<(angle_gen_path)/vulkan/layer_threading_order_deps.stamp',
]
},
@@ -1257,7 +1257,7 @@
],
'action':
[
- 'python', '<(vulkan_layers_path)/scripts/lvl_genvk.py', '-o',
+ 'python2', '<(vulkan_layers_path)/scripts/lvl_genvk.py', '-o',
'<(angle_gen_path)/vulkan', '-registry', '<(vulkan_layers_path)/scripts/vk.xml',
'thread_check.h', '-quiet',
],
@@ -1296,7 +1296,7 @@
'outputs': [ '<(angle_gen_path)/vulkan/layer_parameter_validation_order_deps.stamp' ],
'action':
[
- 'python', '<(angle_path)/gyp/touch_stamp.py',
+ 'python2', '<(angle_path)/gyp/touch_stamp.py',
'<(angle_gen_path)/vulkan/layer_parameter_validation_order_deps.stamp',
]
},
@@ -1317,7 +1317,7 @@
],
'action':
[
- 'python', '<(vulkan_layers_path)/scripts/lvl_genvk.py', '-o', '<(angle_gen_path)/vulkan',
+ 'python2', '<(vulkan_layers_path)/scripts/lvl_genvk.py', '-o', '<(angle_gen_path)/vulkan',
'-registry', '<(vulkan_layers_path)/scripts/vk.xml', 'parameter_validation.h', '-quiet',
],
},
diff --git a/src/third_party/blink/Source/bindings/scripts/global_constructors.gypi b/src/third_party/blink/Source/bindings/scripts/global_constructors.gypi
index 7255968..df0629c 100644
--- a/src/third_party/blink/Source/bindings/scripts/global_constructors.gypi
+++ b/src/third_party/blink/Source/bindings/scripts/global_constructors.gypi
@@ -58,7 +58,7 @@
],
'outputs': ['<@(outputs)'],
'action': [
- 'python',
+ 'python2',
'<(bindings_scripts_dir)/generate_global_constructors.py',
'--idl-files-list',
'<(idl_files_list)',
diff --git a/src/third_party/blink/Source/bindings/scripts/global_objects.gypi b/src/third_party/blink/Source/bindings/scripts/global_objects.gypi
index 1c6d373..feb909f 100644
--- a/src/third_party/blink/Source/bindings/scripts/global_objects.gypi
+++ b/src/third_party/blink/Source/bindings/scripts/global_objects.gypi
@@ -50,7 +50,7 @@
'<(output_file)',
],
'action': [
- 'python',
+ 'python2',
'<(bindings_scripts_dir)/compute_global_objects.py',
'--idl-files-list',
'<(idl_files_list)',
diff --git a/src/third_party/blink/Source/bindings/scripts/interfaces_info_individual.gypi b/src/third_party/blink/Source/bindings/scripts/interfaces_info_individual.gypi
index 88ba29d..51cc447 100644
--- a/src/third_party/blink/Source/bindings/scripts/interfaces_info_individual.gypi
+++ b/src/third_party/blink/Source/bindings/scripts/interfaces_info_individual.gypi
@@ -54,7 +54,7 @@
],
'action': [
- 'python',
+ 'python2',
'<(bindings_scripts_dir)/compute_interfaces_info_individual.py',
'--cache-directory',
'<(cache_directory)',
diff --git a/src/third_party/blink/Source/bindings/scripts/interfaces_info_overall.gypi b/src/third_party/blink/Source/bindings/scripts/interfaces_info_overall.gypi
index 47419da..d593e07 100644
--- a/src/third_party/blink/Source/bindings/scripts/interfaces_info_overall.gypi
+++ b/src/third_party/blink/Source/bindings/scripts/interfaces_info_overall.gypi
@@ -44,7 +44,7 @@
'<(output_file)',
],
'action': [
- 'python',
+ 'python2',
'<(bindings_scripts_dir)/compute_interfaces_info_overall.py',
'--',
'<@(input_files)',
diff --git a/src/third_party/crashpad/util/util.gyp b/src/third_party/crashpad/util/util.gyp
index 2440d7b..ce4d32f 100644
--- a/src/third_party/crashpad/util/util.gyp
+++ b/src/third_party/crashpad/util/util.gyp
@@ -357,7 +357,7 @@
'<(INTERMEDIATE_DIR)/util/mach/<(RULE_INPUT_ROOT)Server.h',
],
'action': [
- 'python',
+ 'python2',
'<@(_inputs)',
'<(RULE_INPUT_PATH)',
'<@(_outputs)',
diff --git a/src/third_party/mozjs-45/INSTALL b/src/third_party/mozjs-45/INSTALL
deleted file mode 100644
index c57135b..0000000
--- a/src/third_party/mozjs-45/INSTALL
+++ /dev/null
@@ -1,13 +0,0 @@
-Full build documentation for SpiderMonkey is hosted on MDN:
- https://developer.mozilla.org/en-US/docs/SpiderMonkey/Build_Documentation
-
-Note that the libraries produced by the build system include symbols,
-causing the binaries to be extremely large. It is highly suggested that `strip`
-be run over the binaries before deploying them.
-
-Building with default options may be performed as follows:
- cd js/src
- mkdir obj
- cd obj
- ../configure
- make # or mozmake on Windows
diff --git a/src/third_party/mozjs-45/LICENSE b/src/third_party/mozjs-45/LICENSE
deleted file mode 100644
index 14e2f77..0000000
--- a/src/third_party/mozjs-45/LICENSE
+++ /dev/null
@@ -1,373 +0,0 @@
-Mozilla Public License Version 2.0
-==================================
-
-1. Definitions
---------------
-
-1.1. "Contributor"
- means each individual or legal entity that creates, contributes to
- the creation of, or owns Covered Software.
-
-1.2. "Contributor Version"
- means the combination of the Contributions of others (if any) used
- by a Contributor and that particular Contributor's Contribution.
-
-1.3. "Contribution"
- means Covered Software of a particular Contributor.
-
-1.4. "Covered Software"
- means Source Code Form to which the initial Contributor has attached
- the notice in Exhibit A, the Executable Form of such Source Code
- Form, and Modifications of such Source Code Form, in each case
- including portions thereof.
-
-1.5. "Incompatible With Secondary Licenses"
- means
-
- (a) that the initial Contributor has attached the notice described
- in Exhibit B to the Covered Software; or
-
- (b) that the Covered Software was made available under the terms of
- version 1.1 or earlier of the License, but not also under the
- terms of a Secondary License.
-
-1.6. "Executable Form"
- means any form of the work other than Source Code Form.
-
-1.7. "Larger Work"
- means a work that combines Covered Software with other material, in
- a separate file or files, that is not Covered Software.
-
-1.8. "License"
- means this document.
-
-1.9. "Licensable"
- means having the right to grant, to the maximum extent possible,
- whether at the time of the initial grant or subsequently, any and
- all of the rights conveyed by this License.
-
-1.10. "Modifications"
- means any of the following:
-
- (a) any file in Source Code Form that results from an addition to,
- deletion from, or modification of the contents of Covered
- Software; or
-
- (b) any new file in Source Code Form that contains any Covered
- Software.
-
-1.11. "Patent Claims" of a Contributor
- means any patent claim(s), including without limitation, method,
- process, and apparatus claims, in any patent Licensable by such
- Contributor that would be infringed, but for the grant of the
- License, by the making, using, selling, offering for sale, having
- made, import, or transfer of either its Contributions or its
- Contributor Version.
-
-1.12. "Secondary License"
- means either the GNU General Public License, Version 2.0, the GNU
- Lesser General Public License, Version 2.1, the GNU Affero General
- Public License, Version 3.0, or any later versions of those
- licenses.
-
-1.13. "Source Code Form"
- means the form of the work preferred for making modifications.
-
-1.14. "You" (or "Your")
- means an individual or a legal entity exercising rights under this
- License. For legal entities, "You" includes any entity that
- controls, is controlled by, or is under common control with You. For
- purposes of this definition, "control" means (a) the power, direct
- or indirect, to cause the direction or management of such entity,
- whether by contract or otherwise, or (b) ownership of more than
- fifty percent (50%) of the outstanding shares or beneficial
- ownership of such entity.
-
-2. License Grants and Conditions
---------------------------------
-
-2.1. Grants
-
-Each Contributor hereby grants You a world-wide, royalty-free,
-non-exclusive license:
-
-(a) under intellectual property rights (other than patent or trademark)
- Licensable by such Contributor to use, reproduce, make available,
- modify, display, perform, distribute, and otherwise exploit its
- Contributions, either on an unmodified basis, with Modifications, or
- as part of a Larger Work; and
-
-(b) under Patent Claims of such Contributor to make, use, sell, offer
- for sale, have made, import, and otherwise transfer either its
- Contributions or its Contributor Version.
-
-2.2. Effective Date
-
-The licenses granted in Section 2.1 with respect to any Contribution
-become effective for each Contribution on the date the Contributor first
-distributes such Contribution.
-
-2.3. Limitations on Grant Scope
-
-The licenses granted in this Section 2 are the only rights granted under
-this License. No additional rights or licenses will be implied from the
-distribution or licensing of Covered Software under this License.
-Notwithstanding Section 2.1(b) above, no patent license is granted by a
-Contributor:
-
-(a) for any code that a Contributor has removed from Covered Software;
- or
-
-(b) for infringements caused by: (i) Your and any other third party's
- modifications of Covered Software, or (ii) the combination of its
- Contributions with other software (except as part of its Contributor
- Version); or
-
-(c) under Patent Claims infringed by Covered Software in the absence of
- its Contributions.
-
-This License does not grant any rights in the trademarks, service marks,
-or logos of any Contributor (except as may be necessary to comply with
-the notice requirements in Section 3.4).
-
-2.4. Subsequent Licenses
-
-No Contributor makes additional grants as a result of Your choice to
-distribute the Covered Software under a subsequent version of this
-License (see Section 10.2) or under the terms of a Secondary License (if
-permitted under the terms of Section 3.3).
-
-2.5. Representation
-
-Each Contributor represents that the Contributor believes its
-Contributions are its original creation(s) or it has sufficient rights
-to grant the rights to its Contributions conveyed by this License.
-
-2.6. Fair Use
-
-This License is not intended to limit any rights You have under
-applicable copyright doctrines of fair use, fair dealing, or other
-equivalents.
-
-2.7. Conditions
-
-Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
-in Section 2.1.
-
-3. Responsibilities
--------------------
-
-3.1. Distribution of Source Form
-
-All distribution of Covered Software in Source Code Form, including any
-Modifications that You create or to which You contribute, must be under
-the terms of this License. You must inform recipients that the Source
-Code Form of the Covered Software is governed by the terms of this
-License, and how they can obtain a copy of this License. You may not
-attempt to alter or restrict the recipients' rights in the Source Code
-Form.
-
-3.2. Distribution of Executable Form
-
-If You distribute Covered Software in Executable Form then:
-
-(a) such Covered Software must also be made available in Source Code
- Form, as described in Section 3.1, and You must inform recipients of
- the Executable Form how they can obtain a copy of such Source Code
- Form by reasonable means in a timely manner, at a charge no more
- than the cost of distribution to the recipient; and
-
-(b) You may distribute such Executable Form under the terms of this
- License, or sublicense it under different terms, provided that the
- license for the Executable Form does not attempt to limit or alter
- the recipients' rights in the Source Code Form under this License.
-
-3.3. Distribution of a Larger Work
-
-You may create and distribute a Larger Work under terms of Your choice,
-provided that You also comply with the requirements of this License for
-the Covered Software. If the Larger Work is a combination of Covered
-Software with a work governed by one or more Secondary Licenses, and the
-Covered Software is not Incompatible With Secondary Licenses, this
-License permits You to additionally distribute such Covered Software
-under the terms of such Secondary License(s), so that the recipient of
-the Larger Work may, at their option, further distribute the Covered
-Software under the terms of either this License or such Secondary
-License(s).
-
-3.4. Notices
-
-You may not remove or alter the substance of any license notices
-(including copyright notices, patent notices, disclaimers of warranty,
-or limitations of liability) contained within the Source Code Form of
-the Covered Software, except that You may alter any license notices to
-the extent required to remedy known factual inaccuracies.
-
-3.5. Application of Additional Terms
-
-You may choose to offer, and to charge a fee for, warranty, support,
-indemnity or liability obligations to one or more recipients of Covered
-Software. However, You may do so only on Your own behalf, and not on
-behalf of any Contributor. You must make it absolutely clear that any
-such warranty, support, indemnity, or liability obligation is offered by
-You alone, and You hereby agree to indemnify every Contributor for any
-liability incurred by such Contributor as a result of warranty, support,
-indemnity or liability terms You offer. You may include additional
-disclaimers of warranty and limitations of liability specific to any
-jurisdiction.
-
-4. Inability to Comply Due to Statute or Regulation
----------------------------------------------------
-
-If it is impossible for You to comply with any of the terms of this
-License with respect to some or all of the Covered Software due to
-statute, judicial order, or regulation then You must: (a) comply with
-the terms of this License to the maximum extent possible; and (b)
-describe the limitations and the code they affect. Such description must
-be placed in a text file included with all distributions of the Covered
-Software under this License. Except to the extent prohibited by statute
-or regulation, such description must be sufficiently detailed for a
-recipient of ordinary skill to be able to understand it.
-
-5. Termination
---------------
-
-5.1. The rights granted under this License will terminate automatically
-if You fail to comply with any of its terms. However, if You become
-compliant, then the rights granted under this License from a particular
-Contributor are reinstated (a) provisionally, unless and until such
-Contributor explicitly and finally terminates Your grants, and (b) on an
-ongoing basis, if such Contributor fails to notify You of the
-non-compliance by some reasonable means prior to 60 days after You have
-come back into compliance. Moreover, Your grants from a particular
-Contributor are reinstated on an ongoing basis if such Contributor
-notifies You of the non-compliance by some reasonable means, this is the
-first time You have received notice of non-compliance with this License
-from such Contributor, and You become compliant prior to 30 days after
-Your receipt of the notice.
-
-5.2. If You initiate litigation against any entity by asserting a patent
-infringement claim (excluding declaratory judgment actions,
-counter-claims, and cross-claims) alleging that a Contributor Version
-directly or indirectly infringes any patent, then the rights granted to
-You by any and all Contributors for the Covered Software under Section
-2.1 of this License shall terminate.
-
-5.3. In the event of termination under Sections 5.1 or 5.2 above, all
-end user license agreements (excluding distributors and resellers) which
-have been validly granted by You or Your distributors under this License
-prior to termination shall survive termination.
-
-************************************************************************
-* *
-* 6. Disclaimer of Warranty *
-* ------------------------- *
-* *
-* Covered Software is provided under this License on an "as is" *
-* basis, without warranty of any kind, either expressed, implied, or *
-* statutory, including, without limitation, warranties that the *
-* Covered Software is free of defects, merchantable, fit for a *
-* particular purpose or non-infringing. The entire risk as to the *
-* quality and performance of the Covered Software is with You. *
-* Should any Covered Software prove defective in any respect, You *
-* (not any Contributor) assume the cost of any necessary servicing, *
-* repair, or correction. This disclaimer of warranty constitutes an *
-* essential part of this License. No use of any Covered Software is *
-* authorized under this License except under this disclaimer. *
-* *
-************************************************************************
-
-************************************************************************
-* *
-* 7. Limitation of Liability *
-* -------------------------- *
-* *
-* Under no circumstances and under no legal theory, whether tort *
-* (including negligence), contract, or otherwise, shall any *
-* Contributor, or anyone who distributes Covered Software as *
-* permitted above, be liable to You for any direct, indirect, *
-* special, incidental, or consequential damages of any character *
-* including, without limitation, damages for lost profits, loss of *
-* goodwill, work stoppage, computer failure or malfunction, or any *
-* and all other commercial damages or losses, even if such party *
-* shall have been informed of the possibility of such damages. This *
-* limitation of liability shall not apply to liability for death or *
-* personal injury resulting from such party's negligence to the *
-* extent applicable law prohibits such limitation. Some *
-* jurisdictions do not allow the exclusion or limitation of *
-* incidental or consequential damages, so this exclusion and *
-* limitation may not apply to You. *
-* *
-************************************************************************
-
-8. Litigation
--------------
-
-Any litigation relating to this License may be brought only in the
-courts of a jurisdiction where the defendant maintains its principal
-place of business and such litigation shall be governed by laws of that
-jurisdiction, without reference to its conflict-of-law provisions.
-Nothing in this Section shall prevent a party's ability to bring
-cross-claims or counter-claims.
-
-9. Miscellaneous
-----------------
-
-This License represents the complete agreement concerning the subject
-matter hereof. If any provision of this License is held to be
-unenforceable, such provision shall be reformed only to the extent
-necessary to make it enforceable. Any law or regulation which provides
-that the language of a contract shall be construed against the drafter
-shall not be used to construe this License against a Contributor.
-
-10. Versions of the License
----------------------------
-
-10.1. New Versions
-
-Mozilla Foundation is the license steward. Except as provided in Section
-10.3, no one other than the license steward has the right to modify or
-publish new versions of this License. Each version will be given a
-distinguishing version number.
-
-10.2. Effect of New Versions
-
-You may distribute the Covered Software under the terms of the version
-of the License under which You originally received the Covered Software,
-or under the terms of any subsequent version published by the license
-steward.
-
-10.3. Modified Versions
-
-If you create software not governed by this License, and you want to
-create a new license for such software, you may create and use a
-modified version of this License if you rename the license and remove
-any references to the name of the license steward (except to note that
-such modified license differs from this License).
-
-10.4. Distributing Source Code Form that is Incompatible With Secondary
-Licenses
-
-If You choose to distribute Source Code Form that is Incompatible With
-Secondary Licenses under the terms of this version of the License, the
-notice described in Exhibit B of this License must be attached.
-
-Exhibit A - Source Code Form License Notice
--------------------------------------------
-
- This Source Code Form is subject to the terms of the Mozilla Public
- License, v. 2.0. If a copy of the MPL was not distributed with this
- file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-If it is not possible or desirable to put the notice in a particular
-file, then You may include the notice in a location (such as a LICENSE
-file in a relevant directory) where a recipient would be likely to look
-for such a notice.
-
-You may add additional accurate notices of copyright ownership.
-
-Exhibit B - "Incompatible With Secondary Licenses" Notice
----------------------------------------------------------
-
- This Source Code Form is "Incompatible With Secondary Licenses", as
- defined by the Mozilla Public License, v. 2.0.
diff --git a/src/third_party/mozjs-45/METADATA b/src/third_party/mozjs-45/METADATA
deleted file mode 100644
index bc1efc0..0000000
--- a/src/third_party/mozjs-45/METADATA
+++ /dev/null
@@ -1,15 +0,0 @@
-name: "mozjs-45"
-description:
- "SpiderMonkey is Mozilla's JavaScript engine. SpiderMonkey was recently "
- "deprecated in Cobalt 21 but is still used out in production."
-third_party {
- url {
- type: ARCHIVE
- value: "https://ftp.mozilla.org/pub/spidermonkey/releases/45.0.2/mozjs-45.0.2.tar.bz2"
- }
- last_upgrade_date {
- year: 2016
- month: 4
- day: 14
- }
-}
diff --git a/src/third_party/mozjs-45/Makefile.in b/src/third_party/mozjs-45/Makefile.in
deleted file mode 100644
index 078ac34..0000000
--- a/src/third_party/mozjs-45/Makefile.in
+++ /dev/null
@@ -1,336 +0,0 @@
-#
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-ifeq (,$(MAKE_VERSION))
-$(error GNU Make is required)
-endif
-make_min_ver := 3.81
-ifneq ($(make_min_ver),$(firstword $(sort $(make_min_ver) $(MAKE_VERSION))))
-$(error GNU Make $(make_min_ver) or higher is required)
-endif
-
-export TOPLEVEL_BUILD := 1
-
-default::
-
-ifdef MOZ_BUILD_APP
-include $(topsrcdir)/$(MOZ_BUILD_APP)/build.mk
-endif
-
-include $(topsrcdir)/config/config.mk
-
-GARBAGE_DIRS += _javagen _profile staticlib
-DIST_GARBAGE = config.cache config.log config.status* config-defs.h \
- config/autoconf.mk \
- mozilla-config.h \
- netwerk/necko-config.h xpcom/xpcom-config.h xpcom/xpcom-private.h \
- .mozconfig.mk
-
-ifdef JS_STANDALONE
-configure_dir = $(topsrcdir)/js/src
-else
-configure_dir = $(topsrcdir)
-endif
-
-ifndef TEST_MOZBUILD
-ifndef MOZ_PROFILE_USE
-# We need to explicitly put backend.RecursiveMakeBackend here
-# otherwise the rule in rules.mk doesn't run early enough.
-$(TIERS) binaries:: CLOBBER $(configure_dir)/configure config.status backend.RecursiveMakeBackend
-ifndef JS_STANDALONE
-ifdef COMPILE_ENVIRONMENT
-$(TIERS) binaries:: $(topsrcdir)/js/src/configure js/src/config.status
-endif
-endif
-endif
-endif
-
-ifdef JS_STANDALONE
-.PHONY: CLOBBER
-CLOBBER:
-else
-CLOBBER: $(topsrcdir)/CLOBBER
- @echo 'STOP! The CLOBBER file has changed.'
- @echo 'Please run the build through a sanctioned build wrapper, such as'
- @echo '"mach build" or client.mk.'
- @exit 1
-endif
-
-$(topsrcdir)/configure: $(topsrcdir)/configure.in
-$(topsrcdir)/js/src/configure: $(topsrcdir)/js/src/configure.in
-$(topsrcdir)/configure $(topsrcdir)/js/src/configure:
- @echo 'STOP! $^ has changed, and your configure is out of date.'
- @echo 'Please rerun autoconf and re-configure your build directory.'
- @echo 'To ignore this message, touch "$@",'
- @echo 'but your build might not succeed.'
- @exit 1
-
-config.status: $(configure_dir)/configure
-js/src/config.status: $(topsrcdir)/js/src/configure
-config.status js/src/config.status:
- @echo 'STOP! $^ has changed and needs to be run again.'
- @echo 'Please rerun it.'
- @echo 'To ignore this message, touch "$(CURDIR)/$@",'
- @echo 'but your build might not succeed.'
- @exit 1
-
-# Regenerate the build backend if it is out of date. We only have this rule in
-# this main make file because having it in rules.mk and applied to partial tree
-# builds resulted in a world of hurt. Gory details are in bug 877308.
-#
-# The mach build driver will ensure the backend is up to date for partial tree
-# builds. This cleanly avoids most of the pain.
-
-ifndef TEST_MOZBUILD
-backend.RecursiveMakeBackend:
- @echo 'Build configuration changed. Regenerating backend.'
- $(PYTHON) config.status
-
-Makefile: backend.RecursiveMakeBackend
- @$(TOUCH) $@
-
-include backend.RecursiveMakeBackend.pp
-
-default:: backend.RecursiveMakeBackend
-endif
-
-install_manifests := \
- $(addprefix dist/,bin branding idl include public private sdk xpi-stage) \
- _tests \
- $(NULL)
-install_manifest_depends = \
- CLOBBER \
- $(configure_dir)/configure \
- config.status \
- backend.RecursiveMakeBackend \
- $(NULL)
-
-ifndef JS_STANDALONE
-ifdef COMPILE_ENVIRONMENT
-install_manifest_depends += \
- $(topsrcdir)/js/src/configure \
- js/src/config.status \
- $(NULL)
-endif
-endif
-
-.PHONY: install-manifests
-install-manifests: $(addprefix install-,$(install_manifests))
-
-# process_install_manifest needs to be invoked with --no-remove when building
-# js as standalone because automated builds are building nspr separately and
-# that would remove the resulting files.
-# Eventually, a standalone js build would just be able to build nspr itself,
-# removing the need for the former.
-ifdef JS_STANDALONE
-NO_REMOVE=1
-endif
-
-.PHONY: $(addprefix install-,$(install_manifests))
-$(addprefix install-,$(filter dist/%,$(install_manifests))): install-dist/%: $(install_manifest_depends)
- $(call py_action,process_install_manifest,$(if $(NO_REMOVE),--no-remove )$(DIST)/$* _build_manifests/install/dist_$*)
-
-# Dummy wrapper rule to allow the faster backend to piggy back
-install-dist_%: install-dist/% ;
-
-install-_tests: $(install_manifest_depends)
- $(call py_action,process_install_manifest,$(if $(NO_REMOVE),--no-remove )_tests _build_manifests/install/_tests)
-
-# For compatibility
-.PHONY: install-tests
-install-tests: install-_tests
-
-include $(topsrcdir)/build/moz-automation.mk
-
-# dist and _tests should be purged during cleaning. However, we don't want them
-# purged during PGO builds because they contain some auto-generated files.
-ifneq ($(filter-out maybe_clobber_profiledbuild,$(MAKECMDGOALS)),)
-GARBAGE_DIRS += dist _tests
-endif
-
-# Windows PGO builds don't perform a clean before the 2nd pass. So, we want
-# to preserve content for the 2nd pass on Windows. Everywhere else, we always
-# process the install manifests as part of export.
-# For the binaries rule, not all the install manifests matter, so force only
-# the interesting ones to be done.
-ifdef MOZ_PROFILE_USE
-ifndef NO_PROFILE_GUIDED_OPTIMIZE
-ifneq ($(OS_ARCH)_$(GNU_CC), WINNT_)
-export:: install-manifests
-binaries::
- @$(MAKE) install-manifests NO_REMOVE=1 install_manifests=dist/include
-endif
-endif
-else # !MOZ_PROFILE_USE (normal build)
-export:: install-manifests
-binaries::
- @$(MAKE) install-manifests NO_REMOVE=1 install_manifests=dist/include
-endif
-
-# For historical reasons that are unknown, $(DIST)/sdk is always blown away
-# with no regard for PGO passes. This decision could probably be revisited.
-export:: install-dist/sdk
-
-ifndef JS_STANDALONE
-ifdef ENABLE_TESTS
-# Additional makefile targets to call automated test suites
-include $(topsrcdir)/testing/testsuite-targets.mk
-endif
-endif
-
-default all::
- $(call BUILDSTATUS,TIERS $(TIERS) $(if $(MOZ_AUTOMATION),$(MOZ_AUTOMATION_TIERS)))
-
-include $(topsrcdir)/config/rules.mk
-
-distclean::
- $(RM) $(DIST_GARBAGE)
-
-ifeq ($(OS_ARCH),WINNT)
-# we want to copy PDB files on Windows
-MAKE_SYM_STORE_ARGS := -c --vcs-info
-ifdef PDBSTR_PATH
-MAKE_SYM_STORE_ARGS += -i
-endif
-DUMP_SYMS_BIN ?= $(topsrcdir)/toolkit/crashreporter/tools/win32/dump_syms_vc$(_MSC_VER).exe
-# PDB files don't get moved to dist, so we need to scan the whole objdir
-MAKE_SYM_STORE_PATH := .
-endif
-ifeq ($(OS_ARCH),Darwin)
-# need to pass arch flags for universal builds
-ifdef UNIVERSAL_BINARY
-MAKE_SYM_STORE_ARGS := -c -a 'i386 x86_64' --vcs-info
-MAKE_SYM_STORE_PATH := $(DIST)/universal
-else
-MAKE_SYM_STORE_ARGS := -c -a $(OS_TEST) --vcs-info
-MAKE_SYM_STORE_PATH := $(DIST)/bin
-endif
-DUMP_SYMS_BIN ?= $(DIST)/host/bin/dump_syms
-endif
-ifeq (,$(filter-out Linux SunOS,$(OS_ARCH)))
-MAKE_SYM_STORE_ARGS := -c --vcs-info
-DUMP_SYMS_BIN ?= $(DIST)/host/bin/dump_syms
-MAKE_SYM_STORE_PATH := $(DIST)/bin
-endif
-MAKE_SYM_STORE_ARGS += --install-manifest=$(DEPTH)/_build_manifests/install/dist_include,$(DIST)/include
-
-SYM_STORE_SOURCE_DIRS := $(topsrcdir)
-
-ifndef JS_STANDALONE
-include $(topsrcdir)/toolkit/mozapps/installer/package-name.mk
-
-ifdef MOZ_SYMBOLS_EXTRA_BUILDID
-EXTRA_BUILDID := -$(MOZ_SYMBOLS_EXTRA_BUILDID)
-endif
-
-SYMBOL_INDEX_NAME = \
- $(MOZ_APP_NAME)-$(MOZ_APP_VERSION)-$(OS_TARGET)-$(BUILDID)-$(CPU_ARCH)$(EXTRA_BUILDID)-symbols.txt
-
-buildsymbols:
-ifdef MOZ_CRASHREPORTER
- echo building symbol store
- $(RM) -r $(DIST)/crashreporter-symbols
- $(RM) '$(DIST)/$(SYMBOL_ARCHIVE_BASENAME).zip'
- $(RM) '$(DIST)/$(SYMBOL_FULL_ARCHIVE_BASENAME).zip'
- $(NSINSTALL) -D $(DIST)/crashreporter-symbols
- OBJCOPY='$(OBJCOPY)' \
- $(PYTHON) $(topsrcdir)/toolkit/crashreporter/tools/symbolstore.py \
- $(MAKE_SYM_STORE_ARGS) \
- $(foreach dir,$(SYM_STORE_SOURCE_DIRS),-s $(dir)) \
- $(DUMP_SYMS_BIN) \
- $(DIST)/crashreporter-symbols \
- $(MAKE_SYM_STORE_PATH) | grep -iv test > \
- $(DIST)/crashreporter-symbols/$(SYMBOL_INDEX_NAME)
- echo packing symbols
- $(NSINSTALL) -D $(DIST)/$(PKG_PATH)
- cd $(DIST)/crashreporter-symbols && \
- zip -r9D '../$(PKG_PATH)$(SYMBOL_FULL_ARCHIVE_BASENAME).zip' . -x '*test*' -x '*Test*'
- cd $(DIST)/crashreporter-symbols && \
- grep 'sym' $(SYMBOL_INDEX_NAME) > $(SYMBOL_INDEX_NAME).tmp && \
- mv $(SYMBOL_INDEX_NAME).tmp $(SYMBOL_INDEX_NAME)
- cd $(DIST)/crashreporter-symbols && \
- zip -r9D '../$(PKG_PATH)$(SYMBOL_ARCHIVE_BASENAME).zip' . -i '*.sym' -i '*.txt' -x '*test*' -x '*Test*'
-endif # MOZ_CRASHREPORTER
-
-uploadsymbols:
-ifdef MOZ_CRASHREPORTER
-ifdef SOCORRO_SYMBOL_UPLOAD_TOKEN_FILE
- $(PYTHON) -u $(topsrcdir)/toolkit/crashreporter/tools/upload_symbols.py '$(DIST)/$(PKG_PATH)$(SYMBOL_FULL_ARCHIVE_BASENAME).zip'
-else
- $(SHELL) $(topsrcdir)/toolkit/crashreporter/tools/upload_symbols.sh $(SYMBOL_INDEX_NAME) '$(DIST)/$(PKG_PATH)$(SYMBOL_FULL_ARCHIVE_BASENAME).zip'
-endif
-endif
-
-# MOZ_SOURCE_STAMP is defined in package-name.mk with a deferred assignment.
-# exporting it makes make run its $(shell) command for each invoked submake,
-# so transform it to an immediate assignment.
-MOZ_SOURCE_STAMP := $(MOZ_SOURCE_STAMP)
-export MOZ_SOURCE_STAMP
-endif
-
-.PHONY: update-packaging
-update-packaging:
- $(MAKE) -C tools/update-packaging
-
-.PHONY: pretty-package
-pretty-package:
- unset MOZ_SIGN_CMD && $(MAKE) package MOZ_PKG_PRETTYNAMES=1
-
-.PHONY: pretty-package-tests
-pretty-package-tests:
- unset MOZ_SIGN_CMD && $(MAKE) package-tests MOZ_PKG_PRETTYNAMES=1
-
-.PHONY: pretty-l10n-check
-pretty-l10n-check:
- unset MOZ_SIGN_CMD && $(MAKE) l10n-check MOZ_PKG_PRETTYNAMES=1
-
-.PHONY: pretty-update-packaging
-pretty-update-packaging:
- unset MOZ_SIGN_CMD && $(MAKE) -C tools/update-packaging MOZ_PKG_PRETTYNAMES=1
-
-.PHONY: pretty-installer
-pretty-installer:
- unset MOZ_SIGN_CMD && $(MAKE) installer MOZ_PKG_PRETTYNAMES=1
-
-#XXX: this is a hack, since we don't want to clobber for MSVC
-# PGO support, but we can't do this test in client.mk
-ifneq ($(OS_ARCH)_$(GNU_CC), WINNT_)
-# No point in clobbering if PGO has been explicitly disabled.
-ifndef NO_PROFILE_GUIDED_OPTIMIZE
-maybe_clobber_profiledbuild: clean
-else
-maybe_clobber_profiledbuild:
-endif
-else
-maybe_clobber_profiledbuild:
- $(RM) $(DIST)/bin/*.pgc
- find $(DIST)/$(MOZ_APP_NAME) -name '*.pgc' -exec mv {} $(DIST)/bin \;
-endif
-
-.PHONY: maybe_clobber_profiledbuild
-
-# Look for R_386_PC32 relocations in shared libs, these
-# break x86_64 builds and SELinux users.
-ifeq ($(OS_TARGET)_$(TARGET_XPCOM_ABI),Linux_x86-gcc3)
-check::
- @relcount=`find $(DIST)/bin -name '*.so' | xargs objdump -R | grep R_386_PC32 | wc -l` && if test $$relcount -gt 0; then echo 'FAILED: R_386_PC32 relocations detected in a shared library. Did you use a system header without adding it to config/system-headers?'; exit 1; else echo 'PASSED'; fi
-endif
-
-ifdef JS_STANDALONE
-# Delegate js-specific rules to js
-check-%:
- $(MAKE) -C js/src $@
-
-source-package install:
- $(MAKE) -C js/src $@
-
-# Every export rule depends on config/export, but the rule for config/export
-# doesn't exist when building js non-standalone.
-.PHONY: config/export
-config/export:
-
-endif
-
-# There used to be build interdependencies here. They are now in config/recurse.mk
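
The Windows branch of maybe_clobber_profiledbuild above does not clean at all; it only sweeps stray *.pgc profile data back into dist/bin. A rough Python rendering of that recipe, purely for illustration ('dist' and 'firefox' below are placeholders for $(DIST) and $(MOZ_APP_NAME)):

    import glob
    import os
    import shutil

    dist = 'dist'          # placeholder for $(DIST)
    app_name = 'firefox'   # placeholder for $(MOZ_APP_NAME)

    # $(RM) $(DIST)/bin/*.pgc
    for stale in glob.glob(os.path.join(dist, 'bin', '*.pgc')):
        os.remove(stale)

    # find $(DIST)/$(MOZ_APP_NAME) -name '*.pgc' -exec mv {} $(DIST)/bin \;
    for root, _dirs, files in os.walk(os.path.join(dist, app_name)):
        for name in files:
            if name.endswith('.pgc'):
                shutil.move(os.path.join(root, name), os.path.join(dist, 'bin', name))
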
diff --git a/src/third_party/mozjs-45/README b/src/third_party/mozjs-45/README
deleted file mode 100644
index 5a7fff7..0000000
--- a/src/third_party/mozjs-45/README
+++ /dev/null
@@ -1,8 +0,0 @@
-This directory contains SpiderMonkey 45.
-
-This release is based on a revision of Mozilla 45:
- http://hg.mozilla.org/releases/
-The changes in the patches/ directory were applied.
-
-MDN hosts the latest SpiderMonkey 45 release notes:
- https://developer.mozilla.org/en-US/docs/SpiderMonkey/45
diff --git a/src/third_party/mozjs-45/cobalt_config/include/js-confdefs.h b/src/third_party/mozjs-45/cobalt_config/include/js-confdefs.h
deleted file mode 100644
index 81644a2..0000000
--- a/src/third_party/mozjs-45/cobalt_config/include/js-confdefs.h
+++ /dev/null
@@ -1,53 +0,0 @@
-/* List of defines generated by configure. Included with preprocessor flag,
- * -include, to avoid long list of -D defines on the compile command-line.
- * Do not edit.
- */
-
-#ifndef _JS_CONFDEFS_H_
-#define _JS_CONFDEFS_H_
-
-#include "starboard/configuration.h"
-
-#if SB_SIZE_OF(POINTER) == 8
-#define JS_BYTES_PER_WORD 8
-#define JS_BITS_PER_WORD_LOG2 6
-#else
-#define JS_BYTES_PER_WORD 4
-#define JS_BITS_PER_WORD_LOG2 5
-#endif
-
-#if defined(COBALT_BUILD_TYPE_QA) || defined(COBALT_BUILD_TYPE_GOLD)
-#define RELEASE_BUILD 1
-#endif
-
-#if defined(COBALT_BUILD_TYPE_DEBUG)
-#define DEBUG 1
-#define JS_DEBUG 1
-#endif
-
-// Disabling this will fall back to js_sb_getenv for locale-
-// specific number formatting.
-// This can be removed when ENABLE_INTL_API is enabled, which requires a newer
-// version of ICU.
-#define HAVE_LOCALECONV 1
-
-#define HAVE_VA_COPY 1
-#define VA_COPY SB_VA_COPY
-
-#define MOZILLA_UAVERSION "45.0"
-#define MOZILLA_VERSION "45.0.2"
-#define MOZILLA_VERSION_U 45.0.2
-#define MOZJS_MAJOR_VERSION 0
-#define MOZJS_MINOR_VERSION 2
-
-// #include "js/RequiredDefines.h"
-
-inline char* js_sb_getenv(const char* name) {
- return nullptr;
-}
-
-inline char* js_sb_secure_getenv(const char* name) {
- return nullptr;
-}
-
-#endif /* _JS_CONFDEFS_H_ */
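
The word-size block above derives JS_BYTES_PER_WORD and JS_BITS_PER_WORD_LOG2 from SB_SIZE_OF(POINTER). The relationship between the two values (log2 of the bit width of a pointer-sized word) can be checked with a short Python snippet, shown only as an illustration of the arithmetic:

    import struct

    bytes_per_word = struct.calcsize('P')                       # 8 on 64-bit hosts, 4 on 32-bit
    bits_per_word_log2 = (bytes_per_word * 8).bit_length() - 1  # 6 for 64-bit, 5 for 32-bit
    print(bytes_per_word, bits_per_word_log2)
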
diff --git a/src/third_party/mozjs-45/cobalt_config/include/jscustomallocator.h b/src/third_party/mozjs-45/cobalt_config/include/jscustomallocator.h
deleted file mode 100644
index def0c34..0000000
--- a/src/third_party/mozjs-45/cobalt_config/include/jscustomallocator.h
+++ /dev/null
@@ -1,197 +0,0 @@
-// Copyright 2017 Google Inc. All Rights Reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#ifndef gc_JSCUSTOMALLOCATOR_H
-#define gc_JSCUSTOMALLOCATOR_H
-
-#include <algorithm>
-
-#include "memory_allocator_reporter.h"
-#include "starboard/common/string.h"
-#include "starboard/memory.h"
-
-namespace js {
-namespace oom {
-
-// Code imported from default allocator. These identifiers must be supplied
-// to SpiderMonkey common code.
-
-/*
- * To make testing OOM in certain helper threads more effective,
- * allow restricting the OOM testing to a certain helper thread
- * type. This allows us to fail e.g. in off-thread script parsing
- * without causing an OOM in the main thread first.
- */
-enum ThreadType {
- THREAD_TYPE_NONE = 0, // 0
- THREAD_TYPE_MAIN, // 1
- THREAD_TYPE_ASMJS, // 2
- THREAD_TYPE_ION, // 3
- THREAD_TYPE_PARSE, // 4
- THREAD_TYPE_COMPRESS, // 5
- THREAD_TYPE_GCHELPER, // 6
- THREAD_TYPE_GCPARALLEL, // 7
- THREAD_TYPE_MAX // Used to check shell function arguments
-};
-
-/*
- * Getter/Setter functions to encapsulate mozilla::ThreadLocal,
- * implementation is in jsutil.cpp.
- */
-# if defined(DEBUG) || defined(JS_OOM_BREAKPOINT)
-extern bool InitThreadType(void);
-extern void SetThreadType(ThreadType);
-extern uint32_t GetThreadType(void);
-# else
-inline bool InitThreadType(void) { return true; }
-inline void SetThreadType(ThreadType t) {};
-inline uint32_t GetThreadType(void) { return 0; }
-# endif
-
-} // namespace oom
-} // namespace js
-
-# if defined(DEBUG) || defined(JS_OOM_BREAKPOINT)
-
-/*
- * In order to test OOM conditions, when the testing function
- * oomAfterAllocations COUNT is passed, we fail continuously after the COUNT'th
- * allocation from now.
- */
-extern JS_PUBLIC_DATA(uint32_t) OOM_maxAllocations; /* set in builtin/TestingFunctions.cpp */
-extern JS_PUBLIC_DATA(uint32_t) OOM_counter; /* data race, who cares. */
-extern JS_PUBLIC_DATA(bool) OOM_failAlways;
-
-#define JS_OOM_CALL_BP_FUNC() do {} while(0)
-
-namespace js {
-namespace oom {
-
-extern JS_PUBLIC_DATA(uint32_t) targetThread;
-
-static inline bool
-IsThreadSimulatingOOM()
-{
- return false;
-}
-
-static inline bool
-IsSimulatedOOMAllocation()
-{
- return false;
-}
-
-static inline bool
-ShouldFailWithOOM()
-{
- return false;
-}
-
-} // namespace oom
-} // namespace js
-
-# define JS_OOM_POSSIBLY_FAIL() \
- do { \
- if (js::oom::ShouldFailWithOOM()) \
- return nullptr; \
- } while (0)
-
-# define JS_OOM_POSSIBLY_FAIL_BOOL() \
- do { \
- if (js::oom::ShouldFailWithOOM()) \
- return false; \
- } while (0)
-
-# else // defined(DEBUG) || defined(JS_OOM_BREAKPOINT)
-
-# define JS_OOM_POSSIBLY_FAIL() do {} while(0)
-# define JS_OOM_POSSIBLY_FAIL_BOOL() do {} while(0)
-namespace js {
-namespace oom {
-static inline bool IsSimulatedOOMAllocation() { return false; }
-static inline bool ShouldFailWithOOM() { return false; }
-} // namespace oom
-} // namespace js
-
-# endif // defined(DEBUG) || defined(JS_OOM_BREAKPOINT)
-
-namespace js {
-
-struct MOZ_RAII AutoEnterOOMUnsafeRegion
-{
- MOZ_NORETURN MOZ_COLD void crash(const char* reason);
-};
-
-} // namespace js
-
-static inline void* js_malloc(size_t bytes)
-{
- size_t reservation_bytes = AllocationMetadata::GetReservationBytes(bytes);
- MemoryAllocatorReporter::Get()->UpdateTotalHeapSize(reservation_bytes);
- void* metadata = SbMemoryAllocate(reservation_bytes);
- AllocationMetadata::SetSizeToBaseAddress(metadata, reservation_bytes);
- return AllocationMetadata::GetUserAddressFromBaseAddress(metadata);
-}
-
-static inline void* js_calloc(size_t nmemb, size_t size)
-{
- size_t total_size = nmemb * size;
- void* memory = js_malloc(total_size);
- if (memory) {
- SbMemorySet(memory, 0, total_size);
- }
- return memory;
-}
-
-static inline void* js_calloc(size_t bytes)
-{
- return js_calloc(bytes, 1);
-}
-
-static inline void* js_realloc(void* p, size_t bytes)
-{
- AllocationMetadata* metadata =
- AllocationMetadata::GetMetadataFromUserAddress(p);
- size_t current_size =
- AllocationMetadata::GetSizeOfAllocationFromMetadata(metadata);
- size_t adjusted_size = AllocationMetadata::GetReservationBytes(bytes);
-
- MemoryAllocatorReporter::Get()->UpdateTotalHeapSize(
- static_cast<ssize_t>(adjusted_size - current_size));
- void* new_ptr = SbMemoryReallocate(metadata, adjusted_size);
- AllocationMetadata::SetSizeToBaseAddress(new_ptr, adjusted_size);
- return AllocationMetadata::GetUserAddressFromBaseAddress(new_ptr);
-}
-
-static inline void js_free(void* p)
-{
- if (p == NULL) {
- return;
- }
- AllocationMetadata* metadata =
- AllocationMetadata::GetMetadataFromUserAddress(p);
- MemoryAllocatorReporter::Get()->UpdateTotalHeapSize(-static_cast<ssize_t>(
- AllocationMetadata::GetSizeOfAllocationFromMetadata(metadata)));
- SbMemoryDeallocate(metadata);
-}
-
-static inline char* js_strdup(const char* s)
-{
- size_t length = SbStringGetLength(s) + 1;
- char* new_ptr = reinterpret_cast<char*>(js_malloc(length));
- SbStringCopy(new_ptr, s, length);
- return new_ptr;
-}
-
-#endif // gc_JSCUSTOMALLOCATOR_H
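
The allocator above reserves room for an AllocationMetadata header in front of every block and reports each size delta to MemoryAllocatorReporter, so the total heap size can be read back at any time. Stripped of the Starboard specifics, the bookkeeping pattern looks roughly like this conceptual Python sketch (not the actual API; the 16-byte header size is a made-up stand-in):

    class ReportingAllocator(object):
        """Tracks total reserved heap size, mimicking the metadata-prefix scheme."""

        HEADER_BYTES = 16  # stand-in for the AllocationMetadata reservation

        def __init__(self):
            self.total_heap = 0
            self._reserved = {}   # allocation id -> reserved bytes
            self._next_id = 0

        def malloc(self, nbytes):
            reserved = nbytes + self.HEADER_BYTES
            self.total_heap += reserved                      # UpdateTotalHeapSize(+reserved)
            alloc_id, self._next_id = self._next_id, self._next_id + 1
            self._reserved[alloc_id] = reserved
            return alloc_id

        def realloc(self, alloc_id, nbytes):
            reserved = nbytes + self.HEADER_BYTES
            self.total_heap += reserved - self._reserved[alloc_id]
            self._reserved[alloc_id] = reserved
            return alloc_id

        def free(self, alloc_id):
            self.total_heap -= self._reserved.pop(alloc_id)  # UpdateTotalHeapSize(-size)

    allocator = ReportingAllocator()
    block = allocator.malloc(100)    # total_heap == 116
    allocator.free(block)            # total_heap == 0
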
diff --git a/src/third_party/mozjs-45/config/Makefile.in b/src/third_party/mozjs-45/config/Makefile.in
deleted file mode 100644
index c08f2a7..0000000
--- a/src/third_party/mozjs-45/config/Makefile.in
+++ /dev/null
@@ -1,127 +0,0 @@
-# -*- Makefile -*-
-#
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-# IMPORTANT: Disable NSBUILDROOT for this directory only, otherwise we have
-# a recursive rule for finding nsinstall and the Perl scripts.
-ifdef NSBUILDROOT
-override NSBUILDROOT :=
-endif
-
-include $(topsrcdir)/config/config.mk
-
-# L10n jobs are doing make -C config manually before anything else,
-# and need nsinstall to be built as a consequence.
-ifdef COMPILE_ENVIRONMENT
-export:: host
-
-ifneq (WINNT,$(HOST_OS_ARCH))
-# Ensure nsinstall is atomically created
-nsinstall$(HOST_BIN_SUFFIX): $(HOST_PROGRAM)
- cp $^ $@.tmp
- mv $@.tmp $@
-
-NSINSTALL_EXECUTABLES := nsinstall$(HOST_BIN_SUFFIX)
-NSINSTALL_DEST := $(DIST)/bin
-NSINSTALL_TARGET := host
-INSTALL_TARGETS += NSINSTALL
-endif
-endif
-
-ifndef JS_STANDALONE
-HEADERS_FILES = \
- $(DEPTH)/mozilla-config.h \
- $(NULL)
-HEADERS_DEST := $(DIST)/include
-HEADERS_TARGET := export
-INSTALL_TARGETS += HEADERS
-endif
-
-include $(topsrcdir)/config/rules.mk
-
-ifndef JS_STANDALONE
-ifndef MOZ_PROFILE_USE
-# Generate a new buildid every time we "export" in config... that's only
-# supposed to be once per-build!
-export:: buildid
-
-buildid: FORCE
-ifdef MOZ_BUILD_DATE
- printf '%s' $(MOZ_BUILD_DATE) > buildid
-else
- $(PYTHON) $(topsrcdir)/toolkit/xre/make-platformini.py --print-buildid > buildid
-endif
-endif
-endif
-
-ifdef WRAP_SYSTEM_INCLUDES
-export-preqs = \
- $(call mkdir_deps,system_wrappers) \
- $(NULL)
-
-export:: $(export-preqs)
- $(PYTHON) -m mozbuild.action.preprocessor $(DEFINES) $(ACDEFINES) \
- -DMOZ_TREE_CAIRO=$(MOZ_TREE_CAIRO) \
- -DMOZ_TREE_PIXMAN=$(MOZ_TREE_PIXMAN) \
- -DMOZ_NATIVE_HUNSPELL=$(MOZ_NATIVE_HUNSPELL) \
- -DMOZ_NATIVE_BZ2=$(MOZ_NATIVE_BZ2) \
- -DMOZ_NATIVE_ZLIB=$(MOZ_NATIVE_ZLIB) \
- -DMOZ_NATIVE_PNG=$(MOZ_NATIVE_PNG) \
- -DMOZ_NATIVE_JPEG=$(MOZ_NATIVE_JPEG) \
- -DMOZ_NATIVE_LIBEVENT=$(MOZ_NATIVE_LIBEVENT) \
- -DMOZ_NATIVE_LIBVPX=$(MOZ_NATIVE_LIBVPX) \
- -DMOZ_NATIVE_ICU=$(MOZ_NATIVE_ICU) \
- $(srcdir)/system-headers | $(PERL) $(topsrcdir)/nsprpub/config/make-system-wrappers.pl system_wrappers
- $(INSTALL) system_wrappers $(DIST)
-
-GARBAGE_DIRS += system_wrappers
-endif
-
-ifdef WRAP_STL_INCLUDES
-ifdef GNU_CXX
-stl_compiler = gcc
-else
-ifdef _MSC_VER
-stl_compiler = msvc
-endif
-endif
-endif
-
-ifdef stl_compiler
-STL_WRAPPERS_SENTINEL = $(DIST)/stl_wrappers/sentinel
-
-$(STL_WRAPPERS_SENTINEL): $(srcdir)/make-stl-wrappers.py $(srcdir)/$(stl_compiler)-stl-wrapper.template.h $(srcdir)/stl-headers $(GLOBAL_DEPS)
- $(PYTHON) $(srcdir)/make-stl-wrappers.py stl_wrappers $(stl_compiler) $(srcdir)/$(stl_compiler)-stl-wrapper.template.h $(srcdir)/stl-headers
- $(PYTHON) $(srcdir)/nsinstall.py -t stl_wrappers $(DIST)
- touch $(STL_WRAPPERS_SENTINEL)
-
-export:: $(STL_WRAPPERS_SENTINEL)
-
-GARBAGE += $(STL_WRAPPERS_SENTINEL)
-GARBAGE_DIRS += stl_wrappers
-endif
-
-GARBAGE += \
- $(FINAL_LINK_COMPS) $(FINAL_LINK_LIBS) $(FINAL_LINK_COMP_NAMES) $(srcdir)/*.pyc *.pyc
-
-FORCE:
-
-ifndef JS_STANDALONE
-check-preqs += check-jar-mn
-endif
-check-preqs += check-makefiles
-
-check:: $(check-preqs)
-
-check-jar-mn::
- $(MAKE) -C tests/src-simple check-jar
- $(MAKE) -C tests/src-simple check-flat
- $(MAKE) -C tests/src-simple check-flat USE_EXTENSION_MANIFEST=1
-ifneq (,$(filter-out WINNT,$(OS_ARCH)))
- $(MAKE) -C tests/src-simple check-symlink
-endif
-
-check-makefiles:
- $(MAKE) -C tests/makefiles/autodeps check
diff --git a/src/third_party/mozjs-45/config/Moz/Milestone.pm b/src/third_party/mozjs-45/config/Moz/Milestone.pm
deleted file mode 100644
index 7222032..0000000
--- a/src/third_party/mozjs-45/config/Moz/Milestone.pm
+++ /dev/null
@@ -1,220 +0,0 @@
-#!/usr/bin/perl -w
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-package Moz::Milestone;
-use strict;
-
-use vars qw($officialMilestone
- $milestone);
-
-local $Moz::Milestone::milestone;
-local $Moz::Milestone::officialMilestone;
-
-#
-# Usage: getOfficialMilestone($milestoneFile)
-# Returns full milestone (x.x.x.x[ab12pre+])
-#
-sub getOfficialMilestone($) {
- my $mfile = $_[0];
- open(FILE,"$mfile") ||
- die ("Can't open $mfile for reading!");
-
- my $num = <FILE>;
- while($num =~ /^\s*#/ || $num !~ /^\d/) {
- $num = <FILE>;
- }
-
- close(FILE);
- if ($num !~ /^\d/) { return; }
- chomp($num);
- # Remove extra ^M caused by using dos-mode line-endings
- chop $num if (substr($num, -1, 1) eq "\r");
- $Moz::Milestone::officialMilestone = $num;
- $Moz::Milestone::milestone = &getMilestoneNum;
- return $num;
-}
-
-#
-# Usage: getMilestoneNum($num)
-# Returns: milestone without a + if it exists.
-#
-sub getMilestoneNum {
- if (defined($Moz::Milestone::milestone)) {
- return $Moz::Milestone::milestone;
- }
-
- if (defined($Moz::Milestone::officialMilestone)) {
- $Moz::Milestone::milestone = $Moz::Milestone::officialMilestone;
- } else {
- $Moz::Milestone::milestone = $_[0];
- }
-
- if ($Moz::Milestone::milestone =~ /\+$/) { # for x.x.x+, strip off the +
- $Moz::Milestone::milestone =~ s/\+$//;
- }
-
- return $Moz::Milestone::milestone;
-}
-
-#
-# Usage: getMilestoneQualifier($num)
-# Returns: + if it exists.
-#
-sub getMilestoneQualifier {
- my $milestoneQualifier;
- if (defined($Moz::Milestone::officialMilestone)) {
- $milestoneQualifier = $Moz::Milestone::officialMilestone;
- } else {
- $milestoneQualifier = $_[0];
- }
-
- if ($milestoneQualifier =~ /\+$/) {
- return "+";
- }
-}
-
-sub getMilestoneMajor {
- my $milestoneMajor;
- if (defined($Moz::Milestone::milestone)) {
- $milestoneMajor = $Moz::Milestone::milestone;
- } else {
- $milestoneMajor = $_[0];
- }
- my @parts = split(/\./,$milestoneMajor);
- return $parts[0];
-}
-
-sub getMilestoneMinor {
- my $milestoneMinor;
- if (defined($Moz::Milestone::milestone)) {
- $milestoneMinor = $Moz::Milestone::milestone;
- } else {
- $milestoneMinor = $_[0];
- }
- my @parts = split(/\./,$milestoneMinor);
-
- if ($#parts < 1 ) { return 0; }
- return $parts[1];
-}
-
-sub getMilestoneMini {
- my $milestoneMini;
- if (defined($Moz::Milestone::milestone)) {
- $milestoneMini = $Moz::Milestone::milestone;
- } else {
- $milestoneMini = $_[0];
- }
- my @parts = split(/\./,$milestoneMini);
-
- if ($#parts < 2 ) { return 0; }
- return $parts[2];
-}
-
-sub getMilestoneMicro {
- my $milestoneMicro;
- if (defined($Moz::Milestone::milestone)) {
- $milestoneMicro = $Moz::Milestone::milestone;
- } else {
- $milestoneMicro = $_[0];
- }
- my @parts = split(/\./,$milestoneMicro);
-
- if ($#parts < 3 ) { return 0; }
- return $parts[3];
-}
-
-sub getMilestoneAB {
- my $milestoneAB;
- if (defined($Moz::Milestone::milestone)) {
- $milestoneAB = $Moz::Milestone::milestone;
- } else {
- $milestoneAB = $_[0];
- }
-
- if ($milestoneAB =~ /a/) { return "alpha"; }
- if ($milestoneAB =~ /b/) { return "beta"; }
- return "final";
-}
-
-#
-# Usage: getMilestoneABWithNum($milestoneFile)
-# Returns the alpha and beta tag with its number (a1, a2, b3, ...)
-#
-sub getMilestoneABWithNum {
- my $milestoneABNum;
- if (defined($Moz::Milestone::milestone)) {
- $milestoneABNum = $Moz::Milestone::milestone;
- } else {
- $milestoneABNum = $_[0];
- }
-
- if ($milestoneABNum =~ /([ab]\d+)/) {
- return $1;
- } else {
- return "";
- }
-}
-
-#
-# build_file($template_file,$output_file)
-#
-sub build_file($$) {
- my @FILE;
- my @MILESTONE_PARTS;
- my $MINI_VERSION = 0;
- my $MICRO_VERSION = 0;
- my $OFFICIAL = 0;
- my $QUALIFIER = "";
-
- if (!defined($Moz::Milestone::milestone)) { die("$0: no milestone file set!\n"); }
- @MILESTONE_PARTS = split(/\./, &getMilestoneNum);
- if ($#MILESTONE_PARTS >= 2) {
- $MINI_VERSION = 1;
- } else {
- $MILESTONE_PARTS[2] = 0;
- }
- if ($#MILESTONE_PARTS >= 3) {
- $MICRO_VERSION = 1;
- } else {
- $MILESTONE_PARTS[3] = 0;
- }
- if (! &getMilestoneQualifier) {
- $OFFICIAL = 1;
- } else {
- $QUALIFIER = "+";
- }
-
- if (-e $_[0]) {
- open(FILE, "$_[0]") || die("$0: Can't open $_[0] for reading!\n");
- @FILE = <FILE>;
- close(FILE);
-
- open(FILE, ">$_[1]") || die("$0: Can't open $_[1] for writing!\n");
-
- #
- # There will be more of these based on what we need for files.
- #
- foreach(@FILE) {
- s/__MOZ_MAJOR_VERSION__/$MILESTONE_PARTS[0]/g;
- s/__MOZ_MINOR_VERSION__/$MILESTONE_PARTS[1]/g;
- s/__MOZ_MINI_VERSION__/$MILESTONE_PARTS[2]/g;
- s/__MOZ_MICRO_VERSION__/$MILESTONE_PARTS[3]/g;
- if ($MINI_VERSION) {
- s/__MOZ_OPTIONAL_MINI_VERSION__/.$MILESTONE_PARTS[2]/g;
- }
- if ($MICRO_VERSION) {
- s/__MOZ_OPTIONAL_MICRO_VERSION__/.$MILESTONE_PARTS[3]/g;
- }
-
- print FILE $_;
- }
- close(FILE);
- } else {
- die("$0: $_[0] doesn't exist for autoversioning!\n");
- }
-
-}
-
-1;
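
For readers less familiar with Perl, the accessors above reduce to splitting a milestone string such as '45.0.2' or '45.0a1+' on dots and looking for an a/b qualifier and a trailing '+'. A rough, illustration-only Python equivalent (not used by the build):

    import re

    def parse_milestone(milestone):
        qualifier = '+' if milestone.endswith('+') else ''
        num = milestone.rstrip('+')
        parts = (num.split('.') + ['0', '0', '0'])[:4]
        ab_match = re.search(r'([ab]\d+)', num)
        return {
            'major': parts[0],
            'minor': parts[1],
            'mini': parts[2],
            'micro': parts[3],
            'ab': 'alpha' if 'a' in num else 'beta' if 'b' in num else 'final',
            'ab_with_num': ab_match.group(1) if ab_match else '',
            'qualifier': qualifier,
        }

    print(parse_milestone('45.0.2'))   # major 45, minor 0, mini 2, micro 0, 'final'
    print(parse_milestone('45.0a1+'))  # 'alpha', ab_with_num 'a1', qualifier '+'
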
diff --git a/src/third_party/mozjs-45/config/MozZipFile.py b/src/third_party/mozjs-45/config/MozZipFile.py
deleted file mode 100644
index 337fe05..0000000
--- a/src/third_party/mozjs-45/config/MozZipFile.py
+++ /dev/null
@@ -1,137 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-import os
-import time
-import zipfile
-
-from mozbuild.util import lock_file
-
-
-class ZipFile(zipfile.ZipFile):
- """ Class with methods to open, read, write, close, list zip files.
-
- Subclassing zipfile.ZipFile to allow for overwriting of existing
- entries, though only for writestr, not for write.
- """
- def __init__(self, file, mode="r", compression=zipfile.ZIP_STORED,
- lock = False):
- if lock:
- assert isinstance(file, basestring)
- self.lockfile = lock_file(file + '.lck')
- else:
- self.lockfile = None
-
- if mode == 'a' and lock:
- # appending to a file which doesn't exist fails, but we can't check
-            # existence until we hold the lock
- if (not os.path.isfile(file)) or os.path.getsize(file) == 0:
- mode = 'w'
-
- zipfile.ZipFile.__init__(self, file, mode, compression)
- self._remove = []
- self.end = self.fp.tell()
- self.debug = 0
-
- def writestr(self, zinfo_or_arcname, bytes):
- """Write contents into the archive.
-
- The contents is the argument 'bytes', 'zinfo_or_arcname' is either
- a ZipInfo instance or the name of the file in the archive.
- This method is overloaded to allow overwriting existing entries.
- """
- if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
- zinfo = zipfile.ZipInfo(filename=zinfo_or_arcname,
- date_time=time.localtime(time.time()))
- zinfo.compress_type = self.compression
- # Add some standard UNIX file access permissions (-rw-r--r--).
- zinfo.external_attr = (0x81a4 & 0xFFFF) << 16L
- else:
- zinfo = zinfo_or_arcname
-
- # Now to the point why we overwrote this in the first place,
- # remember the entry numbers if we already had this entry.
- # Optimizations:
- # If the entry to overwrite is the last one, just reuse that.
- # If we store uncompressed and the new content has the same size
- # as the old, reuse the existing entry.
-
- doSeek = False # store if we need to seek to the eof after overwriting
- if self.NameToInfo.has_key(zinfo.filename):
- # Find the last ZipInfo with our name.
- # Last, because that's catching multiple overwrites
- i = len(self.filelist)
- while i > 0:
- i -= 1
- if self.filelist[i].filename == zinfo.filename:
- break
- zi = self.filelist[i]
- if ((zinfo.compress_type == zipfile.ZIP_STORED
- and zi.compress_size == len(bytes))
- or (i + 1) == len(self.filelist)):
- # make sure we're allowed to write, otherwise done by writestr below
- self._writecheck(zi)
- # overwrite existing entry
- self.fp.seek(zi.header_offset)
- if (i + 1) == len(self.filelist):
- # this is the last item in the file, just truncate
- self.fp.truncate()
- else:
- # we need to move to the end of the file afterwards again
- doSeek = True
- # unhook the current zipinfo, the writestr of our superclass
- # will add a new one
- self.filelist.pop(i)
- self.NameToInfo.pop(zinfo.filename)
- else:
- # Couldn't optimize, sadly, just remember the old entry for removal
- self._remove.append(self.filelist.pop(i))
- zipfile.ZipFile.writestr(self, zinfo, bytes)
- self.filelist.sort(lambda l, r: cmp(l.header_offset, r.header_offset))
- if doSeek:
- self.fp.seek(self.end)
- self.end = self.fp.tell()
-
- def close(self):
- """Close the file, and for mode "w" and "a" write the ending
- records.
-
- Overwritten to compact overwritten entries.
- """
- if not self._remove:
- # we don't have anything special to do, let's just call base
- r = zipfile.ZipFile.close(self)
- self.lockfile = None
- return r
-
- if self.fp.mode != 'r+b':
- # adjust file mode if we originally just wrote, now we rewrite
- self.fp.close()
- self.fp = open(self.filename, 'r+b')
- all = map(lambda zi: (zi, True), self.filelist) + \
- map(lambda zi: (zi, False), self._remove)
- all.sort(lambda l, r: cmp(l[0].header_offset, r[0].header_offset))
- # empty _remove for multiple closes
- self._remove = []
-
- lengths = [all[i+1][0].header_offset - all[i][0].header_offset
- for i in xrange(len(all)-1)]
- lengths.append(self.end - all[-1][0].header_offset)
- to_pos = 0
- for (zi, keep), length in zip(all, lengths):
- if not keep:
- continue
- oldoff = zi.header_offset
- # python <= 2.4 has file_offset
- if hasattr(zi, 'file_offset'):
- zi.file_offset = zi.file_offset + to_pos - oldoff
- zi.header_offset = to_pos
- self.fp.seek(oldoff)
- content = self.fp.read(length)
- self.fp.seek(to_pos)
- self.fp.write(content)
- to_pos += length
- self.fp.truncate()
- zipfile.ZipFile.close(self)
- self.lockfile = None
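
The subclass above rewrites entries in place to get "last write wins" behaviour for writestr. Where in-place rewriting is not required, the same effect can be had with the stock zipfile module by rebuilding the archive, which is simpler if slower; a sketch under that assumption (the file names are hypothetical):

    import os
    import zipfile

    def writestr_replacing(path, arcname, data):
        """Write arcname into the archive at path, replacing any existing copy."""
        entries = {}
        if os.path.isfile(path):
            with zipfile.ZipFile(path, 'r') as zf:
                for name in zf.namelist():
                    entries[name] = zf.read(name)
        entries[arcname] = data                       # newest content wins
        with zipfile.ZipFile(path, 'w', zipfile.ZIP_DEFLATED) as zf:
            for name, content in entries.items():
                zf.writestr(name, content)

    writestr_replacing('out.jar', 'chrome.manifest', b'manifest chrome/\n')
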
diff --git a/src/third_party/mozjs-45/config/android-common.mk b/src/third_party/mozjs-45/config/android-common.mk
deleted file mode 100644
index 8244af0..0000000
--- a/src/third_party/mozjs-45/config/android-common.mk
+++ /dev/null
@@ -1,44 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-# Ensure ANDROID_SDK is defined before including this file.
-# We use common android defaults for boot class path and java version.
-ifndef ANDROID_SDK
- $(error ANDROID_SDK must be defined before including android-common.mk)
-endif
-
-# DEBUG_JARSIGNER always debug signs.
-DEBUG_JARSIGNER=$(PYTHON) $(abspath $(topsrcdir)/mobile/android/debug_sign_tool.py) \
- --keytool=$(KEYTOOL) \
- --jarsigner=$(JARSIGNER) \
- $(NULL)
-
-# RELEASE_JARSIGNER release signs if possible.
-ifdef MOZ_SIGN_CMD
-RELEASE_JARSIGNER := $(MOZ_SIGN_CMD) -f jar
-else
-RELEASE_JARSIGNER := $(DEBUG_JARSIGNER)
-endif
-
-# $(1) is the full path to input: foo-debug-unsigned-unaligned.apk.
-# $(2) is the full path to output: foo.apk.
-# Use this like: $(call RELEASE_SIGN_ANDROID_APK,foo-debug-unsigned-unaligned.apk,foo.apk)
-RELEASE_SIGN_ANDROID_APK = \
- cp $(1) $(2)-unaligned.apk && \
- $(RELEASE_JARSIGNER) $(2)-unaligned.apk && \
- $(ZIPALIGN) -f -v 4 $(2)-unaligned.apk $(2) && \
- $(RM) $(2)-unaligned.apk
-
-# For Android, we default to 1.7
-ifndef JAVA_VERSION
- JAVA_VERSION = 1.7
-endif
-
-JAVAC_FLAGS = \
- -target $(JAVA_VERSION) \
- -source $(JAVA_VERSION) \
- -encoding UTF8 \
- -g:source,lines \
- -Werror \
- $(NULL)
diff --git a/src/third_party/mozjs-45/config/autoconf-js.mk.in b/src/third_party/mozjs-45/config/autoconf-js.mk.in
deleted file mode 100644
index ae4ad2c..0000000
--- a/src/third_party/mozjs-45/config/autoconf-js.mk.in
+++ /dev/null
@@ -1,6 +0,0 @@
-ifndef INCLUDED_AUTOCONF_MK
-INCLUDED_AUTOCONF_MK = autoconf-js.mk
-include $(DEPTH)/config/emptyvars-js.mk
-@ALLSUBSTS@
-include $(topsrcdir)/config/baseconfig.mk
-endif
diff --git a/src/third_party/mozjs-45/config/autoconf.mk.in b/src/third_party/mozjs-45/config/autoconf.mk.in
deleted file mode 100644
index fb52bc1..0000000
--- a/src/third_party/mozjs-45/config/autoconf.mk.in
+++ /dev/null
@@ -1,6 +0,0 @@
-ifndef INCLUDED_AUTOCONF_MK
-INCLUDED_AUTOCONF_MK = autoconf.mk
-include $(DEPTH)/config/emptyvars.mk
-@ALLSUBSTS@
-include $(topsrcdir)/config/baseconfig.mk
-endif
diff --git a/src/third_party/mozjs-45/config/baseconfig.mk b/src/third_party/mozjs-45/config/baseconfig.mk
deleted file mode 100644
index 86920aa..0000000
--- a/src/third_party/mozjs-45/config/baseconfig.mk
+++ /dev/null
@@ -1,55 +0,0 @@
-# This file is normally included by autoconf.mk, but it is also used
-# directly in python/mozbuild/mozbuild/base.py for gmake validation.
-# We thus use INCLUDED_AUTOCONF_MK to enable/disable some parts depending on
-# whether a normal build is happening or whether the check is running.
-includedir := $(includedir)/$(MOZ_APP_NAME)-$(MOZ_APP_VERSION)
-idldir = $(datadir)/idl/$(MOZ_APP_NAME)-$(MOZ_APP_VERSION)
-installdir = $(libdir)/$(MOZ_APP_NAME)-$(MOZ_APP_VERSION)
-sdkdir = $(libdir)/$(MOZ_APP_NAME)-devel-$(MOZ_APP_VERSION)
-ifeq (.,$(DEPTH))
-DIST = dist
-else
-DIST = $(DEPTH)/dist
-endif
-
-# We do magic with OBJ_SUFFIX in config.mk, the following ensures we don't
-# manually use it before config.mk inclusion
-_OBJ_SUFFIX := $(OBJ_SUFFIX)
-OBJ_SUFFIX = $(error config/config.mk needs to be included before using OBJ_SUFFIX)
-
-ifeq ($(HOST_OS_ARCH),WINNT)
-# We only support building with a non-msys gnu make version
-# strictly above 4.0.
-ifdef .PYMAKE
-$(error Pymake is no longer supported. Please upgrade to MozillaBuild 1.9 or newer and build with 'mach' or 'mozmake')
-endif
-
-ifeq (a,$(firstword a$(subst /, ,$(abspath .))))
-$(error MSYS make is not supported)
-endif
-# 4.0- happens to be greater than 4.0, lower than the mozmake version,
-# and lower than 4.0.1 or 4.1, whatever next version of gnu make will
-# be released.
-ifneq (4.0-,$(firstword $(sort 4.0- $(MAKE_VERSION))))
-$(error Make version too old. Only versions strictly greater than 4.0 are supported.)
-endif
-
-ifdef INCLUDED_AUTOCONF_MK
-ifeq (a,$(firstword a$(subst /, ,$(srcdir))))
-$(error MSYS-style srcdir are not supported for Windows builds.)
-endif
-endif
-endif # WINNT
-
-ifndef INCLUDED_AUTOCONF_MK
-default::
-else
-TIERS := export $(if $(COMPILE_ENVIRONMENT),compile )misc libs tools
-endif
-
-# These defines are used to support the twin-topsrcdir model for comm-central.
-ifdef MOZILLA_SRCDIR
- MOZILLA_DIR = $(MOZILLA_SRCDIR)
-else
- MOZILLA_DIR = $(topsrcdir)
-endif
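
The GNU make version check above relies on $(sort) comparing strings lexicographically: '4.0-' sorts after plain '4.0' and after every 3.x, but before '4.0.1', '4.1' and anything newer. The same comparison expressed in Python, for illustration only:

    def make_version_new_enough(make_version):
        # Mirrors: ifneq (4.0-,$(firstword $(sort 4.0- $(MAKE_VERSION))))
        return sorted(['4.0-', make_version])[0] == '4.0-'

    print(make_version_new_enough('3.81'))   # False: too old
    print(make_version_new_enough('4.0'))    # False: only versions strictly above 4.0 pass
    print(make_version_new_enough('4.0.1'))  # True
    print(make_version_new_enough('4.1'))    # True
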
diff --git a/src/third_party/mozjs-45/config/check_macroassembler_style.py b/src/third_party/mozjs-45/config/check_macroassembler_style.py
deleted file mode 100644
index 1cd0c71..0000000
--- a/src/third_party/mozjs-45/config/check_macroassembler_style.py
+++ /dev/null
@@ -1,276 +0,0 @@
-# vim: set ts=8 sts=4 et sw=4 tw=99:
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-#----------------------------------------------------------------------------
-# This script checks that SpiderMonkey MacroAssembler methods are properly
-# annotated.
-#
-# The MacroAssembler has one interface for all platforms, but it might have one
-# definition per platform. The code of the MacroAssembler uses a macro to
-# annotate the method declarations, in order to delete the function if it is not
-# present on the current platform, and also to locate the files in which the
-# methods are defined.
-#
-# This script scans the MacroAssembler.h header, for method declarations.
-# It also scans MacroAssembler-/arch/.cpp, MacroAssembler-/arch/-inl.h, and
-# MacroAssembler-inl.h for method definitions. The results of both scans are
-# normalized and compared to determine whether the MacroAssembler.h header has
-# proper method annotations.
-#----------------------------------------------------------------------------
-
-from __future__ import print_function
-
-import difflib
-import os
-import re
-import subprocess
-import sys
-from check_utils import get_all_toplevel_filenames
-
-architecture_independent = set([ 'generic' ])
-all_architecture_names = set([ 'x86', 'x64', 'arm', 'arm64', 'mips32', 'mips64' ])
-all_shared_architecture_names = set([ 'x86_shared', 'mips_shared', 'arm', 'arm64' ])
-
-reBeforeArg = "(?<=[(,\s])"
-reArgType = "(?P<type>[\w\s:*&]+)"
-reArgName = "(?P<name>\s\w+)"
-reArgDefault = "(?P<default>(?:\s=[^,)]+)?)"
-reAfterArg = "(?=[,)])"
-reMatchArg = re.compile(reBeforeArg + reArgType + reArgName + reArgDefault + reAfterArg)
-
-def get_normalized_signatures(signature, fileAnnot = None):
- # Remove semicolon.
- signature = signature.replace(';', ' ')
- # Normalize spaces.
- signature = re.sub(r'\s+', ' ', signature).strip()
- # Match arguments, and keep only the type.
- signature = reMatchArg.sub('\g<type>', signature)
- # Remove class name
- signature = signature.replace('MacroAssembler::', '')
-
- # Extract list of architectures
- archs = ['generic']
- if fileAnnot:
- archs = [fileAnnot['arch']]
-
- if 'DEFINED_ON(' in signature:
- archs = re.sub(r'.*DEFINED_ON\((?P<archs>[^()]*)\).*', '\g<archs>', signature).split(',')
- archs = [a.strip() for a in archs]
- signature = re.sub(r'\s+DEFINED_ON\([^()]*\)', '', signature)
-
- elif 'PER_ARCH' in signature:
- archs = all_architecture_names
- signature = re.sub(r'\s+PER_ARCH', '', signature)
-
- elif 'PER_SHARED_ARCH' in signature:
- archs = all_shared_architecture_names
- signature = re.sub(r'\s+PER_SHARED_ARCH', '', signature)
-
- else:
- # No signature annotation, the list of architectures remains unchanged.
- pass
-
- # Extract inline annotation
- inline = False
- if fileAnnot:
- inline = fileAnnot['inline']
-
- if 'inline ' in signature:
- signature = re.sub(r'inline\s+', '', signature)
- inline = True
-
- inlinePrefx = ''
- if inline:
- inlinePrefx = 'inline '
- signatures = [
- { 'arch': a, 'sig': inlinePrefx + signature }
- for a in archs
- ]
-
- return signatures
-
-file_suffixes = set([
- a.replace('_', '-') for a in
- all_architecture_names.union(all_shared_architecture_names)
-])
-def get_file_annotation(filename):
- origFilename = filename
- filename = filename.split('/')[-1]
-
- inline = False
- if filename.endswith('.cpp'):
- filename = filename[:-len('.cpp')]
- elif filename.endswith('-inl.h'):
- inline = True
- filename = filename[:-len('-inl.h')]
- else:
- raise Exception('unknown file name', origFilename)
-
- arch = 'generic'
- for suffix in file_suffixes:
- if filename == 'MacroAssembler-' + suffix:
- arch = suffix
- break
-
- return {
- 'inline': inline,
- 'arch': arch.replace('-', '_')
- }
-
-def get_macroassembler_definitions(filename):
- try:
- fileAnnot = get_file_annotation(filename)
- except:
- return []
-
- style_section = False
- code_section = False
- lines = ''
- signatures = []
- with open(os.path.join('../..', filename)) as f:
- for line in f:
- if '//{{{ check_macroassembler_style' in line:
- style_section = True
- elif '//}}} check_macroassembler_style' in line:
- style_section = False
- if not style_section:
- continue
-
- line = re.sub(r'//.*', '', line)
- if line.startswith('{'):
- if 'MacroAssembler::' in lines:
- signatures.extend(get_normalized_signatures(lines, fileAnnot))
- code_section = True
- continue
- if line.startswith('}'):
- code_section = False
- lines = ''
- continue
- if code_section:
- continue
-
- if len(line.strip()) == 0:
- lines = ''
- continue
- lines = lines + line
- # Continue until we have a complete declaration
- if '{' not in lines:
- continue
- # Skip variable declarations
- if ')' not in lines:
- lines = ''
- continue
-
- return signatures
-
-def get_macroassembler_declaration(filename):
- style_section = False
- lines = ''
- signatures = []
- with open(os.path.join('../..', filename)) as f:
- for line in f:
- if '//{{{ check_macroassembler_style' in line:
- style_section = True
- elif '//}}} check_macroassembler_style' in line:
- style_section = False
- if not style_section:
- continue
-
- line = re.sub(r'//.*', '', line)
- if len(line.strip()) == 0:
- lines = ''
- continue
- lines = lines + line
- # Continue until we have a complete declaration
- if ';' not in lines:
- continue
- # Skip variable declarations
- if ')' not in lines:
- lines = ''
- continue
-
- signatures.extend(get_normalized_signatures(lines))
- lines = ''
-
- return signatures
-
-def append_signatures(d, sigs):
- for s in sigs:
- if s['sig'] not in d:
- d[s['sig']] = []
- d[s['sig']].append(s['arch']);
- return d
-
-def generate_file_content(signatures):
- output = []
- for s in sorted(signatures.keys()):
- archs = set(sorted(signatures[s]))
- if len(archs.symmetric_difference(architecture_independent)) == 0:
- output.append(s + ';\n')
- if s.startswith('inline'):
- output.append(' is defined in MacroAssembler-inl.h\n')
- else:
- output.append(' is defined in MacroAssembler.cpp\n')
- else:
- if len(archs.symmetric_difference(all_architecture_names)) == 0:
- output.append(s + ' PER_ARCH;\n')
- elif len(archs.symmetric_difference(all_shared_architecture_names)) == 0:
- output.append(s + ' PER_SHARED_ARCH;\n')
- else:
- output.append(s + ' DEFINED_ON(' + ', '.join(archs) + ');\n')
- for a in archs:
- a = a.replace('_', '-')
- masm = '%s/MacroAssembler-%s' % (a, a)
- if s.startswith('inline'):
- output.append(' is defined in %s-inl.h\n' % masm)
- else:
- output.append(' is defined in %s.cpp\n' % masm)
- return output
-
-def check_style():
- # We read from the header file the signature of each function.
- decls = dict() # type: dict(signature => ['x86', 'x64'])
-
- # We infer from each file the signature of each MacroAssembler function.
- defs = dict() # type: dict(signature => ['x86', 'x64'])
-
- # Select the appropriate files.
- for filename in get_all_toplevel_filenames():
- if not filename.startswith('js/src/jit/'):
- continue
- if 'MacroAssembler' not in filename:
- continue
-
- if filename.endswith('MacroAssembler.h'):
- decls = append_signatures(decls, get_macroassembler_declaration(filename))
- else:
- defs = append_signatures(defs, get_macroassembler_definitions(filename))
-
- # Compare declarations and definitions output.
- difflines = difflib.unified_diff(generate_file_content(decls),
- generate_file_content(defs),
- fromfile='check_macroassembler_style.py declared syntax',
- tofile='check_macroassembler_style.py found definitions')
- ok = True
- for diffline in difflines:
- ok = False
- print(diffline, end='')
-
- return ok
-
-
-def main():
- ok = check_style()
-
- if ok:
- print('TEST-PASS | check_macroassembler_style.py | ok')
- else:
- print('TEST-UNEXPECTED-FAIL | check_macroassembler_style.py | actual output does not match expected output; diff is above')
-
- sys.exit(0 if ok else 1)
-
-
-if __name__ == '__main__':
- main()
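
To make the annotation handling above concrete: a declaration carrying a DEFINED_ON(...) suffix is expanded into one signature per listed architecture and the suffix is stripped, which is what get_normalized_signatures() does with its two regular expressions. A self-contained illustration of just that step (the method signature here is invented for the example):

    import re

    sig = 'void branchTruncateDouble(FloatRegister src, Register dest) DEFINED_ON(x86, x64, arm)'

    archs = [a.strip() for a in
             re.sub(r'.*DEFINED_ON\((?P<archs>[^()]*)\).*', r'\g<archs>', sig).split(',')]
    plain = re.sub(r'\s+DEFINED_ON\([^()]*\)', '', sig)

    print(archs)   # ['x86', 'x64', 'arm']
    print(plain)   # 'void branchTruncateDouble(FloatRegister src, Register dest)'
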
diff --git a/src/third_party/mozjs-45/config/check_source_count.py b/src/third_party/mozjs-45/config/check_source_count.py
deleted file mode 100755
index e347e7a..0000000
--- a/src/third_party/mozjs-45/config/check_source_count.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/env python
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-
-# Usage: check_source_count.py SEARCH_TERM COUNT ERROR_LOCATION REPLACEMENT [FILES...]
-# Checks that FILES contains exactly COUNT matches of SEARCH_TERM. If it does
-# not, an error message is printed, quoting ERROR_LOCATION, which should
-# probably be the filename and line number of the erroneous call to
-# check_source_count.py.
-from __future__ import print_function
-import sys
-import os
-import re
-
-search_string = sys.argv[1]
-expected_count = int(sys.argv[2])
-error_location = sys.argv[3]
-replacement = sys.argv[4]
-files = sys.argv[5:]
-
-details = {}
-
-count = 0
-for f in files:
- text = file(f).read()
- match = re.findall(search_string, text)
- if match:
- num = len(match)
- count += num
- details[f] = num
-
-if count == expected_count:
- print("TEST-PASS | check_source_count.py {0} | {1}"
- .format(search_string, expected_count))
-
-else:
- print("TEST-UNEXPECTED-FAIL | check_source_count.py {0} | "
- .format(search_string),
- end='')
- if count < expected_count:
- print("There are fewer occurrences of /{0}/ than expected. "
- "This may mean that you have removed some, but forgotten to "
- "account for it {1}.".format(search_string, error_location))
- else:
- print("There are more occurrences of /{0}/ than expected. We're trying "
- "to prevent an increase in the number of {1}'s, using {2} if "
- "possible. If it is unavoidable, you should update the expected "
- "count {3}.".format(search_string, search_string, replacement,
- error_location))
-
- print("Expected: {0}; found: {1}".format(expected_count, count))
- for k in sorted(details):
-        print("Found {0} occurrences in {1}".format(details[k],k))
- sys.exit(-1)
-
diff --git a/src/third_party/mozjs-45/config/check_spidermonkey_style.py b/src/third_party/mozjs-45/config/check_spidermonkey_style.py
deleted file mode 100644
index aa765d6..0000000
--- a/src/third_party/mozjs-45/config/check_spidermonkey_style.py
+++ /dev/null
@@ -1,584 +0,0 @@
-# vim: set ts=8 sts=4 et sw=4 tw=99:
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-#----------------------------------------------------------------------------
-# This script checks various aspects of SpiderMonkey code style. The current checks are as
-# follows.
-#
-# We check the following things in headers.
-#
-# - No cyclic dependencies.
-#
-# - No normal header should #include an inlines.h/-inl.h file.
-#
-# - #ifndef wrappers should have the right form. (XXX: not yet implemented)
-# - Every header file should have one.
-# - The guard name used should be appropriate for the filename.
-#
-# We check the following things in all files.
-#
-# - #includes should have full paths, e.g. "jit/Ion.h", not "Ion.h".
-#
-# - #includes should use the appropriate form for system headers (<...>) and
-# local headers ("...").
-#
-# - #includes should be ordered correctly.
-# - Each one should be in the correct section.
-# - Alphabetical order should be used within sections.
-# - Sections should be in the right order.
-# Note that the presence of #if/#endif blocks complicates things, to the
-# point that it's not always clear where a conditionally-compiled #include
-# statement should go, even to a human. Therefore, we check the #include
-# statements within each #if/#endif block (including nested ones) in
-# isolation, but don't try to do any order checking between such blocks.
-#----------------------------------------------------------------------------
-
-from __future__ import print_function
-
-import difflib
-import os
-import re
-import subprocess
-import sys
-import traceback
-from check_utils import get_all_toplevel_filenames
-
-# We don't bother checking files in these directories, because they're (a) auxiliary or (b)
-# imported code that doesn't follow our coding style.
-ignored_js_src_dirs = [
- 'js/src/config/', # auxiliary stuff
- 'js/src/ctypes/libffi/', # imported code
- 'js/src/devtools/', # auxiliary stuff
- 'js/src/editline/', # imported code
- 'js/src/gdb/', # auxiliary stuff
- 'js/src/vtune/' # imported code
-]
-
-# We ignore #includes of these files, because they don't follow the usual rules.
-included_inclnames_to_ignore = set([
- 'ffi.h', # generated in ctypes/libffi/
- 'devtools/sharkctl.h', # we ignore devtools/ in general
- 'devtools/Instruments.h', # we ignore devtools/ in general
- 'double-conversion.h', # strange MFBT case
- 'javascript-trace.h', # generated in $OBJDIR if HAVE_DTRACE is defined
- 'jsautokw.h', # generated in $OBJDIR
- 'jscustomallocator.h', # provided by embedders; allowed to be missing
- 'js-config.h', # generated in $OBJDIR
- 'pratom.h', # NSPR
- 'prcvar.h', # NSPR
- 'prerror.h', # NSPR
- 'prinit.h', # NSPR
- 'prlink.h', # NSPR
- 'prlock.h', # NSPR
- 'prprf.h', # NSPR
- 'prthread.h', # NSPR
- 'prtypes.h', # NSPR
- 'selfhosted.out.h', # generated in $OBJDIR
- 'shellmoduleloader.out.h', # generated in $OBJDIR
- 'unicode/locid.h', # ICU
- 'unicode/numsys.h', # ICU
- 'unicode/timezone.h', # ICU
- 'unicode/ucal.h', # ICU
- 'unicode/uclean.h', # ICU
- 'unicode/ucol.h', # ICU
- 'unicode/udat.h', # ICU
- 'unicode/udatpg.h', # ICU
- 'unicode/uenum.h', # ICU
- 'unicode/unorm.h', # ICU
- 'unicode/unum.h', # ICU
- 'unicode/ustring.h', # ICU
- 'unicode/utypes.h', # ICU
- 'vtune/VTuneWrapper.h' # VTune
-])
-
-# These files have additional constraints on where they are #included, so we
-# ignore #includes of them when checking #include ordering.
-oddly_ordered_inclnames = set([
- 'ctypes/typedefs.h', # Included multiple times in the body of ctypes/CTypes.h
- 'jsautokw.h', # Included in the body of frontend/TokenStream.h
- 'jswin.h', # Must be #included before <psapi.h>
- 'machine/endian.h', # Must be included after <sys/types.h> on BSD
- 'winbase.h', # Must precede other system headers(?)
- 'windef.h' # Must precede other system headers(?)
-])
-
-# The files in tests/style/ contain code that fails this checking in various
-# ways. Here is the output we expect. If the actual output differs from
-# this, one of the following must have happened.
-# - New SpiderMonkey code violates one of the checked rules.
-# - The tests/style/ files have changed without expected_output being changed
-# accordingly.
-# - This script has been broken somehow.
-#
-expected_output = '''\
-js/src/tests/style/BadIncludes2.h:1: error:
- vanilla header includes an inline-header file "tests/style/BadIncludes2-inl.h"
-
-js/src/tests/style/BadIncludes.h:3: error:
- the file includes itself
-
-js/src/tests/style/BadIncludes.h:6: error:
- "BadIncludes2.h" is included using the wrong path;
- did you forget a prefix, or is the file not yet committed?
-
-js/src/tests/style/BadIncludes.h:8: error:
- <tests/style/BadIncludes2.h> should be included using
- the #include "..." form
-
-js/src/tests/style/BadIncludes.h:10: error:
- "stdio.h" is included using the wrong path;
- did you forget a prefix, or is the file not yet committed?
-
-js/src/tests/style/BadIncludesOrder-inl.h:5:6: error:
- "vm/Interpreter-inl.h" should be included after "jsscriptinlines.h"
-
-js/src/tests/style/BadIncludesOrder-inl.h:6:7: error:
- "jsscriptinlines.h" should be included after "js/Value.h"
-
-js/src/tests/style/BadIncludesOrder-inl.h:7:8: error:
- "js/Value.h" should be included after "ds/LifoAlloc.h"
-
-js/src/tests/style/BadIncludesOrder-inl.h:8:9: error:
- "ds/LifoAlloc.h" should be included after "jsapi.h"
-
-js/src/tests/style/BadIncludesOrder-inl.h:9:10: error:
- "jsapi.h" should be included after <stdio.h>
-
-js/src/tests/style/BadIncludesOrder-inl.h:10:11: error:
- <stdio.h> should be included after "mozilla/HashFunctions.h"
-
-js/src/tests/style/BadIncludesOrder-inl.h:27:28: error:
- "jsobj.h" should be included after "jsfun.h"
-
-(multiple files): error:
- header files form one or more cycles
-
- tests/style/HeaderCycleA1.h
- -> tests/style/HeaderCycleA2.h
- -> tests/style/HeaderCycleA3.h
- -> tests/style/HeaderCycleA1.h
-
- tests/style/HeaderCycleB1-inl.h
- -> tests/style/HeaderCycleB2-inl.h
- -> tests/style/HeaderCycleB3-inl.h
- -> tests/style/HeaderCycleB4-inl.h
- -> tests/style/HeaderCycleB1-inl.h
- -> tests/style/jsheadercycleB5inlines.h
- -> tests/style/HeaderCycleB1-inl.h
- -> tests/style/HeaderCycleB4-inl.h
-
-'''.splitlines(True)
-
-actual_output = []
-
-
-def out(*lines):
- for line in lines:
- actual_output.append(line + '\n')
-
-
-def error(filename, linenum, *lines):
- location = filename
- if linenum is not None:
- location += ':' + str(linenum)
- out(location + ': error:')
- for line in (lines):
- out(' ' + line)
- out('')
-
-
-class FileKind(object):
- C = 1
- CPP = 2
- INL_H = 3
- H = 4
- TBL = 5
- MSG = 6
-
- @staticmethod
- def get(filename):
- if filename.endswith('.c'):
- return FileKind.C
-
- if filename.endswith('.cpp'):
- return FileKind.CPP
-
- if filename.endswith(('inlines.h', '-inl.h')):
- return FileKind.INL_H
-
- if filename.endswith('.h'):
- return FileKind.H
-
- if filename.endswith('.tbl'):
- return FileKind.TBL
-
- if filename.endswith('.msg'):
- return FileKind.MSG
-
- error(filename, None, 'unknown file kind')
-
-
-def check_style():
- # We deal with two kinds of name.
- # - A "filename" is a full path to a file from the repository root.
- # - An "inclname" is how a file is referred to in a #include statement.
- #
- # Examples (filename -> inclname)
- # - "mfbt/Attributes.h" -> "mozilla/Attributes.h"
- # - "mfbt/decimal/Decimal.h -> "mozilla/Decimal.h"
- # - "js/public/Vector.h" -> "js/Vector.h"
- # - "js/src/vm/String.h" -> "vm/String.h"
-
- mfbt_inclnames = set() # type: set(inclname)
- mozalloc_inclnames = set() # type: set(inclname)
- js_names = dict() # type: dict(filename, inclname)
-
- # Select the appropriate files.
- for filename in get_all_toplevel_filenames():
- if filename.startswith('mfbt/') and filename.endswith('.h'):
- inclname = 'mozilla/' + filename.split('/')[-1]
- mfbt_inclnames.add(inclname)
-
- if filename.startswith('memory/mozalloc/') and filename.endswith('.h'):
- inclname = 'mozilla/' + filename.split('/')[-1]
- mozalloc_inclnames.add(inclname)
-
- if filename.startswith('js/public/') and filename.endswith('.h'):
- inclname = 'js/' + filename[len('js/public/'):]
- js_names[filename] = inclname
-
- if filename.startswith('js/src/') and \
- not filename.startswith(tuple(ignored_js_src_dirs)) and \
- filename.endswith(('.c', '.cpp', '.h', '.tbl', '.msg')):
- inclname = filename[len('js/src/'):]
- js_names[filename] = inclname
-
- all_inclnames = mfbt_inclnames | mozalloc_inclnames | set(js_names.values())
-
- edges = dict() # type: dict(inclname, set(inclname))
-
- # We don't care what's inside the MFBT and MOZALLOC files, but because they
- # are #included from JS files we have to add them to the inclusion graph.
- for inclname in mfbt_inclnames | mozalloc_inclnames:
- edges[inclname] = set()
-
- # Process all the JS files.
- for filename in js_names.keys():
- inclname = js_names[filename]
- file_kind = FileKind.get(filename)
- if file_kind == FileKind.C or file_kind == FileKind.CPP or \
- file_kind == FileKind.H or file_kind == FileKind.INL_H:
- included_h_inclnames = set() # type: set(inclname)
-
- # This script is run in js/src/, so prepend '../../' to get to the root of the Mozilla
- # source tree.
- with open(os.path.join('../..', filename)) as f:
- do_file(filename, inclname, file_kind, f, all_inclnames, included_h_inclnames)
-
- edges[inclname] = included_h_inclnames
-
- find_cycles(all_inclnames, edges)
-
- # Compare expected and actual output.
- difflines = difflib.unified_diff(expected_output, actual_output,
- fromfile='check_spider_monkey_style.py expected output',
- tofile='check_spider_monkey_style.py actual output')
- ok = True
- for diffline in difflines:
- ok = False
- print(diffline, end='')
-
- return ok
-
-
-def module_name(name):
- '''Strip the trailing .cpp, .h, inlines.h or -inl.h from a filename.'''
-
- return name.replace('inlines.h', '').replace('-inl.h', '').replace('.h', '').replace('.cpp', '')
-
-
-def is_module_header(enclosing_inclname, header_inclname):
- '''Determine if an included name is the "module header", i.e. should be
- first in the file.'''
-
- module = module_name(enclosing_inclname)
-
- # Normal case, e.g. module == "foo/Bar", header_inclname == "foo/Bar.h".
- if module == module_name(header_inclname):
- return True
-
- # A public header, e.g. module == "foo/Bar", header_inclname == "js/Bar.h".
- m = re.match(r'js\/(.*)\.h', header_inclname)
- if m is not None and module.endswith('/' + m.group(1)):
- return True
-
- return False
-
-
-class Include(object):
- '''Important information for a single #include statement.'''
-
- def __init__(self, inclname, linenum, is_system):
- self.inclname = inclname
- self.linenum = linenum
- self.is_system = is_system
-
- def isLeaf(self):
- return True
-
- def section(self, enclosing_inclname):
- '''Identify which section inclname belongs to.
-
- The section numbers are as follows.
- 0. Module header (e.g. jsfoo.h or jsfooinlines.h within jsfoo.cpp)
- 1. mozilla/Foo.h
- 2. <foo.h> or <foo>
- 3. jsfoo.h, prmjtime.h, etc
- 4. foo/Bar.h
- 5. jsfooinlines.h
- 6. foo/Bar-inl.h
- 7. non-.h, e.g. *.tbl, *.msg
- '''
-
- if self.is_system:
- return 2
-
- if not self.inclname.endswith('.h'):
- return 7
-
- # A couple of modules have the .h file in js/ and the .cpp file elsewhere and so need
- # special handling.
- if is_module_header(enclosing_inclname, self.inclname):
- return 0
-
- if '/' in self.inclname:
- if self.inclname.startswith('mozilla/'):
- return 1
-
- if self.inclname.endswith('-inl.h'):
- return 6
-
- return 4
-
- if self.inclname.endswith('inlines.h'):
- return 5
-
- return 3
-
- def quote(self):
- if self.is_system:
- return '<' + self.inclname + '>'
- else:
- return '"' + self.inclname + '"'
-
-
-class HashIfBlock(object):
- '''Important information about a #if/#endif block.
-
- A #if/#endif block is the contents of a #if/#endif (or similar) section.
- The top-level block, which is not within a #if/#endif pair, is also
- considered a block.
-
- Each leaf is either an Include (representing a #include), or another
- nested HashIfBlock.'''
- def __init__(self):
- self.kids = []
-
- def isLeaf(self):
- return False
-
-
-def do_file(filename, inclname, file_kind, f, all_inclnames, included_h_inclnames):
- block_stack = [HashIfBlock()]
-
-    # Extract the #include statements as a tree of HashIfBlocks and Includes.
- for linenum, line in enumerate(f, start=1):
- # We're only interested in lines that contain a '#'.
- if not '#' in line:
- continue
-
- # Look for a |#include "..."| line.
- m = re.match(r'\s*#\s*include\s+"([^"]*)"', line)
- if m is not None:
- block_stack[-1].kids.append(Include(m.group(1), linenum, False))
-
- # Look for a |#include <...>| line.
- m = re.match(r'\s*#\s*include\s+<([^>]*)>', line)
- if m is not None:
- block_stack[-1].kids.append(Include(m.group(1), linenum, True))
-
- # Look for a |#{if,ifdef,ifndef}| line.
- m = re.match(r'\s*#\s*(if|ifdef|ifndef)\b', line)
- if m is not None:
- # Open a new block.
- new_block = HashIfBlock()
- block_stack[-1].kids.append(new_block)
- block_stack.append(new_block)
-
- # Look for a |#{elif,else}| line.
- m = re.match(r'\s*#\s*(elif|else)\b', line)
- if m is not None:
- # Close the current block, and open an adjacent one.
- block_stack.pop()
- new_block = HashIfBlock()
- block_stack[-1].kids.append(new_block)
- block_stack.append(new_block)
-
- # Look for a |#endif| line.
- m = re.match(r'\s*#\s*endif\b', line)
- if m is not None:
- # Close the current block.
- block_stack.pop()
-
- def check_include_statement(include):
- '''Check the style of a single #include statement.'''
-
- if include.is_system:
- # Check it is not a known local file (in which case it's probably a system header).
- if include.inclname in included_inclnames_to_ignore or \
- include.inclname in all_inclnames:
- error(filename, include.linenum,
- include.quote() + ' should be included using',
- 'the #include "..." form')
-
- else:
- if include.inclname not in included_inclnames_to_ignore:
- included_kind = FileKind.get(include.inclname)
-
- # Check the #include path has the correct form.
- if include.inclname not in all_inclnames:
- error(filename, include.linenum,
- include.quote() + ' is included using the wrong path;',
- 'did you forget a prefix, or is the file not yet committed?')
-
- # Record inclusions of .h files for cycle detection later.
- # (Exclude .tbl and .msg files.)
- elif included_kind == FileKind.H or included_kind == FileKind.INL_H:
- included_h_inclnames.add(include.inclname)
-
- # Check a H file doesn't #include an INL_H file.
- if file_kind == FileKind.H and included_kind == FileKind.INL_H:
- error(filename, include.linenum,
- 'vanilla header includes an inline-header file ' + include.quote())
-
- # Check a file doesn't #include itself. (We do this here because the cycle
- # detection below doesn't detect this case.)
- if inclname == include.inclname:
- error(filename, include.linenum, 'the file includes itself')
-
- def check_includes_order(include1, include2):
- '''Check the ordering of two #include statements.'''
-
- if include1.inclname in oddly_ordered_inclnames or \
- include2.inclname in oddly_ordered_inclnames:
- return
-
- section1 = include1.section(inclname)
- section2 = include2.section(inclname)
- if (section1 > section2) or \
- ((section1 == section2) and (include1.inclname.lower() > include2.inclname.lower())):
- error(filename, str(include1.linenum) + ':' + str(include2.linenum),
- include1.quote() + ' should be included after ' + include2.quote())
-
- # Check the extracted #include statements, both individually, and the ordering of
- # adjacent pairs that live in the same block.
- def pair_traverse(prev, this):
- if this.isLeaf():
- check_include_statement(this)
- if prev is not None and prev.isLeaf():
- check_includes_order(prev, this)
- else:
- for prev2, this2 in zip([None] + this.kids[0:-1], this.kids):
- pair_traverse(prev2, this2)
-
- pair_traverse(None, block_stack[-1])
-
-
-def find_cycles(all_inclnames, edges):
- '''Find and draw any cycles.'''
-
- SCCs = tarjan(all_inclnames, edges)
-
- # The various sorted() calls below ensure the output is deterministic.
-
- def draw_SCC(c):
- cset = set(c)
- drawn = set()
- def draw(v, indent):
- out(' ' * indent + ('-> ' if indent else ' ') + v)
- if v in drawn:
- return
- drawn.add(v)
- for succ in sorted(edges[v]):
- if succ in cset:
- draw(succ, indent + 1)
- draw(sorted(c)[0], 0)
- out('')
-
- have_drawn_an_SCC = False
- for scc in sorted(SCCs):
- if len(scc) != 1:
- if not have_drawn_an_SCC:
- error('(multiple files)', None, 'header files form one or more cycles')
- have_drawn_an_SCC = True
-
- draw_SCC(scc)
-
-
-# Tarjan's algorithm for finding the strongly connected components (SCCs) of a graph.
-# https://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm
-def tarjan(V, E):
- vertex_index = {}
- vertex_lowlink = {}
- index = 0
- S = []
- all_SCCs = []
-
- def strongconnect(v, index):
- # Set the depth index for v to the smallest unused index
- vertex_index[v] = index
- vertex_lowlink[v] = index
- index += 1
- S.append(v)
-
- # Consider successors of v
- for w in E[v]:
- if w not in vertex_index:
- # Successor w has not yet been visited; recurse on it
- index = strongconnect(w, index)
- vertex_lowlink[v] = min(vertex_lowlink[v], vertex_lowlink[w])
- elif w in S:
- # Successor w is in stack S and hence in the current SCC
- vertex_lowlink[v] = min(vertex_lowlink[v], vertex_index[w])
-
- # If v is a root node, pop the stack and generate an SCC
- if vertex_lowlink[v] == vertex_index[v]:
- i = S.index(v)
- scc = S[i:]
- del S[i:]
- all_SCCs.append(scc)
-
- return index
-
- for v in V:
- if v not in vertex_index:
- index = strongconnect(v, index)
-
- return all_SCCs
-
-
-def main():
- ok = check_style()
-
- if ok:
- print('TEST-PASS | check_spidermonkey_style.py | ok')
- else:
- print('TEST-UNEXPECTED-FAIL | check_spidermonkey_style.py | actual output does not match expected output; diff is above')
-
- sys.exit(0 if ok else 1)
-
-
-if __name__ == '__main__':
- main()
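The tarjan() helper removed above is the piece that turns the include graph into strongly connected components; any SCC with more than one member is a header cycle. The following standalone sketch is illustrative only and is not part of the patch: it condenses the same recursive algorithm and runs it on an invented three-header cycle.

def tarjan(vertices, edges):
    # Condensed restatement of the recursive Tarjan pass above, for illustration.
    index, lowlink, stack, sccs, counter = {}, {}, [], [], [0]

    def strongconnect(v):
        index[v] = lowlink[v] = counter[0]
        counter[0] += 1
        stack.append(v)
        for w in edges.get(v, []):
            if w not in index:
                strongconnect(w)
                lowlink[v] = min(lowlink[v], lowlink[w])
            elif w in stack:
                lowlink[v] = min(lowlink[v], index[w])
        if lowlink[v] == index[v]:
            i = stack.index(v)
            sccs.append(stack[i:])
            del stack[i:]

    for v in vertices:
        if v not in index:
            strongconnect(v)
    return sccs

if __name__ == '__main__':
    # Invented graph: jsapi.h -> jsobj.h -> jsgc.h -> jsapi.h is a cycle.
    edges = {'jsapi.h': ['jsobj.h'], 'jsobj.h': ['jsgc.h'],
             'jsgc.h': ['jsapi.h', 'jsutil.h'], 'jsutil.h': []}
    for scc in tarjan(sorted(edges), edges):
        if len(scc) > 1:
            print('header cycle:', ' -> '.join(sorted(scc)))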
diff --git a/src/third_party/mozjs-45/config/check_utils.py b/src/third_party/mozjs-45/config/check_utils.py
deleted file mode 100644
index 035402b..0000000
--- a/src/third_party/mozjs-45/config/check_utils.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# vim: set ts=8 sts=4 et sw=4 tw=99:
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-import subprocess
-
-def get_all_toplevel_filenames():
- '''Get a list of all the files in the (Mercurial or Git) repository.'''
- failed_cmds = []
- try:
- cmd = ['hg', 'manifest', '-q']
- all_filenames = subprocess.check_output(cmd, universal_newlines=True,
- stderr=subprocess.PIPE).split('\n')
- return all_filenames
- except:
- failed_cmds.append(cmd)
-
- try:
- # Get the relative path to the top-level directory.
- cmd = ['git', 'rev-parse', '--show-cdup']
- top_level = subprocess.check_output(cmd, universal_newlines=True,
- stderr=subprocess.PIPE).split('\n')[0]
- cmd = ['git', 'ls-files', '--full-name', top_level]
- all_filenames = subprocess.check_output(cmd, universal_newlines=True,
- stderr=subprocess.PIPE).split('\n')
- return all_filenames
- except:
- failed_cmds.append(cmd)
-
- raise Exception('failed to run any of the repo manifest commands', failed_cmds)
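A hypothetical consumer of get_all_toplevel_filenames() would normally narrow the manifest down to the files it cares about. The sketch below is illustrative only; it assumes the helper above is importable as check_utils, which this patch does not show.

# Hypothetical usage sketch: keep only C/C++ headers from the repo manifest.
from check_utils import get_all_toplevel_filenames

headers = sorted(name for name in get_all_toplevel_filenames()
                 if name.endswith('.h'))
print('\n'.join(headers))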
diff --git a/src/third_party/mozjs-45/config/check_vanilla_allocations.py b/src/third_party/mozjs-45/config/check_vanilla_allocations.py
deleted file mode 100644
index 2a3be87..0000000
--- a/src/third_party/mozjs-45/config/check_vanilla_allocations.py
+++ /dev/null
@@ -1,191 +0,0 @@
-# vim: set ts=8 sts=4 et sw=4 tw=79:
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-#----------------------------------------------------------------------------
-# All heap allocations in SpiderMonkey must go through js_malloc, js_calloc,
-# js_realloc, and js_free. This is so that any embedder who uses a custom
-# allocator (by defining JS_USE_CUSTOM_ALLOCATOR) will see all heap allocation
-# go through that custom allocator.
-#
-# Therefore, the presence of any calls to "vanilla" allocation/free functions
-# (e.g. malloc(), free()) is a bug.
-#
-# This script checks for the presence of such disallowed vanilla
-# allocation/free functions in SpiderMonkey when it's built as a library. It
-# relies on |nm| from the GNU binutils, and so only works on Linux, but one
-# platform is good enough to catch almost all violations.
-#
-# This checking is only 100% reliable in a JS_USE_CUSTOM_ALLOCATOR build in
-# which the default definitions of js_malloc et al (in Utility.h) -- which call
-# malloc et al -- are replaced with empty definitions. This is because the
-# presence and possible inlining of the default js_malloc et al can cause
-# malloc/calloc/realloc/free calls to show up in unpredictable places.
-#
-# Unfortunately, that configuration cannot be tested on Mozilla's standard
-# testing infrastructure. Instead, by default this script only tests that none
-# of the other vanilla allocation/free functions (operator new, memalign, etc)
-# are present. If given the --aggressive flag, it will also check for
-# malloc/calloc/realloc/free.
-#
-# Note: We don't check for |operator delete| and |operator delete[]|. These
-# can be present somehow due to virtual destructors, but this is not a problem
-# because vanilla delete/delete[] calls don't make sense without corresponding
-# vanilla new/new[] calls, and any explicit calls will be caught by Valgrind's
-# mismatched alloc/free checking.
-#----------------------------------------------------------------------------
-
-from __future__ import print_function
-
-import argparse
-import re
-import subprocess
-import sys
-
-# The obvious way to implement this script is to search for occurrences of
-# malloc et al, succeed if none are found, and fail if some are found.
-# However, "none are found" does not necessarily mean "none are present" --
-# this script could be buggy. (Or the output format of |nm| might change in
-# the future.)
-#
-# So jsutil.cpp deliberately contains a (never-called) function that contains a
-# single use of all the vanilla allocation/free functions. And this script
-# fails if it (a) finds uses of those functions in files other than jsutil.cpp,
-# *or* (b) fails to find them in jsutil.cpp.
-
-# Tracks overall success of the test.
-has_failed = False
-
-
-def fail(msg):
- print('TEST-UNEXPECTED-FAIL | check_vanilla_allocations.py |', msg)
- global has_failed
- has_failed = True
-
-
-def main():
- parser = argparse.ArgumentParser()
- parser.add_argument('--aggressive', action='store_true',
- help='also check for malloc, calloc, realloc, free and strdup')
- parser.add_argument('file', type=str,
- help='name of the file to check')
- args = parser.parse_args()
-
- # Run |nm|. Options:
- # -u: show only undefined symbols
- # -C: demangle symbol names
- # -A: show an object filename for each undefined symbol
- cmd = ['nm', '-u', '-C', '-A', args.file]
- lines = subprocess.check_output(cmd, universal_newlines=True,
- stderr=subprocess.PIPE).split('\n')
-
- # alloc_fns contains all the vanilla allocation/free functions that we look
- # for. Regexp chars are escaped appropriately.
-
- alloc_fns = [
- # Matches |operator new(unsigned T)|, where |T| is |int| or |long|.
- r'operator new\(unsigned',
-
- # Matches |operator new[](unsigned T)|, where |T| is |int| or |long|.
- r'operator new\[\]\(unsigned',
-
- r'memalign',
- # These three aren't available on all Linux configurations.
- #r'posix_memalign',
- #r'aligned_alloc',
- #r'valloc',
- ]
-
- if args.aggressive:
- alloc_fns += [
- r'malloc',
- r'calloc',
- r'realloc',
- r'free',
- r'strdup'
- ]
-
- # This is like alloc_fns, but regexp chars are not escaped.
- alloc_fns_unescaped = [fn.translate(None, r'\\') for fn in alloc_fns]
-
- # This regexp matches the relevant lines in the output of |nm|, which look
- # like the following.
- #
- # js/src/libjs_static.a:jsutil.o: U malloc
- #
- alloc_fns_re = r'([^:/ ]+):\s+U (' + r'|'.join(alloc_fns) + r')'
-
- # This tracks which allocation/free functions have been seen in jsutil.cpp.
- jsutil_cpp = set([])
-
- # Would it be helpful to emit detailed line number information after a failure?
- emit_line_info = False
-
- for line in lines:
- m = re.search(alloc_fns_re, line)
- if m is None:
- continue
-
- filename = m.group(1)
- fn = m.group(2)
- if filename == 'jsutil.o':
- jsutil_cpp.add(fn)
- else:
- # An allocation is present in a non-special file. Fail!
- fail("'" + fn + "' present in " + filename)
- # Try to give more precise information about the offending code.
- emit_line_info = True
-
-
- # Check that all functions we expect are used in jsutil.cpp. (This will
- # fail if the function-detection code breaks at any point.)
- for fn in alloc_fns_unescaped:
- if fn not in jsutil_cpp:
- fail("'" + fn + "' isn't used as expected in jsutil.cpp")
- else:
- jsutil_cpp.remove(fn)
-
- # This should never happen, but check just in case.
- if jsutil_cpp:
- fail('unexpected allocation fns used in jsutil.cpp: ' +
- ', '.join(jsutil_cpp))
-
- # If we found any improper references to allocation functions, try to use
- # DWARF debug info to get more accurate line number information about the
- # bad calls. This is a lot slower than 'nm -A', and it is not always
- # precise when building with --enable-optimized.
- if emit_line_info:
- print('check_vanilla_allocations.py: Source lines with allocation calls:')
- print('check_vanilla_allocations.py: Accurate in unoptimized builds; jsutil.cpp expected.')
-
- # Run |nm|. Options:
- # -u: show only undefined symbols
- # -C: demangle symbol names
- # -l: show line number information for each undefined symbol
- cmd = ['nm', '-u', '-C', '-l', args.file]
- lines = subprocess.check_output(cmd, universal_newlines=True,
- stderr=subprocess.PIPE).split('\n')
-
- # This regexp matches the relevant lines in the output of |nm -l|,
- # which look like the following.
- #
- # U malloc jsutil.cpp:117
- #
- alloc_lines_re = r'U ((' + r'|'.join(alloc_fns) + r').*)\s+(\S+:\d+)$'
-
- for line in lines:
- m = re.search(alloc_lines_re, line)
- if m:
- print('check_vanilla_allocations.py:', m.group(1), 'called at', m.group(3))
-
- if has_failed:
- sys.exit(1)
-
- print('TEST-PASS | check_vanilla_allocations.py | ok')
- sys.exit(0)
-
-
-if __name__ == '__main__':
- main()
-
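The scan above stands or falls on the regular expression built from alloc_fns and the `nm -u -C -A` output format quoted in the comments. The sketch below is illustrative only and reuses the sample line from those comments to show how a match is classified.

# Illustrative sketch: how the alloc_fns_re pattern built above classifies one
# line of `nm -u -C -A` output. The sample line is the one quoted in the
# comments of check_vanilla_allocations.py.
import re

alloc_fns = [r'operator new\(unsigned', r'operator new\[\]\(unsigned',
             r'memalign', r'malloc', r'calloc', r'realloc', r'free']
alloc_fns_re = r'([^:/ ]+):\s+U (' + r'|'.join(alloc_fns) + r')'

sample = 'js/src/libjs_static.a:jsutil.o:              U malloc'
m = re.search(alloc_fns_re, sample)
if m:
    # group(1) is the object file, group(2) the vanilla allocation function.
    print(m.group(1), 'references', m.group(2))   # -> jsutil.o references malloc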
diff --git a/src/third_party/mozjs-45/config/config.mk b/src/third_party/mozjs-45/config/config.mk
deleted file mode 100644
index d0de93d..0000000
--- a/src/third_party/mozjs-45/config/config.mk
+++ /dev/null
@@ -1,688 +0,0 @@
-#
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-#
-# config.mk
-#
-# Determines the platform and builds the macros needed to load the
-# appropriate platform-specific .mk file, then defines all (most?)
-# of the generic macros.
-#
-
-# Define an include-at-most-once flag
-ifdef INCLUDED_CONFIG_MK
-$(error Do not include config.mk twice!)
-endif
-INCLUDED_CONFIG_MK = 1
-
-EXIT_ON_ERROR = set -e; # Shell loops continue past errors without this.
-
-ifndef topsrcdir
-topsrcdir = $(DEPTH)
-endif
-
-ifndef INCLUDED_AUTOCONF_MK
-include $(DEPTH)/config/autoconf.mk
-endif
-
--include $(DEPTH)/.mozconfig.mk
-
-ifndef EXTERNALLY_MANAGED_MAKE_FILE
-# Using $(firstword) may not be perfect. But it should be good enough for most
-# scenarios.
-_current_makefile = $(CURDIR)/$(firstword $(MAKEFILE_LIST))
-
-# Import the automatically generated backend file. If this file doesn't exist,
-# the backend hasn't been properly configured. We want this to be a fatal
-# error, hence not using "-include".
-ifndef STANDALONE_MAKEFILE
-GLOBAL_DEPS += backend.mk
-include backend.mk
-endif
-
-endif
-
-space = $(NULL) $(NULL)
-
-# Include defs.mk files that can be found in $(srcdir)/$(DEPTH),
-# $(srcdir)/$(DEPTH-1), $(srcdir)/$(DEPTH-2), etc., and $(srcdir)
-# where $(DEPTH-1) is one level less of depth, $(DEPTH-2), two, etc.
-# i.e. for DEPTH=../../.., DEPTH-1 is ../.. and DEPTH-2 is ..
-# These defs.mk files are used to define variables in a directory
-# and all its subdirectories, recursively.
-__depth := $(subst /, ,$(DEPTH))
-ifeq (.,$(__depth))
-__depth :=
-endif
-$(foreach __d,$(__depth) .,$(eval __depth = $(wordlist 2,$(words $(__depth)),$(__depth))$(eval -include $(subst $(space),/,$(strip $(srcdir) $(__depth) defs.mk)))))
-
-COMMA = ,
-
-# Sanity check some variables
-CHECK_VARS := \
- XPI_NAME \
- LIBRARY_NAME \
- MODULE \
- DEPTH \
- XPI_PKGNAME \
- INSTALL_EXTENSION_ID \
- SHARED_LIBRARY_NAME \
- SONAME \
- STATIC_LIBRARY_NAME \
- $(NULL)
-
-# checks for internal spaces or trailing spaces in the variable
-# named by $x
-check-variable = $(if $(filter-out 0 1,$(words $($(x))z)),$(error Spaces are not allowed in $(x)))
-
-$(foreach x,$(CHECK_VARS),$(check-variable))
-
-ifndef INCLUDED_FUNCTIONS_MK
-include $(MOZILLA_DIR)/config/makefiles/functions.mk
-endif
-
-RM = rm -f
-
-# FINAL_TARGET specifies the location into which we copy end-user-shipped
-# build products (typelibs, components, chrome). It may already be specified by
-# a moz.build file.
-#
-# If XPI_NAME is set, the files will be shipped to $(DIST)/xpi-stage/$(XPI_NAME)
-# instead of $(DIST)/bin. In both cases, if DIST_SUBDIR is set, the files will be
-# shipped to a $(DIST_SUBDIR) subdirectory.
-FINAL_TARGET ?= $(if $(XPI_NAME),$(DIST)/xpi-stage/$(XPI_NAME),$(DIST)/bin)$(DIST_SUBDIR:%=/%)
-# Override the stored value for the check to make sure that the variable is not
-# redefined in the Makefile.in value.
-FINAL_TARGET_FROZEN := '$(FINAL_TARGET)'
-
-ifdef XPI_NAME
-ACDEFINES += -DXPI_NAME=$(XPI_NAME)
-endif
-
-# The VERSION_NUMBER is suffixed onto the end of the DLLs we ship.
-VERSION_NUMBER = 50
-
-ifeq ($(HOST_OS_ARCH),WINNT)
- ifeq ($(MOZILLA_DIR),$(topsrcdir))
- win_srcdir := $(subst $(topsrcdir),$(WIN_TOP_SRC),$(srcdir))
- else
- # This means we're in comm-central's topsrcdir, so we need to adjust
- # WIN_TOP_SRC (which points to mozilla's topsrcdir) for the substitution
- # to win_srcdir.
- cc_WIN_TOP_SRC := $(WIN_TOP_SRC:%/mozilla=%)
- win_srcdir := $(subst $(topsrcdir),$(cc_WIN_TOP_SRC),$(srcdir))
- endif
- BUILD_TOOLS = $(WIN_TOP_SRC)/build/unix
-else
- win_srcdir := $(srcdir)
- BUILD_TOOLS = $(MOZILLA_DIR)/build/unix
-endif
-
-CONFIG_TOOLS = $(MOZ_BUILD_ROOT)/config
-AUTOCONF_TOOLS = $(MOZILLA_DIR)/build/autoconf
-
-#
-# Strip off the excessively long version numbers on these platforms,
-# but save the version to allow multiple versions of the same base
-# platform to be built in the same tree.
-#
-ifneq (,$(filter FreeBSD HP-UX Linux NetBSD OpenBSD SunOS,$(OS_ARCH)))
-OS_RELEASE := $(basename $(OS_RELEASE))
-
-# Allow the user to ignore the OS_VERSION, which is usually irrelevant.
-ifdef WANT_MOZILLA_CONFIG_OS_VERSION
-OS_VERS := $(suffix $(OS_RELEASE))
-OS_VERSION := $(shell echo $(OS_VERS) | sed 's/-.*//')
-endif
-
-endif
-
-OS_CONFIG := $(OS_ARCH)$(OS_RELEASE)
-
-ifdef _MSC_VER
-CC_WRAPPER ?= $(call py_action,cl)
-CXX_WRAPPER ?= $(call py_action,cl)
-endif # _MSC_VER
-
-CC := $(CC_WRAPPER) $(CC)
-CXX := $(CXX_WRAPPER) $(CXX)
-MKDIR ?= mkdir
-SLEEP ?= sleep
-TOUCH ?= touch
-
-PYTHON_PATH = $(PYTHON) $(topsrcdir)/config/pythonpath.py
-
-# determine debug-related options
-_DEBUG_ASFLAGS :=
-_DEBUG_CFLAGS :=
-_DEBUG_LDFLAGS :=
-
-_DEBUG_CFLAGS += $(MOZ_DEBUG_DEFINES)
-
-ifneq (,$(MOZ_DEBUG)$(MOZ_DEBUG_SYMBOLS))
- ifeq ($(AS),yasm)
- ifeq ($(OS_ARCH)_$(GNU_CC),WINNT_)
- _DEBUG_ASFLAGS += -g cv8
- else
- ifneq ($(OS_ARCH),Darwin)
- _DEBUG_ASFLAGS += -g dwarf2
- endif
- endif
- else
- _DEBUG_ASFLAGS += $(MOZ_DEBUG_FLAGS)
- endif
- _DEBUG_CFLAGS += $(MOZ_DEBUG_FLAGS)
- _DEBUG_LDFLAGS += $(MOZ_DEBUG_LDFLAGS)
-endif
-
-ifeq ($(YASM),$(AS))
-# yasm doesn't like the GNU as flags we may already have in ASFLAGS, so reset.
-ASFLAGS := $(_DEBUG_ASFLAGS)
-# yasm doesn't like -c
-AS_DASH_C_FLAG=
-else
-ASFLAGS += $(_DEBUG_ASFLAGS)
-endif
-OS_CFLAGS += $(_DEBUG_CFLAGS)
-OS_CXXFLAGS += $(_DEBUG_CFLAGS)
-OS_LDFLAGS += $(_DEBUG_LDFLAGS)
-
-# XXX: What does this do? Bug 482434 filed for better explanation.
-ifeq ($(OS_ARCH)_$(GNU_CC),WINNT_)
-ifdef MOZ_DEBUG
-ifneq (,$(MOZ_BROWSE_INFO)$(MOZ_BSCFILE))
-OS_CFLAGS += -FR
-OS_CXXFLAGS += -FR
-endif
-else # ! MOZ_DEBUG
-
-# MOZ_DEBUG_SYMBOLS generates debug symbols in separate PDB files.
-# Used for generating an optimized build with debugging symbols.
-# Used in the Windows nightlies to generate symbols for crash reporting.
-ifdef MOZ_DEBUG_SYMBOLS
-OS_CXXFLAGS += -UDEBUG -DNDEBUG
-OS_CFLAGS += -UDEBUG -DNDEBUG
-ifdef HAVE_64BIT_BUILD
-OS_LDFLAGS += -DEBUG -OPT:REF,ICF
-else
-OS_LDFLAGS += -DEBUG -OPT:REF
-endif
-endif
-
-#
-# Handle DMD in optimized builds.
-# No opt to give sane callstacks.
-#
-ifdef MOZ_DMD
-MOZ_OPTIMIZE_FLAGS=-Zi -Od -UDEBUG -DNDEBUG
-ifdef HAVE_64BIT_BUILD
-OS_LDFLAGS = -DEBUG -OPT:REF,ICF
-else
-OS_LDFLAGS = -DEBUG -OPT:REF
-endif
-endif # MOZ_DMD
-
-endif # MOZ_DEBUG
-
-endif # WINNT && !GNU_CC
-
-#
-# Build using PIC by default
-#
-_ENABLE_PIC=1
-
-# Don't build SIMPLE_PROGRAMS with PGO, since they don't need it anyway,
-# and we don't have the same build logic to re-link them in the second pass.
-ifdef SIMPLE_PROGRAMS
-NO_PROFILE_GUIDED_OPTIMIZE = 1
-endif
-
-# No sense in profiling unit tests
-ifdef CPP_UNIT_TESTS
-NO_PROFILE_GUIDED_OPTIMIZE = 1
-endif
-
-# Enable profile-based feedback
-ifneq (1,$(NO_PROFILE_GUIDED_OPTIMIZE))
-ifdef MOZ_PROFILE_GENERATE
-OS_CFLAGS += $(if $(filter $(notdir $<),$(notdir $(NO_PROFILE_GUIDED_OPTIMIZE))),,$(PROFILE_GEN_CFLAGS))
-OS_CXXFLAGS += $(if $(filter $(notdir $<),$(notdir $(NO_PROFILE_GUIDED_OPTIMIZE))),,$(PROFILE_GEN_CFLAGS))
-OS_LDFLAGS += $(PROFILE_GEN_LDFLAGS)
-ifeq (WINNT,$(OS_ARCH))
-AR_FLAGS += -LTCG
-endif
-endif # MOZ_PROFILE_GENERATE
-
-ifdef MOZ_PROFILE_USE
-OS_CFLAGS += $(if $(filter $(notdir $<),$(notdir $(NO_PROFILE_GUIDED_OPTIMIZE))),,$(PROFILE_USE_CFLAGS))
-OS_CXXFLAGS += $(if $(filter $(notdir $<),$(notdir $(NO_PROFILE_GUIDED_OPTIMIZE))),,$(PROFILE_USE_CFLAGS))
-OS_LDFLAGS += $(PROFILE_USE_LDFLAGS)
-ifeq (WINNT,$(OS_ARCH))
-AR_FLAGS += -LTCG
-endif
-endif # MOZ_PROFILE_USE
-endif # NO_PROFILE_GUIDED_OPTIMIZE
-
-MAKE_JARS_FLAGS = \
- -t $(topsrcdir) \
- -f $(MOZ_CHROME_FILE_FORMAT) \
- $(NULL)
-
-ifdef USE_EXTENSION_MANIFEST
-MAKE_JARS_FLAGS += -e
-endif
-
-TAR_CREATE_FLAGS = -chf
-
-#
-# Personal makefile customizations go in these optional make include files.
-#
-MY_CONFIG := $(DEPTH)/config/myconfig.mk
-MY_RULES := $(DEPTH)/config/myrules.mk
-
-#
-# Default command macros; can be overridden in <arch>.mk.
-#
-CCC = $(CXX)
-
-INCLUDES = \
- -I$(srcdir) \
- -I. \
- $(LOCAL_INCLUDES) \
- -I$(DIST)/include \
- $(NULL)
-
-ifndef IS_GYP_DIR
-# NSPR_CFLAGS and NSS_CFLAGS must appear ahead of the other flags to avoid Linux
-# builds wrongly picking up system NSPR/NSS header files.
-OS_INCLUDES := \
- $(NSPR_CFLAGS) $(NSS_CFLAGS) \
- $(MOZ_JPEG_CFLAGS) \
- $(MOZ_PNG_CFLAGS) \
- $(MOZ_ZLIB_CFLAGS) \
- $(MOZ_PIXMAN_CFLAGS) \
- $(NULL)
-endif
-
-include $(MOZILLA_DIR)/config/static-checking-config.mk
-
-CFLAGS = $(OS_CPPFLAGS) $(OS_CFLAGS)
-CXXFLAGS = $(OS_CPPFLAGS) $(OS_CXXFLAGS)
-LDFLAGS = $(OS_LDFLAGS) $(MOZBUILD_LDFLAGS) $(MOZ_FIX_LINK_PATHS)
-
-ifdef MOZ_OPTIMIZE
-ifeq (1,$(MOZ_OPTIMIZE))
-ifneq (,$(if $(MOZ_PROFILE_GENERATE)$(MOZ_PROFILE_USE),$(MOZ_PGO_OPTIMIZE_FLAGS)))
-CFLAGS += $(MOZ_PGO_OPTIMIZE_FLAGS)
-CXXFLAGS += $(MOZ_PGO_OPTIMIZE_FLAGS)
-else
-CFLAGS += $(MOZ_OPTIMIZE_FLAGS)
-CXXFLAGS += $(MOZ_OPTIMIZE_FLAGS)
-endif # neq (,$(MOZ_PROFILE_GENERATE)$(MOZ_PROFILE_USE))
-else
-CFLAGS += $(MOZ_OPTIMIZE_FLAGS)
-CXXFLAGS += $(MOZ_OPTIMIZE_FLAGS)
-endif # MOZ_OPTIMIZE == 1
-LDFLAGS += $(MOZ_OPTIMIZE_LDFLAGS)
-RUSTFLAGS += $(MOZ_OPTIMIZE_RUSTFLAGS)
-endif # MOZ_OPTIMIZE
-
-ifdef CROSS_COMPILE
-HOST_CFLAGS += $(HOST_OPTIMIZE_FLAGS)
-else
-ifdef MOZ_OPTIMIZE
-ifeq (1,$(MOZ_OPTIMIZE))
-HOST_CFLAGS += $(MOZ_OPTIMIZE_FLAGS)
-else
-HOST_CFLAGS += $(MOZ_OPTIMIZE_FLAGS)
-endif # MOZ_OPTIMIZE == 1
-endif # MOZ_OPTIMIZE
-endif # CROSS_COMPILE
-
-CFLAGS += $(MOZ_FRAMEPTR_FLAGS)
-CXXFLAGS += $(MOZ_FRAMEPTR_FLAGS)
-
-# Check for ALLOW_COMPILER_WARNINGS (shorthand for Makefiles to request that we
-# *don't* use the warnings-as-errors compile flags)
-
-# Don't use warnings-as-errors in Windows PGO builds because it is suspected of
-# causing problems in that situation. (See bug 437002.)
-ifeq (WINNT_1,$(OS_ARCH)_$(MOZ_PROFILE_GENERATE)$(MOZ_PROFILE_USE))
-ALLOW_COMPILER_WARNINGS=1
-endif # WINNT && (MOZ_PROFILE_GENERATE ^ MOZ_PROFILE_USE)
-
-# Don't use warnings-as-errors in clang-cl because it warns about many more
-# things than MSVC does.
-ifdef CLANG_CL
-ALLOW_COMPILER_WARNINGS=1
-endif # CLANG_CL
-
-# Use warnings-as-errors if ALLOW_COMPILER_WARNINGS is not set to 1 (which
-# includes the case where it's undefined).
-ifneq (1,$(ALLOW_COMPILER_WARNINGS))
-CXXFLAGS += $(WARNINGS_AS_ERRORS)
-CFLAGS += $(WARNINGS_AS_ERRORS)
-endif # ALLOW_COMPILER_WARNINGS
-
-ifeq ($(OS_ARCH)_$(GNU_CC),WINNT_)
-#// Currently, unless USE_STATIC_LIBS is defined, the multithreaded
-#// DLL version of the RTL is used...
-#//
-#//------------------------------------------------------------------------
-ifdef MOZ_ASAN
-# ASAN-instrumented code tries to link against the dynamic CRT, which can't be
-# used in the same link as the static CRT.
-USE_STATIC_LIBS=
-endif # MOZ_ASAN
-
-ifdef USE_STATIC_LIBS
-RTL_FLAGS=-MT # Statically linked multithreaded RTL
-ifdef MOZ_DEBUG
-ifndef MOZ_NO_DEBUG_RTL
-RTL_FLAGS=-MTd # Statically linked multithreaded MSVC4.0 debug RTL
-endif
-endif # MOZ_DEBUG
-
-else # !USE_STATIC_LIBS
-
-RTL_FLAGS=-MD # Dynamically linked, multithreaded RTL
-ifdef MOZ_DEBUG
-ifndef MOZ_NO_DEBUG_RTL
-RTL_FLAGS=-MDd # Dynamically linked, multithreaded MSVC4.0 debug RTL
-endif
-endif # MOZ_DEBUG
-endif # USE_STATIC_LIBS
-endif # WINNT && !GNU_CC
-
-ifeq ($(OS_ARCH),Darwin)
-# Compiling ObjC requires an Apple compiler anyway, so it's ok to set
-# host CMFLAGS here.
-HOST_CMFLAGS += -fobjc-exceptions
-HOST_CMMFLAGS += -fobjc-exceptions
-OS_COMPILE_CMFLAGS += -fobjc-exceptions
-OS_COMPILE_CMMFLAGS += -fobjc-exceptions
-ifeq ($(MOZ_WIDGET_TOOLKIT),uikit)
-OS_COMPILE_CMFLAGS += -fobjc-abi-version=2 -fobjc-legacy-dispatch
-OS_COMPILE_CMMFLAGS += -fobjc-abi-version=2 -fobjc-legacy-dispatch
-endif
-endif
-
-COMPILE_CFLAGS = $(VISIBILITY_FLAGS) $(DEFINES) $(INCLUDES) $(OS_INCLUDES) $(DSO_CFLAGS) $(DSO_PIC_CFLAGS) $(RTL_FLAGS) $(OS_COMPILE_CFLAGS) $(CFLAGS) $(MOZBUILD_CFLAGS)
-COMPILE_CXXFLAGS = $(if $(DISABLE_STL_WRAPPING),,$(STL_FLAGS)) $(VISIBILITY_FLAGS) $(DEFINES) $(INCLUDES) $(OS_INCLUDES) $(DSO_CFLAGS) $(DSO_PIC_CFLAGS) $(RTL_FLAGS) $(OS_COMPILE_CXXFLAGS) $(CXXFLAGS) $(MOZBUILD_CXXFLAGS)
-COMPILE_CMFLAGS = $(OS_COMPILE_CMFLAGS) $(MOZBUILD_CMFLAGS)
-COMPILE_CMMFLAGS = $(OS_COMPILE_CMMFLAGS) $(MOZBUILD_CMMFLAGS)
-ASFLAGS += $(MOZBUILD_ASFLAGS)
-
-ifndef CROSS_COMPILE
-HOST_CFLAGS += $(RTL_FLAGS)
-endif
-
-HOST_CFLAGS += $(HOST_DEFINES) $(MOZBUILD_HOST_CFLAGS)
-HOST_CXXFLAGS += $(HOST_DEFINES) $(MOZBUILD_HOST_CXXFLAGS)
-
-#
-# Name of the binary code directories
-#
-# Override defaults
-
-# Default location of include files
-IDL_PARSER_DIR = $(topsrcdir)/xpcom/idl-parser
-IDL_PARSER_CACHE_DIR = $(DEPTH)/xpcom/idl-parser
-
-SDK_LIB_DIR = $(DIST)/sdk/lib
-SDK_BIN_DIR = $(DIST)/sdk/bin
-
-DEPENDENCIES = .md
-
-ifdef MACOSX_DEPLOYMENT_TARGET
-export MACOSX_DEPLOYMENT_TARGET
-endif # MACOSX_DEPLOYMENT_TARGET
-
-ifdef MOZ_USING_CCACHE
-ifdef CLANG_CXX
-export CCACHE_CPP2=1
-endif
-endif
-
-# Set link flags according to whether we want a console.
-ifeq ($(OS_ARCH),WINNT)
-ifdef MOZ_WINCONSOLE
-ifeq ($(MOZ_WINCONSOLE),1)
-WIN32_EXE_LDFLAGS += $(WIN32_CONSOLE_EXE_LDFLAGS)
-else # MOZ_WINCONSOLE
-WIN32_EXE_LDFLAGS += $(WIN32_GUI_EXE_LDFLAGS)
-endif
-else
-# For setting subsystem version
-WIN32_EXE_LDFLAGS += $(WIN32_CONSOLE_EXE_LDFLAGS)
-endif
-endif # WINNT
-
-ifdef _MSC_VER
-ifeq ($(CPU_ARCH),x86_64)
-# set stack to 2MB on x64 build. See bug 582910
-WIN32_EXE_LDFLAGS += -STACK:2097152
-endif
-endif
-
-#
-# Include any personal overrides the user might think are needed.
-#
--include $(topsrcdir)/$(MOZ_BUILD_APP)/app-config.mk
--include $(MY_CONFIG)
-
-######################################################################
-
-GARBAGE += $(DEPENDENCIES) core $(wildcard core.[0-9]*) $(wildcard *.err) $(wildcard *.pure) $(wildcard *_pure_*.o) Templates.DB
-
-ifeq ($(OS_ARCH),Darwin)
-ifndef NSDISTMODE
-NSDISTMODE=absolute_symlink
-endif
-PWD := $(CURDIR)
-endif
-
-NSINSTALL_PY := $(PYTHON) $(abspath $(MOZILLA_DIR)/config/nsinstall.py)
-# For Pymake, wherever we use nsinstall.py we're also going to try to make it
-# a native command where possible. Since native commands can't be used outside
-# of single-line commands, we continue to provide INSTALL for general use.
-# Single-line commands should be switched over to install_cmd.
-NSINSTALL_NATIVECMD := %nsinstall nsinstall
-
-ifdef NSINSTALL_BIN
-NSINSTALL = $(NSINSTALL_BIN)
-else
-ifeq ($(HOST_OS_ARCH),WINNT)
-NSINSTALL = $(NSINSTALL_PY)
-else
-NSINSTALL = $(DEPTH)/config/nsinstall$(HOST_BIN_SUFFIX)
-endif # WINNT
-endif # NSINSTALL_BIN
-
-
-ifeq (,$(CROSS_COMPILE)$(filter-out WINNT, $(OS_ARCH)))
-INSTALL = $(NSINSTALL) -t
-
-else
-
-# This isn't laid out as conditional directives so that NSDISTMODE can be
-# target-specific.
-INSTALL = $(if $(filter copy, $(NSDISTMODE)), $(NSINSTALL) -t, $(if $(filter absolute_symlink, $(NSDISTMODE)), $(NSINSTALL) -L $(PWD), $(NSINSTALL) -R))
-
-endif # WINNT
-
-# The default for install_cmd is simply INSTALL
-install_cmd ?= $(INSTALL) $(1)
-
-# Use nsinstall in copy mode to install files on the system
-SYSINSTALL = $(NSINSTALL) -t
-# This isn't necessarily correct; it is just a placeholder here.
-sysinstall_cmd = install_cmd
-
-#
-# Localization build automation
-#
-
-# Because you might wish to "make locales AB_CD=ab-CD", we don't hardcode
-# MOZ_UI_LOCALE directly, but use an intermediate variable that can be
-# overridden by the command line. (Besides, AB_CD is prettier).
-AB_CD = $(MOZ_UI_LOCALE)
-# Many locales directories want this definition.
-ACDEFINES += -DAB_CD=$(AB_CD)
-
-ifndef L10NBASEDIR
- L10NBASEDIR = $(error L10NBASEDIR not defined by configure)
-else
- IS_LANGUAGE_REPACK = 1
-endif
-
-EXPAND_LOCALE_SRCDIR = $(if $(filter en-US,$(AB_CD)),$(topsrcdir)/$(1)/en-US,$(or $(realpath $(L10NBASEDIR)),$(abspath $(L10NBASEDIR)))/$(AB_CD)/$(subst /locales,,$(1)))
-
-ifdef relativesrcdir
-LOCALE_SRCDIR ?= $(call EXPAND_LOCALE_SRCDIR,$(relativesrcdir))
-endif
-
-ifdef relativesrcdir
-MAKE_JARS_FLAGS += --relativesrcdir=$(relativesrcdir)
-ifneq (en-US,$(AB_CD))
-ifdef LOCALE_MERGEDIR
-MAKE_JARS_FLAGS += --locale-mergedir=$(LOCALE_MERGEDIR)
-endif
-ifdef IS_LANGUAGE_REPACK
-MAKE_JARS_FLAGS += --l10n-base=$(L10NBASEDIR)/$(AB_CD)
-endif
-else
-MAKE_JARS_FLAGS += -c $(LOCALE_SRCDIR)
-endif # en-US
-else
-MAKE_JARS_FLAGS += -c $(LOCALE_SRCDIR)
-endif # ! relativesrcdir
-
-ifdef LOCALE_MERGEDIR
-MERGE_FILE = $(firstword \
- $(wildcard $(LOCALE_MERGEDIR)/$(subst /locales,,$(relativesrcdir))/$(1)) \
- $(wildcard $(LOCALE_SRCDIR)/$(1)) \
- $(srcdir)/en-US/$(1) )
-else
-MERGE_FILE = $(LOCALE_SRCDIR)/$(1)
-endif
-MERGE_FILES = $(foreach f,$(1),$(call MERGE_FILE,$(f)))
-
-# These macros are similar to MERGE_FILE, but with no merging, and en-US first.
-# They're used for searchplugins, for example.
-EN_US_OR_L10N_FILE = $(firstword \
- $(wildcard $(srcdir)/en-US/$(1)) \
- $(LOCALE_SRCDIR)/$(1) )
-EN_US_OR_L10N_FILES = $(foreach f,$(1),$(call EN_US_OR_L10N_FILE,$(f)))
-
-ifneq (WINNT,$(OS_ARCH))
-RUN_TEST_PROGRAM = $(DIST)/bin/run-mozilla.sh
-endif # ! WINNT
-
-#
-# Java macros
-#
-
-# Make sure any compiled classes work with at least JVM 1.4
-JAVAC_FLAGS += -source 1.4
-
-ifdef MOZ_DEBUG
-JAVAC_FLAGS += -g
-endif
-
-CREATE_PRECOMPLETE_CMD = $(PYTHON) $(abspath $(MOZILLA_DIR)/config/createprecomplete.py)
-
-# MDDEPDIR is the subdirectory where dependency files are stored
-MDDEPDIR := .deps
-
-EXPAND_LIBS_EXEC = $(PYTHON) $(MOZILLA_DIR)/config/expandlibs_exec.py
-EXPAND_LIBS_GEN = $(PYTHON) $(MOZILLA_DIR)/config/expandlibs_gen.py
-EXPAND_AR = $(EXPAND_LIBS_EXEC) --extract -- $(AR)
-EXPAND_CC = $(EXPAND_LIBS_EXEC) --uselist -- $(CC)
-EXPAND_CCC = $(EXPAND_LIBS_EXEC) --uselist -- $(CCC)
-EXPAND_LD = $(EXPAND_LIBS_EXEC) --uselist -- $(LD)
-EXPAND_MKSHLIB_ARGS = --uselist
-ifdef SYMBOL_ORDER
-EXPAND_MKSHLIB_ARGS += --symbol-order $(SYMBOL_ORDER)
-endif
-EXPAND_MKSHLIB = $(EXPAND_LIBS_EXEC) $(EXPAND_MKSHLIB_ARGS) -- $(MKSHLIB)
-
-# $(call CHECK_SYMBOLS,lib,PREFIX,dep_name,test)
-# Checks that the given `lib` doesn't contain dependency on symbols with a
-# version starting with `PREFIX`_ and matching the `test`. `dep_name` is only
-# used for the error message.
-# `test` is an awk expression using the information in the variable `v` which
-# contains a list of version items ([major, minor, ...]).
-define CHECK_SYMBOLS
-@$(TOOLCHAIN_PREFIX)readelf -sW $(1) | \
-awk '$$8 ~ /@$(2)_/ { \
- split($$8,a,"@"); \
- split(a[2],b,"_"); \
- split(b[2],v,"."); \
- if ($(4)) { \
- if (!found) { \
- print "TEST-UNEXPECTED-FAIL | check_stdcxx | We do not want these $(3) symbol versions to be used:" \
- } \
- print " ",$$8; \
- found=1 \
- } \
-} \
-END { \
- if (found) { \
- exit(1) \
- } \
-}'
-endef
-
-ifneq (,$(MOZ_LIBSTDCXX_TARGET_VERSION)$(MOZ_LIBSTDCXX_HOST_VERSION))
-CHECK_STDCXX = $(call CHECK_SYMBOLS,$(1),GLIBCXX,libstdc++,v[1] > 3 || (v[1] == 3 && v[2] == 4 && v[3] > 10))
-CHECK_GLIBC = $(call CHECK_SYMBOLS,$(1),GLIBC,libc,v[1] > 2 || (v[1] == 2 && v[2] > 7))
-endif
-
-ifeq (,$(filter $(OS_TARGET),WINNT Darwin))
-CHECK_TEXTREL = @$(TOOLCHAIN_PREFIX)readelf -d $(1) | grep TEXTREL > /dev/null && echo 'TEST-UNEXPECTED-FAIL | check_textrel | We do not want text relocations in libraries and programs' || true
-endif
-
-ifeq ($(MOZ_WIDGET_TOOLKIT),android)
-# While this is very unlikely (libc being added by the compiler at the end
-# of the linker command line), if libmozglue.so ends up after libc.so, all
-# hell breaks loose, so better safe than sorry: check that libmozglue.so is
-# in fact linked before libc.so.
-CHECK_MOZGLUE_ORDER = @$(TOOLCHAIN_PREFIX)readelf -d $(1) | grep NEEDED | awk '{ libs[$$NF] = ++n } END { if (libs["[libmozglue.so]"] && libs["[libc.so]"] < libs["[libmozglue.so]"]) { print "libmozglue.so must be linked before libc.so"; exit 1 } }'
-endif
-
-define CHECK_BINARY
-$(call CHECK_GLIBC,$(1))
-$(call CHECK_STDCXX,$(1))
-$(call CHECK_TEXTREL,$(1))
-$(call LOCAL_CHECKS,$(1))
-$(call CHECK_MOZGLUE_ORDER,$(1))
-endef
-
-# autoconf.mk sets OBJ_SUFFIX to an error to avoid use before including
-# this file
-OBJ_SUFFIX := $(_OBJ_SUFFIX)
-
-# PGO builds with GCC build instrumented objects in a first pass, then
-# optimized, non-instrumented objects in a second pass. If
-# we overwrite the objects from the first pass with those from the second,
-# we end up not getting instrumentation data for better optimization on
-# incremental builds. As a consequence, we use a different object suffix
-# for the first pass.
-ifndef NO_PROFILE_GUIDED_OPTIMIZE
-ifdef MOZ_PROFILE_GENERATE
-ifdef GNU_CC
-OBJ_SUFFIX := i_o
-endif
-endif
-endif
-
-PLY_INCLUDE = -I$(MOZILLA_DIR)/other-licenses/ply
-
-export CL_INCLUDES_PREFIX
-# Make sure that the build system can handle non-ASCII characters
-# in environment variables to prevent it from breaking silently on
-# non-English systems.
-export NONASCII
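The CHECK_SYMBOLS macro above pipes `readelf -sW` through awk to reject symbol versions that are too new for the targeted libstdc++/glibc. As a rough illustration of the GLIBCXX case handled by CHECK_STDCXX (not a drop-in replacement; the sample symbols are invented), the same version test can be written in Python:

# Illustrative restatement of the awk test behind CHECK_STDCXX: flag symbols
# whose GLIBCXX_ version is newer than 3.4.10. The sample symbols are invented.
def glibcxx_too_new(symbol):
    if '@GLIBCXX_' not in symbol:
        return False
    version = symbol.rsplit('@', 1)[1][len('GLIBCXX_'):]    # e.g. '3.4.20'
    v = [int(x) for x in version.split('.')] + [0, 0]        # pad short versions
    return v[0] > 3 or (v[0] == 3 and v[1] == 4 and v[2] > 10)

for sym in ['std::__throw_out_of_range(char const*)@@GLIBCXX_3.4',
            'std::random_device::_M_fini()@@GLIBCXX_3.4.18']:
    if glibcxx_too_new(sym):
        print('TEST-UNEXPECTED-FAIL | check_stdcxx |', sym)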
diff --git a/src/third_party/mozjs-45/config/createprecomplete.py b/src/third_party/mozjs-45/config/createprecomplete.py
deleted file mode 100644
index 3241d52..0000000
--- a/src/third_party/mozjs-45/config/createprecomplete.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# Any copyright is dedicated to the Public Domain.
-# http://creativecommons.org/publicdomain/zero/1.0/
-
-# Creates the precomplete file containing the remove and rmdir application
-# update instructions which is used to remove files and directories that are no
-# longer present in a complete update. The current working directory is used for
-# the location to enumerate and to create the precomplete file.
-
-import sys
-import os
-
-def get_build_entries(root_path):
- """ Iterates through the root_path, creating a list for each file and
- directory. Excludes file paths ending with channel-prefs.js or
- update-settings.ini, and anything under distribution/.
- """
- rel_file_path_set = set()
- rel_dir_path_set = set()
- for root, dirs, files in os.walk(root_path):
- for file_name in files:
- parent_dir_rel_path = root[len(root_path)+1:]
- rel_path_file = os.path.join(parent_dir_rel_path, file_name)
- rel_path_file = rel_path_file.replace("\\", "/")
- if not (rel_path_file.endswith("channel-prefs.js") or
- rel_path_file.endswith("update-settings.ini") or
- rel_path_file.find("distribution/") != -1):
- rel_file_path_set.add(rel_path_file)
-
- for dir_name in dirs:
- parent_dir_rel_path = root[len(root_path)+1:]
- rel_path_dir = os.path.join(parent_dir_rel_path, dir_name)
- rel_path_dir = rel_path_dir.replace("\\", "/")+"/"
- if rel_path_dir.find("distribution/") == -1:
- rel_dir_path_set.add(rel_path_dir)
-
- rel_file_path_list = list(rel_file_path_set)
- rel_file_path_list.sort(reverse=True)
- rel_dir_path_list = list(rel_dir_path_set)
- rel_dir_path_list.sort(reverse=True)
-
- return rel_file_path_list, rel_dir_path_list
-
-def generate_precomplete(root_path):
- """ Creates the precomplete file containing the remove and rmdir
- application update instructions. The given directory is used
- for the location to enumerate and to create the precomplete file.
- """
- rel_path_precomplete = "precomplete"
- # If inside a Mac bundle use the root of the bundle for the path.
- if os.path.basename(root_path) == "Resources":
- root_path = os.path.abspath(os.path.join(root_path, '../../'))
- rel_path_precomplete = "Contents/Resources/precomplete"
-
- precomplete_file_path = os.path.join(root_path,rel_path_precomplete)
- # Open the file so it exists before building the list of files and open it
- # in binary mode to prevent OS specific line endings.
- precomplete_file = open(precomplete_file_path, "wb")
- rel_file_path_list, rel_dir_path_list = get_build_entries(root_path)
- for rel_file_path in rel_file_path_list:
- precomplete_file.writelines("remove \""+rel_file_path+"\"\n")
-
- for rel_dir_path in rel_dir_path_list:
- precomplete_file.writelines("rmdir \""+rel_dir_path+"\"\n")
-
- precomplete_file.close()
-
-if __name__ == "__main__":
- generate_precomplete(os.getcwd())
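The format generate_precomplete() emits is simple: reverse-sorted remove lines for files, then reverse-sorted rmdir lines for directories. The sketch below is illustrative only; the entries are invented.

# Illustrative sketch of the precomplete format written above: reverse-sorted
# "remove" lines for files, then reverse-sorted "rmdir" lines for directories.
files = ['application.ini', 'defaults/pref/all.js']
dirs = ['defaults/', 'defaults/pref/']

lines = ['remove "%s"' % f for f in sorted(files, reverse=True)]
lines += ['rmdir "%s"' % d for d in sorted(dirs, reverse=True)]
print('\n'.join(lines))
# remove "defaults/pref/all.js"
# remove "application.ini"
# rmdir "defaults/pref/"
# rmdir "defaults/"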
diff --git a/src/third_party/mozjs-45/config/doxygen.cfg.in b/src/third_party/mozjs-45/config/doxygen.cfg.in
deleted file mode 100644
index 3096b37..0000000
--- a/src/third_party/mozjs-45/config/doxygen.cfg.in
+++ /dev/null
@@ -1,1364 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-# Doxyfile 1.5.5
-
-# This file describes the settings to be used by the documentation system
-# doxygen (www.doxygen.org) for a project
-#
-# All text after a hash (#) is considered a comment and will be ignored
-# The format is:
-# TAG = value [value, ...]
-# For lists items can also be appended using:
-# TAG += value [value, ...]
-# Values that contain spaces should be placed between quotes (" ")
-
-#---------------------------------------------------------------------------
-# Project related configuration options
-#---------------------------------------------------------------------------
-
-# This tag specifies the encoding used for all characters in the config file
-# that follow. The default is UTF-8 which is also the encoding used for all
-# text before the first occurrence of this tag. Doxygen uses libiconv (or the
-# iconv built into libc) for the transcoding. See
-# http://www.gnu.org/software/libiconv for the list of possible encodings.
-
-DOXYFILE_ENCODING = UTF-8
-
-# The PROJECT_NAME tag is a single word (or a sequence of words surrounded
-# by quotes) that should identify the project.
-
-PROJECT_NAME = "Mozilla"
-
-# The PROJECT_NUMBER tag can be used to enter a project or revision number.
-# This could be handy for archiving the generated documentation or
-# if some version control system is used.
-
-PROJECT_NUMBER =
-
-# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute)
-# base path where the generated documentation will be put.
-# If a relative path is entered, it will be relative to the location
-# where doxygen was started. If left blank the current directory will be used.
-
-OUTPUT_DIRECTORY = @MOZ_DOC_OUTPUT_DIR@
-
-# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create
-# 4096 sub-directories (in 2 levels) under the output directory of each output
-# format and will distribute the generated files over these directories.
-# Enabling this option can be useful when feeding doxygen a huge amount of
-# source files, where putting all generated files in the same directory would
-# otherwise cause performance problems for the file system.
-
-CREATE_SUBDIRS = NO
-
-# The OUTPUT_LANGUAGE tag is used to specify the language in which all
-# documentation generated by doxygen is written. Doxygen will use this
-# information to generate all constant output in the proper language.
-# The default language is English, other supported languages are:
-# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional,
-# Croatian, Czech, Danish, Dutch, Farsi, Finnish, French, German, Greek,
-# Hungarian, Italian, Japanese, Japanese-en (Japanese with English messages),
-# Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, Polish,
-# Portuguese, Romanian, Russian, Serbian, Slovak, Slovene, Spanish, Swedish,
-# and Ukrainian.
-
-OUTPUT_LANGUAGE = English
-
-# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will
-# include brief member descriptions after the members that are listed in
-# the file and class documentation (similar to JavaDoc).
-# Set to NO to disable this.
-
-BRIEF_MEMBER_DESC = YES
-
-# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend
-# the brief description of a member or function before the detailed description.
-# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
-# brief descriptions will be completely suppressed.
-
-REPEAT_BRIEF = YES
-
-# This tag implements a quasi-intelligent brief description abbreviator
-# that is used to form the text in various listings. Each string
-# in this list, if found as the leading text of the brief description, will be
-# stripped from the text and the result after processing the whole list, is
-# used as the annotated text. Otherwise, the brief description is used as-is.
-# If left blank, the following values are used ("$name" is automatically
-# replaced with the name of the entity): "The $name class" "The $name widget"
-# "The $name file" "is" "provides" "specifies" "contains"
-# "represents" "a" "an" "the"
-
-ABBREVIATE_BRIEF =
-
-# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
-# Doxygen will generate a detailed section even if there is only a brief
-# description.
-
-ALWAYS_DETAILED_SEC = NO
-
-# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all
-# inherited members of a class in the documentation of that class as if those
-# members were ordinary class members. Constructors, destructors and assignment
-# operators of the base classes will not be shown.
-
-INLINE_INHERITED_MEMB = NO
-
-# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full
-# path before file names in the file list and in the header files. If set
-# to NO the shortest path that makes the file name unique will be used.
-
-FULL_PATH_NAMES = NO
-
-# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag
-# can be used to strip a user-defined part of the path. Stripping is
-# only done if one of the specified strings matches the left-hand part of
-# the path. The tag can be used to show relative paths in the file list.
-# If left blank the directory from which doxygen is run is used as the
-# path to strip.
-
-STRIP_FROM_PATH =
-
-# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of
-# the path mentioned in the documentation of a class, which tells
-# the reader which header file to include in order to use a class.
-# If left blank only the name of the header file containing the class
-# definition is used. Otherwise one should specify the include paths that
-# are normally passed to the compiler using the -I flag.
-
-STRIP_FROM_INC_PATH =
-
-# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter
-# (but less readable) file names. This can be useful if your file system
-# doesn't support long names, like on DOS, Mac, or CD-ROM.
-
-SHORT_NAMES = NO
-
-# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen
-# will interpret the first line (until the first dot) of a JavaDoc-style
-# comment as the brief description. If set to NO, the JavaDoc
-# comments will behave just like regular Qt-style comments
-# (thus requiring an explicit @brief command for a brief description.)
-
-JAVADOC_AUTOBRIEF = YES
-
-# If the QT_AUTOBRIEF tag is set to YES then Doxygen will
-# interpret the first line (until the first dot) of a Qt-style
-# comment as the brief description. If set to NO, the comments
-# will behave just like regular Qt-style comments (thus requiring
-# an explicit \brief command for a brief description.)
-
-QT_AUTOBRIEF = NO
-
-# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen
-# treat a multi-line C++ special comment block (i.e. a block of //! or ///
-# comments) as a brief description. This used to be the default behaviour.
-# The new default is to treat a multi-line C++ comment block as a detailed
-# description. Set this tag to YES if you prefer the old behaviour instead.
-
-MULTILINE_CPP_IS_BRIEF = NO
-
-# If the DETAILS_AT_TOP tag is set to YES then Doxygen
-# will output the detailed description near the top, like JavaDoc.
-# If set to NO, the detailed description appears after the member
-# documentation.
-
-DETAILS_AT_TOP = NO
-
-# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented
-# member inherits the documentation from any documented member that it
-# re-implements.
-
-INHERIT_DOCS = YES
-
-# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce
-# a new page for each member. If set to NO, the documentation of a member will
-# be part of the file/class/namespace that contains it.
-
-SEPARATE_MEMBER_PAGES = NO
-
-# The TAB_SIZE tag can be used to set the number of spaces in a tab.
-# Doxygen uses this value to replace tabs by spaces in code fragments.
-
-TAB_SIZE = 4
-
-# This tag can be used to specify a number of aliases that acts
-# as commands in the documentation. An alias has the form "name=value".
-# For example adding "sideeffect=\par Side Effects:\n" will allow you to
-# put the command \sideeffect (or @sideeffect) in the documentation, which
-# will result in a user-defined paragraph with heading "Side Effects:".
-# You can put \n's in the value part of an alias to insert newlines.
-
-ALIASES = "status=\par Status:\n"
-
-# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C
-# sources only. Doxygen will then generate output that is more tailored for C.
-# For instance, some of the names that are used will be different. The list
-# of all members will be omitted, etc.
-
-OPTIMIZE_OUTPUT_FOR_C = NO
-
-# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java
-# sources only. Doxygen will then generate output that is more tailored for
-# Java. For instance, namespaces will be presented as packages, qualified
-# scopes will look different, etc.
-
-OPTIMIZE_OUTPUT_JAVA = NO
-
-# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran
-# sources only. Doxygen will then generate output that is more tailored for
-# Fortran.
-
-OPTIMIZE_FOR_FORTRAN = NO
-
-# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL
-# sources. Doxygen will then generate output that is tailored for
-# VHDL.
-
-OPTIMIZE_OUTPUT_VHDL = NO
-
-# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want
-# to include (a tag file for) the STL sources as input, then you should
-# set this tag to YES in order to let doxygen match function declarations and
-# definitions whose arguments contain STL classes (e.g. func(std::string) vs.
-# func(std::string) {}). This also makes the inheritance and collaboration
-# diagrams that involve STL classes more complete and accurate.
-
-BUILTIN_STL_SUPPORT = NO
-
-# If you use Microsoft's C++/CLI language, you should set this option to YES to
-# enable parsing support.
-
-CPP_CLI_SUPPORT = NO
-
-# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only.
-# Doxygen will parse them like normal C++ but will assume all classes use public
-# instead of private inheritance when no explicit protection keyword is present.
-
-SIP_SUPPORT = NO
-
-# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
-# tag is set to YES, then doxygen will reuse the documentation of the first
-# member in the group (if any) for the other members of the group. By default
-# all members of a group must be documented explicitly.
-
-DISTRIBUTE_GROUP_DOC = YES
-
-# Set the SUBGROUPING tag to YES (the default) to allow class member groups of
-# the same type (for instance a group of public functions) to be put as a
-# subgroup of that type (e.g. under the Public Functions section). Set it to
-# NO to prevent subgrouping. Alternatively, this can be done per class using
-# the \nosubgrouping command.
-
-SUBGROUPING = YES
-
-# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum
-# is documented as struct, union, or enum with the name of the typedef. So
-# typedef struct TypeS {} TypeT, will appear in the documentation as a struct
-# with name TypeT. When disabled the typedef will appear as a member of a file,
-# namespace, or class. And the struct will be named TypeS. This can typically
-# be useful for C code in case the coding convention dictates that all compound
-# types are typedef'ed and only the typedef is referenced, never the tag name.
-
-TYPEDEF_HIDES_STRUCT = NO
-
-#---------------------------------------------------------------------------
-# Build related configuration options
-#---------------------------------------------------------------------------
-
-# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in
-# documentation are documented, even if no documentation was available.
-# Private class members and static file members will be hidden unless
-# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES
-
-EXTRACT_ALL = YES
-
-# If the EXTRACT_PRIVATE tag is set to YES all private members of a class
-# will be included in the documentation.
-
-EXTRACT_PRIVATE = NO
-
-# If the EXTRACT_STATIC tag is set to YES all static members of a file
-# will be included in the documentation.
-
-EXTRACT_STATIC = NO
-
-# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs)
-# defined locally in source files will be included in the documentation.
-# If set to NO only classes defined in header files are included.
-
-EXTRACT_LOCAL_CLASSES = YES
-
-# This flag is only useful for Objective-C code. When set to YES local
-# methods, which are defined in the implementation section but not in
-# the interface are included in the documentation.
-# If set to NO (the default) only methods in the interface are included.
-
-EXTRACT_LOCAL_METHODS = NO
-
-# If this flag is set to YES, the members of anonymous namespaces will be
-# extracted and appear in the documentation as a namespace called
-# 'anonymous_namespace{file}', where file will be replaced with the base
-# name of the file that contains the anonymous namespace. By default
-# anonymous namespaces are hidden.
-
-EXTRACT_ANON_NSPACES = NO
-
-# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all
-# undocumented members of documented classes, files or namespaces.
-# If set to NO (the default) these members will be included in the
-# various overviews, but no documentation section is generated.
-# This option has no effect if EXTRACT_ALL is enabled.
-
-HIDE_UNDOC_MEMBERS = NO
-
-# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all
-# undocumented classes that are normally visible in the class hierarchy.
-# If set to NO (the default) these classes will be included in the various
-# overviews. This option has no effect if EXTRACT_ALL is enabled.
-
-HIDE_UNDOC_CLASSES = NO
-
-# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all
-# friend (class|struct|union) declarations.
-# If set to NO (the default) these declarations will be included in the
-# documentation.
-
-HIDE_FRIEND_COMPOUNDS = NO
-
-# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any
-# documentation blocks found inside the body of a function.
-# If set to NO (the default) these blocks will be appended to the
-# function's detailed documentation block.
-
-HIDE_IN_BODY_DOCS = NO
-
-# The INTERNAL_DOCS tag determines if documentation
-# that is typed after a \internal command is included. If the tag is set
-# to NO (the default) then the documentation will be excluded.
-# Set it to YES to include the internal documentation.
-
-INTERNAL_DOCS = NO
-
-# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate
-# file names in lower-case letters. If set to YES upper-case letters are also
-# allowed. This is useful if you have classes or files whose names only differ
-# in case and if your file system supports case sensitive file names. Windows
-# and Mac users are advised to set this option to NO.
-
-CASE_SENSE_NAMES = YES
-
-# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen
-# will show members with their full class and namespace scopes in the
-# documentation. If set to YES the scope will be hidden.
-
-HIDE_SCOPE_NAMES = NO
-
-# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen
-# will put a list of the files that are included by a file in the documentation
-# of that file.
-
-SHOW_INCLUDE_FILES = YES
-
-# If the INLINE_INFO tag is set to YES (the default) then a tag [inline]
-# is inserted in the documentation for inline members.
-
-INLINE_INFO = YES
-
-# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen
-# will sort the (detailed) documentation of file and class members
-# alphabetically by member name. If set to NO the members will appear in
-# declaration order.
-
-SORT_MEMBER_DOCS = YES
-
-# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the
-# brief documentation of file, namespace and class members alphabetically
-# by member name. If set to NO (the default) the members will appear in
-# declaration order.
-
-SORT_BRIEF_DOCS = NO
-
-# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the
-# hierarchy of group names into alphabetical order. If set to NO (the default)
-# the group names will appear in their defined order.
-
-SORT_GROUP_NAMES = NO
-
-# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be
-# sorted by fully-qualified names, including namespaces. If set to
-# NO (the default), the class list will be sorted only by class name,
-# not including the namespace part.
-# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES.
-# Note: This option applies only to the class list, not to the
-# alphabetical list.
-
-SORT_BY_SCOPE_NAME = NO
-
-# The GENERATE_TODOLIST tag can be used to enable (YES) or
-# disable (NO) the todo list. This list is created by putting \todo
-# commands in the documentation.
-
-GENERATE_TODOLIST = YES
-
-# The GENERATE_TESTLIST tag can be used to enable (YES) or
-# disable (NO) the test list. This list is created by putting \test
-# commands in the documentation.
-
-GENERATE_TESTLIST = YES
-
-# The GENERATE_BUGLIST tag can be used to enable (YES) or
-# disable (NO) the bug list. This list is created by putting \bug
-# commands in the documentation.
-
-GENERATE_BUGLIST = NO
-
-# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or
-# disable (NO) the deprecated list. This list is created by putting
-# \deprecated commands in the documentation.
-
-GENERATE_DEPRECATEDLIST= YES
-
-# The ENABLED_SECTIONS tag can be used to enable conditional
-# documentation sections, marked by \if sectionname ... \endif.
-
-ENABLED_SECTIONS =
-
-# The MAX_INITIALIZER_LINES tag determines the maximum number of lines
-# the initial value of a variable or define consists of for it to appear in
-# the documentation. If the initializer consists of more lines than specified
-# here it will be hidden. Use a value of 0 to hide initializers completely.
-# The appearance of the initializer of individual variables and defines in the
-# documentation can be controlled using \showinitializer or \hideinitializer
-# command in the documentation regardless of this setting.
-
-MAX_INITIALIZER_LINES = 30
-
-# Set the SHOW_USED_FILES tag to NO to disable the list of files generated
-# at the bottom of the documentation of classes and structs. If set to YES the
-# list will mention the files that were used to generate the documentation.
-
-SHOW_USED_FILES = YES
-
-# If the sources in your project are distributed over multiple directories
-# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy
-# in the documentation. The default is NO.
-
-SHOW_DIRECTORIES = NO
-
-# The FILE_VERSION_FILTER tag can be used to specify a program or script that
-# doxygen should invoke to get the current version for each file (typically from
-# the version control system). Doxygen will invoke the program by executing (via
-# popen()) the command <command> <input-file>, where <command> is the value of
-# the FILE_VERSION_FILTER tag, and <input-file> is the name of an input file
-# provided by doxygen. Whatever the program writes to standard output
-# is used as the file version. See the manual for examples.
-
-FILE_VERSION_FILTER =
-
-#---------------------------------------------------------------------------
-# configuration options related to warning and progress messages
-#---------------------------------------------------------------------------
-
-# The QUIET tag can be used to turn on/off the messages that are generated
-# by doxygen. Possible values are YES and NO. If left blank NO is used.
-
-QUIET = NO
-
-# The WARNINGS tag can be used to turn on/off the warning messages that are
-# generated by doxygen. Possible values are YES and NO. If left blank
-# NO is used.
-
-WARNINGS = YES
-
-# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings
-# for undocumented members. If EXTRACT_ALL is set to YES then this flag will
-# automatically be disabled.
-
-WARN_IF_UNDOCUMENTED = NO
-
-# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for
-# potential errors in the documentation, such as not documenting some
-# parameters in a documented function, or documenting parameters that
-# don't exist or using markup commands wrongly.
-
-WARN_IF_DOC_ERROR = YES
-
-# This WARN_NO_PARAMDOC option can be enabled to get warnings for
-# functions that are documented, but have no documentation for their parameters
-# or return value. If set to NO (the default) doxygen will only warn about
-# wrong or incomplete parameter documentation, but not about the absence of
-# documentation.
-
-WARN_NO_PARAMDOC = NO
-
-# The WARN_FORMAT tag determines the format of the warning messages that
-# doxygen can produce. The string should contain the $file, $line, and $text
-# tags, which will be replaced by the file and line number from which the
-# warning originated and the warning text. Optionally the format may contain
-# $version, which will be replaced by the version of the file (if it could
-# be obtained via FILE_VERSION_FILTER)
-
-WARN_FORMAT =
-
-# The WARN_LOGFILE tag can be used to specify a file to which warning
-# and error messages should be written. If left blank the output is written
-# to stderr.
-
-WARN_LOGFILE =
-
-#---------------------------------------------------------------------------
-# configuration options related to the input files
-#---------------------------------------------------------------------------
-
-# The INPUT tag can be used to specify the files and/or directories that contain
-# documented source files. You may enter file names like "myfile.cpp" or
-# directories like "/usr/src/myproject". Separate the files or directories
-# with spaces.
-
-INPUT = @MOZ_DOC_INPUT_DIRS@
-
-# This tag can be used to specify the character encoding of the source files
-# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is
-# also the default input encoding. Doxygen uses libiconv (or the iconv built
-# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for
-# the list of possible encodings.
-
-INPUT_ENCODING = UTF-8
-
-# If the value of the INPUT tag contains directories, you can use the
-# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp
-# and *.h) to filter out the source-files in the directories. If left
-# blank the following patterns are tested:
-# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx
-# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90
-
-FILE_PATTERNS = *.idl \
- *.cpp \
- *.h
-
-# The RECURSIVE tag can be used to specify whether or not subdirectories
-# should be searched for input files as well. Possible values are YES and NO.
-# If left blank NO is used.
-
-RECURSIVE = YES
-
-# The EXCLUDE tag can be used to specify files and/or directories that should
-# be excluded from the INPUT source files. This way you can easily exclude a
-# subdirectory from a directory tree whose root is specified with the INPUT tag.
-
-EXCLUDE =
-
-# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
-# directories that are symbolic links (a Unix filesystem feature) are excluded
-# from the input.
-
-EXCLUDE_SYMLINKS = NO
-
-# If the value of the INPUT tag contains directories, you can use the
-# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
-# certain files from those directories. Note that the wildcards are matched
-# against the file with absolute path, so to exclude all test directories
-# for example use the pattern */test/*
-
-EXCLUDE_PATTERNS = nsI*.h mozI*.h imgI*.h
-
-# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
-# (namespaces, classes, functions, etc.) that should be excluded from the
-# output. The symbol name can be a fully qualified name, a word, or if the
-# wildcard * is used, a substring. Examples: ANamespace, AClass,
-# AClass::ANamespace, ANamespace::*Test
-
-EXCLUDE_SYMBOLS = nsCOMPtr_base
-
-# The EXAMPLE_PATH tag can be used to specify one or more files or
-# directories that contain example code fragments that are included (see
-# the \include command).
-
-EXAMPLE_PATH =
-
-# If the value of the EXAMPLE_PATH tag contains directories, you can use the
-# EXAMPLE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp
-# and *.h) to filter out the source-files in the directories. If left
-# blank all files are included.
-
-EXAMPLE_PATTERNS =
-
-# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
-# searched for input files to be used with the \include or \dontinclude
-# commands irrespective of the value of the RECURSIVE tag.
-# Possible values are YES and NO. If left blank NO is used.
-
-EXAMPLE_RECURSIVE = NO
-
-# The IMAGE_PATH tag can be used to specify one or more files or
-# directories that contain images that are included in the documentation (see
-# the \image command).
-
-IMAGE_PATH =
-
-# The INPUT_FILTER tag can be used to specify a program that doxygen should
-# invoke to filter for each input file. Doxygen will invoke the filter program
-# by executing (via popen()) the command <filter> <input-file>, where <filter>
-# is the value of the INPUT_FILTER tag, and <input-file> is the name of an
-# input file. Doxygen will then use the output that the filter program writes
-# to standard output. If FILTER_PATTERNS is specified, this tag will be
-# ignored.
-
-INPUT_FILTER =
-
-# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
-# basis. Doxygen will compare the file name with each pattern and apply the
-# filter if there is a match. The filters are a list of the form:
-# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further
-# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER
-# is applied to all files.
-
-FILTER_PATTERNS =
-
-# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
-# INPUT_FILTER) will be used to filter the input files when producing source
-# files to browse (i.e. when SOURCE_BROWSER is set to YES).
-
-FILTER_SOURCE_FILES = NO
-
-#---------------------------------------------------------------------------
-# configuration options related to source browsing
-#---------------------------------------------------------------------------
-
-# If the SOURCE_BROWSER tag is set to YES then a list of source files will
-# be generated. Documented entities will be cross-referenced with these sources.
-# Note: To get rid of all source code in the generated output, make sure also
-# VERBATIM_HEADERS is set to NO.
-
-SOURCE_BROWSER = NO
-
-# Setting the INLINE_SOURCES tag to YES will include the body
-# of functions and classes directly in the documentation.
-
-INLINE_SOURCES = NO
-
-# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct
-# doxygen to hide any special comment blocks from generated source code
-# fragments. Normal C and C++ comments will always remain visible.
-
-STRIP_CODE_COMMENTS = YES
-
-# If the REFERENCED_BY_RELATION tag is set to YES (the default)
-# then for each documented function all documented
-# functions referencing it will be listed.
-
-REFERENCED_BY_RELATION = NO
-
-# If the REFERENCES_RELATION tag is set to YES (the default)
-# then for each documented function all documented entities
-# called/used by that function will be listed.
-
-REFERENCES_RELATION = NO
-
-# If the REFERENCES_LINK_SOURCE tag is set to YES (the default)
-# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from
-# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will
-# link to the source code. Otherwise they will link to the documentation.
-
-REFERENCES_LINK_SOURCE = NO
-
-# If the USE_HTAGS tag is set to YES then the references to source code
-# will point to the HTML generated by the htags(1) tool instead of doxygen
-# built-in source browser. The htags tool is part of GNU's global source
-# tagging system (see http://www.gnu.org/software/global/global.html). You
-# will need version 4.8.6 or higher.
-
-USE_HTAGS = NO
-
-# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen
-# will generate a verbatim copy of the header file for each class for
-# which an include is specified. Set to NO to disable this.
-
-VERBATIM_HEADERS = NO
-
-#---------------------------------------------------------------------------
-# configuration options related to the alphabetical class index
-#---------------------------------------------------------------------------
-
-# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index
-# of all compounds will be generated. Enable this if the project
-# contains a lot of classes, structs, unions or interfaces.
-
-ALPHABETICAL_INDEX = YES
-
-# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then
-# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns
-# in which this list will be split (can be a number in the range [1..20])
-
-COLS_IN_ALPHA_INDEX = 5
-
-# In case all classes in a project start with a common prefix, all
-# classes will be put under the same header in the alphabetical index.
-# The IGNORE_PREFIX tag can be used to specify one or more prefixes that
-# should be ignored while generating the index headers.
-
-IGNORE_PREFIX = nsI ns \
- mozI moz \
- imgI img
-
-#---------------------------------------------------------------------------
-# configuration options related to the HTML output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_HTML tag is set to YES (the default) Doxygen will
-# generate HTML output.
-
-GENERATE_HTML = YES
-
-# The HTML_OUTPUT tag is used to specify where the HTML docs will be put.
-# If a relative path is entered the value of OUTPUT_DIRECTORY will be
-# put in front of it. If left blank `html' will be used as the default path.
-
-HTML_OUTPUT =
-
-# The HTML_FILE_EXTENSION tag can be used to specify the file extension for
-# each generated HTML page (for example: .htm,.php,.asp). If it is left blank
-# doxygen will generate files with .html extension.
-
-HTML_FILE_EXTENSION = .html
-
-# The HTML_HEADER tag can be used to specify a personal HTML header for
-# each generated HTML page. If it is left blank doxygen will generate a
-# standard header.
-
-HTML_HEADER =
-
-# The HTML_FOOTER tag can be used to specify a personal HTML footer for
-# each generated HTML page. If it is left blank doxygen will generate a
-# standard footer.
-
-HTML_FOOTER =
-
-# The HTML_STYLESHEET tag can be used to specify a user-defined cascading
-# style sheet that is used by each HTML page. It can be used to
-# fine-tune the look of the HTML output. If the tag is left blank doxygen
-# will generate a default style sheet. Note that doxygen will try to copy
-# the style sheet file to the HTML output directory, so don't put your own
-# stylesheet in the HTML output directory as well, or it will be erased!
-
-HTML_STYLESHEET =
-
-# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes,
-# files or namespaces will be aligned in HTML using tables. If set to
-# NO a bullet list will be used.
-
-HTML_ALIGN_MEMBERS = YES
-
-# If the GENERATE_HTMLHELP tag is set to YES, additional index files
-# will be generated that can be used as input for tools like the
-# Microsoft HTML help workshop to generate a compiled HTML help file (.chm)
-# of the generated HTML documentation.
-
-GENERATE_HTMLHELP = NO
-
-# If the GENERATE_DOCSET tag is set to YES, additional index files
-# will be generated that can be used as input for Apple's Xcode 3
-# integrated development environment, introduced with OSX 10.5 (Leopard).
-# To create a documentation set, doxygen will generate a Makefile in the
-# HTML output directory. Running make will produce the docset in that
-# directory and running "make install" will install the docset in
-# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find
-# it at startup.
-
-GENERATE_DOCSET = NO
-
-# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the
-# feed. A documentation feed provides an umbrella under which multiple
-# documentation sets from a single provider (such as a company or product suite)
-# can be grouped.
-
-DOCSET_FEEDNAME = "Doxygen generated docs"
-
-# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that
-# should uniquely identify the documentation set bundle. This should be a
-# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen
-# will append .docset to the name.
-
-DOCSET_BUNDLE_ID = org.doxygen.Project
-
-# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
-# documentation will contain sections that can be hidden and shown after the
-# page has loaded. For this to work a browser that supports
-# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox,
-# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari).
-
-HTML_DYNAMIC_SECTIONS = NO
-
-# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can
-# be used to specify the file name of the resulting .chm file. You
-# can add a path in front of the file if the result should not be
-# written to the html output directory.
-
-CHM_FILE =
-
-# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can
-# be used to specify the location (absolute path including file name) of
-# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run
-# the HTML help compiler on the generated index.hhp.
-
-HHC_LOCATION =
-
-# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag
-# controls if a separate .chi index file is generated (YES) or that
-# it should be included in the master .chm file (NO).
-
-GENERATE_CHI = NO
-
-# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag
-# controls whether a binary table of contents is generated (YES) or a
-# normal table of contents (NO) in the .chm file.
-
-BINARY_TOC = NO
-
-# The TOC_EXPAND flag can be set to YES to add extra items for group members
-# to the contents of the HTML help documentation and to the tree view.
-
-TOC_EXPAND = NO
-
-# The DISABLE_INDEX tag can be used to turn on/off the condensed index at
-# top of each HTML page. The value NO (the default) enables the index and
-# the value YES disables it.
-
-DISABLE_INDEX = NO
-
-# This tag can be used to set the number of enum values (range [1..20])
-# that doxygen will group on one line in the generated HTML documentation.
-
-ENUM_VALUES_PER_LINE = 4
-
-# If the GENERATE_TREEVIEW tag is set to YES, a side panel will be
-# generated containing a tree-like index structure (just like the one that
-# is generated for HTML Help). For this to work a browser that supports
-# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+,
-# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are
-# probably better off using the HTML help feature.
-
-GENERATE_TREEVIEW = NO
-
-# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be
-# used to set the initial width (in pixels) of the frame in which the tree
-# is shown.
-
-TREEVIEW_WIDTH = 250
-
-#---------------------------------------------------------------------------
-# configuration options related to the LaTeX output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will
-# generate LaTeX output.
-
-GENERATE_LATEX = NO
-
-# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put.
-# If a relative path is entered the value of OUTPUT_DIRECTORY will be
-# put in front of it. If left blank `latex' will be used as the default path.
-
-LATEX_OUTPUT =
-
-# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be
-# invoked. If left blank `latex' will be used as the default command name.
-
-LATEX_CMD_NAME = latex
-
-# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to
-# generate index for LaTeX. If left blank `makeindex' will be used as the
-# default command name.
-
-MAKEINDEX_CMD_NAME = makeindex
-
-# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact
-# LaTeX documents. This may be useful for small projects and may help to
-# save some trees in general.
-
-COMPACT_LATEX = NO
-
-# The PAPER_TYPE tag can be used to set the paper type that is used
-# by the printer. Possible values are: a4, a4wide, letter, legal and
-# executive. If left blank a4wide will be used.
-
-PAPER_TYPE = a4wide
-
-# The EXTRA_PACKAGES tag can be used to specify one or more names of LaTeX
-# packages that should be included in the LaTeX output.
-
-EXTRA_PACKAGES =
-
-# The LATEX_HEADER tag can be used to specify a personal LaTeX header for
-# the generated latex document. The header should contain everything until
-# the first chapter. If it is left blank doxygen will generate a
-# standard header. Notice: only use this tag if you know what you are doing!
-
-LATEX_HEADER =
-
-# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated
-# is prepared for conversion to pdf (using ps2pdf). The pdf file will
-# contain links (just like the HTML output) instead of page references.
-# This makes the output suitable for online browsing using a pdf viewer.
-
-PDF_HYPERLINKS = NO
-
-# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of
-# plain latex in the generated Makefile. Set this option to YES to get a
-# higher quality PDF documentation.
-
-USE_PDFLATEX = NO
-
-# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode
-# command to the generated LaTeX files. This will instruct LaTeX to keep
-# running if errors occur, instead of asking the user for help.
-# This option is also used when generating formulas in HTML.
-
-LATEX_BATCHMODE = NO
-
-# If LATEX_HIDE_INDICES is set to YES then doxygen will not
-# include the index chapters (such as File Index, Compound Index, etc.)
-# in the output.
-
-LATEX_HIDE_INDICES = NO
-
-#---------------------------------------------------------------------------
-# configuration options related to the RTF output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output
-# The RTF output is optimized for Word 97 and may not look very pretty with
-# other RTF readers or editors.
-
-GENERATE_RTF = NO
-
-# The RTF_OUTPUT tag is used to specify where the RTF docs will be put.
-# If a relative path is entered the value of OUTPUT_DIRECTORY will be
-# put in front of it. If left blank `rtf' will be used as the default path.
-
-RTF_OUTPUT =
-
-# If the COMPACT_RTF tag is set to YES Doxygen generates more compact
-# RTF documents. This may be useful for small projects and may help to
-# save some trees in general.
-
-COMPACT_RTF = NO
-
-# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated
-# will contain hyperlink fields. The RTF file will
-# contain links (just like the HTML output) instead of page references.
-# This makes the output suitable for online browsing using WORD or other
-# programs which support those fields.
-# Note: wordpad (write) and others do not support links.
-
-RTF_HYPERLINKS = NO
-
-# Load stylesheet definitions from file. Syntax is similar to doxygen's
-# config file, i.e. a series of assignments. You only have to provide
-# replacements, missing definitions are set to their default value.
-
-RTF_STYLESHEET_FILE =
-
-# Set optional variables used in the generation of an rtf document.
-# Syntax is similar to doxygen's config file.
-
-RTF_EXTENSIONS_FILE =
-
-#---------------------------------------------------------------------------
-# configuration options related to the man page output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_MAN tag is set to YES (the default) Doxygen will
-# generate man pages
-
-GENERATE_MAN = NO
-
-# The MAN_OUTPUT tag is used to specify where the man pages will be put.
-# If a relative path is entered the value of OUTPUT_DIRECTORY will be
-# put in front of it. If left blank `man' will be used as the default path.
-
-MAN_OUTPUT =
-
-# The MAN_EXTENSION tag determines the extension that is added to
-# the generated man pages (default is the subroutine's section .3)
-
-MAN_EXTENSION =
-
-# If the MAN_LINKS tag is set to YES and Doxygen generates man output,
-# then it will generate one additional man file for each entity
-# documented in the real man page(s). These additional files
-# only source the real man page, but without them the man command
-# would be unable to find the correct page. The default is NO.
-
-MAN_LINKS = NO
-
-#---------------------------------------------------------------------------
-# configuration options related to the XML output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_XML tag is set to YES Doxygen will
-# generate an XML file that captures the structure of
-# the code including all documentation.
-
-GENERATE_XML = NO
-
-# The XML_OUTPUT tag is used to specify where the XML pages will be put.
-# If a relative path is entered the value of OUTPUT_DIRECTORY will be
-# put in front of it. If left blank `xml' will be used as the default path.
-
-XML_OUTPUT = xml
-
-# The XML_SCHEMA tag can be used to specify an XML schema,
-# which can be used by a validating XML parser to check the
-# syntax of the XML files.
-
-XML_SCHEMA =
-
-# The XML_DTD tag can be used to specify an XML DTD,
-# which can be used by a validating XML parser to check the
-# syntax of the XML files.
-
-XML_DTD =
-
-# If the XML_PROGRAMLISTING tag is set to YES Doxygen will
-# dump the program listings (including syntax highlighting
-# and cross-referencing information) to the XML output. Note that
-# enabling this will significantly increase the size of the XML output.
-
-XML_PROGRAMLISTING = YES
-
-#---------------------------------------------------------------------------
-# configuration options for the AutoGen Definitions output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will
-# generate an AutoGen Definitions (see autogen.sf.net) file
-# that captures the structure of the code including all
-# documentation. Note that this feature is still experimental
-# and incomplete at the moment.
-
-GENERATE_AUTOGEN_DEF = NO
-
-#---------------------------------------------------------------------------
-# configuration options related to the Perl module output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_PERLMOD tag is set to YES Doxygen will
-# generate a Perl module file that captures the structure of
-# the code including all documentation. Note that this
-# feature is still experimental and incomplete at the
-# moment.
-
-GENERATE_PERLMOD = NO
-
-# If the PERLMOD_LATEX tag is set to YES Doxygen will generate
-# the necessary Makefile rules, Perl scripts and LaTeX code to be able
-# to generate PDF and DVI output from the Perl module output.
-
-PERLMOD_LATEX = NO
-
-# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be
-# nicely formatted so it can be parsed by a human reader. This is useful
-# if you want to understand what is going on. On the other hand, if this
-# tag is set to NO the size of the Perl module output will be much smaller
-# and Perl will parse it just the same.
-
-PERLMOD_PRETTY = YES
-
-# The names of the make variables in the generated doxyrules.make file
-# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX.
-# This is useful so different doxyrules.make files included by the same
-# Makefile don't overwrite each other's variables.
-
-PERLMOD_MAKEVAR_PREFIX =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the preprocessor
-#---------------------------------------------------------------------------
-
-# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will
-# evaluate all C-preprocessor directives found in the sources and include
-# files.
-
-ENABLE_PREPROCESSING = YES
-
-# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro
-# names in the source code. If set to NO (the default) only conditional
-# compilation will be performed. Macro expansion can be done in a controlled
-# way by setting EXPAND_ONLY_PREDEF to YES.
-
-MACRO_EXPANSION = YES
-
-# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES
-# then the macro expansion is limited to the macros specified with the
-# PREDEFINED and EXPAND_AS_DEFINED tags.
-
-EXPAND_ONLY_PREDEF = NO
-
-# If the SEARCH_INCLUDES tag is set to YES (the default) the include files
-# in the INCLUDE_PATH (see below) will be searched if a #include is found.
-
-SEARCH_INCLUDES = YES
-
-# The INCLUDE_PATH tag can be used to specify one or more directories that
-# contain include files that are not input files but should be processed by
-# the preprocessor.
-
-INCLUDE_PATH = @MOZ_DOC_INCLUDE_DIRS@
-
-# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard
-# patterns (like *.h and *.hpp) to filter out the header-files in the
-# directories. If left blank, the patterns specified with FILE_PATTERNS will
-# be used.
-
-INCLUDE_FILE_PATTERNS = *.h
-
-# The PREDEFINED tag can be used to specify one or more macro names that
-# are defined before the preprocessor is started (similar to the -D option of
-# gcc). The argument of the tag is a list of macros of the form: name
-# or name=definition (no spaces). If the definition and the = are
-# omitted =1 is assumed. To prevent a macro definition from being
-# undefined via #undef or recursively expanded use the := operator
-# instead of the = operator.
-
-PREDEFINED =
-
-# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then
-# this tag can be used to specify a list of macro names that should be expanded.
-# The macro definition that is found in the sources will be used.
-# Use the PREDEFINED tag if you want to use a different macro definition.
-
-EXPAND_AS_DEFINED =
-
-# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then
-# doxygen's preprocessor will remove all function-like macros that are alone
-# on a line, have an all uppercase name, and do not end with a semicolon. Such
-# function macros are typically used for boiler-plate code, and will confuse
-# the parser if not removed.
-
-SKIP_FUNCTION_MACROS = YES
-
-#---------------------------------------------------------------------------
-# Configuration::additions related to external references
-#---------------------------------------------------------------------------
-
-# The TAGFILES option can be used to specify one or more tagfiles.
-# Optionally an initial location of the external documentation
-# can be added for each tagfile. The format of a tag file without
-# this location is as follows:
-# TAGFILES = file1 file2 ...
-# Adding location for the tag files is done as follows:
-# TAGFILES = file1=loc1 "file2 = loc2" ...
-# where "loc1" and "loc2" can be relative or absolute paths or
-# URLs. If a location is present for each tag, the installdox tool
-# does not have to be run to correct the links.
-# Note that each tag file must have a unique name
-# (where the name does NOT include the path)
-# If a tag file is not located in the directory in which doxygen
-# is run, you must also specify the path to the tagfile here.
-
-TAGFILES =
-
-# When a file name is specified after GENERATE_TAGFILE, doxygen will create
-# a tag file that is based on the input files it reads.
-
-GENERATE_TAGFILE =
-
-# If the ALLEXTERNALS tag is set to YES all external classes will be listed
-# in the class index. If set to NO only the inherited external classes
-# will be listed.
-
-ALLEXTERNALS = NO
-
-# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed
-# in the modules index. If set to NO, only the current project's groups will
-# be listed.
-
-EXTERNAL_GROUPS = YES
-
-# The PERL_PATH should be the absolute path and name of the perl script
-# interpreter (i.e. the result of `which perl').
-
-PERL_PATH =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the dot tool
-#---------------------------------------------------------------------------
-
-# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will
-# generate an inheritance diagram (in HTML, RTF and LaTeX) for classes with base
-# or super classes. Setting the tag to NO turns the diagrams off. Note that
-# this option is superseded by the HAVE_DOT option below. This is only a
-# fallback. It is recommended to install and use dot, since it yields more
-# powerful graphs.
-
-CLASS_DIAGRAMS = YES
-
-# You can define message sequence charts within doxygen comments using the \msc
-# command. Doxygen will then run the mscgen tool (see
-# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the
-# documentation. The MSCGEN_PATH tag allows you to specify the directory where
-# the mscgen tool resides. If left empty the tool is assumed to be found in the
-# default search path.
-
-MSCGEN_PATH =
-
-# If set to YES, the inheritance and collaboration graphs will hide
-# inheritance and usage relations if the target is undocumented
-# or is not a class.
-
-HIDE_UNDOC_RELATIONS = YES
-
-# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
-# available from the path. This tool is part of Graphviz, a graph visualization
-# toolkit from AT&T and Lucent Bell Labs. The other options in this section
-# have no effect if this option is set to NO (the default)
-
-HAVE_DOT = YES
-
-# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen
-# will generate a graph for each documented class showing the direct and
-# indirect inheritance relations. Setting this tag to YES will force the
-# CLASS_DIAGRAMS tag to NO.
-
-CLASS_GRAPH = YES
-
-# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen
-# will generate a graph for each documented class showing the direct and
-# indirect implementation dependencies (inheritance, containment, and
-# class references variables) of the class with other documented classes.
-
-COLLABORATION_GRAPH = YES
-
-# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen
-# will generate a graph for groups, showing the direct group dependencies.
-
-GROUP_GRAPHS = YES
-
-# If the UML_LOOK tag is set to YES doxygen will generate inheritance and
-# collaboration diagrams in a style similar to the OMG's Unified Modeling
-# Language.
-
-UML_LOOK = NO
-
-# If set to YES, the inheritance and collaboration graphs will show the
-# relations between templates and their instances.
-
-TEMPLATE_RELATIONS = NO
-
-# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT
-# tags are set to YES then doxygen will generate a graph for each documented
-# file showing the direct and indirect include dependencies of the file with
-# other documented files.
-
-INCLUDE_GRAPH = YES
-
-# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and
-# HAVE_DOT tags are set to YES then doxygen will generate a graph for each
-# documented header file showing the documented files that directly or
-# indirectly include this file.
-
-INCLUDED_BY_GRAPH = YES
-
-# If the CALL_GRAPH and HAVE_DOT options are set to YES then
-# doxygen will generate a call dependency graph for every global function
-# or class method. Note that enabling this option will significantly increase
-# the time of a run. So in most cases it will be better to enable call graphs
-# for selected functions only using the \callgraph command.
-
-CALL_GRAPH = NO
-
-# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then
-# doxygen will generate a caller dependency graph for every global function
-# or class method. Note that enabling this option will significantly increase
-# the time of a run. So in most cases it will be better to enable caller
-# graphs for selected functions only using the \callergraph command.
-
-CALLER_GRAPH = NO
-
-# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen
-# will show a graphical hierarchy of all classes instead of a textual one.
-
-GRAPHICAL_HIERARCHY = YES
-
-# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES
-# then doxygen will show the dependencies a directory has on other directories
-# in a graphical way. The dependency relations are determined by the #include
-# relations between the files in the directories.
-
-DIRECTORY_GRAPH = YES
-
-# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
-# generated by dot. Possible values are png, jpg, or gif
-# If left blank png will be used.
-
-DOT_IMAGE_FORMAT = png
-
-# The tag DOT_PATH can be used to specify the path where the dot tool can be
-# found. If left blank, it is assumed the dot tool can be found in the path.
-
-DOT_PATH =
-
-# The DOTFILE_DIRS tag can be used to specify one or more directories that
-# contain dot files that are included in the documentation (see the
-# \dotfile command).
-
-DOTFILE_DIRS =
-
-# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of
-# nodes that will be shown in the graph. If the number of nodes in a graph
-# becomes larger than this value, doxygen will truncate the graph, which is
-# visualized by representing a node as a red box. Note that if the
-# number of direct children of the root node in a graph is already larger than
-# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note
-# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH.
-
-DOT_GRAPH_MAX_NODES = 50
-
-# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the
-# graphs generated by dot. A depth value of 3 means that only nodes reachable
-# from the root by following a path via at most 3 edges will be shown. Nodes
-# that lay further from the root node will be omitted. Note that setting this
-# option to 1 or 2 may greatly reduce the computation time needed for large
-# code bases. Also note that the size of a graph can be further restricted by
-# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction.
-
-MAX_DOT_GRAPH_DEPTH = 3
-
-# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent
-# background. This is enabled by default, which results in a transparent
-# background. Warning: Depending on the platform used, enabling this option
-# may lead to badly anti-aliased labels on the edges of a graph (i.e. they
-# become hard to read).
-
-DOT_TRANSPARENT = YES
-
-# Set the DOT_MULTI_TARGETS tag to YES to allow dot to generate multiple output
-# files in one run (i.e. multiple -o and -T options on the command line). This
-# makes dot run faster, but since only newer versions of dot (>1.8.10)
-# support this, this feature is disabled by default.
-
-DOT_MULTI_TARGETS = NO
-
-# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will
-# generate a legend page explaining the meaning of the various boxes and
-# arrows in the dot generated graphs.
-
-GENERATE_LEGEND = YES
-
-# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will
-# remove the intermediate dot files that are used to generate
-# the various graphs.
-
-DOT_CLEANUP = YES
-
-#---------------------------------------------------------------------------
-# Configuration::additions related to the search engine
-#---------------------------------------------------------------------------
-
-# The SEARCHENGINE tag specifies whether or not a search engine should be
-# used. If set to NO the values of all tags below this one will be ignored.
-
-SEARCHENGINE = NO
diff --git a/src/third_party/mozjs-45/config/emptyvars-js.mk.in b/src/third_party/mozjs-45/config/emptyvars-js.mk.in
deleted file mode 100644
index 388cf2a..0000000
--- a/src/third_party/mozjs-45/config/emptyvars-js.mk.in
+++ /dev/null
@@ -1 +0,0 @@
-@ALLEMPTYSUBSTS@
diff --git a/src/third_party/mozjs-45/config/emptyvars.mk.in b/src/third_party/mozjs-45/config/emptyvars.mk.in
deleted file mode 100644
index 388cf2a..0000000
--- a/src/third_party/mozjs-45/config/emptyvars.mk.in
+++ /dev/null
@@ -1 +0,0 @@
-@ALLEMPTYSUBSTS@
diff --git a/src/third_party/mozjs-45/config/expandlibs.py b/src/third_party/mozjs-45/config/expandlibs.py
deleted file mode 100644
index ac06c43..0000000
--- a/src/third_party/mozjs-45/config/expandlibs.py
+++ /dev/null
@@ -1,143 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-'''Expandlibs is a system that allows replacing some libraries with a
-descriptor file containing some linking information about them.
-
-The descriptor file format is as follows:
----8<-----
-OBJS = a.o b.o ...
-LIBS = libfoo.a libbar.a ...
---->8-----
-
-(In the example above, OBJ_SUFFIX is o and LIB_SUFFIX is a).
-
-Expandlibs also canonicalizes how to pass libraries to the linker, such
-that only the ${LIB_PREFIX}${ROOT}.${LIB_SUFFIX} form needs to be used:
-given a list of files, expandlibs will replace items with the form
-${LIB_PREFIX}${ROOT}.${LIB_SUFFIX} following these rules:
-
-- If a ${DLL_PREFIX}${ROOT}.${DLL_SUFFIX} or
- ${DLL_PREFIX}${ROOT}.${IMPORT_LIB_SUFFIX} file exists, use that instead
-- If the ${LIB_PREFIX}${ROOT}.${LIB_SUFFIX} file exists, use it
-- If a ${LIB_PREFIX}${ROOT}.${LIB_SUFFIX}.${LIB_DESC_SUFFIX} file exists,
- replace ${LIB_PREFIX}${ROOT}.${LIB_SUFFIX} with the OBJS and LIBS the
- descriptor contains. And for each of these LIBS, also apply the same
- rules.
-'''
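To make the rules above concrete, here is a simplified, self-contained sketch of the expansion of a single library argument, with hard-coded suffixes (LIB_SUFFIX '.a', DLL_SUFFIX '.so', LIB_DESC_SUFFIX '.desc') and hypothetical file names; the real, configurable logic lives in ExpandArgs below.

    import os

    def expand_one(path):
        # Sketch of the precedence described above: shared library first,
        # then a real static library, then a descriptor file.
        root = os.path.splitext(path)[0]              # e.g. 'libfoo' for 'libfoo.a'
        if os.path.exists(root + '.so'):
            return [root + '.so']
        if os.path.exists(path):
            return [path]
        if os.path.exists(path + '.desc'):
            objs = []
            with open(path + '.desc') as f:
                for line in f:
                    key, _, value = line.partition('=')
                    if key.strip() == 'OBJS':
                        objs += value.split()
                    elif key.strip() == 'LIBS':
                        for lib in value.split():
                            objs += expand_one(lib)   # recurse into LIBS
            return objs
        return [path]

    # With a libfoo.a.desc containing "OBJS = foo1.o foo2.o" and
    # "LIBS = libbar.a", expand_one('libfoo.a') yields the two objects
    # followed by the expansion of libbar.a.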
-from __future__ import with_statement
-import sys, os, errno
-import expandlibs_config as conf
-
-def ensureParentDir(file):
- '''Ensures the directory parent to the given file exists'''
- dir = os.path.dirname(file)
- if dir and not os.path.exists(dir):
- try:
- os.makedirs(dir)
- except OSError, error:
- if error.errno != errno.EEXIST:
- raise
-
-def relativize(path):
- '''Returns a path relative to the current working directory, if it is
- shorter than the given path'''
- def splitpath(path):
- dir, file = os.path.split(path)
- if os.path.splitdrive(dir)[1] == os.sep:
- return [file]
- return splitpath(dir) + [file]
-
- if not os.path.exists(path):
- return path
- curdir = splitpath(os.path.abspath(os.curdir))
- abspath = splitpath(os.path.abspath(path))
- while curdir and abspath and curdir[0] == abspath[0]:
- del curdir[0]
- del abspath[0]
- if not curdir and not abspath:
- return '.'
- relpath = os.path.join(*[os.pardir for i in curdir] + abspath)
- if len(path) > len(relpath):
- return relpath
- return path
-
-def isObject(path):
- '''Returns whether the given path points to an object file, that is,
- ends with OBJ_SUFFIX or .i_o'''
- return os.path.splitext(path)[1] in [conf.OBJ_SUFFIX, '.i_o']
-
-def isDynamicLib(path):
- '''Returns whether the given path points to a dynamic library, that is,
- ends with DLL_SUFFIX.'''
- # On mac, the xul library is named XUL, instead of libxul.dylib. Assume any
- # file by that name is a dynamic library.
- return os.path.splitext(path)[1] == conf.DLL_SUFFIX or os.path.basename(path) == 'XUL'
-
-class LibDescriptor(dict):
- KEYS = ['OBJS', 'LIBS']
-
- def __init__(self, content=None):
- '''Creates an instance of a lib descriptor, initialized with contents
- from a list of strings when given. This is intended for use with
- file.readlines()'''
- if isinstance(content, list) and all([isinstance(item, str) for item in content]):
- pass
- elif content is not None:
- raise TypeError("LibDescriptor() arg 1 must be None or a list of strings")
- super(LibDescriptor, self).__init__()
- for key in self.KEYS:
- self[key] = []
- if not content:
- return
- for key, value in [(s.strip() for s in item.split('=', 2)) for item in content if item.find('=') >= 0]:
- if key in self.KEYS:
- self[key] = value.split()
-
- def __str__(self):
- '''Serializes the lib descriptor'''
- return '\n'.join('%s = %s' % (k, ' '.join(self[k])) for k in self.KEYS if len(self[k]))
-
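A short usage sketch of the LibDescriptor class above, with hypothetical contents (exactly what reading a .desc file with file.readlines() would produce):

    from expandlibs import LibDescriptor

    desc = LibDescriptor(['OBJS = a.o b.o\n', 'LIBS = libfoo.a\n'])
    assert desc['OBJS'] == ['a.o', 'b.o']
    assert desc['LIBS'] == ['libfoo.a']
    # str() serializes back to the on-disk descriptor format:
    assert str(desc) == 'OBJS = a.o b.o\nLIBS = libfoo.a'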
-class ExpandArgs(list):
- def __init__(self, args):
- '''Creates a clone of the |args| list and performs file expansion on
- each item it contains'''
- super(ExpandArgs, self).__init__()
- self._descs = set()
- for arg in args:
- self += self._expand(arg)
-
- def _expand(self, arg):
- '''Internal function doing the actual work'''
- (root, ext) = os.path.splitext(arg)
- if ext != conf.LIB_SUFFIX or not os.path.basename(root).startswith(conf.LIB_PREFIX):
- return [relativize(arg)]
- if conf.LIB_PREFIX:
- dll = root.replace(conf.LIB_PREFIX, conf.DLL_PREFIX, 1) + conf.DLL_SUFFIX
- else:
- dll = root + conf.DLL_SUFFIX
- if os.path.exists(dll):
- if conf.IMPORT_LIB_SUFFIX:
- return [relativize(root + conf.IMPORT_LIB_SUFFIX)]
- else:
- return [relativize(dll)]
- return self._expand_desc(arg)
-
- def _expand_desc(self, arg):
- '''Internal function taking care of lib descriptor expansion only'''
- desc = os.path.abspath(arg + conf.LIBS_DESC_SUFFIX)
- if os.path.exists(desc):
- if desc in self._descs:
- return []
- self._descs.add(desc)
- with open(desc, 'r') as f:
- desc = LibDescriptor(f.readlines())
- objs = [relativize(o) for o in desc['OBJS']]
- for lib in desc['LIBS']:
- objs += self._expand(lib)
- return objs
- return [relativize(arg)]
-
-if __name__ == '__main__':
- print " ".join(ExpandArgs(sys.argv[1:]))
diff --git a/src/third_party/mozjs-45/config/expandlibs_config.py b/src/third_party/mozjs-45/config/expandlibs_config.py
deleted file mode 100644
index 8365f77..0000000
--- a/src/third_party/mozjs-45/config/expandlibs_config.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-from buildconfig import substs
-
-def normalize_suffix(suffix):
- '''Returns a normalized suffix, i.e. ensures it starts with a dot and
-    doesn't start or end with whitespace characters'''
- value = suffix.strip()
- if len(value) and not value.startswith('.'):
- value = '.' + value
- return value
-
-# Variables from the build system
-AR = substs['AR']
-AR_EXTRACT = substs['AR_EXTRACT'].replace('$(AR)', AR)
-DLL_PREFIX = substs['DLL_PREFIX']
-LIB_PREFIX = substs['LIB_PREFIX']
-OBJ_SUFFIX = normalize_suffix(substs['OBJ_SUFFIX'])
-LIB_SUFFIX = normalize_suffix(substs['LIB_SUFFIX'])
-DLL_SUFFIX = normalize_suffix(substs['DLL_SUFFIX'])
-IMPORT_LIB_SUFFIX = normalize_suffix(substs['IMPORT_LIB_SUFFIX'])
-LIBS_DESC_SUFFIX = normalize_suffix(substs['LIBS_DESC_SUFFIX'])
-EXPAND_LIBS_LIST_STYLE = substs['EXPAND_LIBS_LIST_STYLE']
-EXPAND_LIBS_ORDER_STYLE = substs['EXPAND_LIBS_ORDER_STYLE']
-LD_PRINT_ICF_SECTIONS = substs['LD_PRINT_ICF_SECTIONS']
diff --git a/src/third_party/mozjs-45/config/expandlibs_exec.py b/src/third_party/mozjs-45/config/expandlibs_exec.py
deleted file mode 100644
index c053430..0000000
--- a/src/third_party/mozjs-45/config/expandlibs_exec.py
+++ /dev/null
@@ -1,354 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-'''expandlibs_exec.py applies expandlibs rules, and some more (see below) to
-a given command line, and executes that command line with the expanded
-arguments.
-
-With the --extract argument (useful for e.g. $(AR)), it extracts object files
-from static libraries (or uses those listed in library descriptors directly).
-
-With the --uselist argument (useful for e.g. $(CC)), it replaces all object
-files with a list file. This can be used to avoid limitations in the length
-of a command line. The kind of list file format used depends on the
-EXPAND_LIBS_LIST_STYLE variable: 'list' for MSVC style lists (@file.list)
-or 'linkerscript' for GNU ld linker scripts.
-See https://bugzilla.mozilla.org/show_bug.cgi?id=584474#c59 for more details.
-
-With the --symbol-order argument, followed by a file name, it will add the
-relevant linker options to change the order in which the symbols appear
-in the resulting binary. Only works for ELF targets.
-'''
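An illustrative invocation with hypothetical arguments (this only works inside the build tree, where expandlibs_config can import buildconfig): everything after the options is the real command to run.

    # Equivalent to running, from a Makefile:
    #   python2 expandlibs_exec.py --uselist -- gcc -o prog main.o libfoo.a
    import sys
    from expandlibs_exec import main

    if __name__ == '__main__':
        sys.exit(main(['--uselist', '--',
                       'gcc', '-o', 'prog', 'main.o', 'libfoo.a']))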
-from __future__ import with_statement
-import sys
-import os
-from expandlibs import (
- ExpandArgs,
- relativize,
- isDynamicLib,
- isObject,
-)
-import expandlibs_config as conf
-from optparse import OptionParser
-import subprocess
-import tempfile
-import shutil
-import re
-from mozbuild.makeutil import Makefile
-
-# These are the insert points for a GNU ld linker script, assuming a more
-# or less "standard" default linker script. This is not a dict because
-# order is important.
-SECTION_INSERT_BEFORE = [
- ('.text', '.fini'),
- ('.rodata', '.rodata1'),
- ('.data.rel.ro', '.dynamic'),
- ('.data', '.data1'),
-]
-
-class ExpandArgsMore(ExpandArgs):
- ''' Meant to be used as 'with ExpandArgsMore(args) as ...: '''
- def __enter__(self):
- self.tmp = []
- return self
-
- def __exit__(self, type, value, tb):
- '''Automatically remove temporary files'''
- for tmp in self.tmp:
- if os.path.isdir(tmp):
- shutil.rmtree(tmp, True)
- else:
- os.remove(tmp)
-
- def extract(self):
- self[0:] = self._extract(self)
-
- def _extract(self, args):
- '''When a static library name is found, either extract its contents
- in a temporary directory or use the information found in the
- corresponding lib descriptor.
- '''
- ar_extract = conf.AR_EXTRACT.split()
- newlist = []
-
- def lookup(base, f):
- for root, dirs, files in os.walk(base):
- if f in files:
- return os.path.join(root, f)
-
- for arg in args:
- if os.path.splitext(arg)[1] == conf.LIB_SUFFIX:
- if os.path.exists(arg + conf.LIBS_DESC_SUFFIX):
- newlist += self._extract(self._expand_desc(arg))
- continue
- elif os.path.exists(arg) and (len(ar_extract) or conf.AR == 'lib'):
- tmp = tempfile.mkdtemp(dir=os.curdir)
- self.tmp.append(tmp)
- if conf.AR == 'lib':
- out = subprocess.check_output([conf.AR, '-NOLOGO', '-LIST', arg])
- files = out.splitlines()
- # If lib -list returns a list full of dlls, it's an
- # import lib.
- if all(isDynamicLib(f) for f in files):
- newlist += [arg]
- continue
- for f in files:
- subprocess.call([conf.AR, '-NOLOGO', '-EXTRACT:%s' % f, os.path.abspath(arg)], cwd=tmp)
- else:
- subprocess.call(ar_extract + [os.path.abspath(arg)], cwd=tmp)
- objs = []
- basedir = os.path.dirname(arg)
- for root, dirs, files in os.walk(tmp):
- for f in files:
- if isObject(f):
- # If the file extracted from the library also
- # exists in the directory containing the
- # library, or one of its subdirectories, use
- # that instead.
- maybe_obj = lookup(os.path.join(basedir, os.path.relpath(root, tmp)), f)
- if maybe_obj:
- objs.append(relativize(maybe_obj))
- else:
- objs.append(relativize(os.path.join(root, f)))
- newlist += sorted(objs)
- continue
- newlist += [arg]
- return newlist
-
- def makelist(self):
- '''Replaces object file names with a temporary list file, using a
- list format depending on the EXPAND_LIBS_LIST_STYLE variable
- '''
- objs = [o for o in self if isObject(o)]
- if not len(objs): return
- fd, tmp = tempfile.mkstemp(suffix=".list",dir=os.curdir)
- if conf.EXPAND_LIBS_LIST_STYLE == "linkerscript":
- content = ['INPUT("%s")\n' % obj for obj in objs]
- ref = tmp
- elif conf.EXPAND_LIBS_LIST_STYLE == "filelist":
- content = ["%s\n" % obj for obj in objs]
- ref = "-Wl,-filelist," + tmp
- elif conf.EXPAND_LIBS_LIST_STYLE == "list":
- content = ["%s\n" % obj for obj in objs]
- ref = "@" + tmp
- else:
- os.close(fd)
- os.remove(tmp)
- return
- self.tmp.append(tmp)
- f = os.fdopen(fd, "w")
- f.writelines(content)
- f.close()
- idx = self.index(objs[0])
- newlist = self[0:idx] + [ref] + [item for item in self[idx:] if item not in objs]
- self[0:] = newlist
-
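For reference, a sketch of what the three list styles handled by makelist() produce for objects a.o and b.o written to a hypothetical temporary file tmp.list:

    styles = {
        # style:          (list file contents,            command-line reference)
        'linkerscript': ('INPUT("a.o")\nINPUT("b.o")\n', 'tmp.list'),
        'filelist':     ('a.o\nb.o\n',                   '-Wl,-filelist,tmp.list'),
        'list':         ('a.o\nb.o\n',                   '@tmp.list'),
    }
    for name in sorted(styles):
        content, reference = styles[name]
        print(name + ': objects replaced by ' + reference)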
- def _getFoldedSections(self):
- '''Returns a dict about folded sections.
- When section A and B are folded into section C, the dict contains:
- { 'A': 'C',
- 'B': 'C',
- 'C': ['A', 'B'] }'''
- if not conf.LD_PRINT_ICF_SECTIONS:
- return {}
-
- proc = subprocess.Popen(self + [conf.LD_PRINT_ICF_SECTIONS], stdout = subprocess.PIPE, stderr = subprocess.PIPE)
- (stdout, stderr) = proc.communicate()
- result = {}
- # gold's --print-icf-sections output looks like the following:
- # ld: ICF folding section '.section' in file 'file.o'into '.section' in file 'file.o'
- # In terms of words, chances are this will change in the future,
- # especially considering "into" is misplaced. Splitting on quotes
- # seems safer.
- for l in stderr.split('\n'):
- quoted = l.split("'")
- if len(quoted) > 5 and quoted[1] != quoted[5]:
- result[quoted[1]] = [quoted[5]]
- if quoted[5] in result:
- result[quoted[5]].append(quoted[1])
- else:
- result[quoted[5]] = [quoted[1]]
- return result
-
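A sketch of the quote-splitting done by _getFoldedSections() above, on a line shaped like the sample quoted in the comment (section and file names are hypothetical):

    line = ("ld: ICF folding section '.text.foo' in file 'a.o'"
            "into '.text.bar' in file 'b.o'")
    quoted = line.split("'")
    # quoted[1] is the folded-away section, quoted[5] is the one it was
    # folded into; the method records the relation in both directions.
    print(quoted[1] + ' -> ' + quoted[5])   # .text.foo -> .text.bar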
- def _getOrderedSections(self, ordered_symbols):
- '''Given an ordered list of symbols, returns the corresponding list
- of sections following the order.'''
- if not conf.EXPAND_LIBS_ORDER_STYLE in ['linkerscript', 'section-ordering-file']:
- raise Exception('EXPAND_LIBS_ORDER_STYLE "%s" is not supported' % conf.EXPAND_LIBS_ORDER_STYLE)
- finder = SectionFinder([arg for arg in self if isObject(arg) or os.path.splitext(arg)[1] == conf.LIB_SUFFIX])
- folded = self._getFoldedSections()
- sections = set()
- ordered_sections = []
- for symbol in ordered_symbols:
- symbol_sections = finder.getSections(symbol)
- all_symbol_sections = []
- for section in symbol_sections:
- if section in folded:
- if isinstance(folded[section], str):
- section = folded[section]
- all_symbol_sections.append(section)
- all_symbol_sections.extend(folded[section])
- else:
- all_symbol_sections.append(section)
- for section in all_symbol_sections:
- if not section in sections:
- ordered_sections.append(section)
- sections.add(section)
- return ordered_sections
-
- def orderSymbols(self, order):
- '''Given a file containing a list of symbols, adds the appropriate
- argument to make the linker put the symbols in that order.'''
- with open(order) as file:
- sections = self._getOrderedSections([l.strip() for l in file.readlines() if l.strip()])
- split_sections = {}
- linked_sections = [s[0] for s in SECTION_INSERT_BEFORE]
- for s in sections:
- for linked_section in linked_sections:
- if s.startswith(linked_section):
- if linked_section in split_sections:
- split_sections[linked_section].append(s)
- else:
- split_sections[linked_section] = [s]
- break
- content = []
- # Order is important
- linked_sections = [s for s in linked_sections if s in split_sections]
-
- if conf.EXPAND_LIBS_ORDER_STYLE == 'section-ordering-file':
- option = '-Wl,--section-ordering-file,%s'
- content = sections
- for linked_section in linked_sections:
- content.extend(split_sections[linked_section])
- content.append('%s.*' % linked_section)
- content.append(linked_section)
-
- elif conf.EXPAND_LIBS_ORDER_STYLE == 'linkerscript':
- option = '-Wl,-T,%s'
- section_insert_before = dict(SECTION_INSERT_BEFORE)
- for linked_section in linked_sections:
- content.append('SECTIONS {')
- content.append(' %s : {' % linked_section)
- content.extend(' *(%s)' % s for s in split_sections[linked_section])
- content.append(' }')
- content.append('}')
- content.append('INSERT BEFORE %s' % section_insert_before[linked_section])
- else:
- raise Exception('EXPAND_LIBS_ORDER_STYLE "%s" is not supported' % conf.EXPAND_LIBS_ORDER_STYLE)
-
- fd, tmp = tempfile.mkstemp(dir=os.curdir)
- f = os.fdopen(fd, "w")
- f.write('\n'.join(content)+'\n')
- f.close()
- self.tmp.append(tmp)
- self.append(option % tmp)
-
-class SectionFinder(object):
-    '''Instances of this class allow mapping symbol names to sections in
- object files.'''
-
- def __init__(self, objs):
- '''Creates an instance, given a list of object files.'''
- if not conf.EXPAND_LIBS_ORDER_STYLE in ['linkerscript', 'section-ordering-file']:
- raise Exception('EXPAND_LIBS_ORDER_STYLE "%s" is not supported' % conf.EXPAND_LIBS_ORDER_STYLE)
- self.mapping = {}
- for obj in objs:
- if not isObject(obj) and os.path.splitext(obj)[1] != conf.LIB_SUFFIX:
- raise Exception('%s is not an object nor a static library' % obj)
- for symbol, section in SectionFinder._getSymbols(obj):
- sym = SectionFinder._normalize(symbol)
- if sym in self.mapping:
- if not section in self.mapping[sym]:
- self.mapping[sym].append(section)
- else:
- self.mapping[sym] = [section]
-
- def getSections(self, symbol):
- '''Given a symbol, returns a list of sections containing it or the
- corresponding thunks. When the given symbol is a thunk, returns the
- list of sections containing its corresponding normal symbol and the
- other thunks for that symbol.'''
- sym = SectionFinder._normalize(symbol)
- if sym in self.mapping:
- return self.mapping[sym]
- return []
-
- @staticmethod
- def _normalize(symbol):
- '''For normal symbols, return the given symbol. For thunks, return
- the corresponding normal symbol.'''
- if re.match('^_ZThn[0-9]+_', symbol):
- return re.sub('^_ZThn[0-9]+_', '_Z', symbol)
- return symbol
-
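For example (hypothetical mangled name), a non-virtual thunk is mapped back to the symbol it thunks to, exactly as _normalize() does:

    import re
    # _ZThn16_N3Foo3barEv is the thunk for _ZN3Foo3barEv.
    print(re.sub('^_ZThn[0-9]+_', '_Z', '_ZThn16_N3Foo3barEv'))   # _ZN3Foo3barEv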
- @staticmethod
- def _getSymbols(obj):
- '''Returns a list of (symbol, section) contained in the given object
- file.'''
- proc = subprocess.Popen(['objdump', '-t', obj], stdout = subprocess.PIPE, stderr = subprocess.PIPE)
- (stdout, stderr) = proc.communicate()
- syms = []
- for line in stdout.splitlines():
- # Each line has the following format:
- # <addr> [lgu!][w ][C ][W ][Ii ][dD ][FfO ] <section>\t<length> <symbol>
- tmp = line.split(' ',1)
- # This gives us ["<addr>", "[lgu!][w ][C ][W ][Ii ][dD ][FfO ] <section>\t<length> <symbol>"]
- # We only need to consider cases where "<section>\t<length> <symbol>" is present,
- # and where the [FfO] flag is either F (function) or O (object).
- if len(tmp) > 1 and len(tmp[1]) > 6 and tmp[1][6] in ['O', 'F']:
- tmp = tmp[1][8:].split()
- # That gives us ["<section>","<length>", "<symbol>"]
- syms.append((tmp[-1], tmp[0]))
- return syms
-
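A sketch of how one objdump -t line of the format described above is reduced to a (symbol, section) pair (the sample line is hypothetical):

    line = '0000000000000000 g     F .text\t0000000000000012 main'
    tmp = line.split(' ', 1)
    assert tmp[1][6] == 'F'             # flag column: F = function, O = object
    fields = tmp[1][8:].split()         # ['.text', '0000000000000012', 'main']
    print((fields[-1], fields[0]))      # ('main', '.text')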
-def print_command(out, args):
- print >>out, "Executing: " + " ".join(args)
- for tmp in [f for f in args.tmp if os.path.isfile(f)]:
- print >>out, tmp + ":"
- with open(tmp) as file:
- print >>out, "".join([" " + l for l in file.readlines()])
- out.flush()
-
-def main(args, proc_callback=None):
- parser = OptionParser()
- parser.add_option("--extract", action="store_true", dest="extract",
- help="when a library has no descriptor file, extract it first, when possible")
- parser.add_option("--uselist", action="store_true", dest="uselist",
- help="use a list file for objects when executing a command")
- parser.add_option("--verbose", action="store_true", dest="verbose",
- help="display executed command and temporary files content")
- parser.add_option("--symbol-order", dest="symbol_order", metavar="FILE",
- help="use the given list of symbols to order symbols in the resulting binary when using with a linker")
-
- (options, args) = parser.parse_args(args)
-
- with ExpandArgsMore(args) as args:
- if options.extract:
- args.extract()
- if options.symbol_order:
- args.orderSymbols(options.symbol_order)
- if options.uselist:
- args.makelist()
-
- if options.verbose:
- print_command(sys.stderr, args)
- try:
- proc = subprocess.Popen(args, stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
- if proc_callback:
- proc_callback(proc)
- except Exception, e:
- print >>sys.stderr, 'error: Launching', args, ':', e
- raise e
- (stdout, stderr) = proc.communicate()
- if proc.returncode and not options.verbose:
- print_command(sys.stderr, args)
- sys.stderr.write(stdout)
- sys.stderr.flush()
- if proc.returncode:
- return proc.returncode
- return 0
-
-if __name__ == '__main__':
- exit(main(sys.argv[1:]))
diff --git a/src/third_party/mozjs-45/config/expandlibs_gen.py b/src/third_party/mozjs-45/config/expandlibs_gen.py
deleted file mode 100644
index b1de63c..0000000
--- a/src/third_party/mozjs-45/config/expandlibs_gen.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-'''Given a list of object files and library names, prints a library
-descriptor to standard output'''
-
-from __future__ import with_statement
-import sys
-import os
-import expandlibs_config as conf
-from expandlibs import LibDescriptor, isObject, ensureParentDir
-from optparse import OptionParser
-
-def generate(args):
- desc = LibDescriptor()
- for arg in args:
- if isObject(arg):
- if os.path.exists(arg):
- desc['OBJS'].append(os.path.abspath(arg))
- else:
- raise Exception("File not found: %s" % arg)
- elif os.path.splitext(arg)[1] == conf.LIB_SUFFIX:
- if os.path.exists(arg) or os.path.exists(arg + conf.LIBS_DESC_SUFFIX):
- desc['LIBS'].append(os.path.abspath(arg))
- else:
- raise Exception("File not found: %s" % arg)
- return desc
-
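Usage sketch (hypothetical files, and only meaningful inside the build tree where expandlibs_config is importable); the __main__ driver below writes str(generate(args)) to the file passed with -o:

    from expandlibs_gen import generate

    # a.o, b.o and libbar.a must exist; LIB_SUFFIX is assumed to be '.a'.
    desc = generate(['a.o', 'b.o', 'libbar.a'])
    print(str(desc))
    # OBJS = /abs/build/dir/a.o /abs/build/dir/b.o
    # LIBS = /abs/build/dir/libbar.a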
-if __name__ == '__main__':
- parser = OptionParser()
- parser.add_option("-o", dest="output", metavar="FILE",
- help="send output to the given file")
-
- (options, args) = parser.parse_args()
- if not options.output:
- raise Exception("Missing option: -o")
-
- ensureParentDir(options.output)
- with open(options.output, 'w') as outfile:
- print >>outfile, generate(args)
diff --git a/src/third_party/mozjs-45/config/external/ffi/Makefile.in b/src/third_party/mozjs-45/config/external/ffi/Makefile.in
deleted file mode 100644
index 19e80d8..0000000
--- a/src/third_party/mozjs-45/config/external/ffi/Makefile.in
+++ /dev/null
@@ -1,12 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-ifndef MOZ_NATIVE_FFI
-
-include $(topsrcdir)/config/config.mk
-
-$(STATIC_LIBS):
- $(MAKE) -C $(DEPTH)/js/src/ctypes/libffi
-
-endif
diff --git a/src/third_party/mozjs-45/config/external/ffi/moz.build b/src/third_party/mozjs-45/config/external/ffi/moz.build
deleted file mode 100644
index 725b483..0000000
--- a/src/third_party/mozjs-45/config/external/ffi/moz.build
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*-
-# vim: set filetype=python:
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-Library('ffi')
-
-if CONFIG['MOZ_NATIVE_FFI']:
- OS_LIBS += CONFIG['MOZ_FFI_LIBS']
-else:
- if CONFIG['_MSC_VER']:
- prefix = 'lib'
- else:
- prefix = ''
- USE_LIBS += [
- 'static:/js/src/ctypes/libffi/.libs/%sffi' % prefix,
- ]
diff --git a/src/third_party/mozjs-45/config/external/freetype2/Makefile.in b/src/third_party/mozjs-45/config/external/freetype2/Makefile.in
deleted file mode 100644
index 95a9871..0000000
--- a/src/third_party/mozjs-45/config/external/freetype2/Makefile.in
+++ /dev/null
@@ -1,12 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-ifdef MOZ_TREE_FREETYPE
-
-include $(topsrcdir)/config/config.mk
-
-$(STATIC_LIBS):
- $(MAKE) -C $(DEPTH)/modules/freetype2
-
-endif
diff --git a/src/third_party/mozjs-45/config/external/freetype2/moz.build b/src/third_party/mozjs-45/config/external/freetype2/moz.build
deleted file mode 100644
index fc52d25..0000000
--- a/src/third_party/mozjs-45/config/external/freetype2/moz.build
+++ /dev/null
@@ -1,14 +0,0 @@
-# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*-
-# vim: set filetype=python:
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-Library('freetype')
-
-if CONFIG['MOZ_TREE_FREETYPE']:
- USE_LIBS += [
- 'static:/modules/freetype2/.libs/freetype',
- ]
-else:
- OS_LIBS += CONFIG['FT2_LIBS']
diff --git a/src/third_party/mozjs-45/config/external/icu/Makefile.in b/src/third_party/mozjs-45/config/external/icu/Makefile.in
deleted file mode 100644
index 7781b23..0000000
--- a/src/third_party/mozjs-45/config/external/icu/Makefile.in
+++ /dev/null
@@ -1,62 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-# Ensure that this happens before including rules.mk
-ifdef USE_ICU
- ifndef MOZ_NATIVE_ICU
- # Library names: On Windows, ICU uses modified library names for static
- # and debug libraries.
- ifdef MOZ_SHARED_ICU
- ifeq ($(OS_ARCH),WINNT)
- ICU_FILES := $(foreach libname,$(ICU_LIB_NAMES),$(DEPTH)/intl/icu/target/lib/$(libname)$(MOZ_ICU_DBG_SUFFIX)$(MOZ_ICU_VERSION).dll)
- else # ! WINNT
- ifeq ($(OS_ARCH),Darwin)
- ICU_FILES := $(foreach libname,$(ICU_LIB_NAMES),$(DEPTH)/intl/icu/target/lib/$(DLL_PREFIX)$(libname).$(MOZ_ICU_VERSION)$(DLL_SUFFIX))
- else # ! Darwin
- ICU_FILES := $(foreach libname,$(ICU_LIB_NAMES),$(DEPTH)/intl/icu/target/lib/$(DLL_PREFIX)$(libname)$(DLL_SUFFIX).$(MOZ_ICU_VERSION))
- endif
- endif # WINNT
- ifdef ICU_FILES
- ICU_DEST := $(DIST)/bin
- INSTALL_TARGETS += ICU
- $(ICU_FILES): buildicu
- ICU_TARGET := target
- endif
- else # !MOZ_SHARED_ICU
- ifeq ($(OS_ARCH),WINNT)
- ICU_LIB_RENAME = $(foreach libname,$(ICU_LIB_NAMES),\
- cp -p $(DEPTH)/intl/icu/target/lib/s$(libname)$(MOZ_ICU_DBG_SUFFIX).lib $(DEPTH)/intl/icu/target/lib/$(libname)$(MOZ_ICU_DBG_SUFFIX).lib;)
- endif
- endif # MOZ_SHARED_ICU
- endif # !MOZ_NATIVE_ICU
-endif # USE_ICU
-
-include $(topsrcdir)/config/rules.mk
-
-ifdef USE_ICU
-ifndef MOZ_NATIVE_ICU
-target:: buildicu
-$(STATIC_LIBS): buildicu
-
-# - Force ICU to use the standard suffix for object files because expandlibs
-# will discard all files with a non-standard suffix (bug 857450).
-# - Options for genrb: -k strict parsing; -R omit collation tailoring rules.
-buildicu::
-# ICU's build system is full of races, so force non-parallel build.
-# Msys screws up GENRBOPTS when it contains spaces, so all genrb flags need
-# to be stuck together. See https://bugzilla.mozilla.org/show_bug.cgi?id=1034594#c34
-ifdef CROSS_COMPILE
- +ASAN_OPTIONS=detect_leaks=0 $(MAKE) -j1 -C $(DEPTH)/intl/icu/host STATIC_O=$(OBJ_SUFFIX) GENRBOPTS='-kRC'
-endif
- +ASAN_OPTIONS=detect_leaks=0 $(MAKE) -j1 -C $(DEPTH)/intl/icu/target STATIC_O=$(OBJ_SUFFIX) GENRBOPTS='-kR'
- $(ICU_LIB_RENAME)
-
-distclean clean::
-ifdef CROSS_COMPILE
- +$(MAKE) -C $(DEPTH)/intl/icu/host $@ STATIC_O=$(OBJ_SUFFIX)
-endif
- +$(MAKE) -C $(DEPTH)/intl/icu/target $@ STATIC_O=$(OBJ_SUFFIX)
-
-endif
-endif
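
The Windows/Darwin/ELF branches above pick the shared-library file names that the build later copies into dist/bin. A small Python sketch of that naming rule, assuming 'lib', '.dylib' and '.so' for DLL_PREFIX/DLL_SUFFIX on the non-Windows branches:

    def icu_shared_name(libname, version, os_arch, dbg_suffix=''):
        # Mirrors ICU_FILES: Windows folds the version into the basename,
        # Darwin puts it before the extension, ELF appends it after .so.
        if os_arch == 'WINNT':
            return '%s%s%s.dll' % (libname, dbg_suffix, version)
        if os_arch == 'Darwin':
            return 'lib%s.%s.dylib' % (libname, version)
        return 'lib%s.so.%s' % (libname, version)

    # e.g. icu_shared_name('icuuc', '56', 'Linux') == 'libicuuc.so.56'
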
diff --git a/src/third_party/mozjs-45/config/external/icu/moz.build b/src/third_party/mozjs-45/config/external/icu/moz.build
deleted file mode 100644
index ef8a706..0000000
--- a/src/third_party/mozjs-45/config/external/icu/moz.build
+++ /dev/null
@@ -1,27 +0,0 @@
-# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*-
-# vim: set filetype=python:
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-Library('icu')
-
-if CONFIG['MOZ_NATIVE_ICU']:
- OS_LIBS += CONFIG['MOZ_ICU_LIBS']
-else:
- # Order needs to be preserved
- for l in CONFIG['ICU_LIB_NAMES']:
- USE_LIBS += ['%s/intl/icu/target/lib/%s%s' % (
- 'static:' if not CONFIG['MOZ_SHARED_ICU'] else '',
- l,
- CONFIG['MOZ_ICU_DBG_SUFFIX']
- )]
-
- if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gonk':
- OS_LIBS += [
- 'gabi++',
- ]
- if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android' and CONFIG['MOZ_ANDROID_CXX_STL'] == 'mozstlport':
- USE_LIBS += [
- 'gabi++'
- ]
diff --git a/src/third_party/mozjs-45/config/external/lgpllibs/lgpllibs.def b/src/third_party/mozjs-45/config/external/lgpllibs/lgpllibs.def
deleted file mode 100644
index 359d97e..0000000
--- a/src/third_party/mozjs-45/config/external/lgpllibs/lgpllibs.def
+++ /dev/null
@@ -1,10 +0,0 @@
-; This Source Code Form is subject to the terms of the Mozilla Public
-; License, v. 2.0. If a copy of the MPL was not distributed with this
-; file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-LIBRARY lgpllibs.dll
-
-EXPORTS
- av_rdft_init
- av_rdft_calc
- av_rdft_end
diff --git a/src/third_party/mozjs-45/config/external/lgpllibs/moz.build b/src/third_party/mozjs-45/config/external/lgpllibs/moz.build
deleted file mode 100644
index 27df35d..0000000
--- a/src/third_party/mozjs-45/config/external/lgpllibs/moz.build
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*-
-# vim: set filetype=python:
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-# The lgpllibs library stores symbols from third-party LGPL licensed libraries,
-# such as libav and libsoundtouch. It fulfills the requirement of dynamically
-# linking these symbols into gecko.
-#
-# Any library added here should also be reflected in the about:license page.
-
-GeckoSharedLibrary('lgpllibs', linkage=None)
-SHARED_LIBRARY_NAME = 'lgpllibs'
-
-if CONFIG['MOZ_LIBAV_FFT']:
- DIRS += ['/media/libav']
- DEFFILE = SRCDIR + '/lgpllibs.def'
diff --git a/src/third_party/mozjs-45/config/external/moz.build b/src/third_party/mozjs-45/config/external/moz.build
deleted file mode 100644
index 769a33d..0000000
--- a/src/third_party/mozjs-45/config/external/moz.build
+++ /dev/null
@@ -1,61 +0,0 @@
-# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*-
-# vim: set filetype=python:
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-external_dirs = []
-
-DIRS += [
- 'lgpllibs',
- 'sqlite',
-]
-if not CONFIG['MOZ_NATIVE_JPEG']:
- external_dirs += ['media/libjpeg']
-
-if CONFIG['MOZ_UPDATER']:
- if not CONFIG['MOZ_NATIVE_BZ2']:
- external_dirs += ['modules/libbz2']
-
-# There's no "native brotli" yet, but probably in the future...
-external_dirs += ['modules/brotli']
-
-if CONFIG['MOZ_VORBIS']:
- external_dirs += ['media/libvorbis']
-
-if CONFIG['MOZ_TREMOR']:
- external_dirs += ['media/libtremor']
-
-if CONFIG['MOZ_WEBM']:
- external_dirs += ['media/libnestegg']
-
-if CONFIG['MOZ_WEBM_ENCODER']:
- external_dirs += ['media/libmkv']
-
-if CONFIG['MOZ_VPX'] and not CONFIG['MOZ_NATIVE_LIBVPX']:
- external_dirs += ['media/libvpx']
-
-if not CONFIG['MOZ_NATIVE_PNG']:
- external_dirs += ['media/libpng']
-
-if CONFIG['CPU_ARCH'] == 'arm':
- external_dirs += ['media/openmax_dl']
-
-if CONFIG['MOZ_WEBSPEECH_POCKETSPHINX']:
- external_dirs += [
- 'media/sphinxbase',
- 'media/pocketsphinx',
- ]
-
-external_dirs += [
- 'media/kiss_fft',
- 'media/libcubeb',
- 'media/libogg',
- 'media/libopus',
- 'media/libtheora',
- 'media/libspeex_resampler',
- 'media/libstagefright',
- 'media/libsoundtouch',
-]
-
-DIRS += ['../../' + i for i in external_dirs]
diff --git a/src/third_party/mozjs-45/config/external/nspr/Makefile.in b/src/third_party/mozjs-45/config/external/nspr/Makefile.in
deleted file mode 100644
index c4349f3..0000000
--- a/src/third_party/mozjs-45/config/external/nspr/Makefile.in
+++ /dev/null
@@ -1,57 +0,0 @@
-#
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-include $(topsrcdir)/config/rules.mk
-
-ifdef MOZ_BUILD_NSPR
-
-# Copy NSPR to the SDK
-ABS_DIST = $(abspath $(DIST))
-
-ifdef MOZ_FOLD_LIBS
-# Trick the nspr build system into not building shared libraries.
-# bug #851869.
-EXTRA_MAKE_FLAGS := SHARED_LIBRARY= IMPORT_LIBRARY= SHARED_LIB_PDB=
-
-# Work around libVersionPoint conflict between all three libraries.
-# See bug #838566.
-EXTRA_MAKE_FLAGS += XP_DEFINE=-DlibVersionPoint='libVersionPoint$$(LIBRARY_NAME)'
-else
-# nspr's make export compiles and links everything, but linking can't happen
-# during export on platforms where nspr is linked against mozcrt/mozglue.
-export:: EXTRA_MAKE_FLAGS += SHARED_LIBRARY= IMPORT_LIBRARY= SHARED_LIB_PDB=
-endif
-
-MOZ_BUILDID := $(shell cat $(DEPTH)/config/buildid)
-
-# The NSPR build system uses build-time generated dates for public API
-# exposed data structures. Use the buildid as forced date, to avoid
-# having to deal with what changing NSPR itself might mean.
-
-# SH_DATE is a date with the format "%Y-%m-%d %T"
-EXTRA_MAKE_FLAGS += SH_DATE="$(shell $(PYTHON) -c 'd = "$(MOZ_BUILDID)"; print d[0:4]+"-"+d[4:6]+"-"+d[6:8]+" "+d[8:10]+":"+d[10:12]+":"+d[12:14]')"
-
-# SH_NOW is a date as a unix timestamp in µseconds
-EXTRA_MAKE_FLAGS += SH_NOW="$(shell $(PYTHON) -c 'import time, calendar; print calendar.timegm(time.strptime("$(MOZ_BUILDID)", "%Y%m%d%H%M%S"))')000000"
-
-clean distclean export::
- $(MAKE) -C $(DEPTH)/nsprpub $@ $(EXTRA_MAKE_FLAGS)
-
-target::
-# nspr's libs and install rule re-export headers, and that can race with other
-# compilations, so use a separate directory here. The headers are exported
-# during export anyways.
- $(MAKE) -C $(DEPTH)/nsprpub libs $(EXTRA_MAKE_FLAGS) dist_includedir=$(ABS_DIST)/nspr-include
- $(MAKE) -C $(DEPTH)/nsprpub install prefix=$(ABS_DIST)/sdk exec_prefix=$(ABS_DIST)/sdk bindir=$(ABS_DIST)/sdk/dummy includedir=$(ABS_DIST)/nspr-include libdir=$(ABS_DIST)/sdk/lib datadir=$(ABS_DIST)/sdk/dummy DESTDIR= $(EXTRA_MAKE_FLAGS)
- $(INSTALL) $(DEPTH)/nsprpub/config/nspr-config $(DIST)/sdk/bin
- $(RM) -rf $(DIST)/sdk/dummy
-ifneq (,$(filter WINNT,$(OS_ARCH))) # {
- $(RM) -f $(DIST)/sdk/lib/$(DLL_PREFIX)nspr4$(DLL_SUFFIX) $(DIST)/sdk/lib/$(DLL_PREFIX)plc4$(DLL_SUFFIX) $(DIST)/sdk/lib/$(DLL_PREFIX)plds4$(DLL_SUFFIX)
- $(RM) -f $(DIST)/sdk/lib/$(LIB_PREFIX)nspr4_s.$(LIB_SUFFIX) $(DIST)/sdk/lib/$(LIB_PREFIX)plc4_s.$(LIB_SUFFIX) $(DIST)/sdk/lib/$(LIB_PREFIX)plds4_s.$(LIB_SUFFIX)
-else # } {
- $(RM) -f $(DIST)/sdk/lib/$(LIB_PREFIX)nspr4.$(LIB_SUFFIX) $(DIST)/sdk/lib/$(LIB_PREFIX)plc4.$(LIB_SUFFIX) $(DIST)/sdk/lib/$(LIB_PREFIX)plds4.$(LIB_SUFFIX)
-endif # }
-
-endif
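
The two inline python one-liners above turn the 14-digit buildid into the forced NSPR date stamps. The same computation as a standalone Python sketch (nspr_build_stamps is a hypothetical helper name):

    import calendar
    import time

    def nspr_build_stamps(buildid):
        # SH_DATE: "%Y-%m-%d %T" formatted from a YYYYMMDDHHMMSS buildid.
        sh_date = '%s-%s-%s %s:%s:%s' % (
            buildid[0:4], buildid[4:6], buildid[6:8],
            buildid[8:10], buildid[10:12], buildid[12:14])
        # SH_NOW: the same instant as a unix timestamp, scaled to microseconds
        # (the Makefile appends the literal string "000000" instead).
        sh_now = calendar.timegm(time.strptime(buildid, '%Y%m%d%H%M%S')) * 1000000
        return sh_date, sh_now

    # e.g. nspr_build_stamps('20200114123456')[0] == '2020-01-14 12:34:56'
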
diff --git a/src/third_party/mozjs-45/config/external/nspr/moz.build b/src/third_party/mozjs-45/config/external/nspr/moz.build
deleted file mode 100644
index 424119e..0000000
--- a/src/third_party/mozjs-45/config/external/nspr/moz.build
+++ /dev/null
@@ -1,21 +0,0 @@
-# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*-
-# vim: set filetype=python:
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-Library('nspr')
-
-if CONFIG['MOZ_FOLD_LIBS']:
- # When folding libraries, nspr is actually in the nss library.
- USE_LIBS += [
- 'nss',
- ]
-elif CONFIG['MOZ_BUILD_NSPR']:
- USE_LIBS += [
- '/nsprpub/lib/ds/plds4',
- '/nsprpub/lib/libc/src/plc4',
- '/nsprpub/pr/src/nspr4',
- ]
-else:
- OS_LIBS += CONFIG['NSPR_LIBS']
diff --git a/src/third_party/mozjs-45/config/external/nss/Makefile.in b/src/third_party/mozjs-45/config/external/nss/Makefile.in
deleted file mode 100644
index 713c66a..0000000
--- a/src/third_party/mozjs-45/config/external/nss/Makefile.in
+++ /dev/null
@@ -1,499 +0,0 @@
-#
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-ifndef MOZ_NATIVE_NSS
-
-CC_WRAPPER =
-CXX_WRAPPER =
-
-default::
-
-include $(topsrcdir)/config/makefiles/functions.mk
-
-NSS_LIBS = \
- nss3 \
- nssutil3 \
- smime3 \
- ssl3 \
- $(NULL)
-
-ifdef MOZ_FOLD_LIBS
-NSS_DLLS = $(LIBRARY_NAME)
-else
-NSS_DLLS = $(NSS_LIBS)
-endif
-
-NSS_EXTRA_DLLS = \
- nssckbi \
- softokn3 \
- $(NULL)
-
-ifndef NSS_DISABLE_DBM
-NSS_EXTRA_DLLS += nssdbm3
-endif
-
-SDK_LIBS = crmf
-
-ifneq (,$(filter WINNT,$(OS_ARCH)))
-SDK_LIBS += $(NSS_DLLS)
-endif
-
-# Default
-HAVE_FREEBL_LIBS = 1
-
-# 32-bit HP-UX PA-RISC
-ifeq ($(OS_ARCH), HP-UX)
-ifneq ($(OS_TEST), ia64)
-ifndef HAVE_64BIT_BUILD
-HAVE_FREEBL_LIBS =
-HAVE_FREEBL_LIBS_32INT32 = 1
-HAVE_FREEBL_LIBS_32FPU = 1
-endif
-endif
-endif
-
-# SunOS SPARC
-ifeq ($(OS_ARCH), SunOS)
-ifneq (86,$(findstring 86,$(OS_TEST)))
-ifdef HAVE_64BIT_BUILD
-HAVE_FREEBL_LIBS =
-HAVE_FREEBL_LIBS_64 = 1
-else
-HAVE_FREEBL_LIBS =
-HAVE_FREEBL_LIBS_32FPU = 1
-HAVE_FREEBL_LIBS_32INT64 = 1
-endif
-endif
-endif
-
-ifdef HAVE_FREEBL_LIBS
-NSS_EXTRA_DLLS += freebl3
-endif
-ifdef HAVE_FREEBL_LIBS_32INT32
-NSS_EXTRA_DLLS += freebl_32int_3
-endif
-ifdef HAVE_FREEBL_LIBS_32FPU
-NSS_EXTRA_DLLS += freebl_32fpu_3
-endif
-ifdef HAVE_FREEBL_LIBS_32INT64
-NSS_EXTRA_DLLS += freebl_32int64_3
-endif
-ifdef HAVE_FREEBL_LIBS_64
-NSS_EXTRA_DLLS += freebl_64int_3
-NSS_EXTRA_DLLS += freebl_64fpu_3
-endif
-
-ABS_DIST := $(abspath $(DIST))
-ifeq ($(HOST_OS_ARCH),WINNT)
-ifdef CYGDRIVE_MOUNT
-ABS_DIST := $(shell cygpath -w $(ABS_DIST) | sed -e 's|\\|/|g')
-endif
-ifneq (,$(filter mingw%,$(host_os)))
-ABS_DIST := $(shell cd $(DIST) && pwd -W)
-endif
-endif
-# For all variables such as DLLFLAGS, that may contain $(DIST)
-DIST := $(ABS_DIST)
-NSPR_INCLUDE_DIR = $(firstword $(filter -I%,$(NSPR_CFLAGS)))
-ifneq (,$(strip $(NSPR_INCLUDE_DIR)))
-NSPR_INCLUDE_DIR := $(subst -I,,$(subst -I$(DIST),-I$(ABS_DIST),$(NSPR_INCLUDE_DIR)))
-else
-NSPR_INCLUDE_DIR = $(ABS_DIST)/include/nspr
-endif
-NSPR_LIB_DIR = $(firstword $(filter -L%,$(NSPR_LIBS)))
-ifneq (,$(strip $(NSPR_LIB_DIR)))
-NSPR_LIB_DIR := $(subst -L,,$(subst -L$(DIST),-L$(ABS_DIST),$(NSPR_LIB_DIR)))
-else
-NSPR_LIB_DIR = $(ABS_DIST)/lib
-endif
-
-# To get debug symbols from NSS
-export MOZ_DEBUG_SYMBOLS
-
-DEFAULT_GMAKE_FLAGS =
-DEFAULT_GMAKE_FLAGS += CC='$(CC)'
-DEFAULT_GMAKE_FLAGS += SOURCE_MD_DIR=$(ABS_DIST)
-DEFAULT_GMAKE_FLAGS += SOURCE_MDHEADERS_DIR=$(NSPR_INCLUDE_DIR)
-DEFAULT_GMAKE_FLAGS += DIST=$(ABS_DIST)
-DEFAULT_GMAKE_FLAGS += NSPR_INCLUDE_DIR=$(NSPR_INCLUDE_DIR)
-DEFAULT_GMAKE_FLAGS += NSPR_LIB_DIR=$(NSPR_LIB_DIR)
-DEFAULT_GMAKE_FLAGS += MOZILLA_CLIENT=1
-DEFAULT_GMAKE_FLAGS += NO_MDUPDATE=1
-DEFAULT_GMAKE_FLAGS += NSS_ENABLE_ECC=1
-ifeq ($(OS_ARCH)_$(GNU_CC),WINNT_1)
-DEFAULT_GMAKE_FLAGS += OS_DLLFLAGS='-static-libgcc' NSPR31_LIB_PREFIX=lib
-endif
-ifndef MOZ_NATIVE_SQLITE
-ifdef MOZ_FOLD_LIBS
-DEFAULT_GMAKE_FLAGS += SQLITE_LIB_NAME=nss3
-else
-DEFAULT_GMAKE_FLAGS += SQLITE_LIB_NAME=mozsqlite3
-DEFAULT_GMAKE_FLAGS += SQLITE_LIB_DIR=$(ABS_DIST)/../config/external/sqlite
-endif # MOZ_FOLD_LIBS
-DEFAULT_GMAKE_FLAGS += SQLITE_INCLUDE_DIR=$(ABS_DIST)/include
-endif
-ifdef NSS_DISABLE_DBM
-DEFAULT_GMAKE_FLAGS += NSS_DISABLE_DBM=1
-endif
-# Hack to force NSS build system to use "normal" object directories
-DEFAULT_GMAKE_FLAGS += topsrcdir='$(topsrcdir)'
-# topsrcdir can't be expanded here because msys path mangling likes to break
-# paths in that case.
-DEFAULT_GMAKE_FLAGS += BUILD='$(MOZ_BUILD_ROOT)/security/$$(subst $$(topsrcdir)/security/,,$$(CURDIR))'
-DEFAULT_GMAKE_FLAGS += BUILD_TREE='$$(BUILD)' OBJDIR='$$(BUILD)' DEPENDENCIES='$$(BUILD)/.deps' SINGLE_SHLIB_DIR='$$(BUILD)'
-DEFAULT_GMAKE_FLAGS += SOURCE_XP_DIR=$(ABS_DIST)
-ifndef MOZ_DEBUG
-DEFAULT_GMAKE_FLAGS += BUILD_OPT=1 OPT_CODE_SIZE=1
-endif
-ifdef GNU_CC
-DEFAULT_GMAKE_FLAGS += NS_USE_GCC=1
-else
-DEFAULT_GMAKE_FLAGS += NS_USE_GCC=
-endif
-ifdef USE_N32
-# It is not really necessary to specify USE_PTHREADS=1. USE_PTHREADS
-# merely adds _PTH to coreconf's OBJDIR name.
-DEFAULT_GMAKE_FLAGS += USE_N32=1 USE_PTHREADS=1
-endif
-ifdef HAVE_64BIT_BUILD
-DEFAULT_GMAKE_FLAGS += USE_64=1
-endif
-ifeq ($(OS_ARCH),WINNT)
-DEFAULT_GMAKE_FLAGS += OS_TARGET=WIN95
-ifdef MOZ_DEBUG
-ifndef MOZ_NO_DEBUG_RTL
-DEFAULT_GMAKE_FLAGS += USE_DEBUG_RTL=1
-endif
-endif
-endif # WINNT
-ifeq ($(OS_ARCH),Darwin)
-# Make nsinstall use absolute symlinks by default when building NSS
-# for Mozilla on Mac OS X. (Bugzilla bug 193164)
-ifndef NSDISTMODE
-DEFAULT_GMAKE_FLAGS += NSDISTMODE=absolute_symlink
-endif
-ifdef MACOS_SDK_DIR
-DEFAULT_GMAKE_FLAGS += MACOS_SDK_DIR=$(MACOS_SDK_DIR)
-endif
-endif
-
-# Turn off TLS compression support because it requires system zlib.
-# See bug 580679 comment 18.
-DEFAULT_GMAKE_FLAGS += NSS_ENABLE_ZLIB=
-
-# Disable building of the test programs in security/nss/lib/zlib
-DEFAULT_GMAKE_FLAGS += PROGRAMS=
-
-# Disable creating .chk files. They will be generated from packager.mk
-# When bug 681624 lands, we can replace CHECKLOC= with SKIP_SHLIBSIGN=1
-DEFAULT_GMAKE_FLAGS += CHECKLOC=
-
-ifdef CROSS_COMPILE
-
-DEFAULT_GMAKE_FLAGS += \
- NATIVE_CC='$(HOST_CC)' \
- CC='$(CC)' \
- CCC='$(CXX)' \
- LINK='$(LD)' \
- AS='$(AS)' \
- AR='$(AR) $(AR_FLAGS:$@=$$@)' \
- RANLIB='$(RANLIB)' \
- RC='$(RC) $(RCFLAGS)' \
- OS_ARCH='$(OS_ARCH)' \
- OS_TEST='$(OS_TEST)' \
- CPU_ARCH='$(TARGET_CPU)' \
- $(NULL)
-
-# Android has pthreads integrated into -lc, so OS_PTHREAD is set to nothing
-ifeq ($(OS_TARGET), Android)
-DEFAULT_GMAKE_FLAGS += \
- OS_RELEASE='2.6' \
- OS_PTHREAD= \
- $(NULL)
-
-DEFAULT_GMAKE_FLAGS += ARCHFLAG='$(filter-out -W%,$(CFLAGS)) -DCHECK_FORK_GETPID $(addprefix -DANDROID_VERSION=,$(ANDROID_VERSION)) -include $(topsrcdir)/security/manager/android_stub.h'
-endif
-endif
-
-ifdef WRAP_LDFLAGS
-NSS_EXTRA_LDFLAGS += $(WRAP_LDFLAGS)
-endif
-
-ifdef MOZ_GLUE_WRAP_LDFLAGS
-NSS_EXTRA_LDFLAGS += $(SHARED_LIBS:$(DEPTH)%=$(MOZ_BUILD_ROOT)%) $(MOZ_GLUE_WRAP_LDFLAGS)
-endif
-
-ifneq (,$(WRAP_LDFLAGS)$(MOZ_GLUE_WRAP_LDFLAGS))
-DEFAULT_GMAKE_FLAGS += \
- LDFLAGS='$(LDFLAGS) $(NSS_EXTRA_LDFLAGS)' \
- DSO_LDOPTS='$(DSO_LDOPTS) $(LDFLAGS) $(NSS_EXTRA_LDFLAGS)' \
- $(NULL)
-endif
-
-DEFAULT_GMAKE_FLAGS += FREEBL_NO_DEPEND=0
-ifeq ($(OS_TARGET),Linux)
-DEFAULT_GMAKE_FLAGS += FREEBL_LOWHASH=1
-endif
-
-ifdef MOZ_NO_WLZDEFS
-DEFAULT_GMAKE_FLAGS += ZDEFS_FLAG=
-endif
-ifdef MOZ_CFLAGS_NSS
-DEFAULT_GMAKE_FLAGS += XCFLAGS='$(filter-out -W%,$(CFLAGS))'
-DEFAULT_GMAKE_FLAGS += DARWIN_DYLIB_VERSIONS='-compatibility_version 1 -current_version 1 $(LDFLAGS)'
-endif
-ifeq (1_1,$(CLANG_CL)_$(MOZ_ASAN))
-XLDFLAGS := $(OS_LDFLAGS)
-DEFAULT_GMAKE_FLAGS += XLDFLAGS='$(XLDFLAGS)'
-endif
-
-DEFAULT_GMAKE_FLAGS += NSS_NO_PKCS11_BYPASS=1
-
-# Put NSS headers directly under $(DIST)/include
-DEFAULT_GMAKE_FLAGS += PUBLIC_EXPORT_DIR='$(ABS_DIST)/include/$$(MODULE)'
-DEFAULT_GMAKE_FLAGS += SOURCE_XPHEADERS_DIR='$$(SOURCE_XP_DIR)/include/$$(MODULE)'
-DEFAULT_GMAKE_FLAGS += MODULE_INCLUDES='$$(addprefix -I$$(SOURCE_XP_DIR)/include/,$$(REQUIRES))'
-
-# Work around NSS's MAKE_OBJDIR being racy. See bug #836220
-DEFAULT_GMAKE_FLAGS += MAKE_OBJDIR='$$(INSTALL) -D $$(OBJDIR)'
-
-# Work around NSS adding IMPORT_LIBRARY to TARGETS with no rule for
-# it, creating race conditions. See bug #836220
-DEFAULT_GMAKE_FLAGS += TARGETS='$$(LIBRARY) $$(SHARED_LIBRARY) $$(PROGRAM)'
-
-ifdef MOZ_FOLD_LIBS_FLAGS
-DEFAULT_GMAKE_FLAGS += XCFLAGS='$(MOZ_FOLD_LIBS_FLAGS)'
-endif
-
-ifndef WARNINGS_AS_ERRORS
-DEFAULT_GMAKE_FLAGS += NSS_ENABLE_WERROR=0
-endif
-ifeq ($(OS_TARGET),Android)
-DEFAULT_GMAKE_FLAGS += NSS_ENABLE_WERROR=0
-endif
-
-NSS_SRCDIR = $(topsrcdir)
-
-NSS_DIRS =
-ifndef MOZ_FOLD_LIBS
-NSS_DIRS += nss/lib
-else
-ifndef NSS_DISABLE_DBM
-NSS_DIRS += nss/lib/dbm
-endif
-endif
-NSS_DIRS += \
- nss/cmd/lib \
- nss/cmd/shlibsign \
- $(NULL)
-
-ifdef ENABLE_TESTS
-NSS_DIRS += \
- nss/cmd/certutil \
- nss/cmd/pk12util \
- nss/cmd/modutil \
- $(NULL)
-endif
-
-ifneq (,$(filter %--build-id,$(LDFLAGS)))
-DEFAULT_GMAKE_ENV = LDFLAGS=-Wl,--build-id
-endif
-
-ifdef MOZ_FOLD_LIBS
-# TODO: The following can be replaced by something simpler when bug 844880
-# is fixed.
-# All static libraries required for nss, smime, ssl and nssutil.
-# The strip is needed to remove potential linefeed characters, since they hang
-# around in some cases on Windows.
-NSS_STATIC_LIBS := $(strip $(shell $(MAKE) --no-print-directory -f $(srcdir)/nss.mk DEPTH='$(DEPTH)' topsrcdir='$(topsrcdir)' srcdir='$(srcdir)' echo-variable-libs))
-# Corresponding build directories
-NSS_STATIC_DIRS := $(foreach lib,$(NSS_STATIC_LIBS),$(patsubst %/,%,$(dir $(lib))))
-NSS_DIRS += $(NSS_STATIC_DIRS)
-
-# TODO: The following can be replaced by something simpler when bug 844884
-# is fixed.
-# Remaining nss/lib directories
-NSS_DIRS += nss/lib/freebl nss/lib/softoken nss/lib/jar nss/lib/crmf nss/lib/ckfw nss/lib/libpkix
-ifeq (WINNT,$(OS_TARGET))
-NSS_DIRS += nss/lib/zlib
-endif
-endif # MOZ_FOLD_LIBS
-
-# Filter-out $(LIBRARY_NAME) because it's already handled in config/rules.mk.
-NSS_DIST_DLL_FILES := $(addprefix $(DIST)/lib/$(DLL_PREFIX),$(addsuffix $(DLL_SUFFIX),$(filter-out $(LIBRARY_NAME),$(NSS_DLLS)) $(NSS_EXTRA_DLLS)))
-NSS_DIST_DLL_DEST := $(DIST)/bin
-NSS_DIST_DLL_TARGET := target
-INSTALL_TARGETS += NSS_DIST_DLL
-
-ifeq ($(OS_ARCH)_$(1), SunOS_softokn3)
-# has to use copy mode on Solaris, see #665509
-$(DIST)/bin/$(DLL_PREFIX)softokn3$(DLL_SUFFIX): INSTALL := $(INSTALL) -t
-endif
-
-NSS_SDK_LIB_FILES := \
- $(addprefix $(DIST)/lib/$(LIB_PREFIX),$(addsuffix .$(LIB_SUFFIX),$(SDK_LIBS))) \
- $(addprefix $(DIST)/bin/$(DLL_PREFIX),$(addsuffix $(DLL_SUFFIX),$(NSS_DLLS))) \
- $(NULL)
-NSS_SDK_LIB_DEST := $(DIST)/sdk/lib
-NSS_SDK_LIB_TARGET := target
-INSTALL_TARGETS += NSS_SDK_LIB
-
-ifdef MOZ_FOLD_LIBS
-# Add all static libraries for nss, smime, ssl and nssutil
-STATIC_LIBS += $(addprefix $(DEPTH)/security/,$(NSS_STATIC_LIBS))
-
-nss_def_file := $(srcdir)/nss.def
-
-ifeq (WINNT,$(OS_TARGET))
-# Create a .def file based on the various .def files for nss, smime, ssl and
-# nssutil.
-nss3.def: $(nss_def_file) $(DEPTH)/db/sqlite3/src/sqlite-processed.def $(NSS_EXTRA_SYMBOLS_FILE)
- echo LIBRARY nss3$(DLL_SUFFIX) > $@.tmp
- echo EXPORTS >> $@.tmp
- grep -v -h -e ^LIBRARY -e ^EXPORTS -e ^\; $^ >> $@.tmp
- mv $@.tmp $@
-else
-ifdef GCC_USE_GNU_LD
-sqlite_def_file := $(topsrcdir)/db/sqlite3/src/sqlite.def
-nspr_def_file := $(srcdir)/nspr-dummy.def
-
-nss3.def: $(nss_def_file) $(sqlite_def_file) $(nspr_def_file) $(NSS_EXTRA_SYMBOLS_FILE)
- @$(call py_action,convert_def_file, \
- $(DEFINES) $(ACDEFINES) $(MOZ_DEBUG_DEFINES) -o $@ $^)
-
-GARBAGE += \
- nss3.def \
- $(NULL)
-endif # GCC_USE_GNU_LD
-endif # WINNT
-
-IMPORT_LIB_FILES = $(IMPORT_LIBRARY)
-IMPORT_LIB_DEST ?= $(DIST)/lib
-IMPORT_LIB_TARGET = target
-INSTALL_TARGETS += IMPORT_LIB
-
-endif # MOZ_FOLD_LIBS
-
-include $(topsrcdir)/config/rules.mk
-
-# Can't pass this in DEFAULT_GMAKE_FLAGS because that overrides
-# definitions in NSS, so just export it into the sub-make's environment.
-ifeq (WINNT_1,$(OS_TARGET)_$(MOZ_MEMORY))
-ifdef MOZ_CRT
-# OS_LIBS comes from having mozcrt as a dependency in moz.build.
-DLLFLAGS := $(OS_LIBS)
-else
-DLLFLAGS := -LIBPATH:$(ABS_DIST)/../mozglue/build -DEFAULTLIB:mozglue
-endif
-export DLLFLAGS
-endif
-
-ifdef MOZ_FOLD_LIBS
-# Force the linker to include everything from the static libraries.
-EXPAND_LIBS_EXEC += --extract
-
-$(SHARED_LIBRARY): $(addprefix $(DEPTH)/security/,$(NSS_STATIC_LIBS))
-
-ifdef IMPORT_LIB_SUFFIX
-IMPORT_PREFIX = $(LIB_PREFIX)
-IMPORT_SUFFIX = .$(IMPORT_LIB_SUFFIX)
-else
-IMPORT_PREFIX = $(DLL_PREFIX)
-IMPORT_SUFFIX = $(DLL_SUFFIX)
-endif
-
-NSPR_IMPORT_LIBS = $(addprefix $(DIST)/lib/$(IMPORT_PREFIX),$(addsuffix $(IMPORT_SUFFIX),nspr4 plc4 plds4))
-SQLITE_IMPORT_LIB = $(DIST)/lib/$(IMPORT_PREFIX)mozsqlite3$(IMPORT_SUFFIX)
-
-# TODO: The following can be replaced by something simpler when bug 844884
-# is fixed.
-# Associate target files with the rules that build them.
-$(DIST)/lib/$(LIB_PREFIX)crmf.$(LIB_SUFFIX): libs-nss/lib/crmf
-$(DIST)/lib/$(DLL_PREFIX)freebl3$(DLL_SUFFIX): libs-nss/lib/freebl
-$(DIST)/lib/$(DLL_PREFIX)nssckbi$(DLL_SUFFIX): libs-nss/lib/ckfw
-$(DIST)/lib/$(DLL_PREFIX)softokn3$(DLL_SUFFIX): libs-nss/lib/softoken
-$(DIST)/lib/$(DLL_PREFIX)nssdbm3$(DLL_SUFFIX): libs-nss/lib/softoken
-$(foreach lib,$(NSS_STATIC_LIBS),$(eval $(DEPTH)/security/$(lib): libs-$(patsubst %/,%,$(dir $(lib)))))
-
-# Create fake import libraries for the folded libraries, so that linking
-# against them works both for the NSS build system (see dependencies below)
-# and for the rest of the mozilla build system.
-$(NSPR_IMPORT_LIBS) \
-$(SQLITE_IMPORT_LIB) \
-$(DIST)/lib/$(IMPORT_PREFIX)nssutil3$(IMPORT_SUFFIX) \
-$(DIST)/lib/$(IMPORT_PREFIX)ssl3$(IMPORT_SUFFIX) \
-$(DIST)/lib/$(IMPORT_PREFIX)smime3$(IMPORT_SUFFIX): $(DIST)/lib/$(IMPORT_PREFIX)nss3$(IMPORT_SUFFIX)
-ifeq (WINNT,$(OS_TARGET))
- cp $< $@
-else
- ln -sf $< $@
-endif
-
-# Interdependencies between nss sub-directories, and dependencies on NSPR/SQLite
-libs-nss/lib/ckfw: libs-nss/lib/nss/../base $(NSPR_IMPORT_LIBS)
-libs-nss/lib/softoken: $(NSPR_IMPORT_LIBS) $(SQLITE_IMPORT_LIB)
-libs-nss/lib/softoken: libs-nss/lib/freebl
-ifndef NSS_DISABLE_DBM
-libs-nss/lib/softoken: libs-nss/lib/dbm
-endif
-libs-nss/lib/softoken: $(DIST)/lib/$(IMPORT_PREFIX)nssutil3$(IMPORT_SUFFIX)
-libs-nss/lib/freebl: $(DIST)/lib/$(IMPORT_PREFIX)nssutil3$(IMPORT_SUFFIX) $(NSPR_IMPORT_LIBS)
-
-# For each directory where we build static libraries, force the NSS build system
-# to only build static libraries.
-$(addprefix libs-,$(NSS_STATIC_DIRS)): DEFAULT_GMAKE_FLAGS += SHARED_LIBRARY= IMPORT_LIBRARY=
-else
-$(STATIC_LIBS) $(NSS_DIST_DLL_FILES) $(NSS_SDK_LIB_FILES): libs-nss/lib
-endif # MOZ_FOLD_LIBS
-
-ifeq ($(NSINSTALL_PY),$(NSINSTALL))
-DEFAULT_GMAKE_FLAGS += PYTHON='$(PYTHON)'
-DEFAULT_GMAKE_FLAGS += NSINSTALL_PY='$(abspath $(topsrcdir)/config/nsinstall.py)'
-DEFAULT_GMAKE_FLAGS += NSINSTALL='$$(PYTHON) $$(NSINSTALL_PY)'
-else
-DEFAULT_GMAKE_FLAGS += NSINSTALL='$(abspath $(NSINSTALL))'
-endif
-ifeq ($(OS_ARCH),WINNT)
-DEFAULT_GMAKE_FLAGS += INSTALL='$$(NSINSTALL) -t'
-endif
-DEFAULT_GMAKE_FLAGS += $(EXTRA_GMAKE_FLAGS)
-
-$(addprefix libs-,$(NSS_DIRS)): libs-%:
-# Work around NSS's export rule being racy when recursing for private_export
-# See bug #836220.
-$(addprefix export-,$(NSS_DIRS)): EXTRA_GMAKE_FLAGS = PRIVATE_EXPORTS=
-$(addprefix export-,$(NSS_DIRS)): export-%: private_export-%
-$(addprefix private_export-,$(NSS_DIRS)): EXTRA_GMAKE_FLAGS =
-$(addprefix private_export-,$(NSS_DIRS)): private_export-%:
-
-$(foreach p,libs export private_export,$(addprefix $(p)-,$(NSS_DIRS))):
- $(DEFAULT_GMAKE_ENV) $(MAKE) -C $(NSS_SRCDIR)/security/$* $(@:-$*=) $(DEFAULT_GMAKE_FLAGS)
-
-export:: $(addprefix export-,$(NSS_DIRS))
-
-$(addprefix clean-,$(NSS_DIRS)): clean-%:
- $(MAKE) -C $(NSS_SRCDIR)/security/$* $(DEFAULT_GMAKE_FLAGS) clean
-
-clean clobber clobber_all realclean distclean depend:: $(addprefix clean-,$(NSS_DIRS))
-
-NSS_CMD_TARGETS := $(addprefix libs-,$(filter-out nss/cmd/lib,$(filter nss/cmd/%,$(NSS_DIRS))))
-target:: $(NSS_CMD_TARGETS)
-
-ifdef MOZ_FOLD_LIBS
-$(NSS_CMD_TARGETS): $(addprefix $(DIST)/lib/$(IMPORT_PREFIX),$(addsuffix $(IMPORT_SUFFIX),$(NSS_LIBS)))
-libs-nss/cmd/modutil: libs-nss/lib/jar
-ifeq (WINNT,$(OS_TARGET))
-libs-nss/cmd/modutil: libs-nss/lib/zlib
-endif
-$(NSS_CMD_TARGETS): libs-nss/cmd/lib
-else
-$(NSS_CMD_TARGETS): libs-nss/lib libs-nss/cmd/lib
-endif # MOZ_FOLD_LIBS
-
-# Work around NSS build system race condition creating certdata.c in
-# security/nss/lib/ckfw/builtins. See bug #836220.
-libs-nss/lib$(if $(MOZ_FOLD_LIBS),/ckfw): $(call mkdir_deps,$(DEPTH)/security/nss/lib/ckfw/builtins)
-
-endif
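
The nss3.def rules above fold the per-module .def files into a single export list: write one LIBRARY/EXPORTS header, then keep every input line that is not itself a LIBRARY, EXPORTS, or ';' comment line. A Python sketch of the same merge (fold_def_files is a hypothetical name; the real Windows rule uses echo and grep, and the ELF path goes through the convert_def_file build action instead):

    def fold_def_files(output, inputs, library='nss3.dll'):
        # Equivalent of: echo LIBRARY/EXPORTS header, then
        # grep -v -h -e ^LIBRARY -e ^EXPORTS -e ^\; over the inputs.
        with open(output, 'w') as out:
            out.write('LIBRARY %s\n' % library)
            out.write('EXPORTS\n')
            for path in inputs:
                with open(path) as defs:
                    for line in defs:
                        if line.startswith(('LIBRARY', 'EXPORTS', ';')):
                            continue
                        out.write(line)
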
diff --git a/src/third_party/mozjs-45/config/external/nss/crmf/moz.build b/src/third_party/mozjs-45/config/external/nss/crmf/moz.build
deleted file mode 100644
index 9b8005f..0000000
--- a/src/third_party/mozjs-45/config/external/nss/crmf/moz.build
+++ /dev/null
@@ -1,20 +0,0 @@
-# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*-
-# vim: set filetype=python:
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-Library('crmf')
-
-if CONFIG['MOZ_NATIVE_NSS']:
- OS_LIBS += [l for l in CONFIG['NSS_LIBS'] if l.startswith('-L')]
- OS_LIBS += ['-lcrmf']
-else:
- USE_LIBS += [
- # The dependency on nss is not real, but is required to force the
- # parent directory being built before this one. This has no
- # practical effect on linkage, since the only thing linking crmf
- # will need nss anyways.
- 'nss',
- 'static:/security/nss/lib/crmf/crmf',
- ]
diff --git a/src/third_party/mozjs-45/config/external/nss/moz.build b/src/third_party/mozjs-45/config/external/nss/moz.build
deleted file mode 100644
index 572cebf..0000000
--- a/src/third_party/mozjs-45/config/external/nss/moz.build
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*-
-# vim: set filetype=python:
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-DIRS += ['crmf']
-
-if CONFIG['MOZ_NATIVE_NSS']:
- Library('nss')
- OS_LIBS += CONFIG['NSS_LIBS']
-elif CONFIG['MOZ_FOLD_LIBS']:
- GeckoSharedLibrary('nss', linkage=None)
- # TODO: The library name can be changed when bug 845217 is fixed.
- SHARED_LIBRARY_NAME = 'nss3'
-
- SDK_LIBRARY = True
-
- # Normally, there should be /something/ to ensure nspr is built
- # before this directory, but since nspr is built during "export",
- # it actually doesn't matter.
- if CONFIG['OS_TARGET'] == 'WINNT':
- suffix = '_s'
- else:
- suffix = ''
- USE_LIBS += [
- 'static:/nsprpub/lib/ds/plds4%s' % suffix,
- 'static:/nsprpub/lib/libc/src/plc4%s' % suffix,
- 'static:/nsprpub/pr/src/nspr4%s' % suffix,
- ]
-
- OS_LIBS += CONFIG['REALTIME_LIBS']
-
- if CONFIG['OS_TARGET'] == 'WINNT':
- DEFFILE = 'nss3.def'
-
- if CONFIG['OS_ARCH'] == 'Linux' and CONFIG['GCC_USE_GNU_LD']:
- LD_VERSION_SCRIPT = 'nss3.def'
-else:
- Library('nss')
- USE_LIBS += [
- '/security/nss/lib/nss/nss3',
- '/security/nss/lib/smime/smime3',
- '/security/nss/lib/ssl/ssl3',
- '/security/nss/lib/util/nssutil3',
- 'sqlite',
- ]
-
-# XXX: We should fix these warnings.
-ALLOW_COMPILER_WARNINGS = True
diff --git a/src/third_party/mozjs-45/config/external/nss/nspr-dummy.def b/src/third_party/mozjs-45/config/external/nss/nspr-dummy.def
deleted file mode 100644
index b14fcf8..0000000
--- a/src/third_party/mozjs-45/config/external/nss/nspr-dummy.def
+++ /dev/null
@@ -1,15 +0,0 @@
-; This Source Code Form is subject to the terms of the Mozilla Public
-; License, v. 2.0. If a copy of the MPL was not distributed with this
-; file, You can obtain one at http://mozilla.org/MPL/2.0/.
-;
-; This is a fake .def file, to be used for generating linker scripts
-; for our folded libnss when MOZ_FOLD_LIBS. NSPR, unlike NSS, exports
-; symbols with symbol visibility (Unix) or __declspec (Windows). When
-; using a linker script, however, we need to explicitly specify that
-; NSPR's symbols should be globally visible. Otherwise, NSPR's exported
-; symbols would match the |local: *| rule and be hidden.
-LIBRARY libnsprdummy
-EXPORTS
-PR_* ; Actual .def files don't allow wildcards, of course.
-_PR_*
-PL_*
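
As the comment says, this dummy .def only exists so that the generated linker version script lists NSPR's wildcarded symbol patterns as global instead of letting them fall under "local: *". A rough Python illustration of that .def-to-version-script conversion (this is not the actual convert_def_file action, just a sketch of the idea):

    def def_to_version_script(def_lines):
        # Collect exported names/patterns; drop LIBRARY lines and ';' comments.
        symbols, in_exports = [], False
        for line in def_lines:
            name = line.split(';', 1)[0].strip()
            if not name:
                continue
            if name.upper().startswith('LIBRARY'):
                continue
            if name.upper() == 'EXPORTS':
                in_exports = True
                continue
            if in_exports:
                symbols.append(name.split()[0])  # ignore keywords such as DATA
        globals_block = '\n'.join('    %s;' % s for s in symbols)
        # Anonymous version node: listed patterns stay visible, the rest is hidden.
        return '{\n  global:\n%s\n  local: *;\n};\n' % globals_block

    # For this file it yields global entries for PR_*, _PR_* and PL_*.
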
diff --git a/src/third_party/mozjs-45/config/external/nss/nss.def b/src/third_party/mozjs-45/config/external/nss/nss.def
deleted file mode 100644
index 2e63153..0000000
--- a/src/third_party/mozjs-45/config/external/nss/nss.def
+++ /dev/null
@@ -1,686 +0,0 @@
-; This Source Code Form is subject to the terms of the Mozilla Public
-; License, v. 2.0. If a copy of the MPL was not distributed with this
-; file, You can obtain one at http://mozilla.org/MPL/2.0/.
-;
-; This is a fake .def file, to be used for generating linker scripts
-; for our folded libnss when MOZ_FOLD_LIBS.
-LIBRARY nss3
-EXPORTS
-ATOB_AsciiToData
-ATOB_AsciiToData_Util
-ATOB_ConvertAsciiToItem
-ATOB_ConvertAsciiToItem_Util
-BTOA_ConvertItemToAscii_Util
-BTOA_DataToAscii
-BTOA_DataToAscii_Util
-CERT_AddCertToListHead
-CERT_AddCertToListTail
-CERT_AddExtension
-CERT_AddExtensionByOID
-__CERT_AddTempCertToPerm
-CERT_AsciiToName
-CERT_CacheOCSPResponseFromSideChannel
-CERT_CertChainFromCert
-CERT_CertificateRequestTemplate DATA
-CERT_CertificateTemplate DATA
-CERT_CertListFromCert
-CERT_ChangeCertTrust
-CERT_CheckCertUsage
-CERT_CheckCertValidTimes
-CERT_CheckNameSpace
-CERT_ClearOCSPCache
-CERT_CompareCerts
-CERT_CompareName
-CERT_ConvertAndDecodeCertificate
-CERT_CopyName
-CERT_CopyRDN
-CERT_CreateCertificate
-CERT_CreateCertificateRequest
-CERT_CreateSubjectCertList
-CERT_CreateValidity
-CERT_CrlTemplate DATA
-CERT_DecodeAltNameExtension
-CERT_DecodeAuthInfoAccessExtension
-CERT_DecodeAuthKeyID
-CERT_DecodeAVAValue
-CERT_DecodeBasicConstraintValue
-CERT_DecodeCertFromPackage
-CERT_DecodeCertificatePoliciesExtension
-CERT_DecodeCertPackage
-CERT_DecodeCRLDistributionPoints
-CERT_DecodeNameConstraintsExtension
-CERT_DecodeOidSequence
-CERT_DecodePrivKeyUsagePeriodExtension
-CERT_DecodeTrustString
-CERT_DecodeUserNotice
-CERT_DerNameToAscii
-CERT_DestroyCertArray
-CERT_DestroyCertificate
-CERT_DestroyCertificateList
-CERT_DestroyCertificatePoliciesExtension
-CERT_DestroyCertificateRequest
-CERT_DestroyCertList
-CERT_DestroyName
-CERT_DestroyOidSequence
-CERT_DestroyUserNotice
-CERT_DestroyValidity
-CERT_DisableOCSPChecking
-CERT_DisableOCSPDefaultResponder
-CERT_DupCertificate
-CERT_EnableOCSPChecking
-CERT_EncodeAltNameExtension
-CERT_EncodeAndAddBitStrExtension
-CERT_EncodeAuthKeyID
-CERT_EncodeBasicConstraintValue
-CERT_EncodeCertPoliciesExtension
-CERT_EncodeCRLDistributionPoints
-CERT_EncodeInfoAccessExtension
-CERT_EncodeInhibitAnyExtension
-CERT_EncodeNameConstraintsExtension
-CERT_EncodeNoticeReference
-CERT_EncodePolicyConstraintsExtension
-CERT_EncodePolicyMappingExtension
-CERT_EncodeSubjectKeyID
-CERT_EncodeUserNotice
-CERT_ExtractPublicKey
-CERT_FilterCertListByCANames
-CERT_FilterCertListByUsage
-CERT_FilterCertListForUserCerts
-CERT_FindCertByDERCert
-CERT_FindCertByIssuerAndSN
-CERT_FindCertByName
-CERT_FindCertByNickname
-CERT_FindCertByNicknameOrEmailAddr
-CERT_FindCertExtension
-CERT_FindCertIssuer
-CERT_FindKeyUsageExtension
-CERT_FindUserCertByUsage
-CERT_FindUserCertsByUsage
-CERT_FinishCertificateRequestAttributes
-CERT_FinishExtensions
-CERT_ForcePostMethodForOCSP
-CERT_FreeNicknames
-CERT_GenTime2FormattedAscii_Util
-CERT_GetCertChainFromCert
-CERT_GetCertEmailAddress
-CERT_GetCertificateRequestExtensions
-CERT_GetCertTimes
-CERT_GetCertTrust
-CERT_GetCommonName
-CERT_GetConstrainedCertificateNames
-CERT_GetCountryName
-CERT_GetDefaultCertDB
-CERT_GetFirstEmailAddress
-CERT_GetGeneralNameTypeFromString
-CERT_GetImposedNameConstraints
-CERT_GetLocalityName
-CERT_GetNextEmailAddress
-CERT_GetNextGeneralName
-CERT_GetNextNameConstraint
-CERT_GetOCSPAuthorityInfoAccessLocation
-CERT_GetOidString
-CERT_GetOrgName
-CERT_GetOrgUnitName
-CERT_GetStateName
-CERT_Hexify
-CERT_ImportCerts
-CERT_IsCACert
-CERT_IsUserCert
-CERT_MakeCANickname
-CERT_MergeExtensions
-CERT_NameTemplate DATA
-CERT_NameToAscii
-CERT_NewCertList
-CERT_NewTempCertificate
-CERT_NicknameStringsFromCertList
-CERT_OCSPCacheSettings
-CERT_PKIXVerifyCert
-CERT_RemoveCertListNode
-CERT_RFC1485_EscapeAndQuote
-CERT_SaveSMimeProfile
-CERT_SequenceOfCertExtensionTemplate DATA
-CERT_SetOCSPFailureMode
-CERT_SetOCSPTimeout
-CERT_SignedCrlTemplate DATA
-CERT_SignedDataTemplate DATA
-CERT_StartCertExtensions
-CERT_StartCertificateRequestAttributes
-CERT_SubjectPublicKeyInfoTemplate DATA
-CERT_TimeChoiceTemplate DATA
-CERT_VerifyCertificate
-CERT_VerifySignedDataWithPublicKeyInfo
-DER_AsciiToTime_Util
-DER_DecodeTimeChoice_Util
-DER_Encode
-DER_EncodeTimeChoice_Util
-DER_Encode_Util
-DER_GeneralizedTimeToTime
-DER_GeneralizedTimeToTime_Util
-DER_GetInteger
-DER_GetInteger_Util
-DER_Lengths
-DER_SetUInteger
-DER_UTCTimeToTime_Util
-DSAU_DecodeDerSigToLen
-DSAU_EncodeDerSigWithLen
-DTLS_GetHandshakeTimeout
-DTLS_ImportFD
-HASH_Begin
-HASH_Create
-HASH_Destroy
-HASH_End
-HASH_GetHashObject
-HASH_GetType
-HASH_HashBuf
-HASH_ResultLenByOidTag
-HASH_Update
-NSSBase64_DecodeBuffer
-NSSBase64_EncodeItem
-NSSBase64_EncodeItem_Util
-NSS_CMSContentInfo_GetContent
-NSS_CMSContentInfo_SetContent_Data
-NSS_CMSContentInfo_SetContent_EnvelopedData
-NSS_CMSContentInfo_SetContent_SignedData
-NSS_CMSDecoder_Cancel
-NSS_CMSDecoder_Finish
-NSS_CMSDecoder_Start
-NSS_CMSDecoder_Update
-NSS_CMSEncoder_Cancel
-NSS_CMSEncoder_Finish
-NSS_CMSEncoder_Start
-NSS_CMSEncoder_Update
-NSS_CMSEnvelopedData_AddRecipient
-NSS_CMSEnvelopedData_Create
-NSS_CMSEnvelopedData_GetContentInfo
-NSS_CMSMessage_ContentLevel
-NSS_CMSMessage_ContentLevelCount
-NSS_CMSMessage_Create
-NSS_CMSMessage_CreateFromDER
-NSS_CMSMessage_Destroy
-NSS_CMSMessage_GetContent
-NSS_CMSMessage_GetContentInfo
-NSS_CMSMessage_IsEncrypted
-NSS_CMSMessage_IsSigned
-NSS_CMSRecipientInfo_Create
-NSS_CMSSignedData_AddCertificate
-NSS_CMSSignedData_AddCertList
-NSS_CMSSignedData_AddSignerInfo
-NSS_CMSSignedData_Create
-NSS_CMSSignedData_CreateCertsOnly
-NSS_CMSSignedData_Destroy
-NSS_CMSSignedData_GetContentInfo
-NSS_CMSSignedData_GetSignerInfo
-NSS_CMSSignedData_ImportCerts
-NSS_CMSSignedData_SetDigestValue
-NSS_CMSSignedData_SignerInfoCount
-NSS_CMSSignedData_VerifySignerInfo
-NSS_CMSSignerInfo_AddMSSMIMEEncKeyPrefs
-NSS_CMSSignerInfo_AddSigningTime
-NSS_CMSSignerInfo_AddSMIMECaps
-NSS_CMSSignerInfo_AddSMIMEEncKeyPrefs
-NSS_CMSSignerInfo_Create
-NSS_CMSSignerInfo_GetSignerCommonName
-NSS_CMSSignerInfo_GetSignerEmailAddress
-NSS_CMSSignerInfo_GetSigningCertificate
-NSS_CMSSignerInfo_IncludeCerts
-NSS_CMSSignerInfo_Verify
-NSS_FindCertKEAType
-NSS_GetAlgorithmPolicy
-NSS_Get_CERT_CertificateRequestTemplate
-NSS_Get_CERT_CertificateTemplate
-NSS_Get_CERT_CrlTemplate
-NSS_Get_CERT_NameTemplate
-NSS_Get_CERT_SequenceOfCertExtensionTemplate
-NSS_Get_CERT_SignedCrlTemplate
-NSS_Get_CERT_SignedDataTemplate
-NSS_Get_CERT_SubjectPublicKeyInfoTemplate
-NSS_Get_CERT_TimeChoiceTemplate
-NSS_Get_SEC_AnyTemplate_Util
-NSS_Get_SEC_BitStringTemplate
-NSS_Get_SEC_BitStringTemplate_Util
-NSS_Get_SEC_BMPStringTemplate
-NSS_Get_SEC_BooleanTemplate_Util
-NSS_Get_SEC_GeneralizedTimeTemplate_Util
-NSS_Get_SEC_IA5StringTemplate
-NSS_Get_SEC_IA5StringTemplate_Util
-NSS_Get_SEC_IntegerTemplate
-NSS_Get_SEC_IntegerTemplate_Util
-NSS_Get_SECKEY_RSAPSSParamsTemplate
-NSS_Get_SEC_NullTemplate_Util
-NSS_Get_SEC_ObjectIDTemplate_Util
-NSS_Get_SEC_OctetStringTemplate
-NSS_Get_SEC_OctetStringTemplate_Util
-NSS_Get_SECOID_AlgorithmIDTemplate
-NSS_Get_SECOID_AlgorithmIDTemplate_Util
-NSS_Get_SEC_SignedCertificateTemplate
-NSS_Get_SEC_UTF8StringTemplate
-NSS_Get_SEC_UTF8StringTemplate_Util
-NSS_GetVersion
-NSS_Init
-NSS_Initialize
-NSS_InitWithMerge
-NSS_IsInitialized
-NSS_NoDB_Init
-NSS_SecureMemcmp
-NSS_SetAlgorithmPolicy
-NSS_SetDomesticPolicy
-NSS_Shutdown
-NSSSMIME_GetVersion
-NSS_SMIMESignerInfo_SaveSMIMEProfile
-NSS_SMIMEUtil_FindBulkAlgForRecipients
-NSSSSL_GetVersion
-NSSUTIL_ArgDecodeNumber
-NSSUTIL_ArgFetchValue
-NSSUTIL_ArgGetLabel
-NSSUTIL_ArgGetParamValue
-NSSUTIL_ArgHasFlag
-NSSUTIL_ArgIsBlank
-NSSUTIL_ArgParseCipherFlags
-NSSUTIL_ArgParseModuleSpec
-NSSUTIL_ArgParseSlotFlags
-NSSUTIL_ArgParseSlotInfo
-NSSUTIL_ArgReadLong
-NSSUTIL_ArgSkipParameter
-NSSUTIL_ArgStrip
-NSSUTIL_DoModuleDBFunction
-_NSSUTIL_EvaluateConfigDir
-_NSSUTIL_GetSecmodName
-NSSUTIL_GetVersion
-NSSUTIL_MkModuleSpec
-NSSUTIL_MkNSSString
-NSSUTIL_MkSlotString
-NSSUTIL_Quote
-PK11_AlgtagToMechanism
-PK11_Authenticate
-PK11_ChangePW
-PK11_CheckUserPassword
-PK11_CipherOp
-PK11_ConfigurePKCS11
-PK11_CreateContextBySymKey
-PK11_CreateDigestContext
-PK11_CreateGenericObject
-PK11_CreateMergeLog
-PK11_CreatePBEV2AlgorithmID
-PK11_Decrypt
-PK11_DeleteTokenCertAndKey
-PK11_DeleteTokenPrivateKey
-PK11_DeleteTokenPublicKey
-PK11_DEREncodePublicKey
-PK11_Derive
-PK11_DeriveWithTemplate
-PK11_DestroyContext
-PK11_DestroyGenericObject
-PK11_DestroyMergeLog
-PK11_DestroyObject
-PK11_DestroyTokenObject
-PK11_DigestBegin
-PK11_DigestFinal
-PK11_DigestOp
-PK11_DoesMechanism
-PK11_Encrypt
-PK11_ExportDERPrivateKeyInfo
-PK11_ExportEncryptedPrivKeyInfo
-PK11_ExtractKeyValue
-PK11_FindCertFromNickname
-PK11_FindCertsFromEmailAddress
-PK11_FindCertsFromNickname
-PK11_FindKeyByAnyCert
-PK11_FindKeyByDERCert
-PK11_FindKeyByKeyID
-PK11_FindSlotByName
-PK11_FindSlotsByNames
-PK11_FreeSlot
-PK11_FreeSlotList
-PK11_FreeSlotListElement
-PK11_FreeSymKey
-PK11_GenerateKeyPair
-PK11_GenerateKeyPairWithFlags
-PK11_GenerateKeyPairWithOpFlags
-PK11_GenerateRandom
-PK11_GenerateRandomOnSlot
-PK11_GetAllSlotsForCert
-PK11_GetAllTokens
-PK11_GetBestSlot
-PK11_GetBestSlotMultiple
-PK11_GetBlockSize
-PK11_GetCertFromPrivateKey
-PK11_GetDefaultArray
-PK11_GetDefaultFlags
-PK11_GetDisabledReason
-PK11_GetFirstSafe
-PK11_GetInternalKeySlot
-PK11_GetInternalSlot
-PK11_GetIVLength
-PK11_GetKeyData
-PK11_GetKeyGen
-PK11_GetLowLevelKeyIDForPrivateKey
-PK11_GetMechanism
-PK11_GetMinimumPwdLength
-PK11_GetModInfo
-PK11_GetNextSafe
-PK11_GetPadMechanism
-PK11_GetPrivateKeyNickname
-PK11_GetPrivateModulusLen
-PK11_GetSlotID
-PK11_GetSlotInfo
-PK11_GetSlotName
-PK11_GetSlotPWValues
-PK11_GetSlotSeries
-PK11_GetTokenInfo
-PK11_GetTokenName
-PK11_HashBuf
-PK11_HasRootCerts
-PK11_ImportCert
-PK11_ImportCertForKey
-PK11_ImportCRL
-PK11_ImportDERPrivateKeyInfoAndReturnKey
-PK11_ImportPublicKey
-PK11_ImportSymKey
-PK11_InitPin
-PK11_IsDisabled
-PK11_IsFIPS
-PK11_IsFriendly
-PK11_IsHW
-PK11_IsInternal
-PK11_IsLoggedIn
-PK11_IsPresent
-PK11_IsReadOnly
-PK11_IsRemovable
-PK11_KeyForCertExists
-PK11_KeyGen
-PK11_KeyGenWithTemplate
-PK11_ListCerts
-PK11_ListCertsInSlot
-PK11_ListPrivateKeysInSlot
-PK11_ListPrivKeysInSlot
-PK11_LoadPrivKey
-PK11_Logout
-PK11_LogoutAll
-PK11_MakeIDFromPubKey
-PK11_MechanismToAlgtag
-PK11_MergeTokens
-PK11_NeedLogin
-PK11_NeedUserInit
-PK11_ParamFromIV
-PK11_PBEKeyGen
-PK11_PrivDecrypt
-PK11_PrivDecryptPKCS1
-PK11_ProtectedAuthenticationPath
-PK11_PubDeriveWithKDF
-PK11_PubEncrypt
-PK11_PubEncryptPKCS1
-PK11_PubUnwrapSymKey
-PK11_PubWrapSymKey
-PK11_RandomUpdate
-PK11_ReadRawAttribute
-PK11_ReferenceSlot
-PK11_ResetToken
-PK11SDR_Decrypt
-PK11SDR_Encrypt
-PK11_SetPasswordFunc
-PK11_SetSlotPWValues
-PK11_Sign
-PK11_SignatureLen
-PK11_UnwrapPrivKey
-PK11_UnwrapSymKey
-PK11_UpdateSlotAttribute
-PK11_UserDisableSlot
-PK11_UserEnableSlot
-PK11_WrapPrivKey
-PK11_WrapSymKey
-PORT_Alloc
-PORT_Alloc_Util
-PORT_ArenaAlloc
-PORT_ArenaAlloc_Util
-PORT_ArenaGrow_Util
-PORT_ArenaMark_Util
-PORT_ArenaRelease_Util
-PORT_ArenaStrdup
-PORT_ArenaStrdup_Util
-PORT_ArenaUnmark_Util
-PORT_ArenaZAlloc
-PORT_ArenaZAlloc_Util
-PORT_Free
-PORT_FreeArena
-PORT_FreeArena_Util
-PORT_Free_Util
-PORT_GetError
-PORT_GetError_Util
-PORT_NewArena
-PORT_NewArena_Util
-PORT_Realloc_Util
-PORT_RegExpSearch
-PORT_SetError
-PORT_SetError_Util
-PORT_SetUCS2_ASCIIConversionFunction
-PORT_SetUCS2_ASCIIConversionFunction_Util
-PORT_Strdup
-PORT_Strdup_Util
-PORT_UCS2_ASCIIConversion_Util
-PORT_UCS2_UTF8Conversion
-PORT_UCS2_UTF8Conversion_Util
-PORT_ZAlloc
-PORT_ZAlloc_Util
-PORT_ZFree_Util
-SEC_AnyTemplate_Util DATA
-SEC_ASN1Decode
-SEC_ASN1DecodeInteger
-SEC_ASN1DecodeItem
-SEC_ASN1DecodeItem_Util
-SEC_ASN1Decode_Util
-SEC_ASN1EncodeInteger_Util
-SEC_ASN1EncodeItem
-SEC_ASN1EncodeItem_Util
-SEC_ASN1EncodeUnsignedInteger_Util
-SEC_ASN1Encode_Util
-SEC_BitStringTemplate DATA
-SEC_BitStringTemplate_Util DATA
-SEC_BMPStringTemplate DATA
-SEC_BooleanTemplate_Util DATA
-SEC_CertNicknameConflict
-SEC_DeletePermCertificate
-SEC_DerSignData
-SEC_DestroyCrl
-SEC_GeneralizedTimeTemplate_Util DATA
-SEC_GetSignatureAlgorithmOidTag
-SEC_IA5StringTemplate DATA
-SEC_IA5StringTemplate_Util DATA
-SEC_IntegerTemplate DATA
-SEC_IntegerTemplate_Util DATA
-SECITEM_AllocArray
-SECITEM_AllocItem
-SECITEM_AllocItem_Util
-SECITEM_ArenaDupItem_Util
-SECITEM_CompareItem
-SECITEM_CompareItem_Util
-SECITEM_CopyItem
-SECITEM_CopyItem_Util
-SECITEM_DupArray
-SECITEM_DupItem
-SECITEM_DupItem_Util
-SECITEM_FreeItem
-SECITEM_FreeItem_Util
-SECITEM_HashCompare
-SECITEM_ItemsAreEqual
-SECITEM_ItemsAreEqual_Util
-SECITEM_ReallocItemV2
-SECITEM_ZfreeItem
-SECITEM_ZfreeItem_Util
-SECKEY_ConvertToPublicKey
-SECKEY_CopyPrivateKey
-SECKEY_CopyPublicKey
-SECKEY_CopySubjectPublicKeyInfo
-SECKEY_CreateSubjectPublicKeyInfo
-SECKEY_DecodeDERSubjectPublicKeyInfo
-SECKEY_DestroyEncryptedPrivateKeyInfo
-SECKEY_DestroyPrivateKey
-SECKEY_DestroyPrivateKeyList
-SECKEY_DestroyPublicKey
-SECKEY_DestroySubjectPublicKeyInfo
-SECKEY_ECParamsToBasePointOrderLen
-SECKEY_ECParamsToKeySize
-SECKEY_EncodeDERSubjectPublicKeyInfo
-SECKEY_ExtractPublicKey
-SECKEY_GetPublicKeyType
-SECKEY_ImportDERPublicKey
-SECKEY_PublicKeyStrength
-SECKEY_RSAPSSParamsTemplate DATA
-SECKEY_SignatureLen
-SECMIME_DecryptionAllowed
-SECMOD_AddNewModule
-SECMOD_AddNewModuleEx
-SECMOD_CancelWait
-SECMOD_CanDeleteInternalModule
-SECMOD_CloseUserDB
-SECMOD_CreateModule
-SECMOD_DeleteInternalModule
-SECMOD_DeleteModule
-SECMOD_DestroyModule
-SECMOD_FindModule
-SECMOD_GetDeadModuleList
-SECMOD_GetDefaultModuleList
-SECMOD_GetDefaultModuleListLock
-SECMOD_GetInternalModule
-SECMOD_GetModuleSpecList
-SECMOD_GetReadLock
-SECMOD_HasRemovableSlots
-SECMOD_InternaltoPubMechFlags
-SECMOD_LoadModule
-SECMOD_LoadUserModule
-SECMOD_OpenUserDB
-SECMOD_PubCipherFlagstoInternal
-SECMOD_PubMechFlagstoInternal
-SECMOD_ReferenceModule
-SECMOD_ReleaseReadLock
-SECMOD_UnloadUserModule
-SECMOD_UpdateModule
-SECMOD_WaitForAnyTokenEvent
-SEC_NullTemplate_Util DATA
-SEC_ObjectIDTemplate_Util DATA
-SEC_OctetStringTemplate DATA
-SEC_OctetStringTemplate_Util DATA
-SECOID_AddEntry
-SECOID_AddEntry_Util
-SECOID_AlgorithmIDTemplate DATA
-SECOID_AlgorithmIDTemplate_Util DATA
-SECOID_CopyAlgorithmID_Util
-SECOID_DestroyAlgorithmID
-SECOID_DestroyAlgorithmID_Util
-SECOID_FindOID
-SECOID_FindOIDByMechanism
-SECOID_FindOIDByTag
-SECOID_FindOIDByTag_Util
-SECOID_FindOIDTag
-SECOID_FindOIDTagDescription_Util
-SECOID_FindOIDTag_Util
-SECOID_FindOID_Util
-SECOID_GetAlgorithmTag
-SECOID_GetAlgorithmTag_Util
-SECOID_Init
-SECOID_SetAlgorithmID
-SECOID_SetAlgorithmID_Util
-SECOID_Shutdown
-SEC_PKCS12AddCertAndKey
-SEC_PKCS12AddPasswordIntegrity
-SEC_PKCS12CreateExportContext
-SEC_PKCS12CreatePasswordPrivSafe
-SEC_PKCS12CreateUnencryptedSafe
-SEC_PKCS12DecoderFinish
-SEC_PKCS12DecoderImportBags
-SEC_PKCS12DecoderIterateInit
-SEC_PKCS12DecoderIterateNext
-SEC_PKCS12DecoderRenameCertNicknames
-SEC_PKCS12DecoderStart
-SEC_PKCS12DecoderUpdate
-SEC_PKCS12DecoderValidateBags
-SEC_PKCS12DecoderVerify
-SEC_PKCS12DestroyExportContext
-SEC_PKCS12EnableCipher
-SEC_PKCS12Encode
-SEC_PKCS12IsEncryptionAllowed
-SEC_PKCS12SetPreferredCipher
-SEC_PKCS5GetPBEAlgorithm
-SEC_PKCS5IsAlgorithmPBEAlgTag
-SEC_PKCS7AddSigningTime
-SEC_PKCS7ContentIsEncrypted
-SEC_PKCS7ContentIsSigned
-SEC_PKCS7CopyContentInfo
-SEC_PKCS7CreateSignedData
-SEC_PKCS7DecodeItem
-SEC_PKCS7DecoderFinish
-SEC_PKCS7DecoderStart
-SEC_PKCS7DecoderUpdate
-SEC_PKCS7DestroyContentInfo
-SEC_PKCS7Encode
-SEC_PKCS7IncludeCertChain
-SEC_PKCS7VerifyDetachedSignature
-SEC_QuickDERDecodeItem
-SEC_QuickDERDecodeItem_Util
-SEC_RegisterDefaultHttpClient
-SEC_SignData
-SEC_SignedCertificateTemplate DATA
-SEC_StringToOID
-SEC_UTF8StringTemplate DATA
-SEC_UTF8StringTemplate_Util DATA
-SGN_Begin
-SGN_CreateDigestInfo_Util
-SGN_DecodeDigestInfo
-SGN_DestroyContext
-SGN_DestroyDigestInfo_Util
-SGN_End
-SGN_NewContext
-SGN_Update
-SSL_AuthCertificateComplete
-SSL_AuthCertificateHook
-SSL_CipherPrefGet
-SSL_CipherPrefSet
-SSL_CipherPrefSetDefault
-SSL_ClearSessionCache
-SSL_ConfigSecureServer
-SSL_ConfigSecureServerWithCertChain
-SSL_ConfigServerSessionIDCache
-SSL_ExportKeyingMaterial
-SSL_ForceHandshake
-SSL_GetChannelInfo
-SSL_GetCipherSuiteInfo
-SSL_GetClientAuthDataHook
-SSL_GetImplementedCiphers
-SSL_GetNextProto
-SSL_GetNumImplementedCiphers
-SSL_GetSRTPCipher
-SSL_HandshakeCallback
-SSL_HandshakeNegotiatedExtension
-SSL_ImplementedCiphers DATA
-SSL_ImportFD
-SSL_NumImplementedCiphers DATA
-SSL_OptionSet
-SSL_OptionSetDefault
-SSL_PeerCertificate
-SSL_PeerCertificateChain
-SSL_PeerStapledOCSPResponses
-SSL_ResetHandshake
-SSL_SetCanFalseStartCallback
-SSL_SetNextProtoNego
-SSL_SetPKCS11PinArg
-SSL_SetSockPeerID
-SSL_SetSRTPCiphers
-SSL_SetStapledOCSPResponses
-SSL_SetURL
-SSL_SNISocketConfigHook
-SSL_VersionRangeGet
-SSL_VersionRangeGetDefault
-SSL_VersionRangeGetSupported
-SSL_VersionRangeSet
-SSL_VersionRangeSetDefault
-UTIL_SetForkState
-VFY_Begin
-VFY_CreateContext
-VFY_DestroyContext
-VFY_End
-VFY_EndWithSignature
-VFY_Update
-VFY_VerifyData
-VFY_VerifyDataWithAlgorithmID
-VFY_VerifyDigestDirect
-_SGN_VerifyPKCS1DigestInfo
-__PK11_SetCertificateNickname
diff --git a/src/third_party/mozjs-45/config/external/nss/nss.mk b/src/third_party/mozjs-45/config/external/nss/nss.mk
deleted file mode 100644
index 38d234a..0000000
--- a/src/third_party/mozjs-45/config/external/nss/nss.mk
+++ /dev/null
@@ -1,27 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-include $(DEPTH)/config/autoconf.mk
-
-include $(topsrcdir)/config/config.mk
-
-dirs :=
-
-define add_dirs
-SHARED_LIBRARY_DIRS :=
-include $(topsrcdir)/security/$(1)/config.mk
-dirs += $$(addprefix $(1)/,$$(SHARED_LIBRARY_DIRS)) $(1)
-endef
-$(foreach dir,util nss ssl smime,$(eval $(call add_dirs,nss/lib/$(dir))))
-
-libs :=
-define add_lib
-LIBRARY_NAME :=
-include $(topsrcdir)/security/$(1)/manifest.mn
-libs += $$(addprefix $(1)/,$(LIB_PREFIX)$$(LIBRARY_NAME).$(LIB_SUFFIX))
-endef
-$(foreach dir,$(dirs),$(eval $(call add_lib,$(dir))))
-
-echo-variable-%:
- @echo $($*)
diff --git a/src/third_party/mozjs-45/config/external/nss/nss.symbols b/src/third_party/mozjs-45/config/external/nss/nss.symbols
deleted file mode 100644
index 0a6b015..0000000
--- a/src/third_party/mozjs-45/config/external/nss/nss.symbols
+++ /dev/null
@@ -1,702 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-#ifndef XP_WIN
-# NSPR, unlike NSS, exports symbols with symbol visibility (Unix) or __declspec
-# (Windows). When using a linker script, however, we need to explicitly
-# specify that NSPR's symbols should be globally visible. Otherwise, NSPR's
-# exported symbols would be hidden.
-# .def files on Windows don't allow wildcards, of course, which is why this is
-# excluded on Windows, but it doesn't matter because the symbols are already
-# exported in NSPR (Windows peculiarity).
-PR_*
-PL_*
-#endif
-#include ../../../db/sqlite3/src/sqlite.symbols
-ATOB_AsciiToData
-ATOB_AsciiToData_Util
-ATOB_ConvertAsciiToItem
-ATOB_ConvertAsciiToItem_Util
-BTOA_ConvertItemToAscii_Util
-BTOA_DataToAscii
-BTOA_DataToAscii_Util
-CERT_AddCertToListHead
-CERT_AddCertToListTail
-CERT_AddExtension
-CERT_AddExtensionByOID
-__CERT_AddTempCertToPerm
-CERT_AsciiToName
-CERT_CacheOCSPResponseFromSideChannel
-CERT_CertChainFromCert
-CERT_CertificateRequestTemplate @DATA@
-CERT_CertificateTemplate @DATA@
-CERT_CertListFromCert
-CERT_ChangeCertTrust
-CERT_CheckCertUsage
-CERT_CheckCertValidTimes
-CERT_CheckNameSpace
-CERT_ClearOCSPCache
-CERT_CompareCerts
-CERT_CompareName
-CERT_ConvertAndDecodeCertificate
-CERT_CopyName
-CERT_CopyRDN
-CERT_CreateCertificate
-CERT_CreateCertificateRequest
-CERT_CreateSubjectCertList
-CERT_CreateValidity
-CERT_CrlTemplate @DATA@
-CERT_DecodeAltNameExtension
-CERT_DecodeAuthInfoAccessExtension
-CERT_DecodeAuthKeyID
-CERT_DecodeAVAValue
-CERT_DecodeBasicConstraintValue
-CERT_DecodeCertFromPackage
-CERT_DecodeCertificatePoliciesExtension
-CERT_DecodeCertPackage
-CERT_DecodeCRLDistributionPoints
-CERT_DecodeNameConstraintsExtension
-CERT_DecodeOidSequence
-CERT_DecodePrivKeyUsagePeriodExtension
-CERT_DecodeTrustString
-CERT_DecodeUserNotice
-CERT_DerNameToAscii
-CERT_DestroyCertArray
-CERT_DestroyCertificate
-CERT_DestroyCertificateList
-CERT_DestroyCertificatePoliciesExtension
-CERT_DestroyCertificateRequest
-CERT_DestroyCertList
-CERT_DestroyName
-CERT_DestroyOidSequence
-CERT_DestroyUserNotice
-CERT_DestroyValidity
-CERT_DisableOCSPChecking
-CERT_DisableOCSPDefaultResponder
-CERT_DupCertificate
-CERT_EnableOCSPChecking
-CERT_EncodeAltNameExtension
-CERT_EncodeAndAddBitStrExtension
-CERT_EncodeAuthKeyID
-CERT_EncodeBasicConstraintValue
-CERT_EncodeCertPoliciesExtension
-CERT_EncodeCRLDistributionPoints
-CERT_EncodeInfoAccessExtension
-CERT_EncodeInhibitAnyExtension
-CERT_EncodeNameConstraintsExtension
-CERT_EncodeNoticeReference
-CERT_EncodePolicyConstraintsExtension
-CERT_EncodePolicyMappingExtension
-CERT_EncodeSubjectKeyID
-CERT_EncodeUserNotice
-CERT_ExtractPublicKey
-CERT_FilterCertListByCANames
-CERT_FilterCertListByUsage
-CERT_FilterCertListForUserCerts
-CERT_FindCertByDERCert
-CERT_FindCertByIssuerAndSN
-CERT_FindCertByName
-CERT_FindCertByNickname
-CERT_FindCertByNicknameOrEmailAddr
-CERT_FindCertExtension
-CERT_FindCertIssuer
-CERT_FindKeyUsageExtension
-CERT_FindUserCertByUsage
-CERT_FindUserCertsByUsage
-CERT_FinishCertificateRequestAttributes
-CERT_FinishExtensions
-CERT_ForcePostMethodForOCSP
-CERT_FreeNicknames
-CERT_GenTime2FormattedAscii_Util
-CERT_GetCertChainFromCert
-CERT_GetCertEmailAddress
-CERT_GetCertificateRequestExtensions
-CERT_GetCertTimes
-CERT_GetCertTrust
-CERT_GetCommonName
-CERT_GetConstrainedCertificateNames
-CERT_GetCountryName
-CERT_GetDefaultCertDB
-CERT_GetFirstEmailAddress
-CERT_GetGeneralNameTypeFromString
-CERT_GetImposedNameConstraints
-CERT_GetLocalityName
-CERT_GetNextEmailAddress
-CERT_GetNextGeneralName
-CERT_GetNextNameConstraint
-CERT_GetOCSPAuthorityInfoAccessLocation
-CERT_GetOidString
-CERT_GetOrgName
-CERT_GetOrgUnitName
-CERT_GetStateName
-CERT_Hexify
-CERT_ImportCerts
-CERT_IsCACert
-CERT_IsUserCert
-CERT_MakeCANickname
-CERT_MergeExtensions
-CERT_NameTemplate @DATA@
-CERT_NameToAscii
-CERT_NewCertList
-CERT_NewTempCertificate
-CERT_NicknameStringsFromCertList
-CERT_OCSPCacheSettings
-CERT_PKIXVerifyCert
-CERT_RemoveCertListNode
-CERT_RFC1485_EscapeAndQuote
-CERT_SaveSMimeProfile
-CERT_SequenceOfCertExtensionTemplate @DATA@
-CERT_SetOCSPFailureMode
-CERT_SetOCSPTimeout
-CERT_SignedCrlTemplate @DATA@
-CERT_SignedDataTemplate @DATA@
-CERT_StartCertExtensions
-CERT_StartCertificateRequestAttributes
-CERT_SubjectPublicKeyInfoTemplate @DATA@
-CERT_TimeChoiceTemplate @DATA@
-CERT_VerifyCertificate
-CERT_VerifySignedDataWithPublicKeyInfo
-DER_AsciiToTime_Util
-DER_DecodeTimeChoice_Util
-DER_Encode
-DER_EncodeTimeChoice_Util
-DER_Encode_Util
-DER_GeneralizedTimeToTime
-DER_GeneralizedTimeToTime_Util
-DER_GetInteger
-DER_GetInteger_Util
-DER_Lengths
-DER_SetUInteger
-DER_UTCTimeToTime_Util
-DSAU_DecodeDerSigToLen
-DSAU_EncodeDerSigWithLen
-DTLS_GetHandshakeTimeout
-DTLS_ImportFD
-HASH_Begin
-HASH_Create
-HASH_Destroy
-HASH_End
-HASH_GetHashObject
-HASH_GetType
-HASH_HashBuf
-HASH_ResultLenByOidTag
-HASH_Update
-NSSBase64_DecodeBuffer
-NSSBase64_EncodeItem
-NSSBase64_EncodeItem_Util
-NSS_CMSContentInfo_GetContent
-NSS_CMSContentInfo_SetContent_Data
-NSS_CMSContentInfo_SetContent_EnvelopedData
-NSS_CMSContentInfo_SetContent_SignedData
-NSS_CMSDecoder_Cancel
-NSS_CMSDecoder_Finish
-NSS_CMSDecoder_Start
-NSS_CMSDecoder_Update
-NSS_CMSEncoder_Cancel
-NSS_CMSEncoder_Finish
-NSS_CMSEncoder_Start
-NSS_CMSEncoder_Update
-NSS_CMSEnvelopedData_AddRecipient
-NSS_CMSEnvelopedData_Create
-NSS_CMSEnvelopedData_GetContentInfo
-NSS_CMSMessage_ContentLevel
-NSS_CMSMessage_ContentLevelCount
-NSS_CMSMessage_Create
-NSS_CMSMessage_CreateFromDER
-NSS_CMSMessage_Destroy
-NSS_CMSMessage_GetContent
-NSS_CMSMessage_GetContentInfo
-NSS_CMSMessage_IsEncrypted
-NSS_CMSMessage_IsSigned
-NSS_CMSRecipientInfo_Create
-NSS_CMSSignedData_AddCertificate
-NSS_CMSSignedData_AddCertList
-NSS_CMSSignedData_AddSignerInfo
-NSS_CMSSignedData_Create
-NSS_CMSSignedData_CreateCertsOnly
-NSS_CMSSignedData_Destroy
-NSS_CMSSignedData_GetContentInfo
-NSS_CMSSignedData_GetSignerInfo
-NSS_CMSSignedData_ImportCerts
-NSS_CMSSignedData_SetDigestValue
-NSS_CMSSignedData_SignerInfoCount
-NSS_CMSSignedData_VerifySignerInfo
-NSS_CMSSignerInfo_AddMSSMIMEEncKeyPrefs
-NSS_CMSSignerInfo_AddSigningTime
-NSS_CMSSignerInfo_AddSMIMECaps
-NSS_CMSSignerInfo_AddSMIMEEncKeyPrefs
-NSS_CMSSignerInfo_Create
-NSS_CMSSignerInfo_GetSignerCommonName
-NSS_CMSSignerInfo_GetSignerEmailAddress
-NSS_CMSSignerInfo_GetSigningCertificate
-NSS_CMSSignerInfo_IncludeCerts
-NSS_CMSSignerInfo_Verify
-NSS_FindCertKEAType
-NSS_GetAlgorithmPolicy
-NSS_Get_CERT_CertificateRequestTemplate
-NSS_Get_CERT_CertificateTemplate
-NSS_Get_CERT_CrlTemplate
-NSS_Get_CERT_NameTemplate
-NSS_Get_CERT_SequenceOfCertExtensionTemplate
-NSS_Get_CERT_SignedCrlTemplate
-NSS_Get_CERT_SignedDataTemplate
-NSS_Get_CERT_SubjectPublicKeyInfoTemplate
-NSS_Get_CERT_TimeChoiceTemplate
-NSS_Get_SEC_AnyTemplate_Util
-NSS_Get_SEC_BitStringTemplate
-NSS_Get_SEC_BitStringTemplate_Util
-NSS_Get_SEC_BMPStringTemplate
-NSS_Get_SEC_BooleanTemplate_Util
-NSS_Get_SEC_GeneralizedTimeTemplate_Util
-NSS_Get_SEC_IA5StringTemplate
-NSS_Get_SEC_IA5StringTemplate_Util
-NSS_Get_SEC_IntegerTemplate
-NSS_Get_SEC_IntegerTemplate_Util
-NSS_Get_SECKEY_RSAPSSParamsTemplate
-NSS_Get_SEC_NullTemplate_Util
-NSS_Get_SEC_ObjectIDTemplate_Util
-NSS_Get_SEC_OctetStringTemplate
-NSS_Get_SEC_OctetStringTemplate_Util
-NSS_Get_SECOID_AlgorithmIDTemplate
-NSS_Get_SECOID_AlgorithmIDTemplate_Util
-NSS_Get_SEC_SignedCertificateTemplate
-NSS_Get_SEC_UTF8StringTemplate
-NSS_Get_SEC_UTF8StringTemplate_Util
-NSS_GetVersion
-NSS_Init
-NSS_Initialize
-NSS_InitWithMerge
-NSS_IsInitialized
-NSS_NoDB_Init
-NSS_SecureMemcmp
-NSS_SetAlgorithmPolicy
-NSS_SetDomesticPolicy
-NSS_Shutdown
-NSSSMIME_GetVersion
-NSS_SMIMESignerInfo_SaveSMIMEProfile
-NSS_SMIMEUtil_FindBulkAlgForRecipients
-NSSSSL_GetVersion
-NSSUTIL_ArgDecodeNumber
-NSSUTIL_ArgFetchValue
-NSSUTIL_ArgGetLabel
-NSSUTIL_ArgGetParamValue
-NSSUTIL_ArgHasFlag
-NSSUTIL_ArgIsBlank
-NSSUTIL_ArgParseCipherFlags
-NSSUTIL_ArgParseModuleSpec
-NSSUTIL_ArgParseSlotFlags
-NSSUTIL_ArgParseSlotInfo
-NSSUTIL_ArgReadLong
-NSSUTIL_ArgSkipParameter
-NSSUTIL_ArgStrip
-NSSUTIL_DoModuleDBFunction
-_NSSUTIL_EvaluateConfigDir
-_NSSUTIL_GetSecmodName
-NSSUTIL_GetVersion
-NSSUTIL_MkModuleSpec
-NSSUTIL_MkNSSString
-NSSUTIL_MkSlotString
-NSSUTIL_Quote
-PK11_AlgtagToMechanism
-PK11_Authenticate
-PK11_ChangePW
-PK11_CheckUserPassword
-PK11_CipherOp
-PK11_ConfigurePKCS11
-PK11_CreateContextBySymKey
-PK11_CreateDigestContext
-PK11_CreateGenericObject
-PK11_CreateMergeLog
-PK11_CreatePBEV2AlgorithmID
-PK11_Decrypt
-PK11_DeleteTokenCertAndKey
-PK11_DeleteTokenPrivateKey
-PK11_DeleteTokenPublicKey
-PK11_DEREncodePublicKey
-PK11_Derive
-PK11_DeriveWithTemplate
-PK11_DestroyContext
-PK11_DestroyGenericObject
-PK11_DestroyMergeLog
-PK11_DestroyObject
-PK11_DestroyTokenObject
-PK11_DigestBegin
-PK11_DigestFinal
-PK11_DigestOp
-PK11_DoesMechanism
-PK11_Encrypt
-PK11_ExportDERPrivateKeyInfo
-PK11_ExportEncryptedPrivKeyInfo
-PK11_ExtractKeyValue
-PK11_FindCertFromNickname
-PK11_FindCertsFromEmailAddress
-PK11_FindCertsFromNickname
-PK11_FindKeyByAnyCert
-PK11_FindKeyByDERCert
-PK11_FindKeyByKeyID
-PK11_FindSlotByName
-PK11_FindSlotsByNames
-PK11_FreeSlot
-PK11_FreeSlotList
-PK11_FreeSlotListElement
-PK11_FreeSymKey
-PK11_GenerateKeyPair
-PK11_GenerateKeyPairWithFlags
-PK11_GenerateKeyPairWithOpFlags
-PK11_GenerateRandom
-PK11_GenerateRandomOnSlot
-PK11_GetAllSlotsForCert
-PK11_GetAllTokens
-PK11_GetBestSlot
-PK11_GetBestSlotMultiple
-PK11_GetBlockSize
-PK11_GetCertFromPrivateKey
-PK11_GetDefaultArray
-PK11_GetDefaultFlags
-PK11_GetDisabledReason
-PK11_GetFirstSafe
-PK11_GetInternalKeySlot
-PK11_GetInternalSlot
-PK11_GetIVLength
-PK11_GetKeyData
-PK11_GetKeyGen
-PK11_GetLowLevelKeyIDForPrivateKey
-PK11_GetMechanism
-PK11_GetMinimumPwdLength
-PK11_GetModInfo
-PK11_GetNextSafe
-PK11_GetPadMechanism
-PK11_GetPrivateKeyNickname
-PK11_GetPrivateModulusLen
-PK11_GetSlotID
-PK11_GetSlotInfo
-PK11_GetSlotName
-PK11_GetSlotPWValues
-PK11_GetSlotSeries
-PK11_GetTokenInfo
-PK11_GetTokenName
-PK11_HashBuf
-PK11_HasRootCerts
-PK11_ImportCert
-PK11_ImportCertForKey
-PK11_ImportCRL
-PK11_ImportDERPrivateKeyInfoAndReturnKey
-PK11_ImportPublicKey
-PK11_ImportSymKey
-PK11_InitPin
-PK11_IsDisabled
-PK11_IsFIPS
-PK11_IsFriendly
-PK11_IsHW
-PK11_IsInternal
-PK11_IsLoggedIn
-PK11_IsPresent
-PK11_IsReadOnly
-PK11_IsRemovable
-PK11_KeyForCertExists
-PK11_KeyGen
-PK11_KeyGenWithTemplate
-PK11_ListCerts
-PK11_ListCertsInSlot
-PK11_ListPrivateKeysInSlot
-PK11_ListPrivKeysInSlot
-PK11_LoadPrivKey
-PK11_Logout
-PK11_LogoutAll
-PK11_MakeIDFromPubKey
-PK11_MechanismToAlgtag
-PK11_MergeTokens
-PK11_NeedLogin
-PK11_NeedUserInit
-PK11_ParamFromIV
-PK11_PBEKeyGen
-PK11_PrivDecrypt
-PK11_PrivDecryptPKCS1
-PK11_ProtectedAuthenticationPath
-PK11_PubDeriveWithKDF
-PK11_PubEncrypt
-PK11_PubEncryptPKCS1
-PK11_PubUnwrapSymKey
-PK11_PubWrapSymKey
-PK11_RandomUpdate
-PK11_ReadRawAttribute
-PK11_ReferenceSlot
-PK11_ResetToken
-PK11SDR_Decrypt
-PK11SDR_Encrypt
-PK11_SetPasswordFunc
-PK11_SetSlotPWValues
-PK11_Sign
-PK11_SignatureLen
-PK11_UnwrapPrivKey
-PK11_UnwrapSymKey
-PK11_UpdateSlotAttribute
-PK11_UserDisableSlot
-PK11_UserEnableSlot
-PK11_WrapPrivKey
-PK11_WrapSymKey
-PORT_Alloc
-PORT_Alloc_Util
-PORT_ArenaAlloc
-PORT_ArenaAlloc_Util
-PORT_ArenaGrow_Util
-PORT_ArenaMark_Util
-PORT_ArenaRelease_Util
-PORT_ArenaStrdup
-PORT_ArenaStrdup_Util
-PORT_ArenaUnmark_Util
-PORT_ArenaZAlloc
-PORT_ArenaZAlloc_Util
-PORT_Free
-PORT_FreeArena
-PORT_FreeArena_Util
-PORT_Free_Util
-PORT_GetError
-PORT_GetError_Util
-PORT_NewArena
-PORT_NewArena_Util
-PORT_Realloc_Util
-PORT_RegExpSearch
-PORT_SetError
-PORT_SetError_Util
-PORT_SetUCS2_ASCIIConversionFunction
-PORT_SetUCS2_ASCIIConversionFunction_Util
-PORT_Strdup
-PORT_Strdup_Util
-PORT_UCS2_ASCIIConversion_Util
-PORT_UCS2_UTF8Conversion
-PORT_UCS2_UTF8Conversion_Util
-PORT_ZAlloc
-PORT_ZAlloc_Util
-PORT_ZFree_Util
-SEC_AnyTemplate_Util @DATA@
-SEC_ASN1Decode
-SEC_ASN1DecodeInteger
-SEC_ASN1DecodeItem
-SEC_ASN1DecodeItem_Util
-SEC_ASN1Decode_Util
-SEC_ASN1EncodeInteger_Util
-SEC_ASN1EncodeItem
-SEC_ASN1EncodeItem_Util
-SEC_ASN1EncodeUnsignedInteger_Util
-SEC_ASN1Encode_Util
-SEC_BitStringTemplate @DATA@
-SEC_BitStringTemplate_Util @DATA@
-SEC_BMPStringTemplate @DATA@
-SEC_BooleanTemplate_Util @DATA@
-SEC_CertNicknameConflict
-SEC_DeletePermCertificate
-SEC_DerSignData
-SEC_DestroyCrl
-SEC_GeneralizedTimeTemplate_Util @DATA@
-SEC_GetSignatureAlgorithmOidTag
-SEC_IA5StringTemplate @DATA@
-SEC_IA5StringTemplate_Util @DATA@
-SEC_IntegerTemplate @DATA@
-SEC_IntegerTemplate_Util @DATA@
-SECITEM_AllocArray
-SECITEM_AllocItem
-SECITEM_AllocItem_Util
-SECITEM_ArenaDupItem_Util
-SECITEM_CompareItem
-SECITEM_CompareItem_Util
-SECITEM_CopyItem
-SECITEM_CopyItem_Util
-SECITEM_DupArray
-SECITEM_DupItem
-SECITEM_DupItem_Util
-SECITEM_FreeItem
-SECITEM_FreeItem_Util
-SECITEM_HashCompare
-SECITEM_ItemsAreEqual
-SECITEM_ItemsAreEqual_Util
-SECITEM_ReallocItemV2
-SECITEM_ZfreeItem
-SECITEM_ZfreeItem_Util
-SECKEY_ConvertToPublicKey
-SECKEY_CopyPrivateKey
-SECKEY_CopyPublicKey
-SECKEY_CopySubjectPublicKeyInfo
-SECKEY_CreateSubjectPublicKeyInfo
-SECKEY_DecodeDERSubjectPublicKeyInfo
-SECKEY_DestroyEncryptedPrivateKeyInfo
-SECKEY_DestroyPrivateKey
-SECKEY_DestroyPrivateKeyList
-SECKEY_DestroyPublicKey
-SECKEY_DestroySubjectPublicKeyInfo
-SECKEY_ECParamsToBasePointOrderLen
-SECKEY_ECParamsToKeySize
-SECKEY_EncodeDERSubjectPublicKeyInfo
-SECKEY_ExtractPublicKey
-SECKEY_GetPublicKeyType
-SECKEY_ImportDERPublicKey
-SECKEY_PublicKeyStrength
-SECKEY_RSAPSSParamsTemplate @DATA@
-SECKEY_SignatureLen
-SECMIME_DecryptionAllowed
-SECMOD_AddNewModule
-SECMOD_AddNewModuleEx
-SECMOD_CancelWait
-SECMOD_CanDeleteInternalModule
-SECMOD_CloseUserDB
-SECMOD_CreateModule
-SECMOD_DeleteInternalModule
-SECMOD_DeleteModule
-SECMOD_DestroyModule
-SECMOD_FindModule
-SECMOD_GetDeadModuleList
-SECMOD_GetDefaultModuleList
-SECMOD_GetDefaultModuleListLock
-SECMOD_GetInternalModule
-SECMOD_GetModuleSpecList
-SECMOD_GetReadLock
-SECMOD_HasRemovableSlots
-SECMOD_InternaltoPubMechFlags
-SECMOD_LoadModule
-SECMOD_LoadUserModule
-SECMOD_OpenUserDB
-SECMOD_PubCipherFlagstoInternal
-SECMOD_PubMechFlagstoInternal
-SECMOD_ReferenceModule
-SECMOD_ReleaseReadLock
-SECMOD_UnloadUserModule
-SECMOD_UpdateModule
-SECMOD_WaitForAnyTokenEvent
-SEC_NullTemplate_Util @DATA@
-SEC_ObjectIDTemplate_Util @DATA@
-SEC_OctetStringTemplate @DATA@
-SEC_OctetStringTemplate_Util @DATA@
-SECOID_AddEntry
-SECOID_AddEntry_Util
-SECOID_AlgorithmIDTemplate @DATA@
-SECOID_AlgorithmIDTemplate_Util @DATA@
-SECOID_CopyAlgorithmID_Util
-SECOID_DestroyAlgorithmID
-SECOID_DestroyAlgorithmID_Util
-SECOID_FindOID
-SECOID_FindOIDByMechanism
-SECOID_FindOIDByTag
-SECOID_FindOIDByTag_Util
-SECOID_FindOIDTag
-SECOID_FindOIDTagDescription_Util
-SECOID_FindOIDTag_Util
-SECOID_FindOID_Util
-SECOID_GetAlgorithmTag
-SECOID_GetAlgorithmTag_Util
-SECOID_Init
-SECOID_SetAlgorithmID
-SECOID_SetAlgorithmID_Util
-SECOID_Shutdown
-SEC_PKCS12AddCertAndKey
-SEC_PKCS12AddPasswordIntegrity
-SEC_PKCS12CreateExportContext
-SEC_PKCS12CreatePasswordPrivSafe
-SEC_PKCS12CreateUnencryptedSafe
-SEC_PKCS12DecoderFinish
-SEC_PKCS12DecoderImportBags
-SEC_PKCS12DecoderIterateInit
-SEC_PKCS12DecoderIterateNext
-SEC_PKCS12DecoderRenameCertNicknames
-SEC_PKCS12DecoderStart
-SEC_PKCS12DecoderUpdate
-SEC_PKCS12DecoderValidateBags
-SEC_PKCS12DecoderVerify
-SEC_PKCS12DestroyExportContext
-SEC_PKCS12EnableCipher
-SEC_PKCS12Encode
-SEC_PKCS12IsEncryptionAllowed
-SEC_PKCS12SetPreferredCipher
-SEC_PKCS5GetPBEAlgorithm
-SEC_PKCS5IsAlgorithmPBEAlgTag
-SEC_PKCS7AddSigningTime
-SEC_PKCS7ContentIsEncrypted
-SEC_PKCS7ContentIsSigned
-SEC_PKCS7CopyContentInfo
-SEC_PKCS7CreateSignedData
-SEC_PKCS7DecodeItem
-SEC_PKCS7DecoderFinish
-SEC_PKCS7DecoderStart
-SEC_PKCS7DecoderUpdate
-SEC_PKCS7DestroyContentInfo
-SEC_PKCS7Encode
-SEC_PKCS7IncludeCertChain
-SEC_PKCS7VerifyDetachedSignature
-SEC_QuickDERDecodeItem
-SEC_QuickDERDecodeItem_Util
-SEC_RegisterDefaultHttpClient
-SEC_SignData
-SEC_SignedCertificateTemplate @DATA@
-SEC_StringToOID
-SEC_UTF8StringTemplate @DATA@
-SEC_UTF8StringTemplate_Util @DATA@
-SGN_Begin
-SGN_CreateDigestInfo_Util
-SGN_DecodeDigestInfo
-SGN_DestroyContext
-SGN_DestroyDigestInfo_Util
-SGN_End
-SGN_NewContext
-SGN_Update
-SSL_AuthCertificateComplete
-SSL_AuthCertificateHook
-SSL_CipherPrefGet
-SSL_CipherPrefSet
-SSL_CipherPrefSetDefault
-SSL_ClearSessionCache
-SSL_ConfigSecureServer
-SSL_ConfigSecureServerWithCertChain
-SSL_ConfigServerSessionIDCache
-SSL_ExportKeyingMaterial
-SSL_ForceHandshake
-SSL_GetChannelInfo
-SSL_GetCipherSuiteInfo
-SSL_GetClientAuthDataHook
-SSL_GetImplementedCiphers
-SSL_GetNextProto
-SSL_GetNumImplementedCiphers
-SSL_GetSRTPCipher
-SSL_HandshakeCallback
-SSL_HandshakeNegotiatedExtension
-SSL_ImplementedCiphers @DATA@
-SSL_ImportFD
-SSL_NumImplementedCiphers @DATA@
-SSL_OptionSet
-SSL_OptionSetDefault
-SSL_PeerCertificate
-SSL_PeerCertificateChain
-SSL_PeerStapledOCSPResponses
-SSL_ResetHandshake
-SSL_SetCanFalseStartCallback
-SSL_SetNextProtoNego
-SSL_SetPKCS11PinArg
-SSL_SetSockPeerID
-SSL_SetSRTPCiphers
-SSL_SetStapledOCSPResponses
-SSL_SetURL
-SSL_SNISocketConfigHook
-SSL_VersionRangeGet
-SSL_VersionRangeGetDefault
-SSL_VersionRangeGetSupported
-SSL_VersionRangeSet
-SSL_VersionRangeSetDefault
-UTIL_SetForkState
-VFY_Begin
-VFY_CreateContext
-VFY_DestroyContext
-VFY_End
-VFY_EndWithSignature
-VFY_Update
-VFY_VerifyData
-VFY_VerifyDataWithAlgorithmID
-VFY_VerifyDigestDirect
-_SGN_VerifyPKCS1DigestInfo
-__PK11_SetCertificateNickname
-# These symbols are not used by Firefox itself, but are used by Java's security
-# libraries, which in turn are used by Java applets/plugins/etc. Provide them
-# to make Java code happy.
-NSS_VersionCheck
-NSS_Initialize
-#ifdef NSS_EXTRA_SYMBOLS_FILE
-#include @NSS_EXTRA_SYMBOLS_FILE@
-#endif
diff --git a/src/third_party/mozjs-45/config/external/sqlite/Makefile.in b/src/third_party/mozjs-45/config/external/sqlite/Makefile.in
deleted file mode 100644
index 02dde05..0000000
--- a/src/third_party/mozjs-45/config/external/sqlite/Makefile.in
+++ /dev/null
@@ -1,28 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-LIB_IS_C_ONLY = 1
-
-include $(topsrcdir)/config/config.mk
-
-ifeq ($(OS_ARCH),WINNT)
-# This needs to stay there for now
-DEFFILE = $(DEPTH)/db/sqlite3/src/sqlite-processed.def
-
-else
-ifndef MOZ_FOLD_LIBS
-ifdef GCC_USE_GNU_LD
-
-GARBAGE += \
- $(LD_VERSION_SCRIPT) \
- $(NULL)
-
-# Convert to the format we need for ld.
-$(LD_VERSION_SCRIPT): $(topsrcdir)/db/sqlite3/src/sqlite.def
- @$(call py_action,convert_def_file, \
- $(DEFINES) $(ACDEFINES) $(MOZ_DEBUG_DEFINES) -o $@ $^)
-
-endif
-endif
-endif
diff --git a/src/third_party/mozjs-45/config/external/sqlite/moz.build b/src/third_party/mozjs-45/config/external/sqlite/moz.build
deleted file mode 100644
index b6f5007..0000000
--- a/src/third_party/mozjs-45/config/external/sqlite/moz.build
+++ /dev/null
@@ -1,23 +0,0 @@
-# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*-
-# vim: set filetype=python:
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-if CONFIG['MOZ_NATIVE_SQLITE']:
- Library('sqlite')
- OS_LIBS += CONFIG['SQLITE_LIBS']
-else:
- DIRS += ['../../../db/sqlite3/src']
- if CONFIG['MOZ_FOLD_LIBS']:
- Library('sqlite')
- # When folding libraries, sqlite is actually in the nss library.
- USE_LIBS += [
- 'nss',
- ]
- else:
- SharedLibrary('sqlite')
- SHARED_LIBRARY_NAME = 'mozsqlite3'
-
- if CONFIG['OS_ARCH'] == 'Linux' and CONFIG['GCC_USE_GNU_LD']:
- LD_VERSION_SCRIPT = 'sqlite-processed.def'
diff --git a/src/third_party/mozjs-45/config/external/zlib/moz.build b/src/third_party/mozjs-45/config/external/zlib/moz.build
deleted file mode 100644
index f1efe43..0000000
--- a/src/third_party/mozjs-45/config/external/zlib/moz.build
+++ /dev/null
@@ -1,21 +0,0 @@
-# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*-
-# vim: set filetype=python:
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-Library('zlib')
-
-if CONFIG['MOZ_NATIVE_ZLIB']:
- OS_LIBS += CONFIG['MOZ_ZLIB_LIBS']
-else:
- if CONFIG['ZLIB_IN_MOZGLUE']:
- # Can't do this until mozglue is handled by moz.build instead of
- # config/rules.mk.
- # USE_LIBS += [
- # 'mozglue'
- # ]
- pass
- DIRS += [
- '../../../modules/zlib',
- ]
diff --git a/src/third_party/mozjs-45/config/faster/rules.mk b/src/third_party/mozjs-45/config/faster/rules.mk
deleted file mode 100644
index cf665a6..0000000
--- a/src/third_party/mozjs-45/config/faster/rules.mk
+++ /dev/null
@@ -1,141 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-# /!\ Please make sure to update the following comment when you touch this
-# file. Thank you /!\
-
-# The traditional Mozilla build system relied on going through the entire
-# build tree a number of times with different targets, and many of the
-# things happening at each step required other things happening in previous
-# steps without any documentation of those dependencies.
-#
-# This new build system tries to start afresh by establishing what files or
-# operations are needed for the build, and applying the necessary rules to
-# have those in place, relying on make dependencies to get them going.
-#
-# As of writing, only building non-compiled parts of Firefox is supported
-# here (a few other things are also left out). This is a starting point, with
-# the intent to grow this build system to make it more complete.
-#
-# This file contains rules and dependencies to get things working. The intent
-# is for a Makefile to define some dependencies and variables, and include
-# this file. What needs to be defined there, and ends up being generated by
-# python/mozbuild/mozbuild/backend/fastermake.py is the following:
-# - TOPSRCDIR/TOPOBJDIR, respectively the top source directory and the top
-# object directory
-# - BACKEND, the path to the file the backend will always update when running
-# mach build-backend
-# - PYTHON, the path to the python executable
-# - ACDEFINES, which contains a set of -Dvar=name to be used during
-# preprocessing
-# - INSTALL_MANIFESTS, which defines the list of base directories handled
-# by install manifests, see further below
-# - MANIFEST_TARGETS, which defines the file paths of chrome manifests, see
-# further below
-#
-# A convention used between this file and the Makefile including it is that
-# global Make variables names are uppercase, while "local" Make variables
-# applied to specific targets are lowercase.
-
-# Targets to be triggered for a default build
-default: $(addprefix install-,$(INSTALL_MANIFESTS))
-
-# Explicit files to be built for a default build
-default: $(addprefix $(TOPOBJDIR)/,$(MANIFEST_TARGETS))
-ifndef TEST_MOZBUILD
-default: $(TOPOBJDIR)/dist/bin/platform.ini
-endif
-
-ifndef NO_XPIDL
-# Targets from the recursive make backend to be built for a default build
-default: $(TOPOBJDIR)/config/makefiles/xpidl/xpidl
-endif
-
-ifeq (cocoa,$(MOZ_WIDGET_TOOLKIT))
-# Mac builds require to copy things in dist/bin/*.app
-# TODO: remove the MOZ_WIDGET_TOOLKIT and MOZ_BUILD_APP variables from
-# faster/Makefile and python/mozbuild/mozbuild/test/backend/test_build.py
-# when this is not required anymore.
-default:
- $(MAKE) -C $(TOPOBJDIR)/$(MOZ_BUILD_APP)/app repackage
-endif
-
-.PHONY: FORCE
-
-# Extra define to trigger some workarounds. We should strive to limit the
-# use of those. As of writing the only ones are in
-# toolkit/content/buildconfig.html and browser/locales/jar.mn.
-ACDEFINES += -DBUILD_FASTER
-
-# Generic rule to fall back to the recursive make backend
-$(TOPOBJDIR)/%: FORCE
- $(MAKE) -C $(dir $@) $(notdir $@)
-
-# Files under the faster/ sub-directory, however, are not meant to use the
-# fallback
-$(TOPOBJDIR)/faster/%: ;
-
-# Files under the python virtualenv, which are dependencies of the BACKEND
-# file, are not meant to use the fallback either.
-$(TOPOBJDIR)/_virtualenv/%: ;
-
-# And files under dist/ are meant to be copied from their first dependency
-# if there is no other rule.
-$(TOPOBJDIR)/dist/%:
- rm -f $@
- mkdir -p $(@D)
- cp $< $@
-
-# Refresh backend
-$(BACKEND):
- cd $(TOPOBJDIR) && $(PYTHON) config.status --backend FasterMake
-
-$(MAKEFILE_LIST): $(BACKEND)
-
-# Install files using install manifests
-#
-# The list of base directories is given in INSTALL_MANIFESTS. The
-# corresponding install manifests are named correspondingly, with forward
-# slashes replaced with underscores, and prefixed with `install_`. That is,
-# the install manifest for `dist/bin` would be `install_dist_bin`.
-$(addprefix install-,$(INSTALL_MANIFESTS)): install-%: $(TOPOBJDIR)/config/buildid
- @# For now, force preprocessed files to be reprocessed every time.
- @# The overhead is not that big, and this avoids waiting for proper
- @# support for defines tracking in process_install_manifest.
- @touch install_$(subst /,_,$*)
- $(PYTHON) -m mozbuild.action.process_install_manifest \
- --track install_$(subst /,_,$*).track \
- $(TOPOBJDIR)/$* \
- -DAB_CD=en-US \
- -DMOZ_APP_BUILDID=$(shell cat $(TOPOBJDIR)/config/buildid) \
- $(ACDEFINES) \
- $(MOZ_DEBUG_DEFINES) \
- install_$(subst /,_,$*)
-
-# Create some chrome manifests
-# This rule is forced to run every time because it may be updating files that
-# already exit.
-#
-# The list of chrome manifests is given in MANIFEST_TARGETS, relative to the
-# top object directory. The content for those manifests is given in the
-# `content` variable associated with the target. For example:
-# MANIFEST_TARGETS = foo
-# $(TOPOBJDIR)/foo: content = "manifest foo.manifest" "manifest bar.manifest"
-$(addprefix $(TOPOBJDIR)/,$(MANIFEST_TARGETS)): FORCE
- $(PYTHON) -m mozbuild.action.buildlist \
- $@ \
- $(content)
-
-# ============================================================================
-# Below is a set of additional dependencies and variables used to build things
-# that are not supported by data in moz.build.
-
-# Files to build with the recursive backend and simply copy
-$(TOPOBJDIR)/dist/bin/platform.ini: $(TOPOBJDIR)/toolkit/xre/platform.ini
-
-$(TOPOBJDIR)/toolkit/xre/platform.ini: $(TOPOBJDIR)/config/buildid
-
-# The xpidl target in config/makefiles/xpidl requires the install manifest for
-# dist/idl to have been processed.
-$(TOPOBJDIR)/config/makefiles/xpidl/xpidl: $(TOPOBJDIR)/install-dist_idl
diff --git a/src/third_party/mozjs-45/config/find_OOM_errors.py b/src/third_party/mozjs-45/config/find_OOM_errors.py
deleted file mode 100644
index 1606511..0000000
--- a/src/third_party/mozjs-45/config/find_OOM_errors.py
+++ /dev/null
@@ -1,352 +0,0 @@
-#!/usr/bin/env python
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-from __future__ import print_function
-
-usage = """%prog: A test for OOM conditions in the shell.
-
-%prog finds segfaults and other errors caused by incorrect handling of
-allocation during OOM (out-of-memory) conditions.
-"""
-
-help = """Check for regressions only. This runs a set of files with a known
-number of OOM errors (specified by REGRESSION_COUNT), and exits with a non-zero
-result if more or less errors are found. See js/src/Makefile.in for invocation.
-"""
-
-
-import hashlib
-import re
-import shlex
-import subprocess
-import sys
-import threading
-import time
-
-from optparse import OptionParser
-
-#####################################################################
-# Utility functions
-#####################################################################
-def run(args, stdin=None):
- class ThreadWorker(threading.Thread):
- def __init__(self, pipe):
- super(ThreadWorker, self).__init__()
- self.all = ""
- self.pipe = pipe
- self.setDaemon(True)
-
- def run(self):
- while True:
- line = self.pipe.readline()
- if line == '': break
- else:
- self.all += line
-
- try:
- if type(args) == str:
- args = shlex.split(args)
-
- args = [str(a) for a in args] # convert to strs
-
- stdin_pipe = subprocess.PIPE if stdin else None
- proc = subprocess.Popen(args, stdin=stdin_pipe, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- if stdin_pipe:
- proc.stdin.write(stdin)
- proc.stdin.close()
-
- stdout_worker = ThreadWorker(proc.stdout)
- stderr_worker = ThreadWorker(proc.stderr)
- stdout_worker.start()
- stderr_worker.start()
-
- proc.wait()
- stdout_worker.join()
- stderr_worker.join()
-
- except KeyboardInterrupt as e:
- sys.exit(-1)
-
- stdout, stderr = stdout_worker.all, stderr_worker.all
- result = (stdout, stderr, proc.returncode)
- return result
-
-def get_js_files():
- (out, err, exit) = run('find ../jit-test/tests -name "*.js"')
- if (err, exit) != ("", 0):
- sys.exit("Wrong directory, run from an objdir")
- return out.split()
-
-
-
-#####################################################################
-# Blacklisting
-#####################################################################
-def in_blacklist(sig):
- return sig in blacklist
-
-def add_to_blacklist(sig):
- blacklist[sig] = blacklist.get(sig, 0)
- blacklist[sig] += 1
-
-# How often is a particular lines important for this.
-def count_lines():
- """Keep track of the amount of times individual lines occur, in order to
- prioritize the errors which occur most frequently."""
- counts = {}
- for string,count in blacklist.items():
- for line in string.split("\n"):
- counts[line] = counts.get(line, 0) + count
-
- lines = []
- for k,v in counts.items():
- lines.append("{0:6}: {1}".format(v, k))
-
- lines.sort()
-
- countlog = file("../OOM_count_log", "w")
- countlog.write("\n".join(lines))
- countlog.flush()
- countlog.close()
-
-
-#####################################################################
-# Output cleaning
-#####################################################################
-def clean_voutput(err):
- # Skip what we can't reproduce
- err = re.sub(r"^--\d+-- run: /usr/bin/dsymutil \"shell/js\"$", "", err, flags=re.MULTILINE)
- err = re.sub(r"^==\d+==", "", err, flags=re.MULTILINE)
- err = re.sub(r"^\*\*\d+\*\*", "", err, flags=re.MULTILINE)
- err = re.sub(r"^\s+by 0x[0-9A-Fa-f]+: ", "by: ", err, flags=re.MULTILINE)
- err = re.sub(r"^\s+at 0x[0-9A-Fa-f]+: ", "at: ", err, flags=re.MULTILINE)
- err = re.sub(r"(^\s+Address 0x)[0-9A-Fa-f]+( is not stack'd)", r"\1\2", err, flags=re.MULTILINE)
- err = re.sub(r"(^\s+Invalid write of size )\d+", r"\1x", err, flags=re.MULTILINE)
- err = re.sub(r"(^\s+Invalid read of size )\d+", r"\1x", err, flags=re.MULTILINE)
- err = re.sub(r"(^\s+Address 0x)[0-9A-Fa-f]+( is )\d+( bytes inside a block of size )[0-9,]+( free'd)", r"\1\2\3\4", err, flags=re.MULTILINE)
-
- # Skip the repeating bit due to the segfault
- lines = []
- for l in err.split('\n'):
- if l == " Process terminating with default action of signal 11 (SIGSEGV)":
- break
- lines.append(l)
- err = '\n'.join(lines)
-
- return err
-
-def remove_failed_allocation_backtraces(err):
- lines = []
-
- add = True
- for l in err.split('\n'):
-
- # Set start and end conditions for including text
- if l == " The site of the failed allocation is:":
- add = False
- elif l[:2] not in ['by: ', 'at:']:
- add = True
-
- if add:
- lines.append(l)
-
-
- err = '\n'.join(lines)
-
- return err
-
-
-def clean_output(err):
- err = re.sub(r"^js\(\d+,0x[0-9a-f]+\) malloc: \*\*\* error for object 0x[0-9a-f]+: pointer being freed was not allocated\n\*\*\* set a breakppoint in malloc_error_break to debug\n$", "pointer being freed was not allocated", err, flags=re.MULTILINE)
-
- return err
-
-
-#####################################################################
-# Consts, etc
-#####################################################################
-
-command_template = 'shell/js' \
- + ' -m -j -p' \
- + ' -e "const platform=\'darwin\'; const libdir=\'../jit-test/lib/\';"' \
- + ' -f ../jit-test/lib/prolog.js' \
- + ' -f {0}'
-
-
-# Blacklists are things we don't want to see in our logs again (though we do
-# want to count them when they happen). Whitelists we do want to see in our
-# logs again, principally because the information we have isn't enough.
-
-blacklist = {}
-add_to_blacklist(r"('', '', 1)") # 1 means OOM if the shell hasn't launched yet.
-add_to_blacklist(r"('', 'out of memory\n', 1)")
-
-whitelist = set()
-whitelist.add(r"('', 'out of memory\n', -11)") # -11 means OOM
-whitelist.add(r"('', 'out of memory\nout of memory\n', -11)")
-
-
-
-#####################################################################
-# Program
-#####################################################################
-
-# Options
-parser = OptionParser(usage=usage)
-parser.add_option("-r", "--regression", action="store", metavar="REGRESSION_COUNT", help=help,
- type="int", dest="regression", default=None)
-
-(OPTIONS, args) = parser.parse_args()
-
-
-if OPTIONS.regression != None:
- # TODO: This should be expanded as we get a better hang of the OOM problems.
- # For now, we'll just check that the number of OOMs in one short file does not
- # increase.
- files = ["../jit-test/tests/arguments/args-createontrace.js"]
-else:
- files = get_js_files()
-
- # Use a command-line arg to reduce the set of files
- if len (args):
- files = [f for f in files if f.find(args[0]) != -1]
-
-
-if OPTIONS.regression == None:
- # Don't use a logfile, this is automated for tinderbox.
- log = file("../OOM_log", "w")
-
-
-num_failures = 0
-for f in files:
-
- # Run it once to establish boundaries
- command = (command_template + ' -O').format(f)
- out, err, exit = run(command)
- max = re.match(".*OOM max count: (\d+).*", out, flags=re.DOTALL).groups()[0]
- max = int(max)
-
- # OOMs don't recover well for the first 20 allocations or so.
- # TODO: revisit this.
- for i in range(20, max):
-
- if OPTIONS.regression == None:
- print("Testing allocation {0}/{1} in {2}".format(i,max,f))
- else:
- sys.std