Import Cobalt 21.lts.1.276914
diff --git a/src/cobalt/build/build.id b/src/cobalt/build/build.id
index 0b3308c..7f8b3ba 100644
--- a/src/cobalt/build/build.id
+++ b/src/cobalt/build/build.id
@@ -1 +1 @@
-276711
\ No newline at end of file
+276914
\ No newline at end of file
diff --git a/src/starboard/evergreen/shared/gyp_configuration.py b/src/starboard/evergreen/shared/gyp_configuration.py
index d11e04c..ead5082 100644
--- a/src/starboard/evergreen/shared/gyp_configuration.py
+++ b/src/starboard/evergreen/shared/gyp_configuration.py
@@ -102,9 +102,9 @@
def GetTestTargets(self):
tests = super(EvergreenConfiguration, self).GetTestTargets()
- return [test for test in tests if test not in self.__BLACKLISTED_TESTS]
+ return [test for test in tests if test not in self.__FORBIDDEN_TESTS]
- __BLACKLISTED_TESTS = [ # pylint: disable=invalid-name
+ __FORBIDDEN_TESTS = [ # pylint: disable=invalid-name
# elf_loader_test and installation_manager_test are explicitly tests that
# validate the correctness of the underlying platform. We should not be
# running these tests in Evergreen mode, and instead will rely on the
diff --git a/src/tools/gyp/pylib/gyp/generator/ninja.py b/src/tools/gyp/pylib/gyp/generator/ninja.py
index 556dc46..81dc81a 100755
--- a/src/tools/gyp/pylib/gyp/generator/ninja.py
+++ b/src/tools/gyp/pylib/gyp/generator/ninja.py
@@ -26,6 +26,7 @@
import gyp.MSVSUtil as MSVSUtil
import gyp.xcode_emulation
+from gyp.common import GetEnvironFallback
import gyp.ninja_syntax as ninja_syntax
if sys.platform == 'cygwin':
@@ -236,9 +237,28 @@
"""Return true if this is a target that can be linked against."""
return self.type in ('static_library', 'shared_library')
+ def UsesToc(self, flavor):
+ """Return true if the target should produce a restat rule based on a TOC
+ file."""
+ try:
+ # Do not use TOC files for abstract toolchain.
+ toolchain = GetTargetToolchain(flavor)
+ return False
+ except NotImplementedError:
+ # Follow the logic for the legacy toolchain.
+ pass
+ # For bundles, the .TOC should be produced for the binary, not for
+ # FinalOutput(). But the naive approach would put the TOC file into the
+ # bundle, so don't do this for bundles for now.
+ if flavor in windows_host_flavors or self.bundle:
+ return False
+ return self.type in ('shared_library', 'loadable_module')
+
def PreActionInput(self, flavor):
"""Return the path, if any, that should be used as a dependency of
any dependent action step."""
+ if self.UsesToc(flavor):
+ return self.FinalOutput() + '.TOC'
return self.FinalOutput() or self.preaction_stamp
def PreCompileInput(self):
@@ -446,8 +466,12 @@
if len(targets) == 1:
return targets[0]
- assert FindFirstInstanceOf(abstract.Stamp, GetHostToolchain(
- self.flavor)), 'Host toolchain must provide stamp tool.'
+ try:
+ assert FindFirstInstanceOf(abstract.Stamp, GetHostToolchain(
+ self.flavor)), 'Host toolchain must provide stamp tool.'
+ except NotImplementedError:
+ # Fall back to the legacy toolchain.
+ pass
stamp_output = self.GypPathToUniqueOutput(name + '.stamp')
self.ninja.build(stamp_output, 'stamp', targets)
@@ -791,8 +815,12 @@
return all_outputs
def WriteCopy(self, src, dst, prebuild, env, mac_bundle_depends):
- assert FindFirstInstanceOf(abstract.Copy, GetHostToolchain(
- self.flavor)), 'Host toolchain must provide copy tool.'
+ try:
+ assert FindFirstInstanceOf(abstract.Copy, GetHostToolchain(
+ self.flavor)), 'Host toolchain must provide copy tool.'
+ except NotImplementedError:
+ # Fall back to the legacy toolchain.
+ pass
dst = self.GypPathToNinja(dst, env)
# Renormalize with the separator character of the os on which ninja will run
@@ -890,245 +918,572 @@
def WriteSources(self, config_name, config, sources, predepends,
precompiled_header, spec):
"""Write build rules to compile all of |sources|."""
- shell = GetShell(self.flavor)
- if self.toolset == 'target':
- toolchain = GetTargetToolchain(self.flavor, spec=spec,
- config_name=config_name)
- else:
- toolchain = GetHostToolchain(self.flavor, spec=spec,
- config_name=config_name)
+ try:
+ shell = GetShell(self.flavor)
- defines = config.get('defines', [])
- include_dirs = [
- self.GypPathToNinja(include_dir)
- for include_dir in config.get('include_dirs', [])
- ]
-
- # TODO: This code emulates legacy toolchain behavior. We need to migrate
- # to single-responsibility, toolchain-independent GYP keywords as
- # per abstract toolchain design doc.
- cflags = GetConfigFlags(config, self.toolset, 'cflags')
- cflags_c = GetConfigFlags(config, self.toolset, 'cflags_c')
- cflags_cc = GetConfigFlags(config, self.toolset, 'cflags_cc')
- cflags_mm = GetConfigFlags(config, self.toolset, 'cflags_mm')
- obj = 'obj'
- if self.toolset != 'target':
- obj += '.' + self.toolset
- pdbpath = os.path.normpath(
- os.path.join(obj, self.base_dir, self.name + '.pdb'))
- self.WriteVariableList('pdbname', [pdbpath])
-
- c_compiler = FindFirstInstanceOf(abstract.CCompiler, toolchain)
- if c_compiler:
- c_compiler_flags = c_compiler.GetFlags(defines, include_dirs,
- cflags + cflags_c)
- self.ninja.variable(
- '{0}_flags'.format(GetNinjaRuleName(c_compiler, self.toolset)),
- shell.Join(c_compiler_flags))
-
- cxx_compiler = FindFirstInstanceOf(abstract.CxxCompiler, toolchain)
- if cxx_compiler:
- cxx_compiler_flags = cxx_compiler.GetFlags(defines, include_dirs,
- cflags + cflags_cc)
- self.ninja.variable(
- '{0}_flags'.format(GetNinjaRuleName(cxx_compiler, self.toolset)),
- shell.Join(cxx_compiler_flags))
-
- objcxx_compiler = FindFirstInstanceOf(abstract.ObjectiveCxxCompiler,
- toolchain)
- if objcxx_compiler:
- objcxx_compiler_flags = objcxx_compiler.GetFlags(
- defines, include_dirs, cflags + cflags_cc + cflags_mm)
- self.ninja.variable(
- '{0}_flags'.format(GetNinjaRuleName(objcxx_compiler, self.toolset)),
- shell.Join(objcxx_compiler_flags))
-
- assembler = FindFirstInstanceOf(abstract.AssemblerWithCPreprocessor,
- toolchain)
- if assembler:
- assembler_flags = assembler.GetFlags(defines, include_dirs,
- cflags + cflags_c)
- self.ninja.variable(
- '{0}_flags'.format(GetNinjaRuleName(assembler, self.toolset)),
- shell.Join(assembler_flags))
-
- self.ninja.newline()
-
- outputs = []
- for source in sources:
- _, extension = os.path.splitext(source)
- if extension in ['.c']:
- assert c_compiler, ('Toolchain must provide C compiler in order to '
- 'build {0} for {1} platform.').format(
- source, self.toolset)
- rule_name = GetNinjaRuleName(c_compiler, self.toolset)
- elif extension in ['.cc', '.cpp', '.cxx']:
- assert cxx_compiler, ('Toolchain must provide C++ compiler in order '
- 'to build {0} for {1} platform.').format(
- source, self.toolset)
- rule_name = GetNinjaRuleName(cxx_compiler, self.toolset)
- elif extension in ['.mm']:
- assert objcxx_compiler, ('Toolchain must provide Objective-C++ '
- 'compiler in order to build {0} for {1} '
- 'platform.').format(source, self.toolset)
- rule_name = GetNinjaRuleName(objcxx_compiler, self.toolset)
- elif extension in ['.S', '.s']:
- assert assembler, ('Toolchain must provide assembler in order to '
- 'build {0} for {1} platform.').format(
- source, self.toolset)
- rule_name = GetNinjaRuleName(assembler, self.toolset)
+ if self.toolset == 'target':
+ toolchain = GetTargetToolchain(self.flavor, spec=spec,
+ config_name=config_name)
else:
- rule_name = None
+ toolchain = GetHostToolchain(self.flavor, spec=spec,
+ config_name=config_name)
- if rule_name:
+ defines = config.get('defines', [])
+ include_dirs = [
+ self.GypPathToNinja(include_dir)
+ for include_dir in config.get('include_dirs', [])
+ ]
+
+ # TODO: This code emulates legacy toolchain behavior. We need to migrate
+ # to single-responsibility, toolchain-independent GYP keywords as
+ # per abstract toolchain design doc.
+ cflags = GetConfigFlags(config, self.toolset, 'cflags')
+ cflags_c = GetConfigFlags(config, self.toolset, 'cflags_c')
+ cflags_cc = GetConfigFlags(config, self.toolset, 'cflags_cc')
+ cflags_mm = GetConfigFlags(config, self.toolset, 'cflags_mm')
+ obj = 'obj'
+ if self.toolset != 'target':
+ obj += '.' + self.toolset
+ pdbpath = os.path.normpath(
+ os.path.join(obj, self.base_dir, self.name + '.pdb'))
+ self.WriteVariableList('pdbname', [pdbpath])
+
+ c_compiler = FindFirstInstanceOf(abstract.CCompiler, toolchain)
+ if c_compiler:
+ c_compiler_flags = c_compiler.GetFlags(defines, include_dirs,
+ cflags + cflags_c)
+ self.ninja.variable(
+ '{0}_flags'.format(GetNinjaRuleName(c_compiler, self.toolset)),
+ shell.Join(c_compiler_flags))
+
+ cxx_compiler = FindFirstInstanceOf(abstract.CxxCompiler, toolchain)
+ if cxx_compiler:
+ cxx_compiler_flags = cxx_compiler.GetFlags(defines, include_dirs,
+ cflags + cflags_cc)
+ self.ninja.variable(
+ '{0}_flags'.format(GetNinjaRuleName(cxx_compiler, self.toolset)),
+ shell.Join(cxx_compiler_flags))
+
+ objcxx_compiler = FindFirstInstanceOf(abstract.ObjectiveCxxCompiler,
+ toolchain)
+ if objcxx_compiler:
+ objcxx_compiler_flags = objcxx_compiler.GetFlags(
+ defines, include_dirs, cflags + cflags_cc + cflags_mm)
+ self.ninja.variable(
+ '{0}_flags'.format(GetNinjaRuleName(objcxx_compiler, self.toolset)),
+ shell.Join(objcxx_compiler_flags))
+
+ assembler = FindFirstInstanceOf(abstract.AssemblerWithCPreprocessor,
+ toolchain)
+ if assembler:
+ assembler_flags = assembler.GetFlags(defines, include_dirs,
+ cflags + cflags_c)
+ self.ninja.variable(
+ '{0}_flags'.format(GetNinjaRuleName(assembler, self.toolset)),
+ shell.Join(assembler_flags))
+
+ self.ninja.newline()
+
+ outputs = []
+ for source in sources:
+ _, extension = os.path.splitext(source)
+ if extension in ['.c']:
+ assert c_compiler, ('Toolchain must provide C compiler in order to '
+ 'build {0} for {1} platform.').format(
+ source, self.toolset)
+ rule_name = GetNinjaRuleName(c_compiler, self.toolset)
+ elif extension in ['.cc', '.cpp', '.cxx']:
+ assert cxx_compiler, ('Toolchain must provide C++ compiler in order '
+ 'to build {0} for {1} platform.').format(
+ source, self.toolset)
+ rule_name = GetNinjaRuleName(cxx_compiler, self.toolset)
+ elif extension in ['.mm']:
+ assert objcxx_compiler, ('Toolchain must provide Objective-C++ '
+ 'compiler in order to build {0} for {1} '
+ 'platform.').format(source, self.toolset)
+ rule_name = GetNinjaRuleName(objcxx_compiler, self.toolset)
+ elif extension in ['.S', '.s']:
+ assert assembler, ('Toolchain must provide assembler in order to '
+ 'build {0} for {1} platform.').format(
+ source, self.toolset)
+ rule_name = GetNinjaRuleName(assembler, self.toolset)
+ else:
+ rule_name = None
+
+ if rule_name:
+ input = self.GypPathToNinja(source)
+ output = '{0}.o'.format(self.GypPathToUniqueOutput(source))
+ self.ninja.build(
+ output,
+ rule_name,
+ input,
+          implicit=None,  # TODO: Implement precompiled headers.
+ order_only=predepends)
+ outputs.append(output)
+
+ except NotImplementedError:
+ # Fall back to the legacy toolchain.
+
+ if self.toolset == 'host':
+ self.ninja.variable('ar', '$ar_host')
+ self.ninja.variable('cc', '$cc_host')
+ self.ninja.variable('cxx', '$cxx_host')
+ self.ninja.variable('ld', '$ld_host')
+
+ extra_defines = []
+ if self.flavor == 'mac':
+ cflags = self.xcode_settings.GetCflags(config_name)
+ cflags_c = self.xcode_settings.GetCflagsC(config_name)
+ cflags_cc = self.xcode_settings.GetCflagsCC(config_name)
+ cflags_objc = ['$cflags_c'] + \
+ self.xcode_settings.GetCflagsObjC(config_name)
+ cflags_objcc = ['$cflags_cc'] + \
+ self.xcode_settings.GetCflagsObjCC(config_name)
+ elif GetToolchainOrNone(self.flavor):
+ cflags = GetToolchainOrNone(
+ self.flavor).GetCompilerSettings().GetCflags(config_name)
+ cflags_c = GetToolchainOrNone(
+ self.flavor).GetCompilerSettings().GetCflagsC(config_name)
+ cflags_cc = GetToolchainOrNone(
+ self.flavor).GetCompilerSettings().GetCflagsCC(config_name)
+ extra_defines = GetToolchainOrNone(
+ self.flavor).GetCompilerSettings().GetDefines(config_name)
+ obj = 'obj'
+ if self.toolset != 'target':
+ obj += '.' + self.toolset
+ pdbpath = os.path.normpath(
+ os.path.join(obj, self.base_dir, self.name + '.pdb'))
+ self.WriteVariableList('pdbname', [pdbpath])
+ self.WriteVariableList('pchprefix', [self.name])
+ else:
+ cflags = config.get('cflags', [])
+ cflags_c = config.get('cflags_c', [])
+ cflags_cc = config.get('cflags_cc', [])
+
+ cflags_host = config.get('cflags_host', cflags)
+ cflags_c_host = config.get('cflags_c_host', cflags_c)
+ cflags_cc_host = config.get('cflags_cc_host', cflags_cc)
+
+ defines = config.get('defines', []) + extra_defines
+ if GetToolchainOrNone(self.flavor):
+ self.WriteVariableList(
+ 'defines',
+ [GetToolchainOrNone(self.flavor).Define(d) for d in defines])
+ else:
+ self.WriteVariableList('defines',
+ [Define(d, self.flavor) for d in defines])
+ if GetToolchainOrNone(self.flavor):
+ self.WriteVariableList('rcflags', [
+ QuoteShellArgument(self.ExpandSpecial(f), self.flavor)
+ for f in GetToolchainOrNone(self.flavor).GetCompilerSettings()
+ .GetRcFlags(config_name, self.GypPathToNinja)
+ ])
+
+ include_dirs = config.get('include_dirs', [])
+ include_dirs += config.get('include_dirs_' + self.toolset, [])
+
+ if GetToolchainOrNone(self.flavor):
+ include_dirs = GetToolchainOrNone(
+ self.flavor).GetCompilerSettings().ProcessIncludeDirs(
+ include_dirs, config_name)
+ self.WriteVariableList('includes', [
+ '/I' + GetToolchainOrNone(self.flavor).QuoteForRspFile(
+ self.GypPathToNinja(i)) for i in include_dirs
+ ])
+ else:
+ self.WriteVariableList('includes', [
+ QuoteShellArgument('-I' + self.GypPathToNinja(i), self.flavor)
+ for i in include_dirs
+ ])
+
+ pch_commands = precompiled_header.GetPchBuildCommands()
+ if self.flavor == 'mac':
+ self.WriteVariableList('cflags_pch_c',
+ [precompiled_header.GetInclude('c')])
+ self.WriteVariableList('cflags_pch_cc',
+ [precompiled_header.GetInclude('cc')])
+ self.WriteVariableList('cflags_pch_objc',
+ [precompiled_header.GetInclude('m')])
+ self.WriteVariableList('cflags_pch_objcc',
+ [precompiled_header.GetInclude('mm')])
+
+ self.WriteVariableList('cflags', map(self.ExpandSpecial, cflags))
+ self.WriteVariableList('cflags_c', map(self.ExpandSpecial, cflags_c))
+ self.WriteVariableList('cflags_cc', map(self.ExpandSpecial, cflags_cc))
+
+ self.WriteVariableList('cflags_host', map(self.ExpandSpecial,
+ cflags_host))
+ self.WriteVariableList('cflags_c_host',
+ map(self.ExpandSpecial, cflags_c_host))
+ self.WriteVariableList('cflags_cc_host',
+ map(self.ExpandSpecial, cflags_cc_host))
+
+ if self.flavor == 'mac':
+ self.WriteVariableList('cflags_objc',
+ map(self.ExpandSpecial, cflags_objc))
+ self.WriteVariableList('cflags_objcc',
+ map(self.ExpandSpecial, cflags_objcc))
+ self.ninja.newline()
+
+ outputs = []
+ for source in sources:
+ filename, ext = os.path.splitext(source)
+ ext = ext[1:]
+ obj_ext = self.obj_ext
+ if ext in ('cc', 'cpp', 'cxx'):
+ command = 'cxx'
+ elif ext == 'c' or (ext == 'S' and self.flavor != 'win'):
+ command = 'cc'
+ elif ext == 's' and self.flavor != 'win': # Doesn't generate .o.d files.
+ command = 'cc_s'
+ elif (self.flavor == 'win' and ext == 'asm' and GetToolchainOrNone(
+ self.flavor).GetCompilerSettings().GetArch(config_name) == 'x86' and
+ not GetToolchainOrNone(
+ self.flavor).GetCompilerSettings().HasExplicitAsmRules(spec)):
+ # Asm files only get auto assembled for x86 (not x64).
+ command = 'asm'
+ # Add the _asm suffix as msvs is capable of handling .cc and
+ # .asm files of the same name without collision.
+ obj_ext = '_asm.obj'
+ elif self.flavor == 'mac' and ext == 'm':
+ command = 'objc'
+ elif self.flavor == 'mac' and ext == 'mm':
+ command = 'objcxx'
+ elif self.flavor in microsoft_flavors and ext == 'rc':
+ # TODO: Starboardize this.
+ command = 'rc'
+ obj_ext = '.res'
+ else:
+ # Ignore unhandled extensions.
+ continue
+ if self.toolset != 'target':
+ command += '_' + self.toolset
+
input = self.GypPathToNinja(source)
- output = '{0}.o'.format(self.GypPathToUniqueOutput(source))
+ output = self.GypPathToUniqueOutput(filename + obj_ext)
+ implicit = precompiled_header.GetObjDependencies([input], [output])
+ variables = []
+ if GetToolchainOrNone(self.flavor):
+ (variables, output,
+ implicit) = precompiled_header.GetFlagsModifications(
+ input, output, implicit, command, cflags_c, cflags_cc,
+ self.ExpandSpecial)
self.ninja.build(
output,
- rule_name,
+ command,
input,
- implicit=None, # TODO: Implemenet precompiled headers.
- order_only=predepends)
+ implicit=[gch for _, _, gch in implicit],
+ order_only=predepends,
+ variables=variables)
outputs.append(output)
+ self.WritePchTargets(pch_commands)
+
self.ninja.newline()
return outputs
+ def WritePchTargets(self, pch_commands):
+ """Writes ninja rules to compile prefix headers."""
+ if not pch_commands:
+ return
+
+ for gch, lang_flag, lang, input in pch_commands:
+ var_name = {
+ 'c': 'cflags_pch_c',
+ 'cc': 'cflags_pch_cc',
+ 'm': 'cflags_pch_objc',
+ 'mm': 'cflags_pch_objcc',
+ }[lang]
+
+ map = {
+ 'c': 'cc',
+ 'cc': 'cxx',
+ 'm': 'objc',
+ 'mm': 'objcxx',
+ }
+ cmd = map.get(lang)
+ self.ninja.build(gch, cmd, input, variables=[(var_name, lang_flag)])
+
def WriteLink(self, spec, config_name, config, link_deps):
"""Write out a link step. Fills out target.binary. """
- if self.toolset == 'target':
- toolchain = GetTargetToolchain(
- self.flavor,
- spec=spec,
- config_name=config_name,
- gyp_path_to_ninja=self.GypPathToNinja,
- expand_special=self.ExpandSpecial,
- gyp_path_to_unique_output=self.GypPathToUniqueOutput,
- compute_output_file_name=self.ComputeOutputFileName
- )
- else:
- toolchain = GetHostToolchain(
- self.flavor,
- spec=spec,
- config_name=config_name,
- gyp_path_to_ninja=self.GypPathToNinja,
- expand_special=self.ExpandSpecial,
- gyp_path_to_unique_output=self.GypPathToUniqueOutput,
- compute_output_file_name=self.ComputeOutputFileName
- )
- shell = GetShell(self.flavor)
- extra_bindings = []
- target_type = spec['type']
- if target_type == 'executable':
- executable_linker = FindFirstInstanceOf(abstract.ExecutableLinker,
- toolchain)
- assert executable_linker, ('Toolchain must provide executable linker '
- 'for {0} platform.').format(self.toolset)
+ try:
+ if self.toolset == 'target':
+ toolchain = GetTargetToolchain(
+ self.flavor,
+ spec=spec,
+ config_name=config_name,
+ gyp_path_to_ninja=self.GypPathToNinja,
+ expand_special=self.ExpandSpecial,
+ gyp_path_to_unique_output=self.GypPathToUniqueOutput,
+ compute_output_file_name=self.ComputeOutputFileName
+ )
+ else:
+ toolchain = GetHostToolchain(
+ self.flavor,
+ spec=spec,
+ config_name=config_name,
+ gyp_path_to_ninja=self.GypPathToNinja,
+ expand_special=self.ExpandSpecial,
+ gyp_path_to_unique_output=self.GypPathToUniqueOutput,
+ compute_output_file_name=self.ComputeOutputFileName
+ )
- rule_name = GetNinjaRuleName(executable_linker, self.toolset)
+ shell = GetShell(self.flavor)
+ extra_bindings = []
+ target_type = spec['type']
+ if target_type == 'executable':
+ executable_linker = FindFirstInstanceOf(abstract.ExecutableLinker,
+ toolchain)
+ assert executable_linker, ('Toolchain must provide executable linker '
+ 'for {0} platform.').format(self.toolset)
- # TODO: This code emulates legacy toolchain behavior. We need to migrate
- # to single-responsibility, toolchain-independent GYP keywords as
- # per abstract toolchain design doc.
- libraries_keyword = 'libraries{0}'.format('_host' if self.toolset ==
- 'host' else '')
- libraries = spec.get(libraries_keyword, []) + config.get(
- libraries_keyword, [])
+ rule_name = GetNinjaRuleName(executable_linker, self.toolset)
- ldflags_executable = GetConfigFlags(
- config, self.toolset, 'ldflags_executable')
- if not ldflags_executable:
- ldflags_executable = GetConfigFlags(config, self.toolset, 'ldflags')
+ # TODO: This code emulates legacy toolchain behavior. We need to migrate
+ # to single-responsibility, toolchain-independent GYP keywords as
+ # per abstract toolchain design doc.
+ libraries_keyword = 'libraries{0}'.format('_host' if self.toolset ==
+ 'host' else '')
+ libraries = spec.get(libraries_keyword, []) + config.get(
+ libraries_keyword, [])
- ldflags = gyp.common.uniquer(
- map(self.ExpandSpecial, ldflags_executable + libraries))
+ ldflags_executable = GetConfigFlags(
+ config, self.toolset, 'ldflags_executable')
+ if not ldflags_executable:
+ ldflags_executable = GetConfigFlags(config, self.toolset, 'ldflags')
- executable_linker_flags = executable_linker.GetFlags(ldflags)
- self.ninja.variable('{0}_flags'.format(rule_name),
- shell.Join(executable_linker_flags))
- elif target_type == 'shared_library':
- shared_library_linker = FindFirstInstanceOf(
- abstract.SharedLibraryLinker, toolchain)
- assert shared_library_linker, (
- 'Toolchain must provide shared library linker '
- 'for {0} platform.').format(self.toolset)
+ ldflags = gyp.common.uniquer(
+ map(self.ExpandSpecial, ldflags_executable + libraries))
- rule_name = GetNinjaRuleName(shared_library_linker, self.toolset)
+ executable_linker_flags = executable_linker.GetFlags(ldflags)
+ self.ninja.variable('{0}_flags'.format(rule_name),
+ shell.Join(executable_linker_flags))
+ elif target_type == 'shared_library':
+ shared_library_linker = FindFirstInstanceOf(
+ abstract.SharedLibraryLinker, toolchain)
+ assert shared_library_linker, (
+ 'Toolchain must provide shared library linker '
+ 'for {0} platform.').format(self.toolset)
- # TODO: This code emulates legacy toolchain behavior. We need to migrate
- # to single-responsibility, toolchain-independent GYP keywords as
- # per abstract toolchain design doc.
- libraries_keyword = 'libraries{0}'.format('_host' if self.toolset ==
- 'host' else '')
- libraries = spec.get(libraries_keyword, []) + config.get(
- libraries_keyword, [])
+ rule_name = GetNinjaRuleName(shared_library_linker, self.toolset)
- ldflags_shared = GetConfigFlags(config, self.toolset, 'ldflags_shared')
- if not ldflags_shared:
- ldflags_shared = GetConfigFlags(config, self.toolset, 'ldflags')
+ # TODO: This code emulates legacy toolchain behavior. We need to migrate
+ # to single-responsibility, toolchain-independent GYP keywords as
+ # per abstract toolchain design doc.
+ libraries_keyword = 'libraries{0}'.format('_host' if self.toolset ==
+ 'host' else '')
+ libraries = spec.get(libraries_keyword, []) + config.get(
+ libraries_keyword, [])
- ldflags = gyp.common.uniquer(
- map(self.ExpandSpecial, ldflags_shared + libraries))
+ ldflags_shared = GetConfigFlags(config, self.toolset, 'ldflags_shared')
+ if not ldflags_shared:
+ ldflags_shared = GetConfigFlags(config, self.toolset, 'ldflags')
- shared_library_linker_flags = shared_library_linker.GetFlags(ldflags)
- self.ninja.variable('{0}_flags'.format(rule_name),
- shell.Join(shared_library_linker_flags))
+ ldflags = gyp.common.uniquer(
+ map(self.ExpandSpecial, ldflags_shared + libraries))
+
+ shared_library_linker_flags = shared_library_linker.GetFlags(ldflags)
+ self.ninja.variable('{0}_flags'.format(rule_name),
+ shell.Join(shared_library_linker_flags))
+ output = self.ComputeOutput(spec)
+ extra_bindings.append(('soname', os.path.split(output)[1]))
+ extra_bindings.append(('dll', output))
+ if '/NOENTRY' not in shared_library_linker_flags:
+ extra_bindings.append(('implibflag',
+ '/IMPLIB:%s' % output + '.lib'))
+
+ else:
+ raise Exception('Target type {0} is not supported for target {1}.'
+ .format(target_type, spec['target_name']))
+
+ order_only_deps = set()
+
+ if 'dependencies' in spec:
+ # Two kinds of dependencies:
+ # - Linkable dependencies (like a .a or a .so): add them to the link
+ # line.
+ # - Non-linkable dependencies (like a rule that generates a file
+ # and writes a stamp file): add them to implicit_deps or
+ # order_only_deps
+ extra_link_deps = []
+ for dep in spec['dependencies']:
+ target = self.target_outputs.get(dep)
+ if not target:
+ continue
+ linkable = target.Linkable()
+ if linkable:
+ extra_link_deps.append(target.binary)
+
+ final_output = target.FinalOutput()
+ if not linkable or final_output != target.binary:
+ order_only_deps.add(final_output)
+
+ # dedup the extra link deps while preserving order
+ seen = set()
+ extra_link_deps = [
+ x for x in extra_link_deps if x not in seen and not seen.add(x)
+ ]
+
+ link_deps.extend(extra_link_deps)
+
+ tail_deps = GetConfigFlags(config, self.toolset, 'TailDependencies')
+ if tail_deps:
+ link_deps.extend(map(self.ExpandSpecial, tail_deps))
+
output = self.ComputeOutput(spec)
- extra_bindings.append(('soname', os.path.split(output)[1]))
- extra_bindings.append(('dll', output))
- if '/NOENTRY' not in shared_library_linker_flags:
- extra_bindings.append(('implibflag',
- '/IMPLIB:%s' % output + '.lib'))
+ self.target.binary = output
- else:
- raise Exception('Target type {0} is not supported for target {1}.'
- .format(target_type, spec['target_name']))
+ self.ninja.build(
+ output,
+ rule_name,
+ link_deps,
+ order_only=list(order_only_deps),
+ variables=extra_bindings)
- order_only_deps = set()
+ except NotImplementedError:
+ # Fall back to the legacy toolchain.
- if 'dependencies' in spec:
- # Two kinds of dependencies:
- # - Linkable dependencies (like a .a or a .so): add them to the link
- # line.
- # - Non-linkable dependencies (like a rule that generates a file
- # and writes a stamp file): add them to implicit_deps or
- # order_only_deps
- extra_link_deps = []
- for dep in spec['dependencies']:
- target = self.target_outputs.get(dep)
- if not target:
- continue
- linkable = target.Linkable()
- if linkable:
- extra_link_deps.append(target.binary)
+ command = {
+ 'executable': 'link',
+ 'loadable_module': 'solink_module',
+ 'shared_library': 'solink',
+ }[spec['type']]
- final_output = target.FinalOutput()
- if not linkable or final_output != target.binary:
- order_only_deps.add(final_output)
+ implicit_deps = set()
+ order_only_deps = set()
+ solibs = set()
- # dedup the extra link deps while preserving order
- seen = set()
- extra_link_deps = [
- x for x in extra_link_deps if x not in seen and not seen.add(x)
- ]
+ if 'dependencies' in spec:
+ # Two kinds of dependencies:
+ # - Linkable dependencies (like a .a or a .so): add them to the link
+ # line.
+ # - Non-linkable dependencies (like a rule that generates a file
+ # and writes a stamp file): add them to implicit_deps or
+ # order_only_deps
+ extra_link_deps = []
+ for dep in spec['dependencies']:
+ target = self.target_outputs.get(dep)
+ if not target:
+ continue
+ linkable = target.Linkable()
+ # TODO: Starboardize.
+ if linkable:
+ if (self.flavor in microsoft_flavors and target.component_objs and
+ GetToolchainOrNone(self.flavor).GetCompilerSettings()
+ .IsUseLibraryDependencyInputs(config_name)):
+ extra_link_deps.extend(target.component_objs)
+ elif (self.flavor in microsoft_flavors and
+ target.import_lib):
+ extra_link_deps.append(target.import_lib)
+ elif target.UsesToc(self.flavor):
+ solibs.add(target.binary)
+ implicit_deps.add(target.binary + '.TOC')
+ else:
+ extra_link_deps.append(target.binary)
- link_deps.extend(extra_link_deps)
+ final_output = target.FinalOutput()
+ if not linkable or final_output != target.binary:
+ order_only_deps.add(final_output)
- tail_deps = GetConfigFlags(config, self.toolset, 'TailDependencies')
- if tail_deps:
- link_deps.extend(map(self.ExpandSpecial, tail_deps))
+ # dedup the extra link deps while preserving order
+ seen = set()
+ extra_link_deps = [
+ x for x in extra_link_deps if x not in seen and not seen.add(x)
+ ]
- output = self.ComputeOutput(spec)
- self.target.binary = output
+ link_deps.extend(extra_link_deps)
- self.ninja.build(
- output,
- rule_name,
- link_deps,
- order_only=list(order_only_deps),
- variables=extra_bindings)
+ extra_bindings = []
+ if self.is_mac_bundle:
+ output = self.ComputeMacBundleBinaryOutput()
+ else:
+ output = self.ComputeOutput(spec)
+ extra_bindings.append(('postbuilds',
+ self.GetPostbuildCommand(spec, output, output)))
+
+ if self.flavor == 'mac':
+ ldflags = self.xcode_settings.GetLdflags(
+ config_name,
+ self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']),
+ self.GypPathToNinja)
+ elif GetToolchainOrNone(self.flavor):
+ libflags = GetToolchainOrNone(
+ self.flavor).GetCompilerSettings().GetLibFlags(
+ config_name, self.GypPathToNinja)
+ self.WriteVariableList(
+ 'libflags', gyp.common.uniquer(map(self.ExpandSpecial, libflags)))
+ is_executable = spec['type'] == 'executable'
+ manifest_name = self.GypPathToUniqueOutput(
+ self.ComputeOutputFileName(spec))
+ ldflags, manifest_files = GetToolchainOrNone(
+ self.flavor).GetCompilerSettings().GetLdFlags(
+ config_name, **{
+ 'gyp_path_to_ninja': self.GypPathToNinja,
+ 'expand_special': self.ExpandSpecial,
+ 'manifest_name': manifest_name,
+ 'is_executable': is_executable
+ })
+ self.WriteVariableList('manifests', manifest_files)
+ else:
+ ldflags = config.get('ldflags', [])
+ ldflags_host = config.get('ldflags_host', ldflags)
+
+ self.WriteVariableList(
+ 'ldflags', gyp.common.uniquer(map(self.ExpandSpecial, ldflags)))
+ if ('ldflags_host' in locals()):
+ self.WriteVariableList(
+ 'ldflags_host',
+ gyp.common.uniquer(map(self.ExpandSpecial, ldflags_host)))
+
+ if self.toolset == 'host':
+ libs = spec.get('libraries_host', [])
+ libs.extend(config.get('libraries_host', []))
+ else:
+ libs = spec.get('libraries', [])
+ libs.extend(config.get('libraries', []))
+
+ libraries = gyp.common.uniquer(map(self.ExpandSpecial, libs))
+
+ if self.flavor == 'mac':
+ libraries = self.xcode_settings.AdjustLibraries(libraries)
+ elif GetToolchainOrNone(self.flavor):
+ libraries = GetToolchainOrNone(
+ self.flavor).GetCompilerSettings().ProcessLibraries(libraries)
+ self.WriteVariableList('libs', libraries)
+
+ self.target.binary = output
+
+ if command in ('solink', 'solink_module'):
+ extra_bindings.append(('soname', os.path.split(output)[1]))
+ extra_bindings.append(('lib',
+ gyp.common.EncodePOSIXShellArgument(output)))
+ # TODO: Starboardize.
+ if self.flavor in microsoft_flavors:
+ extra_bindings.append(('dll', output))
+ if '/NOENTRY' not in ldflags:
+ self.target.import_lib = output + '.lib'
+ extra_bindings.append(('implibflag',
+ '/IMPLIB:%s' % self.target.import_lib))
+ output = [output, self.target.import_lib]
+ else:
+ output = [output, output + '.TOC']
+
+ if len(solibs):
+ extra_bindings.append(('solibs',
+ gyp.common.EncodePOSIXShellList(solibs)))
+
+ if self.toolset != 'target':
+ command += '_' + self.toolset
+
+ self.ninja.build(
+ output,
+ command,
+ link_deps,
+ implicit=list(implicit_deps),
+ order_only=list(order_only_deps),
+ variables=extra_bindings)
def WriteTarget(self, spec, config_name, config, link_deps, compile_deps):
if spec['type'] == 'none':
@@ -1139,37 +1494,62 @@
self.target.binary = self.ComputeOutput(spec)
variables = []
- if self.toolset == 'target':
- toolchain = GetTargetToolchain(
- self.flavor,
- spec=spec,
- config_name=config_name,
- gyp_path_to_ninja=self.GypPathToNinja
- )
- else:
- toolchain = GetHostToolchain(
- self.flavor,
- spec=spec,
- config_name=config_name,
- gyp_path_to_ninja=self.GypPathToNinja
- )
+ try:
+ if self.toolset == 'target':
+ toolchain = GetTargetToolchain(
+ self.flavor,
+ spec=spec,
+ config_name=config_name,
+ gyp_path_to_ninja=self.GypPathToNinja
+ )
+ else:
+ toolchain = GetHostToolchain(
+ self.flavor,
+ spec=spec,
+ config_name=config_name,
+ gyp_path_to_ninja=self.GypPathToNinja
+ )
- shell = GetShell(self.flavor)
- static_linker = FindFirstInstanceOf(abstract.StaticLinker, toolchain)
- if not self.is_standalone_static_library:
- static_thin_linker = FindFirstInstanceOf(abstract.StaticThinLinker,
- toolchain)
- if static_thin_linker:
- static_linker = static_thin_linker
- assert static_linker, ('Toolchain must provide static linker in order '
- 'to build {0} for {1} platform.').format(
- self.target.binary, self.toolset)
+ shell = GetShell(self.flavor)
+ static_linker = FindFirstInstanceOf(abstract.StaticLinker, toolchain)
+ if not self.is_standalone_static_library:
+ static_thin_linker = FindFirstInstanceOf(abstract.StaticThinLinker,
+ toolchain)
+ if static_thin_linker:
+ static_linker = static_thin_linker
+ assert static_linker, ('Toolchain must provide static linker in order '
+ 'to build {0} for {1} platform.').format(
+ self.target.binary, self.toolset)
- rule_name = GetNinjaRuleName(static_linker, self.toolset)
+ rule_name = GetNinjaRuleName(static_linker, self.toolset)
- static_linker_flags = static_linker.GetFlags()
- self.ninja.variable('{0}_flags'.format(rule_name),
- shell.Join(static_linker_flags))
+ static_linker_flags = static_linker.GetFlags()
+ self.ninja.variable('{0}_flags'.format(rule_name),
+ shell.Join(static_linker_flags))
+ except NotImplementedError:
+ # Fall back to the legacy toolchain.
+
+ if GetToolchainOrNone(self.flavor):
+ libflags = GetToolchainOrNone(
+ self.flavor).GetCompilerSettings().GetLibFlags(
+ config_name, self.GypPathToNinja)
+ # TODO: Starboardize libflags vs libtool_flags.
+ variables.append(('libflags', ' '.join(libflags)))
+ postbuild = self.GetPostbuildCommand(spec, self.target.binary,
+ self.target.binary)
+ if postbuild:
+ variables.append(('postbuilds', postbuild))
+ if self.xcode_settings:
+ variables.append(('libtool_flags',
+ self.xcode_settings.GetLibtoolflags(config_name)))
+ # TODO: Starboardize.
+ if (self.flavor not in (['mac'] + microsoft_flavors) and
+ not self.is_standalone_static_library):
+ rule_name = 'alink_thin'
+ else:
+ rule_name = 'alink'
+ if self.toolset != 'target':
+ rule_name += '_' + self.toolset
self.ninja.build(
self.target.binary,
@@ -1280,6 +1660,12 @@
path = self.ExpandSpecial(generator_default_variables['PRODUCT_DIR'])
return os.path.join(path, self.xcode_settings.GetWrapperName())
+ def ComputeMacBundleBinaryOutput(self):
+ """Return the 'output' (full output path) to the binary in a bundle."""
+ assert self.is_mac_bundle
+ path = self.ExpandSpecial(generator_default_variables['PRODUCT_DIR'])
+ return os.path.join(path, self.xcode_settings.GetExecutablePath())
+
def ComputeOutputFileName(self, spec, type=None):
"""Compute the filename of the final output for the current target."""
if not type:
@@ -1550,6 +1936,62 @@
pass
return open(path, mode)
+
+def GetDefaultConcurrentLinks():
+ """Returns a best-guess for a number of concurrent links."""
+ pool_size = int(os.getenv('GYP_LINK_CONCURRENCY', 0))
+ if pool_size:
+ return pool_size
+
+ if sys.platform in ('win32', 'cygwin'):
+ import ctypes
+
+ class MEMORYSTATUSEX(ctypes.Structure):
+ _fields_ = [
+ ('dwLength', ctypes.c_ulong),
+ ('dwMemoryLoad', ctypes.c_ulong),
+ ('ullTotalPhys', ctypes.c_ulonglong),
+ ('ullAvailPhys', ctypes.c_ulonglong),
+ ('ullTotalPageFile', ctypes.c_ulonglong),
+ ('ullAvailPageFile', ctypes.c_ulonglong),
+ ('ullTotalVirtual', ctypes.c_ulonglong),
+ ('ullAvailVirtual', ctypes.c_ulonglong),
+ ('sullAvailExtendedVirtual', ctypes.c_ulonglong),
+ ]
+
+ stat = MEMORYSTATUSEX()
+ stat.dwLength = ctypes.sizeof(stat)
+ ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat))
+
+ # VS 2015 uses 20% more working set than VS 2013 and can consume all RAM
+ # on a 64 GB machine.
+ mem_limit = max(1, stat.ullTotalPhys / (5 * (2**30))) # total / 5GB
+ hard_cap = max(1, int(os.getenv('GYP_LINK_CONCURRENCY_MAX', 2**32)))
+ return min(mem_limit, hard_cap)
+ elif sys.platform.startswith('linux'):
+ if os.path.exists('/proc/meminfo'):
+ with open('/proc/meminfo') as meminfo:
+ memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
+ for line in meminfo:
+ match = memtotal_re.match(line)
+ if not match:
+ continue
+ # Allow 6Gb per link on Linux because Gold is quite memory hungry
+ return max(1, int(match.group(1)) / (6 * (2**20)))
+ return 1
+ elif sys.platform == 'darwin':
+ try:
+ avail_bytes = int(subprocess.check_output(['sysctl', '-n', 'hw.memsize']))
+ # A static library debug build of Chromium's unit_tests takes ~2.7GB, so
+ # 4GB per ld process allows for some more bloat.
+ return max(1, avail_bytes / (4 * (2**30))) # total / 4GB
+ except:
+ return 1
+ else:
+ # TODO(scottmg): Implement this for other platforms.
+ return 1
+
+
def MaybeWritePathVariable(ninja, tool, toolset):
if tool.GetPath():
ninja.variable('{0}_path'.format(GetNinjaRuleName(tool, toolset)),
@@ -1611,53 +2053,609 @@
build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
make_global_settings = data[build_file].get('make_global_settings', [])
- # To avoid duplication, platform-agnostic tools (such as stamp and copy)
- # will be processed only in the host toolchain.
- target_toolchain = [
- target_tool for target_tool in GetTargetToolchain(flavor)
- if not target_tool.IsPlatformAgnostic()
- ]
- host_toolchain = GetHostToolchain(flavor)
+ try:
+ # To avoid duplication, platform-agnostic tools (such as stamp and copy)
+ # will be processed only in the host toolchain.
+ target_toolchain = [
+ target_tool for target_tool in GetTargetToolchain(flavor)
+ if not target_tool.IsPlatformAgnostic()
+ ]
+ host_toolchain = GetHostToolchain(flavor)
- shell = GetShell(flavor)
+ shell = GetShell(flavor)
- for target_tool in target_toolchain:
- MaybeWritePathVariable(master_ninja, target_tool, 'target')
- for host_tool in host_toolchain:
- MaybeWritePathVariable(master_ninja, host_tool, 'host')
- master_ninja.newline()
+ for target_tool in target_toolchain:
+ MaybeWritePathVariable(master_ninja, target_tool, 'target')
+ for host_tool in host_toolchain:
+ MaybeWritePathVariable(master_ninja, host_tool, 'host')
+ master_ninja.newline()
- for target_tool in target_toolchain:
- MaybeWriteExtraFlagsVariable(master_ninja, target_tool, 'target', shell)
- for host_tool in host_toolchain:
- MaybeWriteExtraFlagsVariable(master_ninja, host_tool, 'host', shell)
- master_ninja.newline()
+ for target_tool in target_toolchain:
+ MaybeWriteExtraFlagsVariable(master_ninja, target_tool, 'target', shell)
+ for host_tool in host_toolchain:
+ MaybeWriteExtraFlagsVariable(master_ninja, host_tool, 'host', shell)
+ master_ninja.newline()
- for target_tool in target_toolchain:
- MaybeWritePool(master_ninja, target_tool, 'target')
- for host_tool in host_toolchain:
- MaybeWritePool(master_ninja, host_tool, 'host')
- master_ninja.newline()
+ for target_tool in target_toolchain:
+ MaybeWritePool(master_ninja, target_tool, 'target')
+ for host_tool in host_toolchain:
+ MaybeWritePool(master_ninja, host_tool, 'host')
+ master_ninja.newline()
- for target_tool in target_toolchain:
- MaybeWriteRule(master_ninja, target_tool, 'target', shell)
- for host_tool in host_toolchain:
- MaybeWriteRule(master_ninja, host_tool, 'host', shell)
- master_ninja.newline()
+ for target_tool in target_toolchain:
+ MaybeWriteRule(master_ninja, target_tool, 'target', shell)
+ for host_tool in host_toolchain:
+ MaybeWriteRule(master_ninja, host_tool, 'host', shell)
+ master_ninja.newline()
- # Copy the gyp-win-tool to the toplevel_build.
- # Also write python to the master_ninja.
- if is_windows:
+ # Copy the gyp-win-tool to the toplevel_build.
+ # Also write python to the master_ninja.
+ if is_windows:
+ gyp.common.CopyTool(flavor, toplevel_build)
+ if GetToolchainOrNone(flavor):
+ GetToolchainOrNone(flavor).GenerateEnvironmentFiles(
+ toplevel_build, generator_flags, OpenOutput)
+ else:
+ gyp.msvs_emulation.GenerateEnvironmentFiles(toplevel_build,
+ generator_flags, OpenOutput)
+ master_ninja.variable('python', sys.executable)
+ master_ninja.newline()
+
+ except NotImplementedError:
+ # Fall back to the legacy toolchain.
+
+ # Put build-time support tools in out/{config_name}.
gyp.common.CopyTool(flavor, toplevel_build)
- if GetToolchainOrNone(flavor):
+
+ # Grab make settings for CC/CXX.
+ # The rules are
+ # - The priority from low to high is gcc/g++, the 'make_global_settings' in
+ # gyp, the environment variable.
+ # - If there is no 'make_global_settings' for CC.host/CXX.host or
+ # 'CC_host'/'CXX_host' environment variable, cc_host/cxx_host should be set
+ # to cc/cxx.
+ if (flavor in sony_flavors and is_windows):
+ cc = 'cl.exe'
+ cxx = 'cl.exe'
+ ld = 'link.exe'
+ gyp.msvs_emulation.GenerateEnvironmentFiles(toplevel_build,
+ generator_flags, OpenOutput)
+ ld_host = '$ld'
+ elif GetToolchainOrNone(flavor):
+ # TODO: starboardize.
+ cc = 'cl.exe'
+ cxx = 'cl.exe'
+ ld = 'link.exe'
GetToolchainOrNone(flavor).GenerateEnvironmentFiles(
toplevel_build, generator_flags, OpenOutput)
+ ld_host = '$ld'
else:
- gyp.msvs_emulation.GenerateEnvironmentFiles(toplevel_build,
- generator_flags, OpenOutput)
- master_ninja.variable('python', sys.executable)
+ cc = 'gcc'
+ cxx = 'g++'
+ ld = '$cxx'
+ ld_host = '$cxx_host'
+
+ cc_host = None
+ cxx_host = None
+ cc_host_global_setting = None
+ cxx_host_global_setting = None
+
+ build_to_root = InvertRelativePath(build_dir)
+ for key, value in make_global_settings:
+ if key == 'CC':
+ cc = os.path.join(build_to_root, value)
+ if key == 'CXX':
+ cxx = os.path.join(build_to_root, value)
+ if key == 'LD':
+ ld = os.path.join(build_to_root, value)
+ if key == 'CC.host':
+ cc_host = os.path.join(build_to_root, value)
+ cc_host_global_setting = value
+ if key == 'CXX.host':
+ cxx_host = os.path.join(build_to_root, value)
+ cxx_host_global_setting = value
+ if key == 'LD.host':
+ ld_host = os.path.join(build_to_root, value)
+
+ flock = 'flock'
+ if flavor == 'mac':
+ flock = './gyp-mac-tool flock'
+ cc = GetEnvironFallback(['CC_target', 'CC'], cc)
+ master_ninja.variable('cc', cc)
+ cxx = GetEnvironFallback(['CXX_target', 'CXX'], cxx)
+ master_ninja.variable('cxx', cxx)
+ ld = GetEnvironFallback(['LD_target', 'LD'], ld)
+ rc = GetEnvironFallback(['RC'], 'rc.exe')
+
+ if not cc_host:
+ cc_host = cc
+ if not cxx_host:
+ cxx_host = cxx
+
+ # gyp-win-tool wrappers have a winpython only flock implementation.
+ if sys.platform == 'cygwin':
+ python_exec = '$python'
+ else:
+ python_exec = sys.executable
+
+ ar_flags = ''
+ if flavor in microsoft_flavors:
+ master_ninja.variable('ld', ld)
+ master_ninja.variable('ar', os.environ.get('AR', 'ar'))
+ master_ninja.variable('rc', rc)
+ master_ninja.variable('asm', 'ml.exe')
+ master_ninja.variable('mt', 'mt.exe')
+ master_ninja.variable('use_dep_database', '1')
+ elif flavor in sony_flavors:
+ # Require LD to be set.
+ master_ninja.variable('ld', os.environ.get('LD'))
+ master_ninja.variable('ar', os.environ.get('AR', 'ar'))
+ ar_flags = os.environ.get('ARFLAGS', 'rcs')
+ master_ninja.variable('arFlags', ar_flags)
+ # On Sony, when we use orbis-snarl.exe with a response file, we cannot
+ # pass it flags (like 'rcs'), so ARFLAGS is likely set to '' for this
+ # platform. In that case, do not append the thin archive 'T' flag
+ # to the flags string.
+ thin_flag_to_add = ''
+ if len(ar_flags) >= 1 and ar_flags.find('T') == -1:
+ thin_flag_to_add = 'T'
+ master_ninja.variable('arThinFlags', ar_flags + thin_flag_to_add)
+
+ else:
+ master_ninja.variable('ld', ld)
+ master_ninja.variable('ar', GetEnvironFallback(['AR_target', 'AR'], 'ar'))
+ ar_flags = os.environ.get('ARFLAGS', 'rcs')
+ master_ninja.variable('arFlags', ar_flags)
+ thin_flag_to_add = ''
+ if ar_flags.find('T') == -1:
+ thin_flag_to_add = 'T'
+ master_ninja.variable('arThinFlags', ar_flags + thin_flag_to_add)
+ master_ninja.variable('ar_host', GetEnvironFallback(['AR_host'], 'ar'))
+ cc_host = GetEnvironFallback(['CC_host'], cc_host)
+ cxx_host = GetEnvironFallback(['CXX_host'], cxx_host)
+ ld_host = GetEnvironFallback(['LD_host'], ld_host)
+ arflags_host = GetEnvironFallback(['ARFLAGS_host'], ar_flags)
+ arthinflags_host = GetEnvironFallback(['ARTHINFLAGS_host'], arflags_host)
+
+ # The environment variable could be used in 'make_global_settings', like
+ # ['CC.host', '$(CC)'] or ['CXX.host', '$(CXX)'], transform them here.
+ if '$(CC)' in cc_host and cc_host_global_setting:
+ cc_host = cc_host_global_setting.replace('$(CC)', cc)
+ if '$(CXX)' in cxx_host and cxx_host_global_setting:
+ cxx_host = cxx_host_global_setting.replace('$(CXX)', cxx)
+ master_ninja.variable('cc_host', cc_host)
+ master_ninja.variable('cxx_host', cxx_host)
+ master_ninja.variable('arFlags_host', arflags_host)
+ master_ninja.variable('arThinFlags_host', arthinflags_host)
+ master_ninja.variable('ld_host', ld_host)
+
+ if sys.platform == 'cygwin':
+ python_path = cygpath.to_nt(
+ '/cygdrive/c/python_27_amd64/files/python.exe')
+ else:
+ python_path = 'python'
+ master_ninja.variable('python', python_path)
master_ninja.newline()
+ master_ninja.pool('link_pool', depth=GetDefaultConcurrentLinks())
+ master_ninja.newline()
+
+ if flavor not in microsoft_flavors:
+ if flavor in sony_flavors:
+ # uca := Unnamed Console A
+ dep_format = 'uca'
+ master_ninja.rule(
+ 'cc',
+ description='CC $out',
+ command=('$cc @$out.rsp'),
+ rspfile='$out.rsp',
+ rspfile_content=('-c $in -o $out '
+ '-MMD $defines $includes $cflags $cflags_c '
+ '$cflags_pch_c'),
+ depfile='$out_no_ext.d',
+ deps='gcc',
+ depformat=dep_format)
+ master_ninja.rule(
+ 'cxx',
+ description='CXX $out',
+ command=('$cxx @$out.rsp'),
+ rspfile='$out.rsp',
+ rspfile_content=('-c $in -o $out '
+ '-MMD $defines $includes $cflags $cflags_cc '
+ '$cflags_pch_cc'),
+ depfile='$out_no_ext.d',
+ deps='gcc',
+ depformat=dep_format)
+ else:
+ master_ninja.rule(
+ 'cc',
+ description='CC $out',
+ command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c '
+ '$cflags_pch_c -c $in -o $out'),
+ deps='gcc',
+ depfile='$out.d')
+ master_ninja.rule(
+ 'cc_s',
+ description='CC $out',
+ command=('$cc $defines $includes $cflags $cflags_c '
+ '$cflags_pch_c -c $in -o $out'))
+ master_ninja.rule(
+ 'cxx',
+ description='CXX $out',
+ command=(
+ '$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc '
+ '$cflags_pch_cc -c $in -o $out'),
+ deps='gcc',
+ depfile='$out.d')
+
+ else:
+ cc_command = ('$cc /nologo /showIncludes /FC '
+ '@$out.rsp /c $in /Fo$out /Fd$pdbname ')
+ cxx_command = ('$cxx /nologo /showIncludes /FC '
+ '@$out.rsp /c $in /Fo$out /Fd$pdbname ')
+ master_ninja.rule(
+ 'cc',
+ description='CC $out',
+ command=cc_command,
+ deps='msvc',
+ rspfile='$out.rsp',
+ rspfile_content='$defines $includes $cflags $cflags_c')
+ master_ninja.rule(
+ 'cxx',
+ description='CXX $out',
+ command=cxx_command,
+ deps='msvc',
+ rspfile='$out.rsp',
+ rspfile_content='$defines $includes $cflags $cflags_cc')
+
+ master_ninja.rule(
+ 'rc',
+ description='RC $in',
+ # Note: $in must be last otherwise rc.exe complains.
+ command=('%s gyp-win-tool rc-wrapper '
+ '$arch $rc $defines $includes $rcflags /fo$out $in' %
+ python_exec))
+ master_ninja.rule(
+ 'asm',
+ description='ASM $in',
+ command=(
+ '%s gyp-win-tool asm-wrapper '
+ '$arch $asm $defines $includes /c /Fo $out $in' % python_exec))
+
+ if flavor not in (['mac'] + microsoft_flavors):
+ alink_command = 'rm -f $out && $ar $arFlags $out @$out.rsp'
+ # TODO: Use rcsT on Linux only.
+ alink_thin_command = 'rm -f $out && $ar $arThinFlags $out @$out.rsp'
+
+ ld_cmd = '$ld'
+
+ if flavor in sony_flavors and is_windows:
+ alink_command = 'cmd.exe /c ' + alink_command
+ alink_thin_command = 'cmd.exe /c ' + alink_thin_command
+ ld_cmd = '%s gyp-win-tool link-wrapper $arch $ld' % python_exec
+
+ master_ninja.rule(
+ 'alink',
+ description='AR $out',
+ command=alink_command,
+ rspfile='$out.rsp',
+ rspfile_content='$in_newline')
+ master_ninja.rule(
+ 'alink_thin',
+ description='AR $out',
+ command=alink_thin_command,
+ rspfile='$out.rsp',
+ rspfile_content='$in_newline')
+
+ # This allows targets that only need to depend on $lib's API to declare
+ # an order-only dependency on $lib.TOC and avoid relinking such
+ # downstream dependencies when $lib changes only in non-public ways.
+ # The resulting string leaves an uninterpolated %{suffix} which
+ # is used in the final substitution below.
+ mtime_preserving_solink_base = (
+ 'if [ ! -e $lib -o ! -e ${lib}.TOC ]; then %(solink)s && '
+ '%(extract_toc)s > ${lib}.TOC; else %(solink)s && %(extract_toc)s '
+ '> ${lib}.tmp && if ! cmp -s ${lib}.tmp ${lib}.TOC; then mv '
+ '${lib}.tmp ${lib}.TOC ; fi; fi' % {
+ 'solink': (
+ ld_cmd +
+ ' -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s'),
+ 'extract_toc': ('{ readelf -d ${lib} | grep SONAME ; '
+ 'nm -gD -f p ${lib} | cut -f1-2 -d\' \'; }')
+ })
+
+ master_ninja.rule(
+ 'solink',
+ description='SOLINK $lib',
+ restat=True,
+ command=(mtime_preserving_solink_base % {
+ 'suffix':
+ '-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive '
+ '$libs'
+ }))
+ master_ninja.rule(
+ 'solink_module',
+ description='SOLINK(module) $lib',
+ restat=True,
+ command=(mtime_preserving_solink_base % {
+ 'suffix': '-Wl,--start-group $in $solibs -Wl,--end-group $libs'
+ }))
+
+ if flavor in sony_flavors:
+ # Sony linkers don't know about rpath.
+ rpath = ''
+ else:
+ rpath = r'-Wl,-rpath=\$$ORIGIN/lib'
+
+ master_ninja.rule(
+ 'link',
+ description='LINK $out',
+ command=(ld_cmd + ' @$out.rsp'),
+ rspfile='$out.rsp',
+ rspfile_content=('$ldflags -o $out %s -Wl,--start-group $in $solibs '
+ '-Wl,--end-group $libs' % rpath),
+ pool='link_pool')
+ elif flavor in microsoft_flavors:
+ master_ninja.rule(
+ 'alink',
+ description='LIB $out',
+ command=(
+ '%s gyp-win-tool link-wrapper $arch '
+ '$ar /nologo /ignore:4221 /OUT:$out @$out.rsp' % python_exec),
+ rspfile='$out.rsp',
+ rspfile_content='$in_newline $libflags')
+ dlldesc = 'LINK(DLL) $dll'
+ dllcmd = ('%s gyp-win-tool link-wrapper $arch '
+ '$ld /nologo $implibflag /DLL /OUT:$dll '
+ '/PDB:$dll.pdb @$dll.rsp' % python_exec)
+ if not flavor in microsoft_flavors:
+ # XB1 doesn't need a manifest.
+ dllcmd += (
+ ' && %s gyp-win-tool manifest-wrapper $arch '
+ '$mt -nologo -manifest $manifests -out:$dll.manifest' % python_exec)
+ master_ninja.rule(
+ 'solink',
+ description=dlldesc,
+ command=dllcmd,
+ rspfile='$dll.rsp',
+ rspfile_content='$libs $in_newline $ldflags',
+ restat=True)
+ master_ninja.rule(
+ 'solink_module',
+ description=dlldesc,
+ command=dllcmd,
+ rspfile='$dll.rsp',
+ rspfile_content='$libs $in_newline $ldflags',
+ restat=True)
+ # Note that ldflags goes at the end so that it has the option of
+ # overriding default settings earlier in the command line.
+ if flavor == 'win':
+ link_command = ('%s gyp-win-tool link-wrapper $arch '
+ '$ld /nologo /OUT:$out /PDB:$out.pdb @$out.rsp && '
+ '%s gyp-win-tool manifest-wrapper $arch '
+ '$mt -nologo -manifest $manifests -out:$out.manifest' %
+ (python_exec, python_exec))
+ else:
+ assert flavor in microsoft_flavors
+ # XB1 doesn't need a manifest.
+ link_command = (
+ '%s gyp-win-tool link-wrapper $arch '
+ '$ld /nologo /OUT:$out /PDB:$out.pdb @$out.rsp' % (python_exec))
+
+ master_ninja.rule(
+ 'link',
+ description='LINK $out',
+ command=link_command,
+ rspfile='$out.rsp',
+ rspfile_content='$in_newline $libs $ldflags',
+ pool='link_pool')
+ else:
+ master_ninja.rule(
+ 'objc',
+ description='OBJC $out',
+ command=(
+ '$cc -MMD -MF $out.d $defines $includes $cflags $cflags_objc '
+ '$cflags_pch_objc -c $in -o $out'),
+ depfile='$out.d')
+ master_ninja.rule(
+ 'objcxx',
+ description='OBJCXX $out',
+ command=(
+ '$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_objcc '
+ '$cflags_pch_objcc -c $in -o $out'),
+ depfile='$out.d')
+ master_ninja.rule(
+ 'alink',
+ description='LIBTOOL-STATIC $out, POSTBUILDS',
+ command='rm -f $out && '
+ './gyp-mac-tool filter-libtool libtool $libtool_flags '
+ '-static -o $out $in'
+ '$postbuilds')
+
+ # Record the public interface of $lib in $lib.TOC. See the corresponding
+ # comment in the posix section above for details.
+ mtime_preserving_solink_base = (
+ 'if [ ! -e $lib -o ! -e ${lib}.TOC ] || '
+ # Always force dependent targets to relink if this library
+ # reexports something. Handling this correctly would require
+ # recursive TOC dumping but this is rare in practice, so punt.
+ 'otool -l $lib | grep -q LC_REEXPORT_DYLIB ; then '
+ '%(solink)s && %(extract_toc)s > ${lib}.TOC; '
+ 'else '
+ '%(solink)s && %(extract_toc)s > ${lib}.tmp && '
+ 'if ! cmp -s ${lib}.tmp ${lib}.TOC; then '
+ 'mv ${lib}.tmp ${lib}.TOC ; '
+ 'fi; '
+ 'fi' % {
+ 'solink': '$ld -shared $ldflags -o $lib %(suffix)s',
+ 'extract_toc':
+ '{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
+ 'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'
+ })
+
+ # TODO(thakis): The solink_module rule is likely wrong. Xcode seems to
+ # pass -bundle -single_module here (for osmesa.so).
+ master_ninja.rule(
+ 'solink',
+ description='SOLINK $lib, POSTBUILDS',
+ restat=True,
+ command=(mtime_preserving_solink_base % {
+ 'suffix': '$in $solibs $libs$postbuilds'
+ }))
+ master_ninja.rule(
+ 'solink_module',
+ description='SOLINK(module) $lib, POSTBUILDS',
+ restat=True,
+ command=(mtime_preserving_solink_base % {
+ 'suffix': '$in $solibs $libs$postbuilds'
+ }))
+
+ master_ninja.rule(
+ 'link',
+ description='LINK $out, POSTBUILDS',
+ command=('$ld $ldflags -o $out '
+ '$in $solibs $libs$postbuilds'),
+ pool='link_pool')
+ master_ninja.rule(
+ 'infoplist',
+ description='INFOPLIST $out',
+ command=('$cc -E -P -Wno-trigraphs -x c $defines $in -o $out && '
+ 'plutil -convert xml1 $out $out'))
+ master_ninja.rule(
+ 'mac_tool',
+ description='MACTOOL $mactool_cmd $in',
+ command='$env ./gyp-mac-tool $mactool_cmd $in $out')
+ master_ninja.rule(
+ 'package_framework',
+ description='PACKAGE FRAMEWORK $out, POSTBUILDS',
+ command='./gyp-mac-tool package-framework $out $version$postbuilds '
+ '&& touch $out')
+ if flavor in microsoft_flavors:
+ master_ninja.rule(
+ 'stamp',
+ description='STAMP $out',
+ command='%s gyp-win-tool stamp $out' % python_exec)
+ master_ninja.rule(
+ 'copy',
+ description='COPY $in $out',
+ command='%s gyp-win-tool recursive-mirror $in $out' % python_exec)
+ elif sys.platform in ['cygwin', 'win32']:
+ master_ninja.rule(
+ 'stamp',
+ description='STAMP $out',
+ command='$python gyp-win-tool stamp $out')
+ master_ninja.rule(
+ 'copy',
+ description='COPY $in $out',
+ command='$python gyp-win-tool recursive-mirror $in $out')
+ else:
+ master_ninja.rule(
+ 'stamp', description='STAMP $out', command='${postbuilds}touch $out')
+ master_ninja.rule(
+ 'copy',
+ description='COPY $in $out',
+ command='rm -rf $out && cp -af $in $out')
+ master_ninja.newline()
+
+ # Output host building rules
+ if is_windows:
+ cc_command = ('$cc /nologo /showIncludes /FC '
+ '@$out.rsp /c $in /Fo$out /Fd$pdbname ')
+ cxx_command = ('$cxx /nologo /showIncludes /FC '
+ '@$out.rsp /c $in /Fo$out /Fd$pdbname ')
+ master_ninja.rule(
+ 'cc_host',
+ description='CC_HOST $out',
+ command=cc_command,
+ deps='msvc',
+ rspfile='$out.rsp',
+ rspfile_content='$defines $includes $cflags_host $cflags_c_host')
+ master_ninja.rule(
+ 'cxx_host',
+ description='CXX_HOST $out',
+ command=cxx_command,
+ deps='msvc',
+ rspfile='$out.rsp',
+ rspfile_content='$defines $includes $cflags_host $cflags_cc_host')
+
+ master_ninja.rule(
+ 'alink_host',
+ description='LIB_HOST $out',
+ command=(
+ '%s gyp-win-tool link-wrapper $arch '
+ '$ar /nologo /ignore:4221 /OUT:$out @$out.rsp' % python_exec),
+ rspfile='$out.rsp',
+ rspfile_content='$in_newline $libflags_host')
+
+ master_ninja.rule(
+ 'alink_thin_host',
+ description='LIB_HOST $out',
+ command=(
+ '%s gyp-win-tool link-wrapper $arch '
+ '$ar /nologo /ignore:4221 /OUT:$out @$out.rsp' % python_exec),
+ rspfile='$out.rsp',
+ rspfile_content='$in_newline $libflags_host')
+
+ link_command = (
+ '%s gyp-win-tool link-wrapper $arch '
+ '$ld /nologo /OUT:$out /PDB:$out.pdb @$out.rsp' % (python_exec))
+
+ master_ninja.rule(
+ 'link_host',
+ description='LINK_HOST $out',
+ command=link_command,
+ rspfile='$out.rsp',
+ rspfile_content='$in_newline $libs $ldflags',
+ pool='link_pool')
+ else:
+ cc_command = 'bash -c "$cc_host @$out.rsp"'
+ cxx_command = 'bash -c "$cxx_host @$out.rsp"'
+ master_ninja.rule(
+ 'cc_host',
+ description='CC_HOST $out',
+ command=cc_command,
+ rspfile='$out.rsp',
+ rspfile_content=('-MMD -MF $out.d $defines $includes $cflags_host '
+ '$cflags_c_host $cflags_pch_c -c $in -o $out'),
+ depfile='$out.d')
+ master_ninja.rule(
+ 'cxx_host',
+ description='CXX_HOST $out',
+ command=cxx_command,
+ rspfile='$out.rsp',
+ rspfile_content=('-MMD -MF $out.d $defines $includes $cflags_host '
+ '$cflags_cc_host $cflags_pch_cc -c $in -o $out'),
+ depfile='$out.d')
+
+ alink_command = 'rm -f $out && $ar_host $arFlags_host $out @$out.rsp'
+ alink_thin_command = ('rm -f $out && $ar_host $arThinFlags_host $out '
+ '@$out.rsp')
+
+ master_ninja.rule(
+ 'alink_host',
+ description='AR_HOST $out',
+ command='bash -c "' + alink_command + '"',
+ rspfile='$out.rsp',
+ rspfile_content='$in_newline')
+ master_ninja.rule(
+ 'alink_thin_host',
+ description='AR_HOST $out',
+ command='bash -c "' + alink_thin_command + '"',
+ rspfile='$out.rsp',
+ rspfile_content='$in_newline')
+ beginlinkinlibs = ''
+ endlinkinlibs = ''
+ if is_linux:
+ beginlinkinlibs = '-Wl,--start-group'
+ endlinkinlibs = '-Wl,--end-group'
+ rpath = '-Wl,-rpath=\$$ORIGIN/lib'
+ master_ninja.rule(
+ 'link_host',
+ description='LINK_HOST $out',
+ command=('bash -c "$ld_host $ldflags_host -o $out %s '
+ '%s $in $solibs %s $libs"' % (rpath, beginlinkinlibs,
+ endlinkinlibs)))
+
all_targets = set()
for build_file in params['build_files']:
for target in gyp.common.AllTargets(target_list, target_dicts,