diff --git a/tools/gyp/gyptest.py b/tools/gyp/gyptest.py index c84f3d3306c..6c6b00944fe 100755 --- a/tools/gyp/gyptest.py +++ b/tools/gyp/gyptest.py @@ -171,7 +171,9 @@ def main(argv=None): os.chdir(opts.chdir) if opts.path: - os.environ['PATH'] += ':' + ':'.join(opts.path) + extra_path = [os.path.abspath(p) for p in opts.path] + extra_path = os.pathsep.join(extra_path) + os.environ['PATH'] += os.pathsep + extra_path if not args: if not opts.all: diff --git a/tools/gyp/pylib/gyp/MSVSNew.py b/tools/gyp/pylib/gyp/MSVSNew.py index 08659737831..253fe619863 100644 --- a/tools/gyp/pylib/gyp/MSVSNew.py +++ b/tools/gyp/pylib/gyp/MSVSNew.py @@ -59,7 +59,13 @@ def MakeGuid(name, seed='msvs_new'): #------------------------------------------------------------------------------ -class MSVSFolder(object): +class MSVSSolutionEntry(object): + def __cmp__(self, other): + # Sort by name then guid (so things are in order on vs2008). + return cmp((self.name, self.get_guid()), (other.name, other.get_guid())) + + +class MSVSFolder(MSVSSolutionEntry): """Folder in a Visual Studio project or solution.""" def __init__(self, path, name = None, entries = None, @@ -85,7 +91,7 @@ def __init__(self, path, name = None, entries = None, self.guid = guid # Copy passed lists (or set to empty lists) - self.entries = list(entries or []) + self.entries = sorted(list(entries or [])) self.items = list(items or []) self.entry_type_guid = ENTRY_TYPE_GUIDS['folder'] @@ -100,7 +106,7 @@ def get_guid(self): #------------------------------------------------------------------------------ -class MSVSProject(object): +class MSVSProject(MSVSSolutionEntry): """Visual Studio project.""" def __init__(self, path, name = None, dependencies = None, guid = None, @@ -229,15 +235,7 @@ def Write(self, writer=gyp.common.WriteOnDiff): if isinstance(e, MSVSFolder): entries_to_check += e.entries - # Sort by name then guid (so things are in order on vs2008). - def NameThenGuid(a, b): - if a.name < b.name: return -1 - if a.name > b.name: return 1 - if a.get_guid() < b.get_guid(): return -1 - if a.get_guid() > b.get_guid(): return 1 - return 0 - - all_entries = sorted(all_entries, NameThenGuid) + all_entries = sorted(all_entries) # Open file and print header f = writer(self.path) diff --git a/tools/gyp/pylib/gyp/MSVSVersion.py b/tools/gyp/pylib/gyp/MSVSVersion.py index eeec2e615e5..97caf669801 100644 --- a/tools/gyp/pylib/gyp/MSVSVersion.py +++ b/tools/gyp/pylib/gyp/MSVSVersion.py @@ -9,6 +9,7 @@ import re import subprocess import sys +import gyp class VisualStudioVersion(object): @@ -193,6 +194,8 @@ def _CreateVersion(name, path, sdk_based=False): autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version is passed in that doesn't match a value in versions python will throw a error. """ + if path: + path = os.path.normpath(path) versions = { '2012': VisualStudioVersion('2012', 'Visual Studio 2012', @@ -264,6 +267,14 @@ def _CreateVersion(name, path, sdk_based=False): return versions[str(name)] +def _ConvertToCygpath(path): + """Convert to cygwin path if we are using cygwin.""" + if sys.platform == 'cygwin': + p = subprocess.Popen(['cygpath', path], stdout=subprocess.PIPE) + path = p.communicate()[0].strip() + return path + + def _DetectVisualStudioVersions(versions_to_check, force_express): """Collect the list of installed visual studio versions. 
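The _ConvertToCygpath helper added above shells out to cygwin's cygpath utility so that native Windows paths coming out of the registry (e.g. C:\Program Files\...) become POSIX-style paths when gyp itself runs under cygwin's Python; the call sites in _DetectVisualStudioVersions follow below. A minimal standalone sketch of the same idea (convert_to_cygpath is a name chosen here, and it assumes cygpath is on PATH whenever sys.platform == 'cygwin'):

import subprocess
import sys

def convert_to_cygpath(path):
    # Registry lookups return native Windows paths; translate them with
    # cygpath when running under cygwin, pass them through otherwise.
    if sys.platform == 'cygwin':
        p = subprocess.Popen(['cygpath', path], stdout=subprocess.PIPE)
        path = p.communicate()[0].strip()
    return path

# convert_to_cygpath('C:\\tools') -> '/cygdrive/c/tools' under cygwin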
@@ -294,6 +305,7 @@ def _DetectVisualStudioVersions(versions_to_check, force_express): path = _RegistryGetValue(keys[index], 'InstallDir') if not path: continue + path = _ConvertToCygpath(path) # Check for full. full_path = os.path.join(path, 'devenv.exe') express_path = os.path.join(path, 'vcexpress.exe') @@ -314,6 +326,7 @@ def _DetectVisualStudioVersions(versions_to_check, force_express): path = _RegistryGetValue(keys[index], version) if not path: continue + path = _ConvertToCygpath(path) versions.append(_CreateVersion(version_to_year[version] + 'e', os.path.join(path, '..'), sdk_based=True)) diff --git a/tools/gyp/pylib/gyp/__init__.py b/tools/gyp/pylib/gyp/__init__.py index 54488e5e6bb..ac300a903c0 100755 --- a/tools/gyp/pylib/gyp/__init__.py +++ b/tools/gyp/pylib/gyp/__init__.py @@ -12,6 +12,7 @@ import shlex import sys import traceback +from gyp.common import GypError # Default debug modes for GYP debug = {} @@ -44,15 +45,9 @@ def FindBuildFiles(): return build_files -class GypError(Exception): - """Error class representing an error, which is to be presented - to the user. The main entry point will catch and display this. - """ - pass - - def Load(build_files, format, default_variables={}, - includes=[], depth='.', params=None, check=False, circular_check=True): + includes=[], depth='.', params=None, check=False, + circular_check=True): """ Loads one or more specified build files. default_variables and includes will be copied before use. @@ -130,7 +125,8 @@ def Load(build_files, format, default_variables={}, # Process the input specific to this generator. result = gyp.input.Load(build_files, default_variables, includes[:], - depth, generator_input_info, check, circular_check) + depth, generator_input_info, check, circular_check, + params['parallel']) return [generator] + result def NameValueListToDict(name_value_list): @@ -317,9 +313,14 @@ def gyp_main(args): help='do not read options from environment variables') parser.add_option('--check', dest='check', action='store_true', help='check format of gyp files') + parser.add_option('--parallel', action='store_true', + env_name='GYP_PARALLEL', + help='Use multiprocessing for speed (experimental)') parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store', default=None, metavar='DIR', type='path', help='directory to use as the root of the source tree') + parser.add_option('--build', dest='configs', action='append', + help='configuration for build after project generation') # --no-circular-check disables the check for circular relationships between # .gyp files. These relationships should not exist, but they've only been # observed to be harmful with the Xcode generator. Chromium's .gyp files @@ -374,6 +375,9 @@ def gyp_main(args): if g_o: options.generator_output = g_o + if not options.parallel and options.use_environment: + options.parallel = bool(os.environ.get('GYP_PARALLEL')) + for mode in options.debug: gyp.debug[mode] = 1 @@ -484,7 +488,8 @@ def gyp_main(args): 'cwd': os.getcwd(), 'build_files_arg': build_files_arg, 'gyp_binary': sys.argv[0], - 'home_dot_gyp': home_dot_gyp} + 'home_dot_gyp': home_dot_gyp, + 'parallel': options.parallel} # Start with the default variables from the command line. [generator, flat_list, targets, data] = Load(build_files, format, @@ -502,6 +507,13 @@ def gyp_main(args): # generate targets in the order specified in flat_list. 
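The new --parallel flag above follows the same pattern as gyp's other environment-backed options: an explicit command-line value wins, and the GYP_PARALLEL environment variable is consulted only when reading the environment is allowed. A sketch of that fallback rule (flag_from_env is a hypothetical helper name, not part of the patch):

import os

def flag_from_env(cli_value, env_name, use_environment):
    # Command-line value takes precedence; otherwise fall back to the
    # environment, mirroring how gyp_main resolves options.parallel
    # from GYP_PARALLEL.
    if cli_value:
        return True
    if use_environment:
        return bool(os.environ.get(env_name))
    return False

# options.parallel = flag_from_env(options.parallel, 'GYP_PARALLEL',
#                                  options.use_environment)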
generator.GenerateOutput(flat_list, targets, data, params) + if options.configs: + valid_configs = targets[flat_list[0]]['configurations'].keys() + for conf in options.configs: + if conf not in valid_configs: + raise GypError('Invalid config specified via --build: %s' % conf) + generator.PerformBuild(data, options.configs, params) + # Done return 0 diff --git a/tools/gyp/pylib/gyp/common.py b/tools/gyp/pylib/gyp/common.py index 6144d2fe7af..e917a59a3c5 100644 --- a/tools/gyp/pylib/gyp/common.py +++ b/tools/gyp/pylib/gyp/common.py @@ -27,6 +27,13 @@ def __call__(self, *args): return result +class GypError(Exception): + """Error class representing an error, which is to be presented + to the user. The main entry point will catch and display this. + """ + pass + + def ExceptionAppend(e, msg): """Append a message to the given exception's message.""" if not e.args: @@ -361,13 +368,20 @@ def GetFlavor(params): 'cygwin': 'win', 'win32': 'win', 'darwin': 'mac', - 'sunos5': 'solaris', - 'freebsd7': 'freebsd', - 'freebsd8': 'freebsd', - 'freebsd9': 'freebsd', } - flavor = flavors.get(sys.platform, 'linux') - return params.get('flavor', flavor) + + if 'flavor' in params: + return params['flavor'] + if sys.platform in flavors: + return flavors[sys.platform] + if sys.platform.startswith('sunos'): + return 'solaris' + if sys.platform.startswith('freebsd'): + return 'freebsd' + if sys.platform.startswith('dragonfly'): + return 'dragonflybsd' + + return 'linux' def CopyTool(flavor, out_path): diff --git a/tools/gyp/pylib/gyp/common_test.py b/tools/gyp/pylib/gyp/common_test.py index aabaf344b05..7fbac09d0fe 100755 --- a/tools/gyp/pylib/gyp/common_test.py +++ b/tools/gyp/pylib/gyp/common_test.py @@ -8,6 +8,7 @@ import gyp.common import unittest +import sys class TestTopologicallySorted(unittest.TestCase): @@ -40,5 +41,32 @@ def GetEdge(node): graph.keys(), GetEdge) +class TestGetFlavor(unittest.TestCase): + """Test that gyp.common.GetFlavor works as intended""" + original_platform = '' + + def setUp(self): + self.original_platform = sys.platform + + def tearDown(self): + sys.platform = self.original_platform + + def assertFlavor(self, expected, argument, param): + sys.platform = argument + self.assertEqual(expected, gyp.common.GetFlavor(param)) + + def test_platform_default(self): + self.assertFlavor('dragonflybsd', 'dragonfly3', {}) + self.assertFlavor('freebsd' , 'freebsd9' , {}) + self.assertFlavor('freebsd' , 'freebsd10' , {}) + self.assertFlavor('solaris' , 'sunos5' , {}); + self.assertFlavor('solaris' , 'sunos' , {}); + self.assertFlavor('linux' , 'linux2' , {}); + self.assertFlavor('linux' , 'linux3' , {}); + + def test_param(self): + self.assertFlavor('foobar', 'linux2' , {'flavor': 'foobar'}) + + if __name__ == '__main__': unittest.main() diff --git a/tools/gyp/pylib/gyp/generator/android.py b/tools/gyp/pylib/gyp/generator/android.py index 0cecf7a64c4..872ec844c8c 100644 --- a/tools/gyp/pylib/gyp/generator/android.py +++ b/tools/gyp/pylib/gyp/generator/android.py @@ -38,12 +38,22 @@ 'RULE_INPUT_PATH': '$(RULE_SOURCES)', 'RULE_INPUT_EXT': '$(suffix $<)', 'RULE_INPUT_NAME': '$(notdir $<)', + 'CONFIGURATION_NAME': 'NOT_USED_ON_ANDROID', } # Make supports multiple toolsets generator_supports_multiple_toolsets = True +# Generator-specific gyp specs. +generator_additional_non_configuration_keys = [ + # Boolean to declare that this target does not want its name mangled. 
+ 'android_unmangled_name', +] +generator_additional_path_sections = [] +generator_extra_sources_for_rules = [] + + SHARED_FOOTER = """\ # "gyp_all_modules" is a concatenation of the "gyp_all_modules" targets from # all the included sub-makefiles. This is just here to clarify. @@ -153,7 +163,7 @@ def Write(self, qualified_target, base_path, output_filename, spec, configs, extra_outputs = [] extra_sources = [] - self.android_class = MODULE_CLASSES.get(self.type, 'NONE') + self.android_class = MODULE_CLASSES.get(self.type, 'GYP') self.android_module = self.ComputeAndroidModule(spec) (self.android_stem, self.android_suffix) = self.ComputeOutputParts(spec) self.output = self.output_binary = self.ComputeOutput(spec) @@ -576,6 +586,10 @@ def ComputeAndroidModule(self, spec): distinguish gyp-generated module names. """ + if int(spec.get('android_unmangled_name', 0)): + assert self.type != 'shared_library' or self.target.startswith('lib') + return self.target + if self.type == 'shared_library': # For reasons of convention, the Android build system requires that all # shared library modules are named 'libfoo' when generating -l flags. @@ -838,10 +852,11 @@ def WriteTarget(self, spec, configs, deps, link_deps, part_of_all): # Add an alias from the gyp target name to the Android module name. This # simplifies manual builds of the target, and is required by the test # framework. - self.WriteLn('# Alias gyp target name.') - self.WriteLn('.PHONY: %s' % self.target) - self.WriteLn('%s: %s' % (self.target, self.android_module)) - self.WriteLn('') + if self.target != self.android_module: + self.WriteLn('# Alias gyp target name.') + self.WriteLn('.PHONY: %s' % self.target) + self.WriteLn('%s: %s' % (self.target, self.android_module)) + self.WriteLn('') # Add the command to trigger build of the target type depending # on the toolset. Ex: BUILD_STATIC_LIBRARY vs. BUILD_HOST_STATIC_LIBRARY @@ -989,7 +1004,7 @@ def CalculateMakefilePath(build_file, base_name): default_configuration = 'Default' srcdir = '.' - makefile_name = 'GypAndroid.mk' + options.suffix + makefile_name = 'GypAndroid' + options.suffix + '.mk' makefile_path = os.path.join(options.toplevel_dir, makefile_name) assert not options.generator_output, ( 'The Android backend does not support options.generator_output.') diff --git a/tools/gyp/pylib/gyp/generator/dump_dependency_json.py b/tools/gyp/pylib/gyp/generator/dump_dependency_json.py index acee72ebb1e..f8480dd2842 100644 --- a/tools/gyp/pylib/gyp/generator/dump_dependency_json.py +++ b/tools/gyp/pylib/gyp/generator/dump_dependency_json.py @@ -1,10 +1,12 @@ -# Copyright (c) 2011 Google Inc. All rights reserved. +# Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
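The makefile naming fix at the end of the android generator hunk above moves the generator suffix in front of the extension: appending it after '.mk' produced names like 'GypAndroid.mk.foo', while the Android build system expects '.mk' to come last. The naming rule in isolation (makefile_name is a hypothetical helper; the generator inlines the concatenation):

import os

def makefile_name(base, suffix):
    # Insert the suffix before the extension: ('GypAndroid.mk', '.dbg')
    # becomes 'GypAndroid.dbg.mk' rather than 'GypAndroid.mk.dbg'.
    root, ext = os.path.splitext(base)
    return root + suffix + ext

assert makefile_name('GypAndroid.mk', '.dbg') == 'GypAndroid.dbg.mk'
assert makefile_name('GypAndroid.mk', '') == 'GypAndroid.mk'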
import collections +import os import gyp import gyp.common +import gyp.msvs_emulation import json import sys @@ -22,7 +24,8 @@ 'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT', 'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX', 'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX', - 'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX']: + 'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX', + 'CONFIGURATION_NAME']: generator_default_variables[unused] = '' @@ -32,6 +35,30 @@ def CalculateVariables(default_variables, params): default_variables.setdefault(key, val) default_variables.setdefault('OS', gyp.common.GetFlavor(params)) + flavor = gyp.common.GetFlavor(params) + if flavor =='win': + # Copy additional generator configuration data from VS, which is shared + # by the Windows Ninja generator. + import gyp.generator.msvs as msvs_generator + generator_additional_non_configuration_keys = getattr(msvs_generator, + 'generator_additional_non_configuration_keys', []) + generator_additional_path_sections = getattr(msvs_generator, + 'generator_additional_path_sections', []) + + # Set a variable so conditions can be based on msvs_version. + msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags) + default_variables['MSVS_VERSION'] = msvs_version.ShortName() + + # To determine processor word size on Windows, in addition to checking + # PROCESSOR_ARCHITECTURE (which reflects the word size of the current + # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which + # contains the actual word size of the system when running thru WOW64). + if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or + '64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')): + default_variables['MSVS_OS_BITS'] = 64 + else: + default_variables['MSVS_OS_BITS'] = 32 + def CalculateGeneratorInputInfo(params): """Calculate the generator specific info that gets fed to input (called by diff --git a/tools/gyp/pylib/gyp/generator/make.py b/tools/gyp/pylib/gyp/generator/make.py index 4648bd9b278..bcc2cc619de 100644 --- a/tools/gyp/pylib/gyp/generator/make.py +++ b/tools/gyp/pylib/gyp/generator/make.py @@ -24,9 +24,9 @@ import os import re import sys +import subprocess import gyp import gyp.common -import gyp.system_test import gyp.xcode_emulation from gyp.common import GetEnvironFallback @@ -125,7 +125,10 @@ def ensure_directory_exists(path): LINK_COMMANDS_LINUX = """\ quiet_cmd_alink = AR($(TOOLSET)) $@ -cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) $(ARFLAGS.$(TOOLSET)) $@ $(filter %.o,$^) +cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^) + +quiet_cmd_alink_thin = AR($(TOOLSET)) $@ +cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^) # Due to circular dependencies between libraries :(, we wrap the # special "figure out circular dependencies" flags around the entire @@ -158,7 +161,7 @@ def ensure_directory_exists(path): LINK_COMMANDS_MAC = """\ quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool -static -o $@ $(filter %.o,$^) +cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) quiet_cmd_link = LINK($(TOOLSET)) $@ cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) @@ -176,7 +179,10 @@ def ensure_directory_exists(path): LINK_COMMANDS_ANDROID = """\ quiet_cmd_alink = AR($(TOOLSET)) $@ -cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) $(ARFLAGS.$(TOOLSET)) $@ $(filter %.o,$^) +cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^) + +quiet_cmd_alink_thin = AR($(TOOLSET)) $@ 
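The MSVS_OS_BITS logic above distinguishes the word size of the operating system from that of the current process: a 32-bit Python running on 64-bit Windows sees PROCESSOR_ARCHITECTURE=x86, but WOW64 exposes the real architecture through PROCESSOR_ARCHITEW6432. The same check in a self-contained form (windows_os_bits is a name chosen here):

import os

def windows_os_bits():
    # Either variable containing '64' (AMD64, IA64) means a 64-bit OS,
    # even when the current process runs 32-bit under WOW64.
    if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or
        '64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')):
        return 64
    return 32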
+cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^) # Due to circular dependencies between libraries :(, we wrap the # special "figure out circular dependencies" flags around the entire @@ -262,10 +268,7 @@ def ensure_directory_exists(path): LINK.target ?= %(LINK.target)s LDFLAGS.target ?= $(LDFLAGS) AR.target ?= $(AR) -ARFLAGS.target ?= %(ARFLAGS.target)s -# N.B.: the logic of which commands to run should match the computation done -# in gyp's make.py where ARFLAGS.host etc. is computed. # TODO(evan): move all cross-compilation logic to gyp-time so we don't need # to replicate this environment fallback in make as well. CC.host ?= %(CC.host)s @@ -275,7 +278,6 @@ def ensure_directory_exists(path): LINK.host ?= %(LINK.host)s LDFLAGS.host ?= AR.host ?= %(AR.host)s -ARFLAGS.host := %(ARFLAGS.host)s # Define a dir function that can handle spaces. # http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions @@ -721,9 +723,12 @@ def Write(self, qualified_target, base_path, output_filename, spec, configs, else: self.output = self.output_binary = self.ComputeOutput(spec) + self.is_standalone_static_library = bool( + spec.get('standalone_static_library', 0)) self._INSTALLABLE_TARGETS = ('executable', 'loadable_module', 'shared_library') - if self.type in self._INSTALLABLE_TARGETS: + if (self.is_standalone_static_library or + self.type in self._INSTALLABLE_TARGETS): self.alias = os.path.basename(self.output) install_path = self._InstallableTargetInstallPath() else: @@ -838,6 +843,7 @@ def WriteActions(self, actions, extra_sources, extra_outputs, actions) part_of_all: flag indicating this target is part of 'all' """ + env = self.GetSortedXcodeEnv() for action in actions: name = StringToMakefileVariable('%s_%s' % (self.qualified_target, action['action_name'])) @@ -858,7 +864,11 @@ def WriteActions(self, actions, extra_sources, extra_outputs, extra_mac_bundle_resources += outputs # Write the actual command. - command = gyp.common.EncodePOSIXShellList(action['action']) + action_commands = action['action'] + if self.flavor == 'mac': + action_commands = [gyp.xcode_emulation.ExpandEnvVars(command, env) + for command in action_commands] + command = gyp.common.EncodePOSIXShellList(action_commands) if 'message' in action: self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, action['message'])) else: @@ -907,7 +917,6 @@ def WriteActions(self, actions, extra_sources, extra_outputs, "Spaces in action output filenames not supported (%s)" % output) # See the comment in WriteCopies about expanding env vars. - env = self.GetSortedXcodeEnv() outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs] inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs] @@ -933,6 +942,7 @@ def WriteRules(self, rules, extra_sources, extra_outputs, rules (used to make other pieces dependent on these rules) part_of_all: flag indicating this target is part of 'all' """ + env = self.GetSortedXcodeEnv() for rule in rules: name = StringToMakefileVariable('%s_%s' % (self.qualified_target, rule['rule_name'])) @@ -972,6 +982,10 @@ def WriteRules(self, rules, extra_sources, extra_outputs, # amount of pain. actions += ['@touch --no-create $@'] + # See the comment in WriteCopies about expanding env vars. 
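The generators now emit two archiving rules instead of probing ar at gyp time: alink (ar crs) and alink_thin (ar crsT), shown in the LINK_COMMANDS blocks above. The 'T' modifier creates a thin archive that only references its object files, which is faster and smaller but not suitable for archives that must stand alone. A sketch of the selection the make and ninja generators apply later in WriteTarget (archive_rule is a hypothetical helper summarizing that logic):

def archive_rule(flavor, is_standalone_static_library):
    # Thin archives are a GNU binutils feature, so they are skipped on
    # mac and win; targets marked standalone_static_library also get a
    # regular archive because their output is meant to be used as-is.
    if flavor not in ('mac', 'win') and not is_standalone_static_library:
        return 'alink_thin'  # rm -f $@ && ar crsT $@ $(filter %.o,$^)
    return 'alink'           # rm -f $@ && ar crs $@ $(filter %.o,$^)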
+ outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs] + inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs] + outputs = map(self.Absolutify, outputs) all_outputs += outputs # Only write the 'obj' and 'builddir' rules for the "primary" output @@ -996,6 +1010,9 @@ def WriteRules(self, rules, extra_sources, extra_outputs, # action, cd_action, and mkdirs get written to a toplevel variable # called cmd_foo. Toplevel variables can't handle things that change # per makefile like $(TARGET), so hardcode the target. + if self.flavor == 'mac': + action = [gyp.xcode_emulation.ExpandEnvVars(command, env) + for command in action] action = gyp.common.EncodePOSIXShellList(action) action = action.replace('$(TARGET)', self.target) cd_action = cd_action.replace('$(TARGET)', self.target) @@ -1049,7 +1066,7 @@ def WriteCopies(self, copies, extra_outputs, part_of_all): outputs = [] for copy in copies: for path in copy['files']: - # Absolutify() calls normpath, stripping trailing slashes. + # Absolutify() may call normpath, and will strip trailing slashes. path = Sourceify(self.Absolutify(path)) filename = os.path.split(path)[1] output = Sourceify(self.Absolutify(os.path.join(copy['destination'], @@ -1419,6 +1436,9 @@ def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, ldflags.append(r'-Wl,-rpath-link=\$(builddir)/lib.%s/' % self.toolset) self.WriteList(ldflags, 'LDFLAGS_%s' % configname) + if self.flavor == 'mac': + self.WriteList(self.xcode_settings.GetLibtoolflags(configname), + 'LIBTOOLFLAGS_%s' % configname) libraries = spec.get('libraries') if libraries: # Remove duplicate entries @@ -1430,6 +1450,10 @@ def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, QuoteSpaces(self.output_binary)) self.WriteLn('%s: LIBS := $(LIBS)' % QuoteSpaces(self.output_binary)) + if self.flavor == 'mac': + self.WriteLn('%s: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))' % + QuoteSpaces(self.output_binary)) + # Postbuild actions. Like actions, but implicitly depend on the target's # output. postbuilds = [] @@ -1517,8 +1541,13 @@ def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, for link_dep in link_deps: assert ' ' not in link_dep, ( "Spaces in alink input filenames not supported (%s)" % link_dep) - self.WriteDoCmd([self.output_binary], link_deps, 'alink', part_of_all, - postbuilds=postbuilds) + if (self.flavor not in ('mac', 'win') and not + self.is_standalone_static_library): + self.WriteDoCmd([self.output_binary], link_deps, 'alink_thin', + part_of_all, postbuilds=postbuilds) + else: + self.WriteDoCmd([self.output_binary], link_deps, 'alink', part_of_all, + postbuilds=postbuilds) elif self.type == 'shared_library': self.WriteLn('%s: LD_INPUTS := %s' % ( QuoteSpaces(self.output_binary), @@ -1558,9 +1587,12 @@ def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, # 1) They need to install to the build dir or "product" dir. # 2) They get shortcuts for building (e.g. "make chrome"). # 3) They are part of "make all". - if self.type in self._INSTALLABLE_TARGETS: + if (self.type in self._INSTALLABLE_TARGETS or + self.is_standalone_static_library): if self.type == 'shared_library': file_desc = 'shared library' + elif self.type == 'static_library': + file_desc = 'static library' else: file_desc = 'executable' install_path = self._InstallableTargetInstallPath() @@ -1830,9 +1862,10 @@ def Absolutify(self, path): """Convert a subdirectory-relative path into a base-relative path. 
Skips over paths that contain variables.""" if '$(' in path: - # path is no existing file in this case, but calling normpath is still - # important for trimming trailing slashes. - return os.path.normpath(path) + # Don't call normpath in this case, as it might collapse the + # path too aggressively if it features '..'. However it's still + # important to strip trailing slashes. + return path.rstrip('/') return os.path.normpath(os.path.join(self.path, path)) @@ -1881,39 +1914,15 @@ def WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files_args)}) -def RunSystemTests(flavor): - """Run tests against the system to compute default settings for commands. - - Returns: - dictionary of settings matching the block of command-lines used in - SHARED_HEADER. E.g. the dictionary will contain a ARFLAGS.target - key for the default ARFLAGS for the target ar command. - """ - # Compute flags used for building static archives. - # N.B.: this fallback logic should match the logic in SHARED_HEADER. - # See comment there for more details. - ar_target = GetEnvironFallback(('AR_target', 'AR'), 'ar') - cc_target = GetEnvironFallback(('CC_target', 'CC'), 'cc') - arflags_target = 'crs' - # ar -T enables thin archives on Linux. OS X's ar supports a -T flag, but it - # does something useless (it limits filenames in the archive to 15 chars). - if flavor != 'mac' and gyp.system_test.TestArSupportsT(ar_command=ar_target, - cc_command=cc_target): - arflags_target = 'crsT' - - ar_host = os.environ.get('AR_host', 'ar') - cc_host = os.environ.get('CC_host', 'gcc') - arflags_host = 'crs' - # It feels redundant to compute this again given that most builds aren't - # cross-compiles, but due to quirks of history CC_host defaults to 'gcc' - # while CC_target defaults to 'cc', so the commands really are different - # even though they're nearly guaranteed to run the same code underneath. - if flavor != 'mac' and gyp.system_test.TestArSupportsT(ar_command=ar_host, - cc_command=cc_host): - arflags_host = 'crsT' - - return { 'ARFLAGS.target': arflags_target, - 'ARFLAGS.host': arflags_host } +def PerformBuild(data, configurations, params): + options = params['options'] + for config in configurations: + arguments = ['make'] + if options.toplevel_dir and options.toplevel_dir != '.': + arguments += '-C', options.toplevel_dir + arguments.append('BUILDTYPE=' + config) + print 'Building [%s]: %s' % (config, arguments) + subprocess.check_call(arguments) def GenerateOutput(target_list, target_dicts, data, params): @@ -1991,12 +2000,11 @@ def CalculateMakefilePath(build_file, base_name): 'flock_index': 2, 'extra_commands': SHARED_HEADER_SUN_COMMANDS, }) - elif flavor == 'freebsd': + elif flavor == 'freebsd' or flavor == 'dragonflybsd': header_params.update({ 'flock': 'lockf', }) - header_params.update(RunSystemTests(flavor)) header_params.update({ 'CC.target': GetEnvironFallback(('CC_target', 'CC'), '$(CC)'), 'AR.target': GetEnvironFallback(('AR_target', 'AR'), '$(AR)'), diff --git a/tools/gyp/pylib/gyp/generator/msvs.py b/tools/gyp/pylib/gyp/generator/msvs.py index 70152486bf6..47cbd36ec69 100644 --- a/tools/gyp/pylib/gyp/generator/msvs.py +++ b/tools/gyp/pylib/gyp/generator/msvs.py @@ -18,6 +18,7 @@ import gyp.MSVSToolFile as MSVSToolFile import gyp.MSVSUserFile as MSVSUserFile import gyp.MSVSVersion as MSVSVersion +from gyp.common import GypError # Regular expression for validating Visual Studio GUIDs. 
If the GUID @@ -1026,7 +1027,7 @@ def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config): # Get the information for this configuration include_dirs, resource_include_dirs = _GetIncludeDirs(config) libraries = _GetLibraries(spec) - out_file, vc_tool, _ = _GetOutputFilePathAndTool(spec) + out_file, vc_tool, _ = _GetOutputFilePathAndTool(spec, msbuild=False) defines = _GetDefines(config) defines = [_EscapeCppDefineForMSVS(d) for d in defines] disabled_warnings = _GetDisabledWarnings(config) @@ -1123,6 +1124,8 @@ def _GetLibraries(spec): unique_libraries_list = [] for entry in reversed(libraries): library = re.sub('^\-l', '', entry) + if not os.path.splitext(library)[1]: + library += '.lib' if library not in found: found.add(library) unique_libraries_list.append(library) @@ -1130,7 +1133,7 @@ def _GetLibraries(spec): return unique_libraries_list -def _GetOutputFilePathAndTool(spec): +def _GetOutputFilePathAndTool(spec, msbuild): """Returns the path and tool to use for this target. Figures out the path of the file this spec will create and the name of @@ -1154,10 +1157,14 @@ def _GetOutputFilePathAndTool(spec): output_file_props = output_file_map.get(spec['type']) if output_file_props and int(spec.get('msvs_auto_output_file', 1)): vc_tool, msbuild_tool, out_dir, suffix = output_file_props + if spec.get('standalone_static_library', 0): + out_dir = '$(OutDir)' out_dir = spec.get('product_dir', out_dir) product_extension = spec.get('product_extension') if product_extension: suffix = '.' + product_extension + elif msbuild: + suffix = '$(TargetExt)' prefix = spec.get('product_prefix', '') product_name = spec.get('product_name', '$(ProjectName)') out_file = ntpath.join(out_dir, prefix + product_name + suffix) @@ -1666,7 +1673,7 @@ def _CreateProjectObjects(target_list, target_dicts, options, msvs_version): build_file = gyp.common.BuildFile(qualified_target) # Create object for this project. obj = MSVSNew.MSVSProject( - _FixPath(proj_path), + proj_path, name=spec['target_name'], guid=guid, spec=spec, @@ -1779,6 +1786,25 @@ def _ShardTargets(target_list, target_dicts): return (new_target_list, new_target_dicts) +def PerformBuild(data, configurations, params): + options = params['options'] + msvs_version = params['msvs_version'] + devenv = os.path.join(msvs_version.path, 'Common7', 'IDE', 'devenv.com') + + for build_file, build_file_dict in data.iteritems(): + (build_file_root, build_file_ext) = os.path.splitext(build_file) + if build_file_ext != '.gyp': + continue + sln_path = build_file_root + options.suffix + '.sln' + if options.generator_output: + sln_path = os.path.join(options.generator_output, sln_path) + + for config in configurations: + arguments = [devenv, sln_path, '/Build', config] + print 'Building [%s]: %s' % (config, arguments) + rtn = subprocess.check_call(arguments) + + def GenerateOutput(target_list, target_dicts, data, params): """Generate .sln and .vcproj files. 
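The _GetLibraries change above accepts both unix-style '-lfoo' entries and bare names without an extension, normalizing everything to a '.lib' file for the MSVS linker; the new msvs_test case below exercises exactly this. The per-entry transformation in isolation (normalize_library is a name chosen here):

import os.path
import re

def normalize_library(entry):
    # Strip a '-l' prefix if present, then make sure the name carries
    # an extension; MSVS expects '.lib'.
    library = re.sub(r'^\-l', '', entry)
    if not os.path.splitext(library)[1]:
        library += '.lib'
    return library

assert normalize_library('-la') == 'a.lib'
assert normalize_library('b.lib') == 'b.lib'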
@@ -2571,13 +2597,13 @@ def _GetMSBuildAttributes(spec, config, build_file): config_type = _GetMSVSConfigurationType(spec, build_file) config_type = _ConvertMSVSConfigurationType(config_type) msbuild_attributes = config.get('msbuild_configuration_attributes', {}) - msbuild_attributes['ConfigurationType'] = config_type + msbuild_attributes.setdefault('ConfigurationType', config_type) output_dir = msbuild_attributes.get('OutputDirectory', - '$(SolutionDir)$(Configuration)\\') - msbuild_attributes['OutputDirectory'] = _FixPath(output_dir) + '$(SolutionDir)$(Configuration)') + msbuild_attributes['OutputDirectory'] = _FixPath(output_dir) + '\\' if 'IntermediateDirectory' not in msbuild_attributes: - intermediate = '$(Configuration)\\' - msbuild_attributes['IntermediateDirectory'] = _FixPath(intermediate) + intermediate = _FixPath('$(Configuration)') + '\\' + msbuild_attributes['IntermediateDirectory'] = intermediate if 'CharacterSet' in msbuild_attributes: msbuild_attributes['CharacterSet'] = _ConvertMSVSCharacterSet( msbuild_attributes['CharacterSet']) @@ -2754,7 +2780,7 @@ def _FinalizeMSBuildSettings(spec, configuration): msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings) include_dirs, resource_include_dirs = _GetIncludeDirs(configuration) libraries = _GetLibraries(spec) - out_file, _, msbuild_tool = _GetOutputFilePathAndTool(spec) + out_file, _, msbuild_tool = _GetOutputFilePathAndTool(spec, msbuild=True) defines = _GetDefines(configuration) if converted: # Visual Studio 2010 has TR1 @@ -3009,7 +3035,7 @@ def _GenerateMSBuildProject(project, options, version, generator_flags): extension_to_rule_name) missing_sources = _VerifySourcesExist(sources, project_dir) - for (_, configuration) in configurations.iteritems(): + for configuration in configurations.itervalues(): _FinalizeMSBuildSettings(spec, configuration) # Add attributes to root element diff --git a/tools/gyp/pylib/gyp/generator/msvs_test.py b/tools/gyp/pylib/gyp/generator/msvs_test.py index 5a69c1c288a..c0b021df502 100755 --- a/tools/gyp/pylib/gyp/generator/msvs_test.py +++ b/tools/gyp/pylib/gyp/generator/msvs_test.py @@ -1,6 +1,5 @@ #!/usr/bin/env python - -# Copyright (c) 2011 Google Inc. All rights reserved. +# Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
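The _GetMSBuildAttributes change above works around path normalization stripping a trailing separator: MSBuild directory properties such as OutputDirectory must end with a backslash, so the value is normalized first and the backslash re-appended afterwards. A sketch under the assumption that _FixPath behaves like ntpath.normpath for these macro-only strings (msbuild_output_directory is a hypothetical helper):

import ntpath

def msbuild_output_directory(output_dir='$(SolutionDir)$(Configuration)'):
    # Normalization drops any trailing '\\'; restore the trailing
    # backslash that MSBuild expects on directory-valued properties.
    return ntpath.normpath(output_dir) + '\\'

assert msbuild_output_directory() == '$(SolutionDir)$(Configuration)\\'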
@@ -26,6 +25,9 @@ def test_GetLibraries(self): self.assertEqual( msvs._GetLibraries({'other':'foo', 'libraries': ['a.lib']}), ['a.lib']) + self.assertEqual( + msvs._GetLibraries({'libraries': ['-la']}), + ['a.lib']) self.assertEqual( msvs._GetLibraries({'libraries': ['a.lib', 'b.lib', 'c.lib', '-lb.lib', '-lb.lib', 'd.lib', 'a.lib']}), diff --git a/tools/gyp/pylib/gyp/generator/ninja.py b/tools/gyp/pylib/gyp/generator/ninja.py index d2b8fdce1cb..fa6bd86ac3f 100644 --- a/tools/gyp/pylib/gyp/generator/ninja.py +++ b/tools/gyp/pylib/gyp/generator/ninja.py @@ -4,15 +4,16 @@ import copy import hashlib +import multiprocessing import os.path import re +import signal import subprocess import sys import gyp import gyp.common import gyp.msvs_emulation import gyp.MSVSVersion -import gyp.system_test import gyp.xcode_emulation from gyp.common import GetEnvironFallback @@ -354,7 +355,8 @@ def WriteCollapsedDependencies(self, name, targets): self.ninja.newline() return targets[0] - def WriteSpec(self, spec, config_name, generator_flags): + def WriteSpec(self, spec, config_name, generator_flags, + case_sensitive_filesystem): """The main entry point for NinjaWriter: write the build rules for a spec. Returns a Target object, which represents the output paths for this spec. @@ -366,6 +368,8 @@ def WriteSpec(self, spec, config_name, generator_flags): self.toolset = spec['toolset'] config = spec['configurations'][config_name] self.target = Target(spec['type']) + self.is_standalone_static_library = bool( + spec.get('standalone_static_library', 0)) self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec) self.xcode_settings = self.msvs_settings = None @@ -374,8 +378,8 @@ def WriteSpec(self, spec, config_name, generator_flags): if self.flavor == 'win': self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, generator_flags) - target_platform = self.msvs_settings.GetTargetPlatform(config_name) - self.ninja.variable('arch', self.win_env[target_platform]) + arch = self.msvs_settings.GetArch(config_name) + self.ninja.variable('arch', self.win_env[arch]) # Compute predepends for all rules. # actions_depends is the dependencies this target depends on before running @@ -421,6 +425,8 @@ def WriteSpec(self, spec, config_name, generator_flags): if sources: pch = None if self.flavor == 'win': + gyp.msvs_emulation.VerifyMissingSources( + sources, self.abs_build_dir, generator_flags, self.GypPathToNinja) pch = gyp.msvs_emulation.PrecompiledHeader( self.msvs_settings, config_name, self.GypPathToNinja) else: @@ -428,7 +434,8 @@ def WriteSpec(self, spec, config_name, generator_flags): self.xcode_settings, self.GypPathToNinja, lambda path, lang: self.GypPathToUniqueOutput(path + '-' + lang)) link_deps = self.WriteSources( - config_name, config, sources, compile_depends_stamp, pch) + config_name, config, sources, compile_depends_stamp, pch, + case_sensitive_filesystem, spec) # Some actions/rules output 'sources' that are already object files. 
link_deps += [self.GypPathToNinja(f) for f in sources if f.endswith(self.obj_ext)] @@ -502,7 +509,7 @@ def WriteActionsRulesCopies(self, spec, extra_sources, prebuild, outputs += self.WriteRules(spec['rules'], extra_sources, prebuild, extra_mac_bundle_resources) if 'copies' in spec: - outputs += self.WriteCopies(spec['copies'], prebuild) + outputs += self.WriteCopies(spec['copies'], prebuild, mac_bundle_depends) if 'sources' in spec and self.flavor == 'win': outputs += self.WriteWinIdlFiles(spec, prebuild) @@ -549,11 +556,8 @@ def WriteActions(self, actions, extra_sources, prebuild, is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(action) if self.flavor == 'win' else False) args = action['action'] - args = [self.msvs_settings.ConvertVSMacros( - arg, self.base_to_build, config=self.config_name) - for arg in args] if self.flavor == 'win' else args - rule_name = self.WriteNewNinjaRule(name, args, description, - is_cygwin, env=env) + rule_name, _ = self.WriteNewNinjaRule(name, args, description, + is_cygwin, env=env) inputs = [self.GypPathToNinja(i, env) for i in action['inputs']] if int(action.get('process_outputs_as_sources', False)): @@ -573,6 +577,7 @@ def WriteActions(self, actions, extra_sources, prebuild, def WriteRules(self, rules, extra_sources, prebuild, extra_mac_bundle_resources): + env = self.GetSortedXcodeEnv() all_outputs = [] for rule in rules: # First write out a rule for the rule action. @@ -588,10 +593,8 @@ def WriteRules(self, rules, extra_sources, prebuild, ('%s ' + generator_default_variables['RULE_INPUT_PATH']) % name) is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(rule) if self.flavor == 'win' else False) - args = [self.msvs_settings.ConvertVSMacros( - arg, self.base_to_build, config=self.config_name) - for arg in args] if self.flavor == 'win' else args - rule_name = self.WriteNewNinjaRule(name, args, description, is_cygwin) + rule_name, args = self.WriteNewNinjaRule( + name, args, description, is_cygwin, env=env) # TODO: if the command references the outputs directly, we should # simplify it to just use $out. @@ -648,10 +651,10 @@ def cygwin_munge(path): else: assert var == None, repr(var) - inputs = map(self.GypPathToNinja, inputs) - outputs = map(self.GypPathToNinja, outputs) + inputs = [self.GypPathToNinja(i, env) for i in inputs] + outputs = [self.GypPathToNinja(o, env) for o in outputs] extra_bindings.append(('unique_name', - re.sub('[^a-zA-Z0-9_]', '_', outputs[0]))) + hashlib.md5(outputs[0]).hexdigest())) self.ninja.build(outputs, rule_name, self.GypPathToNinja(source), implicit=inputs, order_only=prebuild, @@ -661,7 +664,7 @@ def cygwin_munge(path): return all_outputs - def WriteCopies(self, copies, prebuild): + def WriteCopies(self, copies, prebuild, mac_bundle_depends): outputs = [] env = self.GetSortedXcodeEnv() for copy in copies: @@ -673,6 +676,15 @@ def WriteCopies(self, copies, prebuild): dst = self.GypPathToNinja(os.path.join(copy['destination'], basename), env) outputs += self.ninja.build(dst, 'copy', src, order_only=prebuild) + if self.is_mac_bundle: + # gyp has mac_bundle_resources to copy things into a bundle's + # Resources folder, but there's no built-in way to copy files to other + # places in the bundle. Hence, some targets use copies for this. Check + # if this file is copied into the current bundle, and if so add it to + # the bundle depends so that dependent targets get rebuilt if the copy + # input changes. 
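The lowercasing of object paths above addresses ninja's depfile handling on case-insensitive file systems: after a case-only rename of a source file, 'Foo.o' and 'foo.o' get recorded as distinct outputs and incremental builds misbehave (see the linked ninja issue 402). Isolated, with the detection done the way GenerateOutputForConfig does further down (object_output_path is a name chosen here):

import os

def object_output_path(output, case_sensitive_filesystem):
    # On a case-insensitive file system, force every object path to
    # lowercase so a case-only rename of a source cannot produce two
    # spellings of the same .o in ninja's dependency log.
    if not case_sensitive_filesystem:
        output = output.lower()
    return output

# Detection, as done later in the generator: if 'BUILD.NINJA' resolves
# to an existing 'build.ninja', the file system folds case.
# case_sensitive = not os.path.exists(os.path.join(build_dir, 'BUILD.NINJA'))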
+ if dst.startswith(self.xcode_settings.GetBundleContentsFolderPath()): + mac_bundle_depends.append(dst) return outputs @@ -709,7 +721,7 @@ def WriteMacInfoPlist(self, bundle_depends): bundle_depends.append(out) def WriteSources(self, config_name, config, sources, predepends, - precompiled_header): + precompiled_header, case_sensitive_filesystem, spec): """Write build rules to compile all of |sources|.""" if self.toolset == 'host': self.ninja.variable('ar', '$ar_host') @@ -781,10 +793,13 @@ def WriteSources(self, config_name, config, sources, predepends, obj_ext = self.obj_ext if ext in ('cc', 'cpp', 'cxx'): command = 'cxx' - elif ext == 'c' or (ext in ('s', 'S') and self.flavor != 'win'): + elif ext == 'c' or (ext == 'S' and self.flavor != 'win'): command = 'cc' + elif ext == 's' and self.flavor != 'win': # Doesn't generate .o.d files. + command = 'cc_s' elif (self.flavor == 'win' and ext == 'asm' and - self.msvs_settings.GetTargetPlatform(config_name) == 'Win32'): + self.msvs_settings.GetArch(config_name) == 'x86' and + not self.msvs_settings.HasExplicitAsmRules(spec)): # Asm files only get auto assembled for x86 (not x64). command = 'asm' # Add the _asm suffix as msvs is capable of handling .cc and @@ -802,6 +817,12 @@ def WriteSources(self, config_name, config, sources, predepends, continue input = self.GypPathToNinja(source) output = self.GypPathToUniqueOutput(filename + obj_ext) + # Ninja's depfile handling gets confused when the case of a filename + # changes on a case-insensitive file system. To work around that, always + # convert .o filenames to lowercase on such file systems. See + # https://github.com/martine/ninja/issues/402 for details. + if not case_sensitive_filesystem: + output = output.lower() implicit = precompiled_header.GetObjDependencies([input], [output]) self.ninja.build(output, command, input, implicit=[gch for _, _, gch in implicit], @@ -918,10 +939,12 @@ def WriteLink(self, spec, config_name, config, link_deps): extra_bindings.append(('lib', gyp.common.EncodePOSIXShellArgument(output))) if self.flavor == 'win': - self.target.import_lib = output + '.lib' extra_bindings.append(('dll', output)) - extra_bindings.append(('implib', self.target.import_lib)) - output = [output, self.target.import_lib] + if '/NOENTRY' not in ldflags: + self.target.import_lib = output + '.lib' + extra_bindings.append(('implibflag', + '/IMPLIB:%s' % self.target.import_lib)) + output = [output, self.target.import_lib] else: output = [output, output + '.TOC'] @@ -939,10 +962,21 @@ def WriteTarget(self, spec, config_name, config, link_deps, compile_deps): self.target.binary = compile_deps elif spec['type'] == 'static_library': self.target.binary = self.ComputeOutput(spec) - self.ninja.build(self.target.binary, 'alink', link_deps, - order_only=compile_deps, - variables=[('postbuilds', self.GetPostbuildCommand( - spec, self.target.binary, self.target.binary))]) + variables = [] + postbuild = self.GetPostbuildCommand( + spec, self.target.binary, self.target.binary) + if postbuild: + variables.append(('postbuilds', postbuild)) + if self.xcode_settings: + variables.append(('libtool_flags', + self.xcode_settings.GetLibtoolflags(config_name))) + if (self.flavor not in ('mac', 'win') and not + self.is_standalone_static_library): + self.ninja.build(self.target.binary, 'alink_thin', link_deps, + order_only=compile_deps, variables=variables) + else: + self.ninja.build(self.target.binary, 'alink', link_deps, + order_only=compile_deps, variables=variables) else: self.WriteLink(spec, config_name, config, 
link_deps) return self.target.binary @@ -1126,7 +1160,7 @@ def ComputeOutput(self, spec, type=None): elif self.flavor == 'win' and self.toolset == 'target': type_in_output_root += ['shared_library'] - if type in type_in_output_root: + if type in type_in_output_root or self.is_standalone_static_library: return filename elif type == 'shared_library': libdir = 'lib' @@ -1142,10 +1176,22 @@ def WriteVariableList(self, var, values): values = [] self.ninja.variable(var, ' '.join(values)) - def WriteNewNinjaRule(self, name, args, description, is_cygwin, env={}): + def WriteNewNinjaRule(self, name, args, description, is_cygwin, env): """Write out a new ninja "rule" statement for a given command. - Returns the name of the new rule.""" + Returns the name of the new rule, and a copy of |args| with variables + expanded.""" + + if self.flavor == 'win': + args = [self.msvs_settings.ConvertVSMacros( + arg, self.base_to_build, config=self.config_name) + for arg in args] + description = self.msvs_settings.ConvertVSMacros( + description, config=self.config_name) + elif self.flavor == 'mac': + # |env| is an empty list on non-mac. + args = [gyp.xcode_emulation.ExpandEnvVars(arg, env) for arg in args] + description = gyp.xcode_emulation.ExpandEnvVars(description, env) # TODO: we shouldn't need to qualify names; we do it because # currently the ninja rule namespace is global, but it really @@ -1156,11 +1202,12 @@ def WriteNewNinjaRule(self, name, args, description, is_cygwin, env={}): rule_name += '.' + name rule_name = re.sub('[^a-zA-Z0-9_]', '_', rule_name) - args = args[:] - - if self.flavor == 'win': - description = self.msvs_settings.ConvertVSMacros( - description, config=self.config_name) + # Remove variable references, but not if they refer to the magic rule + # variables. This is not quite right, as it also protects these for + # actions, not just for rules where they are valid. Good enough. + protect = [ '${root}', '${dirname}', '${source}', '${ext}', '${name}' ] + protect = '(?!' + '|'.join(map(re.escape, protect)) + ')' + description = re.sub(protect + r'\$', '_', description) # gyp dictates that commands are run from the base directory. # cd into the directory before running, and adjust paths in @@ -1182,10 +1229,6 @@ def WriteNewNinjaRule(self, name, args, description, is_cygwin, env={}): else: env = self.ComputeExportEnvString(env) command = gyp.common.EncodePOSIXShellList(args) - if env: - # If an environment is passed in, variables in the command should be - # read from it, instead of from ninja's internal variables. - command = ninja_syntax.escape(command) command = 'cd %s; ' % self.build_to_base + env + command # GYP rules/actions express being no-ops by not touching their outputs. @@ -1195,7 +1238,7 @@ def WriteNewNinjaRule(self, name, args, description, is_cygwin, env={}): rspfile=rspfile, rspfile_content=rspfile_content) self.ninja.newline() - return rule_name + return rule_name, args def CalculateVariables(default_variables, params): @@ -1278,16 +1321,26 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, flavor = gyp.common.GetFlavor(params) generator_flags = params.get('generator_flags', {}) + # generator_dir: relative path from pwd to where make puts build files. + # Makes migrating from make to ninja easier, ninja doesn't put anything here. + generator_dir = os.path.relpath(params['options'].generator_output or '.') + + # output_dir: relative path from generator_dir to the build directory. 
+ output_dir = generator_flags.get('output_dir', 'out') + # build_dir: relative path from source root to our output files. # e.g. "out/Debug" - build_dir = os.path.join(generator_flags.get('output_dir', 'out'), - config_name) + build_dir = os.path.normpath(os.path.join(generator_dir, + output_dir, + config_name)) toplevel_build = os.path.join(options.toplevel_dir, build_dir) master_ninja = ninja_syntax.Writer( OpenOutput(os.path.join(toplevel_build, 'build.ninja')), width=120) + case_sensitive_filesystem = not os.path.exists( + os.path.join(toplevel_build, 'BUILD.NINJA')) # Put build-time support tools in out/{config_name}. gyp.common.CopyTool(flavor, toplevel_build) @@ -1380,8 +1433,6 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, else: master_ninja.variable('ld_host', flock + ' linker.lock ' + ld_host) - if flavor == 'mac': - master_ninja.variable('mac_tool', os.path.join('.', 'gyp-mac-tool')) master_ninja.newline() if flavor != 'win': @@ -1391,6 +1442,11 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c ' '$cflags_pch_c -c $in -o $out'), depfile='$out.d') + master_ninja.rule( + 'cc_s', + description='CC $out', + command=('$cc $defines $includes $cflags $cflags_c ' + '$cflags_pch_c -c $in -o $out')) master_ninja.rule( 'cxx', description='CXX $out', @@ -1398,19 +1454,17 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, '$cflags_pch_cc -c $in -o $out'), depfile='$out.d') else: - # TODO(scottmg): Requires fork of ninja for dependency and linking - # support: https://github.com/sgraham/ninja # Template for compile commands mostly shared between compiling files # and generating PCH. In the case of PCH, the "output" is specified by /Fp # rather than /Fo (for object files), but we still need to specify an /Fo # when compiling PCH. - cc_template = ('ninja-deplist-helper -r . -q -f cl -o $out.dl -e $arch ' - '--command ' + cc_template = ('ninja -t msvc -r . -o $out -e $arch ' + '-- ' '$cc /nologo /showIncludes /FC ' '@$out.rsp ' '$cflags_pch_c /c $in %(outspec)s /Fd$pdbname ') - cxx_template = ('ninja-deplist-helper -r . -q -f cl -o $out.dl -e $arch ' - '--command ' + cxx_template = ('ninja -t msvc -r . 
-o $out -e $arch ' + '-- ' '$cxx /nologo /showIncludes /FC ' '@$out.rsp ' '$cflags_pch_cc /c $in %(outspec)s $pchobj /Fd$pdbname ') @@ -1418,28 +1472,28 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, 'cc', description='CC $out', command=cc_template % {'outspec': '/Fo$out'}, - depfile='$out.dl', + depfile='$out.d', rspfile='$out.rsp', rspfile_content='$defines $includes $cflags $cflags_c') master_ninja.rule( 'cc_pch', description='CC PCH $out', command=cc_template % {'outspec': '/Fp$out /Fo$out.obj'}, - depfile='$out.dl', + depfile='$out.d', rspfile='$out.rsp', rspfile_content='$defines $includes $cflags $cflags_c') master_ninja.rule( 'cxx', description='CXX $out', command=cxx_template % {'outspec': '/Fo$out'}, - depfile='$out.dl', + depfile='$out.d', rspfile='$out.rsp', rspfile_content='$defines $includes $cflags $cflags_cc') master_ninja.rule( 'cxx_pch', description='CXX PCH $out', command=cxx_template % {'outspec': '/Fp$out /Fo$out.obj'}, - depfile='$out.dl', + depfile='$out.d', rspfile='$out.rsp', rspfile_content='$defines $includes $cflags $cflags_cc') master_ninja.rule( @@ -1466,6 +1520,10 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, master_ninja.rule( 'alink', description='AR $out', + command='rm -f $out && $ar rcs $out $in') + master_ninja.rule( + 'alink_thin', + description='AR $out', command='rm -f $out && $ar rcsT $out $in') # This allows targets that only need to depend on $lib's API to declare an @@ -1514,7 +1572,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, rspfile_content='$in_newline $libflags') dlldesc = 'LINK(DLL) $dll' dllcmd = ('%s gyp-win-tool link-wrapper $arch ' - '$ld /nologo /IMPLIB:$implib /DLL /OUT:$dll ' + '$ld /nologo $implibflag /DLL /OUT:$dll ' '/PDB:$dll.pdb @$dll.rsp' % sys.executable) dllcmd += (' && %s gyp-win-tool manifest-wrapper $arch ' '$mt -nologo -manifest $manifests -out:$dll.manifest' % @@ -1556,7 +1614,8 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, 'alink', description='LIBTOOL-STATIC $out, POSTBUILDS', command='rm -f $out && ' - './gyp-mac-tool filter-libtool libtool -static -o $out $in' + './gyp-mac-tool filter-libtool libtool $libtool_flags ' + '-static -o $out $in' '$postbuilds') # Record the public interface of $lib in $lib.TOC. 
See the corresponding @@ -1607,11 +1666,11 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, master_ninja.rule( 'mac_tool', description='MACTOOL $mactool_cmd $in', - command='$env $mac_tool $mactool_cmd $in $out') + command='$env ./gyp-mac-tool $mactool_cmd $in $out') master_ninja.rule( 'package_framework', description='PACKAGE FRAMEWORK $out, POSTBUILDS', - command='$mac_tool package-framework $out $version$postbuilds ' + command='./gyp-mac-tool package-framework $out $version$postbuilds ' '&& touch $out') if flavor == 'win': master_ninja.rule( @@ -1673,7 +1732,8 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, flavor, abs_build_dir=abs_build_dir) master_ninja.subninja(output_file) - target = writer.WriteSpec(spec, config_name, generator_flags) + target = writer.WriteSpec( + spec, config_name, generator_flags, case_sensitive_filesystem) if target: if name != target.FinalOutput() and spec['toolset'] == 'target': target_short_names.setdefault(name, []).append(target) @@ -1694,19 +1754,46 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, if all_outputs: master_ninja.newline() master_ninja.build('all', 'phony', list(all_outputs)) - master_ninja.default('all') + master_ninja.default(generator_flags.get('default_target', 'all')) -def GenerateOutput(target_list, target_dicts, data, params): - if params['options'].generator_output: - raise NotImplementedError, "--generator_output not implemented for ninja" +def PerformBuild(data, configurations, params): + options = params['options'] + for config in configurations: + builddir = os.path.join(options.toplevel_dir, 'out', config) + arguments = ['ninja', '-C', builddir] + print 'Building [%s]: %s' % (config, arguments) + subprocess.check_call(arguments) + + +def CallGenerateOutputForConfig(arglist): + # Ignore the interrupt signal so that the parent process catches it and + # kills all multiprocessing children. 
+ signal.signal(signal.SIGINT, signal.SIG_IGN) + + (target_list, target_dicts, data, params, config_name) = arglist + GenerateOutputForConfig(target_list, target_dicts, data, params, config_name) + +def GenerateOutput(target_list, target_dicts, data, params): user_config = params.get('generator_flags', {}).get('config', None) if user_config: GenerateOutputForConfig(target_list, target_dicts, data, params, user_config) else: config_names = target_dicts[target_list[0]]['configurations'].keys() - for config_name in config_names: - GenerateOutputForConfig(target_list, target_dicts, data, params, - config_name) + if params['parallel']: + try: + pool = multiprocessing.Pool(len(config_names)) + arglists = [] + for config_name in config_names: + arglists.append( + (target_list, target_dicts, data, params, config_name)) + pool.map(CallGenerateOutputForConfig, arglists) + except KeyboardInterrupt, e: + pool.terminate() + raise e + else: + for config_name in config_names: + GenerateOutputForConfig(target_list, target_dicts, data, params, + config_name) diff --git a/tools/gyp/pylib/gyp/generator/scons.py b/tools/gyp/pylib/gyp/generator/scons.py index 4d0feb0c9a4..fe7cb581b3e 100644 --- a/tools/gyp/pylib/gyp/generator/scons.py +++ b/tools/gyp/pylib/gyp/generator/scons.py @@ -8,6 +8,7 @@ import os.path import pprint import re +import subprocess # TODO: remove when we delete the last WriteList() call in this module @@ -960,6 +961,30 @@ def TargetFilename(target, build_file=None, output_suffix=''): return output_file +def PerformBuild(data, configurations, params): + options = params['options'] + + # Due to the way we test gyp on the chromium typbots + # we need to look for 'scons.py' as well as the more common 'scons' + # TODO(sbc): update the trybots to have a more normal install + # of scons. + scons = 'scons' + paths = os.environ['PATH'].split(os.pathsep) + for scons_name in ['scons', 'scons.py']: + for path in paths: + test_scons = os.path.join(path, scons_name) + print 'looking for: %s' % test_scons + if os.path.exists(test_scons): + print "found scons: %s" % scons + scons = test_scons + break + + for config in configurations: + arguments = [scons, '-C', options.toplevel_dir, '--mode=%s' % config] + print "Building [%s]: %s" % (config, arguments) + subprocess.check_call(arguments) + + def GenerateOutput(target_list, target_dicts, data, params): """ Generates all the output files for the specified targets. 
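CallGenerateOutputForConfig above starts by ignoring SIGINT so that Ctrl-C is delivered only to the parent process, which then terminates the whole pool; without this, every worker raises KeyboardInterrupt at once and the pool can wedge. The same pattern in a self-contained form (a sketch, using a pool initializer rather than setting the handler inside the worker body as the patch does; generate_in_parallel is a name chosen here):

import multiprocessing
import signal

def _ignore_sigint():
    # Runs in each worker: leave interrupt handling to the parent.
    signal.signal(signal.SIGINT, signal.SIG_IGN)

def generate_in_parallel(func, arglists):
    pool = multiprocessing.Pool(len(arglists), initializer=_ignore_sigint)
    try:
        pool.map(func, arglists)
    except KeyboardInterrupt:
        pool.terminate()
        raise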
diff --git a/tools/gyp/pylib/gyp/generator/xcode.py b/tools/gyp/pylib/gyp/generator/xcode.py index 9ea4fbdff06..7b21bae8a98 100644 --- a/tools/gyp/pylib/gyp/generator/xcode.py +++ b/tools/gyp/pylib/gyp/generator/xcode.py @@ -587,6 +587,25 @@ def EscapeXCodeArgument(s): return '"' + s + '"' + +def PerformBuild(data, configurations, params): + options = params['options'] + + for build_file, build_file_dict in data.iteritems(): + (build_file_root, build_file_ext) = os.path.splitext(build_file) + if build_file_ext != '.gyp': + continue + xcodeproj_path = build_file_root + options.suffix + '.xcodeproj' + if options.generator_output: + xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path) + + for config in configurations: + arguments = ['xcodebuild', '-project', xcodeproj_path] + arguments += ['-configuration', config] + print "Building [%s]: %s" % (config, arguments) + subprocess.check_call(arguments) + + def GenerateOutput(target_list, target_dicts, data, params): options = params['options'] generator_flags = params.get('generator_flags', {}) diff --git a/tools/gyp/pylib/gyp/input.py b/tools/gyp/pylib/gyp/input.py index 2678bab2c42..65236671f97 100644 --- a/tools/gyp/pylib/gyp/input.py +++ b/tools/gyp/pylib/gyp/input.py @@ -12,12 +12,17 @@ import compiler import copy import gyp.common +import multiprocessing import optparse import os.path import re import shlex +import signal import subprocess import sys +import threading +import time +from gyp.common import GypError # A list of types that are treated as linkable. @@ -79,6 +84,7 @@ def IsPathSection(section): 'rules', 'run_as', 'sources', + 'standalone_static_library', 'suppress_wildcard', 'target_name', 'toolset', @@ -102,6 +108,7 @@ def IsPathSection(section): 'libraries', 'link_settings', 'sources', + 'standalone_static_library', 'target_name', 'type', ] @@ -175,9 +182,9 @@ def CheckNode(node, keypath): assert isinstance(c[n], Const) key = c[n].getChildren()[0] if key in dict: - raise KeyError, "Key '" + key + "' repeated at level " + \ - repr(len(keypath) + 1) + " with key path '" + \ - '.'.join(keypath) + "'" + raise GypError("Key '" + key + "' repeated at level " + + repr(len(keypath) + 1) + " with key path '" + + '.'.join(keypath) + "'") kp = list(keypath) # Make a copy of the list for descending this node. kp.append(key) dict[key] = CheckNode(c[n + 1], kp) @@ -205,7 +212,7 @@ def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes, if os.path.exists(build_file_path): build_file_contents = open(build_file_path).read() else: - raise Exception("%s not found (cwd: %s)" % (build_file_path, os.getcwd())) + raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd())) build_file_data = None try: @@ -329,7 +336,7 @@ def ProcessToolsetsInDict(data): # a build file that contains targets and is expected to provide a targets dict # that contains the targets... def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, - depth, check): + depth, check, load_dependencies): # If depth is set, predefine the DEPTH variable to be a relative path from # this build file's directory to the directory identified by depth. if depth: @@ -348,7 +355,7 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, if build_file_path in data['target_build_files']: # Already loaded. 
- return + return False data['target_build_files'].add(build_file_path) gyp.DebugOutput(gyp.DEBUG_INCLUDES, @@ -363,7 +370,7 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, # Set up the included_files key indicating which .gyp files contributed to # this target dict. if 'included_files' in build_file_data: - raise KeyError, build_file_path + ' must not contain included_files key' + raise GypError(build_file_path + ' must not contain included_files key') included = GetIncludedBuildFiles(build_file_path, aux_data) build_file_data['included_files'] = [] @@ -390,25 +397,25 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, # Look at each project's target_defaults dict, and merge settings into # targets. if 'target_defaults' in build_file_data: + if 'targets' not in build_file_data: + raise GypError("Unable to find targets in build file %s" % + build_file_path) + index = 0 - if 'targets' in build_file_data: - while index < len(build_file_data['targets']): - # This procedure needs to give the impression that target_defaults is - # used as defaults, and the individual targets inherit from that. - # The individual targets need to be merged into the defaults. Make - # a deep copy of the defaults for each target, merge the target dict - # as found in the input file into that copy, and then hook up the - # copy with the target-specific data merged into it as the replacement - # target dict. - old_target_dict = build_file_data['targets'][index] - new_target_dict = copy.deepcopy(build_file_data['target_defaults']) - MergeDicts(new_target_dict, old_target_dict, - build_file_path, build_file_path) - build_file_data['targets'][index] = new_target_dict - index = index + 1 - else: - raise Exception, \ - "Unable to find targets in build file %s" % build_file_path + while index < len(build_file_data['targets']): + # This procedure needs to give the impression that target_defaults is + # used as defaults, and the individual targets inherit from that. + # The individual targets need to be merged into the defaults. Make + # a deep copy of the defaults for each target, merge the target dict + # as found in the input file into that copy, and then hook up the + # copy with the target-specific data merged into it as the replacement + # target dict. + old_target_dict = build_file_data['targets'][index] + new_target_dict = copy.deepcopy(build_file_data['target_defaults']) + MergeDicts(new_target_dict, old_target_dict, + build_file_path, build_file_path) + build_file_data['targets'][index] = new_target_dict + index += 1 # No longer needed. del build_file_data['target_defaults'] @@ -418,22 +425,182 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, # in other words, you can't put a "dependencies" section inside a "post" # conditional within a target. 
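The rewritten target_defaults block above is easier to follow with the data shape in view: each target begins as a deep copy of target_defaults, and the target's own settings are merged on top, so per-target values win. A minimal sketch, with SimpleMerge standing in for gyp's much richer MergeDicts (which also appends lists and honors the '=', '?', and '+' key suffixes):

    import copy

    def SimpleMerge(to, fro):
      # Simplified stand-in for MergeDicts: later values replace earlier ones.
      for key, value in fro.iteritems():
        to[key] = value

    build_file_data = {
      'target_defaults': {'type': 'static_library', 'defines': ['COMMON']},
      'targets': [{'target_name': 'foo', 'type': 'executable'}],
    }

    index = 0
    while index < len(build_file_data['targets']):
      old_target_dict = build_file_data['targets'][index]
      new_target_dict = copy.deepcopy(build_file_data['target_defaults'])
      SimpleMerge(new_target_dict, old_target_dict)
      build_file_data['targets'][index] = new_target_dict
      index += 1
    del build_file_data['target_defaults']

    # 'foo' keeps its own type ('executable') and inherits defines ['COMMON'].
    print build_file_data['targets'][0]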
+ dependencies = [] if 'targets' in build_file_data: for target_dict in build_file_data['targets']: if 'dependencies' not in target_dict: continue for dependency in target_dict['dependencies']: - other_build_file = \ - gyp.common.ResolveTarget(build_file_path, dependency, None)[0] - try: - LoadTargetBuildFile(other_build_file, data, aux_data, variables, - includes, depth, check) - except Exception, e: - gyp.common.ExceptionAppend( - e, 'while loading dependencies of %s' % build_file_path) - raise + dependencies.append( + gyp.common.ResolveTarget(build_file_path, dependency, None)[0]) + + if load_dependencies: + for dependency in dependencies: + try: + LoadTargetBuildFile(dependency, data, aux_data, variables, + includes, depth, check, load_dependencies) + except Exception, e: + gyp.common.ExceptionAppend( + e, 'while loading dependencies of %s' % build_file_path) + raise + else: + return (build_file_path, dependencies) + + +def CallLoadTargetBuildFile(global_flags, + build_file_path, data, + aux_data, variables, + includes, depth, check): + """Wrapper around LoadTargetBuildFile for parallel processing. + + This wrapper is used when LoadTargetBuildFile is executed in + a worker process. + """ + + try: + signal.signal(signal.SIGINT, signal.SIG_IGN) + + # Apply globals so that the worker process behaves the same. + for key, value in global_flags.iteritems(): + globals()[key] = value + + # Save the keys so we can return data that changed. + data_keys = set(data) + aux_data_keys = set(aux_data) + + result = LoadTargetBuildFile(build_file_path, data, + aux_data, variables, + includes, depth, check, False) + if not result: + return result + + (build_file_path, dependencies) = result + + data_out = {} + for key in data: + if key == 'target_build_files': + continue + if key not in data_keys: + data_out[key] = data[key] + aux_data_out = {} + for key in aux_data: + if key not in aux_data_keys: + aux_data_out[key] = aux_data[key] + + # This gets serialized and sent back to the main process via a pipe. + # It's handled in LoadTargetBuildFileCallback. + return (build_file_path, + data_out, + aux_data_out, + dependencies) + except Exception, e: + print "Exception: ", e + return None + + +class ParallelProcessingError(Exception): + pass + + +class ParallelState(object): + """Class to keep track of state when processing input files in parallel. + + If build files are loaded in parallel, use this to keep track of + state during farming out and processing parallel jobs. It's stored + in a global so that the callback function can have access to it. + """ + + def __init__(self): + # The multiprocessing pool. + self.pool = None + # The condition variable used to protect this object and notify + # the main loop when there might be more data to process. + self.condition = None + # The "data" dict that was passed to LoadTargetBuildFileParallel + self.data = None + # The "aux_data" dict that was passed to LoadTargetBuildFileParallel + self.aux_data = None + # The number of parallel calls outstanding; decremented when a response + # was received. + self.pending = 0 + # The set of all build files that have been scheduled, so we don't + # schedule the same one twice. + self.scheduled = set() + # A list of dependency build file paths that haven't been scheduled yet. + self.dependencies = [] + # Flag to indicate if there was an error in a child process. + self.error = False - return data + def LoadTargetBuildFileCallback(self, result): + """Handle the results of running LoadTargetBuildFile in another process. 
+ """ + self.condition.acquire() + if not result: + self.error = True + self.condition.notify() + self.condition.release() + return + (build_file_path0, data0, aux_data0, dependencies0) = result + self.data['target_build_files'].add(build_file_path0) + for key in data0: + self.data[key] = data0[key] + for key in aux_data0: + self.aux_data[key] = aux_data0[key] + for new_dependency in dependencies0: + if new_dependency not in self.scheduled: + self.scheduled.add(new_dependency) + self.dependencies.append(new_dependency) + self.pending -= 1 + self.condition.notify() + self.condition.release() + + +def LoadTargetBuildFileParallel(build_file_path, data, aux_data, + variables, includes, depth, check): + parallel_state = ParallelState() + parallel_state.condition = threading.Condition() + parallel_state.dependencies = [build_file_path] + parallel_state.scheduled = set([build_file_path]) + parallel_state.pending = 0 + parallel_state.data = data + parallel_state.aux_data = aux_data + + try: + parallel_state.condition.acquire() + while parallel_state.dependencies or parallel_state.pending: + if parallel_state.error: + break + if not parallel_state.dependencies: + parallel_state.condition.wait() + continue + + dependency = parallel_state.dependencies.pop() + + parallel_state.pending += 1 + data_in = {} + data_in['target_build_files'] = data['target_build_files'] + aux_data_in = {} + global_flags = { + 'path_sections': globals()['path_sections'], + 'non_configuration_keys': globals()['non_configuration_keys'], + 'absolute_build_file_paths': globals()['absolute_build_file_paths'], + 'multiple_toolsets': globals()['multiple_toolsets']} + + if not parallel_state.pool: + parallel_state.pool = multiprocessing.Pool(8) + parallel_state.pool.apply_async( + CallLoadTargetBuildFile, + args = (global_flags, dependency, + data_in, aux_data_in, + variables, includes, depth, check), + callback = parallel_state.LoadTargetBuildFileCallback) + except KeyboardInterrupt, e: + parallel_state.pool.terminate() + raise e + + parallel_state.condition.release() + if parallel_state.error: + sys.exit() # Look for the bracket that matches the first bracket seen in a @@ -693,8 +860,8 @@ def ExpandVariables(input, phase, variables, build_file): os.chdir(oldwd) assert replacement != None elif command_string: - raise Exception("Unknown command string '%s' in '%s'." % - (command_string, contents)) + raise GypError("Unknown command string '%s' in '%s'." % + (command_string, contents)) else: # Fix up command with platform specific workarounds. contents = FixupPlatformCommand(contents) @@ -710,8 +877,8 @@ def ExpandVariables(input, phase, variables, build_file): sys.stderr.write(p_stderr) # Simulate check_call behavior, since check_call only exists # in python 2.5 and later. - raise Exception("Call to '%s' returned exit status %d." % - (contents, p.returncode)) + raise GypError("Call to '%s' returned exit status %d." 
% + (contents, p.returncode)) replacement = p_stdout.rstrip() cached_command_results[cache_key] = replacement @@ -735,8 +902,8 @@ def ExpandVariables(input, phase, variables, build_file): # ], replacement = [] else: - raise KeyError, 'Undefined variable ' + contents + \ - ' in ' + build_file + raise GypError('Undefined variable ' + contents + + ' in ' + build_file) else: replacement = variables[contents] @@ -744,10 +911,10 @@ def ExpandVariables(input, phase, variables, build_file): for item in replacement: if (not contents[-1] == '/' and not isinstance(item, str) and not isinstance(item, int)): - raise TypeError, 'Variable ' + contents + \ - ' must expand to a string or list of strings; ' + \ - 'list contains a ' + \ - item.__class__.__name__ + raise GypError('Variable ' + contents + + ' must expand to a string or list of strings; ' + + 'list contains a ' + + item.__class__.__name__) # Run through the list and handle variable expansions in it. Since # the list is guaranteed not to contain dicts, this won't do anything # with conditions sections. @@ -755,9 +922,9 @@ def ExpandVariables(input, phase, variables, build_file): build_file) elif not isinstance(replacement, str) and \ not isinstance(replacement, int): - raise TypeError, 'Variable ' + contents + \ - ' must expand to a string or list of strings; ' + \ - 'found a ' + replacement.__class__.__name__ + raise GypError('Variable ' + contents + + ' must expand to a string or list of strings; ' + + 'found a ' + replacement.__class__.__name__) if expand_to_list: # Expanding in list context. It's guaranteed that there's only one @@ -855,12 +1022,12 @@ def ProcessConditionsInDict(the_dict, phase, variables, build_file): for condition in conditions_list: if not isinstance(condition, list): - raise TypeError, conditions_key + ' must be a list' + raise GypError(conditions_key + ' must be a list') if len(condition) != 2 and len(condition) != 3: # It's possible that condition[0] won't work in which case this # attempt will raise its own IndexError. That's probably fine. - raise IndexError, conditions_key + ' ' + condition[0] + \ - ' must be length 2 or 3, not ' + str(len(condition)) + raise GypError(conditions_key + ' ' + condition[0] + + ' must be length 2 or 3, not ' + str(len(condition))) [cond_expr, true_dict] = condition[0:2] false_dict = None @@ -1110,7 +1277,7 @@ def BuildTargetsDict(data): target['target_name'], target['toolset']) if target_name in targets: - raise KeyError, 'Duplicate target definitions for ' + target_name + raise GypError('Duplicate target definitions for ' + target_name) targets[target_name] = target return targets @@ -1151,8 +1318,8 @@ def QualifyDependencies(targets): # appears in the "dependencies" list. if dependency_key != 'dependencies' and \ dependency not in target_dict['dependencies']: - raise KeyError, 'Found ' + dependency + ' in ' + dependency_key + \ - ' of ' + target + ', but not in dependencies' + raise GypError('Found ' + dependency + ' in ' + dependency_key + + ' of ' + target + ', but not in dependencies') def ExpandWildcardDependencies(targets, data): @@ -1191,8 +1358,8 @@ def ExpandWildcardDependencies(targets, data): if dependency_build_file == target_build_file: # It's an error for a target to depend on all other targets in # the same file, because a target cannot depend on itself. 
- raise KeyError, 'Found wildcard in ' + dependency_key + ' of ' + \ - target + ' referring to same build file' + raise GypError('Found wildcard in ' + dependency_key + ' of ' + + target + ' referring to same build file') # Take the wildcard out and adjust the index so that the next # dependency in the list will be processed the next time through the @@ -1249,7 +1416,7 @@ class DependencyGraphNode(object): dependents: List of DependencyGraphNodes that depend on this one. """ - class CircularException(Exception): + class CircularException(GypError): pass def __init__(self, ref): @@ -1396,14 +1563,14 @@ def LinkDependencies(self, targets, dependencies=None, initial=True): # but that's presently the easiest way to access the target dicts so that # this function can find target types. - if not 'target_name' in targets[self.ref]: - raise Exception("Missing 'target_name' field in target.") + if 'target_name' not in targets[self.ref]: + raise GypError("Missing 'target_name' field in target.") - try: - target_type = targets[self.ref]['type'] - except KeyError, e: - raise Exception("Missing 'type' field in target %s" % - targets[self.ref]['target_name']) + if 'type' not in targets[self.ref]: + raise GypError("Missing 'type' field in target %s" % + targets[self.ref]['target_name']) + + target_type = targets[self.ref]['type'] is_linkable = target_type in linkable_types @@ -1447,7 +1614,7 @@ def BuildDependencyList(targets): # access. dependency_nodes = {} for target, spec in targets.iteritems(): - if not target in dependency_nodes: + if target not in dependency_nodes: dependency_nodes[target] = DependencyGraphNode(target) # Set up the dependency links. Targets that have no dependencies are treated @@ -1456,21 +1623,18 @@ def BuildDependencyList(targets): for target, spec in targets.iteritems(): target_node = dependency_nodes[target] target_build_file = gyp.common.BuildFile(target) - if not 'dependencies' in spec or len(spec['dependencies']) == 0: + dependencies = spec.get('dependencies') + if not dependencies: target_node.dependencies = [root_node] root_node.dependents.append(target_node) else: - dependencies = spec['dependencies'] - for index in xrange(0, len(dependencies)): - try: - dependency = dependencies[index] - dependency_node = dependency_nodes[dependency] - target_node.dependencies.append(dependency_node) - dependency_node.dependents.append(target_node) - except KeyError, e: - gyp.common.ExceptionAppend(e, - 'while trying to load target %s' % target) - raise + for dependency in dependencies: + dependency_node = dependency_nodes.get(dependency) + if not dependency_node: + raise GypError("Dependency '%s' not found while " + "trying to load target %s" % (dependency, target)) + target_node.dependencies.append(dependency_node) + dependency_node.dependents.append(target_node) flat_list = root_node.FlattenToList() @@ -1478,9 +1642,9 @@ def BuildDependencyList(targets): # (cycle). If you need to figure out what's wrong, look for elements of # targets that are not in flat_list. 
   if len(flat_list) != len(targets):
-    raise DependencyGraphNode.CircularException, \
-        'Some targets not reachable, cycle in dependency graph detected: ' + \
-        ' '.join(set(flat_list) ^ set(targets))
+    raise DependencyGraphNode.CircularException(
+        'Some targets not reachable, cycle in dependency graph detected: ' +
+        ' '.join(set(flat_list) ^ set(targets)))

   return [dependency_nodes, flat_list]

@@ -1502,18 +1666,22 @@ def VerifyNoGYPFileCircularDependencies(targets):
     for dependency in target_dependencies:
       try:
         dependency_build_file = gyp.common.BuildFile(dependency)
-        if dependency_build_file == build_file:
-          # A .gyp file is allowed to refer back to itself.
-          continue
-        dependency_node = dependency_nodes[dependency_build_file]
-        if dependency_node not in build_file_node.dependencies:
-          build_file_node.dependencies.append(dependency_node)
-          dependency_node.dependents.append(build_file_node)
-      except KeyError, e:
+      except GypError, e:
         gyp.common.ExceptionAppend(
             e, 'while computing dependencies of .gyp file %s' % build_file)
         raise
+      if dependency_build_file == build_file:
+        # A .gyp file is allowed to refer back to itself.
+        continue
+      dependency_node = dependency_nodes.get(dependency_build_file)
+      if not dependency_node:
+        raise GypError("Dependency '%s' not found" % dependency_build_file)
+      if dependency_node not in build_file_node.dependencies:
+        build_file_node.dependencies.append(dependency_node)
+        dependency_node.dependents.append(build_file_node)
+
+
   # Files that have no dependencies are treated as dependent on root_node.
   root_node = DependencyGraphNode(None)
   for build_file_node in dependency_nodes.itervalues():
@@ -1552,8 +1720,8 @@ def DoDependentSettings(key, flat_list, targets, dependency_nodes):
   elif key == 'link_settings':
     dependencies = dependency_nodes[target].LinkDependencies(targets)
   else:
-    raise KeyError, "DoDependentSettings doesn't know how to determine " + \
-                    'dependencies for ' + key
+    raise GypError("DoDependentSettings doesn't know how to determine "
+                   'dependencies for ' + key)

   for dependency in dependencies:
     dependency_dict = targets[dependency]
@@ -1819,8 +1987,8 @@ def MergeDicts(to, fro, to_file, fro_file):
       # and prepend are the only policies that can coexist.
       for list_incompatible in lists_incompatible:
         if list_incompatible in fro:
-          raise KeyError, 'Incompatible list policies ' + k + ' and ' + \
-                          list_incompatible
+          raise GypError('Incompatible list policies ' + k + ' and ' +
+                         list_incompatible)

       if list_base in to:
         if ext == '?':
@@ -1952,8 +2120,8 @@ def SetUpConfigurations(target, target_dict):
     configuration_dict = target_dict['configurations'][configuration]
     for key in configuration_dict.keys():
       if key in invalid_configuration_keys:
-        raise KeyError, ('%s not allowed in the %s configuration, found in '
-                         'target %s' % (key, configuration, target))
+        raise GypError('%s not allowed in the %s configuration, found in '
+                       'target %s' % (key, configuration, target))


@@ -2084,9 +2252,9 @@ def ProcessListFiltersInDict(name, the_dict):
       # to be created.
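For readers tracing the filter hunks here, the semantics being guarded are roughly: a 'key!' list removes exact matches from 'key', and a 'key/' list applies ordered [action, regex] pairs where the last matching pattern decides whether an item survives; removed items land in 'key_excluded', which is why the code above insists that that key must not already exist. A rough sketch of the matching logic, simplified and assuming this reading of the semantics:

    import re

    def ApplyFilters(items, exact_excludes, regex_filters):
      kept, excluded = [], []
      for item in items:
        keep = item not in exact_excludes
        for action, pattern in regex_filters:
          if re.search(pattern, item):
            keep = (action == 'include')  # last matching pattern wins
        (kept if keep else excluded).append(item)
      return kept, excluded

    sources = ['a.cc', 'a_test.cc', 'b.cc']
    print ApplyFilters(sources, ['b.cc'], [('exclude', r'_test\.cc$')])
    # -> (['a.cc'], ['a_test.cc', 'b.cc'])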
excluded_key = list_key + '_excluded' if excluded_key in the_dict: - raise KeyError, \ - name + ' key ' + excluded_key + ' must not be present prior ' + \ - ' to applying exclusion/regex filters for ' + list_key + raise GypError(name + ' key ' + excluded_key + + ' must not be present prior ' + ' to applying exclusion/regex filters for ' + list_key) excluded_list = [] @@ -2136,9 +2304,14 @@ def ValidateTargetType(target, target_dict): 'none') target_type = target_dict.get('type', None) if target_type not in VALID_TARGET_TYPES: - raise Exception("Target %s has an invalid target type '%s'. " - "Must be one of %s." % - (target, target_type, '/'.join(VALID_TARGET_TYPES))) + raise GypError("Target %s has an invalid target type '%s'. " + "Must be one of %s." % + (target, target_type, '/'.join(VALID_TARGET_TYPES))) + if (target_dict.get('standalone_static_library', 0) and + not target_type == 'static_library'): + raise GypError('Target %s has type %s but standalone_static_library flag is' + ' only valid for static_library type.' % (target, + target_type)) def ValidateSourcesInTarget(target, target_dict, build_file): @@ -2162,10 +2335,10 @@ def ValidateSourcesInTarget(target, target_dict, build_file): error += ' %s: %s\n' % (basename, ' '.join(files)) if error: - print ('static library %s has several files with the same basename:\n' % - target + error + 'Some build systems, e.g. MSVC08, ' - 'cannot handle that.') - raise KeyError, 'Duplicate basenames in sources section, see list above' + print('static library %s has several files with the same basename:\n' % + target + error + 'Some build systems, e.g. MSVC08, ' + 'cannot handle that.') + raise GypError('Duplicate basenames in sources section, see list above') def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules): @@ -2189,25 +2362,25 @@ def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules): # Make sure that there's no conflict among rule names and extensions. rule_name = rule['rule_name'] if rule_name in rule_names: - raise KeyError, 'rule %s exists in duplicate, target %s' % \ - (rule_name, target) + raise GypError('rule %s exists in duplicate, target %s' % + (rule_name, target)) rule_names[rule_name] = rule rule_extension = rule['extension'] if rule_extension in rule_extensions: - raise KeyError, ('extension %s associated with multiple rules, ' + - 'target %s rules %s and %s') % \ - (rule_extension, target, - rule_extensions[rule_extension]['rule_name'], - rule_name) + raise GypError(('extension %s associated with multiple rules, ' + + 'target %s rules %s and %s') % + (rule_extension, target, + rule_extensions[rule_extension]['rule_name'], + rule_name)) rule_extensions[rule_extension] = rule # Make sure rule_sources isn't already there. It's going to be # created below if needed. if 'rule_sources' in rule: - raise KeyError, \ - 'rule_sources must not exist in input, target %s rule %s' % \ - (target, rule_name) + raise GypError( + 'rule_sources must not exist in input, target %s rule %s' % + (target, rule_name)) extension = rule['extension'] rule_sources = [] @@ -2231,28 +2404,28 @@ def ValidateRunAsInTarget(target, target_dict, build_file): if not run_as: return if not isinstance(run_as, dict): - raise Exception("The 'run_as' in target %s from file %s should be a " - "dictionary." % - (target_name, build_file)) + raise GypError("The 'run_as' in target %s from file %s should be a " + "dictionary." 
%
+                   (target_name, build_file))
   action = run_as.get('action')
   if not action:
-    raise Exception("The 'run_as' in target %s from file %s must have an "
-                    "'action' section." %
-                    (target_name, build_file))
+    raise GypError("The 'run_as' in target %s from file %s must have an "
+                   "'action' section." %
+                   (target_name, build_file))
   if not isinstance(action, list):
-    raise Exception("The 'action' for 'run_as' in target %s from file %s "
-                    "must be a list." %
-                    (target_name, build_file))
+    raise GypError("The 'action' for 'run_as' in target %s from file %s "
+                   "must be a list." %
+                   (target_name, build_file))
   working_directory = run_as.get('working_directory')
   if working_directory and not isinstance(working_directory, str):
-    raise Exception("The 'working_directory' for 'run_as' in target %s "
-                    "in file %s should be a string." %
-                    (target_name, build_file))
+    raise GypError("The 'working_directory' for 'run_as' in target %s "
+                   "in file %s should be a string." %
+                   (target_name, build_file))
   environment = run_as.get('environment')
   if environment and not isinstance(environment, dict):
-    raise Exception("The 'environment' for 'run_as' in target %s "
-                    "in file %s should be a dictionary." %
-                    (target_name, build_file))
+    raise GypError("The 'environment' for 'run_as' in target %s "
+                   "in file %s should be a dictionary." %
+                   (target_name, build_file))


 def ValidateActionsInTarget(target, target_dict, build_file):
@@ -2262,15 +2435,15 @@ def ValidateActionsInTarget(target, target_dict, build_file):
   for action in actions:
     action_name = action.get('action_name')
     if not action_name:
-      raise Exception("Anonymous action in target %s. "
-                      "An action must have an 'action_name' field." %
-                      target_name)
+      raise GypError("Anonymous action in target %s. "
+                     "An action must have an 'action_name' field." %
+                     target_name)
     inputs = action.get('inputs', None)
     if inputs is None:
-      raise Exception('Action in target %s has no inputs.' % target_name)
+      raise GypError('Action in target %s has no inputs.' % target_name)
     action_command = action.get('action')
     if action_command and not action_command[0]:
-      raise Exception("Empty action as command in target %s." % target_name)
+      raise GypError("Empty action as command in target %s." % target_name)


 def TurnIntIntoStrInDict(the_dict):
@@ -2327,13 +2500,13 @@ def VerifyNoCollidingTargets(targets):
     key = subdir + ':' + name
     if key in used:
       # Complain if this target is already used.
-      raise Exception('Duplicate target name "%s" in directory "%s" used both '
-                      'in "%s" and "%s".' % (name, subdir, gyp, used[key]))
+      raise GypError('Duplicate target name "%s" in directory "%s" used both '
+                     'in "%s" and "%s".' % (name, subdir, gyp, used[key]))
     used[key] = gyp


 def Load(build_files, variables, includes, depth, generator_input_info, check,
-         circular_check):
+         circular_check, parallel):
   # Set up path_sections and non_configuration_keys with the default data plus
   # the generator-specific data.
   global path_sections
@@ -2374,8 +2547,13 @@ def Load(build_files, variables, includes, depth, generator_input_info, check,
     # used as keys to the data dict and for references between input files.
     build_file = os.path.normpath(build_file)
     try:
-      LoadTargetBuildFile(build_file, data, aux_data, variables, includes,
-                          depth, check)
+      if parallel:
+        print >>sys.stderr, 'Using parallel processing (experimental).'
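The parallel branch here hands off to LoadTargetBuildFileParallel (the hunk continues just below), whose definition appeared earlier in input.py. Its core is the pending-counter-plus-condition-variable loop: the main thread sleeps until a worker callback either queues newly discovered dependencies or drains the outstanding count. A stripped-down sketch of that idiom, with FindDeps as a hypothetical stand-in for the real build-file loader:

    import multiprocessing
    import threading

    def FindDeps(path):
      # Hypothetical worker: parse |path| and return new dependencies.
      return []

    class Scheduler(object):
      def __init__(self, root):
        self.condition = threading.Condition()
        self.queue = [root]
        self.scheduled = set(self.queue)
        self.pending = 0

      def Callback(self, deps):
        # Runs on the pool's result-handler thread; hand work back to the
        # main loop under the lock, then wake it up.
        self.condition.acquire()
        for dep in deps:
          if dep not in self.scheduled:
            self.scheduled.add(dep)
            self.queue.append(dep)
        self.pending -= 1
        self.condition.notify()
        self.condition.release()

    if __name__ == '__main__':
      state = Scheduler('root.gyp')
      pool = multiprocessing.Pool(4)
      state.condition.acquire()
      while state.queue or state.pending:
        if not state.queue:
          state.condition.wait()
          continue
        path = state.queue.pop()
        state.pending += 1
        pool.apply_async(FindDeps, args=(path,), callback=state.Callback)
      state.condition.release()
      pool.close()
      pool.join()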
+ LoadTargetBuildFileParallel(build_file, data, aux_data, + variables, includes, depth, check) + else: + LoadTargetBuildFile(build_file, data, aux_data, + variables, includes, depth, check, True) except Exception, e: gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file) raise diff --git a/tools/gyp/pylib/gyp/mac_tool.py b/tools/gyp/pylib/gyp/mac_tool.py index b918c5826c1..69267694dc7 100755 --- a/tools/gyp/pylib/gyp/mac_tool.py +++ b/tools/gyp/pylib/gyp/mac_tool.py @@ -163,9 +163,10 @@ def ExecFilterLibtool(self, *cmd_list): """Calls libtool and filters out 'libtool: file: foo.o has no symbols'.""" libtool_re = re.compile(r'^libtool: file: .* has no symbols$') libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE) - for line in libtoolout.stderr: + _, err = libtoolout.communicate() + for line in err.splitlines(): if not libtool_re.match(line): - sys.stderr.write(line) + print >>sys.stderr, line return libtoolout.returncode def ExecPackageFramework(self, framework, version): diff --git a/tools/gyp/pylib/gyp/msvs_emulation.py b/tools/gyp/pylib/gyp/msvs_emulation.py index 4a55aca0575..840a79b6736 100644 --- a/tools/gyp/pylib/gyp/msvs_emulation.py +++ b/tools/gyp/pylib/gyp/msvs_emulation.py @@ -152,6 +152,7 @@ def __init__(self, spec, generator_flags): ('msvs_disabled_warnings', list), ('msvs_precompiled_header', str), ('msvs_precompiled_source', str), + ('msvs_configuration_platform', str), ('msvs_target_platform', str), ] configs = spec['configurations'] @@ -165,8 +166,7 @@ def __init__(self, spec, generator_flags): def GetVSMacroEnv(self, base_to_build=None, config=None): """Get a dict of variables mapping internal VS macro names to their gyp equivalents.""" - target_platform = self.GetTargetPlatform(config) - target_platform = {'x86': 'Win32'}.get(target_platform, target_platform) + target_platform = 'Win32' if self.GetArch(config) == 'x86' else 'x64' replacements = { '$(VSInstallDir)': self.vs_version.Path(), '$(VCInstallDir)': os.path.join(self.vs_version.Path(), 'VC') + '\\', @@ -215,29 +215,40 @@ def __call__(self, name, map=None, prefix='', default=None): return self.parent._GetAndMunge(self.field, self.base_path + [name], default=default, prefix=prefix, append=self.append, map=map) - def GetTargetPlatform(self, config): - target_platform = self.msvs_target_platform.get(config, '') - if not target_platform: - target_platform = 'Win32' - return {'Win32': 'x86'}.get(target_platform, target_platform) - - def _RealConfig(self, config): - target_platform = self.GetTargetPlatform(config) - if target_platform == 'x64' and not config.endswith('_x64'): + def GetArch(self, config): + """Get architecture based on msvs_configuration_platform and + msvs_target_platform. Returns either 'x86' or 'x64'.""" + configuration_platform = self.msvs_configuration_platform.get(config, '') + platform = self.msvs_target_platform.get(config, '') + if not platform: # If no specific override, use the configuration's. + platform = configuration_platform + # Map from platform to architecture. + return {'Win32': 'x86', 'x64': 'x64'}.get(platform, 'x86') + + def _TargetConfig(self, config): + """Returns the target-specific configuration.""" + # There's two levels of architecture/platform specification in VS. The + # first level is globally for the configuration (this is what we consider + # "the" config at the gyp level, which will be something like 'Debug' or + # 'Release_x64'), and a second target-specific configuration, which is an + # override for the global one. 
|config| is remapped here to take into + # account the local target-specific overrides to the global configuration. + arch = self.GetArch(config) + if arch == 'x64' and not config.endswith('_x64'): config += '_x64' + if arch == 'x86' and config.endswith('_x64'): + config = config.rsplit('_', 1)[0] return config def _Setting(self, path, config, default=None, prefix='', append=None, map=None): """_GetAndMunge for msvs_settings.""" - config = self._RealConfig(config) return self._GetAndMunge( self.msvs_settings[config], path, default, prefix, append, map) def _ConfigAttrib(self, path, config, default=None, prefix='', append=None, map=None): """_GetAndMunge for msvs_configuration_attributes.""" - config = self._RealConfig(config) return self._GetAndMunge( self.msvs_configuration_attributes[config], path, default, prefix, append, map) @@ -245,7 +256,7 @@ def _ConfigAttrib(self, path, config, def AdjustIncludeDirs(self, include_dirs, config): """Updates include_dirs to expand VS specific paths, and adds the system include dirs used for platform SDK and similar.""" - config = self._RealConfig(config) + config = self._TargetConfig(config) includes = include_dirs + self.msvs_system_include_dirs[config] includes.extend(self._Setting( ('VCCLCompilerTool', 'AdditionalIncludeDirectories'), config, default=[])) @@ -254,7 +265,7 @@ def AdjustIncludeDirs(self, include_dirs, config): def GetComputedDefines(self, config): """Returns the set of defines that are injected to the defines list based on other VS settings.""" - config = self._RealConfig(config) + config = self._TargetConfig(config) defines = [] if self._ConfigAttrib(['CharacterSet'], config) == '1': defines.extend(('_UNICODE', 'UNICODE')) @@ -267,7 +278,7 @@ def GetComputedDefines(self, config): def GetOutputName(self, config, expand_special): """Gets the explicitly overridden output name for a target or returns None if it's not overridden.""" - config = self._RealConfig(config) + config = self._TargetConfig(config) type = self.spec['type'] root = 'VCLibrarianTool' if type == 'static_library' else 'VCLinkerTool' # TODO(scottmg): Handle OutputDirectory without OutputFile. @@ -277,9 +288,19 @@ def GetOutputName(self, config, expand_special): output_file, config=config)) return output_file + def GetPDBName(self, config, expand_special): + """Gets the explicitly overridden pdb name for a target or returns None + if it's not overridden.""" + config = self._TargetConfig(config) + output_file = self._Setting(('VCLinkerTool', 'ProgramDatabaseFile'), config) + if output_file: + output_file = expand_special(self.ConvertVSMacros( + output_file, config=config)) + return output_file + def GetCflags(self, config): """Returns the flags that need to be added to .c and .cc compilations.""" - config = self._RealConfig(config) + config = self._TargetConfig(config) cflags = [] cflags.extend(['/wd' + w for w in self.msvs_disabled_warnings[config]]) cl = self._GetWrapper(self, self.msvs_settings[config], @@ -302,6 +323,7 @@ def GetCflags(self, config): cl('RuntimeLibrary', map={'0': 'T', '1': 'Td', '2': 'D', '3': 'Dd'}, prefix='/M') cl('ExceptionHandling', map={'1': 'sc','2': 'a'}, prefix='/EH') + cl('EnablePREfast', map={'true': '/analyze'}) cl('AdditionalOptions', prefix='') # ninja handles parallelism by itself, don't have the compiler do it too. 
cflags = filter(lambda x: not x.startswith('/MP'), cflags) @@ -310,13 +332,13 @@ def GetCflags(self, config): def GetPrecompiledHeader(self, config, gyp_to_build_path): """Returns an object that handles the generation of precompiled header build steps.""" - config = self._RealConfig(config) + config = self._TargetConfig(config) return _PchHelper(self, config, gyp_to_build_path) def _GetPchFlags(self, config, extension): """Get the flags to be added to the cflags for precompiled header support. """ - config = self._RealConfig(config) + config = self._TargetConfig(config) # The PCH is only built once by a particular source file. Usage of PCH must # only be for the same language (i.e. C vs. C++), so only include the pch # flags when the language matches. @@ -329,18 +351,18 @@ def _GetPchFlags(self, config, extension): def GetCflagsC(self, config): """Returns the flags that need to be added to .c compilations.""" - config = self._RealConfig(config) + config = self._TargetConfig(config) return self._GetPchFlags(config, '.c') def GetCflagsCC(self, config): """Returns the flags that need to be added to .cc compilations.""" - config = self._RealConfig(config) + config = self._TargetConfig(config) return ['/TP'] + self._GetPchFlags(config, '.cc') def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path): """Get and normalize the list of paths in AdditionalLibraryDirectories setting.""" - config = self._RealConfig(config) + config = self._TargetConfig(config) libpaths = self._Setting((root, 'AdditionalLibraryDirectories'), config, default=[]) libpaths = [os.path.normpath( @@ -350,7 +372,7 @@ def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path): def GetLibFlags(self, config, gyp_to_build_path): """Returns the flags that need to be added to lib commands.""" - config = self._RealConfig(config) + config = self._TargetConfig(config) libflags = [] lib = self._GetWrapper(self, self.msvs_settings[config], 'VCLibrarianTool', append=libflags) @@ -374,7 +396,7 @@ def GetLdflags(self, config, gyp_to_build_path, expand_special, manifest_base_name, is_executable): """Returns the flags that need to be added to link commands, and the manifest files.""" - config = self._RealConfig(config) + config = self._TargetConfig(config) ldflags = [] ld = self._GetWrapper(self, self.msvs_settings[config], 'VCLinkerTool', append=ldflags) @@ -387,6 +409,9 @@ def GetLdflags(self, config, gyp_to_build_path, expand_special, out = self.GetOutputName(config, expand_special) if out: ldflags.append('/OUT:' + out) + pdb = self.GetPDBName(config, expand_special) + if pdb: + ldflags.append('/PDB:' + pdb) ld('AdditionalOptions', prefix='') ld('SubSystem', map={'1': 'CONSOLE', '2': 'WINDOWS'}, prefix='/SUBSYSTEM:') ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL') @@ -401,6 +426,7 @@ def GetLdflags(self, config, gyp_to_build_path, expand_special, ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:') ld('ResourceOnlyDLL', map={'true': '/NOENTRY'}) ld('EntryPointSymbol', prefix='/ENTRY:') + ld('Profile', map={ 'true': '/PROFILE'}) # TODO(scottmg): This should sort of be somewhere else (not really a flag). ld('AdditionalDependencies', prefix='') # TODO(scottmg): These too. 
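Since GetArch and _TargetConfig interact in both directions, a couple of concrete cases make the remapping above easier to verify; this is just the hunk's own logic restated as a standalone function:

    def TargetConfig(config, arch):
      # Remap the gyp-level config name to match the effective architecture.
      if arch == 'x64' and not config.endswith('_x64'):
        config += '_x64'
      if arch == 'x86' and config.endswith('_x64'):
        config = config.rsplit('_', 1)[0]
      return config

    assert TargetConfig('Debug', 'x64') == 'Debug_x64'
    assert TargetConfig('Release_x64', 'x86') == 'Release'
    assert TargetConfig('Release_x64', 'x64') == 'Release_x64'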
@@ -466,14 +492,14 @@ def _GetAdditionalManifestFiles(self, config, gyp_to_build_path): def IsUseLibraryDependencyInputs(self, config): """Returns whether the target should be linked via Use Library Dependency Inputs (using component .objs of a given .lib).""" - config = self._RealConfig(config) + config = self._TargetConfig(config) uldi = self._Setting(('VCLinkerTool', 'UseLibraryDependencyInputs'), config) return uldi == 'true' def GetRcflags(self, config, gyp_to_ninja_path): """Returns the flags that need to be added to invocations of the resource compiler.""" - config = self._RealConfig(config) + config = self._TargetConfig(config) rcflags = [] rc = self._GetWrapper(self, self.msvs_settings[config], 'VCResourceCompilerTool', append=rcflags) @@ -510,18 +536,27 @@ def IsRuleRunUnderCygwin(self, rule): return int(rule.get('msvs_cygwin_shell', self.spec.get('msvs_cygwin_shell', 1))) != 0 - def HasExplicitIdlRules(self, spec): - """Determine if there's an explicit rule for idl files. When there isn't we - need to generate implicit rules to build MIDL .idl files.""" + def _HasExplicitRuleForExtension(self, spec, extension): + """Determine if there's an explicit rule for a particular extension.""" for rule in spec.get('rules', []): - if rule['extension'] == 'idl' and int(rule.get('msvs_external_rule', 0)): + if rule['extension'] == extension: return True return False + def HasExplicitIdlRules(self, spec): + """Determine if there's an explicit rule for idl files. When there isn't we + need to generate implicit rules to build MIDL .idl files.""" + return self._HasExplicitRuleForExtension(spec, 'idl') + + def HasExplicitAsmRules(self, spec): + """Determine if there's an explicit rule for asm files. When there isn't we + need to generate implicit rules to assemble .asm files.""" + return self._HasExplicitRuleForExtension(spec, 'asm') + def GetIdlBuildData(self, source, config): """Determine the implicit outputs for an idl file. Returns output directory, outputs, and variables and flags that are required.""" - config = self._RealConfig(config) + config = self._TargetConfig(config) midl_get = self._GetWrapper(self, self.msvs_settings[config], 'VCMIDLTool') def midl(name, default=None): return self.ConvertVSMacros(midl_get(name, default=default), @@ -689,3 +724,19 @@ def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, open_out): f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb') f.write(env_block) f.close() + +def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja): + """Emulate behavior of msvs_error_on_missing_sources present in the msvs + generator: Check that all regular source files, i.e. not created at run time, + exist on disk. Missing files cause needless recompilation when building via + VS, and we want this check to match for people/bots that build using ninja, + so they're not surprised when the VS build fails.""" + if int(generator_flags.get('msvs_error_on_missing_sources', 0)): + no_specials = filter(lambda x: '$' not in x, sources) + relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials] + missing = filter(lambda x: not os.path.exists(x), relative) + if missing: + # They'll look like out\Release\..\..\stuff\things.cc, so normalize the + # path for a slightly less crazy looking output. 
+ cleaned_up = [os.path.normpath(x) for x in missing] + raise Exception('Missing input files:\n%s' % '\n'.join(cleaned_up)) diff --git a/tools/gyp/pylib/gyp/ninja_syntax.py b/tools/gyp/pylib/gyp/ninja_syntax.py index ae85d0f54fa..0f3603a8ce1 100644 --- a/tools/gyp/pylib/gyp/ninja_syntax.py +++ b/tools/gyp/pylib/gyp/ninja_syntax.py @@ -12,8 +12,8 @@ import textwrap import re -def escape_spaces(word): - return word.replace('$ ','$$ ').replace(' ','$ ') +def escape_path(word): + return word.replace('$ ','$$ ').replace(' ','$ ').replace(':', '$:') class Writer(object): def __init__(self, output, width=78): @@ -35,8 +35,7 @@ def variable(self, key, value, indent=0): self._line('%s = %s' % (key, value), indent) def rule(self, name, command, description=None, depfile=None, - generator=False, restat=False, rspfile=None, - rspfile_content=None): + generator=False, restat=False, rspfile=None, rspfile_content=None): self._line('rule %s' % name) self.variable('command', command, indent=1) if description: @@ -56,15 +55,15 @@ def build(self, outputs, rule, inputs=None, implicit=None, order_only=None, variables=None): outputs = self._as_list(outputs) all_inputs = self._as_list(inputs)[:] - out_outputs = list(map(escape_spaces, outputs)) - all_inputs = list(map(escape_spaces, all_inputs)) + out_outputs = list(map(escape_path, outputs)) + all_inputs = list(map(escape_path, all_inputs)) if implicit: - implicit = map(escape_spaces, self._as_list(implicit)) + implicit = map(escape_path, self._as_list(implicit)) all_inputs.append('|') all_inputs.extend(implicit) if order_only: - order_only = map(escape_spaces, self._as_list(order_only)) + order_only = map(escape_path, self._as_list(order_only)) all_inputs.append('||') all_inputs.extend(order_only) diff --git a/tools/gyp/pylib/gyp/system_test.py b/tools/gyp/pylib/gyp/system_test.py deleted file mode 100755 index 51c71e36bee..00000000000 --- a/tools/gyp/pylib/gyp/system_test.py +++ /dev/null @@ -1,68 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2011 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import os -import tempfile -import shutil -import subprocess - - -def TestCommands(commands, files={}, env={}): - """Run commands in a temporary directory, returning true if they all succeed. - Return false on failures or if any commands produce output. - - Arguments: - commands: an array of shell-interpretable commands, e.g. ['ls -l', 'pwd'] - each will be expanded with Python %-expansion using env first. - files: a dictionary mapping filename to contents; - files will be created in the temporary directory before running - the command. - env: a dictionary of strings to expand commands with. 
- """ - tempdir = tempfile.mkdtemp() - try: - for name, contents in files.items(): - f = open(os.path.join(tempdir, name), 'wb') - f.write(contents) - f.close() - for command in commands: - proc = subprocess.Popen(command % env, shell=True, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - cwd=tempdir) - output = proc.communicate()[0] - if proc.returncode != 0 or output: - return False - return True - finally: - shutil.rmtree(tempdir) - return False - - -def TestArSupportsT(ar_command='ar', cc_command='cc'): - """Test whether 'ar' supports the 'T' flag.""" - return TestCommands(['%(cc)s -c test.c', - '%(ar)s crsT test.a test.o', - '%(cc)s test.a'], - files={'test.c': 'int main(){}'}, - env={'ar': ar_command, 'cc': cc_command}) - - -def main(): - # Run the various test functions and print the results. - def RunTest(description, function, **kwargs): - print "Testing " + description + ':', - if function(**kwargs): - print 'ok' - else: - print 'fail' - RunTest("ar 'T' flag", TestArSupportsT) - RunTest("ar 'T' flag with ccache", TestArSupportsT, cc_command='ccache cc') - return 0 - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/tools/gyp/pylib/gyp/win_tool.py b/tools/gyp/pylib/gyp/win_tool.py old mode 100644 new mode 100755 index c9c6a7b5a1d..7b065736383 --- a/tools/gyp/pylib/gyp/win_tool.py +++ b/tools/gyp/pylib/gyp/win_tool.py @@ -9,13 +9,13 @@ These functions are executed via gyp-win-tool when using the ninja generator. """ +from ctypes import windll, wintypes import os import shutil import subprocess import sys -import win32con -import win32file -import pywintypes + +BASE_DIR = os.path.dirname(os.path.abspath(__file__)) def main(args): @@ -26,19 +26,28 @@ def main(args): class LinkLock(object): - """A flock-style lock to limit the number of concurrent links to one. Based on - http://code.activestate.com/recipes/65203-portalocker-cross-platform-posixnt-api-for-flock-s/ + """A flock-style lock to limit the number of concurrent links to one. + + Uses a session-local mutex based on the file's directory. """ def __enter__(self): - self.file = open('LinkLock', 'w+') - self.file_handle = win32file._get_osfhandle(self.file.fileno()) - win32file.LockFileEx(self.file_handle, win32con.LOCKFILE_EXCLUSIVE_LOCK, - 0, -0x10000, pywintypes.OVERLAPPED()) + name = 'Local\\%s' % BASE_DIR.replace('\\', '_').replace(':', '_') + self.mutex = windll.kernel32.CreateMutexW( + wintypes.c_int(0), + wintypes.c_int(0), + wintypes.create_unicode_buffer(name)) + assert self.mutex + result = windll.kernel32.WaitForSingleObject( + self.mutex, wintypes.c_int(0xFFFFFFFF)) + # 0x80 means another process was killed without releasing the mutex, but + # that this process has been given ownership. This is fine for our + # purposes. + assert result in (0, 0x80), ( + "%s, %s" % (result, windll.kernel32.GetLastError())) def __exit__(self, type, value, traceback): - win32file.UnlockFileEx( - self.file_handle, 0, -0x10000, pywintypes.OVERLAPPED()) - self.file.close() + windll.kernel32.ReleaseMutex(self.mutex) + windll.kernel32.CloseHandle(self.mutex) class WinTool(object): @@ -170,16 +179,6 @@ def ExecRcWrapper(self, arch, *args): print line return popen.returncode - def ExecClWrapper(self, arch, depname, *args): - """Runs cl.exe and filters output through ninja-deplist-helper to get - dependendency information which is stored in |depname|.""" - env = self._GetEnv(arch) - args = ' '.join(args) + \ - '| ninja-deplist-helper -r . 
-q -f cl -o ' + depname + '"' - popen = subprocess.Popen(args, shell=True, env=env) - popen.wait() - return popen.returncode - def ExecActionWrapper(self, arch, rspfile, *dir): """Runs an action command line from a response file using the environment for |arch|. If |dir| is supplied, use that as the working directory.""" diff --git a/tools/gyp/pylib/gyp/xcode_emulation.py b/tools/gyp/pylib/gyp/xcode_emulation.py index 32b6f352463..ef5b46046ed 100644 --- a/tools/gyp/pylib/gyp/xcode_emulation.py +++ b/tools/gyp/pylib/gyp/xcode_emulation.py @@ -562,6 +562,22 @@ def GetLdflags(self, configname, product_dir, gyp_to_build_path): self.configname = None return ldflags + def GetLibtoolflags(self, configname): + """Returns flags that need to be passed to the static linker. + + Args: + configname: The name of the configuration to get ld flags for. + """ + self.configname = configname + libtoolflags = [] + + for libtoolflag in self._Settings().get('OTHER_LDFLAGS', []): + libtoolflags.append(libtoolflag) + # TODO(thakis): ARCHS? + + self.configname = None + return libtoolflags + def GetPerTargetSettings(self): """Gets a list of all the per-target settings. This will only fetch keys whose values are the same across all configurations.""" @@ -923,6 +939,11 @@ def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration, 'TARGET_BUILD_DIR' : built_products_dir, 'TEMP_DIR' : '${TMPDIR}', } + if xcode_settings.GetPerTargetSetting('SDKROOT'): + env['SDKROOT'] = xcode_settings._SdkPath() + else: + env['SDKROOT'] = '' + if spec['type'] in ( 'executable', 'static_library', 'shared_library', 'loadable_module'): env['EXECUTABLE_NAME'] = xcode_settings.GetExecutableName() diff --git a/tools/gyp/pylib/gyp/xcodeproj_file.py b/tools/gyp/pylib/gyp/xcodeproj_file.py index 5124ed0bf6d..ec4cb96bc20 100644 --- a/tools/gyp/pylib/gyp/xcodeproj_file.py +++ b/tools/gyp/pylib/gyp/xcodeproj_file.py @@ -254,7 +254,7 @@ class XCObject(object): but in some cases an object's parent may wish to push a hashable value into its child, and it can do so by appending to _hashables. - Attribues: + Attributes: id: The object's identifier, a 24-character uppercase hexadecimal string. Usually, objects being created should not set id until the entire project file structure is built. At that point, UpdateIDs() should @@ -392,7 +392,10 @@ def Hashables(self): return hashables - def ComputeIDs(self, recursive=True, overwrite=True, hash=None): + def HashablesForChild(self): + return None + + def ComputeIDs(self, recursive=True, overwrite=True, seed_hash=None): """Set "id" properties deterministically. 
An object's "id" property is set based on a hash of its class type and @@ -419,8 +422,10 @@ def _HashUpdate(hash, data): hash.update(struct.pack('>i', len(data))) hash.update(data) - if hash is None: - hash = _new_sha1() + if seed_hash is None: + seed_hash = _new_sha1() + + hash = seed_hash.copy() hashables = self.Hashables() assert len(hashables) > 0 @@ -428,8 +433,17 @@ def _HashUpdate(hash, data): _HashUpdate(hash, hashable) if recursive: + hashables_for_child = self.HashablesForChild() + if hashables_for_child is None: + child_hash = hash + else: + assert len(hashables_for_child) > 0 + child_hash = seed_hash.copy() + for hashable in hashables_for_child: + _HashUpdate(child_hash, hashable) + for child in self.Children(): - child.ComputeIDs(recursive, overwrite, hash.copy()) + child.ComputeIDs(recursive, overwrite, child_hash) if overwrite or self.id is None: # Xcode IDs are only 96 bits (24 hex characters), but a SHA-1 digest is @@ -1104,6 +1118,26 @@ def __init__(self, properties=None, id=None, parent=None): for child in self._properties.get('children', []): self._AddChildToDicts(child) + def Hashables(self): + # super + hashables = XCHierarchicalElement.Hashables(self) + + # It is not sufficient to just rely on name and parent to build a unique + # hashable : a node could have two child PBXGroup sharing a common name. + # To add entropy the hashable is enhanced with the names of all its + # children. + for child in self._properties.get('children', []): + child_name = child.Name() + if child_name != None: + hashables.append(child_name) + + return hashables + + def HashablesForChild(self): + # To avoid a circular reference the hashables used to compute a child id do + # not include the child names. + return XCHierarchicalElement.Hashables(self) + def _AddChildToDicts(self, child): # Sets up this PBXGroup object's dicts to reference the child properly. child_path = child.PathFromSourceTreeAndPath() @@ -1440,40 +1474,41 @@ def __init__(self, properties=None, id=None, parent=None): # TODO(mark): This is the replacement for a replacement for a quick hack. # It is no longer incredibly sucky, but this list needs to be extended. 
extension_map = { - 'a': 'archive.ar', - 'app': 'wrapper.application', - 'bdic': 'file', - 'bundle': 'wrapper.cfbundle', - 'c': 'sourcecode.c.c', - 'cc': 'sourcecode.cpp.cpp', - 'cpp': 'sourcecode.cpp.cpp', - 'css': 'text.css', - 'cxx': 'sourcecode.cpp.cpp', - 'dylib': 'compiled.mach-o.dylib', - 'framework': 'wrapper.framework', - 'h': 'sourcecode.c.h', - 'hxx': 'sourcecode.cpp.h', - 'icns': 'image.icns', - 'java': 'sourcecode.java', - 'js': 'sourcecode.javascript', - 'm': 'sourcecode.c.objc', - 'mm': 'sourcecode.cpp.objcpp', - 'nib': 'wrapper.nib', - 'o': 'compiled.mach-o.objfile', - 'pdf': 'image.pdf', - 'pl': 'text.script.perl', - 'plist': 'text.plist.xml', - 'pm': 'text.script.perl', - 'png': 'image.png', - 'py': 'text.script.python', - 'r': 'sourcecode.rez', - 'rez': 'sourcecode.rez', - 's': 'sourcecode.asm', - 'strings': 'text.plist.strings', - 'ttf': 'file', - 'xcconfig': 'text.xcconfig', - 'xib': 'file.xib', - 'y': 'sourcecode.yacc', + 'a': 'archive.ar', + 'app': 'wrapper.application', + 'bdic': 'file', + 'bundle': 'wrapper.cfbundle', + 'c': 'sourcecode.c.c', + 'cc': 'sourcecode.cpp.cpp', + 'cpp': 'sourcecode.cpp.cpp', + 'css': 'text.css', + 'cxx': 'sourcecode.cpp.cpp', + 'dylib': 'compiled.mach-o.dylib', + 'framework': 'wrapper.framework', + 'h': 'sourcecode.c.h', + 'hxx': 'sourcecode.cpp.h', + 'icns': 'image.icns', + 'java': 'sourcecode.java', + 'js': 'sourcecode.javascript', + 'm': 'sourcecode.c.objc', + 'mm': 'sourcecode.cpp.objcpp', + 'nib': 'wrapper.nib', + 'o': 'compiled.mach-o.objfile', + 'pdf': 'image.pdf', + 'pl': 'text.script.perl', + 'plist': 'text.plist.xml', + 'pm': 'text.script.perl', + 'png': 'image.png', + 'py': 'text.script.python', + 'r': 'sourcecode.rez', + 'rez': 'sourcecode.rez', + 's': 'sourcecode.asm', + 'strings': 'text.plist.strings', + 'ttf': 'file', + 'xcconfig': 'text.xcconfig', + 'xcdatamodel': 'wrapper.xcdatamodel', + 'xib': 'file.xib', + 'y': 'sourcecode.yacc', } if is_dir: