From f2d305377d61462d4887b671b91eb6093e2fb57c Mon Sep 17 00:00:00 2001 From: Refael Ackermann Date: Wed, 24 Jul 2019 10:39:16 -0400 Subject: [PATCH 1/4] gyp: vendor in GYP3 6.0.4 https://github.com/refack/GYP3/commit/6a5d2545c6de9db64d6dc98882008666ea6056e8 --- gyp/.gitignore | 3 + gyp/AUTHORS | 13 - gyp/DEPS | 24 - gyp/LICENSE | 1 + gyp/OWNERS | 1 - gyp/PRESUBMIT.py | 138 - gyp/README.md | 8 + gyp/codereview.settings | 10 - gyp/gyp | 8 - gyp/gyp.bat | 5 - gyp/{pylib/gyp => gyp/MSVS}/MSVSNew.py | 257 +- gyp/{pylib/gyp => gyp/MSVS}/MSVSSettings.py | 38 +- gyp/{pylib/gyp => gyp/MSVS}/MSVSToolFile.py | 15 +- gyp/gyp/MSVS/MSVSVersion.py | 365 +++ gyp/gyp/MSVS/__init__.py | 507 ++++ gyp/gyp/Makefile.tmpl | 223 ++ .../make.py => gyp/MakefileWriter.py} | 1403 ++-------- gyp/gyp/NinjaWriter.py | 1407 ++++++++++ gyp/gyp/XCodeDetect.py | 65 + gyp/{pylib => }/gyp/__init__.py | 268 +- .../buildtime_helpers}/flock_tool.py | 15 +- .../buildtime_helpers}/large-pdb-shim.cc | 0 .../gyp => gyp/buildtime_helpers}/mac_tool.py | 225 +- .../gyp => gyp/buildtime_helpers}/win_tool.py | 58 +- gyp/{pylib => }/gyp/common.py | 146 +- gyp/{pylib => }/gyp/easy_xml.py | 34 +- gyp/{pylib => }/gyp/generator/__init__.py | 0 gyp/{pylib => }/gyp/generator/analyzer.py | 23 +- gyp/{pylib => }/gyp/generator/cmake.py | 204 +- .../gyp/generator/dump_dependency_json.py | 19 +- gyp/{pylib => }/gyp/generator/eclipse.py | 87 +- gyp/{pylib => }/gyp/generator/gypd.py | 7 +- gyp/{pylib => }/gyp/generator/gypsh.py | 10 +- gyp/gyp/generator/make.py | 557 ++++ gyp/{pylib => }/gyp/generator/msvs.py | 991 +++---- gyp/gyp/generator/ninja.py | 806 ++++++ gyp/{pylib => }/gyp/generator/xcode.py | 106 +- .../gyp => gyp/generator}/xcode_ninja.py | 27 +- .../gyp => gyp/generator}/xcodeproj_file.py | 209 +- gyp/{pylib => }/gyp/input.py | 930 ++----- gyp/gyp/lib/__init__.py | 0 gyp/{pylib/gyp => gyp/lib}/ninja_syntax.py | 0 gyp/{pylib/gyp => gyp/lib}/simple_copy.py | 27 +- gyp/{pylib => }/gyp/msvs_emulation.py | 625 ++--- .../unit_tests}/MSVSSettings_test.py | 553 ++-- gyp/gyp/unit_tests/NinjaWriter_test.py | 39 + gyp/gyp/unit_tests/__init__.py | 0 .../gyp => gyp/unit_tests}/common_test.py | 1 + .../gyp => gyp/unit_tests}/easy_xml_test.py | 5 +- .../unit_tests/generator_msvs_test.py} | 5 +- .../unit_tests/generator_xcode_test.py} | 0 .../gyp => gyp/unit_tests}/input_test.py | 1 - gyp/{pylib => }/gyp/xcode_emulation.py | 328 ++- gyp/gyp_main.py | 10 - gyp/pylib/gyp/MSVSProject.py | 208 -- gyp/pylib/gyp/MSVSUserFile.py | 147 - gyp/pylib/gyp/MSVSUtil.py | 270 -- gyp/pylib/gyp/MSVSVersion.py | 449 --- gyp/pylib/gyp/generator/android.py | 1097 -------- .../gyp/generator/compile_commands_json.py | 115 - gyp/pylib/gyp/generator/ninja.py | 2426 ----------------- gyp/pylib/gyp/generator/ninja_test.py | 46 - gyp/pylib/gyp/ordered_dict.py | 289 -- gyp/pylib/gyp/xml_fix.py | 69 - gyp/samples/samples | 81 - gyp/samples/samples.bat | 5 - gyp/setup.py | 19 - gyp/tools/graphviz.py | 4 +- gyp/tools/pretty_gyp.py | 27 +- gyp/tools/pretty_sln.py | 6 +- gyp/tools/pretty_vcproj.py | 71 +- gyp/tools/vssetup.powershell/LICENSE.txt | 8 + ...icrosoft.VisualStudio.Setup.PowerShell.dll | Bin 0 -> 39424 bytes gyp/tools/vssetup.powershell/VSQuery.ps1 | 4 + gyp/tools/vssetup.powershell/VSSetup.psd1 | 15 + .../vssetup.powershell/VSSetup.types.ps1xml | 66 + .../vssetup.powershell/about_VSSetup.help.txt | 46 + 77 files changed, 6727 insertions(+), 9548 deletions(-) delete mode 100644 gyp/AUTHORS delete mode 100644 gyp/DEPS delete mode 100644 gyp/OWNERS delete mode 100644 
gyp/PRESUBMIT.py create mode 100644 gyp/README.md delete mode 100644 gyp/codereview.settings delete mode 100755 gyp/gyp delete mode 100644 gyp/gyp.bat rename gyp/{pylib/gyp => gyp/MSVS}/MSVSNew.py (54%) rename gyp/{pylib/gyp => gyp/MSVS}/MSVSSettings.py (97%) rename gyp/{pylib/gyp => gyp/MSVS}/MSVSToolFile.py (82%) create mode 100644 gyp/gyp/MSVS/MSVSVersion.py create mode 100644 gyp/gyp/MSVS/__init__.py create mode 100644 gyp/gyp/Makefile.tmpl rename gyp/{pylib/gyp/generator/make.py => gyp/MakefileWriter.py} (50%) create mode 100644 gyp/gyp/NinjaWriter.py create mode 100644 gyp/gyp/XCodeDetect.py rename gyp/{pylib => }/gyp/__init__.py (60%) mode change 100755 => 100644 rename gyp/{pylib/gyp => gyp/buildtime_helpers}/flock_tool.py (80%) mode change 100755 => 100644 rename gyp/{data/win => gyp/buildtime_helpers}/large-pdb-shim.cc (100%) rename gyp/{pylib/gyp => gyp/buildtime_helpers}/mac_tool.py (77%) mode change 100755 => 100644 rename gyp/{pylib/gyp => gyp/buildtime_helpers}/win_tool.py (87%) mode change 100755 => 100644 rename gyp/{pylib => }/gyp/common.py (88%) rename gyp/{pylib => }/gyp/easy_xml.py (88%) rename gyp/{pylib => }/gyp/generator/__init__.py (100%) rename gyp/{pylib => }/gyp/generator/analyzer.py (98%) rename gyp/{pylib => }/gyp/generator/cmake.py (87%) rename gyp/{pylib => }/gyp/generator/dump_dependency_json.py (85%) rename gyp/{pylib => }/gyp/generator/eclipse.py (86%) rename gyp/{pylib => }/gyp/generator/gypd.py (94%) rename gyp/{pylib => }/gyp/generator/gypsh.py (84%) create mode 100644 gyp/gyp/generator/make.py rename gyp/{pylib => }/gyp/generator/msvs.py (77%) create mode 100644 gyp/gyp/generator/ninja.py rename gyp/{pylib => }/gyp/generator/xcode.py (94%) rename gyp/{pylib/gyp => gyp/generator}/xcode_ninja.py (91%) rename gyp/{pylib/gyp => gyp/generator}/xcodeproj_file.py (94%) rename gyp/{pylib => }/gyp/input.py (76%) create mode 100644 gyp/gyp/lib/__init__.py rename gyp/{pylib/gyp => gyp/lib}/ninja_syntax.py (100%) rename gyp/{pylib/gyp => gyp/lib}/simple_copy.py (65%) rename gyp/{pylib => }/gyp/msvs_emulation.py (67%) rename gyp/{pylib/gyp => gyp/unit_tests}/MSVSSettings_test.py (81%) mode change 100755 => 100644 create mode 100644 gyp/gyp/unit_tests/NinjaWriter_test.py create mode 100644 gyp/gyp/unit_tests/__init__.py rename gyp/{pylib/gyp => gyp/unit_tests}/common_test.py (97%) mode change 100755 => 100644 rename gyp/{pylib/gyp => gyp/unit_tests}/easy_xml_test.py (97%) mode change 100755 => 100644 rename gyp/{pylib/gyp/generator/msvs_test.py => gyp/unit_tests/generator_msvs_test.py} (92%) mode change 100755 => 100644 rename gyp/{pylib/gyp/generator/xcode_test.py => gyp/unit_tests/generator_xcode_test.py} (100%) rename gyp/{pylib/gyp => gyp/unit_tests}/input_test.py (99%) mode change 100755 => 100644 rename gyp/{pylib => }/gyp/xcode_emulation.py (85%) delete mode 100644 gyp/pylib/gyp/MSVSProject.py delete mode 100644 gyp/pylib/gyp/MSVSUserFile.py delete mode 100644 gyp/pylib/gyp/MSVSUtil.py delete mode 100644 gyp/pylib/gyp/MSVSVersion.py delete mode 100644 gyp/pylib/gyp/generator/android.py delete mode 100644 gyp/pylib/gyp/generator/compile_commands_json.py delete mode 100644 gyp/pylib/gyp/generator/ninja.py delete mode 100644 gyp/pylib/gyp/generator/ninja_test.py delete mode 100644 gyp/pylib/gyp/ordered_dict.py delete mode 100644 gyp/pylib/gyp/xml_fix.py delete mode 100755 gyp/samples/samples delete mode 100644 gyp/samples/samples.bat delete mode 100755 gyp/setup.py create mode 100644 gyp/tools/vssetup.powershell/LICENSE.txt create mode 100644 
gyp/tools/vssetup.powershell/Microsoft.VisualStudio.Setup.PowerShell.dll create mode 100644 gyp/tools/vssetup.powershell/VSQuery.ps1 create mode 100644 gyp/tools/vssetup.powershell/VSSetup.psd1 create mode 100644 gyp/tools/vssetup.powershell/VSSetup.types.ps1xml create mode 100644 gyp/tools/vssetup.powershell/about_VSSetup.help.txt diff --git a/gyp/.gitignore b/gyp/.gitignore index 0d20b6487c..912cf723b1 100644 --- a/gyp/.gitignore +++ b/gyp/.gitignore @@ -1 +1,4 @@ *.pyc +/out/ +/.venv/ +*.tests diff --git a/gyp/AUTHORS b/gyp/AUTHORS deleted file mode 100644 index d76d8cd768..0000000000 --- a/gyp/AUTHORS +++ /dev/null @@ -1,13 +0,0 @@ -# Names should be added to this file like so: -# Name or Organization - -Google Inc. <*@google.com> -Bloomberg Finance L.P. <*@bloomberg.net> -IBM Inc. <*@*.ibm.com> -Yandex LLC <*@yandex-team.ru> - -Steven Knight -Ryan Norton -David J. Sankel -Eric N. Vander Weele -Tom Freudenberg diff --git a/gyp/DEPS b/gyp/DEPS deleted file mode 100644 index 2e1120f274..0000000000 --- a/gyp/DEPS +++ /dev/null @@ -1,24 +0,0 @@ -# DEPS file for gclient use in buildbot execution of gyp tests. -# -# (You don't need to use gclient for normal GYP development work.) - -vars = { - "chrome_trunk": "http://src.chromium.org/svn/trunk", - "googlecode_url": "http://%s.googlecode.com/svn", -} - -deps = { -} - -deps_os = { - "win": { - "third_party/cygwin": - Var("chrome_trunk") + "/deps/third_party/cygwin@66844", - - "third_party/python_26": - Var("chrome_trunk") + "/tools/third_party/python_26@89111", - - "src/third_party/pefile": - (Var("googlecode_url") % "pefile") + "/trunk@63", - }, -} diff --git a/gyp/LICENSE b/gyp/LICENSE index ab6b011a10..06544248d3 100644 --- a/gyp/LICENSE +++ b/gyp/LICENSE @@ -1,4 +1,5 @@ Copyright (c) 2009 Google Inc. All rights reserved. +Copyright (c) 2019 Refael Ackermann. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are diff --git a/gyp/OWNERS b/gyp/OWNERS deleted file mode 100644 index 72e8ffc0db..0000000000 --- a/gyp/OWNERS +++ /dev/null @@ -1 +0,0 @@ -* diff --git a/gyp/PRESUBMIT.py b/gyp/PRESUBMIT.py deleted file mode 100644 index e52f9d2d22..0000000000 --- a/gyp/PRESUBMIT.py +++ /dev/null @@ -1,138 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - - -"""Top-level presubmit script for GYP. - -See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts -for more details about the presubmit API built into gcl. -""" - - -PYLINT_BLACKLIST = [ - # TODO: fix me. - # From SCons, not done in google style. - 'test/lib/TestCmd.py', - 'test/lib/TestCommon.py', - 'test/lib/TestGyp.py', -] - - -PYLINT_DISABLED_WARNINGS = [ - # TODO: fix me. - # Many tests include modules they don't use. - 'W0611', - # Possible unbalanced tuple unpacking with sequence. - 'W0632', - # Attempting to unpack a non-sequence. - 'W0633', - # Include order doesn't properly include local files? - 'F0401', - # Some use of built-in names. - 'W0622', - # Some unused variables. - 'W0612', - # Operator not preceded/followed by space. - 'C0323', - 'C0322', - # Unnecessary semicolon. - 'W0301', - # Unused argument. - 'W0613', - # String has no effect (docstring in wrong place). - 'W0105', - # map/filter on lambda could be replaced by comprehension. - 'W0110', - # Use of eval. - 'W0123', - # Comma not followed by space. 
-  'C0324',
-  # Access to a protected member.
-  'W0212',
-  # Bad indent.
-  'W0311',
-  # Line too long.
-  'C0301',
-  # Undefined variable.
-  'E0602',
-  # Not exception type specified.
-  'W0702',
-  # No member of that name.
-  'E1101',
-  # Dangerous default {}.
-  'W0102',
-  # Cyclic import.
-  'R0401',
-  # Others, too many to sort.
-  'W0201', 'W0232', 'E1103', 'W0621', 'W0108', 'W0223', 'W0231',
-  'R0201', 'E0101', 'C0321',
-  # ************* Module copy
-  # W0104:427,12:_test.odict.__setitem__: Statement seems to have no effect
-  'W0104',
-]
-
-
-def _LicenseHeader(input_api):
-  # Accept any year number from 2009 to the current year.
-  current_year = int(input_api.time.strftime('%Y'))
-  allowed_years = (str(s) for s in reversed(range(2009, current_year + 1)))
-  years_re = '(' + '|'.join(allowed_years) + ')'
-
-  # The (c) is deprecated, but tolerate it until it's removed from all files.
-  return (
-      r'.*? Copyright (\(c\) )?%(year)s Google Inc\. All rights reserved\.\n'
-      r'.*? Use of this source code is governed by a BSD-style license that '
-      r'can be\n'
-      r'.*? found in the LICENSE file\.\n'
-  ) % {
-      'year': years_re,
-  }
-
-def CheckChangeOnUpload(input_api, output_api):
-  report = []
-  report.extend(input_api.canned_checks.PanProjectChecks(
-      input_api, output_api, license_header=_LicenseHeader(input_api)))
-  return report
-
-
-def CheckChangeOnCommit(input_api, output_api):
-  report = []
-
-  report.extend(input_api.canned_checks.PanProjectChecks(
-      input_api, output_api, license_header=_LicenseHeader(input_api)))
-  report.extend(input_api.canned_checks.CheckTreeIsOpen(
-      input_api, output_api,
-      'http://gyp-status.appspot.com/status',
-      'http://gyp-status.appspot.com/current'))
-
-  import os
-  import sys
-  old_sys_path = sys.path
-  try:
-    sys.path = ['pylib', 'test/lib'] + sys.path
-    blacklist = PYLINT_BLACKLIST
-    if sys.platform == 'win32':
-      blacklist = [os.path.normpath(x).replace('\\', '\\\\')
-                   for x in PYLINT_BLACKLIST]
-    report.extend(input_api.canned_checks.RunPylint(
-        input_api,
-        output_api,
-        black_list=blacklist,
-        disabled_warnings=PYLINT_DISABLED_WARNINGS))
-  finally:
-    sys.path = old_sys_path
-  return report
-
-
-TRYBOTS = [
-  'linux_try',
-  'mac_try',
-  'win_try',
-]
-
-
-def GetPreferredTryMasters(_, change):
-  return {
-    'client.gyp': { t: set(['defaulttests']) for t in TRYBOTS },
-  }
diff --git a/gyp/README.md b/gyp/README.md
new file mode 100644
index 0000000000..55567a907a
--- /dev/null
+++ b/gyp/README.md
@@ -0,0 +1,8 @@
+[a fork of the unmaintained [google/GYP](https://chromium.googlesource.com/external/gyp)]
+
+[![Build Status](https://travis-ci.com/refack/GYP.svg?branch=master)](https://travis-ci.com/refack/GYP)
+
+Generate Your Projects
+===================================
+
+Documentation is available at [http://gyp3.org/](http://gyp3.org/) (or at the [`gh-pages`](https://github.com/refack/GYP/blob/gh-pages/index.md) branch).
diff --git a/gyp/codereview.settings b/gyp/codereview.settings
deleted file mode 100644
index faf37f1145..0000000000
--- a/gyp/codereview.settings
+++ /dev/null
@@ -1,10 +0,0 @@
-# This file is used by gcl to get repository specific information.
-CODE_REVIEW_SERVER: codereview.chromium.org -CC_LIST: gyp-developer@googlegroups.com -VIEW_VC: https://chromium.googlesource.com/external/gyp/+/ -TRY_ON_UPLOAD: False -TRYSERVER_PROJECT: gyp -TRYSERVER_PATCHLEVEL: 1 -TRYSERVER_ROOT: gyp -TRYSERVER_SVN_URL: svn://svn.chromium.org/chrome-try/try-nacl -PROJECT: gyp diff --git a/gyp/gyp b/gyp/gyp deleted file mode 100755 index 1b8b9bdfb0..0000000000 --- a/gyp/gyp +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# Copyright 2013 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -set -e -base=$(dirname "$0") -exec python "${base}/gyp_main.py" "$@" diff --git a/gyp/gyp.bat b/gyp/gyp.bat deleted file mode 100644 index c0b4ca24e5..0000000000 --- a/gyp/gyp.bat +++ /dev/null @@ -1,5 +0,0 @@ -@rem Copyright (c) 2009 Google Inc. All rights reserved. -@rem Use of this source code is governed by a BSD-style license that can be -@rem found in the LICENSE file. - -@python "%~dp0gyp_main.py" %* diff --git a/gyp/pylib/gyp/MSVSNew.py b/gyp/gyp/MSVS/MSVSNew.py similarity index 54% rename from gyp/pylib/gyp/MSVSNew.py rename to gyp/gyp/MSVS/MSVSNew.py index 9b64e2c1c8..0381200d08 100644 --- a/gyp/pylib/gyp/MSVSNew.py +++ b/gyp/gyp/MSVS/MSVSNew.py @@ -6,65 +6,71 @@ import hashlib import os -import random - import gyp.common -try: - cmp -except NameError: - def cmp(x, y): - return (x > y) - (x < y) -# Initialize random number generator -random.seed() +class MSVSSolutionEntry(object): + """ + Set GUID from path + TODO(rspangler): This is fragile. + 1. We can't just use the project filename sans path, since there could + be multiple projects with the same base name (for example, + foo/unittest.vcproj and bar/unittest.vcproj). + 2. The path needs to be relative to $SOURCE_ROOT, so that the project + GUID is the same whether it's included from base/base.sln or + foo/bar/baz/baz.sln. + 3. The GUID needs to be the same each time this builder is invoked, so + that we don't need to rebuild the solution when the project changes. + 4. We should be able to handle pre-built project files by reading the + GUID from the files. + """ + + guid_seed = "" -# GUIDs for project types -ENTRY_TYPE_GUIDS = { + # GUIDs for project types + ENTRY_TYPE_GUIDS = { 'project': '{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}', 'folder': '{2150E333-8FDC-42A3-9474-1A3956D46DE8}', -} - -#------------------------------------------------------------------------------ -# Helper functions - + } -def MakeGuid(name, seed='msvs_new'): - """Returns a GUID for the specified target name. - - Args: - name: Target name. - seed: Seed for MD5 hash. - Returns: - A GUID-line string calculated from the name and seed. - - This generates something which looks like a GUID, but depends only on the - name and seed. This means the same name/seed will always generate the same - GUID, so that projects and solutions which refer to each other can explicitly - determine the GUID to refer to explicitly. It also means that the GUID will - not change when the project for a target is rebuilt. - """ - # Calculate a MD5 signature for the seed and name. 
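# A standalone sketch of this GUID scheme (hypothetical helper, not part of
# the patch): because the GUID is a pure function of seed and name, a
# regenerated project or solution keeps a stable identity across builds.
import hashlib

def sketch_make_guid(name, seed='msvs_new'):
  d = hashlib.md5((seed + name).encode('utf-8')).hexdigest().upper()
  return '{%s-%s-%s-%s-%s}' % (d[:8], d[8:12], d[12:16], d[16:20], d[20:32])

assert sketch_make_guid('base') == sketch_make_guid('base')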
- d = hashlib.md5((str(seed) + str(name)).encode('utf-8')).hexdigest().upper() - # Convert most of the signature to GUID form (discard the rest) - guid = ('{' + d[:8] + '-' + d[8:12] + '-' + d[12:16] + '-' + d[16:20] - + '-' + d[20:32] + '}') - return guid - -#------------------------------------------------------------------------------ + @classmethod + def MakeGuid(cls, name): + """Returns a GUID for the specified target name. + Args: + name: Target name. + Returns: + A GUID-line string calculated from the name and seed. + + This generates something which looks like a GUID, but depends only on the + name and seed. This means the same name/seed will always generate the same + GUID, so that projects and solutions which refer to each other can explicitly + determine the GUID to refer to explicitly. It also means that the GUID will + not change when the project for a target is rebuilt. + """ + # Calculate a MD5 signature for the seed and name. + key = (str(cls.guid_seed) + str(name)).encode('utf-8') + d = hashlib.md5(key).hexdigest().upper() + # Convert most of the signature to GUID form (discard the rest) + guid = ('{' + d[:8] + '-' + d[8:12] + '-' + d[12:16] + '-' + d[16:20] + '-' + d[20:32] + '}') + return guid + + def __init__(self, path, name, guid): + self.path = path + self.name = name + self.guid = guid or MSVSSolutionEntry.MakeGuid(self.path) -class MSVSSolutionEntry(object): - def __cmp__(self, other): + def __lt__(self, other): # Sort by name then guid (so things are in order on vs2008). - return cmp((self.name, self.get_guid()), (other.name, other.get_guid())) + return self.name < other.name -class MSVSFolder(MSVSSolutionEntry): +class MSVSFolderEntry(MSVSSolutionEntry): """Folder in a Visual Studio project or solution.""" - def __init__(self, path, name = None, entries = None, - guid = None, items = None): + guid_seed = 'msvs_folder' + + def __init__(self, path, name=None, entries=None, guid=None, items=None): """Initializes the folder. Args: @@ -76,63 +82,44 @@ def __init__(self, path, name = None, entries = None, items: List of solution items to include in the folder project. May be None, if the folder does not directly contain items. """ - if name: - self.name = name - else: - # Use last layer. - self.name = os.path.basename(path) - + super(MSVSFolderEntry, self).__init__(path, name or os.path.basename(path), guid) self.path = path - self.guid = guid # Copy passed lists (or set to empty lists) self.entries = sorted(list(entries or [])) self.items = list(items or []) - self.entry_type_guid = ENTRY_TYPE_GUIDS['folder'] - - def get_guid(self): - if self.guid is None: - # Use consistent guids for folders (so things don't regenerate). - self.guid = MakeGuid(self.path, seed='msvs_folder') - return self.guid - - -#------------------------------------------------------------------------------ + self.entry_type_guid = self.ENTRY_TYPE_GUIDS['folder'] -class MSVSProject(MSVSSolutionEntry): +class MSVSProjectEntry(MSVSSolutionEntry): """Visual Studio project.""" - def __init__(self, path, name = None, dependencies = None, guid = None, - spec = None, build_file = None, config_platform_overrides = None, - fixpath_prefix = None): + guid_seed = 'msvs_new' + + def __init__(self, path, name=None, dependencies=None, guid=None, spec=None, build_file=None, config_platform_overrides=None, fixpath_prefix=None): """Initializes the project. Args: path: Absolute path to the project file. - name: Name of project. If None, the name will be the same as the base - name of the project file. 
- dependencies: List of other Project objects this project is dependent - upon, if not None. - guid: GUID to use for project, if not None. + name: Name of project. If None, the name will be the same as the base name of the project file. + dependencies: List of other Project objects this project is dependent upon, if not None. spec: Dictionary specifying how to build this project. build_file: Filename of the .gyp file that the vcproj file comes from. config_platform_overrides: optional dict of configuration platforms to used in place of the default for this target. fixpath_prefix: the path used to adjust the behavior of _fixpath """ - self.path = path - self.guid = guid + + # Use project filename if name not specified + super(MSVSProjectEntry, self).__init__(path, name or os.path.splitext(os.path.basename(path))[0], guid) self.spec = spec self.build_file = build_file - # Use project filename if name not specified - self.name = name or os.path.splitext(os.path.basename(path))[0] # Copy passed lists (or set to empty lists) self.dependencies = list(dependencies or []) - self.entry_type_guid = ENTRY_TYPE_GUIDS['project'] + self.entry_type_guid = self.ENTRY_TYPE_GUIDS['project'] if config_platform_overrides: self.config_platform_overrides = config_platform_overrides @@ -144,34 +131,17 @@ def __init__(self, path, name = None, dependencies = None, guid = None, def set_dependencies(self, dependencies): self.dependencies = list(dependencies or []) - def get_guid(self): - if self.guid is None: - # Set GUID from path - # TODO(rspangler): This is fragile. - # 1. We can't just use the project filename sans path, since there could - # be multiple projects with the same base name (for example, - # foo/unittest.vcproj and bar/unittest.vcproj). - # 2. The path needs to be relative to $SOURCE_ROOT, so that the project - # GUID is the same whether it's included from base/base.sln or - # foo/bar/baz/baz.sln. - # 3. The GUID needs to be the same each time this builder is invoked, so - # that we don't need to rebuild the solution when the project changes. - # 4. We should be able to handle pre-built project files by reading the - # GUID from the files. - self.guid = MakeGuid(self.name) - return self.guid - def set_msbuild_toolset(self, msbuild_toolset): self.msbuild_toolset = msbuild_toolset + #------------------------------------------------------------------------------ class MSVSSolution(object): """Visual Studio solution.""" - def __init__(self, path, version, entries=None, variants=None, - websiteProperties=True): + def __init__(self, path, version, entries=None, variants=None, websiteProperties=True): """Initializes the solution. Args: @@ -197,14 +167,12 @@ def __init__(self, path, version, entries=None, variants=None, else: # Use default self.variants = ['Debug|Win32', 'Release|Win32'] - # TODO(rspangler): Need to be able to handle a mapping of solution config - # to project config. Should we be able to handle variants being a dict, - # or add a separate variant_map variable? If it's a dict, we can't - # guarantee the order of variants since dict keys aren't ordered. + # TODO(rspangler): Need to be able to handle a mapping of solution config to project config. + # Should we be able to handle variants being a dict, or add a separate variant_map variable? + # If it's a dict, we can't guarantee the order of variants since dict keys aren't ordered. - # TODO(rspangler): Automatically write to disk for now; should delay until - # node-evaluation time. 
+ # TODO(rspangler): Automatically write to disk for now; should delay until node-evaluation time. self.Write() @@ -227,16 +195,17 @@ def Write(self, writer=gyp.common.WriteOnDiff): all_entries.add(e) # If this is a folder, check its entries too. - if isinstance(e, MSVSFolder): + if isinstance(e, MSVSFolderEntry): entries_to_check += e.entries all_entries = sorted(all_entries) # Open file and print header f = writer(self.path) - f.write('Microsoft Visual Studio Solution File, ' - 'Format Version %s\r\n' % self.version.SolutionVersion()) - f.write('# %s\r\n' % self.version.Description()) + f.write( + 'Microsoft Visual Studio Solution File, Format Version %s\n' + '# %s\n' % (self.version.solution_version, self.version.description) + ) # Project entries sln_root = os.path.split(self.path)[0] @@ -245,91 +214,91 @@ def Write(self, writer=gyp.common.WriteOnDiff): # msbuild does not accept an empty folder_name. # use '.' in case relative_path is empty. folder_name = relative_path.replace('/', '\\') or '.' - f.write('Project("%s") = "%s", "%s", "%s"\r\n' % ( + f.write('Project("%s") = "%s", "%s", "%s"\n' % ( e.entry_type_guid, # Entry type GUID e.name, # Folder name folder_name, # Folder name (again) - e.get_guid(), # Entry GUID + e.guid, # Entry GUID )) # TODO(rspangler): Need a way to configure this stuff if self.websiteProperties: - f.write('\tProjectSection(WebsiteProperties) = preProject\r\n' - '\t\tDebug.AspNetCompiler.Debug = "True"\r\n' - '\t\tRelease.AspNetCompiler.Debug = "False"\r\n' - '\tEndProjectSection\r\n') + f.write('\tProjectSection(WebsiteProperties) = preProject\n' + '\t\tDebug.AspNetCompiler.Debug = "True"\n' + '\t\tRelease.AspNetCompiler.Debug = "False"\n' + '\tEndProjectSection\n') - if isinstance(e, MSVSFolder): + if isinstance(e, MSVSFolderEntry): if e.items: - f.write('\tProjectSection(SolutionItems) = preProject\r\n') - for i in e.items: - f.write('\t\t%s = %s\r\n' % (i, i)) - f.write('\tEndProjectSection\r\n') + f.write('\tProjectSection(SolutionItems) = preProject\n') + for i in sorted(e.items): + f.write('\t\t%s = %s\n' % (i, i)) + f.write('\tEndProjectSection\n') - if isinstance(e, MSVSProject): + if isinstance(e, MSVSProjectEntry): if e.dependencies: - f.write('\tProjectSection(ProjectDependencies) = postProject\r\n') - for d in e.dependencies: - f.write('\t\t%s = %s\r\n' % (d.get_guid(), d.get_guid())) - f.write('\tEndProjectSection\r\n') + f.write('\tProjectSection(ProjectDependencies) = postProject\n') + for d in sorted(e.dependencies): + f.write('\t\t%s = %s\n' % (d.guid, d.guid)) + f.write('\tEndProjectSection\n') - f.write('EndProject\r\n') + f.write('EndProject\n') # Global section - f.write('Global\r\n') + f.write('Global\n') # Configurations (variants) - f.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n') - for v in self.variants: - f.write('\t\t%s = %s\r\n' % (v, v)) - f.write('\tEndGlobalSection\r\n') + f.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\n') + for v in sorted(self.variants): + f.write('\t\t%s = %s\n' % (v, v)) + f.write('\tEndGlobalSection\n') # Sort config guids for easier diffing of solution changes. 
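# A sketch of the two solution lines emitted per (project GUID, variant)
# pair a few lines below (hypothetical values; ActiveCfg picks the project
# config for a solution config, Build.0 marks it as built):
g, v = '{01234567-89AB-CDEF-0123-456789ABCDEF}', 'Debug|Win32'
print('\t\t%s.%s.ActiveCfg = %s' % (g, v, v))
print('\t\t%s.%s.Build.0 = %s' % (g, v, v))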
config_guids = [] config_guids_overrides = {} for e in all_entries: - if isinstance(e, MSVSProject): - config_guids.append(e.get_guid()) - config_guids_overrides[e.get_guid()] = e.config_platform_overrides + if isinstance(e, MSVSProjectEntry): + config_guids.append(e.guid) + config_guids_overrides[e.guid] = e.config_platform_overrides config_guids.sort() - f.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n') + f.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\n') for g in config_guids: - for v in self.variants: + for v in sorted(self.variants): nv = config_guids_overrides[g].get(v, v) # Pick which project configuration to build for this solution # configuration. - f.write('\t\t%s.%s.ActiveCfg = %s\r\n' % ( + f.write('\t\t%s.%s.ActiveCfg = %s\n' % ( g, # Project GUID v, # Solution build configuration nv, # Project build config for that solution config )) # Enable project in this solution configuration. - f.write('\t\t%s.%s.Build.0 = %s\r\n' % ( + f.write('\t\t%s.%s.Build.0 = %s\n' % ( g, # Project GUID v, # Solution build configuration nv, # Project build config for that solution config )) - f.write('\tEndGlobalSection\r\n') + f.write('\tEndGlobalSection\n') # TODO(rspangler): Should be able to configure this stuff too (though I've # never seen this be any different) - f.write('\tGlobalSection(SolutionProperties) = preSolution\r\n') - f.write('\t\tHideSolutionNode = FALSE\r\n') - f.write('\tEndGlobalSection\r\n') + f.write('\tGlobalSection(SolutionProperties) = preSolution\n') + f.write('\t\tHideSolutionNode = FALSE\n') + f.write('\tEndGlobalSection\n') # Folder mappings # Omit this section if there are no folders - if any([e.entries for e in all_entries if isinstance(e, MSVSFolder)]): - f.write('\tGlobalSection(NestedProjects) = preSolution\r\n') + if any([e.entries for e in all_entries if isinstance(e, MSVSFolderEntry)]): + f.write('\tGlobalSection(NestedProjects) = preSolution\n') for e in all_entries: - if not isinstance(e, MSVSFolder): + if not isinstance(e, MSVSFolderEntry): continue # Does not apply to projects, only folders for subentry in e.entries: - f.write('\t\t%s = %s\r\n' % (subentry.get_guid(), e.get_guid())) - f.write('\tEndGlobalSection\r\n') + f.write('\t\t%s = %s\n' % (subentry.guid, e.guid)) + f.write('\tEndGlobalSection\n') - f.write('EndGlobal\r\n') + f.write('EndGlobal\n') f.close() diff --git a/gyp/pylib/gyp/MSVSSettings.py b/gyp/gyp/MSVS/MSVSSettings.py similarity index 97% rename from gyp/pylib/gyp/MSVSSettings.py rename to gyp/gyp/MSVS/MSVSSettings.py index 065a339a80..db61bb082b 100644 --- a/gyp/pylib/gyp/MSVSSettings.py +++ b/gyp/gyp/MSVS/MSVSSettings.py @@ -16,11 +16,12 @@ from __future__ import print_function -from gyp import string_types - import sys import re +if 'basestring' not in __builtins__: + basestring = str + # Dictionaries of settings validators. The key is the tool name, the value is # a dictionary mapping setting names to validation functions. 
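# Shape sketch with hypothetical entries (the real tables are populated by
# registration helpers such as _Same and _MSVSOnly later in this module):
sketch_validators = {
  'VCCLCompilerTool': {
    'WarningLevel': lambda value: None,  # a real validator raises ValueError
  },
}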
_msvs_validators = {} @@ -110,11 +111,11 @@ class _String(_Type): """A setting that's just a string.""" def ValidateMSVS(self, value): - if not isinstance(value, string_types): + if not isinstance(value, basestring): raise ValueError('expected string; got %r' % value) def ValidateMSBuild(self, value): - if not isinstance(value, string_types): + if not isinstance(value, basestring): raise ValueError('expected string; got %r' % value) def ConvertToMSBuild(self, value): @@ -126,11 +127,11 @@ class _StringList(_Type): """A settings that's a list of strings.""" def ValidateMSVS(self, value): - if not isinstance(value, string_types) and not isinstance(value, list): + if not isinstance(value, basestring) and not isinstance(value, list): raise ValueError('expected string list; got %r' % value) def ValidateMSBuild(self, value): - if not isinstance(value, string_types) and not isinstance(value, list): + if not isinstance(value, basestring) and not isinstance(value, list): raise ValueError('expected string list; got %r' % value) def ConvertToMSBuild(self, value): @@ -302,6 +303,7 @@ def _MSVSOnly(tool, name, setting_type): setting_type: the type of this setting. """ + # noinspection PyUnusedLocal def _Translate(unused_value, unused_msbuild_settings): # Since this is for MSVS only settings, no translation will happen. pass @@ -421,7 +423,7 @@ def FixVCMacroSlashes(s): def ConvertVCMacrosToMSBuild(s): - """Convert the MSVS macros found in the string to the MSBuild equivalent. + """Convert the the MSVS macros found in the string to the MSBuild equivalent. This list is probably not exhaustive. Add as needed. """ @@ -466,8 +468,9 @@ def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr): try: msvs_tool[msvs_setting](msvs_value, msbuild_settings) except ValueError as e: - print('Warning: while converting %s/%s to MSBuild, ' - '%s' % (msvs_tool_name, msvs_setting, e), file=stderr) + print(('Warning: while converting %s/%s to MSBuild, ' + '%s' % (msvs_tool_name, msvs_setting, e)), + file=stderr) else: _ValidateExclusionSetting(msvs_setting, msvs_tool, @@ -476,8 +479,8 @@ def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr): (msvs_tool_name, msvs_setting)), stderr) else: - print('Warning: unrecognized tool %s while converting to ' - 'MSBuild.' % msvs_tool_name, file=stderr) + print(('Warning: unrecognized tool %s while converting to ' + 'MSBuild.' % msvs_tool_name), file=stderr) return msbuild_settings @@ -522,8 +525,8 @@ def _ValidateSettings(validators, settings, stderr): try: tool_validators[setting](value) except ValueError as e: - print('Warning: for %s/%s, %s' % - (tool_name, setting, e), file=stderr) + print(('Warning: for %s/%s, %s' % + (tool_name, setting, e)), file=stderr) else: _ValidateExclusionSetting(setting, tool_validators, @@ -532,7 +535,7 @@ def _ValidateSettings(validators, settings, stderr): stderr) else: - print('Warning: unrecognized tool %s' % (tool_name), file=stderr) + print(('Warning: unrecognized tool %s' % tool_name), file=stderr) # MSVS and MBuild names of the tools. @@ -543,7 +546,7 @@ def _ValidateSettings(validators, settings, stderr): _lib = _Tool('VCLibrarianTool', 'Lib') _manifest = _Tool('VCManifestTool', 'Manifest') _masm = _Tool('MASM', 'MASM') -_armasm = _Tool('ARMASM', 'ARMASM') +_marmasm = _Tool('MARMASM', 'MARMASM') _AddTool(_compile) @@ -553,7 +556,7 @@ def _ValidateSettings(validators, settings, stderr): _AddTool(_lib) _AddTool(_manifest) _AddTool(_masm) -_AddTool(_armasm) +_AddTool(_marmasm) # Add sections only found in the MSBuild settings. 
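# An illustrative conversion through these tables (hypothetical call; the
# exact value mapping is defined by the enumerations registered here):
#
#   ConvertToMSBuildSettings({'VCCLCompilerTool': {'WarningLevel': '4'}})
#   => {'ClCompile': {'WarningLevel': 'Level4'}}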
_msbuild_validators[''] = {} _msbuild_validators['ProjectReference'] = {} @@ -598,6 +601,7 @@ def _ValidateSettings(validators, settings, stderr): _Same(_compile, 'UseFullPaths', _boolean) # /FC _Same(_compile, 'WholeProgramOptimization', _boolean) # /GL _Same(_compile, 'XMLDocumentationFileName', _file_name) +_Same(_compile, 'CompileAsWinRT', _boolean) # /ZW _Same(_compile, 'AssemblerOutput', _Enumeration(['NoListing', @@ -977,7 +981,7 @@ def _ValidateSettings(validators, settings, stderr): 'Itanium', # /env ia64 'X64', # /env x64 'ARM64', # /env arm64 - ])) + ])) _Same(_midl, 'EnableErrorChecks', _Enumeration(['EnableCustom', 'None', # /error none diff --git a/gyp/pylib/gyp/MSVSToolFile.py b/gyp/gyp/MSVS/MSVSToolFile.py similarity index 82% rename from gyp/pylib/gyp/MSVSToolFile.py rename to gyp/gyp/MSVS/MSVSToolFile.py index 74e529a17f..12cc29e81d 100644 --- a/gyp/pylib/gyp/MSVSToolFile.py +++ b/gyp/gyp/MSVS/MSVSToolFile.py @@ -4,7 +4,6 @@ """Visual Studio project reader/writer.""" -import gyp.common import gyp.easy_xml as easy_xml @@ -48,11 +47,9 @@ def AddCustomBuildRule(self, name, cmd, description, def WriteIfChanged(self): """Writes the tool file.""" - content = ['VisualStudioToolFile', - {'Version': '8.00', - 'Name': self.name - }, - self.rules_section - ] - easy_xml.WriteXmlIfChanged(content, self.tool_file_path, - encoding="Windows-1252") + content = [ + 'VisualStudioToolFile', + {'Version': '8.00', 'Name': self.name}, + self.rules_section + ] + easy_xml.WriteXmlIfChanged(content, self.tool_file_path, encoding="Windows-1252") diff --git a/gyp/gyp/MSVS/MSVSVersion.py b/gyp/gyp/MSVS/MSVSVersion.py new file mode 100644 index 0000000000..79e6c3dffd --- /dev/null +++ b/gyp/gyp/MSVS/MSVSVersion.py @@ -0,0 +1,365 @@ +# Copyright (c) 2013 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Handle version information related to Visual Stdio.""" + +import os +import glob +from gyp.MSVS import TryQueryRegistryValue + +msvs_version_map = { + 'auto': ('16.0', '15.0', '14.0', '12.0', '10.0', '9.0', '8.0', '11.0'), + '2005': ('8.0',), + '2005e': ('8.0',), + '2008': ('9.0',), + '2008e': ('9.0',), + '2010': ('10.0',), + '2010e': ('10.0',), + '2012': ('11.0',), + '2012e': ('11.0',), + '2013': ('12.0',), + '2013e': ('12.0',), + '2015': ('14.0',), + '2017': ('15.0',), + '2019': ('16.0',), +} +version_to_year = { + '8.0': '2005', + '9.0': '2008', + '10.0': '2010', + '11.0': '2012', + '12.0': '2013', + '14.0': '2015', + '15.0': '2017', + '16.0': '2019', +} + +def _JoinPath(*args): + return os.path.normpath(os.path.join(*args)) + + +class VisualStudioVersion(object): + """Information regarding a version of Visual Studio.""" + + def __init__(self, + short_name, description, + solution_version, project_version, + flat_sln=False, uses_vcxproj=True, + default_toolset=None, compatible_sdks=None): + self.short_name = short_name + self.description = description + self.solution_version = solution_version + self.project_version = project_version + self.flat_sln = flat_sln + self.uses_vcxproj = uses_vcxproj + self.default_toolset = default_toolset + self.compatible_sdks = compatible_sdks + self.path = '' + self.sdk_based = False + + def ProjectExtension(self): + """Returns the file extension for the project.""" + return self.uses_vcxproj and '.vcxproj' or '.vcproj' + + def ToolPath(self, tool): + """Returns the path to a given compiler tool. 
""" + return os.path.normpath(os.path.join(self.path, "VC", "bin", tool)) + + def DefaultToolset(self): + """Returns the msbuild toolset version that will be used in the absence + of a user override.""" + return self.default_toolset + + def _SetupScriptInternal(self, target_arch): + """Returns a command (with arguments) to be used to set up the + environment.""" + # If WindowsSDKDir is set and SetEnv.Cmd exists then we are using the + # depot_tools build tools and should run SetEnv.Cmd to set up the + # environment. The check for WindowsSDKDir alone is not sufficient because + # this is set by running vcvarsall.bat. + sdk_dir = os.environ.get('WindowsSDKDir', '') + setup_path = _JoinPath(sdk_dir, 'Bin', 'SetEnv.Cmd') + if self.sdk_based and sdk_dir and os.path.exists(setup_path): + return [setup_path, '/' + target_arch] + + is_host_arch_x64 = ( + os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or + os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64' + ) + + # For VS2017 (and newer) it's fairly easy + if self.short_name >= '2017': + script_path = _JoinPath(self.path, 'VC', 'Auxiliary', 'Build', 'vcvarsall.bat') + + # Always use a native executable, cross-compiling if necessary. + host_arch = 'amd64' if is_host_arch_x64 else 'x86' + msvc_target_arch = 'amd64' if target_arch == 'x64' else 'x86' + arg = host_arch + if host_arch != msvc_target_arch: + arg += '_' + msvc_target_arch + + return [script_path, arg] + + # We try to find the best version of the env setup batch. + vcvarsall = _JoinPath(self.path, 'VC', 'vcvarsall.bat') + if target_arch == 'x86': + if self.short_name >= '2013' and self.short_name[-1] != 'e' and is_host_arch_x64: + # VS2013 and later, non-Express have a x64-x86 cross that we want + # to prefer. + return [vcvarsall, 'amd64_x86'] + else: + # Otherwise, the standard x86 compiler. We don't use VC/vcvarsall.bat + # for x86 because vcvarsall calls vcvars32, which it can only find if + # VS??COMNTOOLS is set, which isn't guaranteed. + return [_JoinPath(self.path, 'Common7', 'Tools', 'vsvars32.bat')] + elif target_arch == 'x64': + arg = 'x86_amd64' + # Use the 64-on-64 compiler if we're not using an express edition and + # we're running on a 64bit OS. + if self.short_name[-1] != 'e' and is_host_arch_x64: + arg = 'amd64' + return [vcvarsall, arg] + + def SetupScript(self, target_arch): + script_data = self._SetupScriptInternal(target_arch) + script_path = script_data[0] + if not os.path.exists(script_path): + raise Exception('%s is missing - make sure VC++ tools are installed.' 
% script_path) + return script_data + + +MSVS_VERSIONS = { + '2019': + VisualStudioVersion( + short_name='2019', + description='Visual Studio 2019', + solution_version='12.00', + project_version='15.0', + default_toolset='v142', + compatible_sdks='8.1,10.0' + ), + '2017': + VisualStudioVersion( + short_name='2017', + description='Visual Studio 2017', + solution_version='12.00', + project_version='15.0', + default_toolset='v141', + compatible_sdks='8.1,10.0' + ), + '2015': + VisualStudioVersion( + short_name='2015', + description='Visual Studio 2015', + solution_version='12.00', + project_version='14.0', + default_toolset='v140' + ), + '2013': + VisualStudioVersion( + short_name='2013', + description='Visual Studio 2013', + solution_version='13.00', + project_version='12.0', + default_toolset='v120' + ), + '2013e': + VisualStudioVersion( + short_name='2013e', + description='Visual Studio 2013', + solution_version='13.00', + project_version='12.0', + default_toolset='v120' + ), + '2012': + VisualStudioVersion( + short_name='2012', + description='Visual Studio 2012', + solution_version='12.00', + project_version='4.0', + default_toolset='v110' + ), + '2012e': + VisualStudioVersion( + short_name='2012e', + description='Visual Studio 2012', + solution_version='12.00', + project_version='4.0', + flat_sln=True, + default_toolset='v110' + ), + '2010': + VisualStudioVersion( + short_name='2010', + description='Visual Studio 2010', + solution_version='11.00', + project_version='4.0', + ), + '2010e': + VisualStudioVersion( + short_name='2010e', + description='Visual C++ Express 2010', + solution_version='11.00', + project_version='4.0', + flat_sln=True, + ), + '2008': + VisualStudioVersion( + short_name='2008', + description='Visual Studio 2008', + solution_version='10.00', + project_version='9.00', + uses_vcxproj=False, + ), + '2008e': + VisualStudioVersion( + short_name='2008e', + description='Visual Studio 2008', + solution_version='10.00', + project_version='9.00', + flat_sln=True, + uses_vcxproj=False, + ), + '2005': + VisualStudioVersion( + short_name='2005', + description='Visual Studio 2005', + solution_version='9.00', + project_version='8.00', + flat_sln=False, + uses_vcxproj=False, + ), + '2005e': + VisualStudioVersion( + short_name='2005e', + description='Visual Studio 2005', + solution_version='9.00', + project_version='8.00', + flat_sln=True, + uses_vcxproj=False, + ), +} + +def _CreateVersion(name, path, sdk_based=False): + """ + Sets up MSVS project generation. + + Setup is based off the GYP_MSVS_VERSION environment variable or whatever is auto-detected if GYP_MSVS_VERSION + is not explicitly specified. If a version is passed in that doesn't match a value in versions python will throw a error. + """ + if path: + path = os.path.normpath(path) + version = MSVS_VERSIONS[str(name)] + version.path = path + version.sdk_based = sdk_based + return version + + +def _DetectVisualStudioVersion(wanted_version, force_express): + for version in msvs_version_map[wanted_version]: + # Old method of searching for which VS version is installed + # We don't use the 2010-encouraged-way because we also want to get the + # path to the binaries, which it doesn't offer. 
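# Each candidate key below is probed the same way (hypothetical key;
# TryQueryRegistryValue returns the value string, or None when the key or
# value is absent):
#
#   TryQueryRegistryValue(r'Software\Microsoft\VisualStudio\14.0', 'InstallDir')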
+ keys = [ + r'Software\Microsoft\VisualStudio\%s' % version, + r'Software\Wow6432Node\Microsoft\VisualStudio\%s' % version, + r'Software\Microsoft\VCExpress\%s' % version, + r'Software\Wow6432Node\Microsoft\VCExpress\%s' % version + ] + for key in keys: + path = TryQueryRegistryValue(key, 'InstallDir') + if not path: + continue + # Check for full. + full_path = os.path.join(path, 'devenv.exe') + express_path = os.path.join(path, '*express.exe') + vc_root_path = os.path.join(path, '..', '..') + if not force_express and os.path.exists(full_path): + # Add this one. + return _CreateVersion(version_to_year[version], vc_root_path) + # Check for express. + elif glob.glob(express_path): + # Add this one. + return _CreateVersion(version_to_year[version] + 'e', vc_root_path) + + # The old method above does not work when only SDK is installed. + keys2 = [ + r'Software\Microsoft\VisualStudio\SxS\VC7', + r'Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7', + r'Software\Microsoft\VisualStudio\SxS\VS7', + r'Software\Wow6432Node\Microsoft\VisualStudio\SxS\VS7' + ] + for key in keys2: + path = TryQueryRegistryValue(key, version) + if not path: + continue + if version == '15.0': + if os.path.exists(path): + return _CreateVersion('2017', path) + elif version == '16.0': + if os.path.exists(path): + return _CreateVersion('2019', path) + elif version != '14.0': # There is no Express edition for 2015. + return _CreateVersion(version_to_year[version] + 'e', os.path.join(path, '..'), sdk_based=True) + + return None + + +def SelectVisualStudioVersion(wanted_version='auto'): + """Select which version of Visual Studio projects to generate. + + Arguments: + wanted_version: Hook to allow caller to force a particular version (vs auto). + Returns: + An object representing a visual studio project format version. + """ + wanted_version = str(wanted_version) + + override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH') + gyp_env_version = os.environ.get('GYP_MSVS_VERSION', wanted_version) + if override_path: + if gyp_env_version == 'auto': + raise ValueError('GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be set to a particular version.') + return _CreateVersion(gyp_env_version, override_path, sdk_based=True) + + # In auto mode, check environment variable for override. + if wanted_version == 'auto': + wanted_version = version_to_year.get(os.environ.get('VisualStudioVersion'), gyp_env_version) + version = _DetectVisualStudioVersion(wanted_version, 'e' in wanted_version) + if version: + return version + if wanted_version != 'auto': + # Even if we did not actually detect a version, we fake it + return _CreateVersion(wanted_version, None) + raise ValueError('Could not locate Visual Studio installation.') + + +def WindowsTargetPlatformVersion(possible_sdk_versions): + # If the environment is set, ignore the possible version hint + env_sdk_version = os.environ.get('WindowsSDKVersion', '') + if env_sdk_version: + return env_sdk_version.replace('\\', '') + + if not possible_sdk_versions: + return None + + versions_args = possible_sdk_versions.split(',') + key_template = r'Software\%sMicrosoft\Microsoft SDKs\Windows\%s' + keys = [(key_template % (sub, ver)) for ver in versions_args for sub in ['', 'Wow6432Node\\',]] + for key in keys: + sdk_dir = TryQueryRegistryValue(key, 'InstallationFolder') + if not sdk_dir: + continue + # Find a matching entry in sdk_dir\include. 
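# E.g. (hypothetical directory listing): a ProductVersion of '10.0.17763'
# matched against include entries ['10.0.16299.0', '10.0.17763.0'] selects
# '10.0.17763.0', the newest name sharing the registered prefix:
names = sorted(x for x in ['10.0.16299.0', '10.0.17763.0']
               if x.startswith('10.0.17763'))
assert names[-1] == '10.0.17763.0'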
+ product_version = TryQueryRegistryValue(key, 'ProductVersion') + sdk_include_dir = os.path.join(sdk_dir, 'include') + if not os.path.isdir(sdk_include_dir): + continue + names = sorted(x for x in os.listdir(sdk_include_dir) if x.startswith(product_version)) + if names: + return names[-1] + + return None + + diff --git a/gyp/gyp/MSVS/__init__.py b/gyp/gyp/MSVS/__init__.py new file mode 100644 index 0000000000..5dce120835 --- /dev/null +++ b/gyp/gyp/MSVS/__init__.py @@ -0,0 +1,507 @@ +from __future__ import print_function + +import copy +import json +import os +import subprocess +import sys +import traceback + +from gyp.common import memoize + +try: + WindowsError +except NameError: + # noinspection PyShadowingBuiltins + WindowsError = OSError + +try: + import winreg +except ImportError: + try: + import _winreg as winreg + except ImportError: + # Just a mock class to silence static analysers. + class winreg(object): + HKEY_LOCAL_MACHINE = '' + @staticmethod + def OpenKey(root, key): + raise NotImplementedError() + @staticmethod + def QueryValueEx(key, value): + raise NotImplementedError() + +class Tool(object): + """Visual Studio tool.""" + + def __init__(self, name, attrs=None): + """Initializes the tool. + + Args: + name: Tool name. + attrs: Dict of tool attributes; may be None. + """ + self._attrs = attrs or {} + self._attrs['Name'] = name + + def _GetSpecification(self): + """Creates an element for the tool. + + Returns: + A new xml.dom.Element for the tool. + """ + return ['Tool', self._attrs] + + +class Filter(object): + """Visual Studio filter - that is, a virtual folder.""" + + def __init__(self, name, contents=None): + """Initializes the folder. + + Args: + name: Filter (folder) name. + contents: List of filenames and/or Filter objects contained. + """ + self.name = name + self.contents = list(contents or []) + + +# A dictionary mapping supported target types to extensions. +TARGET_TYPE_EXT = { + 'executable': 'exe', + 'loadable_module': 'dll', + 'shared_library': 'dll', + 'static_library': 'lib', + 'windows_driver': 'sys', +} + + +def _GetLargePdbShimCcPath(): + """Returns the path of the large_pdb_shim.cc file.""" + this_dir = os.path.dirname(__file__) + lib_dir = os.path.join(this_dir, '..') + large_pdb_shim_cc_rel = os.path.join(lib_dir, 'buildtime_helpers', 'large-pdb-shim.cc') + large_pdb_shim_cc = os.path.abspath(large_pdb_shim_cc_rel) + return large_pdb_shim_cc + + +def _DeepCopySomeKeys(in_dict, keys): + """Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|. + + Arguments: + in_dict: The dictionary to copy. + keys: The keys to be copied. If a key is in this list and doesn't exist in + |in_dict| this is not an error. + Returns: + The partially deep-copied dictionary. + """ + d = {} + for key in keys: + if key not in in_dict: + continue + d[key] = copy.deepcopy(in_dict[key]) + return d + + +def _SuffixName(name, suffix): + """Add a suffix to the end of a target. + + Arguments: + name: name of the target (foo#target) + suffix: the suffix to be added + Returns: + Target name with suffix added (foo_suffix#target) + """ + parts = name.rsplit('#', 1) + parts[0] = '%s_%s' % (parts[0], suffix) + return '#'.join(parts) + + +def _ShardName(name, number): + """Add a shard number to the end of a target. 
+ + Arguments: + name: name of the target (foo#target) + number: shard number + Returns: + Target name with shard added (foo_1#target) + """ + return _SuffixName(name, str(number)) + + +def ShardTargets(target_list, target_dicts): + """Shard some targets apart to work around the linkers limits. + + Arguments: + target_list: List of target pairs: 'base/base.gyp:base'. + target_dicts: Dict of target properties keyed on target pair. + Returns: + Tuple of the new sharded versions of the inputs. + """ + # Gather the targets to shard, and how many pieces. + targets_to_shard = {} + for t in target_dicts: + shards = int(target_dicts[t].get('msvs_shard', 0)) + if shards: + targets_to_shard[t] = shards + # Shard target_list. + new_target_list = [] + for t in target_list: + if t in targets_to_shard: + for i in range(targets_to_shard[t]): + new_target_list.append(_ShardName(t, i)) + else: + new_target_list.append(t) + # Shard target_dict. + new_target_dicts = {} + for t in target_dicts: + if t in targets_to_shard: + for i in range(targets_to_shard[t]): + name = _ShardName(t, i) + new_target_dicts[name] = copy.copy(target_dicts[t]) + new_target_dicts[name]['target_name'] = _ShardName( + new_target_dicts[name]['target_name'], i) + sources = new_target_dicts[name].get('sources', []) + new_sources = [] + for pos in range(i, len(sources), targets_to_shard[t]): + new_sources.append(sources[pos]) + new_target_dicts[name]['sources'] = new_sources + else: + new_target_dicts[t] = target_dicts[t] + # Shard dependencies. + for t in sorted(new_target_dicts): + for dep_type in ('dependencies', 'dependencies_original'): + dependencies = copy.copy(new_target_dicts[t].get(dep_type, [])) + new_dependencies = [] + for d in dependencies: + if d in targets_to_shard: + for i in range(targets_to_shard[d]): + new_dependencies.append(_ShardName(d, i)) + else: + new_dependencies.append(d) + new_target_dicts[t][dep_type] = new_dependencies + + return new_target_list, new_target_dicts + + +def _GetPdbPath(target_dict, config_name, gyp_vars): + """Returns the path to the PDB file that will be generated by a given + configuration. + + The lookup proceeds as follows: + - Look for an explicit path in the VCLinkerTool configuration block. + - Look for an 'msvs_large_pdb_path' variable. + - Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is + specified. + - Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'. + + Arguments: + target_dict: The target dictionary to be searched. + config_name: The name of the configuration of interest. + gyp_vars: A dictionary of common GYP variables with generator-specific values. + Returns: + The path of the corresponding PDB file. + """ + config = target_dict['configurations'][config_name] + msvs = config.setdefault('msvs_settings', {}) + + linker = msvs.get('VCLinkerTool', {}) + + pdb_path = linker.get('ProgramDatabaseFile') + if pdb_path: + return pdb_path + + variables = target_dict.get('variables', {}) + pdb_path = variables.get('msvs_large_pdb_path', None) + if pdb_path: + return pdb_path + + + pdb_base = target_dict.get('product_name', target_dict['target_name']) + pdb_base = '%s.%s.pdb' % (pdb_base, TARGET_TYPE_EXT[target_dict['type']]) + pdb_path = gyp_vars['PRODUCT_DIR'] + '/' + pdb_base + + return pdb_path + + +def InsertLargePdbShims(target_list, target_dicts, gyp_vars): + """Insert a shim target that forces the linker to use 4KB pagesize PDBs. 
+ + This is a workaround for targets with PDBs greater than 1GB in size, the + limit for the 1KB pagesize PDBs created by the linker by default. + + Arguments: + target_list: List of target pairs: 'base/base.gyp:base'. + target_dicts: Dict of target properties keyed on target pair. + gyp_vars: A dictionary of common GYP variables with generator-specific values. + Returns: + Tuple of the shimmed version of the inputs. + """ + # Determine which targets need shimming. + targets_to_shim = [] + for t in target_dicts: + target_dict = target_dicts[t] + + # We only want to shim targets that have msvs_large_pdb enabled. + if not int(target_dict.get('msvs_large_pdb', 0)): + continue + # This is intended for executable, shared_library and loadable_module + # targets where every configuration is set up to produce a PDB output. + # If any of these conditions is not true then the shim logic will fail + # below. + targets_to_shim.append(t) + + large_pdb_shim_cc = _GetLargePdbShimCcPath() + + for t in targets_to_shim: + target_dict = target_dicts[t] + target_name = target_dict.get('target_name') + + base_dict = _DeepCopySomeKeys(target_dict, + ['configurations', 'default_configuration', 'toolset']) + + # This is the dict for copying the source file (part of the GYP tree) + # to the intermediate directory of the project. This is necessary because + # we can't always build a relative path to the shim source file (on Windows + # GYP and the project may be on different drives), and Ninja hates absolute + # paths (it ends up generating the .obj and .obj.d alongside the source + # file, polluting GYPs tree). + copy_suffix = 'large_pdb_copy' + copy_target_name = target_name + '_' + copy_suffix + full_copy_target_name = _SuffixName(t, copy_suffix) + shim_cc_basename = os.path.basename(large_pdb_shim_cc) + shim_cc_dir = gyp_vars['SHARED_INTERMEDIATE_DIR'] + '/' + copy_target_name + shim_cc_path = shim_cc_dir + '/' + shim_cc_basename + copy_dict = copy.deepcopy(base_dict) + copy_dict['target_name'] = copy_target_name + copy_dict['type'] = 'none' + copy_dict['sources'] = [ large_pdb_shim_cc ] + copy_dict['copies'] = [{ + 'destination': shim_cc_dir, + 'files': [ large_pdb_shim_cc ] + }] + + # This is the dict for the PDB generating shim target. It depends on the + # copy target. + shim_suffix = 'large_pdb_shim' + shim_target_name = target_name + '_' + shim_suffix + full_shim_target_name = _SuffixName(t, shim_suffix) + shim_dict = copy.deepcopy(base_dict) + shim_dict['target_name'] = shim_target_name + shim_dict['type'] = 'static_library' + shim_dict['sources'] = [ shim_cc_path ] + shim_dict['dependencies'] = [ full_copy_target_name ] + + # Set up the shim to output its PDB to the same location as the final linker + # target. + for config_name, config in shim_dict.get('configurations').items(): + pdb_path = _GetPdbPath(target_dict, config_name, gyp_vars) + + # A few keys that we don't want to propagate. + for key in ['msvs_precompiled_header', 'msvs_precompiled_source', 'test']: + config.pop(key, None) + + msvs = config.setdefault('msvs_settings', {}) + + # Update the compiler directives in the shim target. + compiler = msvs.setdefault('VCCLCompilerTool', {}) + compiler['DebugInformationFormat'] = '3' + compiler['ProgramDataBaseFileName'] = pdb_path + + # Set the explicit PDB path in the appropriate configuration of the + # original target. 
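# How a target opts in to this shim (hypothetical .gyp fragment; gyp files
# are Python-literal dicts, and msvs_large_pdb is the switch read above):
#
#   {
#     'target_name': 'big_binary',
#     'type': 'executable',
#     'msvs_large_pdb': 1,
#     'sources': ['main.cc'],
#   }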
+ config = target_dict['configurations'][config_name] + msvs = config.setdefault('msvs_settings', {}) + linker = msvs.setdefault('VCLinkerTool', {}) + linker['GenerateDebugInformation'] = 'true' + linker['ProgramDatabaseFile'] = pdb_path + + # Add the new targets. They must go to the beginning of the list so that + # the dependency generation works as expected in ninja. + target_list.insert(0, full_copy_target_name) + target_list.insert(0, full_shim_target_name) + target_dicts[full_copy_target_name] = copy_dict + target_dicts[full_shim_target_name] = shim_dict + + # Update the original target to depend on the shim target. + target_dict.setdefault('dependencies', []).append(full_shim_target_name) + + return target_list, target_dicts + + +def TryQueryRegistryValue(key, value=None, root=winreg.HKEY_LOCAL_MACHINE): + try: + with winreg.OpenKey(root, key) as kh: + value, value_type = winreg.QueryValueEx(kh, value) + return value + except WindowsError: + return None + except NotImplementedError: + return None + + +def FindVisualStudioInstallation(): + """ + Returns appropriate values for .build_tool and .uses_msbuild fields + of TestGypBase for Visual Studio. + + We use the value specified by GYP_MSVS_VERSION. If not specified, we + search for likely deployment paths. + """ + msvs_version = 'auto' + for flag in (f for f in sys.argv if f.startswith('msvs_version=')): + msvs_version = flag.split('=')[-1] + msvs_version = os.environ.get('GYP_MSVS_VERSION', msvs_version) + + override_build_tool = os.environ.get('GYP_BUILD_TOOL') + if override_build_tool: + return override_build_tool, True, override_build_tool, msvs_version + + if msvs_version == 'auto' or msvs_version >= '2017': + msbuild_exes = [] + top_vs_info = VSSetup_PowerShell() + if top_vs_info: + inst_path = top_vs_info['InstallationPath'] + args2 = ['cmd.exe', '/d', '/c', + 'cd', '/d', inst_path, + '&', 'dir', '/b', '/s', 'msbuild.exe'] + msbuild_exes = subprocess.check_output(args2).strip().splitlines() + if len(msbuild_exes): + msbuild_path = str(msbuild_exes[0].decode('utf-8')) + os.environ['GYP_MSVS_VERSION'] = top_vs_info['CatalogVersion'] + os.environ['GYP_BUILD_TOOL'] = msbuild_path + return msbuild_path, True, msbuild_path, msvs_version + + possible_roots = ['%s:\\Program Files%s' % (chr(drive), suffix) + for drive in range(ord('C'), ord('Z') + 1) + for suffix in ['', ' (x86)']] + possible_paths = { + '2015': r'Microsoft Visual Studio 14.0\Common7\IDE\devenv.com', + '2013': r'Microsoft Visual Studio 12.0\Common7\IDE\devenv.com', + '2012': r'Microsoft Visual Studio 11.0\Common7\IDE\devenv.com', + '2010': r'Microsoft Visual Studio 10.0\Common7\IDE\devenv.com', + '2008': r'Microsoft Visual Studio 9.0\Common7\IDE\devenv.com', + '2005': r'Microsoft Visual Studio 8\Common7\IDE\devenv.com' + } + + # Check that the path to the specified GYP_MSVS_VERSION exists. + if msvs_version in possible_paths: + path = possible_paths[msvs_version] + for r in possible_roots: + build_tool = os.path.join(r, path) + if os.path.exists(build_tool): + uses_msbuild = msvs_version >= '2010' + msbuild_path = FindMSBuildInstallation(msvs_version) + return build_tool, uses_msbuild, msbuild_path, msvs_version + else: + print('Warning: Environment variable GYP_MSVS_VERSION specifies "%s" ' + 'but corresponding "%s" was not found.' % (msvs_version, path)) + print('Error: could not find MSVS version %s' % msvs_version) + sys.exit(1) + + +def FindMSBuildInstallation(msvs_version = 'auto'): + """Returns path to MSBuild for msvs_version or latest available. 
+
+  Looks in the registry to find install location of MSBuild.
+  MSBuild before v4.0 will not build C++ projects, so only use newer versions.
+  """
+
+  msvs_to_msbuild = {
+    '2015': '14.0',
+    '2013': '12.0',
+    '2012': '4.0',  # Really v4.0.30319 which comes with .NET 4.5.
+    '2010': '4.0'
+  }
+
+  msbuild_base_key = r'SOFTWARE\Microsoft\MSBuild\ToolsVersions'
+  if not TryQueryRegistryValue(msbuild_base_key):
+    print('Error: could not find MSBuild base registry entry')
+    return None
+
+  msbuild_key = ''
+  found_msbuild_ver = ''
+  if msvs_version in msvs_to_msbuild:
+    msbuild_test_version = msvs_to_msbuild[msvs_version]
+    msbuild_key = msbuild_base_key + '\\' + msbuild_test_version
+    if TryQueryRegistryValue(msbuild_key):
+      found_msbuild_ver = msbuild_test_version
+    else:
+      print('Warning: Environment variable GYP_MSVS_VERSION specifies "%s" but corresponding MSBuild "%s" was not found.' % (msvs_version, msbuild_test_version))
+  if not found_msbuild_ver:
+    for msvs_version in sorted(msvs_to_msbuild.keys(), reverse=True):
+      msbuild_test_version = msvs_to_msbuild[msvs_version]
+      msbuild_key = msbuild_base_key + '\\' + msbuild_test_version
+      if TryQueryRegistryValue(msbuild_key):
+        found_msbuild_ver = msbuild_test_version
+        break
+  if not found_msbuild_ver:
+    print('Error: could not find an MSBuild registry entry')
+    return None
+
+  msbuild_path = TryQueryRegistryValue(msbuild_key, 'MSBuildToolsPath')
+  if not msbuild_path:
+    print('Error: could not get MSBuildToolsPath registry entry value for MSBuild version %s' % found_msbuild_ver)
+    return None
+
+  return os.path.join(msbuild_path, 'MSBuild.exe')
+
+
+@memoize
+def VSWhere(component=None):
+  args1 = [
+    r'C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe',
+    '-all', '-latest', '-sort', '-format', 'json',
+  ]
+  if not component:
+    args1.append('-legacy')
+
+  try:
+    vswhere_json = subprocess.check_output(args1)
+    vswhere_infos = json.loads(vswhere_json)
+    if len(vswhere_infos) == 0:
+      raise IOError("vswhere did not find any MSVS instances.")
+    return vswhere_infos[0]
+  except subprocess.CalledProcessError as e:
+    traceback.print_exc(file=sys.stderr)
+    print(e, file=sys.stderr)
+    return None
+
+
+new_vs_map = {
+  16: '2019',
+  15: '2017',
+  14: '2015',
+}
+
+
+@memoize
+def VSSetup_PowerShell():
+  powershell = os.path.join(os.environ['SystemRoot'], 'System32', 'WindowsPowerShell', 'v1.0', 'powershell.exe')
+  query_script_path = os.path.join(os.path.dirname(__file__), '..', '..', 'tools', 'vssetup.powershell', 'VSQuery.ps1')
+  args = [
+    powershell, '-ExecutionPolicy', 'Unrestricted', '-NoProfile',
+    query_script_path
+  ]
+  try:
+    vs_query_json = subprocess.check_output(args)
+  except subprocess.CalledProcessError as e:
+    print(e, file=sys.stderr)
+    print(e.stderr, file=sys.stderr)
+    raise e
+  try:
+    vs_query_infos = json.loads(vs_query_json)
+  except json.decoder.JSONDecodeError as e:
+    print(e, file=sys.stderr)
+    print(vs_query_json, file=sys.stderr)
+    raise e
+  assert vs_query_infos
+  if isinstance(vs_query_infos, dict):
+    vs_query_infos = [vs_query_infos]
+  if len(vs_query_infos) == 0:
+    raise IOError("vssetup.powershell did not find any MSVS instances.")
+  for ver in vs_query_infos:
+    ver['CatalogVersion'] = new_vs_map[ver['InstallationVersion']['Major']]
+  return vs_query_infos[0]
+
diff --git a/gyp/gyp/Makefile.tmpl b/gyp/gyp/Makefile.tmpl
new file mode 100644
index 0000000000..938948ebaf
--- /dev/null
+++ b/gyp/gyp/Makefile.tmpl
@@ -0,0 +1,223 @@
+# We borrow heavily from the kernel build setup, though we are simpler since
+# we don't have Kconfig tweaking settings on us. + +# The implicit make rules have it looking for RCS files, among other things. +# We instead explicitly write all the rules we care about. +# It's even quicker (saves ~200ms) to pass -r on the command line. +MAKEFLAGS=-r + +# The source directory tree. +srcdir := %(srcdir)s +abs_srcdir := $(abspath $(srcdir)) + +# The name of the builddir. +builddir_name ?= %(builddir)s + +# The V=1 flag on command line makes us verbosely print command lines. +ifdef V + quiet= +else + quiet=quiet_ +endif + +# Specify BUILDTYPE=Release on the command line for a release build. +BUILDTYPE ?= %(default_configuration)s + +# Directory all our build output goes into. +# Note that this must be two directories beneath src/ for unit tests to pass, +# as they reach into the src/ directory for data with relative paths. +builddir ?= $(builddir_name)/$(BUILDTYPE) +abs_builddir := $(abspath $(builddir)) +depsdir := $(builddir)/.deps + +# Object output directory. +obj := $(builddir)/obj +abs_obj := $(abspath $(obj)) + +# We build up a list of every single one of the targets so we can slurp in the +# generated dependency rule Makefiles in one pass. +all_deps := + +%(make_global_settings)s + +CC.target ?= %(CC.target)s +CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) +CXX.target ?= %(CXX.target)s +CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) +LINK.target ?= %(LINK.target)s +LDFLAGS.target ?= $(LDFLAGS) +AR.target ?= $(AR) + +# C++ apps need to be linked with g++. +LINK ?= $(CXX.target) + +# TODO(evan): move all cross-compilation logic to gyp-time so we don't need +# to replicate this environment fallback in make as well. +CC.host ?= %(CC.host)s +CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) +CXX.host ?= %(CXX.host)s +CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) +LINK.host ?= %(LINK.host)s +LDFLAGS.host ?= $(LDFLAGS_host) +AR.host ?= %(AR.host)s + +# Define a dir function that can handle spaces. +# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions +# "leading spaces cannot appear in the text of the first argument as written. +# These characters can be put into the argument value by variable substitution." +empty := +space := $(empty) $(empty) + +# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces +replace_spaces = $(subst $(space),{SPACE_REPLACEMENT},$1) +unreplace_spaces = $(subst {SPACE_REPLACEMENT},$(space),$1) +dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) + +# Flags to make gcc output dependency info. Note that you need to be +# careful here to use the flags that ccache and distcc can understand. +# We write to a dep file on the side first and then rename at the end +# so we can't end up with a broken dep file. +depfile = $(depsdir)/$(call replace_spaces,$@).d +DEPFLAGS = %(makedep_args)s -MF $(depfile).raw + +# We have to fixup the deps output in a few ways. +# (1) the file output should mention the proper .o file. +# ccache or distcc lose the path to the target, so we convert a rule of +# the form: +# foobar.o: DEP1 DEP2 +# into +# path/to/foobar.o: DEP1 DEP2 +# (2) we want missing files not to cause us to fail to build. +# We want to rewrite +# foobar.o: DEP1 DEP2 \ +# DEP3 +# to +# DEP1: +# DEP2: +# DEP3: +# so if the files are missing, they're just considered phony rules. +# We have to do some pretty insane escaping to get those backslashes +# and dollar signs past make, the shell, and sed at the same time. 
+# Doesn't work with spaces, but that's fine: .d files have spaces in
+# their names replaced with other characters.
+define fixup_dep
+# The depfile may not exist if the input file didn't have any #includes.
+touch $(depfile).raw
+# Fixup path as in (1).
+sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
+# Add extra rules as in (2).
+# We remove slashes and replace spaces with new lines;
+# remove blank lines;
+# delete the first line and append a colon to the remaining lines.
+sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
+  grep -v '^$$' |\
+  sed -e 1d -e 's|$$|:|' \
+    >> $(depfile)
+rm $(depfile).raw
+endef
+# Command definitions:
+# - cmd_foo is the actual command to run;
+# - quiet_cmd_foo is the brief-output summary of the command.
+
+quiet_cmd_cc = CC($(TOOLSET)) $@
+cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
+
+quiet_cmd_cxx = CXX($(TOOLSET)) $@
+cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
+%(extra_commands)s
+quiet_cmd_touch = TOUCH $@
+cmd_touch = touch $@
+
+quiet_cmd_copy = COPY $@
+# send stderr to /dev/null to ignore messages when linking directories.
+cmd_copy = ln -f "$<" "$@" 2>/dev/null || (rm -rf "$@" && cp %(copy_archive_args)s "$<" "$@")
+
+%(link_commands)s
+# Define an escape_quotes function to escape single quotes.
+# This allows us to handle quotes properly as long as we always use
+# single quotes and escape_quotes.
+escape_quotes = $(subst ','\'',$(1))
+# This comment is here just to include a ' to unconfuse syntax highlighting.
+# Define an escape_vars function to escape '$' variable syntax.
+# This allows us to read/write command lines with shell variables (e.g.
+# $LD_LIBRARY_PATH), without triggering make substitution.
+escape_vars = $(subst $$,$$$$,$(1))
+# Helper that expands to a shell command to echo a string exactly as it is in
+# make. This uses printf instead of echo because printf's behaviour with respect
+# to escape sequences is more portable than echo's across different shells
+# (e.g., dash, bash).
+exact_echo = printf '%%s\n' '$(call escape_quotes,$(1))'
+# Helper to compare the command we're about to run against the command
+# we logged the last time we ran the command.  Produces an empty
+# string (false) when the commands match.
+# Tricky point: Make has no string-equality test function.
+# The kernel uses the following, but it seems like it would have false
+# positives, where one string reordered its arguments.
+#   arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
+#                       $(filter-out $(cmd_$@), $(cmd_$(1))))
+# We instead substitute each for the empty string into the other, and
+# say they're equal if both substitutions produce the empty string.
+# .d files contain {SPACE_REPLACEMENT} instead of spaces, take that into account.
+command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))), $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
+
+# Helper that is non-empty when a prerequisite changes.
+# Normally make does this implicitly, but we force rules to always run
+# so we can check their command lines.
+#   $? -- new prerequisites
+#   $| -- order-only dependencies
+prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
+
+# Helper that executes all postbuilds until one fails.
+define do_postbuilds + @E=0;\ + for p in $(POSTBUILDS); do\ + eval $$p;\ + E=$$?;\ + if [ $$E -ne 0 ]; then\ + break;\ + fi;\ + done;\ + if [ $$E -ne 0 ]; then\ + rm -rf "$@";\ + exit $$E;\ + fi +endef + +# do_cmd: run a command via the above cmd_foo names, if necessary. +# Should always run for a given target to handle command-line changes. +# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. +# Third argument, if non-zero, makes it do POSTBUILDS processing. +# Note: We intentionally do NOT call dirx for depfile, since it contains {SPACE_REPLACEMENT} for +# spaces already and dirx strips the {SPACE_REPLACEMENT} characters. +define do_cmd +$(if $(or $(command_changed),$(prereq_changed)), + @$(call exact_echo, $($(quiet)cmd_$(1))) + @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" + $(if $(findstring flock,$(word %(flock_index)d,$(cmd_$1))), + @$(cmd_$(1)) + @echo " $(quiet_cmd_$(1)): Finished", + @$(cmd_$(1)) + ) + @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) + @$(if $(2),$(fixup_dep)) + $(if $(and $(3), $(POSTBUILDS)), + $(call do_postbuilds) + ) +) +endef + +# Declare the "%(default_target)s" target first so it is the default, +# even though we don't have the deps yet. +.PHONY: %(default_target)s +%(default_target)s: + +# make looks for ways to re-generate included makefiles, but in our case, we +# don't have a direct way. Explicitly telling make that it has nothing to do +# for them makes it go faster. +%%.d: ; + +# Use FORCE_DO_CMD to force a target to run. Should be coupled with +# do_cmd. +.PHONY: FORCE_DO_CMD +FORCE_DO_CMD: + diff --git a/gyp/pylib/gyp/generator/make.py b/gyp/gyp/MakefileWriter.py similarity index 50% rename from gyp/pylib/gyp/generator/make.py rename to gyp/gyp/MakefileWriter.py index 385a0f7a08..7ddb7b0377 100644 --- a/gyp/pylib/gyp/generator/make.py +++ b/gyp/gyp/MakefileWriter.py @@ -1,129 +1,11 @@ -# Copyright (c) 2013 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# Notes: -# -# This is all roughly based on the Makefile system used by the Linux -# kernel, but is a non-recursive make -- we put the entire dependency -# graph in front of make and let it figure it out. -# -# The code below generates a separate .mk file for each target, but -# all are sourced by the top-level Makefile. This means that all -# variables in .mk-files clobber one another. Be careful to use := -# where appropriate for immediate evaluation, and similarly to watch -# that you're not relying on a variable value to last beween different -# .mk files. -# -# TODOs: -# -# Global settings and utility functions are currently stuffed in the -# toplevel Makefile. It may make sense to generate some .mk files on -# the side to keep the files readable. - -from __future__ import print_function - +import hashlib import os import re -import sys -import subprocess -import gyp -import gyp.common -import gyp.xcode_emulation -from gyp.common import GetEnvironFallback -from gyp.common import GypError - -import hashlib - -generator_default_variables = { - 'EXECUTABLE_PREFIX': '', - 'EXECUTABLE_SUFFIX': '', - 'STATIC_LIB_PREFIX': 'lib', - 'SHARED_LIB_PREFIX': 'lib', - 'STATIC_LIB_SUFFIX': '.a', - 'INTERMEDIATE_DIR': '$(obj).$(TOOLSET)/$(TARGET)/geni', - 'SHARED_INTERMEDIATE_DIR': '$(obj)/gen', - 'PRODUCT_DIR': '$(builddir)', - 'RULE_INPUT_ROOT': '%(INPUT_ROOT)s', # This gets expanded by Python. 
- 'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s', # This gets expanded by Python. - 'RULE_INPUT_PATH': '$(abspath $<)', - 'RULE_INPUT_EXT': '$(suffix $<)', - 'RULE_INPUT_NAME': '$(notdir $<)', - 'CONFIGURATION_NAME': '$(BUILDTYPE)', -} - -# Make supports multiple toolsets -generator_supports_multiple_toolsets = True - -# Request sorted dependencies in the order from dependents to dependencies. -generator_wants_sorted_dependencies = False - -# Placates pylint. -generator_additional_non_configuration_keys = [] -generator_additional_path_sections = [] -generator_extra_sources_for_rules = [] -generator_filelist_paths = None - - -def CalculateVariables(default_variables, params): - """Calculate additional variables for use in the build (called by gyp).""" - flavor = gyp.common.GetFlavor(params) - if flavor == 'mac': - default_variables.setdefault('OS', 'mac') - default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib') - default_variables.setdefault('SHARED_LIB_DIR', - generator_default_variables['PRODUCT_DIR']) - default_variables.setdefault('LIB_DIR', - generator_default_variables['PRODUCT_DIR']) - - # Copy additional generator configuration data from Xcode, which is shared - # by the Mac Make generator. - import gyp.generator.xcode as xcode_generator - global generator_additional_non_configuration_keys - generator_additional_non_configuration_keys = getattr(xcode_generator, - 'generator_additional_non_configuration_keys', []) - global generator_additional_path_sections - generator_additional_path_sections = getattr(xcode_generator, - 'generator_additional_path_sections', []) - global generator_extra_sources_for_rules - generator_extra_sources_for_rules = getattr(xcode_generator, - 'generator_extra_sources_for_rules', []) - COMPILABLE_EXTENSIONS.update({'.m': 'objc', '.mm' : 'objcxx'}) - else: - operating_system = flavor - if flavor == 'android': - operating_system = 'linux' # Keep this legacy behavior for now. - default_variables.setdefault('OS', operating_system) - if flavor == 'aix': - default_variables.setdefault('SHARED_LIB_SUFFIX', '.a') - else: - default_variables.setdefault('SHARED_LIB_SUFFIX', '.so') - default_variables.setdefault('SHARED_LIB_DIR','$(builddir)/lib.$(TOOLSET)') - default_variables.setdefault('LIB_DIR', '$(obj).$(TOOLSET)') - - -def CalculateGeneratorInputInfo(params): - """Calculate the generator specific info that gets fed to input (called by - gyp).""" - generator_flags = params.get('generator_flags', {}) - android_ndk_version = generator_flags.get('android_ndk_version', None) - # Android NDK requires a strict link order. - if android_ndk_version: - global generator_wants_sorted_dependencies - generator_wants_sorted_dependencies = True - - output_dir = params['options'].generator_output or \ - params['options'].toplevel_dir - builddir_name = generator_flags.get('output_dir', 'out') - qualified_out_dir = os.path.normpath(os.path.join( - output_dir, builddir_name, 'gypfiles')) - - global generator_filelist_paths - generator_filelist_paths = { - 'toplevel': params['options'].toplevel_dir, - 'qualified_out_dir': qualified_out_dir, - } +import gyp +from gyp import xcode_emulation +from gyp.common import GypError, EnsureDirExists +from gyp.generator.make import generator_default_variables,CalculateVariables # The .d checking code below uses these functions: # wildcard, sort, foreach, shell, wordlist @@ -137,391 +19,37 @@ def CalculateGeneratorInputInfo(params): # This is the replacement character. SPACE_REPLACEMENT = '?' 
- -LINK_COMMANDS_LINUX = """\ -quiet_cmd_alink = AR($(TOOLSET)) $@ -cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^) - -quiet_cmd_alink_thin = AR($(TOOLSET)) $@ -cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^) - -# Due to circular dependencies between libraries :(, we wrap the -# special "figure out circular dependencies" flags around the entire -# input list during linking. -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) $(LIBS) -Wl,--end-group - -# We support two kinds of shared objects (.so): -# 1) shared_library, which is just bundling together many dependent libraries -# into a link line. -# 2) loadable_module, which is generating a module intended for dlopen(). -# -# They differ only slightly: -# In the former case, we want to package all dependent code into the .so. -# In the latter case, we want to package just the API exposed by the -# outermost module. -# This means shared_library uses --whole-archive, while loadable_module doesn't. -# (Note that --whole-archive is incompatible with the --start-group used in -# normal linking.) - -# Other shared-object link notes: -# - Set SONAME to the library filename so our binaries don't reference -# the local, absolute paths used on the link command-line. -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS) -""" - -LINK_COMMANDS_MAC = """\ -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) -""" - -LINK_COMMANDS_ANDROID = """\ -quiet_cmd_alink = AR($(TOOLSET)) $@ -cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^) - -quiet_cmd_alink_thin = AR($(TOOLSET)) $@ -cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^) - -# Due to circular dependencies between libraries :(, we wrap the -# special "figure out circular dependencies" flags around the entire -# input list during linking. -quiet_cmd_link = LINK($(TOOLSET)) $@ -quiet_cmd_link_host = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS) -cmd_link_host = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) - -# Other shared-object link notes: -# - Set SONAME to the library filename so our binaries don't reference -# the local, absolute paths used on the link command-line. 
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS) -quiet_cmd_solink_module_host = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module_host = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) -""" - - -LINK_COMMANDS_AIX = """\ -quiet_cmd_alink = AR($(TOOLSET)) $@ -cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^) - -quiet_cmd_alink_thin = AR($(TOOLSET)) $@ -cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) -""" - - -LINK_COMMANDS_OS390 = """\ -quiet_cmd_alink = AR($(TOOLSET)) $@ -cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^) - -quiet_cmd_alink_thin = AR($(TOOLSET)) $@ -cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) -Wl,DLL - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) -Wl,DLL - -""" - - # Header of toplevel Makefile. # This should go into the build tree, but it's easier to keep it here for now. -SHARED_HEADER = ("""\ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := %(srcdir)s -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= %(builddir)s - -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= %(default_configuration)s - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. 
-all_deps := - -%(make_global_settings)s - -CC.target ?= %(CC.target)s -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= %(CXX.target)s -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= %(LINK.target)s -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= %(CC.host)s -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= %(CXX.host)s -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= %(LINK.host)s -LDFLAGS.host ?= -AR.host ?= %(AR.host)s - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),""" + SPACE_REPLACEMENT + """,$1) -unreplace_spaces = $(subst """ + SPACE_REPLACEMENT + """,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = %(makedep_args)s -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \\ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters.""" -r""" -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. -sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef -""" -""" -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. 
- -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< +_dirname = os.path.dirname(os.path.abspath(__file__)) +with open(os.path.join(_dirname, 'Makefile.tmpl'), 'rt') as f: + file_content = f.read() + SHARED_HEADER = file_content.format(SPACE_REPLACEMENT=SPACE_REPLACEMENT) -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -%(extra_commands)s -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ +SHARED_HEADER_SUFFIX_RULES_COMMENT1 = ("""\ +# Suffix rules, putting all outputs into $(obj). +""") -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp %(copy_archive_args)s "$<" "$@" +SHARED_HEADER_SUFFIX_RULES_COMMENT2 = ("""\ +# Try building from generated source, too. +""") -%(link_commands)s -""" +header = """\ +# This file is generated by gyp; do not edit. -r""" -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). -exact_echo = printf '%%s\n' '$(call escape_quotes,$(1))' -""" """ -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \\ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain """ + SPACE_REPLACEMENT + \ - """ instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\\ - for p in $(POSTBUILDS); do\\ - eval $$p;\\ - E=$$?;\\ - if [ $$E -ne 0 ]; then\\ - break;\\ - fi;\\ - done;\\ - if [ $$E -ne 0 ]; then\\ - rm -rf "$@";\\ - exit $$E;\\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. 
-# Note: We intentionally do NOT call dirx for depfile, since it contains """ + \ - SPACE_REPLACEMENT + """ for -# spaces already and dirx strips the """ + SPACE_REPLACEMENT + \ - """ characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word %(flock_index)d,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "%(default_target)s" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: %(default_target)s -%(default_target)s: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: -""") - -SHARED_HEADER_MAC_COMMANDS = """ -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" -""" +# Maps every compilable file extension to the do_cmd that compiles it. +COMPILABLE_EXTENSIONS = { + '.c': 'cc', + '.cc': 'cxx', + '.cpp': 'cxx', + '.cxx': 'cxx', + '.s': 'cc', + '.S': 'cc', + '.m': 'objc', + '.mm': 'objcxx', +} def WriteRootHeaderSuffixRules(writer): @@ -534,8 +62,7 @@ def WriteRootHeaderSuffixRules(writer): writer.write('\n# Try building from generated source, too.\n') for ext in extensions: - writer.write( - '$(obj).$(TOOLSET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD\n' % ext) + writer.write('$(obj).$(TOOLSET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD\n' % ext) writer.write('\t@$(call do_cmd,%s,1)\n' % COMPILABLE_EXTENSIONS[ext]) writer.write('\n') for ext in extensions: @@ -544,44 +71,6 @@ def WriteRootHeaderSuffixRules(writer): writer.write('\n') -SHARED_HEADER_SUFFIX_RULES_COMMENT1 = ("""\ -# Suffix rules, putting all outputs into $(obj). -""") - - -SHARED_HEADER_SUFFIX_RULES_COMMENT2 = ("""\ -# Try building from generated source, too. 
-""") - - -SHARED_FOOTER = """\ -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif -""" - -header = """\ -# This file is generated by gyp; do not edit. - -""" - -# Maps every compilable file extension to the do_cmd that compiles it. -COMPILABLE_EXTENSIONS = { - '.c': 'cc', - '.cc': 'cxx', - '.cpp': 'cxx', - '.cxx': 'cxx', - '.s': 'cc', - '.S': 'cc', -} - def Compilable(filename): """Return true if the file is compilable (should be in OBJS).""" for res in (filename.endswith(e) for e in COMPILABLE_EXTENSIONS): @@ -590,6 +79,16 @@ def Compilable(filename): return False + +# Map from qualified target to path to output. +target_outputs = {} +# Map from qualified target to any linkable output. A subset +# of target_outputs. E.g. when mybinary depends on liba, we want to +# include liba in the linker line; when otherbinary depends on +# mybinary, we just want to build mybinary first. +target_link_deps = {} + + def Linkable(filename): """Return true if the file is linkable (should be on the link line).""" return filename.endswith('.o') @@ -601,16 +100,15 @@ def Target(filename): def EscapeShellArgument(s): - """Quotes an argument so that it will be interpreted literally by a POSIX - shell. Taken from - http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python - """ + """ + Quotes an argument so that it will be interpreted literally by a POSIX shell. + Taken from http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python + """ return "'" + s.replace("'", "'\\''") + "'" def EscapeMakeVariableExpansion(s): - """Make has its own variable expansion syntax using $. We must escape it for - string to be interpreted literally.""" + """Make has its own variable expansion syntax using $. We must escape it for string to be interpreted literally.""" return s.replace('$', '$$') @@ -624,8 +122,7 @@ def EscapeCppDefine(s): def QuoteIfNecessary(string): - """TODO: Should this ideally be replaced with one or more of the above - functions?""" + """TODO: Should this ideally be replaced with one or more of the above functions?""" if '"' in string: string = '"' + string.replace('"', '\\"') + '"' return string @@ -636,26 +133,24 @@ def StringToMakefileVariable(string): return re.sub('[^a-zA-Z0-9_]', '_', string) -srcdir_prefix = '' def Sourceify(path): """Convert a path to its source directory form.""" if '$(' in path: return path if os.path.isabs(path): return path - return srcdir_prefix + path + return Sourceify.srcdir_prefix + path +Sourceify.srcdir_prefix = '' def QuoteSpaces(s, quote=r'\ '): return s.replace(' ', quote) -def SourceifyAndQuoteSpaces(path): - """Convert a path to its source directory form and quote spaces.""" - return QuoteSpaces(Sourceify(path)) # TODO: Avoid code duplication with _ValidateSourcesForMSVSProject in msvs.py. def _ValidateSourcesForOSX(spec, all_sources): - """Makes sure if duplicate basenames are not specified in the source list. + """ + Makes sure if duplicate basenames are not specified in the source list. Arguments: spec: The target dictionary containing the properties of the target. 
@@ -666,8 +161,7 @@ def _ValidateSourcesForOSX(spec, all_sources):
   basenames = {}
   for source in all_sources:
     name, ext = os.path.splitext(source)
-    is_compiled_file = ext in [
-        '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
+    is_compiled_file = ext in ['.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
     if not is_compiled_file:
       continue
     basename = os.path.basename(name)  # Don't include extension.
@@ -679,20 +173,11 @@ def _ValidateSourcesForOSX(spec, all_sources):
       error += '  %s: %s\n' % (basename, ' '.join(files))
 
   if error:
-    print(('static library %s has several files with the same basename:\n' % spec['target_name'])
-           + error + 'libtool on OS X will generate' + ' warnings for them.')
+    print('static library %s has several files with the same basename:\n%slibtool on OS X will generate warnings for them.' % (spec['target_name'], error))
     raise GypError('Duplicate basenames in sources section, see list above')
 
 
-# Map from qualified target to path to output.
-target_outputs = {}
-# Map from qualified target to any linkable output. A subset
-# of target_outputs. E.g. when mybinary depends on liba, we want to
-# include liba in the linker line; when otherbinary depends on
-# mybinary, we just want to build mybinary first.
-target_link_deps = {}
-
-
+# noinspection PyAttributeOutsideInit
 class MakefileWriter(object):
   """MakefileWriter packages up the writing of one target-specific foobar.mk.
 
@@ -710,35 +195,40 @@ def __init__(self, generator_flags, flavor):
     # Generate suffix rules for all compilable extensions.
     for ext in COMPILABLE_EXTENSIONS.keys():
       # Suffix rules for source folder.
-      self.suffix_rules_srcdir.update({ext: ("""\
+      self.suffix_rules_srcdir.update({
+        ext: ("""\
 $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD
 	@$(call do_cmd,%s,1)
-""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
+""" % (ext, COMPILABLE_EXTENSIONS[ext]))
+      })
 
       # Suffix rules for generated source files.
-      self.suffix_rules_objdir1.update({ext: ("""\
+      self.suffix_rules_objdir1.update({
+        ext: ("""\
 $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD
 	@$(call do_cmd,%s,1)
-""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
-      self.suffix_rules_objdir2.update({ext: ("""\
+""" % (ext, COMPILABLE_EXTENSIONS[ext]))
+      })
+      self.suffix_rules_objdir2.update({
+        ext: ("""\
 $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
 	@$(call do_cmd,%s,1)
-""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
+""" % (ext, COMPILABLE_EXTENSIONS[ext]))
+      })
 
-
-  def Write(self, qualified_target, base_path, output_filename, spec, configs,
-            part_of_all):
-    """The main entry point: writes a .mk file for a single target.
+  def Write(self, qualified_target, base_path, output_filename, spec, configs, part_of_all):
+    """
+    The main entry point: writes a .mk file for a single target.
     Arguments:
       qualified_target: target we're generating
-      base_path: path relative to source root we're building in, used to resolve
-                 target-relative paths
+      base_path: path relative to source root we're building in, used to resolve target-relative paths
       output_filename: output .mk file name to write
-      spec, configs: gyp info
+      spec: gyp info
+      configs: gyp info
       part_of_all: flag indicating this target is part of 'all'
     """
-    gyp.common.EnsureDirExists(output_filename)
+    EnsureDirExists(output_filename)
 
     self.fp = open(output_filename, 'w')
 
@@ -750,9 +240,9 @@ def Write(self, qualified_target, base_path, output_filename, spec, configs,
     self.type = spec['type']
     self.toolset = spec['toolset']
 
-    self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
+    self.is_mac_bundle = xcode_emulation.IsMacBundle(self.flavor, spec)
     if self.flavor == 'mac':
-      self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
+      self.xcode_settings = xcode_emulation.XcodeSettings(spec)
     else:
       self.xcode_settings = None
 
@@ -768,15 +258,14 @@ def Write(self, qualified_target, base_path, output_filename, spec, configs,
     mac_bundle_deps = []
 
     if self.is_mac_bundle:
-      self.output = self.ComputeMacBundleOutput(spec)
-      self.output_binary = self.ComputeMacBundleBinaryOutput(spec)
+      self.output = self.ComputeMacBundleOutput()
+      self.output_binary = self.ComputeMacBundleBinaryOutput()
     else:
      self.output = self.output_binary = self.ComputeOutput(spec)
 
     self.is_standalone_static_library = bool(
-        spec.get('standalone_static_library', 0))
-    self._INSTALLABLE_TARGETS = ('executable', 'loadable_module',
-                                 'shared_library')
+      spec.get('standalone_static_library', 0))
+    self._INSTALLABLE_TARGETS = ('executable', 'loadable_module', 'shared_library')
    if (self.is_standalone_static_library or
        self.type in self._INSTALLABLE_TARGETS):
      self.alias = os.path.basename(self.output)
@@ -795,18 +284,17 @@ def Write(self, qualified_target, base_path, output_filename, spec, configs,
 
     # Rules must be early like actions.
     if 'rules' in spec:
-      self.WriteRules(spec['rules'], extra_sources, extra_outputs,
-                      extra_mac_bundle_resources, part_of_all)
+      self.WriteRules(spec['rules'], extra_sources, extra_outputs, extra_mac_bundle_resources)
 
     if 'copies' in spec:
       self.WriteCopies(spec['copies'], extra_outputs, part_of_all)
 
     # Bundle resources.
     if self.is_mac_bundle:
-      all_mac_bundle_resources = (
-          spec.get('mac_bundle_resources', []) + extra_mac_bundle_resources)
+      all_mac_bundle_resources = spec.get('mac_bundle_resources', []) + extra_mac_bundle_resources
       self.WriteMacBundleResources(all_mac_bundle_resources, mac_bundle_deps)
       self.WriteMacInfoPlist(mac_bundle_deps)
 
     # Sources.
     all_sources = spec.get('sources', []) + extra_sources
@@ -816,12 +304,10 @@ def Write(self, qualified_target, base_path, output_filename, spec, configs,
       # target.
_ValidateSourcesForOSX(spec, all_sources) self.WriteSources( - configs, deps, all_sources, extra_outputs, - extra_link_deps, part_of_all, - gyp.xcode_emulation.MacPrefixHeader( - self.xcode_settings, lambda p: Sourceify(self.Absolutify(p)), - self.Pchify)) - sources = list(filter(Compilable, all_sources)) + configs, deps, all_sources, extra_outputs, extra_link_deps, + gyp.xcode_emulation.MacPrefixHeader(self.xcode_settings, lambda p: Sourceify(self.Absolutify(p)), self.Pchify) + ) + sources = [x for x in all_sources if Compilable(x)] if sources: self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1) extensions = set([os.path.splitext(s)[1] for s in sources]) @@ -858,7 +344,6 @@ def Write(self, qualified_target, base_path, output_filename, spec, configs, self.fp.close() - def WriteSubMake(self, output_filename, makefile_path, targets, build_dir): """Write a "sub-project" Makefile. @@ -885,7 +370,6 @@ def WriteSubMake(self, output_filename, makefile_path, targets, build_dir): self.WriteLn('\t$(MAKE)%s %s' % (makefile_path, ' '.join(targets))) self.fp.close() - def WriteActions(self, actions, extra_sources, extra_outputs, extra_mac_bundle_resources, part_of_all): """Write Makefile code for any 'actions' from the gyp input. @@ -909,9 +393,9 @@ def WriteActions(self, actions, extra_sources, extra_outputs, # Collect the output dirs we'll need. dirs = set() for out in outputs: - dir = os.path.split(out)[0] - if dir: - dirs.add(dir) + d = os.path.split(out)[0] + if d: + dirs.add(d) if int(action.get('process_outputs_as_sources', False)): extra_sources += outputs if int(action.get('process_outputs_as_mac_bundle_resources', False)): @@ -920,8 +404,7 @@ def WriteActions(self, actions, extra_sources, extra_outputs, # Write the actual command. action_commands = action['action'] if self.flavor == 'mac': - action_commands = [gyp.xcode_emulation.ExpandEnvVars(command, env) - for command in action_commands] + action_commands = [gyp.xcode_emulation.ExpandEnvVars(command, env) for command in action_commands] command = gyp.common.EncodePOSIXShellList(action_commands) if 'message' in action: self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, action['message'])) @@ -950,7 +433,7 @@ def WriteActions(self, actions, extra_sources, extra_outputs, '%s%s' % (name, cd_action, command)) self.WriteLn() - outputs = [self.Absolutify(output) for output in outputs] + outputs = [self.Absolutify(o) for o in outputs] # The makefile rules are all relative to the top dir, but the gyp actions # are defined relative to their containing dir. This replaces the obj # variable for the action rule with an absolute version so that the output @@ -963,19 +446,16 @@ def WriteActions(self, actions, extra_sources, extra_outputs, self.WriteLn("%s: builddir := $(abs_builddir)" % QuoteSpaces(outputs[0])) self.WriteSortedXcodeEnv(outputs[0], self.GetSortedXcodeEnv()) - for input in inputs: - assert ' ' not in input, ( - "Spaces in action input filenames not supported (%s)" % input) + for i in inputs: + assert ' ' not in i, ("Spaces in action input filenames not supported (%s)" % i) for output in outputs: - assert ' ' not in output, ( - "Spaces in action output filenames not supported (%s)" % output) + assert ' ' not in output, ("Spaces in action output filenames not supported (%s)" % output) # See the comment in WriteCopies about expanding env vars. 
outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs] inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs] - self.WriteDoCmd(outputs, [Sourceify(self.Absolutify(i)) for i in inputs], - part_of_all=part_of_all, command=name) + self.WriteDoCmd(outputs, map(Sourceify, map(self.Absolutify, inputs)), part_of_all=part_of_all, command=name) # Stuff the outputs in a variable so we can refer to them later. outputs_variable = 'action_%s_outputs' % name @@ -985,9 +465,7 @@ def WriteActions(self, actions, extra_sources, extra_outputs, self.WriteLn() - - def WriteRules(self, rules, extra_sources, extra_outputs, - extra_mac_bundle_resources, part_of_all): + def WriteRules(self, rules, extra_sources, extra_outputs, extra_mac_bundle_resources): """Write Makefile code for any 'rules' from the gyp input. extra_sources: a list that will be filled in with newly generated source @@ -1008,23 +486,22 @@ def WriteRules(self, rules, extra_sources, extra_outputs, for rule_source in rule.get('rule_sources', []): dirs = set() (rule_source_dirname, rule_source_basename) = os.path.split(rule_source) - (rule_source_root, rule_source_ext) = \ - os.path.splitext(rule_source_basename) + (rule_source_root, rule_source_ext) = os.path.splitext(rule_source_basename) outputs = [self.ExpandInputRoot(out, rule_source_root, rule_source_dirname) for out in rule['outputs']] for out in outputs: - dir = os.path.dirname(out) - if dir: - dirs.add(dir) + d = os.path.dirname(out) + if d: + dirs.add(d) if int(rule.get('process_outputs_as_sources', False)): extra_sources += outputs if int(rule.get('process_outputs_as_mac_bundle_resources', False)): extra_mac_bundle_resources += outputs - inputs = [Sourceify(self.Absolutify(i)) for i - in [rule_source] + rule.get('inputs', [])] + inputs = map(Sourceify, map(self.Absolutify, [rule_source] + + rule.get('inputs', []))) actions = ['$(call do_cmd,%s_%d)' % (name, count)] if name == 'resources_grit': @@ -1040,7 +517,7 @@ def WriteRules(self, rules, extra_sources, extra_outputs, outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs] inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs] - outputs = [self.Absolutify(output) for output in outputs] + outputs = [self.Absolutify(o) for o in outputs] all_outputs += outputs # Only write the 'obj' and 'builddir' rules for the "primary" output # (:1); it's superfluous for the "extra outputs", and this avoids @@ -1057,7 +534,7 @@ def WriteRules(self, rules, extra_sources, extra_outputs, for output in outputs: output = re.sub(variables_with_spaces, '', output) assert ' ' not in output, ( - "Spaces in rule filenames not yet supported (%s)" % output) + "Spaces in rule filenames not yet supported (%s)" % output) self.WriteLn('all_deps += %s' % ' '.join(outputs)) action = [self.ExpandInputRoot(ac, rule_source_root, @@ -1113,7 +590,6 @@ def WriteRules(self, rules, extra_sources, extra_outputs, self.WriteLn('### Finished generating for all rules') self.WriteLn('') - def WriteCopies(self, copies, extra_outputs, part_of_all): """Write Makefile code for any 'copies' from the gyp input. 
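For reference, the rule outputs handled above are expanded per source file by
substituting the %(INPUT_ROOT)s and %(INPUT_DIRNAME)s placeholders that
generator_default_variables maps RULE_INPUT_ROOT and RULE_INPUT_DIRNAME to. A
rough, self-contained sketch of that expansion (expand_input_root and the .idl
path are assumptions for illustration; the real logic lives in ExpandInputRoot):

    import os

    def expand_input_root(template, root, dirname):
      # Assumed shape of the expansion: plain Python %-substitution.
      return template % {'INPUT_ROOT': root, 'INPUT_DIRNAME': dirname}

    rule_source = 'src/idl/foo.idl'                 # hypothetical rule source
    dirname, basename = os.path.split(rule_source)  # -> ('src/idl', 'foo.idl')
    root = os.path.splitext(basename)[0]            # -> 'foo'
    print(expand_input_root('$(obj)/gen/%(INPUT_ROOT)s.cc', root, dirname))
    # prints: $(obj)/gen/foo.cc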
@@ -1147,56 +623,49 @@ def WriteCopies(self, copies, extra_outputs, part_of_all):
         path = gyp.xcode_emulation.ExpandEnvVars(path, env)
         self.WriteDoCmd([output], [path], 'copy', part_of_all)
         outputs.append(output)
-    self.WriteLn('%s = %s' % (variable, ' '.join(QuoteSpaces(o) for o in outputs)))
+    self.WriteLn('%s = %s' % (variable, ' '.join(map(QuoteSpaces, outputs))))
     extra_outputs.append('$(%s)' % variable)
     self.WriteLn()
 
-
   def WriteMacBundleResources(self, resources, bundle_deps):
     """Writes Makefile code for 'mac_bundle_resources'."""
     self.WriteLn('### Generated for mac_bundle_resources')
 
-    for output, res in gyp.xcode_emulation.GetMacBundleResources(
-        generator_default_variables['PRODUCT_DIR'], self.xcode_settings,
-        [Sourceify(self.Absolutify(r)) for r in resources]):
+    product_dir = generator_default_variables['PRODUCT_DIR']
+    sources = map(Sourceify, map(self.Absolutify, resources))
+    bundle_resources = gyp.xcode_emulation.GetMacBundleResources(product_dir, self.xcode_settings, sources)
+    for output, res in bundle_resources:
       _, ext = os.path.splitext(output)
+      # TODO(refack): actually figure this out for `copy-bundle-resource`
+      # is_binary = xcode_emulation.IsBinaryOutputFormat(output)
       if ext != '.xcassets':
        # Make does not support '.xcassets' emulation.
-        self.WriteDoCmd([output], [res], 'mac_tool,,,copy-bundle-resource',
-                        part_of_all=True)
+        self.WriteDoCmd([output], [res], 'mac_tool,,,copy-bundle-resource', part_of_all=True)
        bundle_deps.append(output)
 
-
   def WriteMacInfoPlist(self, bundle_deps):
     """Write Makefile code for bundle Info.plist files."""
     info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
-        generator_default_variables['PRODUCT_DIR'], self.xcode_settings,
-        lambda p: Sourceify(self.Absolutify(p)))
+      generator_default_variables['PRODUCT_DIR'], self.xcode_settings,
+      lambda p: Sourceify(self.Absolutify(p)))
     if not info_plist:
       return
     if defines:
       # Create an intermediate file to store preprocessed results.
-      intermediate_plist = ('$(obj).$(TOOLSET)/$(TARGET)/' +
-          os.path.basename(info_plist))
-      self.WriteList(defines, intermediate_plist + ': INFOPLIST_DEFINES', '-D',
-                     quoter=EscapeCppDefine)
+      intermediate_plist = ('$(obj).$(TOOLSET)/$(TARGET)/' + os.path.basename(info_plist))
+      self.WriteList(defines, intermediate_plist + ': INFOPLIST_DEFINES', '-D', quoter=EscapeCppDefine)
       self.WriteMakeRule([intermediate_plist], [info_plist],
-                         ['$(call do_cmd,infoplist)',
-                          # "Convert" the plist so that any weird whitespace changes from the
-                          # preprocessor do not affect the XML parser in mac_tool.
-                          '@plutil -convert xml1 $@ $@'])
+                        ['$(call do_cmd,infoplist)',
+                         # "Convert" the plist so that any weird whitespace changes from the
+                         # preprocessor do not affect the XML parser in mac_tool.
+                         '@plutil -convert xml1 $@ $@'])
       info_plist = intermediate_plist
     # plists can contain envvars and substitute them into the file.
-    self.WriteSortedXcodeEnv(
-        out, self.GetSortedXcodeEnv(additional_settings=extra_env))
-    self.WriteDoCmd([out], [info_plist], 'mac_tool,,,copy-info-plist',
-                    part_of_all=True)
+    self.WriteSortedXcodeEnv(out, self.GetSortedXcodeEnv(additional_settings=extra_env))
+    self.WriteDoCmd([out], [info_plist], 'mac_tool,,,copy-info-plist', part_of_all=True)
     bundle_deps.append(out)
 
-
-  def WriteSources(self, configs, deps, sources,
-                   extra_outputs, extra_link_deps,
-                   part_of_all, precompiled_header):
+  def WriteSources(self, configs, deps, sources, extra_outputs, extra_link_deps, precompiled_header):
     """Write Makefile code for any 'sources' from the gyp input.
     These are source files necessary to build the current target.
@@ -1211,8 +680,7 @@ def WriteSources(self, configs, deps, sources,
     # Write configuration-specific variables for CFLAGS, etc.
     for configname in sorted(configs.keys()):
       config = configs[configname]
-      self.WriteList(config.get('defines'), 'DEFS_%s' % configname, prefix='-D',
-          quoter=EscapeCppDefine)
+      self.WriteList(config.get('defines'), 'DEFS_%s' % configname, prefix='-D', quoter=EscapeCppDefine)
 
       if self.flavor == 'mac':
         cflags = self.xcode_settings.GetCflags(configname)
@@ -1224,30 +692,32 @@ def WriteSources(self, configs, deps, sources,
         cflags = config.get('cflags')
         cflags_c = config.get('cflags_c')
         cflags_cc = config.get('cflags_cc')
+        cflags_objc = None
+        cflags_objcc = None
 
-      self.WriteLn("# Flags passed to all source files.");
+      self.WriteLn("# Flags passed to all source files.")
       self.WriteList(cflags, 'CFLAGS_%s' % configname)
-      self.WriteLn("# Flags passed to only C files.");
+      self.WriteLn("# Flags passed to only C files.")
       self.WriteList(cflags_c, 'CFLAGS_C_%s' % configname)
-      self.WriteLn("# Flags passed to only C++ files.");
+      self.WriteLn("# Flags passed to only C++ files.")
       self.WriteList(cflags_cc, 'CFLAGS_CC_%s' % configname)
       if self.flavor == 'mac':
-        self.WriteLn("# Flags passed to only ObjC files.");
+        self.WriteLn("# Flags passed to only ObjC files.")
        self.WriteList(cflags_objc, 'CFLAGS_OBJC_%s' % configname)
-        self.WriteLn("# Flags passed to only ObjC++ files.");
+        self.WriteLn("# Flags passed to only ObjC++ files.")
         self.WriteList(cflags_objcc, 'CFLAGS_OBJCC_%s' % configname)
 
       includes = config.get('include_dirs')
       if includes:
-        includes = [Sourceify(self.Absolutify(i)) for i in includes]
+        includes = [Sourceify(self.Absolutify(include)) for include in includes]
         self.WriteList(includes, 'INCS_%s' % configname, prefix='-I')
 
-    compilable = list(filter(Compilable, sources))
-    objs = [self.Objectify(self.Absolutify(Target(c))) for c in compilable]
+    compilable = [x for x in sources if Compilable(x)]  # keep a list: it is iterated again for pchdeps below
+    objs = [self.Objectify(self.Absolutify(Target(x))) for x in compilable]
     self.WriteList(objs, 'OBJS')
 
     for obj in objs:
       assert ' ' not in obj, (
-        "Spaces in object filenames not supported (%s)" % obj)
+          "Spaces in object filenames not supported (%s)" % obj)
     self.WriteLn('# Add to the list of files we specially track '
                  'dependencies for.')
     self.WriteLn('all_deps += $(OBJS)')
@@ -1256,20 +726,18 @@ def WriteSources(self, configs, deps, sources,
     # Make sure our dependencies are built first.
     if deps:
       self.WriteMakeRule(['$(OBJS)'], deps,
-                         comment = 'Make sure our dependencies are built '
-                                   'before any of us.',
-                         order_only = True)
+                         comment='Make sure our dependencies are built before any of us.',
+                         order_only=True)
 
     # Make sure the actions and rules run first.
     # If they generate any extra headers etc., the per-.o file dep tracking
     # will catch the proper rebuilds, so order only is still ok here.
     if extra_outputs:
       self.WriteMakeRule(['$(OBJS)'], extra_outputs,
-                         comment = 'Make sure our actions/rules run '
-                                   'before any of us.',
-                         order_only = True)
+                         comment='Make sure our actions/rules run before any of us.',
+                         order_only=True)
 
-    pchdeps = precompiled_header.GetObjDependencies(compilable, objs )
+    pchdeps = precompiled_header.GetObjDependencies(compilable, objs)
     if pchdeps:
       self.WriteLn('# Dependencies from obj files to their precompiled headers')
       for source, obj, gch in pchdeps:
@@ -1314,7 +782,7 @@ def WriteSources(self, configs, deps, sources,
 
     # If there are any object files in our input file list, link them into our
     # output.
-    extra_link_deps += list(filter(Linkable, sources))
+    extra_link_deps += [source for source in sources if Linkable(source)]
 
     self.WriteLn()
 
@@ -1323,7 +791,7 @@ def WritePchTargets(self, pch_commands):
     if not pch_commands:
       return
 
-    for gch, lang_flag, lang, input in pch_commands:
+    for gch, lang_flag, lang, inpt in pch_commands:
       extra_flags = {
         'c': '$(CFLAGS_C_$(BUILDTYPE))',
         'cc': '$(CFLAGS_CC_$(BUILDTYPE))',
@@ -1342,15 +810,14 @@ def WritePchTargets(self, pch_commands):
                  "$(CFLAGS_$(BUILDTYPE)) " +
                  extra_flags)
 
-      self.WriteLn('%s: %s FORCE_DO_CMD' % (gch, input))
+      self.WriteLn('%s: %s FORCE_DO_CMD' % (gch, inpt))
      self.WriteLn('\t@$(call do_cmd,pch_%s,1)' % lang)
       self.WriteLn('')
       assert ' ' not in gch, (
-        "Spaces in gch filenames not supported (%s)" % gch)
+          "Spaces in gch filenames not supported (%s)" % gch)
       self.WriteLn('all_deps += %s' % gch)
       self.WriteLn('')
 
-
   def ComputeOutputBasename(self, spec):
     """Return the 'output basename' of a gyp spec.
 
@@ -1382,8 +849,8 @@ def ComputeOutputBasename(self, spec):
     elif self.type == 'none':
       target = '%s.stamp' % target
     elif self.type != 'executable':
-      print("ERROR: What output file should be generated?",
-            "type", self.type, "target", target)
+      print("ERROR: What output file should be generated?", "type", self.type, "target", target)
 
     target_prefix = spec.get('product_prefix', target_prefix)
     target = spec.get('product_name', target)
@@ -1393,11 +860,8 @@ def ComputeOutputBasename(self, spec):
 
     return target_prefix + target + target_ext
 
-
   def _InstallImmediately(self):
-    return self.toolset == 'target' and self.flavor == 'mac' and self.type in (
-        'static_library', 'executable', 'shared_library', 'loadable_module')
-
+    return self.toolset == 'target' and self.flavor == 'mac' and self.type in ('static_library', 'executable', 'shared_library', 'loadable_module')
 
   def ComputeOutput(self, spec):
     """Return the 'output' (full output path) of a gyp spec.
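To make the ComputeOutputBasename behavior above concrete, here is a simplified
restatement for the Linux defaults quoted earlier in this patch (assumed values:
'lib' prefix, '.a'/'.so' suffixes; the real method also honors product_prefix,
product_name and product_extension):

    def expected_basename(target, type_):
      # Sketch only: mirrors the default prefix/suffix table, not the full method.
      if type_ == 'static_library':
        return 'lib' + target + '.a'
      if type_ in ('shared_library', 'loadable_module'):
        return 'lib' + target + '.so'
      if type_ == 'none':
        return target + '.stamp'
      return target  # executable: empty prefix and suffix by default

    assert expected_basename('base', 'static_library') == 'libbase.a'
    assert expected_basename('base', 'none') == 'base.stamp'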
@@ -1413,21 +877,19 @@ def ComputeOutput(self, spec): path = spec.get('product_dir', path) return os.path.join(path, self.ComputeOutputBasename(spec)) - - def ComputeMacBundleOutput(self, spec): + def ComputeMacBundleOutput(self): """Return the 'output' (full output path) to a bundle output directory.""" assert self.is_mac_bundle path = generator_default_variables['PRODUCT_DIR'] return os.path.join(path, self.xcode_settings.GetWrapperName()) - - def ComputeMacBundleBinaryOutput(self, spec): + def ComputeMacBundleBinaryOutput(self): """Return the 'output' (full output path) to the binary in a bundle.""" path = generator_default_variables['PRODUCT_DIR'] return os.path.join(path, self.xcode_settings.GetExecutablePath()) - - def ComputeDeps(self, spec): + @staticmethod + def ComputeDeps(spec): """Compute the dependencies of a gyp spec. Returns a tuple (deps, link_deps), where each is a list of @@ -1446,21 +908,19 @@ def ComputeDeps(self, spec): # TODO: It seems we need to transitively link in libraries (e.g. -lfoo)? # This hack makes it work: # link_deps.extend(spec.get('libraries', [])) - return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps)) - - - def WriteDependencyOnExtraOutputs(self, target, extra_outputs): - self.WriteMakeRule([self.output_binary], extra_outputs, - comment = 'Build our special outputs first.', - order_only = True) + return gyp.common.uniquer(deps), gyp.common.uniquer(link_deps) + def WriteDependencyOnExtraOutputs(self, extra_outputs): + self.WriteMakeRule([self.output_binary], extra_outputs, comment='Build our special outputs first.', order_only=True) - def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, - extra_outputs, part_of_all): - """Write Makefile code to produce the final target of the gyp spec. + def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, extra_outputs, part_of_all): + """ + Write Makefile code to produce the final target of the gyp spec. - spec, configs: input from gyp. - deps, link_deps: dependency lists; see ComputeDeps() + spec: input from gyp. + configs: input from gyp. + deps: dependency lists; see ComputeDeps() + link_deps: dependency lists; see ComputeDeps() extra_outputs: any extra outputs that our target should depend on part_of_all: flag indicating this target is part of 'all' """ @@ -1468,11 +928,8 @@ def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, self.WriteLn('### Rules for final target.') if extra_outputs: - self.WriteDependencyOnExtraOutputs(self.output_binary, extra_outputs) - self.WriteMakeRule(extra_outputs, deps, - comment=('Preserve order dependency of ' - 'special output on deps.'), - order_only = True) + self.WriteDependencyOnExtraOutputs(extra_outputs) + self.WriteMakeRule(extra_outputs, deps, comment='Preserve order dependency of special output on deps.', order_only=True) target_postbuilds = {} if self.type != 'none': @@ -1480,17 +937,15 @@ def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, config = configs[configname] if self.flavor == 'mac': ldflags = self.xcode_settings.GetLdflags(configname, - generator_default_variables['PRODUCT_DIR'], - lambda p: Sourceify(self.Absolutify(p))) + generator_default_variables['PRODUCT_DIR'], + lambda p: Sourceify(self.Absolutify(p))) # TARGET_POSTBUILDS_$(BUILDTYPE) is added to postbuilds later on. 
gyp_to_build = gyp.common.InvertRelativePath(self.path) target_postbuild = self.xcode_settings.AddImplicitPostbuilds( - configname, - QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build, - self.output))), - QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build, - self.output_binary)))) + configname, + QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build, self.output))), + QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build, self.output_binary)))) if target_postbuild: target_postbuilds[configname] = target_postbuild else: @@ -1515,13 +970,11 @@ def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, if self.flavor == 'mac': libraries = self.xcode_settings.AdjustLibraries(libraries) self.WriteList(libraries, 'LIBS') - self.WriteLn('%s: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))' % - QuoteSpaces(self.output_binary)) + self.WriteLn('%s: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))' % QuoteSpaces(self.output_binary)) self.WriteLn('%s: LIBS := $(LIBS)' % QuoteSpaces(self.output_binary)) if self.flavor == 'mac': - self.WriteLn('%s: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))' % - QuoteSpaces(self.output_binary)) + self.WriteLn('%s: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))' % QuoteSpaces(self.output_binary)) # Postbuild actions. Like actions, but implicitly depend on the target's # output. @@ -1529,8 +982,7 @@ def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, if self.flavor == 'mac': if target_postbuilds: postbuilds.append('$(TARGET_POSTBUILDS_$(BUILDTYPE))') - postbuilds.extend( - gyp.xcode_emulation.GetSpecPostbuildCommands(spec)) + postbuilds.extend(gyp.xcode_emulation.GetSpecPostbuildCommands(spec)) if postbuilds: # Envvars may be referenced by TARGET_POSTBUILDS_$(BUILDTYPE), @@ -1539,20 +991,17 @@ def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, self.WriteSortedXcodeEnv(self.output, self.GetSortedXcodePostbuildEnv()) for configname in target_postbuilds: - self.WriteLn('%s: TARGET_POSTBUILDS_%s := %s' % - (QuoteSpaces(self.output), - configname, - gyp.common.EncodePOSIXShellList(target_postbuilds[configname]))) + self.WriteLn('%s: TARGET_POSTBUILDS_%s := %s' % (QuoteSpaces(self.output), configname, gyp.common.EncodePOSIXShellList(target_postbuilds[configname]))) # Postbuilds expect to be run in the gyp file's directory, so insert an # implicit postbuild to cd to there. postbuilds.insert(0, gyp.common.EncodePOSIXShellList(['cd', self.path])) - for i in range(len(postbuilds)): - if not postbuilds[i].startswith('$'): - postbuilds[i] = EscapeShellArgument(postbuilds[i]) + for i, postbuild in enumerate(postbuilds): + if not postbuild.startswith('$'): + postbuilds[i] = EscapeShellArgument(postbuild) self.WriteLn('%s: builddir := $(abs_builddir)' % QuoteSpaces(self.output)) self.WriteLn('%s: POSTBUILDS := %s' % ( - QuoteSpaces(self.output), ' '.join(postbuilds))) + QuoteSpaces(self.output), ' '.join(postbuilds))) # A bundle directory depends on its dependencies such as bundle resources # and bundle binary. When all dependencies have been built, the bundle @@ -1560,18 +1009,17 @@ def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, if self.is_mac_bundle: # If the framework doesn't contain a binary, then nothing depends # on the actions -- make the framework depend on them directly too. - self.WriteDependencyOnExtraOutputs(self.output, extra_outputs) + self.WriteDependencyOnExtraOutputs(extra_outputs) # Bundle dependencies. 
Note that the code below adds actions to this # target, so if you move these two lines, move the lines below as well. - self.WriteList([QuoteSpaces(dep) for dep in bundle_deps], 'BUNDLE_DEPS') + self.WriteList(map(QuoteSpaces, bundle_deps), 'BUNDLE_DEPS') self.WriteLn('%s: $(BUNDLE_DEPS)' % QuoteSpaces(self.output)) # After the framework is built, package it. Needs to happen before # postbuilds, since postbuilds depend on this. if self.type in ('shared_library', 'loadable_module'): - self.WriteLn('\t@$(call do_cmd,mac_package_framework,,,%s)' % - self.xcode_settings.GetFrameworkVersion()) + self.WriteLn('\t@$(call do_cmd,mac_package_framework,,,%s)' % self.xcode_settings.GetFrameworkVersion()) # Bundle postbuilds can depend on the whole bundle, so run them after # the bundle is packaged, not already after the bundle binary is done. @@ -1590,54 +1038,37 @@ def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, self.WriteLn('\t@touch -c %s' % QuoteSpaces(self.output)) if postbuilds: - assert not self.is_mac_bundle, ('Postbuilds for bundles should be done ' - 'on the bundle, not the binary (target \'%s\')' % self.target) - assert 'product_dir' not in spec, ('Postbuilds do not work with ' - 'custom product_dir') + assert not self.is_mac_bundle, ('Postbuilds for bundles should be done on the bundle, not the binary (target \'%s\')' % self.target) + assert 'product_dir' not in spec, 'Postbuilds do not work with custom product_dir' if self.type == 'executable': - self.WriteLn('%s: LD_INPUTS := %s' % ( - QuoteSpaces(self.output_binary), - ' '.join(QuoteSpaces(dep) for dep in link_deps))) + self.WriteLn('%s: LD_INPUTS := %s' % (QuoteSpaces(self.output_binary), ' '.join(map(QuoteSpaces, link_deps)))) if self.toolset == 'host' and self.flavor == 'android': - self.WriteDoCmd([self.output_binary], link_deps, 'link_host', - part_of_all, postbuilds=postbuilds) + self.WriteDoCmd([self.output_binary], link_deps, 'link_host', part_of_all, postbuilds=postbuilds) else: - self.WriteDoCmd([self.output_binary], link_deps, 'link', part_of_all, - postbuilds=postbuilds) + self.WriteDoCmd([self.output_binary], link_deps, 'link', part_of_all, postbuilds=postbuilds) elif self.type == 'static_library': for link_dep in link_deps: - assert ' ' not in link_dep, ( - "Spaces in alink input filenames not supported (%s)" % link_dep) + assert ' ' not in link_dep, ("Spaces in alink input filenames not supported (%s)" % link_dep) if (self.flavor not in ('mac', 'openbsd', 'netbsd', 'win') and not - self.is_standalone_static_library): - self.WriteDoCmd([self.output_binary], link_deps, 'alink_thin', - part_of_all, postbuilds=postbuilds) + self.is_standalone_static_library): + self.WriteDoCmd([self.output_binary], link_deps, 'alink_thin', part_of_all, postbuilds=postbuilds) else: - self.WriteDoCmd([self.output_binary], link_deps, 'alink', part_of_all, - postbuilds=postbuilds) + self.WriteDoCmd([self.output_binary], link_deps, 'alink', part_of_all, postbuilds=postbuilds) elif self.type == 'shared_library': - self.WriteLn('%s: LD_INPUTS := %s' % ( - QuoteSpaces(self.output_binary), - ' '.join(QuoteSpaces(dep) for dep in link_deps))) - self.WriteDoCmd([self.output_binary], link_deps, 'solink', part_of_all, - postbuilds=postbuilds) + self.WriteLn('%s: LD_INPUTS := %s' % (QuoteSpaces(self.output_binary), ' '.join(map(QuoteSpaces, link_deps)))) + self.WriteDoCmd([self.output_binary], link_deps, 'solink', part_of_all, postbuilds=postbuilds) elif self.type == 'loadable_module': for link_dep in link_deps: - assert ' ' not in 
link_dep, ( - "Spaces in module input filenames not supported (%s)" % link_dep) + assert ' ' not in link_dep, ("Spaces in module input filenames not supported (%s)" % link_dep) if self.toolset == 'host' and self.flavor == 'android': - self.WriteDoCmd([self.output_binary], link_deps, 'solink_module_host', - part_of_all, postbuilds=postbuilds) + self.WriteDoCmd([self.output_binary], link_deps, 'solink_module_host', part_of_all, postbuilds=postbuilds) else: - self.WriteDoCmd( - [self.output_binary], link_deps, 'solink_module', part_of_all, - postbuilds=postbuilds) + self.WriteDoCmd([self.output_binary], link_deps, 'solink_module', part_of_all, postbuilds=postbuilds) elif self.type == 'none': # Write a stamp line. - self.WriteDoCmd([self.output_binary], deps, 'touch', part_of_all, - postbuilds=postbuilds) + self.WriteDoCmd([self.output_binary], deps, 'touch', part_of_all, postbuilds=postbuilds) else: print("WARNING: no output for", self.type, self.target) @@ -1645,12 +1076,9 @@ def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, # Installable target aliases are created below. if ((self.output and self.output != self.target) and (self.type not in self._INSTALLABLE_TARGETS)): - self.WriteMakeRule([self.target], [self.output], - comment='Add target alias', phony = True) + self.WriteMakeRule([self.target], [self.output], comment='Add target alias', phony=True) if part_of_all: - self.WriteMakeRule(['all'], [self.target], - comment = 'Add target alias to "all" target.', - phony = True) + self.WriteMakeRule(['all'], [self.target], comment='Add target alias to "all" target.', phony=True) # Add special-case rules for our installable targets. # 1) They need to install to the build dir or "product" dir. @@ -1669,31 +1097,23 @@ def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, if (self.flavor == 'mac' and not 'product_dir' in spec and self.toolset == 'target'): # On mac, products are created in install_path immediately. - assert install_path == self.output, '%s != %s' % ( - install_path, self.output) + assert install_path == self.output, '%s != %s' % (install_path, self.output) # Point the target alias to the final binary output. self.WriteMakeRule([self.target], [install_path], - comment='Add target alias', phony = True) + comment='Add target alias', phony=True) if install_path != self.output: assert not self.is_mac_bundle # See comment a few lines above. - self.WriteDoCmd([install_path], [self.output], 'copy', - comment = 'Copy this to the %s output path.' % - file_desc, part_of_all=part_of_all) + self.WriteDoCmd([install_path], [self.output], 'copy', comment='Copy this to the %s output path.' % file_desc, part_of_all=part_of_all) installable_deps.append(install_path) if self.output != self.alias and self.alias != self.target: - self.WriteMakeRule([self.alias], installable_deps, - comment = 'Short alias for building this %s.' % - file_desc, phony = True) + self.WriteMakeRule([self.alias], installable_deps, comment='Short alias for building this %s.' % file_desc, phony=True) if part_of_all: - self.WriteMakeRule(['all'], [install_path], - comment = 'Add %s to "all" target.' % file_desc, - phony = True) - + self.WriteMakeRule(['all'], [install_path], comment='Add %s to "all" target.' % file_desc, phony=True) - def WriteList(self, value_list, variable=None, prefix='', - quoter=QuoteIfNecessary): - """Write a variable definition that is a list of values. 
+ def WriteList(self, value_list, variable=None, prefix='', quoter=QuoteIfNecessary): + """ + Write a variable definition that is a list of values. E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out foo = blaha blahb @@ -1705,10 +1125,11 @@ def WriteList(self, value_list, variable=None, prefix='', values = ' \\\n\t' + ' \\\n\t'.join(value_list) self.fp.write('%s :=%s\n\n' % (variable, values)) - - def WriteDoCmd(self, outputs, inputs, command, part_of_all, comment=None, - postbuilds=False): - """Write a Makefile rule that uses do_cmd. + # TODO(refack) `part_of_all` is not used, but is part of signature used in many other places + # noinspection PyUnusedLocal + def WriteDoCmd(self, outputs, inputs, command, part_of_all=False, comment=None, postbuilds=None): + """ + Write a Makefile rule that uses do_cmd. This makes the outputs dependent on the command line that was run, as well as support the V= make command line flag. @@ -1717,11 +1138,14 @@ def WriteDoCmd(self, outputs, inputs, command, part_of_all, comment=None, if postbuilds: assert ',' not in command suffix = ',,1' # Tell do_cmd to honor $POSTBUILDS - self.WriteMakeRule(outputs, inputs, - actions = ['$(call do_cmd,%s%s)' % (command, suffix)], - comment = comment, - command = command, - force = True) + self.WriteMakeRule( + outputs, + inputs, + actions=['$(call do_cmd,%s%s)' % (command, suffix)], + comment=comment, + command=command, + force=True + ) # Add our outputs to the list of targets we read depfiles from. # all_deps is only used for deps file reading, and for deps files we replace # spaces with ? because escaping doesn't work with make's $(sort) and @@ -1729,10 +1153,9 @@ def WriteDoCmd(self, outputs, inputs, command, part_of_all, comment=None, outputs = [QuoteSpaces(o, SPACE_REPLACEMENT) for o in outputs] self.WriteLn('all_deps += %s' % ' '.join(outputs)) - - def WriteMakeRule(self, outputs, inputs, actions=None, comment=None, - order_only=False, force=False, phony=False, command=None): - """Write a Makefile rule, with some extra tricks. + def WriteMakeRule(self, outputs, inputs, actions=None, comment=None, order_only=False, force=False, phony=False, command=None): + """ + Write a Makefile rule, with some extra tricks. outputs: a list of outputs for the rule (note: this is not directly supported by make; see comments below) @@ -1747,7 +1170,7 @@ def WriteMakeRule(self, outputs, inputs, actions=None, comment=None, command: (optional) command name to generate unambiguous labels """ outputs = [QuoteSpaces(o) for o in outputs] - inputs = [QuoteSpaces(i) for i in inputs] + inputs = map(QuoteSpaces, inputs) if comment: self.WriteLn('# ' + comment) @@ -1760,29 +1183,27 @@ def WriteMakeRule(self, outputs, inputs, actions=None, comment=None, if order_only: # Order only rule: Just write a simple rule. # TODO(evanm): just make order_only a list of deps instead of this hack. - self.WriteLn('%s: | %s%s' % - (' '.join(outputs), ' '.join(inputs), force_append)) + self.WriteLn('%s: | %s%s' % (' '.join(outputs), ' '.join(inputs), force_append)) elif len(outputs) == 1: # Regular rule, one output: Just write a simple rule. self.WriteLn('%s: %s%s' % (outputs[0], ' '.join(inputs), force_append)) else: - # Regular rule, more than one output: Multiple outputs are tricky in - # make. We will write three rules: + # Regular rule, more than one output: Multiple outputs are tricky in make. We will write three rules: # - All outputs depend on an intermediate file. # - Make .INTERMEDIATE depend on the intermediate. 
-      #   - The intermediate file depends on the inputs and executes the
-      #     actual command.
+      #   - The intermediate file depends on the inputs and executes the actual command.
       #   - The intermediate recipe will 'touch' the intermediate file.
-      #   - The multi-output rule will have an do-nothing recipe.
+      #   - The multi-output rule will have a do-nothing recipe.
       # Hash the target name to avoid generating overlong filenames.
-      cmddigest = hashlib.sha1(command if command else self.target).hexdigest()
-      intermediate = "%s.intermediate" % cmddigest
+      name = command if command else self.target
+      key = name.encode('utf-8')
+      # Keep a filename-safe slug (word characters only) next to the hash so
+      # the intermediate name stays recognizable.
+      slug = re.sub(r'\W+', '', name)
+      cmddigest = hashlib.sha1(key).hexdigest()
+      intermediate = "%s.%s.intermediate" % (cmddigest, slug)
       self.WriteLn('%s: %s' % (' '.join(outputs), intermediate))
-      self.WriteLn('\t%s' % '@:');
+      self.WriteLn('\t%s' % '@:')
       self.WriteLn('%s: %s' % ('.INTERMEDIATE', intermediate))
-      self.WriteLn('%s: %s%s' %
-                   (intermediate, ' '.join(inputs), force_append))
+      self.WriteLn('%s: %s%s' % (intermediate, ' '.join(inputs), force_append))
       actions.insert(0, '$(call do_cmd,touch)')

     if actions:
@@ -1790,7 +1211,6 @@ def WriteMakeRule(self, outputs, inputs, actions=None, comment=None,
         self.WriteLn('\t%s' % action)
       self.WriteLn()

-
   def WriteAndroidNdkModuleRule(self, module_name, all_sources, link_deps):
     """Write a set of LOCAL_XXX definitions for Android NDK.
@@ -1836,7 +1256,7 @@ def WriteAndroidNdkModuleRule(self, module_name, all_sources, link_deps):
         default_cpp_ext = ext
     self.WriteLn('LOCAL_CPP_EXTENSION := ' + default_cpp_ext)

     self.WriteList(list(map(self.Absolutify, filter(Compilable, all_sources))),
                    'LOCAL_SRC_FILES')

     # Filter out those which do not match prefix and suffix and produce
     # the resulting list without duplicates.
     def DepsToModules(deps, prefix, suffix):
       modules = []
       for filepath in deps:
-        filename = os.path.basename(filepath)
-        if filename.startswith(prefix) and filename.endswith(suffix):
-          modules.append(filename[len(prefix):-len(suffix)])
+        mod_filename = os.path.basename(filepath)
+        if mod_filename.startswith(prefix) and mod_filename.endswith(suffix):
+          modules.append(mod_filename[len(prefix):-len(suffix)])
       return modules

     # Retrieve the default value of 'SHARED_LIB_SUFFIX'
@@ -1855,15 +1275,11 @@ def DepsToModules(deps, prefix, suffix):
     CalculateVariables(default_variables, params)
     self.WriteList(
-        DepsToModules(link_deps,
-                      generator_default_variables['SHARED_LIB_PREFIX'],
-                      default_variables['SHARED_LIB_SUFFIX']),
-        'LOCAL_SHARED_LIBRARIES')
+        DepsToModules(link_deps, generator_default_variables['SHARED_LIB_PREFIX'], default_variables['SHARED_LIB_SUFFIX']),
+        'LOCAL_SHARED_LIBRARIES')
     self.WriteList(
-        DepsToModules(link_deps,
-                      generator_default_variables['STATIC_LIB_PREFIX'],
-                      generator_default_variables['STATIC_LIB_SUFFIX']),
-        'LOCAL_STATIC_LIBRARIES')
+        DepsToModules(link_deps, generator_default_variables['STATIC_LIB_PREFIX'], generator_default_variables['STATIC_LIB_SUFFIX']),
+        'LOCAL_STATIC_LIBRARIES')

     if self.type == 'executable':
       self.WriteLn('include $(BUILD_EXECUTABLE)')
@@ -1873,28 +1289,19 @@ def DepsToModules(deps, prefix, suffix):
       self.WriteLn('include $(BUILD_STATIC_LIBRARY)')
     self.WriteLn()

-
   def WriteLn(self, text=''):
     self.fp.write(text + '\n')

-
   def GetSortedXcodeEnv(self, additional_settings=None):
-    return gyp.xcode_emulation.GetSortedXcodeEnv(
-        self.xcode_settings, "$(abs_builddir)",
-        os.path.join("$(abs_srcdir)", self.path), "$(BUILDTYPE)",
-        additional_settings)
-
+    return gyp.xcode_emulation.GetSortedXcodeEnv(self.xcode_settings, "$(abs_builddir)", os.path.join("$(abs_srcdir)", self.path), "$(BUILDTYPE)", additional_settings)

   def GetSortedXcodePostbuildEnv(self):
     # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack.
     # TODO(thakis): It would be nice to have some general mechanism instead.
-    strip_save_file = self.xcode_settings.GetPerTargetSetting(
-        'CHROMIUM_STRIP_SAVE_FILE', '')
+    strip_save_file = self.xcode_settings.GetPerTargetSetting('CHROMIUM_STRIP_SAVE_FILE', '')
     # Even if strip_save_file is empty, explicitly write it. Else a postbuild
     # might pick up an export from an earlier target.
-    return self.GetSortedXcodeEnv(
-        additional_settings={'CHROMIUM_STRIP_SAVE_FILE': strip_save_file})
-
+    return self.GetSortedXcodeEnv(additional_settings={'CHROMIUM_STRIP_SAVE_FILE': strip_save_file})

   def WriteSortedXcodeEnv(self, target, env):
     for k, v in env:
@@ -1906,7 +1313,6 @@ def WriteSortedXcodeEnv(self, target, env):
       # So don't escape spaces in |env[k]|.
       self.WriteLn('%s: export %s := %s' % (QuoteSpaces(target), k, v))

-
   def Objectify(self, path):
     """Convert a path to its output directory form."""
     if '$(' in path:
@@ -1915,7 +1321,6 @@ def Objectify(self, path):
       path = '$(obj).%s/$(TARGET)/%s' % (self.toolset, path)
     return path

-
   def Pchify(self, path, lang):
     """Convert a prefix header path to its output directory form."""
     path = self.Absolutify(path)
@@ -1925,7 +1330,6 @@ def Pchify(self, path, lang):
       return path
     return '$(obj).%s/$(TARGET)/pch-%s/%s' % (self.toolset, lang, path)

-
   def Absolutify(self, path):
     """Convert a subdirectory-relative path into a base-relative path.
     Skips over paths that contain variables."""
@@ -1936,330 +1340,23 @@ def Absolutify(self, path):
       return path.rstrip('/')
     return os.path.normpath(os.path.join(self.path, path))

-
-  def ExpandInputRoot(self, template, expansion, dirname):
+  @staticmethod
+  def ExpandInputRoot(template, expansion, dirname):
     if '%(INPUT_ROOT)s' not in template and '%(INPUT_DIRNAME)s' not in template:
       return template
     path = template % {
-        'INPUT_ROOT': expansion,
-        'INPUT_DIRNAME': dirname,
-    }
+      'INPUT_ROOT': expansion,
+      'INPUT_DIRNAME': dirname,
+    }
     return path

-
   def _InstallableTargetInstallPath(self):
     """Returns the location of the final output for an installable target."""
     # Xcode puts shared_library results into PRODUCT_DIR, and some gyp files
     # rely on this. Emulate this behavior for mac.
-
-    # XXX(TooTallNate): disabling this code since we don't want this behavior...
-    #if (self.type == 'shared_library' and
-    #    (self.flavor != 'mac' or self.toolset != 'target')):
-    #  # Install all shared libs into a common directory (per toolset) for
-    #  # convenient access with LD_LIBRARY_PATH.
-    #  return '$(builddir)/lib.%s/%s' % (self.toolset, self.alias)
+    if (self.type == 'shared_library' and
+        (self.flavor != 'mac' or self.toolset != 'target')):
+      # Install all shared libs into a common directory (per toolset) for
+      # convenient access with LD_LIBRARY_PATH.
+ return '$(builddir)/lib.%s/%s' % (self.toolset, self.alias) return '$(builddir)/' + self.alias - - -def WriteAutoRegenerationRule(params, root_makefile, makefile_name, - build_files): - """Write the target to regenerate the Makefile.""" - options = params['options'] - build_files_args = [gyp.common.RelativePath(filename, options.toplevel_dir) - for filename in params['build_files_arg']] - - gyp_binary = gyp.common.FixIfRelativePath(params['gyp_binary'], - options.toplevel_dir) - if not gyp_binary.startswith(os.sep): - gyp_binary = os.path.join('.', gyp_binary) - - root_makefile.write( - "quiet_cmd_regen_makefile = ACTION Regenerating $@\n" - "cmd_regen_makefile = cd $(srcdir); %(cmd)s\n" - "%(makefile_name)s: %(deps)s\n" - "\t$(call do_cmd,regen_makefile)\n\n" % { - 'makefile_name': makefile_name, - 'deps': ' '.join(SourceifyAndQuoteSpaces(bf) for bf in build_files), - 'cmd': gyp.common.EncodePOSIXShellList( - [gyp_binary, '-fmake'] + - gyp.RegenerateFlags(options) + - build_files_args)}) - - -def PerformBuild(data, configurations, params): - options = params['options'] - for config in configurations: - arguments = ['make'] - if options.toplevel_dir and options.toplevel_dir != '.': - arguments += '-C', options.toplevel_dir - arguments.append('BUILDTYPE=' + config) - print('Building [%s]: %s' % (config, arguments)) - subprocess.check_call(arguments) - - -def GenerateOutput(target_list, target_dicts, data, params): - options = params['options'] - flavor = gyp.common.GetFlavor(params) - generator_flags = params.get('generator_flags', {}) - builddir_name = generator_flags.get('output_dir', 'out') - android_ndk_version = generator_flags.get('android_ndk_version', None) - default_target = generator_flags.get('default_target', 'all') - - def CalculateMakefilePath(build_file, base_name): - """Determine where to write a Makefile for a given gyp file.""" - # Paths in gyp files are relative to the .gyp file, but we want - # paths relative to the source root for the master makefile. Grab - # the path of the .gyp file as the base to relativize against. - # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp". - base_path = gyp.common.RelativePath(os.path.dirname(build_file), - options.depth) - # We write the file in the base_path directory. - output_file = os.path.join(options.depth, base_path, base_name) - if options.generator_output: - output_file = os.path.join( - options.depth, options.generator_output, base_path, base_name) - base_path = gyp.common.RelativePath(os.path.dirname(build_file), - options.toplevel_dir) - return base_path, output_file - - # TODO: search for the first non-'Default' target. This can go - # away when we add verification that all targets have the - # necessary configurations. - default_configuration = None - toolsets = set([target_dicts[target]['toolset'] for target in target_list]) - for target in target_list: - spec = target_dicts[target] - if spec['default_configuration'] != 'Default': - default_configuration = spec['default_configuration'] - break - if not default_configuration: - default_configuration = 'Default' - - srcdir = '.' 
- makefile_name = 'Makefile' + options.suffix - makefile_path = os.path.join(options.toplevel_dir, makefile_name) - if options.generator_output: - global srcdir_prefix - makefile_path = os.path.join( - options.toplevel_dir, options.generator_output, makefile_name) - srcdir = gyp.common.RelativePath(srcdir, options.generator_output) - srcdir_prefix = '$(srcdir)/' - - flock_command= 'flock' - copy_archive_arguments = '-af' - makedep_arguments = '-MMD' - header_params = { - 'default_target': default_target, - 'builddir': builddir_name, - 'default_configuration': default_configuration, - 'flock': flock_command, - 'flock_index': 1, - 'link_commands': LINK_COMMANDS_LINUX, - 'extra_commands': '', - 'srcdir': srcdir, - 'copy_archive_args': copy_archive_arguments, - 'makedep_args': makedep_arguments, - 'CC.target': GetEnvironFallback(('CC_target', 'CC'), '$(CC)'), - 'AR.target': GetEnvironFallback(('AR_target', 'AR'), '$(AR)'), - 'CXX.target': GetEnvironFallback(('CXX_target', 'CXX'), '$(CXX)'), - 'LINK.target': GetEnvironFallback(('LINK_target', 'LINK'), '$(LINK)'), - 'CC.host': GetEnvironFallback(('CC_host', 'CC'), 'gcc'), - 'AR.host': GetEnvironFallback(('AR_host', 'AR'), 'ar'), - 'CXX.host': GetEnvironFallback(('CXX_host', 'CXX'), 'g++'), - 'LINK.host': GetEnvironFallback(('LINK_host', 'LINK'), '$(CXX.host)'), - } - if flavor == 'mac': - flock_command = './gyp-mac-tool flock' - header_params.update({ - 'flock': flock_command, - 'flock_index': 2, - 'link_commands': LINK_COMMANDS_MAC, - 'extra_commands': SHARED_HEADER_MAC_COMMANDS, - }) - elif flavor == 'android': - header_params.update({ - 'link_commands': LINK_COMMANDS_ANDROID, - }) - elif flavor == 'zos': - copy_archive_arguments = '-fPR' - makedep_arguments = '-qmakedep=gcc' - header_params.update({ - 'copy_archive_args': copy_archive_arguments, - 'makedep_args': makedep_arguments, - 'link_commands': LINK_COMMANDS_OS390, - 'CC.target': GetEnvironFallback(('CC_target', 'CC'), 'njsc'), - 'CXX.target': GetEnvironFallback(('CXX_target', 'CXX'), 'njsc++'), - 'CC.host': GetEnvironFallback(('CC_host', 'CC'), 'njsc'), - 'CXX.host': GetEnvironFallback(('CXX_host', 'CXX'), 'njsc++'), - }) - elif flavor == 'solaris': - header_params.update({ - 'flock': './gyp-flock-tool flock', - 'flock_index': 2, - }) - elif flavor == 'freebsd': - # Note: OpenBSD has sysutils/flock. lockf seems to be FreeBSD specific. 
- header_params.update({ - 'flock': 'lockf', - }) - elif flavor == 'openbsd': - copy_archive_arguments = '-pPRf' - header_params.update({ - 'copy_archive_args': copy_archive_arguments, - }) - elif flavor == 'aix': - copy_archive_arguments = '-pPRf' - header_params.update({ - 'copy_archive_args': copy_archive_arguments, - 'link_commands': LINK_COMMANDS_AIX, - 'flock': './gyp-flock-tool flock', - 'flock_index': 2, - }) - - build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0]) - make_global_settings_array = data[build_file].get('make_global_settings', []) - wrappers = {} - for key, value in make_global_settings_array: - if key.endswith('_wrapper'): - wrappers[key[:-len('_wrapper')]] = '$(abspath %s)' % value - make_global_settings = '' - for key, value in make_global_settings_array: - if re.match('.*_wrapper', key): - continue - if value[0] != '$': - value = '$(abspath %s)' % value - wrapper = wrappers.get(key) - if wrapper: - value = '%s %s' % (wrapper, value) - del wrappers[key] - if key in ('CC', 'CC.host', 'CXX', 'CXX.host'): - make_global_settings += ( - 'ifneq (,$(filter $(origin %s), undefined default))\n' % key) - # Let gyp-time envvars win over global settings. - env_key = key.replace('.', '_') # CC.host -> CC_host - if env_key in os.environ: - value = os.environ[env_key] - make_global_settings += ' %s = %s\n' % (key, value) - make_global_settings += 'endif\n' - else: - make_global_settings += '%s ?= %s\n' % (key, value) - # TODO(ukai): define cmd when only wrapper is specified in - # make_global_settings. - - header_params['make_global_settings'] = make_global_settings - - gyp.common.EnsureDirExists(makefile_path) - root_makefile = open(makefile_path, 'w') - root_makefile.write(SHARED_HEADER % header_params) - # Currently any versions have the same effect, but in future the behavior - # could be different. - if android_ndk_version: - root_makefile.write( - '# Define LOCAL_PATH for build of Android applications.\n' - 'LOCAL_PATH := $(call my-dir)\n' - '\n') - for toolset in toolsets: - root_makefile.write('TOOLSET := %s\n' % toolset) - WriteRootHeaderSuffixRules(root_makefile) - - # Put build-time support tools next to the root Makefile. - dest_path = os.path.dirname(makefile_path) - gyp.common.CopyTool(flavor, dest_path) - - # Find the list of targets that derive from the gyp file(s) being built. - needed_targets = set() - for build_file in params['build_files']: - for target in gyp.common.AllTargets(target_list, target_dicts, build_file): - needed_targets.add(target) - - build_files = set() - include_list = set() - for qualified_target in target_list: - build_file, target, toolset = gyp.common.ParseQualifiedTarget( - qualified_target) - - this_make_global_settings = data[build_file].get('make_global_settings', []) - assert make_global_settings_array == this_make_global_settings, ( - "make_global_settings needs to be the same for all targets. %s vs. %s" % - (this_make_global_settings, make_global_settings)) - - build_files.add(gyp.common.RelativePath(build_file, options.toplevel_dir)) - included_files = data[build_file]['included_files'] - for included_file in included_files: - # The included_files entries are relative to the dir of the build file - # that included them, so we have to undo that and then make them relative - # to the root dir. 
- relative_include_file = gyp.common.RelativePath( - gyp.common.UnrelativePath(included_file, build_file), - options.toplevel_dir) - abs_include_file = os.path.abspath(relative_include_file) - # If the include file is from the ~/.gyp dir, we should use absolute path - # so that relocating the src dir doesn't break the path. - if (params['home_dot_gyp'] and - abs_include_file.startswith(params['home_dot_gyp'])): - build_files.add(abs_include_file) - else: - build_files.add(relative_include_file) - - base_path, output_file = CalculateMakefilePath(build_file, - target + '.' + toolset + options.suffix + '.mk') - - spec = target_dicts[qualified_target] - configs = spec['configurations'] - - if flavor == 'mac': - gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec) - - writer = MakefileWriter(generator_flags, flavor) - writer.Write(qualified_target, base_path, output_file, spec, configs, - part_of_all=qualified_target in needed_targets) - - # Our root_makefile lives at the source root. Compute the relative path - # from there to the output_file for including. - mkfile_rel_path = gyp.common.RelativePath(output_file, - os.path.dirname(makefile_path)) - include_list.add(mkfile_rel_path) - - # Write out per-gyp (sub-project) Makefiles. - depth_rel_path = gyp.common.RelativePath(options.depth, os.getcwd()) - for build_file in build_files: - # The paths in build_files were relativized above, so undo that before - # testing against the non-relativized items in target_list and before - # calculating the Makefile path. - build_file = os.path.join(depth_rel_path, build_file) - gyp_targets = [target_dicts[target]['target_name'] for target in target_list - if target.startswith(build_file) and - target in needed_targets] - # Only generate Makefiles for gyp files with targets. - if not gyp_targets: - continue - base_path, output_file = CalculateMakefilePath(build_file, - os.path.splitext(os.path.basename(build_file))[0] + '.Makefile') - makefile_rel_path = gyp.common.RelativePath(os.path.dirname(makefile_path), - os.path.dirname(output_file)) - writer.WriteSubMake(output_file, makefile_rel_path, gyp_targets, - builddir_name) - - - # Write out the sorted list of includes. - root_makefile.write('\n') - for include_file in sorted(include_list): - # We wrap each .mk include in an if statement so users can tell make to - # not load a file by setting NO_LOAD. The below make code says, only - # load the .mk file if the .mk filename doesn't start with a token in - # NO_LOAD. 
-    root_makefile.write(
-        "ifeq ($(strip $(foreach prefix,$(NO_LOAD),\\\n"
-        "    $(findstring $(join ^,$(prefix)),\\\n"
-        "    $(join ^," + include_file + ")))),)\n")
-    root_makefile.write("  include " + include_file + "\n")
-    root_makefile.write("endif\n")
-  root_makefile.write('\n')
-
-  if (not generator_flags.get('standalone')
-      and generator_flags.get('auto_regeneration', True)):
-    WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files)
-
-  root_makefile.write(SHARED_FOOTER)
-
-  root_makefile.close()
diff --git a/gyp/gyp/NinjaWriter.py b/gyp/gyp/NinjaWriter.py
new file mode 100644
index 0000000000..055b750e23
--- /dev/null
+++ b/gyp/gyp/NinjaWriter.py
@@ -0,0 +1,1407 @@
+import copy
+import hashlib
+import json
+import os.path
+import re
+import sys
+from collections import OrderedDict
+
+import gyp
+from gyp import msvs_emulation, xcode_emulation
+from gyp.generator.ninja import generator_default_variables, OpenOutput, GetWinLinkRuleNameSuffix, CalculateVariables
+from gyp.lib import ninja_syntax
+
+
+class Target(object):
+  """Target represents the paths used within a single gyp target.
+
+  Conceptually, building a single target A is a series of steps:
+
+  1) actions/rules/copies generates source/resources/etc.
+  2) compiles generates .o files
+  3) link generates a binary (library/executable)
+  4) bundle merges the above in a mac bundle
+
+  (Any of these steps can be optional.)
+
+  From a build ordering perspective, a dependent target B could just
+  depend on the last output of this series of steps.
+
+  But some dependent commands sometimes need to reach inside the box.
+  For example, when linking B it needs to get the path to the static
+  library generated by A.
+
+  This object stores those paths. To keep things simple, member
+  variables only store concrete paths to single files, while methods
+  compute derived values like "the last output of the target".
+  """
+
+  def __init__(self, type_arg):
+    # Gyp type ("static_library", etc.) of this target.
+    self.type = type_arg
+    # File representing whether any input dependencies necessary for
+    # dependent actions have completed.
+    self.preaction_stamp = None
+    # File representing whether any input dependencies necessary for
+    # dependent compiles have completed.
+    self.precompile_stamp = None
+    # File representing the completion of actions/rules/copies, if any.
+    self.actions_stamp = None
+    # Path to the output of the link step, if any.
+    self.binary = None
+    # Path to the file representing the completion of building the bundle,
+    # if any.
+    self.bundle = None
+    # On Windows, incremental linking requires linking against all the .objs
+    # that compose a .lib (rather than the .lib itself). That list is stored
+    # here. In this case, we also need to save the compile_deps for the target,
+    # so that the target that directly depends on the .objs can also depend
+    # on those.
+    self.component_objs = None
+    self.compile_deps = None
+    # Windows only. The import .lib is the output of a build step, but
+    # because dependents only link against the lib (not both the lib and the
+    # dll) we keep track of the import library here.
+    self.import_lib = None
+    # Track if this target contains any C++ files, to decide if gcc or g++
+    # should be used for linking.
+ self.uses_cpp = False + + def Linkable(self): + """Return true if this is a target that can be linked against.""" + return self.type in ('static_library', 'shared_library') + + def UsesToc(self, flavor): + """Return true if the target should produce a restat rule based on a TOC + file.""" + # For bundles, the .TOC should be produced for the binary, not for + # FinalOutput(). But the naive approach would put the TOC file into the + # bundle, so don't do this for bundles for now. + if flavor == 'win' or self.bundle: + return False + return self.type in ('shared_library', 'loadable_module') + + def PreActionInput(self, flavor): + """Return the path, if any, that should be used as a dependency of + any dependent action step.""" + if self.UsesToc(flavor): + return self.FinalOutput() + '.TOC' + return self.FinalOutput() or self.preaction_stamp + + def PreCompileInput(self): + """Return the path, if any, that should be used as a dependency of + any dependent compile step.""" + return self.actions_stamp or self.precompile_stamp + + def FinalOutput(self): + """Return the last output of the target, which depends on all prior + steps.""" + return self.bundle or self.binary or self.actions_stamp + + +# A small discourse on paths as used within the Ninja build: +# All files we produce (both at gyp and at build time) appear in the build directory (e.g. out/Debug). +# +# Paths within a given .gyp file are always relative to the directory containing the .gyp file. Call these "gyp paths". +# This includes sources as well as the starting directory a given gyp rule/action expects to be run from. +# We call the path from the source root to the gyp file the "base directory" within the per-.gyp-file NinjaWriter code. +# +# All paths as written into the .ninja files are relative to the build directory. Call these paths "ninja paths". +# +# We translate between these two notions of paths with two helper functions: +# +# - NinjaWriter._GypPathToNinja translates a gyp path (i.e. relative to the .gyp file) into the equivalent ninja path. +# +# - NinjaWriter._GypPathToUniqueOutput translates a gyp path into a ninja path to write an output file; the result can be namespaced such that it is unique +# to the input file name as well as the output target name. 
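To make the two translations concrete before the class body, here is a standalone sketch of what _GypPathToNinja and _GypPathToUniqueOutput compute for a target 'targ' defined in foo/bar.gyp building into out/Debug. Illustration only: the directory layout is hypothetical, and gyp.common.InvertRelativePath is approximated by a precomputed value.

    import os

    build_to_base = '../../foo'  # assumed: out/Debug -> source root -> foo/

    def gyp_path_to_ninja_sketch(path):
        # gyp paths are relative to the .gyp file; ninja paths are relative
        # to the build directory
        return os.path.normpath(os.path.join(build_to_base, path))

    def gyp_path_to_unique_output_sketch(base_dir, target_name, path, toolset='target'):
        # qualified output: obj[.toolset]/<base_dir>/<dir>/<target>.<basename>
        obj = 'obj' if toolset == 'target' else 'obj.' + toolset
        path_dir, path_basename = os.path.split(path)
        return os.path.normpath(
            os.path.join(obj, base_dir, path_dir, target_name + '.' + path_basename))

    print(gyp_path_to_ninja_sketch('baz/out.o'))                         # ../../foo/baz/out.o
    print(gyp_path_to_unique_output_sketch('foo', 'targ', 'baz/out.o'))  # obj/foo/baz/targ.out.o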
+class NinjaWriter(object): + def __init__(self, hash_for_rules, target_outputs, base_dir, build_dir, output_file, toplevel_build, output_file_name, flavor, spec, generator_flags, config_name, toplevel_dir): + """ + base_dir: path from source root to directory containing this gyp file, + by gyp semantics, all input paths are relative to this + build_dir: path from source root to build output + toplevel_dir: path to the toplevel directory + """ + self.flavor = flavor + self.spec = spec + self.generator_flags = generator_flags + self.xcode_settings = self.msvs_settings = None + self.is_mac_bundle = xcode_emulation.IsMacBundle(self.flavor, spec) + self.target_rpath = self.generator_flags.get('target_rpath', r'\$$ORIGIN/lib/') + self.is_standalone_static_library = bool(spec.get('standalone_static_library', 0)) + self.target = Target(spec['type']) + self.toolset = spec['toolset'] + self.name = spec['target_name'] + self.config_name = config_name + self.config = spec['configurations'][self.config_name] + self.hash_for_rules = hash_for_rules + self.target_outputs = target_outputs + self.base_dir = base_dir + self.build_dir = build_dir + self.ninja = ninja_syntax.Writer(output_file) + self.toplevel_build = toplevel_build + self.output_file_name = output_file_name + self.archs = [] + self.arch_subninjas = [] + + self.abs_build_dir = None + if toplevel_dir is not None: + self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir, build_dir)) + self.obj_ext = '.obj' if flavor == 'win' else '.o' + if flavor == 'win': + # See docstring of msvs_emulation.GenerateEnvironmentFiles(). + self.win_env = {} + for arch in ('x86', 'x64'): + self.win_env[arch] = 'environment.' + arch + + # Relative path from build output dir to base dir. + build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir) + self.build_to_base = os.path.join(build_to_top, base_dir) + # Relative path from base dir to build dir. + base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir) + self.base_to_build = os.path.join(base_to_top, build_dir) + + def _ExpandSpecial(self, path, product_dir=None): + """Expand specials like $!PRODUCT_DIR in |path|. + + If |product_dir| is None, assumes the cwd is already the product + dir. Otherwise, |product_dir| is the relative path to the product + dir. + """ + + PRODUCT_DIR = '$!PRODUCT_DIR' + if PRODUCT_DIR in path: + if product_dir: + path = path.replace(PRODUCT_DIR, product_dir) + else: + path = path.replace(PRODUCT_DIR + '/', '') + path = path.replace(PRODUCT_DIR + '\\', '') + path = path.replace(PRODUCT_DIR, '.') + + INTERMEDIATE_DIR = '$!INTERMEDIATE_DIR' + if INTERMEDIATE_DIR in path: + int_dir = self._GypPathToUniqueOutput('gen') + # _GypPathToUniqueOutput generates a path relative to the product dir, + # so insert product_dir in front if it is provided. 
+ path = path.replace(INTERMEDIATE_DIR, os.path.join(product_dir or '', int_dir)) + + CONFIGURATION_NAME = '$|CONFIGURATION_NAME' + path = path.replace(CONFIGURATION_NAME, self.config_name) + + return path + + def _ExpandRuleVariables(self, path, root, dirname, source, ext, name): + if self.flavor == 'win': + path = self.msvs_settings.ConvertVSMacros(path, config=self.config_name) + path = path.replace(generator_default_variables['RULE_INPUT_ROOT'], root) + path = path.replace(generator_default_variables['RULE_INPUT_DIRNAME'], dirname) + path = path.replace(generator_default_variables['RULE_INPUT_PATH'], source) + path = path.replace(generator_default_variables['RULE_INPUT_EXT'], ext) + path = path.replace(generator_default_variables['RULE_INPUT_NAME'], name) + return path + + def _GypPathToNinja(self, path, env=None): + """Translate a gyp path to a ninja path, optionally expanding environment + variable references in |path| with |env|. + + See the above discourse on path conversions.""" + if env: + if self.flavor == 'mac': + path = gyp.xcode_emulation.ExpandEnvVars(path, env) + elif self.flavor == 'win': + path = gyp.msvs_emulation.ExpandMacros(path, env) + if path.startswith('$!'): + expanded = self._ExpandSpecial(path) + if self.flavor == 'win': + expanded = os.path.normpath(expanded) + return expanded + if '$|' in path: + path = self._ExpandSpecial(path) + assert '$' not in path, path + return os.path.normpath(os.path.join(self.build_to_base, path)) + + def _GypPathToUniqueOutput(self, path, qualified=True): + """Translate a gyp path to a ninja path for writing output. + + If qualified is True, qualify the resulting filename with the name + of the target. This is necessary when e.g. compiling the same + path twice for two separate output targets. + + See the above discourse on path conversions.""" + + path = self._ExpandSpecial(path) + assert not path.startswith('$'), path + + # Translate the path following this scheme: + # Input: foo/bar.gyp, target targ, references baz/out.o + # Output: obj/foo/baz/targ.out.o (if qualified) + # obj/foo/baz/out.o (otherwise) + # (and obj.host instead of obj for cross-compiles) + # + # Why this scheme and not some other one? + # 1) for a given input, you can compute all derived outputs by matching + # its path, even if the input is brought via a gyp file with '..'. + # 2) simple files like libraries and stamps have a simple filename. + + obj = 'obj' + if self.toolset != 'target': + obj += '.' + self.toolset + + path_dir, path_basename = os.path.split(path) + assert not os.path.isabs(path_dir), ("'%s' can not be absolute path (see crbug.com/462153)." % path_dir) + + if qualified: + path_basename = self.name + '.' + path_basename + return os.path.normpath(os.path.join(obj, self.base_dir, path_dir, path_basename)) + + def _WriteCollapsedDependencies(self, name, targets, order_only=None): + """Given a list of targets, return a path for a single file + representing the result of building all the targets or None. 
+ + Uses a stamp file if necessary.""" + + assert targets == [t for t in targets if t], targets + if len(targets) == 0: + assert not order_only + return None + if len(targets) > 1 or order_only: + stamp = self._GypPathToUniqueOutput(name + '.stamp') + targets = self.ninja.build(stamp, 'stamp', targets, order_only=order_only) + self.ninja.newline() + return targets[0] + + def _SubninjaNameForArch(self, arch): + output_file_base = os.path.splitext(self.output_file_name)[0] + return '%s.%s.ninja' % (output_file_base, arch) + + def _WinIdlRule(self, source, prebuild, outputs): + """Handle the implicit VS .idl rule for one source file. Fills |outputs| + with files that are generated.""" + outdir, output, raw_vars, flags = self.msvs_settings.GetIdlBuildData(source, self.config_name) + outdir = self._GypPathToNinja(outdir) + + def fix_path(path, rel=None): + path = os.path.join(outdir, path) + dirname, basename = os.path.split(source) + root, ext = os.path.splitext(basename) + path = self._ExpandRuleVariables(path, root, dirname, source, ext, basename) + if rel: + path = os.path.relpath(path, rel) + return path + + variables = [(name, fix_path(value, outdir)) for name, value in raw_vars] + output = [fix_path(p) for p in output] + variables.append(('outdir', outdir)) + variables.append(('idlflags', flags)) + inpt = self._GypPathToNinja(source) + self.ninja.build(output, 'idl', inpt, variables=variables, order_only=prebuild) + outputs.extend(output) + + def _WriteWinIdlFiles(self, spec, prebuild): + """Writes rules to match MSVS's implicit idl handling.""" + assert self.flavor == 'win' + if self.msvs_settings.HasExplicitIdlRulesOrActions(spec): + return [] + outputs = [] + for source in filter(lambda x: x.endswith('.idl'), spec['sources']): + self._WinIdlRule(source, prebuild, outputs) + return outputs + + def _WriteActionsRulesCopies(self, spec, extra_sources, prebuild, mac_bundle_depends): + """Write out the Actions, Rules, and Copies steps. Return a path + representing the outputs of these steps.""" + outputs = [] + if self.is_mac_bundle: + mac_bundle_resources = spec.get('mac_bundle_resources', [])[:] + else: + mac_bundle_resources = [] + extra_mac_bundle_resources = [] + + if 'actions' in spec: + outputs += self._WriteActions(spec['actions'], extra_sources, prebuild, extra_mac_bundle_resources) + if 'rules' in spec: + outputs += self._WriteRules(spec['rules'], extra_sources, prebuild, mac_bundle_resources, extra_mac_bundle_resources) + if 'copies' in spec: + outputs += self._WriteCopies(spec['copies'], prebuild, mac_bundle_depends) + + if 'sources' in spec and self.flavor == 'win': + outputs += self._WriteWinIdlFiles(spec, prebuild) + + if self.xcode_settings and self.xcode_settings.IsIosFramework(): + self._WriteIOSFrameworkHeaders(spec, outputs, prebuild) + + stamp = self._WriteCollapsedDependencies('actions_rules_copies', outputs) + + if self.is_mac_bundle: + xcassets = self._WriteMacBundleResources(extra_mac_bundle_resources + mac_bundle_resources, mac_bundle_depends) + partial_info_plist = self._WriteMacXCAssets(xcassets, mac_bundle_depends) + self._WriteMacInfoPlist(partial_info_plist, mac_bundle_depends) + + return stamp + + def _GenerateDescription(self, verb, message, fallback): + """Generate and return a description of a build step. + + |verb| is the short summary, e.g. ACTION or RULE. + |message| is a hand-written description, or None if not available. + |fallback| is the gyp-level name of the step, usable as a fallback. 
+ """ + if self.toolset != 'target': + verb += '(%s)' % self.toolset + if message: + return '%s %s' % (verb, self._ExpandSpecial(message)) + else: + return '%s %s: %s' % (verb, self.name, fallback) + + def _WriteActions(self, actions, extra_sources, prebuild, extra_mac_bundle_resources): + # Actions cd into the base directory. + env = self._GetToolchainEnv() + all_outputs = [] + for action in actions: + # First write out a rule for the action. + name = '%s_%s' % (action['action_name'], self.hash_for_rules) + description = self._GenerateDescription('ACTION', action.get('message', None), name) + is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(action) if self.flavor == 'win' else False) + args = action['action'] + depfile = action.get('depfile', None) + if depfile: + depfile = self._ExpandSpecial(depfile, self.base_to_build) + pool = 'console' if int(action.get('ninja_use_console', 0)) else None + rule_name, _ = self._WriteNewNinjaRule(name, args, description, is_cygwin, env, pool, depfile=depfile) + + inputs = [self._GypPathToNinja(i, env) for i in action['inputs']] + if int(action.get('process_outputs_as_sources', False)): + extra_sources += action['outputs'] + if int(action.get('process_outputs_as_mac_bundle_resources', False)): + extra_mac_bundle_resources += action['outputs'] + outputs = [self._GypPathToNinja(o, env) for o in action['outputs']] + + # Then write out an edge using the rule. + self.ninja.build(outputs, rule_name, inputs, order_only=prebuild) + all_outputs += outputs + + self.ninja.newline() + + return all_outputs + + def _WriteRules(self, rules, extra_sources, prebuild, mac_bundle_resources, extra_mac_bundle_resources): + env = self._GetToolchainEnv() + all_outputs = [] + for rule in rules: + # Skip a rule with no action and no inputs. + if 'action' not in rule and not rule.get('rule_sources', []): + continue + + # First write out a rule for the rule action. + name = '%s_%s' % (rule['rule_name'], self.hash_for_rules) + + args = rule['action'] + description = self._GenerateDescription('RULE', rule.get('message', None), ('%s ' + generator_default_variables['RULE_INPUT_PATH']) % name) + is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(rule) if self.flavor == 'win' else False) + pool = 'console' if int(rule.get('ninja_use_console', 0)) else None + rule_name, args = self._WriteNewNinjaRule(name, args, description, is_cygwin, env, pool) + + # TODO: if the command references the outputs directly, we should + # simplify it to just use $out. + + # Rules can potentially make use of some special variables which + # must vary per source file. + # Compute the list of variables we'll need to provide. + special_locals = ('source', 'root', 'dirname', 'ext', 'name') + needed_variables = {'source'} + for argument in args: + for var in special_locals: + if '${%s}' % var in argument: + needed_variables.add(var) + needed_variables = sorted(needed_variables) + + def cygwin_mung(path): + if is_cygwin: + return path.replace('\\', '/') + return path + + inputs = [self._GypPathToNinja(i, env) for i in rule.get('inputs', [])] + + # If there are n source files matching the rule, and m additional rule + # inputs, then adding 'inputs' to each build edge written below will + # write m * n inputs. Collapsing reduces this to m + n. 
+ sources = rule.get('rule_sources', []) + num_inputs = len(inputs) + if prebuild: + num_inputs += 1 + if num_inputs > 2 and len(sources) > 2: + inputs = [self._WriteCollapsedDependencies(rule['rule_name'], inputs, order_only=prebuild)] + prebuild = [] + + # For each source file, write an edge that generates all the outputs. + for source in sources: + source = os.path.normpath(source) + dirname, basename = os.path.split(source) + root, ext = os.path.splitext(basename) + + # Gather the list of inputs and outputs, expanding $vars if possible. + outputs = [self._ExpandRuleVariables(o, root, dirname, source, ext, basename) for o in rule['outputs']] + + if int(rule.get('process_outputs_as_sources', False)): + extra_sources += outputs + + was_mac_bundle_resource = source in mac_bundle_resources + if was_mac_bundle_resource or int(rule.get('process_outputs_as_mac_bundle_resources', False)): + extra_mac_bundle_resources += outputs + # Note: This is n_resources * n_outputs_in_rule. Put to-be-removed + # items in a set and remove them all in a single pass if this becomes + # a performance issue. + if was_mac_bundle_resource: + mac_bundle_resources.remove(source) + + extra_bindings = [] + for var in needed_variables: + if var == 'root': + extra_bindings.append(('root', cygwin_mung(root))) + elif var == 'dirname': + # '$dirname' is a parameter to the rule action, which means + # it shouldn't be converted to a Ninja path. But we don't + # want $!PRODUCT_DIR in there either. + dirname_expanded = self._ExpandSpecial(dirname, self.base_to_build) + extra_bindings.append(('dirname', cygwin_mung(dirname_expanded))) + elif var == 'source': + # '$source' is a parameter to the rule action, which means + # it shouldn't be converted to a Ninja path. But we don't + # want $!PRODUCT_DIR in there either. + source_expanded = self._ExpandSpecial(source, self.base_to_build) + extra_bindings.append(('source', cygwin_mung(source_expanded))) + elif var == 'ext': + extra_bindings.append(('ext', ext)) + elif var == 'name': + extra_bindings.append(('name', cygwin_mung(basename))) + else: + assert var is None, repr(var) + + outputs = [self._GypPathToNinja(o, env) for o in outputs] + if self.flavor == 'win': + # _WriteNewNinjaRule uses unique_name for creating an rsp file on win. + extra_bindings.append(('unique_name', hashlib.md5(outputs[0].encode('utf-8')).hexdigest())) + + self.ninja.build(outputs, rule_name, self._GypPathToNinja(source), implicit=inputs, order_only=prebuild, variables=extra_bindings) + + all_outputs.extend(outputs) + + return all_outputs + + def _WriteCopies(self, copies, prebuild, mac_bundle_depends): + outputs = [] + if self.xcode_settings: + extra_env = self.xcode_settings.GetPerTargetSettings() + env = self._GetToolchainEnv(additional_settings=extra_env) + else: + env = self._GetToolchainEnv() + for cpy in copies: + for path in cpy['files']: + # Normalize the path so trailing slashes don't confuse us. + path = os.path.normpath(path) + basename = os.path.split(path)[1] + src = self._GypPathToNinja(path, env) + dst = self._GypPathToNinja(os.path.join(cpy['destination'], basename), env) + outputs += self.ninja.build(dst, 'copy', src, order_only=prebuild) + if self.is_mac_bundle: + # gyp has mac_bundle_resources to copy things into a bundle's + # Resources folder, but there's no built-in way to copy files to other + # places in the bundle. Hence, some targets use copies for this. 
Check
+        # if this file is copied into the current bundle, and if so add it to
+        # the bundle depends so that dependent targets get rebuilt if the copy
+        # input changes.
+        if dst.startswith(self.xcode_settings.GetBundleContentsFolderPath()):
+          mac_bundle_depends.append(dst)
+
+    return outputs
+
+  def _WriteIOSFrameworkHeaders(self, spec, outputs, prebuild):
+    """Prebuild steps to generate hmap files and copy headers to destination."""
+    framework = self._ComputeMacBundleOutput()
+    all_sources = spec['sources']
+    copy_headers = spec['mac_framework_headers']
+    output = self._GypPathToUniqueOutput('headers.hmap')
+    self.xcode_settings.header_map_path = output
+    # Materialize the map()/filter() results as lists: ninja_syntax expects
+    # real lists, and a one-shot iterator would be mishandled on Python 3.
+    all_headers = list(map(self._GypPathToNinja, filter(lambda x: x.endswith('.h'), all_sources)))
+    variables = [('framework', framework), ('copy_headers', list(map(self._GypPathToNinja, copy_headers)))]
+    outputs.extend(self.ninja.build(output, 'compile_ios_framework_headers', all_headers, variables=variables, order_only=prebuild))
+
+  def _WriteMacBundleResources(self, resources, bundle_depends):
+    """Writes ninja edges for 'mac_bundle_resources'."""
+    xcassets = []
+
+    extra_env = self.xcode_settings.GetPerTargetSettings()
+    env = self._GetSortedXcodeEnv(additional_settings=extra_env)
+    env = self._ComputeExportEnvString(env)
+    isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
+
+    for output, res in gyp.xcode_emulation.GetMacBundleResources(generator_default_variables['PRODUCT_DIR'], self.xcode_settings, map(self._GypPathToNinja, resources)):
+      output = self._ExpandSpecial(output)
+      if os.path.splitext(output)[-1] != '.xcassets':
+        self.ninja.build(output, 'mac_tool', res, variables=[('mactool_cmd', 'copy-bundle-resource'), ('env', env), ('binary', isBinary)])
+        bundle_depends.append(output)
+      else:
+        xcassets.append(res)
+    return xcassets
+
+  def _WriteMacXCAssets(self, xcassets, bundle_depends):
+    """Writes ninja edges for 'mac_bundle_resources' .xcassets files.
+
+    This adds an invocation of 'actool' via the 'mac_tool.py' helper script.
+    It assumes that the assets catalogs define at least one image-set and
+    thus an Assets.car file will be generated in the application resources
+    directory. If this is not the case, then the build will probably be done
If this is not the case, then the build will probably be done + at each invocation of ninja.""" + if not xcassets: + return + + extra_arguments = {} + settings_to_arg = { + 'XCASSETS_APP_ICON': 'app-icon', + 'XCASSETS_LAUNCH_IMAGE': 'launch-image', + } + settings = self.xcode_settings.xcode_settings[self.config_name] + for settings_key, arg_name in settings_to_arg.items(): + value = settings.get(settings_key) + if value: + extra_arguments[arg_name] = value + + partial_info_plist = None + if extra_arguments: + partial_info_plist = self._GypPathToUniqueOutput('assetcatalog_generated_info.plist') + extra_arguments['output-partial-info-plist'] = partial_info_plist + + outputs = [os.path.join(self.xcode_settings.GetBundleResourceFolder(), 'Assets.car')] + if partial_info_plist: + outputs.append(partial_info_plist) + + keys = self._QuoteShellArgument(json.dumps(extra_arguments), self.flavor) + extra_env = self.xcode_settings.GetPerTargetSettings() + env = self._GetSortedXcodeEnv(additional_settings=extra_env) + env = self._ComputeExportEnvString(env) + + bundle_depends.extend(self.ninja.build(outputs, 'compile_xcassets', xcassets, variables=[('env', env), ('keys', keys)])) + return partial_info_plist + + def _WriteMacInfoPlist(self, partial_info_plist, bundle_depends): + """Write build rules for bundle Info.plist files.""" + info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(generator_default_variables['PRODUCT_DIR'], self.xcode_settings, self._GypPathToNinja) + if not info_plist: + return + out = self._ExpandSpecial(out) + if defines: + # Create an intermediate file to store preprocessed results. + intermediate_plist = self._GypPathToUniqueOutput(os.path.basename(info_plist)) + defines = ' '.join([self._Define(d, self.flavor) for d in defines]) + info_plist = self.ninja.build(intermediate_plist, 'preprocess_infoplist', info_plist, variables=[('defines', defines)]) + + env = self._GetSortedXcodeEnv(additional_settings=extra_env) + env = self._ComputeExportEnvString(env) + + if partial_info_plist: + intermediate_plist = self._GypPathToUniqueOutput('merged_info.plist') + info_plist = self.ninja.build(intermediate_plist, 'merge_infoplist', [partial_info_plist, info_plist]) + + keys = self.xcode_settings.GetExtraPlistItems(self.config_name) + keys = self._QuoteShellArgument(json.dumps(keys), self.flavor) + isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name) + self.ninja.build(out, 'copy_infoplist', info_plist, variables=[('env', env), ('keys', keys), ('binary', isBinary)]) + bundle_depends.append(out) + + def _WriteSources(self, config_name, config, sources, pre_depends, precompiled_header, spec): + """Write build rules to compile all of |sources|.""" + if self.toolset == 'host': + self.ninja.variable('ar', '$ar_host') + self.ninja.variable('cc', '$cc_host') + self.ninja.variable('cxx', '$cxx_host') + self.ninja.variable('ld', '$ld_host') + self.ninja.variable('ldxx', '$ldxx_host') + self.ninja.variable('nm', '$nm_host') + self.ninja.variable('readelf', '$readelf_host') + + if self.flavor != 'mac' or len(self.archs) == 1: + return self._WriteSourcesForArch(self.ninja, config_name, config, sources, pre_depends, precompiled_header, spec) + else: + return OrderedDict( + (arch, self._WriteSourcesForArch(self.arch_subninjas[arch], config_name, config, sources, pre_depends, precompiled_header, spec, arch=arch)) + for arch in self.archs + ) + + def _WriteSourcesForArch(self, ninja_file, config_name, config, sources, pre_depends, precompiled_header, spec, arch=None): 
+ """Write build rules to compile all of |sources|.""" + + extra_defines = [] + asmflags = '' + cflags_objc = '' + cflags_objcc = '' + if self.flavor == 'mac': + cflags = self.xcode_settings.GetCflags(config_name, arch=arch) + cflags_c = self.xcode_settings.GetCflagsC(config_name) + cflags_cc = self.xcode_settings.GetCflagsCC(config_name) + cflags_objc = ['$cflags_c'] + self.xcode_settings.GetCflagsObjC(config_name) + cflags_objcc = ['$cflags_cc'] + self.xcode_settings.GetCflagsObjCC(config_name) + elif self.flavor == 'win': + asmflags = self.msvs_settings.GetAsmflags(config_name) + cflags = self.msvs_settings.GetCflags(config_name) + cflags_c = self.msvs_settings.GetCflagsC(config_name) + cflags_cc = self.msvs_settings.GetCflagsCC(config_name) + extra_defines = self.msvs_settings.GetComputedDefines(config_name) + # See comment at cc_command for why there's two .pdb files. + pdbpath_c = pdbpath_cc = self.msvs_settings.GetCompilerPdbName(config_name, self._ExpandSpecial) + if not pdbpath_c: + obj = 'obj' + if self.toolset != 'target': + obj += '.' + self.toolset + pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, self.name)) + pdbpath_c = pdbpath + '.c.pdb' + pdbpath_cc = pdbpath + '.cc.pdb' + self._WriteVariableList(ninja_file, 'pdbname_c', [pdbpath_c]) + self._WriteVariableList(ninja_file, 'pdbname_cc', [pdbpath_cc]) + self._WriteVariableList(ninja_file, 'pchprefix', [self.name]) + else: + cflags = config.get('cflags', []) + cflags_c = config.get('cflags_c', []) + cflags_cc = config.get('cflags_cc', []) + + # Respect environment variables related to build, but target-specific + # flags can still override them. + if self.toolset == 'target': + cflags_c = (os.environ.get('CPPFLAGS', '').split() + os.environ.get('CFLAGS', '').split() + cflags_c) + cflags_cc = (os.environ.get('CPPFLAGS', '').split() + os.environ.get('CXXFLAGS', '').split() + cflags_cc) + elif self.toolset == 'host': + cflags_c = (os.environ.get('CPPFLAGS_host', '').split() + os.environ.get('CFLAGS_host', '').split() + cflags_c) + cflags_cc = (os.environ.get('CPPFLAGS_host', '').split() + os.environ.get('CXXFLAGS_host', '').split() + cflags_cc) + + defines = config.get('defines', []) + extra_defines + self._WriteVariableList(ninja_file, 'defines', [self._Define(d, self.flavor) for d in defines]) + if self.flavor == 'win': + self._WriteVariableList(ninja_file, 'asmflags', map(self._ExpandSpecial, asmflags)) + self._WriteVariableList(ninja_file, 'rcflags', [self._QuoteShellArgument(self._ExpandSpecial(f), self.flavor) for f in self.msvs_settings.GetRcflags(config_name, self._GypPathToNinja)]) + + include_dirs = config.get('include_dirs', []) + + env = self._GetToolchainEnv() + if self.flavor == 'win': + include_dirs = self.msvs_settings.AdjustIncludeDirs(include_dirs, config_name) + self._WriteVariableList(ninja_file, 'includes', [self._QuoteShellArgument('-I' + self._GypPathToNinja(i, env), self.flavor) for i in include_dirs]) + + if self.flavor == 'win': + midl_include_dirs = config.get('midl_include_dirs', []) + midl_include_dirs = self.msvs_settings.AdjustMidlIncludeDirs(midl_include_dirs, config_name) + self._WriteVariableList(ninja_file, 'midl_includes', [self._QuoteShellArgument('-I' + self._GypPathToNinja(i, env), self.flavor) for i in midl_include_dirs]) + + pch_commands = precompiled_header.GetPchBuildCommands(arch) + if self.flavor == 'mac': + # Most targets use no precompiled headers, so only write these if needed. 
+ for ext, var in [('c', 'cflags_pch_c'), ('cc', 'cflags_pch_cc'), ('m', 'cflags_pch_objc'), ('mm', 'cflags_pch_objcc')]: + include = precompiled_header.GetInclude(ext, arch) + if include: + ninja_file.variable(var, include) + + arflags = config.get('arflags', []) + + self._WriteVariableList(ninja_file, 'cflags', map(self._ExpandSpecial, cflags)) + self._WriteVariableList(ninja_file, 'cflags_c', map(self._ExpandSpecial, cflags_c)) + self._WriteVariableList(ninja_file, 'cflags_cc', map(self._ExpandSpecial, cflags_cc)) + if self.flavor == 'mac': + self._WriteVariableList(ninja_file, 'cflags_objc', map(self._ExpandSpecial, cflags_objc)) + self._WriteVariableList(ninja_file, 'cflags_objcc', map(self._ExpandSpecial, cflags_objcc)) + self._WriteVariableList(ninja_file, 'arflags', map(self._ExpandSpecial, arflags)) + ninja_file.newline() + outputs = [] + has_rc_source = False + for source in sources: + filename, ext = os.path.splitext(source) + ext = ext[1:] + obj_ext = self.obj_ext + if ext in ('cc', 'cpp', 'cxx'): + command = 'cxx' + self.target.uses_cpp = True + elif ext == 'c' or (ext == 'S' and self.flavor != 'win'): + command = 'cc' + elif ext == 's' and self.flavor != 'win': # Doesn't generate .o.d files. + command = 'cc_s' + elif self.flavor == 'win' and ext == 'asm' and not self.msvs_settings.HasExplicitAsmRules(spec): + command = 'asm' + # Add the _asm suffix as msvs is capable of handling .cc and + # .asm files of the same name without collision. + obj_ext = '_asm.obj' + elif self.flavor == 'mac' and ext == 'm': + command = 'objc' + elif self.flavor == 'mac' and ext == 'mm': + command = 'objcxx' + self.target.uses_cpp = True + elif self.flavor == 'win' and ext == 'rc': + command = 'rc' + obj_ext = '.res' + has_rc_source = True + else: + # Ignore unhandled extensions. + continue + inpt = self._GypPathToNinja(source) + otpt = self._GypPathToUniqueOutput(filename + obj_ext) + if arch is not None: + otpt = self._AddArch(otpt, arch) + implicit = precompiled_header.GetObjDependencies([inpt], [otpt], arch) + variables = [] + if self.flavor == 'win': + variables, otpt, implicit = precompiled_header.GetFlagsModifications(inpt, otpt, implicit, command, cflags_c, cflags_cc, self._ExpandSpecial) + ninja_file.build(otpt, command, inpt, implicit=[gch for _, _, gch in implicit], order_only=pre_depends, variables=variables) + outputs.append(otpt) + + if has_rc_source: + resource_include_dirs = config.get('resource_include_dirs', include_dirs) + self._WriteVariableList(ninja_file, 'resource_includes', [self._QuoteShellArgument('-I' + self._GypPathToNinja(i, env), self.flavor) for i in resource_include_dirs]) + + self._WritePchTargets(ninja_file, pch_commands) + + ninja_file.newline() + return outputs + + @staticmethod + def _WritePchTargets(ninja_file, pch_commands): + """Writes ninja rules to compile prefix headers.""" + if not pch_commands: + return + + for gch, lang_flag, lang, inpt in pch_commands: + var_name = { + 'c': 'cflags_pch_c', + 'cc': 'cflags_pch_cc', + 'm': 'cflags_pch_objc', + 'mm': 'cflags_pch_objcc', + }[lang] + + lang_map = {'c': 'cc', 'cc': 'cxx', 'm': 'objc', 'mm': 'objcxx', } + cmd = lang_map.get(lang) + ninja_file.build(gch, cmd, inpt, variables=[(var_name, lang_flag)]) + + def _WriteLink(self, spec, config_name, config, link_deps, compile_deps): + """Write out a link step. Fills out target.binary. 
""" + if self.flavor != 'mac' or len(self.archs) == 1: + return self._WriteLinkForArch(self.ninja, spec, config_name, config, link_deps, compile_deps) + else: + output = self._ComputeOutput(spec) + inputs = [self._WriteLinkForArch(self.arch_subninjas[arch], spec, config_name, config, link_deps[arch], compile_deps, arch=arch) for arch in self.archs] + extra_bindings = [] + build_output = output + if not self.is_mac_bundle: + self._AppendPostbuildVariable(extra_bindings, spec, output, output) + + # TODO(yyanagisawa): more work needed to fix: + # https://code.google.com/p/gyp/issues/detail?id=411 + if spec['type'] in ('shared_library', 'loadable_module') and not self.is_mac_bundle: + extra_bindings.append(('lib', output)) + self.ninja.build([output, output + '.TOC'], 'solipo', inputs, variables=extra_bindings) + else: + self.ninja.build(build_output, 'lipo', inputs, variables=extra_bindings) + return output + + def _WriteLinkForArch(self, ninja_file, spec, config_name, config, link_deps, compile_deps, arch=None): + """Write out a link step. Fills out target.binary. """ + command = { + 'executable': 'link', + 'loadable_module': 'solink_module', + 'shared_library': 'solink', + }[spec['type']] + command_suffix = '' + + implicit_deps = set() + solibs = set() + order_deps = set() + + if compile_deps: + # Normally, the compiles of the target already depend on compile_deps, + # but a shared_library target might have no sources and only link together + # a few static_library deps, so the link step also needs to depend + # on compile_deps to make sure actions in the shared_library target + # get run before the link. + order_deps.add(compile_deps) + + if 'dependencies' in spec: + # Two kinds of dependencies: + # - Linkable dependencies (like a .a or a .so): add them to the link line. + # - Non-linkable dependencies (like a rule that generates a file + # and writes a stamp file): add them to implicit_deps + extra_link_deps = set() + for dep in spec['dependencies']: + target = self.target_outputs.get(dep) + if not target: + continue + linkable = target.Linkable() + if linkable: + new_deps = [] + if self.flavor == 'win' and target.component_objs and self.msvs_settings.IsUseLibraryDependencyInputs(config_name): + new_deps = target.component_objs + if target.compile_deps: + order_deps.add(target.compile_deps) + elif self.flavor == 'win' and target.import_lib: + new_deps = [target.import_lib] + elif target.UsesToc(self.flavor): + solibs.add(target.binary) + implicit_deps.add(target.binary + '.TOC') + else: + new_deps = [target.binary] + for new_dep in new_deps: + if new_dep not in extra_link_deps: + extra_link_deps.add(new_dep) + link_deps.append(new_dep) + + final_output = target.FinalOutput() + if not linkable or final_output != target.binary: + implicit_deps.add(final_output) + + extra_bindings = [] + if self.target.uses_cpp and self.flavor != 'win': + extra_bindings.append(('ld', '$ldxx')) + + output = self._ComputeOutput(spec, arch) + if arch is None and not self.is_mac_bundle: + self._AppendPostbuildVariable(extra_bindings, spec, output, output) + + is_executable = spec['type'] == 'executable' + # The ldflags config key is not used on mac or win. On those platforms + # linker flags are set via xcode_settings and msvs_settings, respectively. 
+ if self.toolset == 'target': + env_ldflags = os.environ.get('LDFLAGS', '').split() + else: # self.toolset == 'host' + env_ldflags = os.environ.get('LDFLAGS_host', '').split() + if self.flavor == 'mac': + ldflags = self.xcode_settings.GetLdflags(config_name, self._ExpandSpecial(generator_default_variables['PRODUCT_DIR']), self._GypPathToNinja, arch) + ldflags = env_ldflags + ldflags + elif self.flavor == 'win': + manifest_base_name = self._GypPathToUniqueOutput(self.ComputeOutputFileName(spec)) + ldflags, intermediate_manifest, manifest_files = self.msvs_settings.GetLdflags(config_name, self._GypPathToNinja, self._ExpandSpecial, manifest_base_name, output, is_executable, self.toplevel_build) + ldflags = env_ldflags + ldflags + self._WriteVariableList(ninja_file, 'manifests', manifest_files) + implicit_deps = implicit_deps.union(manifest_files) + if intermediate_manifest: + self._WriteVariableList(ninja_file, 'intermediatemanifest', [intermediate_manifest]) + command_suffix = GetWinLinkRuleNameSuffix(self.msvs_settings.IsEmbedManifest(config_name)) + def_file = self.msvs_settings.GetDefFile(self._GypPathToNinja) + if def_file: + implicit_deps.add(def_file) + else: + # Respect environment variables related to build, but target-specific + # flags can still override them. + ldflags = env_ldflags + config.get('ldflags', []) + if is_executable and len(solibs): + rpath = 'lib/' + if self.toolset != 'target': + rpath += self.toolset + ldflags.append(r'-Wl,-rpath=\$$ORIGIN/%s' % rpath) + else: + ldflags.append('-Wl,-rpath=%s' % self.target_rpath) + ldflags.append('-Wl,-rpath-link=%s' % rpath) + self._WriteVariableList(ninja_file, 'ldflags', map(self._ExpandSpecial, ldflags)) + + library_dirs = config.get('library_dirs', []) + if self.flavor == 'win': + library_dirs = [self.msvs_settings.ConvertVSMacros(l, config_name) for l in library_dirs] + library_dirs = ['/LIBPATH:' + self._QuoteShellArgument(self._GypPathToNinja(l), self.flavor) for l in library_dirs] + else: + library_dirs = [self._QuoteShellArgument('-L' + self._GypPathToNinja(l), self.flavor) for l in library_dirs] + + libraries = gyp.common.uniquer(map(self._ExpandSpecial, spec.get('libraries', []))) + if self.flavor == 'mac': + libraries = self.xcode_settings.AdjustLibraries(libraries, config_name) + elif self.flavor == 'win': + libraries = self.msvs_settings.AdjustLibraries(libraries) + + self._WriteVariableList(ninja_file, 'libs', library_dirs + libraries) + + linked_binary = output + + if command in ('solink', 'solink_module'): + extra_bindings.append(('soname', os.path.split(output)[1])) + extra_bindings.append(('lib', gyp.common.EncodePOSIXShellArgument(output))) + if self.flavor != 'win': + link_file_list = output + if self.is_mac_bundle: + # 'Dependency Framework.framework/Versions/A/Dependency Framework' -> + # 'Dependency Framework.framework.rsp' + link_file_list = self.xcode_settings.GetWrapperName() + if arch: + link_file_list += '.' 
+ arch + link_file_list += '.rsp' + # If an rspfile contains spaces, ninja surrounds the filename with + # quotes around it and then passes it to open(), creating a file with + # quotes in its name (and when looking for the rsp file, the name + # makes it through bash which strips the quotes) :-/ + link_file_list = link_file_list.replace(' ', '_') + extra_bindings.append(('link_file_list', gyp.common.EncodePOSIXShellArgument(link_file_list))) + if self.flavor == 'win': + extra_bindings.append(('binary', output)) + if '/NOENTRY' not in ldflags and not self.msvs_settings.GetNoImportLibrary(config_name): + self.target.import_lib = output + '.lib' + extra_bindings.append(('implibflag', '/IMPLIB:%s' % self.target.import_lib)) + pdbname = self.msvs_settings.GetPDBName(config_name, self._ExpandSpecial, output + '.pdb') + output = [output, self.target.import_lib] + if pdbname: + output.append(pdbname) + elif not self.is_mac_bundle: + output = [output, output + '.TOC'] + else: + command = command + '_notoc' + elif self.flavor == 'win': + extra_bindings.append(('binary', output)) + pdbname = self.msvs_settings.GetPDBName(config_name, self._ExpandSpecial, output + '.pdb') + if pdbname: + output = [output, pdbname] + + if len(solibs): + extra_bindings.append(('solibs', gyp.common.EncodePOSIXShellList(sorted(solibs)))) + + ninja_file.build(output, command + command_suffix, link_deps, implicit=sorted(implicit_deps), order_only=list(order_deps), variables=extra_bindings) + return linked_binary + + def _WriteTarget(self, spec, config_name, config, link_deps, compile_deps): + extra_link_deps = any(self.target_outputs.get(dep).Linkable() for dep in spec.get('dependencies', []) if dep in self.target_outputs) + if spec['type'] == 'none' or (not link_deps and not extra_link_deps): + # TODO(evan): don't call this function for 'none' target types, as + # it doesn't do anything, and we fake out a 'binary' with a stamp file. + self.target.binary = compile_deps + self.target.type = 'none' + elif spec['type'] == 'static_library': + self.target.binary = self._ComputeOutput(spec) + if self.flavor not in ('mac', 'openbsd', 'netbsd', 'win') and not self.is_standalone_static_library: + self.ninja.build(self.target.binary, 'alink_thin', link_deps, order_only=compile_deps) + else: + variables = [] + if self.xcode_settings: + libtool_flags = self.xcode_settings.GetLibtoolflags(config_name) + if libtool_flags: + variables.append(('libtool_flags', libtool_flags)) + if self.msvs_settings: + libflags = self.msvs_settings.GetLibFlags(config_name, self._GypPathToNinja) + variables.append(('libflags', libflags)) + + if self.flavor != 'mac' or len(self.archs) == 1: + self._AppendPostbuildVariable(variables, spec, self.target.binary, self.target.binary) + self.ninja.build(self.target.binary, 'alink', link_deps, order_only=compile_deps, variables=variables) + else: + inputs = [] + for arch in self.archs: + output = self._ComputeOutput(spec, arch) + self.arch_subninjas[arch].build(output, 'alink', link_deps[arch], order_only=compile_deps, variables=variables) + inputs.append(output) + # TODO: It's not clear if libtool_flags should be passed to the alink + # call that combines single-arch .a files into a fat .a file. + self._AppendPostbuildVariable(variables, spec, self.target.binary, self.target.binary) + self.ninja.build(self.target.binary, 'alink', inputs, + # FIXME: test proving order_only=compile_deps isn't needed. 
+ variables=variables) + else: + self.target.binary = self._WriteLink(spec, config_name, config, link_deps, compile_deps) + return self.target.binary + + def _WriteMacBundle(self, spec, mac_bundle_depends, is_empty): + assert self.is_mac_bundle + package_framework = spec['type'] in ('shared_library', 'loadable_module') + output = self._ComputeMacBundleOutput() + if is_empty: + output += '.stamp' + variables = [] + self._AppendPostbuildVariable(variables, spec, output, self.target.binary, is_command_start=not package_framework) + if package_framework and not is_empty: + if spec['type'] == 'shared_library' and self.xcode_settings.isIOS: + self.ninja.build(output, 'package_ios_framework', mac_bundle_depends, variables=variables) + else: + variables.append(('version', self.xcode_settings.GetFrameworkVersion())) + self.ninja.build(output, 'package_framework', mac_bundle_depends, variables=variables) + else: + self.ninja.build(output, 'stamp', mac_bundle_depends, variables=variables) + self.target.bundle = output + return output + + def _GetToolchainEnv(self, additional_settings=None): + """Returns the variables toolchain would set for build steps.""" + env = self._GetSortedXcodeEnv(additional_settings=additional_settings) + if self.flavor == 'win': + env = self.msvs_settings.GetVSMacroEnv('$!PRODUCT_DIR', config=self.config_name) + return env + + def _GetSortedXcodeEnv(self, additional_settings=None): + """Returns the variables Xcode would set for build steps.""" + assert self.abs_build_dir + path = os.path.join(self.abs_build_dir, self.build_to_base) + return gyp.xcode_emulation.GetSortedXcodeEnv(self.xcode_settings, self.abs_build_dir, path, self.config_name, additional_settings) + + def _GetSortedXcodePostbuildEnv(self): + """Returns the variables Xcode would set for postbuild steps.""" + postbuild_settings = {} + # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack. + # TODO(thakis): It would be nice to have some general mechanism instead. + strip_save_file = self.xcode_settings.GetPerTargetSetting('CHROMIUM_STRIP_SAVE_FILE') + if strip_save_file: + postbuild_settings['CHROMIUM_STRIP_SAVE_FILE'] = strip_save_file + return self._GetSortedXcodeEnv(additional_settings=postbuild_settings) + + def _AppendPostbuildVariable(self, variables, spec, output, binary, is_command_start=False): + """Adds a 'postbuild' variable if there is a postbuild for |output|.""" + postbuild = self._GetPostbuildCommand(spec, output, binary, is_command_start) + if postbuild: + variables.append(('postbuilds', postbuild)) + + def _GetPostbuildCommand(self, spec, output, output_binary, is_command_start): + """Returns a shell command that runs all the postbuilds, and removes + |output| if any of them fails. If |is_command_start| is False, then the + returned string will start with ' && '.""" + if not self.xcode_settings or spec['type'] == 'none' or not output: + return '' + output = self._QuoteShellArgument(output, self.flavor) + postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(spec, quiet=True) + if output_binary is not None: + output_path = os.path.normpath(os.path.join(self.base_to_build, output)) + output_binary_path = self._QuoteShellArgument(os.path.normpath(os.path.join(self.base_to_build, output_binary)), self.flavor) + postbuilds = self.xcode_settings.AddImplicitPostbuilds(self.config_name, output_path, output_binary_path, postbuilds, quiet=True) + + if not postbuilds: + return '' + # Postbuilds expect to be run in the gyp file's directory, so insert an + # implicit postbuild to cd to there. 
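+    # (EncodePOSIXShellList shell-quotes each argument and joins them, e.g.
+    # ['cd', 'sub dir'] becomes 'cd "sub dir"', so a build directory
+    # containing spaces survives as a single argument.)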
+ postbuilds.insert(0, gyp.common.EncodePOSIXShellList(['cd', self.build_to_base])) + env = self._ComputeExportEnvString(self._GetSortedXcodePostbuildEnv()) + # G will be non-null if any postbuild fails. Run all postbuilds in a + # subshell. + commands = env + ' (' + ' && '.join([ninja_syntax.escape(command) for command in postbuilds]) + command_string = (commands + '); G=$$?; ' + # Remove the final output if any postbuild failed. + '((exit $$G) || rm -rf %s) ' % output + '&& exit $$G)') + if is_command_start: + return '(' + command_string + ' && ' + else: + return '$ && (' + command_string + + @staticmethod + def _ComputeExportEnvString(env): + """Given an environment, returns a string looking like + 'export FOO=foo; export BAR="${FOO} bar;' + that exports |env| to the shell.""" + export_str = [] + for k, v in env: + export_str.append('export %s=%s;' % (k, ninja_syntax.escape(gyp.common.EncodePOSIXShellArgument(v)))) + return ' '.join(export_str) + + def _ComputeMacBundleOutput(self): + """Return the 'output' (full output path) to a bundle output directory.""" + assert self.is_mac_bundle + path = generator_default_variables['PRODUCT_DIR'] + return self._ExpandSpecial(os.path.join(path, self.xcode_settings.GetWrapperName())) + + def _ComputeOutput(self, spec, arch=None): + """Compute the path for the final output of the spec.""" + typ = spec['type'] + + if self.flavor == 'win': + override = self.msvs_settings.GetOutputName(self.config_name, self._ExpandSpecial) + if override: + return override + + if arch is None and self.flavor == 'mac' and typ in ( + 'static_library', 'executable', 'shared_library', 'loadable_module'): + filename = self.xcode_settings.GetExecutablePath() + else: + filename = self.ComputeOutputFileName(spec, typ) + + if arch is None and 'product_dir' in spec: + path = os.path.join(spec['product_dir'], filename) + return self._ExpandSpecial(path) + + # Some products go into the output root, libraries go into shared library + # dir, and everything else goes into the normal place. + type_in_output_root = ['executable', 'loadable_module'] + if self.flavor == 'mac' and self.toolset == 'target': + type_in_output_root += ['shared_library', 'static_library'] + elif self.flavor == 'win' and self.toolset == 'target': + type_in_output_root += ['shared_library'] + + if arch is not None: + # Make sure partial executables don't end up in a bundle or the regular + # output directory. + archdir = 'arch' + if self.toolset != 'target': + archdir = os.path.join('arch', '%s' % self.toolset) + return os.path.join(archdir, self._AddArch(filename, arch)) + elif typ in type_in_output_root or self.is_standalone_static_library: + return filename + elif typ == 'shared_library': + libdir = 'lib' + if self.toolset != 'target': + libdir = os.path.join('lib', '%s' % self.toolset) + return os.path.join(libdir, filename) + else: + return self._GypPathToUniqueOutput(filename, qualified=False) + + @staticmethod + def _WriteVariableList(ninja_file, var, values): + assert not isinstance(values, str) + if values is None: + values = [] + ninja_file.variable(var, ' '.join(values)) + + def _WriteNewNinjaRule(self, name, args, description, is_cygwin, env, pool, depfile=None): + """Write out a new ninja "rule" statement for a given command. 
+ + Returns the name of the new rule, and a copy of |args| with variables + expanded.""" + + if self.flavor == 'win': + args = [self.msvs_settings.ConvertVSMacros(arg, self.base_to_build, config=self.config_name) for arg in args] + description = self.msvs_settings.ConvertVSMacros(description, config=self.config_name) + elif self.flavor == 'mac': + # |env| is an empty list on non-mac. + args = [gyp.xcode_emulation.ExpandEnvVars(arg, env) for arg in args] + description = gyp.xcode_emulation.ExpandEnvVars(description, env) + + # TODO: we shouldn't need to qualify names; we do it because + # currently the ninja rule namespace is global, but it really + # should be scoped to the subninja. + rule_name = self.name + if self.toolset == 'target': + rule_name += '.' + self.toolset + rule_name += '.' + name + rule_name = re.sub('[^a-zA-Z0-9_]', '_', rule_name) + + # Remove variable references, but not if they refer to the magic rule + # variables. This is not quite right, as it also protects these for + # actions, not just for rules where they are valid. Good enough. + protect = ['${root}', '${dirname}', '${source}', '${ext}', '${name}'] + protect = '(?!' + '|'.join(map(re.escape, protect)) + ')' + description = re.sub(protect + r'\$', '_', description) + + # gyp dictates that commands are run from the base directory. + # cd into the directory before running, and adjust paths in + # the arguments to point to the proper locations. + rspfile = None + rspfile_content = None + args = [self._ExpandSpecial(arg, self.base_to_build) for arg in args] + if self.flavor == 'win': + rspfile = rule_name + '.$unique_name.rsp' + # The cygwin case handles this inside the bash sub-shell. + run_in = '' if is_cygwin else ' ' + self.build_to_base + if is_cygwin: + rspfile_content = self.msvs_settings.BuildCygwinBashCommandLine(args, self.build_to_base) + else: + rspfile_content = gyp.msvs_emulation.EncodeRspFileList(args) + command = ('%s gyp-win-tool action-wrapper $arch ' % sys.executable + rspfile + run_in) + else: + env = self._ComputeExportEnvString(env) + command = gyp.common.EncodePOSIXShellList(args) + command = 'cd %s; ' % self.build_to_base + env + command + + # GYP rules/actions express being no-ops by not touching their outputs. + # Avoid executing downstream dependencies in this case by specifying + # restat=1 to ninja. + self.ninja.rule(rule_name, command, description, depfile=depfile, restat=True, pool=pool, rspfile=rspfile, rspfile_content=rspfile_content) + self.ninja.newline() + + return rule_name, args + + def ComputeOutputFileName(self, spec, typ=None): + """Compute the filename of the final output for the current target.""" + if not typ: + typ = spec['type'] + + default_variables = copy.copy(generator_default_variables) + CalculateVariables(default_variables, {'flavor': self.flavor}) + + # Compute filename prefix: the product prefix, or a default for + # the product type. + DEFAULT_PREFIX = { + 'loadable_module': default_variables['SHARED_LIB_PREFIX'], + 'shared_library': default_variables['SHARED_LIB_PREFIX'], + 'static_library': default_variables['STATIC_LIB_PREFIX'], + 'executable': default_variables['EXECUTABLE_PREFIX'], + } + prefix = spec.get('product_prefix', DEFAULT_PREFIX.get(typ, '')) + + # Compute filename extension: the product extension, or a default + # for the product type. 
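+    # For example, with these defaults a shared_library target 'foo' becomes
+    # libfoo.so on linux, libfoo.dylib on mac and foo.dll on win.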
+ DEFAULT_EXTENSION = { + 'loadable_module': default_variables['SHARED_LIB_SUFFIX'], + 'shared_library': default_variables['SHARED_LIB_SUFFIX'], + 'static_library': default_variables['STATIC_LIB_SUFFIX'], + 'executable': default_variables['EXECUTABLE_SUFFIX'], + } + extension = spec.get('product_extension') + if extension: + extension = '.' + extension + else: + extension = DEFAULT_EXTENSION.get(typ, '') + + if 'product_name' in spec: + # If we were given an explicit name, use that. + target = spec['product_name'] + else: + # Otherwise, derive a name from the target name. + target = spec['target_name'] + if prefix == 'lib': + # Snip out an extra 'lib' from libs if appropriate. + target = self._StripPrefix(target, 'lib') + + if typ in ('static_library', 'loadable_module', 'shared_library', 'executable'): + return '%s%s%s' % (prefix, target, extension) + elif typ == 'none': + return '%s.stamp' % target + else: + raise Exception('Unhandled output type %s' % typ) + + def WriteSpec(self): + """ + The entry point for NinjaWriter: write the build rules for a spec. + + Returns a Target object, which represents the output paths for this spec. + Returns None if there are no outputs (e.g. a settings-only 'none' type + target). + """ + + if self.flavor == 'mac': + self.xcode_settings = xcode_emulation.XcodeSettings(self.spec) + mac_toolchain_dir = self.generator_flags.get('mac_toolchain_dir', None) + if mac_toolchain_dir: + self.xcode_settings.mac_toolchain_dir = mac_toolchain_dir + + if self.flavor == 'win': + self.msvs_settings = msvs_emulation.MsvsSettings(self.spec, self.generator_flags) + arch = self.msvs_settings.GetArch(self.config_name) + self.ninja.variable('arch', self.win_env[arch]) + self.ninja.variable('cc', '$cl_' + arch) + self.ninja.variable('cxx', '$cl_' + arch) + self.ninja.variable('cc_host', '$cl_' + arch) + self.ninja.variable('cxx_host', '$cl_' + arch) + self.ninja.variable('asm', '$ml_' + arch) + + if self.flavor == 'mac': + self.archs = self.xcode_settings.GetActiveArchs(self.config_name) + if len(self.archs) > 1: + self.arch_subninjas = dict( + (arch, ninja_syntax.Writer(OpenOutput(os.path.join(self.toplevel_build, self._SubninjaNameForArch(arch))))) + for arch in self.archs + ) + + # Compute pre-depends for all rules. + # actions_depends is the dependencies this target depends on before running any of its action/rule/copy steps. + # compile_depends is the dependencies this target depends on before running any of its compile steps. + actions_depends = [] + compile_depends = [] + # TODO(evan): it is rather confusing which things are lists and which + # are strings. Fix these. + if 'dependencies' in self.spec: + for dep in self.spec['dependencies']: + if dep in self.target_outputs: + target = self.target_outputs[dep] + actions_depends.append(target.PreActionInput(self.flavor)) + compile_depends.append(target.PreCompileInput()) + if target.uses_cpp: + self.target.uses_cpp = True + actions_depends = [d for d in actions_depends if d] + compile_depends = [d for d in compile_depends if d] + actions_depends = self._WriteCollapsedDependencies('actions_depends', actions_depends) + compile_depends = self._WriteCollapsedDependencies('compile_depends', compile_depends) + self.target.preaction_stamp = actions_depends + self.target.precompile_stamp = compile_depends + + # Write out actions, rules, and copies. These must happen before we + # compile any sources, so compute a list of predependencies for sources + # while we do it. 
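+    # (Rules and actions marked process_outputs_as_sources feed their
+    # outputs into extra_sources, so generated source files are compiled
+    # below like any hand-written ones.)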
+ extra_sources = [] + mac_bundle_depends = [] + self.target.actions_stamp = self._WriteActionsRulesCopies(self.spec, extra_sources, actions_depends, mac_bundle_depends) + + # If we have actions/rules/copies, we depend directly on those, but + # otherwise we depend on dependent target's actions/rules/copies etc. + # We never need to explicitly depend on previous target's link steps, + # because no compile ever depends on them. + compile_depends_stamp = self.target.actions_stamp or compile_depends + + # Write out the compilation steps, if any. + link_deps = [] + try: + sources = extra_sources + self.spec.get('sources', []) + except TypeError: + print('extra_sources: ', str(extra_sources)) + print('spec.get("sources"): ', str(self.spec.get('sources'))) + raise + if sources: + if self.flavor == 'mac' and len(self.archs) > 1: + # Write subninja file containing compile and link commands scoped to + # a single arch if a fat binary is being built. + for arch in self.archs: + self.ninja.subninja(self._SubninjaNameForArch(arch)) + + if self.flavor == 'win': + gyp.msvs_emulation.VerifyMissingSources(sources, self.abs_build_dir, self.generator_flags, self._GypPathToNinja) + pch = gyp.msvs_emulation.PrecompiledHeader(self.msvs_settings, self.config_name, self._GypPathToNinja, self._GypPathToUniqueOutput, self.obj_ext) + else: + pch = gyp.xcode_emulation.MacPrefixHeader(self.xcode_settings, self._GypPathToNinja, lambda path, lang: self._GypPathToUniqueOutput(path + '-' + lang)) + link_deps = self._WriteSources(self.config_name, self.config, sources, compile_depends_stamp, pch, self.spec) + # Some actions/rules output 'sources' that are already object files. + obj_outputs = [f for f in sources if f.endswith(self.obj_ext)] + if obj_outputs: + if self.flavor != 'mac' or len(self.archs) == 1: + link_deps += [self._GypPathToNinja(o) for o in obj_outputs] + else: + print("Warning: Actions/rules writing object files don't work with multi-arch targets, dropping. (target %s)" % self.spec['target_name']) + elif self.flavor == 'mac' and len(self.archs) > 1: + link_deps = OrderedDict((a, []) for a in self.archs) + + compile_deps = self.target.actions_stamp or actions_depends + if self.flavor == 'win' and self.target.type == 'static_library': + self.target.component_objs = link_deps + self.target.compile_deps = compile_deps + + # Write out a link step, if needed. + output = None + is_empty_bundle = not link_deps and not mac_bundle_depends + if link_deps or self.target.actions_stamp or actions_depends: + output = self._WriteTarget(self.spec, self.config_name, self.config, link_deps, compile_deps) + if self.is_mac_bundle: + mac_bundle_depends.append(output) + + # Bundle all of the above together, if needed. + if self.is_mac_bundle: + output = self._WriteMacBundle(self.spec, mac_bundle_depends, is_empty_bundle) + + if not output: + return None + + assert self.target.FinalOutput(), output + return self.target + + + def _Define(self, d, flavor): + """Takes a preprocessor define and returns a -D parameter that's ninja- and + shell-escaped.""" + if flavor == 'win': + # cl.exe replaces literal # characters with = in preprocesor definitions for + # some reason. Octal-encode to work around that. 
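+      # For example, -DFOO="a#b" is rewritten to -DFOO="a\043b"
+      # (ord('#') == 35 == 0o43).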
+      d = d.replace('#', '\\%03o' % ord('#'))
+    return self._QuoteShellArgument(ninja_syntax.escape('-D' + d), flavor)
+
+
+  @staticmethod
+  def _StripPrefix(arg, prefix):
+    if arg.startswith(prefix):
+      return arg[len(prefix):]
+    return arg
+
+
+  @staticmethod
+  def _QuoteShellArgument(arg, flavor):
+    """Quote a string such that it will be interpreted as a single argument
+    by the shell."""
+    # Rather than attempting to enumerate the bad shell characters, just
+    # whitelist common OK ones and quote anything else.
+    if re.match(r'^[a-zA-Z0-9_=.\\/-]+$', arg):
+      return arg  # No quoting necessary.
+    if flavor == 'win':
+      return gyp.msvs_emulation.QuoteForRspFile(arg)
+    return "'" + arg.replace("'", "'" + '"\'"' + "'") + "'"
+
+
+  @staticmethod
+  def _AddArch(output, arch):
+    """Adds an arch string to an output path."""
+    output, extension = os.path.splitext(output)
+    return '%s.%s%s' % (output, arch, extension)
diff --git a/gyp/gyp/XCodeDetect.py b/gyp/gyp/XCodeDetect.py
new file mode 100644
index 0000000000..cc7b8fac5a
--- /dev/null
+++ b/gyp/gyp/XCodeDetect.py
@@ -0,0 +1,67 @@
+import subprocess
+import sys
+
+
+def run(*cmd_args):
+  return subprocess.check_output(cmd_args, stderr=subprocess.PIPE).decode('utf-8')
+
+
+class XCodeDetect(object):
+  """Simplify access to Xcode information."""
+  _cache = {}
+
+  @staticmethod
+  def Version():
+    if 'Version' not in XCodeDetect._cache:
+      version = ''
+      try:
+        # run() already decodes the output, so no further .decode() is needed.
+        lines = run('xcodebuild', '-version').splitlines()
+        version = ''.join(lines[0].split()[-1].split('.'))
+        version = (version + '0' * (3 - len(version))).zfill(4)
+      except subprocess.CalledProcessError:
+        pass
+      try:
+        lines = run('pkgutil', '--pkg-info=com.apple.pkg.CLTools_Executables').splitlines()
+        for l in lines:
+          n, v = l.split(': ', 1)
+          if n != 'version':
+            continue
+          # Join at most the first four version components; some CLT packages
+          # report fewer than four, so avoid a fixed '%s%s%s%s' format.
+          version = ''.join(v.split('.', 4)[0:4])
+          break
+      except subprocess.CalledProcessError:
+        pass
+      XCodeDetect._cache['Version'] = version
+    return XCodeDetect._cache['Version']
+
+  @staticmethod
+  def SDKVersion():
+    if 'SDKVersion' not in XCodeDetect._cache:
+      out = ''
+      try:
+        out = run('xcrun', '--show-sdk-version')
+      except subprocess.CalledProcessError:
+        pass
+      try:
+        out = run('xcodebuild', '-version', '-sdk', '', 'SDKVersion')
+      except subprocess.CalledProcessError:
+        pass
+      XCodeDetect._cache['SDKVersion'] = out.strip()
+    return XCodeDetect._cache['SDKVersion']
+
+  @staticmethod
+  def HasIPhoneSDK():
+    if sys.platform != 'darwin':
+      return False
+
+    if 'HasIPhoneSDK' not in XCodeDetect._cache:
+      try:
+        # xcrun exits non-zero (raising CalledProcessError) when the iPhone
+        # SDK is missing, so set the cached boolean per branch.
+        run('xcrun', '--sdk', 'iphoneos', '--show-sdk-path')
+        XCodeDetect._cache['HasIPhoneSDK'] = True
+      except subprocess.CalledProcessError:
+        XCodeDetect._cache['HasIPhoneSDK'] = False
+    return XCodeDetect._cache['HasIPhoneSDK']
diff --git a/gyp/pylib/gyp/__init__.py b/gyp/gyp/__init__.py
old mode 100755
new mode 100644
similarity index 60%
rename from gyp/pylib/gyp/__init__.py
rename to gyp/gyp/__init__.py
index dee834013f..68e047f7dd
--- a/gyp/pylib/gyp/__init__.py
+++ b/gyp/gyp/__init__.py
@@ -8,20 +8,17 @@
 import copy
 import gyp.input
-import argparse
+import optparse
 import os.path
 import re
 import shlex
 import sys
 import traceback
+from collections import OrderedDict
 from gyp.common import GypError

-try:
-  # Python 2
-  string_types = basestring
-except NameError:
-  # Python 3
-  string_types = str
+if 'basestring' not in __builtins__:
+  basestring = str

 # Default debug modes for GYP
 debug = {}
@@ -43,8 +40,8 @@ def DebugOutput(mode, message, *args):
       pass
   if args:
     message %= args
-
print('%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]), - ctx[1], ctx[2], message)) + print('%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]), ctx[1], ctx[2], message)) + def FindBuildFiles(): extension = '.gyp' @@ -56,33 +53,28 @@ def FindBuildFiles(): return build_files -def Load(build_files, format, default_variables={}, - includes=[], depth='.', params=None, check=False, - circular_check=True, duplicate_basename_check=True): +def Load(build_files, output_format, default_variables, includes, params, depth): """ Loads one or more specified build files. default_variables and includes will be copied before use. - Returns the generator for the specified format and the + Returns the generator for the specified output_format and the data returned by loading the specified build files. """ - if params is None: - params = {} - - if '-' in format: - format, params['flavor'] = format.split('-', 1) + if '-' in output_format: + output_format, params['flavor'] = output_format.split('-', 1) default_variables = copy.copy(default_variables) # Default variables provided by this program and its modules should be # named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace, # avoiding collisions with user and automatic variables. - default_variables['GENERATOR'] = format + default_variables['GENERATOR'] = output_format default_variables['GENERATOR_FLAVOR'] = params.get('flavor', '') # Format can be a custom python file, or by default the name of a module # within gyp.generator. - if format.endswith('.py'): - generator_name = os.path.splitext(format)[0] + if output_format.endswith('.py'): + generator_name = os.path.splitext(output_format)[0] path, generator_name = os.path.split(generator_name) # Make sure the path to the custom generator is in sys.path @@ -93,7 +85,7 @@ def Load(build_files, format, default_variables={}, if path not in sys.path: sys.path.insert(0, path) else: - generator_name = 'gyp.generator.' + format + generator_name = 'gyp.generator.' + output_format # These parameters are passed in order (as opposed to by key) # because ActivePython cannot handle key parameters to __import__. @@ -103,49 +95,39 @@ def Load(build_files, format, default_variables={}, # Give the generator the opportunity to set additional variables based on # the params it will receive in the output phase. - if getattr(generator, 'CalculateVariables', None): + if hasattr(generator, 'CalculateVariables'): generator.CalculateVariables(default_variables, params) # Give the generator the opportunity to set generator_input_info based on # the params it will receive in the output phase. - if getattr(generator, 'CalculateGeneratorInputInfo', None): + if hasattr(generator, 'CalculateGeneratorInputInfo'): generator.CalculateGeneratorInputInfo(params) # Fetch the generator specific info that gets fed to input, we use getattr # so we can default things and the generators only have to provide what # they need. 
generator_input_info = { - 'non_configuration_keys': - getattr(generator, 'generator_additional_non_configuration_keys', []), - 'path_sections': - getattr(generator, 'generator_additional_path_sections', []), - 'extra_sources_for_rules': - getattr(generator, 'generator_extra_sources_for_rules', []), - 'generator_supports_multiple_toolsets': - getattr(generator, 'generator_supports_multiple_toolsets', False), - 'generator_wants_static_library_dependencies_adjusted': - getattr(generator, - 'generator_wants_static_library_dependencies_adjusted', True), - 'generator_wants_sorted_dependencies': - getattr(generator, 'generator_wants_sorted_dependencies', False), - 'generator_filelist_paths': - getattr(generator, 'generator_filelist_paths', None), + 'non_configuration_keys': getattr(generator, 'generator_additional_non_configuration_keys', []), + 'path_sections': getattr(generator, 'generator_additional_path_sections', []), + 'extra_sources_for_rules': getattr(generator, 'generator_extra_sources_for_rules', []), + 'generator_supports_multiple_toolsets': getattr(generator, 'generator_supports_multiple_toolsets', False), + 'generator_wants_static_library_dependencies_adjusted': getattr(generator, 'generator_wants_static_library_dependencies_adjusted', True), + 'generator_wants_sorted_dependencies': getattr(generator, 'generator_wants_sorted_dependencies', False), + 'generator_filelist_paths': getattr(generator, 'generator_filelist_paths', None), } # Process the input specific to this generator. - result = gyp.input.Load(build_files, default_variables, includes[:], - depth, generator_input_info, check, circular_check, - duplicate_basename_check, - params['parallel'], params['root_targets']) + result = gyp.input.Load(build_files, default_variables, includes[:], depth, generator_input_info, params['root_targets']) return [generator] + result + def NameValueListToDict(name_value_list): """ Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary of the pairs. If a string is simply NAME, then the value in the dictionary is set to True. If VALUE can be converted to an integer, it is. """ - result = { } + result = OrderedDict() for item in name_value_list: tokens = item.split('=', 1) if len(tokens) == 2: @@ -161,17 +143,20 @@ def NameValueListToDict(name_value_list): result[tokens[0]] = True return result + def ShlexEnv(env_name): flags = os.environ.get(env_name, []) if flags: flags = shlex.split(flags) return flags + def FormatOpt(opt, value): if opt.startswith('--'): return '%s=%s' % (opt, value) return opt + value + def RegenerateAppendFlag(flag, values, predicate, env_name, options): """Regenerate a list of command line flags, for an option of action='append'. @@ -194,6 +179,7 @@ def RegenerateAppendFlag(flag, values, predicate, env_name, options): flags.append(FormatOpt(flag, predicate(flag_value))) return flags + def RegenerateFlags(options): """Given a parsed options object, and taking the environment variables into account, returns a list of flags that should regenerate an equivalent options @@ -204,6 +190,7 @@ def RegenerateFlags(options): The format flag is not included, as it is assumed the calling generator will set that as appropriate. 
""" + def FixPath(path): path = gyp.common.FixIfRelativePath(path, options.depth) if not path: @@ -223,36 +210,32 @@ def Noop(value): action = metadata['action'] env_name = metadata['env_name'] if action == 'append': - flags.extend(RegenerateAppendFlag(opt, value, value_predicate, - env_name, options)) + flags.extend(RegenerateAppendFlag(opt, value, value_predicate, env_name, options)) elif action in ('store', None): # None is a synonym for 'store'. if value: flags.append(FormatOpt(opt, value_predicate(value))) elif options.use_environment and env_name and os.environ.get(env_name): flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name)))) elif action in ('store_true', 'store_false'): - if ((action == 'store_true' and value) or - (action == 'store_false' and not value)): + if ((action == 'store_true' and value) or (action == 'store_false' and not value)): flags.append(opt) elif options.use_environment and env_name: - print('Warning: environment regeneration unimplemented ' - 'for %s flag %r env_name %r' % (action, opt, - env_name), file=sys.stderr) + print('Warning: environment regeneration unimplemented for %s flag %r env_name %r' % (action, opt, env_name), file=sys.stderr) else: - print('Warning: regeneration unimplemented for action %r ' - 'flag %r' % (action, opt), file=sys.stderr) + print('Warning: regeneration unimplemented for action %r flag %r' % (action, opt), file=sys.stderr) return flags -class RegeneratableOptionParser(argparse.ArgumentParser): - def __init__(self, usage): + +class RegeneratableOptionParser(optparse.OptionParser): + def __init__(self): self.__regeneratable_options = {} - argparse.ArgumentParser.__init__(self, usage=usage) + optparse.OptionParser.__init__(self) - def add_argument(self, *args, **kw): + def add_option(self, *args, **kw): """Add an option to the parser. - This accepts the same arguments as ArgumentParser.add_argument, plus the + This accepts the same arguments as OptionParser.add_option, plus the following: regenerate: can be set to False to prevent this option from being included in regeneration. @@ -269,98 +252,51 @@ def add_argument(self, *args, **kw): # it as a string. type = kw.get('type') if type == 'path': - kw['type'] = str + kw['type'] = 'string' self.__regeneratable_options[dest] = { - 'action': kw.get('action'), - 'type': type, - 'env_name': env_name, - 'opt': args[0], - } + 'action': kw.get('action'), + 'type': type, + 'env_name': env_name, + 'opt': args[0], + } - argparse.ArgumentParser.add_argument(self, *args, **kw) + optparse.OptionParser.add_option(self, *args, **kw) def parse_args(self, *args): - values, args = argparse.ArgumentParser.parse_known_args(self, *args) + values, args = optparse.OptionParser.parse_args(self, *args) values._regeneration_metadata = self.__regeneratable_options return values, args + def gyp_main(args): - my_name = os.path.basename(sys.argv[0]) - usage = 'usage: %(prog)s [options ...] 
[build_file ...]' - - - parser = RegeneratableOptionParser(usage=usage.replace('%s', '%(prog)s')) - parser.add_argument('--build', dest='configs', action='append', - help='configuration for build after project generation') - parser.add_argument('--check', dest='check', action='store_true', - help='check format of gyp files') - parser.add_argument('--config-dir', dest='config_dir', action='store', - env_name='GYP_CONFIG_DIR', default=None, - help='The location for configuration files like ' - 'include.gypi.') - parser.add_argument('-d', '--debug', dest='debug', metavar='DEBUGMODE', - action='append', default=[], help='turn on a debugging ' - 'mode for debugging GYP. Supported modes are "variables", ' - '"includes" and "general" or "all" for all of them.') - parser.add_argument('-D', dest='defines', action='append', metavar='VAR=VAL', - env_name='GYP_DEFINES', - help='sets variable VAR to value VAL') - parser.add_argument('--depth', dest='depth', metavar='PATH', type='path', - help='set DEPTH gyp variable to a relative path to PATH') - parser.add_argument('-f', '--format', dest='formats', action='append', - env_name='GYP_GENERATORS', regenerate=False, - help='output formats to generate') - parser.add_argument('-G', dest='generator_flags', action='append', default=[], - metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS', - help='sets generator flag FLAG to VAL') - parser.add_argument('--generator-output', dest='generator_output', - action='store', default=None, metavar='DIR', type='path', - env_name='GYP_GENERATOR_OUTPUT', - help='puts generated build files under DIR') - parser.add_argument('--ignore-environment', dest='use_environment', - action='store_false', default=True, regenerate=False, - help='do not read options from environment variables') - parser.add_argument('-I', '--include', dest='includes', action='append', - metavar='INCLUDE', type='path', - help='files to include in all loaded .gyp files') - # --no-circular-check disables the check for circular relationships between - # .gyp files. These relationships should not exist, but they've only been - # observed to be harmful with the Xcode generator. Chromium's .gyp files - # currently have some circular relationships on non-Mac platforms, so this - # option allows the strict behavior to be used on Macs and the lenient - # behavior to be used elsewhere. - # TODO(mark): Remove this option when http://crbug.com/35878 is fixed. - parser.add_argument('--no-circular-check', dest='circular_check', - action='store_false', default=True, regenerate=False, - help="don't check for circular relationships between files") - # --no-duplicate-basename-check disables the check for duplicate basenames - # in a static_library/shared_library project. Visual C++ 2008 generator - # doesn't support this configuration. Libtool on Mac also generates warnings - # when duplicate basenames are passed into Make generator on Mac. - # TODO(yukawa): Remove this option when these legacy generators are - # deprecated. 
- parser.add_argument('--no-duplicate-basename-check', - dest='duplicate_basename_check', action='store_false', - default=True, regenerate=False, - help="don't check for duplicate basenames") - parser.add_argument('--no-parallel', action='store_true', default=False, - help='Disable multiprocessing') - parser.add_argument('-S', '--suffix', dest='suffix', default='', - help='suffix to add to generated files') - parser.add_argument('--toplevel-dir', dest='toplevel_dir', action='store', - default=None, metavar='DIR', type='path', - help='directory to use as the root of the source tree') - parser.add_argument('-R', '--root-target', dest='root_targets', - action='append', metavar='TARGET', - help='include only TARGET and its deep dependencies') + parser = RegeneratableOptionParser() + usage = 'usage: %s [options ...] [build_file ...]' + parser.set_usage(usage.replace('%s', '%prog')) + parser.add_option('--build', dest='configs', action='append', help='configuration for build after project generation') + parser.add_option('--config-dir', dest='config_dir', action='store', env_name='GYP_CONFIG_DIR', default=None, help='The location for configuration files like include.gypi.') + parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE', action='append', default=[], help='turn on a debugging mode for debugging GYP. Supported modes are "variables", "includes" and "general" or "all" for all of them.') + parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL', env_name='GYP_DEFINES', help='sets variable VAR to value VAL') + parser.add_option('--depth', dest='depth', metavar='PATH', type='path', help='set DEPTH gyp variable to a relative path to PATH') + parser.add_option('-f', '--format', dest='formats', action='append', env_name='GYP_GENERATORS', regenerate=False, help='output formats to generate') + parser.add_option('-G', dest='generator_flags', action='append', default=[], metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS', help='sets generator flag FLAG to VAL') + parser.add_option('--generator-output', dest='generator_output', action='store', default=None, metavar='DIR', type='path', env_name='GYP_GENERATOR_OUTPUT', help='puts generated build files under DIR') + parser.add_option('--ignore-environment', dest='use_environment', action='store_false', default=True, regenerate=False, help='do not read options from environment variables') + parser.add_option('-I', '--include', dest='includes', action='append', metavar='INCLUDE', type='path', help='files to include in all loaded .gyp files') + parser.add_option('-R', '--root-target', dest='root_targets', action='append', metavar='TARGET', help='include only TARGET and its deep dependencies') + parser.add_option('-S', '--suffix', dest='suffix', default='', help='suffix to add to generated files') + parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store', default=None, metavar='DIR', type='path', help='directory to use as the root of the source tree') + # TODO(refack) deprecated - Have no effect. 
Kept as to not break CLI usage + parser.add_option('--check', dest='check', action='store_true', help='check format of gyp files') + parser.add_option('--no-circular-check', dest='circular_check', action='store_false', default=True, regenerate=False, help="don't check for circular relationships between files") + parser.add_option('--no-duplicate-basename-check', dest='duplicate_basename_check', action='store_false', default=True, regenerate=False, help="don't check for duplicate basenames") + parser.add_option('--no-parallel', action='store_true', default=False, help='Disable multiprocessing') options, build_files_arg = parser.parse_args(args) build_files = build_files_arg # Set up the configuration directory (defaults to ~/.gyp) if not options.config_dir: - home = None home_dot_gyp = None if options.use_environment: home_dot_gyp = os.environ.get('GYP_CONFIG_DIR', None) @@ -373,7 +309,7 @@ def gyp_main(args): home_vars.append('USERPROFILE') for home_var in home_vars: home = os.getenv(home_var) - if home != None: + if home is not None: home_dot_gyp = os.path.join(home, '.gyp') if not os.path.exists(home_dot_gyp): home_dot_gyp = None @@ -386,7 +322,7 @@ def gyp_main(args): home_dot_gyp = None if not options.formats: - # If no format was given on the command line, then check the env variable. + # If no output_format was given on the command line, then check the env variable. generate_formats = [] if options.use_environment: generate_formats = os.environ.get('GYP_GENERATORS', []) @@ -408,8 +344,6 @@ def gyp_main(args): if g_o: options.generator_output = g_o - options.parallel = not options.no_parallel - for mode in options.debug: gyp.debug[mode] = 1 @@ -419,7 +353,7 @@ def gyp_main(args): for option, value in sorted(options.__dict__.items()): if option[0] == '_': continue - if isinstance(value, string_types): + if isinstance(value, basestring): DebugOutput(DEBUG_GENERAL, " %s: '%s'", option, value) else: DebugOutput(DEBUG_GENERAL, " %s: %s", option, value) @@ -427,8 +361,8 @@ def gyp_main(args): if not build_files: build_files = FindBuildFiles() if not build_files: - raise GypError((usage + '\n\n%s: error: no build_file') % - (my_name, my_name)) + my_name = os.path.basename(sys.argv[0]) + raise GypError((usage + '\n\n%s: error: no build_file') % (my_name, my_name)) # TODO(mark): Chromium-specific hack! # For Chromium, the gyp "depth" variable should always be a relative path @@ -440,12 +374,11 @@ def gyp_main(args): for build_file in build_files: build_file_dir = os.path.abspath(os.path.dirname(build_file)) build_file_dir_components = build_file_dir.split(os.path.sep) - components_len = len(build_file_dir_components) - for index in range(components_len - 1, -1, -1): - if build_file_dir_components[index] == 'src': + for component in reversed(build_file_dir_components): + if component == 'src': options.depth = os.path.sep.join(build_file_dir_components) break - del build_file_dir_components[index] + del build_file_dir_components[-1] # If the inner loop found something, break without advancing to another # build file. @@ -465,7 +398,6 @@ def gyp_main(args): # -D on the command line sets variable defaults - D isn't just for define, # it's for default. Perhaps there should be a way to force (-F?) a # variable's value so that it can't be overridden by anything else. 
- cmdline_default_variables = {} defines = [] if options.use_environment: defines += ShlexEnv('GYP_DEFINES') @@ -473,15 +405,14 @@ def gyp_main(args): defines += options.defines cmdline_default_variables = NameValueListToDict(defines) if DEBUG_GENERAL in gyp.debug: - DebugOutput(DEBUG_GENERAL, - "cmdline_default_variables: %s", cmdline_default_variables) + DebugOutput(DEBUG_GENERAL, "cmdline_default_variables: %s", cmdline_default_variables) # Set up includes. includes = [] # If ~/.gyp/include.gypi exists, it'll be forcibly included into every # .gyp file that's loaded, before anything else is included. - if home_dot_gyp != None: + if home_dot_gyp is not None: default_include = os.path.join(home_dot_gyp, 'include.gypi') if os.path.exists(default_include): print('Using overrides found in ' + default_include) @@ -499,28 +430,25 @@ def gyp_main(args): if options.generator_flags: gen_flags += options.generator_flags generator_flags = NameValueListToDict(gen_flags) - if DEBUG_GENERAL in gyp.debug.keys(): + if DEBUG_GENERAL in gyp.debug: DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags) - # Generate all requested formats (use a set in case we got one format request - # twice) - for format in set(options.formats): - params = {'options': options, - 'build_files': build_files, - 'generator_flags': generator_flags, - 'cwd': os.getcwd(), - 'build_files_arg': build_files_arg, - 'gyp_binary': sys.argv[0], - 'home_dot_gyp': home_dot_gyp, - 'parallel': options.parallel, - 'root_targets': options.root_targets, - 'target_arch': cmdline_default_variables.get('target_arch', '')} + # Generate all requested formats (use a set in case we got one output_format request twice) + for output_format in set(options.formats): + params = { + 'options': options, + 'build_files': build_files, + 'generator_flags': generator_flags, + 'cwd': os.getcwd(), + 'build_files_arg': build_files_arg, + 'gyp_binary': sys.argv[0], + 'home_dot_gyp': home_dot_gyp, + 'root_targets': options.root_targets, + 'target_arch': cmdline_default_variables.get('target_arch', '') + } # Start with the default variables from the command line. - [generator, flat_list, targets, data] = Load( - build_files, format, cmdline_default_variables, includes, options.depth, - params, options.check, options.circular_check, - options.duplicate_basename_check) + [generator, flat_list, targets, data] = Load(build_files, output_format, cmdline_default_variables, includes, params, options.depth) # TODO(mark): Pass |data| for now because the generator needs a list of # build files that came in. 
In the future, maybe it should just accept @@ -532,7 +460,7 @@ def gyp_main(args): generator.GenerateOutput(flat_list, targets, data, params) if options.configs: - valid_configs = targets[flat_list[0]]['configurations'].keys() + valid_configs = targets[flat_list[0]]['configurations'] for conf in options.configs: if conf not in valid_configs: raise GypError('Invalid config specified via --build: %s' % conf) @@ -549,9 +477,11 @@ def main(args): sys.stderr.write("gyp: %s\n" % e) return 1 + # NOTE: setuptools generated console_scripts calls function with no arguments def script_main(): return main(sys.argv[1:]) + if __name__ == '__main__': sys.exit(script_main()) diff --git a/gyp/pylib/gyp/flock_tool.py b/gyp/gyp/buildtime_helpers/flock_tool.py old mode 100755 new mode 100644 similarity index 80% rename from gyp/pylib/gyp/flock_tool.py rename to gyp/gyp/buildtime_helpers/flock_tool.py index 81fb79d136..edc4b07430 --- a/gyp/pylib/gyp/flock_tool.py +++ b/gyp/gyp/buildtime_helpers/flock_tool.py @@ -6,6 +6,7 @@ """These functions are executed via gyp-flock-tool when using the Makefile generator. Used on systems that don't have a built-in flock.""" +# noinspection PyUnresolvedReferences import fcntl import os import struct @@ -33,12 +34,14 @@ def _CommandifyName(self, name_string): return name_string.title().replace('-', '') def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. - # Note that the stock python on SunOS has a bug - # where fcntl.flock(fd, LOCK_EX) always fails - # with EBADF, that's why we use this F_SETLK - # hack instead. + """ + Emulates the most basic behavior of Linux's flock(1). + + Rely on exception handling to report errors. + Note that the stock python on SunOS has a bug where fcntl.flock(fd, LOCK_EX) always fails with EBADF, + that's why we use this F_SETLK hack instead. + """ + # noinspection PyUnresolvedReferences fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0o666) if sys.platform.startswith('aix'): # Python on AIX is compiled with LARGEFILE support, which changes the diff --git a/gyp/data/win/large-pdb-shim.cc b/gyp/gyp/buildtime_helpers/large-pdb-shim.cc similarity index 100% rename from gyp/data/win/large-pdb-shim.cc rename to gyp/gyp/buildtime_helpers/large-pdb-shim.cc diff --git a/gyp/pylib/gyp/mac_tool.py b/gyp/gyp/buildtime_helpers/mac_tool.py old mode 100755 new mode 100644 similarity index 77% rename from gyp/pylib/gyp/mac_tool.py rename to gyp/gyp/buildtime_helpers/mac_tool.py index b8b7344eff..84f8863947 --- a/gyp/pylib/gyp/mac_tool.py +++ b/gyp/gyp/buildtime_helpers/mac_tool.py @@ -18,7 +18,7 @@ import plistlib import re import shutil -import string +import struct import subprocess import sys import tempfile @@ -50,6 +50,7 @@ def _CommandifyName(self, name_string): def ExecCopyBundleResource(self, source, dest, convert_to_binary): """Copies a resource file to the bundle/Resources directory, performing any necessary compilation on each resource.""" + convert_to_binary = convert_to_binary == 'True' extension = os.path.splitext(source)[1].lower() if os.path.isdir(source): # Copy tree. 
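The F_SETLK hack described in the flock_tool.py docstring above boils down to a few lines. This is a POSIX-only sketch; the lockfile path is an example, and the struct layout shown is the common Linux one used upstream for non-AIX platforms, not valid everywhere:

    import fcntl
    import os
    import struct

    fd = os.open('/tmp/example.lock', os.O_WRONLY | os.O_NOCTTY | os.O_CREAT, 0o666)
    # Request an exclusive write lock via F_SETLK instead of flock(LOCK_EX),
    # which fails with EBADF under the buggy SunOS python mentioned above.
    op = struct.pack('hhllhhl', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
    fcntl.fcntl(fd, fcntl.F_SETLK, op)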
@@ -63,11 +64,16 @@ def ExecCopyBundleResource(self, source, dest, convert_to_binary): return self._CopyXIBFile(source, dest) elif extension == '.storyboard': return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) + elif extension == '.strings' and not convert_to_binary: + self._CopyStringsFile(source, dest) else: + if os.path.exists(dest): + os.unlink(dest) shutil.copy(source, dest) + if convert_to_binary and extension in ('.plist', '.strings'): + self._ConvertToBinary(dest) + def _CopyXIBFile(self, source, dest): """Compiles a XIB file with ibtool into a binary plist in the bundle.""" @@ -78,27 +84,49 @@ def _CopyXIBFile(self, source, dest): if os.path.relpath(dest): dest = os.path.join(base, dest) - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] + args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices'] + + if os.environ['XCODE_VERSION_ACTUAL'] > '0700': + args.extend(['--auto-activate-custom-fonts']) + if 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ: + args.extend([ + '--target-device', 'iphone', '--target-device', 'ipad', + '--minimum-deployment-target', + os.environ['IPHONEOS_DEPLOYMENT_TARGET'], + ]) + else: + args.extend([ + '--target-device', 'mac', + '--minimum-deployment-target', + os.environ['MACOSX_DEPLOYMENT_TARGET'], + ]) + + args.extend(['--output-format', 'human-readable-text', '--compile', dest, + source]) + ibtool_section_re = re.compile(r'/\*.*\*/') ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) + try: + stdout = subprocess.check_output(args) + except subprocess.CalledProcessError as e: + print(e.output) + raise current_section_header = None - for line in ibtoolout.stdout: + for line in stdout.splitlines(): if ibtool_section_re.match(line): current_section_header = line elif not ibtool_re.match(line): if current_section_header: - sys.stdout.write(current_section_header) + print(current_section_header) current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode + print(line) + return 0 def _ConvertToBinary(self, dest): subprocess.check_call([ 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - def _CopyStringsFile(self, source, dest, convert_to_binary): + def _CopyStringsFile(self, source, dest): """Copies a .strings file using iconv to reconvert the input into UTF-16.""" input_code = self._DetectInputEncoding(source) or "UTF-8" @@ -118,24 +146,21 @@ def _CopyStringsFile(self, source, dest, convert_to_binary): fp.write(s.decode(input_code).encode('UTF-16')) fp.close() - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - def _DetectInputEncoding(self, file_name): """Reads the first few bytes from file_name and tries to guess the text encoding. Returns None as a guess if it can't detect it.""" fp = open(file_name, 'rb') try: header = fp.read(3) - except Exception: + except: fp.close() return None fp.close() - if header.startswith("\xFE\xFF"): + if header.startswith(b"\xFE\xFF"): return "UTF-16" - elif header.startswith("\xFF\xFE"): + elif header.startswith(b"\xFF\xFE"): return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): + elif header.startswith(b"\xEF\xBB\xBF"): return "UTF-8" else: return None @@ -150,18 +175,18 @@ def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). 
plist = plistlib.readPlistFromString(lines) if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) + plist.update(json.loads(keys[0])) lines = plistlib.writePlistToString(plist) # Go through all the environment variables and replace them as variables in # the file. - IDENT_RE = re.compile(r'[/\s]') + IDENT_RE = re.compile(r'[_/\s]') for key in os.environ: if key.startswith('_'): continue evar = '${%s}' % key evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) + lines = lines.replace(evar, evalue) # Xcode supports various suffices on environment variables, which are # all undocumented. :rfc1034identifier is used in the standard project @@ -171,11 +196,11 @@ def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): # in a URL either -- oops, hence :rfc1034identifier was born. evar = '${%s:identifier}' % key evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) + lines = lines.replace(evar, evalue) evar = '${%s:rfc1034identifier}' % key evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) + lines = lines.replace(evar, evalue) # Remove any keys with values that haven't been replaced. lines = lines.split('\n') @@ -230,7 +255,8 @@ def ExecFlock(self, lockfile, *cmd_list): def ExecFilterLibtool(self, *cmd_list): """Calls libtool and filters out '/path/to/libtool: file: foo.o has no symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') + libtool_re = re.compile(r'^.*libtool: (?:for architecture: \S* )?' + r'file: .* has no symbols$') libtool_re5 = re.compile( r'^.*libtool: warning for library: ' + r'.* the table of contents is empty ' + @@ -255,6 +281,23 @@ def ExecFilterLibtool(self, *cmd_list): break return libtoolout.returncode + def ExecPackageIosFramework(self, framework): + # Find the name of the binary based on the part before the ".framework". + binary = os.path.basename(framework).split('.')[0] + module_path = os.path.join(framework, 'Modules'); + if not os.path.exists(module_path): + os.mkdir(module_path) + module_template = 'framework module %s {\n' \ + ' umbrella header "%s.h"\n' \ + '\n' \ + ' export *\n' \ + ' module * { export * }\n' \ + '}\n' % (binary, binary) + + module_file = open(os.path.join(module_path, 'module.modulemap'), "w") + module_file.write(module_template) + module_file.close() + def ExecPackageFramework(self, framework, version): """Takes a path to Something.framework and the Current version of that and sets up all the symlinks.""" @@ -291,6 +334,23 @@ def _Relink(self, dest, link): os.remove(link) os.symlink(dest, link) + def ExecCompileIosFrameworkHeaderMap(self, out, framework, *all_headers): + framework_name = os.path.basename(framework).split('.')[0] + all_headers = map(os.path.abspath, all_headers) + filelist = {} + for header in all_headers: + filename = os.path.basename(header) + filelist[filename] = header + filelist[os.path.join(framework_name, filename)] = header + WriteHmap(out, filelist) + + def ExecCopyIosFrameworkHeaders(self, framework, *copy_headers): + header_path = os.path.join(framework, 'Headers'); + if not os.path.exists(header_path): + os.makedirs(header_path) + for header in copy_headers: + shutil.copy(header, os.path.join(header_path, os.path.basename(header))) + def ExecCompileXcassets(self, keys, *inputs): """Compiles multiple .xcassets files into a single .car file. 
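For reference, substituting a hypothetical framework named MyFramework.framework into the module_template in ExecPackageIosFramework above produces this module.modulemap:

    framework module MyFramework {
      umbrella header "MyFramework.h"

      export *
      module * { export * }
    }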
@@ -351,49 +411,28 @@ def ExecMergeInfoPlist(self, output, *inputs): self._MergePlist(merged_plist, plist) plistlib.writePlist(merged_plist, output) - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): + def ExecCodeSignBundle(self, key, entitlements, provisioning, path, preserve): """Code sign a bundle. This function tries to code sign an iOS bundle, following the same algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. pick the provisioning profile that best match the bundle identifier, + 1. pick the provisioning profile that best match the bundle identifier, and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. + 2. copy Entitlements.plist from user or SDK next to the bundle, + 3. code sign the bundle. """ - resource_rules_path = self._InstallResourceRules(resource_rules) substitutions, overrides = self._InstallProvisioningProfile( provisioning, self._GetCFBundleIdentifier()) entitlements_path = self._InstallEntitlements( entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path + args = ['codesign', '--force', '--sign', key] + if preserve == 'True': + args.extend(['--deep', '--preserve-metadata=identifier,entitlements']) + else: + args.extend(['--entitlements', entitlements_path]) + args.extend(['--timestamp=none', path]) + subprocess.check_call(args) def _InstallProvisioningProfile(self, profile, bundle_identifier): """Installs embedded.mobileprovision into the bundle. @@ -442,7 +481,9 @@ def _FindProvisioningProfile(self, profile, bundle_identifier): profiles_dir = os.path.join( os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') if not os.path.isdir(profiles_dir): - print('cannot find mobile provisioning for %s' % (bundle_identifier), file=sys.stderr) + print(( + 'cannot find mobile provisioning for %s' % bundle_identifier), + file=sys.stderr) sys.exit(1) provisioning_profiles = None if profile: @@ -463,7 +504,9 @@ def _FindProvisioningProfile(self, profile, bundle_identifier): valid_provisioning_profiles[app_id_pattern] = ( profile_path, profile_data, team_identifier) if not valid_provisioning_profiles: - print('cannot find mobile provisioning for %s' % (bundle_identifier), file=sys.stderr) + print(( + 'cannot find mobile provisioning for %s' % bundle_identifier), + file=sys.stderr) sys.exit(1) # If the user has multiple provisioning profiles installed that can be # used for ${bundle_identifier}, pick the most specific one (ie. 
the @@ -606,5 +649,71 @@ def _ExpandVariables(self, data, substitutions): return {k: self._ExpandVariables(data[k], substitutions) for k in data} return data +def NextGreaterPowerOf2(x): + return 2**(x).bit_length() + +def WriteHmap(output_name, filelist): + """Generates a header map based on |filelist|. + + Per Mark Mentovai: + A header map is structured essentially as a hash table, keyed by names used + in #includes, and providing pathnames to the actual files. + + The implementation below and the comment above comes from inspecting: + http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt + while also looking at the implementation in clang in: + https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp + """ + magic = 1751998832 + version = 1 + _reserved = 0 + count = len(filelist) + capacity = NextGreaterPowerOf2(count) + strings_offset = 24 + (12 * capacity) + max_value_length = len(max(filelist.items(), key=lambda t: len(t[1]))[1]) + + out = open(output_name, "wb") + out.write(struct.pack('\n%s RT_MANIFEST "%s"' % ( resource_name, os.path.abspath(manifest_path).replace('\\', '/'))) @@ -244,49 +241,42 @@ def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl, '/proxy', proxy, idl] env = self._GetEnv(arch) - popen = subprocess.Popen(args, shell=True, env=env, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - out, _ = popen.communicate() + out = subprocess.check_output(args, shell=True, env=env, stderr=subprocess.STDOUT).decode('utf-8') # Filter junk out of stdout, and write filtered versions. Output we want # to filter is pairs of lines that look like this: # Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl # objidl.idl lines = out.splitlines() prefixes = ('Processing ', '64 bit Processing ') - processing = set(os.path.basename(x) - for x in lines if x.startswith(prefixes)) + processing = set(os.path.basename(x) for x in lines if x.startswith(prefixes)) for line in lines: if not line.startswith(prefixes) and line not in processing: print(line) - return popen.returncode + return 0 def ExecAsmWrapper(self, arch, *args): """Filter logo banner from invocations of asm.exe.""" env = self._GetEnv(arch) - popen = subprocess.Popen(args, shell=True, env=env, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - out, _ = popen.communicate() + out = subprocess.check_output(args, shell=True, env=env, stderr=subprocess.STDOUT).decode('utf-8') for line in out.splitlines(): if (not line.startswith('Copyright (C) Microsoft Corporation') and not line.startswith('Microsoft (R) Macro Assembler') and not line.startswith(' Assembling: ') and line): print(line) - return popen.returncode + return 0 def ExecRcWrapper(self, arch, *args): """Filter logo banner from invocations of rc.exe. 
Older versions of RC don't support the /nologo flag.""" env = self._GetEnv(arch) - popen = subprocess.Popen(args, shell=True, env=env, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - out, _ = popen.communicate() + out = subprocess.check_output(args, shell=True, env=env, stderr=subprocess.STDOUT).decode('utf-8') for line in out.splitlines(): if (not line.startswith('Microsoft (R) Windows (R) Resource Compiler') and not line.startswith('Copyright (C) Microsoft Corporation') and line): print(line) - return popen.returncode + return 0 def ExecActionWrapper(self, arch, rspfile, *dir): """Runs an action command line from a response file using the environment diff --git a/gyp/pylib/gyp/common.py b/gyp/gyp/common.py similarity index 88% rename from gyp/pylib/gyp/common.py rename to gyp/gyp/common.py index 657134a78f..0f073b0beb 100644 --- a/gyp/pylib/gyp/common.py +++ b/gyp/gyp/common.py @@ -2,17 +2,19 @@ # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. +from __future__ import print_function + import collections import errno import filecmp +import os import os.path import re import tempfile import sys -# A minimal memoizing decorator. It'll blow up if the args aren't immutable, -# among other "problems". +# A minimal memoizing decorator. It'll blow up if the args aren't immutable, among other "problems". class memoize(object): def __init__(self, func): self.func = func @@ -90,8 +92,7 @@ def ResolveTarget(build_file, target, toolset): # interpreting it as relative to build_file. If parsed_build_file is # absolute, it is usable as a path regardless of the current directory, # and os.path.join will return it as-is. - build_file = os.path.normpath(os.path.join(os.path.dirname(build_file), - parsed_build_file)) + build_file = os.path.normpath(os.path.join(os.path.dirname(build_file), parsed_build_file)) # Further (to handle cases like ../cwd), make it relative to cwd) if not os.path.isabs(build_file): build_file = RelativePath(build_file, '.') @@ -145,31 +146,13 @@ def RelativePath(path, relative_to, follow_path_symlink=True): path = os.path.abspath(path) relative_to = os.path.realpath(relative_to) - # On Windows, we can't create a relative path to a different drive, so just - # use the absolute path. + # On Windows, we can't create a relative path to a different drive, so just use the absolute path. if sys.platform == 'win32': - if (os.path.splitdrive(path)[0].lower() != - os.path.splitdrive(relative_to)[0].lower()): + if os.path.splitdrive(path)[0].lower() != os.path.splitdrive(relative_to)[0].lower(): return path - # Split the paths into components. - path_split = path.split(os.path.sep) - relative_to_split = relative_to.split(os.path.sep) - - # Determine how much of the prefix the two paths share. - prefix_len = len(os.path.commonprefix([path_split, relative_to_split])) - - # Put enough ".." components to back up out of relative_to to the common - # prefix, and then append the part of path_split after the common prefix. - relative_split = [os.path.pardir] * (len(relative_to_split) - prefix_len) + \ - path_split[prefix_len:] - - if len(relative_split) == 0: - # The paths were the same. - return '' - - # Turn it back into a string and we're done. - return os.path.join(*relative_split) + rel = os.path.relpath(path, relative_to) + return rel @memoize @@ -278,15 +261,16 @@ def EncodePOSIXShellArgument(argument): return encoded -def EncodePOSIXShellList(list): - """Encodes |list| suitably for consumption by POSIX shells. 
+def EncodePOSIXShellList(lst): + """ + Encodes |lst| suitably for consumption by POSIX shells. - Returns EncodePOSIXShellArgument for each item in list, and joins them + Returns EncodePOSIXShellArgument for each item in lst, and joins them together using the space character as an argument separator. """ encoded_arguments = [] - for argument in list: + for argument in lst: encoded_arguments.append(EncodePOSIXShellArgument(argument)) return ' '.join(encoded_arguments) @@ -339,11 +323,12 @@ class Writer(object): def __init__(self): # Pick temporary file. tmp_fd, self.tmp_path = tempfile.mkstemp( - suffix='.tmp', - prefix=os.path.split(filename)[1] + '.gyp.', - dir=os.path.split(filename)[0]) + suffix='.tmp', + prefix=os.path.split(filename)[1] + '.gyp.', + dir=os.path.split(filename)[0] + ) try: - self.tmp_file = os.fdopen(tmp_fd, 'wb') + self.tmp_file = os.fdopen(tmp_fd, 'w') except Exception: # Don't leave turds behind. os.unlink(self.tmp_path) @@ -366,8 +351,7 @@ def close(self): raise if same: - # The new file is identical to the old one, just get rid of the new - # one. + # The new file is identical to the old one, just get rid of the new one. os.unlink(self.tmp_path) else: # The new file is different from the old one, or there is no old one. @@ -394,9 +378,6 @@ def close(self): os.unlink(self.tmp_path) raise - def write(self, s): - self.tmp_file.write(s.encode('utf-8')) - return Writer() @@ -415,7 +396,6 @@ def GetFlavor(params): 'win32': 'win', 'darwin': 'mac', } - if 'flavor' in params: return params['flavor'] if sys.platform in flavors: @@ -434,35 +414,47 @@ def GetFlavor(params): return 'zos' if sys.platform.startswith('os390'): return 'zos' - return 'linux' -def CopyTool(flavor, out_path): - """Finds (flock|mac|win)_tool.gyp in the gyp directory and copies it - to |out_path|.""" - # aix and solaris just need flock emulation. mac and win use more complicated - # support scripts. +def CopyTool(flavor, out_path, mac_toolchain_dir=None): + """ + Finds (flock|mac|win)_tool.py in the gyp directory and copies it to |out_path|. + Refs: flock_tool.py => gyp-flock-tool + Refs: mac_tool.py => gyp-mac-tool + Refs: win_tool.py => gyp-win-tool + """ + # aix and solaris just need flock emulation. mac and win use more complicated support scripts. prefix = { - 'aix': 'flock', - 'solaris': 'flock', - 'mac': 'mac', - 'win': 'win' - }.get(flavor, None) + 'aix': 'flock', + 'solaris': 'flock', + 'mac': 'mac', + 'win': 'win' + }.get(flavor) if not prefix: return # Slurp input file. - source_path = os.path.join( - os.path.dirname(os.path.abspath(__file__)), '%s_tool.py' % prefix) - with open(source_path) as source_file: + source_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'buildtime_helpers', '%s_tool.py' % prefix) + with open(source_path, 'rt') as source_file: source = source_file.readlines() + # Set custom header flags. + headers = [ + source.pop(0), # original shebang + '# Generated by gyp. Do not edit.\n' + ] + if flavor == 'mac' and mac_toolchain_dir: + headers += [ + 'import os\n', + "os.environ['DEVELOPER_DIR']='%s'\n" % mac_toolchain_dir + ] + # Add header and write it out. tool_path = os.path.join(out_path, 'gyp-%s-tool' % prefix) - with open(tool_path, 'w') as tool_file: - tool_file.write( - ''.join([source[0], '# Generated by gyp. Do not edit.\n'] + source[1:])) + with open(tool_path, 'wt') as tool_file: + tool_src_lines = headers + source + tool_file.writelines(tool_src_lines) # Make file executable. 
os.chmod(tool_path, 0o755) @@ -473,7 +465,6 @@ def CopyTool(flavor, out_path): # ASPN: Python Cookbook: Remove duplicates from a sequence # First comment, dated 2001/10/13. # (Also in the printed Python Cookbook.) - def uniquer(seq, idfun=None): if idfun is None: idfun = lambda x: x @@ -494,7 +485,7 @@ def __init__(self, iterable=None): end += [None, end, end] # sentinel node for doubly linked list self.map = {} # key --> [key, prev, next] if iterable is not None: - self |= iterable + self.update(iterable) def __len__(self): return len(self.map) @@ -562,7 +553,8 @@ def __str__(self): def TopologicallySorted(graph, get_edges): - r"""Topologically sort based on a user provided edge definition. + r""" + Topologically sort based on a user provided edge definition. Args: graph: A list of node names. @@ -578,7 +570,7 @@ def TopologicallySorted(graph, get_edges): graph = {'a': '$(b) $(c)', 'b': 'hi', 'c': '$(b)'} def GetEdges(node): return re.findall(r'\$\(([^))]\)', graph[node]) - print TopologicallySorted(graph.keys(), GetEdges) + print(TopologicallySorted(graph.keys(), GetEdges)) ==> ['a', 'c', b'] """ @@ -586,21 +578,22 @@ def GetEdges(node): visited = set() visiting = set() ordered_nodes = [] - def Visit(node): - if node in visiting: + def Visit(n): + if n in visiting: raise CycleError(visiting) - if node in visited: + if n in visited: return - visited.add(node) - visiting.add(node) - for neighbor in get_edges(node): + visited.add(n) + visiting.add(n) + for neighbor in get_edges(n): Visit(neighbor) - visiting.remove(node) - ordered_nodes.insert(0, node) + visiting.remove(n) + ordered_nodes.insert(0, n) for node in sorted(graph): Visit(node) return ordered_nodes + def CrossCompileRequested(): # TODO: figure out how to not build extra host objects in the # non-cross-compile case when this is enabled, and enable unconditionally. @@ -611,3 +604,18 @@ def CrossCompileRequested(): os.environ.get('AR_target') or os.environ.get('CC_target') or os.environ.get('CXX_target')) + + +def IsStrCanonicalInt(string): + """ + Returns True if |string| is a canonical integer form str. + The canonical form is such that str(int(string)) == string. + """ + if type(string) is not str: + return False + # noinspection PyBroadException + try: + i = int(string) + return str(i) == string + except: + return False diff --git a/gyp/pylib/gyp/easy_xml.py b/gyp/gyp/easy_xml.py similarity index 88% rename from gyp/pylib/gyp/easy_xml.py rename to gyp/gyp/easy_xml.py index 1ddd909175..e0d0ab0ac6 100644 --- a/gyp/pylib/gyp/easy_xml.py +++ b/gyp/gyp/easy_xml.py @@ -3,9 +3,10 @@ # found in the LICENSE file. import re -import os import locale -from functools import reduce + +if 'reduce' not in __builtins__: + from functools import reduce def XmlToString(content, encoding='utf-8', pretty=False): @@ -104,8 +105,7 @@ def _ConstructContentList(xml_parts, specification, pretty, level=0): xml_parts.append('/>%s' % new_line) -def WriteXmlIfChanged(content, path, encoding='utf-8', pretty=False, - win32=False): +def WriteXmlIfChanged(content, path, encoding='utf-8', pretty=False, win32=False): """ Writes the XML content to disk, touching the file only if it has changed. Args: @@ -113,28 +113,27 @@ def WriteXmlIfChanged(content, path, encoding='utf-8', pretty=False, path: Location of the file. encoding: The encoding to report on the first line of the XML file. pretty: True if we want pretty printing with indents and new lines. + win32: True if we want \r\n as line terminator. 
""" xml_string = XmlToString(content, encoding, pretty) - if win32 and os.linesep != '\r\n': - xml_string = xml_string.replace('\n', '\r\n') - default_encoding = locale.getdefaultlocale()[1] - if default_encoding.upper() != encoding.upper(): - xml_string = xml_string.encode(encoding) + if default_encoding and default_encoding.upper() != encoding.upper(): + if hasattr(xml_string, 'decode'): + xml_string = xml_string.decode(default_encoding) # Get the old content try: - f = open(path, 'r') - existing = f.read() - f.close() + with open(path, 'r') as f: + existing = f.read().decode(encoding, 'ignore') except: existing = None # It has changed, write it - if existing != xml_string: - f = open(path, 'wb') - f.write(xml_string) - f.close() + if existing == xml_string: + return + + with open(path, 'wb') as f: + f.write(xml_string.encode('utf-8')) _xml_escape_map = { @@ -148,8 +147,7 @@ def WriteXmlIfChanged(content, path, encoding='utf-8', pretty=False, } -_xml_escape_re = re.compile( - "(%s)" % "|".join(map(re.escape, _xml_escape_map.keys()))) +_xml_escape_re = re.compile("(%s)" % "|".join(map(re.escape, _xml_escape_map.keys()))) def _XmlEscape(value, attr=False): diff --git a/gyp/pylib/gyp/generator/__init__.py b/gyp/gyp/generator/__init__.py similarity index 100% rename from gyp/pylib/gyp/generator/__init__.py rename to gyp/gyp/generator/__init__.py diff --git a/gyp/pylib/gyp/generator/analyzer.py b/gyp/gyp/generator/analyzer.py similarity index 98% rename from gyp/pylib/gyp/generator/analyzer.py rename to gyp/gyp/generator/analyzer.py index 0416b5d9be..5d2e5388b2 100644 --- a/gyp/pylib/gyp/generator/analyzer.py +++ b/gyp/gyp/generator/analyzer.py @@ -64,12 +64,11 @@ from __future__ import print_function -import gyp.common -import gyp.ninja_syntax as ninja_syntax import json import os import posixpath -import sys +import gyp.msvs_emulation +import gyp.common debug = False @@ -293,8 +292,7 @@ def _WasBuildFileModified(build_file, data, files, toplevel_dir): _ToGypPath(gyp.common.UnrelativePath(include_file, build_file)) if _ToLocalPath(toplevel_dir, rel_include_file) in files: if debug: - print('included gyp file modified, gyp_file=', build_file, - 'included file=', rel_include_file) + print('included gyp file modified, gyp_file=', build_file, 'included file=', rel_include_file) return True return False @@ -491,7 +489,8 @@ def _AddCompileTargets(target, roots, add_if_no_ancestor, result): target.added_to_compile_targets, 'add_if_no_ancestor', add_if_no_ancestor, 'requires_build', target.requires_build, 'is_static_library', target.is_static_library, - 'is_or_has_linked_ancestor', target.is_or_has_linked_ancestor) + 'is_or_has_linked_ancestor', target.is_or_has_linked_ancestor + ) result.add(target) target.added_to_compile_targets = True @@ -581,14 +580,10 @@ def CalculateVariables(default_variables, params): default_variables.setdefault('OS', 'mac') elif flavor == 'win': default_variables.setdefault('OS', 'win') - # Copy additional generator configuration data from VS, which is shared - # by the Windows Ninja generator. - import gyp.generator.msvs as msvs_generator - generator_additional_non_configuration_keys = getattr(msvs_generator, - 'generator_additional_non_configuration_keys', []) - generator_additional_path_sections = getattr(msvs_generator, - 'generator_additional_path_sections', []) - + # # Copy additional generator configuration data from VS, which is shared by the Windows Ninja generator. 
+ # import gyp.generator.msvs as msvs_generator + # generator_additional_non_configuration_keys = getattr(msvs_generator, 'generator_additional_non_configuration_keys', []) + # generator_additional_path_sections = getattr(msvs_generator, 'generator_additional_path_sections', []) gyp.msvs_emulation.CalculateCommonVariables(default_variables, params) else: operating_system = flavor diff --git a/gyp/pylib/gyp/generator/cmake.py b/gyp/gyp/generator/cmake.py similarity index 87% rename from gyp/pylib/gyp/generator/cmake.py rename to gyp/gyp/generator/cmake.py index 7aabddb633..e5cb24cbe7 100644 --- a/gyp/pylib/gyp/generator/cmake.py +++ b/gyp/gyp/generator/cmake.py @@ -30,12 +30,17 @@ from __future__ import print_function -import multiprocessing import os -import signal import string import subprocess import gyp.common +import gyp.xcode_emulation + +try: + _maketrans = str.maketrans +except NameError: + # noinspection PyUnresolvedReferences + _maketrans = string.maketrans generator_default_variables = { 'EXECUTABLE_PREFIX': '', @@ -105,8 +110,7 @@ def NormjoinPathForceCMakeSource(base_path, rel_path): if any([rel_path.startswith(var) for var in FULL_PATH_VARS]): return rel_path # TODO: do we need to check base_path for absolute variables as well? - return os.path.join('${CMAKE_CURRENT_LIST_DIR}', - os.path.normpath(os.path.join(base_path, rel_path))) + return os.path.join('${CMAKE_CURRENT_LIST_DIR}', os.path.normpath(os.path.join(base_path, rel_path))) def NormjoinPath(base_path, rel_path): @@ -239,11 +243,10 @@ def StringToCMakeTargetName(a): Invalid for make: ':' Invalid for unknown reasons but cause failures: '.' """ - return a.translate(string.maketrans(' /():."', '_______')) + return a.translate(_maketrans(' /():."', '_______')) -def WriteActions(target_name, actions, extra_sources, extra_deps, - path_to_gyp, output): +def WriteActions(target_name, actions, extra_sources, extra_deps, path_to_gyp, output): """Write CMake for the 'actions' in the target. Args: @@ -260,12 +263,10 @@ def WriteActions(target_name, actions, extra_sources, extra_deps, inputs = action['inputs'] inputs_name = action_target_name + '__input' - SetVariableList(output, inputs_name, - [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs]) + SetVariableList(output, inputs_name, [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs]) outputs = action['outputs'] - cmake_outputs = [NormjoinPathForceCMakeSource(path_to_gyp, out) - for out in outputs] + cmake_outputs = [NormjoinPathForceCMakeSource(path_to_gyp, out) for out in outputs] outputs_name = action_target_name + '__output' SetVariableList(output, outputs_name, cmake_outputs) @@ -322,19 +323,18 @@ def WriteActions(target_name, actions, extra_sources, extra_deps, def NormjoinRulePathForceCMakeSource(base_path, rel_path, rule_source): - if rel_path.startswith(("${RULE_INPUT_PATH}","${RULE_INPUT_DIRNAME}")): + if rel_path.startswith(("${RULE_INPUT_PATH}", "${RULE_INPUT_DIRNAME}")): if any([rule_source.startswith(var) for var in FULL_PATH_VARS]): return rel_path return NormjoinPathForceCMakeSource(base_path, rel_path) -def WriteRules(target_name, rules, extra_sources, extra_deps, - path_to_gyp, output): +def WriteRules(target_name, rules, extra_sources, extra_deps, path_to_gyp, output): """Write CMake for the 'rules' in the target. Args: target_name: the name of the CMake target being generated. - actions: the Gyp 'actions' dict for this target. + rules: the Gyp 'rules' dict for this target. 
extra_sources: [(, )] to append with generated source files. extra_deps: [] to append with generated targets. path_to_gyp: relative path from CMakeLists.txt being generated to @@ -345,8 +345,7 @@ def WriteRules(target_name, rules, extra_sources, extra_deps, inputs = rule.get('inputs', []) inputs_name = rule_name + '__input' - SetVariableList(output, inputs_name, - [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs]) + SetVariableList(output, inputs_name, [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs]) outputs = rule['outputs'] var_outputs = [] @@ -370,9 +369,7 @@ def WriteRules(target_name, rules, extra_sources, extra_deps, these_outputs = [] for output_index, out in enumerate(outputs): output_name = action_name + '_' + str(output_index) - SetVariable(output, output_name, - NormjoinRulePathForceCMakeSource(path_to_gyp, out, - rule_source)) + SetVariable(output, output_name, NormjoinRulePathForceCMakeSource(path_to_gyp, out, rule_source)) if int(rule.get('process_outputs_as_sources', False)): extra_sources.append(('${' + output_name + '}', out)) these_outputs.append('${' + output_name + '}') @@ -447,7 +444,7 @@ def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output): Args: target_name: the name of the CMake target being generated. - actions: the Gyp 'actions' dict for this target. + copies: the Gyp 'copies' dict for this target. extra_deps: [] to append with generated targets. path_to_gyp: relative path from CMakeLists.txt being generated to the Gyp file in which the target being generated is defined. @@ -552,8 +549,7 @@ def __init__(self, ext, command): def CreateCMakeTargetBaseName(qualified_target): """This is the name we would like the target to have.""" - _, gyp_target_name, gyp_target_toolset = ( - gyp.common.ParseQualifiedTarget(qualified_target)) + _, gyp_target_name, gyp_target_toolset = (gyp.common.ParseQualifiedTarget(qualified_target)) cmake_target_base_name = gyp_target_name if gyp_target_toolset and gyp_target_toolset != 'target': cmake_target_base_name += '_' + gyp_target_toolset @@ -562,8 +558,7 @@ def CreateCMakeTargetBaseName(qualified_target): def CreateCMakeTargetFullName(qualified_target): """An unambiguous name for the target.""" - gyp_file, gyp_target_name, gyp_target_toolset = ( - gyp.common.ParseQualifiedTarget(qualified_target)) + gyp_file, gyp_target_name, gyp_target_toolset = (gyp.common.ParseQualifiedTarget(qualified_target)) cmake_target_full_name = gyp_file + ':' + gyp_target_name if gyp_target_toolset and gyp_target_toolset != 'target': cmake_target_full_name += '_' + gyp_target_toolset @@ -590,6 +585,7 @@ class CMakeNamer(object): building. However, it also makes sense for an IDE, as it is possible for defines to be different. """ + def __init__(self, target_list): self.cmake_target_base_names_conficting = set() @@ -609,9 +605,7 @@ def CreateCMakeTargetName(self, qualified_target): return base_name -def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, - options, generator_flags, all_qualified_targets, output): - +def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, options, generator_flags, all_qualified_targets, flavor, output): # The make generator does this always. # TODO: It would be nice to be able to tell CMake all dependencies. 
circular_libs = generator_flags.get('circular', True) @@ -635,14 +629,17 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, spec = target_dicts.get(qualified_target, {}) config = spec.get('configurations', {}).get(config_to_use, {}) + xcode_settings = None + if flavor == 'mac': + xcode_settings = gyp.xcode_emulation.XcodeSettings(spec) + target_name = spec.get('target_name', '') target_type = spec.get('type', '') target_toolset = spec.get('toolset') cmake_target_type = cmake_target_type_from_gyp_target_type.get(target_type) if cmake_target_type is None: - print('Target %s has unknown target type %s, skipping.' % - ( target_name, target_type )) + print('Target %s has unknown target type %s, skipping.' % (target_name, target_type)) return SetVariable(output, 'TARGET', target_name) @@ -655,18 +652,15 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, # Actions must come first, since they can generate more OBJs for use below. if 'actions' in spec: - WriteActions(cmake_target_name, spec['actions'], extra_sources, extra_deps, - path_from_cmakelists_to_gyp, output) + WriteActions(cmake_target_name, spec['actions'], extra_sources, extra_deps, path_from_cmakelists_to_gyp, output) # Rules must be early like actions. if 'rules' in spec: - WriteRules(cmake_target_name, spec['rules'], extra_sources, extra_deps, - path_from_cmakelists_to_gyp, output) + WriteRules(cmake_target_name, spec['rules'], extra_sources, extra_deps, path_from_cmakelists_to_gyp, output) # Copies if 'copies' in spec: - WriteCopies(cmake_target_name, spec['copies'], extra_deps, - path_from_cmakelists_to_gyp, output) + WriteCopies(cmake_target_name, spec['copies'], extra_deps, path_from_cmakelists_to_gyp, output) # Target and sources srcs = spec.get('sources', []) @@ -745,8 +739,7 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, other_sources_name) if target_type == 'executable' and not has_sources: dummy_sources_name = cmake_target_name + '__dummy_srcs' - SetVariable(output, dummy_sources_name, - "${obj}.${TOOLSET}/${TARGET}/genc/dummy.c") + SetVariable(output, dummy_sources_name, "${obj}.${TOOLSET}/${TARGET}/genc/dummy.c") output.write('if(NOT EXISTS "') WriteVariable(output, dummy_sources_name) output.write('")\n') @@ -755,7 +748,6 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, output.write('" "")\n') output.write("endif()\n") - # CMake is opposed to setting linker directories and considers the practice # of setting linker directories dangerous. Instead, it favors the use of # find_library and passing absolute paths to target_link_libraries. @@ -796,13 +788,10 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, output.write(')\n') # Let CMake know if the 'all' target should depend on this target. - exclude_from_all = ('TRUE' if qualified_target not in all_qualified_targets - else 'FALSE') - SetTargetProperty(output, cmake_target_name, - 'EXCLUDE_FROM_ALL', exclude_from_all) + exclude_from_all = ('TRUE' if qualified_target not in all_qualified_targets else 'FALSE') + SetTargetProperty(output, cmake_target_name, 'EXCLUDE_FROM_ALL', exclude_from_all) for extra_target_name in extra_deps: - SetTargetProperty(output, extra_target_name, - 'EXCLUDE_FROM_ALL', exclude_from_all) + SetTargetProperty(output, extra_target_name, 'EXCLUDE_FROM_ALL', exclude_from_all) # Output name and location. 
if target_type != 'none': @@ -832,19 +821,12 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, elif spec.get('standalone_static_library', False): target_output_directory = generator_default_variables['PRODUCT_DIR'] else: - base_path = gyp.common.RelativePath(os.path.dirname(gyp_file), - options.toplevel_dir) + base_path = gyp.common.RelativePath(os.path.dirname(gyp_file), options.toplevel_dir) target_output_directory = '${obj}.${TOOLSET}' - target_output_directory = ( - os.path.join(target_output_directory, base_path)) + target_output_directory = (os.path.join(target_output_directory, base_path)) - cmake_target_output_directory = NormjoinPathForceCMakeSource( - path_from_cmakelists_to_gyp, - target_output_directory) - SetTargetProperty(output, - cmake_target_name, - cmake_target_type.property_modifier + '_OUTPUT_DIRECTORY', - cmake_target_output_directory) + cmake_target_output_directory = NormjoinPathForceCMakeSource(path_from_cmakelists_to_gyp, target_output_directory) + SetTargetProperty(output, cmake_target_name, cmake_target_type.property_modifier + '_OUTPUT_DIRECTORY', cmake_target_output_directory) # Output name default_product_prefix = '' @@ -852,21 +834,18 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, default_product_ext = '' if target_type == 'static_library': static_library_prefix = generator_default_variables['STATIC_LIB_PREFIX'] - default_product_name = RemovePrefix(default_product_name, - static_library_prefix) + default_product_name = RemovePrefix(default_product_name, static_library_prefix) default_product_prefix = static_library_prefix default_product_ext = generator_default_variables['STATIC_LIB_SUFFIX'] elif target_type in ('loadable_module', 'shared_library'): shared_library_prefix = generator_default_variables['SHARED_LIB_PREFIX'] - default_product_name = RemovePrefix(default_product_name, - shared_library_prefix) + default_product_name = RemovePrefix(default_product_name, shared_library_prefix) default_product_prefix = shared_library_prefix default_product_ext = generator_default_variables['SHARED_LIB_SUFFIX'] elif target_type != 'executable': - print('ERROR: What output file should be generated?', - 'type', target_type, 'target', target_name) + print(('ERROR: What output file should be generated?', 'type', target_type, 'target', target_name)) product_prefix = spec.get('product_prefix', default_product_prefix) product_name = spec.get('product_name', default_product_name) @@ -877,15 +856,12 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, product_ext = default_product_ext SetTargetProperty(output, cmake_target_name, 'PREFIX', product_prefix) - SetTargetProperty(output, cmake_target_name, - cmake_target_type.property_modifier + '_OUTPUT_NAME', - product_name) + SetTargetProperty(output, cmake_target_name, cmake_target_type.property_modifier + '_OUTPUT_NAME', product_name) SetTargetProperty(output, cmake_target_name, 'SUFFIX', product_ext) # Make the output of this target referenceable as a source. 
cmake_target_output_basename = product_prefix + product_name + product_ext - cmake_target_output = os.path.join(cmake_target_output_directory, - cmake_target_output_basename) + cmake_target_output = os.path.join(cmake_target_output_directory, cmake_target_output_basename) SetFileProperty(output, cmake_target_output, 'GENERATED', ['TRUE'], '') # Includes @@ -893,9 +869,7 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, if includes: # This (target include directories) is what requires CMake 2.8.8 includes_name = cmake_target_name + '__include_dirs' - SetVariableList(output, includes_name, - [NormjoinPathForceCMakeSource(path_from_cmakelists_to_gyp, include) - for include in includes]) + SetVariableList(output, includes_name, [NormjoinPathForceCMakeSource(path_from_cmakelists_to_gyp, include) for include in includes]) output.write('set_property(TARGET ') output.write(cmake_target_name) output.write(' APPEND PROPERTY INCLUDE_DIRECTORIES ') @@ -905,11 +879,7 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, # Defines defines = config.get('defines') if defines is not None: - SetTargetProperty(output, - cmake_target_name, - 'COMPILE_DEFINITIONS', - defines, - ';') + SetTargetProperty(output, cmake_target_name, 'COMPILE_DEFINITIONS', defines, ';') # Compile Flags - http://www.cmake.org/Bug/view.php?id=6493 # CMake currently does not have target C and CXX flags. @@ -929,6 +899,11 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, cflags = config.get('cflags', []) cflags_c = config.get('cflags_c', []) cflags_cxx = config.get('cflags_cc', []) + if xcode_settings: + cflags = xcode_settings.GetCflags(config_to_use) + cflags_c = xcode_settings.GetCflagsC(config_to_use) + cflags_cxx = xcode_settings.GetCflagsCC(config_to_use) # cflags_objc = xcode_settings.GetCflagsObjC(config_to_use) # cflags_objcc = xcode_settings.GetCflagsObjCC(config_to_use) + if (not cflags_c or not c_sources) and (not cflags_cxx or not cxx_sources): SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', cflags, ' ') @@ -967,6 +942,11 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, if ldflags is not None: SetTargetProperty(output, cmake_target_name, 'LINK_FLAGS', ldflags, ' ') + # XCode settings + xcode_settings = config.get('xcode_settings', {}) + for xcode_setting, xcode_value in xcode_settings.viewitems(): + SetTargetProperty(output, cmake_target_name, "XCODE_ATTRIBUTE_%s" % xcode_setting, xcode_value, '' if isinstance(xcode_value, str) else ' ') + # Note on Dependencies and Libraries: # CMake wants to handle link order, resolving the link line up front. # Gyp does not retain or enforce specifying enough information to do so. 
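As a concrete illustration of the note above, here is a stripped-down sketch (function and variable names are mine) of how mutually dependent static archives get wrapped for the GNU linker, mirroring the --start-group handling in the next hunk. The group directive makes ld re-scan the archives until no new symbols resolve; it is skipped on mac, whose linker does not support it:

    def render_link_libraries(target, static_deps, circular_libs, flavor):
        # Emit a target_link_libraries(...) block, grouping static archives
        # so the GNU linker can resolve circular references between them.
        lines = ['target_link_libraries(%s' % target]
        write_group = circular_libs and len(static_deps) > 1 and flavor != 'mac'
        if write_group:
            lines.append('  -Wl,--start-group')
        for dep in static_deps:
            lines.append('  %s' % dep)
        if write_group:
            lines.append('  -Wl,--end-group')
        lines.append(')')
        return '\n'.join(lines)

    print(render_link_libraries('foo', ['libbase', 'libnet'], True, 'linux'))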
@@ -986,7 +966,7 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, if dep_target_type == 'static_library': static_deps.append(dep_cmake_name) - elif dep_target_type == 'shared_library': + elif dep_target_type == 'shared_library': shared_deps.append(dep_cmake_name) else: other_deps.append(dep_cmake_name) @@ -1031,7 +1011,7 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, output.write(cmake_target_name) output.write('\n') if static_deps: - write_group = circular_libs and len(static_deps) > 1 + write_group = circular_libs and len(static_deps) > 1 and flavor != 'mac' if write_group: output.write('-Wl,--start-group\n') for dep in gyp.common.uniquer(static_deps): @@ -1047,9 +1027,9 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, output.write('\n') if external_libs: for lib in gyp.common.uniquer(external_libs): - output.write(' ') - output.write(lib) - output.write('\n') + output.write(' "') + output.write(RemovePrefix(lib, "$(SDKROOT)")) + output.write('"\n') output.write(')\n') @@ -1057,10 +1037,10 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, UnsetVariable(output, 'TARGET') -def GenerateOutputForConfig(target_list, target_dicts, data, - params, config_to_use): +def GenerateOutputForConfig(target_list, target_dicts, data, params, config_to_use): options = params['options'] generator_flags = params['generator_flags'] + flavor = gyp.common.GetFlavor(params) # generator_dir: relative path from pwd to where make puts build files. # Makes migrating from make to cmake easier, cmake doesn't put anything here. @@ -1073,9 +1053,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, # build_dir: relative path from source root to our output files. # e.g. "out/Debug" - build_dir = os.path.normpath(os.path.join(generator_dir, - output_dir, - config_to_use)) + build_dir = os.path.normpath(os.path.join(generator_dir, output_dir, config_to_use)) toplevel_build = os.path.join(options.toplevel_dir, build_dir) @@ -1098,8 +1076,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, cxx = None make_global_settings = data[gyp_file].get('make_global_settings', []) - build_to_top = gyp.common.InvertRelativePath(build_dir, - options.toplevel_dir) + build_to_top = gyp.common.InvertRelativePath(build_dir, options.toplevel_dir) for key, value in make_global_settings: if key == 'AR': ar = os.path.join(build_to_top, value) @@ -1143,7 +1120,9 @@ def GenerateOutputForConfig(target_list, target_dicts, data, # Force ninja to use rsp files. Otherwise link and ar lines can get too long, # resulting in 'Argument list too long' errors. - output.write('set(CMAKE_NINJA_FORCE_RESPONSE_FILE 1)\n') + # However, rsp files don't work correctly on Mac. + if flavor != 'mac': + output.write('set(CMAKE_NINJA_FORCE_RESPONSE_FILE 1)\n') output.write('\n') namer = CMakeNamer(target_list) @@ -1152,19 +1131,21 @@ def GenerateOutputForConfig(target_list, target_dicts, data, # CMake has it's own implicit 'all' target, one is not created explicitly. 
all_qualified_targets = set() for build_file in params['build_files']: - for qualified_target in gyp.common.AllTargets(target_list, - target_dicts, - os.path.normpath(build_file)): + for qualified_target in gyp.common.AllTargets(target_list, target_dicts, os.path.normpath(build_file)): all_qualified_targets.add(qualified_target) for qualified_target in target_list: - WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, - options, generator_flags, all_qualified_targets, output) + if flavor == 'mac': + gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target) + spec = target_dicts[qualified_target] + gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[gyp_file], spec) + + WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, options, generator_flags, all_qualified_targets, flavor, output) output.close() -def PerformBuild(data, configurations, params): +def PerformBuild(_, configurations, params): options = params['options'] generator_flags = params['generator_flags'] @@ -1178,9 +1159,7 @@ def PerformBuild(data, configurations, params): for config_name in configurations: # build_dir: relative path from source root to our output files. # e.g. "out/Debug" - build_dir = os.path.normpath(os.path.join(generator_dir, - output_dir, - config_name)) + build_dir = os.path.normpath(os.path.join(generator_dir, output_dir, config_name)) arguments = ['cmake', '-G', 'Ninja'] print('Generating [%s]: %s' % (config_name, arguments)) subprocess.check_call(arguments, cwd=build_dir) @@ -1190,34 +1169,11 @@ def PerformBuild(data, configurations, params): subprocess.check_call(arguments) -def CallGenerateOutputForConfig(arglist): - # Ignore the interrupt signal so that the parent process catches it and - # kills all multiprocessing children. - signal.signal(signal.SIGINT, signal.SIG_IGN) - - target_list, target_dicts, data, params, config_name = arglist - GenerateOutputForConfig(target_list, target_dicts, data, params, config_name) - - def GenerateOutput(target_list, target_dicts, data, params): user_config = params.get('generator_flags', {}).get('config', None) if user_config: - GenerateOutputForConfig(target_list, target_dicts, data, - params, user_config) + GenerateOutputForConfig(target_list, target_dicts, data, params, user_config) else: - config_names = target_dicts[target_list[0]]['configurations'].keys() - if params['parallel']: - try: - pool = multiprocessing.Pool(len(config_names)) - arglists = [] - for config_name in config_names: - arglists.append((target_list, target_dicts, data, - params, config_name)) - pool.map(CallGenerateOutputForConfig, arglists) - except KeyboardInterrupt as e: - pool.terminate() - raise e - else: - for config_name in config_names: - GenerateOutputForConfig(target_list, target_dicts, data, - params, config_name) + config_names = target_dicts[target_list[0]]['configurations'] + for config_name in config_names: + GenerateOutputForConfig(target_list, target_dicts, data, params, config_name) diff --git a/gyp/pylib/gyp/generator/dump_dependency_json.py b/gyp/gyp/generator/dump_dependency_json.py similarity index 85% rename from gyp/pylib/gyp/generator/dump_dependency_json.py rename to gyp/gyp/generator/dump_dependency_json.py index 8e4f3168f3..d0bb303413 100644 --- a/gyp/pylib/gyp/generator/dump_dependency_json.py +++ b/gyp/gyp/generator/dump_dependency_json.py @@ -1,15 +1,14 @@ -from __future__ import print_function # Copyright (c) 2012 Google Inc. All rights reserved. 
# Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import collections
+from __future__ import print_function
+
 import os
 import gyp
 import gyp.common
 import gyp.msvs_emulation
 import json
-import sys
 
 
 generator_supports_multiple_toolsets = True
@@ -41,14 +40,10 @@ def CalculateVariables(default_variables, params):
   flavor = gyp.common.GetFlavor(params)
   if flavor =='win':
-    # Copy additional generator configuration data from VS, which is shared
-    # by the Windows Ninja generator.
-    import gyp.generator.msvs as msvs_generator
-    generator_additional_non_configuration_keys = getattr(msvs_generator,
-        'generator_additional_non_configuration_keys', [])
-    generator_additional_path_sections = getattr(msvs_generator,
-        'generator_additional_path_sections', [])
-
+    # # Copy additional generator configuration data from VS, which is shared by the Windows Ninja generator.
+    # import gyp.generator.msvs as msvs_generator
+    # generator_additional_non_configuration_keys = getattr(msvs_generator, 'generator_additional_non_configuration_keys', [])
+    # generator_additional_path_sections = getattr(msvs_generator, 'generator_additional_path_sections', [])
     gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
@@ -72,7 +67,7 @@ def CalculateGeneratorInputInfo(params):
     'qualified_out_dir': qualified_out_dir,
   }
 
-def GenerateOutput(target_list, target_dicts, data, params):
+def GenerateOutput(target_list, target_dicts, _, params):
   # Map of target -> list of targets it depends on.
   edges = {}
 
diff --git a/gyp/pylib/gyp/generator/eclipse.py b/gyp/gyp/generator/eclipse.py
similarity index 86%
rename from gyp/pylib/gyp/generator/eclipse.py
rename to gyp/gyp/generator/eclipse.py
index 372ceec246..3d63f38491 100644
--- a/gyp/pylib/gyp/generator/eclipse.py
+++ b/gyp/gyp/generator/eclipse.py
@@ -57,14 +57,10 @@ def CalculateVariables(default_variables, params):
   flavor = gyp.common.GetFlavor(params)
   default_variables.setdefault('OS', flavor)
   if flavor == 'win':
-    # Copy additional generator configuration data from VS, which is shared
-    # by the Eclipse generator.
-    import gyp.generator.msvs as msvs_generator
-    generator_additional_non_configuration_keys = getattr(msvs_generator,
-        'generator_additional_non_configuration_keys', [])
-    generator_additional_path_sections = getattr(msvs_generator,
-        'generator_additional_path_sections', [])
-
+    # # Copy additional generator configuration data from VS, which is shared by the Eclipse generator.
+    # import gyp.generator.msvs as msvs_generator
+    # generator_additional_non_configuration_keys = getattr(msvs_generator, 'generator_additional_non_configuration_keys', [])
+    # generator_additional_path_sections = getattr(msvs_generator, 'generator_additional_path_sections', [])
     gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
@@ -118,8 +114,6 @@ def GetAllIncludeDirectories(target_list, target_dicts,
         compiler_includes_list.append(include_dir)
 
   flavor = gyp.common.GetFlavor(params)
-  if flavor == 'win':
-    generator_flags = params.get('generator_flags', {})
   for target_name in target_list:
     target = target_dicts[target_name]
     if config_name in target['configurations']:
@@ -130,6 +124,7 @@
       # TODO(jgreenwald): Change the gyp files to not abuse cflags for this, and
       # remove this.
if flavor == 'win': + generator_flags = params.get('generator_flags', {}) msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags) cflags = msvs_settings.GetCflags(config_name) else: @@ -190,8 +185,7 @@ def GetCompilerPath(target_list, data, options): return 'gcc' -def GetAllDefines(target_list, target_dicts, data, config_name, params, - compiler_path): +def GetAllDefines(target_list, target_dicts, _, config_name, params, compiler_path): """Calculate the defines for a project. Returns: @@ -202,12 +196,11 @@ def GetAllDefines(target_list, target_dicts, data, config_name, params, # Get defines declared in the gyp files. all_defines = {} flavor = gyp.common.GetFlavor(params) - if flavor == 'win': - generator_flags = params.get('generator_flags', {}) for target_name in target_list: target = target_dicts[target_name] if flavor == 'win': + generator_flags = params.get('generator_flags', {}) msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags) extra_defines = msvs_settings.GetComputedDefines(config_name) else: @@ -252,14 +245,12 @@ def GetAllDefines(target_list, target_dicts, data, config_name, params, def WriteIncludePaths(out, eclipse_langs, include_dirs): """Write the includes section of a CDT settings export file.""" - out.write('
<section name="org.eclipse.cdt.internal.ui.wizards.'
-            'settingswizards.IncludePaths">\n')
+  out.write('  <section name="org.eclipse.cdt.internal.ui.wizards.settingswizards.IncludePaths">\n')
   out.write('    <language name="holder for library settings"></language>\n')
   for lang in eclipse_langs:
     out.write('    <language name="%s">\n' % lang)
     for include_dir in include_dirs:
-      out.write('      <includepath workspace_path="false">%s</includepath>\n' %
-                include_dir)
+      out.write('      <includepath workspace_path="false">%s</includepath>\n' % include_dir)
     out.write('    </language>\n')
   out.write('  </section>\n')
@@ -267,78 +258,68 @@ def WriteIncludePaths(out, eclipse_langs, include_dirs):
 
 def WriteMacros(out, eclipse_langs, defines):
   """Write the macros section of a CDT settings export file."""
-  out.write('  <section name="org.eclipse.cdt.internal.ui.wizards.'
-            'settingswizards.Macros">\n')
+  out.write('  <section name="org.eclipse.cdt.internal.ui.wizards.settingswizards.Macros">\n')
   out.write('    <language name="holder for library settings"></language>\n')
   for lang in eclipse_langs:
     out.write('    <language name="%s">\n' % lang)
-    for key in sorted(defines):
-      out.write('      <macro><name>%s</name><value>%s</value></macro>\n' %
-                (escape(key), escape(defines[key])))
+    for key in sorted(defines.keys()):
+      out.write('      <macro><name>%s</name><value>%s</value></macro>\n' % (escape(key), escape(defines[key])))
     out.write('    </language>\n')
   out.write('  </section>
\n') -def GenerateOutputForConfig(target_list, target_dicts, data, params, - config_name): +def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name): options = params['options'] generator_flags = params.get('generator_flags', {}) # build_dir: relative path from source root to our output files. # e.g. "out/Debug" - build_dir = os.path.join(generator_flags.get('output_dir', 'out'), - config_name) + build_dir = os.path.join(generator_flags.get('output_dir', 'out'), config_name) toplevel_build = os.path.join(options.toplevel_dir, build_dir) # Ninja uses out/Debug/gen while make uses out/Debug/obj/gen as the # SHARED_INTERMEDIATE_DIR. Include both possible locations. - shared_intermediate_dirs = [os.path.join(toplevel_build, 'obj', 'gen'), - os.path.join(toplevel_build, 'gen')] + shared_intermediate_dirs = [ + os.path.join(toplevel_build, 'obj', 'gen'), + os.path.join(toplevel_build, 'gen') + ] GenerateCdtSettingsFile(target_list, target_dicts, data, params, config_name, - os.path.join(toplevel_build, - 'eclipse-cdt-settings.xml'), + os.path.join(toplevel_build, 'eclipse-cdt-settings.xml'), options, shared_intermediate_dirs) GenerateClasspathFile(target_list, target_dicts, options.toplevel_dir, toplevel_build, - os.path.join(toplevel_build, - 'eclipse-classpath.xml')) + os.path.join(toplevel_build, 'eclipse-classpath.xml')) -def GenerateCdtSettingsFile(target_list, target_dicts, data, params, - config_name, out_name, options, - shared_intermediate_dirs): +def GenerateCdtSettingsFile(target_list, target_dicts, data, params, config_name, out_name, options, shared_intermediate_dirs): gyp.common.EnsureDirExists(out_name) with open(out_name, 'w') as out: out.write('\n') out.write('\n') - eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File', - 'GNU C++', 'GNU C', 'Assembly'] + eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File', 'GNU C++', 'GNU C', 'Assembly'] compiler_path = GetCompilerPath(target_list, data, options) - include_dirs = GetAllIncludeDirectories(target_list, target_dicts, - shared_intermediate_dirs, - config_name, params, compiler_path) + include_dirs = GetAllIncludeDirectories(target_list, target_dicts, shared_intermediate_dirs, config_name, params, compiler_path) WriteIncludePaths(out, eclipse_langs, include_dirs) - defines = GetAllDefines(target_list, target_dicts, data, config_name, - params, compiler_path) + defines = GetAllDefines(target_list, target_dicts, data, config_name, params, compiler_path) WriteMacros(out, eclipse_langs, defines) out.write('\n') -def GenerateClasspathFile(target_list, target_dicts, toplevel_dir, - toplevel_build, out_name): - '''Generates a classpath file suitable for symbol navigation and code - completion of Java code (such as in Android projects) by finding all - .java and .jar files used as action inputs.''' +def GenerateClasspathFile(target_list, target_dicts, toplevel_dir, toplevel_build, out_name): + """ + Generates a classpath file suitable for symbol navigation and code completion of Java code (such as in Android projects) + by finding all .java and .jar files used as action inputs. 
+ """ gyp.common.EnsureDirExists(out_name) result = ET.Element('classpath') @@ -357,7 +338,7 @@ def AddElements(kind, paths): entry_element.set('kind', kind) entry_element.set('path', path) - AddElements('lib', GetJavaJars(target_list, target_dicts, toplevel_dir)) + AddElements('lib', GetJavaJars(target_list, target_dicts)) AddElements('src', GetJavaSourceDirs(target_list, target_dicts, toplevel_dir)) # Include the standard JRE container and a dummy out folder AddElements('con', ['org.eclipse.jdt.launching.JRE_CONTAINER']) @@ -368,8 +349,8 @@ def AddElements(kind, paths): ET.ElementTree(result).write(out_name) -def GetJavaJars(target_list, target_dicts, toplevel_dir): - '''Generates a sequence of all .jars used as inputs.''' +def GetJavaJars(target_list, target_dicts): + """Generates a sequence of all .jars used as inputs.""" for target_name in target_list: target = target_dicts[target_name] for action in target.get('actions', []): @@ -382,7 +363,7 @@ def GetJavaJars(target_list, target_dicts, toplevel_dir): def GetJavaSourceDirs(target_list, target_dicts, toplevel_dir): - '''Generates a sequence of all likely java package root directories.''' + """Generates a sequence of all likely java package root directories.""" for target_name in target_list: target = target_dicts[target_name] for action in target.get('actions', []): @@ -418,7 +399,7 @@ def GenerateOutput(target_list, target_dicts, data, params): GenerateOutputForConfig(target_list, target_dicts, data, params, user_config) else: - config_names = target_dicts[target_list[0]]['configurations'].keys() + config_names = target_dicts[target_list[0]]['configurations'] for config_name in config_names: GenerateOutputForConfig(target_list, target_dicts, data, params, config_name) diff --git a/gyp/pylib/gyp/generator/gypd.py b/gyp/gyp/generator/gypd.py similarity index 94% rename from gyp/pylib/gyp/generator/gypd.py rename to gyp/gyp/generator/gypd.py index 78eeaa61b2..568f6d1193 100644 --- a/gyp/pylib/gyp/generator/gypd.py +++ b/gyp/gyp/generator/gypd.py @@ -32,8 +32,6 @@ import gyp.common -import errno -import os import pprint @@ -74,11 +72,10 @@ generator_default_variables[v] = '<(%s)' % v -def GenerateOutput(target_list, target_dicts, data, params): +def GenerateOutput(target_list, _, data, params): output_files = {} for qualified_target in target_list: - [input_file, target] = \ - gyp.common.ParseQualifiedTarget(qualified_target)[0:2] + [input_file, __] = gyp.common.ParseQualifiedTarget(qualified_target)[0:2] if input_file[-4:] != '.gyp': continue diff --git a/gyp/pylib/gyp/generator/gypsh.py b/gyp/gyp/generator/gypsh.py similarity index 84% rename from gyp/pylib/gyp/generator/gypsh.py rename to gyp/gyp/generator/gypsh.py index bd405f43a9..dcd9e24f97 100644 --- a/gyp/pylib/gyp/generator/gypsh.py +++ b/gyp/gyp/generator/gypsh.py @@ -40,8 +40,8 @@ generator_default_variables[v] = '<(%s)' % v -def GenerateOutput(target_list, target_dicts, data, params): - locals = { +def GenerateOutput(target_list, target_dicts, data, _): + locals_vars = { 'target_list': target_list, 'target_dicts': target_dicts, 'data': data, @@ -50,7 +50,7 @@ def GenerateOutput(target_list, target_dicts, data, params): # Use a banner that looks like the stock Python one and like what # code.interact uses by default, but tack on something to indicate what # locals are available, and identify gypsh. 
- banner='Python %s on %s\nlocals.keys() = %s\ngypsh' % \ - (sys.version, sys.platform, repr(sorted(locals.keys()))) + locals_repr = repr(sorted(locals_vars.keys())) + banner='Python %s on %s\nlocals.keys() = %s\ngypsh' % (sys.version, sys.platform, locals_repr) - code.interact(banner, local=locals) + code.interact(banner, local=locals_vars) diff --git a/gyp/gyp/generator/make.py b/gyp/gyp/generator/make.py new file mode 100644 index 0000000000..4b5b7e92dd --- /dev/null +++ b/gyp/gyp/generator/make.py @@ -0,0 +1,557 @@ +# Copyright (c) 2013 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Notes: +# +# This is all roughly based on the Makefile system used by the Linux +# kernel, but is a non-recursive make -- we put the entire dependency +# graph in front of make and let it figure it out. +# +# The code below generates a separate .mk file for each target, but +# all are sourced by the top-level Makefile. This means that all +# variables in .mk-files clobber one another. Be careful to use := +# where appropriate for immediate evaluation, and similarly to watch +# that you're not relying on a variable value to last beween different +# .mk files. +# + +# TODO +# Global settings and utility functions are currently stuffed in the toplevel Makefile. +# It may make sense to generate some .mk files on the side to keep the the files readable. + +from __future__ import print_function + +import os +import re +import subprocess +import gyp +import gyp.common +import gyp.xcode_emulation +from gyp.common import GetEnvironFallback + +generator_default_variables = { + 'EXECUTABLE_PREFIX': '', + 'EXECUTABLE_SUFFIX': '', + 'STATIC_LIB_PREFIX': 'lib', + 'SHARED_LIB_PREFIX': 'lib', + 'STATIC_LIB_SUFFIX': '.a', + 'INTERMEDIATE_DIR': '$(obj).$(TOOLSET)/$(TARGET)/geni', + 'SHARED_INTERMEDIATE_DIR': '$(obj)/gen', + 'PRODUCT_DIR': '$(builddir)', + 'RULE_INPUT_ROOT': '%(INPUT_ROOT)s', # This gets expanded by Python. + 'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s', # This gets expanded by Python. + 'RULE_INPUT_PATH': '$(abspath $<)', + 'RULE_INPUT_EXT': '$(suffix $<)', + 'RULE_INPUT_NAME': '$(notdir $<)', + 'CONFIGURATION_NAME': '$(BUILDTYPE)', +} + +# Make supports multiple toolsets +generator_supports_multiple_toolsets = True + +# Request sorted dependencies in the order from dependents to dependencies. +generator_wants_sorted_dependencies = False + +# Placates pylint. +generator_additional_non_configuration_keys = [] +generator_additional_path_sections = [] +generator_extra_sources_for_rules = [] +generator_filelist_paths = None + +LINK_COMMANDS_LINUX = """\ +quiet_cmd_alink = AR($(TOOLSET)) $@ +cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^) + +quiet_cmd_alink_thin = AR($(TOOLSET)) $@ +cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^) + +# Due to circular dependencies between libraries :(, we wrap the +# special "figure out circular dependencies" flags around the entire +# input list during linking. +quiet_cmd_link = LINK($(TOOLSET)) $@ +cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS) + +# We support two kinds of shared objects (.so): +# 1) shared_library, which is just bundling together many dependent libraries +# into a link line. +# 2) loadable_module, which is generating a module intended for dlopen(). 
+# +# They differ only slightly: +# In the former case, we want to package all dependent code into the .so. +# In the latter case, we want to package just the API exposed by the +# outermost module. +# This means shared_library uses --whole-archive, while loadable_module doesn't. +# (Note that --whole-archive is incompatible with the --start-group used in +# normal linking.) + +# Other shared-object link notes: +# - Set SONAME to the library filename so our binaries don't reference +# the local, absolute paths used on the link command-line. +quiet_cmd_solink = SOLINK($(TOOLSET)) $@ +cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS) + +quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ +cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS) +""" + +LINK_COMMANDS_MAC = """\ +quiet_cmd_alink = LIBTOOL-STATIC $@ +cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) + +quiet_cmd_link = LINK($(TOOLSET)) $@ +cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) + +quiet_cmd_solink = SOLINK($(TOOLSET)) $@ +cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) + +quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ +cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) +""" + +LINK_COMMANDS_ANDROID = """\ +quiet_cmd_alink = AR($(TOOLSET)) $@ +cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^) + +quiet_cmd_alink_thin = AR($(TOOLSET)) $@ +cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^) + +# Due to circular dependencies between libraries :(, we wrap the +# special "figure out circular dependencies" flags around the entire +# input list during linking. +quiet_cmd_link = LINK($(TOOLSET)) $@ +quiet_cmd_link_host = LINK($(TOOLSET)) $@ +cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS) +cmd_link_host = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) + +# Other shared-object link notes: +# - Set SONAME to the library filename so our binaries don't reference +# the local, absolute paths used on the link command-line. 
+quiet_cmd_solink = SOLINK($(TOOLSET)) $@ +cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS) + +quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ +cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS) +quiet_cmd_solink_module_host = SOLINK_MODULE($(TOOLSET)) $@ +cmd_solink_module_host = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) +""" + +LINK_COMMANDS_AIX = """\ +quiet_cmd_alink = AR($(TOOLSET)) $@ +cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^) + +quiet_cmd_alink_thin = AR($(TOOLSET)) $@ +cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^) + +quiet_cmd_link = LINK($(TOOLSET)) $@ +cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) + +quiet_cmd_solink = SOLINK($(TOOLSET)) $@ +cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) + +quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ +cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) +""" + +LINK_COMMANDS_OS390 = """\ +quiet_cmd_alink = AR($(TOOLSET)) $@ +cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^) + +quiet_cmd_alink_thin = AR($(TOOLSET)) $@ +cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^) + +quiet_cmd_link = LINK($(TOOLSET)) $@ +cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) + +quiet_cmd_solink = SOLINK($(TOOLSET)) $@ +cmd_solink = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) -Wl,DLL + +quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ +cmd_solink_module = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) -Wl,DLL +""" + +SHARED_HEADER_MAC_COMMANDS = """ +quiet_cmd_objc = CXX($(TOOLSET)) $@ +cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< + +quiet_cmd_objcxx = CXX($(TOOLSET)) $@ +cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< + +# Commands for precompiled header files. +quiet_cmd_pch_c = CXX($(TOOLSET)) $@ +cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< +quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ +cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< +quiet_cmd_pch_m = CXX($(TOOLSET)) $@ +cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< +quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ +cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< + +# gyp-mac-tool is written next to the root Makefile by gyp. +# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd +# already. +quiet_cmd_mac_tool = MACTOOL $(4) $< +cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" + +quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ +cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) + +quiet_cmd_infoplist = INFOPLIST $@ +cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" +""" + +SHARED_FOOTER = """\ +# "all" is a concatenation of the "all" targets from all the included +# sub-makefiles. 
This is just here to clarify. +all: + +# Add in dependency-tracking rules. $(all_deps) is the list of every single +# target in our tree. Only consider the ones with .d (dependency) info: +d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) +ifneq ($(d_files),) + include $(d_files) +endif +""" + + +def WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files, Sourceify): + """Write the target to regenerate the Makefile.""" + options = params['options'] + build_files_args = [gyp.common.RelativePath(filename, options.toplevel_dir) for filename in params['build_files_arg']] + + gyp_binary = gyp.common.FixIfRelativePath(params['gyp_binary'], options.toplevel_dir) + if not gyp_binary.startswith(os.sep): + gyp_binary = os.path.join('.', gyp_binary) + + root_makefile.write( + "quiet_cmd_regen_makefile = ACTION Regenerating $@\n" + "cmd_regen_makefile = cd $(srcdir); %(cmd)s\n" + "%(makefile_name)s: %(deps)s\n" + "\t$(call do_cmd,regen_makefile)\n\n" % { + 'makefile_name': makefile_name, + 'deps': ' '.join(map(Sourceify, build_files)), + 'cmd': gyp.common.EncodePOSIXShellList([gyp_binary, '-fmake'] + gyp.RegenerateFlags(options) + build_files_args) + } + ) + + +def PerformBuild(_, configurations, params): + options = params['options'] + for config in configurations: + arguments = ['make'] + if options.toplevel_dir and options.toplevel_dir != '.': + arguments += '-C', options.toplevel_dir + arguments.append('BUILDTYPE=' + config) + print('Building [%s]: %s' % (config, arguments)) + subprocess.check_call(arguments) + + +def GenerateOutput(target_list, target_dicts, data, params): + from gyp.MakefileWriter import MakefileWriter, Sourceify, WriteRootHeaderSuffixRules, SHARED_HEADER + options = params['options'] + flavor = gyp.common.GetFlavor(params) + generator_flags = params.get('generator_flags', {}) + builddir_name = generator_flags.get('output_dir', 'out') + android_ndk_version = generator_flags.get('android_ndk_version', None) + default_target = generator_flags.get('default_target', 'all') + + def CalculateMakefilePath(build_file_arg, base_name): + """Determine where to write a Makefile for a given gyp file.""" + # Paths in gyp files are relative to the .gyp file, but we want + # paths relative to the source root for the master makefile. Grab + # the path of the .gyp file as the base to relativize against. + # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp". + base_makefile_path = gyp.common.RelativePath(os.path.dirname(build_file_arg), options.depth) + # We write the file in the base_makefile_path directory. + output_makefile = os.path.join(options.depth, base_makefile_path, base_name) + if options.generator_output: + output_makefile = os.path.join(options.depth, options.generator_output, base_makefile_path, base_name) + base_makefile_path = gyp.common.RelativePath(os.path.dirname(build_file_arg), options.toplevel_dir) + return base_makefile_path, output_makefile + + # TODO: search for the first non-'Default' target. This can go + # away when we add verification that all targets have the + # necessary configurations. + default_configuration = None + toolsets = set([target_dicts[target]['toolset'] for target in target_list]) + for target in target_list: + spec = target_dicts[target] + if spec['default_configuration'] != 'Default': + default_configuration = spec['default_configuration'] + break + if not default_configuration: + default_configuration = 'Default' + + srcdir = '.' 
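[Editor's aside — worked example with hypothetical paths] What
CalculateMakefilePath above computes when options.depth is '.' and
generator_output is unset (RelativePath against '.' reduces to dirname):

    import os

    build_file_arg = 'foo/bar/baz.gyp'                    # defining .gyp file
    base_makefile_path = os.path.dirname(build_file_arg)  # 'foo/bar'
    output_makefile = os.path.join('.', base_makefile_path, 'baz.target.mk')
    print(output_makefile)  # -> ./foo/bar/baz.target.mk, next to its .gyp file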
+ makefile_name = 'Makefile' + options.suffix + makefile_path = os.path.join(options.toplevel_dir, makefile_name) + if options.generator_output: + makefile_path = os.path.join(options.toplevel_dir, options.generator_output, makefile_name) + srcdir = gyp.common.RelativePath(srcdir, options.generator_output) + Sourceify.srcdir_prefix = '$(srcdir)/' + + flock_command = 'flock' + copy_archive_arguments = '-af' + makedep_arguments = '-MMD' + header_params = { + 'default_target': default_target, + 'builddir': builddir_name, + 'default_configuration': default_configuration, + 'flock': flock_command, + 'flock_index': 1, + 'link_commands': LINK_COMMANDS_LINUX, + 'extra_commands': '', + 'srcdir': srcdir, + 'copy_archive_args': copy_archive_arguments, + 'makedep_args': makedep_arguments, + } + if flavor == 'mac': + flock_command = './gyp-mac-tool flock' + header_params.update({ + 'flock': flock_command, + 'flock_index': 2, + 'link_commands': LINK_COMMANDS_MAC, + 'extra_commands': SHARED_HEADER_MAC_COMMANDS, + }) + elif flavor == 'android': + header_params.update({ + 'link_commands': LINK_COMMANDS_ANDROID, + }) + elif flavor == 'zos': + copy_archive_arguments = '-fPR' + makedep_arguments = '-qmakedep=gcc' + header_params.update({ + 'copy_archive_args': copy_archive_arguments, + 'makedep_args': makedep_arguments, + 'link_commands': LINK_COMMANDS_OS390, + }) + elif flavor == 'solaris': + header_params.update({ + 'flock': './gyp-flock-tool flock', + 'flock_index': 2, + }) + elif flavor == 'freebsd': + # Note: OpenBSD has sysutils/flock. lockf seems to be FreeBSD specific. + header_params.update({ + 'flock': 'lockf', + }) + elif flavor == 'openbsd': + copy_archive_arguments = '-pPRf' + header_params.update({ + 'copy_archive_args': copy_archive_arguments, + }) + elif flavor == 'aix': + copy_archive_arguments = '-pPRf' + header_params.update({ + 'copy_archive_args': copy_archive_arguments, + 'link_commands': LINK_COMMANDS_AIX, + 'flock': './gyp-flock-tool flock', + 'flock_index': 2, + }) + + header_params.update({ + 'CC.target': GetEnvironFallback(('CC_target', 'CC'), '$(CC)'), + 'AR.target': GetEnvironFallback(('AR_target', 'AR'), '$(AR)'), + 'CXX.target': GetEnvironFallback(('CXX_target', 'CXX'), '$(CXX)'), + 'LINK.target': GetEnvironFallback(('LINK_target', 'LINK'), '$(LINK)'), + 'CC.host': GetEnvironFallback(('CC_host',), 'cc'), + 'AR.host': GetEnvironFallback(('AR_host',), 'ar'), + 'CXX.host': GetEnvironFallback(('CXX_host',), 'c++'), + 'LINK.host': GetEnvironFallback(('LINK_host',), '$(CXX.host)'), + }) + + build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0]) + make_global_settings_array = data[build_file].get('make_global_settings', []) + wrappers = {} + for key, value in make_global_settings_array: + if key.endswith('_wrapper'): + wrappers[key[:-len('_wrapper')]] = '$(abspath %s)' % value + make_global_settings = '' + for key, value in make_global_settings_array: + if re.match('.*_wrapper', key): + continue + if value[0] != '$': + value = '$(abspath %s)' % value + wrapper = wrappers.get(key) + if wrapper: + value = '%s %s' % (wrapper, value) + del wrappers[key] + if key in ('CC', 'CC.host', 'CXX', 'CXX.host'): + make_global_settings += 'ifneq (,$(filter $(origin %s), undefined default))\n' % key + # Let gyp-time envvars win over global settings. 
+ env_key = key.replace('.', '_') # CC.host -> CC_host + if env_key in os.environ: + value = os.environ[env_key] + make_global_settings += ' %s = %s\n' % (key, value) + make_global_settings += 'endif\n' + else: + make_global_settings += '%s ?= %s\n' % (key, value) + # TODO(ukai): define cmd when only wrapper is specified in + # make_global_settings. + + header_params['make_global_settings'] = make_global_settings + + gyp.common.EnsureDirExists(makefile_path) + root_makefile = open(makefile_path, 'w') + root_makefile.write(SHARED_HEADER % header_params) + # Currently any versions have the same effect, but in future the behavior + # could be different. + if android_ndk_version: + root_makefile.write( + '# Define LOCAL_PATH for build of Android applications.\n' + 'LOCAL_PATH := $(call my-dir)\n' + '\n' + ) + for toolset in toolsets: + root_makefile.write('TOOLSET := %s\n' % toolset) + WriteRootHeaderSuffixRules(root_makefile) + + # Put build-time support tools next to the root Makefile. + dest_path = os.path.dirname(makefile_path) + gyp.common.CopyTool(flavor, dest_path) + + # Find the list of targets that derive from the gyp file(s) being built. + needed_targets = set() + for build_file in params['build_files']: + for target in gyp.common.AllTargets(target_list, target_dicts, build_file): + needed_targets.add(target) + + build_files = set() + include_list = set() + writer = None + for qualified_target in target_list: + build_file, target, toolset = gyp.common.ParseQualifiedTarget(qualified_target) + + this_make_global_settings = data[build_file].get('make_global_settings', []) + assert make_global_settings_array == this_make_global_settings, ( + "make_global_settings needs to be the same for all targets. %s vs. %s" % + (this_make_global_settings, make_global_settings)) + + build_files.add(gyp.common.RelativePath(build_file, options.toplevel_dir)) + included_files = data[build_file]['included_files'] + for included_file in included_files: + # The included_files entries are relative to the dir of the build file + # that included them, so we have to undo that and then make them relative + # to the root dir. + relative_include_file = gyp.common.RelativePath( + gyp.common.UnrelativePath(included_file, build_file), + options.toplevel_dir + ) + abs_include_file = os.path.abspath(relative_include_file) + # If the include file is from the ~/.gyp dir, we should use absolute path + # so that relocating the src dir doesn't break the path. + if params['home_dot_gyp'] and abs_include_file.startswith(params['home_dot_gyp']): + build_files.add(abs_include_file) + else: + build_files.add(relative_include_file) + + base_path, output_file = CalculateMakefilePath(build_file, target + '.' + toolset + options.suffix + '.mk') + + spec = target_dicts[qualified_target] + configs = spec['configurations'] + + if flavor == 'mac': + gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec) + + writer = MakefileWriter(generator_flags, flavor) + writer.Write(qualified_target, base_path, output_file, spec, configs, part_of_all=qualified_target in needed_targets) + + # Our root_makefile lives at the source root. Compute the relative path + # from there to the output_file for including. + mkfile_rel_path = gyp.common.RelativePath(output_file, os.path.dirname(makefile_path)) + include_list.add(mkfile_rel_path) + + assert writer + # Write out per-gyp (sub-project) Makefiles. 
+ depth_rel_path = gyp.common.RelativePath(options.depth, os.getcwd()) + for build_file in build_files: + # The paths in build_files were relativized above, so undo that before + # testing against the non-relativized items in target_list and before + # calculating the Makefile path. + build_file_path = os.path.join(depth_rel_path, build_file) + related_gyp_targets = [t for t in target_list if t.startswith(build_file) and t in needed_targets] + # Only generate Makefiles for gyp files with targets. + if not related_gyp_targets: + continue + build_file_name = "%s.Makefile" % os.path.splitext(os.path.basename(build_file))[0] + _, submake_output_file = CalculateMakefilePath(build_file_path, build_file_name) + makefile_rel_path = gyp.common.RelativePath(os.path.dirname(makefile_path), os.path.dirname(submake_output_file)) + gyp_targets_names = [target_dicts[t]['target_name'] for t in related_gyp_targets] + writer.WriteSubMake(submake_output_file, makefile_rel_path, gyp_targets_names, builddir_name) + + # Write out the sorted list of includes. + root_makefile.write('\n') + for include_file in sorted(include_list): + # We wrap each .mk include in an if statement so users can tell make to + # not load a file by setting NO_LOAD. The below make code says, only + # load the .mk file if the .mk filename doesn't start with a token in + # NO_LOAD. + include_conditional_tmpl="""\ +ifeq ($(strip $(foreach prefix,$(NO_LOAD), $(findstring $(join ^,$(prefix)), $(join ^,%(include_file)s)))),) + include %(include_file)s +endif +""" + root_makefile.write(include_conditional_tmpl % { 'include_file': include_file }) + root_makefile.write('\n') + + if (not generator_flags.get('standalone') + and generator_flags.get('auto_regeneration', True)): + WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files, Sourceify) + + root_makefile.write(SHARED_FOOTER) + + root_makefile.close() + + +def CalculateVariables(default_variables, params): + """Calculate additional variables for use in the build (called by gyp).""" + flavor = gyp.common.GetFlavor(params) + if flavor == 'mac': + default_variables.setdefault('OS', 'mac') + default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib') + default_variables.setdefault('SHARED_LIB_DIR', generator_default_variables['PRODUCT_DIR']) + default_variables.setdefault('LIB_DIR', generator_default_variables['PRODUCT_DIR']) + + # Copy additional generator configuration data from Xcode, which is shared + # by the Mac Make generator. + import gyp.generator.xcode as xcode_generator + global generator_additional_non_configuration_keys + generator_additional_non_configuration_keys = getattr(xcode_generator, 'generator_additional_non_configuration_keys', []) + global generator_additional_path_sections + generator_additional_path_sections = getattr(xcode_generator, 'generator_additional_path_sections', []) + global generator_extra_sources_for_rules + generator_extra_sources_for_rules = getattr(xcode_generator, 'generator_extra_sources_for_rules', []) + else: + operating_system = flavor + if flavor == 'android': + operating_system = 'linux' # Keep this legacy behavior for now. 
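[Editor's aside — illustrative usage] The NO_LOAD guard that GenerateOutput
wraps around each include above means any sub-makefile whose path starts with
a prefix listed in NO_LOAD is skipped; e.g., assuming generated files under
third_party/:

    make NO_LOAD=third_party BUILDTYPE=Release   # loads no third_party/*.mk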
+ default_variables.setdefault('OS', operating_system) + if flavor == 'aix': + default_variables.setdefault('SHARED_LIB_SUFFIX', '.a') + else: + default_variables.setdefault('SHARED_LIB_SUFFIX', '.so') + default_variables.setdefault('SHARED_LIB_DIR', '$(builddir)/lib.$(TOOLSET)') + default_variables.setdefault('LIB_DIR', '$(obj).$(TOOLSET)') + + +def CalculateGeneratorInputInfo(params): + """Calculate the generator specific info that gets fed to input (called by + gyp).""" + generator_flags = params.get('generator_flags', {}) + android_ndk_version = generator_flags.get('android_ndk_version', None) + # Android NDK requires a strict link order. + if android_ndk_version: + global generator_wants_sorted_dependencies + generator_wants_sorted_dependencies = True + + output_dir = params['options'].generator_output or \ + params['options'].toplevel_dir + builddir_name = generator_flags.get('output_dir', 'out') + qualified_out_dir = os.path.normpath(os.path.join( + output_dir, builddir_name, 'gypfiles')) + + global generator_filelist_paths + generator_filelist_paths = { + 'toplevel': params['options'].toplevel_dir, + 'qualified_out_dir': qualified_out_dir, + } diff --git a/gyp/pylib/gyp/generator/msvs.py b/gyp/gyp/generator/msvs.py similarity index 77% rename from gyp/pylib/gyp/generator/msvs.py rename to gyp/gyp/generator/msvs.py index 5e5b2ee52a..6e798ad1b8 100644 --- a/gyp/pylib/gyp/generator/msvs.py +++ b/gyp/gyp/generator/msvs.py @@ -4,7 +4,7 @@ from __future__ import print_function -import copy +from collections import OrderedDict import ntpath import os import posixpath @@ -12,28 +12,12 @@ import subprocess import sys -import gyp.common +import gyp.common as common import gyp.easy_xml as easy_xml import gyp.generator.ninja as ninja_generator -import gyp.MSVSNew as MSVSNew -import gyp.MSVSProject as MSVSProject -import gyp.MSVSSettings as MSVSSettings -import gyp.MSVSToolFile as MSVSToolFile -import gyp.MSVSUserFile as MSVSUserFile -import gyp.MSVSUtil as MSVSUtil -import gyp.MSVSVersion as MSVSVersion -from gyp.common import GypError -from gyp.common import OrderedSet - -# TODO: Remove once bots are on 2.7, http://crbug.com/241769 -def _import_OrderedDict(): - import collections - try: - return collections.OrderedDict - except AttributeError: - import gyp.ordered_dict - return gyp.ordered_dict.OrderedDict -OrderedDict = _import_OrderedDict() +import gyp.MSVS as MSVS +from gyp.MSVS import MSVSNew, MSVSSettings, MSVSToolFile, MSVSVersion +from gyp.common import GypError, OrderedSet # Regular expression for validating Visual Studio GUIDs. If the GUID @@ -48,6 +32,8 @@ def _import_OrderedDict(): generator_default_variables = { + 'DRIVER_PREFIX': '', + 'DRIVER_SUFFIX': '.sys', 'EXECUTABLE_PREFIX': '', 'EXECUTABLE_SUFFIX': '.exe', 'STATIC_LIB_PREFIX': '', @@ -88,12 +74,12 @@ def _import_OrderedDict(): 'msvs_enable_winrt', 'msvs_requires_importlibrary', 'msvs_enable_winphone', - 'msvs_enable_marmasm', 'msvs_application_type_revision', 'msvs_target_platform_version', 'msvs_target_platform_minversion', ] +generator_filelist_paths = None # List of precompiled header related keys. precomp_keys = [ @@ -114,7 +100,7 @@ def _import_OrderedDict(): # 64-bit. 
def _GetDomainAndUserName(): if sys.platform not in ('win32', 'cygwin'): - return ('DOMAIN', 'USERNAME') + return 'DOMAIN', 'USERNAME' global cached_username global cached_domain if not cached_domain or not cached_username: @@ -134,9 +120,7 @@ def _GetDomainAndUserName(): domain = domain_match.group(1) cached_domain = domain cached_username = username - return (cached_domain, cached_username) - -fixpath_prefix = None + return cached_domain, cached_username def _NormalizedSource(source): @@ -157,6 +141,8 @@ def _NormalizedSource(source): return source +fixpath_prefix = '' + def _FixPath(path): """Convert paths to a form that will make sense in a vcproj file. @@ -179,8 +165,7 @@ def _FixPaths(paths): return [_FixPath(i) for i in paths] -def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None, - list_excluded=True, msvs_version=None): +def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None, list_excluded=True, msvs_version=None): """Converts a list split source file paths into a vcproj folder hierarchy. Arguments: @@ -211,35 +196,32 @@ def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None, excluded_result.append(filename) else: result.append(filename) - elif msvs_version and not msvs_version.UsesVcxproj(): + elif msvs_version and not msvs_version.uses_vcxproj: # For MSVS 2008 and earlier, we need to process all files before walking # the sub folders. if not folders.get(s[0]): folders[s[0]] = [] folders[s[0]].append(s[1:]) else: - contents = _ConvertSourcesToFilterHierarchy([s[1:]], prefix + [s[0]], - excluded=excluded, - list_excluded=list_excluded, - msvs_version=msvs_version) - contents = MSVSProject.Filter(s[0], contents=contents) + contents = _ConvertSourcesToFilterHierarchy( + [s[1:]], prefix + [s[0]], excluded=excluded, list_excluded=list_excluded, msvs_version=msvs_version + ) + contents = MSVS.Filter(s[0], contents=contents) result.append(contents) # Add a folder for excluded files. if excluded_result and list_excluded: - excluded_folder = MSVSProject.Filter('_excluded_files', - contents=excluded_result) + excluded_folder = MSVS.Filter('_excluded_files', contents=excluded_result) result.append(excluded_folder) - if msvs_version and msvs_version.UsesVcxproj(): + if msvs_version and msvs_version.uses_vcxproj: return result # Populate all the folders. 
for f in folders: - contents = _ConvertSourcesToFilterHierarchy(folders[f], prefix=prefix + [f], - excluded=excluded, - list_excluded=list_excluded, - msvs_version=msvs_version) - contents = MSVSProject.Filter(f, contents=contents) + contents = _ConvertSourcesToFilterHierarchy( + folders[f], prefix=prefix + [f], excluded=excluded, list_excluded=list_excluded, msvs_version=msvs_version + ) + contents = MSVS.Filter(f, contents=contents) result.append(contents) return result @@ -259,19 +241,24 @@ def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False): if not tools.get(tool_name): tools[tool_name] = dict() tool = tools[tool_name] + if 'CompileAsWinRT' == setting: + return if tool.get(setting): if only_if_unset: return if type(tool[setting]) == list and type(value) == list: tool[setting] += value else: raise TypeError( - 'Appending "%s" to a non-list setting "%s" for tool "%s" is ' - 'not allowed, previous value: %s' % ( - value, setting, tool_name, str(tool[setting]))) + 'Appending "%s" to a non-list setting "%s" for tool "%s" is not allowed, previous value: %s' % (value, setting, tool_name, str(tool[setting])) + ) else: tool[setting] = value +def _ConfigTargetVersion(config_data): + return config_data.get('msvs_target_version', 'Windows7') + + def _ConfigPlatform(config_data): return config_data.get('msvs_configuration_platform', 'Win32') @@ -288,31 +275,14 @@ def _ConfigFullName(config_name, config_data): return '%s|%s' % (_ConfigBaseName(config_name, platform_name), platform_name) -def _ConfigWindowsTargetPlatformVersion(config_data): - ver = config_data.get('msvs_windows_target_platform_version') - if not ver or re.match(r'^\d+', ver): - return ver - for key in [r'HKLM\Software\Microsoft\Microsoft SDKs\Windows\%s', - r'HKLM\Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows\%s']: - sdkdir = MSVSVersion._RegistryGetValue(key % ver, 'InstallationFolder') - if not sdkdir: - continue - version = MSVSVersion._RegistryGetValue(key % ver, 'ProductVersion') or '' - # find a matching entry in sdkdir\include - names = sorted([x for x in os.listdir(r'%s\include' % sdkdir) \ - if x.startswith(version)], reverse = True) - return names[0] - - -def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path, - quote_cmd, do_setup_env): +def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path, quote_cmd, do_setup_env): if [x for x in cmd if '$(InputDir)' in x]: input_dir_preamble = ( 'set INPUTDIR=$(InputDir)\n' 'if NOT DEFINED INPUTDIR set INPUTDIR=.\\\n' 'set INPUTDIR=%INPUTDIR:~0,-1%\n' - ) + ) else: input_dir_preamble = '' @@ -321,18 +291,13 @@ def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path, cygwin_dir = _FixPath(spec.get('msvs_cygwin_dirs', ['.'])[0]) # Prepare command. 
direct_cmd = cmd - direct_cmd = [i.replace('$(IntDir)', - '`cygpath -m "${INTDIR}"`') for i in direct_cmd] - direct_cmd = [i.replace('$(OutDir)', - '`cygpath -m "${OUTDIR}"`') for i in direct_cmd] - direct_cmd = [i.replace('$(InputDir)', - '`cygpath -m "${INPUTDIR}"`') for i in direct_cmd] + direct_cmd = [i.replace('$(IntDir)', '`cygpath -m "${INTDIR}"`') for i in direct_cmd] + direct_cmd = [i.replace('$(OutDir)', '`cygpath -m "${OUTDIR}"`') for i in direct_cmd] + direct_cmd = [i.replace('$(InputDir)', '`cygpath -m "${INPUTDIR}"`') for i in direct_cmd] if has_input_path: - direct_cmd = [i.replace('$(InputPath)', - '`cygpath -m "${INPUTPATH}"`') - for i in direct_cmd] + direct_cmd = [i.replace('$(InputPath)', '`cygpath -m "${INPUTPATH}"`') for i in direct_cmd] direct_cmd = ['\\"%s\\"' % i.replace('"', '\\\\\\"') for i in direct_cmd] - # direct_cmd = gyp.common.EncodePOSIXShellList(direct_cmd) + # direct_cmd = common.EncodePOSIXShellList(direct_cmd) direct_cmd = ' '.join(direct_cmd) # TODO(quote): regularize quoting path names throughout the module cmd = '' @@ -348,8 +313,7 @@ def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path, if has_input_path and direct_cmd.find('INPUTPATH') >= 0: cmd += 'set INPUTPATH=$(InputPath) && ' cmd += 'bash -c "%(cmd)s"' - cmd = cmd % {'cygwin_dir': cygwin_dir, - 'cmd': direct_cmd} + cmd = cmd % {'cygwin_dir': cygwin_dir, 'cmd': direct_cmd} return input_dir_preamble + cmd else: # Convert cat --> type to mimic unix. @@ -390,8 +354,7 @@ def _BuildCommandLineForRule(spec, rule, has_input_path, do_setup_env): elif isinstance(mcs, str): mcs = int(mcs) quote_cmd = int(rule.get('msvs_quote_cmd', 1)) - return _BuildCommandLineForRuleRaw(spec, rule['action'], mcs, has_input_path, - quote_cmd, do_setup_env=do_setup_env) + return _BuildCommandLineForRuleRaw(spec, rule['action'], mcs, has_input_path, quote_cmd, do_setup_env=do_setup_env) def _AddActionStep(actions_dict, inputs, outputs, description, command): @@ -429,8 +392,7 @@ def _AddActionStep(actions_dict, inputs, outputs, description, command): actions_dict[chosen_input].append(action) -def _AddCustomBuildToolForMSVS(p, spec, primary_input, - inputs, outputs, description, cmd): +def _AddCustomBuildToolForMSVS(p, spec, primary_input, inputs, outputs, description, cmd): """Add a custom build tool to execute something. Arguments: @@ -444,17 +406,18 @@ def _AddCustomBuildToolForMSVS(p, spec, primary_input, """ inputs = _FixPaths(inputs) outputs = _FixPaths(outputs) - tool = MSVSProject.Tool( - 'VCCustomBuildTool', - {'Description': description, - 'AdditionalDependencies': ';'.join(inputs), - 'Outputs': ';'.join(outputs), - 'CommandLine': cmd, - }) + tool = MSVS.Tool( + 'VCCustomBuildTool', + { + 'Description': description, + 'AdditionalDependencies': ';'.join(inputs), + 'Outputs': ';'.join(outputs), + 'CommandLine': cmd, + } + ) # Add to the properties of primary input for each config. for config_name, c_data in spec['configurations'].items(): - p.AddFileConfig(_FixPath(primary_input), - _ConfigFullName(config_name, c_data), tools=[tool]) + p.AddFileConfig(_FixPath(primary_input), _ConfigFullName(config_name, c_data), tools=[tool]) def _AddAccumulatedActionsToMSVS(p, spec, actions_dict): @@ -496,16 +459,16 @@ def _RuleExpandPath(path, input_file): Returns: The string substituted path. 
""" - path = path.replace('$(InputName)', - os.path.splitext(os.path.split(input_file)[1])[0]) + path = path.replace('$(InputName)', os.path.splitext(os.path.split(input_file)[1])[0]) path = path.replace('$(InputDir)', os.path.dirname(input_file)) - path = path.replace('$(InputExt)', - os.path.splitext(os.path.split(input_file)[1])[1]) + path = path.replace('$(InputExt)', os.path.splitext(os.path.split(input_file)[1])[1]) path = path.replace('$(InputFileName)', os.path.split(input_file)[1]) path = path.replace('$(InputPath)', input_file) return path +# TODO(refack): Figure out whats up here +# noinspection PyUnusedLocal def _FindRuleTriggerFiles(rule, sources): """Find the list of files which a particular rule applies to. @@ -536,7 +499,7 @@ def _RuleInputsAndOutputs(rule, trigger_file): inputs.add(_RuleExpandPath(i, trigger_file)) for o in raw_outputs: outputs.add(_RuleExpandPath(o, trigger_file)) - return (inputs, outputs) + return inputs, outputs def _GenerateNativeRulesForMSVS(p, rules, output_dir, spec, options): @@ -596,7 +559,7 @@ def _GenerateExternalRules(rules, output_dir, spec, actions_to_add: The list of actions we will add to. """ filename = '%s_rules%s.mk' % (spec['target_name'], options.suffix) - mk_file = gyp.common.WriteOnDiff(os.path.join(output_dir, filename)) + mk_file = common.WriteOnDiff(os.path.join(output_dir, filename)) # Find cygwin style versions of some paths. mk_file.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n') mk_file.write('IntDirCygwin:=$(shell cygpath -u "$(IntDir)")\n') @@ -704,7 +667,7 @@ def _Replace(match): # For a literal quote, CommandLineToArgv requires an odd number of # backslashes preceding it, and it produces half as many literal backslashes # (rounded down). So we need to produce 2n+1 backslashes. - return 2 * match.group(1) + '\\"' + return str(2 * match.group(1)) + '\\"' # Escape all quotes so that they are interpreted literally. s = quote_replacer_regex.sub(_Replace, s) @@ -742,7 +705,7 @@ def _Replace(match): # For a non-literal quote, CommandLineToArgv requires an even number of # backslashes preceding it, and it produces half as many literal # backslashes. So we need to produce 2n backslashes. - return 2 * match.group(1) + '"' + match.group(2) + '"' + return str(2 * match.group(1)) + '"' + str(match.group(2)) + '"' segments = s.split('"') # The unquoted segments are at the even-numbered indices. @@ -758,8 +721,8 @@ def _Replace(match): # the VCProj but cause the same problem on the final command-line. Moving # the item to the end of the list does works, but that's only possible if # there's only one such item. Let's just warn the user. - print('Warning: MSVS may misinterpret the odd number of ' + - 'quotes in ' + s, file=sys.stderr) + print(('Warning: MSVS may misinterpret the odd number of ' + + 'quotes in ' + s), file=sys.stderr) return s @@ -781,7 +744,7 @@ def _EscapeCommandLineArgumentForMSBuild(s): """Escapes a Windows command-line argument for use by MSBuild.""" def _Replace(match): - return (len(match.group(1)) / 2 * 4) * '\\' + '\\"' + return str('\\' * (len(match.group(1)) // 2 * 4)) + '\\"' # Escape all quotes so that they are interpreted literally. s = quote_replacer_regex2.sub(_Replace, s) @@ -881,11 +844,11 @@ def _GetDefaultConfiguration(spec): return spec['configurations'][spec['default_configuration']] -def _GetGuidOfProject(proj_path, spec): - """Get the guid for the project. +def _TryGetGuidOfProject(spec): + """ + Get the guid for the project. 
Arguments: - proj_path: Path of the vcproj or vcxproj file to generate. spec: The target dictionary containing the properties of the target. Returns: the guid. @@ -898,18 +861,15 @@ def _GetGuidOfProject(proj_path, spec): guid = default_config.get('msvs_guid') if guid: if VALID_MSVS_GUID_CHARS.match(guid) is None: - raise ValueError('Invalid MSVS guid: "%s". Must match regex: "%s".' % - (guid, VALID_MSVS_GUID_CHARS.pattern)) + raise ValueError('Invalid MSVS guid: "%s". Must match regex: "%s".' % (guid, VALID_MSVS_GUID_CHARS.pattern)) guid = '{%s}' % guid - guid = guid or MSVSNew.MakeGuid(proj_path) return guid -def _GetMsbuildToolsetOfProject(proj_path, spec, version): +def _GetMsbuildToolsetOfProject(spec, version): """Get the platform toolset for the project. Arguments: - proj_path: Path of the vcproj or vcxproj file to generate. spec: The target dictionary containing the properties of the target. version: The MSVSVersion object. Returns: @@ -917,9 +877,9 @@ def _GetMsbuildToolsetOfProject(proj_path, spec, version): """ # Pluck out the default configuration. default_config = _GetDefaultConfiguration(spec) - toolset = default_config.get('msbuild_toolset') - if not toolset and version.DefaultToolset(): - toolset = version.DefaultToolset() + toolset = default_config.get('msbuild_toolset', version.default_toolset) + if spec['type'] == 'windows_driver': + toolset = 'WindowsKernelModeDriver10.0' return toolset @@ -940,116 +900,7 @@ def _GenerateProject(project, options, version, generator_flags): if default_config.get('msvs_existing_vcproj'): return [] - if version.UsesVcxproj(): - return _GenerateMSBuildProject(project, options, version, generator_flags) - else: - return _GenerateMSVSProject(project, options, version, generator_flags) - - -# TODO: Avoid code duplication with _ValidateSourcesForOSX in make.py. -def _ValidateSourcesForMSVSProject(spec, version): - """Makes sure if duplicate basenames are not specified in the source list. - - Arguments: - spec: The target dictionary containing the properties of the target. - version: The VisualStudioVersion object. - """ - # This validation should not be applied to MSVC2010 and later. - assert not version.UsesVcxproj() - - # TODO: Check if MSVC allows this for loadable_module targets. - if spec.get('type', None) not in ('static_library', 'shared_library'): - return - sources = spec.get('sources', []) - basenames = {} - for source in sources: - name, ext = os.path.splitext(source) - is_compiled_file = ext in [ - '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S'] - if not is_compiled_file: - continue - basename = os.path.basename(name) # Don't include extension. - basenames.setdefault(basename, []).append(source) - - error = '' - for basename, files in basenames.items(): - if len(files) > 1: - error += ' %s: %s\n' % (basename, ' '.join(files)) - - if error: - print('static library %s has several files with the same basename:\n' % spec['target_name'] - + error + 'MSVC08 cannot handle that.') - raise GypError('Duplicate basenames in sources section, see list above') - - -def _GenerateMSVSProject(project, options, version, generator_flags): - """Generates a .vcproj file. It may create .rules and .user files too. - - Arguments: - project: The project object we will generate the file for. - options: Global options passed to the generator. - version: The VisualStudioVersion object. - generator_flags: dict of generator-specific flags. 
- """ - spec = project.spec - gyp.common.EnsureDirExists(project.path) - - platforms = _GetUniquePlatforms(spec) - p = MSVSProject.Writer(project.path, version, spec['target_name'], - project.guid, platforms) - - # Get directory project file is in. - project_dir = os.path.split(project.path)[0] - gyp_path = _NormalizedSource(project.build_file) - relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir) - - config_type = _GetMSVSConfigurationType(spec, project.build_file) - for config_name, config in spec['configurations'].items(): - _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config) - - # MSVC08 and prior version cannot handle duplicate basenames in the same - # target. - # TODO: Take excluded sources into consideration if possible. - _ValidateSourcesForMSVSProject(spec, version) - - # Prepare list of sources and excluded sources. - gyp_file = os.path.split(project.build_file)[1] - sources, excluded_sources = _PrepareListOfSources(spec, generator_flags, - gyp_file) - - # Add rules. - actions_to_add = {} - _GenerateRulesForMSVS(p, project_dir, options, spec, - sources, excluded_sources, - actions_to_add) - list_excluded = generator_flags.get('msvs_list_excluded_files', True) - sources, excluded_sources, excluded_idl = ( - _AdjustSourcesAndConvertToFilterHierarchy(spec, options, project_dir, - sources, excluded_sources, - list_excluded, version)) - - # Add in files. - missing_sources = _VerifySourcesExist(sources, project_dir) - p.AddFiles(sources) - - _AddToolFilesToMSVS(p, spec) - _HandlePreCompiledHeaders(p, sources, spec) - _AddActions(actions_to_add, spec, relative_path_of_gyp_file) - _AddCopies(actions_to_add, spec) - _WriteMSVSUserFile(project.path, version, spec) - - # NOTE: this stanza must appear after all actions have been decided. - # Don't excluded sources with actions attached, or they won't run. - excluded_sources = _FilterActionsFromExcluded( - excluded_sources, actions_to_add) - _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl, - list_excluded) - _AddAccumulatedActionsToMSVS(p, spec, actions_to_add) - - # Write it out. - p.WriteIfChanged() - - return missing_sources + return _GenerateMSBuildProject(project, options, version, generator_flags) def _GetUniquePlatforms(spec): @@ -1068,24 +919,6 @@ def _GetUniquePlatforms(spec): return platforms -def _CreateMSVSUserFile(proj_path, version, spec): - """Generates a .user file for the user running this Gyp program. - - Arguments: - proj_path: The path of the project file being created. The .user file - shares the same path (with an appropriate suffix). - version: The VisualStudioVersion object. - spec: The target dictionary containing the properties of the target. - Returns: - The MSVSUserFile object created. - """ - (domain, username) = _GetDomainAndUserName() - vcuser_filename = '.'.join([proj_path, domain, username, 'user']) - user_file = MSVSUserFile.Writer(vcuser_filename, version, - spec['target_name']) - return user_file - - def _GetMSVSConfigurationType(spec, build_file): """Returns the configuration type for this project. 
@@ -1103,6 +936,7 @@ def _GetMSVSConfigurationType(spec, build_file): 'shared_library': '2', # .dll 'loadable_module': '2', # .dll 'static_library': '4', # .lib + 'windows_driver': '5', # .sys 'none': '10', # Utility type }[spec['type']] except KeyError: @@ -1116,93 +950,6 @@ def _GetMSVSConfigurationType(spec, build_file): return config_type -def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config): - """Adds a configuration to the MSVS project. - - Many settings in a vcproj file are specific to a configuration. This - function the main part of the vcproj file that's configuration specific. - - Arguments: - p: The target project being generated. - spec: The target dictionary containing the properties of the target. - config_type: The configuration type, a number as defined by Microsoft. - config_name: The name of the configuration. - config: The dictionary that defines the special processing to be done - for this configuration. - """ - # Get the information for this configuration - include_dirs, midl_include_dirs, resource_include_dirs = \ - _GetIncludeDirs(config) - libraries = _GetLibraries(spec) - library_dirs = _GetLibraryDirs(config) - out_file, vc_tool, _ = _GetOutputFilePathAndTool(spec, msbuild=False) - defines = _GetDefines(config) - defines = [_EscapeCppDefineForMSVS(d) for d in defines] - disabled_warnings = _GetDisabledWarnings(config) - prebuild = config.get('msvs_prebuild') - postbuild = config.get('msvs_postbuild') - def_file = _GetModuleDefinition(spec) - precompiled_header = config.get('msvs_precompiled_header') - - # Prepare the list of tools as a dictionary. - tools = dict() - # Add in user specified msvs_settings. - msvs_settings = config.get('msvs_settings', {}) - MSVSSettings.ValidateMSVSSettings(msvs_settings) - - # Prevent default library inheritance from the environment. - _ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies', ['$(NOINHERIT)']) - - for tool in msvs_settings: - settings = config['msvs_settings'][tool] - for setting in settings: - _ToolAppend(tools, tool, setting, settings[setting]) - # Add the information to the appropriate tool - _ToolAppend(tools, 'VCCLCompilerTool', - 'AdditionalIncludeDirectories', include_dirs) - _ToolAppend(tools, 'VCMIDLTool', - 'AdditionalIncludeDirectories', midl_include_dirs) - _ToolAppend(tools, 'VCResourceCompilerTool', - 'AdditionalIncludeDirectories', resource_include_dirs) - # Add in libraries. - _ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies', libraries) - _ToolAppend(tools, 'VCLinkerTool', 'AdditionalLibraryDirectories', - library_dirs) - if out_file: - _ToolAppend(tools, vc_tool, 'OutputFile', out_file, only_if_unset=True) - # Add defines. - _ToolAppend(tools, 'VCCLCompilerTool', 'PreprocessorDefinitions', defines) - _ToolAppend(tools, 'VCResourceCompilerTool', 'PreprocessorDefinitions', - defines) - # Change program database directory to prevent collisions. - _ToolAppend(tools, 'VCCLCompilerTool', 'ProgramDataBaseFileName', - '$(IntDir)$(ProjectName)\\vc80.pdb', only_if_unset=True) - # Add disabled warnings. - _ToolAppend(tools, 'VCCLCompilerTool', - 'DisableSpecificWarnings', disabled_warnings) - # Add Pre-build. - _ToolAppend(tools, 'VCPreBuildEventTool', 'CommandLine', prebuild) - # Add Post-build. - _ToolAppend(tools, 'VCPostBuildEventTool', 'CommandLine', postbuild) - # Turn on precompiled headers if appropriate. 
- if precompiled_header: - precompiled_header = os.path.split(precompiled_header)[1] - _ToolAppend(tools, 'VCCLCompilerTool', 'UsePrecompiledHeader', '2') - _ToolAppend(tools, 'VCCLCompilerTool', - 'PrecompiledHeaderThrough', precompiled_header) - _ToolAppend(tools, 'VCCLCompilerTool', - 'ForcedIncludeFiles', precompiled_header) - # Loadable modules don't generate import libraries; - # tell dependent projects to not expect one. - if spec['type'] == 'loadable_module': - _ToolAppend(tools, 'VCLinkerTool', 'IgnoreImportLibrary', 'true') - # Set the module definition file if any. - if def_file: - _ToolAppend(tools, 'VCLinkerTool', 'ModuleDefinitionFile', def_file) - - _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name) - - def _GetIncludeDirs(config): """Returns the list of directories to be used for #include directives. @@ -1258,7 +1005,7 @@ def _GetLibraries(spec): found = OrderedSet() unique_libraries_list = [] for entry in reversed(libraries): - library = re.sub(r'^\-l', '', entry) + library = re.sub(r'^-l', '', entry) if not os.path.splitext(library)[1]: library += '.lib' if library not in found: @@ -1287,6 +1034,7 @@ def _GetOutputFilePathAndTool(spec, msbuild): 'executable': ('VCLinkerTool', 'Link', '$(OutDir)', '.exe'), 'shared_library': ('VCLinkerTool', 'Link', '$(OutDir)', '.dll'), 'loadable_module': ('VCLinkerTool', 'Link', '$(OutDir)', '.dll'), + 'windows_driver': ('VCLinkerTool', 'Link', '$(OutDir)', '.sys'), 'static_library': ('VCLibrarianTool', 'Lib', '$(OutDir)lib\\', '.lib'), } output_file_props = output_file_map.get(spec['type']) @@ -1349,7 +1097,8 @@ def _GetDisabledWarnings(config): def _GetModuleDefinition(spec): def_file = '' - if spec['type'] in ['shared_library', 'loadable_module', 'executable']: + if spec['type'] in ['shared_library', 'loadable_module', 'executable', + 'windows_driver']: def_files = [s for s in spec.get('sources', []) if s.endswith('.def')] if len(def_files) == 1: def_file = _FixPath(def_files[0]) @@ -1384,30 +1133,28 @@ def _ConvertToolsToExpectedForm(tools): else: settings_fixed[setting] = value # Add in this tool. - tool_list.append(MSVSProject.Tool(tool, settings_fixed)) + tool_list.append(MSVS.Tool(tool, settings_fixed)) return tool_list -def _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name): - """Add to the project file the configuration specified by config. +def _AddConfigurationToMSVS(p, tools, config, config_type, config_name): + """ + Add to the project file the configuration specified by config. Arguments: p: The target project being generated. - spec: the target project dict. tools: A dictionary of settings; the tool name is the key. - config: The dictionary that defines the special processing to be done - for this configuration. + config: The dictionary that defines the special processing to be done for this configuration. config_type: The configuration type, a number as defined by Microsoft. config_name: The name of the configuration. """ - attributes = _GetMSVSAttributes(spec, config, config_type) + attributes = _GetMSVSAttributes(config, config_type) # Add in this configuration. tool_list = _ConvertToolsToExpectedForm(tools) - p.AddConfig(_ConfigFullName(config_name, config), - attrs=attributes, tools=tool_list) + p.AddConfig(_ConfigFullName(config_name, config), attrs=attributes, tools=tool_list) -def _GetMSVSAttributes(spec, config, config_type): +def _GetMSVSAttributes(config, config_type): # Prepare configuration attributes. 
prepared_attrs = {} source_attrs = config.get('msvs_configuration_attributes', {}) @@ -1420,8 +1167,7 @@ def _GetMSVSAttributes(spec, config, config_type): prepared_attrs['InheritedPropertySheets'] = ';'.join(vsprops_dirs) # Set configuration type. prepared_attrs['ConfigurationType'] = config_type - output_dir = prepared_attrs.get('OutputDirectory', - '$(SolutionDir)$(ConfigurationName)') + output_dir = prepared_attrs.get('OutputDirectory', '$(SolutionDir)$(ConfigurationName)') prepared_attrs['OutputDirectory'] = _FixPath(output_dir) + '\\' if 'IntermediateDirectory' not in prepared_attrs: intermediate = '$(ConfigurationName)\\obj\\$(ProjectName)' @@ -1434,7 +1180,9 @@ def _GetMSVSAttributes(spec, config, config_type): def _AddNormalizedSources(sources_set, sources_array): - sources_set.update(_NormalizedSource(s) for s in sources_array) + sources_list = [_NormalizedSource(s) for s in sources_array] + sources_list = sorted(sources_list, key=lambda s: os.path.basename(s)) + sources_set.update(sources_list) def _PrepareListOfSources(spec, generator_flags, gyp_file): @@ -1473,19 +1221,16 @@ def _PrepareListOfSources(spec, generator_flags, gyp_file): # Add in 'copies' inputs and outputs. for cpy in spec.get('copies', []): _AddNormalizedSources(sources, cpy.get('files', [])) - return (sources, excluded_sources) + return sources, excluded_sources -def _AdjustSourcesAndConvertToFilterHierarchy( - spec, options, gyp_dir, sources, excluded_sources, list_excluded, version): +def _AdjustSourcesAndConvertToFilterHierarchy(spec, sources, excluded_sources, list_excluded, version): """Adjusts the list of sources and excluded sources. Also converts the sets to lists. Arguments: spec: The target dictionary containing the properties of the target. - options: Global generator options. - gyp_dir: The path to the gyp file being processed. sources: A set of sources to be included for this project. excluded_sources: A set of sources to be excluded for this project. version: A MSVSVersion object. @@ -1512,19 +1257,17 @@ def _AdjustSourcesAndConvertToFilterHierarchy( # Convert to folders and the right slashes. sources = [i.split('\\') for i in sources] - sources = _ConvertSourcesToFilterHierarchy(sources, excluded=fully_excluded, - list_excluded=list_excluded, - msvs_version=version) + sources = _ConvertSourcesToFilterHierarchy(sources, excluded=fully_excluded, list_excluded=list_excluded, msvs_version=version) # Prune filters with a single child to flatten ugly directory structures # such as ../../src/modules/module1 etc. - if version.UsesVcxproj(): - while all([isinstance(s, MSVSProject.Filter) for s in sources]) \ + if version.uses_vcxproj: + while all([isinstance(s, MSVS.Filter) for s in sources]) \ and len(set([s.name for s in sources])) == 1: assert all([len(s.contents) == 1 for s in sources]) sources = [s.contents[0] for s in sources] else: - while len(sources) == 1 and isinstance(sources[0], MSVSProject.Filter): + while len(sources) == 1 and isinstance(sources[0], MSVS.Filter): sources = sources[0].contents return sources, excluded_sources, excluded_idl @@ -1613,29 +1356,25 @@ def _HandlePreCompiledHeaders(p, sources, spec): if source: source = _FixPath(source) # UsePrecompiledHeader=1 for if using precompiled headers. 
- tool = MSVSProject.Tool('VCCLCompilerTool', - {'UsePrecompiledHeader': '1'}) - p.AddFileConfig(source, _ConfigFullName(config_name, config), - {}, tools=[tool]) + tool = MSVS.Tool('VCCLCompilerTool', {'UsePrecompiledHeader': '1'}) + p.AddFileConfig(source, _ConfigFullName(config_name, config), {}, tools=[tool]) basename, extension = os.path.splitext(source) if extension == '.c': extensions_excluded_from_precompile = ['.cc', '.cpp', '.cxx'] else: extensions_excluded_from_precompile = ['.c'] + def DisableForSourceTree(source_tree): - for source in source_tree: - if isinstance(source, MSVSProject.Filter): - DisableForSourceTree(source.contents) + for source2 in source_tree: + if isinstance(source2, MSVS.Filter): + DisableForSourceTree(source2.contents) else: - basename, extension = os.path.splitext(source) - if extension in extensions_excluded_from_precompile: - for config_name, config in spec['configurations'].items(): - tool = MSVSProject.Tool('VCCLCompilerTool', - {'UsePrecompiledHeader': '0', - 'ForcedIncludeFiles': '$(NOINHERIT)'}) - p.AddFileConfig(_FixPath(source), - _ConfigFullName(config_name, config), - {}, tools=[tool]) + basename2, extension2 = os.path.splitext(source2) + if extension2 in extensions_excluded_from_precompile: + for config_name2, config2 in spec['configurations'].items(): + tool2 = MSVS.Tool('VCCLCompilerTool', {'UsePrecompiledHeader': '0', 'ForcedIncludeFiles': '$(NOINHERIT)'}) + p.AddFileConfig(_FixPath(source2), _ConfigFullName(config_name2, config2), {}, tools=[tool2]) + # Do nothing if there was no precompiled source. if extensions_excluded_from_precompile: DisableForSourceTree(sources) @@ -1665,27 +1404,6 @@ def _AddActions(actions_to_add, spec, relative_path_of_gyp_file): command=cmd) -def _WriteMSVSUserFile(project_path, version, spec): - # Add run_as and test targets. - if 'run_as' in spec: - run_as = spec['run_as'] - action = run_as.get('action', []) - environment = run_as.get('environment', []) - working_directory = run_as.get('working_directory', '.') - elif int(spec.get('test', 0)): - action = ['$(TargetPath)', '--gtest_print_time'] - environment = [] - working_directory = '.' - else: - return # Nothing to add - # Write out the user file. 
- user_file = _CreateMSVSUserFile(project_path, version, spec) - for config_name, c_data in spec['configurations'].items(): - user_file.AddDebugSettings(_ConfigFullName(config_name, c_data), - action, environment, working_directory) - user_file.WriteIfChanged() - - def _AddCopies(actions_to_add, spec): copies = _GetCopies(spec) for inputs, outputs, cmd, description in copies: @@ -1705,14 +1423,17 @@ def _GetCopies(spec): src_bare = src[:-1] base_dir = posixpath.split(src_bare)[0] outer_dir = posixpath.split(src_bare)[1] - cmd = 'cd "%s" && xcopy /e /f /y "%s" "%s\\%s\\"' % ( - _FixPath(base_dir), outer_dir, _FixPath(dst), outer_dir) + fixed_dst = _FixPath(dst) + full_dst = '"%s\\%s\\"' % (fixed_dst, outer_dir) + cmd = 'mkdir %s 2>nul & cd "%s" && xcopy /e /f /y "%s" %s' % ( + full_dst, _FixPath(base_dir), outer_dir, full_dst) copies.append(([src], ['dummy_copies', dst], cmd, - 'Copying %s to %s' % (src, dst))) + 'Copying %s to %s' % (src, fixed_dst))) else: + fix_dst = _FixPath(cpy['destination']) cmd = 'mkdir "%s" 2>nul & set ERRORLEVEL=0 & copy /Y "%s" "%s"' % ( - _FixPath(cpy['destination']), _FixPath(src), _FixPath(dst)) - copies.append(([src], [dst], cmd, 'Copying %s to %s' % (src, dst))) + fix_dst, _FixPath(src), _FixPath(dst)) + copies.append(([src], [dst], cmd, 'Copying %s to %s' % (src, fix_dst))) return copies @@ -1738,9 +1459,9 @@ def _DictsToFolders(base_path, bucket, flat): if flat: children += folder_children else: - folder_children = MSVSNew.MSVSFolder(os.path.join(base_path, folder), - name='(' + folder + ')', - entries=folder_children) + folder_children = MSVSNew.MSVSFolderEntry(os.path.join(base_path, folder), + name='(' + folder + ')', + entries=folder_children) children.append(folder_children) else: children.append(contents) @@ -1753,8 +1474,8 @@ def _CollapseSingles(parent, node): # such projects up one level. if (type(node) == dict and len(node) == 1 and - list(node)[0] == parent + '.vcproj'): - return node[list(node)[0]] + next(iter(node)) == parent + '.vcproj'): + return node[next(iter(node))] if type(node) != dict: return node for child in node: @@ -1766,15 +1487,15 @@ def _GatherSolutionFolders(sln_projects, project_objects, flat): root = {} # Convert into a tree of dicts on path. for p in sln_projects: - gyp_file, target = gyp.common.ParseQualifiedTarget(p)[0:2] + gyp_file, target = common.ParseQualifiedTarget(p)[0:2] gyp_dir = os.path.dirname(gyp_file) path_dict = _GetPathDict(root, gyp_dir) path_dict[target + '.vcproj'] = project_objects[p] # Walk down from the top until we hit a folder that has more than one entry. # In practice, this strips the top-level "src/" dir from the hierarchy in # the solution. - while len(root) == 1 and type(root[list(root)[0]]) == dict: - root = root[list(root)[0]] + while len(root) == 1 and type(root[next(iter(root))]) == dict: + root = root[next(iter(root))] # Collapse singles. root = _CollapseSingles('', root) # Merge buckets until everything is a root entry. 
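As an illustration of the reworked directory-copy handling above: for a hypothetical 'copies' entry that copies the directory 'assets/images/' into '$(OutDir)/data', the new _GetCopies logic builds a single shell command that first creates the destination tree and then mirrors the directory with xcopy. A minimal standalone sketch (fix_path merely stands in for the generator's _FixPath helper; the paths are invented for illustration):

import posixpath

def fix_path(p):
  # Stand-in for _FixPath: convert to Windows-style separators.
  return p.replace('/', '\\')

src = 'assets/images/'   # hypothetical gyp 'copies' source (trailing slash = directory)
dst = '$(OutDir)/data'   # hypothetical destination
src_bare = src[:-1]
base_dir, outer_dir = posixpath.split(src_bare)
full_dst = '"%s\\%s\\"' % (fix_path(dst), outer_dir)
cmd = 'mkdir %s 2>nul & cd "%s" && xcopy /e /f /y "%s" %s' % (
    full_dst, fix_path(base_dir), outer_dir, full_dst)
print(cmd)
# mkdir "$(OutDir)\data\images\" 2>nul & cd "assets" && xcopy /e /f /y "images" "$(OutDir)\data\images\"

The leading mkdir ... 2>nul is what this patch adds: it pre-creates the destination directory so xcopy cannot fail on a missing target, while discarding the harmless error mkdir prints when the directory already exists.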
@@ -1788,14 +1509,13 @@ def _GetPathOfProject(qualified_target, spec, options, msvs_version): proj_filename = (spec['target_name'] + options.suffix + msvs_version.ProjectExtension()) - build_file = gyp.common.BuildFile(qualified_target) + build_file = common.BuildFile(qualified_target) proj_path = os.path.join(os.path.dirname(build_file), proj_filename) fix_prefix = None if options.generator_output: project_dir_path = os.path.dirname(os.path.abspath(proj_path)) proj_path = os.path.join(options.generator_output, proj_path) - fix_prefix = gyp.common.RelativePath(project_dir_path, - os.path.dirname(proj_path)) + fix_prefix = common.RelativePath(project_dir_path, os.path.dirname(proj_path)) return proj_path, fix_prefix @@ -1806,8 +1526,7 @@ def _GetPlatformOverridesOfProject(spec): for config_name, c in spec['configurations'].items(): config_fullname = _ConfigFullName(config_name, c) platform = c.get('msvs_target_platform', _ConfigPlatform(c)) - fixed_config_fullname = '%s|%s' % ( - _ConfigBaseName(config_name, _ConfigPlatform(c)), platform) + fixed_config_fullname = '%s|%s' % (_ConfigBaseName(config_name, _ConfigPlatform(c)), platform) config_platform_overrides[config_fullname] = fixed_config_fullname return config_platform_overrides @@ -1829,16 +1548,13 @@ def _CreateProjectObjects(target_list, target_dicts, options, msvs_version): for qualified_target in target_list: spec = target_dicts[qualified_target] if spec['toolset'] != 'target': - raise GypError( - 'Multiple toolsets not supported in msvs build (target %s)' % - qualified_target) - proj_path, fixpath_prefix = _GetPathOfProject(qualified_target, spec, - options, msvs_version) - guid = _GetGuidOfProject(proj_path, spec) + raise GypError('Multiple toolsets not supported in msvs build (target %s)' % qualified_target) + proj_path, fixpath_prefix = _GetPathOfProject(qualified_target, spec, options, msvs_version) + guid = _TryGetGuidOfProject(spec) overrides = _GetPlatformOverridesOfProject(spec) - build_file = gyp.common.BuildFile(qualified_target) + build_file = common.BuildFile(qualified_target) # Create object for this project. 
- obj = MSVSNew.MSVSProject( + obj = MSVSNew.MSVSProjectEntry( proj_path, name=spec['target_name'], guid=guid, @@ -1847,9 +1563,8 @@ def _CreateProjectObjects(target_list, target_dicts, options, msvs_version): config_platform_overrides=overrides, fixpath_prefix=fixpath_prefix) # Set project toolset if any (MS build only) - if msvs_version.UsesVcxproj(): - obj.set_msbuild_toolset( - _GetMsbuildToolsetOfProject(proj_path, spec, msvs_version)) + if msvs_version.uses_vcxproj: + obj.set_msbuild_toolset(_GetMsbuildToolsetOfProject(spec, msvs_version)) projects[qualified_target] = obj # Set all the dependencies, but not if we are using an external builder like # ninja @@ -1883,7 +1598,7 @@ def _InitNinjaFlavor(params, target_list, target_dicts): spec['msvs_external_builder'] = 'ninja' if not spec.get('msvs_external_builder_out_dir'): - gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target) + gyp_file, _, _ = common.ParseQualifiedTarget(qualified_target) gyp_dir = os.path.dirname(gyp_file) configuration = '$(Configuration)' if params.get('target_arch') == 'x64': @@ -1891,7 +1606,7 @@ def _InitNinjaFlavor(params, target_list, target_dicts): if params.get('target_arch') == 'arm64': configuration += '_arm64' spec['msvs_external_builder_out_dir'] = os.path.join( - gyp.common.RelativePath(params['options'].toplevel_dir, gyp_dir), + common.RelativePath(params['options'].toplevel_dir, gyp_dir), ninja_generator.ComputeOutputDir(params), configuration) if not spec.get('msvs_external_builder_build_cmd'): @@ -1917,13 +1632,13 @@ def CalculateVariables(default_variables, params): generator_flags = params.get('generator_flags', {}) # Select project file format version (if unset, default to auto detecting). - msvs_version = MSVSVersion.SelectVisualStudioVersion( - generator_flags.get('msvs_version', 'auto')) + msvs_version = generator_flags.get('msvs_version', 'auto') + msvs_version = MSVSVersion.SelectVisualStudioVersion(msvs_version) # Stash msvs_version for later (so we don't have to probe the system twice). params['msvs_version'] = msvs_version # Set a variable so conditions can be based on msvs_version. 
-  default_variables['MSVS_VERSION'] = msvs_version.ShortName()
+  default_variables['MSVS_VERSION'] = msvs_version.short_name
 
   # To determine processor word size on Windows, in addition to checking
   # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
@@ -1935,7 +1650,7 @@
   else:
     default_variables['MSVS_OS_BITS'] = 32
 
-  if gyp.common.GetFlavor(params) == 'ninja':
+  if common.GetFlavor(params) == 'ninja':
     default_variables['SHARED_INTERMEDIATE_DIR'] = '$(OutDir)gen'
 
 
@@ -1944,6 +1659,7 @@ def PerformBuild(data, configurations, params):
   msvs_version = params['msvs_version']
   devenv = os.path.join(msvs_version.path, 'Common7', 'IDE', 'devenv.com')
 
+  sln_path = ''
   for build_file, build_file_dict in data.items():
     (build_file_root, build_file_ext) = os.path.splitext(build_file)
     if build_file_ext != '.gyp':
@@ -1951,13 +1667,27 @@
       sln_path = build_file_root + options.suffix + '.sln'
       if options.generator_output:
         sln_path = os.path.join(options.generator_output, sln_path)
+  assert sln_path
   for config in configurations:
     arguments = [devenv, sln_path, '/Build', config]
     print('Building [%s]: %s' % (config, arguments))
-    rtn = subprocess.check_call(arguments)
+    subprocess.check_call(arguments)
+
+
+def CalculateGeneratorInputInfo(params):
+  if params.get('flavor') == 'ninja':
+    toplevel = params['options'].toplevel_dir
+    qualified_out_dir = os.path.normpath(os.path.join(
+      toplevel, ninja_generator.ComputeOutputDir(params),
+      'gypfiles-msvs-ninja'))
+
+    global generator_filelist_paths
+    generator_filelist_paths = {
+      'toplevel': toplevel,
+      'qualified_out_dir': qualified_out_dir,
+    }
+
 
 def GenerateOutput(target_list, target_dicts, data, params):
   """Generate .sln and .vcproj files.
@@ -1966,6 +1696,7 @@
     target_list: List of target pairs: 'base/base.gyp:base'.
     target_dicts: Dict of target properties keyed on target pair.
     data: Dictionary containing per .gyp data.
+    params: Dictionary of global generator parameters.
   """
   global fixpath_prefix
@@ -1978,12 +1709,11 @@
   generator_flags = params.get('generator_flags', {})
 
   # Optionally shard targets marked with 'msvs_shard': SHARD_COUNT.
-  (target_list, target_dicts) = MSVSUtil.ShardTargets(target_list, target_dicts)
+  (target_list, target_dicts) = MSVS.ShardTargets(target_list, target_dicts)
 
   # Optionally use the large PDB workaround for targets marked with
   # 'msvs_large_pdb': 1.
-  (target_list, target_dicts) = MSVSUtil.InsertLargePdbShims(
-      target_list, target_dicts, generator_default_variables)
+  (target_list, target_dicts) = MSVS.InsertLargePdbShims(target_list, target_dicts, generator_default_variables)
 
   # Optionally configure each spec to use ninja as the external builder.
   if params.get('flavor') == 'ninja':
@@ -1998,15 +1728,14 @@
   configs = list(configs)
 
   # Figure out all the projects that will be generated and their guids
-  project_objects = _CreateProjectObjects(target_list, target_dicts, options,
-                                          msvs_version)
+  project_objects = _CreateProjectObjects(target_list, target_dicts, options, msvs_version)
 
   # Generate each project.
missing_sources = [] for project in project_objects.values(): fixpath_prefix = project.fixpath_prefix - missing_sources.extend(_GenerateProject(project, options, msvs_version, - generator_flags)) + generated_project = _GenerateProject(project, options, msvs_version, generator_flags) + missing_sources.extend(generated_project) fixpath_prefix = None for build_file in data: @@ -2017,31 +1746,23 @@ def GenerateOutput(target_list, target_dicts, data, params): if options.generator_output: sln_path = os.path.join(options.generator_output, sln_path) # Get projects in the solution, and their dependents. - sln_projects = gyp.common.BuildFileTargets(target_list, build_file) - sln_projects += gyp.common.DeepDependencyTargets(target_dicts, sln_projects) + sln_projects = common.BuildFileTargets(target_list, build_file) + sln_projects += common.DeepDependencyTargets(target_dicts, sln_projects) # Create folder hierarchy. - root_entries = _GatherSolutionFolders( - sln_projects, project_objects, flat=msvs_version.FlatSolution()) + root_entries = _GatherSolutionFolders(sln_projects, project_objects, flat=msvs_version.flat_sln) # Create solution. - sln = MSVSNew.MSVSSolution(sln_path, - entries=root_entries, - variants=configs, - websiteProperties=False, - version=msvs_version) + sln = MSVSNew.MSVSSolution(sln_path, entries=root_entries, variants=configs, websiteProperties=False, version=msvs_version) sln.Write() if missing_sources: - error_message = "Missing input files:\n" + \ - '\n'.join(set(missing_sources)) + error_message = "Missing input files:\n" + '\n'.join(set(missing_sources)) if generator_flags.get('msvs_error_on_missing_sources', False): raise GypError(error_message) else: - print("Warning: " + error_message, file=sys.stdout) + print("Warning: " + error_message) -def _GenerateMSBuildFiltersFile(filters_path, source_files, - rule_dependencies, extension_to_rule_name, - platforms): +def _GenerateMSBuildFiltersFile(filters_path, source_files, rule_dependencies, extension_to_rule_name, platforms): """Generate the filters file. This file is used by Visual Studio to organize the presentation of source @@ -2058,13 +1779,12 @@ def _GenerateMSBuildFiltersFile(filters_path, source_files, extension_to_rule_name, platforms, filter_group, source_group) if filter_group: - content = ['Project', - {'ToolsVersion': '4.0', - 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003' - }, - ['ItemGroup'] + filter_group, - ['ItemGroup'] + source_group - ] + content = [ + 'Project', + {'ToolsVersion': '4.0', 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'}, + ['ItemGroup'] + filter_group, + ['ItemGroup'] + source_group + ] easy_xml.WriteXmlIfChanged(content, filters_path, pretty=True, win32=True) elif os.path.exists(filters_path): # We don't need this filter anymore. Delete the old filter file. @@ -2085,25 +1805,21 @@ def _AppendFiltersForMSBuild(parent_filter_name, sources, rule_dependencies, source_group: The list to which source entries will be appeneded. """ for source in sources: - if isinstance(source, MSVSProject.Filter): + if isinstance(source, MSVS.Filter): # We have a sub-filter. Create the name of that sub-filter. if not parent_filter_name: filter_name = source.name else: filter_name = '%s\\%s' % (parent_filter_name, source.name) # Add the filter to the group. 
- filter_group.append( - ['Filter', {'Include': filter_name}, - ['UniqueIdentifier', MSVSNew.MakeGuid(source.name)]]) + filter_group.append(['Filter', {'Include': filter_name}, ['UniqueIdentifier', MSVSNew.MSVSProjectEntry.MakeGuid(source.name)]]) # Recurse and add its dependents. _AppendFiltersForMSBuild(filter_name, source.contents, rule_dependencies, extension_to_rule_name, platforms, filter_group, source_group) else: # It's a source. Create a source entry. - _, element = _MapFileToMsBuildSourceType(source, rule_dependencies, - extension_to_rule_name, - platforms) + _, element = _MapFileToMsBuildSourceType(source, rule_dependencies, extension_to_rule_name, platforms) source_entry = [element, {'Include': source}] # Specify the filter it is part of, if any. if parent_filter_name: @@ -2111,8 +1827,7 @@ def _AppendFiltersForMSBuild(parent_filter_name, sources, rule_dependencies, source_group.append(source_entry) -def _MapFileToMsBuildSourceType(source, rule_dependencies, - extension_to_rule_name, platforms): +def _MapFileToMsBuildSourceType(source, rule_dependencies, extension_to_rule_name, platforms): """Returns the group and element type of the source file. Arguments: @@ -2126,7 +1841,7 @@ def _MapFileToMsBuildSourceType(source, rule_dependencies, if ext in extension_to_rule_name: group = 'rule' element = extension_to_rule_name[ext] - elif ext in ['.cc', '.cpp', '.c', '.cxx', '.mm']: + elif ext in ['.cc', '.cpp', '.c', '.cxx']: group = 'compile' element = 'ClCompile' elif ext in ['.h', '.hxx']: @@ -2135,12 +1850,12 @@ def _MapFileToMsBuildSourceType(source, rule_dependencies, elif ext == '.rc': group = 'resource' element = 'ResourceCompile' - elif ext == '.asm': + elif ext in ('.asm', '.s', '.S'): group = 'masm' element = 'MASM' for platform in platforms: if platform.lower() in ['arm', 'arm64']: - element = 'MARMASM' + element = 'MARMASM' elif ext == '.idl': group = 'midl' element = 'Midl' @@ -2150,7 +1865,7 @@ def _MapFileToMsBuildSourceType(source, rule_dependencies, else: group = 'none' element = 'None' - return (group, element) + return group, element def _GenerateRulesForMSBuild(output_dir, options, spec, @@ -2629,12 +2344,11 @@ def _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules): def _GetConfigurationAndPlatform(name, settings): configuration = name.rsplit('_', 1)[0] platform = settings.get('msvs_configuration_platform', 'Win32') - return (configuration, platform) + return configuration, platform def _GetConfigurationCondition(name, settings): - return (r"'$(Configuration)|$(Platform)'=='%s|%s'" % - _GetConfigurationAndPlatform(name, settings)) + return r"'$(Configuration)|$(Platform)'=='%s|%s'" % _GetConfigurationAndPlatform(name, settings) def _GetMSBuildProjectConfigurations(configurations): @@ -2642,70 +2356,64 @@ def _GetMSBuildProjectConfigurations(configurations): for (name, settings) in sorted(configurations.items()): configuration, platform = _GetConfigurationAndPlatform(name, settings) designation = '%s|%s' % (configuration, platform) - group.append( - ['ProjectConfiguration', {'Include': designation}, - ['Configuration', configuration], - ['Platform', platform]]) + group.append([ + 'ProjectConfiguration', + {'Include': designation}, + ['Configuration', configuration], + ['Platform', platform] + ]) return [group] -def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name): +def _GetMSBuildGlobalProperties(spec, version, guid, gyp_file_name): namespace = os.path.splitext(gyp_file_name)[0] properties = [ - ['PropertyGroup', {'Label': 'Globals'}, - ['ProjectGuid', guid], 
- ['Keyword', 'Win32Proj'], - ['RootNamespace', namespace], - ['IgnoreWarnCompileDuplicatedFilename', 'true'], - ] - ] + 'PropertyGroup', + {'Label': 'Globals'}, + ['ProjectGuid', guid], + ['Keyword', 'Win32Proj'], + ['RootNamespace', namespace], + ['IgnoreWarnCompileDuplicatedFilename', 'true'], + ] if os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or \ os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64': - properties[0].append(['PreferredToolArchitecture', 'x64']) + properties.append(['PreferredToolArchitecture', 'x64']) + + if spec.get('msvs_target_platform_version'): + target_platform_version = spec.get('msvs_target_platform_version') + properties.append(['WindowsTargetPlatformVersion', target_platform_version]) + if spec.get('msvs_target_platform_minversion'): + target_platform_minversion = spec.get('msvs_target_platform_minversion') + properties.append(['WindowsTargetPlatformMinVersion', target_platform_minversion]) + else: + properties.append(['WindowsTargetPlatformMinVersion', target_platform_version]) if spec.get('msvs_enable_winrt'): - properties[0].append(['DefaultLanguage', 'en-US']) - properties[0].append(['AppContainerApplication', 'true']) + properties.append(['DefaultLanguage', 'en-US']) + properties.append(['AppContainerApplication', 'true']) if spec.get('msvs_application_type_revision'): app_type_revision = spec.get('msvs_application_type_revision') - properties[0].append(['ApplicationTypeRevision', app_type_revision]) + properties.append(['ApplicationTypeRevision', app_type_revision]) else: - properties[0].append(['ApplicationTypeRevision', '8.1']) - - if spec.get('msvs_target_platform_version'): - target_platform_version = spec.get('msvs_target_platform_version') - properties[0].append(['WindowsTargetPlatformVersion', - target_platform_version]) - if spec.get('msvs_target_platform_minversion'): - target_platform_minversion = spec.get('msvs_target_platform_minversion') - properties[0].append(['WindowsTargetPlatformMinVersion', - target_platform_minversion]) - else: - properties[0].append(['WindowsTargetPlatformMinVersion', - target_platform_version]) + properties.append(['ApplicationTypeRevision', '8.1']) if spec.get('msvs_enable_winphone'): - properties[0].append(['ApplicationType', 'Windows Phone']) + properties.append(['ApplicationType', 'Windows Phone']) else: - properties[0].append(['ApplicationType', 'Windows Store']) - - platform_name = None - msvs_windows_target_platform_version = None - for configuration in spec['configurations'].values(): - platform_name = platform_name or _ConfigPlatform(configuration) - msvs_windows_target_platform_version = \ - msvs_windows_target_platform_version or \ - _ConfigWindowsTargetPlatformVersion(configuration) - if platform_name and msvs_windows_target_platform_version: - break + properties.append(['ApplicationType', 'Windows Store']) + + configuration = spec['configurations'][spec['default_configuration']] + platform_name = _ConfigPlatform(configuration) + possible_sdk_versions = configuration.get('msvs_windows_sdk_version', version.compatible_sdks) + msvs_windows_sdk_version = MSVSVersion.WindowsTargetPlatformVersion(possible_sdk_versions) + if msvs_windows_sdk_version: + properties.append(['WindowsTargetPlatformVersion', str(msvs_windows_sdk_version)]) if platform_name == 'ARM': - properties[0].append(['WindowsSDKDesktopARMSupport', 'true']) - if msvs_windows_target_platform_version: - properties[0].append(['WindowsTargetPlatformVersion', \ - str(msvs_windows_target_platform_version)]) + 
properties.append(['WindowsSDKDesktopARMSupport', 'true']) + + return [properties] - return properties def _GetMSBuildConfigurationDetails(spec, build_file): properties = {} @@ -2713,12 +2421,14 @@ def _GetMSBuildConfigurationDetails(spec, build_file): msbuild_attributes = _GetMSBuildAttributes(spec, settings, build_file) condition = _GetConfigurationCondition(name, settings) character_set = msbuild_attributes.get('CharacterSet') - _AddConditionalProperty(properties, condition, 'ConfigurationType', - msbuild_attributes['ConfigurationType']) + config_type = msbuild_attributes.get('ConfigurationType') + _AddConditionalProperty(properties, condition, 'ConfigurationType', config_type) + if config_type == 'Driver': + _AddConditionalProperty(properties, condition, 'DriverType', 'WDM') + _AddConditionalProperty(properties, condition, 'TargetVersion', _ConfigTargetVersion(settings)) if character_set: - if 'msvs_enable_winrt' not in spec : - _AddConditionalProperty(properties, condition, 'CharacterSet', - character_set) + if 'msvs_enable_winrt' not in spec: + _AddConditionalProperty(properties, condition, 'CharacterSet', character_set) return _GetMSBuildPropertyGroup(spec, 'Configuration', properties) @@ -2780,7 +2490,7 @@ def _GetMSBuildPropertySheets(configurations): def _ConvertMSVSBuildAttributes(spec, config, build_file): config_type = _GetMSVSConfigurationType(spec, build_file) - msvs_attributes = _GetMSVSAttributes(spec, config, config_type) + msvs_attributes = _GetMSVSAttributes(config, config_type) msbuild_attributes = {} for a in msvs_attributes: if a in ['IntermediateDirectory', 'OutputDirectory']: @@ -2813,6 +2523,7 @@ def _ConvertMSVSConfigurationType(config_type): '1': 'Application', '2': 'DynamicLibrary', '4': 'StaticLibrary', + '5': 'Driver', '10': 'Utility' }[config_type] return config_type @@ -2855,6 +2566,7 @@ def _GetMSBuildAttributes(spec, config, build_file): 'executable': 'Link', 'shared_library': 'Link', 'loadable_module': 'Link', + 'windows_driver': 'Link', 'static_library': 'Lib', } msbuild_tool = msbuild_tool_map.get(spec['type']) @@ -2891,30 +2603,24 @@ def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file): condition = _GetConfigurationCondition(name, configuration) attributes = _GetMSBuildAttributes(spec, configuration, build_file) msbuild_settings = configuration['finalized_msbuild_settings'] - _AddConditionalProperty(properties, condition, 'IntDir', - attributes['IntermediateDirectory']) - _AddConditionalProperty(properties, condition, 'OutDir', - attributes['OutputDirectory']) - _AddConditionalProperty(properties, condition, 'TargetName', - attributes['TargetName']) + _AddConditionalProperty(properties, condition, 'IntDir', attributes['IntermediateDirectory']) + _AddConditionalProperty(properties, condition, 'OutDir', attributes['OutputDirectory']) + _AddConditionalProperty(properties, condition, 'TargetName', attributes['TargetName']) if 'TargetExt' in attributes: - _AddConditionalProperty(properties, condition, 'TargetExt', - attributes['TargetExt']) + _AddConditionalProperty(properties, condition, 'TargetExt', attributes['TargetExt']) if attributes.get('TargetPath'): - _AddConditionalProperty(properties, condition, 'TargetPath', - attributes['TargetPath']) + _AddConditionalProperty(properties, condition, 'TargetPath', attributes['TargetPath']) if attributes.get('TargetExt'): - _AddConditionalProperty(properties, condition, 'TargetExt', - attributes['TargetExt']) + _AddConditionalProperty(properties, condition, 'TargetExt', 
attributes['TargetExt']) if new_paths: - _AddConditionalProperty(properties, condition, 'ExecutablePath', - new_paths) + _AddConditionalProperty(properties, condition, 'ExecutablePath', new_paths) tool_settings = msbuild_settings.get('', {}) - for name, value in sorted(tool_settings.items()): - formatted_value = _GetValueFormattedForMSBuild('', name, value) - _AddConditionalProperty(properties, condition, name, formatted_value) + for name2, value in sorted(tool_settings.items()): + formatted_value = _GetValueFormattedForMSBuild('', name2, value) + _AddConditionalProperty(properties, condition, name2, formatted_value) + return _GetMSBuildPropertyGroup(spec, None, properties) @@ -2952,16 +2658,12 @@ def _GetMSBuildPropertyGroup(spec, label, properties): property. The value is itself a dictionary; its key is the value and the value a list of condition for which this value is true. """ - group = ['PropertyGroup'] - if label: - group.append({'Label': label}) - num_configurations = len(spec['configurations']) def GetEdges(node): # Use a definition of edges such that user_of_variable -> used_varible. # This happens to be easier in this case, since a variable's # definition contains all variables it references in a single string. edges = set() - for value in sorted(properties[node].keys()): + for val in sorted(properties[node].keys()): # Add to edges all $(...) references to variables. # # Variable references that refer to names not in properties are excluded @@ -2970,11 +2672,14 @@ def GetEdges(node): # # Self references are ignored. Self reference is used in a few places to # append to the default value. I.e. PATH=$(PATH);other_path - edges.update(set([v for v in MSVS_VARIABLE_REFERENCE.findall(value) - if v in properties and v != node])) + edges.update(set([v for v in MSVS_VARIABLE_REFERENCE.findall(val) if v in properties and v != node])) return edges - properties_ordered = gyp.common.TopologicallySorted( - properties.keys(), GetEdges) + + group = ['PropertyGroup'] + if label: + group.append({'Label': label}) + num_configurations = len(spec['configurations']) + properties_ordered = common.TopologicallySorted(properties.keys(), GetEdges) # Walk properties in the reverse of a topological sort on # user_of_variable -> used_variable as this ensures variables are # defined before they are used. @@ -2992,24 +2697,21 @@ def GetEdges(node): return [group] -def _GetMSBuildToolSettingsSections(spec, configurations): +def _GetMSBuildToolSettingsSections(configurations): groups = [] for (name, configuration) in sorted(configurations.items()): msbuild_settings = configuration['finalized_msbuild_settings'] - group = ['ItemDefinitionGroup', - {'Condition': _GetConfigurationCondition(name, configuration)} - ] + group = ['ItemDefinitionGroup', {'Condition': _GetConfigurationCondition(name, configuration)}] for tool_name, tool_settings in sorted(msbuild_settings.items()): # Skip the tool named '' which is a holder of global settings handled # by _GetMSBuildConfigurationGlobalProperties. 
- if tool_name: - if tool_settings: - tool = [tool_name] - for name, value in sorted(tool_settings.items()): - formatted_value = _GetValueFormattedForMSBuild(tool_name, name, - value) - tool.append([name, formatted_value]) - group.append(tool) + if not tool_name or not tool_settings: + continue + tool = [tool_name] + for name2, value in sorted(tool_settings.items()): + formatted_value = _GetValueFormattedForMSBuild(tool_name, name2, value) + tool.append([name2, formatted_value]) + group.append(tool) groups.append(group) return groups @@ -3023,8 +2725,7 @@ def _FinalizeMSBuildSettings(spec, configuration): converted = True msvs_settings = configuration.get('msvs_settings', {}) msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings) - include_dirs, midl_include_dirs, resource_include_dirs = \ - _GetIncludeDirs(configuration) + include_dirs, midl_include_dirs, resource_include_dirs = _GetIncludeDirs(configuration) libraries = _GetLibraries(spec) library_dirs = _GetLibraryDirs(configuration) out_file, _, msbuild_tool = _GetOutputFilePathAndTool(spec, msbuild=True) @@ -3039,7 +2740,7 @@ def _FinalizeMSBuildSettings(spec, configuration): value = configuration.get(ignored_setting) if value: print('Warning: The automatic conversion to MSBuild does not handle ' - '%s. Ignoring setting of %s' % (ignored_setting, str(value))) + '%s. Ignoring setting of %s' % (ignored_setting, str(value))) defines = [_EscapeCppDefineForMSBuild(d) for d in defines] disabled_warnings = _GetDisabledWarnings(configuration) @@ -3150,7 +2851,7 @@ def _VerifySourcesExist(sources, root_dir): """ missing_sources = [] for source in sources: - if isinstance(source, MSVSProject.Filter): + if isinstance(source, MSVS.Filter): missing_sources.extend(_VerifySourcesExist(source.contents, root_dir)) else: if '$' not in source: @@ -3187,7 +2888,7 @@ def _AddSources2(spec, sources, exclusions, grouped_sources, list_excluded): extensions_excluded_from_precompile = [] for source in sources: - if isinstance(source, MSVSProject.Filter): + if isinstance(source, MSVS.Filter): _AddSources2(spec, source.contents, exclusions, grouped_sources, rule_dependencies, extension_to_rule_name, sources_handled_by_action, @@ -3246,7 +2947,7 @@ def _GetMSBuildProjectReferences(project): for dependency in project.dependencies: guid = dependency.guid project_dir = os.path.split(project.path)[0] - relative_path = gyp.common.RelativePath(dependency.path, project_dir) + relative_path = common.RelativePath(dependency.path, project_dir) project_ref = ['ProjectReference', {'Include': relative_path}, ['Project', guid], @@ -3269,16 +2970,14 @@ def _GenerateMSBuildProject(project, options, version, generator_flags): spec = project.spec configurations = spec['configurations'] project_dir, project_file_name = os.path.split(project.path) - gyp.common.EnsureDirExists(project.path) + common.EnsureDirExists(project.path) # Prepare list of sources and excluded sources. - gyp_path = _NormalizedSource(project.build_file) - relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir) - + # gyp_path = _NormalizedSource(project.build_file) + # relative_path_of_gyp_file = common.RelativePath(gyp_path, project_dir) gyp_file = os.path.split(project.build_file)[1] - sources, excluded_sources = _PrepareListOfSources(spec, generator_flags, - gyp_file) + sources, excluded_sources = _PrepareListOfSources(spec, generator_flags, gyp_file) # Add rules. 
- actions_to_add = {} + actions_to_add = OrderedDict() props_files_of_rules = set() targets_files_of_rules = set() rule_dependencies = set() @@ -3296,11 +2995,7 @@ def _GenerateMSBuildProject(project, options, version, generator_flags): rules = spec.get('rules', []) _AdjustSourcesForRules(rules, sources, excluded_sources, True) - sources, excluded_sources, excluded_idl = ( - _AdjustSourcesAndConvertToFilterHierarchy(spec, options, - project_dir, sources, - excluded_sources, - list_excluded, version)) + sources, excluded_sources, excluded_idl = _AdjustSourcesAndConvertToFilterHierarchy(spec, sources, excluded_sources, list_excluded, version) # Don't add actions if we are using an external builder like ninja. if not spec.get('msvs_external_builder'): @@ -3309,16 +3004,12 @@ def _GenerateMSBuildProject(project, options, version, generator_flags): # NOTE: this stanza must appear after all actions have been decided. # Don't excluded sources with actions attached, or they won't run. - excluded_sources = _FilterActionsFromExcluded( - excluded_sources, actions_to_add) + excluded_sources = _FilterActionsFromExcluded(excluded_sources, actions_to_add) exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl) - actions_spec, sources_handled_by_action = _GenerateActionsForMSBuild( - spec, actions_to_add) + actions_spec, sources_handled_by_action = _GenerateActionsForMSBuild(actions_to_add) - _GenerateMSBuildFiltersFile(project.path + '.filters', sources, - rule_dependencies, - extension_to_rule_name, _GetUniquePlatforms(spec)) + _GenerateMSBuildFiltersFile(project.path + '.filters', sources, rule_dependencies,extension_to_rule_name, _GetUniquePlatforms(spec)) missing_sources = _VerifySourcesExist(sources, project_dir) for configuration in configurations.values(): @@ -3326,59 +3017,44 @@ def _GenerateMSBuildProject(project, options, version, generator_flags): # Add attributes to root element - import_default_section = [ - ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.Default.props'}]] - import_cpp_props_section = [ - ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.props'}]] - import_cpp_targets_section = [ - ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.targets'}]] - import_masm_props_section = [ - ['Import', - {'Project': r'$(VCTargetsPath)\BuildCustomizations\masm.props'}]] - import_masm_targets_section = [ - ['Import', - {'Project': r'$(VCTargetsPath)\BuildCustomizations\masm.targets'}]] - import_marmasm_props_section = [ - ['Import', - {'Project': r'$(VCTargetsPath)\BuildCustomizations\marmasm.props'}]] - import_marmasm_targets_section = [ - ['Import', - {'Project': r'$(VCTargetsPath)\BuildCustomizations\marmasm.targets'}]] + import_default_section = [['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.Default.props'}]] + import_cpp_props_section = [['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.props'}]] + import_cpp_targets_section = [['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.targets'}]] + import_masm_props_section = [['Import', {'Project': r'$(VCTargetsPath)\BuildCustomizations\masm.props'}]] + import_masm_targets_section = [['Import', {'Project': r'$(VCTargetsPath)\BuildCustomizations\masm.targets'}]] + import_marmasm_props_section = [['Import', {'Project': r'$(VCTargetsPath)\BuildCustomizations\marmasm.props'}]] + import_marmasm_targets_section = [['Import', {'Project': r'$(VCTargetsPath)\BuildCustomizations\marmasm.targets'}]] macro_section = [['PropertyGroup', {'Label': 'UserMacros'}]] content = [ - 'Project', - 
{'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003', - 'ToolsVersion': version.ProjectVersion(), - 'DefaultTargets': 'Build' - }] + 'Project', { + 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003', + 'ToolsVersion': version.project_version, + 'DefaultTargets': 'Build', + }, + ] content += _GetMSBuildProjectConfigurations(configurations) - content += _GetMSBuildGlobalProperties(spec, project.guid, project_file_name) + content += _GetMSBuildGlobalProperties(spec, version, project.guid, project_file_name) content += import_default_section content += _GetMSBuildConfigurationDetails(spec, project.build_file) if spec.get('msvs_enable_winphone'): - content += _GetMSBuildLocalProperties('v120_wp81') + content += _GetMSBuildLocalProperties('v120_wp81') else: - content += _GetMSBuildLocalProperties(project.msbuild_toolset) + content += _GetMSBuildLocalProperties(project.msbuild_toolset) content += import_cpp_props_section content += import_masm_props_section - if spec.get('msvs_enable_marmasm'): - content += import_marmasm_props_section + content += import_marmasm_props_section content += _GetMSBuildExtensions(props_files_of_rules) content += _GetMSBuildPropertySheets(configurations) content += macro_section - content += _GetMSBuildConfigurationGlobalProperties(spec, configurations, - project.build_file) - content += _GetMSBuildToolSettingsSections(spec, configurations) - content += _GetMSBuildSources( - spec, sources, exclusions, rule_dependencies, extension_to_rule_name, - actions_spec, sources_handled_by_action, list_excluded) + content += _GetMSBuildConfigurationGlobalProperties(spec, configurations, project.build_file) + content += _GetMSBuildToolSettingsSections(configurations) + content += _GetMSBuildSources(spec, sources, exclusions, rule_dependencies, extension_to_rule_name, actions_spec, sources_handled_by_action, list_excluded) content += _GetMSBuildProjectReferences(project) content += import_cpp_targets_section content += import_masm_targets_section - if spec.get('msvs_enable_marmasm'): - content += import_marmasm_targets_section + content += import_marmasm_targets_section content += _GetMSBuildExtensionTargets(targets_files_of_rules) if spec.get('msvs_external_builder'): @@ -3393,7 +3069,8 @@ def _GenerateMSBuildProject(project, options, version, generator_flags): def _GetMSBuildExternalBuilderTargets(spec): - """Return a list of MSBuild targets for external builders. + """ + Return a list of MSBuild targets for external builders. The "Build" and "Clean" targets are always generated. If the spec contains 'msvs_external_builder_clcompile_cmd', then the "ClCompile" target will also @@ -3404,27 +3081,31 @@ def _GetMSBuildExternalBuilderTargets(spec): Returns: List of MSBuild 'Target' specs. 
""" - build_cmd = _BuildCommandLineForRuleRaw( - spec, spec['msvs_external_builder_build_cmd'], - False, False, False, False) - build_target = ['Target', {'Name': 'Build'}] - build_target.append(['Exec', {'Command': build_cmd}]) - - clean_cmd = _BuildCommandLineForRuleRaw( - spec, spec['msvs_external_builder_clean_cmd'], - False, False, False, False) - clean_target = ['Target', {'Name': 'Clean'}] - clean_target.append(['Exec', {'Command': clean_cmd}]) - - targets = [build_target, clean_target] - - if spec.get('msvs_external_builder_clcompile_cmd'): - clcompile_cmd = _BuildCommandLineForRuleRaw( - spec, spec['msvs_external_builder_clcompile_cmd'], - False, False, False, False) - clcompile_target = ['Target', {'Name': 'ClCompile'}] - clcompile_target.append(['Exec', {'Command': clcompile_cmd}]) - targets.append(clcompile_target) + build_cmd = _BuildCommandLineForRuleRaw(spec, spec['msvs_external_builder_build_cmd'], False, False, False, False) + clean_cmd = _BuildCommandLineForRuleRaw(spec, spec['msvs_external_builder_clean_cmd'], False, False, False, False) + targets = [ + [ + 'Target', + {'Name': 'Build'}, + ['Exec', {'Command': build_cmd}] + ], + [ + 'Target', + {'Name': 'Clean'}, + ['Exec', {'Command': clean_cmd}] + ] + ] + + compile_cmd_txt = spec.get('msvs_external_builder_clcompile_cmd') + if compile_cmd_txt: + compile_cmd = _BuildCommandLineForRuleRaw(spec, compile_cmd_txt, False, False, False, False) + targets.append( + [ + 'Target', + {'Name': 'ClCompile'}, + ['Exec', {'Command': compile_cmd}], + ] + ) return targets @@ -3443,11 +3124,11 @@ def _GetMSBuildExtensionTargets(targets_files_of_rules): return [targets_node] -def _GenerateActionsForMSBuild(spec, actions_to_add): - """Add actions accumulated into an actions_to_add, merging as needed. +def _GenerateActionsForMSBuild(actions_to_add): + """ + Add actions accumulated into an actions_to_add, merging as needed. Arguments: - spec: the target project dict actions_to_add: dictionary keyed on input name, which maps to a list of dicts describing the actions attached to that input file. @@ -3480,27 +3161,17 @@ def _GenerateActionsForMSBuild(spec, actions_to_add): # get too long. See also _AddActions: cygwin's setup_env mustn't be called # for every invocation or the command that sets the PATH will grow too # long. 
- command = '\r\n'.join([c + '\r\nif %errorlevel% neq 0 exit /b %errorlevel%' - for c in commands]) - _AddMSBuildAction(spec, - primary_input, - inputs, - outputs, - command, - description, - sources_handled_by_action, - actions_spec) + command = '\r\n'.join([c + '\r\nif %errorlevel% neq 0 exit /b %errorlevel%' for c in commands]) + _AddMSBuildAction(primary_input, inputs, outputs, command, description, sources_handled_by_action, actions_spec) return actions_spec, sources_handled_by_action -def _AddMSBuildAction(spec, primary_input, inputs, outputs, cmd, description, - sources_handled_by_action, actions_spec): +def _AddMSBuildAction(primary_input, inputs, outputs, cmd, description, sources_handled_by_action, actions_spec): command = MSVSSettings.ConvertVCMacrosToMSBuild(cmd) primary_input = _FixPath(primary_input) inputs_array = _FixPaths(inputs) outputs_array = _FixPaths(outputs) - additional_inputs = ';'.join([i for i in inputs_array - if i != primary_input]) + additional_inputs = ';'.join([i for i in inputs_array if i != primary_input]) outputs = ';'.join(outputs_array) sources_handled_by_action.add(primary_input) action_spec = ['CustomBuild', {'Include': primary_input}] diff --git a/gyp/gyp/generator/ninja.py b/gyp/gyp/generator/ninja.py new file mode 100644 index 0000000000..255ede256e --- /dev/null +++ b/gyp/gyp/generator/ninja.py @@ -0,0 +1,806 @@ +# Copyright (c) 2013 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +from __future__ import print_function + +import codecs +import hashlib +import os.path +import re +import subprocess +import sys +from collections import OrderedDict + +import gyp +import gyp.common +import gyp.msvs_emulation +import gyp.xcode_emulation +import gyp.MSVS as MSVS +from gyp.common import GetEnvironFallback +from gyp.lib import ninja_syntax + +try: + # noinspection PyCompatibility + from cStringIO import StringIO +except ImportError: + from io import StringIO + + +generator_default_variables = { + 'EXECUTABLE_PREFIX': '', + 'EXECUTABLE_SUFFIX': '', + 'STATIC_LIB_PREFIX': 'lib', + 'STATIC_LIB_SUFFIX': '.a', + 'SHARED_LIB_PREFIX': 'lib', + + # Gyp expects the following variables to be expandable by the build + # system to the appropriate locations. Ninja prefers paths to be + # known at gyp time. To resolve this, introduce special + # variables starting with $! and $| (which begin with a $ so gyp knows it + # should be treated specially, but is otherwise an invalid + # ninja/shell variable) that are passed to gyp here but expanded + # before writing out into the target .ninja files; see NinjaWriter._ExpandSpecial. + # $! is used for variables that represent a path and that can only appear at + # the start of a string, while $| is used for variables that can appear + # anywhere in a string. + 'INTERMEDIATE_DIR': '$!INTERMEDIATE_DIR', + 'SHARED_INTERMEDIATE_DIR': '$!PRODUCT_DIR/gen', + 'PRODUCT_DIR': '$!PRODUCT_DIR', + 'CONFIGURATION_NAME': '$|CONFIGURATION_NAME', + + # Special variables that may be used by gyp 'rule' targets. + # We generate definitions for these variables on the fly when processing a + # rule. + 'RULE_INPUT_ROOT': '${root}', + 'RULE_INPUT_DIRNAME': '${dirname}', + 'RULE_INPUT_PATH': '${source}', + 'RULE_INPUT_EXT': '${ext}', + 'RULE_INPUT_NAME': '${name}', +} + +# Placates pylint. 
+generator_additional_non_configuration_keys = [] +generator_additional_path_sections = [] +generator_extra_sources_for_rules = [] +generator_filelist_paths = None + +generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested() + + +def ComputeOutputDir(params): + """Returns the path from the toplevel_dir to the build output directory.""" + # generator_dir: relative path from pwd to where make puts build files. + # Makes migrating from make to ninja easier, ninja doesn't put anything here. + generator_dir = os.path.relpath(params['options'].generator_output or '.') + + # output_dir: relative path from generator_dir to the build directory. + output_dir = params.get('generator_flags', {}).get('output_dir', 'out') + + # Relative path from source root to our output files. e.g. "out" + return os.path.normpath(os.path.join(generator_dir, output_dir)) + +# module level API +def CalculateVariables(default_variables, params): + """Calculate additional variables for use in the build (called by gyp).""" + global generator_additional_non_configuration_keys + global generator_additional_path_sections + flavor = gyp.common.GetFlavor(params) + if flavor == 'mac': + default_variables.setdefault('OS', 'mac') + default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib') + default_variables.setdefault('SHARED_LIB_DIR', generator_default_variables['PRODUCT_DIR']) + default_variables.setdefault('LIB_DIR', generator_default_variables['PRODUCT_DIR']) + + # Copy additional generator configuration data from Xcode, which is shared + # by the Mac Ninja generator. + import gyp.generator.xcode as xcode_generator + generator_additional_non_configuration_keys = getattr(xcode_generator, 'generator_additional_non_configuration_keys', []) + generator_additional_path_sections = getattr(xcode_generator, 'generator_additional_path_sections', []) + global generator_extra_sources_for_rules + generator_extra_sources_for_rules = getattr(xcode_generator, 'generator_extra_sources_for_rules', []) + elif flavor == 'win': + exts = MSVS.TARGET_TYPE_EXT + default_variables.setdefault('OS', 'win') + default_variables['EXECUTABLE_SUFFIX'] = '.' + exts['executable'] + default_variables['STATIC_LIB_PREFIX'] = '' + default_variables['STATIC_LIB_SUFFIX'] = '.' + exts['static_library'] + default_variables['SHARED_LIB_PREFIX'] = '' + default_variables['SHARED_LIB_SUFFIX'] = '.' + exts['shared_library'] + + # Copy additional generator configuration data from VS, which is shared + # by the Windows Ninja generator. + import gyp.generator.msvs as msvs_generator + generator_additional_non_configuration_keys = getattr(msvs_generator, 'generator_additional_non_configuration_keys', []) + generator_additional_path_sections = getattr(msvs_generator, 'generator_additional_path_sections', []) + + gyp.msvs_emulation.CalculateCommonVariables(default_variables, params) + else: + operating_system = flavor + if flavor == 'android': + operating_system = 'linux' # Keep this legacy behavior for now. + default_variables.setdefault('OS', operating_system) + default_variables.setdefault('SHARED_LIB_SUFFIX', '.so') + default_variables.setdefault('SHARED_LIB_DIR', os.path.join('$!PRODUCT_DIR', 'lib')) + default_variables.setdefault('LIB_DIR', os.path.join('$!PRODUCT_DIR', 'obj')) + + +# module level API +def CalculateGeneratorInputInfo(params): + """Called by __init__ to initialize generator values based on params.""" + # E.g. 
"out/gypfiles" + toplevel = params['options'].toplevel_dir + qualified_out_dir = os.path.normpath(os.path.join(toplevel, ComputeOutputDir(params), 'gypfiles')) + + global generator_filelist_paths + generator_filelist_paths = { + 'toplevel': toplevel, + 'qualified_out_dir': qualified_out_dir, + } + + +def OpenOutput(path): + """Open |path| for writing, creating directories if necessary.""" + gyp.common.EnsureDirExists(path) + return codecs.open(path, mode='w', encoding='utf-8') + + +def CommandWithWrapper(cmd, wrappers, prog): + wrapper = wrappers.get(cmd, '') + if wrapper: + return wrapper + ' ' + prog + return prog + + +def GetDefaultConcurrentLinks(): + """Returns a best-guess for a number of concurrent links.""" + pool_size = int(os.environ.get('GYP_LINK_CONCURRENCY', 0)) + if pool_size: + return pool_size + + if sys.platform in ('win32', 'cygwin'): + import ctypes + + class MEMORYSTATUSEX(ctypes.Structure): + _fields_ = [ + ("dwLength", ctypes.c_ulong), + ("dwMemoryLoad", ctypes.c_ulong), + ("ullTotalPhys", ctypes.c_ulonglong), + ("ullAvailPhys", ctypes.c_ulonglong), + ("ullTotalPageFile", ctypes.c_ulonglong), + ("ullAvailPageFile", ctypes.c_ulonglong), + ("ullTotalVirtual", ctypes.c_ulonglong), + ("ullAvailVirtual", ctypes.c_ulonglong), + ("sullAvailExtendedVirtual", ctypes.c_ulonglong), + ] + + stat = MEMORYSTATUSEX() + stat.dwLength = ctypes.sizeof(stat) + ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat)) + + # VS 2015 uses 20% more working set than VS 2013 and can consume all RAM + # on a 64 GB machine. + mem_limit = max(1, stat.ullTotalPhys // (5 * (2 ** 30))) # total / 5GB + hard_cap = max(1, int(os.environ.get('GYP_LINK_CONCURRENCY_MAX', 2 ** 32))) + return min(mem_limit, hard_cap) + elif sys.platform.startswith('linux'): + if os.path.exists("/proc/meminfo"): + with open("/proc/meminfo") as meminfo: + memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB') + for line in meminfo: + match = memtotal_re.match(line) + if not match: + continue + # Allow 8Gb per link on Linux because Gold is quite memory hungry + return max(1, int(match.group(1)) // (8 * (2 ** 20))) + return 1 + elif sys.platform == 'darwin': + try: + avail_bytes = int(subprocess.check_output(['sysctl', '-n', 'hw.memsize'])) + # A static library debug build of Chromium's unit_tests takes ~2.7GB, so + # 4GB per ld process allows for some more bloat. + return max(1, avail_bytes // (4 * (2 ** 30))) # total / 4GB + except ValueError: + return 1 + else: + # TODO(scottmg): Implement this for other platforms. + return 1 + + +def GetWinLinkRuleNameSuffix(embed_manifest): + """ + Returns the suffix used to select an appropriate linking rule depending on whether the manifest embedding is enabled. 
+  """
+  return '_embed' if embed_manifest else ''
+
+
+def _AddWinLinkRules(master_ninja, embed_manifest):
+  """Adds link rules for Windows platform to |master_ninja|."""
+
+  def FullLinkCommand(ldcmd, out, binary_type):
+    resource_name = {
+      'exe': '1',
+      'dll': '2',
+    }[binary_type]
+    return '%(python)s gyp-win-tool link-with-manifests $arch %(embed)s %(out)s "%(ldcmd)s" %(resname)s $mt $rc "$intermediatemanifest" $manifests' % {
+      'python': sys.executable,
+      'out': out,
+      'ldcmd': ldcmd,
+      'resname': resource_name,
+      'embed': embed_manifest }
+  rule_name_suffix = GetWinLinkRuleNameSuffix(embed_manifest)
+  use_separate_mspdbsrv = (int(os.environ.get('GYP_USE_SEPARATE_MSPDBSRV', '0')) != 0)
+  dlldesc = 'LINK%s(DLL) $binary' % rule_name_suffix.upper()
+  dllcmd = ('%s gyp-win-tool link-wrapper $arch %s $ld /nologo $implibflag /DLL /OUT:$binary @$binary.rsp' % (sys.executable, use_separate_mspdbsrv))
+  dllcmd = FullLinkCommand(dllcmd, '$binary', 'dll')
+  master_ninja.rule('solink' + rule_name_suffix,
+                    description=dlldesc, command=dllcmd,
+                    rspfile='$binary.rsp',
+                    rspfile_content='$libs $in_newline $ldflags',
+                    restat=True,
+                    pool='link_pool')
+  master_ninja.rule('solink_module' + rule_name_suffix,
+                    description=dlldesc, command=dllcmd,
+                    rspfile='$binary.rsp',
+                    rspfile_content='$libs $in_newline $ldflags',
+                    restat=True,
+                    pool='link_pool')
+  # Note that ldflags goes at the end so that it has the option of
+  # overriding default settings earlier in the command line.
+  exe_cmd = ('%s gyp-win-tool link-wrapper $arch %s $ld /nologo /OUT:$binary @$binary.rsp' % (sys.executable, use_separate_mspdbsrv))
+  exe_cmd = FullLinkCommand(exe_cmd, '$binary', 'exe')
+  master_ninja.rule('link' + rule_name_suffix, description='LINK%s $binary' % rule_name_suffix.upper(), command=exe_cmd, rspfile='$binary.rsp', rspfile_content='$in_newline $libs $ldflags', pool='link_pool')
+
+
+def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name):
+  options = params['options']
+  flavor = gyp.common.GetFlavor(params)
+  generator_flags = params.get('generator_flags', OrderedDict())
+
+  # build_dir: relative path from source root to our output files.
+  # e.g. "out/Debug"
+  build_dir = os.path.normpath(os.path.join(ComputeOutputDir(params), config_name))
+
+  toplevel_build = os.path.join(options.toplevel_dir, build_dir)
+
+  master_ninja_file = OpenOutput(os.path.join(toplevel_build, 'build.ninja'))
+  master_ninja_writer = ninja_syntax.Writer(master_ninja_file, width=250)
+
+  # Put build-time support tools in out/{config_name}.
+  gyp.common.CopyTool(flavor, toplevel_build, generator_flags.get('mac_toolchain_dir'))
+
+  # Grab make settings for CC/CXX.
+  # The rules are:
+  # - The priority from low to high is gcc/g++, the 'make_global_settings' in
+  #   gyp, the environment variable.
+  # - If there is no 'make_global_settings' for CC.host/CXX.host or
+  #   'CC_host'/'CXX_host' environment variable, cc_host/cxx_host should be set
+  #   to cc/cxx.
+  if flavor == 'win':
+    ar = 'lib.exe'
+    # cc and cxx must be set to the correct architecture by overriding with one
+    # of cl_x86 or cl_x64 below.
+ cc = 'cl.exe' + cxx = 'cl.exe' + ld = 'link.exe' + ld_host = '$ld' + ldxx = 'UNSET' + ldxx_host = 'UNSET' + else: + ar = 'ar' + cc = 'cc' + cxx = 'c++' + ld = '$cc' + ldxx = '$cxx' + ld_host = '$cc_host' + ldxx_host = '$cxx_host' + + ar_host = ar + cc_host = None + cxx_host = None + cc_host_global_setting = None + cxx_host_global_setting = None + nm = 'nm' + nm_host = 'nm' + readelf = 'readelf' + readelf_host = 'readelf' + + build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0]) + make_global_settings = data[build_file].get('make_global_settings', []) + build_to_root = gyp.common.InvertRelativePath(build_dir, options.toplevel_dir) + wrappers = {} + for key, value in make_global_settings: + if key == 'AR': + ar = os.path.join(build_to_root, value) + if key == 'AR.host': + ar_host = os.path.join(build_to_root, value) + if key == 'CC': + cc = os.path.join(build_to_root, value) + if key == 'CXX': + cxx = os.path.join(build_to_root, value) + if key == 'CC.host': + cc_host = os.path.join(build_to_root, value) + cc_host_global_setting = value + if key == 'CXX.host': + cxx_host = os.path.join(build_to_root, value) + cxx_host_global_setting = value + if key == 'LD': + ld = os.path.join(build_to_root, value) + if key == 'LD.host': + ld_host = os.path.join(build_to_root, value) + if key == 'NM': + nm = os.path.join(build_to_root, value) + if key == 'NM.host': + nm_host = os.path.join(build_to_root, value) + if key == 'READELF': + readelf = os.path.join(build_to_root, value) + if key == 'READELF.host': + readelf_host = os.path.join(build_to_root, value) + if key.endswith('_wrapper'): + wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value) + + # Support wrappers from environment variables too. + for key, value in os.environ.items(): + if key.lower().endswith('_wrapper'): + key_prefix = key[:-len('_wrapper')] + key_prefix = re.sub(r'\.HOST$', '.host', key_prefix) + wrappers[key_prefix] = os.path.join(build_to_root, value) + + mac_toolchain_dir = generator_flags.get('mac_toolchain_dir', None) + if mac_toolchain_dir: + wrappers['LINK'] = "export DEVELOPER_DIR='%s' &&" % mac_toolchain_dir + + if flavor == 'win': + gyp.msvs_emulation.GenerateEnvironmentFiles(toplevel_build, generator_flags) + + cc = GetEnvironFallback(['CC_target', 'CC'], cc) + master_ninja_writer.variable('cc', CommandWithWrapper('CC', wrappers, cc)) + cxx = GetEnvironFallback(['CXX_target', 'CXX'], cxx) + master_ninja_writer.variable('cxx', CommandWithWrapper('CXX', wrappers, cxx)) + + if flavor == 'win': + master_ninja_writer.variable('ld', ld) + master_ninja_writer.variable('idl', 'midl.exe') + master_ninja_writer.variable('ar', ar) + master_ninja_writer.variable('rc', 'rc.exe') + master_ninja_writer.variable('ml_x86', 'ml.exe') + master_ninja_writer.variable('ml_x64', 'ml64.exe') + master_ninja_writer.variable('mt', 'mt.exe') + else: + master_ninja_writer.variable('ld', CommandWithWrapper('LINK', wrappers, ld)) + master_ninja_writer.variable('ldxx', CommandWithWrapper('LINK', wrappers, ldxx)) + master_ninja_writer.variable('ar', GetEnvironFallback(['AR_target', 'AR'], ar)) + if flavor != 'mac': + # Mac does not use readelf/nm for .TOC generation, so avoid polluting + # the master ninja with extra unused variables.
+ master_ninja_writer.variable('nm', GetEnvironFallback(['NM_target', 'NM'], nm)) + master_ninja_writer.variable('readelf', GetEnvironFallback(['READELF_target', 'READELF'], readelf)) + + if generator_supports_multiple_toolsets: + if not cc_host: + cc_host = cc + if not cxx_host: + cxx_host = cxx + + master_ninja_writer.variable('ar_host', GetEnvironFallback(['AR_host'], ar_host)) + master_ninja_writer.variable('nm_host', GetEnvironFallback(['NM_host'], nm_host)) + master_ninja_writer.variable('readelf_host', GetEnvironFallback(['READELF_host'], readelf_host)) + cc_host = GetEnvironFallback(['CC_host'], cc_host) + cxx_host = GetEnvironFallback(['CXX_host'], cxx_host) + + # The environment variable could be used in 'make_global_settings', like + # ['CC.host', '$(CC)'] or ['CXX.host', '$(CXX)']; transform such references here. + if '$(CC)' in cc_host and cc_host_global_setting: + cc_host = cc_host_global_setting.replace('$(CC)', cc) + if '$(CXX)' in cxx_host and cxx_host_global_setting: + cxx_host = cxx_host_global_setting.replace('$(CXX)', cxx) + master_ninja_writer.variable('cc_host', CommandWithWrapper('CC.host', wrappers, cc_host)) + master_ninja_writer.variable('cxx_host', CommandWithWrapper('CXX.host', wrappers, cxx_host)) + if flavor == 'win': + master_ninja_writer.variable('ld_host', ld_host) + else: + master_ninja_writer.variable('ld_host', CommandWithWrapper('LINK', wrappers, ld_host)) + master_ninja_writer.variable('ldxx_host', CommandWithWrapper('LINK', wrappers, ldxx_host)) + + master_ninja_writer.newline() + + master_ninja_writer.pool('link_pool', depth=GetDefaultConcurrentLinks()) + master_ninja_writer.newline() + + deps = 'msvc' if flavor == 'win' else 'gcc' + + if flavor != 'win': + master_ninja_writer.rule( + 'cc', + description='CC $out', + command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c ' + '$cflags_pch_c -c $in -o $out'), + depfile='$out.d', + deps=deps) + master_ninja_writer.rule( + 'cc_s', + description='CC $out', + command=('$cc $defines $includes $cflags $cflags_c ' + '$cflags_pch_c -c $in -o $out')) + master_ninja_writer.rule( + 'cxx', + description='CXX $out', + command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc ' + '$cflags_pch_cc -c $in -o $out'), + depfile='$out.d', + deps=deps) + else: + # TODO(scottmg) Using separate pdb names is a test to see if it works around + # http://crbug.com/142362. It seems there's a race between the creation of + # the .pdb by the precompiled header step for .cc and the compilation of + # .c files. This should be handled by mspdbsrv, but on rare occasions the + # build still errors out with + # c1xx : fatal error C1033: cannot open program database + # By making the rules target separate pdb files this might be avoided.
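To make the precedence just described concrete (built-in default, then 'make_global_settings', then the environment, with '$(CC)' in a host setting referring back to the target compiler), here is a rough standalone sketch; resolve_cc_host and its inputs are illustrative names only:

    import os

    def resolve_cc_host(default_cc, make_global_settings):
      # Lowest priority: the default, overridable by CC_target/CC in the env.
      cc = os.environ.get('CC_target', os.environ.get('CC', default_cc))
      cc_host = None
      cc_host_setting = None
      for key, value in make_global_settings:
        if key == 'CC.host':            # middle priority
          cc_host = cc_host_setting = value
      if cc_host is None:
        cc_host = cc                    # no host setting: reuse the target CC
      cc_host = os.environ.get('CC_host', cc_host)  # highest priority
      # ['CC.host', '$(CC)'] means "same as whatever CC resolved to".
      if '$(CC)' in cc_host and cc_host_setting:
        cc_host = cc_host_setting.replace('$(CC)', cc)
      return cc_host

    print(resolve_cc_host('cc', [['CC.host', '$(CC)']]))  # e.g. 'cc'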
+ cc_command = ('ninja -t msvc -e $arch ' + + '-- ' + '$cc /nologo /showIncludes /FC ' + '@$out.rsp /c $in /Fo$out /Fd$pdbname_c ') + cxx_command = ('ninja -t msvc -e $arch ' + + '-- ' + '$cxx /nologo /showIncludes /FC ' + '@$out.rsp /c $in /Fo$out /Fd$pdbname_cc ') + master_ninja_writer.rule( + 'cc', + description='CC $out', + command=cc_command, + rspfile='$out.rsp', + rspfile_content='$defines $includes $cflags $cflags_c', + deps=deps) + master_ninja_writer.rule( + 'cxx', + description='CXX $out', + command=cxx_command, + rspfile='$out.rsp', + rspfile_content='$defines $includes $cflags $cflags_cc', + deps=deps) + master_ninja_writer.rule( + 'idl', + description='IDL $in', + command=('%s gyp-win-tool midl-wrapper $arch $outdir ' + '$tlb $h $dlldata $iid $proxy $in ' + '$midl_includes $idlflags' % sys.executable)) + master_ninja_writer.rule( + 'rc', + description='RC $in', + # Note: $in must be last otherwise rc.exe complains. + command=('%s gyp-win-tool rc-wrapper ' + '$arch $rc $defines $resource_includes $rcflags /fo$out $in' % + sys.executable)) + master_ninja_writer.rule( + 'asm', + description='ASM $out', + command=('%s gyp-win-tool asm-wrapper ' + '$arch $asm $defines $includes $asmflags /c /Fo $out $in' % + sys.executable)) + + if flavor != 'mac' and flavor != 'win': + master_ninja_writer.rule('alink', description='AR $out', command='rm -f $out && $ar rcs $arflags $out $in') + master_ninja_writer.rule('alink_thin', description='AR $out', command='rm -f $out && $ar rcsT $arflags $out $in') + + # This allows targets that only need to depend on $lib's API to declare an + # order-only dependency on $lib.TOC and avoid relinking such downstream + # dependencies when $lib changes only in non-public ways. + # The resulting string leaves an uninterpolated %{suffix} which + # is used in the final substitution below. + mtime_preserving_solink_base = ( + 'if [ ! -e $lib -o ! -e $lib.TOC ]; then ' + '%(solink)s && %(extract_toc)s > $lib.TOC; else ' + '%(solink)s && %(extract_toc)s > $lib.tmp && ' + 'if ! 
cmp -s $lib.tmp $lib.TOC; then mv $lib.tmp $lib.TOC ; ' + 'fi; fi' + % { 'solink': + '$ld -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s', + 'extract_toc': + ('{ $readelf -d $lib | grep SONAME ; ' + '$nm -gD -f p $lib | cut -f1-2 -d\' \'; }')}) + + master_ninja_writer.rule( + 'solink', + description='SOLINK $lib', + restat=True, + command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'}, + rspfile='$link_file_list', + rspfile_content= + '-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive $libs', + pool='link_pool') + master_ninja_writer.rule( + 'solink_module', + description='SOLINK(module) $lib', + restat=True, + command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'}, + rspfile='$link_file_list', + rspfile_content='-Wl,--start-group $in -Wl,--end-group $solibs $libs', + pool='link_pool') + master_ninja_writer.rule( + 'link', + description='LINK $out', + command=('$ld $ldflags -o $out ' + '-Wl,--start-group $in -Wl,--end-group $solibs $libs'), + pool='link_pool') + elif flavor == 'win': + master_ninja_writer.rule( + 'alink', + description='LIB $out', + command=('%s gyp-win-tool link-wrapper $arch False ' + '$ar /nologo /ignore:4221 /OUT:$out @$out.rsp' % + sys.executable), + rspfile='$out.rsp', + rspfile_content='$in_newline $libflags') + _AddWinLinkRules(master_ninja_writer, embed_manifest=True) + _AddWinLinkRules(master_ninja_writer, embed_manifest=False) + else: + master_ninja_writer.rule( + 'objc', + description='OBJC $out', + command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_objc ' + '$cflags_pch_objc -c $in -o $out'), + depfile='$out.d', + deps=deps) + master_ninja_writer.rule( + 'objcxx', + description='OBJCXX $out', + command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_objcc ' + '$cflags_pch_objcc -c $in -o $out'), + depfile='$out.d', + deps=deps) + master_ninja_writer.rule( + 'alink', + description='LIBTOOL-STATIC $out, POSTBUILDS', + command='rm -f $out && ' + './gyp-mac-tool filter-libtool libtool $libtool_flags ' + '-static -o $out $in' + '$postbuilds') + master_ninja_writer.rule( + 'lipo', + description='LIPO $out, POSTBUILDS', + command='rm -f $out && lipo -create $in -output $out$postbuilds') + master_ninja_writer.rule( + 'solipo', + description='SOLIPO $out, POSTBUILDS', + command=( + 'rm -f $lib $lib.TOC && lipo -create $in -output $lib$postbuilds &&' + '%(extract_toc)s > $lib.TOC' + % { 'extract_toc': + '{ otool -l $lib | grep LC_ID_DYLIB -A 5; ' + 'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'})) + + + # Record the public interface of $lib in $lib.TOC. See the corresponding + # comment in the posix section above for details. + solink_base = '$ld %(type)s $ldflags -o $lib %(suffix)s' + mtime_preserving_solink_base = ( + 'if [ ! -e $lib -o ! -e $lib.TOC ] || ' + # Always force dependent targets to relink if this library + # reexports something. Handling this correctly would require + # recursive TOC dumping but this is rare in practice, so punt. + 'otool -l $lib | grep -q LC_REEXPORT_DYLIB ; then ' + '%(solink)s && %(extract_toc)s > $lib.TOC; ' + 'else ' + '%(solink)s && %(extract_toc)s > $lib.tmp && ' + 'if ! 
cmp -s $lib.tmp $lib.TOC; then ' + 'mv $lib.tmp $lib.TOC ; ' + 'fi; ' + 'fi' + % { 'solink': solink_base, + 'extract_toc': + '{ otool -l $lib | grep LC_ID_DYLIB -A 5; ' + 'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'}) + + solink_suffix = '@$link_file_list$postbuilds' + master_ninja_writer.rule( + 'solink', + description='SOLINK $lib, POSTBUILDS', + restat=True, + command=mtime_preserving_solink_base % {'suffix': solink_suffix, + 'type': '-shared'}, + rspfile='$link_file_list', + rspfile_content='$in $solibs $libs', + pool='link_pool') + master_ninja_writer.rule( + 'solink_notoc', + description='SOLINK $lib, POSTBUILDS', + restat=True, + command=solink_base % {'suffix':solink_suffix, 'type': '-shared'}, + rspfile='$link_file_list', + rspfile_content='$in $solibs $libs', + pool='link_pool') + + master_ninja_writer.rule( + 'solink_module', + description='SOLINK(module) $lib, POSTBUILDS', + restat=True, + command=mtime_preserving_solink_base % {'suffix': solink_suffix, + 'type': '-bundle'}, + rspfile='$link_file_list', + rspfile_content='$in $solibs $libs', + pool='link_pool') + master_ninja_writer.rule( + 'solink_module_notoc', + description='SOLINK(module) $lib, POSTBUILDS', + restat=True, + command=solink_base % {'suffix': solink_suffix, 'type': '-bundle'}, + rspfile='$link_file_list', + rspfile_content='$in $solibs $libs', + pool='link_pool') + + master_ninja_writer.rule( + 'link', + description='LINK $out, POSTBUILDS', + command=('$ld $ldflags -o $out ' + '$in $solibs $libs$postbuilds'), + pool='link_pool') + master_ninja_writer.rule( + 'preprocess_infoplist', + description='PREPROCESS INFOPLIST $out', + command=('$cc -E -P -Wno-trigraphs -x c $defines $in -o $out && ' + 'plutil -convert xml1 $out $out')) + master_ninja_writer.rule( + 'copy_infoplist', + description='COPY INFOPLIST $in', + command='$env ./gyp-mac-tool copy-info-plist $in $out $binary $keys') + master_ninja_writer.rule( + 'merge_infoplist', + description='MERGE INFOPLISTS $in', + command='$env ./gyp-mac-tool merge-info-plist $out $in') + master_ninja_writer.rule( + 'compile_xcassets', + description='COMPILE XCASSETS $in', + command='$env ./gyp-mac-tool compile-xcassets $keys $in') + master_ninja_writer.rule( + 'compile_ios_framework_headers', + description='COMPILE HEADER MAPS AND COPY FRAMEWORK HEADERS $in', + command='$env ./gyp-mac-tool compile-ios-framework-header-map $out ' + '$framework $in && $env ./gyp-mac-tool ' + 'copy-ios-framework-headers $framework $copy_headers') + master_ninja_writer.rule( + 'mac_tool', + description='MACTOOL $mactool_cmd $in', + command='$env ./gyp-mac-tool $mactool_cmd $in $out $binary') + master_ninja_writer.rule( + 'package_framework', + description='PACKAGE FRAMEWORK $out, POSTBUILDS', + command='./gyp-mac-tool package-framework $out $version$postbuilds ' + '&& touch $out') + master_ninja_writer.rule( + 'package_ios_framework', + description='PACKAGE IOS FRAMEWORK $out, POSTBUILDS', + command='./gyp-mac-tool package-ios-framework $out $postbuilds ' + '&& touch $out') + if flavor == 'win': + master_ninja_writer.rule('stamp', description='STAMP $out', command='%s gyp-win-tool stamp $out' % sys.executable) + else: + master_ninja_writer.rule('stamp', description='STAMP $out', command='${postbuilds}touch $out') + if flavor == 'win': + master_ninja_writer.rule('copy', description='COPY $in $out', command='%s gyp-win-tool recursive-mirror $in $out' % sys.executable) + elif flavor == 'zos': + master_ninja_writer.rule('copy', description='COPY $in $out', command='rm -rf $out && cp 
-fRP $in $out') + else: + master_ninja_writer.rule('copy', description='COPY $in $out', command='ln -f $in $out 2>/dev/null || (rm -rf $out && cp -af $in $out)') + master_ninja_writer.newline() + + all_targets = set() + for build_file in params['build_files']: + for target in gyp.common.AllTargets(target_list, target_dicts, os.path.normpath(build_file)): + all_targets.add(target) + all_outputs = set() + + # target_outputs is a map from qualified target name to a Target object. + target_outputs = {} + # target_short_names is a map from target short name to a list of Target + # objects. + target_short_names = {} + + # Short names of targets that were skipped because they didn't contain anything + # interesting. + # NOTE: there may be overlap between this and non_empty_target_names. + empty_target_names = set() + + # Set of non-empty short target names. + # NOTE: there may be overlap between this and empty_target_names. + non_empty_target_names = set() + + for qualified_target in target_list: + # qualified_target is like: third_party/icu/icu.gyp:icui18n#target + build_file, name, toolset = gyp.common.ParseQualifiedTarget(qualified_target) + + this_make_global_settings = data[build_file].get('make_global_settings', []) + assert make_global_settings == this_make_global_settings, ("make_global_settings needs to be the same for all targets. %s vs. %s" % (this_make_global_settings, make_global_settings)) + + spec = target_dicts[qualified_target] + if flavor == 'mac': + gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec) + + # If build_file is a symlink, we must not follow it because there's a chance + # it could point to a path above toplevel_dir, and we cannot correctly deal + # with that case at the moment. + build_file = gyp.common.RelativePath(build_file, options.toplevel_dir, False) + + qualified_target_for_hash = gyp.common.QualifiedTarget(build_file, name, toolset) + qualified_target_for_hash = qualified_target_for_hash.encode('utf-8') + hash_for_rules = hashlib.md5(qualified_target_for_hash).hexdigest() + + base_path = os.path.dirname(build_file) + obj = 'obj' + if toolset != 'target': + obj += '.' + toolset + output_file = os.path.join(obj, base_path, name + '.ninja') + + ninja_output = StringIO() + from gyp.NinjaWriter import NinjaWriter + writer = NinjaWriter(hash_for_rules, target_outputs, base_path, build_dir, ninja_output, toplevel_build, output_file, flavor, spec, generator_flags, config_name, options.toplevel_dir) + + target = writer.WriteSpec() + + if ninja_output.tell() > 0: + # Only create files for ninja files that actually have contents. + with OpenOutput(os.path.join(toplevel_build, output_file)) as ninja_file: + ninja_file.write(ninja_output.getvalue()) + ninja_output.close() + master_ninja_writer.subninja(output_file) + + if target: + if name != target.FinalOutput() and spec['toolset'] == 'target': + target_short_names.setdefault(name, []).append(target) + target_outputs[qualified_target] = target + if qualified_target in all_targets: + all_outputs.add(target.FinalOutput()) + non_empty_target_names.add(name) + else: + empty_target_names.add(name) + + if target_short_names: + # Write a short name to build this target. This benefits both the + # "build chrome" case as well as the gyp tests, which expect to be + # able to run actions and build libraries by their short name.
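For a concrete picture of the short-name aliasing the comment above describes (the emission itself follows), a standalone sketch using the vendored ninja_syntax module; the target name and output path are invented, and the import assumes the vendored module layout:

    import io
    from gyp.lib import ninja_syntax  # assuming the vendored module layout

    out = io.StringIO()
    writer = ninja_syntax.Writer(out)
    writer.comment('Short names for targets.')
    # Mirrors master_ninja_writer.build(short_name, 'phony', [final outputs]).
    writer.build('icui18n', 'phony', ['obj/third_party/icu/libicui18n.a'])
    print(out.getvalue())
    # build icui18n: phony obj/third_party/icu/libicui18n.a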
+ master_ninja_writer.newline() + master_ninja_writer.comment('Short names for targets.') + for short_name in sorted(target_short_names): + master_ninja_writer.build(short_name, 'phony', [x.FinalOutput() for x in target_short_names[short_name]]) + + # Write phony targets for any empty targets that weren't written yet. As + # short names are not necessarily unique only do this for short names that + # haven't already been output for another target. + empty_target_names = empty_target_names - non_empty_target_names + if empty_target_names: + master_ninja_writer.newline() + master_ninja_writer.comment('Empty targets (output for completeness).') + for name in sorted(empty_target_names): + master_ninja_writer.build(name, 'phony') + + if all_outputs: + master_ninja_writer.newline() + master_ninja_writer.build('all', 'phony', sorted(all_outputs)) + master_ninja_writer.default(generator_flags.get('default_target', 'all')) + + master_ninja_file.close() + + +def PerformBuild(_, configurations, params): + options = params['options'] + for config in configurations: + builddir = os.path.join(options.toplevel_dir, 'out', config) + arguments = ['ninja', '-C', builddir] + print('Building [%s]: %s' % (config, arguments)) + subprocess.check_call(arguments) + + +def GenerateOutput(target_list, target_dicts, data, params): + user_config = params['generator_flags'].get('config', None) + if gyp.common.GetFlavor(params) == 'win': + target_list, target_dicts = MSVS.ShardTargets(target_list, target_dicts) + target_list, target_dicts = MSVS.InsertLargePdbShims(target_list, target_dicts, generator_default_variables) + elif gyp.common.GetFlavor(params) == 'mac': + # Update target_dicts for iOS device builds. + target_dicts = gyp.xcode_emulation.CloneConfigurationForDeviceAndEmulator(target_dicts) + + if user_config: + GenerateOutputForConfig(target_list, target_dicts, data, params, user_config) + else: + config_names = target_dicts[target_list[0]]['configurations'] + for config_name in config_names: + GenerateOutputForConfig(target_list, target_dicts, data, params, config_name) diff --git a/gyp/pylib/gyp/generator/xcode.py b/gyp/gyp/generator/xcode.py similarity index 94% rename from gyp/pylib/gyp/generator/xcode.py rename to gyp/gyp/generator/xcode.py index 694a28afb1..0bdc67f989 100644 --- a/gyp/pylib/gyp/generator/xcode.py +++ b/gyp/gyp/generator/xcode.py @@ -1,12 +1,13 @@ -from __future__ import print_function # Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
+from __future__ import print_function + import filecmp import gyp.common -import gyp.xcodeproj_file -import gyp.xcode_ninja +import gyp.generator.xcodeproj_file +import gyp.generator.xcode_ninja import errno import os import sys @@ -78,6 +79,7 @@ 'mac_framework_headers', 'mac_framework_private_headers', 'mac_xctest_bundle', + 'mac_xcuitest_bundle', 'xcode_create_dependents_test_runner', ] @@ -98,11 +100,11 @@ ]) def CreateXCConfigurationList(configuration_names): - xccl = gyp.xcodeproj_file.XCConfigurationList({'buildConfigurations': []}) + xccl = gyp.generator.xcodeproj_file.XCConfigurationList({'buildConfigurations': []}) if len(configuration_names) == 0: configuration_names = ['Default'] for configuration_name in configuration_names: - xcbc = gyp.xcodeproj_file.XCBuildConfiguration({ + xcbc = gyp.generator.xcodeproj_file.XCBuildConfiguration({ 'name': configuration_name}) xccl.AppendProperty('buildConfigurations', xcbc) xccl.SetProperty('defaultConfigurationName', configuration_names[0]) @@ -113,13 +115,13 @@ class XcodeProject(object): def __init__(self, gyp_path, path, build_file_dict): self.gyp_path = gyp_path self.path = path - self.project = gyp.xcodeproj_file.PBXProject(path=path) + self.project = gyp.generator.xcodeproj_file.PBXProject(path=path) projectDirPath = gyp.common.RelativePath( os.path.dirname(os.path.abspath(self.gyp_path)), os.path.dirname(path) or '.') self.project.SetProperty('projectDirPath', projectDirPath) self.project_file = \ - gyp.xcodeproj_file.XCProjectFile({'rootObject': self.project}) + gyp.generator.xcodeproj_file.XCProjectFile({'rootObject': self.project}) self.build_file_dict = build_file_dict # TODO(mark): add destructor that cleans up self.path if created_dir is @@ -254,7 +256,7 @@ def Finalize1(self, xcode_targets, serialize_all_tests): # Make a target to run something. It should have one # dependency, the parent xcode target. xccl = CreateXCConfigurationList(configurations) - run_target = gyp.xcodeproj_file.PBXAggregateTarget({ + run_target = gyp.generator.xcodeproj_file.PBXAggregateTarget({ 'name': 'Run ' + target_name, 'productName': xcode_target.GetProperty('productName'), 'buildConfigurationList': xccl, @@ -266,13 +268,13 @@ def Finalize1(self, xcode_targets, serialize_all_tests): script = '' if command.get('working_directory'): script = script + 'cd "%s"\n' % \ - gyp.xcodeproj_file.ConvertVariablesToShellSyntax( + gyp.generator.xcodeproj_file.ConvertVariablesToShellSyntax( command.get('working_directory')) if command.get('environment'): script = script + "\n".join( ['export %s="%s"' % - (key, gyp.xcodeproj_file.ConvertVariablesToShellSyntax(val)) + (key, gyp.generator.xcodeproj_file.ConvertVariablesToShellSyntax(val)) for (key, val) in command.get('environment').items()]) + "\n" # Some test end up using sockets, files on disk, etc. and can get @@ -293,10 +295,10 @@ def Finalize1(self, xcode_targets, serialize_all_tests): # with an error, and fixup variable references to be shell # syntax instead of xcode syntax. script = script + 'exec ' + command_prefix + '%s\nexit 1\n' % \ - gyp.xcodeproj_file.ConvertVariablesToShellSyntax( + gyp.generator.xcodeproj_file.ConvertVariablesToShellSyntax( gyp.common.EncodePOSIXShellList(command.get('action'))) - ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ + ssbp = gyp.generator.xcodeproj_file.PBXShellScriptBuildPhase({ 'shellScript': script, 'showEnvVarsInLog': 0, }) @@ -328,7 +330,7 @@ def Finalize1(self, xcode_targets, serialize_all_tests): # time will build everything by default. 
if len(targets_for_all) > 1 and not has_custom_all: xccl = CreateXCConfigurationList(configurations) - all_target = gyp.xcodeproj_file.PBXAggregateTarget( + all_target = gyp.generator.xcodeproj_file.PBXAggregateTarget( { 'buildConfigurationList': xccl, 'name': 'All', @@ -346,7 +348,7 @@ def Finalize1(self, xcode_targets, serialize_all_tests): # The same, but for run_test_targets. if len(run_test_targets) > 1: xccl = CreateXCConfigurationList(configurations) - run_all_tests_target = gyp.xcodeproj_file.PBXAggregateTarget( + run_all_tests_target = gyp.generator.xcodeproj_file.PBXAggregateTarget( { 'buildConfigurationList': xccl, 'name': 'Run All Tests', @@ -378,7 +380,7 @@ def Finalize2(self, xcode_targets, xcode_target_to_target_dict): qualified_target = gyp.common.QualifiedTarget(self.gyp_path, tgt_name, toolset) xcode_target = xcode_targets[qualified_target] - if isinstance(xcode_target, gyp.xcodeproj_file.PBXAggregateTarget): + if isinstance(xcode_target, gyp.generator.xcodeproj_file.PBXAggregateTarget): # Collect all the run test targets. all_run_tests = [] pbxtds = xcode_target.GetProperty('dependencies') @@ -391,7 +393,7 @@ def Finalize2(self, xcode_targets, xcode_target_to_target_dict): # Directly depend on all the runners as they depend on the target # that builds them. if len(all_run_tests) > 0: - run_all_target = gyp.xcodeproj_file.PBXAggregateTarget({ + run_all_target = gyp.generator.xcodeproj_file.PBXAggregateTarget({ 'name': 'Run %s Tests' % tgt_name, 'productName': tgt_name, }, @@ -444,7 +446,7 @@ def Write(self): dir=self.path) try: - output_file = os.fdopen(output_fd, 'wb') + output_file = os.fdopen(output_fd, 'w') self.project_file.Print(output_file) output_file.close() @@ -606,7 +608,7 @@ def GenerateOutput(target_list, target_dicts, data, params): ninja_wrapper = params.get('flavor') == 'ninja' if ninja_wrapper: (target_list, target_dicts, data) = \ - gyp.xcode_ninja.CreateWrapper(target_list, target_dicts, data, params) + gyp.generator.xcode_ninja.CreateWrapper(target_list, target_dicts, data, params) options = params['options'] generator_flags = params.get('generator_flags', {}) @@ -651,7 +653,7 @@ def GenerateOutput(target_list, target_dicts, data, params): # Add gyp/gypi files to project if not generator_flags.get('standalone'): main_group = pbxp.GetProperty('mainGroup') - build_group = gyp.xcodeproj_file.PBXGroup({'name': 'Build'}) + build_group = gyp.generator.xcodeproj_file.PBXGroup({'name': 'Build'}) main_group.AppendChild(build_group) for included_file in build_file_dict['included_files']: build_group.AddOrGetFileByPath(included_file, False) @@ -659,8 +661,7 @@ def GenerateOutput(target_list, target_dicts, data, params): xcode_targets = {} xcode_target_to_target_dict = {} for qualified_target in target_list: - [build_file, target_name, toolset] = \ - gyp.common.ParseQualifiedTarget(qualified_target) + [build_file, target_name, toolset] = gyp.common.ParseQualifiedTarget(qualified_target) spec = target_dicts[qualified_target] if spec['toolset'] != 'target': @@ -692,6 +693,7 @@ def GenerateOutput(target_list, target_dicts, data, params): 'executable+bundle': 'com.apple.product-type.application', 'loadable_module+bundle': 'com.apple.product-type.bundle', 'loadable_module+xctest': 'com.apple.product-type.bundle.unit-test', + 'loadable_module+xcuitest': 'com.apple.product-type.bundle.ui-testing', 'shared_library+bundle': 'com.apple.product-type.framework', 'executable+extension+bundle': 'com.apple.product-type.app-extension', 'executable+watch+extension+bundle': @@ 
-708,13 +710,19 @@ def GenerateOutput(target_list, target_dicts, data, params): type = spec['type'] is_xctest = int(spec.get('mac_xctest_bundle', 0)) - is_bundle = int(spec.get('mac_bundle', 0)) or is_xctest + is_xcuitest = int(spec.get('mac_xcuitest_bundle', 0)) + is_bundle = int(spec.get('mac_bundle', 0)) or is_xctest or is_xcuitest is_app_extension = int(spec.get('ios_app_extension', 0)) is_watchkit_extension = int(spec.get('ios_watchkit_extension', 0)) is_watch_app = int(spec.get('ios_watch_app', 0)) if type != 'none': type_bundle_key = type - if is_xctest: + if is_xcuitest: + type_bundle_key += '+xcuitest' + assert type == 'loadable_module', ( + 'mac_xcuitest_bundle targets must have type loadable_module ' + '(target %s)' % target_name) + elif is_xctest: type_bundle_key += '+xctest' assert type == 'loadable_module', ( 'mac_xctest_bundle targets must have type loadable_module ' @@ -734,7 +742,7 @@ def GenerateOutput(target_list, target_dicts, data, params): elif is_bundle: type_bundle_key += '+bundle' - xctarget_type = gyp.xcodeproj_file.PBXNativeTarget + xctarget_type = gyp.generator.xcodeproj_file.PBXNativeTarget try: target_properties['productType'] = _types[type_bundle_key] except KeyError as e: @@ -742,10 +750,13 @@ def GenerateOutput(target_list, target_dicts, data, params): "writing target %s" % target_name) raise else: - xctarget_type = gyp.xcodeproj_file.PBXAggregateTarget + xctarget_type = gyp.generator.xcodeproj_file.PBXAggregateTarget assert not is_bundle, ( 'mac_bundle targets cannot have type none (target "%s")' % target_name) + assert not is_xcuitest, ( + 'mac_xcuitest_bundle targets cannot have type none (target "%s")' % + target_name) assert not is_xctest, ( 'mac_xctest_bundle targets cannot have type none (target "%s")' % target_name) @@ -787,8 +798,8 @@ def GenerateOutput(target_list, target_dicts, data, params): support_target_properties['productName'] = \ target_product_name + ' Support' support_xct = \ - gyp.xcodeproj_file.PBXAggregateTarget(support_target_properties, - parent=pbxp) + gyp.generator.xcodeproj_file.PBXAggregateTarget(support_target_properties, + parent=pbxp) pbxp.AppendProperty('targets', support_xct) xct.AddDependency(support_xct) # Hang the support target off the main target so it can be tested/found @@ -815,8 +826,8 @@ def GenerateOutput(target_list, target_dicts, data, params): # Convert Xcode-type variable references to sh-compatible environment # variable references. - message_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(message) - action_string_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax( + message_sh = gyp.generator.xcodeproj_file.ConvertVariablesToShellSyntax(message) + action_string_sh = gyp.generator.xcodeproj_file.ConvertVariablesToShellSyntax( action_string) script = '' @@ -826,7 +837,7 @@ def GenerateOutput(target_list, target_dicts, data, params): # Be sure the script runs in exec, and that if exec fails, the script # exits signalling an error. script += 'exec ' + action_string_sh + '\nexit 1\n' - ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ + ssbp = gyp.generator.xcodeproj_file.PBXShellScriptBuildPhase({ 'inputPaths': action['inputs'], 'name': 'Action "' + action['action_name'] + '"', 'outputPaths': action['outputs'], @@ -1006,22 +1017,21 @@ def GenerateOutput(target_list, target_dicts, data, params): makefile_name) # TODO(mark): try/close? Write to a temporary file and swap it only # if it's got changes? 
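Stepping back to the bundle-type selection above: ignoring the app- and watch-extension branches, the key derivation reduces to the following standalone sketch (the spec dict is an invented example):

    # Sketch of the type_bundle_key derivation (extension branches omitted).
    def bundle_key(spec):
      key = spec['type']
      if int(spec.get('mac_xcuitest_bundle', 0)):
        key += '+xcuitest'   # -> 'com.apple.product-type.bundle.ui-testing'
      elif int(spec.get('mac_xctest_bundle', 0)):
        key += '+xctest'     # -> 'com.apple.product-type.bundle.unit-test'
      elif int(spec.get('mac_bundle', 0)):
        key += '+bundle'
      return key

    assert bundle_key({'type': 'loadable_module',
                       'mac_xcuitest_bundle': 1}) == 'loadable_module+xcuitest'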
- makefile = open(makefile_path, 'wb') + makefile = open(makefile_path, 'w') # make will build the first target in the makefile by default. By # convention, it's called "all". List all (or at least one) # concrete output for each rule source as a prerequisite of the "all" # target. makefile.write('all: \\\n') - for concrete_output_index in \ - range(0, len(concrete_outputs_by_rule_source)): + for concrete_output_index, concrete_output_by_rule_source in \ + enumerate(concrete_outputs_by_rule_source): # Only list the first (index [0]) concrete output of each input # in the "all" target. Otherwise, a parallel make (-j > 1) would # attempt to process each input multiple times simultaneously. # Otherwise, "all" could just contain the entire list of # concrete_outputs_all. - concrete_output = \ - concrete_outputs_by_rule_source[concrete_output_index][0] + concrete_output = concrete_output_by_rule_source[0] if concrete_output_index == len(concrete_outputs_by_rule_source) - 1: eol = '' else: @@ -1037,8 +1047,8 @@ def GenerateOutput(target_list, target_dicts, data, params): # rule source. Collect the names of the directories that are # required. concrete_output_dirs = [] - for concrete_output_index in range(0, len(concrete_outputs)): - concrete_output = concrete_outputs[concrete_output_index] + for concrete_output_index, concrete_output in \ + enumerate(concrete_outputs): if concrete_output_index == 0: bol = '' else: @@ -1056,8 +1066,7 @@ def GenerateOutput(target_list, target_dicts, data, params): # the set of additional rule inputs, if any. prerequisites = [rule_source] prerequisites.extend(rule.get('inputs', [])) - for prerequisite_index in range(0, len(prerequisites)): - prerequisite = prerequisites[prerequisite_index] + for prerequisite_index, prerequisite in enumerate(prerequisites): if prerequisite_index == len(prerequisites) - 1: eol = '' else: @@ -1084,17 +1093,6 @@ def GenerateOutput(target_list, target_dicts, data, params): # work if there ever was a concrete output that had an input-dependent # variable anywhere other than in the leaf position. - # Don't declare any inputPaths or outputPaths. If they're present, - # Xcode will provide a slight optimization by only running the script - # phase if any output is missing or outdated relative to any input. - # Unfortunately, it will also assume that all outputs are touched by - # the script, and if the outputs serve as files in a compilation - # phase, they will be unconditionally rebuilt. Since make might not - # rebuild everything that could be declared here as an output, this - # extra compilation activity is unnecessary. With inputPaths and - # outputPaths not supplied, make will always be called, but it knows - # enough to not do anything when everything is up-to-date. - # To help speed things up, pass -j COUNT to make so it does some work # in parallel. 
Don't use ncpus because Xcode will build ncpus targets # in parallel and if each target happens to have a rules step, there @@ -1109,8 +1107,10 @@ def GenerateOutput(target_list, target_dicts, data, params): exec xcrun make -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}" exit 1 """ % makefile_name - ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ + ssbp = gyp.generator.xcodeproj_file.PBXShellScriptBuildPhase({ + 'inputPaths': rule['rule_sources'], 'name': 'Rule "' + rule['rule_name'] + '"', + 'outputPaths': concrete_outputs_all, 'shellScript': script, 'showEnvVarsInLog': 0, }) @@ -1178,7 +1178,7 @@ def GenerateOutput(target_list, target_dicts, data, params): # they'll wind up with ID collisions. pbxcp = pbxcp_dict.get(dest, None) if pbxcp is None: - pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({ + pbxcp = gyp.generator.xcodeproj_file.PBXCopyFilesBuildPhase({ 'name': 'Copy to ' + copy_group['destination'] }, parent=xct) @@ -1222,7 +1222,7 @@ def GenerateOutput(target_list, target_dicts, data, params): # sufficient to ensure proper ordering in all cases. With an input # declared but no outputs, the script step should run every time, as # desired. - ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ + ssbp = gyp.generator.xcodeproj_file.PBXShellScriptBuildPhase({ 'inputPaths': ['$(BUILT_PRODUCTS_DIR)/$(EXECUTABLE_PATH)'], 'name': 'Postbuild "' + postbuild['postbuild_name'] + '"', 'shellScript': script, diff --git a/gyp/pylib/gyp/xcode_ninja.py b/gyp/gyp/generator/xcode_ninja.py similarity index 91% rename from gyp/pylib/gyp/xcode_ninja.py rename to gyp/gyp/generator/xcode_ninja.py index d70eddc90a..1d71b8c5f8 100644 --- a/gyp/pylib/gyp/xcode_ninja.py +++ b/gyp/gyp/generator/xcode_ninja.py @@ -85,18 +85,23 @@ def _TargetFromSpec(old_spec, params): "%s/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)" % ninja_toplevel if 'configurations' in old_spec: - for config in old_spec['configurations']: + for config in old_spec['configurations'].keys(): old_xcode_settings = \ old_spec['configurations'][config].get('xcode_settings', {}) if 'IPHONEOS_DEPLOYMENT_TARGET' in old_xcode_settings: new_xcode_settings['CODE_SIGNING_REQUIRED'] = "NO" new_xcode_settings['IPHONEOS_DEPLOYMENT_TARGET'] = \ old_xcode_settings['IPHONEOS_DEPLOYMENT_TARGET'] + for key in ['BUNDLE_LOADER', 'TEST_HOST']: + if key in old_xcode_settings: + new_xcode_settings[key] = old_xcode_settings[key] + ninja_target['configurations'][config] = {} ninja_target['configurations'][config]['xcode_settings'] = \ new_xcode_settings ninja_target['mac_bundle'] = old_spec.get('mac_bundle', 0) + ninja_target['mac_xctest_bundle'] = old_spec.get('mac_xctest_bundle', 0) ninja_target['ios_app_extension'] = old_spec.get('ios_app_extension', 0) ninja_target['ios_watchkit_extension'] = \ old_spec.get('ios_watchkit_extension', 0) @@ -138,9 +143,10 @@ def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec): if target_extras is not None and re.search(target_extras, target_name): return True - # Otherwise just show executable targets. - if spec.get('type', '') == 'executable' and \ - spec.get('product_extension', '') != 'bundle': + # Otherwise just show executable targets and xc_tests. + if (int(spec.get('mac_xctest_bundle', 0)) != 0 or + (spec.get('type', '') == 'executable' and + spec.get('product_extension', '') != 'bundle')): # If there is a filter and the target does not match, exclude the target. 
if executable_target_pattern is not None: @@ -227,13 +233,26 @@ def CreateWrapper(target_list, target_dicts, data, params): # Tell Xcode to look everywhere for headers. sources_target['configurations'] = {'Default': { 'include_dirs': [ depth ] } } + # Put excluded files into the sources target so they can be opened in Xcode. + skip_excluded_files = \ + not generator_flags.get('xcode_ninja_list_excluded_files', True) + sources = [] for target, target_dict in target_dicts.items(): base = os.path.dirname(target) files = target_dict.get('sources', []) + \ target_dict.get('mac_bundle_resources', []) + + if not skip_excluded_files: + files.extend(target_dict.get('sources_excluded', []) + + target_dict.get('mac_bundle_resources_excluded', [])) + for action in target_dict.get('actions', []): files.extend(action.get('inputs', [])) + + if not skip_excluded_files: + files.extend(action.get('inputs_excluded', [])) + # Remove files starting with $. These are mostly intermediate files for the # build system. files = [ file for file in files if not file.startswith('$')] diff --git a/gyp/pylib/gyp/xcodeproj_file.py b/gyp/gyp/generator/xcodeproj_file.py similarity index 94% rename from gyp/pylib/gyp/xcodeproj_file.py rename to gyp/gyp/generator/xcodeproj_file.py index b0385468c5..544b94d134 100644 --- a/gyp/pylib/gyp/xcodeproj_file.py +++ b/gyp/gyp/generator/xcodeproj_file.py @@ -136,21 +136,30 @@ Strings of class unicode are handled properly and encoded in UTF-8 when a project file is output. """ - -import gyp.common -import hashlib +from functools import cmp_to_key import posixpath import re import struct import sys +import gyp.common + +# hashlib is supplied as of Python 2.5 as the replacement interface for sha +# and other secure hashes. In 2.6, sha is deprecated. Import hashlib if +# available, avoiding a deprecation warning under 2.6. Import sha otherwise, +# preserving 2.4 compatibility. try: - basestring, cmp, unicode -except NameError: # Python 3 - basestring = unicode = str - def cmp(x, y): - return (x > y) - (x < y) + from hashlib import sha1 as sha +except ImportError: + # noinspection PyUnresolvedReferences,PyCompatibility + from sha import new as sha + +if 'basestring' not in __builtins__: + basestring = str +if 'cmp' not in __builtins__: + def cmp(a, b): + return (a > b) - (a < b) # See XCObject._EncodeString. This pattern is used to determine when a string # can be printed unquoted. Strings that match this pattern may be printed @@ -321,7 +330,7 @@ def Copy(self): that._properties[key] = new_value else: that._properties[key] = value - elif isinstance(value, (basestring, int)): + elif isinstance(value, basestring) or isinstance(value, int): that._properties[key] = value elif isinstance(value, list): if is_strong: @@ -418,7 +427,7 @@ def _HashUpdate(hash, data): hash.update(data) if seed_hash is None: - seed_hash = hashlib.sha1() + seed_hash = sha() hash = seed_hash.copy() @@ -445,7 +454,7 @@ def _HashUpdate(hash, data): # is 160 bits. Instead of throwing out 64 bits of the digest, xor them # into the portion that gets used. 
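The XOR folding that the comment above describes can be run in isolation. This standalone sketch assumes the folding loop distributes words with index % 3, a detail not shown in this hunk; the seed bytes are arbitrary:

    import struct
    from hashlib import sha1

    h = sha1()
    h.update(b'example hashable')           # arbitrary seed data
    assert h.digest_size % 4 == 0
    digest_int_count = h.digest_size // 4   # 160-bit SHA-1 -> five 32-bit words
    digest_ints = struct.unpack('>' + 'I' * digest_int_count, h.digest())
    id_ints = [0, 0, 0]
    for index in range(digest_int_count):
      id_ints[index % 3] ^= digest_ints[index]  # fold the surplus 64 bits in
    print('%08X%08X%08X' % tuple(id_ints))      # 96-bit, 24-hex-digit ID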
assert hash.digest_size % 4 == 0 - digest_int_count = hash.digest_size / 4 + digest_int_count = hash.digest_size // 4 digest_ints = struct.unpack('>' + 'I' * digest_int_count, hash.digest()) id_ints = [0, 0, 0] for index in range(0, digest_int_count): @@ -599,7 +608,12 @@ def _XCPrintableValue(self, tabs, value, flatten_list=False): comment = value.Comment() elif isinstance(value, str): printable += self._EncodeString(value) - elif isinstance(value, unicode): + # A python3 compatible way of saying isinstance(value, unicode). + # basestring is str in python3 so this is equivalent to the above + # isinstance. Thus if it failed above it will fail here. + # In python2 we test against str and unicode at this point. str has already + # failed in the above isinstance so we test against unicode. + elif isinstance(value, basestring): printable += self._EncodeString(value.encode('utf-8')) elif isinstance(value, int): printable += str(value) @@ -762,7 +776,7 @@ def UpdateProperties(self, properties, do_copy=False): ' must be list, not ' + value.__class__.__name__) for item in value: if not isinstance(item, property_type) and \ - not (item.__class__ == unicode and property_type == str): + not (isinstance(item, basestring) and property_type == str): # Accept unicode where str is specified. str is treated as # UTF-8-encoded. raise TypeError( @@ -770,7 +784,7 @@ def UpdateProperties(self, properties, do_copy=False): ' must be ' + property_type.__name__ + ', not ' + \ item.__class__.__name__) elif not isinstance(value, property_type) and \ - not (value.__class__ == unicode and property_type == str): + not (isinstance(value, basestring) and property_type == str): # Accept unicode where str is specified. str is treated as # UTF-8-encoded. raise TypeError( @@ -784,7 +798,7 @@ def UpdateProperties(self, properties, do_copy=False): self._properties[property] = value.Copy() else: self._properties[property] = value - elif isinstance(value, (basestring, int)): + elif isinstance(value, basestring) or isinstance(value, int): self._properties[property] = value elif isinstance(value, list): if is_strong: @@ -1421,8 +1435,8 @@ def PathHashables(self): xche = self while xche != None and isinstance(xche, XCHierarchicalElement): xche_hashables = xche.Hashables() - for index in range(0, len(xche_hashables)): - hashables.insert(index, xche_hashables[index]) + for index, xche_hashable in enumerate(xche_hashables): + hashables.insert(index, xche_hashable) xche = xche.parent return hashables @@ -1940,24 +1954,40 @@ class PBXCopyFilesBuildPhase(XCBuildPhase): 'name': [0, str, 0, 0], }) - # path_tree_re matches "$(DIR)/path" or just "$(DIR)". Match group 1 is - # "DIR", match group 3 is "path" or None. - path_tree_re = re.compile('^\\$\\((.*)\\)(/(.*)|)$') - - # path_tree_to_subfolder maps names of Xcode variables to the associated - # dstSubfolderSpec property value used in a PBXCopyFilesBuildPhase object. - path_tree_to_subfolder = { - 'BUILT_FRAMEWORKS_DIR': 10, # Frameworks Directory - 'BUILT_PRODUCTS_DIR': 16, # Products Directory - # Other types that can be chosen via the Xcode UI. - # TODO(mark): Map Xcode variable names to these. - # : 1, # Wrapper - # : 6, # Executables: 6 - # : 7, # Resources - # : 15, # Java Resources - # : 11, # Shared Frameworks - # : 12, # Shared Support - # : 13, # PlugIns + # path_tree_re matches "$(DIR)/path", "$(DIR)/$(DIR2)/path" or just "$(DIR)". + # Match group 1 is "DIR", group 3 is "path" or "$(DIR2)" or "$(DIR2)/path" + # or None.
If group 3 is "path", group 4 will be None otherwise group 4 is + # "DIR2" and group 6 is "path". + path_tree_re = re.compile(r'^\$\((.*?)\)(/(\$\((.*?)\)(/(.*)|)|(.*)|)|)$') + + # path_tree_{first,second}_to_subfolder map names of Xcode variables to the + # associated dstSubfolderSpec property value used in a PBXCopyFilesBuildPhase + # object. + path_tree_first_to_subfolder = { + # Types that can be chosen via the Xcode UI. + 'BUILT_PRODUCTS_DIR': 16, # Products Directory + 'BUILT_FRAMEWORKS_DIR': 10, # Not an official Xcode macro. + # Existed before support for the + # names below was added. Maps to + # "Frameworks". + } + + path_tree_second_to_subfolder = { + 'WRAPPER_NAME': 1, # Wrapper + # Although Xcode's friendly name is "Executables", the destination + # is demonstrably the value of the build setting + # EXECUTABLE_FOLDER_PATH not EXECUTABLES_FOLDER_PATH. + 'EXECUTABLE_FOLDER_PATH': 6, # Executables. + 'UNLOCALIZED_RESOURCES_FOLDER_PATH': 7, # Resources + 'JAVA_FOLDER_PATH': 15, # Java Resources + 'FRAMEWORKS_FOLDER_PATH': 10, # Frameworks + 'SHARED_FRAMEWORKS_FOLDER_PATH': 11, # Shared Frameworks + 'SHARED_SUPPORT_FOLDER_PATH': 12, # Shared Support + 'PLUGINS_FOLDER_PATH': 13, # PlugIns + # For XPC Services, Xcode sets both dstPath and dstSubfolderSpec. + # Note that it re-uses the BUILT_PRODUCTS_DIR value for + # dstSubfolderSpec. dstPath is set below. + 'XPCSERVICES_FOLDER_PATH': 16, # XPC Services. } def Name(self): @@ -1978,14 +2008,61 @@ def SetDestination(self, path): path_tree_match = self.path_tree_re.search(path) if path_tree_match: - # Everything else needs to be relative to an Xcode variable. - path_tree = path_tree_match.group(1) - relative_path = path_tree_match.group(3) - - if path_tree in self.path_tree_to_subfolder: - subfolder = self.path_tree_to_subfolder[path_tree] + path_tree = path_tree_match.group(1); + if path_tree in self.path_tree_first_to_subfolder: + subfolder = self.path_tree_first_to_subfolder[path_tree] + relative_path = path_tree_match.group(3) if relative_path is None: relative_path = '' + + if subfolder == 16 and path_tree_match.group(4) is not None: + # BUILT_PRODUCTS_DIR (16) is the first element in a path whose + # second element is possibly one of the variable names in + # path_tree_second_to_subfolder. Xcode sets the values of all these + # variables to relative paths so .gyp files must prefix them with + # BUILT_PRODUCTS_DIR, e.g. + # $(BUILT_PRODUCTS_DIR)/$(PLUGINS_FOLDER_PATH). Then + # xcode_emulation.py can export these variables with the same values + # as Xcode yet make & ninja files can determine the absolute path + # to the target. Xcode uses the dstSubfolderSpec value set here + # to determine the full path. + # + # An alternative of xcode_emulation.py setting the values to absolute + # paths when exporting these variables has been ruled out because + # then the values would be different depending on the build tool. + # + # Another alternative is to invent new names for the variables used + # to match to the subfolder indices in the second table. .gyp files + # then will not need to prepend $(BUILT_PRODUCTS_DIR) because + # xcode_emulation.py can set the values of those variables to + # the absolute paths when exporting. This is possibly the thinking + # behind BUILT_FRAMEWORKS_DIR which is used in exactly this manner. + # + # Requiring prepending BUILT_PRODUCTS_DIR has been chosen because + # this same way could be used to specify destinations in .gyp files + # that pre-date this addition to GYP. 
However, they would only work + # with the Xcode generator. The previous version of xcode_emulation.py + # does not export these variables. Such files will get the benefit + # of the Xcode UI showing the proper destination name simply by + # regenerating the projects with this version of GYP. + path_tree = path_tree_match.group(4) + relative_path = path_tree_match.group(6) + separator = '/' + + if path_tree in self.path_tree_second_to_subfolder: + subfolder = self.path_tree_second_to_subfolder[path_tree] + if relative_path is None: + relative_path = '' + separator = '' + if path_tree == 'XPCSERVICES_FOLDER_PATH': + relative_path = '$(CONTENTS_FOLDER_PATH)/XPCServices' \ + + separator + relative_path + else: + # subfolder = 16 from above + # The second element of the path is an unrecognized variable. + # Include it and any remaining elements in relative_path. + relative_path = path_tree_match.group(3) + else: # The path starts with an unrecognized Xcode variable # name like $(SRCROOT). Xcode will still handle this @@ -2136,8 +2213,8 @@ class XCTarget(XCRemoteObject): 'productName': [0, str, 0, 1], }) - def __init__(self, properties=None, id=None, parent=None, - force_outdir=None, force_prefix=None, force_extension=None): + # noinspection PyUnusedLocal + def __init__(self, properties=None, id=None, parent=None, force_outdir=None, force_prefix=None, force_extension=None): # super XCRemoteObject.__init__(self, properties, id, parent) @@ -2197,12 +2274,10 @@ def GetBuildSetting(self, key): return self._properties['buildConfigurationList'].GetBuildSetting(key) def SetBuildSetting(self, key, value): - return self._properties['buildConfigurationList'].SetBuildSetting(key, \ - value) + return self._properties['buildConfigurationList'].SetBuildSetting(key, value) def AppendBuildSetting(self, key, value): - return self._properties['buildConfigurationList'].AppendBuildSetting(key, \ - value) + return self._properties['buildConfigurationList'].AppendBuildSetting(key, value) def DelBuildSetting(self, key): return self._properties['buildConfigurationList'].DelBuildSetting(key) @@ -2256,6 +2331,8 @@ class PBXNativeTarget(XCTarget): '', ''], 'com.apple.product-type.bundle.unit-test': ['wrapper.cfbundle', '', '.xctest'], + 'com.apple.product-type.bundle.ui-testing': ['wrapper.cfbundle', + '', '.xctest'], 'com.googlecode.gyp.xcode.bundle': ['compiled.mach-o.dylib', '', '.so'], 'com.apple.product-type.kernel-extension': ['wrapper.kext', @@ -2312,7 +2389,9 @@ def __init__(self, properties=None, id=None, parent=None, force_extension = suffix[1:] if self._properties['productType'] == \ - 'com.apple.product-type-bundle.unit.test': + 'com.apple.product-type.bundle.unit-test' or \ + self._properties['productType'] == \ + 'com.apple.product-type.bundle.ui-testing': if force_extension is None: force_extension = suffix[1:] @@ -2396,8 +2475,7 @@ def HeadersPhase(self): # The headers phase should come before the resources, sources, and # frameworks phases, if any.
insert_at = len(self._properties['buildPhases']) - for index in range(0, len(self._properties['buildPhases'])): - phase = self._properties['buildPhases'][index] + for index, phase in enumerate(self._properties['buildPhases']): if isinstance(phase, PBXSourcesBuildPhase) or \ isinstance(phase, PBXFrameworksBuildPhase): insert_at = index @@ -2456,9 +2533,9 @@ def AddDependency(self, other): 'productType' in self._properties and \ self._properties['productType'] != static_library_type and \ 'productType' in other._properties and \ - (other._properties['productType'] == static_library_type or \ - ((other._properties['productType'] == shared_library_type or \ - other._properties['productType'] == framework_type) and \ + (other._properties['productType'] == static_library_type or + ((other._properties['productType'] == shared_library_type or + other._properties['productType'] == framework_type) and ((not other.HasBuildSetting('MACH_O_TYPE')) or other.GetBuildSetting('MACH_O_TYPE') != 'mh_bundle'))): @@ -2496,8 +2573,7 @@ class PBXProject(XCContainerPortal): _schema = XCContainerPortal._schema.copy() _schema.update({ 'attributes': [0, dict, 0, 0], - 'buildConfigurationList': [0, XCConfigurationList, 1, 1, - XCConfigurationList()], + 'buildConfigurationList': [0, XCConfigurationList, 1, 1, XCConfigurationList()], 'compatibilityVersion': [0, str, 0, 1, 'Xcode 3.2'], 'hasScannedForEncodings': [0, int, 0, 1, 1], 'mainGroup': [0, PBXGroup, 1, 1, PBXGroup()], @@ -2510,8 +2586,7 @@ class PBXProject(XCContainerPortal): def __init__(self, properties=None, id=None, parent=None, path=None): self.path = path self._other_pbxprojects = {} - # super - return XCContainerPortal.__init__(self, properties, id, parent) + super(PBXProject, self).__init__(properties, id, parent) def Name(self): name = self.path @@ -2604,14 +2679,14 @@ def RootGroupForPath(self, path): } (source_tree, path) = SourceTreeAndPathFromPath(path) - if source_tree != None and source_tree in source_tree_groups: + if source_tree is not None and source_tree in source_tree_groups: (group_func, hierarchical) = source_tree_groups[source_tree] group = group_func() - return (group, hierarchical) + return group, hierarchical # TODO(mark): make additional choices based on file extension. - return (self.SourceGroup(), True) + return self.SourceGroup(), True def AddOrGetFileInRootGroup(self, path): """Returns a PBXFileReference corresponding to path in the correct group @@ -2634,9 +2709,7 @@ def RootGroupsTakeOverOnlyChildren(self, recurse=False): def SortGroups(self): # Sort the children of the mainGroup (like "Source" and "Products") # according to their defined order. - self._properties['mainGroup']._properties['children'] = \ - sorted(self._properties['mainGroup']._properties['children'], - cmp=lambda x,y: x.CompareRootGroup(y)) + self._properties['mainGroup']._properties['children'].sort(key=cmp_to_key(XCHierarchicalElement.CompareRootGroup)) # Sort everything else by putting group before files, and going # alphabetically by name within sections of groups and files. SortGroup @@ -2683,9 +2756,6 @@ def AddOrGetProjectReference(self, other_pbxproject): if not 'projectReferences' in self._properties: self._properties['projectReferences'] = [] - product_group = None - project_ref = None - if not other_pbxproject in self._other_pbxprojects: # This project file isn't yet linked to the other one. Establish the # link. 
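The hunks around this point all retire the Python 2 cmp= sort argument; the standalone sketch below shows the migration pattern, using a two-argument comparator like the cmp() shim defined earlier in this file:

    from functools import cmp_to_key

    def compare(a, b):
      return (a > b) - (a < b)     # two-argument comparator, as in Python 2

    items = ['Products', 'Build', 'Source']
    # sorted(items, cmp=compare) is gone in Python 3; wrap the comparator.
    items.sort(key=cmp_to_key(compare))
    print(items)                   # ['Build', 'Products', 'Source']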
@@ -2884,8 +2954,7 @@ def Print(self, file=sys.stdout): self._XCPrint(file, 0, '{ ') else: self._XCPrint(file, 0, '{\n') - for property, value in sorted(self._properties.items(), - cmp=lambda x, y: cmp(x, y)): + for property, value in sorted(self._properties.items()): if property == 'objects': self._PrintObjects(file) else: diff --git a/gyp/pylib/gyp/input.py b/gyp/gyp/input.py similarity index 76% rename from gyp/pylib/gyp/input.py rename to gyp/gyp/input.py index dde2823a9f..4b0023e7bd 100644 --- a/gyp/pylib/gyp/input.py +++ b/gyp/gyp/input.py @@ -5,23 +5,21 @@ from __future__ import print_function import ast - -import gyp.common -import gyp.simple_copy -import multiprocessing -import optparse import os.path import re import shlex -import signal import subprocess import sys -import threading -import time import traceback -from gyp.common import GypError -from gyp.common import OrderedSet +from collections import OrderedDict +import gyp.common +import gyp.lib.simple_copy +from gyp.common import GypError, OrderedSet + +if not 'unicode' in __builtins__: + unicode = str + basestring = str # A list of types that are treated as linkable. linkable_types = [ @@ -29,10 +27,11 @@ 'shared_library', 'loadable_module', 'mac_kernel_extension', + 'windows_driver', ] # A list of sections that contain links to other targets. -dependency_sections = ['dependencies', 'export_dependent_settings'] +dependency_sections = ['dependencies', 'export_dependent_settings', ] # base_path_sections is a list of sections defined by GYP that contain # pathnames. The generators can provide more keys, the two lists are merged @@ -54,6 +53,7 @@ per_process_data = {} per_process_aux_data = {} + def IsPathSection(section): # If section ends in one of the '=+?!' characters, it's applied to a section # without the trailing characters. '/' is notably absent from this list, @@ -78,6 +78,7 @@ def IsPathSection(section): return False + # base_non_configuration_keys is a list of key names that belong in the target # itself and should not be propagated into its configurations. It is merged # with a list that can come from the generator to @@ -136,6 +137,7 @@ def IsPathSection(section): # } generator_filelist_paths = None + def GetIncludedBuildFiles(build_file_path, aux_data, included=None): """Return a list of all build files included into build_file_path. @@ -189,15 +191,12 @@ def CheckedEval(file_contents): def CheckNode(node, keypath): if isinstance(node, ast.Dict): - c = node.getChildren() dict = {} for key, value in zip(node.keys, node.values): assert isinstance(key, ast.Str) key = key.s if key in dict: - raise GypError("Key '" + key + "' repeated at level " + - repr(len(keypath) + 1) + " with key path '" + - '.'.join(keypath) + "'") + raise GypError("Key '" + key + "' repeated at level " + repr(len(keypath) + 1) + " with key path '" + '.'.join(keypath) + "'") kp = list(keypath) # Make a copy of the list for descending this node. 
kp.append(key) dict[key] = CheckNode(value, kp) @@ -211,36 +210,23 @@ def CheckNode(node, keypath): return children elif isinstance(node, ast.Str): return node.s + elif isinstance(node, ast.Num): + return node.n else: - raise TypeError("Unknown AST node at key path '" + '.'.join(keypath) + - "': " + repr(node)) + raise TypeError("Unknown AST node at key path '" + '.'.join(keypath) + "': " + repr(node)) -def LoadOneBuildFile(build_file_path, data, aux_data, includes, - is_target, check): +def LoadOneBuildFile(build_file_path, data, aux_data, includes): if build_file_path in data: return data[build_file_path] if os.path.exists(build_file_path): - # Open the build file for read ('r') with universal-newlines mode ('U') - # to make sure platform specific newlines ('\r\n' or '\r') are converted to '\n' - # which otherwise will fail eval() - if sys.platform == 'zos': - # On z/OS, universal-newlines mode treats the file as an ascii file. But since - # node-gyp produces ebcdic files, do not use that mode. - build_file_contents = open(build_file_path, 'r').read() - else: - build_file_contents = open(build_file_path, 'rU').read() + build_file_contents = open(build_file_path).read() else: raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd())) - build_file_data = None try: - if check: - build_file_data = CheckedEval(build_file_contents) - else: - build_file_data = eval(build_file_contents, {'__builtins__': None}, - None) + build_file_data = CheckedEval(build_file_contents) except SyntaxError as e: e.filename = build_file_path raise @@ -252,38 +238,29 @@ def LoadOneBuildFile(build_file_path, data, aux_data, includes, raise GypError("%s does not evaluate to a dictionary." % build_file_path) data[build_file_path] = build_file_data - aux_data[build_file_path] = {} + aux_data[build_file_path] = OrderedDict() # Scan for includes and merge them in. - if ('skip_includes' not in build_file_data or - not build_file_data['skip_includes']): + if 'skip_includes' not in build_file_data or not build_file_data['skip_includes']: try: - if is_target: - LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data, - aux_data, includes, check) - else: - LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data, - aux_data, None, check) + LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data, aux_data, includes) except Exception as e: - gyp.common.ExceptionAppend(e, - 'while reading includes of ' + build_file_path) + gyp.common.ExceptionAppend(e, 'while reading includes of ' + build_file_path) raise return build_file_data -def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data, - includes, check): +def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data, includes=None): includes_list = [] - if includes != None: + if includes is not None: includes_list.extend(includes) if 'includes' in subdict: for include in subdict['includes']: # "include" is specified relative to subdict_path, so compute the real # path to include by appending the provided "include" to the directory # in which subdict_path resides. - relative_include = \ - os.path.normpath(os.path.join(os.path.dirname(subdict_path), include)) + relative_include = os.path.normpath(os.path.join(os.path.dirname(subdict_path), include)) includes_list.append(relative_include) # Unhook the includes list, it's no longer needed. 
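The comment above states the include-resolution rule: an 'includes' entry is interpreted relative to the directory of the file that names it, then normalized. In isolation:

    import os.path

    def resolve_include(including_file, include):
        # A path named in 'includes' is relative to the including file's directory.
        return os.path.normpath(
            os.path.join(os.path.dirname(including_file), include))

    print(resolve_include('src/app/app.gyp', '../common.gypi'))  # src/common.gypi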
del subdict['includes'] @@ -296,28 +273,24 @@ def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data, gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include) - MergeDicts(subdict, - LoadOneBuildFile(include, data, aux_data, None, False, check), - subdict_path, include) + MergeDicts(subdict, LoadOneBuildFile(include, data, aux_data, None), subdict_path, include) # Recurse into subdictionaries. for k, v in subdict.items(): if type(v) is dict: - LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, - None, check) + LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data) elif type(v) is list: - LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data, - check) + LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data) # This recurses into lists so that it can look for dicts. -def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data, check): +def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data): for item in sublist: if type(item) is dict: - LoadBuildFileIncludesIntoDict(item, sublist_path, data, aux_data, - None, check) + LoadBuildFileIncludesIntoDict(item, sublist_path, data, aux_data) elif type(item) is list: - LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data, check) + LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data) + # Processes toolsets in all the targets. This recurses into condition entries # since they can contain toolsets as well. @@ -341,7 +314,7 @@ def ProcessToolsetsInDict(data): if len(toolsets) > 0: # Optimization: only do copies if more than one toolset is specified. for build in toolsets[1:]: - new_target = gyp.simple_copy.deepcopy(target) + new_target = gyp.lib.simple_copy.deepcopy(target) new_target['toolset'] = build new_target_list.append(new_target) target['toolset'] = toolsets[0] @@ -358,8 +331,7 @@ def ProcessToolsetsInDict(data): # TODO(mark): I don't love this name. It just means that it's going to load # a build file that contains targets and is expected to provide a targets dict # that contains the targets... -def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, - depth, check, load_dependencies): +def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, depth, load_dependencies): # If depth is set, predefine the DEPTH variable to be a relative path from # this build file's directory to the directory identified by depth. if depth: @@ -384,11 +356,9 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, return False data['target_build_files'].add(build_file_path) - gyp.DebugOutput(gyp.DEBUG_INCLUDES, - "Loading Target Build File '%s'", build_file_path) + gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Target Build File '%s'", build_file_path) - build_file_data = LoadOneBuildFile(build_file_path, data, aux_data, - includes, True, check) + build_file_data = LoadOneBuildFile(build_file_path, data, aux_data, includes) # Store DEPTH for later use in generators. build_file_data['_DEPTH'] = depth @@ -403,9 +373,7 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, for included_file in included: # included_file is relative to the current directory, but it needs to # be made relative to build_file_path's directory. 
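ProcessToolsetsInDict above multiplies each target by its requested toolsets, deep-copying all but the first. A sketch of the effect, with copy.deepcopy standing in for gyp.lib.simple_copy:

    import copy

    def expand_toolsets(target, toolsets):
        expanded = []
        for toolset in toolsets:
            new_target = copy.deepcopy(target)
            new_target['toolset'] = toolset
            expanded.append(new_target)
        return expanded

    targets = expand_toolsets({'target_name': 'demo', 'type': 'none'},
                              ['target', 'host'])
    print([t['toolset'] for t in targets])  # ['target', 'host']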
- included_relative = \ - gyp.common.RelativePath(included_file, - os.path.dirname(build_file_path)) + included_relative = gyp.common.RelativePath(included_file, os.path.dirname(build_file_path)) build_file_data['included_files'].append(included_relative) # Do a first round of toolsets expansion so that conditions can be defined @@ -413,8 +381,7 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, ProcessToolsetsInDict(build_file_data) # Apply "pre"/"early" variable expansions and condition evaluations. - ProcessVariablesAndConditionsInDict( - build_file_data, PHASE_EARLY, variables, build_file_path) + ProcessVariablesAndConditionsInDict(build_file_data, PHASE_EARLY, variables, build_file_path) # Since some toolsets might have been defined conditionally, perform # a second round of toolsets expansion now. @@ -424,8 +391,7 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, # targets. if 'target_defaults' in build_file_data: if 'targets' not in build_file_data: - raise GypError("Unable to find targets in build file %s" % - build_file_path) + raise GypError("Unable to find targets in build file %s" % build_file_path) index = 0 while index < len(build_file_data['targets']): @@ -437,10 +403,8 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, # copy with the target-specific data merged into it as the replacement # target dict. old_target_dict = build_file_data['targets'][index] - new_target_dict = gyp.simple_copy.deepcopy( - build_file_data['target_defaults']) - MergeDicts(new_target_dict, old_target_dict, - build_file_path, build_file_path) + new_target_dict = gyp.lib.simple_copy.deepcopy(build_file_data['target_defaults']) + MergeDicts(new_target_dict, old_target_dict, build_file_path, build_file_path) build_file_data['targets'][index] = new_target_dict index += 1 @@ -458,172 +422,28 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, if 'dependencies' not in target_dict: continue for dependency in target_dict['dependencies']: - dependencies.append( - gyp.common.ResolveTarget(build_file_path, dependency, None)[0]) + dependencies.append(gyp.common.ResolveTarget(build_file_path, dependency, None)[0]) if load_dependencies: for dependency in dependencies: try: - LoadTargetBuildFile(dependency, data, aux_data, variables, - includes, depth, check, load_dependencies) + LoadTargetBuildFile(dependency, data, aux_data, variables, includes, depth, load_dependencies) except Exception as e: - gyp.common.ExceptionAppend( - e, 'while loading dependencies of %s' % build_file_path) + gyp.common.ExceptionAppend(e, 'while loading dependencies of %s' % build_file_path) raise else: return (build_file_path, dependencies) -def CallLoadTargetBuildFile(global_flags, - build_file_path, variables, - includes, depth, check, - generator_input_info): - """Wrapper around LoadTargetBuildFile for parallel processing. - - This wrapper is used when LoadTargetBuildFile is executed in - a worker process. - """ - - try: - signal.signal(signal.SIGINT, signal.SIG_IGN) - - # Apply globals so that the worker process behaves the same. 
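The target_defaults hunk above rebuilds every target as a deep copy of target_defaults with the target's own settings merged over it. A sketch of the shape of that operation, with dict.update standing in for gyp's list-aware MergeDicts:

    import copy

    build_file_data = {
        'target_defaults': {'type': 'static_library', 'defines': ['COMMON']},
        'targets': [{'target_name': 'demo', 'type': 'executable'}],
    }

    merged = copy.deepcopy(build_file_data['target_defaults'])
    merged.update(build_file_data['targets'][0])  # target-specific values win
    print(merged['type'], merged['defines'])  # executable ['COMMON']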
- for key, value in global_flags.items(): - globals()[key] = value - - SetGeneratorGlobals(generator_input_info) - result = LoadTargetBuildFile(build_file_path, per_process_data, - per_process_aux_data, variables, - includes, depth, check, False) - if not result: - return result - - (build_file_path, dependencies) = result - - # We can safely pop the build_file_data from per_process_data because it - # will never be referenced by this process again, so we don't need to keep - # it in the cache. - build_file_data = per_process_data.pop(build_file_path) - - # This gets serialized and sent back to the main process via a pipe. - # It's handled in LoadTargetBuildFileCallback. - return (build_file_path, - build_file_data, - dependencies) - except GypError as e: - sys.stderr.write("gyp: %s\n" % e) - return None - except Exception as e: - print('Exception:', e, file=sys.stderr) - print(traceback.format_exc(), file=sys.stderr) - return None - - -class ParallelProcessingError(Exception): - pass - - -class ParallelState(object): - """Class to keep track of state when processing input files in parallel. - - If build files are loaded in parallel, use this to keep track of - state during farming out and processing parallel jobs. It's stored - in a global so that the callback function can have access to it. - """ - - def __init__(self): - # The multiprocessing pool. - self.pool = None - # The condition variable used to protect this object and notify - # the main loop when there might be more data to process. - self.condition = None - # The "data" dict that was passed to LoadTargetBuildFileParallel - self.data = None - # The number of parallel calls outstanding; decremented when a response - # was received. - self.pending = 0 - # The set of all build files that have been scheduled, so we don't - # schedule the same one twice. - self.scheduled = set() - # A list of dependency build file paths that haven't been scheduled yet. - self.dependencies = [] - # Flag to indicate if there was an error in a child process. - self.error = False - - def LoadTargetBuildFileCallback(self, result): - """Handle the results of running LoadTargetBuildFile in another process. - """ - self.condition.acquire() - if not result: - self.error = True - self.condition.notify() - self.condition.release() - return - (build_file_path0, build_file_data0, dependencies0) = result - self.data[build_file_path0] = build_file_data0 - self.data['target_build_files'].add(build_file_path0) - for new_dependency in dependencies0: - if new_dependency not in self.scheduled: - self.scheduled.add(new_dependency) - self.dependencies.append(new_dependency) - self.pending -= 1 - self.condition.notify() - self.condition.release() - - -def LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth, - check, generator_input_info): - parallel_state = ParallelState() - parallel_state.condition = threading.Condition() - # Make copies of the build_files argument that we can modify while working. 
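The loader being deleted here (the removal concludes just below) fanned work out to a multiprocessing.Pool and collected results through an apply_async callback guarded by a condition variable. A minimal, gyp-independent sketch of that coordination pattern:

    import multiprocessing
    import threading

    def work(n):
        return n * n

    def main():
        condition = threading.Condition()
        results, pending = [], [0]

        def on_done(value):
            # Runs on the pool's result-handler thread.
            with condition:
                results.append(value)
                pending[0] -= 1
                condition.notify()

        pool = multiprocessing.Pool(2)
        with condition:
            for n in range(5):
                pending[0] += 1
                pool.apply_async(work, args=(n,), callback=on_done)
            while pending[0]:
                condition.wait()
        pool.close()
        pool.join()
        print(sorted(results))  # [0, 1, 4, 9, 16]

    if __name__ == '__main__':
        main()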
-  parallel_state.dependencies = list(build_files)
-  parallel_state.scheduled = set(build_files)
-  parallel_state.pending = 0
-  parallel_state.data = data
-
-  try:
-    parallel_state.condition.acquire()
-    while parallel_state.dependencies or parallel_state.pending:
-      if parallel_state.error:
-        break
-      if not parallel_state.dependencies:
-        parallel_state.condition.wait()
-        continue
-
-      dependency = parallel_state.dependencies.pop()
-
-      parallel_state.pending += 1
-      global_flags = {
-        'path_sections': globals()['path_sections'],
-        'non_configuration_keys': globals()['non_configuration_keys'],
-        'multiple_toolsets': globals()['multiple_toolsets']}
-
-      if not parallel_state.pool:
-        parallel_state.pool = multiprocessing.Pool(multiprocessing.cpu_count())
-      parallel_state.pool.apply_async(
-          CallLoadTargetBuildFile,
-          args = (global_flags, dependency,
-                  variables, includes, depth, check, generator_input_info),
-          callback = parallel_state.LoadTargetBuildFileCallback)
-  except KeyboardInterrupt as e:
-    parallel_state.pool.terminate()
-    raise e
-
-  parallel_state.condition.release()
-
-  parallel_state.pool.close()
-  parallel_state.pool.join()
-  parallel_state.pool = None
-
-  if parallel_state.error:
-    sys.exit(1)
 
 # Look for the bracket that matches the first bracket seen in a
 # string, and return the start and end as a tuple. For example, if
 # the input is something like "<(foo <(bar)) blah", then it would
 # return (1, 13), indicating the entire string except for the leading
 # "<" and trailing " blah".
-LBRACKETS= set('{[(')
+LBRACKETS = set('{[(')
 BRACKETS = {'}': '{', ']': '[', ')': '('}
+
+
 def FindEnclosingBracketGroup(input_str):
   stack = []
   start = -1
@@ -668,24 +488,24 @@ def IsStrCanonicalInt(string):
 # This matches things like "<(asdf)", "<!(cmd)", "<!@(cmd)", "<|(list)",
 # "<!interpreter(arguments)".
 early_variable_re = re.compile(
-    r'(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)'
-    r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
-    r'\((?P<is_array>\s*\[?)'
-    r'(?P<content>.*?)(\]?)\))')
+    r'(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)'
+    r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
+    r'\((?P<is_array>\s*\[?)'
+    r'(?P<content>.*?)(\]?)\))')
 
 # This matches the same as early_variable_re, but with '>' instead of '<'.
 late_variable_re = re.compile(
-    r'(?P<replace>(?P<type>>(?:(?:!?@?)|\|)?)'
-    r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
-    r'\((?P<is_array>\s*\[?)'
-    r'(?P<content>.*?)(\]?)\))')
+    r'(?P<replace>(?P<type>>(?:(?:!?@?)|\|)?)'
+    r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
+    r'\((?P<is_array>\s*\[?)'
+    r'(?P<content>.*?)(\]?)\))')
 
 # This matches the same as early_variable_re, but with '^' instead of '<'.
latelate_variable_re = re.compile(
-    r'(?P<replace>(?P<type>[\^](?:(?:!?@?)|\|)?)'
-    r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
-    r'\((?P<is_array>\s*\[?)'
-    r'(?P<content>.*?)(\]?)\))')
+    r'(?P<replace>(?P<type>[\^](?:(?:!?@?)|\|)?)'
+    r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
+    r'\((?P<is_array>\s*\[?)'
+    r'(?P<content>.*?)(\]?)\))')
 
 # Global cache of results from running commands so they don't have to be run
 # more then once.
@@ -721,9 +541,6 @@ def ExpandVariables(input, phase, variables, build_file):
     assert False
 
   input_str = str(input)
-  if IsStrCanonicalInt(input_str):
-    return int(input_str)
-
   # Do a quick scan to determine if an expensive regex search is warranted.
   if expansion_symbol not in input_str:
     return input_str
@@ -759,7 +576,6 @@ def ExpandVariables(input, phase, variables, build_file):
 
   # Capture these now so we can adjust them later.
   replace_start = match_group.start('replace')
-  replace_end = match_group.end('replace')
 
   # Find the ending paren, and re-evaluate the contained string.
   (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:])
@@ -784,11 +600,10 @@ def ExpandVariables(input, phase, variables, build_file):
   # contexts. However, since filtration has no chance to run on <|(),
   # this seems like the only obvious way to give them access to filters.
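The three variable-expansion regexes above differ only in their sigil ('<' for early, '>' for late, '^' for late-late) and expose the named groups that ExpandVariables reads back via match_group.start('replace') and friends; the pattern text follows upstream gyp's input.py. A quick demonstration against the forms named in the comment:

    import re

    early_variable_re = re.compile(
        r'(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)'
        r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
        r'\((?P<is_array>\s*\[?)'
        r'(?P<content>.*?)(\]?)\))')

    m = early_variable_re.search('prefix <(foo) suffix')
    print(m.group('type'), m.group('content'))  # < foo

    m = early_variable_re.search('<!@(python -c "print(42)")')
    print(m.group('type'))  # <!@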
if file_list: - processed_variables = gyp.simple_copy.deepcopy(variables) + processed_variables = gyp.lib.simple_copy.deepcopy(variables) ProcessListFiltersInDict(contents, processed_variables) # Recurse to expand variables in the contents - contents = ExpandVariables(contents, phase, - processed_variables, build_file) + contents = ExpandVariables(contents, phase, processed_variables, build_file) else: # Recurse to expand variables in the contents contents = ExpandVariables(contents, phase, variables, build_file) @@ -805,6 +620,7 @@ def ExpandVariables(input, phase, variables, build_file): # expansion in the input string. expand_to_list = '@' in match['type'] and input_str == replacement + build_file_dir = None if run_command or file_list: # Find the build file's directory, so commands can be run or file lists # generated relative to it. @@ -865,36 +681,31 @@ def ExpandVariables(input, phase, variables, build_file): cache_key = (str(contents), build_file_dir) cached_value = cached_command_results.get(cache_key, None) if cached_value is None: - gyp.DebugOutput(gyp.DEBUG_VARIABLES, - "Executing command '%s' in directory '%s'", - contents, build_file_dir) - - replacement = '' + gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Executing command '%s' in directory '%s'", contents, build_file_dir) if command_string == 'pymod_do_main': - # i + 2 and type(condition[i + 2]) is dict: false_dict = condition[i + 2] i = i + 3 if i != len(condition): - raise GypError('{} {} has {} unexpected trailing items'.format( - conditions_key, cond_expr, len(condition) - i)) + raise GypError('{} {} has {} unexpected trailing items'.format(conditions_key, cond_expr, len(condition) - i)) else: false_dict = None i = i + 2 if result == None: - result = EvalSingleCondition( - cond_expr, true_dict, false_dict, phase, variables, build_file) + result = EvalSingleCondition(cond_expr, true_dict, false_dict, phase, variables, build_file) return result -def EvalSingleCondition( - cond_expr, true_dict, false_dict, phase, variables, build_file): +def EvalSingleCondition(cond_expr, true_dict, false_dict, phase, variables, build_file): """Returns true_dict if cond_expr evaluates to true, and false_dict otherwise.""" # Do expansions on the condition itself. Since the conditon can naturally # contain variable references without needing to resort to GYP expansion # syntax, this is of dubious value for variables, but someone might want to # use a command expansion directly inside a condition. - cond_expr_expanded = ExpandVariables(cond_expr, phase, variables, - build_file) + cond_expr_expanded = ExpandVariables(cond_expr, phase, variables, build_file) if type(cond_expr_expanded) not in (str, int): - raise ValueError( - 'Variable expansion in this context permits str and int ' + \ - 'only, found ' + cond_expr_expanded.__class__.__name__) + raise ValueError('Variable expansion in this context permits str and int only, found ' + cond_expr_expanded.__class__.__name__) try: if cond_expr_expanded in cached_conditions_asts: @@ -1094,14 +876,16 @@ def EvalSingleCondition( return true_dict return false_dict except SyntaxError as e: - syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s ' - 'at character %d.' % - (str(e.args[0]), e.text, build_file, e.offset), - e.filename, e.lineno, e.offset, e.text) + traceback.print_exc(file=sys.stderr) + syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s at character %d.' 
% (str(e.args[0]), e.text, build_file, e.offset), e.filename, e.lineno, e.offset, e.text) raise syntax_error except NameError as e: - gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' % - (cond_expr_expanded, build_file)) + traceback.print_exc(file=sys.stderr) + gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s\nvariables=%r' % (cond_expr_expanded, build_file, variables)) + raise GypError(e) + except TypeError as e: + traceback.print_exc(file=sys.stderr) + gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' % (cond_expr_expanded, build_file)) raise GypError(e) @@ -1139,14 +923,12 @@ def ProcessConditionsInDict(the_dict, phase, variables, build_file): del the_dict[conditions_key] for condition in conditions_list: - merge_dict = EvalCondition(condition, conditions_key, phase, variables, - build_file) + merge_dict = EvalCondition(condition, conditions_key, phase, variables, build_file) - if merge_dict != None: - # Expand variables and nested conditinals in the merge_dict before + if merge_dict is not None: + # Expand variables and nested conditionals in the merge_dict before # merging it. - ProcessVariablesAndConditionsInDict(merge_dict, phase, - variables, build_file) + ProcessVariablesAndConditionsInDict(merge_dict, phase, variables, build_file) MergeDicts(the_dict, merge_dict, build_file, build_file) @@ -1187,8 +969,7 @@ def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key): variables[variable_name] = value -def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in, - build_file, the_dict_key=None): +def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in, build_file, the_dict_key=None): """Handle all variable and command expansion and conditional evaluation. This function is the public entry point for all variable expansions and @@ -1214,8 +995,7 @@ def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in, # Pass a copy of the variables dict to avoid having it be tainted. # Otherwise, it would have extra automatics added for everything that # should just be an ordinary variable in this scope. - ProcessVariablesAndConditionsInDict(the_dict['variables'], phase, - variables, build_file, 'variables') + ProcessVariablesAndConditionsInDict(the_dict['variables'], phase, variables, build_file, 'variables') LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) @@ -1224,9 +1004,7 @@ def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in, if key != 'variables' and type(value) is str: expanded = ExpandVariables(value, phase, variables, build_file) if type(expanded) not in (str, int): - raise ValueError( - 'Variable expansion in this context permits str and int ' + \ - 'only, found ' + expanded.__class__.__name__ + ' for ' + key) + raise ValueError('Variable expansion in this context permits str and int only, found ' + expanded.__class__.__name__ + ' for ' + key) the_dict[key] = expanded # Variable expansion may have resulted in changes to automatics. Reload. @@ -1285,22 +1063,18 @@ def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in, if type(value) is dict: # Pass a copy of the variables dict so that subdicts can't influence # parents. 
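EvalCondition and EvalSingleCondition above consume 'conditions' entries shaped as a condition string, a dict to merge when the condition holds, and an optional dict for when it does not; the expression is compiled once and eval'd with the variables dict as its namespace. A condensed sketch of the layout and the selection (gyp additionally expands <(...) references in the expression and caches the parsed ASTs):

    conditions = [
        ['OS=="win"', {'defines': ['WINDOWS']}, {'defines': ['POSIX']}],
    ]
    variables = {'OS': 'linux'}

    for entry in conditions:
        cond_expr, true_dict = entry[0], entry[1]
        false_dict = entry[2] if len(entry) > 2 else None
        code = compile(cond_expr, '<string>', 'eval')
        chosen = true_dict if eval(code, {'__builtins__': {}}, variables) else false_dict
        print(chosen)  # {'defines': ['POSIX']}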
- ProcessVariablesAndConditionsInDict(value, phase, variables, - build_file, key) + ProcessVariablesAndConditionsInDict(value, phase, variables, build_file, key) elif type(value) is list: # The list itself can't influence the variables dict, and # ProcessVariablesAndConditionsInList will make copies of the variables # dict if it needs to pass it to something that can influence it. No # copy is necessary here. - ProcessVariablesAndConditionsInList(value, phase, variables, - build_file) + ProcessVariablesAndConditionsInList(value, phase, variables, build_file) elif type(value) is not int: - raise TypeError('Unknown type ' + value.__class__.__name__ + \ - ' for ' + key) + raise TypeError('Unknown type ' + value.__class__.__name__ + ' for ' + key) -def ProcessVariablesAndConditionsInList(the_list, phase, variables, - build_file): +def ProcessVariablesAndConditionsInList(the_list, phase, variables, build_file): # Iterate using an index so that new values can be assigned into the_list. index = 0 while index < len(the_list): @@ -1316,20 +1090,16 @@ def ProcessVariablesAndConditionsInList(the_list, phase, variables, if type(expanded) in (str, int): the_list[index] = expanded elif type(expanded) is list: - the_list[index:index+1] = expanded + the_list[index:index + 1] = expanded index += len(expanded) # index now identifies the next item to examine. Continue right now # without falling into the index increment below. continue else: - raise ValueError( - 'Variable expansion in this context permits strings and ' + \ - 'lists only, found ' + expanded.__class__.__name__ + ' at ' + \ - index) + raise ValueError('Variable expansion in this context permits strings and lists only, found ' + expanded.__class__.__name__ + ' at ' + index) elif type(item) is not int: - raise TypeError('Unknown type ' + item.__class__.__name__ + \ - ' at index ' + index) + raise TypeError('Unknown type ' + item.__class__.__name__ + ' at index ' + index) index = index + 1 @@ -1345,13 +1115,10 @@ def BuildTargetsDict(data): in the returned dict. These keys provide access to the target dicts, the dicts in the "targets" lists. """ - - targets = {} + targets = OrderedDict() for build_file in data['target_build_files']: for target in data[build_file].get('targets', []): - target_name = gyp.common.QualifiedTarget(build_file, - target['target_name'], - target['toolset']) + target_name = gyp.common.QualifiedTarget(build_file, target['target_name'], target['toolset']) if target_name in targets: raise GypError('Duplicate target definitions for ' + target_name) targets[target_name] = target @@ -1370,32 +1137,25 @@ def QualifyDependencies(targets): similar dict. """ - all_dependency_sections = [dep + op - for dep in dependency_sections - for op in ('', '!', '/')] + all_dependency_sections = [dep + op for dep in dependency_sections for op in ('', '!', '/')] for target, target_dict in targets.items(): target_build_file = gyp.common.BuildFile(target) toolset = target_dict['toolset'] for dependency_key in all_dependency_sections: dependencies = target_dict.get(dependency_key, []) - for index in range(0, len(dependencies)): - dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget( - target_build_file, dependencies[index], toolset) + for index, dep in enumerate(dependencies): + dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(target_build_file, dep, toolset) if not multiple_toolsets: # Ignore toolset specification in the dependency if it is specified. 
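BuildTargetsDict above keys targets by their fully qualified name, built by gyp.common.QualifiedTarget as build_file:target_name#toolset:

    def qualified_target(build_file, target, toolset):
        # Mirrors the format produced by gyp.common.QualifiedTarget.
        return '%s:%s#%s' % (build_file, target, toolset)

    print(qualified_target('src/app.gyp', 'demo', 'target'))  # src/app.gyp:demo#target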
dep_toolset = toolset - dependency = gyp.common.QualifiedTarget(dep_file, - dep_target, - dep_toolset) + dependency = gyp.common.QualifiedTarget(dep_file, dep_target, dep_toolset) dependencies[index] = dependency # Make sure anything appearing in a list other than "dependencies" also # appears in the "dependencies" list. - if dependency_key != 'dependencies' and \ - dependency not in target_dict['dependencies']: - raise GypError('Found ' + dependency + ' in ' + dependency_key + - ' of ' + target + ', but not in dependencies') + if dependency_key != 'dependencies' and dependency not in target_dict['dependencies']: + raise GypError('Found ' + dependency + ' in ' + dependency_key + ' of ' + target + ', but not in dependencies') def ExpandWildcardDependencies(targets, data): @@ -1415,17 +1175,15 @@ def ExpandWildcardDependencies(targets, data): """ for target, target_dict in targets.items(): - toolset = target_dict['toolset'] target_build_file = gyp.common.BuildFile(target) for dependency_key in dependency_sections: dependencies = target_dict.get(dependency_key, []) - # Loop this way instead of "for dependency in" or "for index in range" + # Loop this way instead of "for dependency in" or "for index in xrange" # because the dependencies list will be modified within the loop body. index = 0 while index < len(dependencies): - (dependency_build_file, dependency_target, dependency_toolset) = \ - gyp.common.ParseQualifiedTarget(dependencies[index]) + (dependency_build_file, dependency_target, dependency_toolset) = gyp.common.ParseQualifiedTarget(dependencies[index]) if dependency_target != '*' and dependency_toolset != '*': # Not a wildcard. Keep it moving. index = index + 1 @@ -1434,8 +1192,7 @@ def ExpandWildcardDependencies(targets, data): if dependency_build_file == target_build_file: # It's an error for a target to depend on all other targets in # the same file, because a target cannot depend on itself. 
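ExpandWildcardDependencies above replaces a dependency such as other.gyp:* with one entry per matching target defined in that file, skipping targets that set suppress_wildcard. The filtering step in isolation (file and target names are illustrative):

    targets_in_other = [
        {'target_name': 'lib_a', 'toolset': 'target'},
        {'target_name': 'lib_b', 'toolset': 'target', 'suppress_wildcard': 1},
    ]

    expanded = ['other.gyp:%s#%s' % (t['target_name'], t['toolset'])
                for t in targets_in_other
                if not int(t.get('suppress_wildcard', False))]
    print(expanded)  # ['other.gyp:lib_a#target']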
- raise GypError('Found wildcard in ' + dependency_key + ' of ' + - target + ' referring to same build file') + raise GypError('Found wildcard in ' + dependency_key + ' of ' + target + ' referring to same build file') # Take the wildcard out and adjust the index so that the next # dependency in the list will be processed the next time through the @@ -1451,16 +1208,12 @@ def ExpandWildcardDependencies(targets, data): if int(dependency_target_dict.get('suppress_wildcard', False)): continue dependency_target_name = dependency_target_dict['target_name'] - if (dependency_target != '*' and - dependency_target != dependency_target_name): + if (dependency_target != '*' and dependency_target != dependency_target_name): continue dependency_target_toolset = dependency_target_dict['toolset'] - if (dependency_toolset != '*' and - dependency_toolset != dependency_target_toolset): + if (dependency_toolset != '*' and dependency_toolset != dependency_target_toolset): continue - dependency = gyp.common.QualifiedTarget(dependency_build_file, - dependency_target_name, - dependency_target_toolset) + dependency = gyp.common.QualifiedTarget(dependency_build_file, dependency_target_name, dependency_target_toolset) index = index + 1 dependencies.insert(index, dependency) @@ -1512,8 +1265,7 @@ def RemoveLinkDependenciesFromNoneTargets(targets): for t in dependencies: if target_dict.get('type', None) == 'none': if targets[t].get('variables', {}).get('link_dependency', 0): - target_dict[dependency_key] = \ - Filter(target_dict[dependency_key], t) + target_dict[dependency_key] = Filter(target_dict[dependency_key], t) class DependencyGraphNode(object): @@ -1536,7 +1288,10 @@ def __init__(self, ref): def __repr__(self): return '' % self.ref - def FlattenToList(self): + def __lt__(self, other): + return self.ref < other.ref + + def FlattenToList_NoCycles(self, nodes): # flat_list is the sorted list of dependencies - actually, the list items # are the "ref" attributes of DependencyGraphNodes. Every target will # appear in flat_list after all of its dependencies, and before all of its @@ -1547,7 +1302,7 @@ def FlattenToList(self): # dependencies not in flat_list. Initially, it is a copy of the children # of this node, because when the graph was built, nodes with no # dependencies were made implicit dependents of the root node. - in_degree_zeros = set(self.dependents[:]) + in_degree_zeros = sorted(self.dependents[:]) while in_degree_zeros: # Nodes in in_degree_zeros have no dependencies not in flat_list, so they @@ -1559,12 +1314,12 @@ def FlattenToList(self): # Look at dependents of the node just added to flat_list. Some of them # may now belong in in_degree_zeros. - for node_dependent in node.dependents: + for node_dependent in sorted(node.dependents): is_in_degree_zero = True # TODO: We want to check through the # node_dependent.dependencies list but if it's long and we # always start at the beginning, then we get O(n^2) behaviour. - for node_dependent_dependency in node_dependent.dependencies: + for node_dependent_dependency in sorted(node_dependent.dependencies): if not node_dependent_dependency.ref in flat_list: # The dependent one or more dependencies not in flat_list. There # will be more chances to add it to flat_list when examining @@ -1577,7 +1332,22 @@ def FlattenToList(self): # All of the dependent's dependencies are already in flat_list. Add # it to in_degree_zeros where it will be processed in a future # iteration of the outer loop. 
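The flattening loop above is Kahn's topological sort, with the worklist kept sorted so the output order is deterministic; the check that follows raises when nodes remain unvisited, which implies a cycle. A self-contained sketch over a plain dependency dict:

    def flatten(deps):
        # deps maps node -> list of nodes it depends on.
        emitted, order = set(), []
        worklist = sorted(n for n, d in deps.items() if not d)
        while worklist:
            node = worklist.pop(0)
            emitted.add(node)
            order.append(node)
            # Nodes whose dependencies are now all emitted become ready.
            ready = [n for n in deps
                     if n not in emitted and n not in worklist
                     and all(d in emitted for d in deps[n])]
            worklist = sorted(worklist + ready)
        if len(order) != len(deps):
            raise ValueError('cycle among: %s' % sorted(set(deps) - emitted))
        return order

    print(flatten({'a': [], 'b': ['a'], 'c': ['a', 'b']}))  # ['a', 'b', 'c']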
- in_degree_zeros.add(node_dependent) + in_degree_zeros += [node_dependent] + + if len(flat_list) != len(nodes) and len(nodes) != 0: + # If there's anything left unvisited, there must be a circular dependency (cycle). + if not self.dependents: + # If all files have dependencies, add the first file as a dependent + # of root_node so that the cycle can be discovered from root_node. + first_node = nodes.popitem()[1] + first_node.dependencies.append(self) + self.dependents.append(first_node) + + cycles = [] + for cycle in self.FindCycles(): + paths = [n.ref for n in cycle] + cycles.append('Cycle: %s' % ' -> '.join(paths)) + raise DependencyGraphNode.CircularException('Cycles in dependency graph detected:\n' + '\n'.join(cycles)) return list(flat_list) @@ -1586,7 +1356,7 @@ def FindCycles(self): Returns a list of cycles in the graph, where each cycle is its own list. """ results = [] - visited = set() + visited = OrderedSet() def Visit(node, path): for child in node.dependents: @@ -1645,8 +1415,7 @@ def _AddImportedDependencies(self, targets, dependencies=None): # dependency that exported them. This is done to more closely match # the depth-first method used by DeepDependencies. add_index = 1 - for imported_dependency in \ - dependency_dict.get('export_dependent_settings', []): + for imported_dependency in dependency_dict.get('export_dependent_settings', []): if imported_dependency not in dependencies: dependencies.insert(index + add_index, imported_dependency) add_index = add_index + 1 @@ -1680,8 +1449,7 @@ def DeepDependencies(self, dependencies=None): return dependencies - def _LinkDependenciesInternal(self, targets, include_shared_libraries, - dependencies=None, initial=True): + def _LinkDependenciesInternal(self, targets, include_shared_libraries, dependencies=None, initial=True): """Returns an OrderedSet of dependency targets that are linked into this target. @@ -1713,8 +1481,7 @@ def _LinkDependenciesInternal(self, targets, include_shared_libraries, raise GypError("Missing 'target_name' field in target.") if 'type' not in targets[self.ref]: - raise GypError("Missing 'type' field in target %s" % - targets[self.ref]['target_name']) + raise GypError("Missing 'type' field in target %s" % targets[self.ref]['target_name']) target_type = targets[self.ref]['type'] @@ -1728,17 +1495,15 @@ def _LinkDependenciesInternal(self, targets, include_shared_libraries, return dependencies # Don't traverse 'none' targets if explicitly excluded. - if (target_type == 'none' and - not targets[self.ref].get('dependencies_traverse', True)): + if (target_type == 'none' and not targets[self.ref].get('dependencies_traverse', True)): dependencies.add(self.ref) return dependencies - # Executables, mac kernel extensions and loadable modules are already fully - # and finally linked. Nothing else can be a link dependency of them, there - # can only be dependencies in the sense that a dependent target might run - # an executable or load the loadable_module. - if not initial and target_type in ('executable', 'loadable_module', - 'mac_kernel_extension'): + # Executables, mac kernel extensions, windows drivers and loadable modules + # are already fully and finally linked. Nothing else can be a link + # dependency of them, there can only be dependencies in the sense that a + # dependent target might run an executable or load the loadable_module. + if not initial and target_type in ('executable', 'loadable_module', 'mac_kernel_extension', 'windows_driver'): return dependencies # Shared libraries are already fully linked. 
They should only be included @@ -1747,8 +1512,7 @@ def _LinkDependenciesInternal(self, targets, include_shared_libraries, # in |dependencies| when propagating link_settings. # The |include_shared_libraries| flag controls which of these two cases we # are handling. - if (not initial and target_type == 'shared_library' and - not include_shared_libraries): + if (not initial and target_type == 'shared_library' and not include_shared_libraries): return dependencies # The target is linkable, add it to the list of link dependencies. @@ -1760,9 +1524,7 @@ def _LinkDependenciesInternal(self, targets, include_shared_libraries, # this target linkable. Always look at dependencies of the initial # target, and always look at dependencies of non-linkables. for dependency in self.dependencies: - dependency._LinkDependenciesInternal(targets, - include_shared_libraries, - dependencies, False) + dependency._LinkDependenciesInternal(targets, include_shared_libraries, dependencies, False) return dependencies @@ -1776,8 +1538,7 @@ def DependenciesForLinkSettings(self, targets): # link_settings are propagated. So for now, we will allow it, unless the # 'allow_sharedlib_linksettings_propagation' flag is explicitly set to # False. Once chrome is fixed, we can remove this flag. - include_shared_libraries = \ - targets[self.ref].get('allow_sharedlib_linksettings_propagation', True) + include_shared_libraries = targets[self.ref].get('allow_sharedlib_linksettings_propagation', True) return self._LinkDependenciesInternal(targets, include_shared_libraries) def DependenciesToLinkAgainst(self, targets): @@ -1790,17 +1551,15 @@ def DependenciesToLinkAgainst(self, targets): def BuildDependencyList(targets): # Create a DependencyGraphNode for each target. Put it into a dict for easy # access. - dependency_nodes = {} - for target, spec in targets.items(): + dependency_nodes = OrderedDict() + for target in targets.keys(): if target not in dependency_nodes: dependency_nodes[target] = DependencyGraphNode(target) - # Set up the dependency links. Targets that have no dependencies are treated - # as dependent on root_node. + # Set up the dependency links. Targets that have no dependencies are treated as dependent on root_node. root_node = DependencyGraphNode(None) for target, spec in targets.items(): target_node = dependency_nodes[target] - target_build_file = gyp.common.BuildFile(target) dependencies = spec.get('dependencies') if not dependencies: target_node.dependencies = [root_node] @@ -1809,39 +1568,19 @@ def BuildDependencyList(targets): for dependency in dependencies: dependency_node = dependency_nodes.get(dependency) if not dependency_node: - raise GypError("Dependency '%s' not found while " - "trying to load target %s" % (dependency, target)) + raise GypError("Dependency '%s' not found while trying to load target %s" % (dependency, target)) target_node.dependencies.append(dependency_node) dependency_node.dependents.append(target_node) - flat_list = root_node.FlattenToList() - - # If there's anything left unvisited, there must be a circular dependency - # (cycle). - if len(flat_list) != len(targets): - if not root_node.dependents: - # If all targets have dependencies, add the first target as a dependent - # of root_node so that the cycle can be discovered from root_node. 
- target = targets.keys()[0] - target_node = dependency_nodes[target] - target_node.dependencies.append(root_node) - root_node.dependents.append(target_node) - - cycles = [] - for cycle in root_node.FindCycles(): - paths = [node.ref for node in cycle] - cycles.append('Cycle: %s' % ' -> '.join(paths)) - raise DependencyGraphNode.CircularException( - 'Cycles in dependency graph detected:\n' + '\n'.join(cycles)) - - return [dependency_nodes, flat_list] + flat_list = root_node.FlattenToList_NoCycles(dependency_nodes) + return [dict(dependency_nodes), flat_list] def VerifyNoGYPFileCircularDependencies(targets): # Create a DependencyGraphNode for each gyp file containing a target. Put # it into a dict for easy access. - dependency_nodes = {} - for target in targets: + dependency_nodes = OrderedDict() + for target in targets.keys(): build_file = gyp.common.BuildFile(target) if not build_file in dependency_nodes: dependency_nodes[build_file] = DependencyGraphNode(build_file) @@ -1855,8 +1594,7 @@ def VerifyNoGYPFileCircularDependencies(targets): try: dependency_build_file = gyp.common.BuildFile(dependency) except GypError as e: - gyp.common.ExceptionAppend( - e, 'while computing dependencies of .gyp file %s' % build_file) + gyp.common.ExceptionAppend(e, 'while computing dependencies of .gyp file %s' % build_file) raise if dependency_build_file == build_file: @@ -1869,7 +1607,6 @@ def VerifyNoGYPFileCircularDependencies(targets): build_file_node.dependencies.append(dependency_node) dependency_node.dependents.append(build_file_node) - # Files that have no dependencies are treated as dependent on root_node. root_node = DependencyGraphNode(None) for build_file_node in dependency_nodes.values(): @@ -1877,23 +1614,7 @@ def VerifyNoGYPFileCircularDependencies(targets): build_file_node.dependencies.append(root_node) root_node.dependents.append(build_file_node) - flat_list = root_node.FlattenToList() - - # If there's anything left unvisited, there must be a circular dependency - # (cycle). - if len(flat_list) != len(dependency_nodes): - if not root_node.dependents: - # If all files have dependencies, add the first file as a dependent - # of root_node so that the cycle can be discovered from root_node. 
- file_node = dependency_nodes.values()[0] - file_node.dependencies.append(root_node) - root_node.dependents.append(file_node) - cycles = [] - for cycle in root_node.FindCycles(): - paths = [node.ref for node in cycle] - cycles.append('Cycle: %s' % ' -> '.join(paths)) - raise DependencyGraphNode.CircularException( - 'Cycles in .gyp file dependency graph detected:\n' + '\n'.join(cycles)) + root_node.FlattenToList_NoCycles(dependency_nodes) def DoDependentSettings(key, flat_list, targets, dependency_nodes): @@ -1907,26 +1628,21 @@ def DoDependentSettings(key, flat_list, targets, dependency_nodes): if key == 'all_dependent_settings': dependencies = dependency_nodes[target].DeepDependencies() elif key == 'direct_dependent_settings': - dependencies = \ - dependency_nodes[target].DirectAndImportedDependencies(targets) + dependencies = dependency_nodes[target].DirectAndImportedDependencies(targets) elif key == 'link_settings': - dependencies = \ - dependency_nodes[target].DependenciesForLinkSettings(targets) + dependencies = dependency_nodes[target].DependenciesForLinkSettings(targets) else: - raise GypError("DoDependentSettings doesn't know how to determine " - 'dependencies for ' + key) + raise GypError("DoDependentSettings doesn't know how to determine dependencies for " + key) for dependency in dependencies: dependency_dict = targets[dependency] if not key in dependency_dict: continue dependency_build_file = gyp.common.BuildFile(dependency) - MergeDicts(target_dict, dependency_dict[key], - build_file, dependency_build_file) + MergeDicts(target_dict, dependency_dict[key], build_file, dependency_build_file) -def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes, - sort_dependencies): +def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes, sort_dependencies): # Recompute target "dependencies" properties. For each static library # target, remove "dependencies" entries referring to other static libraries, # unless the dependency has the "hard_dependency" attribute set. For each @@ -1941,8 +1657,7 @@ def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes, if not 'dependencies' in target_dict: continue - target_dict['dependencies_original'] = target_dict.get( - 'dependencies', [])[:] + target_dict['dependencies_original'] = target_dict.get('dependencies', [])[:] # A static library should not depend on another static library unless # the dependency relationship is "hard," which should only be done when @@ -1952,8 +1667,7 @@ def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes, # the non-hard dependency can safely be removed, but the exported hard # dependency must be added to the target to keep the same dependency # ordering. - dependencies = \ - dependency_nodes[target].DirectAndImportedDependencies(targets) + dependencies = dependency_nodes[target].DirectAndImportedDependencies(targets) index = 0 while index < len(dependencies): dependency = dependencies[index] @@ -1961,10 +1675,10 @@ def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes, # Remove every non-hard static library dependency and remove every # non-static library dependency that isn't a direct dependency. 
- if (dependency_dict['type'] == 'static_library' and \ + if (dependency_dict['type'] == 'static_library' and not dependency_dict.get('hard_dependency', False)) or \ - (dependency_dict['type'] != 'static_library' and \ - not dependency in target_dict['dependencies']): + (dependency_dict['type'] != 'static_library' and + not dependency in target_dict['dependencies']): # Take the dependency out of the list, and don't increment index # because the next dependency to analyze will shift into the index # formerly occupied by the one being removed. @@ -1984,8 +1698,7 @@ def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes, # target. Add them to the dependencies list if they're not already # present. - link_dependencies = \ - dependency_nodes[target].DependenciesToLinkAgainst(targets) + link_dependencies = dependency_nodes[target].DependenciesToLinkAgainst(targets) for dependency in link_dependencies: if dependency == target: continue @@ -1998,8 +1711,7 @@ def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes, # Note: flat_list is already sorted in the order from dependencies to # dependents. if sort_dependencies and 'dependencies' in target_dict: - target_dict['dependencies'] = [dep for dep in reversed(flat_list) - if dep in target_dict['dependencies']] + target_dict['dependencies'] = [dep for dep in reversed(flat_list) if dep in target_dict['dependencies']] # Initialize this here to speed up MakePathRelative. @@ -2030,14 +1742,12 @@ def MakePathRelative(to_file, fro_file, item): # TODO(dglazkov) The backslash/forward-slash replacement at the end is a # temporary measure. This should really be addressed by keeping all paths # in POSIX until actual project generation. - ret = os.path.normpath(os.path.join( - gyp.common.RelativePath(os.path.dirname(fro_file), - os.path.dirname(to_file)), - item)).replace('\\', '/') - if item[-1:] == '/': + ret = os.path.normpath(os.path.join(gyp.common.RelativePath(os.path.dirname(fro_file), os.path.dirname(to_file)), item)).replace('\\', '/') + if item[-1] == '/': ret += '/' return ret + def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True): # Python documentation recommends objects which do not support hash # set this value to None. Python library objects follow this rule. @@ -2083,9 +1793,7 @@ def is_in_set_or_list(x, s, l): to_item = [] MergeLists(to_item, item, to_file, fro_file) else: - raise TypeError( - 'Attempt to merge list item of unsupported type ' + \ - item.__class__.__name__) + raise TypeError('Attempt to merge list item of unsupported type ' + item.__class__.__name__) if append: # If appending a singleton that's already in the list, don't append. @@ -2127,10 +1835,7 @@ def MergeDicts(to, fro, to_file, fro_file): bad_merge = True if bad_merge: - raise TypeError( - 'Attempt to merge dict value of type ' + v.__class__.__name__ + \ - ' into incompatible type ' + to[k].__class__.__name__ + \ - ' for key ' + k) + raise TypeError('Attempt to merge dict value of type ' + v.__class__.__name__ + ' into incompatible type ' + to[k].__class__.__name__ + ' for key ' + k) if type(v) in (str, int): # Overwrite the existing value, if any. Cheap and easy. is_path = IsPathSection(k) @@ -2178,8 +1883,7 @@ def MergeDicts(to, fro, to_file, fro_file): # and prepend are the only policies that can coexist. 
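MakePathRelative above re-expresses a path written relative to fro_file's directory so that it is relative to to_file's directory, normalizing to forward slashes and preserving a trailing slash. Roughly equivalent, with os.path.relpath standing in for gyp.common.RelativePath:

    import os.path

    def make_path_relative(to_file, fro_file, item):
        rel = os.path.relpath(os.path.dirname(fro_file) or '.',
                              os.path.dirname(to_file) or '.')
        ret = os.path.normpath(os.path.join(rel, item)).replace('\\', '/')
        if item.endswith('/'):
            ret += '/'
        return ret

    print(make_path_relative('src/app/app.gyp', 'src/common.gypi', 'include/'))
    # ../include/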
for list_incompatible in lists_incompatible: if list_incompatible in fro: - raise GypError('Incompatible list policies ' + k + ' and ' + - list_incompatible) + raise GypError('Incompatible list policies ' + k + ' and ' + list_incompatible) if list_base in to: if ext == '?': @@ -2189,10 +1893,7 @@ def MergeDicts(to, fro, to_file, fro_file): elif type(to[list_base]) is not list: # This may not have been checked above if merging in a list with an # extension character. - raise TypeError( - 'Attempt to merge dict value of type ' + v.__class__.__name__ + \ - ' into incompatible type ' + to[list_base].__class__.__name__ + \ - ' for key ' + list_base + '(' + k + ')') + raise TypeError('Attempt to merge dict value of type ' + v.__class__.__name__ + ' into incompatible type ' + to[list_base].__class__.__name__ + ' for key ' + list_base + '(' + k + ')') else: to[list_base] = [] @@ -2204,13 +1905,10 @@ def MergeDicts(to, fro, to_file, fro_file): is_paths = IsPathSection(list_base) MergeLists(to[list_base], v, to_file, fro_file, is_paths, append) else: - raise TypeError( - 'Attempt to merge dict value of unsupported type ' + \ - v.__class__.__name__ + ' for key ' + k) + raise TypeError('Attempt to merge dict value of unsupported type ' + v.__class__.__name__ + ' for key ' + k) -def MergeConfigWithInheritance(new_configuration_dict, build_file, - target_dict, configuration, visited): +def MergeConfigWithInheritance(new_configuration_dict, build_file, target_dict, configuration, visited): # Skip if previously visted. if configuration in visited: return @@ -2220,12 +1918,10 @@ def MergeConfigWithInheritance(new_configuration_dict, build_file, # Merge in parents. for parent in configuration_dict.get('inherit_from', []): - MergeConfigWithInheritance(new_configuration_dict, build_file, - target_dict, parent, visited + [configuration]) + MergeConfigWithInheritance(new_configuration_dict, build_file, target_dict, parent, visited + [configuration]) # Merge it into the new config. - MergeDicts(new_configuration_dict, configuration_dict, - build_file, build_file) + MergeDicts(new_configuration_dict, configuration_dict, build_file, build_file) # Drop abstract. if 'abstract' in new_configuration_dict: @@ -2247,8 +1943,7 @@ def SetUpConfigurations(target, target_dict): if not 'configurations' in target_dict: target_dict['configurations'] = {'Default': {}} if not 'default_configuration' in target_dict: - concrete = [i for (i, config) in target_dict['configurations'].items() - if not config.get('abstract')] + concrete = [i for (i, config) in target_dict['configurations'].items() if not config.get('abstract')] target_dict['default_configuration'] = sorted(concrete)[0] merged_configurations = {} @@ -2268,24 +1963,20 @@ def SetUpConfigurations(target, target_dict): else: key_base = key if not key_base in non_configuration_keys: - new_configuration_dict[key] = gyp.simple_copy.deepcopy(target_val) + new_configuration_dict[key] = gyp.lib.simple_copy.deepcopy(target_val) # Merge in configuration (with all its parents first). - MergeConfigWithInheritance(new_configuration_dict, build_file, - target_dict, configuration, []) + MergeConfigWithInheritance(new_configuration_dict, build_file, target_dict, configuration, []) merged_configurations[configuration] = new_configuration_dict # Put the new configurations back into the target dict as a configuration. 
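The merge logic above honors a one-character suffix on list keys: '=' replaces the destination list, '+' prepends, '?' touches it only when unset, and a bare key appends. A compact sketch of those policies (real gyp also path-adjusts items and rejects conflicting suffixes):

    def merge_list_key(to, key, value):
        ext = key[-1]
        if ext == '=':
            to[key[:-1]] = list(value)                         # replace outright
        elif ext == '+':
            to[key[:-1]] = list(value) + to.get(key[:-1], [])  # prepend
        elif ext == '?':
            to.setdefault(key[:-1], list(value))               # only if unset
        else:
            to[key] = to.get(key, []) + list(value)            # append

    d = {'defines': ['A']}
    merge_list_key(d, 'defines', ['B'])   # append  -> ['A', 'B']
    merge_list_key(d, 'defines=', ['C'])  # replace -> ['C']
    merge_list_key(d, 'defines+', ['D'])  # prepend -> ['D', 'C']
    merge_list_key(d, 'defines?', ['E'])  # no-op, 'defines' already set
    print(d)  # {'defines': ['D', 'C']}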
for configuration in merged_configurations.keys(): - target_dict['configurations'][configuration] = ( - merged_configurations[configuration]) + target_dict['configurations'][configuration] = merged_configurations[configuration] # Now drop all the abstract ones. - for configuration in target_dict['configurations'].keys(): - old_configuration_dict = target_dict['configurations'][configuration] - if old_configuration_dict.get('abstract'): - del target_dict['configurations'][configuration] + configs = target_dict['configurations'] + target_dict['configurations'] = {k: v for k, v in configs.items() if not v.get('abstract')} # Now that all of the target's configurations have been built, go through # the target dict's keys and remove everything that's been moved into a @@ -2307,9 +1998,7 @@ def SetUpConfigurations(target, target_dict): configuration_dict = target_dict['configurations'][configuration] for key in configuration_dict.keys(): if key in invalid_configuration_keys: - raise GypError('%s not allowed in the %s configuration, found in ' - 'target %s' % (key, configuration, target)) - + raise GypError('%s not allowed in the %s configuration, found in target %s' % (key, configuration, target)) def ProcessListFiltersInDict(name, the_dict): @@ -2351,8 +2040,7 @@ def ProcessListFiltersInDict(name, the_dict): continue if type(value) is not list: - raise ValueError(name + ' key ' + key + ' must be list, not ' + \ - value.__class__.__name__) + raise ValueError(name + ' key ' + key + ' must be list, not ' + value.__class__.__name__) list_key = key[:-1] if list_key not in the_dict: @@ -2364,10 +2052,7 @@ def ProcessListFiltersInDict(name, the_dict): if type(the_dict[list_key]) is not list: value = the_dict[list_key] - raise ValueError(name + ' key ' + list_key + \ - ' must be list, not ' + \ - value.__class__.__name__ + ' when applying ' + \ - {'!': 'exclusion', '/': 'regex'}[operation]) + raise ValueError(name + ' key ' + list_key + ' must be list, not ' + value.__class__.__name__ + ' when applying ' + {'!': 'exclusion', '/': 'regex'}[operation]) if not list_key in lists: lists.append(list_key) @@ -2393,8 +2078,8 @@ def ProcessListFiltersInDict(name, the_dict): exclude_key = list_key + '!' if exclude_key in the_dict: for exclude_item in the_dict[exclude_key]: - for index in range(0, len(the_list)): - if exclude_item == the_list[index]: + for index, list_item in enumerate(the_list): + if exclude_item == list_item: # This item matches the exclude_item, so set its action to 0 # (exclude). list_actions[index] = 0 @@ -2416,11 +2101,9 @@ def ProcessListFiltersInDict(name, the_dict): action_value = 1 else: # This is an action that doesn't make any sense. - raise ValueError('Unrecognized action ' + action + ' in ' + name + \ - ' key ' + regex_key) + raise ValueError('Unrecognized action ' + action + ' in ' + name + ' key ' + regex_key) - for index in range(0, len(the_list)): - list_item = the_list[index] + for index, list_item in enumerate(the_list): if list_actions[index] == action_value: # Even if the regex matches, nothing will change so continue (regex # searches are expensive). @@ -2440,9 +2123,7 @@ def ProcessListFiltersInDict(name, the_dict): # to be created. 
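ProcessListFiltersInDict above applies two filter forms to a list key such as sources: 'sources!' removes exact matches, and 'sources/' applies ['action', 'regex'] rules, with later rules overriding earlier decisions and the losers collected under 'sources_excluded'. A condensed sketch:

    import re

    the_dict = {
        'sources': ['a.cc', 'b.cc', 'b_win.cc', 'c_test.cc'],
        'sources!': ['a.cc'],
        'sources/': [['exclude', r'_win\.cc$']],
    }

    kept, excluded = [], []
    exclusions = set(the_dict.pop('sources!', []))
    rules = the_dict.pop('sources/', [])
    for item in the_dict['sources']:
        drop = item in exclusions
        for action, pattern in rules:
            if re.search(pattern, item):
                drop = (action == 'exclude')
        (excluded if drop else kept).append(item)

    the_dict['sources'] = kept
    the_dict['sources_excluded'] = excluded
    print(the_dict['sources'])           # ['b.cc', 'c_test.cc']
    print(the_dict['sources_excluded'])  # ['a.cc', 'b_win.cc']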
excluded_key = list_key + '_excluded' if excluded_key in the_dict: - raise GypError(name + ' key ' + excluded_key + - ' must not be present prior ' - ' to applying exclusion/regex filters for ' + list_key) + raise GypError(name + ' key ' + excluded_key + ' must not be present prior to applying exclusion/regex filters for ' + list_key) excluded_list = [] @@ -2487,33 +2168,22 @@ def ValidateTargetType(target, target_dict): Raises an exception on error. """ - VALID_TARGET_TYPES = ('executable', 'loadable_module', - 'static_library', 'shared_library', - 'mac_kernel_extension', 'none') + VALID_TARGET_TYPES = ('executable', 'loadable_module', 'static_library', 'shared_library', 'mac_kernel_extension', 'none', 'windows_driver') target_type = target_dict.get('type', None) if target_type not in VALID_TARGET_TYPES: - raise GypError("Target %s has an invalid target type '%s'. " - "Must be one of %s." % - (target, target_type, '/'.join(VALID_TARGET_TYPES))) - if (target_dict.get('standalone_static_library', 0) and - not target_type == 'static_library'): - raise GypError('Target %s has type %s but standalone_static_library flag is' - ' only valid for static_library type.' % (target, - target_type)) - - -def ValidateSourcesInTarget(target, target_dict, build_file, - duplicate_basename_check): - if not duplicate_basename_check: - return + raise GypError("Target %s has an invalid target type '%s'. Must be one of %s." % (target, target_type, '/'.join(VALID_TARGET_TYPES))) + if (target_dict.get('standalone_static_library', 0) and not target_type == 'static_library'): + raise GypError('Target %s has type %s but standalone_static_library flag is only valid for static_library type.' % (target, target_type)) + + +def ValidateSourcesInTarget(target, target_dict): if target_dict.get('type', None) != 'static_library': return sources = target_dict.get('sources', []) basenames = {} for source in sources: name, ext = os.path.splitext(source) - is_compiled_file = ext in [ - '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S'] + is_compiled_file = ext in ['.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S'] if not is_compiled_file: continue basename = os.path.basename(name) # Don't include extension. @@ -2525,9 +2195,7 @@ def ValidateSourcesInTarget(target, target_dict, build_file, error += ' %s: %s\n' % (basename, ' '.join(files)) if error: - print('static library %s has several files with the same basename:\n' % target - + error + 'libtool on Mac cannot handle that. Use ' - '--no-duplicate-basename-check to disable this validation.') + print('static library %s has several files with the same basename:\n' % target + error + 'libtool on Mac cannot handle that. Use --no-duplicate-basename-check to disable this validation.') raise GypError('Duplicate basenames in sources section, see list above') @@ -2552,31 +2220,24 @@ def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules): # Make sure that there's no conflict among rule names and extensions. 
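ValidateSourcesInTarget above rejects a static_library whose compiled sources share a basename, since Mac libtool collides on them. The detection step in isolation:

    import os.path
    from collections import defaultdict

    COMPILED_EXTS = {'.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S'}

    def duplicate_basenames(sources):
        seen = defaultdict(list)
        for source in sources:
            name, ext = os.path.splitext(source)
            if ext in COMPILED_EXTS:
                seen[os.path.basename(name)].append(source)
        return {base: files for base, files in seen.items() if len(files) > 1}

    print(duplicate_basenames(['a/util.cc', 'b/util.cc', 'b/util.h']))
    # {'util': ['a/util.cc', 'b/util.cc']}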
rule_name = rule['rule_name'] if rule_name in rule_names: - raise GypError('rule %s exists in duplicate, target %s' % - (rule_name, target)) + raise GypError('rule %s exists in duplicate, target %s' % (rule_name, target)) rule_names[rule_name] = rule rule_extension = rule['extension'] if rule_extension.startswith('.'): rule_extension = rule_extension[1:] if rule_extension in rule_extensions: - raise GypError(('extension %s associated with multiple rules, ' + - 'target %s rules %s and %s') % - (rule_extension, target, - rule_extensions[rule_extension]['rule_name'], - rule_name)) + raise GypError(('extension %s associated with multiple rules, ' + 'target %s rules %s and %s') % (rule_extension, target, rule_extensions[rule_extension]['rule_name'], rule_name)) rule_extensions[rule_extension] = rule # Make sure rule_sources isn't already there. It's going to be # created below if needed. if 'rule_sources' in rule: - raise GypError( - 'rule_sources must not exist in input, target %s rule %s' % - (target, rule_name)) + raise GypError('rule_sources must not exist in input, target %s rule %s' % (target, rule_name)) rule_sources = [] - source_keys = ['sources'] - source_keys.extend(extra_sources_for_rules) + source_keys = {'sources'} + source_keys.update(extra_sources_for_rules) for source_key in source_keys: for source in target_dict.get(source_key, []): (source_root, source_extension) = os.path.splitext(source) @@ -2589,46 +2250,34 @@ def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules): rule['rule_sources'] = rule_sources -def ValidateRunAsInTarget(target, target_dict, build_file): +def ValidateRunAsInTarget(target_dict, build_file): target_name = target_dict.get('target_name') run_as = target_dict.get('run_as') if not run_as: return if type(run_as) is not dict: - raise GypError("The 'run_as' in target %s from file %s should be a " - "dictionary." % - (target_name, build_file)) + raise GypError("The 'run_as' in target %s from file %s should be a dictionary." % (target_name, build_file)) action = run_as.get('action') if not action: - raise GypError("The 'run_as' in target %s from file %s must have an " - "'action' section." % - (target_name, build_file)) + raise GypError("The 'run_as' in target %s from file %s must have an 'action' section." % (target_name, build_file)) if type(action) is not list: - raise GypError("The 'action' for 'run_as' in target %s from file %s " - "must be a list." % - (target_name, build_file)) + raise GypError("The 'action' for 'run_as' in target %s from file %s must be a list." % (target_name, build_file)) working_directory = run_as.get('working_directory') if working_directory and type(working_directory) is not str: - raise GypError("The 'working_directory' for 'run_as' in target %s " - "in file %s should be a string." % - (target_name, build_file)) + raise GypError("The 'working_directory' for 'run_as' in target %s in file %s should be a string." % (target_name, build_file)) environment = run_as.get('environment') if environment and type(environment) is not dict: - raise GypError("The 'environment' for 'run_as' in target %s " - "in file %s should be a dictionary." % - (target_name, build_file)) + raise GypError("The 'environment' for 'run_as' in target %s in file %s should be a dictionary." 
% (target_name, build_file)) -def ValidateActionsInTarget(target, target_dict, build_file): - '''Validates the inputs to the actions in a target.''' +def ValidateActionsInTarget(target_dict): + """Validates the inputs to the actions in a target.""" target_name = target_dict.get('target_name') actions = target_dict.get('actions', []) for action in actions: action_name = action.get('action_name') if not action_name: - raise GypError("Anonymous action in target %s. " - "An action must have an 'action_name' field." % - target_name) + raise GypError("Anonymous action in target %s. An action must have an 'action_name' field." % target_name) inputs = action.get('inputs', None) if inputs is None: raise GypError('Action in target %s has no inputs.' % target_name) @@ -2640,7 +2289,7 @@ def ValidateActionsInTarget(target, target_dict, build_file): def TurnIntIntoStrInDict(the_dict): """Given dict the_dict, recursively converts all integers into strings. """ - # Use items instead of items because there's no need to try to look at + # Use items instead of iteritems because there's no need to try to look at # reinserted keys and their associated values. for k, v in the_dict.items(): if type(v) is int: @@ -2659,8 +2308,7 @@ def TurnIntIntoStrInDict(the_dict): def TurnIntIntoStrInList(the_list): """Given list the_list, recursively converts all integers into strings. """ - for index in range(0, len(the_list)): - item = the_list[index] + for index, item in enumerate(the_list): if type(item) is int: the_list[index] = str(item) elif type(item) is dict: @@ -2669,8 +2317,7 @@ def TurnIntIntoStrInList(the_list): TurnIntIntoStrInList(item) -def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets, - data): +def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets, data): """Return only the targets that are deep dependencies of |root_targets|.""" qualified_root_targets = [] for target in root_targets: @@ -2694,9 +2341,7 @@ def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets, continue new_targets = [] for target in data[build_file]['targets']: - qualified_name = gyp.common.QualifiedTarget(build_file, - target['target_name'], - target['toolset']) + qualified_name = gyp.common.QualifiedTarget(build_file, target['target_name'], target['toolset']) if qualified_name in wanted_targets: new_targets.append(target) data[build_file]['targets'] = new_targets @@ -2743,15 +2388,13 @@ def SetGeneratorGlobals(generator_input_info): non_configuration_keys.extend(generator_input_info['non_configuration_keys']) global multiple_toolsets - multiple_toolsets = generator_input_info[ - 'generator_supports_multiple_toolsets'] + multiple_toolsets = generator_input_info['generator_supports_multiple_toolsets'] global generator_filelist_paths generator_filelist_paths = generator_input_info['generator_filelist_paths'] -def Load(build_files, variables, includes, depth, generator_input_info, check, - circular_check, duplicate_basename_check, parallel, root_targets): +def Load(build_files, variables, includes, depth, generator_input_info, root_targets): SetGeneratorGlobals(generator_input_info) # A generator can have other lists (in addition to sources) be processed # for rules. @@ -2764,22 +2407,17 @@ def Load(build_files, variables, includes, depth, generator_input_info, check, # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps # track of the keys corresponding to "target" files. 
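Note: the change just below swaps plain set()/dict() for OrderedSet/OrderedDict, so build files are loaded, and their targets emitted, in a stable insertion order. A minimal sketch of the ordered-set idea (gyp.common's real OrderedSet is a fuller MutableSet implementation; this only shows the core behavior it relies on):

    from collections import OrderedDict

    class OrderedSetSketch(object):
      """Set semantics with deterministic, insertion-ordered iteration."""
      def __init__(self, iterable=()):
        self._items = OrderedDict((item, None) for item in iterable)
      def add(self, item):
        self._items.setdefault(item, None)  # re-adding keeps original position
      def __contains__(self, item):
        return item in self._items
      def __iter__(self):
        return iter(self._items)
      def __len__(self):
        return len(self._items)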
- data = {'target_build_files': set()} + data = {'target_build_files': OrderedSet()} # Normalize paths everywhere. This is important because paths will be # used as keys to the data dict and for references between input files. - build_files = set(map(os.path.normpath, build_files)) - if parallel: - LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth, - check, generator_input_info) - else: - aux_data = {} - for build_file in build_files: - try: - LoadTargetBuildFile(build_file, data, aux_data, - variables, includes, depth, check, True) - except Exception as e: - gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file) - raise + build_files = OrderedSet(map(os.path.normpath, build_files)) + aux_data = OrderedDict() + for build_file in build_files: + try: + LoadTargetBuildFile(build_file, data, aux_data, variables, includes, depth, True) + except Exception as e: + gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file) + raise # Build a dict to access each target's subdict by qualified name. targets = BuildTargetsDict(data) @@ -2815,26 +2453,20 @@ def Load(build_files, variables, includes, depth, generator_input_info, check, # Make sure every dependency appears at most once. RemoveDuplicateDependencies(targets) - if circular_check: - # Make sure that any targets in a.gyp don't contain dependencies in other - # .gyp files that further depend on a.gyp. - VerifyNoGYPFileCircularDependencies(targets) + VerifyNoGYPFileCircularDependencies(targets) [dependency_nodes, flat_list] = BuildDependencyList(targets) if root_targets: # Remove, from |targets| and |flat_list|, the targets that are not deep # dependencies of the targets specified in |root_targets|. - targets, flat_list = PruneUnwantedTargets( - targets, flat_list, dependency_nodes, root_targets, data) + targets, flat_list = PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets, data) # Check that no two targets in the same directory have the same name. VerifyNoCollidingTargets(flat_list) # Handle dependent settings of various types. - for settings_type in ['all_dependent_settings', - 'direct_dependent_settings', - 'link_settings']: + for settings_type in ['all_dependent_settings', 'direct_dependent_settings', 'link_settings']: DoDependentSettings(settings_type, flat_list, targets, dependency_nodes) # Take out the dependent settings now that they've been published to all @@ -2848,15 +2480,13 @@ def Load(build_files, variables, includes, depth, generator_input_info, check, # that they need so that their link steps will be correct. gii = generator_input_info if gii['generator_wants_static_library_dependencies_adjusted']: - AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes, - gii['generator_wants_sorted_dependencies']) + AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes, gii['generator_wants_sorted_dependencies']) # Apply "post"/"late"/"target" variable expansions and condition evaluations. for target in flat_list: target_dict = targets[target] build_file = gyp.common.BuildFile(target) - ProcessVariablesAndConditionsInDict( - target_dict, PHASE_LATE, variables, build_file) + ProcessVariablesAndConditionsInDict(target_dict, PHASE_LATE, variables, build_file) # Move everything that can go into a "configurations" section into one. 
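Note: TurnIntIntoStrInDict/TurnIntIntoStrInList (defined earlier, invoked near the end of Load below) mutate their argument in place and recurse through nested lists and dicts, so generators may assume every scalar value is a string:

    d = {'defines': [1, {'level': 2}], 'count': 3}
    TurnIntIntoStrInDict(d)
    # d == {'defines': ['1', {'level': '2'}], 'count': '3'}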
for target in flat_list: @@ -2872,8 +2502,7 @@ def Load(build_files, variables, includes, depth, generator_input_info, check, for target in flat_list: target_dict = targets[target] build_file = gyp.common.BuildFile(target) - ProcessVariablesAndConditionsInDict( - target_dict, PHASE_LATELATE, variables, build_file) + ProcessVariablesAndConditionsInDict(target_dict, PHASE_LATELATE, variables, build_file) # Make sure that the rules make sense, and build up rule_sources lists as # needed. Not all generators will need to use the rule_sources lists, but @@ -2883,11 +2512,10 @@ def Load(build_files, variables, includes, depth, generator_input_info, check, target_dict = targets[target] build_file = gyp.common.BuildFile(target) ValidateTargetType(target, target_dict) - ValidateSourcesInTarget(target, target_dict, build_file, - duplicate_basename_check) + ValidateSourcesInTarget(target, target_dict) ValidateRulesInTarget(target, target_dict, extra_sources_for_rules) - ValidateRunAsInTarget(target, target_dict, build_file) - ValidateActionsInTarget(target, target_dict, build_file) + ValidateRunAsInTarget(target_dict, build_file) + ValidateActionsInTarget(target_dict) # Generators might not expect ints. Turn them into strs. TurnIntIntoStrInDict(data) diff --git a/gyp/gyp/lib/__init__.py b/gyp/gyp/lib/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/gyp/pylib/gyp/ninja_syntax.py b/gyp/gyp/lib/ninja_syntax.py similarity index 100% rename from gyp/pylib/gyp/ninja_syntax.py rename to gyp/gyp/lib/ninja_syntax.py diff --git a/gyp/pylib/gyp/simple_copy.py b/gyp/gyp/lib/simple_copy.py similarity index 65% rename from gyp/pylib/gyp/simple_copy.py rename to gyp/gyp/lib/simple_copy.py index 94a6f17dab..188434e8bb 100644 --- a/gyp/pylib/gyp/simple_copy.py +++ b/gyp/gyp/lib/simple_copy.py @@ -6,6 +6,8 @@ structures or complex types except for dicts and lists. This is because gyp copies so large structure that small copy overhead ends up taking seconds in a project the size of Chromium.""" +from collections import OrderedDict + class Error(Exception): pass @@ -20,8 +22,7 @@ def deepcopy(x): try: return _deepcopy_dispatch[type(x)](x) except KeyError: - raise Error('Unsupported type %s for deepcopy. Use copy.deepcopy ' + - 'or expand simple_copy support.' % type(x)) + raise Error('Unsupported type %s for deepcopy. Use copy.deepcopy or expand simple_copy support.' 
% type(x)) _deepcopy_dispatch = d = {} @@ -29,11 +30,18 @@ def _deepcopy_atomic(x): return x try: - types = bool, float, int, str, type, type(None), long, unicode -except NameError: # Python 3 - types = bool, float, int, str, type, type(None) + # noinspection PyUnresolvedReferences + _string_types = (str, unicode) +except NameError: + _string_types = (str, ) + +try: + # noinspection PyUnresolvedReferences + _integer_types = (int, long) +except NameError: + _integer_types = (int, ) -for x in types: +for x in (type(None), float, bool, type) + _integer_types + _string_types: d[x] = _deepcopy_atomic def _deepcopy_list(x): @@ -47,4 +55,11 @@ def _deepcopy_dict(x): return y d[dict] = _deepcopy_dict +def _deepcopy_ordered_dict(x): + y = OrderedDict() + for key, value in x.items(): + y[deepcopy(key)] = deepcopy(value) + return y +d[OrderedDict] = _deepcopy_ordered_dict + del d diff --git a/gyp/pylib/gyp/msvs_emulation.py b/gyp/gyp/msvs_emulation.py similarity index 67% rename from gyp/pylib/gyp/msvs_emulation.py rename to gyp/gyp/msvs_emulation.py index 4a50b1b74c..c525af4509 100644 --- a/gyp/pylib/gyp/msvs_emulation.py +++ b/gyp/gyp/msvs_emulation.py @@ -7,18 +7,33 @@ build systems, primarily ninja. """ +import codecs import os +import pickle import re import subprocess import sys +import time +import hashlib +import traceback +from collections import Iterable, OrderedDict +from tempfile import gettempdir -from gyp.common import OrderedSet -import gyp.MSVSUtil -import gyp.MSVSVersion +from gyp import DebugOutput, DEBUG_GENERAL +from gyp.common import EnsureDirExists, WriteOnDiff, memoize +from gyp.MSVS import TARGET_TYPE_EXT, TryQueryRegistryValue +if 'basestring' not in __builtins__: + basestring = str windows_quoter_regex = re.compile(r'(\\*)"') +utf8encoder = codecs.getencoder('utf-8') +def encode(s): + return utf8encoder(s)[0] + +NUL = bytes([0]) + def QuoteForRspFile(arg): """Quote a command line argument so that it appears as one argument when @@ -30,10 +45,14 @@ def QuoteForRspFile(arg): # works more or less because most programs (including the compiler, etc.) # use that function to handle command line arguments. + # Use a heuristic to try to find args that are paths, and normalize them + if arg.find('/') > 0 or arg.count('/') > 1: + arg = os.path.normpath(arg) + # For a literal quote, CommandLineToArgvW requires 2n+1 backslashes # preceding it, and results in n backslashes + the quote. So we substitute # in 2* what we match, +1 more, plus the quote. - arg = windows_quoter_regex.sub(lambda mo: 2 * mo.group(1) + '\\"', arg) + arg = windows_quoter_regex.sub(lambda mo: 2 * str(mo.group(1)) + '\\"', arg) # %'s also need to be doubled otherwise they're interpreted as batch # positional arguments. Also make sure to escape the % so that they're @@ -80,23 +99,22 @@ def _AddPrefix(element, prefix): """Add |prefix| to |element| or each subelement if element is iterable.""" if element is None: return element - # Note, not Iterable because we don't want to handle strings like that. - if isinstance(element, list) or isinstance(element, tuple): + if isinstance(element, Iterable) and not isinstance(element, basestring): return [prefix + e for e in element] else: return prefix + element -def _DoRemapping(element, map): +def _DoRemapping(element, map_arg): """If |element| then remap it through |map|. If |element| is iterable then each item will be remapped. 
Any elements not found will be removed.""" - if map is not None and element is not None: - if not callable(map): - map = map.get # Assume it's a dict, otherwise a callable to do the remap. - if isinstance(element, list) or isinstance(element, tuple): - element = filter(None, [map(elem) for elem in element]) + if map_arg is not None and element is not None: + if not callable(map_arg): + map_arg = map_arg.get # Assume it's a dict, otherwise a callable to do the remap. + if isinstance(element, Iterable) and not isinstance(element, basestring): + element = filter(None, [map_arg(elem) for elem in element]) else: - element = map(element) + element = map_arg(element) return element @@ -105,7 +123,7 @@ def _AppendOrReturn(append, element): then add |element| to it, adding each item in |element| if it's a list or tuple.""" if append is not None and element is not None: - if isinstance(element, list) or isinstance(element, tuple): + if isinstance(element, Iterable) and not isinstance(element, basestring): append.extend(element) else: append.append(element) @@ -113,76 +131,18 @@ def _AppendOrReturn(append, element): return element +@memoize def _FindDirectXInstallation(): - """Try to find an installation location for the DirectX SDK. Check for the + """ + Try to find an installation location for the DirectX SDK. Check for the standard environment variable, and if that doesn't exist, try to find - via the registry. May return None if not found in either location.""" - # Return previously calculated value, if there is one - if hasattr(_FindDirectXInstallation, 'dxsdk_dir'): - return _FindDirectXInstallation.dxsdk_dir - - dxsdk_dir = os.environ.get('DXSDK_DIR') - if not dxsdk_dir: - # Setup params to pass to and attempt to launch reg.exe. - cmd = ['reg.exe', 'query', r'HKLM\Software\Microsoft\DirectX', '/s'] - p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - for line in p.communicate()[0].splitlines(): - if 'InstallPath' in line: - dxsdk_dir = line.split(' ')[3] + "\\" - - # Cache return value - _FindDirectXInstallation.dxsdk_dir = dxsdk_dir + via the registry. May return None if not found in either location. + """ + dxsdk_dir = os.environ.get('DXSDK_DIR', TryQueryRegistryValue('Software\Microsoft\DirectX', 'InstallPath')) return dxsdk_dir -def GetGlobalVSMacroEnv(vs_version): - """Get a dict of variables mapping internal VS macro names to their gyp - equivalents. Returns all variables that are independent of the target.""" - env = {} - # '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when - # Visual Studio is actually installed. - if vs_version.Path(): - env['$(VSInstallDir)'] = vs_version.Path() - env['$(VCInstallDir)'] = os.path.join(vs_version.Path(), 'VC') + '\\' - # Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be - # set. This happens when the SDK is sync'd via src-internal, rather than - # by typical end-user installation of the SDK. If it's not set, we don't - # want to leave the unexpanded variable in the path, so simply strip it. - dxsdk_dir = _FindDirectXInstallation() - env['$(DXSDK_DIR)'] = dxsdk_dir if dxsdk_dir else '' - # Try to find an installation location for the Windows DDK by checking - # the WDK_DIR environment variable, may be None. 
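Note: whether supplied by the GetGlobalVSMacroEnv being deleted here or by MsvsSettings.GetVSMacroEnv below, the macro table is consumed by ExpandMacros (further down in this file) as plain substring replacement, e.g.:

    env = {'$(VSInstallDir)': r'C:\VS', '$(DXSDK_DIR)': ''}
    s = r'$(VSInstallDir)\VC\include;$(DXSDK_DIR)include'
    for old, new in env.items():
      s = s.replace(old, new)
    # s == r'C:\VS\VC\include;include'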
- env['$(WDK_DIR)'] = os.environ.get('WDK_DIR', '') - return env - -def ExtractSharedMSVSSystemIncludes(configs, generator_flags): - """Finds msvs_system_include_dirs that are common to all targets, removes - them from all targets, and returns an OrderedSet containing them.""" - all_system_includes = OrderedSet( - configs[0].get('msvs_system_include_dirs', [])) - for config in configs[1:]: - system_includes = config.get('msvs_system_include_dirs', []) - all_system_includes = all_system_includes & OrderedSet(system_includes) - if not all_system_includes: - return None - # Expand macros in all_system_includes. - env = GetGlobalVSMacroEnv(GetVSVersion(generator_flags)) - expanded_system_includes = OrderedSet([ExpandMacros(include, env) - for include in all_system_includes]) - if any(['$' in include for include in expanded_system_includes]): - # Some path relies on target-specific variables, bail. - return None - - # Remove system includes shared by all targets from the targets. - for config in configs: - includes = config.get('msvs_system_include_dirs', []) - if includes: # Don't insert a msvs_system_include_dirs key if not needed. - # This must check the unexpanded includes list: - new_includes = [i for i in includes if i not in all_system_includes] - config['msvs_system_include_dirs'] = new_includes - return expanded_system_includes - - +# noinspection PyUnresolvedReferences class MsvsSettings(object): """A class that understands the gyp 'msvs_...' values (especially the msvs_settings field). They largely correpond to the VS2008 IDE DOM. This @@ -218,11 +178,13 @@ def __init__(self, spec, generator_flags): for field in unsupported_fields: for config in configs.values(): if field in config: - unsupported += ["%s not supported (target %s)." % - (field, spec['target_name'])] + unsupported += ["%s not supported (target %s)." % (field, spec['target_name'])] if unsupported: raise Exception('\n'.join(unsupported)) + self.msvs_configuration_platform = {} + self.msvs_target_platform = {} + def GetExtension(self): """Returns the extension for the target, with no leading dot. @@ -232,7 +194,7 @@ def GetExtension(self): ext = self.spec.get('product_extension', None) if ext: return ext - return gyp.MSVSUtil.TARGET_TYPE_EXT.get(self.spec['type'], '') + return TARGET_TYPE_EXT.get(self.spec['type'], '') def GetVSMacroEnv(self, base_to_build=None, config=None): """Get a dict of variables mapping internal VS macro names to their gyp @@ -242,12 +204,11 @@ def GetVSMacroEnv(self, base_to_build=None, config=None): target_platform = 'Win32' else: target_platform = target_arch - target_name = self.spec.get('product_prefix', '') + \ - self.spec.get('product_name', self.spec['target_name']) + target_name = self.spec.get('product_prefix', '') + self.spec.get('product_name', self.spec['target_name']) target_dir = base_to_build + '\\' if base_to_build else '' target_ext = '.' + self.GetExtension() target_file_name = target_name + target_ext - + VSInstallDir = self.vs_version.path or '' replacements = { '$(InputName)': '${root}', '$(InputPath)': '${source}', @@ -261,8 +222,19 @@ def GetVSMacroEnv(self, base_to_build=None, config=None): '$(TargetFileName)': target_file_name, '$(TargetName)': target_name, '$(TargetPath)': os.path.join(target_dir, target_file_name), + # '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when + # Visual Studio is actually installed. 
+ '$(VSInstallDir)': VSInstallDir, + '$(VCInstallDir)': os.path.join(VSInstallDir, 'VC\\'), + # Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be + # set. This happens when the SDK is sync'd via src-internal, rather than + # by typical end-user installation of the SDK. If it's not set, we don't + # want to leave the unexpanded variable in the path, so simply strip it. + '$(DXSDK_DIR)': _FindDirectXInstallation() or '', + # Try to find an installation location for the Windows DDK by checking + # the WDK_DIR environment variable, may be None. + '$(WDK_DIR)': os.environ.get('WDK_DIR', ''), } - replacements.update(GetGlobalVSMacroEnv(self.vs_version)) return replacements def ConvertVSMacros(self, s, base_to_build=None, config=None): @@ -270,18 +242,20 @@ def ConvertVSMacros(self, s, base_to_build=None, config=None): env = self.GetVSMacroEnv(base_to_build, config=config) return ExpandMacros(s, env) - def AdjustLibraries(self, libraries): + @staticmethod + def AdjustLibraries(libraries): """Strip -l from library if it's specified with that.""" libs = [lib[2:] if lib.startswith('-l') else lib for lib in libraries] - return [lib + '.lib' if not lib.endswith('.lib') else lib for lib in libs] + return [lib + '.lib' if not lib.lower().endswith('.lib') else lib for lib in libs] - def _GetAndMunge(self, field, path, default, prefix, append, map): + @staticmethod + def _GetAndMunge(field, path, default, prefix, append, map_arg): """Retrieve a value from |field| at |path| or return |default|. If |append| is specified, and the item is found, it will be appended to that object instead of returned. If |map| is specified, results will be remapped through |map| before being returned or appended.""" result = _GenericRetrieve(field, default, path) - result = _DoRemapping(result, map) + result = _DoRemapping(result, map_arg) result = _AddPrefix(result, prefix) return _AppendOrReturn(append, result) @@ -291,16 +265,18 @@ def __init__(self, parent, field, base_path, append=None): self.field = field self.base_path = [base_path] self.append = append + + # noinspection PyShadowingBuiltins def __call__(self, name, map=None, prefix='', default=None): - return self.parent._GetAndMunge(self.field, self.base_path + [name], - default=default, prefix=prefix, append=self.append, map=map) + # noinspection PyProtectedMember + return self.parent._GetAndMunge(self.field, self.base_path + [name], default=default, prefix=prefix, append=self.append, map_arg=map) def GetArch(self, config): """Get architecture based on msvs_configuration_platform and msvs_target_platform. Returns either 'x86' or 'x64'.""" configuration_platform = self.msvs_configuration_platform.get(config, '') platform = self.msvs_target_platform.get(config, '') - if not platform: # If no specific override, use the configuration's. + if not platform: # If no specific override, use the configuration's. platform = configuration_platform # Map from platform to architecture. return {'Win32': 'x86', 'x64': 'x64', 'ARM64': 'arm64'}.get(platform, 'x86') @@ -310,7 +286,10 @@ def _TargetConfig(self, config): # There's two levels of architecture/platform specification in VS. 
The # first level is globally for the configuration (this is what we consider # "the" config at the gyp level, which will be something like 'Debug' or - # 'Release_x64'), and a second target-specific configuration, which is an + # 'Release'), VS2015 and later only use this level + if self.vs_version.short_name >= '2015': + return config + # and a second target-specific configuration, which is an # override for the global one. |config| is remapped here to take into # account the local target-specific overrides to the global configuration. arch = self.GetArch(config) @@ -320,26 +299,20 @@ def _TargetConfig(self, config): config = config.rsplit('_', 1)[0] return config - def _Setting(self, path, config, - default=None, prefix='', append=None, map=None): + def _Setting(self, path, config, default=None, prefix='', append=None, map_arg=None): """_GetAndMunge for msvs_settings.""" - return self._GetAndMunge( - self.msvs_settings[config], path, default, prefix, append, map) + return self._GetAndMunge(self.msvs_settings[config], path, default, prefix, append, map_arg) - def _ConfigAttrib(self, path, config, - default=None, prefix='', append=None, map=None): + def _ConfigAttrib(self, path, config, default=None, prefix='', append=None, map_arg=None): """_GetAndMunge for msvs_configuration_attributes.""" - return self._GetAndMunge( - self.msvs_configuration_attributes[config], - path, default, prefix, append, map) + return self._GetAndMunge(self.msvs_configuration_attributes[config], path, default, prefix, append, map_arg) def AdjustIncludeDirs(self, include_dirs, config): """Updates include_dirs to expand VS specific paths, and adds the system include dirs used for platform SDK and similar.""" config = self._TargetConfig(config) includes = include_dirs + self.msvs_system_include_dirs[config] - includes.extend(self._Setting( - ('VCCLCompilerTool', 'AdditionalIncludeDirectories'), config, default=[])) + includes.extend(self._Setting(('VCCLCompilerTool', 'AdditionalIncludeDirectories'), config, default=[])) return [self.ConvertVSMacros(p, config=config) for p in includes] def AdjustMidlIncludeDirs(self, midl_include_dirs, config): @@ -347,8 +320,7 @@ def AdjustMidlIncludeDirs(self, midl_include_dirs, config): system include dirs used for platform SDK and similar.""" config = self._TargetConfig(config) includes = midl_include_dirs + self.msvs_system_include_dirs[config] - includes.extend(self._Setting( - ('VCMIDLTool', 'AdditionalIncludeDirectories'), config, default=[])) + includes.extend(self._Setting(('VCMIDLTool', 'AdditionalIncludeDirectories'), config, default=[])) return [self.ConvertVSMacros(p, config=config) for p in includes] def GetComputedDefines(self, config): @@ -360,16 +332,14 @@ def GetComputedDefines(self, config): defines.extend(('_UNICODE', 'UNICODE')) if self._ConfigAttrib(['CharacterSet'], config) == '2': defines.append('_MBCS') - defines.extend(self._Setting( - ('VCCLCompilerTool', 'PreprocessorDefinitions'), config, default=[])) + defines.extend(self._Setting(('VCCLCompilerTool', 'PreprocessorDefinitions'), config, default=[])) return defines def GetCompilerPdbName(self, config, expand_special): """Get the pdb file name that should be used for compiler invocations, or None if there's no explicit name specified.""" config = self._TargetConfig(config) - pdbname = self._Setting( - ('VCCLCompilerTool', 'ProgramDataBaseFileName'), config) + pdbname = self._Setting(('VCCLCompilerTool', 'ProgramDataBaseFileName'), config) if pdbname: pdbname = expand_special(self.ConvertVSMacros(pdbname)) 
return pdbname @@ -387,13 +357,12 @@ def GetOutputName(self, config, expand_special): """Gets the explicitly overridden output name for a target or returns None if it's not overridden.""" config = self._TargetConfig(config) - type = self.spec['type'] - root = 'VCLibrarianTool' if type == 'static_library' else 'VCLinkerTool' + spec_type = self.spec['type'] + root = 'VCLibrarianTool' if spec_type == 'static_library' else 'VCLinkerTool' # TODO(scottmg): Handle OutputDirectory without OutputFile. output_file = self._Setting((root, 'OutputFile'), config) if output_file: - output_file = expand_special(self.ConvertVSMacros( - output_file, config=config)) + output_file = expand_special(self.ConvertVSMacros(output_file, config=config)) return output_file def GetPDBName(self, config, expand_special, default): @@ -401,8 +370,7 @@ def GetPDBName(self, config, expand_special, default): default if it's not overridden, or if no pdb will be generated.""" config = self._TargetConfig(config) output_file = self._Setting(('VCLinkerTool', 'ProgramDatabaseFile'), config) - generate_debug_info = self._Setting( - ('VCLinkerTool', 'GenerateDebugInformation'), config) + generate_debug_info = self._Setting(('VCLinkerTool', 'GenerateDebugInformation'), config) if generate_debug_info == 'true': if output_file: return expand_special(self.ConvertVSMacros(output_file, config=config)) @@ -432,10 +400,8 @@ def GetCflags(self, config): config = self._TargetConfig(config) cflags = [] cflags.extend(['/wd' + w for w in self.msvs_disabled_warnings[config]]) - cl = self._GetWrapper(self, self.msvs_settings[config], - 'VCCLCompilerTool', append=cflags) - cl('Optimization', - map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O', default='2') + cl = self._GetWrapper(self, self.msvs_settings[config], 'VCCLCompilerTool', append=cflags) + cl('Optimization', map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O', default='2') cl('InlineFunctionExpansion', prefix='/Ob') cl('DisableSpecificWarnings', prefix='/wd') cl('StringPooling', map={'true': '/GF'}) @@ -443,40 +409,33 @@ def GetCflags(self, config): cl('OmitFramePointers', map={'false': '-', 'true': ''}, prefix='/Oy') cl('EnableIntrinsicFunctions', map={'false': '-', 'true': ''}, prefix='/Oi') cl('FavorSizeOrSpeed', map={'1': 't', '2': 's'}, prefix='/O') - cl('FloatingPointModel', - map={'0': 'precise', '1': 'strict', '2': 'fast'}, prefix='/fp:', - default='0') + cl('FloatingPointModel', map={'0': 'precise', '1': 'strict', '2': 'fast'}, prefix='/fp:', default='0') cl('CompileAsManaged', map={'false': '', 'true': '/clr'}) cl('WholeProgramOptimization', map={'true': '/GL'}) cl('WarningLevel', prefix='/W') cl('WarnAsError', map={'true': '/WX'}) - cl('CallingConvention', - map={'0': 'd', '1': 'r', '2': 'z', '3': 'v'}, prefix='/G') - cl('DebugInformationFormat', - map={'1': '7', '3': 'i', '4': 'I'}, prefix='/Z') + cl('CallingConvention', map={'0': 'd', '1': 'r', '2': 'z', '3': 'v'}, prefix='/G') + cl('DebugInformationFormat', map={'1': '7', '3': 'i', '4': 'I'}, prefix='/Z') cl('RuntimeTypeInfo', map={'true': '/GR', 'false': '/GR-'}) cl('EnableFunctionLevelLinking', map={'true': '/Gy', 'false': '/Gy-'}) cl('MinimalRebuild', map={'true': '/Gm'}) cl('BufferSecurityCheck', map={'true': '/GS', 'false': '/GS-'}) cl('BasicRuntimeChecks', map={'1': 's', '2': 'u', '3': '1'}, prefix='/RTC') - cl('RuntimeLibrary', - map={'0': 'T', '1': 'Td', '2': 'D', '3': 'Dd'}, prefix='/M') - cl('ExceptionHandling', map={'1': 'sc','2': 'a'}, prefix='/EH') + cl('RuntimeLibrary', map={'0': 'T', '1': 'Td', 
'2': 'D', '3': 'Dd'}, prefix='/M') + cl('ExceptionHandling', map={'1': 'sc', '2': 'a'}, prefix='/EH') cl('DefaultCharIsUnsigned', map={'true': '/J'}) - cl('TreatWChar_tAsBuiltInType', - map={'false': '-', 'true': ''}, prefix='/Zc:wchar_t') + cl('TreatWChar_tAsBuiltInType', map={'false': '-', 'true': ''}, prefix='/Zc:wchar_t') cl('EnablePREfast', map={'true': '/analyze'}) cl('AdditionalOptions', prefix='') - cl('EnableEnhancedInstructionSet', - map={'1': 'SSE', '2': 'SSE2', '3': 'AVX', '4': 'IA32', '5': 'AVX2'}, - prefix='/arch:') - cflags.extend(['/FI' + f for f in self._Setting( - ('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])]) - if self.vs_version.short_name in ('2013', '2013e', '2015'): - # New flag required in 2013 to maintain previous PDB behavior. + cl('EnableEnhancedInstructionSet', map={'1': 'SSE', '2': 'SSE2', '3': 'AVX', '4': 'IA32', '5': 'AVX2'}, prefix='/arch:') + cflags.extend(['/FI' + f for f in self._Setting(('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])]) + if self.vs_version.project_version >= '12.0': + # New flag introduced in VS2013 (project version 12.0) Forces writes to + # the program database (PDB) to be serialized through MSPDBSRV.EXE. + # https://msdn.microsoft.com/en-us/library/dn502518.aspx cflags.append('/FS') # ninja handles parallelism by itself, don't have the compiler do it too. - cflags = filter(lambda x: not x.startswith('/MP'), cflags) + cflags = [x for x in cflags if not x.startswith('/MP')] return cflags def _GetPchFlags(self, config, extension): @@ -489,9 +448,10 @@ def _GetPchFlags(self, config, extension): if self.msvs_precompiled_header[config]: source_ext = os.path.splitext(self.msvs_precompiled_source[config])[1] if _LanguageMatchesForPch(source_ext, extension): - pch = os.path.split(self.msvs_precompiled_header[config])[1] - return ['/Yu' + pch, '/FI' + pch, '/Fp${pchprefix}.' + pch + '.pch'] - return [] + pch = self.msvs_precompiled_header[config] + pchbase = os.path.split(pch)[1] + return ['/Yu' + pch, '/FI' + pch, '/Fp${pchprefix}.' 
+ pchbase + '.pch'] + return [] def GetCflagsC(self, config): """Returns the flags that need to be added to .c compilations.""" @@ -507,24 +467,21 @@ def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path): """Get and normalize the list of paths in AdditionalLibraryDirectories setting.""" config = self._TargetConfig(config) - libpaths = self._Setting((root, 'AdditionalLibraryDirectories'), - config, default=[]) - libpaths = [os.path.normpath( - gyp_to_build_path(self.ConvertVSMacros(p, config=config))) - for p in libpaths] + libpaths = self._Setting((root, 'AdditionalLibraryDirectories'), config, default=[]) + libpaths = [os.path.normpath(gyp_to_build_path(self.ConvertVSMacros(p, config=config))) for p in libpaths] return ['/LIBPATH:"' + p + '"' for p in libpaths] def GetLibFlags(self, config, gyp_to_build_path): """Returns the flags that need to be added to lib commands.""" config = self._TargetConfig(config) libflags = [] - lib = self._GetWrapper(self, self.msvs_settings[config], - 'VCLibrarianTool', append=libflags) - libflags.extend(self._GetAdditionalLibraryDirectories( - 'VCLibrarianTool', config, gyp_to_build_path)) + lib = self._GetWrapper(self, self.msvs_settings[config], 'VCLibrarianTool', append=libflags) + libflags.extend(self._GetAdditionalLibraryDirectories('VCLibrarianTool', config, gyp_to_build_path)) lib('LinkTimeCodeGeneration', map={'true': '/LTCG'}) - lib('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'}, - prefix='/MACHINE:') + # TODO: These 'map' values come from machineTypeOption enum, + # and does not have an official value for ARM64 in VS2017 (yet). + # It needs to verify the ARM64 value when machineTypeOption is updated. + lib('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM', '18': 'ARM64'}, prefix='/MACHINE:') lib('AdditionalOptions') return libflags @@ -532,7 +489,7 @@ def GetDefFile(self, gyp_to_build_path): """Returns the .def file from sources, if any. 
Otherwise returns None.""" spec = self.spec if spec['type'] in ('shared_library', 'loadable_module', 'executable'): - def_files = [s for s in spec.get('sources', []) if s.endswith('.def')] + def_files = [s for s in spec.get('sources', []) if s.lower().endswith('.def')] if len(def_files) == 1: return gyp_to_build_path(def_files[0]) elif len(def_files) > 1: @@ -550,33 +507,26 @@ def GetPGDName(self, config, expand_special): """Gets the explicitly overridden pgd name for a target or returns None if it's not overridden.""" config = self._TargetConfig(config) - output_file = self._Setting( - ('VCLinkerTool', 'ProfileGuidedDatabase'), config) + output_file = self._Setting(('VCLinkerTool', 'ProfileGuidedDatabase'), config) if output_file: - output_file = expand_special(self.ConvertVSMacros( - output_file, config=config)) + output_file = expand_special(self.ConvertVSMacros(output_file, config=config)) return output_file - def GetLdflags(self, config, gyp_to_build_path, expand_special, - manifest_base_name, output_name, is_executable, build_dir): + def GetLdflags(self, config, gyp_to_build_path, expand_special, manifest_base_name, output_name, is_executable, build_dir): """Returns the flags that need to be added to link commands, and the manifest files.""" config = self._TargetConfig(config) ldflags = [] - ld = self._GetWrapper(self, self.msvs_settings[config], - 'VCLinkerTool', append=ldflags) + ld = self._GetWrapper(self, self.msvs_settings[config], 'VCLinkerTool', append=ldflags) self._GetDefFileAsLdflags(ldflags, gyp_to_build_path) ld('GenerateDebugInformation', map={'true': '/DEBUG'}) # TODO: These 'map' values come from machineTypeOption enum, # and does not have an official value for ARM64 in VS2017 (yet). # It needs to verify the ARM64 value when machineTypeOption is updated. 
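Note: every cl(...)/ld(...)/lib(...) call in this class goes through _GetWrapper, which reads one VS setting, optionally remaps it, prefixes it, and appends the result to the flag list (values missing from the map drop out, per _DoRemapping above). The pattern in isolation, as a simplified stand-in rather than the patch's implementation (the `map` keyword mirrors the wrapper's own API):

    def make_tool(settings, flags):
      """Simplified stand-in for MsvsSettings._GetWrapper."""
      def tool(name, map=None, prefix='', default=None):
        value = settings.get(name, default)
        if map is not None and value is not None:
          value = map.get(value)  # unmapped values are dropped
        if value is not None:
          flags.append(prefix + value)
      return tool

    ldflags = []
    ld = make_tool({'GenerateDebugInformation': 'true', 'SubSystem': '1'}, ldflags)
    ld('GenerateDebugInformation', map={'true': '/DEBUG'})
    ld('SubSystem', map={'1': 'CONSOLE', '2': 'WINDOWS'}, prefix='/SUBSYSTEM:')
    # ldflags == ['/DEBUG', '/SUBSYSTEM:CONSOLE']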
- ld('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM', '18': 'ARM64'}, - prefix='/MACHINE:') - ldflags.extend(self._GetAdditionalLibraryDirectories( - 'VCLinkerTool', config, gyp_to_build_path)) + ld('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM', '18': 'ARM64'}, prefix='/MACHINE:') + ldflags.extend(self._GetAdditionalLibraryDirectories('VCLinkerTool', config, gyp_to_build_path)) ld('DelayLoadDLLs', prefix='/DELAYLOAD:') - ld('TreatLinkerWarningAsErrors', prefix='/WX', - map={'true': '', 'false': ':NO'}) + ld('TreatLinkerWarningAsErrors', prefix='/WX', map={'true': '', 'false': ':NO'}) out = self.GetOutputName(config, expand_special) if out: ldflags.append('/OUT:' + out) @@ -587,25 +537,18 @@ def GetLdflags(self, config, gyp_to_build_path, expand_special, if pgd: ldflags.append('/PGD:' + pgd) map_file = self.GetMapFileName(config, expand_special) - ld('GenerateMapFile', map={'true': '/MAP:' + map_file if map_file - else '/MAP'}) + ld('GenerateMapFile', map={'true': '/MAP:' + map_file if map_file else '/MAP'}) ld('MapExports', map={'true': '/MAPINFO:EXPORTS'}) ld('AdditionalOptions', prefix='') - minimum_required_version = self._Setting( - ('VCLinkerTool', 'MinimumRequiredVersion'), config, default='') + minimum_required_version = self._Setting(('VCLinkerTool', 'MinimumRequiredVersion'), config, default='') if minimum_required_version: minimum_required_version = ',' + minimum_required_version - ld('SubSystem', - map={'1': 'CONSOLE%s' % minimum_required_version, - '2': 'WINDOWS%s' % minimum_required_version}, - prefix='/SUBSYSTEM:') + ld('SubSystem', map={'1': 'CONSOLE%s' % minimum_required_version, '2': 'WINDOWS%s' % minimum_required_version}, prefix='/SUBSYSTEM:') - stack_reserve_size = self._Setting( - ('VCLinkerTool', 'StackReserveSize'), config, default='') + stack_reserve_size = self._Setting(('VCLinkerTool', 'StackReserveSize'), config, default='') if stack_reserve_size: - stack_commit_size = self._Setting( - ('VCLinkerTool', 'StackCommitSize'), config, default='') + stack_commit_size = self._Setting(('VCLinkerTool', 'StackCommitSize'), config, default='') if stack_commit_size: stack_commit_size = ',' + stack_commit_size ldflags.append('/STACK:%s%s' % (stack_reserve_size, stack_commit_size)) @@ -614,23 +557,17 @@ def GetLdflags(self, config, gyp_to_build_path, expand_special, ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL') ld('BaseAddress', prefix='/BASE:') ld('FixedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/FIXED') - ld('RandomizedBaseAddress', - map={'1': ':NO', '2': ''}, prefix='/DYNAMICBASE') - ld('DataExecutionPrevention', - map={'1': ':NO', '2': ''}, prefix='/NXCOMPAT') + ld('RandomizedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/DYNAMICBASE') + ld('DataExecutionPrevention', map={'1': ':NO', '2': ''}, prefix='/NXCOMPAT') ld('OptimizeReferences', map={'1': 'NOREF', '2': 'REF'}, prefix='/OPT:') ld('ForceSymbolReferences', prefix='/INCLUDE:') ld('EnableCOMDATFolding', map={'1': 'NOICF', '2': 'ICF'}, prefix='/OPT:') - ld('LinkTimeCodeGeneration', - map={'1': '', '2': ':PGINSTRUMENT', '3': ':PGOPTIMIZE', - '4': ':PGUPDATE'}, - prefix='/LTCG') + ld('LinkTimeCodeGeneration', map={'1': '', '2': ':PGINSTRUMENT', '3': ':PGOPTIMIZE', '4': ':PGUPDATE'}, prefix='/LTCG') ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:') ld('ResourceOnlyDLL', map={'true': '/NOENTRY'}) ld('EntryPointSymbol', prefix='/ENTRY:') ld('Profile', map={'true': '/PROFILE'}) - ld('LargeAddressAware', - map={'1': ':NO', '2': ''}, prefix='/LARGEADDRESSAWARE') + 
ld('LargeAddressAware', map={'1': ':NO', '2': ''}, prefix='/LARGEADDRESSAWARE') # TODO(scottmg): This should sort of be somewhere else (not really a flag). ld('AdditionalDependencies', prefix='') @@ -638,33 +575,26 @@ def GetLdflags(self, config, gyp_to_build_path, expand_special, safeseh_default = 'true' else: safeseh_default = None - ld('ImageHasSafeExceptionHandlers', - map={'false': ':NO', 'true': ''}, prefix='/SAFESEH', - default=safeseh_default) + ld('ImageHasSafeExceptionHandlers', map={'false': ':NO', 'true': ''}, prefix='/SAFESEH', default=safeseh_default) # If the base address is not specifically controlled, DYNAMICBASE should # be on by default. - base_flags = filter(lambda x: 'DYNAMICBASE' in x or x == '/FIXED', - ldflags) - if not base_flags: + if not any('DYNAMICBASE' in flag or flag == '/FIXED' for flag in ldflags): ldflags.append('/DYNAMICBASE') # If the NXCOMPAT flag has not been specified, default to on. Despite the # documentation that says this only defaults to on when the subsystem is # Vista or greater (which applies to the linker), the IDE defaults it on # unless it's explicitly off. - if not filter(lambda x: 'NXCOMPAT' in x, ldflags): + if not any('NXCOMPAT' in flag for flag in ldflags): ldflags.append('/NXCOMPAT') - have_def_file = filter(lambda x: x.startswith('/DEF:'), ldflags) - manifest_flags, intermediate_manifest, manifest_files = \ - self._GetLdManifestFlags(config, manifest_base_name, gyp_to_build_path, - is_executable and not have_def_file, build_dir) + have_def_file = any(flag.startswith('/DEF:') for flag in ldflags) + manifest_flags, intermediate_manifest, manifest_files = self._GetLdManifestFlags(config, manifest_base_name, gyp_to_build_path, is_executable and not have_def_file, build_dir) ldflags.extend(manifest_flags) return ldflags, intermediate_manifest, manifest_files - def _GetLdManifestFlags(self, config, name, gyp_to_build_path, - allow_isolation, build_dir): + def _GetLdManifestFlags(self, config, name, gyp_to_build_path, allow_isolation, build_dir): """Returns a 3-tuple: - the set of flags that need to be added to the link to generate a default manifest @@ -672,9 +602,7 @@ def _GetLdManifestFlags(self, config, name, gyp_to_build_path, used to assert it doesn't add anything to the merged one. - the list of all the manifest files to be merged by the manifest tool and included into the link.""" - generate_manifest = self._Setting(('VCLinkerTool', 'GenerateManifest'), - config, - default='true') + generate_manifest = self._Setting(('VCLinkerTool', 'GenerateManifest'), config, default='true') if generate_manifest != 'true': # This means not only that the linker should not generate the intermediate # manifest but also that the manifest tool should do nothing even when @@ -682,10 +610,6 @@ def _GetLdManifestFlags(self, config, name, gyp_to_build_path, return ['/MANIFEST:NO'], [], [] output_name = name + '.intermediate.manifest' - flags = [ - '/MANIFEST', - '/ManifestFile:' + output_name, - ] # Instead of using the MANIFESTUAC flags, we generate a .manifest to # include into the list of manifests. This allows us to avoid the need to @@ -694,29 +618,24 @@ def _GetLdManifestFlags(self, config, name, gyp_to_build_path, # final manifest we get from merging all the additional manifest files # (plus the one we generate here) isn't modified by merging the # intermediate into it. + flags = ['/MANIFEST', '/ManifestFile:' + output_name, '/MANIFESTUAC:NO'] # Always NO, because we generate a manifest file that has what we want. 
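Note: for a hypothetical target named 'app', the list built just above comes out as shown below; the same name also feeds the intermediate-manifest return value that GetLdflags threads through to its caller:

    name = 'app'
    output_name = name + '.intermediate.manifest'
    flags = ['/MANIFEST', '/ManifestFile:' + output_name, '/MANIFESTUAC:NO']
    # flags == ['/MANIFEST', '/ManifestFile:app.intermediate.manifest',
    #           '/MANIFESTUAC:NO']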
- flags.append('/MANIFESTUAC:NO') config = self._TargetConfig(config) - enable_uac = self._Setting(('VCLinkerTool', 'EnableUAC'), config, - default='true') - manifest_files = [] - generated_manifest_outer = \ -"<?xml version='1.0' encoding='UTF-8' standalone='yes'?>" \ -"<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>%s" \ -"</assembly>" + enable_uac = self._Setting(('VCLinkerTool', 'EnableUAC'), config, default='true') + generated_manifest_outer = "<?xml version='1.0' encoding='UTF-8' standalone='yes'?>" \ + "<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>%s" \ + "</assembly>" if enable_uac == 'true': - execution_level = self._Setting(('VCLinkerTool', 'UACExecutionLevel'), - config, default='0') + execution_level = self._Setting(('VCLinkerTool', 'UACExecutionLevel'), config, default='0') execution_level_map = { '0': 'asInvoker', '1': 'highestAvailable', '2': 'requireAdministrator' } - ui_access = self._Setting(('VCLinkerTool', 'UACUIAccess'), config, - default='false') + ui_access = self._Setting(('VCLinkerTool', 'UACUIAccess'), config, default='false') inner = ''' <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3"> <security> @@ -737,8 +656,8 @@ def _GetLdManifestFlags(self, config, name, gyp_to_build_path, # have changed so that simply regenerating the project files doesn't # cause a relink. build_dir_generated_name = os.path.join(build_dir, generated_name) - gyp.common.EnsureDirExists(build_dir_generated_name) - f = gyp.common.WriteOnDiff(build_dir_generated_name) + EnsureDirExists(build_dir_generated_name) + f = WriteOnDiff(build_dir_generated_name) f.write(generated_manifest_contents) f.close() manifest_files = [generated_name] @@ -746,20 +665,16 @@ if allow_isolation: flags.append('/ALLOWISOLATION') - manifest_files += self._GetAdditionalManifestFiles(config, - gyp_to_build_path) + manifest_files += self._GetAdditionalManifestFiles(config, gyp_to_build_path) return flags, output_name, manifest_files def _GetAdditionalManifestFiles(self, config, gyp_to_build_path): """Gets additional manifest files that are added to the default one generated by the linker.""" - files = self._Setting(('VCManifestTool', 'AdditionalManifestFiles'), config, - default=[]) + files = self._Setting(('VCManifestTool', 'AdditionalManifestFiles'), config, default=[]) if isinstance(files, str): files = files.split(';') - return [os.path.normpath( - gyp_to_build_path(self.ConvertVSMacros(f, config=config))) - for f in files] + return [os.path.normpath(gyp_to_build_path(self.ConvertVSMacros(f, config=config))) for f in files] def IsUseLibraryDependencyInputs(self, config): """Returns whether the target should be linked via Use Library Dependency @@ -771,8 +686,7 @@ def IsUseLibraryDependencyInputs(self, config): def IsEmbedManifest(self, config): """Returns whether manifest should be linked into binary.""" config = self._TargetConfig(config) - embed = self._Setting(('VCManifestTool', 'EmbedManifest'), config, - default='true') + embed = self._Setting(('VCManifestTool', 'EmbedManifest'), config, default='true') return embed == 'true' def IsLinkIncremental(self, config): @@ -786,8 +700,7 @@ def GetRcflags(self, config, gyp_to_ninja_path): compiler.""" config = self._TargetConfig(config) rcflags = [] - rc = self._GetWrapper(self, self.msvs_settings[config], - 'VCResourceCompilerTool', append=rcflags) + rc = self._GetWrapper(self, self.msvs_settings[config], 'VCResourceCompilerTool', append=rcflags) rc('AdditionalIncludeDirectories', map=gyp_to_ninja_path, prefix='/I') rcflags.append('/I' + gyp_to_ninja_path('.')) rc('PreprocessorDefinitions', prefix='/d') @@ -804,54 +717,51 @@ def BuildCygwinBashCommandLine(self, args, path_to_base): contain ninja variables cannot be fixed here (for example ${source}), so the outer generator needs to make
sure that the paths that are written out are in posix style, if the command line will be used here.""" - cygwin_dir = os.path.normpath( - os.path.join(path_to_base, self.msvs_cygwin_dirs[0])) + cygwin_dir = os.path.normpath(os.path.join(path_to_base, self.msvs_cygwin_dirs[0])) cd = ('cd %s' % path_to_base).replace('\\', '/') args = [a.replace('\\', '/').replace('"', '\\"') for a in args] args = ["'%s'" % a.replace("'", "'\\''") for a in args] bash_cmd = ' '.join(args) - cmd = ( - 'call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir + - 'bash -c "%s ; %s"' % (cd, bash_cmd)) + cmd = ('call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir + 'bash -c "%s ; %s"' % (cd, bash_cmd)) return cmd def IsRuleRunUnderCygwin(self, rule): """Determine if an action should be run under cygwin. If the variable is unset, or set to 1 we use cygwin.""" - return int(rule.get('msvs_cygwin_shell', - self.spec.get('msvs_cygwin_shell', 1))) != 0 + return int(rule.get('msvs_cygwin_shell', self.spec.get('msvs_cygwin_shell', 1))) != 0 - def _HasExplicitRuleForExtension(self, spec, extension): + @staticmethod + def _HasExplicitRuleForExtension(spec, extension): """Determine if there's an explicit rule for a particular extension.""" for rule in spec.get('rules', []): - if rule['extension'] == extension: + if rule.get('extension') == extension: return True return False - def _HasExplicitIdlActions(self, spec): + @staticmethod + def _HasExplicitIdlActions(spec): """Determine if an action should not run midl for .idl files.""" - return any([action.get('explicit_idl_action', 0) - for action in spec.get('actions', [])]) + return any([action.get('explicit_idl_action', 0) for action in spec.get('actions', [])]) def HasExplicitIdlRulesOrActions(self, spec): """Determine if there's an explicit rule or action for idl files. When there isn't we need to generate implicit rules to build MIDL .idl files.""" - return (self._HasExplicitRuleForExtension(spec, 'idl') or - self._HasExplicitIdlActions(spec)) + return self._HasExplicitRuleForExtension(spec, 'idl') or self._HasExplicitIdlActions(spec) def HasExplicitAsmRules(self, spec): """Determine if there's an explicit rule for asm files. When there isn't we need to generate implicit rules to assemble .asm files.""" return self._HasExplicitRuleForExtension(spec, 'asm') - def GetIdlBuildData(self, source, config): + def GetIdlBuildData(self, _, config): """Determine the implicit outputs for an idl file. Returns output directory, outputs, and variables and flags that are required.""" config = self._TargetConfig(config) midl_get = self._GetWrapper(self, self.msvs_settings[config], 'VCMIDLTool') + def midl(name, default=None): - return self.ConvertVSMacros(midl_get(name, default=default), - config=config) + return self.ConvertVSMacros(midl_get(name, default=default), config=config) + tlb = midl('TypeLibraryName', default='${root}.tlb') header = midl('HeaderFileName', default='${root}.h') dlldata = midl('DLLDataFileName', default='dlldata.c') @@ -861,11 +771,7 @@ def midl(name, default=None): # generated depending on the content of the input idl file. outdir = midl('OutputDirectory', default='') output = [header, dlldata, iid, proxy] - variables = [('tlb', tlb), - ('h', header), - ('dlldata', dlldata), - ('iid', iid), - ('proxy', proxy)] + variables = [('tlb', tlb), ('h', header), ('dlldata', dlldata), ('iid', iid), ('proxy', proxy)] # TODO(scottmg): Are there configuration settings to set these flags? 
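Note: the '${root}' placeholders in the midl() defaults above are expanded per-source later, so for an input like foo.idl the settings shown conceptually resolve as follows (dlldata.c is deliberately not per-root):

    # TypeLibraryName  '${root}.tlb' -> foo.tlb
    # HeaderFileName   '${root}.h'   -> foo.h
    # DLLDataFileName  'dlldata.c'   -> dlldata.c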
target_platform = self.GetArch(config) if target_platform == 'x86': @@ -885,8 +791,8 @@ class PrecompiledHeader(object): """Helper to generate dependencies and build rules to handle generation of precompiled headers. Interface matches the GCH handler in xcode_emulation.py. """ - def __init__( - self, settings, config, gyp_to_build_path, gyp_to_unique_output, obj_ext): + + def __init__(self, settings, config, gyp_to_build_path, gyp_to_unique_output, obj_ext): self.settings = settings self.config = config pch_source = self.settings.msvs_precompiled_source[self.config] @@ -897,9 +803,9 @@ def __init__( def _PchHeader(self): """Get the header that will appear in an #include line for all source files.""" - return os.path.split(self.settings.msvs_precompiled_header[self.config])[1] + return self.settings.msvs_precompiled_header[self.config] - def GetObjDependencies(self, sources, objs, arch): + def GetObjDependencies(self, sources, _, arch): """Given a list of sources files and the corresponding object files, returns a list of the pch files that should be depended upon. The additional wrapping in the return value is for interface compatibility @@ -913,38 +819,37 @@ def GetObjDependencies(self, sources, objs, arch): return [(None, None, self.output_obj)] return [] - def GetPchBuildCommands(self, arch): + @staticmethod + def GetPchBuildCommands(_): """Not used on Windows as there are no additional build steps required (instead, existing steps are modified in GetFlagsModifications below).""" return [] - def GetFlagsModifications(self, input, output, implicit, command, - cflags_c, cflags_cc, expand_special): + def GetFlagsModifications(self, input_arg, output, implicit, command, cflags_c, cflags_cc, expand_special): """Get the modified cflags and implicit dependencies that should be used for the pch compilation step.""" - if input == self.pch_source: + if input_arg == self.pch_source: pch_output = ['/Yc' + self._PchHeader()] if command == 'cxx': - return ([('cflags_cc', map(expand_special, cflags_cc + pch_output))], - self.output_obj, []) + return [('cflags_cc', map(expand_special, cflags_cc + pch_output))], self.output_obj, [] elif command == 'cc': - return ([('cflags_c', map(expand_special, cflags_c + pch_output))], - self.output_obj, []) + return [('cflags_c', map(expand_special, cflags_c + pch_output))], self.output_obj, [] return [], output, implicit -vs_version = None +default_vs_version = None def GetVSVersion(generator_flags): - global vs_version - if not vs_version: - vs_version = gyp.MSVSVersion.SelectVisualStudioVersion( - generator_flags.get('msvs_version', 'auto'), - allow_fallback=False) - return vs_version + global default_vs_version + if not default_vs_version: + from gyp.MSVS import MSVSVersion + default_vs_version = MSVSVersion.SelectVisualStudioVersion(generator_flags.get('msvs_version', 'auto')) + return default_vs_version + def _GetVsvarsSetupArgs(generator_flags, arch): vs = GetVSVersion(generator_flags) - return vs.SetupScript() + return vs.SetupScript(arch) + def ExpandMacros(string, expansions): """Expand $(Variable) per expansions dict. 
See MsvsSettings.GetVSMacroEnv @@ -955,7 +860,8 @@ def ExpandMacros(string, expansions): string = string.replace(old, new) return string -def _ExtractImportantEnvironment(output_of_set): + +def _ExtractImportantEnvironment(output_of_set, arch): """Extracts environment variables required for the toolchain to run from a textual dump output by the cmd.exe 'set' command.""" envvars_to_save = ( @@ -969,10 +875,20 @@ def _ExtractImportantEnvironment(output_of_set): 'temp', 'tmp', ) - env = {} + env = OrderedDict() + # This occasionally happens and leads to misleading SYSTEMROOT error messages + # if not caught here. + cl_find = 'cl.exe' + if 'Visual Studio 201' in output_of_set: + cl_find = arch + '.' + cl_find + if output_of_set.count('=') == 0: + raise Exception('Invalid output_of_set. Value is:\n%s' % output_of_set) for line in output_of_set.splitlines(): + if re.search(cl_find, line, re.I): + env['GYP_CL_PATH'] = line + continue for envvar in envvars_to_save: - if re.match(envvar + '=', line.lower()): + if re.match(envvar + '=', line, re.I): var, setting = line.split('=', 1) if envvar == 'path': # Our own rules (for running gyp-win-tool) and other actions in @@ -982,34 +898,17 @@ def _ExtractImportantEnvironment(output_of_set): setting = os.path.dirname(sys.executable) + os.pathsep + setting env[var.upper()] = setting break + for required in ('SYSTEMROOT', 'TEMP', 'TMP'): if required not in env: - raise Exception('Environment variable "%s" ' - 'required to be set to valid path' % required) + raise Exception('Environment variable "%s" required to be set to valid path' % required) return env -def _FormatAsEnvironmentBlock(envvar_dict): - """Format as an 'environment block' directly suitable for CreateProcess. - Briefly this is a list of key=value\0, terminated by an additional \0. See - CreateProcess documentation for more details.""" - block = '' - nul = '\0' - for key, value in envvar_dict.items(): - block += key + '=' + value + nul - block += nul - return block - -def _ExtractCLPath(output_of_where): - """Gets the path to cl.exe based on the output of calling the environment - setup batch file, followed by the equivalent of `where`.""" - # Take the first line, as that's the first found in the PATH. - for line in output_of_where.strip().splitlines(): - if line.startswith('LOC:'): - return line[len('LOC:'):].strip() - -def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, - system_includes, open_out): - """It's not sufficient to have the absolute path to the compiler, linker, + +# TODO(refack) Pass only one arch +def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags): + """ + It's not sufficient to have the absolute path to the compiler, linker, etc. on Windows, as those tools rely on .dlls being in the PATH. We also need to support both x86 and x64 compilers within the same build (to support msvs_target_platform hackery). Different architectures require a different @@ -1022,43 +921,82 @@ def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, When the following procedure to generate environment files does not meet your requirement (e.g. for custom toolchains), you can pass "-G ninja_use_custom_environment_files" to the gyp to suppress file - generation and use custom environment files prepared by yourself.""" - archs = ('x86', 'x64') - if generator_flags.get('ninja_use_custom_environment_files', 0): - cl_paths = {} - for arch in archs: - cl_paths[arch] = 'cl.exe' - return cl_paths + generation and use custom environment files prepared by yourself. 
+ + Args: + toplevel_build_dir (str): root dir of build tree + generator_flags (OrderedDict): flags passed to the generator + """ vs = GetVSVersion(generator_flags) - cl_paths = {} + + if generator_flags.get('ninja_use_custom_environment_files', False) or not vs.path: + return + + archs = ('x86', 'x64') for arch in archs: - # Extract environment variables for subprocesses. - args = vs.SetupScript(arch) - args.extend(('&&', 'set')) - popen = subprocess.Popen( - args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - variables, _ = popen.communicate() - env = _ExtractImportantEnvironment(variables) - - # Inject system includes from gyp files into INCLUDE. - if system_includes: - system_includes = system_includes | OrderedSet( - env.get('INCLUDE', '').split(';')) - env['INCLUDE'] = ';'.join(system_includes) - - env_block = _FormatAsEnvironmentBlock(env) - f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb') - f.write(env_block) - f.close() + env = _GetEnvironment(arch, vs) + env_block = NUL.join(encode(k) + '=' + encode(v) for k, v in env.items()) + NUL + NUL + with open(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb') as f: + f.write(env_block) + + +def _GetEnvironment(arch, vs): + """ + This function will run the VC environment setup script, retrieve variables, + and also the path of cl.exe. + It will then try to cache the values to disk, and on the next run will try to + look up the cache. The cache key is the path to the setup script (which is + embedded within each installed Visual Studio instance) plus its args. + Even after a cache hit we do some validation of the cached values, + since parts of the tool-set can be upgraded within the installed lifecycle + so paths and version numbers may change. + + Args: + arch: {string} target architecture + vs: VisualStudioVersion + + Returns: {dict} the important environment variables VC needs to run + + """ + args = vs.SetupScript(arch) + args.extend(('&&', 'set', '&&', 'where', 'cl.exe')) + args_slug = ''.join(args).encode('utf-8') + args_hash = hashlib.md5(args_slug).hexdigest() + cache_key = 'gyp-env-cache-' + args_hash + # The default value for %TEMP% will make all cache lookups safely miss + cache_dir = os.environ.get('GYP_TEMP', gettempdir()) + cache_keyed_file = os.path.join(cache_dir, cache_key) + if os.path.exists(cache_keyed_file): + env = OrderedDict() # fallback so a failed unpickle below misses cleanly + try: + with open(cache_keyed_file, 'rb') as f: + env = pickle.load(f) + except Exception as e: + DebugOutput(DEBUG_GENERAL, "Failed to load env pickle: %s" % e) + DebugOutput(DEBUG_GENERAL, "args_slug=%s" % args_slug) + DebugOutput(DEBUG_GENERAL, "cache_keyed_file=%s" % cache_keyed_file) + DebugOutput(DEBUG_GENERAL, traceback.format_exc()) + cl_path = env.get('GYP_CL_PATH', '') + if os.path.exists(cl_path): + return env + else: + # cache has become invalid (probably from a tool-set update) + os.remove(cache_keyed_file) + start_time = time.clock() + # Extract environment variables for subprocesses.
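
A minimal standalone sketch of the caching and environment-block scheme described in the docstring above, assuming nothing beyond the standard library; cache_path_for, load_cached_env, format_env_block, and setup_args are illustrative names, not part of the vendored code:

import hashlib
import os
import pickle
from tempfile import gettempdir

NUL = '\0'


def cache_path_for(setup_args):
  # The cache key is the setup-script path plus its arguments, hashed, so a
  # different script location or target arch misses cleanly.
  args_hash = hashlib.md5(''.join(setup_args).encode('utf-8')).hexdigest()
  cache_dir = os.environ.get('GYP_TEMP', gettempdir())
  return os.path.join(cache_dir, 'gyp-env-cache-' + args_hash)


def load_cached_env(setup_args):
  # Returns the cached env dict, or None when the cache is missing or stale.
  path = cache_path_for(setup_args)
  if not os.path.exists(path):
    return None
  with open(path, 'rb') as f:
    env = pickle.load(f)
  if not os.path.exists(env.get('GYP_CL_PATH', '')):
    os.remove(path)  # the tool-set moved or was upgraded; force regeneration
    return None
  return env


def format_env_block(env):
  # CreateProcess-style environment block: NUL-separated "KEY=value" pairs,
  # terminated by one extra NUL, mirroring the inline NUL.join above.
  return NUL.join('%s=%s' % item for item in env.items()) + NUL + NUL
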
+ std_out = subprocess.check_output(args, shell=True, stderr=subprocess.STDOUT).decode('utf-8') + end_time = time.clock() + DebugOutput(DEBUG_GENERAL, "vcvars %s time: %f" % (' '.join(args), end_time - start_time)) + env = _ExtractImportantEnvironment(std_out, arch) + try: + with open(cache_keyed_file, mode='wb') as f: + pickle.dump(env, f) + except Exception as e: + DebugOutput(DEBUG_GENERAL, "Failed to save env pickle: %s" % e) + DebugOutput(DEBUG_GENERAL, "args_slug=%s" % args_slug) + DebugOutput(DEBUG_GENERAL, "cache_keyed_file=%s" % cache_keyed_file) + DebugOutput(DEBUG_GENERAL, traceback.format_exc()) + return env - # Find cl.exe location for this architecture. - args = vs.SetupScript(arch) - args.extend(('&&', - 'for', '%i', 'in', '(cl.exe)', 'do', '@echo', 'LOC:%~$PATH:i')) - popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE) - output, _ = popen.communicate() - cl_paths[arch] = _ExtractCLPath(output) - return cl_paths def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja): """Emulate behavior of msvs_error_on_missing_sources present in the msvs @@ -1069,21 +1007,22 @@ def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja): if int(generator_flags.get('msvs_error_on_missing_sources', 0)): no_specials = filter(lambda x: '$' not in x, sources) relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials] - missing = filter(lambda x: not os.path.exists(x), relative) + missing = [x for x in relative if not os.path.exists(x)] if missing: # They'll look like out\Release\..\..\stuff\things.cc, so normalize the # path for a slightly less crazy looking output. cleaned_up = [os.path.normpath(x) for x in missing] raise Exception('Missing input files:\n%s' % '\n'.join(cleaned_up)) + # Sets some values in default_variables, which are required for many # generators, run on Windows. def CalculateCommonVariables(default_variables, params): generator_flags = params.get('generator_flags', {}) # Set a variable so conditions can be based on msvs_version. 
- msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags) - default_variables['MSVS_VERSION'] = msvs_version.ShortName() + msvs_version = GetVSVersion(generator_flags) + default_variables['MSVS_VERSION'] = msvs_version.short_name # To determine processor word size on Windows, in addition to checking # PROCESSOR_ARCHITECTURE (which reflects the word size of the current diff --git a/gyp/pylib/gyp/MSVSSettings_test.py b/gyp/gyp/unit_tests/MSVSSettings_test.py old mode 100755 new mode 100644 similarity index 81% rename from gyp/pylib/gyp/MSVSSettings_test.py rename to gyp/gyp/unit_tests/MSVSSettings_test.py index c082bbea9b..caf2e994bf --- a/gyp/pylib/gyp/MSVSSettings_test.py +++ b/gyp/gyp/unit_tests/MSVSSettings_test.py @@ -6,15 +6,14 @@ """Unit tests for the MSVSSettings.py file.""" +import unittest +import gyp.MSVS.MSVSSettings as MSVSSettings try: + # noinspection PyCompatibility from cStringIO import StringIO except ImportError: from io import StringIO -import unittest -import gyp.MSVSSettings as MSVSSettings - - class TestSequenceFunctions(unittest.TestCase): def setUp(self): @@ -286,272 +285,282 @@ def testValidateMSVSSettings_settings(self): def testValidateMSBuildSettings_settings(self): """Tests that for invalid MSBuild settings.""" - MSVSSettings.ValidateMSBuildSettings( - {'ClCompile': { - 'AdditionalIncludeDirectories': 'folder1;folder2', - 'AdditionalOptions': ['string1', 'string2'], - 'AdditionalUsingDirectories': 'folder1;folder2', - 'AssemblerListingLocation': 'a_file_name', - 'AssemblerOutput': 'NoListing', - 'BasicRuntimeChecks': 'StackFrameRuntimeCheck', - 'BrowseInformation': 'false', - 'BrowseInformationFile': 'a_file_name', - 'BufferSecurityCheck': 'true', - 'BuildingInIDE': 'true', - 'CallingConvention': 'Cdecl', - 'CompileAs': 'CompileAsC', - 'CompileAsManaged': 'true', - 'CreateHotpatchableImage': 'true', - 'DebugInformationFormat': 'ProgramDatabase', - 'DisableLanguageExtensions': 'true', - 'DisableSpecificWarnings': 'string1;string2', - 'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions', - 'EnableFiberSafeOptimizations': 'true', - 'EnablePREfast': 'true', - 'Enableprefast': 'bogus', - 'ErrorReporting': 'Prompt', - 'ExceptionHandling': 'SyncCThrow', - 'ExpandAttributedSource': 'true', - 'FavorSizeOrSpeed': 'Neither', - 'FloatingPointExceptions': 'true', - 'FloatingPointModel': 'Precise', - 'ForceConformanceInForLoopScope': 'true', - 'ForcedIncludeFiles': 'file1;file2', - 'ForcedUsingFiles': 'file1;file2', - 'FunctionLevelLinking': 'false', - 'GenerateXMLDocumentationFiles': 'true', - 'IgnoreStandardIncludePath': 'true', - 'InlineFunctionExpansion': 'OnlyExplicitInline', - 'IntrinsicFunctions': 'false', - 'MinimalRebuild': 'true', - 'MultiProcessorCompilation': 'true', - 'ObjectFileName': 'a_file_name', - 'OmitDefaultLibName': 'true', - 'OmitFramePointers': 'true', - 'OpenMPSupport': 'true', - 'Optimization': 'Disabled', - 'PrecompiledHeader': 'NotUsing', - 'PrecompiledHeaderFile': 'a_file_name', - 'PrecompiledHeaderOutputFile': 'a_file_name', - 'PreprocessKeepComments': 'true', - 'PreprocessorDefinitions': 'string1;string2', - 'PreprocessOutputPath': 'a string1', - 'PreprocessSuppressLineNumbers': 'false', - 'PreprocessToFile': 'false', - 'ProcessorNumber': '33', - 'ProgramDataBaseFileName': 'a_file_name', - 'RuntimeLibrary': 'MultiThreaded', - 'RuntimeTypeInfo': 'true', - 'ShowIncludes': 'true', - 'SmallerTypeCheck': 'true', - 'StringPooling': 'true', - 'StructMemberAlignment': '1Byte', - 'SuppressStartupBanner': 'true', - 'TrackerLogDirectory': 
'a_folder', - 'TreatSpecificWarningsAsErrors': 'string1;string2', - 'TreatWarningAsError': 'true', - 'TreatWChar_tAsBuiltInType': 'true', - 'UndefineAllPreprocessorDefinitions': 'true', - 'UndefinePreprocessorDefinitions': 'string1;string2', - 'UseFullPaths': 'true', - 'UseUnicodeForAssemblerListing': 'true', - 'WarningLevel': 'TurnOffAllWarnings', - 'WholeProgramOptimization': 'true', - 'XMLDocumentationFileName': 'a_file_name', - 'ZZXYZ': 'bogus'}, - 'Link': { - 'AdditionalDependencies': 'file1;file2', - 'AdditionalLibraryDirectories': 'folder1;folder2', - 'AdditionalManifestDependencies': 'file1;file2', - 'AdditionalOptions': 'a string1', - 'AddModuleNamesToAssembly': 'file1;file2', - 'AllowIsolation': 'true', - 'AssemblyDebug': '', - 'AssemblyLinkResource': 'file1;file2', - 'BaseAddress': 'a string1', - 'BuildingInIDE': 'true', - 'CLRImageType': 'ForceIJWImage', - 'CLRSupportLastError': 'Enabled', - 'CLRThreadAttribute': 'MTAThreadingAttribute', - 'CLRUnmanagedCodeCheck': 'true', - 'CreateHotPatchableImage': 'X86Image', - 'DataExecutionPrevention': 'false', - 'DelayLoadDLLs': 'file1;file2', - 'DelaySign': 'true', - 'Driver': 'NotSet', - 'EmbedManagedResourceFile': 'file1;file2', - 'EnableCOMDATFolding': 'false', - 'EnableUAC': 'true', - 'EntryPointSymbol': 'a string1', - 'FixedBaseAddress': 'false', - 'ForceFileOutput': 'Enabled', - 'ForceSymbolReferences': 'file1;file2', - 'FunctionOrder': 'a_file_name', - 'GenerateDebugInformation': 'true', - 'GenerateMapFile': 'true', - 'HeapCommitSize': 'a string1', - 'HeapReserveSize': 'a string1', - 'IgnoreAllDefaultLibraries': 'true', - 'IgnoreEmbeddedIDL': 'true', - 'IgnoreSpecificDefaultLibraries': 'a_file_list', - 'ImageHasSafeExceptionHandlers': 'true', - 'ImportLibrary': 'a_file_name', - 'KeyContainer': 'a_file_name', - 'KeyFile': 'a_file_name', - 'LargeAddressAware': 'false', - 'LinkDLL': 'true', - 'LinkErrorReporting': 'SendErrorReport', - 'LinkStatus': 'true', - 'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration', - 'ManifestFile': 'a_file_name', - 'MapExports': 'true', - 'MapFileName': 'a_file_name', - 'MergedIDLBaseFileName': 'a_file_name', - 'MergeSections': 'a string1', - 'MidlCommandFile': 'a_file_name', - 'MinimumRequiredVersion': 'a string1', - 'ModuleDefinitionFile': 'a_file_name', - 'MSDOSStubFileName': 'a_file_name', - 'NoEntryPoint': 'true', - 'OptimizeReferences': 'false', - 'OutputFile': 'a_file_name', - 'PerUserRedirection': 'true', - 'PreventDllBinding': 'true', - 'Profile': 'true', - 'ProfileGuidedDatabase': 'a_file_name', - 'ProgramDatabaseFile': 'a_file_name', - 'RandomizedBaseAddress': 'false', - 'RegisterOutput': 'true', - 'SectionAlignment': '33', - 'SetChecksum': 'true', - 'ShowProgress': 'LinkVerboseREF', - 'SpecifySectionAttributes': 'a string1', - 'StackCommitSize': 'a string1', - 'StackReserveSize': 'a string1', - 'StripPrivateSymbols': 'a_file_name', - 'SubSystem': 'Console', - 'SupportNobindOfDelayLoadedDLL': 'true', - 'SupportUnloadOfDelayLoadedDLL': 'true', - 'SuppressStartupBanner': 'true', - 'SwapRunFromCD': 'true', - 'SwapRunFromNET': 'true', - 'TargetMachine': 'MachineX86', - 'TerminalServerAware': 'false', - 'TrackerLogDirectory': 'a_folder', - 'TreatLinkerWarningAsErrors': 'true', - 'TurnOffAssemblyGeneration': 'true', - 'TypeLibraryFile': 'a_file_name', - 'TypeLibraryResourceID': '33', - 'UACExecutionLevel': 'AsInvoker', - 'UACUIAccess': 'true', - 'Version': 'a string1'}, - 'ResourceCompile': { - 'AdditionalIncludeDirectories': 'folder1;folder2', - 'AdditionalOptions': 'a string1', - 'Culture': 
'0x236', - 'IgnoreStandardIncludePath': 'true', - 'NullTerminateStrings': 'true', - 'PreprocessorDefinitions': 'string1;string2', - 'ResourceOutputFileName': 'a string1', - 'ShowProgress': 'true', - 'SuppressStartupBanner': 'true', - 'TrackerLogDirectory': 'a_folder', - 'UndefinePreprocessorDefinitions': 'string1;string2'}, - 'Midl': { - 'AdditionalIncludeDirectories': 'folder1;folder2', - 'AdditionalOptions': 'a string1', - 'ApplicationConfigurationMode': 'true', - 'ClientStubFile': 'a_file_name', - 'CPreprocessOptions': 'a string1', - 'DefaultCharType': 'Signed', - 'DllDataFileName': 'a_file_name', - 'EnableErrorChecks': 'EnableCustom', - 'ErrorCheckAllocations': 'true', - 'ErrorCheckBounds': 'true', - 'ErrorCheckEnumRange': 'true', - 'ErrorCheckRefPointers': 'true', - 'ErrorCheckStubData': 'true', - 'GenerateClientFiles': 'Stub', - 'GenerateServerFiles': 'None', - 'GenerateStublessProxies': 'true', - 'GenerateTypeLibrary': 'true', - 'HeaderFileName': 'a_file_name', - 'IgnoreStandardIncludePath': 'true', - 'InterfaceIdentifierFileName': 'a_file_name', - 'LocaleID': '33', - 'MkTypLibCompatible': 'true', - 'OutputDirectory': 'a string1', - 'PreprocessorDefinitions': 'string1;string2', - 'ProxyFileName': 'a_file_name', - 'RedirectOutputAndErrors': 'a_file_name', - 'ServerStubFile': 'a_file_name', - 'StructMemberAlignment': 'NotSet', - 'SuppressCompilerWarnings': 'true', - 'SuppressStartupBanner': 'true', - 'TargetEnvironment': 'Itanium', - 'TrackerLogDirectory': 'a_folder', - 'TypeLibFormat': 'NewFormat', - 'TypeLibraryName': 'a_file_name', - 'UndefinePreprocessorDefinitions': 'string1;string2', - 'ValidateAllParameters': 'true', - 'WarnAsError': 'true', - 'WarningLevel': '1'}, - 'Lib': { - 'AdditionalDependencies': 'file1;file2', - 'AdditionalLibraryDirectories': 'folder1;folder2', - 'AdditionalOptions': 'a string1', - 'DisplayLibrary': 'a string1', - 'ErrorReporting': 'PromptImmediately', - 'ExportNamedFunctions': 'string1;string2', - 'ForceSymbolReferences': 'a string1', - 'IgnoreAllDefaultLibraries': 'true', - 'IgnoreSpecificDefaultLibraries': 'file1;file2', - 'LinkTimeCodeGeneration': 'true', - 'MinimumRequiredVersion': 'a string1', - 'ModuleDefinitionFile': 'a_file_name', - 'Name': 'a_file_name', - 'OutputFile': 'a_file_name', - 'RemoveObjects': 'file1;file2', - 'SubSystem': 'Console', - 'SuppressStartupBanner': 'true', - 'TargetMachine': 'MachineX86i', - 'TrackerLogDirectory': 'a_folder', - 'TreatLibWarningAsErrors': 'true', - 'UseUnicodeResponseFiles': 'true', - 'Verbose': 'true'}, - 'Manifest': { - 'AdditionalManifestFiles': 'file1;file2', - 'AdditionalOptions': 'a string1', - 'AssemblyIdentity': 'a string1', - 'ComponentFileName': 'a_file_name', - 'EnableDPIAwareness': 'fal', - 'GenerateCatalogFiles': 'truel', - 'GenerateCategoryTags': 'true', - 'InputResourceManifests': 'a string1', - 'ManifestFromManagedAssembly': 'a_file_name', - 'notgood3': 'bogus', - 'OutputManifestFile': 'a_file_name', - 'OutputResourceManifests': 'a string1', - 'RegistrarScriptFile': 'a_file_name', - 'ReplacementsFile': 'a_file_name', - 'SuppressDependencyElement': 'true', - 'SuppressStartupBanner': 'true', - 'TrackerLogDirectory': 'a_folder', - 'TypeLibraryFile': 'a_file_name', - 'UpdateFileHashes': 'true', - 'UpdateFileHashesSearchPath': 'a_file_name', - 'VerboseOutput': 'true'}, - 'ProjectReference': { - 'LinkLibraryDependencies': 'true', - 'UseLibraryDependencyInputs': 'true'}, - 'ManifestResourceCompile': { - 'ResourceOutputFileName': 'a_file_name'}, - '': { - 'EmbedManifest': 'true', - 
'GenerateManifest': 'true', - 'IgnoreImportLibrary': 'true', - 'LinkIncremental': 'false'}}, - self.stderr) + settings_fixture = { + 'ClCompile': { + 'AdditionalIncludeDirectories': 'folder1;folder2', + 'AdditionalOptions': ['string1', 'string2'], + 'AdditionalUsingDirectories': 'folder1;folder2', + 'AssemblerListingLocation': 'a_file_name', + 'AssemblerOutput': 'NoListing', + 'BasicRuntimeChecks': 'StackFrameRuntimeCheck', + 'BrowseInformation': 'false', + 'BrowseInformationFile': 'a_file_name', + 'BufferSecurityCheck': 'true', + 'BuildingInIDE': 'true', + 'CallingConvention': 'Cdecl', + 'CompileAs': 'CompileAsC', + 'CompileAsManaged': 'true', + 'CreateHotpatchableImage': 'true', + 'DebugInformationFormat': 'ProgramDatabase', + 'DisableLanguageExtensions': 'true', + 'DisableSpecificWarnings': 'string1;string2', + 'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions', + 'EnableFiberSafeOptimizations': 'true', + 'EnablePREfast': 'true', + 'Enableprefast': 'bogus', + 'ErrorReporting': 'Prompt', + 'ExceptionHandling': 'SyncCThrow', + 'ExpandAttributedSource': 'true', + 'FavorSizeOrSpeed': 'Neither', + 'FloatingPointExceptions': 'true', + 'FloatingPointModel': 'Precise', + 'ForceConformanceInForLoopScope': 'true', + 'ForcedIncludeFiles': 'file1;file2', + 'ForcedUsingFiles': 'file1;file2', + 'FunctionLevelLinking': 'false', + 'GenerateXMLDocumentationFiles': 'true', + 'IgnoreStandardIncludePath': 'true', + 'InlineFunctionExpansion': 'OnlyExplicitInline', + 'IntrinsicFunctions': 'false', + 'MinimalRebuild': 'true', + 'MultiProcessorCompilation': 'true', + 'ObjectFileName': 'a_file_name', + 'OmitDefaultLibName': 'true', + 'OmitFramePointers': 'true', + 'OpenMPSupport': 'true', + 'Optimization': 'Disabled', + 'PrecompiledHeader': 'NotUsing', + 'PrecompiledHeaderFile': 'a_file_name', + 'PrecompiledHeaderOutputFile': 'a_file_name', + 'PreprocessKeepComments': 'true', + 'PreprocessorDefinitions': 'string1;string2', + 'PreprocessOutputPath': 'a string1', + 'PreprocessSuppressLineNumbers': 'false', + 'PreprocessToFile': 'false', + 'ProcessorNumber': '33', + 'ProgramDataBaseFileName': 'a_file_name', + 'RuntimeLibrary': 'MultiThreaded', + 'RuntimeTypeInfo': 'true', + 'ShowIncludes': 'true', + 'SmallerTypeCheck': 'true', + 'StringPooling': 'true', + 'StructMemberAlignment': '1Byte', + 'SuppressStartupBanner': 'true', + 'TrackerLogDirectory': 'a_folder', + 'TreatSpecificWarningsAsErrors': 'string1;string2', + 'TreatWarningAsError': 'true', + 'TreatWChar_tAsBuiltInType': 'true', + 'UndefineAllPreprocessorDefinitions': 'true', + 'UndefinePreprocessorDefinitions': 'string1;string2', + 'UseFullPaths': 'true', + 'UseUnicodeForAssemblerListing': 'true', + 'WarningLevel': 'TurnOffAllWarnings', + 'WholeProgramOptimization': 'true', + 'XMLDocumentationFileName': 'a_file_name', + 'ZZXYZ': 'bogus' + }, + 'Link': { + 'AdditionalDependencies': 'file1;file2', + 'AdditionalLibraryDirectories': 'folder1;folder2', + 'AdditionalManifestDependencies': 'file1;file2', + 'AdditionalOptions': 'a string1', + 'AddModuleNamesToAssembly': 'file1;file2', + 'AllowIsolation': 'true', + 'AssemblyDebug': '', + 'AssemblyLinkResource': 'file1;file2', + 'BaseAddress': 'a string1', + 'BuildingInIDE': 'true', + 'CLRImageType': 'ForceIJWImage', + 'CLRSupportLastError': 'Enabled', + 'CLRThreadAttribute': 'MTAThreadingAttribute', + 'CLRUnmanagedCodeCheck': 'true', + 'CreateHotPatchableImage': 'X86Image', + 'DataExecutionPrevention': 'false', + 'DelayLoadDLLs': 'file1;file2', + 'DelaySign': 'true', + 'Driver': 'NotSet', + 
'EmbedManagedResourceFile': 'file1;file2', + 'EnableCOMDATFolding': 'false', + 'EnableUAC': 'true', + 'EntryPointSymbol': 'a string1', + 'FixedBaseAddress': 'false', + 'ForceFileOutput': 'Enabled', + 'ForceSymbolReferences': 'file1;file2', + 'FunctionOrder': 'a_file_name', + 'GenerateDebugInformation': 'true', + 'GenerateMapFile': 'true', + 'HeapCommitSize': 'a string1', + 'HeapReserveSize': 'a string1', + 'IgnoreAllDefaultLibraries': 'true', + 'IgnoreEmbeddedIDL': 'true', + 'IgnoreSpecificDefaultLibraries': 'a_file_list', + 'ImageHasSafeExceptionHandlers': 'true', + 'ImportLibrary': 'a_file_name', + 'KeyContainer': 'a_file_name', + 'KeyFile': 'a_file_name', + 'LargeAddressAware': 'false', + 'LinkDLL': 'true', + 'LinkErrorReporting': 'SendErrorReport', + 'LinkStatus': 'true', + 'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration', + 'ManifestFile': 'a_file_name', + 'MapExports': 'true', + 'MapFileName': 'a_file_name', + 'MergedIDLBaseFileName': 'a_file_name', + 'MergeSections': 'a string1', + 'MidlCommandFile': 'a_file_name', + 'MinimumRequiredVersion': 'a string1', + 'ModuleDefinitionFile': 'a_file_name', + 'MSDOSStubFileName': 'a_file_name', + 'NoEntryPoint': 'true', + 'OptimizeReferences': 'false', + 'OutputFile': 'a_file_name', + 'PerUserRedirection': 'true', + 'PreventDllBinding': 'true', + 'Profile': 'true', + 'ProfileGuidedDatabase': 'a_file_name', + 'ProgramDatabaseFile': 'a_file_name', + 'RandomizedBaseAddress': 'false', + 'RegisterOutput': 'true', + 'SectionAlignment': '33', + 'SetChecksum': 'true', + 'ShowProgress': 'LinkVerboseREF', + 'SpecifySectionAttributes': 'a string1', + 'StackCommitSize': 'a string1', + 'StackReserveSize': 'a string1', + 'StripPrivateSymbols': 'a_file_name', + 'SubSystem': 'Console', + 'SupportNobindOfDelayLoadedDLL': 'true', + 'SupportUnloadOfDelayLoadedDLL': 'true', + 'SuppressStartupBanner': 'true', + 'SwapRunFromCD': 'true', + 'SwapRunFromNET': 'true', + 'TargetMachine': 'MachineX86', + 'TerminalServerAware': 'false', + 'TrackerLogDirectory': 'a_folder', + 'TreatLinkerWarningAsErrors': 'true', + 'TurnOffAssemblyGeneration': 'true', + 'TypeLibraryFile': 'a_file_name', + 'TypeLibraryResourceID': '33', + 'UACExecutionLevel': 'AsInvoker', + 'UACUIAccess': 'true', + 'Version': 'a string1' + }, + 'ResourceCompile': { + 'AdditionalIncludeDirectories': 'folder1;folder2', + 'AdditionalOptions': 'a string1', + 'Culture': '0x236', + 'IgnoreStandardIncludePath': 'true', + 'NullTerminateStrings': 'true', + 'PreprocessorDefinitions': 'string1;string2', + 'ResourceOutputFileName': 'a string1', + 'ShowProgress': 'true', + 'SuppressStartupBanner': 'true', + 'TrackerLogDirectory': 'a_folder', + 'UndefinePreprocessorDefinitions': 'string1;string2' + }, + 'Midl': { + 'AdditionalIncludeDirectories': 'folder1;folder2', + 'AdditionalOptions': 'a string1', + 'ApplicationConfigurationMode': 'true', + 'ClientStubFile': 'a_file_name', + 'CPreprocessOptions': 'a string1', + 'DefaultCharType': 'Signed', + 'DllDataFileName': 'a_file_name', + 'EnableErrorChecks': 'EnableCustom', + 'ErrorCheckAllocations': 'true', + 'ErrorCheckBounds': 'true', + 'ErrorCheckEnumRange': 'true', + 'ErrorCheckRefPointers': 'true', + 'ErrorCheckStubData': 'true', + 'GenerateClientFiles': 'Stub', + 'GenerateServerFiles': 'None', + 'GenerateStublessProxies': 'true', + 'GenerateTypeLibrary': 'true', + 'HeaderFileName': 'a_file_name', + 'IgnoreStandardIncludePath': 'true', + 'InterfaceIdentifierFileName': 'a_file_name', + 'LocaleID': '33', + 'MkTypLibCompatible': 'true', + 'OutputDirectory': 'a string1', + 
'PreprocessorDefinitions': 'string1;string2', + 'ProxyFileName': 'a_file_name', + 'RedirectOutputAndErrors': 'a_file_name', + 'ServerStubFile': 'a_file_name', + 'StructMemberAlignment': 'NotSet', + 'SuppressCompilerWarnings': 'true', + 'SuppressStartupBanner': 'true', + 'TargetEnvironment': 'Itanium', + 'TrackerLogDirectory': 'a_folder', + 'TypeLibFormat': 'NewFormat', + 'TypeLibraryName': 'a_file_name', + 'UndefinePreprocessorDefinitions': 'string1;string2', + 'ValidateAllParameters': 'true', + 'WarnAsError': 'true', + 'WarningLevel': '1' + }, + 'Lib': { + 'AdditionalDependencies': 'file1;file2', + 'AdditionalLibraryDirectories': 'folder1;folder2', + 'AdditionalOptions': 'a string1', + 'DisplayLibrary': 'a string1', + 'ErrorReporting': 'PromptImmediately', + 'ExportNamedFunctions': 'string1;string2', + 'ForceSymbolReferences': 'a string1', + 'IgnoreAllDefaultLibraries': 'true', + 'IgnoreSpecificDefaultLibraries': 'file1;file2', + 'LinkTimeCodeGeneration': 'true', + 'MinimumRequiredVersion': 'a string1', + 'ModuleDefinitionFile': 'a_file_name', + 'Name': 'a_file_name', + 'OutputFile': 'a_file_name', + 'RemoveObjects': 'file1;file2', + 'SubSystem': 'Console', + 'SuppressStartupBanner': 'true', + 'TargetMachine': 'MachineX86i', + 'TrackerLogDirectory': 'a_folder', + 'TreatLibWarningAsErrors': 'true', + 'UseUnicodeResponseFiles': 'true', + 'Verbose': 'true' + }, + 'Manifest': { + 'AdditionalManifestFiles': 'file1;file2', + 'AdditionalOptions': 'a string1', + 'AssemblyIdentity': 'a string1', + 'ComponentFileName': 'a_file_name', + 'EnableDPIAwareness': 'fal', + 'GenerateCatalogFiles': 'truel', + 'GenerateCategoryTags': 'true', + 'InputResourceManifests': 'a string1', + 'ManifestFromManagedAssembly': 'a_file_name', + 'notgood3': 'bogus', + 'OutputManifestFile': 'a_file_name', + 'OutputResourceManifests': 'a string1', + 'RegistrarScriptFile': 'a_file_name', + 'ReplacementsFile': 'a_file_name', + 'SuppressDependencyElement': 'true', + 'SuppressStartupBanner': 'true', + 'TrackerLogDirectory': 'a_folder', + 'TypeLibraryFile': 'a_file_name', + 'UpdateFileHashes': 'true', + 'UpdateFileHashesSearchPath': 'a_file_name', + 'VerboseOutput': 'true' + }, + 'ProjectReference': { + 'LinkLibraryDependencies': 'true', + 'UseLibraryDependencyInputs': 'true' + }, + 'ManifestResourceCompile': { + 'ResourceOutputFileName': 'a_file_name' + }, + '': { + 'EmbedManifest': 'true', + 'GenerateManifest': 'true', + 'IgnoreImportLibrary': 'true', + 'LinkIncremental': 'false' + } + } + MSVSSettings.ValidateMSBuildSettings(settings_fixture, self.stderr) self._ExpectedWarnings([ 'Warning: unrecognized setting ClCompile/Enableprefast', 'Warning: unrecognized setting ClCompile/ZZXYZ', @@ -632,9 +641,7 @@ def testConvertToMSBuildSettings_warnings(self): 'ResourceCompile': { # Custom 'Culture': '0x03eb'}} - actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( - msvs_settings, - self.stderr) + actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings, self.stderr) self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) self._ExpectedWarnings([ 'Warning: while converting VCCLCompilerTool/BasicRuntimeChecks to ' diff --git a/gyp/gyp/unit_tests/NinjaWriter_test.py b/gyp/gyp/unit_tests/NinjaWriter_test.py new file mode 100644 index 0000000000..e001411171 --- /dev/null +++ b/gyp/gyp/unit_tests/NinjaWriter_test.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python + +# Copyright (c) 2012 Google Inc. All rights reserved. 
+# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +""" Unit tests for the NinjaWriter.py file. """ + +import unittest +import sys + +import gyp.NinjaWriter + + +class TestPrefixesAndSuffixes(unittest.TestCase): + spec = {'target_name': 'wee', 'type': 'executable', 'toolset': 'target', 'configurations': {'gu': {}, 'ga': {}}} + def setUp(self): + self.win_writer = gyp.NinjaWriter.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.', 'build.ninja', 'win', self.spec, {'msvs_version': '2019'}, 'gu', '.') + self.linux_writer = gyp.NinjaWriter.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.', 'build.ninja', 'linux', self.spec, {'msvs_version': '2019'}, 'ga', '.') + + def test_BinaryNamesWindows(self): + # These cannot run on non-Windows as they require a VS installation to + # correctly handle variable expansion. + if not sys.platform.startswith('win'): + self.skipTest('can only run on Windows') + self.assertTrue(self.win_writer.ComputeOutputFileName(self.spec, 'executable').endswith('.exe')) + self.assertTrue(self.win_writer.ComputeOutputFileName(self.spec, 'shared_library').endswith('.dll')) + self.assertTrue(self.win_writer.ComputeOutputFileName(self.spec, 'static_library').endswith('.lib')) + + def test_BinaryNamesLinux(self): + self.assertTrue('.' not in self.linux_writer.ComputeOutputFileName(self.spec, 'executable')) + self.assertTrue(self.linux_writer.ComputeOutputFileName(self.spec, 'shared_library').startswith('lib')) + self.assertTrue(self.linux_writer.ComputeOutputFileName(self.spec, 'static_library').startswith('lib')) + self.assertTrue(self.linux_writer.ComputeOutputFileName(self.spec, 'shared_library').endswith('.so')) + self.assertTrue(self.linux_writer.ComputeOutputFileName(self.spec, 'static_library').endswith('.a')) + + +if __name__ == '__main__': + unittest.main() diff --git a/gyp/gyp/unit_tests/__init__.py b/gyp/gyp/unit_tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/gyp/pylib/gyp/common_test.py b/gyp/gyp/unit_tests/common_test.py old mode 100755 new mode 100644 similarity index 97% rename from gyp/pylib/gyp/common_test.py rename to gyp/gyp/unit_tests/common_test.py index ad6f9a1438..0b8ada3dc3 --- a/gyp/pylib/gyp/common_test.py +++ b/gyp/gyp/unit_tests/common_test.py @@ -63,6 +63,7 @@ def test_platform_default(self): self.assertFlavor('solaris', 'sunos' , {}); self.assertFlavor('linux' , 'linux2' , {}); self.assertFlavor('linux' , 'linux3' , {}); + self.assertFlavor('linux' , 'linux' , {}); def test_param(self): self.assertFlavor('foobar', 'linux2' , {'flavor': 'foobar'}) diff --git a/gyp/pylib/gyp/easy_xml_test.py b/gyp/gyp/unit_tests/easy_xml_test.py old mode 100755 new mode 100644 similarity index 97% rename from gyp/pylib/gyp/easy_xml_test.py rename to gyp/gyp/unit_tests/easy_xml_test.py index 2a80b8a456..79c5a61ff2 --- a/gyp/pylib/gyp/easy_xml_test.py +++ b/gyp/gyp/unit_tests/easy_xml_test.py @@ -8,10 +8,7 @@ import gyp.easy_xml as easy_xml import unittest -try: - from cStringIO import StringIO -except ImportError: - from io import StringIO +from io import StringIO class TestSequenceFunctions(unittest.TestCase): diff --git a/gyp/pylib/gyp/generator/msvs_test.py b/gyp/gyp/unit_tests/generator_msvs_test.py old mode 100755 new mode 100644 similarity index 92% rename from gyp/pylib/gyp/generator/msvs_test.py rename to gyp/gyp/unit_tests/generator_msvs_test.py index daf4f411bc..fd0fcd0804 --- a/gyp/pylib/gyp/generator/msvs_test.py +++ b/gyp/gyp/unit_tests/generator_msvs_test.py @@
-7,10 +7,7 @@ import gyp.generator.msvs as msvs import unittest -try: - from cStringIO import StringIO -except ImportError: - from io import StringIO +from io import StringIO class TestSequenceFunctions(unittest.TestCase): diff --git a/gyp/pylib/gyp/generator/xcode_test.py b/gyp/gyp/unit_tests/generator_xcode_test.py similarity index 100% rename from gyp/pylib/gyp/generator/xcode_test.py rename to gyp/gyp/unit_tests/generator_xcode_test.py diff --git a/gyp/pylib/gyp/input_test.py b/gyp/gyp/unit_tests/input_test.py old mode 100755 new mode 100644 similarity index 99% rename from gyp/pylib/gyp/input_test.py rename to gyp/gyp/unit_tests/input_test.py index 1bc5e3d308..7b7216d7f4 --- a/gyp/pylib/gyp/input_test.py +++ b/gyp/gyp/unit_tests/input_test.py @@ -8,7 +8,6 @@ import gyp.input import unittest -import sys class TestFindCycles(unittest.TestCase): diff --git a/gyp/pylib/gyp/xcode_emulation.py b/gyp/gyp/xcode_emulation.py similarity index 85% rename from gyp/pylib/gyp/xcode_emulation.py rename to gyp/gyp/xcode_emulation.py index 0162d978bf..9864393d9a 100644 --- a/gyp/pylib/gyp/xcode_emulation.py +++ b/gyp/gyp/xcode_emulation.py @@ -17,7 +17,6 @@ import shlex import subprocess import sys -import tempfile from gyp.common import GypError # Populated lazily by XcodeVersion, for efficiency, and to fix an issue when @@ -76,6 +75,7 @@ def _ExpandArchs(self, archs, sdkroot): expanded_archs.append(arch) except KeyError as e: print('Warning: Ignoring unsupported variable "%s".' % variable) + print(e) elif arch not in expanded_archs: expanded_archs.append(arch) return expanded_archs @@ -149,6 +149,7 @@ class XcodeSettings(object): # Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached # at class-level for efficiency. _sdk_path_cache = {} + _platform_path_cache = {} _sdk_root_cache = {} # Populated lazily by GetExtraPlistItems(). Shared by all XcodeSettings, so @@ -163,6 +164,8 @@ def __init__(self, spec): self.spec = spec self.isIOS = False + self.mac_toolchain_dir = None + self.header_map_path = None # Per-target 'xcode_settings' are pushed down into configs earlier by gyp. 
# This means self.xcode_settings[config] always contains all settings @@ -196,8 +199,7 @@ def _ConvertConditionalKeys(self, configname): new_key = key.split("[")[0] settings[new_key] = settings[key] else: - print('Warning: Conditional keys not implemented, ignoring:', \ - ' '.join(conditional_keys)) + print('Warning: Conditional keys not implemented, ignoring:', ' '.join(conditional_keys)) del settings[key] def _Settings(self): @@ -219,12 +221,22 @@ def _WarnUnimplemented(self, test_key): def IsBinaryOutputFormat(self, configname): default = "binary" if self.isIOS else "xml" - format = self.xcode_settings[configname].get('INFOPLIST_OUTPUT_FORMAT', - default) + format = self.xcode_settings[configname].get('INFOPLIST_OUTPUT_FORMAT', default) return format == "binary" + def IsIosFramework(self): + return self.spec['type'] == 'shared_library' and self._IsBundle() and \ + self.isIOS + def _IsBundle(self): - return int(self.spec.get('mac_bundle', 0)) != 0 + return int(self.spec.get('mac_bundle', 0)) != 0 or self._IsXCTest() or \ + self._IsXCUiTest() + + def _IsXCTest(self): + return int(self.spec.get('mac_xctest_bundle', 0)) != 0 + + def _IsXCUiTest(self): + return int(self.spec.get('mac_xcuitest_bundle', 0)) != 0 def _IsIosAppExtension(self): return int(self.spec.get('ios_app_extension', 0)) != 0 @@ -235,9 +247,6 @@ def _IsIosWatchKitExtension(self): def _IsIosWatchApp(self): return int(self.spec.get('ios_watch_app', 0)) != 0 - def _IsXCTest(self): - return int(self.spec.get('mac_xctest_bundle', 0)) != 0 - def GetFrameworkVersion(self): """Returns the framework version of the current target. Only valid for bundles.""" @@ -303,11 +312,62 @@ def GetBundleResourceFolder(self): return self.GetBundleContentsFolderPath() return os.path.join(self.GetBundleContentsFolderPath(), 'Resources') + def GetBundleExecutableFolderPath(self): + """Returns the qualified path to the bundle's executables folder. E.g. + Chromium.app/Contents/MacOS. Only valid for bundles.""" + assert self._IsBundle() + if self.spec['type'] in ('shared_library') or self.isIOS: + return self.GetBundleContentsFolderPath() + elif self.spec['type'] in ('executable', 'loadable_module'): + return os.path.join(self.GetBundleContentsFolderPath(), 'MacOS') + + def GetBundleJavaFolderPath(self): + """Returns the qualified path to the bundle's Java resource folder. + E.g. Chromium.app/Contents/Resources/Java. Only valid for bundles.""" + assert self._IsBundle() + return os.path.join(self.GetBundleResourceFolder(), 'Java') + + def GetBundleFrameworksFolderPath(self): + """Returns the qualified path to the bundle's frameworks folder. E.g. + Chromium.app/Contents/Frameworks. Only valid for bundles.""" + assert self._IsBundle() + return os.path.join(self.GetBundleContentsFolderPath(), 'Frameworks') + + def GetBundleSharedFrameworksFolderPath(self): + """Returns the qualified path to the bundle's shared frameworks folder. + E.g. Chromium.app/Contents/SharedFrameworks. Only valid for bundles.""" + assert self._IsBundle() + return os.path.join(self.GetBundleContentsFolderPath(), + 'SharedFrameworks') + + def GetBundleSharedSupportFolderPath(self): + """Returns the qualified path to the bundle's shared support folder. E.g. + Chromium.app/Contents/SharedSupport.
Only valid for bundles.""" + assert self._IsBundle() + if self.spec['type'] == 'shared_library': + return self.GetBundleResourceFolder() + else: + return os.path.join(self.GetBundleContentsFolderPath(), + 'SharedSupport') + + def GetBundlePlugInsFolderPath(self): + """Returns the qualified path to the bundle's plugins folder. E.g. + Chromium.app/Contents/PlugIns. Only valid for bundles.""" + assert self._IsBundle() + return os.path.join(self.GetBundleContentsFolderPath(), 'PlugIns') + + def GetBundleXPCServicesFolderPath(self): + """Returns the qualified path to the bundle's XPC services folder. E.g. + Chromium.app/Contents/XPCServices. Only valid for bundles.""" + assert self._IsBundle() + return os.path.join(self.GetBundleContentsFolderPath(), 'XPCServices') + def GetBundlePlistPath(self): """Returns the qualified path to the bundle's plist file. E.g. Chromium.app/Contents/Info.plist. Only valid for bundles.""" assert self._IsBundle() - if self.spec['type'] in ('executable', 'loadable_module'): + if self.spec['type'] in ('executable', 'loadable_module') or \ + self.IsIosFramework(): return os.path.join(self.GetBundleContentsFolderPath(), 'Info.plist') else: return os.path.join(self.GetBundleContentsFolderPath(), @@ -327,6 +387,10 @@ def GetProductType(self): assert self._IsBundle(), ('ios_watch_app flag requires mac_bundle ' '(target %s)' % self.spec['target_name']) return 'com.apple.product-type.application.watchapp' + if self._IsXCUiTest(): + assert self._IsBundle(), ('mac_xcuitest_bundle flag requires mac_bundle ' + '(target %s)' % self.spec['target_name']) + return 'com.apple.product-type.bundle.ui-testing' if self._IsBundle(): return { 'executable': 'com.apple.product-type.application', @@ -357,11 +421,7 @@ def _GetBundleBinaryPath(self): """Returns the name of the bundle binary output by this target. E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles.""" assert self._IsBundle() - if self.spec['type'] in ('shared_library') or self.isIOS: - path = self.GetBundleContentsFolderPath() - elif self.spec['type'] in ('executable', 'loadable_module'): - path = os.path.join(self.GetBundleContentsFolderPath(), 'MacOS') - return os.path.join(path, self.GetExecutableName()) + return os.path.join(self.GetBundleExecutableFolderPath(), self.GetExecutableName()) def _GetStandaloneExecutableSuffix(self): if 'product_extension' in self.spec: @@ -412,8 +472,8 @@ def GetExecutableName(self): return self._GetStandaloneBinaryPath() def GetExecutablePath(self): - """Returns the directory name of the bundle represented by this target. E.g. - Chromium.app/Contents/MacOS/Chromium.""" + """Returns the qualified path to the primary executable of the bundle + represented by this target. E.g. Chromium.app/Contents/MacOS/Chromium.""" if self._IsBundle(): return self._GetBundleBinaryPath() else: @@ -434,7 +494,7 @@ def _GetSdkVersionInfoItem(self, sdk, infoitem): # Since the CLT has no SDK paths anyway, returning None is the # most sensible route and should still do the right thing.
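
The hunks that follow switch the SDK queries from xcodebuild to xcrun. For reference, a rough standalone sketch of that query pattern, assuming a macOS host with the Xcode command-line tools installed; query_sdk and _sdk_info_cache are illustrative names mirroring XcodeSettings' class-level caches:

import subprocess

_sdk_info_cache = {}


def query_sdk(sdk, flag):
  # flag is one of xcrun's SDK query options seen below, e.g.
  # '--show-sdk-path', '--show-sdk-platform-path', '--show-sdk-version' or
  # '--show-sdk-build-version'; results are cached per (sdk, flag) pair.
  key = (sdk, flag)
  if key not in _sdk_info_cache:
    out = subprocess.check_output(['xcrun', '--sdk', sdk, flag])
    _sdk_info_cache[key] = out.decode('utf-8').rstrip('\n')
  return _sdk_info_cache[key]

# e.g. query_sdk('macosx', '--show-sdk-path')
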
try: - return GetStdoutQuiet(['xcodebuild', '-version', '-sdk', sdk, infoitem]) + return GetStdout(['xcrun', '--sdk', sdk, infoitem]) except: pass @@ -443,6 +503,13 @@ def _SdkRoot(self, configname): configname = self.configname return self.GetPerConfigSetting('SDKROOT', configname, default='') + def _XcodePlatformPath(self, configname=None): + sdk_root = self._SdkRoot(configname) + if sdk_root not in XcodeSettings._platform_path_cache: + platform_path = self._GetSdkVersionInfoItem(sdk_root, '--show-sdk-platform-path') + XcodeSettings._platform_path_cache[sdk_root] = platform_path + return XcodeSettings._platform_path_cache[sdk_root] + def _SdkPath(self, configname=None): sdk_root = self._SdkRoot(configname) if sdk_root.startswith('/'): @@ -451,14 +518,13 @@ def _SdkPath(self, configname=None): def _XcodeSdkPath(self, sdk_root): if sdk_root not in XcodeSettings._sdk_path_cache: - sdk_path = self._GetSdkVersionInfoItem(sdk_root, 'Path') + sdk_path = self._GetSdkVersionInfoItem(sdk_root, '--show-sdk-path') XcodeSettings._sdk_path_cache[sdk_root] = sdk_path if sdk_root: XcodeSettings._sdk_root_cache[sdk_path] = sdk_root return XcodeSettings._sdk_path_cache[sdk_root] def _AppendPlatformVersionMinFlags(self, lst): - self._Appendf(lst, 'MACOSX_DEPLOYMENT_TARGET', '-mmacosx-version-min=%s') if 'IPHONEOS_DEPLOYMENT_TARGET' in self._Settings(): # TODO: Implement this better? sdk_path_basename = os.path.basename(self._SdkPath()) @@ -468,6 +534,8 @@ def _AppendPlatformVersionMinFlags(self, lst): else: self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET', '-miphoneos-version-min=%s') + else: + self._Appendf(lst, 'MACOSX_DEPLOYMENT_TARGET', '-mmacosx-version-min=%s') def GetCflags(self, configname, arch=None): """Returns flags that need to be added to .c, .cc, .m, and .mm @@ -482,6 +550,9 @@ def GetCflags(self, configname, arch=None): if 'SDKROOT' in self._Settings() and sdk_root: cflags.append('-isysroot %s' % sdk_root) + if self.header_map_path: + cflags.append('-I%s' % self.header_map_path) + if self._Test('CLANG_WARN_CONSTANT_CONVERSION', 'YES', default='NO'): cflags.append('-Wconstant-conversion') @@ -573,10 +644,9 @@ def GetCflags(self, configname, arch=None): cflags += self._Settings().get('WARNING_CFLAGS', []) - if self._IsXCTest(): - platform_root = self._XcodePlatformPath(configname) - if platform_root: - cflags.append('-F' + platform_root + '/Developer/Library/Frameworks/') + platform_root = self._XcodePlatformPath(configname) + if platform_root and self._IsXCTest(): + cflags.append('-F' + platform_root + '/Developer/Library/Frameworks/') if sdk_root: framework_root = sdk_root @@ -824,7 +894,8 @@ def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None): ldflags.append('-arch ' + archs[0]) # Xcode adds the product directory by default. - ldflags.append('-L' + product_dir) + # Rewrite -L. to -L./ to work around http://www.openradar.me/25313838 + ldflags.append('-L' + (product_dir if product_dir != '.' 
else './')) install_name = self.GetInstallName() if install_name and self.spec['type'] != 'loadable_module': @@ -841,10 +912,10 @@ def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None): for directory in framework_dirs: ldflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root)) - if self._IsXCTest(): - platform_root = self._XcodePlatformPath(configname) - if platform_root: - cflags.append('-F' + platform_root + '/Developer/Library/Frameworks/') # noqa TODO @cclauss + platform_root = self._XcodePlatformPath(configname) + if sdk_root and platform_root and self._IsXCTest(): + ldflags.append('-F' + platform_root + '/Developer/Library/Frameworks/') + ldflags.append('-framework XCTest') is_extension = self._IsIosAppExtension() or self._IsIosWatchKitExtension() if sdk_root and is_extension: @@ -852,13 +923,13 @@ def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None): # extensions and provide loader and main function. # These flags reflect the compilation options used by xcode to compile # extensions. - ldflags.append('-lpkstart') if XcodeVersion() < '0900': + ldflags.append('-lpkstart') ldflags.append(sdk_root + '/System/Library/PrivateFrameworks/PlugInKit.framework/PlugInKit') + else: + ldflags.append('-e _NSExtensionMain') ldflags.append('-fapplication-extension') - ldflags.append('-Xlinker -rpath ' - '-Xlinker @executable_path/../../Frameworks') self._Appendf(ldflags, 'CLANG_CXX_LIBRARY', '-stdlib=%s') @@ -932,7 +1003,8 @@ def _GetStripPostbuilds(self, configname, output_binary, quiet): self._Test('STRIP_INSTALLED_PRODUCT', 'YES', default='NO')): default_strip_style = 'debugging' - if self.spec['type'] == 'loadable_module' and self._IsBundle(): + if ((self.spec['type'] == 'loadable_module' or self._IsIosAppExtension()) + and self._IsBundle()): default_strip_style = 'non-global' elif self.spec['type'] == 'executable': default_strip_style = 'all' @@ -983,17 +1055,29 @@ def _GetTargetPostbuilds(self, configname, output, output_binary, self._GetDebugInfoPostbuilds(configname, output, output_binary, quiet) + self._GetStripPostbuilds(configname, output_binary, quiet)) - def _GetIOSPostbuilds(self, configname, output_binary): + def _GetIOSPostbuilds(self, configname): """Return a shell command to codesign the iOS output binary so it can be deployed to a device. This should be run as the very last step of the build.""" - if not (self.isIOS and self.spec['type'] == 'executable'): + if not (self.isIOS and + (self.spec['type'] == 'executable' or self._IsXCTest()) or + self.IsIosFramework()): return [] + postbuilds = [] + product_name = self.GetFullProductName() settings = self.xcode_settings[configname] + + # Xcode expects XCTests to be copied into the TEST_HOST dir. + if self._IsXCTest(): + source = os.path.join("${BUILT_PRODUCTS_DIR}", product_name) + test_host = os.path.dirname(settings.get('TEST_HOST')) + xctest_destination = os.path.join(test_host, 'PlugIns', product_name) + postbuilds.extend(['ditto %s %s' % (source, xctest_destination)]) + key = self._GetIOSCodeSignIdentityKey(settings) if not key: - return [] + return postbuilds # Warn for any unimplemented signing xcode keys. unimpl = ['OTHER_CODE_SIGN_FLAGS'] @@ -1002,12 +1086,41 @@ def _GetIOSPostbuilds(self, configname, output_binary): print('Warning: Some codesign keys not implemented, ignoring: %s' % ( ', '.join(sorted(unimpl)))) - return ['%s code-sign-bundle "%s" "%s" "%s" "%s"' % ( + if self._IsXCTest(): + # For device xctests, Xcode copies two extra frameworks into $TEST_HOST. 
+ test_host = os.path.dirname(settings.get('TEST_HOST')) + frameworks_dir = os.path.join(test_host, 'Frameworks') + platform_root = self._XcodePlatformPath(configname) + frameworks = \ + ['Developer/Library/PrivateFrameworks/IDEBundleInjection.framework', + 'Developer/Library/Frameworks/XCTest.framework'] + for framework in frameworks: + source = os.path.join(platform_root, framework) + destination = os.path.join(frameworks_dir, os.path.basename(framework)) + postbuilds.extend(['ditto %s %s' % (source, destination)]) + + # Then re-sign everything with 'preserve=True' + postbuilds.extend(['%s code-sign-bundle "%s" "%s" "%s" "%s" %s' % ( + os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key, + settings.get('CODE_SIGN_ENTITLEMENTS', ''), + settings.get('PROVISIONING_PROFILE', ''), destination, True) + ]) + plugin_dir = os.path.join(test_host, 'PlugIns') + targets = [os.path.join(plugin_dir, product_name), test_host] + for target in targets: + postbuilds.extend(['%s code-sign-bundle "%s" "%s" "%s" "%s" %s' % ( + os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key, + settings.get('CODE_SIGN_ENTITLEMENTS', ''), + settings.get('PROVISIONING_PROFILE', ''), target, True) + ]) + + postbuilds.extend(['%s code-sign-bundle "%s" "%s" "%s" "%s" %s' % ( os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key, - settings.get('CODE_SIGN_RESOURCE_RULES_PATH', ''), settings.get('CODE_SIGN_ENTITLEMENTS', ''), - settings.get('PROVISIONING_PROFILE', '')) - ] + settings.get('PROVISIONING_PROFILE', ''), + os.path.join("${BUILT_PRODUCTS_DIR}", product_name), False) + ]) + return postbuilds def _GetIOSCodeSignIdentityKey(self, settings): identity = settings.get('CODE_SIGN_IDENTITY') @@ -1025,13 +1138,15 @@ def _GetIOSCodeSignIdentityKey(self, settings): XcodeSettings._codesigning_key_cache[identity] = fingerprint return XcodeSettings._codesigning_key_cache.get(identity, '') - def AddImplicitPostbuilds(self, configname, output, output_binary, - postbuilds=[], quiet=False): - """Returns a list of shell commands that should run before and after - |postbuilds|.""" + def AddImplicitPostbuilds(self, configname, output, output_binary, postbuilds=None, quiet=False): + """ + Returns a list of shell commands that should run before and after |postbuilds|. 
+ """ assert output_binary is not None + if postbuilds is None: + postbuilds = [] pre = self._GetTargetPostbuilds(configname, output, output_binary, quiet) - post = self._GetIOSPostbuilds(configname, output_binary) + post = self._GetIOSPostbuilds(configname) return pre + postbuilds + post def _AdjustLibrary(self, library, config_name=None): @@ -1089,25 +1204,37 @@ def GetExtraPlistItems(self, configname=None): xcode, xcode_build = XcodeVersion() cache['DTXcode'] = xcode cache['DTXcodeBuild'] = xcode_build + compiler = self.xcode_settings[configname].get('GCC_VERSION') + if compiler is not None: + cache['DTCompiler'] = compiler sdk_root = self._SdkRoot(configname) if not sdk_root: sdk_root = self._DefaultSdkRoot() - cache['DTSDKName'] = sdk_root - if xcode >= '0430': + sdk_version = self._GetSdkVersionInfoItem(sdk_root, '--show-sdk-version') + cache['DTSDKName'] = sdk_root + (sdk_version or '') + if xcode >= '0720': cache['DTSDKBuild'] = self._GetSdkVersionInfoItem( - sdk_root, 'ProductBuildVersion') + sdk_root, '--show-sdk-build-version') + elif xcode >= '0430': + cache['DTSDKBuild'] = sdk_version else: cache['DTSDKBuild'] = cache['BuildMachineOSBuild'] if self.isIOS: - cache['DTPlatformName'] = cache['DTSDKName'] + cache['MinimumOSVersion'] = self.xcode_settings[configname].get( + 'IPHONEOS_DEPLOYMENT_TARGET') + cache['DTPlatformName'] = sdk_root + cache['DTPlatformVersion'] = sdk_version + if configname.endswith("iphoneos"): - cache['DTPlatformVersion'] = self._GetSdkVersionInfoItem( - sdk_root, 'ProductVersion') cache['CFBundleSupportedPlatforms'] = ['iPhoneOS'] + cache['DTPlatformBuild'] = cache['DTSDKBuild'] else: cache['CFBundleSupportedPlatforms'] = ['iPhoneSimulator'] + # This is weird, but Xcode sets DTPlatformBuild to an empty field + # for simulator builds. + cache['DTPlatformBuild'] = "" XcodeSettings._plist_cache[configname] = cache # Include extra plist items that are per-target, not per global @@ -1266,7 +1393,7 @@ def XcodeVersion(): if XCODE_VERSION_CACHE: return XCODE_VERSION_CACHE try: - version_list = GetStdoutQuiet(['xcodebuild', '-version']).splitlines() + version_list = GetStdout(['xcodebuild', '-version']).splitlines() # In some circumstances xcodebuild exits 0 but doesn't return # the right results; for example, a user on 10.7 or 10.8 with # a bogus path set via xcode-select @@ -1277,16 +1404,16 @@ def XcodeVersion(): except: version = CLTVersion() if version: - version = re.match(r'(\d+\.\d+\.?\d*)', version).groups()[0] + version = re.search(r'^(\d{1,2}\.\d(\.\d+)?)', version).groups()[0] else: raise GypError("No Xcode or CLT version detected!") # The CLT has no build information, so we return an empty string. version_list = [version, ''] version = version_list[0] build = version_list[-1] - # Be careful to convert "4.2" to "0420": - version = version.split()[-1].replace('.', '') - version = (version + '0' * (3 - len(version))).zfill(4) + # Be careful to convert "4.2" to "0420" and "10.0" to "1000": + version = format(''.join((version.split()[-1].split('.') + ['0', '0'])[:3]), + '>04s') if build: build = build.split()[-1] XCODE_VERSION_CACHE = (version, build) @@ -1316,24 +1443,17 @@ def CLTVersion(): continue -def GetStdoutQuiet(cmdlist): - """Returns the content of standard output returned by invoking |cmdlist|. - Ignores the stderr. - Raises |GypError| if the command return with a non-zero return code.""" +def GetStdout(cmdlist, with_stderr=False): + """ + Returns the content of standard output returned by invoking |cmdlist|. 
+ Raises |GypError| if the command returns with a non-zero return code. + """ job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - out = job.communicate()[0] - if job.returncode != 0: - raise GypError('Error %d running %s' % (job.returncode, cmdlist[0])) - return out.rstrip('\n') - - -def GetStdout(cmdlist): - """Returns the content of standard output returned by invoking |cmdlist|. - Raises |GypError| if the command return with a non-zero return code.""" - job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE) - out = job.communicate()[0] + out, err = job.communicate() if job.returncode != 0: - sys.stderr.write(out + '\n') + if with_stderr: + print(out, file=sys.stderr) + print(err, file=sys.stderr) raise GypError('Error %d running %s' % (job.returncode, cmdlist[0])) return out.rstrip('\n') @@ -1360,7 +1480,10 @@ def IsMacBundle(flavor, spec): Bundles are directories with a certain subdirectory structure, instead of just a single file. Bundle rules not only produce a binary but also package resources into that directory.""" - is_mac_bundle = (int(spec.get('mac_bundle', 0)) != 0 and flavor == 'mac') + is_mac_bundle = int(spec.get('mac_xctest_bundle', 0)) != 0 or \ + int(spec.get('mac_xcuitest_bundle', 0)) != 0 or \ + (int(spec.get('mac_bundle', 0)) != 0 and flavor == 'mac') + if is_mac_bundle: assert spec['type'] != 'none', ( 'mac_bundle targets cannot have type none (target "%s")' % @@ -1409,24 +1532,19 @@ def GetMacBundleResources(product_dir, xcode_settings, resources): def GetMacInfoPlist(product_dir, xcode_settings, gyp_path_to_build_path): - """Returns (info_plist, dest_plist, defines, extra_env), where: - * |info_plist| is the source plist path, relative to the - build directory, - * |dest_plist| is the destination plist path, relative to the - build directory, - * |defines| is a list of preprocessor defines (empty if the plist - shouldn't be preprocessed, - * |extra_env| is a dict of env variables that should be exported when - invoking |mac_tool copy-info-plist|. + """ + Returns (info_plist, dest_plist, defines, extra_env), where: + * |info_plist| is the source plist path, relative to the build directory, + * |dest_plist| is the destination plist path, relative to the build directory, + * |defines| is a list of preprocessor defines (empty if the plist shouldn't be preprocessed), + * |extra_env| is a dict of env variables that should be exported when invoking |mac_tool copy-info-plist|. Only call this for mac bundle targets. Args: - product_dir: Path to the directory containing the output bundle, - relative to the build directory. + product_dir: Path to the directory containing the output bundle, relative to the build directory. xcode_settings: The XcodeSettings of the current target. - gyp_to_build_path: A function that converts paths relative to the - current gyp file to paths relative to the build direcotry. + gyp_path_to_build_path: A function that converts paths relative to the current gyp file to paths relative to the build directory. """ info_plist = xcode_settings.GetPerTargetSetting('INFOPLIST_FILE') if not info_plist: @@ -1470,13 +1588,14 @@ def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration, additional_settings: An optional dict with more values to add to the result. """ + if not xcode_settings: return {} # This function is considered a friend of XcodeSettings, so let it reach into # its implementation details. spec = xcode_settings.spec - # These are filled in on a as-needed basis.
+ # These are filled in on an as-needed basis. env = { 'BUILT_FRAMEWORKS_DIR' : built_products_dir, 'BUILT_PRODUCTS_DIR' : built_products_dir, @@ -1489,12 +1608,16 @@ def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration, # written for bundles: 'TARGET_BUILD_DIR' : built_products_dir, 'TEMP_DIR' : '${TMPDIR}', + 'XCODE_VERSION_ACTUAL' : XcodeVersion()[0], } if xcode_settings.GetPerConfigSetting('SDKROOT', configuration): env['SDKROOT'] = xcode_settings._SdkPath(configuration) else: env['SDKROOT'] = '' + if xcode_settings.mac_toolchain_dir: + env['DEVELOPER_DIR'] = xcode_settings.mac_toolchain_dir + if spec['type'] in ( 'executable', 'static_library', 'shared_library', 'loadable_module'): env['EXECUTABLE_NAME'] = xcode_settings.GetExecutableName() @@ -1505,10 +1628,27 @@ env['MACH_O_TYPE'] = mach_o_type env['PRODUCT_TYPE'] = xcode_settings.GetProductType() if xcode_settings._IsBundle(): + # xcodeproj_file.py sets the same Xcode subfolder value for this as for + # FRAMEWORKS_FOLDER_PATH so Xcode builds will actually use FFP's value. + env['BUILT_FRAMEWORKS_DIR'] = \ + os.path.join(built_products_dir + os.sep \ + + xcode_settings.GetBundleFrameworksFolderPath()) env['CONTENTS_FOLDER_PATH'] = \ - xcode_settings.GetBundleContentsFolderPath() + xcode_settings.GetBundleContentsFolderPath() + env['EXECUTABLE_FOLDER_PATH'] = \ + xcode_settings.GetBundleExecutableFolderPath() env['UNLOCALIZED_RESOURCES_FOLDER_PATH'] = \ xcode_settings.GetBundleResourceFolder() + env['JAVA_FOLDER_PATH'] = xcode_settings.GetBundleJavaFolderPath() + env['FRAMEWORKS_FOLDER_PATH'] = \ + xcode_settings.GetBundleFrameworksFolderPath() + env['SHARED_FRAMEWORKS_FOLDER_PATH'] = \ + xcode_settings.GetBundleSharedFrameworksFolderPath() + env['SHARED_SUPPORT_FOLDER_PATH'] = \ + xcode_settings.GetBundleSharedSupportFolderPath() + env['PLUGINS_FOLDER_PATH'] = xcode_settings.GetBundlePlugInsFolderPath() + env['XPCSERVICES_FOLDER_PATH'] = \ + xcode_settings.GetBundleXPCServicesFolderPath() env['INFOPLIST_PATH'] = xcode_settings.GetBundlePlistPath() env['WRAPPER_NAME'] = xcode_settings.GetWrapperName() @@ -1522,8 +1662,6 @@ sdk_root = xcode_settings._SdkRoot(configuration) if not sdk_root: sdk_root = xcode_settings._XcodeSdkPath('') - if sdk_root is None: - sdk_root = '' env['SDKROOT'] = sdk_root if not additional_settings: @@ -1580,7 +1718,7 @@ def _TopologicallySortedEnvVarKeys(env): # Since environment variables can refer to other variables, the evaluation # order is important. Below is the logic to compute the dependency graph # and sort it. - regex = re.compile(r'\$\{([a-zA-Z0-9\-_]+)\}') + regex = re.compile(r'\${([a-zA-Z0-9\-_]+)\}') def GetEdges(node): # Use a definition of edges such that user_of_variable -> used_variable.
# This happens to be easier in this case, since a variable's @@ -1639,11 +1777,13 @@ def _AddIOSDeviceConfigurations(targets): for target_dict in targets.values(): toolset = target_dict['toolset'] configs = target_dict['configurations'] - for config_name, config_dict in dict(configs).items(): - iphoneos_config_dict = copy.deepcopy(config_dict) + + for config_name, simulator_config_dict in dict(configs).items(): + iphoneos_config_dict = copy.deepcopy(simulator_config_dict) configs[config_name + '-iphoneos'] = iphoneos_config_dict - configs[config_name + '-iphonesimulator'] = config_dict + configs[config_name + '-iphonesimulator'] = simulator_config_dict if toolset == 'target': + simulator_config_dict['xcode_settings']['SDKROOT'] = 'iphonesimulator' iphoneos_config_dict['xcode_settings']['SDKROOT'] = 'iphoneos' return targets diff --git a/gyp/gyp_main.py b/gyp/gyp_main.py index 25a6eba94a..b08f9a9b62 100755 --- a/gyp/gyp_main.py +++ b/gyp/gyp_main.py @@ -1,15 +1,5 @@ #!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import os import sys - -# Make sure we're using the version of pylib in this repo, not one installed -# elsewhere on the system. -sys.path.insert(0, os.path.join(os.path.dirname(sys.argv[0]), 'pylib')) import gyp if __name__ == '__main__': diff --git a/gyp/pylib/gyp/MSVSProject.py b/gyp/pylib/gyp/MSVSProject.py deleted file mode 100644 index db1ceede34..0000000000 --- a/gyp/pylib/gyp/MSVSProject.py +++ /dev/null @@ -1,208 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Visual Studio project reader/writer.""" - -import gyp.common -import gyp.easy_xml as easy_xml - -#------------------------------------------------------------------------------ - - -class Tool(object): - """Visual Studio tool.""" - - def __init__(self, name, attrs=None): - """Initializes the tool. - - Args: - name: Tool name. - attrs: Dict of tool attributes; may be None. - """ - self._attrs = attrs or {} - self._attrs['Name'] = name - - def _GetSpecification(self): - """Creates an element for the tool. - - Returns: - A new xml.dom.Element for the tool. - """ - return ['Tool', self._attrs] - -class Filter(object): - """Visual Studio filter - that is, a virtual folder.""" - - def __init__(self, name, contents=None): - """Initializes the folder. - - Args: - name: Filter (folder) name. - contents: List of filenames and/or Filter objects contained. - """ - self.name = name - self.contents = list(contents or []) - - -#------------------------------------------------------------------------------ - - -class Writer(object): - """Visual Studio XML project writer.""" - - def __init__(self, project_path, version, name, guid=None, platforms=None): - """Initializes the project. - - Args: - project_path: Path to the project file. - version: Format version to emit. - name: Name of the project. - guid: GUID to use for project, if not None. - platforms: Array of string, the supported platforms. If null, ['Win32'] - """ - self.project_path = project_path - self.version = version - self.name = name - self.guid = guid - - # Default to Win32 for platforms. - if not platforms: - platforms = ['Win32'] - - # Initialize the specifications of the various sections. 
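For the _AddIOSDeviceConfigurations hunk above, here is a worked mini-example (hypothetical 'Debug' configuration, toolset check omitted): the existing dict is reused as the simulator flavor while a deep copy becomes the device flavor, each pinned to its SDKROOT:

    import copy

    configs = {'Debug': {'xcode_settings': {}}}  # hypothetical input

    for name, simulator_config in dict(configs).items():
        device_config = copy.deepcopy(simulator_config)
        configs[name + '-iphoneos'] = device_config
        configs[name + '-iphonesimulator'] = simulator_config
        # As in the hunk: both flavors get an explicit SDKROOT.
        simulator_config['xcode_settings']['SDKROOT'] = 'iphonesimulator'
        device_config['xcode_settings']['SDKROOT'] = 'iphoneos'

    # 'Debug' and 'Debug-iphonesimulator' now share one dict pinned to the
    # simulator SDK; 'Debug-iphoneos' is an independent copy pinned to the
    # device SDK.
    print(sorted(configs))
    # -> ['Debug', 'Debug-iphoneos', 'Debug-iphonesimulator']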
- self.platform_section = ['Platforms'] - for platform in platforms: - self.platform_section.append(['Platform', {'Name': platform}]) - self.tool_files_section = ['ToolFiles'] - self.configurations_section = ['Configurations'] - self.files_section = ['Files'] - - # Keep a dict keyed on filename to speed up access. - self.files_dict = dict() - - def AddToolFile(self, path): - """Adds a tool file to the project. - - Args: - path: Relative path from project to tool file. - """ - self.tool_files_section.append(['ToolFile', {'RelativePath': path}]) - - def _GetSpecForConfiguration(self, config_type, config_name, attrs, tools): - """Returns the specification for a configuration. - - Args: - config_type: Type of configuration node. - config_name: Configuration name. - attrs: Dict of configuration attributes; may be None. - tools: List of tools (strings or Tool objects); may be None. - Returns: - """ - # Handle defaults - if not attrs: - attrs = {} - if not tools: - tools = [] - - # Add configuration node and its attributes - node_attrs = attrs.copy() - node_attrs['Name'] = config_name - specification = [config_type, node_attrs] - - # Add tool nodes and their attributes - if tools: - for t in tools: - if isinstance(t, Tool): - specification.append(t._GetSpecification()) - else: - specification.append(Tool(t)._GetSpecification()) - return specification - - - def AddConfig(self, name, attrs=None, tools=None): - """Adds a configuration to the project. - - Args: - name: Configuration name. - attrs: Dict of configuration attributes; may be None. - tools: List of tools (strings or Tool objects); may be None. - """ - spec = self._GetSpecForConfiguration('Configuration', name, attrs, tools) - self.configurations_section.append(spec) - - def _AddFilesToNode(self, parent, files): - """Adds files and/or filters to the parent node. - - Args: - parent: Destination node - files: A list of Filter objects and/or relative paths to files. - - Will call itself recursively, if the files list contains Filter objects. - """ - for f in files: - if isinstance(f, Filter): - node = ['Filter', {'Name': f.name}] - self._AddFilesToNode(node, f.contents) - else: - node = ['File', {'RelativePath': f}] - self.files_dict[f] = node - parent.append(node) - - def AddFiles(self, files): - """Adds files to the project. - - Args: - files: A list of Filter objects and/or relative paths to files. - - This makes a copy of the file/filter tree at the time of this call. If you - later add files to a Filter object which was passed into a previous call - to AddFiles(), it will not be reflected in this project. - """ - self._AddFilesToNode(self.files_section, files) - # TODO(rspangler) This also doesn't handle adding files to an existing - # filter. That is, it doesn't merge the trees. - - def AddFileConfig(self, path, config, attrs=None, tools=None): - """Adds a configuration to a file. - - Args: - path: Relative path to the file. - config: Name of configuration to add. - attrs: Dict of configuration attributes; may be None. - tools: List of tools (strings or Tool objects); may be None. - - Raises: - ValueError: Relative path does not match any file added via AddFiles(). - """ - # Find the file node with the right relative path - parent = self.files_dict.get(path) - if not parent: - raise ValueError('AddFileConfig: file "%s" not in project.' 
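The deleted Writer above models the .vcproj document as nested Python lists that easy_xml later serializes. A compact sketch of the _AddFilesToNode recursion it relied on, showing how Filter objects become nested ['Filter', ...] nodes (the files_dict bookkeeping is omitted):

    class Filter(object):
        """Virtual folder, as in the deleted module."""
        def __init__(self, name, contents=None):
            self.name = name
            self.contents = list(contents or [])

    def add_files(parent, files):
        # Filters recurse into nested nodes; plain paths become leaf nodes.
        for f in files:
            if isinstance(f, Filter):
                node = ['Filter', {'Name': f.name}]
                add_files(node, f.contents)
            else:
                node = ['File', {'RelativePath': f}]
            parent.append(node)

    files_section = ['Files']
    add_files(files_section, [Filter('src', ['a.cc', Filter('gen', ['b.cc'])])])
    print(files_section)
    # -> ['Files', ['Filter', {'Name': 'src'},
    #       ['File', {'RelativePath': 'a.cc'}],
    #       ['Filter', {'Name': 'gen'}, ['File', {'RelativePath': 'b.cc'}]]]]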
% path) - - # Add the config to the file node - spec = self._GetSpecForConfiguration('FileConfiguration', config, attrs, - tools) - parent.append(spec) - - def WriteIfChanged(self): - """Writes the project file.""" - # First create XML content definition - content = [ - 'VisualStudioProject', - {'ProjectType': 'Visual C++', - 'Version': self.version.ProjectVersion(), - 'Name': self.name, - 'ProjectGUID': self.guid, - 'RootNamespace': self.name, - 'Keyword': 'Win32Proj' - }, - self.platform_section, - self.tool_files_section, - self.configurations_section, - ['References'], # empty section - self.files_section, - ['Globals'] # empty section - ] - easy_xml.WriteXmlIfChanged(content, self.project_path, - encoding="Windows-1252") diff --git a/gyp/pylib/gyp/MSVSUserFile.py b/gyp/pylib/gyp/MSVSUserFile.py deleted file mode 100644 index 2264d64015..0000000000 --- a/gyp/pylib/gyp/MSVSUserFile.py +++ /dev/null @@ -1,147 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Visual Studio user preferences file writer.""" - -import os -import re -import socket # for gethostname - -import gyp.common -import gyp.easy_xml as easy_xml - - -#------------------------------------------------------------------------------ - -def _FindCommandInPath(command): - """If there are no slashes in the command given, this function - searches the PATH env to find the given command, and converts it - to an absolute path. We have to do this because MSVS is looking - for an actual file to launch a debugger on, not just a command - line. Note that this happens at GYP time, so anything needing to - be built needs to have a full path.""" - if '/' in command or '\\' in command: - # If the command already has path elements (either relative or - # absolute), then assume it is constructed properly. - return command - else: - # Search through the path list and find an existing file that - # we can access. - paths = os.environ.get('PATH','').split(os.pathsep) - for path in paths: - item = os.path.join(path, command) - if os.path.isfile(item) and os.access(item, os.X_OK): - return item - return command - -def _QuoteWin32CommandLineArgs(args): - new_args = [] - for arg in args: - # Replace all double-quotes with double-double-quotes to escape - # them for cmd shell, and then quote the whole thing if there - # are any. - if arg.find('"') != -1: - arg = '""'.join(arg.split('"')) - arg = '"%s"' % arg - - # Otherwise, if there are any spaces, quote the whole arg. - elif re.search(r'[ \t\n]', arg): - arg = '"%s"' % arg - new_args.append(arg) - return new_args - -class Writer(object): - """Visual Studio XML user user file writer.""" - - def __init__(self, user_file_path, version, name): - """Initializes the user file. - - Args: - user_file_path: Path to the user file. - version: Version info. - name: Name of the user file. - """ - self.user_file_path = user_file_path - self.version = version - self.name = name - self.configurations = {} - - def AddConfig(self, name): - """Adds a configuration to the project. - - Args: - name: Configuration name. - """ - self.configurations[name] = ['Configuration', {'Name': name}] - - def AddDebugSettings(self, config_name, command, environment = {}, - working_directory=""): - """Adds a DebugSettings node to the user file for a particular config. - - Args: - command: command line to run. First element in the list is the - executable. 
All elements of the command will be quoted if - necessary. - working_directory: other files which may trigger the rule. (optional) - """ - command = _QuoteWin32CommandLineArgs(command) - - abs_command = _FindCommandInPath(command[0]) - - if environment and isinstance(environment, dict): - env_list = ['%s="%s"' % (key, val) - for (key,val) in environment.items()] - environment = ' '.join(env_list) - else: - environment = '' - - n_cmd = ['DebugSettings', - {'Command': abs_command, - 'WorkingDirectory': working_directory, - 'CommandArguments': " ".join(command[1:]), - 'RemoteMachine': socket.gethostname(), - 'Environment': environment, - 'EnvironmentMerge': 'true', - # Currently these are all "dummy" values that we're just setting - # in the default manner that MSVS does it. We could use some of - # these to add additional capabilities, I suppose, but they might - # not have parity with other platforms then. - 'Attach': 'false', - 'DebuggerType': '3', # 'auto' debugger - 'Remote': '1', - 'RemoteCommand': '', - 'HttpUrl': '', - 'PDBPath': '', - 'SQLDebugging': '', - 'DebuggerFlavor': '0', - 'MPIRunCommand': '', - 'MPIRunArguments': '', - 'MPIRunWorkingDirectory': '', - 'ApplicationCommand': '', - 'ApplicationArguments': '', - 'ShimCommand': '', - 'MPIAcceptMode': '', - 'MPIAcceptFilter': '' - }] - - # Find the config, and add it if it doesn't exist. - if config_name not in self.configurations: - self.AddConfig(config_name) - - # Add the DebugSettings onto the appropriate config. - self.configurations[config_name].append(n_cmd) - - def WriteIfChanged(self): - """Writes the user file.""" - configs = ['Configurations'] - for config, spec in sorted(self.configurations.items()): - configs.append(spec) - - content = ['VisualStudioUserFile', - {'Version': self.version.ProjectVersion(), - 'Name': self.name - }, - configs] - easy_xml.WriteXmlIfChanged(content, self.user_file_path, - encoding="Windows-1252") diff --git a/gyp/pylib/gyp/MSVSUtil.py b/gyp/pylib/gyp/MSVSUtil.py deleted file mode 100644 index c8187eb331..0000000000 --- a/gyp/pylib/gyp/MSVSUtil.py +++ /dev/null @@ -1,270 +0,0 @@ -# Copyright (c) 2013 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions shared amongst the Windows generators.""" - -import copy -import os - - -# A dictionary mapping supported target types to extensions. -TARGET_TYPE_EXT = { - 'executable': 'exe', - 'loadable_module': 'dll', - 'shared_library': 'dll', - 'static_library': 'lib', -} - - -def _GetLargePdbShimCcPath(): - """Returns the path of the large_pdb_shim.cc file.""" - this_dir = os.path.abspath(os.path.dirname(__file__)) - src_dir = os.path.abspath(os.path.join(this_dir, '..', '..')) - win_data_dir = os.path.join(src_dir, 'data', 'win') - large_pdb_shim_cc = os.path.join(win_data_dir, 'large-pdb-shim.cc') - return large_pdb_shim_cc - - -def _DeepCopySomeKeys(in_dict, keys): - """Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|. - - Arguments: - in_dict: The dictionary to copy. - keys: The keys to be copied. If a key is in this list and doesn't exist in - |in_dict| this is not an error. - Returns: - The partially deep-copied dictionary. - """ - d = {} - for key in keys: - if key not in in_dict: - continue - d[key] = copy.deepcopy(in_dict[key]) - return d - - -def _SuffixName(name, suffix): - """Add a suffix to the end of a target. 
- - Arguments: - name: name of the target (foo#target) - suffix: the suffix to be added - Returns: - Target name with suffix added (foo_suffix#target) - """ - parts = name.rsplit('#', 1) - parts[0] = '%s_%s' % (parts[0], suffix) - return '#'.join(parts) - - -def _ShardName(name, number): - """Add a shard number to the end of a target. - - Arguments: - name: name of the target (foo#target) - number: shard number - Returns: - Target name with shard added (foo_1#target) - """ - return _SuffixName(name, str(number)) - - -def ShardTargets(target_list, target_dicts): - """Shard some targets apart to work around the linkers limits. - - Arguments: - target_list: List of target pairs: 'base/base.gyp:base'. - target_dicts: Dict of target properties keyed on target pair. - Returns: - Tuple of the new sharded versions of the inputs. - """ - # Gather the targets to shard, and how many pieces. - targets_to_shard = {} - for t in target_dicts: - shards = int(target_dicts[t].get('msvs_shard', 0)) - if shards: - targets_to_shard[t] = shards - # Shard target_list. - new_target_list = [] - for t in target_list: - if t in targets_to_shard: - for i in range(targets_to_shard[t]): - new_target_list.append(_ShardName(t, i)) - else: - new_target_list.append(t) - # Shard target_dict. - new_target_dicts = {} - for t in target_dicts: - if t in targets_to_shard: - for i in range(targets_to_shard[t]): - name = _ShardName(t, i) - new_target_dicts[name] = copy.copy(target_dicts[t]) - new_target_dicts[name]['target_name'] = _ShardName( - new_target_dicts[name]['target_name'], i) - sources = new_target_dicts[name].get('sources', []) - new_sources = [] - for pos in range(i, len(sources), targets_to_shard[t]): - new_sources.append(sources[pos]) - new_target_dicts[name]['sources'] = new_sources - else: - new_target_dicts[t] = target_dicts[t] - # Shard dependencies. - for t in new_target_dicts: - for deptype in ('dependencies', 'dependencies_original'): - dependencies = copy.copy(new_target_dicts[t].get(deptype, [])) - new_dependencies = [] - for d in dependencies: - if d in targets_to_shard: - for i in range(targets_to_shard[d]): - new_dependencies.append(_ShardName(d, i)) - else: - new_dependencies.append(d) - new_target_dicts[t][deptype] = new_dependencies - - return (new_target_list, new_target_dicts) - - -def _GetPdbPath(target_dict, config_name, vars): - """Returns the path to the PDB file that will be generated by a given - configuration. - - The lookup proceeds as follows: - - Look for an explicit path in the VCLinkerTool configuration block. - - Look for an 'msvs_large_pdb_path' variable. - - Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is - specified. - - Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'. - - Arguments: - target_dict: The target dictionary to be searched. - config_name: The name of the configuration of interest. - vars: A dictionary of common GYP variables with generator-specific values. - Returns: - The path of the corresponding PDB file. 
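ShardTargets above renames each shard via _SuffixName and deals the sources out round-robin. A worked example with hypothetical names; the slice sources[i::shards] is equivalent to the range(i, len(sources), shards) loop in the deleted code:

    def suffix_name(name, suffix):
        # 'foo#target' + '0' -> 'foo_0#target', as in _SuffixName above.
        parts = name.rsplit('#', 1)
        parts[0] = '%s_%s' % (parts[0], suffix)
        return '#'.join(parts)

    sources = ['a.cc', 'b.cc', 'c.cc', 'd.cc', 'e.cc']
    shards = 2
    for i in range(shards):
        print(suffix_name('base/base.gyp:base#target', str(i)),
              sources[i::shards])
    # -> base/base.gyp:base_0#target ['a.cc', 'c.cc', 'e.cc']
    # -> base/base.gyp:base_1#target ['b.cc', 'd.cc']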
- """ - config = target_dict['configurations'][config_name] - msvs = config.setdefault('msvs_settings', {}) - - linker = msvs.get('VCLinkerTool', {}) - - pdb_path = linker.get('ProgramDatabaseFile') - if pdb_path: - return pdb_path - - variables = target_dict.get('variables', {}) - pdb_path = variables.get('msvs_large_pdb_path', None) - if pdb_path: - return pdb_path - - - pdb_base = target_dict.get('product_name', target_dict['target_name']) - pdb_base = '%s.%s.pdb' % (pdb_base, TARGET_TYPE_EXT[target_dict['type']]) - pdb_path = vars['PRODUCT_DIR'] + '/' + pdb_base - - return pdb_path - - -def InsertLargePdbShims(target_list, target_dicts, vars): - """Insert a shim target that forces the linker to use 4KB pagesize PDBs. - - This is a workaround for targets with PDBs greater than 1GB in size, the - limit for the 1KB pagesize PDBs created by the linker by default. - - Arguments: - target_list: List of target pairs: 'base/base.gyp:base'. - target_dicts: Dict of target properties keyed on target pair. - vars: A dictionary of common GYP variables with generator-specific values. - Returns: - Tuple of the shimmed version of the inputs. - """ - # Determine which targets need shimming. - targets_to_shim = [] - for t in target_dicts: - target_dict = target_dicts[t] - - # We only want to shim targets that have msvs_large_pdb enabled. - if not int(target_dict.get('msvs_large_pdb', 0)): - continue - # This is intended for executable, shared_library and loadable_module - # targets where every configuration is set up to produce a PDB output. - # If any of these conditions is not true then the shim logic will fail - # below. - targets_to_shim.append(t) - - large_pdb_shim_cc = _GetLargePdbShimCcPath() - - for t in targets_to_shim: - target_dict = target_dicts[t] - target_name = target_dict.get('target_name') - - base_dict = _DeepCopySomeKeys(target_dict, - ['configurations', 'default_configuration', 'toolset']) - - # This is the dict for copying the source file (part of the GYP tree) - # to the intermediate directory of the project. This is necessary because - # we can't always build a relative path to the shim source file (on Windows - # GYP and the project may be on different drives), and Ninja hates absolute - # paths (it ends up generating the .obj and .obj.d alongside the source - # file, polluting GYPs tree). - copy_suffix = 'large_pdb_copy' - copy_target_name = target_name + '_' + copy_suffix - full_copy_target_name = _SuffixName(t, copy_suffix) - shim_cc_basename = os.path.basename(large_pdb_shim_cc) - shim_cc_dir = vars['SHARED_INTERMEDIATE_DIR'] + '/' + copy_target_name - shim_cc_path = shim_cc_dir + '/' + shim_cc_basename - copy_dict = copy.deepcopy(base_dict) - copy_dict['target_name'] = copy_target_name - copy_dict['type'] = 'none' - copy_dict['sources'] = [ large_pdb_shim_cc ] - copy_dict['copies'] = [{ - 'destination': shim_cc_dir, - 'files': [ large_pdb_shim_cc ] - }] - - # This is the dict for the PDB generating shim target. It depends on the - # copy target. - shim_suffix = 'large_pdb_shim' - shim_target_name = target_name + '_' + shim_suffix - full_shim_target_name = _SuffixName(t, shim_suffix) - shim_dict = copy.deepcopy(base_dict) - shim_dict['target_name'] = shim_target_name - shim_dict['type'] = 'static_library' - shim_dict['sources'] = [ shim_cc_path ] - shim_dict['dependencies'] = [ full_copy_target_name ] - - # Set up the shim to output its PDB to the same location as the final linker - # target. 
- for config_name, config in shim_dict.get('configurations').items(): - pdb_path = _GetPdbPath(target_dict, config_name, vars) - - # A few keys that we don't want to propagate. - for key in ['msvs_precompiled_header', 'msvs_precompiled_source', 'test']: - config.pop(key, None) - - msvs = config.setdefault('msvs_settings', {}) - - # Update the compiler directives in the shim target. - compiler = msvs.setdefault('VCCLCompilerTool', {}) - compiler['DebugInformationFormat'] = '3' - compiler['ProgramDataBaseFileName'] = pdb_path - - # Set the explicit PDB path in the appropriate configuration of the - # original target. - config = target_dict['configurations'][config_name] - msvs = config.setdefault('msvs_settings', {}) - linker = msvs.setdefault('VCLinkerTool', {}) - linker['GenerateDebugInformation'] = 'true' - linker['ProgramDatabaseFile'] = pdb_path - - # Add the new targets. They must go to the beginning of the list so that - # the dependency generation works as expected in ninja. - target_list.insert(0, full_copy_target_name) - target_list.insert(0, full_shim_target_name) - target_dicts[full_copy_target_name] = copy_dict - target_dicts[full_shim_target_name] = shim_dict - - # Update the original target to depend on the shim target. - target_dict.setdefault('dependencies', []).append(full_shim_target_name) - - return (target_list, target_dicts) diff --git a/gyp/pylib/gyp/MSVSVersion.py b/gyp/pylib/gyp/MSVSVersion.py deleted file mode 100644 index 13d9777f0e..0000000000 --- a/gyp/pylib/gyp/MSVSVersion.py +++ /dev/null @@ -1,449 +0,0 @@ -# Copyright (c) 2013 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Handle version information related to Visual Stuio.""" - -import errno -import os -import re -import subprocess -import sys -import gyp -import glob - - -class VisualStudioVersion(object): - """Information regarding a version of Visual Studio.""" - - def __init__(self, short_name, description, - solution_version, project_version, flat_sln, uses_vcxproj, - path, sdk_based, default_toolset=None): - self.short_name = short_name - self.description = description - self.solution_version = solution_version - self.project_version = project_version - self.flat_sln = flat_sln - self.uses_vcxproj = uses_vcxproj - self.path = path - self.sdk_based = sdk_based - self.default_toolset = default_toolset - - def ShortName(self): - return self.short_name - - def Description(self): - """Get the full description of the version.""" - return self.description - - def SolutionVersion(self): - """Get the version number of the sln files.""" - return self.solution_version - - def ProjectVersion(self): - """Get the version number of the vcproj or vcxproj files.""" - return self.project_version - - def FlatSolution(self): - return self.flat_sln - - def UsesVcxproj(self): - """Returns true if this version uses a vcxproj file.""" - return self.uses_vcxproj - - def ProjectExtension(self): - """Returns the file extension for the project.""" - return self.uses_vcxproj and '.vcxproj' or '.vcproj' - - def Path(self): - """Returns the path to Visual Studio installation.""" - return self.path - - def ToolPath(self, tool): - """Returns the path to a given compiler tool. 
""" - return os.path.normpath(os.path.join(self.path, "VC/bin", tool)) - - def DefaultToolset(self): - """Returns the msbuild toolset version that will be used in the absence - of a user override.""" - return self.default_toolset - - def SetupScript(self, target_arch): - """Returns a command (with arguments) to be used to set up the - environment.""" - # Check if we are running in the SDK command line environment and use - # the setup script from the SDK if so. |target_arch| should be either - # 'x86' or 'x64'. - assert target_arch in ('x86', 'x64') - sdk_dir = os.environ.get('WindowsSDKDir') - if self.sdk_based and sdk_dir: - return [os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.Cmd')), - '/' + target_arch] - else: - # We don't use VC/vcvarsall.bat for x86 because vcvarsall calls - # vcvars32, which it can only find if VS??COMNTOOLS is set, which it - # isn't always. - if target_arch == 'x86': - if self.short_name >= '2013' and self.short_name[-1] != 'e' and ( - os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or - os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'): - # VS2013 and later, non-Express have a x64-x86 cross that we want - # to prefer. - return [os.path.normpath( - os.path.join(self.path, 'VC/vcvarsall.bat')), 'amd64_x86'] - # Otherwise, the standard x86 compiler. - return [os.path.normpath( - os.path.join(self.path, 'Common7/Tools/vsvars32.bat'))] - else: - assert target_arch == 'x64' - arg = 'x86_amd64' - # Use the 64-on-64 compiler if we're not using an express - # edition and we're running on a 64bit OS. - if self.short_name[-1] != 'e' and ( - os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or - os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'): - arg = 'amd64' - return [os.path.normpath( - os.path.join(self.path, 'VC/vcvarsall.bat')), arg] - - -def _RegistryQueryBase(sysdir, key, value): - """Use reg.exe to read a particular key. - - While ideally we might use the win32 module, we would like gyp to be - python neutral, so for instance cygwin python lacks this module. - - Arguments: - sysdir: The system subdirectory to attempt to launch reg.exe from. - key: The registry key to read from. - value: The particular value to read. - Return: - stdout from reg.exe, or None for failure. - """ - # Skip if not on Windows or Python Win32 setup issue - if sys.platform not in ('win32', 'cygwin'): - return None - # Setup params to pass to and attempt to launch reg.exe - cmd = [os.path.join(os.environ.get('WINDIR', ''), sysdir, 'reg.exe'), - 'query', key] - if value: - cmd.extend(['/v', value]) - p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - # Obtain the stdout from reg.exe, reading to the end so p.returncode is valid - # Note that the error text may be in [1] in some cases - text = p.communicate()[0] - # Check return code from reg.exe; officially 0==success and 1==error - if p.returncode: - return None - return text - - -def _RegistryQuery(key, value=None): - r"""Use reg.exe to read a particular key through _RegistryQueryBase. - - First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If - that fails, it falls back to System32. Sysnative is available on Vista and - up and available on Windows Server 2003 and XP through KB patch 942589. Note - that Sysnative will always fail if using 64-bit python due to it being a - virtual directory and System32 will work correctly in the first place. - - KB 942589 - http://support.microsoft.com/kb/942589/en-us. - - Arguments: - key: The registry key. 
- value: The particular registry value to read (optional). - Return: - stdout from reg.exe, or None for failure. - """ - text = None - try: - text = _RegistryQueryBase('Sysnative', key, value) - except OSError as e: - if e.errno == errno.ENOENT: - text = _RegistryQueryBase('System32', key, value) - else: - raise - return text - - -def _RegistryGetValueUsingWinReg(key, value): - """Use the _winreg module to obtain the value of a registry key. - - Args: - key: The registry key. - value: The particular registry value to read. - Return: - contents of the registry key's value, or None on failure. Throws - ImportError if _winreg is unavailable. - """ - try: - # Python 2 - from _winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx - except ImportError: - # Python 3 - from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx - - try: - root, subkey = key.split('\\', 1) - assert root == 'HKLM' # Only need HKLM for now. - with OpenKey(HKEY_LOCAL_MACHINE, subkey) as hkey: - return QueryValueEx(hkey, value)[0] - except WindowsError: - return None - - -def _RegistryGetValue(key, value): - """Use _winreg or reg.exe to obtain the value of a registry key. - - Using _winreg is preferable because it solves an issue on some corporate - environments where access to reg.exe is locked down. However, we still need - to fallback to reg.exe for the case where the _winreg module is not available - (for example in cygwin python). - - Args: - key: The registry key. - value: The particular registry value to read. - Return: - contents of the registry key's value, or None on failure. - """ - try: - return _RegistryGetValueUsingWinReg(key, value) - except ImportError: - pass - - # Fallback to reg.exe if we fail to import _winreg. - text = _RegistryQuery(key, value) - if not text: - return None - # Extract value. - match = re.search(r'REG_\w+\s+([^\r]+)\r\n', text) - if not match: - return None - return match.group(1) - - -def _CreateVersion(name, path, sdk_based=False): - """Sets up MSVS project generation. - - Setup is based off the GYP_MSVS_VERSION environment variable or whatever is - autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version is - passed in that doesn't match a value in versions python will throw a error. 
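The deleted registry helpers above prefer the winreg module and fall back to reg.exe only where winreg is unavailable (e.g. cygwin Python). A minimal Python 3 equivalent of the winreg fast path; it only runs on Windows, and the key shown is one the deleted detector actually probed:

    import sys

    def registry_get_value(key, value):
        """winreg-only equivalent of the deleted _RegistryGetValue fast path."""
        from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx
        root, subkey = key.split('\\', 1)
        assert root == 'HKLM'  # the deleted code only needed HKLM
        try:
            with OpenKey(HKEY_LOCAL_MACHINE, subkey) as hkey:
                return QueryValueEx(hkey, value)[0]
        except OSError:  # WindowsError is an alias of OSError on Python 3
            return None

    if sys.platform == 'win32':
        # The same key the deleted detector probed for a VS 2015 install.
        print(registry_get_value(
            r'HKLM\Software\Microsoft\VisualStudio\14.0', 'InstallDir'))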
- """ - if path: - path = os.path.normpath(path) - versions = { - '2015': VisualStudioVersion('2015', - 'Visual Studio 2015', - solution_version='12.00', - project_version='14.0', - flat_sln=False, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based, - default_toolset='v140'), - '2013': VisualStudioVersion('2013', - 'Visual Studio 2013', - solution_version='13.00', - project_version='12.0', - flat_sln=False, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based, - default_toolset='v120'), - '2013e': VisualStudioVersion('2013e', - 'Visual Studio 2013', - solution_version='13.00', - project_version='12.0', - flat_sln=True, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based, - default_toolset='v120'), - '2012': VisualStudioVersion('2012', - 'Visual Studio 2012', - solution_version='12.00', - project_version='4.0', - flat_sln=False, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based, - default_toolset='v110'), - '2012e': VisualStudioVersion('2012e', - 'Visual Studio 2012', - solution_version='12.00', - project_version='4.0', - flat_sln=True, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based, - default_toolset='v110'), - '2010': VisualStudioVersion('2010', - 'Visual Studio 2010', - solution_version='11.00', - project_version='4.0', - flat_sln=False, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based), - '2010e': VisualStudioVersion('2010e', - 'Visual C++ Express 2010', - solution_version='11.00', - project_version='4.0', - flat_sln=True, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based), - '2008': VisualStudioVersion('2008', - 'Visual Studio 2008', - solution_version='10.00', - project_version='9.00', - flat_sln=False, - uses_vcxproj=False, - path=path, - sdk_based=sdk_based), - '2008e': VisualStudioVersion('2008e', - 'Visual Studio 2008', - solution_version='10.00', - project_version='9.00', - flat_sln=True, - uses_vcxproj=False, - path=path, - sdk_based=sdk_based), - '2005': VisualStudioVersion('2005', - 'Visual Studio 2005', - solution_version='9.00', - project_version='8.00', - flat_sln=False, - uses_vcxproj=False, - path=path, - sdk_based=sdk_based), - '2005e': VisualStudioVersion('2005e', - 'Visual Studio 2005', - solution_version='9.00', - project_version='8.00', - flat_sln=True, - uses_vcxproj=False, - path=path, - sdk_based=sdk_based), - } - return versions[str(name)] - - -def _ConvertToCygpath(path): - """Convert to cygwin path if we are using cygwin.""" - if sys.platform == 'cygwin': - p = subprocess.Popen(['cygpath', path], stdout=subprocess.PIPE) - path = p.communicate()[0].strip() - return path - - -def _DetectVisualStudioVersions(versions_to_check, force_express): - """Collect the list of installed visual studio versions. - - Returns: - A list of visual studio versions installed in descending order of - usage preference. - Base this on the registry and a quick check if devenv.exe exists. - Only versions 8-10 are considered. - Possibilities are: - 2005(e) - Visual Studio 2005 (8) - 2008(e) - Visual Studio 2008 (9) - 2010(e) - Visual Studio 2010 (10) - 2012(e) - Visual Studio 2012 (11) - 2013(e) - Visual Studio 2013 (12) - 2015 - Visual Studio 2015 (14) - Where (e) is e for express editions of MSVS and blank otherwise. 
- """ - version_to_year = { - '8.0': '2005', - '9.0': '2008', - '10.0': '2010', - '11.0': '2012', - '12.0': '2013', - '14.0': '2015', - } - versions = [] - for version in versions_to_check: - # Old method of searching for which VS version is installed - # We don't use the 2010-encouraged-way because we also want to get the - # path to the binaries, which it doesn't offer. - keys = [r'HKLM\Software\Microsoft\VisualStudio\%s' % version, - r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s' % version, - r'HKLM\Software\Microsoft\VCExpress\%s' % version, - r'HKLM\Software\Wow6432Node\Microsoft\VCExpress\%s' % version] - for index in range(len(keys)): - path = _RegistryGetValue(keys[index], 'InstallDir') - if not path: - continue - path = _ConvertToCygpath(path) - # Check for full. - full_path = os.path.join(path, 'devenv.exe') - express_path = os.path.join(path, '*express.exe') - if not force_express and os.path.exists(full_path): - # Add this one. - versions.append(_CreateVersion(version_to_year[version], - os.path.join(path, '..', '..'))) - # Check for express. - elif glob.glob(express_path): - # Add this one. - versions.append(_CreateVersion(version_to_year[version] + 'e', - os.path.join(path, '..', '..'))) - - # The old method above does not work when only SDK is installed. - keys = [r'HKLM\Software\Microsoft\VisualStudio\SxS\VC7', - r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7'] - for index in range(len(keys)): - path = _RegistryGetValue(keys[index], version) - if not path: - continue - path = _ConvertToCygpath(path) - if version != '14.0': # There is no Express edition for 2015. - versions.append(_CreateVersion(version_to_year[version] + 'e', - os.path.join(path, '..'), sdk_based=True)) - - return versions - - -def SelectVisualStudioVersion(version='auto', allow_fallback=True): - """Select which version of Visual Studio projects to generate. - - Arguments: - version: Hook to allow caller to force a particular version (vs auto). - Returns: - An object representing a visual studio project format version. - """ - # In auto mode, check environment variable for override. - if version == 'auto': - version = os.environ.get('GYP_MSVS_VERSION', 'auto') - version_map = { - 'auto': ('14.0', '12.0', '10.0', '9.0', '8.0', '11.0'), - '2005': ('8.0',), - '2005e': ('8.0',), - '2008': ('9.0',), - '2008e': ('9.0',), - '2010': ('10.0',), - '2010e': ('10.0',), - '2012': ('11.0',), - '2012e': ('11.0',), - '2013': ('12.0',), - '2013e': ('12.0',), - '2015': ('14.0',), - } - override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH') - if override_path: - msvs_version = os.environ.get('GYP_MSVS_VERSION') - if not msvs_version: - raise ValueError('GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be ' - 'set to a particular version (e.g. 2010e).') - return _CreateVersion(msvs_version, override_path, sdk_based=True) - version = str(version) - versions = _DetectVisualStudioVersions(version_map[version], 'e' in version) - if not versions: - if not allow_fallback: - raise ValueError('Could not locate Visual Studio installation.') - if version == 'auto': - # Default to 2005 if we couldn't find anything - return _CreateVersion('2005', None) - else: - return _CreateVersion(version, None) - return versions[0] diff --git a/gyp/pylib/gyp/generator/android.py b/gyp/pylib/gyp/generator/android.py deleted file mode 100644 index b7f9842888..0000000000 --- a/gyp/pylib/gyp/generator/android.py +++ /dev/null @@ -1,1097 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. 
-# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# Notes: -# -# This generates makefiles suitable for inclusion into the Android build system -# via an Android.mk file. It is based on make.py, the standard makefile -# generator. -# -# The code below generates a separate .mk file for each target, but -# all are sourced by the top-level GypAndroid.mk. This means that all -# variables in .mk-files clobber one another, and furthermore that any -# variables set potentially clash with other Android build system variables. -# Try to avoid setting global variables where possible. - -from __future__ import print_function - -import gyp -import gyp.common -import gyp.generator.make as make # Reuse global functions from make backend. -import os -import re -import subprocess - -generator_default_variables = { - 'OS': 'android', - 'EXECUTABLE_PREFIX': '', - 'EXECUTABLE_SUFFIX': '', - 'STATIC_LIB_PREFIX': 'lib', - 'SHARED_LIB_PREFIX': 'lib', - 'STATIC_LIB_SUFFIX': '.a', - 'SHARED_LIB_SUFFIX': '.so', - 'INTERMEDIATE_DIR': '$(gyp_intermediate_dir)', - 'SHARED_INTERMEDIATE_DIR': '$(gyp_shared_intermediate_dir)', - 'PRODUCT_DIR': '$(gyp_shared_intermediate_dir)', - 'SHARED_LIB_DIR': '$(builddir)/lib.$(TOOLSET)', - 'LIB_DIR': '$(obj).$(TOOLSET)', - 'RULE_INPUT_ROOT': '%(INPUT_ROOT)s', # This gets expanded by Python. - 'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s', # This gets expanded by Python. - 'RULE_INPUT_PATH': '$(RULE_SOURCES)', - 'RULE_INPUT_EXT': '$(suffix $<)', - 'RULE_INPUT_NAME': '$(notdir $<)', - 'CONFIGURATION_NAME': '$(GYP_CONFIGURATION)', -} - -# Make supports multiple toolsets -generator_supports_multiple_toolsets = True - - -# Generator-specific gyp specs. -generator_additional_non_configuration_keys = [ - # Boolean to declare that this target does not want its name mangled. - 'android_unmangled_name', - # Map of android build system variables to set. - 'aosp_build_settings', -] -generator_additional_path_sections = [] -generator_extra_sources_for_rules = [] - - -ALL_MODULES_FOOTER = """\ -# "gyp_all_modules" is a concatenation of the "gyp_all_modules" targets from -# all the included sub-makefiles. This is just here to clarify. -gyp_all_modules: -""" - -header = """\ -# This file is generated by gyp; do not edit. - -""" - -# Map gyp target types to Android module classes. -MODULE_CLASSES = { - 'static_library': 'STATIC_LIBRARIES', - 'shared_library': 'SHARED_LIBRARIES', - 'executable': 'EXECUTABLES', -} - - -def IsCPPExtension(ext): - return make.COMPILABLE_EXTENSIONS.get(ext) == 'cxx' - - -def Sourceify(path): - """Convert a path to its source directory form. The Android backend does not - support options.generator_output, so this function is a noop.""" - return path - - -# Map from qualified target to path to output. -# For Android, the target of these maps is a tuple ('static', 'modulename'), -# ('dynamic', 'modulename'), or ('path', 'some/path') instead of a string, -# since we link by module. -target_outputs = {} -# Map from qualified target to any linkable output. A subset -# of target_outputs. E.g. when mybinary depends on liba, we want to -# include liba in the linker line; when otherbinary depends on -# mybinary, we just want to build mybinary first. -target_link_deps = {} - - -class AndroidMkWriter(object): - """AndroidMkWriter packages up the writing of one target-specific Android.mk. - - Its only real entry point is Write(), and is mostly used for namespacing. 
- """ - - def __init__(self, android_top_dir): - self.android_top_dir = android_top_dir - - def Write(self, qualified_target, relative_target, base_path, output_filename, - spec, configs, part_of_all, write_alias_target, sdk_version): - """The main entry point: writes a .mk file for a single target. - - Arguments: - qualified_target: target we're generating - relative_target: qualified target name relative to the root - base_path: path relative to source root we're building in, used to resolve - target-relative paths - output_filename: output .mk file name to write - spec, configs: gyp info - part_of_all: flag indicating this target is part of 'all' - write_alias_target: flag indicating whether to create short aliases for - this target - sdk_version: what to emit for LOCAL_SDK_VERSION in output - """ - gyp.common.EnsureDirExists(output_filename) - - self.fp = open(output_filename, 'w') - - self.fp.write(header) - - self.qualified_target = qualified_target - self.relative_target = relative_target - self.path = base_path - self.target = spec['target_name'] - self.type = spec['type'] - self.toolset = spec['toolset'] - - deps, link_deps = self.ComputeDeps(spec) - - # Some of the generation below can add extra output, sources, or - # link dependencies. All of the out params of the functions that - # follow use names like extra_foo. - extra_outputs = [] - extra_sources = [] - - self.android_class = MODULE_CLASSES.get(self.type, 'GYP') - self.android_module = self.ComputeAndroidModule(spec) - (self.android_stem, self.android_suffix) = self.ComputeOutputParts(spec) - self.output = self.output_binary = self.ComputeOutput(spec) - - # Standard header. - self.WriteLn('include $(CLEAR_VARS)\n') - - # Module class and name. - self.WriteLn('LOCAL_MODULE_CLASS := ' + self.android_class) - self.WriteLn('LOCAL_MODULE := ' + self.android_module) - # Only emit LOCAL_MODULE_STEM if it's different to LOCAL_MODULE. - # The library module classes fail if the stem is set. ComputeOutputParts - # makes sure that stem == modulename in these cases. - if self.android_stem != self.android_module: - self.WriteLn('LOCAL_MODULE_STEM := ' + self.android_stem) - self.WriteLn('LOCAL_MODULE_SUFFIX := ' + self.android_suffix) - if self.toolset == 'host': - self.WriteLn('LOCAL_IS_HOST_MODULE := true') - self.WriteLn('LOCAL_MULTILIB := $(GYP_HOST_MULTILIB)') - elif sdk_version > 0: - self.WriteLn('LOCAL_MODULE_TARGET_ARCH := ' - '$(TARGET_$(GYP_VAR_PREFIX)ARCH)') - self.WriteLn('LOCAL_SDK_VERSION := %s' % sdk_version) - - # Grab output directories; needed for Actions and Rules. - if self.toolset == 'host': - self.WriteLn('gyp_intermediate_dir := ' - '$(call local-intermediates-dir,,$(GYP_HOST_VAR_PREFIX))') - else: - self.WriteLn('gyp_intermediate_dir := ' - '$(call local-intermediates-dir,,$(GYP_VAR_PREFIX))') - self.WriteLn('gyp_shared_intermediate_dir := ' - '$(call intermediates-dir-for,GYP,shared,,,$(GYP_VAR_PREFIX))') - self.WriteLn() - - # List files this target depends on so that actions/rules/copies/sources - # can depend on the list. - # TODO: doesn't pull in things through transitive link deps; needed? - target_dependencies = [x[1] for x in deps if x[0] == 'path'] - self.WriteLn('# Make sure our deps are built first.') - self.WriteList(target_dependencies, 'GYP_TARGET_DEPENDENCIES', - local_pathify=True) - - # Actions must come first, since they can generate more OBJs for use below. - if 'actions' in spec: - self.WriteActions(spec['actions'], extra_sources, extra_outputs) - - # Rules must be early like actions. 
- if 'rules' in spec: - self.WriteRules(spec['rules'], extra_sources, extra_outputs) - - if 'copies' in spec: - self.WriteCopies(spec['copies'], extra_outputs) - - # GYP generated outputs. - self.WriteList(extra_outputs, 'GYP_GENERATED_OUTPUTS', local_pathify=True) - - # Set LOCAL_ADDITIONAL_DEPENDENCIES so that Android's build rules depend - # on both our dependency targets and our generated files. - self.WriteLn('# Make sure our deps and generated files are built first.') - self.WriteLn('LOCAL_ADDITIONAL_DEPENDENCIES := $(GYP_TARGET_DEPENDENCIES) ' - '$(GYP_GENERATED_OUTPUTS)') - self.WriteLn() - - # Sources. - if spec.get('sources', []) or extra_sources: - self.WriteSources(spec, configs, extra_sources) - - self.WriteTarget(spec, configs, deps, link_deps, part_of_all, - write_alias_target) - - # Update global list of target outputs, used in dependency tracking. - target_outputs[qualified_target] = ('path', self.output_binary) - - # Update global list of link dependencies. - if self.type == 'static_library': - target_link_deps[qualified_target] = ('static', self.android_module) - elif self.type == 'shared_library': - target_link_deps[qualified_target] = ('shared', self.android_module) - - self.fp.close() - return self.android_module - - - def WriteActions(self, actions, extra_sources, extra_outputs): - """Write Makefile code for any 'actions' from the gyp input. - - extra_sources: a list that will be filled in with newly generated source - files, if any - extra_outputs: a list that will be filled in with any outputs of these - actions (used to make other pieces dependent on these - actions) - """ - for action in actions: - name = make.StringToMakefileVariable('%s_%s' % (self.relative_target, - action['action_name'])) - self.WriteLn('### Rules for action "%s":' % action['action_name']) - inputs = action['inputs'] - outputs = action['outputs'] - - # Build up a list of outputs. - # Collect the output dirs we'll need. - dirs = set() - for out in outputs: - if not out.startswith('$'): - print('WARNING: Action for target "%s" writes output to local path ' - '"%s".' % (self.target, out)) - dir = os.path.split(out)[0] - if dir: - dirs.add(dir) - if int(action.get('process_outputs_as_sources', False)): - extra_sources += outputs - - # Prepare the actual command. - command = gyp.common.EncodePOSIXShellList(action['action']) - if 'message' in action: - quiet_cmd = 'Gyp action: %s ($@)' % action['message'] - else: - quiet_cmd = 'Gyp action: %s ($@)' % name - if len(dirs) > 0: - command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command - - cd_action = 'cd $(gyp_local_path)/%s; ' % self.path - command = cd_action + command - - # The makefile rules are all relative to the top dir, but the gyp actions - # are defined relative to their containing dir. This replaces the gyp_* - # variables for the action rule with an absolute version so that the - # output goes in the right place. - # Only write the gyp_* rules for the "primary" output (:1); - # it's superfluous for the "extra outputs", and this avoids accidentally - # writing duplicate dummy rules for those outputs. 
- main_output = make.QuoteSpaces(self.LocalPathify(outputs[0])) - self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output) - self.WriteLn('%s: gyp_var_prefix := $(GYP_VAR_PREFIX)' % main_output) - self.WriteLn('%s: gyp_intermediate_dir := ' - '$(abspath $(gyp_intermediate_dir))' % main_output) - self.WriteLn('%s: gyp_shared_intermediate_dir := ' - '$(abspath $(gyp_shared_intermediate_dir))' % main_output) - - # Android's envsetup.sh adds a number of directories to the path including - # the built host binary directory. This causes actions/rules invoked by - # gyp to sometimes use these instead of system versions, e.g. bison. - # The built host binaries may not be suitable, and can cause errors. - # So, we remove them from the PATH using the ANDROID_BUILD_PATHS variable - # set by envsetup. - self.WriteLn('%s: export PATH := $(subst $(ANDROID_BUILD_PATHS),,$(PATH))' - % main_output) - - # Don't allow spaces in input/output filenames, but make an exception for - # filenames which start with '$(' since it's okay for there to be spaces - # inside of make function/macro invocations. - for input in inputs: - if not input.startswith('$(') and ' ' in input: - raise gyp.common.GypError( - 'Action input filename "%s" in target %s contains a space' % - (input, self.target)) - for output in outputs: - if not output.startswith('$(') and ' ' in output: - raise gyp.common.GypError( - 'Action output filename "%s" in target %s contains a space' % - (output, self.target)) - - self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' % - (main_output, ' '.join(map(self.LocalPathify, inputs)))) - self.WriteLn('\t@echo "%s"' % quiet_cmd) - self.WriteLn('\t$(hide)%s\n' % command) - for output in outputs[1:]: - # Make each output depend on the main output, with an empty command - # to force make to notice that the mtime has changed. - self.WriteLn('%s: %s ;' % (self.LocalPathify(output), main_output)) - - extra_outputs += outputs - self.WriteLn() - - self.WriteLn() - - - def WriteRules(self, rules, extra_sources, extra_outputs): - """Write Makefile code for any 'rules' from the gyp input. 
- - extra_sources: a list that will be filled in with newly generated source - files, if any - extra_outputs: a list that will be filled in with any outputs of these - rules (used to make other pieces dependent on these rules) - """ - if len(rules) == 0: - return - - for rule in rules: - if len(rule.get('rule_sources', [])) == 0: - continue - name = make.StringToMakefileVariable('%s_%s' % (self.relative_target, - rule['rule_name'])) - self.WriteLn('\n### Generated for rule "%s":' % name) - self.WriteLn('# "%s":' % rule) - - inputs = rule.get('inputs') - for rule_source in rule.get('rule_sources', []): - (rule_source_dirname, rule_source_basename) = os.path.split(rule_source) - (rule_source_root, rule_source_ext) = \ - os.path.splitext(rule_source_basename) - - outputs = [self.ExpandInputRoot(out, rule_source_root, - rule_source_dirname) - for out in rule['outputs']] - - dirs = set() - for out in outputs: - if not out.startswith('$'): - print('WARNING: Rule for target %s writes output to local path %s' - % (self.target, out)) - dir = os.path.dirname(out) - if dir: - dirs.add(dir) - extra_outputs += outputs - if int(rule.get('process_outputs_as_sources', False)): - extra_sources.extend(outputs) - - components = [] - for component in rule['action']: - component = self.ExpandInputRoot(component, rule_source_root, - rule_source_dirname) - if '$(RULE_SOURCES)' in component: - component = component.replace('$(RULE_SOURCES)', - rule_source) - components.append(component) - - command = gyp.common.EncodePOSIXShellList(components) - cd_action = 'cd $(gyp_local_path)/%s; ' % self.path - command = cd_action + command - if dirs: - command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command - - # We set up a rule to build the first output, and then set up - # a rule for each additional output to depend on the first. - outputs = map(self.LocalPathify, outputs) - main_output = outputs[0] - self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output) - self.WriteLn('%s: gyp_var_prefix := $(GYP_VAR_PREFIX)' % main_output) - self.WriteLn('%s: gyp_intermediate_dir := ' - '$(abspath $(gyp_intermediate_dir))' % main_output) - self.WriteLn('%s: gyp_shared_intermediate_dir := ' - '$(abspath $(gyp_shared_intermediate_dir))' % main_output) - - # See explanation in WriteActions. - self.WriteLn('%s: export PATH := ' - '$(subst $(ANDROID_BUILD_PATHS),,$(PATH))' % main_output) - - main_output_deps = self.LocalPathify(rule_source) - if inputs: - main_output_deps += ' ' - main_output_deps += ' '.join([self.LocalPathify(f) for f in inputs]) - - self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' % - (main_output, main_output_deps)) - self.WriteLn('\t%s\n' % command) - for output in outputs[1:]: - # Make each output depend on the main output, with an empty command - # to force make to notice that the mtime has changed. - self.WriteLn('%s: %s ;' % (output, main_output)) - self.WriteLn() - - self.WriteLn() - - - def WriteCopies(self, copies, extra_outputs): - """Write Makefile code for any 'copies' from the gyp input. - - extra_outputs: a list that will be filled in with any outputs of this action - (used to make other pieces dependent on this action) - """ - self.WriteLn('### Generated for copy rule.') - - variable = make.StringToMakefileVariable(self.relative_target + '_copies') - outputs = [] - for copy in copies: - for path in copy['files']: - # The Android build system does not allow generation of files into the - # source tree. 
The destination should start with a variable, which will - # typically be $(gyp_intermediate_dir) or - # $(gyp_shared_intermediate_dir). Note that we can't use an assertion - # because some of the gyp tests depend on this. - if not copy['destination'].startswith('$'): - print('WARNING: Copy rule for target %s writes output to ' - 'local path %s' % (self.target, copy['destination'])) - - # LocalPathify() calls normpath, stripping trailing slashes. - path = Sourceify(self.LocalPathify(path)) - filename = os.path.split(path)[1] - output = Sourceify(self.LocalPathify(os.path.join(copy['destination'], - filename))) - - self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES) | $(ACP)' % - (output, path)) - self.WriteLn('\t@echo Copying: $@') - self.WriteLn('\t$(hide) mkdir -p $(dir $@)') - self.WriteLn('\t$(hide) $(ACP) -rpf $< $@') - self.WriteLn() - outputs.append(output) - self.WriteLn('%s = %s' % (variable, - ' '.join(map(make.QuoteSpaces, outputs)))) - extra_outputs.append('$(%s)' % variable) - self.WriteLn() - - - def WriteSourceFlags(self, spec, configs): - """Write out the flags and include paths used to compile source files for - the current target. - - Args: - spec, configs: input from gyp. - """ - for configname, config in sorted(configs.items()): - extracted_includes = [] - - self.WriteLn('\n# Flags passed to both C and C++ files.') - cflags, includes_from_cflags = self.ExtractIncludesFromCFlags( - config.get('cflags', []) + config.get('cflags_c', [])) - extracted_includes.extend(includes_from_cflags) - self.WriteList(cflags, 'MY_CFLAGS_%s' % configname) - - self.WriteList(config.get('defines'), 'MY_DEFS_%s' % configname, - prefix='-D', quoter=make.EscapeCppDefine) - - self.WriteLn('\n# Include paths placed before CFLAGS/CPPFLAGS') - includes = list(config.get('include_dirs', [])) - includes.extend(extracted_includes) - includes = map(Sourceify, map(self.LocalPathify, includes)) - includes = self.NormalizeIncludePaths(includes) - self.WriteList(includes, 'LOCAL_C_INCLUDES_%s' % configname) - - self.WriteLn('\n# Flags passed to only C++ (and not C) files.') - self.WriteList(config.get('cflags_cc'), 'LOCAL_CPPFLAGS_%s' % configname) - - self.WriteLn('\nLOCAL_CFLAGS := $(MY_CFLAGS_$(GYP_CONFIGURATION)) ' - '$(MY_DEFS_$(GYP_CONFIGURATION))') - # Undefine ANDROID for host modules - # TODO: the source code should not use macro ANDROID to tell if it's host - # or target module. - if self.toolset == 'host': - self.WriteLn('# Undefine ANDROID for host modules') - self.WriteLn('LOCAL_CFLAGS += -UANDROID') - self.WriteLn('LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) ' - '$(LOCAL_C_INCLUDES_$(GYP_CONFIGURATION))') - self.WriteLn('LOCAL_CPPFLAGS := $(LOCAL_CPPFLAGS_$(GYP_CONFIGURATION))') - # Android uses separate flags for assembly file invocations, but gyp expects - # the same CFLAGS to be applied: - self.WriteLn('LOCAL_ASFLAGS := $(LOCAL_CFLAGS)') - - - def WriteSources(self, spec, configs, extra_sources): - """Write Makefile code for any 'sources' from the gyp input. - These are source files necessary to build the current target. - We need to handle shared_intermediate directory source files as - a special case by copying them to the intermediate directory and - treating them as a genereated sources. Otherwise the Android build - rules won't pick them up. - - Args: - spec, configs: input from gyp. - extra_sources: Sources generated from Actions or Rules. 
- """ - sources = filter(make.Compilable, spec.get('sources', [])) - generated_not_sources = [x for x in extra_sources if not make.Compilable(x)] - extra_sources = filter(make.Compilable, extra_sources) - - # Determine and output the C++ extension used by these sources. - # We simply find the first C++ file and use that extension. - all_sources = sources + extra_sources - local_cpp_extension = '.cpp' - for source in all_sources: - (root, ext) = os.path.splitext(source) - if IsCPPExtension(ext): - local_cpp_extension = ext - break - if local_cpp_extension != '.cpp': - self.WriteLn('LOCAL_CPP_EXTENSION := %s' % local_cpp_extension) - - # We need to move any non-generated sources that are coming from the - # shared intermediate directory out of LOCAL_SRC_FILES and put them - # into LOCAL_GENERATED_SOURCES. We also need to move over any C++ files - # that don't match our local_cpp_extension, since Android will only - # generate Makefile rules for a single LOCAL_CPP_EXTENSION. - local_files = [] - for source in sources: - (root, ext) = os.path.splitext(source) - if '$(gyp_shared_intermediate_dir)' in source: - extra_sources.append(source) - elif '$(gyp_intermediate_dir)' in source: - extra_sources.append(source) - elif IsCPPExtension(ext) and ext != local_cpp_extension: - extra_sources.append(source) - else: - local_files.append(os.path.normpath(os.path.join(self.path, source))) - - # For any generated source, if it is coming from the shared intermediate - # directory then we add a Make rule to copy them to the local intermediate - # directory first. This is because the Android LOCAL_GENERATED_SOURCES - # must be in the local module intermediate directory for the compile rules - # to work properly. If the file has the wrong C++ extension, then we add - # a rule to copy that to intermediates and use the new version. - final_generated_sources = [] - # If a source file gets copied, we still need to add the orginal source - # directory as header search path, for GCC searches headers in the - # directory that contains the source file by default. - origin_src_dirs = [] - for source in extra_sources: - local_file = source - if not '$(gyp_intermediate_dir)/' in local_file: - basename = os.path.basename(local_file) - local_file = '$(gyp_intermediate_dir)/' + basename - (root, ext) = os.path.splitext(local_file) - if IsCPPExtension(ext) and ext != local_cpp_extension: - local_file = root + local_cpp_extension - if local_file != source: - self.WriteLn('%s: %s' % (local_file, self.LocalPathify(source))) - self.WriteLn('\tmkdir -p $(@D); cp $< $@') - origin_src_dirs.append(os.path.dirname(source)) - final_generated_sources.append(local_file) - - # We add back in all of the non-compilable stuff to make sure that the - # make rules have dependencies on them. - final_generated_sources.extend(generated_not_sources) - self.WriteList(final_generated_sources, 'LOCAL_GENERATED_SOURCES') - - origin_src_dirs = gyp.common.uniquer(origin_src_dirs) - origin_src_dirs = map(Sourceify, map(self.LocalPathify, origin_src_dirs)) - self.WriteList(origin_src_dirs, 'GYP_COPIED_SOURCE_ORIGIN_DIRS') - - self.WriteList(local_files, 'LOCAL_SRC_FILES') - - # Write out the flags used to compile the source; this must be done last - # so that GYP_COPIED_SOURCE_ORIGIN_DIRS can be used as an include path. - self.WriteSourceFlags(spec, configs) - - - def ComputeAndroidModule(self, spec): - """Return the Android module name used for a gyp spec. 
- - We use the complete qualified target name to avoid collisions between - duplicate targets in different directories. We also add a suffix to - distinguish gyp-generated module names. - """ - - if int(spec.get('android_unmangled_name', 0)): - assert self.type != 'shared_library' or self.target.startswith('lib') - return self.target - - if self.type == 'shared_library': - # For reasons of convention, the Android build system requires that all - # shared library modules are named 'libfoo' when generating -l flags. - prefix = 'lib_' - else: - prefix = '' - - if spec['toolset'] == 'host': - suffix = '_$(TARGET_$(GYP_VAR_PREFIX)ARCH)_host_gyp' - else: - suffix = '_gyp' - - if self.path: - middle = make.StringToMakefileVariable('%s_%s' % (self.path, self.target)) - else: - middle = make.StringToMakefileVariable(self.target) - - return ''.join([prefix, middle, suffix]) - - - def ComputeOutputParts(self, spec): - """Return the 'output basename' of a gyp spec, split into filename + ext. - - Android libraries must be named the same thing as their module name, - otherwise the linker can't find them, so product_name and so on must be - ignored if we are building a library, and the "lib" prepending is - not done for Android. - """ - assert self.type != 'loadable_module' # TODO: not supported? - - target = spec['target_name'] - target_prefix = '' - target_ext = '' - if self.type == 'static_library': - target = self.ComputeAndroidModule(spec) - target_ext = '.a' - elif self.type == 'shared_library': - target = self.ComputeAndroidModule(spec) - target_ext = '.so' - elif self.type == 'none': - target_ext = '.stamp' - elif self.type != 'executable': - print("ERROR: What output file should be generated?", - "type", self.type, "target", target) - - if self.type != 'static_library' and self.type != 'shared_library': - target_prefix = spec.get('product_prefix', target_prefix) - target = spec.get('product_name', target) - product_ext = spec.get('product_extension') - if product_ext: - target_ext = '.' + product_ext - - target_stem = target_prefix + target - return (target_stem, target_ext) - - - def ComputeOutputBasename(self, spec): - """Return the 'output basename' of a gyp spec. - - E.g., the loadable module 'foobar' in directory 'baz' will produce - 'libfoobar.so' - """ - return ''.join(self.ComputeOutputParts(spec)) - - - def ComputeOutput(self, spec): - """Return the 'output' (full output path) of a gyp spec. - - E.g., the loadable module 'foobar' in directory 'baz' will produce - '$(obj)/baz/libfoobar.so' - """ - if self.type == 'executable': - # We install host executables into shared_intermediate_dir so they can be - # run by gyp rules that refer to PRODUCT_DIR. - path = '$(gyp_shared_intermediate_dir)' - elif self.type == 'shared_library': - if self.toolset == 'host': - path = '$($(GYP_HOST_VAR_PREFIX)HOST_OUT_INTERMEDIATE_LIBRARIES)' - else: - path = '$($(GYP_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)' - else: - # Other targets just get built into their intermediate dir. - if self.toolset == 'host': - path = ('$(call intermediates-dir-for,%s,%s,true,,' - '$(GYP_HOST_VAR_PREFIX))' % (self.android_class, - self.android_module)) - else: - path = ('$(call intermediates-dir-for,%s,%s,,,$(GYP_VAR_PREFIX))' - % (self.android_class, self.android_module)) - - assert spec.get('product_dir') is None # TODO: not supported? - return os.path.join(path, self.ComputeOutputBasename(spec)) - - def NormalizeIncludePaths(self, include_paths): - """ Normalize include_paths. 
-    Convert absolute paths to paths relative to the Android top directory.
-
-    Args:
-      include_paths: A list of unprocessed include paths.
-    Returns:
-      A list of normalized include paths.
-    """
-    normalized = []
-    for path in include_paths:
-      if path[0] == '/':
-        path = gyp.common.RelativePath(path, self.android_top_dir)
-      normalized.append(path)
-    return normalized
-
-  def ExtractIncludesFromCFlags(self, cflags):
-    """Extract include paths ("-I...") from cflags.
-
-    Args:
-      cflags: A list of compiler flags, which may be mixed with "-I.." entries.
-    Returns:
-      A tuple of lists: (clean_cflags, include_paths). "-I.." is trimmed.
-    """
-    clean_cflags = []
-    include_paths = []
-    for flag in cflags:
-      if flag.startswith('-I'):
-        include_paths.append(flag[2:])
-      else:
-        clean_cflags.append(flag)
-
-    return (clean_cflags, include_paths)
-
-  def FilterLibraries(self, libraries):
-    """Filter the 'libraries' key to separate things that shouldn't be ldflags.
-
-    Library entries that look like filenames should be converted to Android
-    module names instead of being passed to the linker as flags.
-
-    Args:
-      libraries: the value of spec.get('libraries')
-    Returns:
-      A tuple (static_lib_modules, dynamic_lib_modules, ldflags)
-    """
-    static_lib_modules = []
-    dynamic_lib_modules = []
-    ldflags = []
-    for libs in libraries:
-      # Libs can have multiple words.
-      for lib in libs.split():
-        # Filter out the system libraries, which are added by default by the
-        # Android build system.
-        if (lib == '-lc' or lib == '-lstdc++' or lib == '-lm' or
-            lib.endswith('libgcc.a')):
-          continue
-        match = re.search(r'([^/]+)\.a$', lib)
-        if match:
-          static_lib_modules.append(match.group(1))
-          continue
-        match = re.search(r'([^/]+)\.so$', lib)
-        if match:
-          dynamic_lib_modules.append(match.group(1))
-          continue
-        if lib.startswith('-l'):
-          ldflags.append(lib)
-    return (static_lib_modules, dynamic_lib_modules, ldflags)
-
-
-  def ComputeDeps(self, spec):
-    """Compute the dependencies of a gyp spec.
-
-    Returns a tuple (deps, link_deps), where each is a list of
-    filenames that will need to be put in front of make for either
-    building (deps) or linking (link_deps).
-    """
-    deps = []
-    link_deps = []
-    if 'dependencies' in spec:
-      deps.extend([target_outputs[dep] for dep in spec['dependencies']
-                   if target_outputs[dep]])
-      for dep in spec['dependencies']:
-        if dep in target_link_deps:
-          link_deps.append(target_link_deps[dep])
-      deps.extend(link_deps)
-    return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps))
-
-
-  def WriteTargetFlags(self, spec, configs, link_deps):
-    """Write Makefile code to specify the link flags and library dependencies.
-
-    spec, configs: input from gyp.
-    link_deps: link dependency list; see ComputeDeps()
-    """
-    # Libraries (i.e. -lfoo)
-    # These must be included even for static libraries as some of them provide
-    # implicit include paths through the build system.
-    libraries = gyp.common.uniquer(spec.get('libraries', []))
-    static_libs, dynamic_libs, ldflags_libs = self.FilterLibraries(libraries)
-
-    if self.type != 'static_library':
-      for configname, config in sorted(configs.items()):
-        ldflags = list(config.get('ldflags', []))
-        self.WriteLn('')
-        self.WriteList(ldflags, 'LOCAL_LDFLAGS_%s' % configname)
-      self.WriteList(ldflags_libs, 'LOCAL_GYP_LIBS')
-      self.WriteLn('LOCAL_LDFLAGS := $(LOCAL_LDFLAGS_$(GYP_CONFIGURATION)) '
-                   '$(LOCAL_GYP_LIBS)')
-
-    # Link dependencies (i.e.
other gyp targets this target depends on) - # These need not be included for static libraries as within the gyp build - # we do not use the implicit include path mechanism. - if self.type != 'static_library': - static_link_deps = [x[1] for x in link_deps if x[0] == 'static'] - shared_link_deps = [x[1] for x in link_deps if x[0] == 'shared'] - else: - static_link_deps = [] - shared_link_deps = [] - - # Only write the lists if they are non-empty. - if static_libs or static_link_deps: - self.WriteLn('') - self.WriteList(static_libs + static_link_deps, - 'LOCAL_STATIC_LIBRARIES') - self.WriteLn('# Enable grouping to fix circular references') - self.WriteLn('LOCAL_GROUP_STATIC_LIBRARIES := true') - if dynamic_libs or shared_link_deps: - self.WriteLn('') - self.WriteList(dynamic_libs + shared_link_deps, - 'LOCAL_SHARED_LIBRARIES') - - - def WriteTarget(self, spec, configs, deps, link_deps, part_of_all, - write_alias_target): - """Write Makefile code to produce the final target of the gyp spec. - - spec, configs: input from gyp. - deps, link_deps: dependency lists; see ComputeDeps() - part_of_all: flag indicating this target is part of 'all' - write_alias_target: flag indicating whether to create short aliases for this - target - """ - self.WriteLn('### Rules for final target.') - - if self.type != 'none': - self.WriteTargetFlags(spec, configs, link_deps) - - settings = spec.get('aosp_build_settings', {}) - if settings: - self.WriteLn('### Set directly by aosp_build_settings.') - for k, v in settings.items(): - if isinstance(v, list): - self.WriteList(v, k) - else: - self.WriteLn('%s := %s' % (k, make.QuoteIfNecessary(v))) - self.WriteLn('') - - # Add to the set of targets which represent the gyp 'all' target. We use the - # name 'gyp_all_modules' as the Android build system doesn't allow the use - # of the Make target 'all' and because 'all_modules' is the equivalent of - # the Make target 'all' on Android. - if part_of_all and write_alias_target: - self.WriteLn('# Add target alias to "gyp_all_modules" target.') - self.WriteLn('.PHONY: gyp_all_modules') - self.WriteLn('gyp_all_modules: %s' % self.android_module) - self.WriteLn('') - - # Add an alias from the gyp target name to the Android module name. This - # simplifies manual builds of the target, and is required by the test - # framework. - if self.target != self.android_module and write_alias_target: - self.WriteLn('# Alias gyp target name.') - self.WriteLn('.PHONY: %s' % self.target) - self.WriteLn('%s: %s' % (self.target, self.android_module)) - self.WriteLn('') - - # Add the command to trigger build of the target type depending - # on the toolset. Ex: BUILD_STATIC_LIBRARY vs. BUILD_HOST_STATIC_LIBRARY - # NOTE: This has to come last! - modifier = '' - if self.toolset == 'host': - modifier = 'HOST_' - if self.type == 'static_library': - self.WriteLn('include $(BUILD_%sSTATIC_LIBRARY)' % modifier) - elif self.type == 'shared_library': - self.WriteLn('LOCAL_PRELINK_MODULE := false') - self.WriteLn('include $(BUILD_%sSHARED_LIBRARY)' % modifier) - elif self.type == 'executable': - self.WriteLn('LOCAL_CXX_STL := libc++_static') - # Executables are for build and test purposes only, so they're installed - # to a directory that doesn't get included in the system image. 
- self.WriteLn('LOCAL_MODULE_PATH := $(gyp_shared_intermediate_dir)') - self.WriteLn('include $(BUILD_%sEXECUTABLE)' % modifier) - else: - self.WriteLn('LOCAL_MODULE_PATH := $(PRODUCT_OUT)/gyp_stamp') - self.WriteLn('LOCAL_UNINSTALLABLE_MODULE := true') - if self.toolset == 'target': - self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_VAR_PREFIX)') - else: - self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_HOST_VAR_PREFIX)') - self.WriteLn() - self.WriteLn('include $(BUILD_SYSTEM)/base_rules.mk') - self.WriteLn() - self.WriteLn('$(LOCAL_BUILT_MODULE): $(LOCAL_ADDITIONAL_DEPENDENCIES)') - self.WriteLn('\t$(hide) echo "Gyp timestamp: $@"') - self.WriteLn('\t$(hide) mkdir -p $(dir $@)') - self.WriteLn('\t$(hide) touch $@') - self.WriteLn() - self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX :=') - - - def WriteList(self, value_list, variable=None, prefix='', - quoter=make.QuoteIfNecessary, local_pathify=False): - """Write a variable definition that is a list of values. - - E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out - foo = blaha blahb - but in a pretty-printed style. - """ - values = '' - if value_list: - value_list = [quoter(prefix + l) for l in value_list] - if local_pathify: - value_list = [self.LocalPathify(l) for l in value_list] - values = ' \\\n\t' + ' \\\n\t'.join(value_list) - self.fp.write('%s :=%s\n\n' % (variable, values)) - - - def WriteLn(self, text=''): - self.fp.write(text + '\n') - - - def LocalPathify(self, path): - """Convert a subdirectory-relative path into a normalized path which starts - with the make variable $(LOCAL_PATH) (i.e. the top of the project tree). - Absolute paths, or paths that contain variables, are just normalized.""" - if '$(' in path or os.path.isabs(path): - # path is not a file in the project tree in this case, but calling - # normpath is still important for trimming trailing slashes. - return os.path.normpath(path) - local_path = os.path.join('$(LOCAL_PATH)', self.path, path) - local_path = os.path.normpath(local_path) - # Check that normalizing the path didn't ../ itself out of $(LOCAL_PATH) - # - i.e. that the resulting path is still inside the project tree. The - # path may legitimately have ended up containing just $(LOCAL_PATH), though, - # so we don't look for a slash. - assert local_path.startswith('$(LOCAL_PATH)'), ( - 'Path %s attempts to escape from gyp path %s !)' % (path, self.path)) - return local_path - - - def ExpandInputRoot(self, template, expansion, dirname): - if '%(INPUT_ROOT)s' not in template and '%(INPUT_DIRNAME)s' not in template: - return template - path = template % { - 'INPUT_ROOT': expansion, - 'INPUT_DIRNAME': dirname, - } - return os.path.normpath(path) - - -def PerformBuild(data, configurations, params): - # The android backend only supports the default configuration. 
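For illustration, a minimal standalone sketch of the normalization LocalPathify (above) performs; local_pathify and project_rel_dir are hypothetical names standing in for the method and its self.path state:

import os

def local_pathify(path, project_rel_dir):
  # Paths containing make variables, and absolute paths, are only normalized.
  if '$(' in path or os.path.isabs(path):
    return os.path.normpath(path)
  local_path = os.path.normpath(
      os.path.join('$(LOCAL_PATH)', project_rel_dir, path))
  # Normalization must not ../ the path out of the project tree.
  assert local_path.startswith('$(LOCAL_PATH)'), local_path
  return local_path

print(local_pathify('sub/../file.c', 'foo/bar'))
# -> $(LOCAL_PATH)/foo/bar/file.c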
- options = params['options'] - makefile = os.path.abspath(os.path.join(options.toplevel_dir, - 'GypAndroid.mk')) - env = dict(os.environ) - env['ONE_SHOT_MAKEFILE'] = makefile - arguments = ['make', '-C', os.environ['ANDROID_BUILD_TOP'], 'gyp_all_modules'] - print('Building: %s' % arguments) - subprocess.check_call(arguments, env=env) - - -def GenerateOutput(target_list, target_dicts, data, params): - options = params['options'] - generator_flags = params.get('generator_flags', {}) - builddir_name = generator_flags.get('output_dir', 'out') - limit_to_target_all = generator_flags.get('limit_to_target_all', False) - write_alias_targets = generator_flags.get('write_alias_targets', True) - sdk_version = generator_flags.get('aosp_sdk_version', 0) - android_top_dir = os.environ.get('ANDROID_BUILD_TOP') - assert android_top_dir, '$ANDROID_BUILD_TOP not set; you need to run lunch.' - - def CalculateMakefilePath(build_file, base_name): - """Determine where to write a Makefile for a given gyp file.""" - # Paths in gyp files are relative to the .gyp file, but we want - # paths relative to the source root for the master makefile. Grab - # the path of the .gyp file as the base to relativize against. - # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp". - base_path = gyp.common.RelativePath(os.path.dirname(build_file), - options.depth) - # We write the file in the base_path directory. - output_file = os.path.join(options.depth, base_path, base_name) - assert not options.generator_output, ( - 'The Android backend does not support options.generator_output.') - base_path = gyp.common.RelativePath(os.path.dirname(build_file), - options.toplevel_dir) - return base_path, output_file - - # TODO: search for the first non-'Default' target. This can go - # away when we add verification that all targets have the - # necessary configurations. - default_configuration = None - toolsets = set([target_dicts[target]['toolset'] for target in target_list]) - for target in target_list: - spec = target_dicts[target] - if spec['default_configuration'] != 'Default': - default_configuration = spec['default_configuration'] - break - if not default_configuration: - default_configuration = 'Default' - - srcdir = '.' - makefile_name = 'GypAndroid' + options.suffix + '.mk' - makefile_path = os.path.join(options.toplevel_dir, makefile_name) - assert not options.generator_output, ( - 'The Android backend does not support options.generator_output.') - gyp.common.EnsureDirExists(makefile_path) - root_makefile = open(makefile_path, 'w') - - root_makefile.write(header) - - # We set LOCAL_PATH just once, here, to the top of the project tree. This - # allows all the other paths we use to be relative to the Android.mk file, - # as the Android build system expects. - root_makefile.write('\nLOCAL_PATH := $(call my-dir)\n') - - # Find the list of targets that derive from the gyp file(s) being built. 
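A minimal sketch of the path computation CalculateMakefilePath (above) performs, assuming os.path.relpath is an adequate stand-in for gyp.common.RelativePath; the example values are illustrative:

import os

def calculate_makefile_path(build_file, base_name, depth='.'):
  # Relativize the .gyp file's directory against the source root, then place
  # the per-target makefile next to the .gyp file.
  base_path = os.path.relpath(os.path.dirname(build_file), depth)
  return base_path, os.path.join(depth, base_path, base_name)

print(calculate_makefile_path('foo/bar/baz.gyp', 'baz.target.mk'))
# -> ('foo/bar', './foo/bar/baz.target.mk')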
- needed_targets = set() - for build_file in params['build_files']: - for target in gyp.common.AllTargets(target_list, target_dicts, build_file): - needed_targets.add(target) - - build_files = set() - include_list = set() - android_modules = {} - for qualified_target in target_list: - build_file, target, toolset = gyp.common.ParseQualifiedTarget( - qualified_target) - relative_build_file = gyp.common.RelativePath(build_file, - options.toplevel_dir) - build_files.add(relative_build_file) - included_files = data[build_file]['included_files'] - for included_file in included_files: - # The included_files entries are relative to the dir of the build file - # that included them, so we have to undo that and then make them relative - # to the root dir. - relative_include_file = gyp.common.RelativePath( - gyp.common.UnrelativePath(included_file, build_file), - options.toplevel_dir) - abs_include_file = os.path.abspath(relative_include_file) - # If the include file is from the ~/.gyp dir, we should use absolute path - # so that relocating the src dir doesn't break the path. - if (params['home_dot_gyp'] and - abs_include_file.startswith(params['home_dot_gyp'])): - build_files.add(abs_include_file) - else: - build_files.add(relative_include_file) - - base_path, output_file = CalculateMakefilePath(build_file, - target + '.' + toolset + options.suffix + '.mk') - - spec = target_dicts[qualified_target] - configs = spec['configurations'] - - part_of_all = qualified_target in needed_targets - if limit_to_target_all and not part_of_all: - continue - - relative_target = gyp.common.QualifiedTarget(relative_build_file, target, - toolset) - writer = AndroidMkWriter(android_top_dir) - android_module = writer.Write(qualified_target, relative_target, base_path, - output_file, spec, configs, - part_of_all=part_of_all, - write_alias_target=write_alias_targets, - sdk_version=sdk_version) - if android_module in android_modules: - print('ERROR: Android module names must be unique. The following ' - 'targets both generate Android module name %s.\n %s\n %s' % - (android_module, android_modules[android_module], - qualified_target)) - return - android_modules[android_module] = qualified_target - - # Our root_makefile lives at the source root. Compute the relative path - # from there to the output_file for including. - mkfile_rel_path = gyp.common.RelativePath(output_file, - os.path.dirname(makefile_path)) - include_list.add(mkfile_rel_path) - - root_makefile.write('GYP_CONFIGURATION ?= %s\n' % default_configuration) - root_makefile.write('GYP_VAR_PREFIX ?=\n') - root_makefile.write('GYP_HOST_VAR_PREFIX ?=\n') - root_makefile.write('GYP_HOST_MULTILIB ?= first\n') - - # Write out the sorted list of includes. - root_makefile.write('\n') - for include_file in sorted(include_list): - root_makefile.write('include $(LOCAL_PATH)/' + include_file + '\n') - root_makefile.write('\n') - - if write_alias_targets: - root_makefile.write(ALL_MODULES_FOOTER) - - root_makefile.close() diff --git a/gyp/pylib/gyp/generator/compile_commands_json.py b/gyp/pylib/gyp/generator/compile_commands_json.py deleted file mode 100644 index 575db63c4e..0000000000 --- a/gyp/pylib/gyp/generator/compile_commands_json.py +++ /dev/null @@ -1,115 +0,0 @@ -# Copyright (c) 2016 Ben Noordhuis . All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
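For context, the generator removed below emitted Clang's compilation-database format: a JSON array of objects with directory, command, and file keys. A minimal sketch with made-up values:

import json

entry = {
    'directory': 'out/Debug',  # working directory for the command
    'command': 'cc -DNDEBUG -I/abs/include -c /abs/src/main.c',
    'file': '/abs/src/main.c',  # the translation unit
}
print(json.dumps([entry], indent=2))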
- -import gyp.common -import gyp.xcode_emulation -import json -import os - -generator_additional_non_configuration_keys = [] -generator_additional_path_sections = [] -generator_extra_sources_for_rules = [] -generator_filelist_paths = None -generator_supports_multiple_toolsets = True -generator_wants_sorted_dependencies = False - -# Lifted from make.py. The actual values don't matter much. -generator_default_variables = { - 'CONFIGURATION_NAME': '$(BUILDTYPE)', - 'EXECUTABLE_PREFIX': '', - 'EXECUTABLE_SUFFIX': '', - 'INTERMEDIATE_DIR': '$(obj).$(TOOLSET)/$(TARGET)/geni', - 'PRODUCT_DIR': '$(builddir)', - 'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s', - 'RULE_INPUT_EXT': '$(suffix $<)', - 'RULE_INPUT_NAME': '$(notdir $<)', - 'RULE_INPUT_PATH': '$(abspath $<)', - 'RULE_INPUT_ROOT': '%(INPUT_ROOT)s', - 'SHARED_INTERMEDIATE_DIR': '$(obj)/gen', - 'SHARED_LIB_PREFIX': 'lib', - 'STATIC_LIB_PREFIX': 'lib', - 'STATIC_LIB_SUFFIX': '.a', -} - - -def IsMac(params): - return 'mac' == gyp.common.GetFlavor(params) - - -def CalculateVariables(default_variables, params): - default_variables.setdefault('OS', gyp.common.GetFlavor(params)) - - -def AddCommandsForTarget(cwd, target, params, per_config_commands): - output_dir = params['generator_flags']['output_dir'] - for configuration_name, configuration in target['configurations'].iteritems(): - builddir_name = os.path.join(output_dir, configuration_name) - - if IsMac(params): - xcode_settings = gyp.xcode_emulation.XcodeSettings(target) - cflags = xcode_settings.GetCflags(configuration_name) - cflags_c = xcode_settings.GetCflagsC(configuration_name) - cflags_cc = xcode_settings.GetCflagsCC(configuration_name) - else: - cflags = configuration.get('cflags', []) - cflags_c = configuration.get('cflags_c', []) - cflags_cc = configuration.get('cflags_cc', []) - - cflags_c = cflags + cflags_c - cflags_cc = cflags + cflags_cc - - defines = configuration.get('defines', []) - defines = ['-D' + s for s in defines] - - # TODO(bnoordhuis) Handle generated source files. - sources = target.get('sources', []) - sources = [s for s in sources if s.endswith('.c') or s.endswith('.cc')] - - def resolve(filename): - return os.path.abspath(os.path.join(cwd, filename)) - - # TODO(bnoordhuis) Handle generated header files. 
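A minimal standalone sketch of the per-source command assembly this function performs, assuming shlex.join (Python 3.8+) as a stand-in for gyp.common.EncodePOSIXShellList; compile_command and all argument values are illustrative:

import os
import shlex

def compile_command(cwd, source, cflags, defines, include_dirs):
  # Resolve paths against the .gyp file's directory, as the loop below does.
  resolve = lambda p: os.path.abspath(os.path.join(cwd, p))
  cc = 'cc' if source.endswith('.c') else 'c++'
  argv = ([cc] + ['-D' + d for d in defines] +
          ['-I' + resolve(i) for i in include_dirs] +
          cflags + ['-c', resolve(source)])
  return shlex.join(argv)

print(compile_command('foo', 'main.c', ['-O2'], ['NDEBUG'], ['include']))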
- include_dirs = configuration.get('include_dirs', []) - include_dirs = [s for s in include_dirs if not s.startswith('$(obj)')] - includes = ['-I' + resolve(s) for s in include_dirs] - - defines = gyp.common.EncodePOSIXShellList(defines) - includes = gyp.common.EncodePOSIXShellList(includes) - cflags_c = gyp.common.EncodePOSIXShellList(cflags_c) - cflags_cc = gyp.common.EncodePOSIXShellList(cflags_cc) - - commands = per_config_commands.setdefault(configuration_name, []) - for source in sources: - file = resolve(source) - isc = source.endswith('.c') - cc = 'cc' if isc else 'c++' - cflags = cflags_c if isc else cflags_cc - command = ' '.join((cc, defines, includes, cflags, - '-c', gyp.common.EncodePOSIXShellArgument(file))) - commands.append(dict(command=command, directory=output_dir, file=file)) - - -def GenerateOutput(target_list, target_dicts, data, params): - per_config_commands = {} - for qualified_target, target in target_dicts.iteritems(): - build_file, target_name, toolset = ( - gyp.common.ParseQualifiedTarget(qualified_target)) - if IsMac(params): - settings = data[build_file] - gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(settings, target) - cwd = os.path.dirname(build_file) - AddCommandsForTarget(cwd, target, params, per_config_commands) - - output_dir = params['generator_flags']['output_dir'] - for configuration_name, commands in per_config_commands.iteritems(): - filename = os.path.join(output_dir, - configuration_name, - 'compile_commands.json') - gyp.common.EnsureDirExists(filename) - fp = open(filename, 'w') - json.dump(commands, fp=fp, indent=0, check_circular=False) - - -def PerformBuild(data, configurations, params): - pass diff --git a/gyp/pylib/gyp/generator/ninja.py b/gyp/pylib/gyp/generator/ninja.py deleted file mode 100644 index 4b5122af1d..0000000000 --- a/gyp/pylib/gyp/generator/ninja.py +++ /dev/null @@ -1,2426 +0,0 @@ -from __future__ import print_function -# Copyright (c) 2013 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import collections -import copy -import hashlib -import json -import multiprocessing -import os.path -import re -import signal -import subprocess -import sys -import gyp -import gyp.common -from gyp.common import OrderedSet -import gyp.msvs_emulation -import gyp.MSVSUtil as MSVSUtil -import gyp.xcode_emulation -try: - from cStringIO import StringIO -except ImportError: - from io import StringIO - -from gyp.common import GetEnvironFallback -import gyp.ninja_syntax as ninja_syntax - -generator_default_variables = { - 'EXECUTABLE_PREFIX': '', - 'EXECUTABLE_SUFFIX': '', - 'STATIC_LIB_PREFIX': 'lib', - 'STATIC_LIB_SUFFIX': '.a', - 'SHARED_LIB_PREFIX': 'lib', - - # Gyp expects the following variables to be expandable by the build - # system to the appropriate locations. Ninja prefers paths to be - # known at gyp time. To resolve this, introduce special - # variables starting with $! and $| (which begin with a $ so gyp knows it - # should be treated specially, but is otherwise an invalid - # ninja/shell variable) that are passed to gyp here but expanded - # before writing out into the target .ninja files; see - # ExpandSpecial. - # $! is used for variables that represent a path and that can only appear at - # the start of a string, while $| is used for variables that can appear - # anywhere in a string. 
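A toy version of the $! and $| expansion described above (simplified; the real ExpandSpecial further below also rewrites $!INTERMEDIATE_DIR and Windows-style separators):

def expand_special(path, product_dir, config_name):
  # $!PRODUCT_DIR may only start a string; $|CONFIGURATION_NAME can appear
  # anywhere in it.
  if path.startswith('$!PRODUCT_DIR'):
    path = path.replace('$!PRODUCT_DIR', product_dir or '.', 1)
  return path.replace('$|CONFIGURATION_NAME', config_name)

print(expand_special('$!PRODUCT_DIR/gen/$|CONFIGURATION_NAME/a.h',
                     'out/Debug', 'Debug'))
# -> out/Debug/gen/Debug/a.h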
-  'INTERMEDIATE_DIR': '$!INTERMEDIATE_DIR',
-  'SHARED_INTERMEDIATE_DIR': '$!PRODUCT_DIR/gen',
-  'PRODUCT_DIR': '$!PRODUCT_DIR',
-  'CONFIGURATION_NAME': '$|CONFIGURATION_NAME',
-
-  # Special variables that may be used by gyp 'rule' targets.
-  # We generate definitions for these variables on the fly when processing a
-  # rule.
-  'RULE_INPUT_ROOT': '${root}',
-  'RULE_INPUT_DIRNAME': '${dirname}',
-  'RULE_INPUT_PATH': '${source}',
-  'RULE_INPUT_EXT': '${ext}',
-  'RULE_INPUT_NAME': '${name}',
-}
-
-# Placates pylint.
-generator_additional_non_configuration_keys = []
-generator_additional_path_sections = []
-generator_extra_sources_for_rules = []
-generator_filelist_paths = None
-
-generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
-
-def StripPrefix(arg, prefix):
-  if arg.startswith(prefix):
-    return arg[len(prefix):]
-  return arg
-
-
-def QuoteShellArgument(arg, flavor):
-  """Quote a string such that it will be interpreted as a single argument
-  by the shell."""
-  # Rather than attempting to enumerate the bad shell characters, just
-  # whitelist common OK ones and quote anything else.
-  if re.match(r'^[a-zA-Z0-9_=.\\/-]+$', arg):
-    return arg  # No quoting necessary.
-  if flavor == 'win':
-    return gyp.msvs_emulation.QuoteForRspFile(arg)
-  return "'" + arg.replace("'", "'" + '"\'"' + "'") + "'"
-
-
-def Define(d, flavor):
-  """Takes a preprocessor define and returns a -D parameter that's ninja- and
-  shell-escaped."""
-  if flavor == 'win':
-    # cl.exe replaces literal # characters with = in preprocessor definitions
-    # for some reason. Octal-encode to work around that.
-    d = d.replace('#', '\\%03o' % ord('#'))
-  return QuoteShellArgument(ninja_syntax.escape('-D' + d), flavor)
-
-
-def AddArch(output, arch):
-  """Adds an arch string to an output path."""
-  output, extension = os.path.splitext(output)
-  return '%s.%s%s' % (output, arch, extension)
-
-
-class Target(object):
-  """Target represents the paths used within a single gyp target.
-
-  Conceptually, building a single target A is a series of steps:
-
-  1) actions/rules/copies generate sources/resources/etc.
-  2) compiles generate .o files
-  3) link generates a binary (library/executable)
-  4) bundle merges the above into a mac bundle
-
-  (Any of these steps can be optional.)
-
-  From a build ordering perspective, a dependent target B could just
-  depend on the last output of this series of steps.
-
-  But some dependent commands sometimes need to reach inside the box.
-  For example, when linking B it needs to get the path to the static
-  library generated by A.
-
-  This object stores those paths. To keep things simple, member
-  variables only store concrete paths to single files, while methods
-  compute derived values like "the last output of the target".
-  """
-  def __init__(self, type):
-    # Gyp type ("static_library", etc.) of this target.
-    self.type = type
-    # File representing whether any input dependencies necessary for
-    # dependent actions have completed.
-    self.preaction_stamp = None
-    # File representing whether any input dependencies necessary for
-    # dependent compiles have completed.
-    self.precompile_stamp = None
-    # File representing the completion of actions/rules/copies, if any.
-    self.actions_stamp = None
-    # Path to the output of the link step, if any.
-    self.binary = None
-    # Path to the file representing the completion of building the bundle,
-    # if any.
-    self.bundle = None
-    # On Windows, incremental linking requires linking against all the .objs
-    # that compose a .lib (rather than the .lib itself).
That list is stored - # here. In this case, we also need to save the compile_deps for the target, - # so that the target that directly depends on the .objs can also depend - # on those. - self.component_objs = None - self.compile_deps = None - # Windows only. The import .lib is the output of a build step, but - # because dependents only link against the lib (not both the lib and the - # dll) we keep track of the import library here. - self.import_lib = None - - def Linkable(self): - """Return true if this is a target that can be linked against.""" - return self.type in ('static_library', 'shared_library') - - def UsesToc(self, flavor): - """Return true if the target should produce a restat rule based on a TOC - file.""" - # For bundles, the .TOC should be produced for the binary, not for - # FinalOutput(). But the naive approach would put the TOC file into the - # bundle, so don't do this for bundles for now. - if flavor == 'win' or self.bundle: - return False - return self.type in ('shared_library', 'loadable_module') - - def PreActionInput(self, flavor): - """Return the path, if any, that should be used as a dependency of - any dependent action step.""" - if self.UsesToc(flavor): - return self.FinalOutput() + '.TOC' - return self.FinalOutput() or self.preaction_stamp - - def PreCompileInput(self): - """Return the path, if any, that should be used as a dependency of - any dependent compile step.""" - return self.actions_stamp or self.precompile_stamp - - def FinalOutput(self): - """Return the last output of the target, which depends on all prior - steps.""" - return self.bundle or self.binary or self.actions_stamp - - -# A small discourse on paths as used within the Ninja build: -# All files we produce (both at gyp and at build time) appear in the -# build directory (e.g. out/Debug). -# -# Paths within a given .gyp file are always relative to the directory -# containing the .gyp file. Call these "gyp paths". This includes -# sources as well as the starting directory a given gyp rule/action -# expects to be run from. We call the path from the source root to -# the gyp file the "base directory" within the per-.gyp-file -# NinjaWriter code. -# -# All paths as written into the .ninja files are relative to the build -# directory. Call these paths "ninja paths". -# -# We translate between these two notions of paths with two helper -# functions: -# -# - GypPathToNinja translates a gyp path (i.e. relative to the .gyp file) -# into the equivalent ninja path. -# -# - GypPathToUniqueOutput translates a gyp path into a ninja path to write -# an output file; the result can be namespaced such that it is unique -# to the input file name as well as the output target name. 
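A minimal sketch of the first translation described above; build_to_base is the precomputed relative path from the build directory back to the .gyp file's directory, and the function name mirrors the method defined below:

import os

def gyp_path_to_ninja(path, build_to_base):
  # Rebase a gyp-file-relative path onto the build directory.
  return os.path.normpath(os.path.join(build_to_base, path))

# Build dir out/Debug, gyp file foo/foo.gyp:
print(gyp_path_to_ninja('src/a.cc', '../../foo'))  # -> ../../foo/src/a.cc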
- -class NinjaWriter(object): - def __init__(self, hash_for_rules, target_outputs, base_dir, build_dir, - output_file, toplevel_build, output_file_name, flavor, - toplevel_dir=None): - """ - base_dir: path from source root to directory containing this gyp file, - by gyp semantics, all input paths are relative to this - build_dir: path from source root to build output - toplevel_dir: path to the toplevel directory - """ - - self.hash_for_rules = hash_for_rules - self.target_outputs = target_outputs - self.base_dir = base_dir - self.build_dir = build_dir - self.ninja = ninja_syntax.Writer(output_file) - self.toplevel_build = toplevel_build - self.output_file_name = output_file_name - - self.flavor = flavor - self.abs_build_dir = None - if toplevel_dir is not None: - self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir, - build_dir)) - self.obj_ext = '.obj' if flavor == 'win' else '.o' - if flavor == 'win': - # See docstring of msvs_emulation.GenerateEnvironmentFiles(). - self.win_env = {} - for arch in ('x86', 'x64'): - self.win_env[arch] = 'environment.' + arch - - # Relative path from build output dir to base dir. - build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir) - self.build_to_base = os.path.join(build_to_top, base_dir) - # Relative path from base dir to build dir. - base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir) - self.base_to_build = os.path.join(base_to_top, build_dir) - - def ExpandSpecial(self, path, product_dir=None): - """Expand specials like $!PRODUCT_DIR in |path|. - - If |product_dir| is None, assumes the cwd is already the product - dir. Otherwise, |product_dir| is the relative path to the product - dir. - """ - - PRODUCT_DIR = '$!PRODUCT_DIR' - if PRODUCT_DIR in path: - if product_dir: - path = path.replace(PRODUCT_DIR, product_dir) - else: - path = path.replace(PRODUCT_DIR + '/', '') - path = path.replace(PRODUCT_DIR + '\\', '') - path = path.replace(PRODUCT_DIR, '.') - - INTERMEDIATE_DIR = '$!INTERMEDIATE_DIR' - if INTERMEDIATE_DIR in path: - int_dir = self.GypPathToUniqueOutput('gen') - # GypPathToUniqueOutput generates a path relative to the product dir, - # so insert product_dir in front if it is provided. - path = path.replace(INTERMEDIATE_DIR, - os.path.join(product_dir or '', int_dir)) - - CONFIGURATION_NAME = '$|CONFIGURATION_NAME' - path = path.replace(CONFIGURATION_NAME, self.config_name) - - return path - - def ExpandRuleVariables(self, path, root, dirname, source, ext, name): - if self.flavor == 'win': - path = self.msvs_settings.ConvertVSMacros( - path, config=self.config_name) - path = path.replace(generator_default_variables['RULE_INPUT_ROOT'], root) - path = path.replace(generator_default_variables['RULE_INPUT_DIRNAME'], - dirname) - path = path.replace(generator_default_variables['RULE_INPUT_PATH'], source) - path = path.replace(generator_default_variables['RULE_INPUT_EXT'], ext) - path = path.replace(generator_default_variables['RULE_INPUT_NAME'], name) - return path - - def GypPathToNinja(self, path, env=None): - """Translate a gyp path to a ninja path, optionally expanding environment - variable references in |path| with |env|. 
- - See the above discourse on path conversions.""" - if env: - if self.flavor == 'mac': - path = gyp.xcode_emulation.ExpandEnvVars(path, env) - elif self.flavor == 'win': - path = gyp.msvs_emulation.ExpandMacros(path, env) - if path.startswith('$!'): - expanded = self.ExpandSpecial(path) - if self.flavor == 'win': - expanded = os.path.normpath(expanded) - return expanded - if '$|' in path: - path = self.ExpandSpecial(path) - assert '$' not in path, path - return os.path.normpath(os.path.join(self.build_to_base, path)) - - def GypPathToUniqueOutput(self, path, qualified=True): - """Translate a gyp path to a ninja path for writing output. - - If qualified is True, qualify the resulting filename with the name - of the target. This is necessary when e.g. compiling the same - path twice for two separate output targets. - - See the above discourse on path conversions.""" - - path = self.ExpandSpecial(path) - assert not path.startswith('$'), path - - # Translate the path following this scheme: - # Input: foo/bar.gyp, target targ, references baz/out.o - # Output: obj/foo/baz/targ.out.o (if qualified) - # obj/foo/baz/out.o (otherwise) - # (and obj.host instead of obj for cross-compiles) - # - # Why this scheme and not some other one? - # 1) for a given input, you can compute all derived outputs by matching - # its path, even if the input is brought via a gyp file with '..'. - # 2) simple files like libraries and stamps have a simple filename. - - obj = 'obj' - if self.toolset != 'target': - obj += '.' + self.toolset - - path_dir, path_basename = os.path.split(path) - assert not os.path.isabs(path_dir), ( - "'%s' can not be absolute path (see crbug.com/462153)." % path_dir) - - if qualified: - path_basename = self.name + '.' + path_basename - return os.path.normpath(os.path.join(obj, self.base_dir, path_dir, - path_basename)) - - def WriteCollapsedDependencies(self, name, targets, order_only=None): - """Given a list of targets, return a path for a single file - representing the result of building all the targets or None. - - Uses a stamp file if necessary.""" - - assert targets == filter(None, targets), targets - if len(targets) == 0: - assert not order_only - return None - if len(targets) > 1 or order_only: - stamp = self.GypPathToUniqueOutput(name + '.stamp') - targets = self.ninja.build(stamp, 'stamp', targets, order_only=order_only) - self.ninja.newline() - return targets[0] - - def _SubninjaNameForArch(self, arch): - output_file_base = os.path.splitext(self.output_file_name)[0] - return '%s.%s.ninja' % (output_file_base, arch) - - def WriteSpec(self, spec, config_name, generator_flags): - """The main entry point for NinjaWriter: write the build rules for a spec. - - Returns a Target object, which represents the output paths for this spec. - Returns None if there are no outputs (e.g. a settings-only 'none' type - target).""" - - self.config_name = config_name - self.name = spec['target_name'] - self.toolset = spec['toolset'] - config = spec['configurations'][config_name] - self.target = Target(spec['type']) - self.is_standalone_static_library = bool( - spec.get('standalone_static_library', 0)) - # Track if this target contains any C++ files, to decide if gcc or g++ - # should be used for linking. 
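A standalone sketch of the output-naming scheme documented above; unique_output is a hypothetical free function, whereas the method reads base_dir, name, and toolset from self:

import os

def unique_output(base_dir, name, toolset, path, qualified=True):
  # obj.host instead of obj for cross-compiles.
  obj = 'obj' if toolset == 'target' else 'obj.' + toolset
  path_dir, basename = os.path.split(path)
  if qualified:
    # Prefix with the target name so two targets can compile the same file.
    basename = name + '.' + basename
  return os.path.normpath(os.path.join(obj, base_dir, path_dir, basename))

print(unique_output('foo', 'targ', 'target', 'baz/out.o'))
# -> obj/foo/baz/targ.out.o, matching the example above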
- self.uses_cpp = False - - self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec) - self.xcode_settings = self.msvs_settings = None - if self.flavor == 'mac': - self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec) - if self.flavor == 'win': - self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, - generator_flags) - arch = self.msvs_settings.GetArch(config_name) - self.ninja.variable('arch', self.win_env[arch]) - self.ninja.variable('cc', '$cl_' + arch) - self.ninja.variable('cxx', '$cl_' + arch) - self.ninja.variable('cc_host', '$cl_' + arch) - self.ninja.variable('cxx_host', '$cl_' + arch) - self.ninja.variable('asm', '$ml_' + arch) - - if self.flavor == 'mac': - self.archs = self.xcode_settings.GetActiveArchs(config_name) - if len(self.archs) > 1: - self.arch_subninjas = dict( - (arch, ninja_syntax.Writer( - OpenOutput(os.path.join(self.toplevel_build, - self._SubninjaNameForArch(arch)), - 'w'))) - for arch in self.archs) - - # Compute predepends for all rules. - # actions_depends is the dependencies this target depends on before running - # any of its action/rule/copy steps. - # compile_depends is the dependencies this target depends on before running - # any of its compile steps. - actions_depends = [] - compile_depends = [] - # TODO(evan): it is rather confusing which things are lists and which - # are strings. Fix these. - if 'dependencies' in spec: - for dep in spec['dependencies']: - if dep in self.target_outputs: - target = self.target_outputs[dep] - actions_depends.append(target.PreActionInput(self.flavor)) - compile_depends.append(target.PreCompileInput()) - actions_depends = filter(None, actions_depends) - compile_depends = filter(None, compile_depends) - actions_depends = self.WriteCollapsedDependencies('actions_depends', - actions_depends) - compile_depends = self.WriteCollapsedDependencies('compile_depends', - compile_depends) - self.target.preaction_stamp = actions_depends - self.target.precompile_stamp = compile_depends - - # Write out actions, rules, and copies. These must happen before we - # compile any sources, so compute a list of predependencies for sources - # while we do it. - extra_sources = [] - mac_bundle_depends = [] - self.target.actions_stamp = self.WriteActionsRulesCopies( - spec, extra_sources, actions_depends, mac_bundle_depends) - - # If we have actions/rules/copies, we depend directly on those, but - # otherwise we depend on dependent target's actions/rules/copies etc. - # We never need to explicitly depend on previous target's link steps, - # because no compile ever depends on them. - compile_depends_stamp = (self.target.actions_stamp or compile_depends) - - # Write out the compilation steps, if any. - link_deps = [] - sources = extra_sources + spec.get('sources', []) - if sources: - if self.flavor == 'mac' and len(self.archs) > 1: - # Write subninja file containing compile and link commands scoped to - # a single arch if a fat binary is being built. 
- for arch in self.archs: - self.ninja.subninja(self._SubninjaNameForArch(arch)) - - pch = None - if self.flavor == 'win': - gyp.msvs_emulation.VerifyMissingSources( - sources, self.abs_build_dir, generator_flags, self.GypPathToNinja) - pch = gyp.msvs_emulation.PrecompiledHeader( - self.msvs_settings, config_name, self.GypPathToNinja, - self.GypPathToUniqueOutput, self.obj_ext) - else: - pch = gyp.xcode_emulation.MacPrefixHeader( - self.xcode_settings, self.GypPathToNinja, - lambda path, lang: self.GypPathToUniqueOutput(path + '-' + lang)) - link_deps = self.WriteSources( - self.ninja, config_name, config, sources, compile_depends_stamp, pch, - spec) - # Some actions/rules output 'sources' that are already object files. - obj_outputs = [f for f in sources if f.endswith(self.obj_ext)] - if obj_outputs: - if self.flavor != 'mac' or len(self.archs) == 1: - link_deps += [self.GypPathToNinja(o) for o in obj_outputs] - else: - print("Warning: Actions/rules writing object files don't work with " \ - "multiarch targets, dropping. (target %s)" % spec['target_name']) - elif self.flavor == 'mac' and len(self.archs) > 1: - link_deps = collections.defaultdict(list) - - compile_deps = self.target.actions_stamp or actions_depends - if self.flavor == 'win' and self.target.type == 'static_library': - self.target.component_objs = link_deps - self.target.compile_deps = compile_deps - - # Write out a link step, if needed. - output = None - is_empty_bundle = not link_deps and not mac_bundle_depends - if link_deps or self.target.actions_stamp or actions_depends: - output = self.WriteTarget(spec, config_name, config, link_deps, - compile_deps) - if self.is_mac_bundle: - mac_bundle_depends.append(output) - - # Bundle all of the above together, if needed. - if self.is_mac_bundle: - output = self.WriteMacBundle(spec, mac_bundle_depends, is_empty_bundle) - - if not output: - return None - - assert self.target.FinalOutput(), output - return self.target - - def _WinIdlRule(self, source, prebuild, outputs): - """Handle the implicit VS .idl rule for one source file. Fills |outputs| - with files that are generated.""" - outdir, output, vars, flags = self.msvs_settings.GetIdlBuildData( - source, self.config_name) - outdir = self.GypPathToNinja(outdir) - def fix_path(path, rel=None): - path = os.path.join(outdir, path) - dirname, basename = os.path.split(source) - root, ext = os.path.splitext(basename) - path = self.ExpandRuleVariables( - path, root, dirname, source, ext, basename) - if rel: - path = os.path.relpath(path, rel) - return path - vars = [(name, fix_path(value, outdir)) for name, value in vars] - output = [fix_path(p) for p in output] - vars.append(('outdir', outdir)) - vars.append(('idlflags', flags)) - input = self.GypPathToNinja(source) - self.ninja.build(output, 'idl', input, - variables=vars, order_only=prebuild) - outputs.extend(output) - - def WriteWinIdlFiles(self, spec, prebuild): - """Writes rules to match MSVS's implicit idl handling.""" - assert self.flavor == 'win' - if self.msvs_settings.HasExplicitIdlRulesOrActions(spec): - return [] - outputs = [] - for source in filter(lambda x: x.endswith('.idl'), spec['sources']): - self._WinIdlRule(source, prebuild, outputs) - return outputs - - def WriteActionsRulesCopies(self, spec, extra_sources, prebuild, - mac_bundle_depends): - """Write out the Actions, Rules, and Copies steps. 
Return a path - representing the outputs of these steps.""" - outputs = [] - if self.is_mac_bundle: - mac_bundle_resources = spec.get('mac_bundle_resources', [])[:] - else: - mac_bundle_resources = [] - extra_mac_bundle_resources = [] - - if 'actions' in spec: - outputs += self.WriteActions(spec['actions'], extra_sources, prebuild, - extra_mac_bundle_resources) - if 'rules' in spec: - outputs += self.WriteRules(spec['rules'], extra_sources, prebuild, - mac_bundle_resources, - extra_mac_bundle_resources) - if 'copies' in spec: - outputs += self.WriteCopies(spec['copies'], prebuild, mac_bundle_depends) - - if 'sources' in spec and self.flavor == 'win': - outputs += self.WriteWinIdlFiles(spec, prebuild) - - stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs) - - if self.is_mac_bundle: - xcassets = self.WriteMacBundleResources( - extra_mac_bundle_resources + mac_bundle_resources, mac_bundle_depends) - partial_info_plist = self.WriteMacXCassets(xcassets, mac_bundle_depends) - self.WriteMacInfoPlist(partial_info_plist, mac_bundle_depends) - - return stamp - - def GenerateDescription(self, verb, message, fallback): - """Generate and return a description of a build step. - - |verb| is the short summary, e.g. ACTION or RULE. - |message| is a hand-written description, or None if not available. - |fallback| is the gyp-level name of the step, usable as a fallback. - """ - if self.toolset != 'target': - verb += '(%s)' % self.toolset - if message: - return '%s %s' % (verb, self.ExpandSpecial(message)) - else: - return '%s %s: %s' % (verb, self.name, fallback) - - def WriteActions(self, actions, extra_sources, prebuild, - extra_mac_bundle_resources): - # Actions cd into the base directory. - env = self.GetToolchainEnv() - all_outputs = [] - for action in actions: - # First write out a rule for the action. - name = '%s_%s' % (action['action_name'], self.hash_for_rules) - description = self.GenerateDescription('ACTION', - action.get('message', None), - name) - is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(action) - if self.flavor == 'win' else False) - args = action['action'] - depfile = action.get('depfile', None) - if depfile: - depfile = self.ExpandSpecial(depfile, self.base_to_build) - pool = 'console' if int(action.get('ninja_use_console', 0)) else None - rule_name, _ = self.WriteNewNinjaRule(name, args, description, - is_cygwin, env, pool, - depfile=depfile) - - inputs = [self.GypPathToNinja(i, env) for i in action['inputs']] - if int(action.get('process_outputs_as_sources', False)): - extra_sources += action['outputs'] - if int(action.get('process_outputs_as_mac_bundle_resources', False)): - extra_mac_bundle_resources += action['outputs'] - outputs = [self.GypPathToNinja(o, env) for o in action['outputs']] - - # Then write out an edge using the rule. - self.ninja.build(outputs, rule_name, inputs, - order_only=prebuild) - all_outputs += outputs - - self.ninja.newline() - - return all_outputs - - def WriteRules(self, rules, extra_sources, prebuild, - mac_bundle_resources, extra_mac_bundle_resources): - env = self.GetToolchainEnv() - all_outputs = [] - for rule in rules: - # Skip a rule with no action and no inputs. - if 'action' not in rule and not rule.get('rule_sources', []): - continue - - # First write out a rule for the rule action. 
- name = '%s_%s' % (rule['rule_name'], self.hash_for_rules) - - args = rule['action'] - description = self.GenerateDescription( - 'RULE', - rule.get('message', None), - ('%s ' + generator_default_variables['RULE_INPUT_PATH']) % name) - is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(rule) - if self.flavor == 'win' else False) - pool = 'console' if int(rule.get('ninja_use_console', 0)) else None - rule_name, args = self.WriteNewNinjaRule( - name, args, description, is_cygwin, env, pool) - - # TODO: if the command references the outputs directly, we should - # simplify it to just use $out. - - # Rules can potentially make use of some special variables which - # must vary per source file. - # Compute the list of variables we'll need to provide. - special_locals = ('source', 'root', 'dirname', 'ext', 'name') - needed_variables = set(['source']) - for argument in args: - for var in special_locals: - if '${%s}' % var in argument: - needed_variables.add(var) - - def cygwin_munge(path): - # pylint: disable=cell-var-from-loop - if is_cygwin: - return path.replace('\\', '/') - return path - - inputs = [self.GypPathToNinja(i, env) for i in rule.get('inputs', [])] - - # If there are n source files matching the rule, and m additional rule - # inputs, then adding 'inputs' to each build edge written below will - # write m * n inputs. Collapsing reduces this to m + n. - sources = rule.get('rule_sources', []) - num_inputs = len(inputs) - if prebuild: - num_inputs += 1 - if num_inputs > 2 and len(sources) > 2: - inputs = [self.WriteCollapsedDependencies( - rule['rule_name'], inputs, order_only=prebuild)] - prebuild = [] - - # For each source file, write an edge that generates all the outputs. - for source in sources: - source = os.path.normpath(source) - dirname, basename = os.path.split(source) - root, ext = os.path.splitext(basename) - - # Gather the list of inputs and outputs, expanding $vars if possible. - outputs = [self.ExpandRuleVariables(o, root, dirname, - source, ext, basename) - for o in rule['outputs']] - - if int(rule.get('process_outputs_as_sources', False)): - extra_sources += outputs - - was_mac_bundle_resource = source in mac_bundle_resources - if was_mac_bundle_resource or \ - int(rule.get('process_outputs_as_mac_bundle_resources', False)): - extra_mac_bundle_resources += outputs - # Note: This is n_resources * n_outputs_in_rule. Put to-be-removed - # items in a set and remove them all in a single pass if this becomes - # a performance issue. - if was_mac_bundle_resource: - mac_bundle_resources.remove(source) - - extra_bindings = [] - for var in needed_variables: - if var == 'root': - extra_bindings.append(('root', cygwin_munge(root))) - elif var == 'dirname': - # '$dirname' is a parameter to the rule action, which means - # it shouldn't be converted to a Ninja path. But we don't - # want $!PRODUCT_DIR in there either. - dirname_expanded = self.ExpandSpecial(dirname, self.base_to_build) - extra_bindings.append(('dirname', cygwin_munge(dirname_expanded))) - elif var == 'source': - # '$source' is a parameter to the rule action, which means - # it shouldn't be converted to a Ninja path. But we don't - # want $!PRODUCT_DIR in there either. 
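The m + n collapsing mentioned above, in numbers (illustrative counts only):

# 100 sources sharing 10 rule inputs: listing the inputs on every edge writes
# 100 * 10 = 1000 dependency entries; routing them through one stamp file
# costs 100 + 10 = 110 (plus the stamp edge itself).
n_sources, m_inputs = 100, 10
print(n_sources * m_inputs, 'entries direct;',
      n_sources + m_inputs, 'via a stamp')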
-          source_expanded = self.ExpandSpecial(source, self.base_to_build)
-          extra_bindings.append(('source', cygwin_munge(source_expanded)))
-        elif var == 'ext':
-          extra_bindings.append(('ext', ext))
-        elif var == 'name':
-          extra_bindings.append(('name', cygwin_munge(basename)))
-        else:
-          assert var == None, repr(var)
-
-      outputs = [self.GypPathToNinja(o, env) for o in outputs]
-      if self.flavor == 'win':
-        # WriteNewNinjaRule uses unique_name for creating an rsp file on win.
-        extra_bindings.append(('unique_name',
-                               hashlib.md5(outputs[0]).hexdigest()))
-      self.ninja.build(outputs, rule_name, self.GypPathToNinja(source),
-                       implicit=inputs,
-                       order_only=prebuild,
-                       variables=extra_bindings)
-
-      all_outputs.extend(outputs)
-
-    return all_outputs
-
-  def WriteCopies(self, copies, prebuild, mac_bundle_depends):
-    outputs = []
-    env = self.GetToolchainEnv()
-    for copy in copies:
-      for path in copy['files']:
-        # Normalize the path so trailing slashes don't confuse us.
-        path = os.path.normpath(path)
-        basename = os.path.split(path)[1]
-        src = self.GypPathToNinja(path, env)
-        dst = self.GypPathToNinja(os.path.join(copy['destination'], basename),
-                                  env)
-        outputs += self.ninja.build(dst, 'copy', src, order_only=prebuild)
-        if self.is_mac_bundle:
-          # gyp has mac_bundle_resources to copy things into a bundle's
-          # Resources folder, but there's no built-in way to copy files to
-          # other places in the bundle. Hence, some targets use copies for
-          # this. Check if this file is copied into the current bundle, and
-          # if so add it to the bundle depends so that dependent targets get
-          # rebuilt if the copy input changes.
-          if dst.startswith(self.xcode_settings.GetBundleContentsFolderPath()):
-            mac_bundle_depends.append(dst)
-
-    return outputs
-
-  def WriteMacBundleResources(self, resources, bundle_depends):
-    """Writes ninja edges for 'mac_bundle_resources'."""
-    xcassets = []
-    for output, res in gyp.xcode_emulation.GetMacBundleResources(
-        generator_default_variables['PRODUCT_DIR'],
-        self.xcode_settings, map(self.GypPathToNinja, resources)):
-      output = self.ExpandSpecial(output)
-      if os.path.splitext(output)[-1] != '.xcassets':
-        isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
-        self.ninja.build(output, 'mac_tool', res,
-                         variables=[('mactool_cmd', 'copy-bundle-resource'), \
-                                    ('binary', isBinary)])
-        bundle_depends.append(output)
-      else:
-        xcassets.append(res)
-    return xcassets
-
-  def WriteMacXCassets(self, xcassets, bundle_depends):
-    """Writes ninja edges for 'mac_bundle_resources' .xcassets files.
-
-    This adds an invocation of 'actool' via the 'mac_tool.py' helper script.
-    It assumes that the asset catalogs define at least one imageset and
-    thus an Assets.car file will be generated in the application resources
-    directory.
If this is not the case, then the build will probably be done - at each invocation of ninja.""" - if not xcassets: - return - - extra_arguments = {} - settings_to_arg = { - 'XCASSETS_APP_ICON': 'app-icon', - 'XCASSETS_LAUNCH_IMAGE': 'launch-image', - } - settings = self.xcode_settings.xcode_settings[self.config_name] - for settings_key, arg_name in settings_to_arg.items(): - value = settings.get(settings_key) - if value: - extra_arguments[arg_name] = value - - partial_info_plist = None - if extra_arguments: - partial_info_plist = self.GypPathToUniqueOutput( - 'assetcatalog_generated_info.plist') - extra_arguments['output-partial-info-plist'] = partial_info_plist - - outputs = [] - outputs.append( - os.path.join( - self.xcode_settings.GetBundleResourceFolder(), - 'Assets.car')) - if partial_info_plist: - outputs.append(partial_info_plist) - - keys = QuoteShellArgument(json.dumps(extra_arguments), self.flavor) - extra_env = self.xcode_settings.GetPerTargetSettings() - env = self.GetSortedXcodeEnv(additional_settings=extra_env) - env = self.ComputeExportEnvString(env) - - bundle_depends.extend(self.ninja.build( - outputs, 'compile_xcassets', xcassets, - variables=[('env', env), ('keys', keys)])) - return partial_info_plist - - def WriteMacInfoPlist(self, partial_info_plist, bundle_depends): - """Write build rules for bundle Info.plist files.""" - info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist( - generator_default_variables['PRODUCT_DIR'], - self.xcode_settings, self.GypPathToNinja) - if not info_plist: - return - out = self.ExpandSpecial(out) - if defines: - # Create an intermediate file to store preprocessed results. - intermediate_plist = self.GypPathToUniqueOutput( - os.path.basename(info_plist)) - defines = ' '.join([Define(d, self.flavor) for d in defines]) - info_plist = self.ninja.build( - intermediate_plist, 'preprocess_infoplist', info_plist, - variables=[('defines',defines)]) - - env = self.GetSortedXcodeEnv(additional_settings=extra_env) - env = self.ComputeExportEnvString(env) - - if partial_info_plist: - intermediate_plist = self.GypPathToUniqueOutput('merged_info.plist') - info_plist = self.ninja.build( - intermediate_plist, 'merge_infoplist', - [partial_info_plist, info_plist]) - - keys = self.xcode_settings.GetExtraPlistItems(self.config_name) - keys = QuoteShellArgument(json.dumps(keys), self.flavor) - isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name) - self.ninja.build(out, 'copy_infoplist', info_plist, - variables=[('env', env), ('keys', keys), - ('binary', isBinary)]) - bundle_depends.append(out) - - def WriteSources(self, ninja_file, config_name, config, sources, predepends, - precompiled_header, spec): - """Write build rules to compile all of |sources|.""" - if self.toolset == 'host': - self.ninja.variable('ar', '$ar_host') - self.ninja.variable('cc', '$cc_host') - self.ninja.variable('cxx', '$cxx_host') - self.ninja.variable('ld', '$ld_host') - self.ninja.variable('ldxx', '$ldxx_host') - self.ninja.variable('nm', '$nm_host') - self.ninja.variable('readelf', '$readelf_host') - - if self.flavor != 'mac' or len(self.archs) == 1: - return self.WriteSourcesForArch( - self.ninja, config_name, config, sources, predepends, - precompiled_header, spec) - else: - return dict((arch, self.WriteSourcesForArch( - self.arch_subninjas[arch], config_name, config, sources, predepends, - precompiled_header, spec, arch=arch)) - for arch in self.archs) - - def WriteSourcesForArch(self, ninja_file, config_name, config, sources, - predepends, 
precompiled_header, spec, arch=None):
-    """Write build rules to compile all of |sources|."""
-
-    extra_defines = []
-    if self.flavor == 'mac':
-      cflags = self.xcode_settings.GetCflags(config_name, arch=arch)
-      cflags_c = self.xcode_settings.GetCflagsC(config_name)
-      cflags_cc = self.xcode_settings.GetCflagsCC(config_name)
-      cflags_objc = ['$cflags_c'] + \
-                    self.xcode_settings.GetCflagsObjC(config_name)
-      cflags_objcc = ['$cflags_cc'] + \
-                     self.xcode_settings.GetCflagsObjCC(config_name)
-    elif self.flavor == 'win':
-      asmflags = self.msvs_settings.GetAsmflags(config_name)
-      cflags = self.msvs_settings.GetCflags(config_name)
-      cflags_c = self.msvs_settings.GetCflagsC(config_name)
-      cflags_cc = self.msvs_settings.GetCflagsCC(config_name)
-      extra_defines = self.msvs_settings.GetComputedDefines(config_name)
-      # See comment at cc_command for why there are two .pdb files.
-      pdbpath_c = pdbpath_cc = self.msvs_settings.GetCompilerPdbName(
-          config_name, self.ExpandSpecial)
-      if not pdbpath_c:
-        obj = 'obj'
-        if self.toolset != 'target':
-          obj += '.' + self.toolset
-        pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, self.name))
-        pdbpath_c = pdbpath + '.c.pdb'
-        pdbpath_cc = pdbpath + '.cc.pdb'
-      self.WriteVariableList(ninja_file, 'pdbname_c', [pdbpath_c])
-      self.WriteVariableList(ninja_file, 'pdbname_cc', [pdbpath_cc])
-      self.WriteVariableList(ninja_file, 'pchprefix', [self.name])
-    else:
-      cflags = config.get('cflags', [])
-      cflags_c = config.get('cflags_c', [])
-      cflags_cc = config.get('cflags_cc', [])
-
-    # Respect environment variables related to build, but target-specific
-    # flags can still override them.
-    if self.toolset == 'target':
-      cflags_c = (os.environ.get('CPPFLAGS', '').split() +
-                  os.environ.get('CFLAGS', '').split() + cflags_c)
-      cflags_cc = (os.environ.get('CPPFLAGS', '').split() +
-                   os.environ.get('CXXFLAGS', '').split() + cflags_cc)
-    elif self.toolset == 'host':
-      cflags_c = (os.environ.get('CPPFLAGS_host', '').split() +
-                  os.environ.get('CFLAGS_host', '').split() + cflags_c)
-      cflags_cc = (os.environ.get('CPPFLAGS_host', '').split() +
-                   os.environ.get('CXXFLAGS_host', '').split() + cflags_cc)
-
-    defines = config.get('defines', []) + extra_defines
-    self.WriteVariableList(ninja_file, 'defines',
-                           [Define(d, self.flavor) for d in defines])
-    if self.flavor == 'win':
-      self.WriteVariableList(ninja_file, 'asmflags',
-                             map(self.ExpandSpecial, asmflags))
-      self.WriteVariableList(ninja_file, 'rcflags',
-          [QuoteShellArgument(self.ExpandSpecial(f), self.flavor)
-           for f in self.msvs_settings.GetRcflags(config_name,
-                                                  self.GypPathToNinja)])
-
-    include_dirs = config.get('include_dirs', [])
-
-    env = self.GetToolchainEnv()
-    if self.flavor == 'win':
-      include_dirs = self.msvs_settings.AdjustIncludeDirs(include_dirs,
-                                                          config_name)
-    self.WriteVariableList(ninja_file, 'includes',
-        [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
-         for i in include_dirs])
-
-    if self.flavor == 'win':
-      midl_include_dirs = config.get('midl_include_dirs', [])
-      midl_include_dirs = self.msvs_settings.AdjustMidlIncludeDirs(
-          midl_include_dirs, config_name)
-      self.WriteVariableList(ninja_file, 'midl_includes',
-          [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
-           for i in midl_include_dirs])
-
-    pch_commands = precompiled_header.GetPchBuildCommands(arch)
-    if self.flavor == 'mac':
-      # Most targets use no precompiled headers, so only write these if needed.
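A minimal sketch of the environment-flag precedence implemented above: environment flags are prepended so the target's own flags, appearing later on the command line, win wherever the compiler honors the last occurrence (e.g. -O levels). merged_cflags_c is a hypothetical helper:

import os

def merged_cflags_c(config_cflags_c, toolset='target'):
  # CPPFLAGS/CFLAGS for the target toolset, CPPFLAGS_host/CFLAGS_host for host.
  suffix = '' if toolset == 'target' else '_' + toolset
  return (os.environ.get('CPPFLAGS' + suffix, '').split() +
          os.environ.get('CFLAGS' + suffix, '').split() +
          config_cflags_c)

os.environ['CFLAGS'] = '-O1'
print(merged_cflags_c(['-O2']))  # -> ['-O1', '-O2']; -O2 takes effect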
- for ext, var in [('c', 'cflags_pch_c'), ('cc', 'cflags_pch_cc'), - ('m', 'cflags_pch_objc'), ('mm', 'cflags_pch_objcc')]: - include = precompiled_header.GetInclude(ext, arch) - if include: ninja_file.variable(var, include) - - arflags = config.get('arflags', []) - - self.WriteVariableList(ninja_file, 'cflags', - map(self.ExpandSpecial, cflags)) - self.WriteVariableList(ninja_file, 'cflags_c', - map(self.ExpandSpecial, cflags_c)) - self.WriteVariableList(ninja_file, 'cflags_cc', - map(self.ExpandSpecial, cflags_cc)) - if self.flavor == 'mac': - self.WriteVariableList(ninja_file, 'cflags_objc', - map(self.ExpandSpecial, cflags_objc)) - self.WriteVariableList(ninja_file, 'cflags_objcc', - map(self.ExpandSpecial, cflags_objcc)) - self.WriteVariableList(ninja_file, 'arflags', - map(self.ExpandSpecial, arflags)) - ninja_file.newline() - outputs = [] - has_rc_source = False - for source in sources: - filename, ext = os.path.splitext(source) - ext = ext[1:] - obj_ext = self.obj_ext - if ext in ('cc', 'cpp', 'cxx'): - command = 'cxx' - self.uses_cpp = True - elif ext == 'c' or (ext == 'S' and self.flavor != 'win'): - command = 'cc' - elif ext == 's' and self.flavor != 'win': # Doesn't generate .o.d files. - command = 'cc_s' - elif (self.flavor == 'win' and ext == 'asm' and - not self.msvs_settings.HasExplicitAsmRules(spec)): - command = 'asm' - # Add the _asm suffix as msvs is capable of handling .cc and - # .asm files of the same name without collision. - obj_ext = '_asm.obj' - elif self.flavor == 'mac' and ext == 'm': - command = 'objc' - elif self.flavor == 'mac' and ext == 'mm': - command = 'objcxx' - self.uses_cpp = True - elif self.flavor == 'win' and ext == 'rc': - command = 'rc' - obj_ext = '.res' - has_rc_source = True - else: - # Ignore unhandled extensions. - continue - input = self.GypPathToNinja(source) - output = self.GypPathToUniqueOutput(filename + obj_ext) - if arch is not None: - output = AddArch(output, arch) - implicit = precompiled_header.GetObjDependencies([input], [output], arch) - variables = [] - if self.flavor == 'win': - variables, output, implicit = precompiled_header.GetFlagsModifications( - input, output, implicit, command, cflags_c, cflags_cc, - self.ExpandSpecial) - ninja_file.build(output, command, input, - implicit=[gch for _, _, gch in implicit], - order_only=predepends, variables=variables) - outputs.append(output) - - if has_rc_source: - resource_include_dirs = config.get('resource_include_dirs', include_dirs) - self.WriteVariableList(ninja_file, 'resource_includes', - [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor) - for i in resource_include_dirs]) - - self.WritePchTargets(ninja_file, pch_commands) - - ninja_file.newline() - return outputs - - def WritePchTargets(self, ninja_file, pch_commands): - """Writes ninja rules to compile prefix headers.""" - if not pch_commands: - return - - for gch, lang_flag, lang, input in pch_commands: - var_name = { - 'c': 'cflags_pch_c', - 'cc': 'cflags_pch_cc', - 'm': 'cflags_pch_objc', - 'mm': 'cflags_pch_objcc', - }[lang] - - map = { 'c': 'cc', 'cc': 'cxx', 'm': 'objc', 'mm': 'objcxx', } - cmd = map.get(lang) - ninja_file.build(gch, cmd, input, variables=[(var_name, lang_flag)]) - - def WriteLink(self, spec, config_name, config, link_deps): - """Write out a link step. Fills out target.binary. 
""" - if self.flavor != 'mac' or len(self.archs) == 1: - return self.WriteLinkForArch( - self.ninja, spec, config_name, config, link_deps) - else: - output = self.ComputeOutput(spec) - inputs = [self.WriteLinkForArch(self.arch_subninjas[arch], spec, - config_name, config, link_deps[arch], - arch=arch) - for arch in self.archs] - extra_bindings = [] - build_output = output - if not self.is_mac_bundle: - self.AppendPostbuildVariable(extra_bindings, spec, output, output) - - # TODO(yyanagisawa): more work needed to fix: - # https://code.google.com/p/gyp/issues/detail?id=411 - if (spec['type'] in ('shared_library', 'loadable_module') and - not self.is_mac_bundle): - extra_bindings.append(('lib', output)) - self.ninja.build([output, output + '.TOC'], 'solipo', inputs, - variables=extra_bindings) - else: - self.ninja.build(build_output, 'lipo', inputs, variables=extra_bindings) - return output - - def WriteLinkForArch(self, ninja_file, spec, config_name, config, - link_deps, arch=None): - """Write out a link step. Fills out target.binary. """ - command = { - 'executable': 'link', - 'loadable_module': 'solink_module', - 'shared_library': 'solink', - }[spec['type']] - command_suffix = '' - - implicit_deps = set() - solibs = set() - order_deps = set() - - if 'dependencies' in spec: - # Two kinds of dependencies: - # - Linkable dependencies (like a .a or a .so): add them to the link line. - # - Non-linkable dependencies (like a rule that generates a file - # and writes a stamp file): add them to implicit_deps - extra_link_deps = set() - for dep in spec['dependencies']: - target = self.target_outputs.get(dep) - if not target: - continue - linkable = target.Linkable() - if linkable: - new_deps = [] - if (self.flavor == 'win' and - target.component_objs and - self.msvs_settings.IsUseLibraryDependencyInputs(config_name)): - new_deps = target.component_objs - if target.compile_deps: - order_deps.add(target.compile_deps) - elif self.flavor == 'win' and target.import_lib: - new_deps = [target.import_lib] - elif target.UsesToc(self.flavor): - solibs.add(target.binary) - implicit_deps.add(target.binary + '.TOC') - else: - new_deps = [target.binary] - for new_dep in new_deps: - if new_dep not in extra_link_deps: - extra_link_deps.add(new_dep) - link_deps.append(new_dep) - - final_output = target.FinalOutput() - if not linkable or final_output != target.binary: - implicit_deps.add(final_output) - - extra_bindings = [] - if self.uses_cpp and self.flavor != 'win': - extra_bindings.append(('ld', '$ldxx')) - - output = self.ComputeOutput(spec, arch) - if arch is None and not self.is_mac_bundle: - self.AppendPostbuildVariable(extra_bindings, spec, output, output) - - is_executable = spec['type'] == 'executable' - # The ldflags config key is not used on mac or win. On those platforms - # linker flags are set via xcode_settings and msvs_settings, respectively. 
- env_ldflags = os.environ.get('LDFLAGS', '').split() - if self.flavor == 'mac': - ldflags = self.xcode_settings.GetLdflags(config_name, - self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']), - self.GypPathToNinja, arch) - ldflags = env_ldflags + ldflags - elif self.flavor == 'win': - manifest_base_name = self.GypPathToUniqueOutput( - self.ComputeOutputFileName(spec)) - ldflags, intermediate_manifest, manifest_files = \ - self.msvs_settings.GetLdflags(config_name, self.GypPathToNinja, - self.ExpandSpecial, manifest_base_name, - output, is_executable, - self.toplevel_build) - ldflags = env_ldflags + ldflags - self.WriteVariableList(ninja_file, 'manifests', manifest_files) - implicit_deps = implicit_deps.union(manifest_files) - if intermediate_manifest: - self.WriteVariableList( - ninja_file, 'intermediatemanifest', [intermediate_manifest]) - command_suffix = _GetWinLinkRuleNameSuffix( - self.msvs_settings.IsEmbedManifest(config_name)) - def_file = self.msvs_settings.GetDefFile(self.GypPathToNinja) - if def_file: - implicit_deps.add(def_file) - else: - # Respect environment variables related to build, but target-specific - # flags can still override them. - ldflags = env_ldflags + config.get('ldflags', []) - if is_executable and len(solibs): - rpath = 'lib/' - if self.toolset != 'target': - rpath += self.toolset - ldflags.append(r'-Wl,-rpath=\$$ORIGIN/%s' % rpath) - ldflags.append('-Wl,-rpath-link=%s' % rpath) - self.WriteVariableList(ninja_file, 'ldflags', - map(self.ExpandSpecial, ldflags)) - - library_dirs = config.get('library_dirs', []) - if self.flavor == 'win': - library_dirs = [self.msvs_settings.ConvertVSMacros(l, config_name) - for l in library_dirs] - library_dirs = ['/LIBPATH:' + QuoteShellArgument(self.GypPathToNinja(l), - self.flavor) - for l in library_dirs] - else: - library_dirs = [QuoteShellArgument('-L' + self.GypPathToNinja(l), - self.flavor) - for l in library_dirs] - - libraries = gyp.common.uniquer(map(self.ExpandSpecial, - spec.get('libraries', []))) - if self.flavor == 'mac': - libraries = self.xcode_settings.AdjustLibraries(libraries, config_name) - elif self.flavor == 'win': - libraries = self.msvs_settings.AdjustLibraries(libraries) - - self.WriteVariableList(ninja_file, 'libs', library_dirs + libraries) - - linked_binary = output - - if command in ('solink', 'solink_module'): - extra_bindings.append(('soname', os.path.split(output)[1])) - extra_bindings.append(('lib', - gyp.common.EncodePOSIXShellArgument(output))) - if self.flavor != 'win': - link_file_list = output - if self.is_mac_bundle: - # 'Dependency Framework.framework/Versions/A/Dependency Framework' -> - # 'Dependency Framework.framework.rsp' - link_file_list = self.xcode_settings.GetWrapperName() - if arch: - link_file_list += '.' 
+ arch - link_file_list += '.rsp' - # If an rspfile contains spaces, ninja surrounds the filename with - # quotes around it and then passes it to open(), creating a file with - # quotes in its name (and when looking for the rsp file, the name - # makes it through bash which strips the quotes) :-/ - link_file_list = link_file_list.replace(' ', '_') - extra_bindings.append( - ('link_file_list', - gyp.common.EncodePOSIXShellArgument(link_file_list))) - if self.flavor == 'win': - extra_bindings.append(('binary', output)) - if ('/NOENTRY' not in ldflags and - not self.msvs_settings.GetNoImportLibrary(config_name)): - self.target.import_lib = output + '.lib' - extra_bindings.append(('implibflag', - '/IMPLIB:%s' % self.target.import_lib)) - pdbname = self.msvs_settings.GetPDBName( - config_name, self.ExpandSpecial, output + '.pdb') - output = [output, self.target.import_lib] - if pdbname: - output.append(pdbname) - elif not self.is_mac_bundle: - output = [output, output + '.TOC'] - else: - command = command + '_notoc' - elif self.flavor == 'win': - extra_bindings.append(('binary', output)) - pdbname = self.msvs_settings.GetPDBName( - config_name, self.ExpandSpecial, output + '.pdb') - if pdbname: - output = [output, pdbname] - - - if len(solibs): - extra_bindings.append(('solibs', gyp.common.EncodePOSIXShellList(solibs))) - - ninja_file.build(output, command + command_suffix, link_deps, - implicit=list(implicit_deps), - order_only=list(order_deps), - variables=extra_bindings) - return linked_binary - - def WriteTarget(self, spec, config_name, config, link_deps, compile_deps): - extra_link_deps = any(self.target_outputs.get(dep).Linkable() - for dep in spec.get('dependencies', []) - if dep in self.target_outputs) - if spec['type'] == 'none' or (not link_deps and not extra_link_deps): - # TODO(evan): don't call this function for 'none' target types, as - # it doesn't do anything, and we fake out a 'binary' with a stamp file. - self.target.binary = compile_deps - self.target.type = 'none' - elif spec['type'] == 'static_library': - self.target.binary = self.ComputeOutput(spec) - if (self.flavor not in ('mac', 'openbsd', 'netbsd', 'win') and not - self.is_standalone_static_library): - self.ninja.build(self.target.binary, 'alink_thin', link_deps, - order_only=compile_deps) - else: - variables = [] - if self.xcode_settings: - libtool_flags = self.xcode_settings.GetLibtoolflags(config_name) - if libtool_flags: - variables.append(('libtool_flags', libtool_flags)) - if self.msvs_settings: - libflags = self.msvs_settings.GetLibFlags(config_name, - self.GypPathToNinja) - variables.append(('libflags', libflags)) - - if self.flavor != 'mac' or len(self.archs) == 1: - self.AppendPostbuildVariable(variables, spec, - self.target.binary, self.target.binary) - self.ninja.build(self.target.binary, 'alink', link_deps, - order_only=compile_deps, variables=variables) - else: - inputs = [] - for arch in self.archs: - output = self.ComputeOutput(spec, arch) - self.arch_subninjas[arch].build(output, 'alink', link_deps[arch], - order_only=compile_deps, - variables=variables) - inputs.append(output) - # TODO: It's not clear if libtool_flags should be passed to the alink - # call that combines single-arch .a files into a fat .a file. - self.AppendPostbuildVariable(variables, spec, - self.target.binary, self.target.binary) - self.ninja.build(self.target.binary, 'alink', inputs, - # FIXME: test proving order_only=compile_deps isn't - # needed. 
- variables=variables) - else: - self.target.binary = self.WriteLink(spec, config_name, config, link_deps) - return self.target.binary - - def WriteMacBundle(self, spec, mac_bundle_depends, is_empty): - assert self.is_mac_bundle - package_framework = spec['type'] in ('shared_library', 'loadable_module') - output = self.ComputeMacBundleOutput() - if is_empty: - output += '.stamp' - variables = [] - self.AppendPostbuildVariable(variables, spec, output, self.target.binary, - is_command_start=not package_framework) - if package_framework and not is_empty: - variables.append(('version', self.xcode_settings.GetFrameworkVersion())) - self.ninja.build(output, 'package_framework', mac_bundle_depends, - variables=variables) - else: - self.ninja.build(output, 'stamp', mac_bundle_depends, - variables=variables) - self.target.bundle = output - return output - - def GetToolchainEnv(self, additional_settings=None): - """Returns the variables toolchain would set for build steps.""" - env = self.GetSortedXcodeEnv(additional_settings=additional_settings) - if self.flavor == 'win': - env = self.GetMsvsToolchainEnv( - additional_settings=additional_settings) - return env - - def GetMsvsToolchainEnv(self, additional_settings=None): - """Returns the variables Visual Studio would set for build steps.""" - return self.msvs_settings.GetVSMacroEnv('$!PRODUCT_DIR', - config=self.config_name) - - def GetSortedXcodeEnv(self, additional_settings=None): - """Returns the variables Xcode would set for build steps.""" - assert self.abs_build_dir - abs_build_dir = self.abs_build_dir - return gyp.xcode_emulation.GetSortedXcodeEnv( - self.xcode_settings, abs_build_dir, - os.path.join(abs_build_dir, self.build_to_base), self.config_name, - additional_settings) - - def GetSortedXcodePostbuildEnv(self): - """Returns the variables Xcode would set for postbuild steps.""" - postbuild_settings = {} - # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack. - # TODO(thakis): It would be nice to have some general mechanism instead. - strip_save_file = self.xcode_settings.GetPerTargetSetting( - 'CHROMIUM_STRIP_SAVE_FILE') - if strip_save_file: - postbuild_settings['CHROMIUM_STRIP_SAVE_FILE'] = strip_save_file - return self.GetSortedXcodeEnv(additional_settings=postbuild_settings) - - def AppendPostbuildVariable(self, variables, spec, output, binary, - is_command_start=False): - """Adds a 'postbuild' variable if there is a postbuild for |output|.""" - postbuild = self.GetPostbuildCommand(spec, output, binary, is_command_start) - if postbuild: - variables.append(('postbuilds', postbuild)) - - def GetPostbuildCommand(self, spec, output, output_binary, is_command_start): - """Returns a shell command that runs all the postbuilds, and removes - |output| if any of them fails. If |is_command_start| is False, then the - returned string will start with ' && '.""" - if not self.xcode_settings or spec['type'] == 'none' or not output: - return '' - output = QuoteShellArgument(output, self.flavor) - postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(spec, quiet=True) - if output_binary is not None: - postbuilds = self.xcode_settings.AddImplicitPostbuilds( - self.config_name, - os.path.normpath(os.path.join(self.base_to_build, output)), - QuoteShellArgument( - os.path.normpath(os.path.join(self.base_to_build, output_binary)), - self.flavor), - postbuilds, quiet=True) - - if not postbuilds: - return '' - # Postbuilds expect to be run in the gyp file's directory, so insert an - # implicit postbuild to cd to there. 
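# NOTE: illustrative sketch only; the env settings, postbuild commands and
# paths are hypothetical. After ninja unescapes '$$' to '$', the command
# string assembled below looks roughly like:
#
#   (export FOO=foo; (cd path/to/gypfile && ./postbuild1.sh && ./postbuild2.sh
#   ); G=$?; ((exit $G) || rm -rf out/Default/Foo.app) && exit $G)
#
# i.e. every postbuild runs in a subshell rooted at the gyp file's directory,
# and the output is removed when any postbuild fails, so the next build reruns
# the whole chain instead of trusting a half-finished artifact.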
- postbuilds.insert(0, gyp.common.EncodePOSIXShellList( - ['cd', self.build_to_base])) - env = self.ComputeExportEnvString(self.GetSortedXcodePostbuildEnv()) - # G will be non-null if any postbuild fails. Run all postbuilds in a - # subshell. - commands = env + ' (' + \ - ' && '.join([ninja_syntax.escape(command) for command in postbuilds]) - command_string = (commands + '); G=$$?; ' - # Remove the final output if any postbuild failed. - '((exit $$G) || rm -rf %s) ' % output + '&& exit $$G)') - if is_command_start: - return '(' + command_string + ' && ' - else: - return '$ && (' + command_string - - def ComputeExportEnvString(self, env): - """Given an environment, returns a string looking like - 'export FOO=foo; export BAR="${FOO} bar;' - that exports |env| to the shell.""" - export_str = [] - for k, v in env: - export_str.append('export %s=%s;' % - (k, ninja_syntax.escape(gyp.common.EncodePOSIXShellArgument(v)))) - return ' '.join(export_str) - - def ComputeMacBundleOutput(self): - """Return the 'output' (full output path) to a bundle output directory.""" - assert self.is_mac_bundle - path = generator_default_variables['PRODUCT_DIR'] - return self.ExpandSpecial( - os.path.join(path, self.xcode_settings.GetWrapperName())) - - def ComputeOutputFileName(self, spec, type=None): - """Compute the filename of the final output for the current target.""" - if not type: - type = spec['type'] - - default_variables = copy.copy(generator_default_variables) - CalculateVariables(default_variables, {'flavor': self.flavor}) - - # Compute filename prefix: the product prefix, or a default for - # the product type. - DEFAULT_PREFIX = { - 'loadable_module': default_variables['SHARED_LIB_PREFIX'], - 'shared_library': default_variables['SHARED_LIB_PREFIX'], - 'static_library': default_variables['STATIC_LIB_PREFIX'], - 'executable': default_variables['EXECUTABLE_PREFIX'], - } - prefix = spec.get('product_prefix', DEFAULT_PREFIX.get(type, '')) - - # Compute filename extension: the product extension, or a default - # for the product type. - DEFAULT_EXTENSION = { - 'loadable_module': default_variables['SHARED_LIB_SUFFIX'], - 'shared_library': default_variables['SHARED_LIB_SUFFIX'], - 'static_library': default_variables['STATIC_LIB_SUFFIX'], - 'executable': default_variables['EXECUTABLE_SUFFIX'], - } - extension = spec.get('product_extension') - if extension: - extension = '.' + extension - else: - extension = DEFAULT_EXTENSION.get(type, '') - - if 'product_name' in spec: - # If we were given an explicit name, use that. - target = spec['product_name'] - else: - # Otherwise, derive a name from the target name. - target = spec['target_name'] - if prefix == 'lib': - # Snip out an extra 'lib' from libs if appropriate. 
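# NOTE: illustrative example with a hypothetical target name. With prefix
# 'lib', a target named 'libfoo' is stripped to 'foo', so the shared library
# comes out as 'libfoo.so' rather than 'liblibfoo.so'.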
- target = StripPrefix(target, 'lib') - - if type in ('static_library', 'loadable_module', 'shared_library', - 'executable'): - return '%s%s%s' % (prefix, target, extension) - elif type == 'none': - return '%s.stamp' % target - else: - raise Exception('Unhandled output type %s' % type) - - def ComputeOutput(self, spec, arch=None): - """Compute the path for the final output of the spec.""" - type = spec['type'] - - if self.flavor == 'win': - override = self.msvs_settings.GetOutputName(self.config_name, - self.ExpandSpecial) - if override: - return override - - if arch is None and self.flavor == 'mac' and type in ( - 'static_library', 'executable', 'shared_library', 'loadable_module'): - filename = self.xcode_settings.GetExecutablePath() - else: - filename = self.ComputeOutputFileName(spec, type) - - if arch is None and 'product_dir' in spec: - path = os.path.join(spec['product_dir'], filename) - return self.ExpandSpecial(path) - - # Some products go into the output root, libraries go into shared library - # dir, and everything else goes into the normal place. - type_in_output_root = ['executable', 'loadable_module'] - if self.flavor == 'mac' and self.toolset == 'target': - type_in_output_root += ['shared_library', 'static_library'] - elif self.flavor == 'win' and self.toolset == 'target': - type_in_output_root += ['shared_library'] - - if arch is not None: - # Make sure partial executables don't end up in a bundle or the regular - # output directory. - archdir = 'arch' - if self.toolset != 'target': - archdir = os.path.join('arch', '%s' % self.toolset) - return os.path.join(archdir, AddArch(filename, arch)) - elif type in type_in_output_root or self.is_standalone_static_library: - return filename - elif type == 'shared_library': - libdir = 'lib' - if self.toolset != 'target': - libdir = os.path.join('lib', '%s' % self.toolset) - return os.path.join(libdir, filename) - else: - return self.GypPathToUniqueOutput(filename, qualified=False) - - def WriteVariableList(self, ninja_file, var, values): - assert not isinstance(values, str) - if values is None: - values = [] - ninja_file.variable(var, ' '.join(values)) - - def WriteNewNinjaRule(self, name, args, description, is_cygwin, env, pool, - depfile=None): - """Write out a new ninja "rule" statement for a given command. - - Returns the name of the new rule, and a copy of |args| with variables - expanded.""" - - if self.flavor == 'win': - args = [self.msvs_settings.ConvertVSMacros( - arg, self.base_to_build, config=self.config_name) - for arg in args] - description = self.msvs_settings.ConvertVSMacros( - description, config=self.config_name) - elif self.flavor == 'mac': - # |env| is an empty list on non-mac. - args = [gyp.xcode_emulation.ExpandEnvVars(arg, env) for arg in args] - description = gyp.xcode_emulation.ExpandEnvVars(description, env) - - # TODO: we shouldn't need to qualify names; we do it because - # currently the ninja rule namespace is global, but it really - # should be scoped to the subninja. - rule_name = self.name - if self.toolset == 'target': - rule_name += '.' + self.toolset - rule_name += '.' + name - rule_name = re.sub('[^a-zA-Z0-9_]', '_', rule_name) - - # Remove variable references, but not if they refer to the magic rule - # variables. This is not quite right, as it also protects these for - # actions, not just for rules where they are valid. Good enough. - protect = [ '${root}', '${dirname}', '${source}', '${ext}', '${name}' ] - protect = '(?!' 
+ '|'.join(map(re.escape, protect)) + ')' - description = re.sub(protect + r'\$', '_', description) - - # gyp dictates that commands are run from the base directory. - # cd into the directory before running, and adjust paths in - # the arguments to point to the proper locations. - rspfile = None - rspfile_content = None - args = [self.ExpandSpecial(arg, self.base_to_build) for arg in args] - if self.flavor == 'win': - rspfile = rule_name + '.$unique_name.rsp' - # The cygwin case handles this inside the bash sub-shell. - run_in = '' if is_cygwin else ' ' + self.build_to_base - if is_cygwin: - rspfile_content = self.msvs_settings.BuildCygwinBashCommandLine( - args, self.build_to_base) - else: - rspfile_content = gyp.msvs_emulation.EncodeRspFileList(args) - command = ('%s gyp-win-tool action-wrapper $arch ' % sys.executable + - rspfile + run_in) - else: - env = self.ComputeExportEnvString(env) - command = gyp.common.EncodePOSIXShellList(args) - command = 'cd %s; ' % self.build_to_base + env + command - - # GYP rules/actions express being no-ops by not touching their outputs. - # Avoid executing downstream dependencies in this case by specifying - # restat=1 to ninja. - self.ninja.rule(rule_name, command, description, depfile=depfile, - restat=True, pool=pool, - rspfile=rspfile, rspfile_content=rspfile_content) - self.ninja.newline() - - return rule_name, args - - -def CalculateVariables(default_variables, params): - """Calculate additional variables for use in the build (called by gyp).""" - global generator_additional_non_configuration_keys - global generator_additional_path_sections - flavor = gyp.common.GetFlavor(params) - if flavor == 'mac': - default_variables.setdefault('OS', 'mac') - default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib') - default_variables.setdefault('SHARED_LIB_DIR', - generator_default_variables['PRODUCT_DIR']) - default_variables.setdefault('LIB_DIR', - generator_default_variables['PRODUCT_DIR']) - - # Copy additional generator configuration data from Xcode, which is shared - # by the Mac Ninja generator. - import gyp.generator.xcode as xcode_generator - generator_additional_non_configuration_keys = getattr(xcode_generator, - 'generator_additional_non_configuration_keys', []) - generator_additional_path_sections = getattr(xcode_generator, - 'generator_additional_path_sections', []) - global generator_extra_sources_for_rules - generator_extra_sources_for_rules = getattr(xcode_generator, - 'generator_extra_sources_for_rules', []) - elif flavor == 'win': - exts = gyp.MSVSUtil.TARGET_TYPE_EXT - default_variables.setdefault('OS', 'win') - default_variables['EXECUTABLE_SUFFIX'] = '.' + exts['executable'] - default_variables['STATIC_LIB_PREFIX'] = '' - default_variables['STATIC_LIB_SUFFIX'] = '.' + exts['static_library'] - default_variables['SHARED_LIB_PREFIX'] = '' - default_variables['SHARED_LIB_SUFFIX'] = '.' + exts['shared_library'] - - # Copy additional generator configuration data from VS, which is shared - # by the Windows Ninja generator. - import gyp.generator.msvs as msvs_generator - generator_additional_non_configuration_keys = getattr(msvs_generator, - 'generator_additional_non_configuration_keys', []) - generator_additional_path_sections = getattr(msvs_generator, - 'generator_additional_path_sections', []) - - gyp.msvs_emulation.CalculateCommonVariables(default_variables, params) - else: - operating_system = flavor - if flavor == 'android': - operating_system = 'linux' # Keep this legacy behavior for now. 
- default_variables.setdefault('OS', operating_system) - default_variables.setdefault('SHARED_LIB_SUFFIX', '.so') - default_variables.setdefault('SHARED_LIB_DIR', - os.path.join('$!PRODUCT_DIR', 'lib')) - default_variables.setdefault('LIB_DIR', - os.path.join('$!PRODUCT_DIR', 'obj')) - -def ComputeOutputDir(params): - """Returns the path from the toplevel_dir to the build output directory.""" - # generator_dir: relative path from pwd to where make puts build files. - # Makes migrating from make to ninja easier, ninja doesn't put anything here. - generator_dir = os.path.relpath(params['options'].generator_output or '.') - - # output_dir: relative path from generator_dir to the build directory. - output_dir = params.get('generator_flags', {}).get('output_dir', 'out') - - # Relative path from source root to our output files. e.g. "out" - return os.path.normpath(os.path.join(generator_dir, output_dir)) - - -def CalculateGeneratorInputInfo(params): - """Called by __init__ to initialize generator values based on params.""" - # E.g. "out/gypfiles" - toplevel = params['options'].toplevel_dir - qualified_out_dir = os.path.normpath(os.path.join( - toplevel, ComputeOutputDir(params), 'gypfiles')) - - global generator_filelist_paths - generator_filelist_paths = { - 'toplevel': toplevel, - 'qualified_out_dir': qualified_out_dir, - } - - -def OpenOutput(path, mode='w'): - """Open |path| for writing, creating directories if necessary.""" - gyp.common.EnsureDirExists(path) - return open(path, mode) - - -def CommandWithWrapper(cmd, wrappers, prog): - wrapper = wrappers.get(cmd, '') - if wrapper: - return wrapper + ' ' + prog - return prog - - -def GetDefaultConcurrentLinks(): - """Returns a best-guess for a number of concurrent links.""" - pool_size = int(os.environ.get('GYP_LINK_CONCURRENCY', 0)) - if pool_size: - return pool_size - - if sys.platform in ('win32', 'cygwin'): - import ctypes - - class MEMORYSTATUSEX(ctypes.Structure): - _fields_ = [ - ("dwLength", ctypes.c_ulong), - ("dwMemoryLoad", ctypes.c_ulong), - ("ullTotalPhys", ctypes.c_ulonglong), - ("ullAvailPhys", ctypes.c_ulonglong), - ("ullTotalPageFile", ctypes.c_ulonglong), - ("ullAvailPageFile", ctypes.c_ulonglong), - ("ullTotalVirtual", ctypes.c_ulonglong), - ("ullAvailVirtual", ctypes.c_ulonglong), - ("sullAvailExtendedVirtual", ctypes.c_ulonglong), - ] - - stat = MEMORYSTATUSEX() - stat.dwLength = ctypes.sizeof(stat) - ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat)) - - # VS 2015 uses 20% more working set than VS 2013 and can consume all RAM - # on a 64 GB machine. - mem_limit = max(1, stat.ullTotalPhys / (5 * (2 ** 30))) # total / 5GB - hard_cap = max(1, int(os.environ.get('GYP_LINK_CONCURRENCY_MAX', 2**32))) - return min(mem_limit, hard_cap) - elif sys.platform.startswith('linux'): - if os.path.exists("/proc/meminfo"): - with open("/proc/meminfo") as meminfo: - memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB') - for line in meminfo: - match = memtotal_re.match(line) - if not match: - continue - # Allow 8Gb per link on Linux because Gold is quite memory hungry - return max(1, int(match.group(1)) / (8 * (2 ** 20))) - return 1 - elif sys.platform == 'darwin': - try: - avail_bytes = int(subprocess.check_output(['sysctl', '-n', 'hw.memsize'])) - # A static library debug build of Chromium's unit_tests takes ~2.7GB, so - # 4GB per ld process allows for some more bloat. - return max(1, avail_bytes / (4 * (2 ** 30))) # total / 4GB - except: - return 1 - else: - # TODO(scottmg): Implement this for other platforms. 
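# NOTE: illustrative sketch only, not used by the generator. Each platform
# branch above reduces to "total memory divided by an assumed per-link
# footprint, floored at one concurrent link":
def _concurrent_links_sketch(total_bytes, bytes_per_link):
  # e.g. 16 GB of RAM with the 8 GB-per-link Linux budget above gives 2.
  return max(1, int(total_bytes // bytes_per_link))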
- return 1 - - -def _GetWinLinkRuleNameSuffix(embed_manifest): - """Returns the suffix used to select an appropriate linking rule depending on - whether the manifest embedding is enabled.""" - return '_embed' if embed_manifest else '' - - -def _AddWinLinkRules(master_ninja, embed_manifest): - """Adds link rules for Windows platform to |master_ninja|.""" - def FullLinkCommand(ldcmd, out, binary_type): - resource_name = { - 'exe': '1', - 'dll': '2', - }[binary_type] - return '%(python)s gyp-win-tool link-with-manifests $arch %(embed)s ' \ - '%(out)s "%(ldcmd)s" %(resname)s $mt $rc "$intermediatemanifest" ' \ - '$manifests' % { - 'python': sys.executable, - 'out': out, - 'ldcmd': ldcmd, - 'resname': resource_name, - 'embed': embed_manifest } - rule_name_suffix = _GetWinLinkRuleNameSuffix(embed_manifest) - use_separate_mspdbsrv = ( - int(os.environ.get('GYP_USE_SEPARATE_MSPDBSRV', '0')) != 0) - dlldesc = 'LINK%s(DLL) $binary' % rule_name_suffix.upper() - dllcmd = ('%s gyp-win-tool link-wrapper $arch %s ' - '$ld /nologo $implibflag /DLL /OUT:$binary ' - '@$binary.rsp' % (sys.executable, use_separate_mspdbsrv)) - dllcmd = FullLinkCommand(dllcmd, '$binary', 'dll') - master_ninja.rule('solink' + rule_name_suffix, - description=dlldesc, command=dllcmd, - rspfile='$binary.rsp', - rspfile_content='$libs $in_newline $ldflags', - restat=True, - pool='link_pool') - master_ninja.rule('solink_module' + rule_name_suffix, - description=dlldesc, command=dllcmd, - rspfile='$binary.rsp', - rspfile_content='$libs $in_newline $ldflags', - restat=True, - pool='link_pool') - # Note that ldflags goes at the end so that it has the option of - # overriding default settings earlier in the command line. - exe_cmd = ('%s gyp-win-tool link-wrapper $arch %s ' - '$ld /nologo /OUT:$binary @$binary.rsp' % - (sys.executable, use_separate_mspdbsrv)) - exe_cmd = FullLinkCommand(exe_cmd, '$binary', 'exe') - master_ninja.rule('link' + rule_name_suffix, - description='LINK%s $binary' % rule_name_suffix.upper(), - command=exe_cmd, - rspfile='$binary.rsp', - rspfile_content='$in_newline $libs $ldflags', - pool='link_pool') - - -def GenerateOutputForConfig(target_list, target_dicts, data, params, - config_name): - options = params['options'] - flavor = gyp.common.GetFlavor(params) - generator_flags = params.get('generator_flags', {}) - - # build_dir: relative path from source root to our output files. - # e.g. "out/Debug" - build_dir = os.path.normpath( - os.path.join(ComputeOutputDir(params), config_name)) - - toplevel_build = os.path.join(options.toplevel_dir, build_dir) - - master_ninja_file = OpenOutput(os.path.join(toplevel_build, 'build.ninja')) - master_ninja = ninja_syntax.Writer(master_ninja_file, width=120) - - # Put build-time support tools in out/{config_name}. - gyp.common.CopyTool(flavor, toplevel_build) - - # Grab make settings for CC/CXX. - # The rules are: - # - The priority from low to high is gcc/g++, the 'make_global_settings' in - # gyp, the environment variable. - # - If there is no 'make_global_settings' for CC.host/CXX.host or - # 'CC_host'/'CXX_host' environment variable, cc_host/cxx_host should be set - # to cc/cxx. - if flavor == 'win': - ar = 'lib.exe' - # cc and cxx must be set to the correct architecture by overriding with one - # of cl_x86 or cl_x64 below.
- cc = 'UNSET' - cxx = 'UNSET' - ld = 'link.exe' - ld_host = '$ld' - else: - ar = 'ar' - cc = 'cc' - cxx = 'c++' - ld = '$cc' - ldxx = '$cxx' - ld_host = '$cc_host' - ldxx_host = '$cxx_host' - - ar_host = 'ar' - cc_host = None - cxx_host = None - cc_host_global_setting = None - cxx_host_global_setting = None - clang_cl = None - nm = 'nm' - nm_host = 'nm' - readelf = 'readelf' - readelf_host = 'readelf' - - build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0]) - make_global_settings = data[build_file].get('make_global_settings', []) - build_to_root = gyp.common.InvertRelativePath(build_dir, - options.toplevel_dir) - wrappers = {} - for key, value in make_global_settings: - if key == 'AR': - ar = os.path.join(build_to_root, value) - if key == 'AR.host': - ar_host = os.path.join(build_to_root, value) - if key == 'CC': - cc = os.path.join(build_to_root, value) - if cc.endswith('clang-cl'): - clang_cl = cc - if key == 'CXX': - cxx = os.path.join(build_to_root, value) - if key == 'CC.host': - cc_host = os.path.join(build_to_root, value) - cc_host_global_setting = value - if key == 'CXX.host': - cxx_host = os.path.join(build_to_root, value) - cxx_host_global_setting = value - if key == 'LD': - ld = os.path.join(build_to_root, value) - if key == 'LD.host': - ld_host = os.path.join(build_to_root, value) - if key == 'LDXX': - ldxx = os.path.join(build_to_root, value) - if key == 'LDXX.host': - ldxx_host = os.path.join(build_to_root, value) - if key == 'NM': - nm = os.path.join(build_to_root, value) - if key == 'NM.host': - nm_host = os.path.join(build_to_root, value) - if key == 'READELF': - readelf = os.path.join(build_to_root, value) - if key == 'READELF.host': - readelf_host = os.path.join(build_to_root, value) - if key.endswith('_wrapper'): - wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value) - - # Support wrappers from environment variables too. - for key, value in os.environ.items(): - if key.lower().endswith('_wrapper'): - key_prefix = key[:-len('_wrapper')] - key_prefix = re.sub(r'\.HOST$', '.host', key_prefix) - wrappers[key_prefix] = os.path.join(build_to_root, value) - - if flavor == 'win': - configs = [target_dicts[qualified_target]['configurations'][config_name] - for qualified_target in target_list] - shared_system_includes = None - if not generator_flags.get('ninja_use_custom_environment_files', 0): - shared_system_includes = \ - gyp.msvs_emulation.ExtractSharedMSVSSystemIncludes( - configs, generator_flags) - cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles( - toplevel_build, generator_flags, shared_system_includes, OpenOutput) - for arch, path in cl_paths.items(): - if clang_cl: - # If we have selected clang-cl, use that instead. - path = clang_cl - command = CommandWithWrapper('CC', wrappers, - QuoteShellArgument(path, 'win')) - if clang_cl: - # Use clang-cl to cross-compile for x86 or x86_64. 
- command += (' -m32' if arch == 'x86' else ' -m64') - master_ninja.variable('cl_' + arch, command) - - cc = GetEnvironFallback(['CC_target', 'CC'], cc) - master_ninja.variable('cc', CommandWithWrapper('CC', wrappers, cc)) - cxx = GetEnvironFallback(['CXX_target', 'CXX'], cxx) - master_ninja.variable('cxx', CommandWithWrapper('CXX', wrappers, cxx)) - - if flavor == 'win': - master_ninja.variable('ld', ld) - master_ninja.variable('idl', 'midl.exe') - master_ninja.variable('ar', ar) - master_ninja.variable('rc', 'rc.exe') - master_ninja.variable('ml_x86', 'ml.exe') - master_ninja.variable('ml_x64', 'ml64.exe') - master_ninja.variable('mt', 'mt.exe') - else: - master_ninja.variable('ld', CommandWithWrapper('LINK', wrappers, ld)) - master_ninja.variable('ldxx', CommandWithWrapper('LINK', wrappers, ldxx)) - master_ninja.variable('ar', GetEnvironFallback(['AR_target', 'AR'], ar)) - if flavor != 'mac': - # Mac does not use readelf/nm for .TOC generation, so avoid polluting - # the master ninja with extra unused variables. - master_ninja.variable( - 'nm', GetEnvironFallback(['NM_target', 'NM'], nm)) - master_ninja.variable( - 'readelf', GetEnvironFallback(['READELF_target', 'READELF'], readelf)) - - if generator_supports_multiple_toolsets: - if not cc_host: - cc_host = cc - if not cxx_host: - cxx_host = cxx - - master_ninja.variable('ar_host', GetEnvironFallback(['AR_host'], ar_host)) - master_ninja.variable('nm_host', GetEnvironFallback(['NM_host'], nm_host)) - master_ninja.variable('readelf_host', - GetEnvironFallback(['READELF_host'], readelf_host)) - cc_host = GetEnvironFallback(['CC_host'], cc_host) - cxx_host = GetEnvironFallback(['CXX_host'], cxx_host) - - # Environment variables may be referenced in 'make_global_settings', like - # ['CC.host', '$(CC)'] or ['CXX.host', '$(CXX)']; expand such references here. - if '$(CC)' in cc_host and cc_host_global_setting: - cc_host = cc_host_global_setting.replace('$(CC)', cc) - if '$(CXX)' in cxx_host and cxx_host_global_setting: - cxx_host = cxx_host_global_setting.replace('$(CXX)', cxx) - master_ninja.variable('cc_host', - CommandWithWrapper('CC.host', wrappers, cc_host)) - master_ninja.variable('cxx_host', - CommandWithWrapper('CXX.host', wrappers, cxx_host)) - if flavor == 'win': - master_ninja.variable('ld_host', ld_host) - master_ninja.variable('ldxx_host', ldxx_host) - else: - master_ninja.variable('ld_host', CommandWithWrapper( - 'LINK', wrappers, ld_host)) - master_ninja.variable('ldxx_host', CommandWithWrapper( - 'LINK', wrappers, ldxx_host)) - - master_ninja.newline() - - master_ninja.pool('link_pool', depth=GetDefaultConcurrentLinks()) - master_ninja.newline() - - deps = 'msvc' if flavor == 'win' else 'gcc' - - if flavor != 'win': - master_ninja.rule( - 'cc', - description='CC $out', - command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c ' - '$cflags_pch_c -c $in -o $out'), - depfile='$out.d', - deps=deps) - master_ninja.rule( - 'cc_s', - description='CC $out', - command=('$cc $defines $includes $cflags $cflags_c ' - '$cflags_pch_c -c $in -o $out')) - master_ninja.rule( - 'cxx', - description='CXX $out', - command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc ' - '$cflags_pch_cc -c $in -o $out'), - depfile='$out.d', - deps=deps) - else: - # TODO(scottmg) Separate pdb names is a test to see if it works around - # http://crbug.com/142362. It seems there's a race between the creation of - # the .pdb by the precompiled header step for .cc and the compilation of - # .c files.
This should be handled by mspdbsrv, but rarely errors out with - # c1xx : fatal error C1033: cannot open program database - # By making the rules target separate pdb files this might be avoided. - cc_command = ('ninja -t msvc -e $arch ' + - '-- ' - '$cc /nologo /showIncludes /FC ' - '@$out.rsp /c $in /Fo$out /Fd$pdbname_c ') - cxx_command = ('ninja -t msvc -e $arch ' + - '-- ' - '$cxx /nologo /showIncludes /FC ' - '@$out.rsp /c $in /Fo$out /Fd$pdbname_cc ') - master_ninja.rule( - 'cc', - description='CC $out', - command=cc_command, - rspfile='$out.rsp', - rspfile_content='$defines $includes $cflags $cflags_c', - deps=deps) - master_ninja.rule( - 'cxx', - description='CXX $out', - command=cxx_command, - rspfile='$out.rsp', - rspfile_content='$defines $includes $cflags $cflags_cc', - deps=deps) - master_ninja.rule( - 'idl', - description='IDL $in', - command=('%s gyp-win-tool midl-wrapper $arch $outdir ' - '$tlb $h $dlldata $iid $proxy $in ' - '$midl_includes $idlflags' % sys.executable)) - master_ninja.rule( - 'rc', - description='RC $in', - # Note: $in must be last otherwise rc.exe complains. - command=('%s gyp-win-tool rc-wrapper ' - '$arch $rc $defines $resource_includes $rcflags /fo$out $in' % - sys.executable)) - master_ninja.rule( - 'asm', - description='ASM $out', - command=('%s gyp-win-tool asm-wrapper ' - '$arch $asm $defines $includes $asmflags /c /Fo $out $in' % - sys.executable)) - - if flavor != 'mac' and flavor != 'win': - master_ninja.rule( - 'alink', - description='AR $out', - command='rm -f $out && $ar rcs $arflags $out $in') - master_ninja.rule( - 'alink_thin', - description='AR $out', - command='rm -f $out && $ar rcsT $arflags $out $in') - - # This allows targets that only need to depend on $lib's API to declare an - # order-only dependency on $lib.TOC and avoid relinking such downstream - # dependencies when $lib changes only in non-public ways. - # The resulting string leaves an uninterpolated %{suffix} which - # is used in the final substitution below. - mtime_preserving_solink_base = ( - 'if [ ! -e $lib -o ! -e $lib.TOC ]; then ' - '%(solink)s && %(extract_toc)s > $lib.TOC; else ' - '%(solink)s && %(extract_toc)s > $lib.tmp && ' - 'if ! 
cmp -s $lib.tmp $lib.TOC; then mv $lib.tmp $lib.TOC ; ' - 'fi; fi' - % { 'solink': - '$ld -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s', - 'extract_toc': - ('{ $readelf -d $lib | grep SONAME ; ' - '$nm -gD -f p $lib | cut -f1-2 -d\' \'; }')}) - - master_ninja.rule( - 'solink', - description='SOLINK $lib', - restat=True, - command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'}, - rspfile='$link_file_list', - rspfile_content= - '-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive $libs', - pool='link_pool') - master_ninja.rule( - 'solink_module', - description='SOLINK(module) $lib', - restat=True, - command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'}, - rspfile='$link_file_list', - rspfile_content='-Wl,--start-group $in $solibs $libs -Wl,--end-group', - pool='link_pool') - master_ninja.rule( - 'link', - description='LINK $out', - command=('$ld $ldflags -o $out ' - '-Wl,--start-group $in $solibs $libs -Wl,--end-group'), - pool='link_pool') - elif flavor == 'win': - master_ninja.rule( - 'alink', - description='LIB $out', - command=('%s gyp-win-tool link-wrapper $arch False ' - '$ar /nologo /ignore:4221 /OUT:$out @$out.rsp' % - sys.executable), - rspfile='$out.rsp', - rspfile_content='$in_newline $libflags') - _AddWinLinkRules(master_ninja, embed_manifest=True) - _AddWinLinkRules(master_ninja, embed_manifest=False) - else: - master_ninja.rule( - 'objc', - description='OBJC $out', - command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_objc ' - '$cflags_pch_objc -c $in -o $out'), - depfile='$out.d', - deps=deps) - master_ninja.rule( - 'objcxx', - description='OBJCXX $out', - command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_objcc ' - '$cflags_pch_objcc -c $in -o $out'), - depfile='$out.d', - deps=deps) - master_ninja.rule( - 'alink', - description='LIBTOOL-STATIC $out, POSTBUILDS', - command='rm -f $out && ' - './gyp-mac-tool filter-libtool libtool $libtool_flags ' - '-static -o $out $in' - '$postbuilds') - master_ninja.rule( - 'lipo', - description='LIPO $out, POSTBUILDS', - command='rm -f $out && lipo -create $in -output $out$postbuilds') - master_ninja.rule( - 'solipo', - description='SOLIPO $out, POSTBUILDS', - command=( - 'rm -f $lib $lib.TOC && lipo -create $in -output $lib$postbuilds &&' - '%(extract_toc)s > $lib.TOC' - % { 'extract_toc': - '{ otool -l $lib | grep LC_ID_DYLIB -A 5; ' - 'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'})) - - - # Record the public interface of $lib in $lib.TOC. See the corresponding - # comment in the posix section above for details. - solink_base = '$ld %(type)s $ldflags -o $lib %(suffix)s' - mtime_preserving_solink_base = ( - 'if [ ! -e $lib -o ! -e $lib.TOC ] || ' - # Always force dependent targets to relink if this library - # reexports something. Handling this correctly would require - # recursive TOC dumping but this is rare in practice, so punt. - 'otool -l $lib | grep -q LC_REEXPORT_DYLIB ; then ' - '%(solink)s && %(extract_toc)s > $lib.TOC; ' - 'else ' - '%(solink)s && %(extract_toc)s > $lib.tmp && ' - 'if ! 
cmp -s $lib.tmp $lib.TOC; then ' - 'mv $lib.tmp $lib.TOC ; ' - 'fi; ' - 'fi' - % { 'solink': solink_base, - 'extract_toc': - '{ otool -l $lib | grep LC_ID_DYLIB -A 5; ' - 'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'}) - - - solink_suffix = '@$link_file_list$postbuilds' - master_ninja.rule( - 'solink', - description='SOLINK $lib, POSTBUILDS', - restat=True, - command=mtime_preserving_solink_base % {'suffix': solink_suffix, - 'type': '-shared'}, - rspfile='$link_file_list', - rspfile_content='$in $solibs $libs', - pool='link_pool') - master_ninja.rule( - 'solink_notoc', - description='SOLINK $lib, POSTBUILDS', - restat=True, - command=solink_base % {'suffix':solink_suffix, 'type': '-shared'}, - rspfile='$link_file_list', - rspfile_content='$in $solibs $libs', - pool='link_pool') - - master_ninja.rule( - 'solink_module', - description='SOLINK(module) $lib, POSTBUILDS', - restat=True, - command=mtime_preserving_solink_base % {'suffix': solink_suffix, - 'type': '-bundle'}, - rspfile='$link_file_list', - rspfile_content='$in $solibs $libs', - pool='link_pool') - master_ninja.rule( - 'solink_module_notoc', - description='SOLINK(module) $lib, POSTBUILDS', - restat=True, - command=solink_base % {'suffix': solink_suffix, 'type': '-bundle'}, - rspfile='$link_file_list', - rspfile_content='$in $solibs $libs', - pool='link_pool') - - master_ninja.rule( - 'link', - description='LINK $out, POSTBUILDS', - command=('$ld $ldflags -o $out ' - '$in $solibs $libs$postbuilds'), - pool='link_pool') - master_ninja.rule( - 'preprocess_infoplist', - description='PREPROCESS INFOPLIST $out', - command=('$cc -E -P -Wno-trigraphs -x c $defines $in -o $out && ' - 'plutil -convert xml1 $out $out')) - master_ninja.rule( - 'copy_infoplist', - description='COPY INFOPLIST $in', - command='$env ./gyp-mac-tool copy-info-plist $in $out $binary $keys') - master_ninja.rule( - 'merge_infoplist', - description='MERGE INFOPLISTS $in', - command='$env ./gyp-mac-tool merge-info-plist $out $in') - master_ninja.rule( - 'compile_xcassets', - description='COMPILE XCASSETS $in', - command='$env ./gyp-mac-tool compile-xcassets $keys $in') - master_ninja.rule( - 'mac_tool', - description='MACTOOL $mactool_cmd $in', - command='$env ./gyp-mac-tool $mactool_cmd $in $out $binary') - master_ninja.rule( - 'package_framework', - description='PACKAGE FRAMEWORK $out, POSTBUILDS', - command='./gyp-mac-tool package-framework $out $version$postbuilds ' - '&& touch $out') - if flavor == 'win': - master_ninja.rule( - 'stamp', - description='STAMP $out', - command='%s gyp-win-tool stamp $out' % sys.executable) - else: - master_ninja.rule( - 'stamp', - description='STAMP $out', - command='${postbuilds}touch $out') - if flavor == 'win': - master_ninja.rule( - 'copy', - description='COPY $in $out', - command='%s gyp-win-tool recursive-mirror $in $out' % sys.executable) - elif flavor == 'zos': - master_ninja.rule( - 'copy', - description='COPY $in $out', - command='rm -rf $out && cp -fRP $in $out') - else: - master_ninja.rule( - 'copy', - description='COPY $in $out', - command='rm -rf $out && cp -af $in $out') - master_ninja.newline() - - all_targets = set() - for build_file in params['build_files']: - for target in gyp.common.AllTargets(target_list, - target_dicts, - os.path.normpath(build_file)): - all_targets.add(target) - all_outputs = set() - - # target_outputs is a map from qualified target name to a Target object. - target_outputs = {} - # target_short_names is a map from target short name to a list of Target - # objects. 
- target_short_names = {} - - # Short names of targets that were skipped because they didn't contain anything - # interesting. - # NOTE: there may be overlap between this and non_empty_target_names. - empty_target_names = set() - - # Set of non-empty short target names. - # NOTE: there may be overlap between this and empty_target_names. - non_empty_target_names = set() - - for qualified_target in target_list: - # qualified_target is like: third_party/icu/icu.gyp:icui18n#target - build_file, name, toolset = \ - gyp.common.ParseQualifiedTarget(qualified_target) - - this_make_global_settings = data[build_file].get('make_global_settings', []) - assert make_global_settings == this_make_global_settings, ( - "make_global_settings needs to be the same for all targets. %s vs. %s" % - (this_make_global_settings, make_global_settings)) - - spec = target_dicts[qualified_target] - if flavor == 'mac': - gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec) - - # If build_file is a symlink, we must not follow it because there's a chance - # it could point to a path above toplevel_dir, and we cannot correctly deal - # with that case at the moment. - build_file = gyp.common.RelativePath(build_file, options.toplevel_dir, - False) - - qualified_target_for_hash = gyp.common.QualifiedTarget(build_file, name, - toolset) - hash_for_rules = hashlib.md5(qualified_target_for_hash).hexdigest() - - base_path = os.path.dirname(build_file) - obj = 'obj' - if toolset != 'target': - obj += '.' + toolset - output_file = os.path.join(obj, base_path, name + '.ninja') - - ninja_output = StringIO() - writer = NinjaWriter(hash_for_rules, target_outputs, base_path, build_dir, - ninja_output, - toplevel_build, output_file, - flavor, toplevel_dir=options.toplevel_dir) - - target = writer.WriteSpec(spec, config_name, generator_flags) - - if ninja_output.tell() > 0: - # Only create files for ninja files that actually have contents. - with OpenOutput(os.path.join(toplevel_build, output_file)) as ninja_file: - ninja_file.write(ninja_output.getvalue()) - ninja_output.close() - master_ninja.subninja(output_file) - - if target: - if name != target.FinalOutput() and spec['toolset'] == 'target': - target_short_names.setdefault(name, []).append(target) - target_outputs[qualified_target] = target - if qualified_target in all_targets: - all_outputs.add(target.FinalOutput()) - non_empty_target_names.add(name) - else: - empty_target_names.add(name) - - if target_short_names: - # Write a short name to build this target. This benefits both the - # "build chrome" case as well as the gyp tests, which expect to be - # able to run actions and build libraries by their short name. - master_ninja.newline() - master_ninja.comment('Short names for targets.') - for short_name in target_short_names: - master_ninja.build(short_name, 'phony', [x.FinalOutput() for x in - target_short_names[short_name]]) - - # Write phony targets for any empty targets that weren't written yet. As - # short names are not necessarily unique, only do this for short names that - # haven't already been output for another target.
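# NOTE: illustrative example with hypothetical target names. The set
# difference below keeps only short names that never produced any output:
#   empty_target_names = {'docs', 'tests'}, non_empty_target_names = {'tests'}
#   -> a phony edge is written for 'docs' only.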
- empty_target_names = empty_target_names - non_empty_target_names - if empty_target_names: - master_ninja.newline() - master_ninja.comment('Empty targets (output for completeness).') - for name in sorted(empty_target_names): - master_ninja.build(name, 'phony') - - if all_outputs: - master_ninja.newline() - master_ninja.build('all', 'phony', list(all_outputs)) - master_ninja.default(generator_flags.get('default_target', 'all')) - - master_ninja_file.close() - - -def PerformBuild(data, configurations, params): - options = params['options'] - for config in configurations: - builddir = os.path.join(options.toplevel_dir, 'out', config) - arguments = ['ninja', '-C', builddir] - print('Building [%s]: %s' % (config, arguments)) - subprocess.check_call(arguments) - - -def CallGenerateOutputForConfig(arglist): - # Ignore the interrupt signal so that the parent process catches it and - # kills all multiprocessing children. - signal.signal(signal.SIGINT, signal.SIG_IGN) - - (target_list, target_dicts, data, params, config_name) = arglist - GenerateOutputForConfig(target_list, target_dicts, data, params, config_name) - - -def GenerateOutput(target_list, target_dicts, data, params): - # Update target_dicts for iOS device builds. - target_dicts = gyp.xcode_emulation.CloneConfigurationForDeviceAndEmulator( - target_dicts) - - user_config = params.get('generator_flags', {}).get('config', None) - if gyp.common.GetFlavor(params) == 'win': - target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts) - target_list, target_dicts = MSVSUtil.InsertLargePdbShims( - target_list, target_dicts, generator_default_variables) - - if user_config: - GenerateOutputForConfig(target_list, target_dicts, data, params, - user_config) - else: - config_names = target_dicts[target_list[0]]['configurations'].keys() - if params['parallel']: - try: - pool = multiprocessing.Pool(len(config_names)) - arglists = [] - for config_name in config_names: - arglists.append( - (target_list, target_dicts, data, params, config_name)) - pool.map(CallGenerateOutputForConfig, arglists) - except KeyboardInterrupt as e: - pool.terminate() - raise e - else: - for config_name in config_names: - GenerateOutputForConfig(target_list, target_dicts, data, params, - config_name) diff --git a/gyp/pylib/gyp/generator/ninja_test.py b/gyp/pylib/gyp/generator/ninja_test.py deleted file mode 100644 index 1ad68e4fc9..0000000000 --- a/gyp/pylib/gyp/generator/ninja_test.py +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" Unit tests for the ninja.py file. """ - -import gyp.generator.ninja as ninja -import unittest -import sys -import TestCommon - - -class TestPrefixesAndSuffixes(unittest.TestCase): - def test_BinaryNamesWindows(self): - # These cannot run on non-Windows as they require a VS installation to - # correctly handle variable expansion. - if sys.platform.startswith('win'): - writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.', - 'build.ninja', 'win') - spec = { 'target_name': 'wee' } - self.assertTrue(writer.ComputeOutputFileName(spec, 'executable'). - endswith('.exe')) - self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library'). - endswith('.dll')) - self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library'). 
- endswith('.lib')) - - def test_BinaryNamesLinux(self): - writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.', - 'build.ninja', 'linux') - spec = { 'target_name': 'wee' } - self.assertTrue('.' not in writer.ComputeOutputFileName(spec, - 'executable')) - self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library'). - startswith('lib')) - self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library'). - startswith('lib')) - self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library'). - endswith('.so')) - self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library'). - endswith('.a')) - -if __name__ == '__main__': - unittest.main() diff --git a/gyp/pylib/gyp/ordered_dict.py b/gyp/pylib/gyp/ordered_dict.py deleted file mode 100644 index 6fe9c1f6c7..0000000000 --- a/gyp/pylib/gyp/ordered_dict.py +++ /dev/null @@ -1,289 +0,0 @@ -# Unmodified from http://code.activestate.com/recipes/576693/ -# other than to add MIT license header (as specified on page, but not in code). -# Linked from Python documentation here: -# http://docs.python.org/2/library/collections.html#collections.OrderedDict -# -# This should be deleted once Py2.7 is available on all bots, see -# http://crbug.com/241769. -# -# Copyright (c) 2009 Raymond Hettinger. -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. - -# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. -# Passes Python2.7's test suite and incorporates all the latest updates. - -try: - from thread import get_ident as _get_ident -except ImportError: - from dummy_thread import get_ident as _get_ident - -try: - from _abcoll import KeysView, ValuesView, ItemsView -except ImportError: - pass - - -class OrderedDict(dict): - 'Dictionary that remembers insertion order' - # An inherited dict maps keys to values. - # The inherited dict provides __getitem__, __len__, __contains__, and get. - # The remaining methods are order-aware. - # Big-O running times for all methods are the same as for regular dictionaries. - - # The internal self.__map dictionary maps keys to links in a doubly linked list. - # The circular doubly linked list starts and ends with a sentinel element. - # The sentinel element never gets deleted (this simplifies the algorithm). - # Each link is stored as a list of length three: [PREV, NEXT, KEY]. - - def __init__(self, *args, **kwds): - '''Initialize an ordered dictionary. 
Signature is the same as for
-        regular dictionaries, but keyword arguments are not recommended
-        because their insertion order is arbitrary.
-
-        '''
-        if len(args) > 1:
-            raise TypeError('expected at most 1 arguments, got %d' % len(args))
-        try:
-            self.__root
-        except AttributeError:
-            self.__root = root = []  # sentinel node
-            root[:] = [root, root, None]
-            self.__map = {}
-        self.__update(*args, **kwds)
-
-    def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
-        'od.__setitem__(i, y) <==> od[i]=y'
-        # Setting a new item creates a new link which goes at the end of the linked
-        # list, and the inherited dictionary is updated with the new key/value pair.
-        if key not in self:
-            root = self.__root
-            last = root[0]
-            last[1] = root[0] = self.__map[key] = [last, root, key]
-        dict_setitem(self, key, value)
-
-    def __delitem__(self, key, dict_delitem=dict.__delitem__):
-        'od.__delitem__(y) <==> del od[y]'
-        # Deleting an existing item uses self.__map to find the link which is
-        # then removed by updating the links in the predecessor and successor nodes.
-        dict_delitem(self, key)
-        link_prev, link_next, key = self.__map.pop(key)
-        link_prev[1] = link_next
-        link_next[0] = link_prev
-
-    def __iter__(self):
-        'od.__iter__() <==> iter(od)'
-        root = self.__root
-        curr = root[1]
-        while curr is not root:
-            yield curr[2]
-            curr = curr[1]
-
-    def __reversed__(self):
-        'od.__reversed__() <==> reversed(od)'
-        root = self.__root
-        curr = root[0]
-        while curr is not root:
-            yield curr[2]
-            curr = curr[0]
-
-    def clear(self):
-        'od.clear() -> None. Remove all items from od.'
-        try:
-            for node in self.__map.itervalues():
-                del node[:]
-            root = self.__root
-            root[:] = [root, root, None]
-            self.__map.clear()
-        except AttributeError:
-            pass
-        dict.clear(self)
-
-    def popitem(self, last=True):
-        '''od.popitem() -> (k, v), return and remove a (key, value) pair.
-        Pairs are returned in LIFO order if last is true or FIFO order if false.
-
-        '''
-        if not self:
-            raise KeyError('dictionary is empty')
-        root = self.__root
-        if last:
-            link = root[0]
-            link_prev = link[0]
-            link_prev[1] = root
-            root[0] = link_prev
-        else:
-            link = root[1]
-            link_next = link[1]
-            root[1] = link_next
-            link_next[0] = root
-        key = link[2]
-        del self.__map[key]
-        value = dict.pop(self, key)
-        return key, value
-
-    # -- the following methods do not depend on the internal structure --
-
-    def keys(self):
-        'od.keys() -> list of keys in od'
-        return list(self)
-
-    def values(self):
-        'od.values() -> list of values in od'
-        return [self[key] for key in self]
-
-    def items(self):
-        'od.items() -> list of (key, value) pairs in od'
-        return [(key, self[key]) for key in self]
-
-    def iterkeys(self):
-        'od.iterkeys() -> an iterator over the keys in od'
-        return iter(self)
-
-    def itervalues(self):
-        'od.itervalues -> an iterator over the values in od'
-        for k in self:
-            yield self[k]
-
-    def iteritems(self):
-        'od.iteritems -> an iterator over the (key, value) items in od'
-        for k in self:
-            yield (k, self[k])
-
-    # Suppress 'OrderedDict.update: Method has no argument':
-    # pylint: disable=E0211
-    def update(*args, **kwds):
-        '''od.update(E, **F) -> None. Update od from dict/iterable E and F.
- - If E is a dict instance, does: for k in E: od[k] = E[k] - If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] - Or if E is an iterable of items, does: for k, v in E: od[k] = v - In either case, this is followed by: for k, v in F.items(): od[k] = v - - ''' - if len(args) > 2: - raise TypeError('update() takes at most 2 positional ' - 'arguments (%d given)' % (len(args),)) - elif not args: - raise TypeError('update() takes at least 1 argument (0 given)') - self = args[0] - # Make progressively weaker assumptions about "other" - other = () - if len(args) == 2: - other = args[1] - if isinstance(other, dict): - for key in other: - self[key] = other[key] - elif hasattr(other, 'keys'): - for key in other.keys(): - self[key] = other[key] - else: - for key, value in other: - self[key] = value - for key, value in kwds.items(): - self[key] = value - - __update = update # let subclasses override update without breaking __init__ - - __marker = object() - - def pop(self, key, default=__marker): - '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. - If key is not found, d is returned if given, otherwise KeyError is raised. - - ''' - if key in self: - result = self[key] - del self[key] - return result - if default is self.__marker: - raise KeyError(key) - return default - - def setdefault(self, key, default=None): - 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' - if key in self: - return self[key] - self[key] = default - return default - - def __repr__(self, _repr_running={}): - 'od.__repr__() <==> repr(od)' - call_key = id(self), _get_ident() - if call_key in _repr_running: - return '...' - _repr_running[call_key] = 1 - try: - if not self: - return '%s()' % (self.__class__.__name__,) - return '%s(%r)' % (self.__class__.__name__, self.items()) - finally: - del _repr_running[call_key] - - def __reduce__(self): - 'Return state information for pickling' - items = [[k, self[k]] for k in self] - inst_dict = vars(self).copy() - for k in vars(OrderedDict()): - inst_dict.pop(k, None) - if inst_dict: - return (self.__class__, (items,), inst_dict) - return self.__class__, (items,) - - def copy(self): - 'od.copy() -> a shallow copy of od' - return self.__class__(self) - - @classmethod - def fromkeys(cls, iterable, value=None): - '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S - and values equal to v (which defaults to None). - - ''' - d = cls() - for key in iterable: - d[key] = value - return d - - def __eq__(self, other): - '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive - while comparison to a regular mapping is order-insensitive. - - ''' - if isinstance(other, OrderedDict): - return len(self)==len(other) and self.items() == other.items() - return dict.__eq__(self, other) - - def __ne__(self, other): - return not self == other - - # -- the following methods are only used in Python 2.7 -- - - def viewkeys(self): - "od.viewkeys() -> a set-like object providing a view on od's keys" - return KeysView(self) - - def viewvalues(self): - "od.viewvalues() -> an object providing a view on od's values" - return ValuesView(self) - - def viewitems(self): - "od.viewitems() -> a set-like object providing a view on od's items" - return ItemsView(self) - diff --git a/gyp/pylib/gyp/xml_fix.py b/gyp/pylib/gyp/xml_fix.py deleted file mode 100644 index 5de848158d..0000000000 --- a/gyp/pylib/gyp/xml_fix.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright (c) 2011 Google Inc. All rights reserved. 
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Applies a fix to CR LF TAB handling in xml.dom.
-
-Fixes this: http://code.google.com/p/chromium/issues/detail?id=76293
-Working around this: http://bugs.python.org/issue5752
-TODO(bradnelson): Consider dropping this when we drop XP support.
-"""
-
-
-import xml.dom.minidom
-
-
-def _Replacement_write_data(writer, data, is_attrib=False):
-  """Writes datachars to writer."""
-  data = data.replace("&", "&amp;").replace("<", "&lt;")
-  data = data.replace("\"", "&quot;").replace(">", "&gt;")
-  if is_attrib:
-    data = data.replace(
-        "\r", "&#xD;").replace(
-        "\n", "&#xA;").replace(
-        "\t", "&#x9;")
-  writer.write(data)
-
-
-def _Replacement_writexml(self, writer, indent="", addindent="", newl=""):
-  # indent = current indentation
-  # addindent = indentation to add to higher levels
-  # newl = newline string
-  writer.write(indent + "<" + self.tagName)
-
-  attrs = self._get_attributes()
-  a_names = attrs.keys()
-  a_names.sort()
-
-  for a_name in a_names:
-    writer.write(" %s=\"" % a_name)
-    _Replacement_write_data(writer, attrs[a_name].value, is_attrib=True)
-    writer.write("\"")
-  if self.childNodes:
-    writer.write(">%s" % newl)
-    for node in self.childNodes:
-      node.writexml(writer, indent + addindent, addindent, newl)
-    writer.write("%s</%s>%s" % (indent, self.tagName, newl))
-  else:
-    writer.write("/>%s" % newl)
-
-
-class XmlFix(object):
-  """Object to manage temporary patching of xml.dom.minidom."""
-
-  def __init__(self):
-    # Preserve current xml.dom.minidom functions.
-    self.write_data = xml.dom.minidom._write_data
-    self.writexml = xml.dom.minidom.Element.writexml
-    # Inject replacement versions of a function and a method.
-    xml.dom.minidom._write_data = _Replacement_write_data
-    xml.dom.minidom.Element.writexml = _Replacement_writexml
-
-  def Cleanup(self):
-    if self.write_data:
-      xml.dom.minidom._write_data = self.write_data
-      xml.dom.minidom.Element.writexml = self.writexml
-      self.write_data = None
-
-  def __del__(self):
-    self.Cleanup()
diff --git a/gyp/samples/samples b/gyp/samples/samples
deleted file mode 100755
index 804b618998..0000000000
--- a/gyp/samples/samples
+++ /dev/null
@@ -1,81 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
- -import os.path -import shutil -import sys - - -gyps = [ - 'app/app.gyp', - 'base/base.gyp', - 'build/temp_gyp/googleurl.gyp', - 'build/all.gyp', - 'build/common.gypi', - 'build/external_code.gypi', - 'chrome/test/security_tests/security_tests.gyp', - 'chrome/third_party/hunspell/hunspell.gyp', - 'chrome/chrome.gyp', - 'media/media.gyp', - 'net/net.gyp', - 'printing/printing.gyp', - 'sdch/sdch.gyp', - 'skia/skia.gyp', - 'testing/gmock.gyp', - 'testing/gtest.gyp', - 'third_party/bzip2/bzip2.gyp', - 'third_party/icu38/icu38.gyp', - 'third_party/libevent/libevent.gyp', - 'third_party/libjpeg/libjpeg.gyp', - 'third_party/libpng/libpng.gyp', - 'third_party/libxml/libxml.gyp', - 'third_party/libxslt/libxslt.gyp', - 'third_party/lzma_sdk/lzma_sdk.gyp', - 'third_party/modp_b64/modp_b64.gyp', - 'third_party/npapi/npapi.gyp', - 'third_party/sqlite/sqlite.gyp', - 'third_party/zlib/zlib.gyp', - 'v8/tools/gyp/v8.gyp', - 'webkit/activex_shim/activex_shim.gyp', - 'webkit/activex_shim_dll/activex_shim_dll.gyp', - 'webkit/build/action_csspropertynames.py', - 'webkit/build/action_cssvaluekeywords.py', - 'webkit/build/action_jsconfig.py', - 'webkit/build/action_makenames.py', - 'webkit/build/action_maketokenizer.py', - 'webkit/build/action_useragentstylesheets.py', - 'webkit/build/rule_binding.py', - 'webkit/build/rule_bison.py', - 'webkit/build/rule_gperf.py', - 'webkit/tools/test_shell/test_shell.gyp', - 'webkit/webkit.gyp', -] - - -def Main(argv): - if len(argv) != 3 or argv[1] not in ['push', 'pull']: - print 'Usage: %s push/pull PATH_TO_CHROME' % argv[0] - return 1 - - path_to_chrome = argv[2] - - for g in gyps: - chrome_file = os.path.join(path_to_chrome, g) - local_file = os.path.join(os.path.dirname(argv[0]), os.path.split(g)[1]) - if argv[1] == 'push': - print 'Copying %s to %s' % (local_file, chrome_file) - shutil.copyfile(local_file, chrome_file) - elif argv[1] == 'pull': - print 'Copying %s to %s' % (chrome_file, local_file) - shutil.copyfile(chrome_file, local_file) - else: - assert False - - return 0 - - -if __name__ == '__main__': - sys.exit(Main(sys.argv)) diff --git a/gyp/samples/samples.bat b/gyp/samples/samples.bat deleted file mode 100644 index 778d9c90f0..0000000000 --- a/gyp/samples/samples.bat +++ /dev/null @@ -1,5 +0,0 @@ -@rem Copyright (c) 2009 Google Inc. All rights reserved. -@rem Use of this source code is governed by a BSD-style license that can be -@rem found in the LICENSE file. - -@python %~dp0/samples %* diff --git a/gyp/setup.py b/gyp/setup.py deleted file mode 100755 index 75a42558d8..0000000000 --- a/gyp/setup.py +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
-
-from setuptools import setup
-
-setup(
-  name='gyp',
-  version='0.1',
-  description='Generate Your Projects',
-  author='Chromium Authors',
-  author_email='chromium-dev@googlegroups.com',
-  url='http://code.google.com/p/gyp',
-  package_dir = {'': 'pylib'},
-  packages=['gyp', 'gyp.generator'],
-  entry_points = {'console_scripts': ['gyp=gyp:script_main'] }
-)
diff --git a/gyp/tools/graphviz.py b/gyp/tools/graphviz.py
index 538b059da4..8453c18ecc 100755
--- a/gyp/tools/graphviz.py
+++ b/gyp/tools/graphviz.py
@@ -21,7 +21,7 @@ def ParseTarget(target):
   return filename, target, suffix
 
 
-def LoadEdges(filename, targets):
+def LoadEdges(targets):
   """Load the edges map from the dump file, and filter it to only
   show targets in |targets| and their dependents."""
 
@@ -92,7 +92,7 @@ def main():
     print('usage: %s target1 target2...' % (sys.argv[0]), file=sys.stderr)
     return 1
 
-  edges = LoadEdges('dump.json', sys.argv[1:])
+  edges = LoadEdges(sys.argv[1:])
 
   WriteGraph(edges)
   return 0
diff --git a/gyp/tools/pretty_gyp.py b/gyp/tools/pretty_gyp.py
index d01c692edc..d32e70f3b2 100755
--- a/gyp/tools/pretty_gyp.py
+++ b/gyp/tools/pretty_gyp.py
@@ -118,26 +118,23 @@ def prettyprint_input(lines):
   """Does the main work of indenting the input based on the brace counts."""
   indent = 0
   basic_offset = 2
-  last_line = ""
   for line in lines:
-    if COMMENT_RE.match(line):
-      print(line)
-    else:
-      line = line.strip('\r\n\t ')  # Otherwise doesn't strip \r on Unix.
-      if len(line) > 0:
+    line = line.strip('\r\n\t ')  # Otherwise doesn't strip \r on Unix.
+    if len(line) > 0:
+      brace_diff = 0
+      if not COMMENT_RE.match(line):
         (brace_diff, after) = count_braces(line)
-        if brace_diff != 0:
-          if after:
-            print(" " * (basic_offset * indent) + line)
-            indent += brace_diff
-          else:
-            indent += brace_diff
-            print(" " * (basic_offset * indent) + line)
+      if brace_diff != 0:
+        if after:
+          print(" " * (basic_offset * indent) + line)
+          indent += brace_diff
         else:
+          indent += brace_diff
           print(" " * (basic_offset * indent) + line)
       else:
-        print("")
-        last_line = line
+        print(" " * (basic_offset * indent) + line)
+      else:
+        print("")
 
 
 def main():
diff --git a/gyp/tools/pretty_sln.py b/gyp/tools/pretty_sln.py
index 196566fb9e..e981fca8f3 100755
--- a/gyp/tools/pretty_sln.py
+++ b/gyp/tools/pretty_sln.py
@@ -14,12 +14,12 @@
 
 from __future__ import print_function
 
+__author__ = 'nsylvain (Nicolas Sylvain)'
+
 import os
 import re
 import sys
 
-import pretty_vcproj
-
-__author__ = 'nsylvain (Nicolas Sylvain)'
+from . import pretty_vcproj
 
 
 def BuildProject(project, built, projects, deps):
   # if all dependencies are done, we can build it, otherwise we try to build the
diff --git a/gyp/tools/pretty_vcproj.py b/gyp/tools/pretty_vcproj.py
index e1ec7fee76..3d5262065b 100755
--- a/gyp/tools/pretty_vcproj.py
+++ b/gyp/tools/pretty_vcproj.py
@@ -14,56 +14,38 @@
 
 from __future__ import print_function
 
+__author__ = 'nsylvain (Nicolas Sylvain)'
+
 import os
 import sys
 
-from xml.dom.minidom import parse
-from xml.dom.minidom import Node
-
-__author__ = 'nsylvain (Nicolas Sylvain)'
+from xml.dom.minidom import parse, Node
 
-try:
-  cmp
-except NameError:
-  def cmp(x, y):
-    return (x > y) - (x < y)
 
-REPLACEMENTS = dict()
+REPLACEMENTS = {}
 ARGUMENTS = None
 
 
-class CmpTuple(object):
-  """Compare function between 2 tuple."""
-  def __call__(self, x, y):
-    return cmp(x[0], y[0])
-
-
-class CmpNode(object):
-  """Compare function between 2 xml nodes."""
-
-  def __call__(self, x, y):
-    def get_string(node):
-      node_string = "node"
-      node_string += node.nodeName
-      if node.nodeValue:
-        node_string += node.nodeValue
+def get_node_string(node):
+  node_string = "node"
+  node_string += node.nodeName
+  if node.nodeValue:
+    node_string += node.nodeValue
 
-      if node.attributes:
-        # We first sort by name, if present.
-        node_string += node.getAttribute("Name")
+  if node.attributes:
+    # We first sort by name, if present.
+    node_string += node.getAttribute("Name")
 
-        all_nodes = []
-        for (name, value) in node.attributes.items():
-          all_nodes.append((name, value))
+    all_nodes = []
+    for (name, value) in node.attributes.items():
+      all_nodes.append((name, value))
 
-        all_nodes.sort(CmpTuple())
-        for (name, value) in all_nodes:
-          node_string += name
-          node_string += value
+    all_nodes.sort(key=lambda x: x[0])
+    for (name, value) in all_nodes:
+      node_string += name
+      node_string += value
 
-      return node_string
+  return node_string
 
-    return cmp(get_string(x), get_string(y))
 
 
 def PrettyPrintNode(node, indent=0):
@@ -88,7 +70,7 @@ def PrettyPrintNode(node, indent=0):
     all_attributes = []
     for (name, value) in node.attributes.items():
       all_attributes.append((name, value))
-    all_attributes.sort(CmpTuple())
+    all_attributes.sort(key=(lambda attr: attr[0]))
     for (name, value) in all_attributes:
       print('%s %s="%s"' % (' '*indent, name, value))
     print('%s>' % (' '*indent))
@@ -194,7 +176,7 @@ def CleanupVcproj(node):
 
   # Sort the list.
-  node_array.sort(CmpNode())
+  node_array.sort(key=get_node_string)
 
   # Insert the nodes in the correct order.
   for new_node in node_array:
@@ -208,7 +190,7 @@ def CleanupVcproj(node):
       node.appendChild(new_node)
 
 
-def GetConfiguationNodes(vcproj):
+def GetConfigurationNodes(vcproj):
   #TODO(nsylvain): Find a better way to navigate the xml.
   nodes = []
   for node in vcproj.childNodes:
@@ -291,8 +273,8 @@ def main(argv):
 
   # check if we have exactly 1 parameter.
   if len(argv) < 2:
-    print(('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] '
-           '[key2=value2]' % argv[0]))
+    print('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] '
+          '[key2=value2]' % argv[0])
     return 1
 
   # Parse the keys
@@ -305,7 +287,7 @@ def main(argv):
 
   # First thing we need to do is find the Configuration Node and merge them
   # with the vsprops they include.
-  for configuration_node in GetConfiguationNodes(dom.documentElement):
+  for configuration_node in GetConfigurationNodes(dom.documentElement):
     # Get the property sheets associated with this configuration.
     vsprops = configuration_node.getAttribute('InheritedPropertySheets')
@@ -328,7 +310,6 @@ def main(argv):
 
   # Finally, we use the pretty xml function to print the vcproj back to the
   # user.
-  #print dom.toprettyxml(newl="\n")
   PrettyPrintNode(dom.documentElement)
 
   return 0
diff --git a/gyp/tools/vssetup.powershell/LICENSE.txt b/gyp/tools/vssetup.powershell/LICENSE.txt
new file mode 100644
index 0000000000..d4edbaef7f
--- /dev/null
+++ b/gyp/tools/vssetup.powershell/LICENSE.txt
@@ -0,0 +1,8 @@
+The MIT License (MIT)
+Copyright (C) Microsoft Corporation. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
\ No newline at end of file
diff --git a/gyp/tools/vssetup.powershell/Microsoft.VisualStudio.Setup.PowerShell.dll b/gyp/tools/vssetup.powershell/Microsoft.VisualStudio.Setup.PowerShell.dll
new file mode 100644
index 0000000000000000000000000000000000000000..2aeabb3bccad275a1db107a5bb036612701e172c
GIT binary patch
literal 39424
[39424 bytes of base85-encoded binary payload omitted]
zxzAai^PJ~A=UMJ^pP4K>`x>$lksa4Z9}zu?Hr(pP78 zY>C7I{ZV~$G}IgD3ib8r1A&dfPb;)KpKQsd{BA(Ne`mpS|*` zwN`0|X;vUbsUW%r6qi9QzaRGiu7kLU(j=`Kb~AzPH$Dmx==`zKqDxtn|0nJy$|QW= zx{qiDCk_&|I5=_4=P;2Mq`U4V$~}hlv1lM}x`?@eFGz`WI~_n)1V zS#Jb;tw~Pg)lq%V+Tx4~CtZ%R;iB}+C@koodQ}!oDatmA_V~%K9L@5bD)ungWLVN`}bFw^YB)#LcWsG_nR;xGL}$ zdV<^+^F84=Cc|$+P52Fc7JfruC;N>>HPJKPZ=6>fW|;8Xe@z*&N*`vQ3OP&O2$mJv7d5ocj~{jZ64f{MLs2?6=eSo`^Rl!*4=O_{|}H^N;r%i)zCR z$NP=*YN9vcw`D(ixn-Q+B-=#4ao$P&mZBv0Q_lSf{l+Ey3x1RF3}c9$5c517r>oH+ z*cQYt7fKud9IBe1~k9A-Go+~{tu zE%KG9NmTtBNRQ-mZuLTpOZMn(5K9s$I_ku{rNz!+gU}62AeYW^Pfd`!{#ZH0ojv-K zxZF|gL6%s@nh=<*DT5`Ps#W4q4c$zOoy!tvB9_QxiGsLKNR63%y2d;D0#vYFXzFrPSCe9i_sw73-x!FRXX9nxJC$7ix;+|t`hP9GC`t$_3J6UcEm+Cd@m&bD7 zpCEU~I5}rh7!AMzbp)Nmj_Bi?em7)yqF>pI%5vtIq5oCRA3-@qqs$4WqO_gpbjdOg zu*_DGVGf%2>rClk>&{|;>AhrRCRjX5a%A(JL!0j$x_sx* zTJa}uyH{Z!oL>pc{$AV7s8A(7JZQV<>2~UmMcJ(}1_J;Ch%eRi&fJO7(xKSn(UR$T z$1w+hG>H$yCdWta0&*ZJpWFs=fTwISV!{@^5oHC@8A`C^ALQy*sl$BNwdswA1TO(7_42f;*Da#kj51$@il^@^H^O36?rVappdmWNLH)weFNb)sNu4_>^~~{9 z9s|&q(Fz#Au?C5O9mJ?pVvCM(5(7_(J$gMSbC7ftm1%Q=yHQG-4fbOgDsjZF0P3;p zpTRAqB-5#1iG){k=~v-iX3tK2C)=q*(o^={J054L=G39d^Hxr=yO_rc6_1mFjf4<0 z)`=asd&QPJxKzwYu;m_3u0&l`{c2>k6%^>7#XUG3Ts-m>Y5KK52e}POy#}(Mkzykq z#JE^eEZwP^{Wl9?B?x=TWvAydL^z1aAN=-l+yp1Z$)U zIn`l?WhrX#Opt;%GM{~Fr>O|?RIZ01;Z*gTm>GTh7KaE1~;&hTR`!;q?ORQO7z=+r2d!`5^MH=#BF*Q*(tny!f@e{W6*u0fu3M{o~o za;oW0amG93mKvOR!ABo`^p3|}s=19C6e}3uR4S*u(~Y)Q=w8bI51RHeDyq^hx*}DP zn3$zzil>Wt`uY~i!1T4A+YE9Uh+RMKKKR*I>a)dAHO-@H!M!kod+QhYZpdM(G~@jTJ_;w(+IJN3JeFNXPqGv99$ zbP!{@%9Ceii6i(Ln_QMM=QZ>yMb_6krWOy@{uqZ*P8ye^W1jF>g>~BWF9S_+?wk+9 zW(4mBQtE;Q5Z8(`y-9_ny31^tC7L_UV&BNH62mh*EjDmo?#Ge<;S`G?2@n=p1WAChvAc=QRW0^LQFlXVN%gzkANJU1;S2! ze5QAq`>14MP7s4)3Cg>`ERXN;@(^Ch<-NyQzb09k6D%~!!?dLFL!BpxC@<&ghFZZd@Kk|Kq16>V1%d)rH^yk>5 zTqA@GOqV6zS)d0?QP<)e*>6~cR^X3Ke}Qw<8uy*spiCZpj9~j2JL^RV6pb>+h!~@z zV$dkTrs+nT5$QJlTM*-ES&O}dRPhz+Fh{*je;L?(zj0TGxepw*Wgk_Lw>*ZiNEwyM80YCZpfPbS#KhLdd3n#l+YO z6T2Y3+ujbM%5#fH{~o}|YT(822+tkYp<$4M^xp+b*dtlvsN-2h@s%>NYx|g zL#!*CX9vzL{#OaH$Zc%eS#Y#EPaWoZB&t=N0&~P>Pl+>I9+A17&V@m}xmq^+G1~vSwp{a(!gw zCjLZ+R^c0)!T?g9+zpy*Wy&Xf|LlSe#x z^mrY2b2R>C@@S0MHJ&;0XnLKo?o6~-n6)OVF|wI0k@8Vr9)r+cEaTA%{#I_+rd50B z9j6-n5iEGi(wmMYGjF}J-46PqLX;oRm*Q--gTdTJTo2DiR`bV^vXw4#^q~#3%zD@;Y=#DNZ#E z!@QyC-$z*^JuF=CH&Ix!Q5|Mn{{c7|4#OtDBltt0))Jblbj%W(&Be0`1mS*9da`e( znTEboW|d)2L8p!3#3)ZIg%I09`g6y71I$x5}m;23>~*Je5&X`)N&z?A#ddhqu?1SCm&*R97wl6YqTC4Z3$8(d?a=7m#5~ z5z_4g(MaEB^je}(1oKijbN1?XdIiI38fddmUES7#`&+;>vw=7D=;-$HQ4#xRp7Mnh z_Y|OiR;qdOMRO3GL8v7S;(8y~6}Zp{1UqGpeiC;S&27NXdvqeM#kkN9<<2^|jrpE} zi|s&Y;?tJNG*p*Lrp(IKXjK}+!vY(9Kb?_5-$8t6bcgTGY>oCzVK^r6#WYUu5_qq` zpNLF>?_y}!?Xv+sm&))>(cB=p&Jp-$Uy)y(7psP*agC1TTnwF>FFW5y zEvY$~KI-@7K<9rWmxl_V(@!TBq-Fc*&p8ZlPfLTm%lmSspT6y5%Kr%W)oGlb39g@N z1pW#Xl!dzZsi26{D{P$pGvs~rVgCL3emW|WPfcO&yD99|hsDG0G)_0CaqSLeGK>o4 zk5ZQ{ex{r`jo~Q*KO=tlDDTc}53TnsoT||yLcdJ>`9L;ndmqq8XSul*oKDVr(+q}B zWi$MX!1NqWKR)|T)Z$wT*X5wV@8xm&SK@&Wkk)89B>nWA8BBj(;JK-seiUhqKAM(^ zNcfGkSFyB9K!Z3w(;N(?_hI__ExN|)IHuVcv++0uUanvc-gyF6C76@fE5II^&5~}c zhDL$y%P*L^8oM0Rr6=6uHn3db{b2@Uv#0{RGbF=aoJHsq*FmpAXKx;*3v3oG{cfigT?CoiTnTj%>Fis-h2L*#z&DT>* zc$tt)SL$gy?L}7&<(6R_Xr|qQt<5&DD+JqTOUCw2Gq9_Lcaz(|t`Y2dQ_{e`WAgS2 zZ=u(ayqox~ML}v=cBRrx_XzJSDep93wwY;!4N~}1F!-epwy#Fb50DMmI^DaWMDm?YF9=p6*g5nqv}hVNVjx>hA$m=CON7@&ZwPjhln|!x zC)v_VhiN8NIe!g!rStdk^tPmQtE9am(}{HZ=!aRa+S*4~35*FmH-#xZ*$hv2ziO+b zrFmz1W|9~5nRI6c!|XN1^$y&jA*= zZueBud9c5ddS)~Cb9s9_?W3KjOZ({I{JS7?|FpXS_q*={ye8`a;QIpKbUuvqH=#kn z_5j1vT&&>-(5z|wj>l1#S5h_s{yu}@>l(wK2+S(r^k?C{TKc{HNl!LCW67^w*elqj{#0qu1Q%L zK#OEUZC3i{V6v3riuO0eYIkz4^3>TzYf^t 
zGYKC0~4KgdO$Ekr$&D?G1lp%(pjwK1!A2}Iw+W-(@Eblv4*T# zr;}bYFwyCxUzqvWPB)!^F_^WmP7igP80+-V{|I)O>uoCKN0UO9yv$|Ku626IWniMy zLjl1IJH52o#8_u4-ELy6GnFcMDMVw)r(jN~7wjfd{a;B*r7KPBZtsBR|g=3rgNw!G%3!`N1CV()qwt2ME~48|Hv?6$0fDQUFW#D3&`9@ugd z%SwG2*cub7O5uF#O>A}!W1CFu8PVBiV&@lp2l=*}*gLte1KVk0C+EEl>@pKOin^!K z)h4zxHGr}7CKEfw!`PjI?Q-=?zAp-PEgtfI4lR!fHby@y_W^Nk{Q6>(^)3=E72Je%!sor z8W7B|C5wI_*iGb1b$O@Kl2Wk+mOZ3SrHz99++8~RYGoSfCYE1twQU+*Xkth5ZU(kn zup{o`!T{C+SDL*2nO^|!Ruj8Fb1PN`26jZ5tEGB#UjZvI`vbS8f|be1!n@uz zIIYE7MiCR+>RqLlQNM|u?>!B?9VXW4Kf_y2SDV;cm9e`_>{X4iCk4BSPR?8nY)Lt{ z(M@zx`VMaueOa)}TxFS;c<0jtCN@9w3Sf_$*a!Y=y|whTiT%O<2-VWdCf1yBt+$T8 zYhvvgk5C=`SgCe&0ZpPMC7?>`+uwY{}n0u4Ao_3hn zgguIE)y>;pgn@UK)dtyco)zsCbp{RUhj$I!ZRk1XODRv_AaCv6FVvW>)w;i zID3HxJ=mC>A3hWfK-4W&KFL@Wy!@?V*&b&k3Ci<0$?auqYx0(KCVn6l2=WU^U zZ1rFy63bd?j$lR+Hyfy=)M+<)GZKZPsdyc-A`LK5}^&31_r~7x_Q)!2Z^|=2C zY|O;YFSypbgm#zREQD9bd0)*~k($7oAbDn-w<&9N>e@Iq&2x6@S#j(q7~R&zv0r+-Qn70&pBEhe zgPCM~9J>OeerFte2>#p<$C^?isi8PlofApzieq&diX$Ay&ciIj&lCK4!9jU_sap&L z*uH{+)JPorF3K}7tG4IGdCi^xC9sQA1GF{H+nqO*+7ri8p(TM$(dv}mIPV?Qp)Zd4 zF+b>nS>^S|d0)b8k-$F0Y_ToQ`!hTkjbkZU0X!iYpBEfo$h+1%5XY+0ccu>0b_IG>i9m>2Khcf@%vMR5#`({cf=GbNv(gDDr# zC9@JK)*t*wkHgk^Qr&7tmA_yVw3Uc*cswu=4`C*yO#Z zvX)y-?2k6a9yGD*)Sb|hobN(9X!24WJ5w*De;15(I%p^TO0Xl!qh5tBqTia>(%AuE zSJ$&;N8BgO`cBG4@!3fUu%yNNa8@O-n+-{2&-9y9FQUUH#L2Sz? z)eCl+>rjf{HA;(2>_;i@fY)YXj-mspV|0dz`HQ{=EZLtIAA_wE$z2%tzma+|J!#|< z?_NxA31)bA7iFGg*upj1Mei+SY>X~Z6<~C-@Z8tVdNy^}xcVh{T)$o9G9(rIjF%v} z(!{uayJ)RoMwIUwXXh^J7Tzw}N3SE_4io#PdL;D{8a1)QwqK`SN;jC;N%oIYFQeN{ zY>VCD`!~AF#BQ`_`!1*FOzg+@S-#!$x{1wJ%6wO#I{f^F?Qyyu+m8yJo1UQj278C& zDNR`k$`RyM@tm8WB&S)Yp;@8zSedIdlEU9i`kcuBT(}eIUn=BWcj2kRfU0TF^O?t| zD0C_0RSHS2gd}TFK{sj?LG4ufp@;P_<#=g4MT$4>!wVD}%`Hevv(t0aIG3SeBqc4) zCQo^mXAJ!bzbUjzJju|gHJ)nWf#X5T!eoybY4(^xMvan7nV2@@xkVI8h6`0Cpiosn z4X9KHXrqG~1t3NSWbUsr5@}a9Q*hh@^4Das{QFtS^2bZ3S15SiH@L^9=a~ABm-my= z+0HA(KO990T`%bblscO9dHlMQ&%Sc|IzyOADT`*CTwZozd2o)PkK?Z-78*Dbj2z_Y;|T>Ej&!gUL-JFw@; z>-}^<2Xu0JCZLBZ0n-R8T);+wZ352_c$UDBz=*(YfLSyo=`n%31zs!g7J+*K6?zrGG0706wX$ zL%p8FGgmGxn5F>UnjZuFQO*$HU*t)N;Wp&DhK8i9YbCuG@Fsdm(mw>egKDJQg;Lh< zKvC#VfR9j%(3c9mQf2xAz%*(D{63u_l(PhK`WlJ=zC-I|r*WO^G~O*W?WU6hZlxQ0 zG8tG4ak@v+-PrG#jWol%Wd~(QtuAO#E)flv5bsZ3Dd{VT_a|>4-k-dIcz^OG@?c-% zCHi$H!zt4lP7}Ci#&yb&`jf)XD~C{e8XY2~;4Wpix*>2kC@BT^D~FUjJP!lz&U;F^ z9WsZMy=s5K_m#WV>%D(c?gB+q?-lyP0-qH4oWR}68axN?R%SV_R$dXx>jL+xHx*u> zzJ>Hv>U%=@0PrF81KE50P$+*DxL5Ui-%{=H@FB%3aIbo6p=Qfe>OH0Ov}*HQ4g0GL zT5K<=&!n7gI|Q$y7D|a@gYAdvkUL^4RCp&dm-v|;v5sf?LgHt5#5|tXk4TFY5T!Rw7Jo`=C7$n~U zT(t_qcL$Uc0?RdCb4rzE^Dv4BDsJiqb1-rY)9p zx&4TehgIv4TAP1^eWrSfKaFOpSK;~Ph|)6cDSM@Df9A9H1-8cvUP9iF3V&c%=Nc)P&61l}v~VS!Hyd`{pi0$&&SmelJ#sn`3;oIt+w2>mp($T?F= z{6OJ7O~ga~NzM-y-i!RJCA=vBhH8|OM3zEONiJWg~yG%=|imgrYU~~EL6E2OI2>i8nr0B)>)%ogY*X| zcPk+8xPB<<4<&6kZQ$KVyUJW2#-UX%AKt!(0842HU0$xd{ zxP5rv(E@moP6vFBI^1~cMYHgQyF-9JysOv>$@eJF? 
zz(VCDj|+R$vjDFJ^wTYXGqJAAPA|t>+_|1|yyH6LE62%{9|6v$KLM7II;EVdsSNNW ziU2l~4tP3U0oYFa0M8M67yT6Jt&)z>uaF+5oV0SRF53V`L7YW=Ta8%$f<-e#P z{aNKCWgcKNy!5QXHXIi2y9#@NRL*Nt&r!}xx2avq-t>S_776SE{k8Niq3=>T?=Ce) zAEfV6?@_Y-yVQrEbB{>w6Yf563;g?pyHB_Wh5M{^P*rnVE zIIQf?+#}qVkp5HV`+##j?>lY-bZeY;Yn%>ftaF}F=4tzs6SCSQ9TNJOz^9!ob3~xx z;`B^`jRFIn)0H2mQ3`uFAaGF%%WM!hDsYd$nO>$h3OwlJ^kIQCh0_6niv+ezd5ASg zdQ{*gLfIqfg8~l=+>^$64+=aikkW;c!*Oz0U|=ez7YW=Ta8%$Pfi#V|0f8F?jtbl( z@SwoM0%^L~DR7a%4FV5OXALxi;Rb=D0`~|!DDbereYD84pPD>h!u@V)_1uH&K04L& zRooBY{tzzg0D8WL>v1~W^L1M0c?$P$;(8j_v$&o|-WPFy3HNW~{wnf(2kAq!!t*`g zZ_p~w_i_CY=^x{I8`n>89l`Z3uAkxh5Affo4$prg?|;)gQFH zl*Q>I#4@gIPXE-g1n@5&hF4`Ud^Bq*;IF)A0QRP?0c=fSdTkEFH$>)(1?M1b%i92W zWnKg@P3RY=Mv-pzFdPuNU*lYV6`fgAIsFwsLzkDKFN-DrCN@;e-j4LFf(rnDmUju@ zGqag{F!O4_p=mb)e#UznV3&U{;5wDzA&p^c=6=9mWZVN75x6;n^WNir0Pu?iOb>b) z{xyrsZJo{XZw0s(x6FJLa9aSsaAKxi-ftqky6Abp6K8(gq#JF*&)5!JPW;1u0NO>x zd%F}sAMPqzWeQ*>o@G=#`(ywX;0=lFYNfurKNXg1(n zU@GSDxq$QVPeof}ro>eSSc)?lDpHkzm1upHs?h!_QgwhehyfM&Nr1K3zQJ=aodUQJ zPfsfFX21q~V^9V4RKP{p7gxY*LwXhFT@|zLGEh1IRXHD_;`O+0jl(4?6a!$Hcp&*q30C9 zOHd~-U55I4vEFF~yaM(0($%P`7bl}n2fP7w^y0@;PC|R^R89aKRVo25R;mFn!D*LS zbeU2Ic)79w@CxN5wAOWyETkJCSx7f2=K$Ue$wImnl7)1;axUPVkeo{oKyofU2+6th zsL}=a7$oP?6Of!sPby)+Z-~zMuw)~4-S*Rc^mVMmUZtPmY|9L#2J49>$~vWAc~SXD zDN-xcdbL$utA^Ff)X%B+sxPUptN*PYRTtPAZNs)_Z2|iU_B#71_VxC6?H|~)9cMV! zJN7%CalGaDlOtbSq=mIn?Pl#o?S0MX%yyPJS31vi_BaQeW6t}WQ(Z-_DwlnfpZk+P zgV>9*ClMC8b~qRvPpj}wq`i!sysMP>w7Jr-A7*^Gl~b}HkViQ|Nx;4Tw)(M5uLXo<+eY8qk3037F!kW(xcr>uc{NdQ^NyoeX)U1 zAGi(OAR})>S7&ESB-Y;(8fxwd#bT8eouVKm=C7*b3Sl9_U=yu%)xJBO2<9^@pP2zJa!GfXJpuILhXjb+0_8?!cMF z)kSi$@|g}dB$Wt1fY`ZaU}Mjzq5jHF)^V&);O66~qlMvy`OuJT2rVA$>)KFBZLNKS zz2RtRV^5f|X1%8;+%*u<`+%(qhq_nv^$e|SUl#7&7>>5}ZPF(d7)NYt-8LBNi3|)i z>%HujXi_O~Dr4>80o;~^Vq1(<+tNskDGM9wIy=vgqJ~Hw&$X4}`sKP6C5!dw%Fw`; z&c^8GSS2;L7cOk5@9e~nRfT$hFKp=T>|7k_3-$C2jdMWNq%LHt(Ktv=);1t-3J-K` z33szgfyLvRvu^6_Z0ytfhI;kESjSL*xMIU()XGVzRg+SyL6xSeqSM1e;qD|?3uR5H zXE3}n6p2EvttBE&7m5x6vs!N)kzGI7Z@CUx1aH>+HbpiMMnl~2Y_Zu4NOBy8y0(Tk z8@+LUb=*NnP39nw<1S%gQ`K>#Ym%gqHhn*Ta@8QmRU>T**G?`Ba$Fc`Q@Cz&VUXj( zNV718t@b=cN+d*jU zT)v`X@ru>UTWE3H($&O?dN7G-mexEN<>As${4o}mCyhr7&6-Y9k#h8mct2R! z0j#t`@P>x+#_sNqEg;c5#$}Xz{axXnfyG8|HwT@8RpHGTtHRN6H!as!g*WOtNZSS@ zOfV*jHp6R?3qa}Vioj2U01J;lpj9+Pc!&!3Kq_KReUp&ZgrhNZ&m_+F2^1@Pyvt71 zjKJ+4>>5Z?)7IUvwX?Gcy&PZ2FzrY*?D2UMIUK?hsiuD>GI-dYNNw!tnMkxMKf%y= zO(${^6ZQliq5`3HMiW$lV($=?K-)hz`wlsu=$`)RqP$U^iTDb7U7wgw!BGeW``2%JW zvzw7zHHd{-ZNr*0w%W zU=spT2=JZUrFy0bLKxtDXKq2zh#GTH5KUG(KEgxB3^r3kI~I;8RUm6NCy>=`&4WDy zSe%PWE6rWON=L&+P+B-=UJ>o)^?5Pl#;c5fC-WVJ;=aX!W#!SZw^PBqWbv~$p*O<1C@R( zk!#V8hbIJ_A%*gd#KMNk&d!*qXQ8$jFT#5eV(2{aWXll3Akx)1fUn?f9AwiVgm{R` z!T_dqCs;grC1JdBU;*yV39@B7f@F0pEPXw}Y#;3JM>E8h;X>z6FqVf02cn^#Rb~^R zbN6i?94BGu<-VNAWtA28Q-XDjuM!N@Z%rf?8K=kUKOK>QWcA~sIiX?2;6VSNM0SD} z*ANYLhkHZOtqE2~C<^y4=H7k2p3JbUUlQrYvtlAc>yCkNAGc?MWU1$#YL1l?nfyo= z?UkV?)N@>^aqIhuN?O9Pu4ts6$FBs_idzgU=-){U@euk?%yemop2*Y^-V{QQG#(cd zWF7z#&&COQeEdt08haw4SOQ(#6WW}F8i6X+T@~&L?U2+szU&Vq@>@>C(8#Nv1f#hZ zlRy$~2{!BfLs1Nfld|wU+c%WZVNNP+6b#uI;fLM?*K*);%-onbRzzES`;+;`5>>_n zS*-Htf_`kIk+4Q{gMb-gJYhR8_CI!Mn^-A-!nO0NaIe1o`2L0A^T13r^i_woGEvpvj;w@13eveUrRr!nEOjG1D6 zJO|5!CJCmr_Q41|o`Vk2BrKd@@Z8M7JP_G#Nb-P)GUG~?V)ce`GOj%t=R(?I6DYiX zGZ!0H;pY4#Jw|BeB$Xh*LTgGg_f(|SP<8G)mY!s^KZ(`Pm0ZfZE>@E!;xAUdV&i#O z_TsTOoW$hokLV!aY6Z-T$~O7Btc=rHXBMgqDIO0YBgo5-t!7&hIsAga^FAUaWVGB?TUt zN$>Amig@eTf;3lPRd_q*8T?u%BWW%bSip0L8D6!~l=OkpY!wje-Q7hRS+F>Ov11y{i+HO3jT5)0QHT0t8 z%Nqv=bYu4V_%b`n%}JUEN>!M-Faw{sLzVnPm^k=R!4lMtIoO;Ff9Nz*UKNiy15K zcC)nuCp!H;I8@nBeE? 
zA>nZ?o3}*zxq5Q%V&vm`wPik@J+^)P-pP5`fyX)`Y3pPH#dK6doM8rJf?)Nt_-;a6 zG|n-%O)N(IS;g8fk8i7e8t&~0qejhJ0N8cL^DsOrQxLUbXhNrn<3?b}_CnamU@fVQ zo))#7$&2(0MM2hh$XXKaK~oV%P`)LH8PG0s6QA18k6>1euh`;GFOA)yeuQ39 zlUtOfp$pLbvW18hfMMCP(0RI zc0Mr)%lnQ`tH5KoLq1^{W;jhcG?uSyH(biA5b0F^Kp{!nBgfRb5yRj15Z7OJqyhOg z8m1l|29e;29&3F3I!i3lg~ME!|BS`56>v#R)9jW=2oKY-0hV7b3z=o%fh~G>%xY6> zbtlgbdVhO3x;=u0+OcdaFynk~EGz)pcuL|If-(aqFsY=h0unSdaRM>zv-(5qW5=9w z)4h1bS02xSzk6a#_c1b_-LSUgWlT(ZrZrv&5;3ie6Jbg`-EY1``A01<)En&$gHU)^ z4_>BOA;6?#-%CoC$#yn**iF=p`V(5g;BBD^4RK66@rc3CaIyF(Z(y4-ILYHljI4+E zmC;Bqy8L#?Hbwfd(Xv?{;qXGCrn7SZf4;|R^Q4%8dh8}({v3*dFRu^dV*k75FJo~@ z@fBLUz>e^a0W|;SLAbkhhwNNnK^JEy*)wheULmW77~Z;V4)3rK_79KZIP@4#E!Sl~ z)QBIfGh?)s0S2zF&;S`jJKSt6n59XZutjB}8xu)+Ix@++tggw}Tec#H7wu9mnqJau zO*}?T8Uny#BP>MBcnqbaXNvfskFG}Oj^l8MGZ|WBXp(fhF<@9to#eA}Yh1!e!<`H{ zv4(EL9#y0(GC=&og|Kx3$kv+M3A8v8jSZ}bT4=Yi%YbLX)%_?G(_<(a3H1$3R$hF~ zlNk5^r5VKBdHht0G6#qFR(eDEKdJqnoi#p2eR8{ejM%5`PU#`W+ra-#F8u^0{IhQT zJ4GbBHd%CGLse&ImsxpX;Hkn0eB%X^^jFtWG!vY@hHJsQJ8~k(nLOFP2)p2 zXLaT<&NT*bYOxRJ5fK)+ci}u^0Ou|P;8ftGW9cOPl@!H^#%`Qm>_RR}x)Ra>Tq8th zeNw$4Xxs`rHp2@3oe#r~B+Z}D;x*8Rr7CKwqmGm4?KLc3h_j|!aKf$=ry`=_j~ISY zJ%n@^`37(nv)fEXXcM%t){3}3%U|37UTIUtOHlf;EkHBd;%(A{-$4waY{T~DNcTej z@g-J@G=7<2?(uYt_Xg#Rmur#yC?Ta?Vq$mC<+@>kx>6Ff#xb<+Ah+cJ>K4F>P9yebA&tE{Py=XfUG#M$ZNw1U zGQQVvK91E{IOk3o95b6lE5{D^E1GhQ=iSY)j{QcdO(@;ye`q;(2kc;rxNWeq#d&0| z8JA~70i~}77NKph2`>Z@l{E5c)~$bBs{~-%v2`)L6t8VtCC+Wv;ny%L&>l57%gpD! zo2dn-yvu;&ToSltpjT5B=w-kvL2Z(M3y_aFwV)_j-KZCjD@IMZ?_sFwS|(I(sTi(J z;Fsf^H^<~4Y{lj=R(Fklav{d3}dRcxzO5lj)7~$B$D|57Sa+&z`VNf|T zxxTEA+*^SSu{EhWqV_zKN7Igt(nJqjiZYB6*bkOP;VN|b#* zO2SqO>cSTFz;E2I*y?p?od_s+p(B=YS-tqJM*Jv)+b9{oO42@=-s88A(I(amQjX~0 z(UmRWda}0_V@eiqoOGs{1^SMdk!0UQ+vA(shbAh_`xUd+lM zM}YjJcldxEP_oiTA5k+1KS={M8RW|H4+-iFs6d4boUuG`InP*f9#!%AG`3~*RVQwF z{?YeM1G>#}yIFCb4~p5WF*he*&Y0VsyfsaPcF z9K#pU@pTi_MN+Vgg~{Y_HC!3^QEfXDGn5PkTzJ8!W+-m5p|nO{UM`C8N)e*kNP?D@ zpczTfG7~f-X~$ntaM?YIKMb`lXTYww@Uc4;f5c9T%FSVOA!V~U;53)bMO>-$u{L*r z-E&p?*fP|7bhnDMk418)PES)+@k%}_3g_f=L+w`cDPO^_h>INl4#(JP4dfpxK73)) zkjr4XH{cGZQ}tiyA01`0x>@Q{#b@&qtBaiLCzmc zkxW_sL8M(Cl*EaYGz1wKY#a(0k$6&snL$>LA=Q^FIXD5S3fxlAs8&q5g~rWd*obeI zD-y4}Ri8R7Ena|G4sPrOOG0V`$=uR3n;orxmYT2RTQ-@p;z02nGu}}06^F;>^GP4L z6sAL+hA8mm=AwlZp98HndL`F-3pe0etF0m$DY*0 z7q@&xb+bLv1}TytNf47teR34Hn?Bk9#w# z8gp|u2;k1W%9rbQ+Wi3^n(+n~*^mP^y5VnhC;Zm}YHn^G2Pwy@IOJxt7c7~ANX(}+ zsWS%+$Hx*W)aQ|CmOjUx7<2obs0bSzFzOoc2c{r}-VK6_%?WzZh2sbhpu6E+gQZG< z8EGS%#qkG_ErDdjb$Ue^_rEklZ-U@AbtfpQkHaK^8)}j%{s7dBCm1HO$!RFdmtqtn z=~S}>L5ygW! 
z(a7oB(zhWj4A1gRsAQ%oHrOIDCM}D>fxO8z=YT`I1$_?GILM=o7!H|4p*(IKz?6&F z3v)TXG=CVulRkQ_s<|A5(bZ#f8vHF#Fm?d#WX8-Gx|77&B|PSiJ>tMT;@={a%Qd9) z{2K(hW{T71Ut&}V@ipd#Lf2G;HY6l;5zsQ4NN97n7=0f$qGBeMk-Zk;G>M}6cGuS~ zShr?c&Eegy%5T=MxvzHld|xFNWfk!&+d_HWT#j#0R%4w#pNf;oWmrsg;cJ-uI9-W< zHCTdHW)6J9$&T#VxiiNy7Il;OwW@CvQpG}1Yb+N5WI81;VX!*d9H(<#s5IQQ{s}P@zmcd z>lBn{<>T_Ww0VI1_1HqPXR7A+5$y`rj&@*Vw=-}tz~8m|NO7sfxk|2*rFbw+%~11F z@pZ~Pmxi$J#fm}0nnp|ChN~axpc7=C`}xkJq^hZ$unTr0VN_ZeITtOW@S z?T{J0DwXxbW1J=Sq#3FcBdtlW1_6H{Ga-=}5qUN@dcDa`9uY8>C-TT(;4^S@Xi(#M z5+j7nNh*GkA0vc6j8%ruhwpyM>e}CpkqEQ^VI>C8&uGxa6EOQ!671&`Jg>@}67+XZ zsZj7U?fLkIIIrX||6&5dVg)0}S})H<*6JOI8b^GX|MKFP%Zd36S$W=+*%Kqi)36Vd z?DH_qH!DrGTO*@5gO?6sfZTWyh1H)eAKBjau?2+IHrD)qKn?PI#_9)iU^fscT1r}qRPS0|XW$tNg>PX=0X-m^ z)fv4CX%(5!blgJ>4`2=ovUGPG#j_8*hQ$Uu8+i!p2!JuSth#v88gonKjY?tCheh$V zLK@d8QAN`=L6BJ{!4R)Yc=`jFY^)jAluOj!&R-s8pxA=sLjtiF+z+SnFCTMGMMW*Wu!T68Joxc6<(~8~p?H<#7`2hWxp#z8feyw7{zv z;$cw1_ni5Y&wn(cL(M)D-?zlU?f97_9L>ZDbOmxeoDmf(RAd~x3mE6_0(i$a*dIuo zy9*d!XCpdEq1q&JFJ372gaZS5Aj01fz=nDNhe);rCeemJ6T>4YO~F8@(wq(WO|tm0-9WH;USLuU+AW!>?UbDpVRTwlnWleW=cV$&QQCOjQ<;z<;;s?k} z%_B@F^Pko!#T%YYc-?byb58_E%+QFw%(!II*F_3gMIg=z)C7zS0W<%J$-XXMi7#>R zxgs31C}?i1tgoy*v2nW|>0aH3Bb_~b@PJQ^gynlYgy1kfZiVb}Op+EURBgU4N*tc( z(Dj~JIgS+ReVcoR_|QPN#Wc?=Ir(n|;U9kG13x@+YItDEns$jV>)i)eFUNfIoBZOL z&n?){qU9XM_`5zI+ZvWKZ0g}pF&OX3k1fb({@UvLil*kpi_2;%TdK=ynyOmM>KQaG zu5YZZudHaUuR#N${)PuQVrr`DYpUxi7nHR$)ifhwnybp{TkD$3YHJq5jkT?dTjn3{o=}&`OSu8Q%xn<&DCY~^|iHSHPub?%bJ?2tIMjIDr)O$7uPMW zscJPO>lakkR5veZEUTE`i25yPswiu!tE?-lt7@&PtZ&3+>1=7NZ>evrtSxJ*YJ$$1 z`ns~ls>PLM^B2@KUJsO+k&n_m{H-WZ1D#;&pjm79=ferh{v4mGT)6E z-+Q4>h0Zp^&o~rO8nC_wTNj~Q#F0rTME$$iFGP)Y+U2RGsb0$zxA;G1WCCvU8) z3(fDE59e3cge&UN;So0+iD{FCU_VBX$#`}?O4Z?Vt<+=kZA0!d9jY6Lyt?`P)sWJm zrpxgeIZ+;1^?YL=8MjM z*7(t%;!ks|4+htDmju_91I`<+EUg}ycVY0Hb?2-Lp1UqqJb`x#6H6!M%wyg;>&{&_ z@7#Iw3ef`P~f*-$|N%Ze8|K=M!1YC=&XTrE= z4RZW~I~@H~Lvi-R!i?`WT4iGNWFxW8l+Vh>7XH8E(!G~PZ~y!qj_C(yEeC`DZXtgb zpc2`~d0_%}`9Js0gnwjZ`te_zpdXQv*|8-Yh{<1n3q)dpFn=qiT$#C?R|&>3zrZFw zUJV!x@V5#AvxX~1W|b?eC8Ml8x{{B12Kvnt53H_FAHd{peMcK}ek?YKbMKqMNTSD! zieUjDpU+--)ml!l>vG|fgZJXPnrKyfOFRAg$#t8WzP*sGG#Q2*C>tfNab>>8t^aX`=IHaddpGjQI?UzzKAgTY_utLA;x@GiH2#VXr}A4YlKp#~QqMT!mB{{;Jw? z;BA15k><}+_TL;eepw2?Gn-im3isYZZ0Z;vR>^%n`O^YKfmVKJZM`$)_r`p7fIryy z0Zg1MNj}#h0`!1=2}Bgwlz9I?4#;o3t#`@B*`D*|9p@JClHcYA#6s(RJ7d=ScCN=#;=ea$oy*`; z4MT|&Bj8Wm84Q?joBLq}pM$X8{^K|K@OxTH+?&TaZ33PDn00~U_+nX-F9=(s-lQ^* zIkSPXlDAkBJFkRO8vGZ=xWx@08t^fe1ltbFsl;txamUcd=#U};$t$h@~ zqsJ#5%28%9tV*`ah + + + Microsoft.VisualStudio.Setup.Instance + + + PSStandardMembers + + + DefaultDisplayProperty + InstanceId + + + DefaultKeyPropertySet + + InstanceId + + + + DefaultDisplayPropertySet + + InstanceId + DisplayName + InstallationVersion + InstallationPath + InstallDate + + + + + + + Microsoft.VisualStudio.Setup.PowerShell.InstanceAdapter + + + + Microsoft.VisualStudio.Setup.PackageReference + + + PSStandardMembers + + + DefaultDisplayProperty + Id + + + DefaultKeyPropertySet + + UniqueId + + + + DefaultDisplayPropertySet + + Id + Version + Chip + Language + Branch + + + + + + + diff --git a/gyp/tools/vssetup.powershell/about_VSSetup.help.txt b/gyp/tools/vssetup.powershell/about_VSSetup.help.txt new file mode 100644 index 0000000000..864b5c28cd --- /dev/null +++ b/gyp/tools/vssetup.powershell/about_VSSetup.help.txt @@ -0,0 +1,46 @@ +TOPIC + about_vssetup + +SHORT DESCRIPTION + Enumerate and select instances of Visual Studio. 
+
+LONG DESCRIPTION
+    Visual Studio 2017 introduced a new setup engine capable of installing
+    multiple instances of Visual Studio and other products in the Visual
+    Studio family. This module provides commands to enumerate those instances
+    and select instances that meet your criteria. For example, in a
+    development environment you might have a script that finds an instance of
+    Visual Studio with the Managed Desktop workload for writing projects
+    targeting the .NET Framework. See below for more examples.
+
+VARIABLES
+    You can get the version of this module or of the query API the module uses
+    from the `$VSSetupVersionTable` variable.
+
+    PS> $VSSetupVersionTable
+
+    Name                           Value
+    ----                           -----
+    QueryVersion                   1.15.23.19330
+    ModuleVersion                  2.1.2.4917
+
+EXAMPLES
+    You can enumerate all instances - even those with errors that require a
+    repair - with the following command.
+
+        Get-VSSetupInstance -All
+
+    If you want to select all launchable instances of Visual Studio products
+    that have the Managed Desktop workload, use the following command.
+
+        Get-VSSetupInstance | Select-VSSetupInstance `
+            -Require 'Microsoft.VisualStudio.Workload.ManagedDesktop'
+
+    You can also get the instance for an installation directory if you want to
+    discover more about what is installed to that directory.
+
+        Get-VSSetupInstance 'C:\Program Files (x86)\Microsoft Visual Studio\2017\Community'
+
+SEE ALSO
+    https://github.com/Microsoft/vssetup.powershell
+

From 2eb4d2bce58930409c8bca0a87cd5967e9461d28 Mon Sep 17 00:00:00 2001
From: Refael Ackermann
Date: Fri, 26 Jul 2019 15:44:28 -0400
Subject: [PATCH 2/4] meta: more generic .gitignore

---
 .gitignore | 17 +++++++++++++----
 1 file changed, 13 insertions(+), 4 deletions(-)

diff --git a/.gitignore b/.gitignore
index 776066e34d..7ca902a0f2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,6 +1,15 @@
-gyp/test
+.*
+!.github/
+!.gitignore
+!.travis.yml
+
+/gyp/test/
+/gyp/testlib/
+
 node_modules
-test/.node-gyp
-.ncu
-.nyc_output
 package-lock.json
+
+!/test/node_modules/
+/test/node_modules/hello_world/build/
+
+/1*/
\ No newline at end of file

From 555e3991648731a320df7bfe5e7094e075a1db91 Mon Sep 17 00:00:00 2001
From: Refael Ackermann
Date: Fri, 26 Jul 2019 16:26:06 -0400
Subject: [PATCH 3/4] config: fix syntax error in addon.gypi

---
 addon.gypi | 14 ++++++--------
 1 file changed, 6 insertions(+), 8 deletions(-)

diff --git a/addon.gypi b/addon.gypi
index 6462f539ff..aef6f221f1 100644
--- a/addon.gypi
+++ b/addon.gypi
@@ -8,14 +8,6 @@
     'win_delay_load_hook': 'true',
     'product_prefix': '',
 
-    'conditions': [
-      [ 'node_engine=="chakracore"', {
-        'variables': {
-          'node_engine_include_dir%': 'deps/chakrashim/include'
-        },
-      }]
-    ],
-
     'include_dirs': [
       '<(node_root_dir)/include/node',
      '<(node_root_dir)/src',
@@ -89,6 +81,12 @@
     ],
 
     'conditions': [
+      [ 'node_engine=="chakracore"', {
+        'variables': {
+          'node_engine_include_dir%': 'deps/chakrashim/include'
+        },
+      }],
+
       [ 'OS=="mac"', {
         'defines': [
           '_DARWIN_USE_64_BIT_INODE=1'

From b9afb43b344c121102bd4abb09b678ade1d7dbcd Mon Sep 17 00:00:00 2001
From: Refael Ackermann
Date: Fri, 26 Jul 2019 16:42:02 -0400
Subject: [PATCH 4/4] test: better logging for test/test-addon.js

---
 test/test-addon.js | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/test/test-addon.js b/test/test-addon.js
index f97215c0a2..b8a925cc5c 100644
--- a/test/test-addon.js
+++ b/test/test-addon.js
@@ -36,14 +36,18 @@ function checkCharmapValid () {
 }
 
 test('build simple addon', function (t) {
-  t.plan(3)
+  t.plan(2)
   // Set the loglevel otherwise the output disappears when run via 'npm test'
   var cmd = [ nodeGyp, 'rebuild', '-C', addonPath, '--loglevel=verbose' ]
   var proc = execFile(process.execPath, cmd, function (err, stdout, stderr) {
+    if (err) {
+      console.error('failed to run: ' + cmd.join(' '))
+      console.log(stderr)
+      t.fail(err)
+    }
    var logLines = stderr.toString().trim().split(/\r?\n/)
    var lastLine = logLines[logLines.length - 1]
-    t.strictEqual(err, null)
    t.strictEqual(lastLine, 'gyp info ok', 'should end in ok')
    t.strictEqual(runHello().trim(), 'world')
  })
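
A note on the error branch in the hunk above: execFile() reports a non-zero
exit through `err` while still handing the callback the captured stdout and
stderr, so printing stderr before calling t.fail(err) is what surfaces the
full gyp build log in CI output instead of a bare assertion failure. A
minimal standalone sketch of the same pattern follows; the bin-script path
is a hypothetical placeholder, not the test fixture's real invocation:

    var execFile = require('child_process').execFile

    // Hypothetical path; point this at node-gyp's actual bin script.
    var cmd = ['./node_modules/node-gyp/bin/node-gyp.js', 'rebuild', '--loglevel=verbose']

    execFile(process.execPath, cmd, function (err, stdout, stderr) {
      if (err) {
        // Echo the exact command and the captured build log so a failure
        // is diagnosable from the output alone.
        console.error('failed to run: ' + cmd.join(' '))
        console.log(stderr)
        throw err
      }
      // On success the last gyp log line should read 'gyp info ok'.
      console.log(stderr.toString().trim().split(/\r?\n/).pop())
    })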