tools: update gyp-next to 0.20.5
PR-URL: https://github.com/nodejs/node/pull/60313
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
Reviewed-By: Luigi Pinca <luigipinca@gmail.com>
Reviewed-By: Chengzhong Wu <legendecas@gmail.com>
parent 5c50445dfc
commit f0679db554

@@ -1,5 +1,13 @@
 # Changelog
 
+## [0.20.5](https://github.com/nodejs/gyp-next/compare/v0.20.4...v0.20.5) (2025-10-13)
+
+
+### Bug Fixes
+
+* Fix ruff v0.13.0 adds ruff rule RUF059 ([bd4491a](https://github.com/nodejs/gyp-next/commit/bd4491a3ba641eeb040b785bbce367f72c3baf19))
+* handle `None` case in xcode_emulation regexes ([#311](https://github.com/nodejs/gyp-next/issues/311)) ([b21ee31](https://github.com/nodejs/gyp-next/commit/b21ee3150eea9fc1a8811e910e5ba64f42e1fb77))
+
 ## [0.20.4](https://github.com/nodejs/gyp-next/compare/v0.20.3...v0.20.4) (2025-08-25)
 
 

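Both changelog entries correspond to the hunks below. The RUF059 change is mechanical: the rule ("unpacked variable never used"), which the changelog says ruff v0.13.0 adds, flags names bound by tuple unpacking but never read, and the fix renames each unused binding with a leading underscore without changing behaviour. A minimal sketch of the pattern; the `source_extension` helper and its argument are hypothetical, not part of gyp-next:

```python
import os


def source_extension(path):
    """Return the lowercase extension of a source path, e.g. "foo.cc" -> "cc"."""
    basename = os.path.basename(path)
    # Before the fix: (root, ext) = os.path.splitext(basename)
    # `root` is never read, so ruff reports RUF059; the underscore prefix
    # marks it as intentionally unused without changing behaviour.
    (_root, ext) = os.path.splitext(basename)
    return ext[1:].lower() if ext else ""


if __name__ == "__main__":
    print(source_extension("pylib/gyp/xcode_emulation.py"))  # -> "py"
```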
@@ -378,7 +378,7 @@ class AndroidMkWriter:
             inputs = rule.get("inputs")
             for rule_source in rule.get("rule_sources", []):
                 (rule_source_dirname, rule_source_basename) = os.path.split(rule_source)
-                (rule_source_root, rule_source_ext) = os.path.splitext(
+                (rule_source_root, _rule_source_ext) = os.path.splitext(
                     rule_source_basename
                 )
 

@@ -100,7 +100,7 @@ def AddCommandsForTarget(cwd, target, params, per_config_commands):
 def GenerateOutput(target_list, target_dicts, data, params):
     per_config_commands = {}
     for qualified_target, target in target_dicts.items():
-        build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(
+        build_file, _target_name, _toolset = gyp.common.ParseQualifiedTarget(
             qualified_target
         )
         if IsMac(params):

@@ -73,7 +73,7 @@ for v in _generator_identity_variables:
 def GenerateOutput(target_list, target_dicts, data, params):
     output_files = {}
     for qualified_target in target_list:
-        [input_file, target] = gyp.common.ParseQualifiedTarget(qualified_target)[0:2]
+        [input_file, _target] = gyp.common.ParseQualifiedTarget(qualified_target)[0:2]
 
         if input_file[-4:] != ".gyp":
             continue

@@ -1169,7 +1169,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
             for rule_source in rule.get("rule_sources", []):
                 dirs = set()
                 (rule_source_dirname, rule_source_basename) = os.path.split(rule_source)
-                (rule_source_root, rule_source_ext) = os.path.splitext(
+                (rule_source_root, _rule_source_ext) = os.path.splitext(
                     rule_source_basename
                 )
 

@@ -1666,7 +1666,7 @@ def _HandlePreCompiledHeaders(p, sources, spec):
             p.AddFileConfig(
                 source, _ConfigFullName(config_name, config), {}, tools=[tool]
             )
-            basename, extension = os.path.splitext(source)
+            _basename, extension = os.path.splitext(source)
             if extension == ".c":
                 extensions_excluded_from_precompile = [".cc", ".cpp", ".cxx"]
             else:

@@ -1677,7 +1677,7 @@ def _HandlePreCompiledHeaders(p, sources, spec):
             if isinstance(source, MSVSProject.Filter):
                 DisableForSourceTree(source.contents)
             else:
-                basename, extension = os.path.splitext(source)
+                _basename, extension = os.path.splitext(source)
                 if extension in extensions_excluded_from_precompile:
                     for config_name, config in spec["configurations"].items():
                         tool = MSVSProject.Tool(

@@ -3579,7 +3579,7 @@ def _AddSources2(
                     # If the precompiled header is generated by a C source,
                     # we must not try to use it for C++ sources,
                     # and vice versa.
-                    basename, extension = os.path.splitext(precompiled_source)
+                    _basename, extension = os.path.splitext(precompiled_source)
                     if extension == ".c":
                         extensions_excluded_from_precompile = [
                             ".cc",

@@ -531,7 +531,7 @@ def AddSourceToTarget(source, type, pbxp, xct):
     library_extensions = ["a", "dylib", "framework", "o"]
 
     basename = posixpath.basename(source)
-    (root, ext) = posixpath.splitext(basename)
+    (_root, ext) = posixpath.splitext(basename)
     if ext:
         ext = ext[1:].lower()
 

@@ -696,7 +696,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
     xcode_targets = {}
     xcode_target_to_target_dict = {}
     for qualified_target in target_list:
-        [build_file, target_name, toolset] = gyp.common.ParseQualifiedTarget(
+        [build_file, target_name, _toolset] = gyp.common.ParseQualifiedTarget(
             qualified_target
         )
 

@@ -1215,7 +1215,7 @@ exit 1
 
         # Add "sources".
         for source in spec.get("sources", []):
-            (source_root, source_extension) = posixpath.splitext(source)
+            (_source_root, source_extension) = posixpath.splitext(source)
             if source_extension[1:] not in rules_by_ext:
                 # AddSourceToTarget will add the file to a root group if it's not
                 # already there.

@@ -1227,7 +1227,7 @@ exit 1
         # it's a bundle of any type.
         if is_bundle:
             for resource in tgt_mac_bundle_resources:
-                (resource_root, resource_extension) = posixpath.splitext(resource)
+                (_resource_root, resource_extension) = posixpath.splitext(resource)
                 if resource_extension[1:] not in rules_by_ext:
                     AddResourceToTarget(resource, pbxp, xct)
                 else:

@@ -2757,7 +2757,7 @@ def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
         source_keys.extend(extra_sources_for_rules)
         for source_key in source_keys:
             for source in target_dict.get(source_key, []):
-                (source_root, source_extension) = os.path.splitext(source)
+                (_source_root, source_extension) = os.path.splitext(source)
                 if source_extension.startswith("."):
                     source_extension = source_extension[1:]
                 if source_extension == rule_extension:

@@ -1534,18 +1534,20 @@ def CLTVersion():
     FROM_XCODE_PKG_ID = "com.apple.pkg.DeveloperToolsCLI"
     MAVERICKS_PKG_ID = "com.apple.pkg.CLTools_Executables"
 
-    regex = re.compile("version: (?P<version>.+)")
+    regex = re.compile(r"version: (?P<version>.+)")
     for key in [MAVERICKS_PKG_ID, STANDALONE_PKG_ID, FROM_XCODE_PKG_ID]:
         try:
             output = GetStdout(["/usr/sbin/pkgutil", "--pkg-info", key])
-            return re.search(regex, output).groupdict()["version"]
+            if m := re.search(regex, output):
+                return m.groupdict()["version"]
         except (GypError, OSError):
             continue
 
     regex = re.compile(r"Command Line Tools for Xcode\s+(?P<version>\S+)")
     try:
         output = GetStdout(["/usr/sbin/softwareupdate", "--history"])
-        return re.search(regex, output).groupdict()["version"]
+        if m := re.search(regex, output):
+            return m.groupdict()["version"]
     except (GypError, OSError):
         return None
 

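The CLTVersion() hunk above is the `None`-handling fix called out in the changelog: `re.search()` returns `None` when the pattern does not match, so chaining `.groupdict()` directly can raise `AttributeError` when `pkgutil` or `softwareupdate` output changes shape. Binding the match with the walrus operator (Python 3.8+) guards the access, so the lookup simply moves on to the next candidate instead of crashing. A minimal sketch of the same pattern, using an illustrative input string rather than real tool output:

```python
import re


def extract_version(output):
    """Return the captured version from `output`, or None if it is absent."""
    regex = re.compile(r"version: (?P<version>.+)")
    # re.search() returns None on no match; the walrus operator binds the
    # match object only when one exists, avoiding AttributeError.
    if m := re.search(regex, output):
        return m.groupdict()["version"]
    return None


if __name__ == "__main__":
    print(extract_version("package-id: com.apple.pkg.CLTools_Executables\nversion: 14.3.0"))  # -> "14.3.0"
    print(extract_version("no match here"))  # -> None
```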
@@ -22,7 +22,7 @@ import gyp.generator.ninja
 
 def _WriteWorkspace(main_gyp, sources_gyp, params):
     """Create a workspace to wrap main and sources gyp paths."""
-    (build_file_root, build_file_ext) = os.path.splitext(main_gyp)
+    (build_file_root, _build_file_ext) = os.path.splitext(main_gyp)
     workspace_path = build_file_root + ".xcworkspace"
     options = params["options"]
     if options.generator_output:

@@ -487,7 +487,7 @@ class XCObject:
 
         children = []
         for property, attributes in self._schema.items():
-            (is_list, property_type, is_strong) = attributes[0:3]
+            (is_list, _property_type, is_strong) = attributes[0:3]
             if is_strong and property in self._properties:
                 if not is_list:
                     children.append(self._properties[property])

@@ -913,7 +913,7 @@ class XCObject:
         # TODO(mark): A stronger verification mechanism is needed. Some
         # subclasses need to perform validation beyond what the schema can enforce.
         for property, attributes in self._schema.items():
-            (is_list, property_type, is_strong, is_required) = attributes[0:4]
+            (_is_list, _property_type, _is_strong, is_required) = attributes[0:4]
             if is_required and property not in self._properties:
                 raise KeyError(self.__class__.__name__ + " requires " + property)
 

@@ -923,7 +923,7 @@ class XCObject:
 
         defaults = {}
         for property, attributes in self._schema.items():
-            (is_list, property_type, is_strong, is_required) = attributes[0:4]
+            (_is_list, _property_type, _is_strong, is_required) = attributes[0:4]
             if (
                 is_required
                 and len(attributes) >= 5

@@ -1616,7 +1616,7 @@ class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject):
             prop_name = "lastKnownFileType"
         else:
             basename = posixpath.basename(self._properties["path"])
-            (root, ext) = posixpath.splitext(basename)
+            (_root, ext) = posixpath.splitext(basename)
             # Check the map using a lowercase extension.
             # TODO(mark): Maybe it should try with the original case first and fall
             # back to lowercase, in case there are any instances where case

@@ -2010,7 +2010,7 @@ class PBXFrameworksBuildPhase(XCBuildPhase):
         return "Frameworks"
 
     def FileGroup(self, path):
-        (root, ext) = posixpath.splitext(path)
+        (_root, ext) = posixpath.splitext(path)
         if ext != "":
             ext = ext[1:].lower()
             if ext == "o":

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "gyp-next"
-version = "0.20.4"
+version = "0.20.5"
 authors = [
   { name="Node.js contributors", email="ryzokuken@disroot.org" },
 ]

@@ -47,7 +47,7 @@ def WriteGraph(edges):
     # Bucket targets by file.
     files = collections.defaultdict(list)
     for src, dst in edges.items():
-        build_file, target_name, toolset = ParseTarget(src)
+        build_file, target_name, _toolset = ParseTarget(src)
         files[build_file].append(src)
 
     print("digraph D {")

@@ -62,14 +62,14 @@ def WriteGraph(edges):
             # If there's only one node for this file, simplify
             # the display by making it a box without an internal node.
             target = targets[0]
-            build_file, target_name, toolset = ParseTarget(target)
+            build_file, target_name, _toolset = ParseTarget(target)
             print(f' "{target}" [shape=box, label="{filename}\\n{target_name}"]')
         else:
             # Group multiple nodes together in a subgraph.
             print(' subgraph "cluster_%s" {' % filename)
             print(' label = "%s"' % filename)
             for target in targets:
-                build_file, target_name, toolset = ParseTarget(target)
+                build_file, target_name, _toolset = ParseTarget(target)
                 print(f' "{target}" [label="{target_name}"]')
             print(" }")
 