reconnect moved files to git repo
@@ -0,0 +1,3 @@
from __future__ import annotations

__version__ = "0.43.0"
@@ -0,0 +1,23 @@
"""
Wheel command line tool (enable python -m wheel syntax)
"""

from __future__ import annotations

import sys


def main():  # needed for console script
    if __package__ == "":
        # To be able to run 'python wheel-0.9.whl/wheel':
        import os.path

        path = os.path.dirname(os.path.dirname(__file__))
        sys.path[0:0] = [path]
    import wheel.cli

    sys.exit(wheel.cli.main())


if __name__ == "__main__":
    sys.exit(main())
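
Both entry points above end up in wheel.cli.main(); for illustration (the .whl filename here is hypothetical):

    python -m wheel version
    python wheel-0.43.0-py3-none-any.whl/wheel version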
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,26 @@
# copied from setuptools.logging, omitting monkeypatching
from __future__ import annotations

import logging
import sys


def _not_warning(record):
    return record.levelno < logging.WARNING


def configure():
    """
    Configure logging to emit warning and above to stderr
    and everything else to stdout. This behavior is provided
    for compatibility with distutils.log but may change in
    the future.
    """
    err_handler = logging.StreamHandler()
    err_handler.setLevel(logging.WARNING)
    out_handler = logging.StreamHandler(sys.stdout)
    out_handler.addFilter(_not_warning)
    handlers = err_handler, out_handler
    logging.basicConfig(
        format="{message}", style="{", handlers=handlers, level=logging.DEBUG
    )
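
A minimal usage sketch of the handler split configured above (the wheel._setuptools_logging module path is an assumption based on the header comment):

    import logging
    import wheel._setuptools_logging

    wheel._setuptools_logging.configure()
    logging.info("below WARNING, passes _not_warning -> stdout")
    logging.warning("WARNING and above -> stderr via err_handler")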
@@ -0,0 +1,595 @@
"""
Create a wheel (.whl) distribution.

A wheel is a built archive format.
"""

from __future__ import annotations

import os
import re
import shutil
import stat
import struct
import sys
import sysconfig
import warnings
from email.generator import BytesGenerator, Generator
from email.policy import EmailPolicy
from glob import iglob
from shutil import rmtree
from zipfile import ZIP_DEFLATED, ZIP_STORED

import setuptools
from setuptools import Command

from . import __version__ as wheel_version
from .macosx_libfile import calculate_macosx_platform_tag
from .metadata import pkginfo_to_metadata
from .util import log
from .vendored.packaging import tags
from .vendored.packaging import version as _packaging_version
from .wheelfile import WheelFile


def safe_name(name):
    """Convert an arbitrary string to a standard distribution name
    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    return re.sub("[^A-Za-z0-9.]+", "-", name)


def safe_version(version):
    """
    Convert an arbitrary string to a standard version string
    """
    try:
        # normalize the version
        return str(_packaging_version.Version(version))
    except _packaging_version.InvalidVersion:
        version = version.replace(" ", ".")
        return re.sub("[^A-Za-z0-9.]+", "-", version)
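
# A doctest-style sketch of the two helpers above (editorial illustration):
#
#     >>> safe_name("my-package_name")
#     'my-package-name'
#     >>> safe_version("2.0-beta")    # normalized by packaging
#     '2.0b0'
#     >>> safe_version("not a version")    # falls back to character substitution
#     'not.a.version'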


setuptools_major_version = int(setuptools.__version__.split(".")[0])

PY_LIMITED_API_PATTERN = r"cp3\d"


def _is_32bit_interpreter():
    return struct.calcsize("P") == 4


def python_tag():
    return f"py{sys.version_info[0]}"


def get_platform(archive_root):
    """Return our platform name 'win32', 'linux_x86_64'"""
    result = sysconfig.get_platform()
    if result.startswith("macosx") and archive_root is not None:
        result = calculate_macosx_platform_tag(archive_root, result)
    elif _is_32bit_interpreter():
        if result == "linux-x86_64":
            # pip pull request #3497
            result = "linux-i686"
        elif result == "linux-aarch64":
            # packaging pull request #234
            # TODO armv8l, packaging pull request #690 => this did not land
            # in pip/packaging yet
            result = "linux-armv7l"

    return result.replace("-", "_")


def get_flag(var, fallback, expected=True, warn=True):
    """Use a fallback value for determining SOABI flags if the needed config
    var is unset or unavailable."""
    val = sysconfig.get_config_var(var)
    if val is None:
        if warn:
            warnings.warn(
                f"Config variable '{var}' is unset, Python ABI tag may be incorrect",
                RuntimeWarning,
                stacklevel=2,
            )
        return fallback
    return val == expected


def get_abi_tag():
    """Return the ABI tag based on SOABI (if available) or emulate SOABI (PyPy2)."""
    soabi = sysconfig.get_config_var("SOABI")
    impl = tags.interpreter_name()
    if not soabi and impl in ("cp", "pp") and hasattr(sys, "maxunicode"):
        d = ""
        m = ""
        u = ""
        if get_flag("Py_DEBUG", hasattr(sys, "gettotalrefcount"), warn=(impl == "cp")):
            d = "d"

        if get_flag(
            "WITH_PYMALLOC",
            impl == "cp",
            warn=(impl == "cp" and sys.version_info < (3, 8)),
        ) and sys.version_info < (3, 8):
            m = "m"

        abi = f"{impl}{tags.interpreter_version()}{d}{m}{u}"
    elif soabi and impl == "cp" and soabi.startswith("cpython"):
        # non-Windows
        abi = "cp" + soabi.split("-")[1]
    elif soabi and impl == "cp" and soabi.startswith("cp"):
        # Windows
        abi = soabi.split("-")[0]
    elif soabi and impl == "pp":
        # we want something like pypy36-pp73
        abi = "-".join(soabi.split("-")[:2])
        abi = abi.replace(".", "_").replace("-", "_")
    elif soabi and impl == "graalpy":
        abi = "-".join(soabi.split("-")[:3])
        abi = abi.replace(".", "_").replace("-", "_")
    elif soabi:
        abi = soabi.replace(".", "_").replace("-", "_")
    else:
        abi = None

    return abi
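
# Illustrative results (values vary by interpreter and build; these are
# typical examples, not output from this commit): on CPython 3.11 with
# SOABI == "cpython-311-x86_64-linux-gnu", get_abi_tag() returns "cp311";
# on PyPy with SOABI == "pypy38-pp73-x86_64-linux-gnu" it returns "pypy38_pp73".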


def safer_name(name):
    return safe_name(name).replace("-", "_")


def safer_version(version):
    return safe_version(version).replace("-", "_")


def remove_readonly(func, path, excinfo):
    remove_readonly_exc(func, path, excinfo[1])


def remove_readonly_exc(func, path, exc):
    os.chmod(path, stat.S_IWRITE)
    func(path)


class bdist_wheel(Command):
    description = "create a wheel distribution"

    supported_compressions = {
        "stored": ZIP_STORED,
        "deflated": ZIP_DEFLATED,
    }

    user_options = [
        ("bdist-dir=", "b", "temporary directory for creating the distribution"),
        (
            "plat-name=",
            "p",
            "platform name to embed in generated filenames "
            "(default: %s)" % get_platform(None),
        ),
        (
            "keep-temp",
            "k",
            "keep the pseudo-installation tree around after "
            "creating the distribution archive",
        ),
        ("dist-dir=", "d", "directory to put final built distributions in"),
        ("skip-build", None, "skip rebuilding everything (for testing/debugging)"),
        (
            "relative",
            None,
            "build the archive using relative paths (default: false)",
        ),
        (
            "owner=",
            "u",
            "Owner name used when creating a tar file [default: current user]",
        ),
        (
            "group=",
            "g",
            "Group name used when creating a tar file [default: current group]",
        ),
        ("universal", None, "make a universal wheel (default: false)"),
        (
            "compression=",
            None,
            "zipfile compression (one of: {}) (default: 'deflated')".format(
                ", ".join(supported_compressions)
            ),
        ),
        (
            "python-tag=",
            None,
            "Python implementation compatibility tag (default: '%s')" % (python_tag()),
        ),
        (
            "build-number=",
            None,
            "Build number for this particular version. "
            "As specified in PEP-0427, this must start with a digit. "
            "[default: None]",
        ),
        (
            "py-limited-api=",
            None,
            "Python tag (cp32|cp33|cpNN) for abi3 wheel tag (default: false)",
        ),
    ]

    boolean_options = ["keep-temp", "skip-build", "relative", "universal"]

    def initialize_options(self):
        self.bdist_dir = None
        self.data_dir = None
        self.plat_name = None
        self.plat_tag = None
        self.format = "zip"
        self.keep_temp = False
        self.dist_dir = None
        self.egginfo_dir = None
        self.root_is_pure = None
        self.skip_build = None
        self.relative = False
        self.owner = None
        self.group = None
        self.universal = False
        self.compression = "deflated"
        self.python_tag = python_tag()
        self.build_number = None
        self.py_limited_api = False
        self.plat_name_supplied = False

    def finalize_options(self):
        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command("bdist").bdist_base
            self.bdist_dir = os.path.join(bdist_base, "wheel")

        egg_info = self.distribution.get_command_obj("egg_info")
        egg_info.ensure_finalized()  # needed for correct `wheel_dist_name`

        self.data_dir = self.wheel_dist_name + ".data"
        self.plat_name_supplied = self.plat_name is not None

        try:
            self.compression = self.supported_compressions[self.compression]
        except KeyError:
            raise ValueError(f"Unsupported compression: {self.compression}") from None

        need_options = ("dist_dir", "plat_name", "skip_build")

        self.set_undefined_options("bdist", *zip(need_options, need_options))

        self.root_is_pure = not (
            self.distribution.has_ext_modules() or self.distribution.has_c_libraries()
        )

        if self.py_limited_api and not re.match(
            PY_LIMITED_API_PATTERN, self.py_limited_api
        ):
            raise ValueError("py-limited-api must match '%s'" % PY_LIMITED_API_PATTERN)

        # Support legacy [wheel] section for setting universal
        wheel = self.distribution.get_option_dict("wheel")
        if "universal" in wheel:
            # please don't define this in your global configs
            log.warning(
                "The [wheel] section is deprecated. Use [bdist_wheel] instead.",
            )
            val = wheel["universal"][1].strip()
            if val.lower() in ("1", "true", "yes"):
                self.universal = True

        if self.build_number is not None and not self.build_number[:1].isdigit():
            raise ValueError("Build tag (build-number) must start with a digit.")

    @property
    def wheel_dist_name(self):
        """Return distribution full name with - replaced with _"""
        components = (
            safer_name(self.distribution.get_name()),
            safer_version(self.distribution.get_version()),
        )
        if self.build_number:
            components += (self.build_number,)
        return "-".join(components)
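    # Sketch of the name mangling above (hypothetical distribution): a project
    # named "my-dist" at version "2.0-beta" with build_number "1" yields
    # safer_name() -> "my_dist", safer_version() -> "2.0b0", and therefore
    # wheel_dist_name -> "my_dist-2.0b0-1".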

    def get_tag(self):
        # bdist sets self.plat_name if unset, we should only use it for purepy
        # wheels if the user supplied it.
        if self.plat_name_supplied:
            plat_name = self.plat_name
        elif self.root_is_pure:
            plat_name = "any"
        else:
            # macosx contains system version in platform name so need special handle
            if self.plat_name and not self.plat_name.startswith("macosx"):
                plat_name = self.plat_name
            else:
                # on macosx always limit the platform name to comply with any
                # c-extension modules in bdist_dir, since the user can specify
                # a higher MACOSX_DEPLOYMENT_TARGET via tools like CMake

                # on other platforms, and on macosx if there are no c-extension
                # modules, use the default platform name.
                plat_name = get_platform(self.bdist_dir)

            if _is_32bit_interpreter():
                if plat_name in ("linux-x86_64", "linux_x86_64"):
                    plat_name = "linux_i686"
                if plat_name in ("linux-aarch64", "linux_aarch64"):
                    # TODO armv8l, packaging pull request #690 => this did not land
                    # in pip/packaging yet
                    plat_name = "linux_armv7l"

        plat_name = (
            plat_name.lower().replace("-", "_").replace(".", "_").replace(" ", "_")
        )

        if self.root_is_pure:
            if self.universal:
                impl = "py2.py3"
            else:
                impl = self.python_tag
            tag = (impl, "none", plat_name)
        else:
            impl_name = tags.interpreter_name()
            impl_ver = tags.interpreter_version()
            impl = impl_name + impl_ver
            # We don't work on CPython 3.1, 3.0.
            if self.py_limited_api and (impl_name + impl_ver).startswith("cp3"):
                impl = self.py_limited_api
                abi_tag = "abi3"
            else:
                abi_tag = str(get_abi_tag()).lower()
            tag = (impl, abi_tag, plat_name)
            # issue gh-374: allow overriding plat_name
            supported_tags = [
                (t.interpreter, t.abi, plat_name) for t in tags.sys_tags()
            ]
            assert (
                tag in supported_tags
            ), f"would build wheel with unsupported tag {tag}"
        return tag
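    # Illustrative get_tag() outcomes (editorial; the platform values are
    # typical 64-bit Linux examples, not output from this commit):
    #   pure + universal:            ("py2.py3", "none", "any")
    #   pure, default python_tag:    ("py3", "none", "any")
    #   C extension, CPython 3.11:   ("cp311", "cp311", "linux_x86_64")
    #   with py-limited-api=cp38:    ("cp38", "abi3", "linux_x86_64")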

    def run(self):
        build_scripts = self.reinitialize_command("build_scripts")
        build_scripts.executable = "python"
        build_scripts.force = True

        build_ext = self.reinitialize_command("build_ext")
        build_ext.inplace = False

        if not self.skip_build:
            self.run_command("build")

        install = self.reinitialize_command("install", reinit_subcommands=True)
        install.root = self.bdist_dir
        install.compile = False
        install.skip_build = self.skip_build
        install.warn_dir = False

        # A wheel without setuptools scripts is more cross-platform.
        # Use the (undocumented) `no_ep` option to setuptools'
        # install_scripts command to avoid creating entry point scripts.
        install_scripts = self.reinitialize_command("install_scripts")
        install_scripts.no_ep = True

        # Use a custom scheme for the archive, because we have to decide
        # at installation time which scheme to use.
        for key in ("headers", "scripts", "data", "purelib", "platlib"):
            setattr(install, "install_" + key, os.path.join(self.data_dir, key))

        basedir_observed = ""

        if os.name == "nt":
            # win32 barfs if any of these are ''; could be '.'?
            # (distutils.command.install:change_roots bug)
            basedir_observed = os.path.normpath(os.path.join(self.data_dir, ".."))
            self.install_libbase = self.install_lib = basedir_observed

        setattr(
            install,
            "install_purelib" if self.root_is_pure else "install_platlib",
            basedir_observed,
        )

        log.info(f"installing to {self.bdist_dir}")

        self.run_command("install")

        impl_tag, abi_tag, plat_tag = self.get_tag()
        archive_basename = f"{self.wheel_dist_name}-{impl_tag}-{abi_tag}-{plat_tag}"
        if not self.relative:
            archive_root = self.bdist_dir
        else:
            archive_root = os.path.join(
                self.bdist_dir, self._ensure_relative(install.install_base)
            )

        self.set_undefined_options("install_egg_info", ("target", "egginfo_dir"))
        distinfo_dirname = (
            f"{safer_name(self.distribution.get_name())}-"
            f"{safer_version(self.distribution.get_version())}.dist-info"
        )
        distinfo_dir = os.path.join(self.bdist_dir, distinfo_dirname)
        self.egg2dist(self.egginfo_dir, distinfo_dir)

        self.write_wheelfile(distinfo_dir)

        # Make the archive
        if not os.path.exists(self.dist_dir):
            os.makedirs(self.dist_dir)

        wheel_path = os.path.join(self.dist_dir, archive_basename + ".whl")
        with WheelFile(wheel_path, "w", self.compression) as wf:
            wf.write_files(archive_root)

        # Add to 'Distribution.dist_files' so that the "upload" command works
        getattr(self.distribution, "dist_files", []).append(
            (
                "bdist_wheel",
                "{}.{}".format(*sys.version_info[:2]),  # like 3.7
                wheel_path,
            )
        )

        if not self.keep_temp:
            log.info(f"removing {self.bdist_dir}")
            if not self.dry_run:
                if sys.version_info < (3, 12):
                    rmtree(self.bdist_dir, onerror=remove_readonly)
                else:
                    rmtree(self.bdist_dir, onexc=remove_readonly_exc)

    def write_wheelfile(
        self, wheelfile_base, generator="bdist_wheel (" + wheel_version + ")"
    ):
        from email.message import Message

        msg = Message()
        msg["Wheel-Version"] = "1.0"  # of the spec
        msg["Generator"] = generator
        msg["Root-Is-Purelib"] = str(self.root_is_pure).lower()
        if self.build_number is not None:
            msg["Build"] = self.build_number

        # Doesn't work for bdist_wininst
        impl_tag, abi_tag, plat_tag = self.get_tag()
        for impl in impl_tag.split("."):
            for abi in abi_tag.split("."):
                for plat in plat_tag.split("."):
                    msg["Tag"] = "-".join((impl, abi, plat))

        wheelfile_path = os.path.join(wheelfile_base, "WHEEL")
        log.info(f"creating {wheelfile_path}")
        with open(wheelfile_path, "wb") as f:
            BytesGenerator(f, maxheaderlen=0).flatten(msg)
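    # For the non-pure example above, the emitted .dist-info/WHEEL would read
    # roughly as follows (illustrative tag values):
    #
    #   Wheel-Version: 1.0
    #   Generator: bdist_wheel (0.43.0)
    #   Root-Is-Purelib: false
    #   Tag: cp311-cp311-linux_x86_64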

    def _ensure_relative(self, path):
        # copied from dir_util, deleted
        drive, path = os.path.splitdrive(path)
        if path[0:1] == os.sep:
            path = drive + path[1:]
        return path

    @property
    def license_paths(self):
        if setuptools_major_version >= 57:
            # Setuptools has resolved any patterns to actual file names
            return self.distribution.metadata.license_files or ()

        files = set()
        metadata = self.distribution.get_option_dict("metadata")
        if setuptools_major_version >= 42:
            # Setuptools recognizes the license_files option but does not do globbing
            patterns = self.distribution.metadata.license_files
        else:
            # Prior to those, wheel is entirely responsible for handling license files
            if "license_files" in metadata:
                patterns = metadata["license_files"][1].split()
            else:
                patterns = ()

        if "license_file" in metadata:
            warnings.warn(
                'The "license_file" option is deprecated. Use "license_files" instead.',
                DeprecationWarning,
                stacklevel=2,
            )
            files.add(metadata["license_file"][1])

        if not files and not patterns and not isinstance(patterns, list):
            patterns = ("LICEN[CS]E*", "COPYING*", "NOTICE*", "AUTHORS*")

        for pattern in patterns:
            for path in iglob(pattern):
                if path.endswith("~"):
                    log.debug(
                        f'ignoring license file "{path}" as it looks like a backup'
                    )
                    continue

                if path not in files and os.path.isfile(path):
                    log.info(
                        f'adding license file "{path}" (matched pattern "{pattern}")'
                    )
                    files.add(path)

        return files

    def egg2dist(self, egginfo_path, distinfo_path):
        """Convert an .egg-info directory into a .dist-info directory"""

        def adios(p):
            """Appropriately delete directory, file or link."""
            if os.path.exists(p) and not os.path.islink(p) and os.path.isdir(p):
                shutil.rmtree(p)
            elif os.path.exists(p):
                os.unlink(p)

        adios(distinfo_path)

        if not os.path.exists(egginfo_path):
            # There is no egg-info. This is probably because the egg-info
            # file/directory is not named matching the distribution name used
            # to name the archive file. Check for this case and report
            # accordingly.
            import glob

            pat = os.path.join(os.path.dirname(egginfo_path), "*.egg-info")
            possible = glob.glob(pat)
            err = f"Egg metadata expected at {egginfo_path} but not found"
            if possible:
                alt = os.path.basename(possible[0])
                err += f" ({alt} found - possible misnamed archive file?)"

            raise ValueError(err)

        if os.path.isfile(egginfo_path):
            # .egg-info is a single file
            pkginfo_path = egginfo_path
            pkg_info = pkginfo_to_metadata(egginfo_path, egginfo_path)
            os.mkdir(distinfo_path)
        else:
            # .egg-info is a directory
            pkginfo_path = os.path.join(egginfo_path, "PKG-INFO")
            pkg_info = pkginfo_to_metadata(egginfo_path, pkginfo_path)

            # ignore common egg metadata that is useless to wheel
            shutil.copytree(
                egginfo_path,
                distinfo_path,
                ignore=lambda x, y: {
                    "PKG-INFO",
                    "requires.txt",
                    "SOURCES.txt",
                    "not-zip-safe",
                },
            )

            # delete dependency_links if it is only whitespace
            dependency_links_path = os.path.join(distinfo_path, "dependency_links.txt")
            with open(dependency_links_path, encoding="utf-8") as dependency_links_file:
                dependency_links = dependency_links_file.read().strip()
            if not dependency_links:
                adios(dependency_links_path)

        pkg_info_path = os.path.join(distinfo_path, "METADATA")
        serialization_policy = EmailPolicy(
            utf8=True,
            mangle_from_=False,
            max_line_length=0,
        )
        with open(pkg_info_path, "w", encoding="utf-8") as out:
            Generator(out, policy=serialization_policy).flatten(pkg_info)

        for license_path in self.license_paths:
            filename = os.path.basename(license_path)
            shutil.copy(license_path, os.path.join(distinfo_path, filename))

        adios(egginfo_path)
@@ -0,0 +1,155 @@
"""
Wheel command-line utility.
"""

from __future__ import annotations

import argparse
import os
import sys
from argparse import ArgumentTypeError


class WheelError(Exception):
    pass


def unpack_f(args):
    from .unpack import unpack

    unpack(args.wheelfile, args.dest)


def pack_f(args):
    from .pack import pack

    pack(args.directory, args.dest_dir, args.build_number)


def convert_f(args):
    from .convert import convert

    convert(args.files, args.dest_dir, args.verbose)


def tags_f(args):
    from .tags import tags

    names = (
        tags(
            wheel,
            args.python_tag,
            args.abi_tag,
            args.platform_tag,
            args.build,
            args.remove,
        )
        for wheel in args.wheel
    )

    for name in names:
        print(name)


def version_f(args):
    from .. import __version__

    print("wheel %s" % __version__)


def parse_build_tag(build_tag: str) -> str:
    if build_tag and not build_tag[0].isdigit():
        raise ArgumentTypeError("build tag must begin with a digit")
    elif "-" in build_tag:
        raise ArgumentTypeError("invalid character ('-') in build tag")

    return build_tag


TAGS_HELP = """\
Make a new wheel with given tags. Any tags unspecified will remain the same.
Starting the tags with a "+" will append to the existing tags. Starting with a
"-" will remove a tag (use --option=-TAG syntax). Multiple tags can be
separated by ".". The original file will remain unless --remove is given. The
output filename(s) will be displayed on stdout for further processing.
"""
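
# Example invocations of the syntax described in TAGS_HELP (illustrative,
# hypothetical filenames):
#
#   wheel tags --python-tag=py3 dist/demo-1.0-py2.py3-none-any.whl
#   wheel tags --python-tag=-py2 dist/demo-1.0-py2.py3-none-any.whl
#   wheel tags --platform-tag=+macosx_11_0_arm64 --remove dist/demo-1.0-cp311-cp311-macosx_10_9_x86_64.whl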


def parser():
    p = argparse.ArgumentParser()
    s = p.add_subparsers(help="commands")

    unpack_parser = s.add_parser("unpack", help="Unpack wheel")
    unpack_parser.add_argument(
        "--dest", "-d", help="Destination directory", default="."
    )
    unpack_parser.add_argument("wheelfile", help="Wheel file")
    unpack_parser.set_defaults(func=unpack_f)

    repack_parser = s.add_parser("pack", help="Repack wheel")
    repack_parser.add_argument("directory", help="Root directory of the unpacked wheel")
    repack_parser.add_argument(
        "--dest-dir",
        "-d",
        default=os.path.curdir,
        help="Directory to store the wheel (default %(default)s)",
    )
    repack_parser.add_argument(
        "--build-number", help="Build tag to use in the wheel name"
    )
    repack_parser.set_defaults(func=pack_f)

    convert_parser = s.add_parser("convert", help="Convert egg or wininst to wheel")
    convert_parser.add_argument("files", nargs="*", help="Files to convert")
    convert_parser.add_argument(
        "--dest-dir",
        "-d",
        default=os.path.curdir,
        help="Directory to store wheels (default %(default)s)",
    )
    convert_parser.add_argument("--verbose", "-v", action="store_true")
    convert_parser.set_defaults(func=convert_f)

    tags_parser = s.add_parser(
        "tags", help="Add or replace the tags on a wheel", description=TAGS_HELP
    )
    tags_parser.add_argument("wheel", nargs="*", help="Existing wheel(s) to retag")
    tags_parser.add_argument(
        "--remove",
        action="store_true",
        help="Remove the original files, keeping only the renamed ones",
    )
    tags_parser.add_argument(
        "--python-tag", metavar="TAG", help="Specify an interpreter tag(s)"
    )
    tags_parser.add_argument("--abi-tag", metavar="TAG", help="Specify an ABI tag(s)")
    tags_parser.add_argument(
        "--platform-tag", metavar="TAG", help="Specify a platform tag(s)"
    )
    tags_parser.add_argument(
        "--build", type=parse_build_tag, metavar="BUILD", help="Specify a build tag"
    )
    tags_parser.set_defaults(func=tags_f)

    version_parser = s.add_parser("version", help="Print version and exit")
    version_parser.set_defaults(func=version_f)

    help_parser = s.add_parser("help", help="Show this help")
    help_parser.set_defaults(func=lambda args: p.print_help())

    return p


def main():
    p = parser()
    args = p.parse_args()
    if not hasattr(args, "func"):
        p.print_help()
    else:
        try:
            args.func(args)
            return 0
        except WheelError as e:
            print(e, file=sys.stderr)

    return 1
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,273 @@
from __future__ import annotations

import os.path
import re
import shutil
import tempfile
import zipfile
from glob import iglob

from ..bdist_wheel import bdist_wheel
from ..wheelfile import WheelFile
from . import WheelError

try:
    from setuptools import Distribution
except ImportError:
    from distutils.dist import Distribution

egg_info_re = re.compile(
    r"""
    (?P<name>.+?)-(?P<ver>.+?)
    (-(?P<pyver>py\d\.\d+)
     (-(?P<arch>.+?))?
    )?.egg$""",
    re.VERBOSE,
)
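
# What egg_info_re extracts (doctest-style sketch with hypothetical filenames):
#
#     >>> egg_info_re.match("demo-1.0-py3.10-win_amd64.egg").groupdict()
#     {'name': 'demo', 'ver': '1.0', 'pyver': 'py3.10', 'arch': 'win_amd64'}
#     >>> egg_info_re.match("demo-1.0.egg").groupdict()
#     {'name': 'demo', 'ver': '1.0', 'pyver': None, 'arch': None}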


class _bdist_wheel_tag(bdist_wheel):
    # allow the client to override the default generated wheel tag
    # The default bdist_wheel implementation uses python and abi tags
    # of the running python process. This is not suitable for
    # generating/repackaging prebuild binaries.

    full_tag_supplied = False
    full_tag = None  # None or a (pytag, soabitag, plattag) triple

    def get_tag(self):
        if self.full_tag_supplied and self.full_tag is not None:
            return self.full_tag
        else:
            return bdist_wheel.get_tag(self)


def egg2wheel(egg_path: str, dest_dir: str) -> None:
    filename = os.path.basename(egg_path)
    match = egg_info_re.match(filename)
    if not match:
        raise WheelError(f"Invalid egg file name: {filename}")

    egg_info = match.groupdict()
    dir = tempfile.mkdtemp(suffix="_e2w")
    if os.path.isfile(egg_path):
        # assume we have a bdist_egg otherwise
        with zipfile.ZipFile(egg_path) as egg:
            egg.extractall(dir)
    else:
        # support buildout-style installed eggs directories
        for pth in os.listdir(egg_path):
            src = os.path.join(egg_path, pth)
            if os.path.isfile(src):
                shutil.copy2(src, dir)
            else:
                shutil.copytree(src, os.path.join(dir, pth))

    pyver = egg_info["pyver"]
    if pyver:
        pyver = egg_info["pyver"] = pyver.replace(".", "")

    arch = (egg_info["arch"] or "any").replace(".", "_").replace("-", "_")

    # assume all binary eggs are for CPython
    abi = "cp" + pyver[2:] if arch != "any" else "none"

    root_is_purelib = egg_info["arch"] is None
    if root_is_purelib:
        bw = bdist_wheel(Distribution())
    else:
        bw = _bdist_wheel_tag(Distribution())

    bw.root_is_pure = root_is_purelib
    bw.python_tag = pyver
    bw.plat_name_supplied = True
    bw.plat_name = egg_info["arch"] or "any"
    if not root_is_purelib:
        bw.full_tag_supplied = True
        bw.full_tag = (pyver, abi, arch)

    dist_info_dir = os.path.join(dir, "{name}-{ver}.dist-info".format(**egg_info))
    bw.egg2dist(os.path.join(dir, "EGG-INFO"), dist_info_dir)
    bw.write_wheelfile(dist_info_dir, generator="egg2wheel")
    wheel_name = "{name}-{ver}-{pyver}-{}-{}.whl".format(abi, arch, **egg_info)
    with WheelFile(os.path.join(dest_dir, wheel_name), "w") as wf:
        wf.write_files(dir)

    shutil.rmtree(dir)


def parse_wininst_info(wininfo_name, egginfo_name):
    """Extract metadata from filenames.

    Extracts the 4 metadata items needed (name, version, pyversion, arch) from
    the installer filename and the name of the egg-info directory embedded in
    the zipfile (if any).

    The egginfo filename has the format::

        name-ver(-pyver)(-arch).egg-info

    The installer filename has the format::

        name-ver.arch(-pyver).exe

    Some things to note:

    1. The installer filename is not definitive. An installer can be renamed
       and work perfectly well as an installer. So more reliable data should
       be used whenever possible.
    2. The egg-info data should be preferred for the name and version, because
       these come straight from the distutils metadata, and are mandatory.
    3. The pyver from the egg-info data should be ignored, as it is
       constructed from the version of Python used to build the installer,
       which is irrelevant - the installer filename is correct here (even to
       the point that when it's not there, any version is implied).
    4. The architecture must be taken from the installer filename, as it is
       not included in the egg-info data.
    5. Architecture-neutral installers still have an architecture because the
       installer format itself (being executable) is architecture-specific. We
       should therefore ignore the architecture if the content is pure-python.
    """

    egginfo = None
    if egginfo_name:
        egginfo = egg_info_re.search(egginfo_name)
        if not egginfo:
            raise ValueError(f"Egg info filename {egginfo_name} is not valid")

    # Parse the wininst filename
    # 1. Distribution name (up to the first '-')
    w_name, sep, rest = wininfo_name.partition("-")
    if not sep:
        raise ValueError(f"Installer filename {wininfo_name} is not valid")

    # Strip '.exe'
    rest = rest[:-4]
    # 2. Python version (from the last '-', must start with 'py')
    rest2, sep, w_pyver = rest.rpartition("-")
    if sep and w_pyver.startswith("py"):
        rest = rest2
        w_pyver = w_pyver.replace(".", "")
    else:
        # Not version specific - use py2.py3. While it is possible that
        # pure-Python code is not compatible with both Python 2 and 3, there
        # is no way of knowing from the wininst format, so we assume the best
        # here (the user can always manually rename the wheel to be more
        # restrictive if needed).
        w_pyver = "py2.py3"
    # 3. Version and architecture
    w_ver, sep, w_arch = rest.rpartition(".")
    if not sep:
        raise ValueError(f"Installer filename {wininfo_name} is not valid")

    if egginfo:
        w_name = egginfo.group("name")
        w_ver = egginfo.group("ver")

    return {"name": w_name, "ver": w_ver, "arch": w_arch, "pyver": w_pyver}
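
# Filename parsing sketch (hypothetical installer names, no embedded egg-info):
#
#     >>> parse_wininst_info("demo-1.0.win32-py3.8.exe", None)
#     {'name': 'demo', 'ver': '1.0', 'arch': 'win32', 'pyver': 'py38'}
#     >>> parse_wininst_info("demo-1.0.win-amd64.exe", None)["pyver"]
#     'py2.py3'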


def wininst2wheel(path, dest_dir):
    with zipfile.ZipFile(path) as bdw:
        # Search for egg-info in the archive
        egginfo_name = None
        for filename in bdw.namelist():
            if ".egg-info" in filename:
                egginfo_name = filename
                break

        info = parse_wininst_info(os.path.basename(path), egginfo_name)

        root_is_purelib = True
        for zipinfo in bdw.infolist():
            if zipinfo.filename.startswith("PLATLIB"):
                root_is_purelib = False
                break
        if root_is_purelib:
            paths = {"purelib": ""}
        else:
            paths = {"platlib": ""}

        dist_info = "{name}-{ver}".format(**info)
        datadir = "%s.data/" % dist_info

        # rewrite paths to trick ZipFile into extracting an egg
        # XXX grab wininst .ini - between .exe, padding, and first zip file.
        members = []
        egginfo_name = ""
        for zipinfo in bdw.infolist():
            key, basename = zipinfo.filename.split("/", 1)
            key = key.lower()
            basepath = paths.get(key, None)
            if basepath is None:
                basepath = datadir + key.lower() + "/"
            oldname = zipinfo.filename
            newname = basepath + basename
            zipinfo.filename = newname
            del bdw.NameToInfo[oldname]
            bdw.NameToInfo[newname] = zipinfo
            # Collect member names, but omit '' (from an entry like "PLATLIB/")
            if newname:
                members.append(newname)
            # Remember egg-info name for the egg2dist call below
            if not egginfo_name:
                if newname.endswith(".egg-info"):
                    egginfo_name = newname
                elif ".egg-info/" in newname:
                    egginfo_name, sep, _ = newname.rpartition("/")
        dir = tempfile.mkdtemp(suffix="_b2w")
        bdw.extractall(dir, members)

    # egg2wheel
    abi = "none"
    pyver = info["pyver"]
    arch = (info["arch"] or "any").replace(".", "_").replace("-", "_")
    # Wininst installers always have arch even if they are not
    # architecture-specific (because the format itself is).
    # So, assume the content is architecture-neutral if root is purelib.
    if root_is_purelib:
        arch = "any"
    # If the installer is architecture-specific, it's almost certainly also
    # CPython-specific.
    if arch != "any":
        pyver = pyver.replace("py", "cp")
    wheel_name = "-".join((dist_info, pyver, abi, arch))
    if root_is_purelib:
        bw = bdist_wheel(Distribution())
    else:
        bw = _bdist_wheel_tag(Distribution())

    bw.root_is_pure = root_is_purelib
    bw.python_tag = pyver
    bw.plat_name_supplied = True
    bw.plat_name = info["arch"] or "any"

    if not root_is_purelib:
        bw.full_tag_supplied = True
        bw.full_tag = (pyver, abi, arch)

    dist_info_dir = os.path.join(dir, "%s.dist-info" % dist_info)
    bw.egg2dist(os.path.join(dir, egginfo_name), dist_info_dir)
    bw.write_wheelfile(dist_info_dir, generator="wininst2wheel")

    wheel_path = os.path.join(dest_dir, wheel_name)
    with WheelFile(wheel_path, "w") as wf:
        wf.write_files(dir)

    shutil.rmtree(dir)


def convert(files, dest_dir, verbose):
    for pat in files:
        for installer in iglob(pat):
            if os.path.splitext(installer)[1] == ".egg":
                conv = egg2wheel
            else:
                conv = wininst2wheel

            if verbose:
                print(f"{installer}... ", flush=True)

            conv(installer, dest_dir)
            if verbose:
                print("OK")
@@ -0,0 +1,85 @@
from __future__ import annotations

import email.policy
import os.path
import re
from email.generator import BytesGenerator
from email.parser import BytesParser

from wheel.cli import WheelError
from wheel.wheelfile import WheelFile

DIST_INFO_RE = re.compile(r"^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?))\.dist-info$")


def pack(directory: str, dest_dir: str, build_number: str | None) -> None:
    """Repack a previously unpacked wheel directory into a new wheel file.

    The .dist-info/WHEEL file must contain one or more tags so that the target
    wheel file name can be determined.

    :param directory: The unpacked wheel directory
    :param dest_dir: Destination directory (defaults to the current directory)
    """
    # Find the .dist-info directory
    dist_info_dirs = [
        fn
        for fn in os.listdir(directory)
        if os.path.isdir(os.path.join(directory, fn)) and DIST_INFO_RE.match(fn)
    ]
    if len(dist_info_dirs) > 1:
        raise WheelError(f"Multiple .dist-info directories found in {directory}")
    elif not dist_info_dirs:
        raise WheelError(f"No .dist-info directories found in {directory}")

    # Determine the target wheel filename
    dist_info_dir = dist_info_dirs[0]
    name_version = DIST_INFO_RE.match(dist_info_dir).group("namever")

    # Read the tags and the existing build number from .dist-info/WHEEL
    wheel_file_path = os.path.join(directory, dist_info_dir, "WHEEL")
    with open(wheel_file_path, "rb") as f:
        info = BytesParser(policy=email.policy.compat32).parse(f)
        tags: list[str] = info.get_all("Tag", [])
        existing_build_number = info.get("Build")

        if not tags:
            raise WheelError(
                f"No tags present in {dist_info_dir}/WHEEL; cannot determine target "
                f"wheel filename"
            )

    # Set the wheel file name and add/replace/remove the Build tag in .dist-info/WHEEL
    build_number = build_number if build_number is not None else existing_build_number
    if build_number is not None:
        del info["Build"]
        if build_number:
            info["Build"] = build_number
            name_version += "-" + build_number

        if build_number != existing_build_number:
            with open(wheel_file_path, "wb") as f:
                BytesGenerator(f, maxheaderlen=0).flatten(info)

    # Reassemble the tags for the wheel file
    tagline = compute_tagline(tags)

    # Repack the wheel
    wheel_path = os.path.join(dest_dir, f"{name_version}-{tagline}.whl")
    with WheelFile(wheel_path, "w") as wf:
        print(f"Repacking wheel as {wheel_path}...", end="", flush=True)
        wf.write_files(directory)

    print("OK")


def compute_tagline(tags: list[str]) -> str:
    """Compute a tagline from a list of tags.

    :param tags: A list of tags
    :return: A tagline
    """
    impls = sorted({tag.split("-")[0] for tag in tags})
    abivers = sorted({tag.split("-")[1] for tag in tags})
    platforms = sorted({tag.split("-")[2] for tag in tags})
    return "-".join([".".join(impls), ".".join(abivers), ".".join(platforms)])
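
# Doctest-style sketch of the helper above:
#
#     >>> compute_tagline(["py2-none-any", "py3-none-any"])
#     'py2.py3-none-any'
#     >>> compute_tagline(["cp311-cp311-manylinux_2_17_x86_64", "cp311-cp311-musllinux_1_1_x86_64"])
#     'cp311-cp311-manylinux_2_17_x86_64.musllinux_1_1_x86_64'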
@@ -0,0 +1,139 @@
from __future__ import annotations

import email.policy
import itertools
import os
from collections.abc import Iterable
from email.parser import BytesParser

from ..wheelfile import WheelFile


def _compute_tags(original_tags: Iterable[str], new_tags: str | None) -> set[str]:
    """Add or replace tags. Supports dot-separated tags"""
    if new_tags is None:
        return set(original_tags)

    if new_tags.startswith("+"):
        return {*original_tags, *new_tags[1:].split(".")}

    if new_tags.startswith("-"):
        return set(original_tags) - set(new_tags[1:].split("."))

    return set(new_tags.split("."))
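
# The three modes described in the docstring, as a doctest-style sketch:
#
#     >>> sorted(_compute_tags(["py2", "py3"], "+py4"))
#     ['py2', 'py3', 'py4']
#     >>> sorted(_compute_tags(["py2", "py3"], "-py2"))
#     ['py3']
#     >>> sorted(_compute_tags(["py2", "py3"], "cp311"))
#     ['cp311']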


def tags(
    wheel: str,
    python_tags: str | None = None,
    abi_tags: str | None = None,
    platform_tags: str | None = None,
    build_tag: str | None = None,
    remove: bool = False,
) -> str:
    """Change the tags on a wheel file.

    The tags are left unchanged if they are not specified. To specify "none",
    use ["none"]. To append to the previous tags, a tag should start with a
    "+". If a tag starts with "-", it will be removed from existing tags.
    Processing is done left to right.

    :param wheel: The paths to the wheels
    :param python_tags: The Python tags to set
    :param abi_tags: The ABI tags to set
    :param platform_tags: The platform tags to set
    :param build_tag: The build tag to set
    :param remove: Remove the original wheel
    """
    with WheelFile(wheel, "r") as f:
        assert f.filename, f"{f.filename} must be available"

        wheel_info = f.read(f.dist_info_path + "/WHEEL")
        info = BytesParser(policy=email.policy.compat32).parsebytes(wheel_info)

        original_wheel_name = os.path.basename(f.filename)
        namever = f.parsed_filename.group("namever")
        build = f.parsed_filename.group("build")
        original_python_tags = f.parsed_filename.group("pyver").split(".")
        original_abi_tags = f.parsed_filename.group("abi").split(".")
        original_plat_tags = f.parsed_filename.group("plat").split(".")

        tags: list[str] = info.get_all("Tag", [])
        existing_build_tag = info.get("Build")

        impls = {tag.split("-")[0] for tag in tags}
        abivers = {tag.split("-")[1] for tag in tags}
        platforms = {tag.split("-")[2] for tag in tags}

        if impls != set(original_python_tags):
            msg = f"Wheel internal tags {impls!r} != filename tags {original_python_tags!r}"
            raise AssertionError(msg)

        if abivers != set(original_abi_tags):
            msg = f"Wheel internal tags {abivers!r} != filename tags {original_abi_tags!r}"
            raise AssertionError(msg)

        if platforms != set(original_plat_tags):
            msg = (
                f"Wheel internal tags {platforms!r} != filename tags {original_plat_tags!r}"
            )
            raise AssertionError(msg)

        if existing_build_tag != build:
            msg = (
                f"Incorrect filename '{build}' "
                f"& *.dist-info/WHEEL '{existing_build_tag}' build numbers"
            )
            raise AssertionError(msg)

        # Start changing as needed
        if build_tag is not None:
            build = build_tag

        final_python_tags = sorted(_compute_tags(original_python_tags, python_tags))
        final_abi_tags = sorted(_compute_tags(original_abi_tags, abi_tags))
        final_plat_tags = sorted(_compute_tags(original_plat_tags, platform_tags))

        final_tags = [
            namever,
            ".".join(final_python_tags),
            ".".join(final_abi_tags),
            ".".join(final_plat_tags),
        ]
        if build:
            final_tags.insert(1, build)

        final_wheel_name = "-".join(final_tags) + ".whl"

        if original_wheel_name != final_wheel_name:
            del info["Tag"], info["Build"]
            for a, b, c in itertools.product(
                final_python_tags, final_abi_tags, final_plat_tags
            ):
                info["Tag"] = f"{a}-{b}-{c}"
            if build:
                info["Build"] = build

            original_wheel_path = os.path.join(
                os.path.dirname(f.filename), original_wheel_name
            )
            final_wheel_path = os.path.join(os.path.dirname(f.filename), final_wheel_name)

            with WheelFile(original_wheel_path, "r") as fin, WheelFile(
                final_wheel_path, "w"
            ) as fout:
                fout.comment = fin.comment  # preserve the comment
                for item in fin.infolist():
                    if item.is_dir():
                        continue
                    if item.filename == f.dist_info_path + "/RECORD":
                        continue
                    if item.filename == f.dist_info_path + "/WHEEL":
                        fout.writestr(item, info.as_bytes())
                    else:
                        fout.writestr(item, fin.read(item))

            if remove:
                os.remove(original_wheel_path)

    return final_wheel_name
@@ -0,0 +1,30 @@
from __future__ import annotations

from pathlib import Path

from ..wheelfile import WheelFile


def unpack(path: str, dest: str = ".") -> None:
    """Unpack a wheel.

    Wheel content will be unpacked to {dest}/{name}-{ver}, where {name}
    is the package name and {ver} its version.

    :param path: The path to the wheel.
    :param dest: Destination directory (default to current directory).
    """
    with WheelFile(path) as wf:
        namever = wf.parsed_filename.group("namever")
        destination = Path(dest) / namever
        print(f"Unpacking to: {destination}...", end="", flush=True)
        for zinfo in wf.filelist:
            wf.extract(zinfo, destination)

            # Set permissions to the same values as they were set in the archive
            # We have to do this manually due to
            # https://github.com/python/cpython/issues/59999
            permissions = zinfo.external_attr >> 16 & 0o777
            destination.joinpath(zinfo.filename).chmod(permissions)

    print("OK")
@@ -0,0 +1,469 @@
"""
This module contains functions to analyse dynamic library
headers to extract system information

Currently only for MacOSX

Library files on macosx systems start with a Mach-O or Fat field.
The two can be distinguished by the first 32 bits, called the magic number.
The proper value of the magic number carries the suffix _MAGIC; the suffix
_CIGAM means reversed byte order.
Both fields come in two variants: 32 and 64 bit.

A FAT field indicates that the library contains several versions of the
library (typically for different architectures). It records where the
Mach-O headers start.

Each section started with a Mach-O header contains one library
(so if the file starts with this field it contains only one version).

After the Mach-O field there are section fields.
Each of them starts with two fields:
cmd - magic number for this command
cmdsize - total size occupied by this section information.

Only the sections LC_VERSION_MIN_MACOSX (for macosx 10.13 and earlier)
and LC_BUILD_VERSION (for macosx 10.14 and newer) are of interest here,
because they contain information about the minimal system version.

Important remarks:
- For fat files this implementation looks for the maximum version number.
  It does not check whether it is 32 or 64 bit and does not compare it with
  the currently built package, so it may falsely report a higher version
  than needed.
- All structure signatures are taken from macosx header files.
- The binary format should be more stable than the `otool` output,
  and if Apple introduces changes, both implementations will need updating.
- The system compiler will set the deployment target no lower than
  11.0 for arm64 builds. For "Universal 2" builds use the x86_64 deployment
  target when the arm64 target is 11.0.
"""

from __future__ import annotations

import ctypes
import os
import sys

"""Here are the needed constants and structs from the mach-o header files."""

FAT_MAGIC = 0xCAFEBABE
FAT_CIGAM = 0xBEBAFECA
FAT_MAGIC_64 = 0xCAFEBABF
FAT_CIGAM_64 = 0xBFBAFECA
MH_MAGIC = 0xFEEDFACE
MH_CIGAM = 0xCEFAEDFE
MH_MAGIC_64 = 0xFEEDFACF
MH_CIGAM_64 = 0xCFFAEDFE

LC_VERSION_MIN_MACOSX = 0x24
LC_BUILD_VERSION = 0x32

CPU_TYPE_ARM64 = 0x0100000C

mach_header_fields = [
    ("magic", ctypes.c_uint32),
    ("cputype", ctypes.c_int),
    ("cpusubtype", ctypes.c_int),
    ("filetype", ctypes.c_uint32),
    ("ncmds", ctypes.c_uint32),
    ("sizeofcmds", ctypes.c_uint32),
    ("flags", ctypes.c_uint32),
]
"""
struct mach_header {
    uint32_t magic;            /* mach magic number identifier */
    cpu_type_t cputype;        /* cpu specifier */
    cpu_subtype_t cpusubtype;  /* machine specifier */
    uint32_t filetype;         /* type of file */
    uint32_t ncmds;            /* number of load commands */
    uint32_t sizeofcmds;       /* the size of all the load commands */
    uint32_t flags;            /* flags */
};
typedef integer_t cpu_type_t;
typedef integer_t cpu_subtype_t;
"""

mach_header_fields_64 = mach_header_fields + [("reserved", ctypes.c_uint32)]
"""
struct mach_header_64 {
    uint32_t magic;            /* mach magic number identifier */
    cpu_type_t cputype;        /* cpu specifier */
    cpu_subtype_t cpusubtype;  /* machine specifier */
    uint32_t filetype;         /* type of file */
    uint32_t ncmds;            /* number of load commands */
    uint32_t sizeofcmds;       /* the size of all the load commands */
    uint32_t flags;            /* flags */
    uint32_t reserved;         /* reserved */
};
"""

fat_header_fields = [("magic", ctypes.c_uint32), ("nfat_arch", ctypes.c_uint32)]
"""
struct fat_header {
    uint32_t magic;      /* FAT_MAGIC or FAT_MAGIC_64 */
    uint32_t nfat_arch;  /* number of structs that follow */
};
"""

fat_arch_fields = [
    ("cputype", ctypes.c_int),
    ("cpusubtype", ctypes.c_int),
    ("offset", ctypes.c_uint32),
    ("size", ctypes.c_uint32),
    ("align", ctypes.c_uint32),
]
"""
struct fat_arch {
    cpu_type_t cputype;        /* cpu specifier (int) */
    cpu_subtype_t cpusubtype;  /* machine specifier (int) */
    uint32_t offset;           /* file offset to this object file */
    uint32_t size;             /* size of this object file */
    uint32_t align;            /* alignment as a power of 2 */
};
"""

fat_arch_64_fields = [
    ("cputype", ctypes.c_int),
    ("cpusubtype", ctypes.c_int),
    ("offset", ctypes.c_uint64),
    ("size", ctypes.c_uint64),
    ("align", ctypes.c_uint32),
    ("reserved", ctypes.c_uint32),
]
"""
struct fat_arch_64 {
    cpu_type_t cputype;        /* cpu specifier (int) */
    cpu_subtype_t cpusubtype;  /* machine specifier (int) */
    uint64_t offset;           /* file offset to this object file */
    uint64_t size;             /* size of this object file */
    uint32_t align;            /* alignment as a power of 2 */
    uint32_t reserved;         /* reserved */
};
"""

segment_base_fields = [("cmd", ctypes.c_uint32), ("cmdsize", ctypes.c_uint32)]
"""base for reading segment info"""

segment_command_fields = [
    ("cmd", ctypes.c_uint32),
    ("cmdsize", ctypes.c_uint32),
    ("segname", ctypes.c_char * 16),
    ("vmaddr", ctypes.c_uint32),
    ("vmsize", ctypes.c_uint32),
    ("fileoff", ctypes.c_uint32),
    ("filesize", ctypes.c_uint32),
    ("maxprot", ctypes.c_int),
    ("initprot", ctypes.c_int),
    ("nsects", ctypes.c_uint32),
    ("flags", ctypes.c_uint32),
]
"""
struct segment_command {  /* for 32-bit architectures */
    uint32_t cmd;          /* LC_SEGMENT */
    uint32_t cmdsize;      /* includes sizeof section structs */
    char segname[16];      /* segment name */
    uint32_t vmaddr;       /* memory address of this segment */
    uint32_t vmsize;       /* memory size of this segment */
    uint32_t fileoff;      /* file offset of this segment */
    uint32_t filesize;     /* amount to map from the file */
    vm_prot_t maxprot;     /* maximum VM protection */
    vm_prot_t initprot;    /* initial VM protection */
    uint32_t nsects;       /* number of sections in segment */
    uint32_t flags;        /* flags */
};
typedef int vm_prot_t;
"""

segment_command_fields_64 = [
    ("cmd", ctypes.c_uint32),
    ("cmdsize", ctypes.c_uint32),
    ("segname", ctypes.c_char * 16),
    ("vmaddr", ctypes.c_uint64),
    ("vmsize", ctypes.c_uint64),
    ("fileoff", ctypes.c_uint64),
    ("filesize", ctypes.c_uint64),
    ("maxprot", ctypes.c_int),
    ("initprot", ctypes.c_int),
    ("nsects", ctypes.c_uint32),
    ("flags", ctypes.c_uint32),
]
"""
struct segment_command_64 {  /* for 64-bit architectures */
    uint32_t cmd;          /* LC_SEGMENT_64 */
    uint32_t cmdsize;      /* includes sizeof section_64 structs */
    char segname[16];      /* segment name */
    uint64_t vmaddr;       /* memory address of this segment */
    uint64_t vmsize;       /* memory size of this segment */
    uint64_t fileoff;      /* file offset of this segment */
    uint64_t filesize;     /* amount to map from the file */
    vm_prot_t maxprot;     /* maximum VM protection */
    vm_prot_t initprot;    /* initial VM protection */
    uint32_t nsects;       /* number of sections in segment */
    uint32_t flags;        /* flags */
};
"""

version_min_command_fields = segment_base_fields + [
    ("version", ctypes.c_uint32),
    ("sdk", ctypes.c_uint32),
]
"""
struct version_min_command {
    uint32_t cmd;      /* LC_VERSION_MIN_MACOSX or
                          LC_VERSION_MIN_IPHONEOS or
                          LC_VERSION_MIN_WATCHOS or
                          LC_VERSION_MIN_TVOS */
    uint32_t cmdsize;  /* sizeof(struct min_version_command) */
    uint32_t version;  /* X.Y.Z is encoded in nibbles xxxx.yy.zz */
    uint32_t sdk;      /* X.Y.Z is encoded in nibbles xxxx.yy.zz */
};
"""

build_version_command_fields = segment_base_fields + [
    ("platform", ctypes.c_uint32),
    ("minos", ctypes.c_uint32),
    ("sdk", ctypes.c_uint32),
    ("ntools", ctypes.c_uint32),
]
"""
struct build_version_command {
    uint32_t cmd;       /* LC_BUILD_VERSION */
    uint32_t cmdsize;   /* sizeof(struct build_version_command) plus */
                        /* ntools * sizeof(struct build_tool_version) */
    uint32_t platform;  /* platform */
    uint32_t minos;     /* X.Y.Z is encoded in nibbles xxxx.yy.zz */
    uint32_t sdk;       /* X.Y.Z is encoded in nibbles xxxx.yy.zz */
    uint32_t ntools;    /* number of tool entries following this */
};
"""


def swap32(x):
    return (
        ((x << 24) & 0xFF000000)
        | ((x << 8) & 0x00FF0000)
        | ((x >> 8) & 0x0000FF00)
        | ((x >> 24) & 0x000000FF)
    )
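
# swap32() just reverses byte order, e.g. turning a _CIGAM magic back into its
# _MAGIC form:
#
#     >>> hex(swap32(FAT_CIGAM))
#     '0xcafebabe'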
|
||||
|
||||
def get_base_class_and_magic_number(lib_file, seek=None):
|
||||
if seek is None:
|
||||
seek = lib_file.tell()
|
||||
else:
|
||||
lib_file.seek(seek)
|
||||
magic_number = ctypes.c_uint32.from_buffer_copy(
|
||||
lib_file.read(ctypes.sizeof(ctypes.c_uint32))
|
||||
).value
|
||||
|
||||
# Handle wrong byte order
|
||||
if magic_number in [FAT_CIGAM, FAT_CIGAM_64, MH_CIGAM, MH_CIGAM_64]:
|
||||
if sys.byteorder == "little":
|
||||
BaseClass = ctypes.BigEndianStructure
|
||||
else:
|
||||
BaseClass = ctypes.LittleEndianStructure
|
||||
|
||||
magic_number = swap32(magic_number)
|
||||
else:
|
||||
BaseClass = ctypes.Structure
|
||||
|
||||
lib_file.seek(seek)
|
||||
return BaseClass, magic_number
|
||||
|
||||
|
||||
def read_data(struct_class, lib_file):
|
||||
return struct_class.from_buffer_copy(lib_file.read(ctypes.sizeof(struct_class)))
|
||||
|
||||
|
||||
def extract_macosx_min_system_version(path_to_lib):
    with open(path_to_lib, "rb") as lib_file:
        BaseClass, magic_number = get_base_class_and_magic_number(lib_file, 0)
        if magic_number not in [FAT_MAGIC, FAT_MAGIC_64, MH_MAGIC, MH_MAGIC_64]:
            return

        # Note: the magic number has already been byte-order normalized above,
        # so the 64-bit fat case must be matched with FAT_MAGIC_64 here.
        if magic_number in [FAT_MAGIC, FAT_MAGIC_64]:

            class FatHeader(BaseClass):
                _fields_ = fat_header_fields

            fat_header = read_data(FatHeader, lib_file)
            if magic_number == FAT_MAGIC:

                class FatArch(BaseClass):
                    _fields_ = fat_arch_fields

            else:

                class FatArch(BaseClass):
                    _fields_ = fat_arch_64_fields

            fat_arch_list = [
                read_data(FatArch, lib_file) for _ in range(fat_header.nfat_arch)
            ]

            versions_list = []
            for el in fat_arch_list:
                try:
                    version = read_mach_header(lib_file, el.offset)
                    if version is not None:
                        if el.cputype == CPU_TYPE_ARM64 and len(fat_arch_list) != 1:
                            # Xcode will not set the deployment target below 11.0.0
                            # for the arm64 architecture. Ignore the arm64 deployment
                            # in fat binaries when the target is 11.0.0; that way
                            # the other architectures can select a lower deployment
                            # target.
                            # This is safe because there is no arm64 variant for
                            # macOS 10.15 or earlier.
                            if version == (11, 0, 0):
                                continue
                        versions_list.append(version)
                except ValueError:
                    pass

            if len(versions_list) > 0:
                return max(versions_list)
            else:
                return None

        else:
            try:
                return read_mach_header(lib_file, 0)
            except ValueError:
                # The library file could not be parsed.
                return None


def read_mach_header(lib_file, seek=None):
    """
    Parse a Mach-O header and extract the minimal macOS version it declares.

    :param lib_file: an open library file, positioned at the header
    """
    base_class, magic_number = get_base_class_and_magic_number(lib_file, seek)
    arch = "32" if magic_number == MH_MAGIC else "64"

    class SegmentBase(base_class):
        _fields_ = segment_base_fields

    if arch == "32":

        class MachHeader(base_class):
            _fields_ = mach_header_fields

    else:

        class MachHeader(base_class):
            _fields_ = mach_header_fields_64

    mach_header = read_data(MachHeader, lib_file)
    for _i in range(mach_header.ncmds):
        pos = lib_file.tell()
        segment_base = read_data(SegmentBase, lib_file)
        lib_file.seek(pos)
        if segment_base.cmd == LC_VERSION_MIN_MACOSX:

            class VersionMinCommand(base_class):
                _fields_ = version_min_command_fields

            version_info = read_data(VersionMinCommand, lib_file)
            return parse_version(version_info.version)
        elif segment_base.cmd == LC_BUILD_VERSION:

            class VersionBuild(base_class):
                _fields_ = build_version_command_fields

            version_info = read_data(VersionBuild, lib_file)
            return parse_version(version_info.minos)
        else:
            lib_file.seek(pos + segment_base.cmdsize)
            continue


def parse_version(version):
    x = (version & 0xFFFF0000) >> 16
    y = (version & 0x0000FF00) >> 8
    z = version & 0x000000FF
    return x, y, z
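

# Illustrative (not part of the upstream file): the load commands encode
# X.Y.Z versions in nibbles as 0xXXXXYYZZ, so
# parse_version(0x000A0E00) == (10, 14, 0), i.e. macOS 10.14.0.
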
def calculate_macosx_platform_tag(archive_root, platform_tag):
    """
    Calculate the proper macosx platform tag based on the files included in the wheel.

    Example platform tag: `macosx-10.14-x86_64`
    """
    prefix, base_version, suffix = platform_tag.split("-")
    base_version = tuple(int(x) for x in base_version.split("."))
    base_version = base_version[:2]
    if base_version[0] > 10:
        base_version = (base_version[0], 0)
    assert len(base_version) == 2
    if "MACOSX_DEPLOYMENT_TARGET" in os.environ:
        deploy_target = tuple(
            int(x) for x in os.environ["MACOSX_DEPLOYMENT_TARGET"].split(".")
        )
        deploy_target = deploy_target[:2]
        if deploy_target[0] > 10:
            deploy_target = (deploy_target[0], 0)
        if deploy_target < base_version:
            sys.stderr.write(
                "[WARNING] MACOSX_DEPLOYMENT_TARGET is set to a lower value ({}) than "
                "the version on which the Python interpreter was compiled ({}), and "
                "will be ignored.\n".format(
                    ".".join(str(x) for x in deploy_target),
                    ".".join(str(x) for x in base_version),
                )
            )
        else:
            base_version = deploy_target

    assert len(base_version) == 2
    start_version = base_version
    versions_dict = {}
    for dirpath, _dirnames, filenames in os.walk(archive_root):
        for filename in filenames:
            if filename.endswith(".dylib") or filename.endswith(".so"):
                lib_path = os.path.join(dirpath, filename)
                min_ver = extract_macosx_min_system_version(lib_path)
                if min_ver is not None:
                    min_ver = min_ver[0:2]
                    if min_ver[0] > 10:
                        min_ver = (min_ver[0], 0)
                    versions_dict[lib_path] = min_ver

    if len(versions_dict) > 0:
        base_version = max(base_version, max(versions_dict.values()))

    # macosx platform tags do not support minor bugfix releases
    fin_base_version = "_".join([str(x) for x in base_version])
    if start_version < base_version:
        problematic_files = [k for k, v in versions_dict.items() if v > start_version]
        # Decide on the wording before flattening the list into a string.
        if len(problematic_files) == 1:
            files_form = "this file"
        else:
            files_form = "these files"
        problematic_files = "\n".join(problematic_files)
        error_message = (
            "[WARNING] This wheel needs a higher macOS version than {} "
            "To silence this warning, set MACOSX_DEPLOYMENT_TARGET to at least "
            + fin_base_version
            + " or recreate "
            + files_form
            + " with a lower "
            "MACOSX_DEPLOYMENT_TARGET: \n" + problematic_files
        )

        if "MACOSX_DEPLOYMENT_TARGET" in os.environ:
            error_message = error_message.format(
                "is set in the MACOSX_DEPLOYMENT_TARGET variable."
            )
        else:
            error_message = error_message.format(
                "the version your Python interpreter is compiled against."
            )

        sys.stderr.write(error_message)

    platform_tag = prefix + "_" + fin_base_version + "_" + suffix
    return platform_tag
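

# Hedged usage sketch (the path and tags are hypothetical): given a wheel
# tree whose bundled .so files require macOS 10.14 and an interpreter tag
# "macosx-10.9-x86_64",
#     calculate_macosx_platform_tag("build/lib", "macosx-10.9-x86_64")
# would return "macosx_10_14_x86_64" and emit the warning above.
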
@ -0,0 +1,180 @@
"""
Tools for converting old- to new-style metadata.
"""

from __future__ import annotations

import functools
import itertools
import os.path
import re
import textwrap
from email.message import Message
from email.parser import Parser
from typing import Iterator

from .vendored.packaging.requirements import Requirement


def _nonblank(str):
    return str and not str.startswith("#")


@functools.singledispatch
def yield_lines(iterable):
    r"""
    Yield valid lines of a string or iterable.
    >>> list(yield_lines(''))
    []
    >>> list(yield_lines(['foo', 'bar']))
    ['foo', 'bar']
    >>> list(yield_lines('foo\nbar'))
    ['foo', 'bar']
    >>> list(yield_lines('\nfoo\n#bar\nbaz #comment'))
    ['foo', 'baz #comment']
    >>> list(yield_lines(['foo\nbar', 'baz', 'bing\n\n\n']))
    ['foo', 'bar', 'baz', 'bing']
    """
    return itertools.chain.from_iterable(map(yield_lines, iterable))


@yield_lines.register(str)
def _(text):
    return filter(_nonblank, map(str.strip, text.splitlines()))
def split_sections(s):
    """Split a string or iterable thereof into (section, content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines. If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    section = None
    content = []
    for line in yield_lines(s):
        if line.startswith("["):
            if line.endswith("]"):
                if section or content:
                    yield section, content
                section = line[1:-1].strip()
                content = []
            else:
                raise ValueError("Invalid section heading", line)
        else:
            content.append(line)

    # wrap up last segment
    yield section, content
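

# Illustrative doctest (not part of the upstream file):
# >>> list(split_sections("pytest\n[docs]\nsphinx"))
# [(None, ['pytest']), ('docs', ['sphinx'])]
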
def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name
    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.
    """
    return re.sub("[^A-Za-z0-9.-]+", "_", extra).lower()


def safe_name(name):
    """Convert an arbitrary string to a standard distribution name
    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    return re.sub("[^A-Za-z0-9.]+", "-", name)
def requires_to_requires_dist(requirement: Requirement) -> str:
    """Return the version specifier for a requirement in PEP 345/566 fashion."""
    if getattr(requirement, "url", None):
        return " @ " + requirement.url

    requires_dist = []
    for spec in requirement.specifier:
        requires_dist.append(spec.operator + spec.version)

    if requires_dist:
        return " " + ",".join(sorted(requires_dist))
    else:
        return ""


def convert_requirements(requirements: list[str]) -> Iterator[str]:
    """Yield Requires-Dist: strings for parsed requirements strings."""
    for req in requirements:
        parsed_requirement = Requirement(req)
        spec = requires_to_requires_dist(parsed_requirement)
        extras = ",".join(sorted(safe_extra(e) for e in parsed_requirement.extras))
        if extras:
            extras = f"[{extras}]"

        yield safe_name(parsed_requirement.name) + extras + spec
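

# Illustrative (hypothetical project name, not part of the upstream file):
# >>> list(convert_requirements(["pkg[extra1]>=1.0,<2.0"]))
# ['pkg[extra1] <2.0,>=1.0']
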
def generate_requirements(
    extras_require: dict[str, list[str]],
) -> Iterator[tuple[str, str]]:
    """
    Convert requirements from a setup()-style dictionary to
    ('Requires-Dist', 'requirement') and ('Provides-Extra', 'extra') tuples.

    extras_require is a dictionary of {extra: [requirements]} as passed to setup(),
    using the empty extra {'': [requirements]} to hold install_requires.
    """
    for extra, depends in extras_require.items():
        condition = ""
        extra = extra or ""
        if ":" in extra:  # setuptools extra:condition syntax
            extra, condition = extra.split(":", 1)

        extra = safe_extra(extra)
        if extra:
            yield "Provides-Extra", extra
            if condition:
                condition = "(" + condition + ") and "
            condition += "extra == '%s'" % extra

        if condition:
            condition = " ; " + condition

        for new_req in convert_requirements(depends):
            yield "Requires-Dist", new_req + condition
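

# Illustrative (not part of the upstream file): the setuptools
# "extra:condition" syntax is expanded into PEP 566 fields, e.g.
# >>> list(generate_requirements({"test:python_version<'3.9'": ["pytest"]}))
# [('Provides-Extra', 'test'),
#  ('Requires-Dist', "pytest ; (python_version<'3.9') and extra == 'test'")]
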
def pkginfo_to_metadata(egg_info_path: str, pkginfo_path: str) -> Message:
    """
    Convert .egg-info directory with PKG-INFO to the Metadata 2.1 format
    """
    with open(pkginfo_path, encoding="utf-8") as headers:
        pkg_info = Parser().parse(headers)

    pkg_info.replace_header("Metadata-Version", "2.1")
    # Those will be regenerated from `requires.txt`.
    del pkg_info["Provides-Extra"]
    del pkg_info["Requires-Dist"]
    requires_path = os.path.join(egg_info_path, "requires.txt")
    if os.path.exists(requires_path):
        with open(requires_path, encoding="utf-8") as requires_file:
            requires = requires_file.read()

        parsed_requirements = sorted(split_sections(requires), key=lambda x: x[0] or "")
        for extra, reqs in parsed_requirements:
            for key, value in generate_requirements({extra: reqs}):
                if (key, value) not in pkg_info.items():
                    pkg_info[key] = value

    description = pkg_info["Description"]
    if description:
        description_lines = pkg_info["Description"].splitlines()
        dedented_description = "\n".join(
            # if the first line of long_description is blank,
            # the first line here will be indented.
            (
                description_lines[0].lstrip(),
                textwrap.dedent("\n".join(description_lines[1:])),
                "\n",
            )
        )
        pkg_info.set_payload(dedented_description)
        del pkg_info["Description"]

    return pkg_info
@ -0,0 +1,26 @@
from __future__ import annotations

import base64
import logging

log = logging.getLogger("wheel")

# ensure Python logging is configured
try:
    __import__("setuptools.logging")
except ImportError:
    # setuptools < ??
    from . import _setuptools_logging

    _setuptools_logging.configure()


def urlsafe_b64encode(data: bytes) -> bytes:
    """urlsafe_b64encode without padding"""
    return base64.urlsafe_b64encode(data).rstrip(b"=")


def urlsafe_b64decode(data: bytes) -> bytes:
    """urlsafe_b64decode without padding"""
    pad = b"=" * (4 - (len(data) & 3))
    return base64.urlsafe_b64decode(data + pad)
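

# Illustrative (not part of the upstream file): wheel RECORD hashes use this
# unpadded alphabet, e.g.
# >>> urlsafe_b64encode(b"\x00\xff")
# b'AP8'
# >>> urlsafe_b64decode(b"AP8")
# b'\x00\xff'
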
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,108 @@
"""
ELF file parser.

This provides a class ``ELFFile`` that parses an ELF executable in a similar
interface to ``ZipFile``. Only the read interface is implemented.

Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
"""

import enum
import os
import struct
from typing import IO, Optional, Tuple


class ELFInvalid(ValueError):
    pass


class EIClass(enum.IntEnum):
    C32 = 1
    C64 = 2


class EIData(enum.IntEnum):
    Lsb = 1
    Msb = 2


class EMachine(enum.IntEnum):
    I386 = 3
    S390 = 22
    Arm = 40
    X8664 = 62
    AArc64 = 183


class ELFFile:
    """
    Representation of an ELF executable.
    """

    def __init__(self, f: IO[bytes]) -> None:
        self._f = f

        try:
            ident = self._read("16B")
        except struct.error:
            raise ELFInvalid("unable to parse identification")
        magic = bytes(ident[:4])
        if magic != b"\x7fELF":
            raise ELFInvalid(f"invalid magic: {magic!r}")

        self.capacity = ident[4]  # Format for program header (bitness).
        self.encoding = ident[5]  # Data structure encoding (endianness).

        try:
            # e_fmt: Format for program header.
            # p_fmt: Format for section header.
            # p_idx: Indexes to find p_type, p_offset, and p_filesz.
            e_fmt, self._p_fmt, self._p_idx = {
                (1, 1): ("<HHIIIIIHHH", "<IIIIIIII", (0, 1, 4)),  # 32-bit LSB.
                (1, 2): (">HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)),  # 32-bit MSB.
                (2, 1): ("<HHIQQQIHHH", "<IIQQQQQQ", (0, 2, 5)),  # 64-bit LSB.
                (2, 2): (">HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)),  # 64-bit MSB.
            }[(self.capacity, self.encoding)]
        except KeyError:
            raise ELFInvalid(
                f"unrecognized capacity ({self.capacity}) or "
                f"encoding ({self.encoding})"
            )

        try:
            (
                _,
                self.machine,  # Architecture type.
                _,
                _,
                self._e_phoff,  # Offset of program header.
                _,
                self.flags,  # Processor-specific flags.
                _,
                self._e_phentsize,  # Size of section.
                self._e_phnum,  # Number of sections.
            ) = self._read(e_fmt)
        except struct.error as e:
            raise ELFInvalid("unable to parse machine and section information") from e

    def _read(self, fmt: str) -> Tuple[int, ...]:
        return struct.unpack(fmt, self._f.read(struct.calcsize(fmt)))

    @property
    def interpreter(self) -> Optional[str]:
        """
        The path recorded in the ``PT_INTERP`` section header.
        """
        for index in range(self._e_phnum):
            self._f.seek(self._e_phoff + self._e_phentsize * index)
            try:
                data = self._read(self._p_fmt)
            except struct.error:
                continue
            if data[self._p_idx[0]] != 3:  # Not PT_INTERP.
                continue
            self._f.seek(data[self._p_idx[1]])
            return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0")
        return None
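

# Hedged usage sketch (the loader path shown is platform-dependent, not
# guaranteed):
# with open(sys.executable, "rb") as f:
#     print(ELFFile(f).interpreter)  # e.g. "/lib64/ld-linux-x86-64.so.2"
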
@ -0,0 +1,260 @@
import collections
import contextlib
import functools
import os
import re
import sys
import warnings
from typing import Dict, Generator, Iterator, NamedTuple, Optional, Sequence, Tuple

from ._elffile import EIClass, EIData, ELFFile, EMachine

EF_ARM_ABIMASK = 0xFF000000
EF_ARM_ABI_VER5 = 0x05000000
EF_ARM_ABI_FLOAT_HARD = 0x00000400


# `os.PathLike` not a generic type until Python 3.9, so sticking with `str`
# as the type for `path` until then.
@contextlib.contextmanager
def _parse_elf(path: str) -> Generator[Optional[ELFFile], None, None]:
    try:
        with open(path, "rb") as f:
            yield ELFFile(f)
    except (OSError, TypeError, ValueError):
        yield None


def _is_linux_armhf(executable: str) -> bool:
    # hard-float ABI can be detected from the ELF header of the running
    # process
    # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
    with _parse_elf(executable) as f:
        return (
            f is not None
            and f.capacity == EIClass.C32
            and f.encoding == EIData.Lsb
            and f.machine == EMachine.Arm
            and f.flags & EF_ARM_ABIMASK == EF_ARM_ABI_VER5
            and f.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD
        )


def _is_linux_i686(executable: str) -> bool:
    with _parse_elf(executable) as f:
        return (
            f is not None
            and f.capacity == EIClass.C32
            and f.encoding == EIData.Lsb
            and f.machine == EMachine.I386
        )


def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool:
    if "armv7l" in archs:
        return _is_linux_armhf(executable)
    if "i686" in archs:
        return _is_linux_i686(executable)
    allowed_archs = {
        "x86_64",
        "aarch64",
        "ppc64",
        "ppc64le",
        "s390x",
        "loongarch64",
        "riscv64",
    }
    return any(arch in allowed_archs for arch in archs)


# If glibc ever changes its major version, we need to know what the last
# minor version was, so we can build the complete list of all versions.
# For now, guess what the highest minor version might be, assume it will
# be 50 for testing. Once this actually happens, update the dictionary
# with the actual value.
_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50)


class _GLibCVersion(NamedTuple):
    major: int
    minor: int


def _glibc_version_string_confstr() -> Optional[str]:
    """
    Primary implementation of glibc_version_string using os.confstr.
    """
    # os.confstr is quite a bit faster than ctypes.CDLL. It's also less likely
    # to be broken or missing. This strategy is used in the standard library
    # platform module.
    # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
    try:
        # Should be a string like "glibc 2.17".
        version_string: Optional[str] = os.confstr("CS_GNU_LIBC_VERSION")
        assert version_string is not None
        _, version = version_string.rsplit()
    except (AssertionError, AttributeError, OSError, ValueError):
        # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
        return None
    return version


def _glibc_version_string_ctypes() -> Optional[str]:
    """
    Fallback implementation of glibc_version_string using ctypes.
    """
    try:
        import ctypes
    except ImportError:
        return None

    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
    # manpage says, "If filename is NULL, then the returned handle is for the
    # main program". This way we can let the linker do the work to figure out
    # which libc our process is actually using.
    #
    # We must also handle the special case where the executable is not a
    # dynamically linked executable. This can occur when using musl libc,
    # for example. In this situation, dlopen() will error, leading to an
    # OSError. Interestingly, at least in the case of musl, there is no
    # errno set on the OSError. The single string argument used to construct
    # OSError comes from libc itself and is therefore not portable to
    # hard code here. In any case, failure to call dlopen() means we
    # can proceed, so we bail on our attempt.
    try:
        process_namespace = ctypes.CDLL(None)
    except OSError:
        return None

    try:
        gnu_get_libc_version = process_namespace.gnu_get_libc_version
    except AttributeError:
        # Symbol doesn't exist -> therefore, we are not linked to
        # glibc.
        return None

    # Call gnu_get_libc_version, which returns a string like "2.5"
    gnu_get_libc_version.restype = ctypes.c_char_p
    version_str: str = gnu_get_libc_version()
    # py2 / py3 compatibility:
    if not isinstance(version_str, str):
        version_str = version_str.decode("ascii")

    return version_str


def _glibc_version_string() -> Optional[str]:
    """Returns glibc version string, or None if not using glibc."""
    return _glibc_version_string_confstr() or _glibc_version_string_ctypes()


def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
    """Parse glibc version.

    We use a regexp instead of str.split because we want to discard any
    random junk that might come after the minor version -- this might happen
    in patched/forked versions of glibc (e.g. Linaro's version of glibc
    uses version strings like "2.20-2014.11"). See gh-3588.
    """
    m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
    if not m:
        warnings.warn(
            "Expected glibc version with 2 components major.minor,"
            f" got: {version_str}",
            RuntimeWarning,
        )
        return -1, -1
    return int(m.group("major")), int(m.group("minor"))
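

# Illustrative (not part of the upstream file): forked glibc version strings
# still parse, e.g. _parse_glibc_version("2.20-2014.11") == (2, 20), while
# _parse_glibc_version("nonsense") warns and returns (-1, -1).
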
@functools.lru_cache
def _get_glibc_version() -> Tuple[int, int]:
    version_str = _glibc_version_string()
    if version_str is None:
        return (-1, -1)
    return _parse_glibc_version(version_str)


# From PEP 513, PEP 600
def _is_compatible(arch: str, version: _GLibCVersion) -> bool:
    sys_glibc = _get_glibc_version()
    if sys_glibc < version:
        return False
    # Check for presence of _manylinux module.
    try:
        import _manylinux
    except ImportError:
        return True
    if hasattr(_manylinux, "manylinux_compatible"):
        result = _manylinux.manylinux_compatible(version[0], version[1], arch)
        if result is not None:
            return bool(result)
        return True
    if version == _GLibCVersion(2, 5):
        if hasattr(_manylinux, "manylinux1_compatible"):
            return bool(_manylinux.manylinux1_compatible)
    if version == _GLibCVersion(2, 12):
        if hasattr(_manylinux, "manylinux2010_compatible"):
            return bool(_manylinux.manylinux2010_compatible)
    if version == _GLibCVersion(2, 17):
        if hasattr(_manylinux, "manylinux2014_compatible"):
            return bool(_manylinux.manylinux2014_compatible)
    return True


_LEGACY_MANYLINUX_MAP = {
    # CentOS 7 w/ glibc 2.17 (PEP 599)
    (2, 17): "manylinux2014",
    # CentOS 6 w/ glibc 2.12 (PEP 571)
    (2, 12): "manylinux2010",
    # CentOS 5 w/ glibc 2.5 (PEP 513)
    (2, 5): "manylinux1",
}


def platform_tags(archs: Sequence[str]) -> Iterator[str]:
    """Generate manylinux tags compatible with the current platform.

    :param archs: Sequence of compatible architectures.
        The first one shall be the closest to the actual architecture and be the part
        of the platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
        The ``linux_`` prefix is assumed as a prerequisite for the current platform to
        be manylinux-compatible.

    :returns: An iterator of compatible manylinux tags.
    """
    if not _have_compatible_abi(sys.executable, archs):
        return
    # Oldest glibc to be supported regardless of architecture is (2, 17).
    too_old_glibc2 = _GLibCVersion(2, 16)
    if set(archs) & {"x86_64", "i686"}:
        # On x86/i686 also oldest glibc to be supported is (2, 5).
        too_old_glibc2 = _GLibCVersion(2, 4)
    current_glibc = _GLibCVersion(*_get_glibc_version())
    glibc_max_list = [current_glibc]
    # We can assume compatibility across glibc major versions.
    # https://sourceware.org/bugzilla/show_bug.cgi?id=24636
    #
    # Build a list of maximum glibc versions so that we can
    # output the canonical list of all glibc from current_glibc
    # down to too_old_glibc2, including all intermediary versions.
    for glibc_major in range(current_glibc.major - 1, 1, -1):
        glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
        glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
    for arch in archs:
        for glibc_max in glibc_max_list:
            if glibc_max.major == too_old_glibc2.major:
                min_minor = too_old_glibc2.minor
            else:
                # For other glibc major versions oldest supported is (x, 0).
                min_minor = -1
            for glibc_minor in range(glibc_max.minor, min_minor, -1):
                glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
                tag = "manylinux_{}_{}".format(*glibc_version)
                if _is_compatible(arch, glibc_version):
                    yield f"{tag}_{arch}"
                # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
                if glibc_version in _LEGACY_MANYLINUX_MAP:
                    legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
                    if _is_compatible(arch, glibc_version):
                        yield f"{legacy_tag}_{arch}"
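

# Illustrative (not part of the upstream file): on an x86_64 host with glibc
# 2.28 this yields manylinux_2_28_x86_64, manylinux_2_27_x86_64, ... down to
# manylinux_2_5_x86_64, interleaving the legacy aliases manylinux2014,
# manylinux2010 and manylinux1 at glibc 2.17, 2.12 and 2.5 respectively.
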
@ -0,0 +1,83 @@
"""PEP 656 support.

This module implements logic to detect if the currently running Python is
linked against musl, and what musl version is used.
"""

import functools
import re
import subprocess
import sys
from typing import Iterator, NamedTuple, Optional, Sequence

from ._elffile import ELFFile


class _MuslVersion(NamedTuple):
    major: int
    minor: int


def _parse_musl_version(output: str) -> Optional[_MuslVersion]:
    lines = [n for n in (n.strip() for n in output.splitlines()) if n]
    if len(lines) < 2 or lines[0][:4] != "musl":
        return None
    m = re.match(r"Version (\d+)\.(\d+)", lines[1])
    if not m:
        return None
    return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2)))
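

# Illustrative (not part of the upstream file): fed the loader banner quoted
# in _get_musl_version below,
# _parse_musl_version("musl libc (x86_64)\nVersion 1.2.2") returns
# _MuslVersion(major=1, minor=2).
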
@functools.lru_cache
def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
    """Detect currently-running musl runtime version.

    This is done by checking the specified executable's dynamic linking
    information, and invoking the loader to parse its output for a version
    string. If the loader is musl, the output would be something like::

        musl libc (x86_64)
        Version 1.2.2
        Dynamic Program Loader
    """
    try:
        with open(executable, "rb") as f:
            ld = ELFFile(f).interpreter
    except (OSError, TypeError, ValueError):
        return None
    if ld is None or "musl" not in ld:
        return None
    proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True)
    return _parse_musl_version(proc.stderr)


def platform_tags(archs: Sequence[str]) -> Iterator[str]:
    """Generate musllinux tags compatible with the current platform.

    :param archs: Sequence of compatible architectures.
        The first one shall be the closest to the actual architecture and be the part
        of the platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
        The ``linux_`` prefix is assumed as a prerequisite for the current platform to
        be musllinux-compatible.

    :returns: An iterator of compatible musllinux tags.
    """
    sys_musl = _get_musl_version(sys.executable)
    if sys_musl is None:  # Python not dynamically linked against musl.
        return
    for arch in archs:
        for minor in range(sys_musl.minor, -1, -1):
            yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
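

# Illustrative (not part of the upstream file): with musl 1.2 detected and
# archs=["x86_64"], this yields musllinux_1_2_x86_64, musllinux_1_1_x86_64
# and musllinux_1_0_x86_64, newest first.
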
if __name__ == "__main__":  # pragma: no cover
    import sysconfig

    plat = sysconfig.get_platform()
    assert plat.startswith("linux-"), "not linux"

    print("plat:", plat)
    print("musl:", _get_musl_version(sys.executable))
    print("tags:", end=" ")
    # platform_tags expects a sequence of architecture strings, so wrap the
    # single derived arch in a list rather than passing the bare string.
    for t in platform_tags([re.sub(r"[.-]", "_", plat.split("-", 1)[-1])]):
        print(t, end="\n      ")
@ -0,0 +1,356 @@
"""Handwritten parser of dependency specifiers.

The docstring for each __parse_* function contains EBNF-inspired grammar representing
the implementation.
"""

import ast
from typing import Any, List, NamedTuple, Optional, Tuple, Union

from ._tokenizer import DEFAULT_RULES, Tokenizer


class Node:
    def __init__(self, value: str) -> None:
        self.value = value

    def __str__(self) -> str:
        return self.value

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__}('{self}')>"

    def serialize(self) -> str:
        raise NotImplementedError


class Variable(Node):
    def serialize(self) -> str:
        return str(self)


class Value(Node):
    def serialize(self) -> str:
        return f'"{self}"'


class Op(Node):
    def serialize(self) -> str:
        return str(self)


MarkerVar = Union[Variable, Value]
MarkerItem = Tuple[MarkerVar, Op, MarkerVar]
# MarkerAtom = Union[MarkerItem, List["MarkerAtom"]]
# MarkerList = List[Union["MarkerList", MarkerAtom, str]]
# mypy does not support recursive type definition
# https://github.com/python/mypy/issues/731
MarkerAtom = Any
MarkerList = List[Any]


class ParsedRequirement(NamedTuple):
    name: str
    url: str
    extras: List[str]
    specifier: str
    marker: Optional[MarkerList]


# --------------------------------------------------------------------------------------
# Recursive descent parser for dependency specifier
# --------------------------------------------------------------------------------------
def parse_requirement(source: str) -> ParsedRequirement:
    return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES))


def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement:
    """
    requirement = WS? IDENTIFIER WS? extras WS? requirement_details
    """
    tokenizer.consume("WS")

    name_token = tokenizer.expect(
        "IDENTIFIER", expected="package name at the start of dependency specifier"
    )
    name = name_token.text
    tokenizer.consume("WS")

    extras = _parse_extras(tokenizer)
    tokenizer.consume("WS")

    url, specifier, marker = _parse_requirement_details(tokenizer)
    tokenizer.expect("END", expected="end of dependency specifier")

    return ParsedRequirement(name, url, extras, specifier, marker)
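

# Illustrative (hypothetical package name, not part of the upstream file):
# parse_requirement("name[quux]>=1.0 ; extra == 'test'") returns
# ParsedRequirement(name='name', url='', extras=['quux'],
# specifier='>=1.0', marker=[...]).
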
def _parse_requirement_details(
    tokenizer: Tokenizer,
) -> Tuple[str, str, Optional[MarkerList]]:
    """
    requirement_details = AT URL (WS requirement_marker?)?
                        | specifier WS? (requirement_marker)?
    """

    specifier = ""
    url = ""
    marker = None

    if tokenizer.check("AT"):
        tokenizer.read()
        tokenizer.consume("WS")

        url_start = tokenizer.position
        url = tokenizer.expect("URL", expected="URL after @").text
        if tokenizer.check("END", peek=True):
            return (url, specifier, marker)

        tokenizer.expect("WS", expected="whitespace after URL")

        # The input might end after whitespace.
        if tokenizer.check("END", peek=True):
            return (url, specifier, marker)

        marker = _parse_requirement_marker(
            tokenizer, span_start=url_start, after="URL and whitespace"
        )
    else:
        specifier_start = tokenizer.position
        specifier = _parse_specifier(tokenizer)
        tokenizer.consume("WS")

        if tokenizer.check("END", peek=True):
            return (url, specifier, marker)

        marker = _parse_requirement_marker(
            tokenizer,
            span_start=specifier_start,
            after=(
                "version specifier"
                if specifier
                else "name and no valid version specifier"
            ),
        )

    return (url, specifier, marker)


def _parse_requirement_marker(
    tokenizer: Tokenizer, *, span_start: int, after: str
) -> MarkerList:
    """
    requirement_marker = SEMICOLON marker WS?
    """

    if not tokenizer.check("SEMICOLON"):
        tokenizer.raise_syntax_error(
            f"Expected end or semicolon (after {after})",
            span_start=span_start,
        )
    tokenizer.read()

    marker = _parse_marker(tokenizer)
    tokenizer.consume("WS")

    return marker


def _parse_extras(tokenizer: Tokenizer) -> List[str]:
    """
    extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)?
    """
    if not tokenizer.check("LEFT_BRACKET", peek=True):
        return []

    with tokenizer.enclosing_tokens(
        "LEFT_BRACKET",
        "RIGHT_BRACKET",
        around="extras",
    ):
        tokenizer.consume("WS")
        extras = _parse_extras_list(tokenizer)
        tokenizer.consume("WS")

    return extras


def _parse_extras_list(tokenizer: Tokenizer) -> List[str]:
    """
    extras_list = identifier (wsp* ',' wsp* identifier)*
    """
    extras: List[str] = []

    if not tokenizer.check("IDENTIFIER"):
        return extras

    extras.append(tokenizer.read().text)

    while True:
        tokenizer.consume("WS")
        if tokenizer.check("IDENTIFIER", peek=True):
            tokenizer.raise_syntax_error("Expected comma between extra names")
        elif not tokenizer.check("COMMA"):
            break

        tokenizer.read()
        tokenizer.consume("WS")

        extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma")
        extras.append(extra_token.text)

    return extras


def _parse_specifier(tokenizer: Tokenizer) -> str:
    """
    specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS
              | WS? version_many WS?
    """
    with tokenizer.enclosing_tokens(
        "LEFT_PARENTHESIS",
        "RIGHT_PARENTHESIS",
        around="version specifier",
    ):
        tokenizer.consume("WS")
        parsed_specifiers = _parse_version_many(tokenizer)
        tokenizer.consume("WS")

    return parsed_specifiers


def _parse_version_many(tokenizer: Tokenizer) -> str:
    """
    version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)?
    """
    parsed_specifiers = ""
    while tokenizer.check("SPECIFIER"):
        span_start = tokenizer.position
        parsed_specifiers += tokenizer.read().text
        if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True):
            tokenizer.raise_syntax_error(
                ".* suffix can only be used with `==` or `!=` operators",
                span_start=span_start,
                span_end=tokenizer.position + 1,
            )
        if tokenizer.check("VERSION_LOCAL_LABEL_TRAIL", peek=True):
            tokenizer.raise_syntax_error(
                "Local version label can only be used with `==` or `!=` operators",
                span_start=span_start,
                span_end=tokenizer.position,
            )
        tokenizer.consume("WS")
        if not tokenizer.check("COMMA"):
            break
        parsed_specifiers += tokenizer.read().text
        tokenizer.consume("WS")

    return parsed_specifiers


# --------------------------------------------------------------------------------------
# Recursive descent parser for marker expression
# --------------------------------------------------------------------------------------
def parse_marker(source: str) -> MarkerList:
    return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES))


def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList:
    retval = _parse_marker(tokenizer)
    tokenizer.expect("END", expected="end of marker expression")
    return retval


def _parse_marker(tokenizer: Tokenizer) -> MarkerList:
    """
    marker = marker_atom (BOOLOP marker_atom)+
    """
    expression = [_parse_marker_atom(tokenizer)]
    while tokenizer.check("BOOLOP"):
        token = tokenizer.read()
        expr_right = _parse_marker_atom(tokenizer)
        expression.extend((token.text, expr_right))
    return expression


def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom:
    """
    marker_atom = WS? LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS?
                | WS? marker_item WS?
    """

    tokenizer.consume("WS")
    if tokenizer.check("LEFT_PARENTHESIS", peek=True):
        with tokenizer.enclosing_tokens(
            "LEFT_PARENTHESIS",
            "RIGHT_PARENTHESIS",
            around="marker expression",
        ):
            tokenizer.consume("WS")
            marker: MarkerAtom = _parse_marker(tokenizer)
            tokenizer.consume("WS")
    else:
        marker = _parse_marker_item(tokenizer)
    tokenizer.consume("WS")
    return marker
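

# Illustrative (not part of the upstream file):
# parse_marker("python_version > '3.6' and os_name == 'posix'") produces a
# flat MarkerList:
# [(Variable('python_version'), Op('>'), Value('3.6')), 'and',
#  (Variable('os_name'), Op('=='), Value('posix'))].
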
def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem:
    """
    marker_item = WS? marker_var WS? marker_op WS? marker_var WS?
    """
    tokenizer.consume("WS")
    marker_var_left = _parse_marker_var(tokenizer)
    tokenizer.consume("WS")
    marker_op = _parse_marker_op(tokenizer)
    tokenizer.consume("WS")
    marker_var_right = _parse_marker_var(tokenizer)
    tokenizer.consume("WS")
    return (marker_var_left, marker_op, marker_var_right)


def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar:
    """
    marker_var = VARIABLE | QUOTED_STRING
    """
    if tokenizer.check("VARIABLE"):
        return process_env_var(tokenizer.read().text.replace(".", "_"))
    elif tokenizer.check("QUOTED_STRING"):
        return process_python_str(tokenizer.read().text)
    else:
        tokenizer.raise_syntax_error(
            message="Expected a marker variable or quoted string"
        )


def process_env_var(env_var: str) -> Variable:
    if env_var in ("platform_python_implementation", "python_implementation"):
        return Variable("platform_python_implementation")
    else:
        return Variable(env_var)


def process_python_str(python_str: str) -> Value:
    value = ast.literal_eval(python_str)
    return Value(str(value))


def _parse_marker_op(tokenizer: Tokenizer) -> Op:
    """
    marker_op = IN | NOT IN | OP
    """
    if tokenizer.check("IN"):
        tokenizer.read()
        return Op("in")
    elif tokenizer.check("NOT"):
        tokenizer.read()
        tokenizer.expect("WS", expected="whitespace after 'not'")
        tokenizer.expect("IN", expected="'in' after 'not'")
        return Op("not in")
    elif tokenizer.check("OP"):
        return Op(tokenizer.read().text)
    else:
        return tokenizer.raise_syntax_error(
            "Expected marker operator, one of "
            "<=, <, !=, ==, >=, >, ~=, ===, in, not in"
        )
@ -0,0 +1,61 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.


class InfinityType:
    def __repr__(self) -> str:
        return "Infinity"

    def __hash__(self) -> int:
        return hash(repr(self))

    def __lt__(self, other: object) -> bool:
        return False

    def __le__(self, other: object) -> bool:
        return False

    def __eq__(self, other: object) -> bool:
        return isinstance(other, self.__class__)

    def __gt__(self, other: object) -> bool:
        return True

    def __ge__(self, other: object) -> bool:
        return True

    def __neg__(self: object) -> "NegativeInfinityType":
        return NegativeInfinity


Infinity = InfinityType()


class NegativeInfinityType:
    def __repr__(self) -> str:
        return "-Infinity"

    def __hash__(self) -> int:
        return hash(repr(self))

    def __lt__(self, other: object) -> bool:
        return True

    def __le__(self, other: object) -> bool:
        return True

    def __eq__(self, other: object) -> bool:
        return isinstance(other, self.__class__)

    def __gt__(self, other: object) -> bool:
        return False

    def __ge__(self, other: object) -> bool:
        return False

    def __neg__(self: object) -> InfinityType:
        return Infinity


NegativeInfinity = NegativeInfinityType()
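

# Illustrative (not part of the upstream file): these singletons act as
# comparison sentinels when sorting version keys; Infinity > (99, 99) is True
# and NegativeInfinity < (0,) is True for any tuple.
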
@ -0,0 +1,192 @@
import contextlib
import re
from dataclasses import dataclass
from typing import Dict, Iterator, NoReturn, Optional, Tuple, Union

from .specifiers import Specifier


@dataclass
class Token:
    name: str
    text: str
    position: int


class ParserSyntaxError(Exception):
    """The provided source text could not be parsed correctly."""

    def __init__(
        self,
        message: str,
        *,
        source: str,
        span: Tuple[int, int],
    ) -> None:
        self.span = span
        self.message = message
        self.source = source

        super().__init__()

    def __str__(self) -> str:
        marker = " " * self.span[0] + "~" * (self.span[1] - self.span[0]) + "^"
        return "\n    ".join([self.message, self.source, marker])


DEFAULT_RULES: "Dict[str, Union[str, re.Pattern[str]]]" = {
    "LEFT_PARENTHESIS": r"\(",
    "RIGHT_PARENTHESIS": r"\)",
    "LEFT_BRACKET": r"\[",
    "RIGHT_BRACKET": r"\]",
    "SEMICOLON": r";",
    "COMMA": r",",
    "QUOTED_STRING": re.compile(
        r"""
            (
                ('[^']*')
                |
                ("[^"]*")
            )
        """,
        re.VERBOSE,
    ),
    "OP": r"(===|==|~=|!=|<=|>=|<|>)",
    "BOOLOP": r"\b(or|and)\b",
    "IN": r"\bin\b",
    "NOT": r"\bnot\b",
    "VARIABLE": re.compile(
        r"""
            \b(
                python_version
                |python_full_version
                |os[._]name
                |sys[._]platform
                |platform_(release|system)
                |platform[._](version|machine|python_implementation)
                |python_implementation
                |implementation_(name|version)
                |extra
            )\b
        """,
        re.VERBOSE,
    ),
    "SPECIFIER": re.compile(
        Specifier._operator_regex_str + Specifier._version_regex_str,
        re.VERBOSE | re.IGNORECASE,
    ),
    "AT": r"\@",
    "URL": r"[^ \t]+",
    "IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b",
    "VERSION_PREFIX_TRAIL": r"\.\*",
    "VERSION_LOCAL_LABEL_TRAIL": r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*",
    "WS": r"[ \t]+",
    "END": r"$",
}


class Tokenizer:
    """Context-sensitive token parsing.

    Provides methods to examine the input stream to check whether the next token
    matches.
    """

    def __init__(
        self,
        source: str,
        *,
        rules: "Dict[str, Union[str, re.Pattern[str]]]",
    ) -> None:
        self.source = source
        self.rules: Dict[str, re.Pattern[str]] = {
            name: re.compile(pattern) for name, pattern in rules.items()
        }
        self.next_token: Optional[Token] = None
        self.position = 0

    def consume(self, name: str) -> None:
        """Move beyond provided token name, if at current position."""
        if self.check(name):
            self.read()

    def check(self, name: str, *, peek: bool = False) -> bool:
        """Check whether the next token has the provided name.

        By default, if the check succeeds, the token *must* be read before
        another check. If `peek` is set to `True`, the token is not loaded and
        would need to be checked again.
        """
        assert (
            self.next_token is None
        ), f"Cannot check for {name!r}, already have {self.next_token!r}"
        assert name in self.rules, f"Unknown token name: {name!r}"

        expression = self.rules[name]

        match = expression.match(self.source, self.position)
        if match is None:
            return False
        if not peek:
            self.next_token = Token(name, match[0], self.position)
        return True

    def expect(self, name: str, *, expected: str) -> Token:
        """Expect a certain token name next, failing with a syntax error otherwise.

        The token is *not* read.
        """
        if not self.check(name):
            raise self.raise_syntax_error(f"Expected {expected}")
        return self.read()

    def read(self) -> Token:
        """Consume the next token and return it."""
        token = self.next_token
        assert token is not None

        self.position += len(token.text)
        self.next_token = None

        return token

    def raise_syntax_error(
        self,
        message: str,
        *,
        span_start: Optional[int] = None,
        span_end: Optional[int] = None,
    ) -> NoReturn:
        """Raise ParserSyntaxError at the given position."""
        span = (
            self.position if span_start is None else span_start,
            self.position if span_end is None else span_end,
        )
        raise ParserSyntaxError(
            message,
            source=self.source,
            span=span,
        )

    @contextlib.contextmanager
    def enclosing_tokens(
        self, open_token: str, close_token: str, *, around: str
    ) -> Iterator[None]:
        if self.check(open_token):
            open_position = self.position
            self.read()
        else:
            open_position = None

        yield

        if open_position is None:
            return

        if not self.check(close_token):
            self.raise_syntax_error(
                f"Expected matching {close_token} for {open_token}, after {around}",
                span_start=open_position,
            )

        self.read()
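

# Illustrative (not part of the upstream file): tokenizing "foo >=1.0" with
# DEFAULT_RULES, check("IDENTIFIER") loads Token('IDENTIFIER', 'foo', 0);
# read() consumes it and advances position past the matched text, so the
# next check() starts at the following character.
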
@ -0,0 +1,253 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

import operator
import os
import platform
import sys
from typing import Any, Callable, Dict, List, Optional, Tuple, Union

from ._parser import (
    MarkerAtom,
    MarkerList,
    Op,
    Value,
    Variable,
)
from ._parser import (
    parse_marker as _parse_marker,
)
from ._tokenizer import ParserSyntaxError
from .specifiers import InvalidSpecifier, Specifier
from .utils import canonicalize_name

__all__ = [
    "InvalidMarker",
    "UndefinedComparison",
    "UndefinedEnvironmentName",
    "Marker",
    "default_environment",
]

Operator = Callable[[str, str], bool]


class InvalidMarker(ValueError):
    """
    An invalid marker was found; users should refer to PEP 508.
    """


class UndefinedComparison(ValueError):
    """
    An invalid operation was attempted on a value that doesn't support it.
    """


class UndefinedEnvironmentName(ValueError):
    """
    A name was used that does not exist inside of the environment.
    """


def _normalize_extra_values(results: Any) -> Any:
    """
    Normalize extra values.
    """
    if isinstance(results[0], tuple):
        lhs, op, rhs = results[0]
        if isinstance(lhs, Variable) and lhs.value == "extra":
            normalized_extra = canonicalize_name(rhs.value)
            rhs = Value(normalized_extra)
        elif isinstance(rhs, Variable) and rhs.value == "extra":
            normalized_extra = canonicalize_name(lhs.value)
            lhs = Value(normalized_extra)
        results[0] = lhs, op, rhs
    return results


def _format_marker(
    marker: Union[List[str], MarkerAtom, str], first: Optional[bool] = True
) -> str:
    assert isinstance(marker, (list, tuple, str))

    # Sometimes we have a structure like [[...]] which is a single item list
    # where the single item is itself its own list. In that case we want to
    # skip the rest of this function so that we don't get extraneous () on
    # the outside.
    if (
        isinstance(marker, list)
        and len(marker) == 1
        and isinstance(marker[0], (list, tuple))
    ):
        return _format_marker(marker[0])

    if isinstance(marker, list):
        inner = (_format_marker(m, first=False) for m in marker)
        if first:
            return " ".join(inner)
        else:
            return "(" + " ".join(inner) + ")"
    elif isinstance(marker, tuple):
        return " ".join([m.serialize() for m in marker])
    else:
        return marker


_operators: Dict[str, Operator] = {
    "in": lambda lhs, rhs: lhs in rhs,
    "not in": lambda lhs, rhs: lhs not in rhs,
    "<": operator.lt,
    "<=": operator.le,
    "==": operator.eq,
    "!=": operator.ne,
    ">=": operator.ge,
    ">": operator.gt,
}


def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
    try:
        spec = Specifier("".join([op.serialize(), rhs]))
    except InvalidSpecifier:
        pass
    else:
        return spec.contains(lhs, prereleases=True)

    oper: Optional[Operator] = _operators.get(op.serialize())
    if oper is None:
        raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")

    return oper(lhs, rhs)


def _normalize(*values: str, key: str) -> Tuple[str, ...]:
    # PEP 685 – Comparison of extra names for optional distribution dependencies
    # https://peps.python.org/pep-0685/
    # > When comparing extra names, tools MUST normalize the names being
    # > compared using the semantics outlined in PEP 503 for names
    if key == "extra":
        return tuple(canonicalize_name(v) for v in values)

    # other environment markers don't have such standards
    return values


def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool:
    groups: List[List[bool]] = [[]]

    for marker in markers:
        assert isinstance(marker, (list, tuple, str))

        if isinstance(marker, list):
            groups[-1].append(_evaluate_markers(marker, environment))
        elif isinstance(marker, tuple):
            lhs, op, rhs = marker

            if isinstance(lhs, Variable):
                environment_key = lhs.value
                lhs_value = environment[environment_key]
                rhs_value = rhs.value
            else:
                lhs_value = lhs.value
                environment_key = rhs.value
                rhs_value = environment[environment_key]

            lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key)
            groups[-1].append(_eval_op(lhs_value, op, rhs_value))
        else:
            assert marker in ["and", "or"]
            if marker == "or":
                groups.append([])

    return any(all(item) for item in groups)
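

# Illustrative (not part of the upstream file): a parsed marker
# [A, 'and', B, 'or', C] is grouped as [[A, B], [C]] above, so it evaluates
# as (A and B) or C; 'and' binds tighter than 'or' because only 'or' starts
# a new group.
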
def format_full_version(info: "sys._version_info") -> str:
    version = "{0.major}.{0.minor}.{0.micro}".format(info)
    kind = info.releaselevel
    if kind != "final":
        version += kind[0] + str(info.serial)
    return version


def default_environment() -> Dict[str, str]:
    iver = format_full_version(sys.implementation.version)
    implementation_name = sys.implementation.name
    return {
        "implementation_name": implementation_name,
        "implementation_version": iver,
        "os_name": os.name,
        "platform_machine": platform.machine(),
        "platform_release": platform.release(),
        "platform_system": platform.system(),
        "platform_version": platform.version(),
        "python_full_version": platform.python_version(),
        "platform_python_implementation": platform.python_implementation(),
        "python_version": ".".join(platform.python_version_tuple()[:2]),
        "sys_platform": sys.platform,
    }


class Marker:
|
||||
def __init__(self, marker: str) -> None:
|
||||
# Note: We create a Marker object without calling this constructor in
|
||||
# packaging.requirements.Requirement. If any additional logic is
|
||||
# added here, make sure to mirror/adapt Requirement.
|
||||
try:
|
||||
self._markers = _normalize_extra_values(_parse_marker(marker))
|
||||
# The attribute `_markers` can be described in terms of a recursive type:
|
||||
# MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
|
||||
#
|
||||
# For example, the following expression:
|
||||
# python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
|
||||
#
|
||||
# is parsed into:
|
||||
# [
|
||||
# (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),
|
||||
# 'and',
|
||||
# [
|
||||
# (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),
|
||||
# 'or',
|
||||
# (<Variable('os_name')>, <Op('==')>, <Value('unix')>)
|
||||
# ]
|
||||
# ]
|
||||
        except ParserSyntaxError as e:
            raise InvalidMarker(str(e)) from e

    def __str__(self) -> str:
        return _format_marker(self._markers)

    def __repr__(self) -> str:
        return f"<Marker('{self}')>"

    def __hash__(self) -> int:
        return hash((self.__class__.__name__, str(self)))

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Marker):
            return NotImplemented

        return str(self) == str(other)

    def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
        """Evaluate a marker.

        Return the boolean from evaluating the given marker against the
        environment. environment is an optional argument to override all or
        part of the determined environment.

        The environment is determined from the current Python process.
        """
        current_environment = default_environment()
        current_environment["extra"] = ""
        if environment is not None:
            current_environment.update(environment)
            # The API used to allow setting extra to None. We need to handle this
            # case for backwards compatibility.
            if current_environment["extra"] is None:
                current_environment["extra"] = ""

        return _evaluate_markers(self._markers, current_environment)
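
# Illustrative usage of Marker.evaluate (a sketch, not part of the vendored
# file); the overrides dict replaces keys from default_environment():
#
#     >>> m = Marker('python_version >= "3.8" and os_name == "posix"')
#     >>> m.evaluate({"python_version": "3.11", "os_name": "posix"})
#     True
#     >>> m.evaluate({"python_version": "3.7", "os_name": "posix"})
#     False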
@ -0,0 +1,90 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from typing import Any, Iterator, Optional, Set

from ._parser import parse_requirement as _parse_requirement
from ._tokenizer import ParserSyntaxError
from .markers import Marker, _normalize_extra_values
from .specifiers import SpecifierSet
from .utils import canonicalize_name


class InvalidRequirement(ValueError):
    """
    An invalid requirement was found, users should refer to PEP 508.
    """


class Requirement:
    """Parse a requirement.

    Parse a given requirement string into its parts, such as name, specifier,
    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
    string.
    """

    # TODO: Can we test whether something is contained within a requirement?
    #       If so how do we do that? Do we need to test against the _name_ of
    #       the thing as well as the version? What about the markers?
    # TODO: Can we normalize the name and extra name?

    def __init__(self, requirement_string: str) -> None:
        try:
            parsed = _parse_requirement(requirement_string)
        except ParserSyntaxError as e:
            raise InvalidRequirement(str(e)) from e

        self.name: str = parsed.name
        self.url: Optional[str] = parsed.url or None
        self.extras: Set[str] = set(parsed.extras or [])
        self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
        self.marker: Optional[Marker] = None
        if parsed.marker is not None:
            self.marker = Marker.__new__(Marker)
            self.marker._markers = _normalize_extra_values(parsed.marker)

    def _iter_parts(self, name: str) -> Iterator[str]:
        yield name

        if self.extras:
            formatted_extras = ",".join(sorted(self.extras))
            yield f"[{formatted_extras}]"

        if self.specifier:
            yield str(self.specifier)

        if self.url:
            yield f"@ {self.url}"
            if self.marker:
                yield " "

        if self.marker:
            yield f"; {self.marker}"

    def __str__(self) -> str:
        return "".join(self._iter_parts(self.name))

    def __repr__(self) -> str:
        return f"<Requirement('{self}')>"

    def __hash__(self) -> int:
        return hash(
            (
                self.__class__.__name__,
                *self._iter_parts(canonicalize_name(self.name)),
            )
        )

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Requirement):
            return NotImplemented

        return (
            canonicalize_name(self.name) == canonicalize_name(other.name)
            and self.extras == other.extras
            and self.specifier == other.specifier
            and self.url == other.url
            and self.marker == other.marker
        )
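
# Illustrative round trip through Requirement (a sketch, not part of the
# vendored file):
#
#     >>> r = Requirement('name[extra1,extra2]>=1.0; python_version < "3.13"')
#     >>> r.name, sorted(r.extras), str(r.specifier)
#     ('name', ['extra1', 'extra2'], '>=1.0')
#     >>> str(r)
#     'name[extra1,extra2]>=1.0; python_version < "3.13"'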
File diff suppressed because it is too large
@ -0,0 +1,571 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

import logging
import platform
import re
import struct
import subprocess
import sys
import sysconfig
from importlib.machinery import EXTENSION_SUFFIXES
from typing import (
    Dict,
    FrozenSet,
    Iterable,
    Iterator,
    List,
    Optional,
    Sequence,
    Tuple,
    Union,
    cast,
)

from . import _manylinux, _musllinux

logger = logging.getLogger(__name__)

PythonVersion = Sequence[int]
MacVersion = Tuple[int, int]

INTERPRETER_SHORT_NAMES: Dict[str, str] = {
    "python": "py",  # Generic.
    "cpython": "cp",
    "pypy": "pp",
    "ironpython": "ip",
    "jython": "jy",
}


_32_BIT_INTERPRETER = struct.calcsize("P") == 4


class Tag:
    """
    A representation of the tag triple for a wheel.

    Instances are considered immutable and thus are hashable. Equality checking
    is also supported.
    """

    __slots__ = ["_interpreter", "_abi", "_platform", "_hash"]

    def __init__(self, interpreter: str, abi: str, platform: str) -> None:
        self._interpreter = interpreter.lower()
        self._abi = abi.lower()
        self._platform = platform.lower()
        # The __hash__ of every single element in a Set[Tag] will be evaluated each time
        # that a set calls its `.disjoint()` method, which may be called hundreds of
        # times when scanning a page of links for packages with tags matching that
        # Set[Tag]. Pre-computing the value here produces significant speedups for
        # downstream consumers.
        self._hash = hash((self._interpreter, self._abi, self._platform))

    @property
    def interpreter(self) -> str:
        return self._interpreter

    @property
    def abi(self) -> str:
        return self._abi

    @property
    def platform(self) -> str:
        return self._platform

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Tag):
            return NotImplemented

        return (
            (self._hash == other._hash)  # Short-circuit ASAP for perf reasons.
            and (self._platform == other._platform)
            and (self._abi == other._abi)
            and (self._interpreter == other._interpreter)
        )

    def __hash__(self) -> int:
        return self._hash

    def __str__(self) -> str:
        return f"{self._interpreter}-{self._abi}-{self._platform}"

    def __repr__(self) -> str:
        return f"<{self} @ {id(self)}>"


def parse_tag(tag: str) -> FrozenSet[Tag]:
    """
    Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.

    Returning a set is required due to the possibility that the tag is a
    compressed tag set.
    """
    tags = set()
    interpreters, abis, platforms = tag.split("-")
    for interpreter in interpreters.split("."):
        for abi in abis.split("."):
            for platform_ in platforms.split("."):
                tags.add(Tag(interpreter, abi, platform_))
    return frozenset(tags)
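
# A compressed tag set expands to multiple Tag instances (sketch):
#
#     >>> sorted(str(t) for t in parse_tag("py2.py3-none-any"))
#     ['py2-none-any', 'py3-none-any']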


def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]:
    value: Union[int, str, None] = sysconfig.get_config_var(name)
    if value is None and warn:
        logger.debug(
            "Config variable '%s' is unset, Python ABI tag may be incorrect", name
        )
    return value


def _normalize_string(string: str) -> str:
    return string.replace(".", "_").replace("-", "_").replace(" ", "_")


def _is_threaded_cpython(abis: List[str]) -> bool:
    """
    Determine if the ABI corresponds to a threaded (`--disable-gil`) build.

    The threaded builds are indicated by a "t" in the abiflags.
    """
    if len(abis) == 0:
        return False
    # expect e.g., cp313
    m = re.match(r"cp\d+(.*)", abis[0])
    if not m:
        return False
    abiflags = m.group(1)
    return "t" in abiflags


def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool:
    """
    Determine if the Python version supports abi3.

    PEP 384 was first implemented in Python 3.2. The threaded (`--disable-gil`)
    builds do not support abi3.
    """
    return len(python_version) > 1 and tuple(python_version) >= (3, 2) and not threading


def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
    py_version = tuple(py_version)  # To allow for version comparison.
    abis = []
    version = _version_nodot(py_version[:2])
    threading = debug = pymalloc = ucs4 = ""
    with_debug = _get_config_var("Py_DEBUG", warn)
    has_refcount = hasattr(sys, "gettotalrefcount")
    # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
    # extension modules is the best option.
    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
    if with_debug or (with_debug is None and (has_refcount or has_ext)):
        debug = "d"
    if py_version >= (3, 13) and _get_config_var("Py_GIL_DISABLED", warn):
        threading = "t"
    if py_version < (3, 8):
        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
        if with_pymalloc or with_pymalloc is None:
            pymalloc = "m"
        if py_version < (3, 3):
            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
            if unicode_size == 4 or (
                unicode_size is None and sys.maxunicode == 0x10FFFF
            ):
                ucs4 = "u"
    elif debug:
        # Debug builds can also load "normal" extension modules.
        # We can also assume no UCS-4 or pymalloc requirement.
        abis.append(f"cp{version}{threading}")
    abis.insert(0, f"cp{version}{threading}{debug}{pymalloc}{ucs4}")
    return abis


def cpython_tags(
    python_version: Optional[PythonVersion] = None,
    abis: Optional[Iterable[str]] = None,
    platforms: Optional[Iterable[str]] = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yields the tags for a CPython interpreter.

    The tags consist of:
    - cp<python_version>-<abi>-<platform>
    - cp<python_version>-abi3-<platform>
    - cp<python_version>-none-<platform>
    - cp<less than python_version>-abi3-<platform>  # Older Python versions down to 3.2.

    If python_version only specifies a major version then user-provided ABIs and
    the 'none' ABI tag will be used.

    If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
    their normal position and not at the beginning.
    """
    if not python_version:
        python_version = sys.version_info[:2]

    interpreter = f"cp{_version_nodot(python_version[:2])}"

    if abis is None:
        if len(python_version) > 1:
            abis = _cpython_abis(python_version, warn)
        else:
            abis = []
    abis = list(abis)
    # 'abi3' and 'none' are explicitly handled later.
    for explicit_abi in ("abi3", "none"):
        try:
            abis.remove(explicit_abi)
        except ValueError:
            pass

    platforms = list(platforms or platform_tags())
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)

    threading = _is_threaded_cpython(abis)
    use_abi3 = _abi3_applies(python_version, threading)
    if use_abi3:
        yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
    yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)

    if use_abi3:
        for minor_version in range(python_version[1] - 1, 1, -1):
            for platform_ in platforms:
                interpreter = "cp{version}".format(
                    version=_version_nodot((python_version[0], minor_version))
                )
                yield Tag(interpreter, "abi3", platform_)
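
# With explicit abis/platforms the priority order is visible: the concrete ABI
# first, then abi3 for this and every older CPython down to 3.2, then 'none'
# (a sketch assuming a non-free-threaded CPython 3.11; 'any' is just a
# stand-in platform):
#
#     >>> tags = list(cpython_tags((3, 11), abis=["cp311"], platforms=["any"]))
#     >>> [str(t) for t in tags[:4]]
#     ['cp311-cp311-any', 'cp311-abi3-any', 'cp311-none-any', 'cp310-abi3-any']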


def _generic_abi() -> List[str]:
    """
    Return the ABI tag based on EXT_SUFFIX.
    """
    # The following are examples of `EXT_SUFFIX`.
    # We want to keep the parts which are related to the ABI and remove the
    # parts which are related to the platform:
    # - linux:   '.cpython-310-x86_64-linux-gnu.so' => cp310
    # - mac:     '.cpython-310-darwin.so'           => cp310
    # - win:     '.cp310-win_amd64.pyd'             => cp310
    # - win:     '.pyd'                             => cp37 (uses _cpython_abis())
    # - pypy:    '.pypy38-pp73-x86_64-linux-gnu.so' => pypy38_pp73
    # - graalpy: '.graalpy-38-native-x86_64-darwin.dylib'
    #                                               => graalpy_38_native

    ext_suffix = _get_config_var("EXT_SUFFIX", warn=True)
    if not isinstance(ext_suffix, str) or ext_suffix[0] != ".":
        raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')")
    parts = ext_suffix.split(".")
    if len(parts) < 3:
        # CPython3.7 and earlier uses ".pyd" on Windows.
        return _cpython_abis(sys.version_info[:2])
    soabi = parts[1]
    if soabi.startswith("cpython"):
        # non-windows
        abi = "cp" + soabi.split("-")[1]
    elif soabi.startswith("cp"):
        # windows
        abi = soabi.split("-")[0]
    elif soabi.startswith("pypy"):
        abi = "-".join(soabi.split("-")[:2])
    elif soabi.startswith("graalpy"):
        abi = "-".join(soabi.split("-")[:3])
    elif soabi:
        # pyston, ironpython, others?
        abi = soabi
    else:
        return []
    return [_normalize_string(abi)]


def generic_tags(
    interpreter: Optional[str] = None,
    abis: Optional[Iterable[str]] = None,
    platforms: Optional[Iterable[str]] = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yields the tags for a generic interpreter.

    The tags consist of:
    - <interpreter>-<abi>-<platform>

    The "none" ABI will be added if it was not explicitly provided.
    """
    if not interpreter:
        interp_name = interpreter_name()
        interp_version = interpreter_version(warn=warn)
        interpreter = "".join([interp_name, interp_version])
    if abis is None:
        abis = _generic_abi()
    else:
        abis = list(abis)
    platforms = list(platforms or platform_tags())
    if "none" not in abis:
        abis.append("none")
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)


def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
    """
    Yields Python versions in descending order.

    After the latest version, the major-only version will be yielded, and then
    all previous versions of that major version.
    """
    if len(py_version) > 1:
        yield f"py{_version_nodot(py_version[:2])}"
    yield f"py{py_version[0]}"
    if len(py_version) > 1:
        for minor in range(py_version[1] - 1, -1, -1):
            yield f"py{_version_nodot((py_version[0], minor))}"


def compatible_tags(
    python_version: Optional[PythonVersion] = None,
    interpreter: Optional[str] = None,
    platforms: Optional[Iterable[str]] = None,
) -> Iterator[Tag]:
    """
    Yields the sequence of tags that are compatible with a specific version of Python.

    The tags consist of:
    - py*-none-<platform>
    - <interpreter>-none-any  # ... if `interpreter` is provided.
    - py*-none-any
    """
    if not python_version:
        python_version = sys.version_info[:2]
    platforms = list(platforms or platform_tags())
    for version in _py_interpreter_range(python_version):
        for platform_ in platforms:
            yield Tag(version, "none", platform_)
    if interpreter:
        yield Tag(interpreter, "none", "any")
    for version in _py_interpreter_range(python_version):
        yield Tag(version, "none", "any")
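
# compatible_tags() orders pure-Python fallbacks from most to least specific
# (a sketch; 'any' is a stand-in platform):
#
#     >>> tags = list(compatible_tags((3, 11), "cp311", ["any"]))
#     >>> [str(t) for t in tags[:4]]
#     ['py311-none-any', 'py3-none-any', 'py310-none-any', 'py39-none-any']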


def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
    if not is_32bit:
        return arch

    if arch.startswith("ppc"):
        return "ppc"

    return "i386"


def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
    formats = [cpu_arch]
    if cpu_arch == "x86_64":
        if version < (10, 4):
            return []
        formats.extend(["intel", "fat64", "fat32"])

    elif cpu_arch == "i386":
        if version < (10, 4):
            return []
        formats.extend(["intel", "fat32", "fat"])

    elif cpu_arch == "ppc64":
        # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
        if version > (10, 5) or version < (10, 4):
            return []
        formats.append("fat64")

    elif cpu_arch == "ppc":
        if version > (10, 6):
            return []
        formats.extend(["fat32", "fat"])

    if cpu_arch in {"arm64", "x86_64"}:
        formats.append("universal2")

    if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
        formats.append("universal")

    return formats


def mac_platforms(
    version: Optional[MacVersion] = None, arch: Optional[str] = None
) -> Iterator[str]:
    """
    Yields the platform tags for a macOS system.

    The `version` parameter is a two-item tuple specifying the macOS version to
    generate platform tags for. The `arch` parameter is the CPU architecture to
    generate platform tags for. Both parameters default to the appropriate value
    for the current system.
    """
    version_str, _, cpu_arch = platform.mac_ver()
    if version is None:
        version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
        if version == (10, 16):
            # When built against an older macOS SDK, Python will report macOS 10.16
            # instead of the real version.
            version_str = subprocess.run(
                [
                    sys.executable,
                    "-sS",
                    "-c",
                    "import platform; print(platform.mac_ver()[0])",
                ],
                check=True,
                env={"SYSTEM_VERSION_COMPAT": "0"},
                stdout=subprocess.PIPE,
                text=True,
            ).stdout
            version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
    else:
        version = version
    if arch is None:
        arch = _mac_arch(cpu_arch)
    else:
        arch = arch

    if (10, 0) <= version and version < (11, 0):
        # Prior to Mac OS 11, each yearly release of Mac OS bumped the
        # "minor" version number. The major version was always 10.
        for minor_version in range(version[1], -1, -1):
            compat_version = 10, minor_version
            binary_formats = _mac_binary_formats(compat_version, arch)
            for binary_format in binary_formats:
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=10, minor=minor_version, binary_format=binary_format
                )

    if version >= (11, 0):
        # Starting with Mac OS 11, each yearly release bumps the major version
        # number. The minor versions are now the midyear updates.
        for major_version in range(version[0], 10, -1):
            compat_version = major_version, 0
            binary_formats = _mac_binary_formats(compat_version, arch)
            for binary_format in binary_formats:
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=major_version, minor=0, binary_format=binary_format
                )

    if version >= (11, 0):
        # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
        # Arm64 support was introduced in 11.0, so no Arm binaries from previous
        # releases exist.
        #
        # However, the "universal2" binary format can have a
        # macOS version earlier than 11.0 when the x86_64 part of the binary supports
        # that version of macOS.
        if arch == "x86_64":
            for minor_version in range(16, 3, -1):
                compat_version = 10, minor_version
                binary_formats = _mac_binary_formats(compat_version, arch)
                for binary_format in binary_formats:
                    yield "macosx_{major}_{minor}_{binary_format}".format(
                        major=compat_version[0],
                        minor=compat_version[1],
                        binary_format=binary_format,
                    )
        else:
            for minor_version in range(16, 3, -1):
                compat_version = 10, minor_version
                binary_format = "universal2"
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=compat_version[0],
                    minor=compat_version[1],
                    binary_format=binary_format,
                )
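
# For a fixed version/arch the yielded tags walk backwards through compatible
# macOS releases and binary formats (sketch):
#
#     >>> list(mac_platforms((10, 15), "x86_64"))[:3]
#     ['macosx_10_15_x86_64', 'macosx_10_15_intel', 'macosx_10_15_fat64']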


def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
    linux = _normalize_string(sysconfig.get_platform())
    if not linux.startswith("linux_"):
        # we should never be here, just yield the sysconfig one and return
        yield linux
        return
    if is_32bit:
        if linux == "linux_x86_64":
            linux = "linux_i686"
        elif linux == "linux_aarch64":
            linux = "linux_armv8l"
    _, arch = linux.split("_", 1)
    archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch])
    yield from _manylinux.platform_tags(archs)
    yield from _musllinux.platform_tags(archs)
    for arch in archs:
        yield f"linux_{arch}"


def _generic_platforms() -> Iterator[str]:
    yield _normalize_string(sysconfig.get_platform())


def platform_tags() -> Iterator[str]:
    """
    Provides the platform tags for this installation.
    """
    if platform.system() == "Darwin":
        return mac_platforms()
    elif platform.system() == "Linux":
        return _linux_platforms()
    else:
        return _generic_platforms()


def interpreter_name() -> str:
    """
    Returns the name of the running interpreter.

    Some implementations have a reserved, two-letter abbreviation which will
    be returned when appropriate.
    """
    name = sys.implementation.name
    return INTERPRETER_SHORT_NAMES.get(name) or name


def interpreter_version(*, warn: bool = False) -> str:
    """
    Returns the version of the running interpreter.
    """
    version = _get_config_var("py_version_nodot", warn=warn)
    if version:
        version = str(version)
    else:
        version = _version_nodot(sys.version_info[:2])
    return version


def _version_nodot(version: PythonVersion) -> str:
    return "".join(map(str, version))


def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
    """
    Returns the sequence of tag triples for the running interpreter.

    The order of the sequence corresponds to priority order for the
    interpreter, from most to least important.
    """

    interp_name = interpreter_name()
    if interp_name == "cp":
        yield from cpython_tags(warn=warn)
    else:
        yield from generic_tags()

    if interp_name == "pp":
        interp = "pp3"
    elif interp_name == "cp":
        interp = "cp" + interpreter_version(warn=warn)
    else:
        interp = None
    yield from compatible_tags(interpreter=interp)
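
# sys_tags() is the module's main entry point: installers walk this sequence
# in order and pick the first wheel whose tags intersect it. A sketch:
#
#     >>> candidates = parse_tag("py3-none-any")
#     >>> any(tag in candidates for tag in sys_tags())
#     True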
@ -0,0 +1,172 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

import re
from typing import FrozenSet, NewType, Tuple, Union, cast

from .tags import Tag, parse_tag
from .version import InvalidVersion, Version

BuildTag = Union[Tuple[()], Tuple[int, str]]
NormalizedName = NewType("NormalizedName", str)


class InvalidName(ValueError):
    """
    An invalid distribution name; users should refer to the packaging user guide.
    """


class InvalidWheelFilename(ValueError):
    """
    An invalid wheel filename was found, users should refer to PEP 427.
    """


class InvalidSdistFilename(ValueError):
    """
    An invalid sdist filename was found, users should refer to the packaging user guide.
    """


# Core metadata spec for `Name`
_validate_regex = re.compile(
    r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
)
_canonicalize_regex = re.compile(r"[-_.]+")
_normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$")
# PEP 427: The build number must start with a digit.
_build_tag_regex = re.compile(r"(\d+)(.*)")


def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName:
    if validate and not _validate_regex.match(name):
        raise InvalidName(f"name is invalid: {name!r}")
    # This is taken from PEP 503.
    value = _canonicalize_regex.sub("-", name).lower()
    return cast(NormalizedName, value)


def is_normalized_name(name: str) -> bool:
    return _normalized_regex.match(name) is not None
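
# Normalization sketch: runs of '-', '_' and '.' collapse to a single '-' and
# the result is lowercased, per PEP 503:
#
#     >>> canonicalize_name("Sphinx.RTD_Theme")
#     'sphinx-rtd-theme'
#     >>> is_normalized_name("sphinx-rtd-theme")
#     True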


def canonicalize_version(
    version: Union[Version, str], *, strip_trailing_zero: bool = True
) -> str:
    """
    This is very similar to Version.__str__, but has one subtle difference
    with the way it handles the release segment.
    """
    if isinstance(version, str):
        try:
            parsed = Version(version)
        except InvalidVersion:
            # Legacy versions cannot be normalized
            return version
    else:
        parsed = version

    parts = []

    # Epoch
    if parsed.epoch != 0:
        parts.append(f"{parsed.epoch}!")

    # Release segment
    release_segment = ".".join(str(x) for x in parsed.release)
    if strip_trailing_zero:
        # NB: This strips trailing '.0's to normalize
        release_segment = re.sub(r"(\.0)+$", "", release_segment)
    parts.append(release_segment)

    # Pre-release
    if parsed.pre is not None:
        parts.append("".join(str(x) for x in parsed.pre))

    # Post-release
    if parsed.post is not None:
        parts.append(f".post{parsed.post}")

    # Development release
    if parsed.dev is not None:
        parts.append(f".dev{parsed.dev}")

    # Local version segment
    if parsed.local is not None:
        parts.append(f"+{parsed.local}")

    return "".join(parts)
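
# The one difference from Version.__str__ is the trailing-zero handling
# (sketch):
#
#     >>> canonicalize_version("1.2.0")
#     '1.2'
#     >>> canonicalize_version("1.2.0", strip_trailing_zero=False)
#     '1.2.0'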


def parse_wheel_filename(
    filename: str,
) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]:
    if not filename.endswith(".whl"):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (extension must be '.whl'): {filename}"
        )

    filename = filename[:-4]
    dashes = filename.count("-")
    if dashes not in (4, 5):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (wrong number of parts): {filename}"
        )

    parts = filename.split("-", dashes - 2)
    name_part = parts[0]
    # See PEP 427 for the rules on escaping the project name.
    if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
        raise InvalidWheelFilename(f"Invalid project name: {filename}")
    name = canonicalize_name(name_part)

    try:
        version = Version(parts[1])
    except InvalidVersion as e:
        raise InvalidWheelFilename(
            f"Invalid wheel filename (invalid version): {filename}"
        ) from e

    if dashes == 5:
        build_part = parts[2]
        build_match = _build_tag_regex.match(build_part)
        if build_match is None:
            raise InvalidWheelFilename(
                f"Invalid build number: {build_part} in '{filename}'"
            )
        build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
    else:
        build = ()
    tags = parse_tag(parts[-1])
    return (name, version, build, tags)


def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
    if filename.endswith(".tar.gz"):
        file_stem = filename[: -len(".tar.gz")]
    elif filename.endswith(".zip"):
        file_stem = filename[: -len(".zip")]
    else:
        raise InvalidSdistFilename(
            f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
            f" {filename}"
        )

    # We are requiring a PEP 440 version, which cannot contain dashes,
    # so we split on the last dash.
    name_part, sep, version_part = file_stem.rpartition("-")
    if not sep:
        raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")

    name = canonicalize_name(name_part)

    try:
        version = Version(version_part)
    except InvalidVersion as e:
        raise InvalidSdistFilename(
            f"Invalid sdist filename (invalid version): {filename}"
        ) from e

    return (name, version)
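
# Filename parsing sketch for a wheel without a build number:
#
#     >>> name, version, build, tags = parse_wheel_filename(
#     ...     "wheel-0.43.0-py3-none-any.whl"
#     ... )
#     >>> (name, str(version), build, sorted(str(t) for t in tags))
#     ('wheel', '0.43.0', (), ['py3-none-any'])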
@ -0,0 +1,561 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
"""
.. testsetup::

    from packaging.version import parse, Version
"""

import itertools
import re
from typing import Any, Callable, NamedTuple, Optional, SupportsInt, Tuple, Union

from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType

__all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"]

LocalType = Tuple[Union[int, str], ...]

CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]]
CmpLocalType = Union[
    NegativeInfinityType,
    Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...],
]
CmpKey = Tuple[
    int,
    Tuple[int, ...],
    CmpPrePostDevType,
    CmpPrePostDevType,
    CmpPrePostDevType,
    CmpLocalType,
]
VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]


class _Version(NamedTuple):
    epoch: int
    release: Tuple[int, ...]
    dev: Optional[Tuple[str, int]]
    pre: Optional[Tuple[str, int]]
    post: Optional[Tuple[str, int]]
    local: Optional[LocalType]


def parse(version: str) -> "Version":
    """Parse the given version string.

    >>> parse('1.0.dev1')
    <Version('1.0.dev1')>

    :param version: The version string to parse.
    :raises InvalidVersion: When the version string is not a valid version.
    """
    return Version(version)


class InvalidVersion(ValueError):
    """Raised when a version string is not a valid version.

    >>> Version("invalid")
    Traceback (most recent call last):
        ...
    packaging.version.InvalidVersion: Invalid version: 'invalid'
    """


class _BaseVersion:
    _key: Tuple[Any, ...]

    def __hash__(self) -> int:
        return hash(self._key)

    # Please keep the duplicated `isinstance` check
    # in the six comparisons hereunder
    # unless you find a way to avoid adding overhead function calls.
    def __lt__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key < other._key

    def __le__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key <= other._key

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key == other._key

    def __ge__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key >= other._key

    def __gt__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key > other._key

    def __ne__(self, other: object) -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key != other._key


# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
_VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""

VERSION_PATTERN = _VERSION_PATTERN
"""
A string containing the regular expression used to match a valid version.

The pattern is not anchored at either end, and is intended for embedding in larger
expressions (for example, matching a version number as part of a file name). The
regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
flags set.

:meta hide-value:
"""


class Version(_BaseVersion):
    """This class abstracts handling of a project's versions.

    A :class:`Version` instance is comparison aware and can be compared and
    sorted using the standard Python interfaces.

    >>> v1 = Version("1.0a5")
    >>> v2 = Version("1.0")
    >>> v1
    <Version('1.0a5')>
    >>> v2
    <Version('1.0')>
    >>> v1 < v2
    True
    >>> v1 == v2
    False
    >>> v1 > v2
    False
    >>> v1 >= v2
    False
    >>> v1 <= v2
    True
    """

    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
    _key: CmpKey

    def __init__(self, version: str) -> None:
        """Initialize a Version object.

        :param version:
            The string representation of a version which will be parsed and normalized
            before use.
        :raises InvalidVersion:
            If the ``version`` does not conform to PEP 440 in any way then this
            exception will be raised.
        """

        # Validate the version and parse it into pieces
        match = self._regex.search(version)
        if not match:
            raise InvalidVersion(f"Invalid version: '{version}'")

        # Store the parsed out pieces of the version
        self._version = _Version(
            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
            release=tuple(int(i) for i in match.group("release").split(".")),
            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
            post=_parse_letter_version(
                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
            ),
            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
            local=_parse_local_version(match.group("local")),
        )

        # Generate a key which will be used for sorting
        self._key = _cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )

    def __repr__(self) -> str:
        """A representation of the Version that shows all internal state.

        >>> Version('1.0.0')
        <Version('1.0.0')>
        """
        return f"<Version('{self}')>"

    def __str__(self) -> str:
        """A string representation of the version that can be round-tripped.

        >>> str(Version("1.0a5"))
        '1.0a5'
        """
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append(f"{self.epoch}!")

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        # Pre-release
        if self.pre is not None:
            parts.append("".join(str(x) for x in self.pre))

        # Post-release
        if self.post is not None:
            parts.append(f".post{self.post}")

        # Development release
        if self.dev is not None:
            parts.append(f".dev{self.dev}")

        # Local version segment
        if self.local is not None:
            parts.append(f"+{self.local}")

        return "".join(parts)

    @property
    def epoch(self) -> int:
        """The epoch of the version.

        >>> Version("2.0.0").epoch
        0
        >>> Version("1!2.0.0").epoch
        1
        """
        return self._version.epoch

    @property
    def release(self) -> Tuple[int, ...]:
        """The components of the "release" segment of the version.

        >>> Version("1.2.3").release
        (1, 2, 3)
        >>> Version("2.0.0").release
        (2, 0, 0)
        >>> Version("1!2.0.0.post0").release
        (2, 0, 0)

        Includes trailing zeroes but not the epoch or any pre-release / development /
        post-release suffixes.
        """
        return self._version.release

    @property
    def pre(self) -> Optional[Tuple[str, int]]:
        """The pre-release segment of the version.

        >>> print(Version("1.2.3").pre)
        None
        >>> Version("1.2.3a1").pre
        ('a', 1)
        >>> Version("1.2.3b1").pre
        ('b', 1)
        >>> Version("1.2.3rc1").pre
        ('rc', 1)
        """
        return self._version.pre

    @property
    def post(self) -> Optional[int]:
        """The post-release number of the version.

        >>> print(Version("1.2.3").post)
        None
        >>> Version("1.2.3.post1").post
        1
        """
        return self._version.post[1] if self._version.post else None

    @property
    def dev(self) -> Optional[int]:
        """The development number of the version.

        >>> print(Version("1.2.3").dev)
        None
        >>> Version("1.2.3.dev1").dev
        1
        """
        return self._version.dev[1] if self._version.dev else None

    @property
    def local(self) -> Optional[str]:
        """The local version segment of the version.

        >>> print(Version("1.2.3").local)
        None
        >>> Version("1.2.3+abc").local
        'abc'
        """
        if self._version.local:
            return ".".join(str(x) for x in self._version.local)
        else:
            return None

    @property
    def public(self) -> str:
        """The public portion of the version.

        >>> Version("1.2.3").public
        '1.2.3'
        >>> Version("1.2.3+abc").public
        '1.2.3'
        >>> Version("1.2.3+abc.dev1").public
        '1.2.3'
        """
        return str(self).split("+", 1)[0]

    @property
    def base_version(self) -> str:
        """The "base version" of the version.

        >>> Version("1.2.3").base_version
        '1.2.3'
        >>> Version("1.2.3+abc").base_version
        '1.2.3'
        >>> Version("1!1.2.3+abc.dev1").base_version
        '1!1.2.3'

        The "base version" is the public version of the project without any pre or post
        release markers.
        """
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append(f"{self.epoch}!")

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        return "".join(parts)

    @property
    def is_prerelease(self) -> bool:
        """Whether this version is a pre-release.

        >>> Version("1.2.3").is_prerelease
        False
        >>> Version("1.2.3a1").is_prerelease
        True
        >>> Version("1.2.3b1").is_prerelease
        True
        >>> Version("1.2.3rc1").is_prerelease
        True
        >>> Version("1.2.3dev1").is_prerelease
        True
        """
        return self.dev is not None or self.pre is not None

    @property
    def is_postrelease(self) -> bool:
        """Whether this version is a post-release.

        >>> Version("1.2.3").is_postrelease
        False
        >>> Version("1.2.3.post1").is_postrelease
        True
        """
        return self.post is not None

    @property
    def is_devrelease(self) -> bool:
        """Whether this version is a development release.

        >>> Version("1.2.3").is_devrelease
        False
        >>> Version("1.2.3.dev1").is_devrelease
        True
        """
        return self.dev is not None

    @property
    def major(self) -> int:
        """The first item of :attr:`release` or ``0`` if unavailable.

        >>> Version("1.2.3").major
        1
        """
        return self.release[0] if len(self.release) >= 1 else 0

    @property
    def minor(self) -> int:
        """The second item of :attr:`release` or ``0`` if unavailable.

        >>> Version("1.2.3").minor
        2
        >>> Version("1").minor
        0
        """
        return self.release[1] if len(self.release) >= 2 else 0

    @property
    def micro(self) -> int:
        """The third item of :attr:`release` or ``0`` if unavailable.

        >>> Version("1.2.3").micro
        3
        >>> Version("1").micro
        0
        """
        return self.release[2] if len(self.release) >= 3 else 0

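# The comparison key built by _cmpkey() below yields the PEP 440 total order;
# note in particular that a dev release sorts before the pre-releases of the
# same version (sketch):
#
#     >>> versions = ["1.0", "1.0a1", "1.0.dev0", "1.0.post1"]
#     >>> [str(v) for v in sorted(Version(v) for v in versions)]
#     ['1.0.dev0', '1.0a1', '1.0', '1.0.post1']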

def _parse_letter_version(
    letter: Optional[str], number: Union[str, bytes, SupportsInt, None]
) -> Optional[Tuple[str, int]]:
    if letter:
        # We consider there to be an implicit 0 in a pre-release if there is
        # not a numeral associated with it.
        if number is None:
            number = 0

        # We normalize any letters to their lower case form
        letter = letter.lower()

        # We consider some words to be alternate spellings of other words and
        # in those cases we want to normalize the spellings to our preferred
        # spelling.
        if letter == "alpha":
            letter = "a"
        elif letter == "beta":
            letter = "b"
        elif letter in ["c", "pre", "preview"]:
            letter = "rc"
        elif letter in ["rev", "r"]:
            letter = "post"

        return letter, int(number)
    if not letter and number:
        # We assume if we are given a number, but we are not given a letter
        # then this is using the implicit post release syntax (e.g. 1.0-1)
        letter = "post"

        return letter, int(number)

    return None


_local_version_separators = re.compile(r"[\._-]")


def _parse_local_version(local: Optional[str]) -> Optional[LocalType]:
    """
    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
    """
    if local is not None:
        return tuple(
            part.lower() if not part.isdigit() else int(part)
            for part in _local_version_separators.split(local)
        )
    return None


def _cmpkey(
    epoch: int,
    release: Tuple[int, ...],
    pre: Optional[Tuple[str, int]],
    post: Optional[Tuple[str, int]],
    dev: Optional[Tuple[str, int]],
    local: Optional[LocalType],
) -> CmpKey:
    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll reverse the list, drop all the now-leading
    # zeros until we come to something non-zero, then re-reverse the rest back
    # into the correct order, and use that tuple as our sorting key.
    _release = tuple(
        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
    )

    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
    # We'll do this by abusing the pre segment, but we _only_ want to do this
    # if there is not a pre or a post segment. If we have one of those then
    # the normal sorting rules will handle this case correctly.
    if pre is None and post is None and dev is not None:
        _pre: CmpPrePostDevType = NegativeInfinity
    # Versions without a pre-release (except as noted above) should sort after
    # those with one.
    elif pre is None:
        _pre = Infinity
    else:
        _pre = pre

    # Versions without a post segment should sort before those with one.
    if post is None:
        _post: CmpPrePostDevType = NegativeInfinity

    else:
        _post = post

    # Versions without a development segment should sort after those with one.
    if dev is None:
        _dev: CmpPrePostDevType = Infinity

    else:
        _dev = dev

    if local is None:
        # Versions without a local segment should sort before those with one.
        _local: CmpLocalType = NegativeInfinity
    else:
        # Versions with a local segment need that segment parsed to implement
        # the sorting rules in PEP440.
        # - Alpha numeric segments sort before numeric segments
        # - Alpha numeric segments sort lexicographically
        # - Numeric segments sort numerically
        # - Shorter versions sort before longer versions when the prefixes
        #   match exactly
        _local = tuple(
            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
        )

    return epoch, _release, _pre, _post, _dev, _local
@ -0,0 +1 @@
packaging==24.0
@ -0,0 +1,196 @@
from __future__ import annotations

import csv
import hashlib
import os.path
import re
import stat
import time
from io import StringIO, TextIOWrapper
from zipfile import ZIP_DEFLATED, ZipFile, ZipInfo

from wheel.cli import WheelError
from wheel.util import log, urlsafe_b64decode, urlsafe_b64encode

# Non-greedy matching of an optional build number may be too clever (more
# invalid wheel filenames will match). Separate regex for .dist-info?
WHEEL_INFO_RE = re.compile(
    r"""^(?P<namever>(?P<name>[^\s-]+?)-(?P<ver>[^\s-]+?))(-(?P<build>\d[^\s-]*))?
     -(?P<pyver>[^\s-]+?)-(?P<abi>[^\s-]+?)-(?P<plat>\S+)\.whl$""",
    re.VERBOSE,
)
MINIMUM_TIMESTAMP = 315532800  # 1980-01-01 00:00:00 UTC


def get_zipinfo_datetime(timestamp=None):
    # Some applications need reproducible .whl files, but they can't do this without
    # forcing the timestamp of the individual ZipInfo objects. See issue #143.
    timestamp = int(os.environ.get("SOURCE_DATE_EPOCH", timestamp or time.time()))
    timestamp = max(timestamp, MINIMUM_TIMESTAMP)
    return time.gmtime(timestamp)[0:6]
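
# Reproducibility sketch: SOURCE_DATE_EPOCH overrides any passed timestamp,
# and the result is clamped to the 1980-01-01 minimum the zip format allows:
#
#     >>> import os
#     >>> os.environ["SOURCE_DATE_EPOCH"] = "315532800"
#     >>> get_zipinfo_datetime()
#     (1980, 1, 1, 0, 0, 0)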


class WheelFile(ZipFile):
    """A ZipFile derivative class that also reads SHA-256 hashes from
    .dist-info/RECORD and checks any read files against those.
    """

    _default_algorithm = hashlib.sha256

    def __init__(self, file, mode="r", compression=ZIP_DEFLATED):
        basename = os.path.basename(file)
        self.parsed_filename = WHEEL_INFO_RE.match(basename)
        if not basename.endswith(".whl") or self.parsed_filename is None:
            raise WheelError(f"Bad wheel filename {basename!r}")

        ZipFile.__init__(self, file, mode, compression=compression, allowZip64=True)

        self.dist_info_path = "{}.dist-info".format(
            self.parsed_filename.group("namever")
        )
        self.record_path = self.dist_info_path + "/RECORD"
        self._file_hashes = {}
        self._file_sizes = {}
        if mode == "r":
            # Ignore RECORD and any embedded wheel signatures
            self._file_hashes[self.record_path] = None, None
            self._file_hashes[self.record_path + ".jws"] = None, None
            self._file_hashes[self.record_path + ".p7s"] = None, None

            # Fill in the expected hashes by reading them from RECORD
            try:
                record = self.open(self.record_path)
            except KeyError:
                raise WheelError(f"Missing {self.record_path} file") from None

            with record:
                for line in csv.reader(
                    TextIOWrapper(record, newline="", encoding="utf-8")
                ):
                    path, hash_sum, size = line
                    if not hash_sum:
                        continue

                    algorithm, hash_sum = hash_sum.split("=")
                    try:
                        hashlib.new(algorithm)
                    except ValueError:
                        raise WheelError(
                            f"Unsupported hash algorithm: {algorithm}"
                        ) from None

                    if algorithm.lower() in {"md5", "sha1"}:
                        raise WheelError(
                            f"Weak hash algorithm ({algorithm}) is not permitted by "
                            f"PEP 427"
                        )

                    self._file_hashes[path] = (
                        algorithm,
                        urlsafe_b64decode(hash_sum.encode("ascii")),
                    )

    def open(self, name_or_info, mode="r", pwd=None):
        def _update_crc(newdata):
            eof = ef._eof
            update_crc_orig(newdata)
            running_hash.update(newdata)
            if eof and running_hash.digest() != expected_hash:
                raise WheelError(f"Hash mismatch for file '{ef_name}'")

        ef_name = (
            name_or_info.filename if isinstance(name_or_info, ZipInfo) else name_or_info
        )
        if (
            mode == "r"
            and not ef_name.endswith("/")
            and ef_name not in self._file_hashes
        ):
            raise WheelError(f"No hash found for file '{ef_name}'")

        ef = ZipFile.open(self, name_or_info, mode, pwd)
        if mode == "r" and not ef_name.endswith("/"):
            algorithm, expected_hash = self._file_hashes[ef_name]
            if expected_hash is not None:
                # Monkey patch the _update_crc method to also check for the hash from
                # RECORD
                running_hash = hashlib.new(algorithm)
                update_crc_orig, ef._update_crc = ef._update_crc, _update_crc

        return ef

    def write_files(self, base_dir):
        log.info(f"creating '{self.filename}' and adding '{base_dir}' to it")
        deferred = []
        for root, dirnames, filenames in os.walk(base_dir):
            # Sort the directory names so that `os.walk` will walk them in a
            # defined order on the next iteration.
            dirnames.sort()
            for name in sorted(filenames):
                path = os.path.normpath(os.path.join(root, name))
                if os.path.isfile(path):
                    arcname = os.path.relpath(path, base_dir).replace(os.path.sep, "/")
                    if arcname == self.record_path:
                        pass
                    elif root.endswith(".dist-info"):
                        deferred.append((path, arcname))
                    else:
                        self.write(path, arcname)

        deferred.sort()
        for path, arcname in deferred:
            self.write(path, arcname)

    def write(self, filename, arcname=None, compress_type=None):
        with open(filename, "rb") as f:
            st = os.fstat(f.fileno())
            data = f.read()

        zinfo = ZipInfo(
            arcname or filename, date_time=get_zipinfo_datetime(st.st_mtime)
        )
        zinfo.external_attr = (stat.S_IMODE(st.st_mode) | stat.S_IFMT(st.st_mode)) << 16
        zinfo.compress_type = compress_type or self.compression
        self.writestr(zinfo, data, compress_type)

    def writestr(self, zinfo_or_arcname, data, compress_type=None):
        if isinstance(zinfo_or_arcname, str):
            zinfo_or_arcname = ZipInfo(
                zinfo_or_arcname, date_time=get_zipinfo_datetime()
            )
            zinfo_or_arcname.compress_type = self.compression
            zinfo_or_arcname.external_attr = (0o664 | stat.S_IFREG) << 16

        if isinstance(data, str):
            data = data.encode("utf-8")

        ZipFile.writestr(self, zinfo_or_arcname, data, compress_type)
        fname = (
            zinfo_or_arcname.filename
            if isinstance(zinfo_or_arcname, ZipInfo)
            else zinfo_or_arcname
        )
        log.info(f"adding '{fname}'")
        if fname != self.record_path:
            hash_ = self._default_algorithm(data)
            self._file_hashes[fname] = (
                hash_.name,
                urlsafe_b64encode(hash_.digest()).decode("ascii"),
            )
            self._file_sizes[fname] = len(data)

    def close(self):
        # Write RECORD
        if self.fp is not None and self.mode == "w" and self._file_hashes:
            data = StringIO()
            writer = csv.writer(data, delimiter=",", quotechar='"', lineterminator="\n")
            writer.writerows(
                (
                    (fname, algorithm + "=" + hash_, self._file_sizes[fname])
                    for fname, (algorithm, hash_) in self._file_hashes.items()
                )
            )
            writer.writerow((format(self.record_path), "", ""))
            self.writestr(self.record_path, data.getvalue())

        ZipFile.close(self)
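
# Round-trip sketch (the path and contents are made up): hashes are recorded
# as files are added, RECORD itself is emitted on close(), and reads verify
# each file against RECORD, raising WheelError on a mismatch:
#
#     with WheelFile("demo-1.0-py3-none-any.whl", "w") as wf:
#         wf.writestr("demo/__init__.py", "print('hi')\n")
#     with WheelFile("demo-1.0-py3-none-any.whl") as wf:
#         print(wf.read("demo/__init__.py").decode())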