author      Alex Grönholm <alex.gronholm@nextday.fi>    2021-12-24 01:27:26 +0200
committer   Alex Grönholm <alex.gronholm@nextday.fi>    2021-12-24 01:45:01 +0200
commit      5eb690c72ea59bc0f8a2fa34d3993ebe3dbe0d38 (patch)
tree        dff2a2103314f0fe6c5cc53b91a120f59edf78d3 /src
parent      64d0b8d779b5b41bacea2ef3b59f3e06f0e683ed (diff)
download    wheel-git-5eb690c72ea59bc0f8a2fa34d3993ebe3dbe0d38.tar.gz
Adopted black and reformatted the codebase to match
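
The hunks below are the mechanical result of running black over src/, with no intended behavioural changes. A reformatting like this can be checked locally with black's check mode; the following snippet is a minimal sketch (it assumes black is installed in the current environment and is run from the repository root), not part of the commit itself:

# Minimal sketch: verify that src/ already matches black's default style.
# Assumes black is installed (pip install black) and the working directory
# is the repository root; "src" is the same tree this commit touches.
import subprocess
import sys

result = subprocess.run(
    [sys.executable, "-m", "black", "--check", "--diff", "src"],
    capture_output=True,
    text=True,
)
print(result.stdout, end="")
# black exits with 0 when nothing would change and 1 when it would reformat files.
sys.exit(result.returncode)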
Diffstat (limited to 'src')
-rw-r--r--   src/wheel/__init__.py          2
-rw-r--r--   src/wheel/__main__.py          4
-rw-r--r--   src/wheel/bdist_wheel.py     398
-rw-r--r--   src/wheel/cli/__init__.py     49
-rwxr-xr-x   src/wheel/cli/convert.py      95
-rw-r--r--   src/wheel/cli/pack.py         62
-rw-r--r--   src/wheel/cli/unpack.py        8
-rw-r--r--   src/wheel/macosx_libfile.py  165
-rw-r--r--   src/wheel/metadata.py         65
-rw-r--r--   src/wheel/util.py             10
-rw-r--r--   src/wheel/wheelfile.py       105
11 files changed, 557 insertions, 406 deletions
diff --git a/src/wheel/__init__.py b/src/wheel/__init__.py
index a4b3835..04b7c86 100644
--- a/src/wheel/__init__.py
+++ b/src/wheel/__init__.py
@@ -1 +1 @@
-__version__ = '0.37.1'
+__version__ = "0.37.1"
diff --git a/src/wheel/__main__.py b/src/wheel/__main__.py
index b3773a2..99993ed 100644
--- a/src/wheel/__main__.py
+++ b/src/wheel/__main__.py
@@ -6,12 +6,14 @@ import sys
def main(): # needed for console script
- if __package__ == '':
+ if __package__ == "":
# To be able to run 'python wheel-0.9.whl/wheel':
import os.path
+
path = os.path.dirname(os.path.dirname(__file__))
sys.path[0:0] = [path]
import wheel.cli
+
sys.exit(wheel.cli.main())
diff --git a/src/wheel/bdist_wheel.py b/src/wheel/bdist_wheel.py
index ae8da2a..bd08898 100644
--- a/src/wheel/bdist_wheel.py
+++ b/src/wheel/bdist_wheel.py
@@ -34,11 +34,11 @@ from . import __version__ as wheel_version
safe_name = pkg_resources.safe_name
safe_version = pkg_resources.safe_version
-PY_LIMITED_API_PATTERN = r'cp3\d'
+PY_LIMITED_API_PATTERN = r"cp3\d"
def python_tag():
- return f'py{sys.version_info[0]}'
+ return f"py{sys.version_info[0]}"
def get_platform(archive_root):
@@ -59,41 +59,46 @@ def get_flag(var, fallback, expected=True, warn=True):
val = get_config_var(var)
if val is None:
if warn:
- warnings.warn("Config variable '{}' is unset, Python ABI tag may "
- "be incorrect".format(var), RuntimeWarning, 2)
+ warnings.warn(
+ "Config variable '{}' is unset, Python ABI tag may "
+ "be incorrect".format(var),
+ RuntimeWarning,
+ 2,
+ )
return fallback
return val == expected
def get_abi_tag():
"""Return the ABI tag based on SOABI (if available) or emulate SOABI (PyPy)."""
- soabi = get_config_var('SOABI')
+ soabi = get_config_var("SOABI")
impl = tags.interpreter_name()
- if not soabi and impl in ('cp', 'pp') and hasattr(sys, 'maxunicode'):
- d = ''
- m = ''
- u = ''
- if get_flag('Py_DEBUG',
- hasattr(sys, 'gettotalrefcount'),
- warn=(impl == 'cp')):
- d = 'd'
-
- if get_flag('WITH_PYMALLOC',
- impl == 'cp',
- warn=(impl == 'cp' and
- sys.version_info < (3, 8))) \
- and sys.version_info < (3, 8):
- m = 'm'
-
- abi = f'{impl}{tags.interpreter_version()}{d}{m}{u}'
- elif soabi and soabi.startswith('cpython-'):
- abi = 'cp' + soabi.split('-')[1]
- elif soabi and soabi.startswith('pypy-'):
+ if not soabi and impl in ("cp", "pp") and hasattr(sys, "maxunicode"):
+ d = ""
+ m = ""
+ u = ""
+ if get_flag("Py_DEBUG", hasattr(sys, "gettotalrefcount"), warn=(impl == "cp")):
+ d = "d"
+
+ if (
+ get_flag(
+ "WITH_PYMALLOC",
+ impl == "cp",
+ warn=(impl == "cp" and sys.version_info < (3, 8)),
+ )
+ and sys.version_info < (3, 8)
+ ):
+ m = "m"
+
+ abi = f"{impl}{tags.interpreter_version()}{d}{m}{u}"
+ elif soabi and soabi.startswith("cpython-"):
+ abi = "cp" + soabi.split("-")[1]
+ elif soabi and soabi.startswith("pypy-"):
# we want something like pypy36-pp73
- abi = '-'.join(soabi.split('-')[:2])
- abi = abi.replace('.', '_').replace('-', '_')
+ abi = "-".join(soabi.split("-")[:2])
+ abi = abi.replace(".", "_").replace("-", "_")
elif soabi:
- abi = soabi.replace('.', '_').replace('-', '_')
+ abi = soabi.replace(".", "_").replace("-", "_")
else:
abi = None
@@ -101,11 +106,11 @@ def get_abi_tag():
def safer_name(name):
- return safe_name(name).replace('-', '_')
+ return safe_name(name).replace("-", "_")
def safer_version(version):
- return safe_version(version).replace('-', '_')
+ return safe_version(version).replace("-", "_")
def remove_readonly(func, path, excinfo):
@@ -116,61 +121,78 @@ def remove_readonly(func, path, excinfo):
class bdist_wheel(Command):
- description = 'create a wheel distribution'
-
- supported_compressions = OrderedDict([
- ('stored', ZIP_STORED),
- ('deflated', ZIP_DEFLATED)
- ])
-
- user_options = [('bdist-dir=', 'b',
- "temporary directory for creating the distribution"),
- ('plat-name=', 'p',
- "platform name to embed in generated filenames "
- "(default: %s)" % get_platform(None)),
- ('keep-temp', 'k',
- "keep the pseudo-installation tree around after " +
- "creating the distribution archive"),
- ('dist-dir=', 'd',
- "directory to put final built distributions in"),
- ('skip-build', None,
- "skip rebuilding everything (for testing/debugging)"),
- ('relative', None,
- "build the archive using relative paths "
- "(default: false)"),
- ('owner=', 'u',
- "Owner name used when creating a tar file"
- " [default: current user]"),
- ('group=', 'g',
- "Group name used when creating a tar file"
- " [default: current group]"),
- ('universal', None,
- "make a universal wheel"
- " (default: false)"),
- ('compression=', None,
- "zipfile compression (one of: {})"
- " (default: 'deflated')"
- .format(', '.join(supported_compressions))),
- ('python-tag=', None,
- "Python implementation compatibility tag"
- " (default: '%s')" % (python_tag())),
- ('build-number=', None,
- "Build number for this particular version. "
- "As specified in PEP-0427, this must start with a digit. "
- "[default: None]"),
- ('py-limited-api=', None,
- "Python tag (cp32|cp33|cpNN) for abi3 wheel tag"
- " (default: false)"),
- ]
-
- boolean_options = ['keep-temp', 'skip-build', 'relative', 'universal']
+ description = "create a wheel distribution"
+
+ supported_compressions = OrderedDict(
+ [("stored", ZIP_STORED), ("deflated", ZIP_DEFLATED)]
+ )
+
+ user_options = [
+ ("bdist-dir=", "b", "temporary directory for creating the distribution"),
+ (
+ "plat-name=",
+ "p",
+ "platform name to embed in generated filenames "
+ "(default: %s)" % get_platform(None),
+ ),
+ (
+ "keep-temp",
+ "k",
+ "keep the pseudo-installation tree around after "
+ + "creating the distribution archive",
+ ),
+ ("dist-dir=", "d", "directory to put final built distributions in"),
+ ("skip-build", None, "skip rebuilding everything (for testing/debugging)"),
+ (
+ "relative",
+ None,
+ "build the archive using relative paths " "(default: false)",
+ ),
+ (
+ "owner=",
+ "u",
+ "Owner name used when creating a tar file" " [default: current user]",
+ ),
+ (
+ "group=",
+ "g",
+ "Group name used when creating a tar file" " [default: current group]",
+ ),
+ ("universal", None, "make a universal wheel" " (default: false)"),
+ (
+ "compression=",
+ None,
+ "zipfile compression (one of: {})"
+ " (default: 'deflated')".format(", ".join(supported_compressions)),
+ ),
+ (
+ "python-tag=",
+ None,
+ "Python implementation compatibility tag"
+ " (default: '%s')" % (python_tag()),
+ ),
+ (
+ "build-number=",
+ None,
+ "Build number for this particular version. "
+ "As specified in PEP-0427, this must start with a digit. "
+ "[default: None]",
+ ),
+ (
+ "py-limited-api=",
+ None,
+ "Python tag (cp32|cp33|cpNN) for abi3 wheel tag" " (default: false)",
+ ),
+ ]
+
+ boolean_options = ["keep-temp", "skip-build", "relative", "universal"]
def initialize_options(self):
self.bdist_dir = None
self.data_dir = None
self.plat_name = None
self.plat_tag = None
- self.format = 'zip'
+ self.format = "zip"
self.keep_temp = False
self.dist_dir = None
self.egginfo_dir = None
@@ -180,7 +202,7 @@ class bdist_wheel(Command):
self.owner = None
self.group = None
self.universal = False
- self.compression = 'deflated'
+ self.compression = "deflated"
self.python_tag = python_tag()
self.build_number = None
self.py_limited_api = False
@@ -188,35 +210,37 @@ class bdist_wheel(Command):
def finalize_options(self):
if self.bdist_dir is None:
- bdist_base = self.get_finalized_command('bdist').bdist_base
- self.bdist_dir = os.path.join(bdist_base, 'wheel')
+ bdist_base = self.get_finalized_command("bdist").bdist_base
+ self.bdist_dir = os.path.join(bdist_base, "wheel")
- self.data_dir = self.wheel_dist_name + '.data'
+ self.data_dir = self.wheel_dist_name + ".data"
self.plat_name_supplied = self.plat_name is not None
try:
self.compression = self.supported_compressions[self.compression]
except KeyError:
- raise ValueError(f'Unsupported compression: {self.compression}')
+ raise ValueError(f"Unsupported compression: {self.compression}")
- need_options = ('dist_dir', 'plat_name', 'skip_build')
+ need_options = ("dist_dir", "plat_name", "skip_build")
- self.set_undefined_options('bdist',
- *zip(need_options, need_options))
+ self.set_undefined_options("bdist", *zip(need_options, need_options))
- self.root_is_pure = not (self.distribution.has_ext_modules()
- or self.distribution.has_c_libraries())
+ self.root_is_pure = not (
+ self.distribution.has_ext_modules() or self.distribution.has_c_libraries()
+ )
- if self.py_limited_api and not re.match(PY_LIMITED_API_PATTERN, self.py_limited_api):
+ if self.py_limited_api and not re.match(
+ PY_LIMITED_API_PATTERN, self.py_limited_api
+ ):
raise ValueError("py-limited-api must match '%s'" % PY_LIMITED_API_PATTERN)
# Support legacy [wheel] section for setting universal
- wheel = self.distribution.get_option_dict('wheel')
- if 'universal' in wheel:
+ wheel = self.distribution.get_option_dict("wheel")
+ if "universal" in wheel:
# please don't define this in your global configs
- logger.warn('The [wheel] section is deprecated. Use [bdist_wheel] instead.')
- val = wheel['universal'][1].strip()
- if val.lower() in ('1', 'true', 'yes'):
+ logger.warn("The [wheel] section is deprecated. Use [bdist_wheel] instead.")
+ val = wheel["universal"][1].strip()
+ if val.lower() in ("1", "true", "yes"):
self.universal = True
if self.build_number is not None and not self.build_number[:1].isdigit():
@@ -225,11 +249,13 @@ class bdist_wheel(Command):
@property
def wheel_dist_name(self):
"""Return distribution full name with - replaced with _"""
- components = (safer_name(self.distribution.get_name()),
- safer_version(self.distribution.get_version()))
+ components = (
+ safer_name(self.distribution.get_name()),
+ safer_version(self.distribution.get_version()),
+ )
if self.build_number:
components += (self.build_number,)
- return '-'.join(components)
+ return "-".join(components)
def get_tag(self):
# bdist sets self.plat_name if unset, we should only use it for purepy
@@ -237,7 +263,7 @@ class bdist_wheel(Command):
if self.plat_name_supplied:
plat_name = self.plat_name
elif self.root_is_pure:
- plat_name = 'any'
+ plat_name = "any"
else:
# macosx contains system version in platform name so need special handle
if self.plat_name and not self.plat_name.startswith("macosx"):
@@ -251,47 +277,52 @@ class bdist_wheel(Command):
# modules, use the default platform name.
plat_name = get_platform(self.bdist_dir)
- if plat_name in ('linux-x86_64', 'linux_x86_64') and sys.maxsize == 2147483647:
- plat_name = 'linux_i686'
+ if (
+ plat_name in ("linux-x86_64", "linux_x86_64")
+ and sys.maxsize == 2147483647
+ ):
+ plat_name = "linux_i686"
- plat_name = plat_name.lower().replace('-', '_').replace('.', '_')
+ plat_name = plat_name.lower().replace("-", "_").replace(".", "_")
if self.root_is_pure:
if self.universal:
- impl = 'py2.py3'
+ impl = "py2.py3"
else:
impl = self.python_tag
- tag = (impl, 'none', plat_name)
+ tag = (impl, "none", plat_name)
else:
impl_name = tags.interpreter_name()
impl_ver = tags.interpreter_version()
impl = impl_name + impl_ver
# We don't work on CPython 3.1, 3.0.
- if self.py_limited_api and (impl_name + impl_ver).startswith('cp3'):
+ if self.py_limited_api and (impl_name + impl_ver).startswith("cp3"):
impl = self.py_limited_api
- abi_tag = 'abi3'
+ abi_tag = "abi3"
else:
abi_tag = str(get_abi_tag()).lower()
tag = (impl, abi_tag, plat_name)
# issue gh-374: allow overriding plat_name
- supported_tags = [(t.interpreter, t.abi, plat_name)
- for t in tags.sys_tags()]
- assert tag in supported_tags, f"would build wheel with unsupported tag {tag}"
+ supported_tags = [
+ (t.interpreter, t.abi, plat_name) for t in tags.sys_tags()
+ ]
+ assert (
+ tag in supported_tags
+ ), f"would build wheel with unsupported tag {tag}"
return tag
def run(self):
- build_scripts = self.reinitialize_command('build_scripts')
- build_scripts.executable = 'python'
+ build_scripts = self.reinitialize_command("build_scripts")
+ build_scripts.executable = "python"
build_scripts.force = True
- build_ext = self.reinitialize_command('build_ext')
+ build_ext = self.reinitialize_command("build_ext")
build_ext.inplace = False
if not self.skip_build:
- self.run_command('build')
+ self.run_command("build")
- install = self.reinitialize_command('install',
- reinit_subcommands=True)
+ install = self.reinitialize_command("install", reinit_subcommands=True)
install.root = self.bdist_dir
install.compile = False
install.skip_build = self.skip_build
@@ -300,31 +331,31 @@ class bdist_wheel(Command):
# A wheel without setuptools scripts is more cross-platform.
# Use the (undocumented) `no_ep` option to setuptools'
# install_scripts command to avoid creating entry point scripts.
- install_scripts = self.reinitialize_command('install_scripts')
+ install_scripts = self.reinitialize_command("install_scripts")
install_scripts.no_ep = True
# Use a custom scheme for the archive, because we have to decide
# at installation time which scheme to use.
- for key in ('headers', 'scripts', 'data', 'purelib', 'platlib'):
- setattr(install,
- 'install_' + key,
- os.path.join(self.data_dir, key))
+ for key in ("headers", "scripts", "data", "purelib", "platlib"):
+ setattr(install, "install_" + key, os.path.join(self.data_dir, key))
- basedir_observed = ''
+ basedir_observed = ""
- if os.name == 'nt':
+ if os.name == "nt":
# win32 barfs if any of these are ''; could be '.'?
# (distutils.command.install:change_roots bug)
- basedir_observed = os.path.normpath(os.path.join(self.data_dir, '..'))
+ basedir_observed = os.path.normpath(os.path.join(self.data_dir, ".."))
self.install_libbase = self.install_lib = basedir_observed
- setattr(install,
- 'install_purelib' if self.root_is_pure else 'install_platlib',
- basedir_observed)
+ setattr(
+ install,
+ "install_purelib" if self.root_is_pure else "install_platlib",
+ basedir_observed,
+ )
logger.info("installing to %s", self.bdist_dir)
- self.run_command('install')
+ self.run_command("install")
impl_tag, abi_tag, plat_tag = self.get_tag()
archive_basename = f"{self.wheel_dist_name}-{impl_tag}-{abi_tag}-{plat_tag}"
@@ -332,13 +363,14 @@ class bdist_wheel(Command):
archive_root = self.bdist_dir
else:
archive_root = os.path.join(
- self.bdist_dir,
- self._ensure_relative(install.install_base))
+ self.bdist_dir, self._ensure_relative(install.install_base)
+ )
- self.set_undefined_options('install_egg_info', ('target', 'egginfo_dir'))
- distinfo_dirname = '{}-{}.dist-info'.format(
+ self.set_undefined_options("install_egg_info", ("target", "egginfo_dir"))
+ distinfo_dirname = "{}-{}.dist-info".format(
safer_name(self.distribution.get_name()),
- safer_version(self.distribution.get_version()))
+ safer_version(self.distribution.get_version()),
+ )
distinfo_dir = os.path.join(self.bdist_dir, distinfo_dirname)
self.egg2dist(self.egginfo_dir, distinfo_dir)
@@ -348,44 +380,49 @@ class bdist_wheel(Command):
if not os.path.exists(self.dist_dir):
os.makedirs(self.dist_dir)
- wheel_path = os.path.join(self.dist_dir, archive_basename + '.whl')
- with WheelFile(wheel_path, 'w', self.compression) as wf:
+ wheel_path = os.path.join(self.dist_dir, archive_basename + ".whl")
+ with WheelFile(wheel_path, "w", self.compression) as wf:
wf.write_files(archive_root)
# Add to 'Distribution.dist_files' so that the "upload" command works
- getattr(self.distribution, 'dist_files', []).append(
- ('bdist_wheel',
- '{}.{}'.format(*sys.version_info[:2]), # like 3.7
- wheel_path))
+ getattr(self.distribution, "dist_files", []).append(
+ (
+ "bdist_wheel",
+ "{}.{}".format(*sys.version_info[:2]), # like 3.7
+ wheel_path,
+ )
+ )
if not self.keep_temp:
- logger.info('removing %s', self.bdist_dir)
+ logger.info("removing %s", self.bdist_dir)
if not self.dry_run:
rmtree(self.bdist_dir, onerror=remove_readonly)
- def write_wheelfile(self, wheelfile_base, generator='bdist_wheel (' + wheel_version + ')'):
+ def write_wheelfile(
+ self, wheelfile_base, generator="bdist_wheel (" + wheel_version + ")"
+ ):
from email.message import Message
msg = Message()
- msg['Wheel-Version'] = '1.0' # of the spec
- msg['Generator'] = generator
- msg['Root-Is-Purelib'] = str(self.root_is_pure).lower()
+ msg["Wheel-Version"] = "1.0" # of the spec
+ msg["Generator"] = generator
+ msg["Root-Is-Purelib"] = str(self.root_is_pure).lower()
if self.build_number is not None:
- msg['Build'] = self.build_number
+ msg["Build"] = self.build_number
# Doesn't work for bdist_wininst
impl_tag, abi_tag, plat_tag = self.get_tag()
- for impl in impl_tag.split('.'):
- for abi in abi_tag.split('.'):
- for plat in plat_tag.split('.'):
- msg['Tag'] = '-'.join((impl, abi, plat))
+ for impl in impl_tag.split("."):
+ for abi in abi_tag.split("."):
+ for plat in plat_tag.split("."):
+ msg["Tag"] = "-".join((impl, abi, plat))
- wheelfile_path = os.path.join(wheelfile_base, 'WHEEL')
- logger.info('creating %s', wheelfile_path)
+ wheelfile_path = os.path.join(wheelfile_base, "WHEEL")
+ logger.info("creating %s", wheelfile_path)
buffer = BytesIO()
BytesGenerator(buffer, maxheaderlen=0).flatten(msg)
- with open(wheelfile_path, 'wb') as f:
- f.write(buffer.getvalue().replace(b'\r\n', b'\r'))
+ with open(wheelfile_path, "wb") as f:
+ f.write(buffer.getvalue().replace(b"\r\n", b"\r"))
def _ensure_relative(self, path):
# copied from dir_util, deleted
@@ -396,34 +433,42 @@ class bdist_wheel(Command):
@property
def license_paths(self):
- metadata = self.distribution.get_option_dict('metadata')
+ metadata = self.distribution.get_option_dict("metadata")
files = set()
- patterns = sorted({
- option for option in metadata.get('license_files', ('', ''))[1].split()
- })
+ patterns = sorted(
+ {option for option in metadata.get("license_files", ("", ""))[1].split()}
+ )
- if 'license_file' in metadata:
- warnings.warn('The "license_file" option is deprecated. Use '
- '"license_files" instead.', DeprecationWarning)
- files.add(metadata['license_file'][1])
+ if "license_file" in metadata:
+ warnings.warn(
+ 'The "license_file" option is deprecated. Use '
+ '"license_files" instead.',
+ DeprecationWarning,
+ )
+ files.add(metadata["license_file"][1])
- if 'license_file' not in metadata and 'license_files' not in metadata:
- patterns = ('LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*')
+ if "license_file" not in metadata and "license_files" not in metadata:
+ patterns = ("LICEN[CS]E*", "COPYING*", "NOTICE*", "AUTHORS*")
for pattern in patterns:
for path in iglob(pattern):
- if path.endswith('~'):
- logger.debug('ignoring license file "%s" as it looks like a backup', path)
+ if path.endswith("~"):
+ logger.debug(
+ 'ignoring license file "%s" as it looks like a backup', path
+ )
continue
if path not in files and os.path.isfile(path):
- logger.info('adding license file "%s" (matched pattern "%s")', path, pattern)
+ logger.info(
+ 'adding license file "%s" (matched pattern "%s")', path, pattern
+ )
files.add(path)
return files
def egg2dist(self, egginfo_path, distinfo_path):
"""Convert an .egg-info directory into a .dist-info directory"""
+
def adios(p):
"""Appropriately delete directory, file or link."""
if os.path.exists(p) and not os.path.islink(p) and os.path.isdir(p):
@@ -439,7 +484,8 @@ class bdist_wheel(Command):
# to name the archive file. Check for this case and report
# accordingly.
import glob
- pat = os.path.join(os.path.dirname(egginfo_path), '*.egg-info')
+
+ pat = os.path.join(os.path.dirname(egginfo_path), "*.egg-info")
possible = glob.glob(pat)
err = f"Egg metadata expected at {egginfo_path} but not found"
if possible:
@@ -455,23 +501,29 @@ class bdist_wheel(Command):
os.mkdir(distinfo_path)
else:
# .egg-info is a directory
- pkginfo_path = os.path.join(egginfo_path, 'PKG-INFO')
+ pkginfo_path = os.path.join(egginfo_path, "PKG-INFO")
pkg_info = pkginfo_to_metadata(egginfo_path, pkginfo_path)
# ignore common egg metadata that is useless to wheel
- shutil.copytree(egginfo_path, distinfo_path,
- ignore=lambda x, y: {'PKG-INFO', 'requires.txt', 'SOURCES.txt',
- 'not-zip-safe'}
- )
+ shutil.copytree(
+ egginfo_path,
+ distinfo_path,
+ ignore=lambda x, y: {
+ "PKG-INFO",
+ "requires.txt",
+ "SOURCES.txt",
+ "not-zip-safe",
+ },
+ )
# delete dependency_links if it is only whitespace
- dependency_links_path = os.path.join(distinfo_path, 'dependency_links.txt')
+ dependency_links_path = os.path.join(distinfo_path, "dependency_links.txt")
with open(dependency_links_path) as dependency_links_file:
dependency_links = dependency_links_file.read().strip()
if not dependency_links:
adios(dependency_links_path)
- write_pkg_info(os.path.join(distinfo_path, 'METADATA'), pkg_info)
+ write_pkg_info(os.path.join(distinfo_path, "METADATA"), pkg_info)
for license_path in self.license_paths:
filename = os.path.basename(license_path)
diff --git a/src/wheel/cli/__init__.py b/src/wheel/cli/__init__.py
index c8a3b35..31ac4c8 100644
--- a/src/wheel/cli/__init__.py
+++ b/src/wheel/cli/__init__.py
@@ -20,21 +20,25 @@ class WheelError(Exception):
def unpack_f(args):
from .unpack import unpack
+
unpack(args.wheelfile, args.dest)
def pack_f(args):
from .pack import pack
+
pack(args.directory, args.dest_dir, args.build_number)
def convert_f(args):
from .convert import convert
+
convert(args.files, args.dest_dir, args.verbose)
def version_f(args):
from .. import __version__
+
print("wheel %s" % __version__)
@@ -42,30 +46,41 @@ def parser():
p = argparse.ArgumentParser()
s = p.add_subparsers(help="commands")
- unpack_parser = s.add_parser('unpack', help='Unpack wheel')
- unpack_parser.add_argument('--dest', '-d', help='Destination directory',
- default='.')
- unpack_parser.add_argument('wheelfile', help='Wheel file')
+ unpack_parser = s.add_parser("unpack", help="Unpack wheel")
+ unpack_parser.add_argument(
+ "--dest", "-d", help="Destination directory", default="."
+ )
+ unpack_parser.add_argument("wheelfile", help="Wheel file")
unpack_parser.set_defaults(func=unpack_f)
- repack_parser = s.add_parser('pack', help='Repack wheel')
- repack_parser.add_argument('directory', help='Root directory of the unpacked wheel')
- repack_parser.add_argument('--dest-dir', '-d', default=os.path.curdir,
- help="Directory to store the wheel (default %(default)s)")
- repack_parser.add_argument('--build-number', help="Build tag to use in the wheel name")
+ repack_parser = s.add_parser("pack", help="Repack wheel")
+ repack_parser.add_argument("directory", help="Root directory of the unpacked wheel")
+ repack_parser.add_argument(
+ "--dest-dir",
+ "-d",
+ default=os.path.curdir,
+ help="Directory to store the wheel (default %(default)s)",
+ )
+ repack_parser.add_argument(
+ "--build-number", help="Build tag to use in the wheel name"
+ )
repack_parser.set_defaults(func=pack_f)
- convert_parser = s.add_parser('convert', help='Convert egg or wininst to wheel')
- convert_parser.add_argument('files', nargs='*', help='Files to convert')
- convert_parser.add_argument('--dest-dir', '-d', default=os.path.curdir,
- help="Directory to store wheels (default %(default)s)")
- convert_parser.add_argument('--verbose', '-v', action='store_true')
+ convert_parser = s.add_parser("convert", help="Convert egg or wininst to wheel")
+ convert_parser.add_argument("files", nargs="*", help="Files to convert")
+ convert_parser.add_argument(
+ "--dest-dir",
+ "-d",
+ default=os.path.curdir,
+ help="Directory to store wheels (default %(default)s)",
+ )
+ convert_parser.add_argument("--verbose", "-v", action="store_true")
convert_parser.set_defaults(func=convert_f)
- version_parser = s.add_parser('version', help='Print version and exit')
+ version_parser = s.add_parser("version", help="Print version and exit")
version_parser.set_defaults(func=version_f)
- help_parser = s.add_parser('help', help='Show this help')
+ help_parser = s.add_parser("help", help="Show this help")
help_parser.set_defaults(func=lambda args: p.print_help())
return p
@@ -74,7 +89,7 @@ def parser():
def main():
p = parser()
args = p.parse_args()
- if not hasattr(args, 'func'):
+ if not hasattr(args, "func"):
p.print_help()
else:
try:
diff --git a/src/wheel/cli/convert.py b/src/wheel/cli/convert.py
index 5c76d5f..b2f685f 100755
--- a/src/wheel/cli/convert.py
+++ b/src/wheel/cli/convert.py
@@ -10,11 +10,14 @@ from ..bdist_wheel import bdist_wheel
from ..wheelfile import WheelFile
from . import WheelError, require_pkgresources
-egg_info_re = re.compile(r'''
+egg_info_re = re.compile(
+ r"""
(?P<name>.+?)-(?P<ver>.+?)
(-(?P<pyver>py\d\.\d+)
(-(?P<arch>.+?))?
- )?.egg$''', re.VERBOSE)
+ )?.egg$""",
+ re.VERBOSE,
+)
class _bdist_wheel_tag(bdist_wheel):
@@ -37,7 +40,7 @@ def egg2wheel(egg_path, dest_dir):
filename = os.path.basename(egg_path)
match = egg_info_re.match(filename)
if not match:
- raise WheelError(f'Invalid egg file name: {filename}')
+ raise WheelError(f"Invalid egg file name: {filename}")
egg_info = match.groupdict()
dir = tempfile.mkdtemp(suffix="_e2w")
@@ -54,16 +57,16 @@ def egg2wheel(egg_path, dest_dir):
else:
shutil.copytree(src, os.path.join(dir, pth))
- pyver = egg_info['pyver']
+ pyver = egg_info["pyver"]
if pyver:
- pyver = egg_info['pyver'] = pyver.replace('.', '')
+ pyver = egg_info["pyver"] = pyver.replace(".", "")
- arch = (egg_info['arch'] or 'any').replace('.', '_').replace('-', '_')
+ arch = (egg_info["arch"] or "any").replace(".", "_").replace("-", "_")
# assume all binary eggs are for CPython
- abi = 'cp' + pyver[2:] if arch != 'any' else 'none'
+ abi = "cp" + pyver[2:] if arch != "any" else "none"
- root_is_purelib = egg_info['arch'] is None
+ root_is_purelib = egg_info["arch"] is None
if root_is_purelib:
bw = bdist_wheel(dist.Distribution())
else:
@@ -72,16 +75,16 @@ def egg2wheel(egg_path, dest_dir):
bw.root_is_pure = root_is_purelib
bw.python_tag = pyver
bw.plat_name_supplied = True
- bw.plat_name = egg_info['arch'] or 'any'
+ bw.plat_name = egg_info["arch"] or "any"
if not root_is_purelib:
bw.full_tag_supplied = True
bw.full_tag = (pyver, abi, arch)
- dist_info_dir = os.path.join(dir, '{name}-{ver}.dist-info'.format(**egg_info))
- bw.egg2dist(os.path.join(dir, 'EGG-INFO'), dist_info_dir)
- bw.write_wheelfile(dist_info_dir, generator='egg2wheel')
- wheel_name = '{name}-{ver}-{pyver}-{}-{}.whl'.format(abi, arch, **egg_info)
- with WheelFile(os.path.join(dest_dir, wheel_name), 'w') as wf:
+ dist_info_dir = os.path.join(dir, "{name}-{ver}.dist-info".format(**egg_info))
+ bw.egg2dist(os.path.join(dir, "EGG-INFO"), dist_info_dir)
+ bw.write_wheelfile(dist_info_dir, generator="egg2wheel")
+ wheel_name = "{name}-{ver}-{pyver}-{}-{}.whl".format(abi, arch, **egg_info)
+ with WheelFile(os.path.join(dest_dir, wheel_name), "w") as wf:
wf.write_files(dir)
shutil.rmtree(dir)
@@ -128,34 +131,34 @@ def parse_wininst_info(wininfo_name, egginfo_name):
# Parse the wininst filename
# 1. Distribution name (up to the first '-')
- w_name, sep, rest = wininfo_name.partition('-')
+ w_name, sep, rest = wininfo_name.partition("-")
if not sep:
raise ValueError(f"Installer filename {wininfo_name} is not valid")
# Strip '.exe'
rest = rest[:-4]
# 2. Python version (from the last '-', must start with 'py')
- rest2, sep, w_pyver = rest.rpartition('-')
- if sep and w_pyver.startswith('py'):
+ rest2, sep, w_pyver = rest.rpartition("-")
+ if sep and w_pyver.startswith("py"):
rest = rest2
- w_pyver = w_pyver.replace('.', '')
+ w_pyver = w_pyver.replace(".", "")
else:
# Not version specific - use py2.py3. While it is possible that
# pure-Python code is not compatible with both Python 2 and 3, there
# is no way of knowing from the wininst format, so we assume the best
# here (the user can always manually rename the wheel to be more
# restrictive if needed).
- w_pyver = 'py2.py3'
+ w_pyver = "py2.py3"
# 3. Version and architecture
- w_ver, sep, w_arch = rest.rpartition('.')
+ w_ver, sep, w_arch = rest.rpartition(".")
if not sep:
raise ValueError(f"Installer filename {wininfo_name} is not valid")
if egginfo:
- w_name = egginfo.group('name')
- w_ver = egginfo.group('ver')
+ w_name = egginfo.group("name")
+ w_ver = egginfo.group("ver")
- return {'name': w_name, 'ver': w_ver, 'arch': w_arch, 'pyver': w_pyver}
+ return {"name": w_name, "ver": w_ver, "arch": w_arch, "pyver": w_pyver}
def wininst2wheel(path, dest_dir):
@@ -163,7 +166,7 @@ def wininst2wheel(path, dest_dir):
# Search for egg-info in the archive
egginfo_name = None
for filename in bdw.namelist():
- if '.egg-info' in filename:
+ if ".egg-info" in filename:
egginfo_name = filename
break
@@ -171,13 +174,13 @@ def wininst2wheel(path, dest_dir):
root_is_purelib = True
for zipinfo in bdw.infolist():
- if zipinfo.filename.startswith('PLATLIB'):
+ if zipinfo.filename.startswith("PLATLIB"):
root_is_purelib = False
break
if root_is_purelib:
- paths = {'purelib': ''}
+ paths = {"purelib": ""}
else:
- paths = {'platlib': ''}
+ paths = {"platlib": ""}
dist_info = "%(name)s-%(ver)s" % info
datadir = "%s.data/" % dist_info
@@ -185,13 +188,13 @@ def wininst2wheel(path, dest_dir):
# rewrite paths to trick ZipFile into extracting an egg
# XXX grab wininst .ini - between .exe, padding, and first zip file.
members = []
- egginfo_name = ''
+ egginfo_name = ""
for zipinfo in bdw.infolist():
- key, basename = zipinfo.filename.split('/', 1)
+ key, basename = zipinfo.filename.split("/", 1)
key = key.lower()
basepath = paths.get(key, None)
if basepath is None:
- basepath = datadir + key.lower() + '/'
+ basepath = datadir + key.lower() + "/"
oldname = zipinfo.filename
newname = basepath + basename
zipinfo.filename = newname
@@ -202,27 +205,27 @@ def wininst2wheel(path, dest_dir):
members.append(newname)
# Remember egg-info name for the egg2dist call below
if not egginfo_name:
- if newname.endswith('.egg-info'):
+ if newname.endswith(".egg-info"):
egginfo_name = newname
- elif '.egg-info/' in newname:
- egginfo_name, sep, _ = newname.rpartition('/')
+ elif ".egg-info/" in newname:
+ egginfo_name, sep, _ = newname.rpartition("/")
dir = tempfile.mkdtemp(suffix="_b2w")
bdw.extractall(dir, members)
# egg2wheel
- abi = 'none'
- pyver = info['pyver']
- arch = (info['arch'] or 'any').replace('.', '_').replace('-', '_')
+ abi = "none"
+ pyver = info["pyver"]
+ arch = (info["arch"] or "any").replace(".", "_").replace("-", "_")
# Wininst installers always have arch even if they are not
# architecture-specific (because the format itself is).
# So, assume the content is architecture-neutral if root is purelib.
if root_is_purelib:
- arch = 'any'
+ arch = "any"
# If the installer is architecture-specific, it's almost certainly also
# CPython-specific.
- if arch != 'any':
- pyver = pyver.replace('py', 'cp')
- wheel_name = '-'.join((dist_info, pyver, abi, arch))
+ if arch != "any":
+ pyver = pyver.replace("py", "cp")
+ wheel_name = "-".join((dist_info, pyver, abi, arch))
if root_is_purelib:
bw = bdist_wheel(dist.Distribution())
else:
@@ -231,18 +234,18 @@ def wininst2wheel(path, dest_dir):
bw.root_is_pure = root_is_purelib
bw.python_tag = pyver
bw.plat_name_supplied = True
- bw.plat_name = info['arch'] or 'any'
+ bw.plat_name = info["arch"] or "any"
if not root_is_purelib:
bw.full_tag_supplied = True
bw.full_tag = (pyver, abi, arch)
- dist_info_dir = os.path.join(dir, '%s.dist-info' % dist_info)
+ dist_info_dir = os.path.join(dir, "%s.dist-info" % dist_info)
bw.egg2dist(os.path.join(dir, egginfo_name), dist_info_dir)
- bw.write_wheelfile(dist_info_dir, generator='wininst2wheel')
+ bw.write_wheelfile(dist_info_dir, generator="wininst2wheel")
wheel_path = os.path.join(dest_dir, wheel_name)
- with WheelFile(wheel_path, 'w') as wf:
+ with WheelFile(wheel_path, "w") as wf:
wf.write_files(dir)
shutil.rmtree(dir)
@@ -250,11 +253,11 @@ def wininst2wheel(path, dest_dir):
def convert(files, dest_dir, verbose):
# Only support wheel convert if pkg_resources is present
- require_pkgresources('wheel convert')
+ require_pkgresources("wheel convert")
for pat in files:
for installer in iglob(pat):
- if os.path.splitext(installer)[1] == '.egg':
+ if os.path.splitext(installer)[1] == ".egg":
conv = egg2wheel
else:
conv = wininst2wheel
diff --git a/src/wheel/cli/pack.py b/src/wheel/cli/pack.py
index 094eb67..349f722 100644
--- a/src/wheel/cli/pack.py
+++ b/src/wheel/cli/pack.py
@@ -5,7 +5,7 @@ from wheel.cli import WheelError
from wheel.wheelfile import WheelFile
DIST_INFO_RE = re.compile(r"^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?))\.dist-info$")
-BUILD_NUM_RE = re.compile(br'Build: (\d\w*)$')
+BUILD_NUM_RE = re.compile(br"Build: (\d\w*)$")
def pack(directory, dest_dir, build_number):
@@ -18,44 +18,54 @@ def pack(directory, dest_dir, build_number):
:param dest_dir: Destination directory (defaults to the current directory)
"""
# Find the .dist-info directory
- dist_info_dirs = [fn for fn in os.listdir(directory)
- if os.path.isdir(os.path.join(directory, fn)) and DIST_INFO_RE.match(fn)]
+ dist_info_dirs = [
+ fn
+ for fn in os.listdir(directory)
+ if os.path.isdir(os.path.join(directory, fn)) and DIST_INFO_RE.match(fn)
+ ]
if len(dist_info_dirs) > 1:
- raise WheelError(f'Multiple .dist-info directories found in {directory}')
+ raise WheelError(f"Multiple .dist-info directories found in {directory}")
elif not dist_info_dirs:
- raise WheelError(f'No .dist-info directories found in {directory}')
+ raise WheelError(f"No .dist-info directories found in {directory}")
# Determine the target wheel filename
dist_info_dir = dist_info_dirs[0]
- name_version = DIST_INFO_RE.match(dist_info_dir).group('namever')
+ name_version = DIST_INFO_RE.match(dist_info_dir).group("namever")
# Read the tags and the existing build number from .dist-info/WHEEL
existing_build_number = None
- wheel_file_path = os.path.join(directory, dist_info_dir, 'WHEEL')
+ wheel_file_path = os.path.join(directory, dist_info_dir, "WHEEL")
with open(wheel_file_path) as f:
tags = []
for line in f:
- if line.startswith('Tag: '):
- tags.append(line.split(' ')[1].rstrip())
- elif line.startswith('Build: '):
- existing_build_number = line.split(' ')[1].rstrip()
+ if line.startswith("Tag: "):
+ tags.append(line.split(" ")[1].rstrip())
+ elif line.startswith("Build: "):
+ existing_build_number = line.split(" ")[1].rstrip()
if not tags:
- raise WheelError('No tags present in {}/WHEEL; cannot determine target wheel filename'
- .format(dist_info_dir))
+ raise WheelError(
+ "No tags present in {}/WHEEL; cannot determine target wheel "
+ "filename".format(dist_info_dir)
+ )
# Set the wheel file name and add/replace/remove the Build tag in .dist-info/WHEEL
build_number = build_number if build_number is not None else existing_build_number
if build_number is not None:
if build_number:
- name_version += '-' + build_number
+ name_version += "-" + build_number
if build_number != existing_build_number:
- replacement = ('Build: %s\r\n' % build_number).encode('ascii') if build_number else b''
- with open(wheel_file_path, 'rb+') as f:
+ replacement = (
+ ("Build: %s\r\n" % build_number).encode("ascii")
+ if build_number
+ else b""
+ )
+ with open(wheel_file_path, "rb+") as f:
wheel_file_content = f.read()
- wheel_file_content, num_replaced = BUILD_NUM_RE.subn(replacement,
- wheel_file_content)
+ wheel_file_content, num_replaced = BUILD_NUM_RE.subn(
+ replacement, wheel_file_content
+ )
if not num_replaced:
wheel_file_content += replacement
@@ -64,15 +74,15 @@ def pack(directory, dest_dir, build_number):
f.write(wheel_file_content)
# Reassemble the tags for the wheel file
- impls = sorted({tag.split('-')[0] for tag in tags})
- abivers = sorted({tag.split('-')[1] for tag in tags})
- platforms = sorted({tag.split('-')[2] for tag in tags})
- tagline = '-'.join(['.'.join(impls), '.'.join(abivers), '.'.join(platforms)])
+ impls = sorted({tag.split("-")[0] for tag in tags})
+ abivers = sorted({tag.split("-")[1] for tag in tags})
+ platforms = sorted({tag.split("-")[2] for tag in tags})
+ tagline = "-".join([".".join(impls), ".".join(abivers), ".".join(platforms)])
# Repack the wheel
- wheel_path = os.path.join(dest_dir, f'{name_version}-{tagline}.whl')
- with WheelFile(wheel_path, 'w') as wf:
- print(f"Repacking wheel as {wheel_path}...", end='', flush=True)
+ wheel_path = os.path.join(dest_dir, f"{name_version}-{tagline}.whl")
+ with WheelFile(wheel_path, "w") as wf:
+ print(f"Repacking wheel as {wheel_path}...", end="", flush=True)
wf.write_files(directory)
- print('OK')
+ print("OK")
diff --git a/src/wheel/cli/unpack.py b/src/wheel/cli/unpack.py
index 3f3963c..ffd0e81 100644
--- a/src/wheel/cli/unpack.py
+++ b/src/wheel/cli/unpack.py
@@ -3,7 +3,7 @@ import os.path
from ..wheelfile import WheelFile
-def unpack(path, dest='.'):
+def unpack(path, dest="."):
"""Unpack a wheel.
Wheel content will be unpacked to {dest}/{name}-{ver}, where {name}
@@ -13,9 +13,9 @@ def unpack(path, dest='.'):
:param dest: Destination directory (default to current directory).
"""
with WheelFile(path) as wf:
- namever = wf.parsed_filename.group('namever')
+ namever = wf.parsed_filename.group("namever")
destination = os.path.join(dest, namever)
- print(f"Unpacking to: {destination}...", end='', flush=True)
+ print(f"Unpacking to: {destination}...", end="", flush=True)
wf.extractall(destination)
- print('OK')
+ print("OK")
diff --git a/src/wheel/macosx_libfile.py b/src/wheel/macosx_libfile.py
index 29b0395..c96e7cb 100644
--- a/src/wheel/macosx_libfile.py
+++ b/src/wheel/macosx_libfile.py
@@ -44,26 +44,29 @@ import sys
"""here the needed const and struct from mach-o header files"""
-FAT_MAGIC = 0xcafebabe
-FAT_CIGAM = 0xbebafeca
-FAT_MAGIC_64 = 0xcafebabf
-FAT_CIGAM_64 = 0xbfbafeca
-MH_MAGIC = 0xfeedface
-MH_CIGAM = 0xcefaedfe
-MH_MAGIC_64 = 0xfeedfacf
-MH_CIGAM_64 = 0xcffaedfe
+FAT_MAGIC = 0xCAFEBABE
+FAT_CIGAM = 0xBEBAFECA
+FAT_MAGIC_64 = 0xCAFEBABF
+FAT_CIGAM_64 = 0xBFBAFECA
+MH_MAGIC = 0xFEEDFACE
+MH_CIGAM = 0xCEFAEDFE
+MH_MAGIC_64 = 0xFEEDFACF
+MH_CIGAM_64 = 0xCFFAEDFE
LC_VERSION_MIN_MACOSX = 0x24
LC_BUILD_VERSION = 0x32
-CPU_TYPE_ARM64 = 0x0100000c
+CPU_TYPE_ARM64 = 0x0100000C
mach_header_fields = [
- ("magic", ctypes.c_uint32), ("cputype", ctypes.c_int),
- ("cpusubtype", ctypes.c_int), ("filetype", ctypes.c_uint32),
- ("ncmds", ctypes.c_uint32), ("sizeofcmds", ctypes.c_uint32),
- ("flags", ctypes.c_uint32)
- ]
+ ("magic", ctypes.c_uint32),
+ ("cputype", ctypes.c_int),
+ ("cpusubtype", ctypes.c_int),
+ ("filetype", ctypes.c_uint32),
+ ("ncmds", ctypes.c_uint32),
+ ("sizeofcmds", ctypes.c_uint32),
+ ("flags", ctypes.c_uint32),
+]
"""
struct mach_header {
uint32_t magic; /* mach magic number identifier */
@@ -101,9 +104,11 @@ struct fat_header {
"""
fat_arch_fields = [
- ("cputype", ctypes.c_int), ("cpusubtype", ctypes.c_int),
- ("offset", ctypes.c_uint32), ("size", ctypes.c_uint32),
- ("align", ctypes.c_uint32)
+ ("cputype", ctypes.c_int),
+ ("cpusubtype", ctypes.c_int),
+ ("offset", ctypes.c_uint32),
+ ("size", ctypes.c_uint32),
+ ("align", ctypes.c_uint32),
]
"""
struct fat_arch {
@@ -116,9 +121,12 @@ struct fat_arch {
"""
fat_arch_64_fields = [
- ("cputype", ctypes.c_int), ("cpusubtype", ctypes.c_int),
- ("offset", ctypes.c_uint64), ("size", ctypes.c_uint64),
- ("align", ctypes.c_uint32), ("reserved", ctypes.c_uint32)
+ ("cputype", ctypes.c_int),
+ ("cpusubtype", ctypes.c_int),
+ ("offset", ctypes.c_uint64),
+ ("size", ctypes.c_uint64),
+ ("align", ctypes.c_uint32),
+ ("reserved", ctypes.c_uint32),
]
"""
struct fat_arch_64 {
@@ -135,13 +143,18 @@ segment_base_fields = [("cmd", ctypes.c_uint32), ("cmdsize", ctypes.c_uint32)]
"""base for reading segment info"""
segment_command_fields = [
- ("cmd", ctypes.c_uint32), ("cmdsize", ctypes.c_uint32),
- ("segname", ctypes.c_char * 16), ("vmaddr", ctypes.c_uint32),
- ("vmsize", ctypes.c_uint32), ("fileoff", ctypes.c_uint32),
- ("filesize", ctypes.c_uint32), ("maxprot", ctypes.c_int),
- ("initprot", ctypes.c_int), ("nsects", ctypes.c_uint32),
+ ("cmd", ctypes.c_uint32),
+ ("cmdsize", ctypes.c_uint32),
+ ("segname", ctypes.c_char * 16),
+ ("vmaddr", ctypes.c_uint32),
+ ("vmsize", ctypes.c_uint32),
+ ("fileoff", ctypes.c_uint32),
+ ("filesize", ctypes.c_uint32),
+ ("maxprot", ctypes.c_int),
+ ("initprot", ctypes.c_int),
+ ("nsects", ctypes.c_uint32),
("flags", ctypes.c_uint32),
- ]
+]
"""
struct segment_command { /* for 32-bit architectures */
uint32_t cmd; /* LC_SEGMENT */
@@ -160,13 +173,18 @@ typedef int vm_prot_t;
"""
segment_command_fields_64 = [
- ("cmd", ctypes.c_uint32), ("cmdsize", ctypes.c_uint32),
- ("segname", ctypes.c_char * 16), ("vmaddr", ctypes.c_uint64),
- ("vmsize", ctypes.c_uint64), ("fileoff", ctypes.c_uint64),
- ("filesize", ctypes.c_uint64), ("maxprot", ctypes.c_int),
- ("initprot", ctypes.c_int), ("nsects", ctypes.c_uint32),
+ ("cmd", ctypes.c_uint32),
+ ("cmdsize", ctypes.c_uint32),
+ ("segname", ctypes.c_char * 16),
+ ("vmaddr", ctypes.c_uint64),
+ ("vmsize", ctypes.c_uint64),
+ ("fileoff", ctypes.c_uint64),
+ ("filesize", ctypes.c_uint64),
+ ("maxprot", ctypes.c_int),
+ ("initprot", ctypes.c_int),
+ ("nsects", ctypes.c_uint32),
("flags", ctypes.c_uint32),
- ]
+]
"""
struct segment_command_64 { /* for 64-bit architectures */
uint32_t cmd; /* LC_SEGMENT_64 */
@@ -183,8 +201,10 @@ struct segment_command_64 { /* for 64-bit architectures */
};
"""
-version_min_command_fields = segment_base_fields + \
- [("version", ctypes.c_uint32), ("sdk", ctypes.c_uint32)]
+version_min_command_fields = segment_base_fields + [
+ ("version", ctypes.c_uint32),
+ ("sdk", ctypes.c_uint32),
+]
"""
struct version_min_command {
uint32_t cmd; /* LC_VERSION_MIN_MACOSX or
@@ -197,9 +217,12 @@ struct version_min_command {
};
"""
-build_version_command_fields = segment_base_fields + \
- [("platform", ctypes.c_uint32), ("minos", ctypes.c_uint32),
- ("sdk", ctypes.c_uint32), ("ntools", ctypes.c_uint32)]
+build_version_command_fields = segment_base_fields + [
+ ("platform", ctypes.c_uint32),
+ ("minos", ctypes.c_uint32),
+ ("sdk", ctypes.c_uint32),
+ ("ntools", ctypes.c_uint32),
+]
"""
struct build_version_command {
uint32_t cmd; /* LC_BUILD_VERSION */
@@ -214,10 +237,12 @@ struct build_version_command {
def swap32(x):
- return (((x << 24) & 0xFF000000) |
- ((x << 8) & 0x00FF0000) |
- ((x >> 8) & 0x0000FF00) |
- ((x >> 24) & 0x000000FF))
+ return (
+ ((x << 24) & 0xFF000000)
+ | ((x << 8) & 0x00FF0000)
+ | ((x >> 8) & 0x0000FF00)
+ | ((x >> 24) & 0x000000FF)
+ )
def get_base_class_and_magic_number(lib_file, seek=None):
@@ -226,7 +251,8 @@ def get_base_class_and_magic_number(lib_file, seek=None):
else:
lib_file.seek(seek)
magic_number = ctypes.c_uint32.from_buffer_copy(
- lib_file.read(ctypes.sizeof(ctypes.c_uint32))).value
+ lib_file.read(ctypes.sizeof(ctypes.c_uint32))
+ ).value
# Handle wrong byte order
if magic_number in [FAT_CIGAM, FAT_CIGAM_64, MH_CIGAM, MH_CIGAM_64]:
@@ -244,8 +270,7 @@ def get_base_class_and_magic_number(lib_file, seek=None):
def read_data(struct_class, lib_file):
- return struct_class.from_buffer_copy(lib_file.read(
- ctypes.sizeof(struct_class)))
+ return struct_class.from_buffer_copy(lib_file.read(ctypes.sizeof(struct_class)))
def extract_macosx_min_system_version(path_to_lib):
@@ -255,6 +280,7 @@ def extract_macosx_min_system_version(path_to_lib):
return
if magic_number in [FAT_MAGIC, FAT_CIGAM_64]:
+
class FatHeader(BaseClass):
_fields_ = fat_header_fields
@@ -263,12 +289,15 @@ def extract_macosx_min_system_version(path_to_lib):
class FatArch(BaseClass):
_fields_ = fat_arch_fields
+
else:
class FatArch(BaseClass):
_fields_ = fat_arch_64_fields
- fat_arch_list = [read_data(FatArch, lib_file) for _ in range(fat_header.nfat_arch)]
+ fat_arch_list = [
+ read_data(FatArch, lib_file) for _ in range(fat_header.nfat_arch)
+ ]
versions_list = []
for el in fat_arch_list:
@@ -333,12 +362,14 @@ def read_mach_header(lib_file, seek=None):
segment_base = read_data(SegmentBase, lib_file)
lib_file.seek(pos)
if segment_base.cmd == LC_VERSION_MIN_MACOSX:
+
class VersionMinCommand(base_class):
_fields_ = version_min_command_fields
version_info = read_data(VersionMinCommand, lib_file)
return parse_version(version_info.version)
elif segment_base.cmd == LC_BUILD_VERSION:
+
class VersionBuild(base_class):
_fields_ = build_version_command_fields
@@ -350,9 +381,9 @@ def read_mach_header(lib_file, seek=None):
def parse_version(version):
- x = (version & 0xffff0000) >> 16
- y = (version & 0x0000ff00) >> 8
- z = (version & 0x000000ff)
+ x = (version & 0xFFFF0000) >> 16
+ y = (version & 0x0000FF00) >> 8
+ z = version & 0x000000FF
return x, y, z
@@ -362,25 +393,28 @@ def calculate_macosx_platform_tag(archive_root, platform_tag):
Example platform tag `macosx-10.14-x86_64`
"""
- prefix, base_version, suffix = platform_tag.split('-')
+ prefix, base_version, suffix = platform_tag.split("-")
base_version = tuple(int(x) for x in base_version.split("."))
base_version = base_version[:2]
if base_version[0] > 10:
base_version = (base_version[0], 0)
assert len(base_version) == 2
if "MACOSX_DEPLOYMENT_TARGET" in os.environ:
- deploy_target = tuple(int(x) for x in os.environ[
- "MACOSX_DEPLOYMENT_TARGET"].split("."))
+ deploy_target = tuple(
+ int(x) for x in os.environ["MACOSX_DEPLOYMENT_TARGET"].split(".")
+ )
deploy_target = deploy_target[:2]
if deploy_target[0] > 10:
deploy_target = (deploy_target[0], 0)
if deploy_target < base_version:
sys.stderr.write(
- "[WARNING] MACOSX_DEPLOYMENT_TARGET is set to a lower value ({}) than the "
- "version on which the Python interpreter was compiled ({}), and will be "
- "ignored.\n".format('.'.join(str(x) for x in deploy_target),
- '.'.join(str(x) for x in base_version))
+ "[WARNING] MACOSX_DEPLOYMENT_TARGET is set to a lower value ({}) than "
+ "the version on which the Python interpreter was compiled ({}), and "
+ "will be ignored.\n".format(
+ ".".join(str(x) for x in deploy_target),
+ ".".join(str(x) for x in base_version),
)
+ )
else:
base_version = deploy_target
@@ -389,7 +423,7 @@ def calculate_macosx_platform_tag(archive_root, platform_tag):
versions_dict = {}
for (dirpath, dirnames, filenames) in os.walk(archive_root):
for filename in filenames:
- if filename.endswith('.dylib') or filename.endswith('.so'):
+ if filename.endswith(".dylib") or filename.endswith(".so"):
lib_path = os.path.join(dirpath, filename)
min_ver = extract_macosx_min_system_version(lib_path)
if min_ver is not None:
@@ -410,17 +444,24 @@ def calculate_macosx_platform_tag(archive_root, platform_tag):
files_form = "this file"
else:
files_form = "these files"
- error_message = \
- "[WARNING] This wheel needs a higher macOS version than {} " \
- "To silence this warning, set MACOSX_DEPLOYMENT_TARGET to at least " +\
- fin_base_version + " or recreate " + files_form + " with lower " \
+ error_message = (
+ "[WARNING] This wheel needs a higher macOS version than {} "
+ "To silence this warning, set MACOSX_DEPLOYMENT_TARGET to at least "
+ + fin_base_version
+ + " or recreate "
+ + files_form
+ + " with lower "
"MACOSX_DEPLOYMENT_TARGET: \n" + problematic_files
+ )
if "MACOSX_DEPLOYMENT_TARGET" in os.environ:
- error_message = error_message.format("is set in MACOSX_DEPLOYMENT_TARGET variable.")
+ error_message = error_message.format(
+ "is set in MACOSX_DEPLOYMENT_TARGET variable."
+ )
else:
error_message = error_message.format(
- "the version your Python interpreter is compiled against.")
+ "the version your Python interpreter is compiled against."
+ )
sys.stderr.write(error_message)
diff --git a/src/wheel/metadata.py b/src/wheel/metadata.py
index 37efa74..ace796d 100644
--- a/src/wheel/metadata.py
+++ b/src/wheel/metadata.py
@@ -12,15 +12,15 @@ from .pkginfo import read_pkg_info
def requires_to_requires_dist(requirement):
"""Return the version specifier for a requirement in PEP 345/566 fashion."""
- if getattr(requirement, 'url', None):
+ if getattr(requirement, "url", None):
return " @ " + requirement.url
requires_dist = []
for op, ver in requirement.specs:
requires_dist.append(op + ver)
if not requires_dist:
- return ''
- return " (%s)" % ','.join(sorted(requires_dist))
+ return ""
+ return " (%s)" % ",".join(sorted(requires_dist))
def convert_requirements(requirements):
@@ -36,30 +36,30 @@ def convert_requirements(requirements):
def generate_requirements(extras_require):
"""
- Convert requirements from a setup()-style dictionary to ('Requires-Dist', 'requirement')
- and ('Provides-Extra', 'extra') tuples.
+ Convert requirements from a setup()-style dictionary to
+ ('Requires-Dist', 'requirement') and ('Provides-Extra', 'extra') tuples.
extras_require is a dictionary of {extra: [requirements]} as passed to setup(),
using the empty extra {'': [requirements]} to hold install_requires.
"""
for extra, depends in extras_require.items():
- condition = ''
- extra = extra or ''
- if ':' in extra: # setuptools extra:condition syntax
- extra, condition = extra.split(':', 1)
+ condition = ""
+ extra = extra or ""
+ if ":" in extra: # setuptools extra:condition syntax
+ extra, condition = extra.split(":", 1)
extra = pkg_resources.safe_extra(extra)
if extra:
- yield 'Provides-Extra', extra
+ yield "Provides-Extra", extra
if condition:
condition = "(" + condition + ") and "
condition += "extra == '%s'" % extra
if condition:
- condition = ' ; ' + condition
+ condition = " ; " + condition
for new_req in convert_requirements(depends):
- yield 'Requires-Dist', new_req + condition
+ yield "Requires-Dist", new_req + condition
def pkginfo_to_metadata(egg_info_path, pkginfo_path):
@@ -67,26 +67,27 @@ def pkginfo_to_metadata(egg_info_path, pkginfo_path):
Convert .egg-info directory with PKG-INFO to the Metadata 2.1 format
"""
pkg_info = read_pkg_info(pkginfo_path)
- pkg_info.replace_header('Metadata-Version', '2.1')
+ pkg_info.replace_header("Metadata-Version", "2.1")
# Those will be regenerated from `requires.txt`.
- del pkg_info['Provides-Extra']
- del pkg_info['Requires-Dist']
- requires_path = os.path.join(egg_info_path, 'requires.txt')
+ del pkg_info["Provides-Extra"]
+ del pkg_info["Requires-Dist"]
+ requires_path = os.path.join(egg_info_path, "requires.txt")
if os.path.exists(requires_path):
with open(requires_path) as requires_file:
requires = requires_file.read()
- parsed_requirements = sorted(pkg_resources.split_sections(requires),
- key=lambda x: x[0] or '')
+ parsed_requirements = sorted(
+ pkg_resources.split_sections(requires), key=lambda x: x[0] or ""
+ )
for extra, reqs in parsed_requirements:
for key, value in generate_requirements({extra: reqs}):
if (key, value) not in pkg_info.items():
pkg_info[key] = value
- description = pkg_info['Description']
+ description = pkg_info["Description"]
if description:
pkg_info.set_payload(dedent_description(pkg_info))
- del pkg_info['Description']
+ del pkg_info["Description"]
return pkg_info
@@ -98,8 +99,7 @@ def pkginfo_unicode(pkg_info, field):
if not isinstance(text, str):
for item in pkg_info.raw_items():
if item[0].lower() == field:
- text = item[1].encode('ascii', 'surrogateescape') \
- .decode('utf-8')
+ text = item[1].encode("ascii", "surrogateescape").decode("utf-8")
break
return text
@@ -109,25 +109,28 @@ def dedent_description(pkg_info):
"""
Dedent and convert pkg_info['Description'] to Unicode.
"""
- description = pkg_info['Description']
+ description = pkg_info["Description"]
# Python 3 Unicode handling, sorta.
surrogates = False
if not isinstance(description, str):
surrogates = True
- description = pkginfo_unicode(pkg_info, 'Description')
+ description = pkginfo_unicode(pkg_info, "Description")
description_lines = description.splitlines()
- description_dedent = '\n'.join(
+ description_dedent = "\n".join(
# if the first line of long_description is blank,
# the first line here will be indented.
- (description_lines[0].lstrip(),
- textwrap.dedent('\n'.join(description_lines[1:])),
- '\n'))
+ (
+ description_lines[0].lstrip(),
+ textwrap.dedent("\n".join(description_lines[1:])),
+ "\n",
+ )
+ )
if surrogates:
- description_dedent = description_dedent \
- .encode("utf8") \
- .decode("ascii", "surrogateescape")
+ description_dedent = description_dedent.encode("utf8").decode(
+ "ascii", "surrogateescape"
+ )
return description_dedent
diff --git a/src/wheel/util.py b/src/wheel/util.py
index a1f1e3b..09f5b29 100644
--- a/src/wheel/util.py
+++ b/src/wheel/util.py
@@ -1,7 +1,7 @@
import base64
-def native(s, encoding='utf-8'):
+def native(s, encoding="utf-8"):
if isinstance(s, bytes):
return s.decode(encoding)
else:
@@ -10,23 +10,23 @@ def native(s, encoding='utf-8'):
def urlsafe_b64encode(data):
"""urlsafe_b64encode without padding"""
- return base64.urlsafe_b64encode(data).rstrip(b'=')
+ return base64.urlsafe_b64encode(data).rstrip(b"=")
def urlsafe_b64decode(data):
"""urlsafe_b64decode without padding"""
- pad = b'=' * (4 - (len(data) & 3))
+ pad = b"=" * (4 - (len(data) & 3))
return base64.urlsafe_b64decode(data + pad)
def as_unicode(s):
if isinstance(s, bytes):
- return s.decode('utf-8')
+ return s.decode("utf-8")
return s
def as_bytes(s):
if isinstance(s, str):
- return s.encode('utf-8')
+ return s.encode("utf-8")
else:
return s
diff --git a/src/wheel/wheelfile.py b/src/wheel/wheelfile.py
index 83f1611..36b38ef 100644
--- a/src/wheel/wheelfile.py
+++ b/src/wheel/wheelfile.py
@@ -10,20 +10,27 @@ from io import StringIO, TextIOWrapper
from zipfile import ZIP_DEFLATED, ZipInfo, ZipFile
from wheel.cli import WheelError
-from wheel.util import urlsafe_b64decode, as_unicode, native, urlsafe_b64encode, as_bytes
+from wheel.util import (
+ urlsafe_b64decode,
+ as_unicode,
+ native,
+ urlsafe_b64encode,
+ as_bytes,
+)
# Non-greedy matching of an optional build number may be too clever (more
# invalid wheel filenames will match). Separate regex for .dist-info?
WHEEL_INFO_RE = re.compile(
r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.+?))(-(?P<build>\d[^-]*))?
-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)\.whl$""",
- re.VERBOSE)
+ re.VERBOSE,
+)
def get_zipinfo_datetime(timestamp=None):
- # Some applications need reproducible .whl files, but they can't do this without forcing
- # the timestamp of the individual ZipInfo objects. See issue #143.
- timestamp = int(os.environ.get('SOURCE_DATE_EPOCH', timestamp or time.time()))
+ # Some applications need reproducible .whl files, but they can't do this without
+ # forcing the timestamp of the individual ZipInfo objects. See issue #143.
+ timestamp = int(os.environ.get("SOURCE_DATE_EPOCH", timestamp or time.time()))
return time.gmtime(timestamp)[0:6]
@@ -34,49 +41,56 @@ class WheelFile(ZipFile):
_default_algorithm = hashlib.sha256
- def __init__(self, file, mode='r', compression=ZIP_DEFLATED):
+ def __init__(self, file, mode="r", compression=ZIP_DEFLATED):
basename = os.path.basename(file)
self.parsed_filename = WHEEL_INFO_RE.match(basename)
- if not basename.endswith('.whl') or self.parsed_filename is None:
+ if not basename.endswith(".whl") or self.parsed_filename is None:
raise WheelError(f"Bad wheel filename {basename!r}")
ZipFile.__init__(self, file, mode, compression=compression, allowZip64=True)
- self.dist_info_path = '{}.dist-info'.format(self.parsed_filename.group('namever'))
- self.record_path = self.dist_info_path + '/RECORD'
+ self.dist_info_path = "{}.dist-info".format(
+ self.parsed_filename.group("namever")
+ )
+ self.record_path = self.dist_info_path + "/RECORD"
self._file_hashes = OrderedDict()
self._file_sizes = {}
- if mode == 'r':
+ if mode == "r":
# Ignore RECORD and any embedded wheel signatures
self._file_hashes[self.record_path] = None, None
- self._file_hashes[self.record_path + '.jws'] = None, None
- self._file_hashes[self.record_path + '.p7s'] = None, None
+ self._file_hashes[self.record_path + ".jws"] = None, None
+ self._file_hashes[self.record_path + ".p7s"] = None, None
# Fill in the expected hashes by reading them from RECORD
try:
record = self.open(self.record_path)
except KeyError:
- raise WheelError(f'Missing {self.record_path} file')
+ raise WheelError(f"Missing {self.record_path} file")
with record:
- for line in csv.reader(TextIOWrapper(record, newline='', encoding='utf-8')):
+ for line in csv.reader(
+ TextIOWrapper(record, newline="", encoding="utf-8")
+ ):
path, hash_sum, size = line
if not hash_sum:
continue
- algorithm, hash_sum = hash_sum.split('=')
+ algorithm, hash_sum = hash_sum.split("=")
try:
hashlib.new(algorithm)
except ValueError:
- raise WheelError(f'Unsupported hash algorithm: {algorithm}')
+ raise WheelError(f"Unsupported hash algorithm: {algorithm}")
- if algorithm.lower() in {'md5', 'sha1'}:
+ if algorithm.lower() in {"md5", "sha1"}:
raise WheelError(
- 'Weak hash algorithm ({}) is not permitted by PEP 427'
- .format(algorithm))
+ "Weak hash algorithm ({}) is not permitted by PEP "
+ "427".format(algorithm)
+ )
self._file_hashes[path] = (
- algorithm, urlsafe_b64decode(hash_sum.encode('ascii')))
+ algorithm,
+ urlsafe_b64decode(hash_sum.encode("ascii")),
+ )
def open(self, name_or_info, mode="r", pwd=None):
def _update_crc(newdata, eof=None):
@@ -90,16 +104,22 @@ class WheelFile(ZipFile):
if eof and running_hash.digest() != expected_hash:
raise WheelError(f"Hash mismatch for file '{native(ef_name)}'")
- ef_name = as_unicode(name_or_info.filename if isinstance(name_or_info, ZipInfo)
- else name_or_info)
- if mode == 'r' and not ef_name.endswith('/') and ef_name not in self._file_hashes:
+ ef_name = as_unicode(
+ name_or_info.filename if isinstance(name_or_info, ZipInfo) else name_or_info
+ )
+ if (
+ mode == "r"
+ and not ef_name.endswith("/")
+ and ef_name not in self._file_hashes
+ ):
raise WheelError(f"No hash found for file '{native(ef_name)}'")
ef = ZipFile.open(self, name_or_info, mode, pwd)
- if mode == 'r' and not ef_name.endswith('/'):
+ if mode == "r" and not ef_name.endswith("/"):
algorithm, expected_hash = self._file_hashes[ef_name]
if expected_hash is not None:
- # Monkey patch the _update_crc method to also check for the hash from RECORD
+ # Monkey patch the _update_crc method to also check for the hash from
+ # RECORD
running_hash = hashlib.new(algorithm)
update_crc_orig, ef._update_crc = ef._update_crc, _update_crc
@@ -115,10 +135,10 @@ class WheelFile(ZipFile):
for name in sorted(filenames):
path = os.path.normpath(os.path.join(root, name))
if os.path.isfile(path):
- arcname = os.path.relpath(path, base_dir).replace(os.path.sep, '/')
+ arcname = os.path.relpath(path, base_dir).replace(os.path.sep, "/")
if arcname == self.record_path:
pass
- elif root.endswith('.dist-info'):
+ elif root.endswith(".dist-info"):
deferred.append((path, arcname))
else:
self.write(path, arcname)
@@ -128,38 +148,43 @@ class WheelFile(ZipFile):
self.write(path, arcname)
def write(self, filename, arcname=None, compress_type=None):
- with open(filename, 'rb') as f:
+ with open(filename, "rb") as f:
st = os.fstat(f.fileno())
data = f.read()
- zinfo = ZipInfo(arcname or filename, date_time=get_zipinfo_datetime(st.st_mtime))
+ zinfo = ZipInfo(
+ arcname or filename, date_time=get_zipinfo_datetime(st.st_mtime)
+ )
zinfo.external_attr = (stat.S_IMODE(st.st_mode) | stat.S_IFMT(st.st_mode)) << 16
zinfo.compress_type = compress_type or self.compression
self.writestr(zinfo, data, compress_type)
def writestr(self, zinfo_or_arcname, bytes, compress_type=None):
ZipFile.writestr(self, zinfo_or_arcname, bytes, compress_type)
- fname = (zinfo_or_arcname.filename if isinstance(zinfo_or_arcname, ZipInfo)
- else zinfo_or_arcname)
+ fname = (
+ zinfo_or_arcname.filename
+ if isinstance(zinfo_or_arcname, ZipInfo)
+ else zinfo_or_arcname
+ )
logger.info("adding '%s'", fname)
if fname != self.record_path:
hash_ = self._default_algorithm(bytes)
- self._file_hashes[fname] = hash_.name, native(urlsafe_b64encode(hash_.digest()))
+ self._file_hashes[fname] = hash_.name, native(
+ urlsafe_b64encode(hash_.digest())
+ )
self._file_sizes[fname] = len(bytes)
def close(self):
# Write RECORD
- if self.fp is not None and self.mode == 'w' and self._file_hashes:
+ if self.fp is not None and self.mode == "w" and self._file_hashes:
data = StringIO()
- writer = csv.writer(data, delimiter=',', quotechar='"', lineterminator='\n')
- writer.writerows((
+ writer = csv.writer(data, delimiter=",", quotechar='"', lineterminator="\n")
+ writer.writerows(
(
- fname,
- algorithm + "=" + hash_,
- self._file_sizes[fname]
+ (fname, algorithm + "=" + hash_, self._file_sizes[fname])
+ for fname, (algorithm, hash_) in self._file_hashes.items()
)
- for fname, (algorithm, hash_) in self._file_hashes.items()
- ))
+ )
writer.writerow((format(self.record_path), "", ""))
zinfo = ZipInfo(native(self.record_path), date_time=get_zipinfo_datetime())
zinfo.compress_type = self.compression