author | Charles Harris <charlesr.harris@gmail.com> | 2022-05-26 17:24:18 -0600 |
---|---|---|
committer | Charles Harris <charlesr.harris@gmail.com> | 2022-05-26 21:58:30 -0600 |
commit | 14101ae5cf8a498a8f9573eaa3963a4fc47acc8d (patch) | |
tree | a55067a54a7a7562e8b3f822b4445c71e71bab75 /tools/download-wheels.py | |
parent | 8d205fdac2daa0c6d90ad4b8911413071e9a3f09 (diff) | |
download | numpy-14101ae5cf8a498a8f9573eaa3963a4fc47acc8d.tar.gz |
MAINT, STY: Make download-wheels download source files.
Using cibuildwheel to build both the wheels and the sdist requires that both be
downloaded from the staging repo. After this change all of the .whl, .zip,
and .tar.gz files will be downloaded.
This also regularizes string quoting to conform to the Black standard.
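To illustrate the effect of the new SUFFIX pattern, here is a small sketch (not part of the patch; the version number and file names below are made up for illustration) showing which staging-repo file names the updated regex accepts:

    import re

    # Constants as introduced in the patch below.
    PREFIX = "numpy"
    WHL = r"-.*\.whl$"
    ZIP = r"\.zip$"
    GZIP = r"\.tar\.gz$"
    SUFFIX = rf"({WHL}|{GZIP}|{ZIP})"

    version = "1.23.0"  # hypothetical release number
    tmpl = re.compile(rf"^.*{PREFIX}-{version}{SUFFIX}")

    # Wheels still match, and the sdist archives now match as well.
    print(bool(tmpl.match("numpy-1.23.0-cp310-cp310-win_amd64.whl")))  # True
    print(bool(tmpl.match("numpy-1.23.0.tar.gz")))                     # True
    print(bool(tmpl.match("numpy-1.23.0.zip")))                        # True
    print(bool(tmpl.match("scipy-1.23.0.tar.gz")))                     # False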
Diffstat (limited to 'tools/download-wheels.py')
-rw-r--r-- | tools/download-wheels.py | 28 |
1 file changed, 17 insertions, 11 deletions
diff --git a/tools/download-wheels.py b/tools/download-wheels.py
index dd066d9ad..41e1e9e5d 100644
--- a/tools/download-wheels.py
+++ b/tools/download-wheels.py
@@ -31,11 +31,17 @@ import argparse
 import urllib3
 from bs4 import BeautifulSoup
 
-__version__ = '0.1'
+__version__ = "0.1"
 
 # Edit these for other projects.
-STAGING_URL = 'https://anaconda.org/multibuild-wheels-staging/numpy'
-PREFIX = 'numpy'
+STAGING_URL = "https://anaconda.org/multibuild-wheels-staging/numpy"
+PREFIX = "numpy"
+
+# Name endings of the files to download.
+WHL = r"-.*\.whl$"
+ZIP = r"\.zip$"
+GZIP = r"\.tar\.gz$"
+SUFFIX = rf"({WHL}|{GZIP}|{ZIP})"
 
 
 def get_wheel_names(version):
@@ -50,11 +56,11 @@ def get_wheel_names(version):
         The release version. For instance, "1.18.3".
 
     """
-    http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED')
-    tmpl = re.compile(rf"^.*{PREFIX}-{version}-.*\.whl$")
+    http = urllib3.PoolManager(cert_reqs="CERT_REQUIRED")
+    tmpl = re.compile(rf"^.*{PREFIX}-{version}{SUFFIX}")
     index_url = f"{STAGING_URL}/files"
-    index_html = http.request('GET', index_url)
-    soup = BeautifulSoup(index_html.data, 'html.parser')
+    index_html = http.request("GET", index_url)
+    soup = BeautifulSoup(index_html.data, "html.parser")
     return soup.findAll(text=tmpl)
 
 
@@ -72,20 +78,20 @@ def download_wheels(version, wheelhouse):
         Directory in which to download the wheels.
 
     """
-    http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED')
+    http = urllib3.PoolManager(cert_reqs="CERT_REQUIRED")
     wheel_names = get_wheel_names(version)
     for i, wheel_name in enumerate(wheel_names):
         wheel_url = f"{STAGING_URL}/{version}/download/{wheel_name}"
         wheel_path = os.path.join(wheelhouse, wheel_name)
-        with open(wheel_path, 'wb') as f:
-            with http.request('GET', wheel_url, preload_content=False,) as r:
+        with open(wheel_path, "wb") as f:
+            with http.request("GET", wheel_url, preload_content=False,) as r:
                 print(f"{i + 1:<4}{wheel_name}")
                 shutil.copyfileobj(r, f)
 
     print(f"\nTotal files downloaded: {len(wheel_names)}")
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     parser = argparse.ArgumentParser()
     parser.add_argument(
         "version",
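For reference, the hunk above shows that the script takes the release version as a positional argument, so a typical invocation would look like the following (version number illustrative; any further options are defined later in the file, outside this hunk):

    python tools/download-wheels.py 1.23.0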