#!/usr/bin/env python
"""
Download SciPy wheels from Anaconda staging area.
"""
import os
import re
import shutil
import argparse
import urllib
import urllib.request

import urllib3
from bs4 import BeautifulSoup

__version__ = '0.1'

# Edit these for other projects.
STAGING_URL = 'https://anaconda.org/multibuild-wheels-staging/scipy'
PREFIX = 'scipy'


def http_manager():
    """
    Return a urllib3 http request manager, leveraging
    proxy settings when available.
    """
    proxy_dict = urllib.request.getproxies()
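    # getproxies() reads proxy settings from the environment (variables such
    # as http_proxy / all_proxy) or from the platform configuration and
    # returns a dict keyed by scheme, e.g. {'http': 'http://proxy:3128'}
    # (placeholder address).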
    if 'http' in proxy_dict:
        http = urllib3.ProxyManager(proxy_dict['http'])
    elif 'all' in proxy_dict:
        http = urllib3.ProxyManager(proxy_dict['all'])
    else:
        http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED')
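        # cert_reqs='CERT_REQUIRED' keeps TLS certificate verification
        # enabled for the direct (non-proxied) connection.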
    return http


def get_wheel_names(version):
    """ Get wheel names from Anaconda HTML directory.

    This looks in the Anaconda multibuild-wheels-staging page and
    parses the HTML to get all the wheel names for a release version.

    Parameters
    ----------
    version : str
        The release version. For instance, "1.5.0".

    """
    http = http_manager()
    tmpl = re.compile(rf"^.*{PREFIX}-{version}-.*\.whl$")
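    # For version "1.5.0" this pattern matches file names such as
    # "scipy-1.5.0-cp38-cp38-manylinux1_x86_64.whl" (illustrative name).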
    index_url = f"{STAGING_URL}/files"
    index_html = http.request('GET', index_url)
    soup = BeautifulSoup(index_html.data, 'html.parser')
    return soup.findAll(text=tmpl)


def download_wheels(version, wheelhouse):
    """Download release wheels.

    The release wheels for the given SciPy version are downloaded
    into the given directory.

    Parameters
    ----------
    version : str
        The release version. For instance, "1.5.0".
    wheelhouse : str
        Directory in which to download the wheels.

    """
    http = http_manager()
    wheel_names = get_wheel_names(version)

    for i, wheel_name in enumerate(wheel_names):
        wheel_url = f"{STAGING_URL}/{version}/download/{wheel_name}"
        wheel_path = os.path.join(wheelhouse, wheel_name)
        with open(wheel_path, 'wb') as f:
            with http.request('GET', wheel_url, preload_content=False) as r:
                print(f"{i + 1:<4}{wheel_name}")
                shutil.copyfileobj(r, f)

    print(f"\nTotal files downloaded: {len(wheel_names)}")
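
# The downloader can also be called from other scripts, for example:
#
#   download_wheels("1.5.0", "/tmp/scipy-wheels")  # target directory must exist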


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "version",
        help="SciPy version to download.")
    parser.add_argument(
        "-w", "--wheelhouse",
        default=os.path.join(os.getcwd(), "release", "installers"),
        help="Directory in which to store downloaded wheels\n"
             "[defaults to <cwd>/release/installers]")
    args = parser.parse_args()

    wheelhouse = os.path.expanduser(args.wheelhouse)
    if not os.path.isdir(wheelhouse):
        raise RuntimeError(
            f"{wheelhouse} wheelhouse directory is not present."
            " Perhaps you need to use the '-w' flag to specify one.")

    download_wheels(args.version, wheelhouse)