Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Issue 23607: first pass of changes for compatibility with Python3 #24435

Merged
merged 8 commits into from Oct 16, 2019

Use urllib from six module in order to be compatible with Python3

  • Loading branch information
marmeladema committed Oct 15, 2019
commit f1d42fe787c20ff2d62b0aab00874fe9c79af352
@@ -11,7 +11,7 @@
import platform
import shutil
import subprocess
import urllib
import six.moves.urllib as urllib
from subprocess import PIPE
from zipfile import BadZipfile

@@ -293,7 +293,7 @@ def check_cmake(version):

def prepare_file(zip_path, full_spec):
if not os.path.isfile(zip_path):
zip_url = "{}{}.zip".format(deps_url, urllib.quote(full_spec))
zip_url = "{}{}.zip".format(deps_url, urllib.parse.quote(full_spec))
download_file(full_spec, zip_url, zip_path)

print("Extracting {}...".format(full_spec), end='')
@@ -18,7 +18,7 @@
import subprocess
import sys
import traceback
import urllib2
import six.moves.urllib as urllib
import glob

from mach.decorators import (
@@ -220,7 +220,7 @@ def bootstrap_hsts_preload(self, force=False):

try:
content_base64 = download_bytes("Chromium HSTS preload list", chromium_hsts_url)
except urllib2.URLError:
except urllib.error.URLError:
print("Unable to download chromium HSTS preload list; are you connected to the internet?")
sys.exit(1)

@@ -258,7 +258,7 @@ def bootstrap_pub_suffix(self, force=False):

try:
content = download_bytes("Public suffix list", list_url)
except urllib2.URLError:
except urllib.error.URLError:
print("Unable to download the public suffix list; are you connected to the internet?")
sys.exit(1)

@@ -16,7 +16,7 @@
import shutil
import subprocess
import sys
import urllib
import six.moves.urllib as urllib
import zipfile
import stat

@@ -484,7 +484,7 @@ def build(self, release=False, dev=False, jobs=None, params=None,
print("Downloading GStreamer dependencies")
gst_url = "https://servo-deps.s3.amazonaws.com/gstreamer/%s" % gst_lib_zip
print(gst_url)
urllib.urlretrieve(gst_url, gst_lib_zip)
urllib.request.urlretrieve(gst_url, gst_lib_zip)
zip_ref = zipfile.ZipFile(gst_lib_zip, "r")
zip_ref.extractall(gst_dir)
os.remove(gst_lib_zip)
@@ -27,7 +27,7 @@
import zipfile
from xml.etree.ElementTree import XML
from servo.util import download_file
import urllib2
import six.moves.urllib as urllib
from bootstrap import check_gstreamer_lib

from mach.decorators import CommandArgument
@@ -506,15 +506,15 @@ def get_nightly_binary_path(self, nightly_date):
nightly_date = nightly_date.strip()
# Fetch the filename to download from the build list
repository_index = NIGHTLY_REPOSITORY_URL + "?list-type=2&prefix=nightly"
req = urllib2.Request(
req = urllib.request.Request(
"{}/{}/{}".format(repository_index, os_prefix, nightly_date))
try:
response = urllib2.urlopen(req).read()
response = urllib.request.urlopen(req).read()
tree = XML(response)
namespaces = {'ns': tree.tag[1:tree.tag.index('}')]}
file_to_download = tree.find('ns:Contents', namespaces).find(
'ns:Key', namespaces).text
except urllib2.URLError as e:
except urllib.error.URLError as e:
print("Could not fetch the available nightly versions from the repository : {}".format(
e.reason))
sys.exit(1)
@@ -14,7 +14,7 @@
import signal
import sys
import tempfile
import urllib2
import six.moves.urllib as urllib
import json
import subprocess

@@ -201,7 +201,7 @@ def grep(self, params):
category='devenv')
def rustup(self):
url = get_static_rust_lang_org_dist() + "/channel-rust-nightly-date.txt"
nightly_date = urllib2.urlopen(url, **get_urlopen_kwargs()).read()
nightly_date = urllib.request.urlopen(url, **get_urlopen_kwargs()).read()
toolchain = "nightly-" + nightly_date
filename = path.join(self.context.topdir, "rust-toolchain")
with open(filename, "w") as f:
@@ -19,7 +19,7 @@
import subprocess
import sys
import tempfile
import urllib
import six.moves.urllib as urllib

from mach.decorators import (
CommandArgument,
@@ -594,7 +594,7 @@ def get_taskcluster_secret(name):
"/secrets/v1/secret/project/servo/" +
name
)
return json.load(urllib.urlopen(url))["secret"]
return json.load(urllib.request.urlopen(url))["secret"]

def get_s3_secret():
aws_access_key = None
@@ -18,7 +18,7 @@
from collections import OrderedDict
import time
import json
import urllib2
import six.moves.urllib as urllib
import base64
import shutil
import subprocess
@@ -510,9 +510,9 @@ def filter_intermittents(self, summary, log_filteredsummary, log_intermittents,
elif tracker_api.endswith('/'):
tracker_api = tracker_api[0:-1]

query = urllib2.quote(failure['test'], safe='')
request = urllib2.Request("%s/query.py?name=%s" % (tracker_api, query))
search = urllib2.urlopen(request)
query = urllib.parse.quote(failure['test'], safe='')
request = urllib.request.Request("%s/query.py?name=%s" % (tracker_api, query))
search = urllib.request.urlopen(request)
data = json.load(search)
if len(data) == 0:
actual_failures += [failure]
@@ -521,11 +521,11 @@ def filter_intermittents(self, summary, log_filteredsummary, log_intermittents,
else:
qstr = "repo:servo/servo+label:I-intermittent+type:issue+state:open+%s" % failure['test']
# we want `/` to get quoted, but not `+` (github's API doesn't like that), so we set `safe` to `+`
query = urllib2.quote(qstr, safe='+')
request = urllib2.Request("https://api.github.com/search/issues?q=%s" % query)
query = urllib.parse.quote(qstr, safe='+')
request = urllib.request.Request("https://api.github.com/search/issues?q=%s" % query)
if encoded_auth:
request.add_header("Authorization", "Basic %s" % encoded_auth)
search = urllib2.urlopen(request)
search = urllib.request.urlopen(request)
data = json.load(search)
if data['total_count'] == 0:
actual_failures += [failure]
@@ -20,7 +20,7 @@
import sys
import time
import zipfile
import urllib2
import six.moves.urllib as urllib


try:
@@ -101,10 +101,10 @@ def download(desc, src, writer, start_byte=0):
dumb = (os.environ.get("TERM") == "dumb") or (not sys.stdout.isatty())

try:
req = urllib2.Request(src)
req = urllib.request.Request(src)
if start_byte:
req = urllib2.Request(src, headers={'Range': 'bytes={}-'.format(start_byte)})
resp = urllib2.urlopen(req, **get_urlopen_kwargs())
req = urllib.request.Request(src, headers={'Range': 'bytes={}-'.format(start_byte)})
resp = urllib.request.urlopen(req, **get_urlopen_kwargs())

fsize = None
if resp.info().getheader('Content-Length'):
@@ -136,13 +136,13 @@ def download(desc, src, writer, start_byte=0):

if not dumb:
print()
except urllib2.HTTPError, e:
except urllib.error.HTTPError as e:
print("Download failed ({}): {} - {}".format(e.code, e.reason, src))
if e.code == 403:
print("No Rust compiler binary available for this platform. "
"Please see https://github.com/servo/servo/#prerequisites")
sys.exit(1)
except urllib2.URLError, e:
except urllib.error.URLError as e:
print("Error downloading {}: {}. The failing URL was: {}".format(desc, e.reason, src))
sys.exit(1)
except socket_error, e:
ProTip! Use n and p to navigate between commits in a pull request.
You can’t perform that action at this time.