Commit

Merge b15919a into 28579d0
jneves committed Feb 15, 2020
2 parents 28579d0 + b15919a commit 535e9cf
Showing 2 changed files with 46 additions and 40 deletions.
21 changes: 8 additions & 13 deletions tests/tests.py
@@ -135,19 +135,6 @@ def test_create_lambda_package(self):
self.assertTrue(os.path.isfile(path))
os.remove(path)

def test_get_manylinux_python27(self):
z = Zappa(runtime='python2.7')
self.assertIsNotNone(z.get_cached_manylinux_wheel('cffi', '1.10.0'))
self.assertIsNone(z.get_cached_manylinux_wheel('derpderpderpderp', '0.0'))

# mock with a known manylinux wheel package so that code for downloading them gets invoked
mock_installed_packages = { 'cffi' : '1.10.0' }
with mock.patch('zappa.core.Zappa.get_installed_packages', return_value = mock_installed_packages):
z = Zappa(runtime='python2.7')
path = z.create_lambda_zip(handler_file=os.path.realpath(__file__))
self.assertTrue(os.path.isfile(path))
os.remove(path)

def test_get_manylinux_python36(self):
z = Zappa(runtime='python3.6')
self.assertIsNotNone(z.get_cached_manylinux_wheel('psycopg2', '2.7.1'))
@@ -187,6 +174,14 @@ def test_get_manylinux_python38(self):
self.assertTrue(os.path.isfile(path))
os.remove(path)

# same, but with an ABI3 package
mock_installed_packages = {'cryptography': '2.8'}
with mock.patch('zappa.core.Zappa.get_installed_packages', return_value=mock_installed_packages):
z = Zappa(runtime='python3.8')
path = z.create_lambda_zip(handler_file=os.path.realpath(__file__))
self.assertTrue(os.path.isfile(path))
os.remove(path)

def test_should_use_lambda_packages(self):
z = Zappa(runtime='python2.7')

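For context on the abi3 test case added above: abi3 wheels carry a different filename tag than interpreter-specific wheels, which is why cryptography (a package that ships abi3 wheels) gets its own case. A rough illustration with hypothetical filenames, not taken from this diff:

# Hypothetical wheel filenames, for illustration only:
cp38_wheel = 'somepkg-1.0-cp38-cp38-manylinux1_x86_64.whl'     # built for CPython 3.8 only
abi3_wheel = 'somepkg-1.0-cp36-abi3-manylinux2010_x86_64.whl'  # stable ABI, any CPython >= 3.6
# The old fixed-suffix check could only find the first kind; the new regexes accept both.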
65 changes: 38 additions & 27 deletions zappa/core.py
@@ -15,6 +15,7 @@
import logging
import os
import random
import re
import shutil
import string
import subprocess
@@ -282,16 +283,19 @@ def __init__(self,

self.runtime = runtime

if self.runtime == 'python2.7':
self.manylinux_wheel_file_suffix = 'cp27mu-manylinux1_x86_64.whl'
elif self.runtime == 'python3.6':
self.manylinux_wheel_file_suffix = 'cp36m-manylinux1_x86_64.whl'
if self.runtime == 'python3.6':
self.manylinux_suffix_start = 'cp36m'
elif self.runtime == 'python3.7':
self.manylinux_wheel_file_suffix = 'cp37m-manylinux1_x86_64.whl'
self.manylinux_suffix_start = 'cp37m'
else:
# The 'm' has been dropped in python 3.8+ since builds with and without pymalloc are ABI compatible
# See https://github.com/pypa/manylinux for a more detailed explanation
self.manylinux_wheel_file_suffix = 'cp38-manylinux1_x86_64.whl'
self.manylinux_suffix_start = 'cp38'

# AWS Lambda supports manylinux1/2010 and manylinux2014
manylinux_suffixes = ("2014", "2010", "1")
self.manylinux_wheel_file_match = re.compile(f'^.*{self.manylinux_suffix_start}-manylinux({"|".join(manylinux_suffixes)})_x86_64.whl$')
self.manylinux_wheel_abi3_file_match = re.compile(f'^.*cp3.-abi3-manylinux({"|".join(manylinux_suffixes)})_x86_64.whl$')

self.endpoint_urls = endpoint_urls
self.xray_tracing = xray_tracing
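
A minimal sketch (not part of the commit) of what the two compiled patterns look like for the python3.8 runtime, using hypothetical wheel filenames to show which kind each pattern accepts:

import re

manylinux_suffixes = ("2014", "2010", "1")
suffix_start = 'cp38'  # value selected above for the python3.8 runtime
wheel_match = re.compile(f'^.*{suffix_start}-manylinux({"|".join(manylinux_suffixes)})_x86_64.whl$')
abi3_match = re.compile(f'^.*cp3.-abi3-manylinux({"|".join(manylinux_suffixes)})_x86_64.whl$')

# Interpreter-specific wheel: only the first pattern matches.
print(bool(wheel_match.match('somepkg-1.0-cp38-cp38-manylinux1_x86_64.whl')))    # True
# abi3 wheel: only the second pattern matches.
print(bool(abi3_match.match('somepkg-1.0-cp36-abi3-manylinux2010_x86_64.whl')))  # True
# A macOS wheel matches neither, so it is never picked for Lambda.
print(bool(wheel_match.match('somepkg-1.0-cp38-cp38-macosx_10_9_x86_64.whl')))   # False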
@@ -852,26 +856,31 @@ def get_cached_manylinux_wheel(self, package_name, package_version, disable_prog
Gets the locally stored version of a manylinux wheel. If one does not exist, the function downloads it.
"""
cached_wheels_dir = os.path.join(tempfile.gettempdir(), 'cached_wheels')

if not os.path.isdir(cached_wheels_dir):
os.makedirs(cached_wheels_dir)
else:
# Check if we already have a cached copy
wheel_file = f'{package_name}-{package_version}-*_x86_64.whl'
wheel_path = os.path.join(cached_wheels_dir, wheel_file)

for pathname in glob.iglob(wheel_path):
if re.match(self.manylinux_wheel_file_match, pathname) or re.match(self.manylinux_wheel_abi3_file_match, pathname):
print(f" - {package_name}=={package_version}: Using locally cached manylinux wheel")
return pathname

# The file is not cached, download it.
wheel_url, filename = self.get_manylinux_wheel_url(package_name, package_version)
if not wheel_url:
return None

wheel_file = '{0!s}-{1!s}-{2!s}'.format(package_name, package_version, self.manylinux_wheel_file_suffix)
wheel_path = os.path.join(cached_wheels_dir, wheel_file)

if not os.path.exists(wheel_path) or not zipfile.is_zipfile(wheel_path):
# The file is not cached, download it.
wheel_url = self.get_manylinux_wheel_url(package_name, package_version)
if not wheel_url:
return None

print(" - {}=={}: Downloading".format(package_name, package_version))
with open(wheel_path, 'wb') as f:
self.download_url_with_progress(wheel_url, f, disable_progress)
wheel_path = os.path.join(cached_wheels_dir, filename)
print(f" - {package_name}=={package_version}: Downloading")
with open(wheel_path, 'wb') as f:
self.download_url_with_progress(wheel_url, f, disable_progress)

if not zipfile.is_zipfile(wheel_path):
return None
else:
print(" - {}=={}: Using locally cached manylinux wheel".format(package_name, package_version))
if not zipfile.is_zipfile(wheel_path):
return None

return wheel_path
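
The caching flow above as a hedged usage sketch (assumes zappa is importable and PyPI is reachable; the package and version are illustrative, not required by this change):

from zappa.core import Zappa

z = Zappa(runtime='python3.8')
# First call: nothing is cached yet, so the wheel is downloaded into <tempdir>/cached_wheels/.
first = z.get_cached_manylinux_wheel('cryptography', '2.8')
# Second call: the glob + regex check finds the file on disk and returns it without downloading.
second = z.get_cached_manylinux_wheel('cryptography', '2.8')
print(first == second)  # expected: True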

@@ -907,18 +916,20 @@ def get_manylinux_wheel_url(self, package_name, package_version):
res = requests.get(url, timeout=float(os.environ.get('PIP_TIMEOUT', 1.5)))
data = res.json()
except Exception as e: # pragma: no cover
return None
return None, None
with open(json_file_path, 'wb') as metafile:
jsondata = json.dumps(data)
metafile.write(bytes(jsondata, "utf-8"))

if package_version not in data['releases']:
return None
return None, None

for f in data['releases'][package_version]:
if f['filename'].endswith(self.manylinux_wheel_file_suffix):
return f['url']
return None
if re.match(self.manylinux_wheel_file_match, f['filename']):
return f['url'], f['filename']
elif re.match(self.manylinux_wheel_abi3_file_match, f['filename']):
return f['url'], f['filename']
return None, None
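
Because the function now returns a (url, filename) pair instead of a bare URL, callers unpack both values and treat (None, None) as "no suitable wheel on PyPI". A brief sketch under the same assumptions as above:

from zappa.core import Zappa

z = Zappa(runtime='python3.8')
wheel_url, filename = z.get_manylinux_wheel_url('cryptography', '2.8')
if wheel_url:
    print(f'would download {filename} from {wheel_url}')
else:
    print('no matching manylinux wheel published for this version')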

##
# S3
