Skip to content

Commit

Permalink
Merge branch 'use-busybox' into 'master'
Browse files Browse the repository at this point in the history
  • Loading branch information
remram44 committed Jun 27, 2014
2 parents 1b98b17 + d785954 commit 71cc428
Show file tree
Hide file tree
Showing 5 changed files with 143 additions and 57 deletions.
23 changes: 11 additions & 12 deletions reprounzip-vagrant/reprounzip/unpackers/vagrant.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
import tarfile

from reprounzip.unpackers.common import load_config, select_installer,\
shell_escape, join_root
shell_escape, busybox_url, join_root
from reprounzip.utils import unicode_


Expand Down Expand Up @@ -156,20 +156,19 @@ def create_vagrant(args):

# Copies /bin/sh + dependencies
if use_chroot:
regex = r'^\t(?:[^ ]+ => )?([^ ]+) \([x0-9a-z]+\)$'
url = busybox_url(runs[0]['architecture'])
fp.write(r'''
for i in $(ldd /bin/sh /usr/bin/env |
perl -n -e '/{regex}/ && print "$1\n"'); do
if [ -e "$i" ] ; then
mkdir -p "$(dirname /experimentroot/$i)"
cp -L "$i" "/experimentroot/$i"
fi
done
mkdir -p /experimentroot/bin
cp -L /bin/sh /experimentroot/bin/sh
mkdir -p /experimentroot/usr/bin
cp -L /usr/bin/env /experimentroot/usr/bin/env
'''.format(regex=regex))
if [ ! -e /experimentroot/bin/sh -o ! -e /experimentroot/usr/bin/env ]; then
wget -O /experimentroot/bin/busybox {url}
chmod +x /experimentroot/bin/busybox
fi
[ -e /experimentroot/bin/sh ] || \
ln -s /bin/busybox /experimentroot/bin/sh
[ -e /experimentroot/usr/bin/env ] || \
ln -s /bin/busybox /experimentroot/usr/bin/env
'''.format(url=url))

# Copies pack
pack.copyfile(target / 'experiment.rpz')
Expand Down
4 changes: 4 additions & 0 deletions reprounzip/reprounzip/unpackers/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -128,6 +128,10 @@ def select_installer(pack, runs, target_distribution=THIS_DISTRIBUTION):
return installer


def busybox_url(arch):
    """Return the download URL of a static busybox binary for *arch*.

    *arch* is an architecture name as recorded in the pack's config
    (e.g. ``x86_64`` or ``i686``), matching busybox.net's naming.
    """
    base = 'http://www.busybox.net/downloads/binaries/latest/busybox-%s'
    return base % arch


def join_root(root, path):
p_root, p_loc = path.split_root()
assert p_root == b'/'
Expand Down
64 changes: 19 additions & 45 deletions reprounzip/reprounzip/unpackers/default.py
Original file line number Diff line number Diff line change
@@ -1,43 +1,13 @@
from __future__ import unicode_literals

import itertools
import re
from rpaths import PosixPath, Path
import subprocess
import sys
import tarfile

from reprounzip.utils import unicode_
from reprounzip.utils import unicode_, download_file
from reprounzip.unpackers.common import load_config, select_installer, \
shell_escape, join_root


# Matches one line of ldd output and captures the library path: either
# "\tname => /path (0xaddr)" or "\t/path (0xaddr)".
_ldd_fmt = re.compile(r'^\t(?:[^ ]+ => )?([^ ]+) \([x0-9a-z]+\)$')


def ldd(program):
    """Yields the files *program* dynamically links against, as Paths.

    Runs the ``ldd`` utility on *program* and parses its output; lines
    that don't match the expected format are silently skipped.  Note the
    captured path may be a virtual entry with no file on disk (callers
    check existence).
    """
    p = subprocess.Popen(['ldd', program], stdout=subprocess.PIPE)
    try:
        for l in p.stdout:
            # ldd output is plain ASCII; the pipe yields bytes
            l = l.decode('ascii')
            m = _ldd_fmt.match(l)
            if m is None:
                continue
            f = Path(m.group(1))
            yield f
    finally:
        # Always reap the child, even if the consumer stops iterating
        # early; a failed ldd invocation is treated as a fatal bug here.
        p.wait()
        assert p.returncode == 0


def copy_with_so(program, root):
    """Copies *program* plus its shared-library dependencies under *root*.

    Every existing file reported by ldd(), and *program* itself, is
    copied to the same absolute path re-rooted below *root* (via
    join_root).  Files already present under *root* are left untouched.
    """
    for f in itertools.chain(ldd(program), [Path(program)]):
        if not f.exists():
            # e.g. entries ldd reports that have no file on disk
            continue
        dest = join_root(root, f)
        # NOTE(review): runs before the exists() check, so this assumes
        # rpaths' mkdir(parents=True) tolerates existing directories --
        # confirm against the rpaths version in use
        dest.parent.mkdir(parents=True)
        if not dest.exists():
            f.copy(dest)
shell_escape, busybox_url, join_root


def installpkgs(args):
Expand Down Expand Up @@ -138,8 +108,7 @@ def create_chroot(args):
"""Unpacks the experiment in a folder so it can be run with chroot.
All the files in the pack are unpacked; system files are copied only if
they were not packed, and for /bin/sh and dependencies (if they were not
packed).
they were not packed, and busybox is installed if /bin/sh wasn't packed.
"""
pack = Path(args.pack[0])
target = Path(args.target[0])
Expand Down Expand Up @@ -188,23 +157,28 @@ def create_chroot(args):
tar.extractall(str(root), members)
tar.close()

# Copies /bin/sh + dependencies
copy_with_so('/bin/sh', root)

# Copies /usr/bin/env + dependencies
if Path('/usr/bin/env').exists():
has_env = True
copy_with_so('/usr/bin/env', root)
else:
has_env = False
# Sets up /bin/sh and /usr/bin/env, downloading busybox if necessary
sh_path = join_root(root, Path('/bin/sh'))
env_path = join_root(root, Path('/usr/bin/env'))
if not sh_path.lexists() or not env_path.lexists():
busybox_path = join_root(root, Path('/bin/busybox'))
busybox_path.parent.mkdir(parents=True)
download_file(busybox_url(runs[0]['architecture']),
busybox_path)
busybox_path.chmod(0o755)
if not sh_path.lexists():
sh_path.parent.mkdir(parents=True)
sh_path.symlink('/bin/busybox')
if not env_path.lexists():
env_path.parent.mkdir(parents=True)
env_path.symlink('/bin/busybox')

# Writes start script
with (target / 'script.sh').open('w', encoding='utf-8') as fp:
fp.write('#!/bin/sh\n\n')
for run in runs:
cmd = 'cd %s && ' % shell_escape(run['workingdir'])
if has_env:
cmd += '/usr/bin/env -i '
cmd += '/usr/bin/env -i '
cmd += ' '.join('%s=%s' % (k, shell_escape(v))
for k, v in run['environ'].items())
cmd += ' '
Expand Down
54 changes: 54 additions & 0 deletions reprounzip/reprounzip/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,22 @@

from __future__ import unicode_literals

import email.utils
import logging
from rpaths import Path
import sys


PY3 = sys.version_info[0] == 3


if PY3:
from urllib.error import HTTPError, URLError
from urllib.request import Request, urlopen
else:
from urllib2 import Request, HTTPError, URLError, urlopen


if PY3:
unicode_ = str
else:
Expand Down Expand Up @@ -108,3 +117,48 @@ def find_all_links(filename, include_target=False):
if include_target:
files.append(path)
return files


def download_file(url, dest, cachename=None):
    """Downloads *url* to *dest*, caching it under ~/.cache/reprozip.

    The file is kept in the cache directory under *cachename* (defaults
    to dest's filename) and revalidated with an If-Modified-Since
    request; on a 304 Not Modified response, or on a download failure
    when a cached copy exists, the cached copy is used instead.

    Raises the urlopen error (URLError/HTTPError) if the download fails
    and there is no cached copy to fall back on.
    """
    if cachename is None:
        cachename = dest.name

    request = Request(url)

    cache = Path('~/.cache/reprozip').expand_user() / cachename
    if cache.exists():
        # Ask the server to skip the transfer if our copy is current
        mtime = email.utils.formatdate(cache.mtime(), usegmt=True)
        request.add_header('If-Modified-Since', mtime)

    try:
        response = urlopen(request)
    except URLError as e:
        if cache.exists():
            if isinstance(e, HTTPError) and e.code == 304:
                # 304 Not Modified: our cached copy is still valid
                logging.info("Cached file %s is up to date" % cachename)
            else:
                # Download failed: fall back to the (possibly stale) cache
                logging.warning("Couldn't download %s: %s" % (url, e))
            cache.copy(dest)
            return
        else:
            raise

    logging.info("Downloading %s" % url)
    try:
        CHUNK_SIZE = 4096
        cache.parent.mkdir(parents=True)
        with cache.open('wb') as f:
            while True:
                chunk = response.read(CHUNK_SIZE)
                if not chunk:
                    break
                f.write(chunk)
    except Exception:
        # Don't leave a truncated file in the cache
        try:
            cache.remove()
        except OSError:
            pass
        # Bare raise preserves the original traceback (unlike `raise e`)
        raise
    finally:
        # Close the connection on both the success and error paths
        response.close()

    cache.copy(dest)
55 changes: 55 additions & 0 deletions reprozip/reprozip/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,22 @@

from __future__ import unicode_literals

import email.utils
import logging
from rpaths import Path
import sys


PY3 = sys.version_info[0] == 3


if PY3:
    from urllib.error import HTTPError, URLError
    from urllib.request import Request, urlopen
else:
    from urllib2 import Request, HTTPError, URLError, urlopen


if PY3:
unicode_ = str
else:
Expand Down Expand Up @@ -108,3 +117,49 @@ def find_all_links(filename, include_target=False):
if include_target:
files.append(path)
return files


def download_file(url, dest, cachename=None):
    """Downloads *url* to *dest*, caching it under ~/.cache/reprozip.

    The file is kept in the cache directory under *cachename* (defaults
    to dest's filename) and revalidated with an If-Modified-Since
    request; on a 304 Not Modified response, or on a download failure
    when a cached copy exists, the cached copy is used instead.

    Raises the urlopen error (URLError/HTTPError) if the download fails
    and there is no cached copy to fall back on.
    """
    if cachename is None:
        cachename = dest.name

    request = Request(url)

    cache = Path('~/.cache/reprozip').expand_user() / cachename
    if cache.exists():
        # Ask the server to skip the transfer if our copy is current
        mtime = email.utils.formatdate(cache.mtime(), usegmt=True)
        request.add_header('If-Modified-Since', mtime)

    try:
        response = urlopen(request)
    except URLError as e:
        # urlopen() never returns None: a 304 to a conditional request
        # arrives as an HTTPError, so the "up to date" case is handled
        # here rather than after a successful open.
        if cache.exists():
            if isinstance(e, HTTPError) and e.code == 304:
                logging.info("Cached file %s is up to date" % cachename)
            else:
                # Download failed: fall back to the (possibly stale) cache
                logging.warning("Couldn't download %s: %s" % (url, e))
            cache.copy(dest)
            return
        else:
            raise

    logging.info("Downloading %s" % url)
    try:
        CHUNK_SIZE = 4096
        # Create the cache directory on first use
        cache.parent.mkdir(parents=True)
        with cache.open('wb') as f:
            while True:
                chunk = response.read(CHUNK_SIZE)
                if not chunk:
                    break
                f.write(chunk)
    except Exception:
        # Don't leave a truncated file in the cache
        try:
            cache.remove()
        except OSError:
            pass
        # Bare raise preserves the original traceback (unlike `raise e`)
        raise
    finally:
        # Close the connection on both the success and error paths
        response.close()

    cache.copy(dest)

0 comments on commit 71cc428

Please sign in to comment.