Skip to content


Subversion checkout URL

You can clone with
Download ZIP
Tree: 2625956432
Fetching contributors…

Cannot retrieve contributors at this time

65 lines (54 sloc) 2.043 kB
# gPodder dependency installer for running the CLI from the source tree
# Run "python install-deps.py" and it will download and inject dependencies,
# so you only need a standard Python installation for the command-line utility
# Thomas Perl <thp@gpodder.org>; 2012-02-11

import urllib2
import re
import sys
import StringIO
import tarfile
import os
import shutil
import tempfile

# Route all normal output to stderr so stdout stays clean for callers
sys.stdout = sys.stderr

# Destination "src/" directory (sibling of this script's directory) where the
# dependency modules get injected, plus a scratch directory for extraction
src_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'src'))
tmp_dir = tempfile.mkdtemp()

# Module name, Regex-file chooser (1st group = location in "src/")
MODULES = [
    ('feedparser', r'feedparser-[0-9.]+/feedparser/(feedparser\.py)'),
    ('mygpoclient', r'mygpoclient-[0-9.]+/(mygpoclient/[^/]*\.py)'),
]
def get_tarball_url(modulename):
    """Return the source tarball URL for modulename from PyPI, or None.

    Fetches the PyPI "simple" index page for the module and scans the
    HTML for the first link matching "<modulename>-<version>.tar.gz".
    Returns None when no matching download link is found.
    """
    url = 'http://pypi.python.org/simple/' + modulename
    html = urllib2.urlopen(url).read()
    # Group 1 = full tarball URL, group 2 = version string (unused here)
    match = re.search(r'(http://[^>]*%s-([0-9.]*)\.tar\.gz)' % modulename,
                      html)
    return match.group(1) if match is not None else None
for module, required_files in MODULES:
print 'Fetching', module, '...',
tarball_url = get_tarball_url(module)
if tarball_url is None:
print 'Cannot determine download URL for', module, '- aborting!'
data = urllib2.urlopen(tarball_url).read()
print '%d KiB' % (len(data)/1024)
tar =
for name in tar.getnames():
match = re.match(required_files, name)
if match is not None:
target_name =
target_file = os.path.join(src_dir, target_name)
if os.path.exists(target_file):
print 'Skipping:', target_file
target_dir = os.path.dirname(target_file)
if not os.path.isdir(target_dir):
print 'Extracting:', target_name
tar.extract(name, tmp_dir)
shutil.move(os.path.join(tmp_dir, name), target_file)
Jump to Line
Something went wrong with that request. Please try again.