Upgrade WAF to 1.5.14

commit e898c1f2e30a05dd6a45f061468392ca44754b5f (1 parent: c98b079)
authored by ry (@ry)
Showing with 1,046 additions and 455 deletions.
  1. +1 −1  bin/node-waf
  2. +6 −6 tools/waf-light
  3. +14 −7 tools/wafadmin/3rdparty/boost.py
  4. +48 −26 tools/wafadmin/Build.py
  5. +23 −10 tools/wafadmin/Configure.py
  6. +3 −3 tools/wafadmin/Constants.py
  7. +9 −4 tools/wafadmin/Environment.py
  8. +13 −12 tools/wafadmin/Logs.py
  9. +38 −7 tools/wafadmin/Node.py
  10. +2 −0  tools/wafadmin/Options.py
  11. +17 −11 tools/wafadmin/Runner.py
  12. +18 −10 tools/wafadmin/Scripting.py
  13. +121 −44 tools/wafadmin/Task.py
  14. +15 −12 tools/wafadmin/TaskGen.py
  15. +1 −1  tools/wafadmin/Tools/ar.py
  16. +7 −4 tools/wafadmin/Tools/ccroot.py
  17. +8 −9 tools/wafadmin/Tools/compiler_cc.py
  18. +8 −8 tools/wafadmin/Tools/compiler_cxx.py
  19. +67 −27 tools/wafadmin/Tools/config_c.py
  20. +3 −4 tools/wafadmin/Tools/gas.py
  21. +20 −9 tools/wafadmin/Tools/gnome.py
  22. +2 −3 tools/wafadmin/Tools/intltool.py
  23. +5 −6 tools/wafadmin/Tools/javaw.py
  24. +1 −1  tools/wafadmin/Tools/lua.py
  25. +1 −0  tools/wafadmin/Tools/misc.py
  26. +95 −91 tools/wafadmin/Tools/msvc.py
  27. +32 −40 tools/wafadmin/Tools/perl.py
  28. +6 −6 tools/wafadmin/Tools/preproc.py
  29. +2 −2 tools/wafadmin/Tools/python.py
  30. +39 −14 tools/wafadmin/Tools/qt4.py
  31. +120 −0 tools/wafadmin/Tools/ruby.py
  32. +60 −44 tools/wafadmin/Tools/{UnitTest.py → unittestw.py}
  33. +77 −0 tools/wafadmin/Tools/xlc.py
  34. +77 −0 tools/wafadmin/Tools/xlcxx.py
  35. +50 −25 tools/wafadmin/Utils.py
  36. +25 −6 tools/wafadmin/ansiterm.py
  37. +12 −2 tools/wafadmin/py3kfixes.py
2  bin/node-waf
@@ -12,6 +12,6 @@ t = join(w, 'Tools')
sys.path = [w, t] + sys.path
import Scripting
-VERSION="1.5.10"
+VERSION="1.5.14"
Scripting.prepare(t, os.getcwd(), VERSION, wafdir)
sys.exit(0)
12 tools/waf-light
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005-2009
+# encoding: ISO8859-1
+# Thomas Nagy, 2005-2010
"""
Redistribution and use in source and binary forms, with or without
@@ -37,7 +37,7 @@ if 'PSYCOWAF' in os.environ:
try:import psyco;psyco.full()
except:pass
-VERSION="1.5.10"
+VERSION="1.5.14"
REVISION="x"
INSTALL="x"
C1='x'
@@ -149,7 +149,7 @@ t = join(w, 'Tools')
f = join(w, '3rdparty')
sys.path = [w, t, f] + sys.path
-import Scripting
-Scripting.prepare(t, cwd, VERSION, wafdir)
-sys.exit(0)
+if __name__ == '__main__':
+ import Scripting
+ Scripting.prepare(t, cwd, VERSION, wafdir)
21 tools/wafadmin/3rdparty/boost.py
@@ -17,7 +17,7 @@
# conf.check_boost(lib='signals filesystem', static='onlystatic', score_version=(-1000, 1000), tag_minscore=1000)
#
#def build(bld):
-# bld.new_task_gen(source='main.c', target='bar', uselib="BOOST BOOST_SYSTEM")
+# bld(source='main.c', target='bar', uselib="BOOST BOOST_SYSTEM")
#
#ISSUES:
# * find_includes should be called only once!
@@ -28,7 +28,7 @@
## * the rest of the code has not really been tried
# * make certain a demo is provided (in demos/adv for example)
-# TODO: boost.py will be removed in waf 1.6
+# TODO: bad and underdocumented code -> boost.py will be removed in waf 1.6 to be rewritten later
import os.path, glob, types, re, sys
import Configure, config_c, Options, Utils, Logs
@@ -52,6 +52,7 @@
is_threadingtag = re.compile('^mt$')
is_abitag = re.compile('^[sgydpn]+$')
is_toolsettag = re.compile('^(acc|borland|como|cw|dmc|darwin|gcc|hp_cxx|intel|kylix|vc|mgw|qcc|sun|vacpp)\d*$')
+is_pythontag=re.compile('^py[0-9]{2}$')
def set_options(opt):
opt.add_option('--boost-includes', type='string', default='', dest='boostincludes', help='path to the boost directory where the includes are e.g. /usr/local/include/boost-1_35')
@@ -74,8 +75,8 @@ def version_string(version):
def libfiles(lib, pattern, lib_paths):
result = []
for lib_path in lib_paths:
- libname = pattern % ('boost_' + lib + '*')
- result += glob.glob(lib_path + '/' + libname)
+ libname = pattern % ('boost_%s[!_]*' % lib)
+ result += glob.glob(os.path.join(lib_path, libname))
return result
@conf
@@ -99,9 +100,10 @@ def tags_score(tags, kw):
score = 0
needed_tags = {
'threading': kw['tag_threading'],
- 'abi': kw['tag_abi'],
- 'toolset': kw['tag_toolset'],
- 'version': kw['tag_version']
+ 'abi': kw['tag_abi'],
+ 'toolset': kw['tag_toolset'],
+ 'version': kw['tag_version'],
+ 'python': kw['tag_python']
}
if kw['tag_toolset'] is None:
@@ -120,6 +122,7 @@ def tags_score(tags, kw):
if is_threadingtag.match(tag): found_tags['threading'] = tag
if is_abitag.match(tag): found_tags['abi'] = tag
if is_toolsettag.match(tag): found_tags['toolset'] = tag
+ if is_pythontag.match(tag): found_tags['python'] = tag
for tagname in needed_tags.iterkeys():
if needed_tags[tagname] is not None and tagname in found_tags:
@@ -148,8 +151,12 @@ def validate_boost(self, kw):
set_default(kw, x, None)
set_default(kw, 'tag_abi', '^[^d]*$')
+ set_default(kw, 'python', str(sys.version_info[0]) + str(sys.version_info[1]) )
+ set_default(kw, 'tag_python', '^py' + kw['python'] + '$')
+
set_default(kw, 'score_threading', (10, -10))
set_default(kw, 'score_abi', (10, -10))
+ set_default(kw, 'score_python', (10,-10))
set_default(kw, 'score_toolset', (1, -1))
set_default(kw, 'score_version', (100, -100))
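For illustration, a hedged configuration sketch of the new Python tagging: the check_boost call below follows the usage documented in the header of boost.py, with the 'python' keyword added by this hunk. The '26' value (Python 2.6) is only an example, since the keyword defaults to the running interpreter's version, and loading the tool assumes the 3rdparty directory is on the tool path, as it is with the bundled waf-light.

    def set_options(opt):
        opt.tool_options('boost')

    def configure(conf):
        conf.check_tool('compiler_cxx boost')
        # libraries tagged 'py26' now score higher than libraries built for another Python
        conf.check_boost(lib='signals python', tag_minscore=1000, python='26')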
74 tools/wafadmin/Build.py
@@ -33,11 +33,14 @@ def __init__(self, b=None, t=[]):
Utils.WafError.__init__(self, self.format_error())
def format_error(self):
- lst = ['Build failed']
+ lst = ['Build failed:']
for tsk in self.tasks:
txt = tsk.format_error()
if txt: lst.append(txt)
- return '\n'.join(lst)
+ sep = ' '
+ if len(lst) > 2:
+ sep = '\n'
+ return sep.join(lst)
def group_method(fun):
"""
@@ -62,7 +65,8 @@ def f(*k, **kw):
m = k[0].task_manager
if not m.groups: m.add_group()
m.groups[m.current_group].post_funs.append((fun, k, kw))
- kw['cwd'] = k[0].path
+ if not 'cwd' in kw:
+ kw['cwd'] = k[0].path
else:
fun(*k, **kw)
return f
@@ -269,7 +273,7 @@ def dw(on=True):
self.generator.start()
except KeyboardInterrupt:
dw()
- if self.generator.consumers:
+ if Runner.TaskConsumer.consumers:
self.save()
raise
except Exception:
@@ -278,7 +282,7 @@ def dw(on=True):
raise
else:
dw()
- if self.generator.consumers:
+ if Runner.TaskConsumer.consumers:
self.save()
if self.generator.error:
@@ -316,6 +320,9 @@ def install(self):
except OSError: pass
def new_task_gen(self, *k, **kw):
+ if self.task_gen_cache_names:
+ self.task_gen_cache_names = {}
+
kw['bld'] = self
if len(k) == 0:
ret = TaskGen.task_gen(*k, **kw)
@@ -328,6 +335,13 @@ def new_task_gen(self, *k, **kw):
ret = cls(*k, **kw)
return ret
+ def __call__(self, *k, **kw):
+ if self.task_gen_cache_names:
+ self.task_gen_cache_names = {}
+
+ kw['bld'] = self
+ return TaskGen.task_gen(*k, **kw)
+
def load_envs(self):
try:
lst = Utils.listdir(self.cachedir)
@@ -384,7 +398,7 @@ def init_variants(self):
lstvariants.append(env.variant())
self.lst_variants = lstvariants
- debug('build: list of variants is %r' % lstvariants)
+ debug('build: list of variants is %r', lstvariants)
for name in lstvariants+[0]:
for v in 'node_sigs cache_node_abspath'.split():
@@ -418,7 +432,7 @@ def load_dirs(self, srcdir, blddir, load_cache=1):
if not self.srcnode:
self.srcnode = self.root.ensure_dir_node_from_path(srcdir)
- debug('build: srcnode is %s and srcdir %s' % (self.srcnode.name, srcdir))
+ debug('build: srcnode is %s and srcdir %s', self.srcnode.name, srcdir)
self.path = self.srcnode
@@ -498,24 +512,30 @@ def rescan(self, src_dir_node):
lst.reverse()
# list the files in the build dirs
- # remove the existing timestamps if the build files are removed
- for variant in self.lst_variants:
- sub_path = os.path.join(self.bldnode.abspath(), variant , *lst)
- try:
+ try:
+ for variant in self.lst_variants:
+ sub_path = os.path.join(self.bldnode.abspath(), variant , *lst)
self.listdir_bld(src_dir_node, sub_path, variant)
- except OSError:
- #debug('build: osError on ' + sub_path)
- # listdir failed, remove all sigs of nodes
- # TODO more things to remove?
- dict = self.node_sigs[variant]
- for node in src_dir_node.childs.values():
- if node.id in dict:
+ except OSError:
+
+ # listdir failed, remove the build node signatures for all variants
+ for node in src_dir_node.childs.values():
+ if node.id & 3 != Node.BUILD:
+ continue
+
+ for dct in self.node_sigs:
+ if node.id in dct:
dict.__delitem__(node.id)
- # avoid deleting the build dir node
- if node.id != self.bldnode.id:
- src_dir_node.childs.__delitem__(node.name)
- os.makedirs(sub_path)
+ # the policy is to avoid removing nodes representing directories
+ src_dir_node.childs.__delitem__(node.name)
+
+ for variant in self.lst_variants:
+ sub_path = os.path.join(self.bldnode.abspath(), variant , *lst)
+ try:
+ os.makedirs(sub_path)
+ except OSError:
+ pass
# ======================================= #
def listdir_src(self, parent_node):
@@ -599,7 +619,7 @@ def hash_env_vars(self, env, vars_lst):
lst = [str(env[a]) for a in vars_lst]
ret = Utils.h_list(lst)
- debug("envhash: %r %r" % (ret, lst))
+ debug('envhash: %r %r', ret, lst)
# next time
self.cache_sig_vars[idx] = ret
@@ -769,6 +789,7 @@ def do_install(self, src, tgt, chmod=O644):
Logs.warn('could not remove %s (error code %r)' % (e.filename, e.errno))
return True
+ red = re.compile(r"^([A-Za-z]:)?[/\\\\]*")
def get_install_path(self, path, env=None):
"installation path prefixed by the destdir, the variables like in '${PREFIX}/bin' are substituted"
if not env: env = self.env
@@ -776,7 +797,7 @@ def get_install_path(self, path, env=None):
path = path.replace('/', os.sep)
destpath = Utils.subst_vars(path, env)
if destdir:
- destpath = os.path.join(destdir, destpath.lstrip(os.sep))
+ destpath = os.path.join(destdir, self.red.sub('', destpath))
return destpath
def install_files(self, path, files, env=None, chmod=O644, relative_trick=False, cwd=None):
@@ -891,10 +912,11 @@ def symlink_as(self, path, src, env=None, cwd=None):
link = True
elif os.readlink(tgt) != src:
link = True
+
+ if link:
try: os.remove(tgt)
except OSError: pass
- if link:
info('* symlink %s (-> %s)' % (tgt, src))
os.symlink(src, tgt)
return 0
@@ -909,7 +931,7 @@ def symlink_as(self, path, src, env=None, cwd=None):
def exec_command(self, cmd, **kw):
# 'runner' zone is printed out for waf -v, see wafadmin/Options.py
- debug('runner: system command -> %s' % cmd)
+ debug('runner: system command -> %s', cmd)
if self.log:
self.log.write('%s\n' % cmd)
kw['log'] = self.log
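The new __call__ method above makes the build context callable, so bld(...) becomes a shorthand for bld.new_task_gen(...); the docstrings updated throughout this commit use that form. A minimal hedged wscript sketch, with placeholder file and target names:

    def build(bld):
        # equivalent to bld.new_task_gen(features='cc cprogram', ...)
        bld(features='cc cprogram', source='main.c', target='demo')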
33 tools/wafadmin/Configure.py
@@ -157,6 +157,7 @@ def check_tool(self, input, tooldir=None, funs=None):
for tool in tools:
tool = tool.replace('++', 'xx')
if tool == 'java': tool = 'javaw'
+ if tool.lower() == 'unittest': tool = 'unittestw'
# avoid loading the same tool more than once with the same functions
# used by composite projects
@@ -166,10 +167,14 @@ def check_tool(self, input, tooldir=None, funs=None):
self.tool_cache.append(mag)
module = Utils.load_tool(tool, tooldir)
- func = getattr(module, 'detect', None)
- if func:
- if type(func) is type(find_file): func(self)
- else: self.eval_rules(funs or func)
+
+ if funs:
+ self.eval_rules(funs)
+ else:
+ func = getattr(module, 'detect', None)
+ if func:
+ if type(func) is type(find_file): func(self)
+ else: self.eval_rules(func)
self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs})
@@ -232,17 +237,20 @@ def add_os_flags(self, var, dest=None):
def check_message_1(self, sr):
self.line_just = max(self.line_just, len(sr))
- self.log.write(sr + '\n\n')
+ for x in ('\n', self.line_just * '-', '\n', sr, '\n'):
+ self.log.write(x)
Utils.pprint('NORMAL', "%s :" % sr.ljust(self.line_just), sep='')
def check_message_2(self, sr, color='GREEN'):
+ self.log.write(sr)
+ self.log.write('\n')
Utils.pprint(color, sr)
def check_message(self, th, msg, state, option=''):
sr = 'Checking for %s %s' % (th, msg)
self.check_message_1(sr)
p = self.check_message_2
- if state: p('ok ' + option)
+ if state: p('ok ' + str(option))
else: p('not found', 'YELLOW')
# FIXME remove in waf 1.6
@@ -268,10 +276,15 @@ def find_program(self, filename, path_list=[], var=None, mandatory=False):
ret = find_program_impl(self.env, x, path_list, var, environ=self.environ)
if ret: break
- self.check_message('program', ','.join(filename), ret, ret)
- self.log.write('find program=%r paths=%r var=%r -> %r\n\n' % (filename, path_list, var, ret))
- if not ret and mandatory:
- self.fatal('The program %r could not be found' % filename)
+ self.check_message_1('Check for program %s' % ' or '.join(filename))
+ self.log.write(' find program=%r paths=%r var=%r\n -> %r\n' % (filename, path_list, var, ret))
+ if ret:
+ Utils.pprint('GREEN', str(ret))
+ else:
+ Utils.pprint('YELLOW', 'not found')
+ if mandatory:
+ self.fatal('The program %r is required' % filename)
+
if var:
self.env[var] = ret
return ret
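find_program now prints its own 'Check for program' line and, when mandatory=True, aborts the configuration if nothing is found. A short sketch taken from the perl tool changes later in this commit:

    def configure(conf):
        # raises conf.fatal('The program ... is required') when perl is absent
        conf.find_program('perl', var='PERL', mandatory=True)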
6 tools/wafadmin/Constants.py
@@ -9,9 +9,9 @@
"""
# do not touch these three lines, they are updated automatically
-HEXVERSION = 0x105010
-WAFVERSION="1.5.10"
-WAFREVISION = "6794M"
+HEXVERSION = 0x105014
+WAFVERSION="1.5.14"
+WAFREVISION = "7363M"
ABI = 7
# permissions
13 tools/wafadmin/Environment.py
@@ -53,8 +53,13 @@ def __getitem__(self, key):
def __setitem__(self, key, value):
self.table[key] = value
- def __delitem__(self, key, value):
+ def __delitem__(self, key):
del self.table[key]
+
+ def pop(self, key, *args):
+ if len(args):
+ return self.table.pop(key, *args)
+ return self.table.pop(key)
def set_variant(self, name):
self.table[VARIANT] = name
@@ -173,7 +178,7 @@ def load(self, filename):
for m in re_imp.finditer(code):
g = m.group
tbl[g(2)] = eval(g(3))
- Logs.debug('env: %s' % str(self.table))
+ Logs.debug('env: %s', self.table)
def get_destdir(self):
"return the destdir, useful for installing"
@@ -197,9 +202,9 @@ def __setattr__(self, name, value):
else:
self[name] = value
- def __detattr__(self, name):
+ def __delattr__(self, name):
if name in self.__slots__:
- object.__detattr__(self, name)
+ object.__delattr__(self, name)
else:
del self[name]
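Environment gains a dict-like pop() alongside the __delattr__ spelling fix. A small hedged sketch; CCFLAGS is only a familiar key, and the copy() is there to leave conf.env untouched:

    def configure(conf):
        conf.check_tool('compiler_cc')
        env = conf.env.copy()
        # behaves like dict.pop(): returns the stored value, or the default when the key is missing
        ccflags = env.pop('CCFLAGS', [])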
25 tools/wafadmin/Logs.py
@@ -23,12 +23,13 @@
'cursor_off' :'\x1b[?25l',
}
-got_tty = not os.environ.get('TERM', 'dumb') in ['dumb', 'emacs']
-if got_tty:
+got_tty = False
+term = os.environ.get('TERM', 'dumb')
+if not term in ['dumb', 'emacs']:
try:
- got_tty = sys.stderr.isatty()
+ got_tty = sys.stderr.isatty() or (sys.platform == 'win32' and term in ['xterm', 'msys'])
except AttributeError:
- got_tty = False
+ pass
import Utils
@@ -93,17 +94,17 @@ def format(self, rec):
return rec.c1+rec.msg+rec.c2
return logging.Formatter.format(self, rec)
-def debug(msg):
+def debug(*k, **kw):
if verbose:
- # FIXME why does it eat the newlines????
- msg = msg.replace('\n', ' ')
- logging.debug(msg)
+ k = list(k)
+ k[0] = k[0].replace('\n', ' ')
+ logging.debug(*k, **kw)
-def error(msg):
- logging.error(msg)
+def error(*k, **kw):
+ logging.error(*k, **kw)
if verbose > 1:
- if isinstance(msg, Utils.WafError):
- st = msg.stack
+ if isinstance(k[0], Utils.WafError):
+ st = k[0].stack
else:
st = traceback.extract_stack()
if st:
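debug() and error() now forward printf-style arguments to the logging module, so messages are only formatted when verbose output is enabled; most of the remaining hunks in this commit convert call sites to that form. An illustrative before/after, assuming wafadmin is on sys.path as it is inside a running waf:

    from Logs import debug

    variants = ['default', 'debug']
    debug('build: list of variants is %r' % variants)   # old style: the string is built unconditionally
    debug('build: list of variants is %r', variants)    # new style: logging formats it lazily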
45 tools/wafadmin/Node.py
@@ -30,8 +30,8 @@
"""
-import os, sys, fnmatch, re
-import Utils
+import os, sys, fnmatch, re, stat
+import Utils, Constants
UNDEFINED = 0
DIR = 1
@@ -256,7 +256,6 @@ def find_dir(self, lst):
return None
return current
- # FIXME: remove in waf 1.6 ?
def ensure_dir_node_from_path(self, lst):
"used very rarely, force the construction of a branch of node instance for representing folders"
@@ -278,7 +277,6 @@ def ensure_dir_node_from_path(self, lst):
current = self.__class__(name, prev, DIR)
return current
- # FIXME: remove in waf 1.6
def exclusive_build_node(self, path):
"""
create a hierarchy in the build dir (no source folders) for ill-behaving compilers
@@ -495,7 +493,7 @@ def suffix(self):
return self.name[k:]
def find_iter_impl(self, src=True, bld=True, dir=True, accept_name=None, is_prune=None, maxdepth=25):
- "find nodes in the filesystem hierarchy, try to instanciate the nodes passively"
+ """find nodes in the filesystem hierarchy, try to instanciate the nodes passively; same gotcha as ant_glob"""
bld_ctx = self.__class__.bld
bld_ctx.rescan(self)
for name in bld_ctx.cache_dir_contents[self.id]:
@@ -534,7 +532,7 @@ def find_iter_impl(self, src=True, bld=True, dir=True, accept_name=None, is_prun
raise StopIteration
def find_iter(self, in_pat=['*'], ex_pat=exclude_pats, prune_pat=prune_pats, src=True, bld=True, dir=False, maxdepth=25, flat=False):
- "find nodes recursively, this returns everything but folders by default"
+ """find nodes recursively, this returns everything but folders by default; same gotcha as ant_glob"""
if not (src or bld or dir):
raise StopIteration
@@ -568,9 +566,12 @@ def is_prune(node, name):
return ret
def ant_glob(self, *k, **kw):
+ """
+ known gotcha: will enumerate the files, but only if the folder exists in the source directory
+ """
src=kw.get('src', 1)
- bld=kw.get('bld', 1)
+ bld=kw.get('bld', 0)
dir=kw.get('dir', 0)
excl = kw.get('excl', exclude_regs)
incl = k and k[0] or kw.get('incl', '**')
@@ -655,6 +656,36 @@ def ant_iter(nodi, maxdepth=25, pats=[]):
return ret
+ def update_build_dir(self, env=None):
+
+ if not env:
+ for env in bld.all_envs:
+ self.update_build_dir(env)
+ return
+
+ path = self.abspath(env)
+
+ lst = Utils.listdir(path)
+ try:
+ self.__class__.bld.cache_dir_contents[self.id].update(lst)
+ except KeyError:
+ self.__class__.bld.cache_dir_contents[self.id] = set(lst)
+ self.__class__.bld.cache_scanned_folders[self.id] = True
+
+ for k in lst:
+ npath = path + os.sep + k
+ st = os.stat(npath)
+ if stat.S_ISREG(st[stat.ST_MODE]):
+ ick = self.find_or_declare(k)
+ if not (ick.id in self.__class__.bld.node_sigs[env.variant()]):
+ self.__class__.bld.node_sigs[env.variant()][ick.id] = Constants.SIG_NIL
+ elif stat.S_ISDIR(st[stat.ST_MODE]):
+ child = self.find_dir(k)
+ if not child:
+ child = self.ensure_dir_node_from_path(k)
+ child.update_build_dir(env)
+
+
class Nodu(Node):
pass
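ant_glob now defaults to bld=0 and documents its main gotcha: only folders present in the source tree are enumerated, unless update_build_dir() is called on the corresponding node. A hedged sketch; the pattern is arbitrary:

    def build(bld):
        # folders that exist only in the build directory are skipped unless
        # node.update_build_dir(bld.env) has been called on them first
        c_sources = bld.path.ant_glob('**/*.c')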
2  tools/wafadmin/Options.py
@@ -260,6 +260,8 @@ def tool_options(self, *k, **kw):
for tool in tools:
tool = tool.replace('++', 'xx')
+ if tool == 'java': tool = 'javaw'
+ if tool.lower() == 'unittest': tool = 'unittestw'
module = Utils.load_tool(tool, path)
try:
fun = module.set_options
28 tools/wafadmin/Runner.py
@@ -24,10 +24,12 @@ def run(*args, **kwargs):
threading.Thread.run = run
class TaskConsumer(threading.Thread):
- def __init__(self, m):
+ ready = Queue(0)
+ consumers = []
+
+ def __init__(self):
threading.Thread.__init__(self)
self.setDaemon(1)
- self.master = m
self.start()
def run(self):
@@ -37,9 +39,9 @@ def run(self):
pass
def loop(self):
- m = self.master
while 1:
- tsk = m.ready.get()
+ tsk = TaskConsumer.ready.get()
+ m = tsk.master
if m.stop:
m.out.put(tsk)
continue
@@ -98,16 +100,13 @@ def __init__(self, bld, j=2):
# tasks that are awaiting for another task to complete
self.frozen = []
- # tasks waiting to be run by the consumers
- self.ready = Queue(0)
+ # tasks returned by the consumers
self.out = Queue(0)
self.count = 0 # tasks not in the producer area
self.processed = 1 # progress indicator
- self.consumers = None # the consumer threads, created lazily
-
self.stop = False # error condition to stop the build
self.error = False # error flag
@@ -162,6 +161,12 @@ def error_handler(self, tsk):
def start(self):
"execute the tasks"
+ if TaskConsumer.consumers:
+ # the worker pool is usually loaded lazily (see below)
+ # in case it is re-used with a different value of numjobs:
+ while len(TaskConsumer.consumers) < self.numjobs:
+ TaskConsumer.consumers.append(TaskConsumer())
+
while not self.stop:
self.refill_task_list()
@@ -202,12 +207,13 @@ def start(self):
# run me: put the task in ready queue
tsk.position = (self.processed, self.total)
self.count += 1
- self.ready.put(tsk)
+ tsk.master = self
+ TaskConsumer.ready.put(tsk)
self.processed += 1
# create the consumer threads only if there is something to consume
- if not self.consumers:
- self.consumers = [TaskConsumer(self) for i in xrange(self.numjobs)]
+ if not TaskConsumer.consumers:
+ TaskConsumer.consumers = [TaskConsumer() for i in xrange(self.numjobs)]
# self.count represents the tasks that have been made available to the consumer threads
# collect all the tasks after an error else the message may be incomplete
28 tools/wafadmin/Scripting.py
@@ -201,19 +201,20 @@ def configure(conf):
src = getattr(Options.options, SRCDIR, None)
if not src: src = getattr(Utils.g_module, SRCDIR, None)
+ if not src: src = getattr(Utils.g_module, 'top', None)
if not src:
src = '.'
incomplete_src = 1
src = os.path.abspath(src)
bld = getattr(Options.options, BLDDIR, None)
- if not bld:
- bld = getattr(Utils.g_module, BLDDIR, None)
- if bld == '.':
- raise Utils.WafError('Setting blddir="." may cause distclean problems')
+ if not bld: bld = getattr(Utils.g_module, BLDDIR, None)
+ if not bld: bld = getattr(Utils.g_module, 'out', None)
if not bld:
bld = 'build'
incomplete_bld = 1
+ if bld == '.':
+ raise Utils.WafError('Setting blddir="." may cause distclean problems')
bld = os.path.abspath(bld)
try: os.makedirs(bld)
@@ -418,7 +419,7 @@ def dont_dist(name, src, build_dir):
if (name.startswith(',,')
or name.startswith('++')
- or name.startswith('.waf-1.')
+ or name.startswith('.waf')
or (src == '.' and name == Options.lockfile)
or name in excludes
or name == build_dir
@@ -451,6 +452,7 @@ def copytree(src, dst, build_dir):
# TODO in waf 1.6, change this method if "srcdir == blddir" is allowed
def distclean(ctx=None):
'''removes the build directory'''
+ global commands
lst = os.listdir('.')
for f in lst:
if f == Options.lockfile:
@@ -475,7 +477,7 @@ def distclean(ctx=None):
Logs.warn('file %r cannot be removed' % f)
# remove the local waf cache
- if f.startswith('.waf-'):
+ if not commands and f.startswith('.waf'):
shutil.rmtree(f, ignore_errors=True)
# FIXME waf 1.6 a unique ctx parameter, and remove the optional appname and version
@@ -488,7 +490,10 @@ def dist(appname='', version=''):
if not version: version = getattr(Utils.g_module, VERSION, '1.0')
tmp_folder = appname + '-' + version
- arch_name = tmp_folder+'.tar.'+g_gz
+ if g_gz in ['gz', 'bz2']:
+ arch_name = tmp_folder + '.tar.' + g_gz
+ else:
+ arch_name = tmp_folder + '.' + 'zip'
# remove the previous dir
try:
@@ -516,9 +521,12 @@ def dist(appname='', version=''):
# go back to the root directory
os.chdir(back)
- tar = tarfile.open(arch_name, 'w:' + g_gz)
- tar.add(tmp_folder)
- tar.close()
+ if g_gz in ['gz', 'bz2']:
+ tar = tarfile.open(arch_name, 'w:' + g_gz)
+ tar.add(tmp_folder)
+ tar.close()
+ else:
+ Utils.zip_folder(tmp_folder, arch_name, tmp_folder)
try: from hashlib import sha1 as sha
except ImportError: from sha import sha
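configure() now also accepts 'top' and 'out' as module-level aliases for srcdir/blddir (the names waf 1.6 will use), and the blddir='.' guard applies to whichever value ends up being chosen. A minimal hedged wscript header using the new names; the build body is a placeholder:

    top = '.'
    out = 'build'

    def configure(conf):
        conf.check_tool('compiler_cc')

    def build(bld):
        bld(features='cc cprogram', source='main.c', target='demo')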
165 tools/wafadmin/Task.py
@@ -42,7 +42,7 @@
"""
-import os, shutil, sys, re, random, datetime
+import os, shutil, sys, re, random, datetime, tempfile
from Utils import md5
import Build, Runner, Utils, Node, Logs, Options
from Logs import debug, warn, error
@@ -346,6 +346,7 @@ def __init__(cls, name, bases, dict):
if name.endswith('_task'):
name = name.replace('_task', '')
+ if name != 'TaskBase':
TaskBase.classes[name] = cls
class TaskBase(object):
@@ -502,9 +503,9 @@ class Task(TaskBase):
* persistence: do not re-execute tasks that have already run
* caching: same files can be saved and retrieved from a cache directory
* dependencies:
- implicit, like .c files depending on .h files
- explicit, like the input nodes or the dep_nodes
- environment variables, like the CXXFLAGS in self.env
+ implicit, like .c files depending on .h files
+ explicit, like the input nodes or the dep_nodes
+ environment variables, like the CXXFLAGS in self.env
"""
vars = []
def __init__(self, env, **kw):
@@ -612,18 +613,14 @@ def runnable_status(self):
bld = self.generator.bld
# first compute the signature
- try:
- new_sig = self.signature()
- except KeyError:
- debug("task: something is wrong, computing the task %r signature failed" % self)
- return RUN_ME
+ new_sig = self.signature()
# compare the signature to a signature computed previously
key = self.unique_id()
try:
prev_sig = bld.task_sigs[key][0]
except KeyError:
- debug("task: task %r must run as it was never run before or the task code changed" % self)
+ debug("task: task %r must run as it was never run before or the task code changed", self)
return RUN_ME
# compare the signatures of the outputs
@@ -633,7 +630,7 @@ def runnable_status(self):
if bld.node_sigs[variant][node.id] != new_sig:
return RUN_ME
except KeyError:
- debug("task: task %r must run as the output nodes do not exist" % self)
+ debug("task: task %r must run as the output nodes do not exist", self)
return RUN_ME
# debug if asked to
@@ -648,8 +645,8 @@ def post_run(self):
bld = self.generator.bld
env = self.env
sig = self.signature()
+ ssig = sig.encode('hex')
- cnt = 0
variant = env.variant()
for node in self.outputs:
# check if the node exists ..
@@ -662,35 +659,78 @@ def post_run(self):
# important, store the signature for the next run
bld.node_sigs[variant][node.id] = sig
+ bld.task_sigs[self.unique_id()] = self.cache_sig
- # We could re-create the signature of the task with the signature of the outputs
- # in practice, this means hashing the output files
- # this is unnecessary
- if Options.cache_global:
- ssig = sig.encode('hex')
- dest = os.path.join(Options.cache_global, '%s_%d_%s' % (ssig, cnt, node.name))
- try: shutil.copy2(node.abspath(env), dest)
- except IOError: warn('Could not write the file to the cache')
- cnt += 1
+ # file caching, if possible
+ # try to avoid data corruption as much as possible
+ if not Options.cache_global or Options.options.nocache or not self.outputs:
+ return None
- bld.task_sigs[self.unique_id()] = self.cache_sig
+ if getattr(self, 'cached', None):
+ return None
+
+ dname = os.path.join(Options.cache_global, ssig)
+ tmpdir = tempfile.mkdtemp(prefix=Options.cache_global)
+
+ try:
+ shutil.rmtree(dname)
+ except:
+ pass
+
+ try:
+ for node in self.outputs:
+ variant = node.variant(env)
+ dest = os.path.join(tmpdir, node.name)
+ shutil.copy2(node.abspath(env), dest)
+ except (OSError, IOError):
+ try:
+ shutil.rmtree(tmpdir)
+ except:
+ pass
+ else:
+ try:
+ os.rename(tmpdir, dname)
+ except OSError:
+ try:
+ shutil.rmtree(tmpdir)
+ except:
+ pass
+ else:
+ try:
+ os.chmod(dname, O755)
+ except:
+ pass
def can_retrieve_cache(self):
- """Retrieve build nodes from the cache - the file time stamps are updated
- for cleaning the least used files from the cache dir - be careful when overridding"""
- if not Options.cache_global: return None
- if Options.options.nocache: return None
- if not self.outputs: return None
+ """
+ Retrieve build nodes from the cache
+ update the file timestamps to help cleaning the least used entries from the cache
+ additionally, set an attribute 'cached' to avoid re-creating the same cache files
+
+ suppose there are files in cache/dir1/file1 and cache/dir2/file2
+ first, read the timestamp of dir1
+ then try to copy the files
+ then look at the timestamp again, if it has changed, the data may have been corrupt (cache update by another process)
+ should an exception occur, ignore the data
+ """
+ if not Options.cache_global or Options.options.nocache or not self.outputs:
+ return None
env = self.env
sig = self.signature()
+ ssig = sig.encode('hex')
+
+ # first try to access the cache folder for the task
+ dname = os.path.join(Options.cache_global, ssig)
+ try:
+ t1 = os.stat(dname).st_mtime
+ except OSError:
+ return None
- cnt = 0
for node in self.outputs:
variant = node.variant(env)
- ssig = sig.encode('hex')
- orig = os.path.join(Options.cache_global, '%s_%d_%s' % (ssig, cnt, node.name))
+ orig = os.path.join(dname, node.name)
try:
shutil.copy2(orig, node.abspath(env))
# mark the cache file as used recently (modified)
@@ -698,13 +738,21 @@ def can_retrieve_cache(self):
except (OSError, IOError):
debug('task: failed retrieving file')
return None
- else:
- cnt += 1
+
+ # is it the same folder?
+ try:
+ t2 = os.stat(dname).st_mtime
+ except OSError:
+ return None
+
+ if t1 != t2:
+ return None
for node in self.outputs:
self.generator.bld.node_sigs[variant][node.id] = sig
self.generator.bld.printout('restoring from cache %r\n' % node.bldpath(env))
+ self.cached = True
return 1
def debug_why(self, old_sigs):
@@ -714,12 +762,12 @@ def debug_why(self, old_sigs):
def v(x):
return x.encode('hex')
- debug("Task %r" % self)
+ debug("Task %r", self)
msgs = ['Task must run', '* Source file or manual dependency', '* Implicit dependency', '* Environment variable']
tmp = 'task: -> %s: %s %s'
for x in xrange(len(msgs)):
if (new_sigs[x] != old_sigs[x]):
- debug(tmp % (msgs[x], v(old_sigs[x]), v(new_sigs[x])))
+ debug(tmp, msgs[x], v(old_sigs[x]), v(new_sigs[x]))
def sig_explicit_deps(self):
bld = self.generator.bld
@@ -731,7 +779,10 @@ def sig_explicit_deps(self):
bld.rescan(x.parent)
variant = x.variant(self.env)
- m.update(bld.node_sigs[variant][x.id])
+ try:
+ m.update(bld.node_sigs[variant][x.id])
+ except KeyError:
+ raise Utils.WafError('Missing node signature for %r (required by %r)' % (x, self))
# manual dependencies, they can slow down the builds
if bld.deps_man:
@@ -748,8 +799,8 @@ def sig_explicit_deps(self):
variant = v.variant(self.env)
try:
v = bld.node_sigs[variant][v.id]
- except KeyError: # make it fatal?
- v = ''
+ except KeyError:
+ raise Utils.WafError('Missing node signature for %r (required by %r)' % (v, self))
elif hasattr(v, '__call__'):
v = v() # dependency is a function, call it
m.update(v)
@@ -805,14 +856,21 @@ def sig_implicit_deps(self):
# no previous run or the signature of the dependencies has changed, rescan the dependencies
(nodes, names) = self.scan()
if Logs.verbose:
- debug('deps: scanner for %s returned %s %s' % (str(self), str(nodes), str(names)))
+ debug('deps: scanner for %s returned %s %s', str(self), str(nodes), str(names))
# store the dependencies in the cache
bld.node_deps[key] = nodes
bld.raw_deps[key] = names
# recompute the signature and return it
- sig = self.compute_sig_implicit_deps()
+ try:
+ sig = self.compute_sig_implicit_deps()
+ except KeyError:
+ try:
+ nodes = bld.node_deps.get(self.unique_id(), [])
+ except:
+ nodes = '?'
+ raise Utils.WafError('Missing node signature for %r (for implicit dependencies %r)' % (nodes, self))
return sig
@@ -855,7 +913,7 @@ def compile_fun_shell(name, line):
The reserved keywords TGT and SRC represent the task input and output nodes
quick test:
- bld.new_task_gen(source='wscript', rule='echo "foo\\${SRC[0].name}\\bar"')
+ bld(source='wscript', rule='echo "foo\\${SRC[0].name}\\bar"')
"""
extr = []
@@ -886,7 +944,7 @@ def repl(match):
c = COMPILE_TEMPLATE_SHELL % (line, parm)
- debug('action: %s' % c)
+ debug('action: %s', c)
return (funex(c), dvars)
def compile_fun_noshell(name, line):
@@ -924,7 +982,7 @@ def repl(match):
app("lst.extend(%r)" % params[-1].split())
fun = COMPILE_TEMPLATE_NOSHELL % "\n\t".join(buf)
- debug('action: %s' % fun)
+ debug('action: %s', fun)
return (funex(fun), dvars)
def compile_fun(name, line, shell=None):
@@ -992,10 +1050,29 @@ def update_outputs(cls):
def post_run(self):
old_post_run(self)
bld = self.outputs[0].__class__.bld
- bld.node_sigs[self.env.variant()][self.outputs[0].id] = \
- Utils.h_file(self.outputs[0].abspath(self.env))
+ for output in self.outputs:
+ bld.node_sigs[self.env.variant()][output.id] = Utils.h_file(output.abspath(self.env))
cls.post_run = post_run
+ old_runnable_status = cls.runnable_status
+ def runnable_status(self):
+ status = old_runnable_status(self)
+ if status != RUN_ME:
+ return status
+
+ try:
+ bld = self.outputs[0].__class__.bld
+ new_sig = self.signature()
+ prev_sig = bld.task_sigs[self.unique_id()][0]
+ if prev_sig == new_sig:
+ return SKIP_ME
+ except KeyError:
+ pass
+ except IndexError:
+ pass
+ return RUN_ME
+ cls.runnable_status = runnable_status
+
def extract_outputs(tasks):
"""file_deps: Infer additional dependencies from task input and output nodes
"""
27 tools/wafadmin/TaskGen.py
@@ -196,13 +196,13 @@ def apply(self):
self.meths = out
# then we run the methods in order
- debug('task_gen: posting %s %d' % (self, id(self)))
+ debug('task_gen: posting %s %d', self, id(self))
for x in out:
try:
v = getattr(self, x)
except AttributeError:
raise Utils.WafError("tried to retrieve %s which is not a valid method" % x)
- debug('task_gen: -> %s (%d)' % (x, id(self)))
+ debug('task_gen: -> %s (%d)', x, id(self))
v()
def post(self):
@@ -217,7 +217,7 @@ def post(self):
#error("OBJECT ALREADY POSTED" + str( self))
return
self.apply()
- debug('task_gen: posted %s' % self.name)
+ debug('task_gen: posted %s', self.name)
self.posted = True
def get_hook(self, ext):
@@ -342,7 +342,7 @@ def declare_order(*k):
if not f1 in task_gen.prec[f2]:
task_gen.prec[f2].append(f1)
-def declare_chain(name='', action='', ext_in='', ext_out='', reentrant=1, color='BLUE',
+def declare_chain(name='', action='', ext_in='', ext_out='', reentrant=True, color='BLUE',
install=0, before=[], after=[], decider=None, rule=None, scan=None):
"""
see Tools/flex.py for an example
@@ -363,7 +363,7 @@ def declare_chain(name='', action='', ext_in='', ext_out='', reentrant=1, color=
def x_file(self, node):
if decider:
ext = decider(self, node)
- elif isinstance(ext_out, str):
+ else:
ext = ext_out
if isinstance(ext, str):
@@ -373,7 +373,7 @@ def x_file(self, node):
elif isinstance(ext, list):
out_source = [node.change_ext(x) for x in ext]
if reentrant:
- for i in xrange(reentrant):
+ for i in xrange((reentrant is True) and len(out_source) or reentrant):
self.allnodes.append(out_source[i])
else:
# XXX: useless: it will fail on Utils.to_list above...
@@ -405,6 +405,7 @@ def sayHi(self):
"""
def taskgen(func):
setattr(task_gen, func.__name__, func)
+ return func
def feature(*k):
def deco(func):
@@ -502,6 +503,8 @@ def exec_rule(self):
# create the task class
name = getattr(self, 'name', None) or self.target or self.rule
+ if not isinstance(name, str):
+ name = str(self.idx)
cls = Task.task_type_from_func(name, func, vars)
# now create one instance
@@ -524,9 +527,6 @@ def exec_rule(self):
raise Utils.WafError('input file %r could not be found (%r)' % (x, self.path.abspath()))
tsk.inputs.append(y)
- if getattr(self, 'always', None):
- Task.always_run(cls)
-
if getattr(self, 'scan', None):
cls.scan = self.scan
@@ -539,7 +539,10 @@ def exec_rule(self):
if getattr(self, 'on_results', None):
Task.update_outputs(cls)
- for x in ['after', 'before']:
+ if getattr(self, 'always', None):
+ Task.always_run(cls)
+
+ for x in ['after', 'before', 'ext_in', 'ext_out']:
setattr(cls, x, getattr(self, x, []))
feature('*')(exec_rule)
before('apply_core')(exec_rule)
@@ -552,8 +555,8 @@ def sequence_order(self):
there is also an awesome trick for executing the method in last position
to use:
- bld.new_task_gen(features='javac seq')
- bld.new_task_gen(features='jar seq')
+ bld(features='javac seq')
+ bld(features='jar seq')
to start a new sequence, set the attribute seq_start, for example:
obj.seq_start = True
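declare_chain's reentrant parameter is now a boolean (True feeds every produced file back into the source list), and ext_in/ext_out are copied onto the task class. A hedged sketch modelled on the lua tool below; the ${FOO} variable, extensions and rule are invented:

    import TaskGen

    TaskGen.declare_chain(
        name='foo',
        rule='${FOO} -o ${TGT} ${SRC}',
        ext_in='.foo',
        ext_out='.bar',
        reentrant=False,   # generated .bar files are not re-processed as sources
    )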
2  tools/wafadmin/Tools/ar.py
@@ -10,7 +10,7 @@
from Configure import conftest
ar_str = '${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'
-cls = Task.simple_task_type('static_link', ar_str, color='YELLOW', ext_in='.o', shell=False)
+cls = Task.simple_task_type('static_link', ar_str, color='YELLOW', ext_in='.o', ext_out='.bin', shell=False)
cls.maxjobs = 1
cls.install = Utils.nada
11 tools/wafadmin/Tools/ccroot.py
@@ -132,7 +132,7 @@ def scan(self):
node = self.inputs[0]
(nodes, names) = preproc.get_deps(node, self.env, nodepaths = self.env['INC_PATHS'])
if Logs.verbose:
- debug('deps: deps for %s: %r; unresolved %r' % (str(node), nodes, names))
+ debug('deps: deps for %s: %r; unresolved %r', str(node), nodes, names)
return (nodes, names)
all_nodes = []
@@ -141,7 +141,7 @@ def scan(self):
for node in self.inputs:
(nodes, names) = preproc.get_deps(node, self.env, nodepaths = self.env['INC_PATHS'])
if Logs.verbose:
- debug('deps: deps for %s: %r; unresolved %r' % (str(node), nodes, names))
+ debug('deps: deps for %s: %r; unresolved %r', str(node), nodes, names)
for x in nodes:
if id(x) in seen: continue
seen.add(id(x))
@@ -209,7 +209,7 @@ def default_cc(self):
@feature('cprogram', 'dprogram', 'cstaticlib', 'dstaticlib', 'cshlib', 'dshlib')
def apply_verif(self):
"""no particular order, used for diagnostic"""
- if not (self.source or getattr(self, 'add_objects', None)):
+ if not (self.source or getattr(self, 'add_objects', None) or getattr(self, 'uselib_local', None)):
raise Utils.WafError('no source files specified for %s' % self)
if not self.target:
raise Utils.WafError('no target for %s' % self)
@@ -329,10 +329,13 @@ def apply_link(self):
self.link_task = tsk
@feature('cc', 'cxx')
-@after('apply_link', 'init_cc', 'init_cxx')
+@after('apply_link', 'init_cc', 'init_cxx', 'apply_core')
def apply_lib_vars(self):
"""after apply_link because of 'link_task'
after default_cc because of the attribute 'uselib'"""
+
+ # after 'apply_core' in case if 'cc' if there is no link
+
env = self.env
# 1. the case of the libs defined in the project (visit ancestors first)
17 tools/wafadmin/Tools/compiler_cc.py
@@ -11,7 +11,7 @@
'win32': ['msvc', 'gcc'],
'cygwin': ['gcc'],
'darwin': ['gcc'],
- 'aix5': ['gcc'],
+ 'aix': ['xlc', 'gcc'],
'linux': ['gcc', 'icc', 'suncc'],
'sunos': ['gcc', 'suncc'],
'irix': ['gcc'],
@@ -33,17 +33,24 @@ def detect(conf):
"""
try: test_for_compiler = Options.options.check_c_compiler
except AttributeError: conf.fatal("Add set_options(opt): opt.tool_options('compiler_cc')")
+ orig = conf.env
for compiler in test_for_compiler.split():
+ conf.env = orig.copy()
try:
conf.check_tool(compiler)
except Configure.ConfigurationError, e:
debug('compiler_cc: %r' % e)
else:
if conf.env['CC']:
+ orig.table = conf.env.get_merged_dict()
+ conf.env = orig
conf.check_message(compiler, '', True)
conf.env['COMPILER_CC'] = compiler
break
conf.check_message(compiler, '', False)
+ break
+ else:
+ conf.fatal('could not configure a c compiler!')
def set_options(opt):
build_platform = Utils.unversioned_sys_platform()
@@ -57,11 +64,3 @@ def set_options(opt):
for c_compiler in test_for_compiler.split():
opt.tool_options('%s' % c_compiler, option_group=cc_compiler_opts)
- """opt.add_option('-d', '--debug-level',
- action = 'store',
- default = ccroot.DEBUG_LEVELS.RELEASE,
- help = "Specify the debug level, does nothing if CFLAGS is set in the environment. [Allowed Values: '%s']" % "', '".join(ccroot.DEBUG_LEVELS.ALL),
- choices = ccroot.DEBUG_LEVELS.ALL,
- dest = 'debug_level')"""
-
-
16 tools/wafadmin/Tools/compiler_cxx.py
@@ -11,7 +11,7 @@
'win32': ['msvc', 'g++'],
'cygwin': ['g++'],
'darwin': ['g++'],
-'aix': ['g++'],
+'aix': ['xlc++', 'g++'],
'linux': ['g++', 'icpc', 'sunc++'],
'sunos': ['g++', 'sunc++'],
'irix': ['g++'],
@@ -28,17 +28,24 @@ def __list_possible_compiler(platform):
def detect(conf):
try: test_for_compiler = Options.options.check_cxx_compiler
except AttributeError: raise Configure.ConfigurationError("Add set_options(opt): opt.tool_options('compiler_cxx')")
+ orig = conf.env
for compiler in test_for_compiler.split():
try:
+ conf.env = orig.copy()
conf.check_tool(compiler)
except Configure.ConfigurationError, e:
debug('compiler_cxx: %r' % e)
else:
if conf.env['CXX']:
+ orig.table = conf.env.get_merged_dict()
+ conf.env = orig
conf.check_message(compiler, '', True)
conf.env['COMPILER_CXX'] = compiler
break
conf.check_message(compiler, '', False)
+ break
+ else:
+ conf.fatal('could not configure a cxx compiler!')
def set_options(opt):
build_platform = Utils.unversioned_sys_platform()
@@ -52,10 +59,3 @@ def set_options(opt):
for cxx_compiler in test_for_compiler.split():
opt.tool_options('%s' % cxx_compiler, option_group=cxx_compiler_opts)
- """opt.add_option('-d', '--debug-level',
- action = 'store',
- default = ccroot.DEBUG_LEVELS.RELEASE,
- help = "Specify the debug level, does nothing if CXXFLAGS is set in the environment. [Allowed Values: '%s']" % "', '".join(ccroot.DEBUG_LEVELS.ALL),
- choices = ccroot.DEBUG_LEVELS.ALL,
- dest = 'debug_level')"""
-
94 tools/wafadmin/Tools/config_c.py
@@ -100,6 +100,11 @@ def validate_cfg(self, kw):
if 'modversion' in kw:
return
+ if 'variables' in kw:
+ if not 'msg' in kw:
+ kw['msg'] = 'Checking for %s variables' % kw['package']
+ return
+
# checking for the version of a module, for the moment, one thing at a time
for x in cfg_ver.keys():
y = x.replace('-', '_')
@@ -112,7 +117,7 @@ def validate_cfg(self, kw):
return
if not 'msg' in kw:
- kw['msg'] = 'Checking for %s' % kw['package']
+ kw['msg'] = 'Checking for %s' % (kw['package'] or kw['path'])
if not 'okmsg' in kw:
kw['okmsg'] = 'ok'
if not 'errmsg' in kw:
@@ -121,22 +126,31 @@ def validate_cfg(self, kw):
@conf
def cmd_and_log(self, cmd, kw):
Logs.debug('runner: %s\n' % cmd)
- if self.log: self.log.write('%s\n' % cmd)
+ if self.log:
+ self.log.write('%s\n' % cmd)
try:
- p = Utils.pproc.Popen(cmd, stdout=Utils.pproc.PIPE, shell=True)
- output = p.communicate()[0]
- except OSError:
- self.fatal('fail')
+ p = Utils.pproc.Popen(cmd, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE, shell=True)
+ (out, err) = p.communicate()
+ except OSError, e:
+ self.log.write('error %r' % e)
+ self.fatal(str(e))
+
+ out = str(out)
+ err = str(err)
+
+ if self.log:
+ self.log.write(out)
+ self.log.write(err)
if p.returncode:
if not kw.get('errmsg', ''):
if kw.get('mandatory', False):
- kw['errmsg'] = output.strip()
+ kw['errmsg'] = out.strip()
else:
kw['errmsg'] = 'fail'
self.fatal('fail')
- return output
+ return out
@conf
def exec_cfg(self, kw):
@@ -165,6 +179,18 @@ def exec_cfg(self, kw):
self.define('%s_VERSION' % Utils.quote_define_name(kw.get('uselib_store', kw['modversion'])), version)
return version
+ # retrieving variables of a module
+ if 'variables' in kw:
+ env = kw.get('env', self.env)
+ uselib = kw.get('uselib_store', kw['package'].upper())
+ vars = Utils.to_list(kw['variables'])
+ for v in vars:
+ val = self.cmd_and_log('%s --variable=%s %s' % (kw['path'], v, kw['package']), kw).strip()
+ env.append_unique('%s_%s' % (uselib, v), val)
+ if not 'okmsg' in kw:
+ kw['okmsg'] = 'ok'
+ return
+
lst = [kw['path']]
for key, val in kw.get('define_variable', {}).iteritems():
lst.append('--define-variable=%s=%s' % (key, val))
@@ -184,6 +210,12 @@ def exec_cfg(self, kw):
@conf
def check_cfg(self, *k, **kw):
+ """
+ for pkg-config mostly, but also all the -config tools
+ conf.check_cfg(path='mpicc', args='--showme:compile --showme:link', package='', uselib_store='OPEN_MPI')
+ conf.check_cfg(package='dbus-1', variables='system_bus_default_address session_bus_services_dir')
+ """
+
self.validate_cfg(kw)
if 'msg' in kw:
self.check_message_1(kw['msg'])
@@ -213,7 +245,7 @@ def check_cfg(self, *k, **kw):
# env: an optional environment (modified -> provide a copy)
# compiler: cc or cxx - it tries to guess what is best
-# type: program, shlib, staticlib, objects
+# type: cprogram, cshlib, cstaticlib
# code: a c code to execute
# uselib_store: where to add the variables
# uselib: parameters to use for building
@@ -364,23 +396,26 @@ def to_header(dct):
def post_check(self, *k, **kw):
"set the variables after a test was run successfully"
- is_success = 0
+ is_success = False
if kw['execute']:
if kw['success']:
- is_success = kw['success']
+ is_success = True
else:
is_success = (kw['success'] == 0)
- def define_or_stuff():
- nm = kw['define_name']
- if kw['execute'] and kw.get('define_ret', None) and isinstance(is_success, str):
- self.define(kw['define_name'], is_success, quote=kw.get('quote', 1))
- else:
- self.define_cond(kw['define_name'], is_success)
-
if 'define_name' in kw:
if 'header_name' in kw or 'function_name' in kw or 'type_name' in kw or 'fragment' in kw:
- define_or_stuff()
+ if kw['execute']:
+ key = kw['success']
+ if isinstance(key, str):
+ if key:
+ self.define(kw['define_name'], key, quote=kw.get('quote', 1))
+ else:
+ self.define_cond(kw['define_name'], True)
+ else:
+ self.define_cond(kw['define_name'], False)
+ else:
+ self.define_cond(kw['define_name'], is_success)
if is_success and 'uselib_store' in kw:
import cc, cxx
@@ -478,7 +513,7 @@ def run_c_code(self, *k, **kw):
bld.rescan(bld.srcnode)
- o = bld.new_task_gen(features=[kw['compile_mode'], kw['type']], source=test_f_name, target='testprog')
+ o = bld(features=[kw['compile_mode'], kw['type']], source=test_f_name, target='testprog')
for k, v in kw.iteritems():
setattr(o, k, v)
@@ -507,11 +542,18 @@ def run_c_code(self, *k, **kw):
# if we need to run the program, try to get its result
if kw['execute']:
args = Utils.to_list(kw.get('exec_args', []))
- try:
- data = Utils.cmd_output([lastprog] + args).strip()
- except ValueError, e:
+ proc = Utils.pproc.Popen([lastprog], *args, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE)
+ (out, err) = proc.communicate()
+ w = self.log.write
+ w(str(out))
+ w('\n')
+ w(str(err))
+ w('\n')
+ w('returncode %r' % proc.returncode)
+ w('\n')
+ if proc.returncode:
self.fatal(Utils.ex_stack())
- ret = data
+ ret = out
return ret
@@ -540,7 +582,7 @@ def define(self, define, value, quote=1):
# the user forgot to tell if the value is quoted or not
if isinstance(value, str):
if quote:
- tbl[define] = '"%s"' % str(value)
+ tbl[define] = '"%s"' % repr('"'+value)[2:-1].replace('"', '\\"')
else:
tbl[define] = value
elif isinstance(value, int):
@@ -643,8 +685,6 @@ def get_config_header(self):
config_header.append('#define %s' % key)
elif value is UNDEFINED:
config_header.append('/* #undef %s */' % key)
- elif isinstance(value, str):
- config_header.append('#define %s %s' % (key, repr(value)[1:-1]))
else:
config_header.append('#define %s %s' % (key, value))
return "\n".join(config_header)
7 tools/wafadmin/Tools/gas.py
@@ -23,14 +23,13 @@ def asm_hook(self, node):
self.compiled_tasks.append(task)
self.meths.append('asm_incflags')
-@taskgen
@after('apply_obj_vars_cc')
@after('apply_obj_vars_cxx')
@before('apply_link')
def asm_incflags(self):
- if self.env['ASINCFLAGS']: self.env['_ASINCFLAGS'] = self.env['ASINCFLAGS']
- if 'cxx' in self.features: self.env['_ASINCFLAGS'] = self.env['_CXXINCFLAGS']
- else: self.env['_ASINCFLAGS'] = self.env['_CCINCFLAGS']
+ self.env.append_value('_ASINCFLAGS', self.env.ASINCFLAGS)
+ var = ('cxx' in self.features) and 'CXX' or 'CC'
+ self.env.append_value('_ASINCFLAGS', self.env['_%sINCFLAGS' % var])
def detect(conf):
conf.find_program(['gas', 'as'], var='AS')
29 tools/wafadmin/Tools/gnome.py
@@ -66,13 +66,18 @@ def apply_gnome_doc(self):
self.env['APPNAME'] = self.doc_module
lst = self.to_list(self.doc_linguas)
bld = self.bld
+ lst.append('C')
+
for x in lst:
- tsk = self.create_task('xml2po')
- node = self.path.find_resource(x+'/'+x+'.po')
- src = self.path.find_resource('C/%s.xml' % self.doc_module)
- out = self.path.find_or_declare('%s/%s.xml' % (x, self.doc_module))
- tsk.set_inputs([node, src])
- tsk.set_outputs(out)
+ if not x == 'C':
+ tsk = self.create_task('xml2po')
+ node = self.path.find_resource(x+'/'+x+'.po')
+ src = self.path.find_resource('C/%s.xml' % self.doc_module)
+ out = self.path.find_or_declare('%s/%s.xml' % (x, self.doc_module))
+ tsk.set_inputs([node, src])
+ tsk.set_outputs(out)
+ else:
+ out = self.path.find_resource('%s/%s.xml' % (x, self.doc_module))
tsk2 = self.create_task('xsltproc2po')
out2 = self.path.find_or_declare('%s/%s-%s.omf' % (x, self.doc_module, x))
@@ -83,8 +88,8 @@ def apply_gnome_doc(self):
tsk2.run_after.append(tsk)
if bld.is_install:
- path = self.install_path + 'gnome/help/%s/%s' % (self.doc_module, x)
- bld.install_files(self.install_path + 'omf', out2, env=self.env)
+ path = self.install_path + '/gnome/help/%s/%s' % (self.doc_module, x)
+ bld.install_files(self.install_path + '/omf', out2, env=self.env)
for y in self.to_list(self.doc_figures):
try:
os.stat(self.path.abspath() + '/' + x + '/' + y)
@@ -92,6 +97,12 @@ def apply_gnome_doc(self):
except:
bld.install_as(path + '/' + y, self.path.abspath() + '/C/' + y)
bld.install_as(path + '/%s.xml' % self.doc_module, out.abspath(self.env))
+ if x == 'C':
+ xmls = self.to_list(self.doc_includes)
+ xmls.append(self.doc_entities)
+ for z in xmls:
+ out = self.path.find_resource('%s/%s' % (x, z))
+ bld.install_as(path + '/%s' % z, out.abspath(self.env))
# OBSOLETE
class xml_to_taskgen(TaskGen.task_gen):
@@ -180,7 +191,7 @@ def install_result(task):
xslt_magic = """${XSLTPROC2PO} -o ${TGT[0].abspath(env)} \
--stringparam db2omf.basename ${APPNAME} \
--stringparam db2omf.format docbook \
---stringparam db2omf.lang C \
+--stringparam db2omf.lang ${TGT[0].abspath(env)[:-4].split('-')[-1]} \
--stringparam db2omf.dtd '-//OASIS//DTD DocBook XML V4.3//EN' \
--stringparam db2omf.omf_dir ${PREFIX}/share/omf \
--stringparam db2omf.help_dir ${PREFIX}/share/gnome/help \
5 tools/wafadmin/Tools/intltool.py
@@ -12,11 +12,10 @@
"""
Usage:
-bld.new_task_gen(features='intltool_in', source='a.po b.po', podir='po', cache='.intlcache', flags='')
-
-
+bld(features='intltool_in', source='a.po b.po', podir='po', cache='.intlcache', flags='')
"""
+
class intltool_in_taskgen(TaskGen.task_gen):
"""deprecated"""
def __init__(self, *k, **kw):
11 tools/wafadmin/Tools/javaw.py
@@ -123,16 +123,15 @@ def apply_java(self):
tsk.env.append_value('JAVACFLAGS', ['-sourcepath', names])
if self.jarname:
- tsk = self.create_task('jar_create')
- tsk.set_inputs(bld_nodes)
- tsk.set_outputs(self.path.find_or_declare(self.jarname))
+ jtsk = self.create_task('jar_create', bld_nodes, self.path.find_or_declare(self.jarname))
+ jtsk.set_run_after(tsk)
- if not self.env['JAROPTS']:
+ if not self.env.JAROPTS:
if self.jaropts:
- self.env['JAROPTS'] = self.jaropts
+ self.env.JAROPTS = self.jaropts
else:
dirs = '.'
- self.env['JAROPTS'] = ['-C', ''.join(self.env['OUTDIR']), dirs]
+ self.env.JAROPTS = ['-C', ''.join(self.env['OUTDIR']), dirs]
Task.simple_task_type('jar_create', '${JAR} ${JARCREATE} ${TGT} ${JAROPTS}', color='GREEN')
cls = Task.simple_task_type('javac', '${JAVAC} -classpath ${CLASSPATH} -d ${OUTDIR} ${JAVACFLAGS} ${SRC}')
2  tools/wafadmin/Tools/lua.py
@@ -12,7 +12,7 @@
rule = '${LUAC} -s -o ${TGT} ${SRC}',
ext_in = '.lua',
ext_out = '.luac',
- reentrant = 0,
+ reentrant = False,
install = 'LUADIR', # env variable
)
1  tools/wafadmin/Tools/misc.py
@@ -72,6 +72,7 @@ def apply_copy(self):
tsk = self.create_task('copy', node, newnode)
tsk.fun = self.fun
tsk.chmod = self.chmod
+ tsk.install_path = self.install_path
if not tsk.env:
tsk.debug()
186 tools/wafadmin/Tools/msvc.py
@@ -80,15 +80,15 @@ def setup_msvc(conf, versions):
for target in platforms:
try:
arch,(p1,p2,p3) = targets[target]
- compiler,version = version.split()
- return compiler,p1,p2,p3
+ compiler,revision = version.split()
+ return compiler,revision,p1,p2,p3
except KeyError: continue
except KeyError: continue
conf.fatal('msvc: Impossible to find a valid architecture for building (in setup_msvc)')
@conf
def get_msvc_version(conf, compiler, version, target, vcvars):
- debug('msvc: get_msvc_version: ' + compiler + ' ' + version + ' ' + target + ' ...')
+ debug('msvc: get_msvc_version: %r %r %r', compiler, version, target)
batfile = os.path.join(conf.blddir, 'waf-print-msvc.bat')
f = open(batfile, 'w')
f.write("""@echo off
@@ -107,7 +107,7 @@ def get_msvc_version(conf, compiler, version, target, vcvars):
if lines[0].find(x) != -1:
break
else:
- debug('msvc: get_msvc_version: %r %r %r -> not found' % (compiler, version, target))
+ debug('msvc: get_msvc_version: %r %r %r -> not found', compiler, version, target)
conf.fatal('msvc: Impossible to find a valid architecture for building (in get_msvc_version)')
for line in lines[1:]:
@@ -136,11 +136,11 @@ def get_msvc_version(conf, compiler, version, target, vcvars):
if p.returncode != 0:
raise Exception('return code: %r: %r' % (p.returncode, err))
except Exception, e:
- debug('msvc: get_msvc_version: %r %r %r -> failure' % (compiler, version, target))
+ debug('msvc: get_msvc_version: %r %r %r -> failure', compiler, version, target)
debug(str(e))
conf.fatal('msvc: cannot run the compiler (in get_msvc_version)')
else:
- debug('msvc: get_msvc_version: %r %r %r -> OK' % (compiler, version, target))
+ debug('msvc: get_msvc_version: %r %r %r -> OK', compiler, version, target)
return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
@@ -297,12 +297,13 @@ def gather_icl_versions(conf, versions):
@conf
def get_msvc_versions(conf):
- if not conf.env['MSVC_INSTALLED_VERSIONS']:
- conf.env['MSVC_INSTALLED_VERSIONS'] = []
- conf.gather_msvc_versions(conf.env['MSVC_INSTALLED_VERSIONS'])
- conf.gather_wsdk_versions(conf.env['MSVC_INSTALLED_VERSIONS'])
- conf.gather_icl_versions(conf.env['MSVC_INSTALLED_VERSIONS'])
- return conf.env['MSVC_INSTALLED_VERSIONS']
+ if not conf.env.MSVC_INSTALLED_VERSIONS:
+ lst = []
+ conf.gather_msvc_versions(lst)
+ conf.gather_wsdk_versions(lst)
+ conf.gather_icl_versions(lst)
+ conf.env.MSVC_INSTALLED_VERSIONS = lst
+ return conf.env.MSVC_INSTALLED_VERSIONS
@conf
def print_all_msvc_detected(conf):
@@ -401,7 +402,7 @@ def libname_msvc(self, libname, is_static=False, mandatory=False):
for path in _libpaths:
for libn in libnames:
if os.path.exists(os.path.join(path, libn)):
- debug('msvc: lib found: %s' % os.path.join(path,libn))
+ debug('msvc: lib found: %s', os.path.join(path,libn))
return re.sub('\.lib$', '',libn)
#if no lib can be found, just return the libname as msvc expects it
@@ -449,7 +450,7 @@ def no_autodetect(conf):
@conftest
def autodetect(conf):
v = conf.env
- compiler, path, includes, libdirs = detect_msvc(conf)
+ compiler, version, path, includes, libdirs = detect_msvc(conf)
v['PATH'] = path
v['CPPPATH'] = includes
v['LIBPATH'] = libdirs
@@ -475,19 +476,16 @@ def find_msvc(conf):
v = conf.env
- compiler, path, includes, libdirs = detect_msvc(conf)
- v['PATH'] = path
- v['CPPPATH'] = includes
- v['LIBPATH'] = libdirs
+ compiler, version, path, includes, libdirs = detect_msvc(conf)
compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
+ has_msvc_manifest = (compiler == 'msvc' and float(version) >= 8) or (compiler == 'wsdk' and float(version) >= 6) or (compiler == 'intel' and float(version) >= 11)
# compiler
cxx = None
- if v['CXX']: cxx = v['CXX']
+ if v.CXX: cxx = v.CXX
elif 'CXX' in conf.environ: cxx = conf.environ['CXX']
- if not cxx: cxx = conf.find_program(compiler_name, var='CXX', path_list=path)
- if not cxx: conf.fatal('%s was not found (compiler)' % compiler_name)
+ if not cxx: cxx = conf.find_program(compiler_name, var='CXX', path_list=path, mandatory=True)
cxx = conf.cmd_to_list(cxx)
# before setting anything, check if the compiler is really msvc
@@ -496,43 +494,50 @@ def find_msvc(conf):
if not Utils.cmd_output([cxx, '/nologo', '/?'], silent=True, env=env):
conf.fatal('the msvc compiler could not be identified')
- # c/c++ compiler
- v['CC'] = v['CXX'] = cxx
- v['CC_NAME'] = v['CXX_NAME'] = 'msvc'
+ link = v.LINK_CXX
+ if not link:
+ link = conf.find_program(linker_name, path_list=path, mandatory=True)
+ ar = v.AR
+ if not ar:
+ ar = conf.find_program(lib_name, path_list=path, mandatory=True)
- # environment flags
- try: v.prepend_value('CPPPATH', conf.environ['INCLUDE'])
- except KeyError: pass
- try: v.prepend_value('LIBPATH', conf.environ['LIB'])
- except KeyError: pass
+ # manifest tool. Not required for VS 2003 and below. Must have for VS 2005 and later
+ mt = v.MT
+ if has_msvc_manifest:
+ mt = conf.find_program('MT', path_list=path, mandatory=True)
- # linker
- if not v['LINK_CXX']:
- link = conf.find_program(linker_name, path_list=path)
- if link: v['LINK_CXX'] = link
- else: conf.fatal('%s was not found (linker)' % linker_name)
- v['LINK'] = link
+ # no more possibility of failure means the data state will be consistent
+ # we may store the data safely now
- if not v['LINK_CC']: v['LINK_CC'] = v['LINK_CXX']
+ v.MSVC_MANIFEST = has_msvc_manifest
+ v.PATH = path
+ v.CPPPATH = includes
+ v.LIBPATH = libdirs
- # staticlib linker
- if not v['AR']:
- stliblink = conf.find_program(lib_name, path_list=path)
- if not stliblink: return
- v['AR'] = stliblink
- v['ARFLAGS'] = ['/NOLOGO']
+ # c/c++ compiler
+ v.CC = v.CXX = cxx
+ v.CC_NAME = v.CXX_NAME = 'msvc'
+
+ v.LINK = v.LINK_CXX = link
+ if not v.LINK_CC:
+ v.LINK_CC = v.LINK_CXX
+
+ v.AR = ar
+ v.MT = mt
+ v.MTFLAGS = v.ARFLAGS = ['/NOLOGO']
- # manifest tool. Not required for VS 2003 and below. Must have for VS 2005 and later
- manifesttool = conf.find_program('MT', path_list=path)
- if manifesttool:
- v['MT'] = manifesttool
- v['MTFLAGS'] = ['/NOLOGO']
conf.check_tool('winres')
- if not conf.env['WINRC']:
+ if not conf.env.WINRC:
warn('Resource compiler not found. Compiling resource file is disabled')
+ # environment flags
+ try: v.prepend_value('CPPPATH', conf.environ['INCLUDE'])
+ except KeyError: pass
+ try: v.prepend_value('LIBPATH', conf.environ['LIB'])
+ except KeyError: pass
+
@conftest
def msvc_common_flags(conf):
v = conf.env
@@ -702,52 +707,47 @@ def apply_manifest(self):
"""Special linker for MSVC with support for embedding manifests into DLL's
and executables compiled by Visual Studio 2005 or probably later. Without
the manifest file, the binaries are unusable.
- See: http://msdn2.microsoft.com/en-us/library/ms235542(VS.80).aspx
- Problems with this tool: it is always called whether MSVC creates manifests or not."""
-
- if self.env.CC_NAME != 'msvc':
- return
+ See: http://msdn2.microsoft.com/en-us/library/ms235542(VS.80).aspx"""
- tsk = self.create_task('msvc_manifest')
- tsk.set_inputs(self.link_task.outputs[0])
+ if self.env.CC_NAME == 'msvc' and self.env.MSVC_MANIFEST:
+ out_node = self.link_task.outputs[0]
+ man_node = out_node.parent.find_or_declare(out_node.name + '.manifest')
+ self.link_task.outputs.append(man_node)
+ self.link_task.do_manifest = True
def exec_mf(self):
env = self.env
- outfile = self.inputs[0].bldpath(env)
- manifest = outfile + '.manifest'
- if os.path.exists(manifest):
- debug('msvc: manifesttool')
- mtool = env['MT']
- if not mtool:
- return 0
-
- mode = ''
- # embedding mode. Different for EXE's and DLL's.
- # see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx
- if 'cprogram' in self.generator.features:
- mode = '1'
- elif 'cshlib' in self.generator.features:
- mode = '2'
-
- debug('msvc: embedding manifest')
- #flags = ' '.join(env['MTFLAGS'] or [])
-
- lst = []
- lst.extend(Utils.to_list(env['MT']))
- lst.extend(Utils.to_list(env['MTFLAGS']))
- lst.extend(Utils.to_list("-manifest"))
- lst.extend(Utils.to_list(manifest))
- lst.extend(Utils.to_list("-outputresource:%s;%s" % (outfile, mode)))
-
- #cmd='%s %s -manifest "%s" -outputresource:"%s";#%s' % (mtool, flags,
- # manifest, outfile, mode)
- lst = [lst]
- ret = self.exec_command(*lst)
-
- return ret
-
-cls = Task.task_type_from_func('msvc_manifest', vars=['MT', 'MTFLAGS'], color='BLUE', func=exec_mf, ext_in='.bin')
-cls.quiet = 1
+ mtool = env['MT']
+ if not mtool:
+ return 0
+
+ self.do_manifest = False
+
+ outfile = self.outputs[0].bldpath(env)
+ manifest = self.outputs[-1].bldpath(env)
+
+ # embedding mode. Different for EXE's and DLL's.
+ # see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx
+ mode = ''
+ if 'cprogram' in self.generator.features:
+ mode = '1'
+ elif 'cshlib' in self.generator.features:
+ mode = '2'
+
+ debug('msvc: embedding manifest')
+ #flags = ' '.join(env['MTFLAGS'] or [])
+
+ lst = []
+ lst.extend(Utils.to_list(env['MT']))
+ lst.extend(Utils.to_list(env['MTFLAGS']))
+ lst.extend(Utils.to_list("-manifest"))
+ lst.extend(Utils.to_list(manifest))
+ lst.extend(Utils.to_list("-outputresource:%s;%s" % (outfile, mode)))
+
+ #cmd='%s %s -manifest "%s" -outputresource:"%s";#%s' % (mtool, flags,
+ # manifest, outfile, mode)
+ lst = [lst]
+ return self.exec_command(*lst)
########## stupid evil command modification: concatenate the tokens /Fx, /doc, and /x: with the next token
@@ -769,7 +769,11 @@ def exec_command_msvc(self, *k, **kw):
env.update(PATH = ';'.join(self.env['PATH']))
kw['env'] = env
- return self.generator.bld.exec_command(*k, **kw)
+ ret = self.generator.bld.exec_command(*k, **kw)
+ if ret: return ret
+ if getattr(self, 'do_manifest', None):
+ ret = exec_mf(self)
+ return ret
for k in 'cc cxx winrc cc_link cxx_link static_link qxx'.split():
cls = Task.TaskBase.classes.get(k, None)
72 tools/wafadmin/Tools/perl.py
@@ -36,31 +36,29 @@ def check_perl_version(conf, minver=None):
Perl binary can be overridden by --with-perl-binary config variable
"""
- res = True
- if not getattr(Options.options, 'perlbinary', None):
- perl = conf.find_program("perl", var="PERL")
- if not perl:
- return False
+ if getattr(Options.options, 'perlbinary', None):
+ conf.env.PERL = Options.options.perlbinary
else:
- perl = Options.options.perlbinary
- conf.env['PERL'] = perl
-
- version = Utils.cmd_output(perl + " -e'printf \"%vd\", $^V'")
- if not version:
- res = False
- version = "Unknown"
- elif not minver is None:
- ver = tuple(map(int, version.split(".")))
+ conf.find_program('perl', var='PERL', mandatory=True)
+
+ try:
+ version = Utils.cmd_output([conf.env.PERL, '-e', 'printf "%vd",$^V'])
+ except:
+ conf.fatal('could not determine the perl version')
+
+ conf.env.PERL_VERSION = version
+ cver = ''
+ if minver:
+ try:
+ ver = tuple(map(int, version.split('.')))
+ except:
+ conf.fatal('unsupported perl version %r' % version)
if ver < minver:
- res = False
+ conf.fatal('perl is too old')
- if minver is None:
- cver = ""
- else:
- cver = ".".join(map(str,minver))
- conf.check_message("perl", cver, res, version)
- return res
+ cver = '.'.join(map(str,minver))
+ conf.check_message('perl', cver, True, version)
@conf
def check_perl_module(conf, module):
@@ -85,31 +83,25 @@ def check_perl_ext_devel(conf):
Sets different xxx_PERLEXT variables in the environment.
Also sets the ARCHDIR_PERL variable useful as installation path,
- which can be overridden by --with-perl-archdir option.
+ which can be overridden by --with-perl-archdir
"""
- if not conf.env['PERL']:
- return False
-
- perl = conf.env['PERL']
+ if not conf.env.PERL:
+ conf.fatal('perl detection is required first')
def read_out(cmd):
- return Utils.to_list(Utils.cmd_output(perl + cmd))
-
- conf.env["LINKFLAGS_PERLEXT"] = read_out(" -MConfig -e'print $Config{lddlflags}'")
- conf.env["CPPPATH_PERLEXT"] = read_out(" -MConfig -e'print \"$Config{archlib}/CORE\"'")
- conf.env["CCFLAGS_PERLEXT"] = read_out(" -MConfig -e'print \"$Config{ccflags} $Config{cccdlflags}\"'")
+ return Utils.to_list(Utils.cmd_output([conf.env.PERL, '-MConfig', '-e', cmd]))
- conf.env["XSUBPP"] = read_out(" -MConfig -e'print \"$Config{privlib}/ExtUtils/xsubpp$Config{exe_ext}\"'")
- conf.env["EXTUTILS_TYPEMAP"] = read_out(" -MConfig -e'print \"$Config{privlib}/ExtUtils/typemap\"'")
+ conf.env.LINKFLAGS_PERLEXT = read_out('print $Config{lddlflags}')
+ conf.env.CPPPATH_PERLEXT = read_out('print "$Config{archlib}/CORE"')
+ conf.env.CCFLAGS_PERLEXT = read_out('print "$Config{ccflags} $Config{cccdlflags}"')
+ conf.env.XSUBPP = read_out('print "$Config{privlib}/ExtUtils/xsubpp$Config{exe_ext}"')
+ conf.env.EXTUTILS_TYPEMAP = read_out('print "$Config{privlib}/ExtUtils/typemap"')
+ conf.env.perlext_PATTERN = '%s.' + read_out('print $Config{dlext}')[0]
- if not getattr(Options.options, 'perlarchdir', None):
- conf.env["ARCHDIR_PERL"] = Utils.cmd_output(perl + " -MConfig -e'print $Config{sitearch}'")
+ if getattr(Options.options, 'perlarchdir', None):
+ conf.env.ARCHDIR_PERL = Options.options.perlarchdir
else:
- conf.env["ARCHDIR_PERL"] =