
unpacking waf as per bug 669470

1 parent b11571c commit 7ed5e3c383ddc134163b6864bfa5644489aa72bf tstriker committed Feb 10, 2012
BIN waf
Binary file not shown.
225 wafadmin/3rdpartys/boost.py
@@ -0,0 +1,225 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+import os.path,glob,types,re,sys
+import Configure,config_c,Options,Utils,Logs
+from Logs import warn,debug
+from Configure import conf
+boost_code='''
+#include <iostream>
+#include <boost/version.hpp>
+int main() { std::cout << BOOST_VERSION << std::endl; }
+'''
+boost_libpath=['/usr/lib','/usr/local/lib','/opt/local/lib','/sw/lib','/lib']
+boost_cpppath=['/usr/include','/usr/local/include','/opt/local/include','/sw/include']
+STATIC_NOSTATIC='nostatic'
+STATIC_BOTH='both'
+STATIC_ONLYSTATIC='onlystatic'
+is_versiontag=re.compile('^\d+_\d+_?\d*$')
+is_threadingtag=re.compile('^mt$')
+is_abitag=re.compile('^[sgydpn]+$')
+is_toolsettag=re.compile('^(acc|borland|como|cw|dmc|darwin|gcc|hp_cxx|intel|kylix|vc|mgw|qcc|sun|vacpp)\d*$')
+is_pythontag=re.compile('^py[0-9]{2}$')
+def set_options(opt):
+ opt.add_option('--boost-includes',type='string',default='',dest='boostincludes',help='path to the boost directory where the includes are e.g. /usr/local/include/boost-1_35')
+ opt.add_option('--boost-libs',type='string',default='',dest='boostlibs',help='path to the directory where the boost libs are e.g. /usr/local/lib')
+def string_to_version(s):
+ version=s.split('.')
+ if len(version)<3:return 0
+ return int(version[0])*100000+int(version[1])*100+int(version[2])
+def version_string(version):
+ major=version/100000
+ minor=version/100%1000
+ minor_minor=version%100
+ if minor_minor==0:
+ return"%d_%d"%(major,minor)
+ else:
+ return"%d_%d_%d"%(major,minor,minor_minor)
+def libfiles(lib,pattern,lib_paths):
+ result=[]
+ for lib_path in lib_paths:
+ libname=pattern%('boost_%s[!_]*'%lib)
+ result+=glob.glob(os.path.join(lib_path,libname))
+ return result
+def get_boost_version_number(self,dir):
+ try:
+ return self.run_c_code(compiler='cxx',code=boost_code,includes=dir,execute=1,env=self.env.copy(),type='cprogram',compile_mode='cxx',compile_filename='test.cpp')
+ except Configure.ConfigurationError,e:
+ return-1
+def set_default(kw,var,val):
+ if not var in kw:
+ kw[var]=val
+def tags_score(tags,kw):
+ score=0
+ needed_tags={'threading':kw['tag_threading'],'abi':kw['tag_abi'],'toolset':kw['tag_toolset'],'version':kw['tag_version'],'python':kw['tag_python']}
+ if kw['tag_toolset']is None:
+ v=kw['env']
+ toolset=v['CXX_NAME']
+ if v['CXX_VERSION']:
+ version_no=v['CXX_VERSION'].split('.')
+ toolset+=version_no[0]
+ if len(version_no)>1:
+ toolset+=version_no[1]
+ needed_tags['toolset']=toolset
+ found_tags={}
+ for tag in tags:
+ if is_versiontag.match(tag):found_tags['version']=tag
+ if is_threadingtag.match(tag):found_tags['threading']=tag
+ if is_abitag.match(tag):found_tags['abi']=tag
+ if is_toolsettag.match(tag):found_tags['toolset']=tag
+ if is_pythontag.match(tag):found_tags['python']=tag
+ for tagname in needed_tags.iterkeys():
+ if needed_tags[tagname]is not None and tagname in found_tags:
+ if re.compile(needed_tags[tagname]).match(found_tags[tagname]):
+ score+=kw['score_'+tagname][0]
+ else:
+ score+=kw['score_'+tagname][1]
+ return score
+def validate_boost(self,kw):
+ ver=kw.get('version','')
+ for x in'min_version max_version version'.split():
+ set_default(kw,x,ver)
+ set_default(kw,'lib','')
+ kw['lib']=Utils.to_list(kw['lib'])
+ set_default(kw,'env',self.env)
+ set_default(kw,'libpath',boost_libpath)
+ set_default(kw,'cpppath',boost_cpppath)
+ for x in'tag_threading tag_version tag_toolset'.split():
+ set_default(kw,x,None)
+ set_default(kw,'tag_abi','^[^d]*$')
+ set_default(kw,'python',str(sys.version_info[0])+str(sys.version_info[1]))
+ set_default(kw,'tag_python','^py'+kw['python']+'$')
+ set_default(kw,'score_threading',(10,-10))
+ set_default(kw,'score_abi',(10,-10))
+ set_default(kw,'score_python',(10,-10))
+ set_default(kw,'score_toolset',(1,-1))
+ set_default(kw,'score_version',(100,-100))
+ set_default(kw,'score_min',0)
+ set_default(kw,'static',STATIC_NOSTATIC)
+ set_default(kw,'found_includes',False)
+ set_default(kw,'min_score',0)
+ set_default(kw,'errmsg','not found')
+ set_default(kw,'okmsg','ok')
+def find_boost_includes(self,kw):
+ boostPath=getattr(Options.options,'boostincludes','')
+ if boostPath:
+ boostPath=[os.path.normpath(os.path.expandvars(os.path.expanduser(boostPath)))]
+ else:
+ boostPath=Utils.to_list(kw['cpppath'])
+ min_version=string_to_version(kw.get('min_version',''))
+ max_version=string_to_version(kw.get('max_version',''))or(sys.maxint-1)
+ version=0
+ for include_path in boostPath:
+ boost_paths=[p for p in glob.glob(os.path.join(include_path,'boost*'))if os.path.isdir(p)]
+ debug('BOOST Paths: %r'%boost_paths)
+ for path in boost_paths:
+ pathname=os.path.split(path)[-1]
+ ret=-1
+ if pathname=='boost':
+ path=include_path
+ ret=self.get_boost_version_number(path)
+ elif pathname.startswith('boost-'):
+ ret=self.get_boost_version_number(path)
+ ret=int(ret)
+ if ret!=-1 and ret>=min_version and ret<=max_version and ret>version:
+ boost_path=path
+ version=ret
+ if not version:
+ self.fatal('boost headers not found! (required version min: %s max: %s)'%(kw['min_version'],kw['max_version']))
+ return False
+ found_version=version_string(version)
+ versiontag='^'+found_version+'$'
+ if kw['tag_version']is None:
+ kw['tag_version']=versiontag
+ elif kw['tag_version']!=versiontag:
+ warn('boost header version %r and tag_version %r do not match!'%(versiontag,kw['tag_version']))
+ env=self.env
+ env['CPPPATH_BOOST']=boost_path
+ env['BOOST_VERSION']=found_version
+ self.found_includes=1
+ ret='Version %s (%s)'%(found_version,boost_path)
+ return ret
+def find_boost_library(self,lib,kw):
+ def find_library_from_list(lib,files):
+ lib_pattern=re.compile('.*boost_(.*?)\..*')
+ result=(None,None)
+ resultscore=kw['min_score']-1
+ for file in files:
+ m=lib_pattern.search(file,1)
+ if m:
+ libname=m.group(1)
+ libtags=libname.split('-')[1:]
+ currentscore=tags_score(libtags,kw)
+ if currentscore>resultscore:
+ result=(libname,file)
+ resultscore=currentscore
+ return result
+ lib_paths=getattr(Options.options,'boostlibs','')
+ if lib_paths:
+ lib_paths=[os.path.normpath(os.path.expandvars(os.path.expanduser(lib_paths)))]
+ else:
+ lib_paths=Utils.to_list(kw['libpath'])
+ v=kw.get('env',self.env)
+ (libname,file)=(None,None)
+ if kw['static']in[STATIC_NOSTATIC,STATIC_BOTH]:
+ st_env_prefix='LIB'
+ files=libfiles(lib,v['shlib_PATTERN'],lib_paths)
+ (libname,file)=find_library_from_list(lib,files)
+ if libname is None and kw['static']in[STATIC_ONLYSTATIC,STATIC_BOTH]:
+ st_env_prefix='STATICLIB'
+ staticLibPattern=v['staticlib_PATTERN']
+ if self.env['CC_NAME']=='msvc':
+ staticLibPattern='lib'+staticLibPattern
+ files=libfiles(lib,staticLibPattern,lib_paths)
+ (libname,file)=find_library_from_list(lib,files)
+ if libname is not None:
+ v['LIBPATH_BOOST_'+lib.upper()]=[os.path.split(file)[0]]
+ if self.env['CC_NAME']=='msvc'and os.path.splitext(file)[1]=='.lib':
+ v[st_env_prefix+'_BOOST_'+lib.upper()]=['libboost_'+libname]
+ else:
+ v[st_env_prefix+'_BOOST_'+lib.upper()]=['boost_'+libname]
+ return
+ self.fatal('lib boost_'+lib+' not found!')
+def check_boost(self,*k,**kw):
+ if not self.env['CXX']:
+ self.fatal('load a c++ compiler tool first, for example conf.check_tool("g++")')
+ self.validate_boost(kw)
+ ret=None
+ try:
+ if not kw.get('found_includes',None):
+ self.check_message_1(kw.get('msg_includes','boost headers'))
+ ret=self.find_boost_includes(kw)
+ except Configure.ConfigurationError,e:
+ if'errmsg'in kw:
+ self.check_message_2(kw['errmsg'],'YELLOW')
+ if'mandatory'in kw:
+ if Logs.verbose>1:
+ raise
+ else:
+ self.fatal('the configuration failed (see %r)'%self.log.name)
+ else:
+ if'okmsg'in kw:
+ self.check_message_2(kw.get('okmsg_includes',ret))
+ for lib in kw['lib']:
+ self.check_message_1('library boost_'+lib)
+ try:
+ self.find_boost_library(lib,kw)
+ except Configure.ConfigurationError,e:
+ ret=False
+ if'errmsg'in kw:
+ self.check_message_2(kw['errmsg'],'YELLOW')
+ if'mandatory'in kw:
+ if Logs.verbose>1:
+ raise
+ else:
+ self.fatal('the configuration failed (see %r)'%self.log.name)
+ else:
+ if'okmsg'in kw:
+ self.check_message_2(kw['okmsg'])
+ return ret
+
+conf(get_boost_version_number)
+conf(validate_boost)
+conf(find_boost_includes)
+conf(find_boost_library)
+conf(check_boost)
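
The boost tool above attaches check_boost (plus find_boost_includes and find_boost_library) to the configuration context; header directories are searched first, then each requested library file is scored by its name tags (version, threading, abi, toolset). As a rough, hypothetical sketch that is not part of this commit, a project wscript might drive it along these lines; the library names, version bound and tool loading details are assumptions for illustration only:

    # hypothetical wscript fragment
    def set_options(opt):
        # pulls in --boost-includes / --boost-libs defined by the tool
        opt.tool_options('boost')

    def configure(conf):
        conf.check_tool('compiler_cxx')
        # the tool lives under the 3rdparty directory, so a tooldir argument may be needed
        conf.check_tool('boost')
        # header search first, then each requested lib is scored by its name tags
        conf.check_boost(lib='filesystem system', min_version='1.35', mandatory=True)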
17 wafadmin/3rdpartys/fluid.py
@@ -0,0 +1,17 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+import Task
+from TaskGen import extension
+Task.simple_task_type('fluid','${FLUID} -c -o ${TGT[0].abspath(env)} -h ${TGT[1].abspath(env)} ${SRC}','BLUE',shell=False,ext_out='.cxx')
+def fluid(self,node):
+ cpp=node.change_ext('.cpp')
+ hpp=node.change_ext('.hpp')
+ self.create_task('fluid',node,[cpp,hpp])
+ if'cxx'in self.features:
+ self.allnodes.append(cpp)
+def detect(conf):
+ fluid=conf.find_program('fluid',var='FLUID',mandatory=True)
+ conf.check_cfg(path='fltk-config',package='',args='--cxxflags --ldflags',uselib_store='FLTK',mandatory=True)
+
+extension('.fl')(fluid)
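
For context, the fluid tool registers an extension handler for .fl files: each one is run through FLUID to produce a .cpp/.hpp pair, the generated .cpp is fed back into the C++ build, and detect() probes fltk-config into the FLTK uselib variable. A hypothetical wscript using it (target and file names invented) could look like this; as above, a tooldir may be needed since the tool sits under 3rdparty:

    # hypothetical wscript fragment
    def configure(conf):
        conf.check_tool('compiler_cxx fluid')

    def build(bld):
        bld.new_task_gen(features='cxx cprogram',
            source='gui.fl main.cpp',   # gui.fl -> gui.cpp/gui.hpp via fluid
            uselib='FLTK',
            target='app')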
678 wafadmin/Build.py
@@ -0,0 +1,678 @@
+#! /usr/bin/env python
+# encoding: utf-8
+import sys
+if sys.hexversion < 0x020400f0: from sets import Set as set
+import os,sys,errno,re,glob,gc,datetime,shutil
+try:import cPickle
+except:import pickle as cPickle
+import Runner,TaskGen,Node,Scripting,Utils,Environment,Task,Logs,Options
+from Logs import debug,error,info
+from Constants import*
+SAVED_ATTRS='root srcnode bldnode node_sigs node_deps raw_deps task_sigs id_nodes'.split()
+bld=None
+class BuildError(Utils.WafError):
+ def __init__(self,b=None,t=[]):
+ self.bld=b
+ self.tasks=t
+ self.ret=1
+ Utils.WafError.__init__(self,self.format_error())
+ def format_error(self):
+ lst=['Build failed:']
+ for tsk in self.tasks:
+ txt=tsk.format_error()
+ if txt:lst.append(txt)
+ sep=' '
+ if len(lst)>2:
+ sep='\n'
+ return sep.join(lst)
+def group_method(fun):
+ def f(*k,**kw):
+ if not k[0].is_install:
+ return False
+ postpone=True
+ if'postpone'in kw:
+ postpone=kw['postpone']
+ del kw['postpone']
+ if postpone:
+ m=k[0].task_manager
+ if not m.groups:m.add_group()
+ m.groups[m.current_group].post_funs.append((fun,k,kw))
+ if not'cwd'in kw:
+ kw['cwd']=k[0].path
+ else:
+ fun(*k,**kw)
+ return f
+class BuildContext(Utils.Context):
+ def __init__(self):
+ global bld
+ bld=self
+ self.task_manager=Task.TaskManager()
+ self.id_nodes=0
+ self.idx={}
+ self.all_envs={}
+ self.bdir=''
+ self.path=None
+ self.deps_man=Utils.DefaultDict(list)
+ self.cache_node_abspath={}
+ self.cache_scanned_folders={}
+ self.uninstall=[]
+ for v in'cache_node_abspath task_sigs node_deps raw_deps node_sigs'.split():
+ var={}
+ setattr(self,v,var)
+ self.cache_dir_contents={}
+ self.all_task_gen=[]
+ self.task_gen_cache_names={}
+ self.cache_sig_vars={}
+ self.log=None
+ self.root=None
+ self.srcnode=None
+ self.bldnode=None
+ class node_class(Node.Node):
+ pass
+ self.node_class=node_class
+ self.node_class.__module__="Node"
+ self.node_class.__name__="Nodu"
+ self.node_class.bld=self
+ self.is_install=None
+ def __copy__(self):
+ raise Utils.WafError('build contexts are not supposed to be cloned')
+ def load(self):
+ try:
+ env=Environment.Environment(os.path.join(self.cachedir,'build.config.py'))
+ except(IOError,OSError):
+ pass
+ else:
+ if env['version']<HEXVERSION:
+ raise Utils.WafError('Version mismatch! reconfigure the project')
+ for t in env['tools']:
+ self.setup(**t)
+ try:
+ gc.disable()
+ f=data=None
+ Node.Nodu=self.node_class
+ try:
+ f=open(os.path.join(self.bdir,DBFILE),'rb')
+ except(IOError,EOFError):
+ pass
+ try:
+ if f:data=cPickle.load(f)
+ except AttributeError:
+ if Logs.verbose>1:raise
+ if data:
+ for x in SAVED_ATTRS:setattr(self,x,data[x])
+ else:
+ debug('build: Build cache loading failed')
+ finally:
+ if f:f.close()
+ gc.enable()
+ def save(self):
+ gc.disable()
+ self.root.__class__.bld=None
+ Node.Nodu=self.node_class
+ db=os.path.join(self.bdir,DBFILE)
+ file=open(db+'.tmp','wb')
+ data={}
+ for x in SAVED_ATTRS:data[x]=getattr(self,x)
+ cPickle.dump(data,file,-1)
+ file.close()
+ try:os.unlink(db)
+ except OSError:pass
+ os.rename(db+'.tmp',db)
+ self.root.__class__.bld=self
+ gc.enable()
+ def clean(self):
+ debug('build: clean called')
+ precious=set([])
+ for env in self.all_envs.values():
+ for x in env[CFG_FILES]:
+ node=self.srcnode.find_resource(x)
+ if node:
+ precious.add(node.id)
+ def clean_rec(node):
+ for x in list(node.childs.keys()):
+ nd=node.childs[x]
+ tp=nd.id&3
+ if tp==Node.DIR:
+ clean_rec(nd)
+ elif tp==Node.BUILD:
+ if nd.id in precious:continue
+ for env in self.all_envs.values():
+ try:os.remove(nd.abspath(env))
+ except OSError:pass
+ node.childs.__delitem__(x)
+ clean_rec(self.srcnode)
+ for v in'node_sigs node_deps task_sigs raw_deps cache_node_abspath'.split():
+ setattr(self,v,{})
+ def compile(self):
+ debug('build: compile called')
+ self.flush()
+ self.generator=Runner.Parallel(self,Options.options.jobs)
+ def dw(on=True):
+ if Options.options.progress_bar:
+ if on:sys.stderr.write(Logs.colors.cursor_on)
+ else:sys.stderr.write(Logs.colors.cursor_off)
+ debug('build: executor starting')
+ back=os.getcwd()
+ os.chdir(self.bldnode.abspath())
+ try:
+ try:
+ dw(on=False)
+ self.generator.start()
+ except KeyboardInterrupt:
+ dw()
+ if Runner.TaskConsumer.consumers:
+ self.save()
+ raise
+ except Exception:
+ dw()
+ raise
+ else:
+ dw()
+ if Runner.TaskConsumer.consumers:
+ self.save()
+ if self.generator.error:
+ raise BuildError(self,self.task_manager.tasks_done)
+ finally:
+ os.chdir(back)
+ def install(self):
+ debug('build: install called')
+ self.flush()
+ if self.is_install<0:
+ lst=[]
+ for x in self.uninstall:
+ dir=os.path.dirname(x)
+ if not dir in lst:lst.append(dir)
+ lst.sort()
+ lst.reverse()
+ nlst=[]
+ for y in lst:
+ x=y
+ while len(x)>4:
+ if not x in nlst:nlst.append(x)
+ x=os.path.dirname(x)
+ nlst.sort()
+ nlst.reverse()
+ for x in nlst:
+ try:os.rmdir(x)
+ except OSError:pass
+ def new_task_gen(self,*k,**kw):
+ if self.task_gen_cache_names:
+ self.task_gen_cache_names={}
+ kw['bld']=self
+ if len(k)==0:
+ ret=TaskGen.task_gen(*k,**kw)
+ else:
+ cls_name=k[0]
+ try:cls=TaskGen.task_gen.classes[cls_name]
+ except KeyError:raise Utils.WscriptError('%s is not a valid task generator -> %s'%(cls_name,[x for x in TaskGen.task_gen.classes]))
+ ret=cls(*k,**kw)
+ return ret
+ def __call__(self,*k,**kw):
+ if self.task_gen_cache_names:
+ self.task_gen_cache_names={}
+ kw['bld']=self
+ return TaskGen.task_gen(*k,**kw)
+ def load_envs(self):
+ try:
+ lst=Utils.listdir(self.cachedir)
+ except OSError,e:
+ if e.errno==errno.ENOENT:
+ raise Utils.WafError('The project was not configured: run "waf configure" first!')
+ else:
+ raise
+ if not lst:
+ raise Utils.WafError('The cache directory is empty: reconfigure the project')
+ for file in lst:
+ if file.endswith(CACHE_SUFFIX):
+ env=Environment.Environment(os.path.join(self.cachedir,file))
+ name=file[:-len(CACHE_SUFFIX)]
+ self.all_envs[name]=env
+ self.init_variants()
+ for env in self.all_envs.values():
+ for f in env[CFG_FILES]:
+ newnode=self.path.find_or_declare(f)
+ try:
+ hash=Utils.h_file(newnode.abspath(env))
+ except(IOError,AttributeError):
+ error("cannot find "+f)
+ hash=SIG_NIL
+ self.node_sigs[env.variant()][newnode.id]=hash
+ self.bldnode=self.root.find_dir(self.bldnode.abspath())
+ self.path=self.srcnode=self.root.find_dir(self.srcnode.abspath())
+ self.cwd=self.bldnode.abspath()
+ def setup(self,tool,tooldir=None,funs=None):
+ if isinstance(tool,list):
+ for i in tool:self.setup(i,tooldir)
+ return
+ if not tooldir:tooldir=Options.tooldir
+ module=Utils.load_tool(tool,tooldir)
+ if hasattr(module,"setup"):module.setup(self)
+ def init_variants(self):
+ debug('build: init variants')
+ lstvariants=[]
+ for env in self.all_envs.values():
+ if not env.variant()in lstvariants:
+ lstvariants.append(env.variant())
+ self.lst_variants=lstvariants
+ debug('build: list of variants is %r',lstvariants)
+ for name in lstvariants+[0]:
+ for v in'node_sigs cache_node_abspath'.split():
+ var=getattr(self,v)
+ if not name in var:
+ var[name]={}
+ def load_dirs(self,srcdir,blddir,load_cache=1):
+ assert(os.path.isabs(srcdir))
+ assert(os.path.isabs(blddir))
+ self.cachedir=os.path.join(blddir,CACHE_DIR)
+ if srcdir==blddir:
+ raise Utils.WafError("build dir must be different from srcdir: %s <-> %s "%(srcdir,blddir))
+ self.bdir=blddir
+ self.load()
+ if not self.root:
+ Node.Nodu=self.node_class
+ self.root=Node.Nodu('',None,Node.DIR)
+ if not self.srcnode:
+ self.srcnode=self.root.ensure_dir_node_from_path(srcdir)
+ debug('build: srcnode is %s and srcdir %s',self.srcnode.name,srcdir)
+ self.path=self.srcnode
+ try:os.makedirs(blddir)
+ except OSError:pass
+ if not self.bldnode:
+ self.bldnode=self.root.ensure_dir_node_from_path(blddir)
+ self.init_variants()
+ def rescan(self,src_dir_node):
+ if self.cache_scanned_folders.get(src_dir_node.id,None):return
+ self.cache_scanned_folders[src_dir_node.id]=True
+ if hasattr(self,'repository'):self.repository(src_dir_node)
+ if not src_dir_node.name and sys.platform=='win32':
+ return
+ parent_path=src_dir_node.abspath()
+ try:
+ lst=set(Utils.listdir(parent_path))
+ except OSError:
+ lst=set([])
+ self.cache_dir_contents[src_dir_node.id]=lst
+ cache=self.node_sigs[0]
+ for x in src_dir_node.childs.values():
+ if x.id&3!=Node.FILE:continue
+ if x.name in lst:
+ try:
+ cache[x.id]=Utils.h_file(x.abspath())
+ except IOError:
+ raise Utils.WafError('The file %s is not readable or has become a dir'%x.abspath())
+ else:
+ try:del cache[x.id]
+ except KeyError:pass
+ del src_dir_node.childs[x.name]
+ h1=self.srcnode.height()
+ h2=src_dir_node.height()
+ lst=[]
+ child=src_dir_node
+ while h2>h1:
+ lst.append(child.name)
+ child=child.parent
+ h2-=1
+ lst.reverse()
+ try:
+ for variant in self.lst_variants:
+ sub_path=os.path.join(self.bldnode.abspath(),variant,*lst)
+ self.listdir_bld(src_dir_node,sub_path,variant)
+ except OSError:
+ for node in src_dir_node.childs.values():
+ if node.id&3!=Node.BUILD:
+ continue
+ for dct in self.node_sigs.values():
+ if node.id in dct:
+ dct.__delitem__(node.id)
+ src_dir_node.childs.__delitem__(node.name)
+ for variant in self.lst_variants:
+ sub_path=os.path.join(self.bldnode.abspath(),variant,*lst)
+ try:
+ os.makedirs(sub_path)
+ except OSError:
+ pass
+ def listdir_src(self,parent_node):
+ pass
+ def remove_node(self,node):
+ pass
+ def listdir_bld(self,parent_node,path,variant):
+ i_existing_nodes=[x for x in parent_node.childs.values()if x.id&3==Node.BUILD]
+ lst=set(Utils.listdir(path))
+ node_names=set([x.name for x in i_existing_nodes])
+ remove_names=node_names-lst
+ ids_to_remove=[x.id for x in i_existing_nodes if x.name in remove_names]
+ cache=self.node_sigs[variant]
+ for nid in ids_to_remove:
+ if nid in cache:
+ cache.__delitem__(nid)
+ def get_env(self):
+ return self.env_of_name('default')
+ def set_env(self,name,val):
+ self.all_envs[name]=val
+ env=property(get_env,set_env)
+ def add_manual_dependency(self,path,value):
+ if isinstance(path,Node.Node):
+ node=path
+ elif os.path.isabs(path):
+ node=self.root.find_resource(path)
+ else:
+ node=self.path.find_resource(path)
+ self.deps_man[node.id].append(value)
+ def launch_node(self):
+ try:
+ return self.p_ln
+ except AttributeError:
+ self.p_ln=self.root.find_dir(Options.launch_dir)
+ return self.p_ln
+ def glob(self,pattern,relative=True):
+ path=self.path.abspath()
+ files=[self.root.find_resource(x)for x in glob.glob(path+os.sep+pattern)]
+ if relative:
+ files=[x.path_to_parent(self.path)for x in files if x]
+ else:
+ files=[x.abspath()for x in files if x]
+ return files
+ def add_group(self,*k):
+ self.task_manager.add_group(*k)
+ def set_group(self,*k,**kw):
+ self.task_manager.set_group(*k,**kw)
+ def hash_env_vars(self,env,vars_lst):
+ idx=str(id(env))+str(vars_lst)
+ try:return self.cache_sig_vars[idx]
+ except KeyError:pass
+ lst=[str(env[a])for a in vars_lst]
+ ret=Utils.h_list(lst)
+ debug('envhash: %r %r',ret,lst)
+ self.cache_sig_vars[idx]=ret
+ return ret
+ def name_to_obj(self,name,env):
+ cache=self.task_gen_cache_names
+ if not cache:
+ for x in self.all_task_gen:
+ vt=x.env.variant()+'_'
+ if x.name:
+ cache[vt+x.name]=x
+ else:
+ if isinstance(x.target,str):
+ target=x.target
+ else:
+ target=' '.join(x.target)
+ v=vt+target
+ if not cache.get(v,None):
+ cache[v]=x
+ return cache.get(env.variant()+'_'+name,None)
+ def flush(self,all=1):
+ self.ini=datetime.datetime.now()
+ self.task_gen_cache_names={}
+ self.name_to_obj('',self.env)
+ debug('build: delayed operation TaskGen.flush() called')
+ if Options.options.compile_targets:
+ debug('task_gen: posting objects %r listed in compile_targets',Options.options.compile_targets)
+ mana=self.task_manager
+ to_post=[]
+ min_grp=0
+ target_objects=Utils.DefaultDict(list)
+ for target_name in Options.options.compile_targets.split(','):
+ target_name=target_name.strip()
+ for env in self.all_envs.values():
+ tg=self.name_to_obj(target_name,env)
+ if tg:
+ target_objects[target_name].append(tg)
+ m=mana.group_idx(tg)
+ if m>min_grp:
+ min_grp=m
+ to_post=[tg]
+ elif m==min_grp:
+ to_post.append(tg)
+ if not target_name in target_objects and all:
+ raise Utils.WafError("target '%s' does not exist"%target_name)
+ debug('group: Forcing up to group %s for target %s',mana.group_name(min_grp),Options.options.compile_targets)
+ for i in xrange(len(mana.groups)):
+ mana.current_group=i
+ if i==min_grp:
+ break
+ g=mana.groups[i]
+ debug('group: Forcing group %s',mana.group_name(g))
+ for t in g.tasks_gen:
+ debug('group: Posting %s',t.name or t.target)
+ t.post()
+ for t in to_post:
+ t.post()
+ else:
+ debug('task_gen: posting objects (normal)')
+ ln=self.launch_node()
+ if ln.is_child_of(self.bldnode)or not ln.is_child_of(self.srcnode):
+ ln=self.srcnode
+ proj_node=self.root.find_dir(os.path.split(Utils.g_module.root_path)[0])
+ if proj_node.id!=self.srcnode.id:
+ ln=self.srcnode
+ for i in xrange(len(self.task_manager.groups)):
+ g=self.task_manager.groups[i]
+ self.task_manager.current_group=i
+ if Logs.verbose:
+ groups=[x for x in self.task_manager.groups_names if id(self.task_manager.groups_names[x])==id(g)]
+ name=groups and groups[0]or'unnamed'
+ Logs.debug('group: group',name)
+ for tg in g.tasks_gen:
+ if not tg.path.is_child_of(ln):
+ continue
+ if Logs.verbose:
+ Logs.debug('group: %s'%tg)
+ tg.post()
+ def env_of_name(self,name):
+ try:
+ return self.all_envs[name]
+ except KeyError:
+ error('no such environment: '+name)
+ return None
+ def progress_line(self,state,total,col1,col2):
+ n=len(str(total))
+ Utils.rot_idx+=1
+ ind=Utils.rot_chr[Utils.rot_idx%4]
+ ini=self.ini
+ pc=(100.*state)/total
+ eta=Utils.get_elapsed_time(ini)
+ fs="[%%%dd/%%%dd][%%s%%2d%%%%%%s][%s]["%(n,n,ind)
+ left=fs%(state,total,col1,pc,col2)
+ right='][%s%s%s]'%(col1,eta,col2)
+ cols=Utils.get_term_cols()-len(left)-len(right)+2*len(col1)+2*len(col2)
+ if cols<7:cols=7
+ ratio=int((cols*state)/total)-1
+ bar=('='*ratio+'>').ljust(cols)
+ msg=Utils.indicator%(left,bar,right)
+ return msg
+ def do_install(self,src,tgt,chmod=O644):
+ if self.is_install>0:
+ if not Options.options.force:
+ try:
+ st1=os.stat(tgt)
+ st2=os.stat(src)
+ except OSError:
+ pass
+ else:
+ if st1.st_mtime>=st2.st_mtime and st1.st_size==st2.st_size:
+ return False
+ srclbl=src.replace(self.srcnode.abspath(None)+os.sep,'')
+ info("* installing %s as %s"%(srclbl,tgt))
+ try:os.remove(tgt)
+ except OSError:pass
+ try:
+ shutil.copy2(src,tgt)
+ os.chmod(tgt,chmod)
+ except IOError:
+ try:
+ os.stat(src)
+ except(OSError,IOError):
+ error('File %r does not exist'%src)
+ raise Utils.WafError('Could not install the file %r'%tgt)
+ return True
+ elif self.is_install<0:
+ info("* uninstalling %s"%tgt)
+ self.uninstall.append(tgt)
+ try:
+ os.remove(tgt)
+ except OSError,e:
+ if e.errno!=errno.ENOENT:
+ if not getattr(self,'uninstall_error',None):
+ self.uninstall_error=True
+ Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)')
+ if Logs.verbose>1:
+ Logs.warn('could not remove %s (error code %r)'%(e.filename,e.errno))
+ return True
+ red=re.compile(r"^([A-Za-z]:)?[/\\\\]*")
+ def get_install_path(self,path,env=None):
+ if not env:env=self.env
+ destdir=env.get_destdir()
+ path=path.replace('/',os.sep)
+ destpath=Utils.subst_vars(path,env)
+ if destdir:
+ destpath=os.path.join(destdir,self.red.sub('',destpath))
+ return destpath
+ def install_dir(self,path,env=None):
+ if env:
+ assert isinstance(env,Environment.Environment),"invalid parameter"
+ else:
+ env=self.env
+ if not path:
+ return[]
+ destpath=self.get_install_path(path,env)
+ if self.is_install>0:
+ info('* creating %s'%destpath)
+ Utils.check_dir(destpath)
+ elif self.is_install<0:
+ info('* removing %s'%destpath)
+ self.uninstall.append(destpath+'/xxx')
+ def install_files(self,path,files,env=None,chmod=O644,relative_trick=False,cwd=None):
+ if env:
+ assert isinstance(env,Environment.Environment),"invalid parameter"
+ else:
+ env=self.env
+ if not path:return[]
+ if not cwd:
+ cwd=self.path
+ if isinstance(files,str)and'*'in files:
+ gl=cwd.abspath()+os.sep+files
+ lst=glob.glob(gl)
+ else:
+ lst=Utils.to_list(files)
+ if not getattr(lst,'__iter__',False):
+ lst=[lst]
+ destpath=self.get_install_path(path,env)
+ Utils.check_dir(destpath)
+ installed_files=[]
+ for filename in lst:
+ if isinstance(filename,str)and os.path.isabs(filename):
+ alst=Utils.split_path(filename)
+ destfile=os.path.join(destpath,alst[-1])
+ else:
+ if isinstance(filename,Node.Node):
+ nd=filename
+ else:
+ nd=cwd.find_resource(filename)
+ if not nd:
+ raise Utils.WafError("Unable to install the file %r (not found in %s)"%(filename,cwd))
+ if relative_trick:
+ destfile=os.path.join(destpath,filename)
+ Utils.check_dir(os.path.dirname(destfile))
+ else:
+ destfile=os.path.join(destpath,nd.name)
+ filename=nd.abspath(env)
+ if self.do_install(filename,destfile,chmod):
+ installed_files.append(destfile)
+ return installed_files
+ def install_as(self,path,srcfile,env=None,chmod=O644,cwd=None):
+ if env:
+ assert isinstance(env,Environment.Environment),"invalid parameter"
+ else:
+ env=self.env
+ if not path:
+ raise Utils.WafError("where do you want to install %r? (%r?)"%(srcfile,path))
+ if not cwd:
+ cwd=self.path
+ destpath=self.get_install_path(path,env)
+ dir,name=os.path.split(destpath)
+ Utils.check_dir(dir)
+ if isinstance(srcfile,Node.Node):
+ src=srcfile.abspath(env)
+ else:
+ src=srcfile
+ if not os.path.isabs(srcfile):
+ node=cwd.find_resource(srcfile)
+ if not node:
+ raise Utils.WafError("Unable to install the file %r (not found in %s)"%(srcfile,cwd))
+ src=node.abspath(env)
+ return self.do_install(src,destpath,chmod)
+ def symlink_as(self,path,src,env=None,cwd=None):
+ if sys.platform=='win32':
+ return
+ if not path:
+ raise Utils.WafError("where do you want to install %r? (%r?)"%(src,path))
+ tgt=self.get_install_path(path,env)
+ dir,name=os.path.split(tgt)
+ Utils.check_dir(dir)
+ if self.is_install>0:
+ link=False
+ if not os.path.islink(tgt):
+ link=True
+ elif os.readlink(tgt)!=src:
+ link=True
+ if link:
+ try:os.remove(tgt)
+ except OSError:pass
+ info('* symlink %s (-> %s)'%(tgt,src))
+ os.symlink(src,tgt)
+ return 0
+ else:
+ try:
+ info('* removing %s'%(tgt))
+ os.remove(tgt)
+ return 0
+ except OSError:
+ return 1
+ def exec_command(self,cmd,**kw):
+ debug('runner: system command -> %s',cmd)
+ if self.log:
+ self.log.write('%s\n'%cmd)
+ kw['log']=self.log
+ try:
+ if not kw.get('cwd',None):
+ kw['cwd']=self.cwd
+ except AttributeError:
+ self.cwd=kw['cwd']=self.bldnode.abspath()
+ return Utils.exec_command(cmd,**kw)
+ def printout(self,s):
+ f=self.log or sys.stderr
+ f.write(s)
+ f.flush()
+ def add_subdirs(self,dirs):
+ self.recurse(dirs,'build')
+ def pre_recurse(self,name_or_mod,path,nexdir):
+ if not hasattr(self,'oldpath'):
+ self.oldpath=[]
+ self.oldpath.append(self.path)
+ self.path=self.root.find_dir(nexdir)
+ return{'bld':self,'ctx':self}
+ def post_recurse(self,name_or_mod,path,nexdir):
+ self.path=self.oldpath.pop()
+ def pre_build(self):
+ if hasattr(self,'pre_funs'):
+ for m in self.pre_funs:
+ m(self)
+ def post_build(self):
+ if hasattr(self,'post_funs'):
+ for m in self.post_funs:
+ m(self)
+ def add_pre_fun(self,meth):
+ try:self.pre_funs.append(meth)
+ except AttributeError:self.pre_funs=[meth]
+ def add_post_fun(self,meth):
+ try:self.post_funs.append(meth)
+ except AttributeError:self.post_funs=[meth]
+ def use_the_magic(self):
+ Task.algotype=Task.MAXPARALLEL
+ Task.file_deps=Task.extract_deps
+ self.magic=True
+ install_as=group_method(install_as)
+ install_files=group_method(install_files)
+ symlink_as=group_method(symlink_as)
+
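Build.py above defines the installation helpers (install_files, install_as, symlink_as, install_dir) that project scripts call during waf install/uninstall, with ${PREFIX} and friends substituted by get_install_path(). A minimal, hypothetical build() fragment exercising them (paths and file names invented) might read:

    # hypothetical build() fragment
    def build(bld):
        bld.install_files('${PREFIX}/share/doc/app', 'README COPYING')
        # 0755 equals the O755 constant (493) from Constants.py; chmod defaults to O644
        bld.install_as('${PREFIX}/bin/app-wrapper', 'scripts/wrapper.sh', chmod=0755)
        bld.symlink_as('${PREFIX}/bin/app-link', 'app-wrapper')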
310 wafadmin/Configure.py
@@ -0,0 +1,310 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+import os,shlex,sys,time
+try:import cPickle
+except ImportError:import pickle as cPickle
+import Environment,Utils,Options,Logs
+from Logs import warn
+from Constants import*
+try:
+ from urllib import request
+except:
+ from urllib import urlopen
+else:
+ urlopen=request.urlopen
+conf_template='''# project %(app)s configured on %(now)s by
+# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s)
+# using %(args)s
+#
+'''
+class ConfigurationError(Utils.WscriptError):
+ pass
+autoconfig=False
+def find_file(filename,path_list):
+ for directory in Utils.to_list(path_list):
+ if os.path.exists(os.path.join(directory,filename)):
+ return directory
+ return''
+def find_program_impl(env,filename,path_list=[],var=None,environ=None):
+ if not environ:
+ environ=os.environ
+ try:path_list=path_list.split()
+ except AttributeError:pass
+ if var:
+ if env[var]:return env[var]
+ if var in environ:env[var]=environ[var]
+ if not path_list:path_list=environ.get('PATH','').split(os.pathsep)
+ ext=(Options.platform=='win32')and'.exe,.com,.bat,.cmd'or''
+ for y in[filename+x for x in ext.split(',')]:
+ for directory in path_list:
+ x=os.path.join(directory,y)
+ if os.path.isfile(x):
+ if var:env[var]=x
+ return x
+ return''
+class ConfigurationContext(Utils.Context):
+ tests={}
+ error_handlers=[]
+ def __init__(self,env=None,blddir='',srcdir=''):
+ self.env=None
+ self.envname=''
+ self.environ=dict(os.environ)
+ self.line_just=40
+ self.blddir=blddir
+ self.srcdir=srcdir
+ self.all_envs={}
+ self.cwd=self.curdir=os.getcwd()
+ self.tools=[]
+ self.setenv(DEFAULT)
+ self.lastprog=''
+ self.hash=0
+ self.files=[]
+ self.tool_cache=[]
+ if self.blddir:
+ self.post_init()
+ def post_init(self):
+ self.cachedir=os.path.join(self.blddir,CACHE_DIR)
+ path=os.path.join(self.blddir,WAF_CONFIG_LOG)
+ try:os.unlink(path)
+ except(OSError,IOError):pass
+ try:
+ self.log=open(path,'w')
+ except(OSError,IOError):
+ self.fatal('could not open %r for writing'%path)
+ app=Utils.g_module.APPNAME
+ if app:
+ ver=getattr(Utils.g_module,'VERSION','')
+ if ver:
+ app="%s (%s)"%(app,ver)
+ now=time.ctime()
+ pyver=sys.hexversion
+ systype=sys.platform
+ args=" ".join(sys.argv)
+ wafver=WAFVERSION
+ abi=ABI
+ self.log.write(conf_template%vars())
+ def __del__(self):
+ if hasattr(self,'log')and self.log:
+ self.log.close()
+ def fatal(self,msg):
+ raise ConfigurationError(msg)
+ def check_tool(self,input,tooldir=None,funs=None):
+ tools=Utils.to_list(input)
+ if tooldir:tooldir=Utils.to_list(tooldir)
+ for tool in tools:
+ tool=tool.replace('++','xx')
+ if tool=='java':tool='javaw'
+ if tool.lower()=='unittest':tool='unittestw'
+ mag=(tool,id(self.env),funs)
+ if mag in self.tool_cache:
+ continue
+ self.tool_cache.append(mag)
+ module=None
+ try:
+ module=Utils.load_tool(tool,tooldir)
+ except Exception,e:
+ ex=e
+ if Options.options.download:
+ _3rdparty=os.path.normpath(Options.tooldir[0]+os.sep+'..'+os.sep+'3rdparty')
+ for x in Utils.to_list(Options.remote_repo):
+ for sub in['branches/waf-%s/wafadmin/3rdparty'%WAFVERSION,'trunk/wafadmin/3rdparty']:
+ url='/'.join((x,sub,tool+'.py'))
+ try:
+ web=urlopen(url)
+ if web.getcode()!=200:
+ continue
+ except Exception,e:
+ continue
+ else:
+ loc=None
+ try:
+ loc=open(_3rdparty+os.sep+tool+'.py','wb')
+ loc.write(web.read())
+ web.close()
+ finally:
+ if loc:
+ loc.close()
+ Logs.warn('downloaded %s from %s'%(tool,url))
+ try:
+ module=Utils.load_tool(tool,tooldir)
+ except:
+ Logs.warn('module %s from %s is unusable'%(tool,url))
+ try:
+ os.unlink(_3rdparty+os.sep+tool+'.py')
+ except:
+ pass
+ continue
+ else:
+ break
+ if not module:
+ Logs.error('Could not load the tool %r or download a suitable replacement from the repository (sys.path %r)\n%s'%(tool,sys.path,e))
+ raise ex
+ else:
+ Logs.error('Could not load the tool %r in %r (try the --download option?):\n%s'%(tool,sys.path,e))
+ raise ex
+ if funs is not None:
+ self.eval_rules(funs)
+ else:
+ func=getattr(module,'detect',None)
+ if func:
+ if type(func)is type(find_file):func(self)
+ else:self.eval_rules(func)
+ self.tools.append({'tool':tool,'tooldir':tooldir,'funs':funs})
+ def sub_config(self,k):
+ self.recurse(k,name='configure')
+ def pre_recurse(self,name_or_mod,path,nexdir):
+ return{'conf':self,'ctx':self}
+ def post_recurse(self,name_or_mod,path,nexdir):
+ if not autoconfig:
+ return
+ self.hash=hash((self.hash,getattr(name_or_mod,'waf_hash_val',name_or_mod)))
+ self.files.append(path)
+ def store(self,file=''):
+ if not os.path.isdir(self.cachedir):
+ os.makedirs(self.cachedir)
+ if not file:
+ file=open(os.path.join(self.cachedir,'build.config.py'),'w')
+ file.write('version = 0x%x\n'%HEXVERSION)
+ file.write('tools = %r\n'%self.tools)
+ file.close()
+ if not self.all_envs:
+ self.fatal('nothing to store in the configuration context!')
+ for key in self.all_envs:
+ tmpenv=self.all_envs[key]
+ tmpenv.store(os.path.join(self.cachedir,key+CACHE_SUFFIX))
+ def set_env_name(self,name,env):
+ self.all_envs[name]=env
+ return env
+ def retrieve(self,name,fromenv=None):
+ try:
+ env=self.all_envs[name]
+ except KeyError:
+ env=Environment.Environment()
+ env['PREFIX']=os.path.abspath(os.path.expanduser(Options.options.prefix))
+ self.all_envs[name]=env
+ else:
+ if fromenv:warn("The environment %s may have been configured already"%name)
+ return env
+ def setenv(self,name):
+ self.env=self.retrieve(name)
+ self.envname=name
+ def add_os_flags(self,var,dest=None):
+ try:self.env.append_value(dest or var,Utils.to_list(self.environ[var]))
+ except KeyError:pass
+ def check_message_1(self,sr):
+ self.line_just=max(self.line_just,len(sr))
+ for x in('\n',self.line_just*'-','\n',sr,'\n'):
+ self.log.write(x)
+ Utils.pprint('NORMAL',"%s :"%sr.ljust(self.line_just),sep='')
+ def check_message_2(self,sr,color='GREEN'):
+ self.log.write(sr)
+ self.log.write('\n')
+ Utils.pprint(color,sr)
+ def check_message(self,th,msg,state,option=''):
+ sr='Checking for %s %s'%(th,msg)
+ self.check_message_1(sr)
+ p=self.check_message_2
+ if state:p('ok '+str(option))
+ else:p('not found','YELLOW')
+ def check_message_custom(self,th,msg,custom,option='',color='PINK'):
+ sr='Checking for %s %s'%(th,msg)
+ self.check_message_1(sr)
+ self.check_message_2(custom,color)
+ def start_msg(self,msg):
+ try:
+ if self.in_msg:
+ return
+ except:
+ self.in_msg=0
+ self.in_msg+=1
+ self.line_just=max(self.line_just,len(msg))
+ for x in('\n',self.line_just*'-','\n',msg,'\n'):
+ self.log.write(x)
+ Utils.pprint('NORMAL',"%s :"%msg.ljust(self.line_just),sep='')
+ def end_msg(self,result):
+ self.in_msg-=1
+ if self.in_msg:
+ return
+ color='GREEN'
+ if result==True:
+ msg='ok'
+ elif result==False:
+ msg='not found'
+ color='YELLOW'
+ else:
+ msg=str(result)
+ self.log.write(msg)
+ self.log.write('\n')
+ Utils.pprint(color,msg)
+ def find_program(self,filename,path_list=[],var=None,mandatory=False):
+ ret=None
+ if var:
+ if self.env[var]:
+ ret=self.env[var]
+ elif var in os.environ:
+ ret=os.environ[var]
+ if not isinstance(filename,list):filename=[filename]
+ if not ret:
+ for x in filename:
+ ret=find_program_impl(self.env,x,path_list,var,environ=self.environ)
+ if ret:break
+ self.check_message_1('Checking for program %s'%' or '.join(filename))
+ self.log.write(' find program=%r paths=%r var=%r\n -> %r\n'%(filename,path_list,var,ret))
+ if ret:
+ Utils.pprint('GREEN',str(ret))
+ else:
+ Utils.pprint('YELLOW','not found')
+ if mandatory:
+ self.fatal('The program %r is required'%filename)
+ if var:
+ self.env[var]=ret
+ return ret
+ def cmd_to_list(self,cmd):
+ if isinstance(cmd,str)and cmd.find(' '):
+ try:
+ os.stat(cmd)
+ except OSError:
+ return shlex.split(cmd)
+ else:
+ return[cmd]
+ return cmd
+ def __getattr__(self,name):
+ r=self.__class__.__dict__.get(name,None)
+ if r:return r
+ if name and name.startswith('require_'):
+ for k in['check_','find_']:
+ n=name.replace('require_',k)
+ ret=self.__class__.__dict__.get(n,None)
+ if ret:
+ def run(*k,**kw):
+ r=ret(self,*k,**kw)
+ if not r:
+ self.fatal('requirement failure')
+ return r
+ return run
+ self.fatal('No such method %r'%name)
+ def eval_rules(self,rules):
+ self.rules=Utils.to_list(rules)
+ for x in self.rules:
+ f=getattr(self,x)
+ if not f:self.fatal("No such method '%s'."%x)
+ try:
+ f()
+ except Exception,e:
+ ret=self.err_handler(x,e)
+ if ret==BREAK:
+ break
+ elif ret==CONTINUE:
+ continue
+ else:
+ self.fatal(e)
+ def err_handler(self,fun,error):
+ pass
+def conf(f):
+ setattr(ConfigurationContext,f.__name__,f)
+ return f
+def conftest(f):
+ ConfigurationContext.tests[f.__name__]=f
+ return conf(f)
+
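The conf() and conftest() helpers at the bottom of Configure.py attach functions as methods of ConfigurationContext, which is how boost.py above registers itself. A hypothetical custom check following the same pattern (assuming a C/C++ compiler tool, and therefore config_c's check_cfg, has already been loaded) could look like:

    # hypothetical custom configuration test
    from Configure import conf

    @conf
    def check_sqlite(self):
        # delegates to the generic pkg-config wrapper provided by config_c
        self.check_cfg(package='sqlite3', args='--cflags --libs',
            uselib_store='SQLITE', mandatory=False)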
47 wafadmin/Constants.py
@@ -0,0 +1,47 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+HEXVERSION=0x10511
+WAFVERSION="1.5.17"
+WAFREVISION="8002"
+ABI=7
+O644=420
+O755=493
+MAXJOBS=99999999
+CACHE_DIR='c4che'
+CACHE_SUFFIX='.cache.py'
+DBFILE='.wafpickle-%d'%ABI
+WSCRIPT_FILE='wscript'
+WSCRIPT_BUILD_FILE='wscript_build'
+WAF_CONFIG_LOG='config.log'
+WAF_CONFIG_H='config.h'
+SIG_NIL='iluvcuteoverload'
+VARIANT='_VARIANT_'
+DEFAULT='default'
+SRCDIR='srcdir'
+BLDDIR='blddir'
+APPNAME='APPNAME'
+VERSION='VERSION'
+DEFINES='defines'
+UNDEFINED=()
+BREAK="break"
+CONTINUE="continue"
+JOBCONTROL="JOBCONTROL"
+MAXPARALLEL="MAXPARALLEL"
+NORMAL="NORMAL"
+NOT_RUN=0
+MISSING=1
+CRASHED=2
+EXCEPTION=3
+SKIPPED=8
+SUCCESS=9
+ASK_LATER=-1
+SKIP_ME=-2
+RUN_ME=-3
+LOG_FORMAT="%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s"
+HOUR_FORMAT="%H:%M:%S"
+TEST_OK=True
+CFG_FILES='cfg_files'
+INSTALL=1337
+UNINSTALL=-1337
+
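Note that O644 (420) and O755 (493) are the decimal values of the octal permission modes 0644 and 0755 used as chmod defaults by the install helpers in Build.py.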
158 wafadmin/Environment.py
@@ -0,0 +1,158 @@
+#! /usr/bin/env python
+# encoding: utf-8
+import sys
+if sys.hexversion < 0x020400f0: from sets import Set as set
+import os,copy,re
+import Logs,Options,Utils
+from Constants import*
+re_imp=re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$',re.M)
+class Environment(object):
+ __slots__=("table","parent")
+ def __init__(self,filename=None):
+ self.table={}
+ if filename:
+ self.load(filename)
+ def __contains__(self,key):
+ if key in self.table:return True
+ try:return self.parent.__contains__(key)
+ except AttributeError:return False
+ def __str__(self):
+ keys=set()
+ cur=self
+ while cur:
+ keys.update(cur.table.keys())
+ cur=getattr(cur,'parent',None)
+ keys=list(keys)
+ keys.sort()
+ return"\n".join(["%r %r"%(x,self.__getitem__(x))for x in keys])
+ def __getitem__(self,key):
+ try:
+ while 1:
+ x=self.table.get(key,None)
+ if not x is None:
+ return x
+ self=self.parent
+ except AttributeError:
+ return[]
+ def __setitem__(self,key,value):
+ self.table[key]=value
+ def __delitem__(self,key):
+ del self.table[key]
+ def pop(self,key,*args):
+ if len(args):
+ return self.table.pop(key,*args)
+ return self.table.pop(key)
+ def set_variant(self,name):
+ self.table[VARIANT]=name
+ def variant(self):
+ try:
+ while 1:
+ x=self.table.get(VARIANT,None)
+ if not x is None:
+ return x
+ self=self.parent
+ except AttributeError:
+ return DEFAULT
+ def copy(self):
+ newenv=Environment()
+ newenv.parent=self
+ return newenv
+ def detach(self):
+ tbl=self.get_merged_dict()
+ try:
+ delattr(self,'parent')
+ except AttributeError:
+ pass
+ else:
+ keys=tbl.keys()
+ for x in keys:
+ tbl[x]=copy.deepcopy(tbl[x])
+ self.table=tbl
+ def get_flat(self,key):
+ s=self[key]
+ if isinstance(s,str):return s
+ return' '.join(s)
+ def _get_list_value_for_modification(self,key):
+ try:
+ value=self.table[key]
+ except KeyError:
+ try:value=self.parent[key]
+ except AttributeError:value=[]
+ if isinstance(value,list):
+ value=value[:]
+ else:
+ value=[value]
+ else:
+ if not isinstance(value,list):
+ value=[value]
+ self.table[key]=value
+ return value
+ def append_value(self,var,value):
+ current_value=self._get_list_value_for_modification(var)
+ if isinstance(value,list):
+ current_value.extend(value)
+ else:
+ current_value.append(value)
+ def prepend_value(self,var,value):
+ current_value=self._get_list_value_for_modification(var)
+ if isinstance(value,list):
+ current_value=value+current_value
+ self.table[var]=current_value
+ else:
+ current_value.insert(0,value)
+ def append_unique(self,var,value):
+ current_value=self._get_list_value_for_modification(var)
+ if isinstance(value,list):
+ for value_item in value:
+ if value_item not in current_value:
+ current_value.append(value_item)
+ else:
+ if value not in current_value:
+ current_value.append(value)
+ def get_merged_dict(self):
+ table_list=[]
+ env=self
+ while 1:
+ table_list.insert(0,env.table)
+ try:env=env.parent
+ except AttributeError:break
+ merged_table={}
+ for table in table_list:
+ merged_table.update(table)
+ return merged_table
+ def store(self,filename):
+ file=open(filename,'w')
+ merged_table=self.get_merged_dict()
+ keys=list(merged_table.keys())
+ keys.sort()
+ for k in keys:file.write('%s = %r\n'%(k,merged_table[k]))
+ file.close()
+ def load(self,filename):
+ tbl=self.table
+ code=Utils.readf(filename)
+ for m in re_imp.finditer(code):
+ g=m.group
+ tbl[g(2)]=eval(g(3))
+ Logs.debug('env: %s',self.table)
+ def get_destdir(self):
+ if self.__getitem__('NOINSTALL'):return''
+ return Options.options.destdir
+ def update(self,d):
+ for k,v in d.iteritems():
+ self[k]=v
+ def __getattr__(self,name):
+ if name in self.__slots__:
+ return object.__getattr__(self,name)
+ else:
+ return self[name]
+ def __setattr__(self,name,value):
+ if name in self.__slots__:
+ object.__setattr__(self,name,value)
+ else:
+ self[name]=value
+ def __delattr__(self,name):
+ if name in self.__slots__:
+ object.__delattr__(self,name)
+ else:
+ del self[name]
+
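Environment objects are lightweight copy-on-write tables: copy() only records a parent pointer, and _get_list_value_for_modification() copies a value into the child table on first write. A small, hypothetical snippet (assuming wafadmin is importable) illustrating that behaviour:

    # hypothetical snippet; the child env shadows the parent on first modification
    import Environment

    base = Environment.Environment()
    base.append_value('CXXFLAGS', ['-O2'])
    child = base.copy()                    # stores only a parent reference
    child.append_value('CXXFLAGS', '-g')
    # child['CXXFLAGS'] -> ['-O2', '-g'], base['CXXFLAGS'] stays ['-O2']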
97 wafadmin/Logs.py
@@ -0,0 +1,97 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+import ansiterm
+import os,re,logging,traceback,sys
+from Constants import*
+zones=''
+verbose=0
+colors_lst={'USE':True,'BOLD':'\x1b[01;1m','RED':'\x1b[01;31m','GREEN':'\x1b[32m','YELLOW':'\x1b[33m','PINK':'\x1b[35m','BLUE':'\x1b[01;34m','CYAN':'\x1b[36m','NORMAL':'\x1b[0m','cursor_on':'\x1b[?25h','cursor_off':'\x1b[?25l',}
+got_tty=False
+term=os.environ.get('TERM','dumb')
+if not term in['dumb','emacs']:
+ try:
+ got_tty=sys.stderr.isatty()or(sys.platform=='win32'and term in['xterm','msys'])
+ except AttributeError:
+ pass
+import Utils
+if not got_tty or'NOCOLOR'in os.environ:
+ colors_lst['USE']=False
+def get_color(cl):
+ if not colors_lst['USE']:return''
+ return colors_lst.get(cl,'')
+class foo(object):
+ def __getattr__(self,a):
+ return get_color(a)
+ def __call__(self,a):
+ return get_color(a)
+colors=foo()
+re_log=re.compile(r'(\w+): (.*)',re.M)
+class log_filter(logging.Filter):
+ def __init__(self,name=None):
+ pass
+ def filter(self,rec):
+ rec.c1=colors.PINK
+ rec.c2=colors.NORMAL
+ rec.zone=rec.module
+ if rec.levelno>=logging.INFO:
+ if rec.levelno>=logging.ERROR:
+ rec.c1=colors.RED
+ elif rec.levelno>=logging.WARNING:
+ rec.c1=colors.YELLOW
+ else:
+ rec.c1=colors.GREEN
+ return True
+ zone=''
+ m=re_log.match(rec.msg)
+ if m:
+ zone=rec.zone=m.group(1)
+ rec.msg=m.group(2)
+ if zones:
+ return getattr(rec,'zone','')in zones or'*'in zones
+ elif not verbose>2:
+ return False
+ return True
+class formatter(logging.Formatter):
+ def __init__(self):
+ logging.Formatter.__init__(self,LOG_FORMAT,HOUR_FORMAT)
+ def format(self,rec):
+ if rec.levelno>=logging.WARNING or rec.levelno==logging.INFO:
+ try:
+ return'%s%s%s'%(rec.c1,rec.msg.decode('utf-8'),rec.c2)
+ except:
+ return rec.c1+rec.msg+rec.c2
+ return logging.Formatter.format(self,rec)
+def debug(*k,**kw):
+ if verbose:
+ k=list(k)
+ k[0]=k[0].replace('\n',' ')
+ logging.debug(*k,**kw)
+def error(*k,**kw):
+ logging.error(*k,**kw)
+ if verbose>1:
+ if isinstance(k[0],Utils.WafError):
+ st=k[0].stack
+ else:
+ st=traceback.extract_stack()
+ if st:
+ st=st[:-1]
+ buf=[]
+ for filename,lineno,name,line in st:
+ buf.append(' File "%s", line %d, in %s'%(filename,lineno,name))
+ if line:
+ buf.append(' %s'%line.strip())
+ if buf:logging.error("\n".join(buf))
+warn=logging.warn
+info=logging.info
+def init_log():
+ log=logging.getLogger()
+ log.handlers=[]
+ log.filters=[]
+ hdlr=logging.StreamHandler()
+ hdlr.setFormatter(formatter())
+ log.addHandler(hdlr)
+ log.addFilter(log_filter())
+ log.setLevel(logging.DEBUG)
+init_log()
+
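Logs routes everything through the standard logging module; log_filter lets zone-prefixed debug messages of the form "zone: text" through only when that zone is enabled. A hypothetical snippet, setting the module globals directly (waf normally sets them from -v and --zones):

    # hypothetical use of the zone filter
    import Logs
    Logs.verbose = 1
    Logs.zones = ['build']
    Logs.debug('build: shown because the "build" zone is enabled')
    Logs.debug('runner: suppressed, zone not enabled')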
494 wafadmin/Node.py
@@ -0,0 +1,494 @@
+#! /usr/bin/env python
+# encoding: utf-8
+import sys
+if sys.hexversion < 0x020400f0: from sets import Set as set
+import os,sys,fnmatch,re,stat
+import Utils,Constants
+UNDEFINED=0
+DIR=1
+FILE=2
+BUILD=3
+type_to_string={UNDEFINED:"unk",DIR:"dir",FILE:"src",BUILD:"bld"}
+prune_pats='.git .bzr .hg .svn _MTN _darcs CVS SCCS'.split()
+exclude_pats=prune_pats+'*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split()
+exclude_regs='''
+**/*~
+**/#*#
+**/.#*
+**/%*%
+**/._*
+**/CVS
+**/CVS/**
+**/.cvsignore
+**/SCCS
+**/SCCS/**
+**/vssver.scc
+**/.svn
+**/.svn/**
+**/.git
+**/.git/**
+**/.gitignore
+**/.bzr
+**/.bzr/**
+**/.hg
+**/.hg/**
+**/_MTN
+**/_MTN/**
+**/_darcs
+**/_darcs/**
+**/.DS_Store'''
+class Node(object):
+ __slots__=("name","parent","id","childs")
+ def __init__(self,name,parent,node_type=UNDEFINED):
+ self.name=name
+ self.parent=parent
+ self.__class__.bld.id_nodes+=4
+ self.id=self.__class__.bld.id_nodes+node_type
+ if node_type==DIR:self.childs={}
+ if parent and name in parent.childs:
+ raise Utils.WafError('node %s exists in the parent files %r already'%(name,parent))
+ if parent:parent.childs[name]=self
+ def __setstate__(self,data):
+ if len(data)==4:
+ (self.parent,self.name,self.id,self.childs)=data
+ else:
+ (self.parent,self.name,self.id)=data
+ def __getstate__(self):
+ if getattr(self,'childs',None)is None:
+ return(self.parent,self.name,self.id)
+ else:
+ return(self.parent,self.name,self.id,self.childs)
+ def __str__(self):
+ if not self.parent:return''
+ return"%s://%s"%(type_to_string[self.id&3],self.abspath())
+ def __repr__(self):
+ return self.__str__()
+ def __hash__(self):
+ raise Utils.WafError('nodes, you are doing it wrong')
+ def __copy__(self):
+ raise Utils.WafError('nodes are not supposed to be cloned')
+ def get_type(self):
+ return self.id&3
+ def set_type(self,t):
+ self.id=self.id+t-self.id&3
+ def dirs(self):
+ return[x for x in self.childs.values()if x.id&3==DIR]
+ def files(self):
+ return[x for x in self.childs.values()if x.id&3==FILE]
+ def get_dir(self,name,default=None):
+ node=self.childs.get(name,None)
+ if not node or node.id&3!=DIR:return default
+ return node
+ def get_file(self,name,default=None):
+ node=self.childs.get(name,None)
+ if not node or node.id&3!=FILE:return default
+ return node
+ def get_build(self,name,default=None):
+ node=self.childs.get(name,None)
+ if not node or node.id&3!=BUILD:return default
+ return node
+ def find_resource(self,lst):
+ if isinstance(lst,str):
+ lst=Utils.split_path(lst)
+ if len(lst)==1:
+ parent=self
+ else:
+ parent=self.find_dir(lst[:-1])
+ if not parent:return None
+ self.__class__.bld.rescan(parent)
+ name=lst[-1]
+ node=parent.childs.get(name,None)
+ if node:
+ tp=node.id&3
+ if tp==FILE or tp==BUILD:
+ return node
+ else:
+ return None
+ tree=self.__class__.bld
+ if not name in tree.cache_dir_contents[parent.id]:
+ return None
+ path=parent.abspath()+os.sep+name
+ try:
+ st=Utils.h_file(path)
+ except IOError:
+ return None
+ child=self.__class__(name,parent,FILE)
+ tree.node_sigs[0][child.id]=st
+ return child
+ def find_or_declare(self,lst):
+ if isinstance(lst,str):
+ lst=Utils.split_path(lst)
+ if len(lst)==1:
+ parent=self
+ else:
+ parent=self.find_dir(lst[:-1])
+ if not parent:return None
+ self.__class__.bld.rescan(parent)
+ name=lst[-1]
+ node=parent.childs.get(name,None)
+ if node:
+ tp=node.id&3
+ if tp!=BUILD:
+ raise Utils.WafError('find_or_declare found a source file where a build file was expected %r'%'/'.join(lst))
+ return node
+ node=self.__class__(name,parent,BUILD)
+ return node
+ def find_dir(self,lst):
+ if isinstance(lst,str):
+ lst=Utils.split_path(lst)
+ current=self
+ for name in lst:
+ self.__class__.bld.rescan(current)
+ prev=current
+ if not current.parent and name==current.name:
+ continue
+ elif not name:
+ continue
+ elif name=='.':
+ continue
+ elif name=='..':
+ current=current.parent or current
+ else:
+ current=prev.childs.get(name,None)
+ if current is None:
+ dir_cont=self.__class__.bld.cache_dir_contents
+ if prev.id in dir_cont and name in dir_cont[prev.id]:
+ if not prev.name:
+ if os.sep=='/':
+ dirname=os.sep+name
+ else:
+ dirname=name
+ else:
+ dirname=prev.abspath()+os.sep+name
+ if not os.path.isdir(dirname):
+ return None
+ current=self.__class__(name,prev,DIR)
+ elif(not prev.name and len(name)==2 and name[1]==':')or name.startswith('\\\\'):
+ current=self.__class__(name,prev,DIR)
+ else:
+ return None
+ else:
+ if current.id&3!=DIR:
+ return None
+ return current
+ def ensure_dir_node_from_path(self,lst):
+ if isinstance(lst,str):
+ lst=Utils.split_path(lst)
+ current=self
+ for name in lst:
+ if not name:
+ continue
+ elif name=='.':
+ continue
+ elif name=='..':
+ current=current.parent or current
+ else:
+ prev=current
+ current=prev.childs.get(name,None)
+ if current is None:
+ current=self.__class__(name,prev,DIR)
+ return current
+ def exclusive_build_node(self,path):
+ lst=Utils.split_path(path)
+ name=lst[-1]
+ if len(lst)>1:
+ parent=None
+ try:
+ parent=self.find_dir(lst[:-1])
+ except OSError:
+ pass
+ if not parent:
+ parent=self.ensure_dir_node_from_path(lst[:-1])
+ self.__class__.bld.rescan(parent)
+ else:
+ try:
+ self.__class__.bld.rescan(parent)
+ except OSError:
+ pass
+ else:
+ parent=self
+ node=parent.childs.get(name,None)
+ if not node:
+ node=self.__class__(name,parent,BUILD)
+ return node
+ def path_to_parent(self,parent):
+ lst=[]
+ p=self
+ h1=parent.height()
+ h2=p.height()
+ while h2>h1:
+ h2-=1
+ lst.append(p.name)
+ p=p.parent
+ if lst:
+ lst.reverse()
+ ret=os.path.join(*lst)
+ else:
+ ret=''
+ return ret
+ def find_ancestor(self,node):
+ dist=self.height()-node.height()
+ if dist<0:return node.find_ancestor(self)
+ cand=self
+ while dist>0:
+ cand=cand.parent
+ dist-=1
+ if cand==node:return cand
+ cursor=node
+ while cand.parent:
+ cand=cand.parent
+ cursor=cursor.parent
+ if cand==cursor:return cand
+ def relpath_gen(self,from_node):
+ if self==from_node:return'.'
+ if from_node.parent==self:return'..'
+ ancestor=self.find_ancestor(from_node)
+ lst=[]
+ cand=self
+ while not cand.id==ancestor.id:
+ lst.append(cand.name)
+ cand=cand.parent
+ cand=from_node
+ while not cand.id==ancestor.id:
+ lst.append('..')
+ cand=cand.parent
+ lst.reverse()
+ return os.sep.join(lst)
+ def nice_path(self,env=None):
+ tree=self.__class__.bld
+ ln=tree.launch_node()
+ if self.id&3==FILE:return self.relpath_gen(ln)
+ else:return os.path.join(tree.bldnode.relpath_gen(ln),env.variant(),self.relpath_gen(tree.srcnode))
+ def is_child_of(self,node):
+ p=self
+ diff=self.height()-node.height()
+ while diff>0:
+ diff-=1
+ p=p.parent
+ return p.id==node.id
+ def variant(self,env):
+ if not env:return 0
+ elif self.id&3==FILE:return 0
+ else:return env.variant()
+ def height(self):
+ d=self
+ val=-1
+ while d:
+ d=d.parent
+ val+=1
+ return val
+ def abspath(self,env=None):
+ variant=(env and(self.id&3!=FILE)and env.variant())or 0
+ ret=self.__class__.bld.cache_node_abspath[variant].get(self.id,None)
+ if ret:return ret
+ if not variant:
+ if not self.parent:
+ val=os.sep=='/'and os.sep or''
+ elif not self.parent.name:
+ val=(os.sep=='/'and os.sep or'')+self.name
+ else:
+ val=self.parent.abspath()+os.sep+self.name
+ else:
+ val=os.sep.join((self.__class__.bld.bldnode.abspath(),variant,self.path_to_parent(self.__class__.bld.srcnode)))
+ self.__class__.bld.cache_node_abspath[variant][self.id]=val
+ return val
+ def change_ext(self,ext):
+ name=self.name
+ k=name.rfind('.')
+ if k>=0:
+ name=name[:k]+ext
+ else:
+ name=name+ext
+ return self.parent.find_or_declare([name])
+ def src_dir(self,env):
+ return self.parent.srcpath(env)
+ def bld_dir(self,env):
+ return self.parent.bldpath(env)
+ def bld_base(self,env):
+ s=os.path.splitext(self.name)[0]
+ return os.path.join(self.bld_dir(env),s)
+ def bldpath(self,env=None):
+ if self.id&3==FILE:
+ return self.relpath_gen(self.__class__.bld.bldnode)
+ p=self.path_to_parent(self.__class__.bld.srcnode)
+ if p is not'':
+ return env.variant()+os.sep+p
+ return env.variant()
+ def srcpath(self,env=None):
+ if self.id&3==BUILD:
+ return self.bldpath(env)
+ return self.relpath_gen(self.__class__.bld.bldnode)
+ def read(self,env):
+ return Utils.readf(self.abspath(env))
+ def dir(self,env):
+ return self.parent.abspath(env)
+ def file(self):
+ return self.name
+ def file_base(self):
+ return os.path.splitext(self.name)[0]
+ def suffix(self):
+ k=max(0,self.name.rfind('.'))
+ return self.name[k:]
+ def find_iter_impl(self,src=True,bld=True,dir=True,accept_name=None,is_prune=None,maxdepth=25):
+ bld_ctx=self.__class__.bld
+ bld_ctx.rescan(self)
+ for name in bld_ctx.cache_dir_contents[self.id]:
+ if accept_name(self,name):
+ node=self.find_resource(name)
+ if node:
+ if src and node.id&3==FILE:
+ yield node
+ else:
+ node=self.find_dir(name)
+ if node and node.id!=bld_ctx.bldnode.id:
+ if dir:
+ yield node
+ if not is_prune(self,name):
+ if maxdepth:
+ for k in node.find_iter_impl(src,bld,dir,accept_name,is_prune,maxdepth=maxdepth-1):
+ yield k
+ else:
+ if not is_prune(self,name):
+ node=self.find_resource(name)
+ if not node:
+ node=self.find_dir(name)
+ if node and node.id!=bld_ctx.bldnode.id:
+ if maxdepth:
+ for k in node.find_iter_impl(src,bld,dir,accept_name,is_prune,maxdepth=maxdepth-1):
+ yield k
+ if bld:
+ for node in self.childs.values():
+ if node.id==bld_ctx.bldnode.id:
+ continue
+ if node.id&3==BUILD:
+ if accept_name(self,node.name):
+ yield node
+ raise StopIteration
+ def find_iter(self,in_pat=['*'],ex_pat=exclude_pats,prune_pat=prune_pats,src=True,bld=True,dir=False,maxdepth=25,flat=False):
+ if not(src or bld or dir):
+ raise StopIteration
+ if self.id&3!=DIR:
+ raise StopIteration
+ in_pat=Utils.to_list(in_pat)
+ ex_pat=Utils.to_list(ex_pat)
+ prune_pat=Utils.to_list(prune_pat)
+ def accept_name(node,name):
+ for pat in ex_pat:
+ if fnmatch.fnmatchcase(name,pat):
+ return False
+ for pat in in_pat:
+ if fnmatch.fnmatchcase(name,pat):
+ return True
+ return False
+ def is_prune(node,name):
+ for pat in prune_pat:
+ if fnmatch.fnmatchcase(name,pat):
+ return True
+ return False
+ ret=self.find_iter_impl(src,bld,dir,accept_name,is_prune,maxdepth=maxdepth)
+ if flat:
+ return" ".join([x.relpath_gen(self)for x in ret])
+ return ret
+ def ant_glob(self,*k,**kw):
+ src=kw.get('src',1)
+ bld=kw.get('bld',0)
+ dir=kw.get('dir',0)
+ excl=kw.get('excl',exclude_regs)
+ incl=k and k[0]or kw.get('incl','**')
+ def to_pat(s):
+ lst=Utils.to_list(s)
+ ret=[]
+ for x in lst:
+ x=x.replace('//','/')
+ if x.endswith('/'):
+ x+='**'
+ lst2=x.split('/')
+ accu=[]
+ for k in lst2:
+ if k=='**':
+ accu.append(k)
+ else:
+ k=k.replace('.','[.]').replace('*','.*').replace('?','.')
+ k='^%s$'%k
+ accu.append(re.compile(k))
+ ret.append(accu)
+ return ret
+ def filtre(name,nn):
+ ret=[]
+ for lst in nn:
+ if not lst:
+ pass
+ elif lst[0]=='**':
+ ret.append(lst)
+ if len(lst)>1:
+ if lst[1].match(name):
+ ret.append(lst[2:])
+ else:
+ ret.append([])
+ elif lst[0].match(name):
+ ret.append(lst[1:])
+ return ret
+ def accept(name,pats):
+ nacc=filtre(name,pats[0])
+ nrej=filtre(name,pats[1])
+ if[]in nrej:
+ nacc=[]
+ return[nacc,nrej]
+ def ant_iter(nodi,maxdepth=25,pats=[]):
+ nodi.__class__.bld.rescan(nodi)
+ for name in nodi.__class__.bld.cache_dir_contents[nodi.id]:
+ npats=accept(name,pats)
+ if npats and npats[0]:
+ accepted=[]in npats[0]
+ node=nodi.find_resource(name)
+ if node and accepted:
+ if src and node.id&3==FILE:
+ yield node
+ else:
+ node=nodi.find_dir(name)
+ if node and node.id!=nodi.__class__.bld.bldnode.id:
+ if accepted and dir:
+ yield node
+ if maxdepth:
+ for k in ant_iter(node,maxdepth=maxdepth-1,pats=npats):
+ yield k
+ if bld:
+ for node in nodi.childs.values():
+ if node.id==nodi.__class__.bld.bldnode.id:
+ continue
+ if node.id&3==BUILD:
+ npats=accept(node.name,pats)
+ if npats and npats[0]and[]in npats[0]:
+ yield node
+   return
+ ret=[x for x in ant_iter(self,pats=[to_pat(incl),to_pat(excl)])]
+ if kw.get('flat',True):
+ return" ".join([x.relpath_gen(self)for x in ret])
+ return ret
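+ # update_build_dir: refresh the cached listing of a build directory. The
+ # on-disk contents are merged into cache_dir_contents, the folder is marked
+ # as scanned, regular files are declared as build nodes with a NIL signature,
+ # and subdirectories are processed recursively. Without an explicit env the
+ # update is repeated for every configured environment.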
+ def update_build_dir(self,env=None):
+ if not env:
+   # 'bld' is not defined in this scope; use the build context bound to the
+   # Node class (all_envs maps variant names to environments)
+   for env in self.__class__.bld.all_envs.values():
+ self.update_build_dir(env)
+ return
+ path=self.abspath(env)
+ lst=Utils.listdir(path)
+ try:
+ self.__class__.bld.cache_dir_contents[self.id].update(lst)
+ except KeyError:
+ self.__class__.bld.cache_dir_contents[self.id]=set(lst)
+ self.__class__.bld.cache_scanned_folders[self.id]=True
+ for k in lst:
+ npath=path+os.sep+k
+ st=os.stat(npath)
+ if stat.S_ISREG(st[stat.ST_MODE]):
+ ick=self.find_or_declare(k)
+ if not(ick.id in self.__class__.bld.node_sigs[env.variant()]):
+ self.__class__.bld.node_sigs[env.variant()][ick.id]=Constants.SIG_NIL
+ elif stat.S_ISDIR(st[stat.ST_MODE]):
+ child=self.find_dir(k)
+ if not child:
+ child=self.ensure_dir_node_from_path(k)
+ child.update_build_dir(env)
+class Nodu(Node):
+ pass
+
View
158 wafadmin/Options.py
@@ -0,0 +1,158 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+import os,sys,imp,types,tempfile,optparse
+import Logs,Utils
+from Constants import*
+cmds='distclean configure build install clean uninstall check dist distcheck'.split()
+commands={}
+is_install=False
+options={}
+arg_line=[]
+launch_dir=''
+tooldir=''
+lockfile=os.environ.get('WAFLOCK','.lock-wscript')
+try:cache_global=os.path.abspath(os.environ['WAFCACHE'])
+except KeyError:cache_global=''
+platform=Utils.unversioned_sys_platform()
+conf_file='conf-runs-%s-%d.pickle'%(platform,ABI)
+remote_repo=['http://waf.googlecode.com/svn/']
+default_prefix=os.environ.get('PREFIX')
+if not default_prefix:
+ if platform=='win32':
+ d=tempfile.gettempdir()
+ default_prefix=d[0].upper()+d[1:]
+ else:default_prefix='/usr/local/'
+# JOBS comes from the environment as a string, so cast it before the numeric check below
+default_jobs=int(os.environ.get('JOBS',-1))
+if default_jobs<1:
+ try:
+ if'SC_NPROCESSORS_ONLN'in os.sysconf_names:
+ default_jobs=os.sysconf('SC_NPROCESSORS_ONLN')
+ else:
+ default_jobs=int(Utils.cmd_output(['sysctl','-n','hw.ncpu']))
+ except:
+ if os.name=='java':
+ from java.lang import Runtime
+ default_jobs=Runtime.getRuntime().availableProcessors()
+ else:
+ default_jobs=int(os.environ.get('NUMBER_OF_PROCESSORS',1))
+default_destdir=os.environ.get('DESTDIR','')
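+# get_usage is patched onto optparse.OptionParser below: it builds the help
+# text from the commands defined in the top-level wscript (functions with a
+# docstring, excluding set_options/init/shutdown and names starting with '_')
+# and falls back to the built-in command names when no wscript module is loaded.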
+def get_usage(self):
+ cmds_str=[]
+ module=Utils.g_module
+ if module:
+ tbl=module.__dict__
+ keys=list(tbl.keys())
+ keys.sort()
+ if'build'in tbl:
+ if not module.build.__doc__:
+ module.build.__doc__='builds the project'
+ if'configure'in tbl:
+ if not module.configure.__doc__:
+ module.configure.__doc__='configures the project'
+ ban=['set_options','init','shutdown']
+ optlst=[x for x in keys if not x in ban and type(tbl[x])is type(parse_args_impl)and tbl[x].__doc__ and not x.startswith('_')]
+ just=max([len(x)for x in optlst])
+ for x in optlst:
+ cmds_str.append(' %s: %s'%(x.ljust(just),tbl[x].__doc__))
+ ret='\n'.join(cmds_str)
+ else:
+ ret=' '.join(cmds)
+ return'''waf [command] [options]
+
+Main commands (example: ./waf build -j4)
+%s
+'''%ret
+setattr(optparse.OptionParser,'get_usage',get_usage)
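+# create_parser builds the optparse parser with the standard waf flags
+# (-j/--jobs, -k/--keep, -v, --nocache, --zones, -p/--progress, --targets)
+# plus the configuration and installation option groups.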
+def create_parser(module=None):
+ Logs.debug('options: create_parser is called')
+ parser=optparse.OptionParser(conflict_handler="resolve",version='waf %s (%s)'%(WAFVERSION,WAFREVISION))
+ parser.formatter.width=Utils.get_term_cols()
+ p=parser.add_option
+ p('-j','--jobs',type='int',default=default_jobs,help='amount of parallel jobs (%r)'%default_jobs,dest='jobs')
+ p('-k','--keep',action='store_true',default=False,help='keep running happily on independent task groups',dest='keep')
+ p('-v','--verbose',action='count',default=0,help='verbosity level -v -vv or -vvv [default: 0]',dest='verbose')
+ p('--nocache',action='store_true',default=False,help='ignore the WAFCACHE (if set)',dest='nocache')
+ p('--zones',action='store',default='',help='debugging zones (task_gen, deps, tasks, etc)',dest='zones')
+ p('-p','--progress',action='count',default=0,help='-p: progress bar; -pp: ide output',dest='progress_bar')
+ p('--targets',action='store',default='',help='build given task generators, e.g. "target1,target2"',dest='compile_targets')
+ gr=optparse.OptionGroup(parser,'configuration options')
+ parser.add_option_group(gr)
+ gr.add_option('-b','--blddir',action='store',default='',help='build dir for the project (configuration)',dest='blddir')
+ gr.add_option('-s','--srcdir',action='store',default='',help='src dir for the project (configuration)',dest='srcdir')
+ gr.add_option('--prefix',help='installation prefix (configuration) [default: %r]'%default_prefix,default=default_prefix,dest='prefix')
+ gr.add_option('--download',action='store_true',default=False,help='try to download the tools if missing',dest='download')
+ gr=optparse.OptionGroup(parser,'installation options')
+ parser.add_option_group(gr)
+ gr.add_option('--destdir',help='installation root [default: %r]'%default_destdir,default=default_destdir,dest='destdir')
+ gr.add_option('-f','--force',action='store_true',default=False,help='force file installation',dest='force')
+ return parser
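+# parse_args_impl parses the command line into the module-level 'options',
+# 'commands' and 'arg_line' globals: it defaults to 'build' when no command is
+# given, inserts 'build' before 'check' and 'init' at the front of the argument
+# list, clamps the job count and configures the logging verbosity and zones.
+# For example, './waf configure build -j4' enables the 'configure' and 'build'
+# commands with options.jobs set to 4.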
+def parse_args_impl(parser,_args=None):
+ global options,commands,arg_line
+ (options,args)=parser.parse_args(args=_args)
+ arg_line=args
+ commands={}
+ for var in cmds:commands[var]=0
+ if not args:
+ commands['build']=1
+ args.append('build')
+ for arg in args:
+ commands[arg]=True
+ if'check'in args:
+ idx=args.index('check')
+ try:
+ bidx=args.index('build')
+ if bidx>idx:
+ raise ValueError('build before check')
+ except ValueError,e:
+ args.insert(idx,'build')
+ if args[0]!='init':
+ args.insert(0,'init')
+ if options.keep:options.jobs=1
+ if options.jobs<1:options.jobs=1
+ if'install'in sys.argv or'uninstall'in sys.argv:
+ options.destdir=options.destdir and os.path.abspath(os.path.expanduser(options.destdir))
+ Logs.verbose=options.verbose
+ Logs.init_log()
+ if options.zones:
+ Logs.zones=options.zones.split(',')
+ if not Logs.verbose:Logs.verbose=1
+ elif Logs.verbose>0:
+ Logs.zones=['runner']
+ if Logs.verbose>2:
+ Logs.zones=['*']
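+# Handler is the option-handling context exposed to wscripts: it wraps the
+# optparse parser, keeps the current instance in the class-level
+# Handler.parser reference, and offers sub_options/tool_options to recurse
+# into subfolders and load the set_options functions of individual tools.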
+class Handler(Utils.Context):
+ parser=None
+ def __init__(self,module=None):
+ self.parser=create_parser(module)
+ self.cwd=os.getcwd()
+ Handler.parser=self
+ def add_option(self,*k,**kw):
+ self.parser.add_option(*k,**kw)
+ def add_option_group(self,*k,**kw):
+ return self.parser.add_option_group(*k,**kw)
+ def get_option_group(self,opt_str):
+ return self.parser.get_option_group(opt_str)
+ def sub_options(self,*k,**kw):
+ if not k:raise Utils.WscriptError('folder expected')
+ self.recurse(k[0],name='set_options')
+ def tool_options(self,*k,**kw):
+  if not k or not k[0]:
+   raise Utils.WscriptError('invalid tool_options call %r %r'%(k,kw))
+ tools=Utils.to_list(k[0])
+ path=Utils.to_list(kw.get('tdir',kw.get('tooldir',tooldir)))
+ for tool in tools:
+ tool=tool.replace('++','xx')
+ if tool=='java':tool='javaw'
+ if tool.lower()=='unittest':tool='unittestw'
+ module=Utils.load_tool(tool,path)
+ try:
+ fun=module.set_options
+ except AttributeError:
+ pass
+ else:
+ fun(kw.get('option_group',self))
+ def parse_args(self,args=None):
+ parse_args_impl(self.parser,args)
+
View
160 wafadmin/Runner.py
@@ -0,0 +1,160 @@
+#! /usr/bin/env python