[hamster-applet] unpacking waf as per bug 669470



commit 7ed5e3c383ddc134163b6864bfa5644489aa72bf
Author: Toms Bauģis <toms baugis gmail com>
Date:   Fri Feb 10 22:26:03 2012 +0200

    unpacking waf as per bug 669470

 waf                         |  Bin 92257 -> 4027 bytes
 wafadmin/3rdpartys/boost.py |  225 +++++++++++++
 wafadmin/3rdpartys/fluid.py |   17 +
 wafadmin/Build.py           |  678 +++++++++++++++++++++++++++++++++++++
 wafadmin/Configure.py       |  310 +++++++++++++++++
 wafadmin/Constants.py       |   47 +++
 wafadmin/Environment.py     |  158 +++++++++
 wafadmin/Logs.py            |   97 ++++++
 wafadmin/Node.py            |  494 +++++++++++++++++++++++++++
 wafadmin/Options.py         |  158 +++++++++
 wafadmin/Runner.py          |  160 +++++++++
 wafadmin/Scripting.py       |  414 +++++++++++++++++++++++
 wafadmin/Task.py            |  774 +++++++++++++++++++++++++++++++++++++++++++
 wafadmin/TaskGen.py         |  345 +++++++++++++++++++
 wafadmin/Tools/__init__.py  |    4 +
 wafadmin/Tools/config_c.py  |  531 +++++++++++++++++++++++++++++
 wafadmin/Tools/dbus.py      |   24 ++
 wafadmin/Tools/gdc.py       |   36 ++
 wafadmin/Tools/glib2.py     |   83 +++++
 wafadmin/Tools/gnome.py     |  162 +++++++++
 wafadmin/Tools/gnu_dirs.py  |   63 ++++
 wafadmin/Tools/intltool.py  |   95 ++++++
 wafadmin/Tools/libtool.py   |  239 +++++++++++++
 wafadmin/Tools/misc.py      |  302 +++++++++++++++++
 wafadmin/Tools/preproc.py   |  598 +++++++++++++++++++++++++++++++++
 wafadmin/Tools/python.py    |  278 ++++++++++++++++
 wafadmin/Utils.py           |  520 +++++++++++++++++++++++++++++
 wafadmin/__init__.py        |    4 +
 wafadmin/ansiterm.py        |  144 ++++++++
 29 files changed, 6960 insertions(+), 0 deletions(-)
---
diff --git a/waf b/waf
index 0fe708e..16131d7 100755
Binary files a/waf and b/waf differ
diff --git a/wafadmin/3rdpartys/boost.py b/wafadmin/3rdpartys/boost.py
new file mode 100644
index 0000000..dca426c
--- /dev/null
+++ b/wafadmin/3rdpartys/boost.py
@@ -0,0 +1,225 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+import os.path,glob,types,re,sys
+import Configure,config_c,Options,Utils,Logs
+from Logs import warn,debug
+from Configure import conf
+boost_code='''
+#include <iostream>
+#include <boost/version.hpp>
+int main() { std::cout << BOOST_VERSION << std::endl; }
+'''
+boost_libpath=['/usr/lib','/usr/local/lib','/opt/local/lib','/sw/lib','/lib']
+boost_cpppath=['/usr/include','/usr/local/include','/opt/local/include','/sw/include']
+STATIC_NOSTATIC='nostatic'
+STATIC_BOTH='both'
+STATIC_ONLYSTATIC='onlystatic'
+is_versiontag=re.compile('^\d+_\d+_?\d*$')
+is_threadingtag=re.compile('^mt$')
+is_abitag=re.compile('^[sgydpn]+$')
+is_toolsettag=re.compile('^(acc|borland|como|cw|dmc|darwin|gcc|hp_cxx|intel|kylix|vc|mgw|qcc|sun|vacpp)\d*$')
+is_pythontag=re.compile('^py[0-9]{2}$')
+def set_options(opt):
+	opt.add_option('--boost-includes',type='string',default='',dest='boostincludes',help='path to the boost directory where the includes are e.g. /usr/local/include/boost-1_35')
+	opt.add_option('--boost-libs',type='string',default='',dest='boostlibs',help='path to the directory where the boost libs are e.g. /usr/local/lib')
+def string_to_version(s):
+	version=s.split('.')
+	if len(version)<3:return 0
+	return int(version[0])*100000+int(version[1])*100+int(version[2])
+def version_string(version):
+	major=version/100000
+	minor=version/100%1000
+	minor_minor=version%100
+	if minor_minor==0:
+		return"%d_%d"%(major,minor)
+	else:
+		return"%d_%d_%d"%(major,minor,minor_minor)
+def libfiles(lib,pattern,lib_paths):
+	result=[]
+	for lib_path in lib_paths:
+		libname=pattern%('boost_%s[!_]*'%lib)
+		result+=glob.glob(os.path.join(lib_path,libname))
+	return result
+def get_boost_version_number(self,dir):
+	try:
+		return self.run_c_code(compiler='cxx',code=boost_code,includes=dir,execute=1,env=self.env.copy(),type='cprogram',compile_mode='cxx',compile_filename='test.cpp')
+	except Configure.ConfigurationError,e:
+		return-1
+def set_default(kw,var,val):
+	if not var in kw:
+		kw[var]=val
+def tags_score(tags,kw):
+	score=0
+	needed_tags={'threading':kw['tag_threading'],'abi':kw['tag_abi'],'toolset':kw['tag_toolset'],'version':kw['tag_version'],'python':kw['tag_python']}
+	if kw['tag_toolset']is None:
+		v=kw['env']
+		toolset=v['CXX_NAME']
+		if v['CXX_VERSION']:
+			version_no=v['CXX_VERSION'].split('.')
+			toolset+=version_no[0]
+			if len(version_no)>1:
+				toolset+=version_no[1]
+		needed_tags['toolset']=toolset
+	found_tags={}
+	for tag in tags:
+		if is_versiontag.match(tag):found_tags['version']=tag
+		if is_threadingtag.match(tag):found_tags['threading']=tag
+		if is_abitag.match(tag):found_tags['abi']=tag
+		if is_toolsettag.match(tag):found_tags['toolset']=tag
+		if is_pythontag.match(tag):found_tags['python']=tag
+	for tagname in needed_tags.iterkeys():
+		if needed_tags[tagname]is not None and tagname in found_tags:
+			if re.compile(needed_tags[tagname]).match(found_tags[tagname]):
+				score+=kw['score_'+tagname][0]
+			else:
+				score+=kw['score_'+tagname][1]
+	return score
+def validate_boost(self,kw):
+	ver=kw.get('version','')
+	for x in'min_version max_version version'.split():
+		set_default(kw,x,ver)
+	set_default(kw,'lib','')
+	kw['lib']=Utils.to_list(kw['lib'])
+	set_default(kw,'env',self.env)
+	set_default(kw,'libpath',boost_libpath)
+	set_default(kw,'cpppath',boost_cpppath)
+	for x in'tag_threading tag_version tag_toolset'.split():
+		set_default(kw,x,None)
+	set_default(kw,'tag_abi','^[^d]*$')
+	set_default(kw,'python',str(sys.version_info[0])+str(sys.version_info[1]))
+	set_default(kw,'tag_python','^py'+kw['python']+'$')
+	set_default(kw,'score_threading',(10,-10))
+	set_default(kw,'score_abi',(10,-10))
+	set_default(kw,'score_python',(10,-10))
+	set_default(kw,'score_toolset',(1,-1))
+	set_default(kw,'score_version',(100,-100))
+	set_default(kw,'score_min',0)
+	set_default(kw,'static',STATIC_NOSTATIC)
+	set_default(kw,'found_includes',False)
+	set_default(kw,'min_score',0)
+	set_default(kw,'errmsg','not found')
+	set_default(kw,'okmsg','ok')
+def find_boost_includes(self,kw):
+	boostPath=getattr(Options.options,'boostincludes','')
+	if boostPath:
+		boostPath=[os.path.normpath(os.path.expandvars(os.path.expanduser(boostPath)))]
+	else:
+		boostPath=Utils.to_list(kw['cpppath'])
+	min_version=string_to_version(kw.get('min_version',''))
+	max_version=string_to_version(kw.get('max_version',''))or(sys.maxint-1)
+	version=0
+	for include_path in boostPath:
+		boost_paths=[p for p in glob.glob(os.path.join(include_path,'boost*'))if os.path.isdir(p)]
+		debug('BOOST Paths: %r'%boost_paths)
+		for path in boost_paths:
+			pathname=os.path.split(path)[-1]
+			ret=-1
+			if pathname=='boost':
+				path=include_path
+				ret=self.get_boost_version_number(path)
+			elif pathname.startswith('boost-'):
+				ret=self.get_boost_version_number(path)
+			ret=int(ret)
+			if ret!=-1 and ret>=min_version and ret<=max_version and ret>version:
+				boost_path=path
+				version=ret
+	if not version:
+		self.fatal('boost headers not found! (required version min: %s max: %s)'%(kw['min_version'],kw['max_version']))
+		return False
+	found_version=version_string(version)
+	versiontag='^'+found_version+'$'
+	if kw['tag_version']is None:
+		kw['tag_version']=versiontag
+	elif kw['tag_version']!=versiontag:
+		warn('boost header version %r and tag_version %r do not match!'%(versiontag,kw['tag_version']))
+	env=self.env
+	env['CPPPATH_BOOST']=boost_path
+	env['BOOST_VERSION']=found_version
+	self.found_includes=1
+	ret='Version %s (%s)'%(found_version,boost_path)
+	return ret
+def find_boost_library(self,lib,kw):
+	def find_library_from_list(lib,files):
+		lib_pattern=re.compile('.*boost_(.*?)\..*')
+		result=(None,None)
+		resultscore=kw['min_score']-1
+		for file in files:
+			m=lib_pattern.search(file,1)
+			if m:
+				libname=m.group(1)
+				libtags=libname.split('-')[1:]
+				currentscore=tags_score(libtags,kw)
+				if currentscore>resultscore:
+					result=(libname,file)
+					resultscore=currentscore
+		return result
+	lib_paths=getattr(Options.options,'boostlibs','')
+	if lib_paths:
+		lib_paths=[os.path.normpath(os.path.expandvars(os.path.expanduser(lib_paths)))]
+	else:
+		lib_paths=Utils.to_list(kw['libpath'])
+	v=kw.get('env',self.env)
+	(libname,file)=(None,None)
+	if kw['static']in[STATIC_NOSTATIC,STATIC_BOTH]:
+		st_env_prefix='LIB'
+		files=libfiles(lib,v['shlib_PATTERN'],lib_paths)
+		(libname,file)=find_library_from_list(lib,files)
+	if libname is None and kw['static']in[STATIC_ONLYSTATIC,STATIC_BOTH]:
+		st_env_prefix='STATICLIB'
+		staticLibPattern=v['staticlib_PATTERN']
+		if self.env['CC_NAME']=='msvc':
+			staticLibPattern='lib'+staticLibPattern
+		files=libfiles(lib,staticLibPattern,lib_paths)
+		(libname,file)=find_library_from_list(lib,files)
+	if libname is not None:
+		v['LIBPATH_BOOST_'+lib.upper()]=[os.path.split(file)[0]]
+		if self.env['CC_NAME']=='msvc'and os.path.splitext(file)[1]=='.lib':
+			v[st_env_prefix+'_BOOST_'+lib.upper()]=['libboost_'+libname]
+		else:
+			v[st_env_prefix+'_BOOST_'+lib.upper()]=['boost_'+libname]
+		return
+	self.fatal('lib boost_'+lib+' not found!')
+def check_boost(self,*k,**kw):
+	if not self.env['CXX']:
+		self.fatal('load a c++ compiler tool first, for example conf.check_tool("g++")')
+	self.validate_boost(kw)
+	ret=None
+	try:
+		if not kw.get('found_includes',None):
+			self.check_message_1(kw.get('msg_includes','boost headers'))
+			ret=self.find_boost_includes(kw)
+	except Configure.ConfigurationError,e:
+		if'errmsg'in kw:
+			self.check_message_2(kw['errmsg'],'YELLOW')
+		if'mandatory'in kw:
+			if Logs.verbose>1:
+				raise
+			else:
+				self.fatal('the configuration failed (see %r)'%self.log.name)
+	else:
+		if'okmsg'in kw:
+			self.check_message_2(kw.get('okmsg_includes',ret))
+	for lib in kw['lib']:
+		self.check_message_1('library boost_'+lib)
+		try:
+			self.find_boost_library(lib,kw)
+		except Configure.ConfigurationError,e:
+			ret=False
+			if'errmsg'in kw:
+				self.check_message_2(kw['errmsg'],'YELLOW')
+			if'mandatory'in kw:
+				if Logs.verbose>1:
+					raise
+				else:
+					self.fatal('the configuration failed (see %r)'%self.log.name)
+		else:
+			if'okmsg'in kw:
+				self.check_message_2(kw['okmsg'])
+	return ret
+
+conf(get_boost_version_number)
+conf(validate_boost)
+conf(find_boost_includes)
+conf(find_boost_library)
+conf(check_boost)
diff --git a/wafadmin/3rdpartys/fluid.py b/wafadmin/3rdpartys/fluid.py
new file mode 100644
index 0000000..e61f988
--- /dev/null
+++ b/wafadmin/3rdpartys/fluid.py
@@ -0,0 +1,17 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+import Task
+from TaskGen import extension
+Task.simple_task_type('fluid','${FLUID} -c -o ${TGT[0].abspath(env)} -h ${TGT[1].abspath(env)} ${SRC}','BLUE',shell=False,ext_out='.cxx')
+def fluid(self,node):
+	cpp=node.change_ext('.cpp')
+	hpp=node.change_ext('.hpp')
+	self.create_task('fluid',node,[cpp,hpp])
+	if'cxx'in self.features:
+		self.allnodes.append(cpp)
+def detect(conf):
+	fluid=conf.find_program('fluid',var='FLUID',mandatory=True)
+	conf.check_cfg(path='fltk-config',package='',args='--cxxflags --ldflags',uselib_store='FLTK',mandatory=True)
+
+extension('.fl')(fluid)
diff --git a/wafadmin/Build.py b/wafadmin/Build.py
new file mode 100644
index 0000000..48d69f7
--- /dev/null
+++ b/wafadmin/Build.py
@@ -0,0 +1,678 @@
+#! /usr/bin/env python
+# encoding: utf-8
+import sys
+if sys.hexversion < 0x020400f0: from sets import Set as set
+import os,sys,errno,re,glob,gc,datetime,shutil
+try:import cPickle
+except:import pickle as cPickle
+import Runner,TaskGen,Node,Scripting,Utils,Environment,Task,Logs,Options
+from Logs import debug,error,info
+from Constants import*
+SAVED_ATTRS='root srcnode bldnode node_sigs node_deps raw_deps task_sigs id_nodes'.split()
+bld=None
+class BuildError(Utils.WafError):
+	def __init__(self,b=None,t=[]):
+		self.bld=b
+		self.tasks=t
+		self.ret=1
+		Utils.WafError.__init__(self,self.format_error())
+	def format_error(self):
+		lst=['Build failed:']
+		for tsk in self.tasks:
+			txt=tsk.format_error()
+			if txt:lst.append(txt)
+		sep=' '
+		if len(lst)>2:
+			sep='\n'
+		return sep.join(lst)
+def group_method(fun):
+	def f(*k,**kw):
+		if not k[0].is_install:
+			return False
+		postpone=True
+		if'postpone'in kw:
+			postpone=kw['postpone']
+			del kw['postpone']
+		if postpone:
+			m=k[0].task_manager
+			if not m.groups:m.add_group()
+			m.groups[m.current_group].post_funs.append((fun,k,kw))
+			if not'cwd'in kw:
+				kw['cwd']=k[0].path
+		else:
+			fun(*k,**kw)
+	return f
+class BuildContext(Utils.Context):
+	def __init__(self):
+		global bld
+		bld=self
+		self.task_manager=Task.TaskManager()
+		self.id_nodes=0
+		self.idx={}
+		self.all_envs={}
+		self.bdir=''
+		self.path=None
+		self.deps_man=Utils.DefaultDict(list)
+		self.cache_node_abspath={}
+		self.cache_scanned_folders={}
+		self.uninstall=[]
+		for v in'cache_node_abspath task_sigs node_deps raw_deps node_sigs'.split():
+			var={}
+			setattr(self,v,var)
+		self.cache_dir_contents={}
+		self.all_task_gen=[]
+		self.task_gen_cache_names={}
+		self.cache_sig_vars={}
+		self.log=None
+		self.root=None
+		self.srcnode=None
+		self.bldnode=None
+		class node_class(Node.Node):
+			pass
+		self.node_class=node_class
+		self.node_class.__module__="Node"
+		self.node_class.__name__="Nodu"
+		self.node_class.bld=self
+		self.is_install=None
+	def __copy__(self):
+		raise Utils.WafError('build contexts are not supposed to be cloned')
+	def load(self):
+		try:
+			env=Environment.Environment(os.path.join(self.cachedir,'build.config.py'))
+		except(IOError,OSError):
+			pass
+		else:
+			if env['version']<HEXVERSION:
+				raise Utils.WafError('Version mismatch! reconfigure the project')
+			for t in env['tools']:
+				self.setup(**t)
+		try:
+			gc.disable()
+			f=data=None
+			Node.Nodu=self.node_class
+			try:
+				f=open(os.path.join(self.bdir,DBFILE),'rb')
+			except(IOError,EOFError):
+				pass
+			try:
+				if f:data=cPickle.load(f)
+			except AttributeError:
+				if Logs.verbose>1:raise
+			if data:
+				for x in SAVED_ATTRS:setattr(self,x,data[x])
+			else:
+				debug('build: Build cache loading failed')
+		finally:
+			if f:f.close()
+			gc.enable()
+	def save(self):
+		gc.disable()
+		self.root.__class__.bld=None
+		Node.Nodu=self.node_class
+		db=os.path.join(self.bdir,DBFILE)
+		file=open(db+'.tmp','wb')
+		data={}
+		for x in SAVED_ATTRS:data[x]=getattr(self,x)
+		cPickle.dump(data,file,-1)
+		file.close()
+		try:os.unlink(db)
+		except OSError:pass
+		os.rename(db+'.tmp',db)
+		self.root.__class__.bld=self
+		gc.enable()
+	def clean(self):
+		debug('build: clean called')
+		precious=set([])
+		for env in self.all_envs.values():
+			for x in env[CFG_FILES]:
+				node=self.srcnode.find_resource(x)
+				if node:
+					precious.add(node.id)
+		def clean_rec(node):
+			for x in list(node.childs.keys()):
+				nd=node.childs[x]
+				tp=nd.id&3
+				if tp==Node.DIR:
+					clean_rec(nd)
+				elif tp==Node.BUILD:
+					if nd.id in precious:continue
+					for env in self.all_envs.values():
+						try:os.remove(nd.abspath(env))
+						except OSError:pass
+					node.childs.__delitem__(x)
+		clean_rec(self.srcnode)
+		for v in'node_sigs node_deps task_sigs raw_deps cache_node_abspath'.split():
+			setattr(self,v,{})
+	def compile(self):
+		debug('build: compile called')
+		self.flush()
+		self.generator=Runner.Parallel(self,Options.options.jobs)
+		def dw(on=True):
+			if Options.options.progress_bar:
+				if on:sys.stderr.write(Logs.colors.cursor_on)
+				else:sys.stderr.write(Logs.colors.cursor_off)
+		debug('build: executor starting')
+		back=os.getcwd()
+		os.chdir(self.bldnode.abspath())
+		try:
+			try:
+				dw(on=False)
+				self.generator.start()
+			except KeyboardInterrupt:
+				dw()
+				if Runner.TaskConsumer.consumers:
+					self.save()
+				raise
+			except Exception:
+				dw()
+				raise
+			else:
+				dw()
+				if Runner.TaskConsumer.consumers:
+					self.save()
+			if self.generator.error:
+				raise BuildError(self,self.task_manager.tasks_done)
+		finally:
+			os.chdir(back)
+	def install(self):
+		debug('build: install called')
+		self.flush()
+		if self.is_install<0:
+			lst=[]
+			for x in self.uninstall:
+				dir=os.path.dirname(x)
+				if not dir in lst:lst.append(dir)
+			lst.sort()
+			lst.reverse()
+			nlst=[]
+			for y in lst:
+				x=y
+				while len(x)>4:
+					if not x in nlst:nlst.append(x)
+					x=os.path.dirname(x)
+			nlst.sort()
+			nlst.reverse()
+			for x in nlst:
+				try:os.rmdir(x)
+				except OSError:pass
+	def new_task_gen(self,*k,**kw):
+		if self.task_gen_cache_names:
+			self.task_gen_cache_names={}
+		kw['bld']=self
+		if len(k)==0:
+			ret=TaskGen.task_gen(*k,**kw)
+		else:
+			cls_name=k[0]
+			try:cls=TaskGen.task_gen.classes[cls_name]
+			except KeyError:raise Utils.WscriptError('%s is not a valid task generator -> %s'%(cls_name,[x for x in TaskGen.task_gen.classes]))
+			ret=cls(*k,**kw)
+		return ret
+	def __call__(self,*k,**kw):
+		if self.task_gen_cache_names:
+			self.task_gen_cache_names={}
+		kw['bld']=self
+		return TaskGen.task_gen(*k,**kw)
+	def load_envs(self):
+		try:
+			lst=Utils.listdir(self.cachedir)
+		except OSError,e:
+			if e.errno==errno.ENOENT:
+				raise Utils.WafError('The project was not configured: run "waf configure" first!')
+			else:
+				raise
+		if not lst:
+			raise Utils.WafError('The cache directory is empty: reconfigure the project')
+		for file in lst:
+			if file.endswith(CACHE_SUFFIX):
+				env=Environment.Environment(os.path.join(self.cachedir,file))
+				name=file[:-len(CACHE_SUFFIX)]
+				self.all_envs[name]=env
+		self.init_variants()
+		for env in self.all_envs.values():
+			for f in env[CFG_FILES]:
+				newnode=self.path.find_or_declare(f)
+				try:
+					hash=Utils.h_file(newnode.abspath(env))
+				except(IOError,AttributeError):
+					error("cannot find "+f)
+					hash=SIG_NIL
+				self.node_sigs[env.variant()][newnode.id]=hash
+		self.bldnode=self.root.find_dir(self.bldnode.abspath())
+		self.path=self.srcnode=self.root.find_dir(self.srcnode.abspath())
+		self.cwd=self.bldnode.abspath()
+	def setup(self,tool,tooldir=None,funs=None):
+		if isinstance(tool,list):
+			for i in tool:self.setup(i,tooldir)
+			return
+		if not tooldir:tooldir=Options.tooldir
+		module=Utils.load_tool(tool,tooldir)
+		if hasattr(module,"setup"):module.setup(self)
+	def init_variants(self):
+		debug('build: init variants')
+		lstvariants=[]
+		for env in self.all_envs.values():
+			if not env.variant()in lstvariants:
+				lstvariants.append(env.variant())
+		self.lst_variants=lstvariants
+		debug('build: list of variants is %r',lstvariants)
+		for name in lstvariants+[0]:
+			for v in'node_sigs cache_node_abspath'.split():
+				var=getattr(self,v)
+				if not name in var:
+					var[name]={}
+	def load_dirs(self,srcdir,blddir,load_cache=1):
+		assert(os.path.isabs(srcdir))
+		assert(os.path.isabs(blddir))
+		self.cachedir=os.path.join(blddir,CACHE_DIR)
+		if srcdir==blddir:
+			raise Utils.WafError("build dir must be different from srcdir: %s <-> %s "%(srcdir,blddir))
+		self.bdir=blddir
+		self.load()
+		if not self.root:
+			Node.Nodu=self.node_class
+			self.root=Node.Nodu('',None,Node.DIR)
+		if not self.srcnode:
+			self.srcnode=self.root.ensure_dir_node_from_path(srcdir)
+		debug('build: srcnode is %s and srcdir %s',self.srcnode.name,srcdir)
+		self.path=self.srcnode
+		try:os.makedirs(blddir)
+		except OSError:pass
+		if not self.bldnode:
+			self.bldnode=self.root.ensure_dir_node_from_path(blddir)
+		self.init_variants()
+	def rescan(self,src_dir_node):
+		if self.cache_scanned_folders.get(src_dir_node.id,None):return
+		self.cache_scanned_folders[src_dir_node.id]=True
+		if hasattr(self,'repository'):self.repository(src_dir_node)
+		if not src_dir_node.name and sys.platform=='win32':
+			return
+		parent_path=src_dir_node.abspath()
+		try:
+			lst=set(Utils.listdir(parent_path))
+		except OSError:
+			lst=set([])
+		self.cache_dir_contents[src_dir_node.id]=lst
+		cache=self.node_sigs[0]
+		for x in src_dir_node.childs.values():
+			if x.id&3!=Node.FILE:continue
+			if x.name in lst:
+				try:
+					cache[x.id]=Utils.h_file(x.abspath())
+				except IOError:
+					raise Utils.WafError('The file %s is not readable or has become a dir'%x.abspath())
+			else:
+				try:del cache[x.id]
+				except KeyError:pass
+				del src_dir_node.childs[x.name]
+		h1=self.srcnode.height()
+		h2=src_dir_node.height()
+		lst=[]
+		child=src_dir_node
+		while h2>h1:
+			lst.append(child.name)
+			child=child.parent
+			h2-=1
+		lst.reverse()
+		try:
+			for variant in self.lst_variants:
+				sub_path=os.path.join(self.bldnode.abspath(),variant,*lst)
+				self.listdir_bld(src_dir_node,sub_path,variant)
+		except OSError:
+			for node in src_dir_node.childs.values():
+				if node.id&3!=Node.BUILD:
+					continue
+				for dct in self.node_sigs.values():
+					if node.id in dct:
+						dct.__delitem__(node.id)
+				src_dir_node.childs.__delitem__(node.name)
+			for variant in self.lst_variants:
+				sub_path=os.path.join(self.bldnode.abspath(),variant,*lst)
+				try:
+					os.makedirs(sub_path)
+				except OSError:
+					pass
+	def listdir_src(self,parent_node):
+		pass
+	def remove_node(self,node):
+		pass
+	def listdir_bld(self,parent_node,path,variant):
+		i_existing_nodes=[x for x in parent_node.childs.values()if x.id&3==Node.BUILD]
+		lst=set(Utils.listdir(path))
+		node_names=set([x.name for x in i_existing_nodes])
+		remove_names=node_names-lst
+		ids_to_remove=[x.id for x in i_existing_nodes if x.name in remove_names]
+		cache=self.node_sigs[variant]
+		for nid in ids_to_remove:
+			if nid in cache:
+				cache.__delitem__(nid)
+	def get_env(self):
+		return self.env_of_name('default')
+	def set_env(self,name,val):
+		self.all_envs[name]=val
+	env=property(get_env,set_env)
+	def add_manual_dependency(self,path,value):
+		if isinstance(path,Node.Node):
+			node=path
+		elif os.path.isabs(path):
+			node=self.root.find_resource(path)
+		else:
+			node=self.path.find_resource(path)
+		self.deps_man[node.id].append(value)
+	def launch_node(self):
+		try:
+			return self.p_ln
+		except AttributeError:
+			self.p_ln=self.root.find_dir(Options.launch_dir)
+			return self.p_ln
+	def glob(self,pattern,relative=True):
+		path=self.path.abspath()
+		files=[self.root.find_resource(x)for x in glob.glob(path+os.sep+pattern)]
+		if relative:
+			files=[x.path_to_parent(self.path)for x in files if x]
+		else:
+			files=[x.abspath()for x in files if x]
+		return files
+	def add_group(self,*k):
+		self.task_manager.add_group(*k)
+	def set_group(self,*k,**kw):
+		self.task_manager.set_group(*k,**kw)
+	def hash_env_vars(self,env,vars_lst):
+		idx=str(id(env))+str(vars_lst)
+		try:return self.cache_sig_vars[idx]
+		except KeyError:pass
+		lst=[str(env[a])for a in vars_lst]
+		ret=Utils.h_list(lst)
+		debug('envhash: %r %r',ret,lst)
+		self.cache_sig_vars[idx]=ret
+		return ret
+	def name_to_obj(self,name,env):
+		cache=self.task_gen_cache_names
+		if not cache:
+			for x in self.all_task_gen:
+				vt=x.env.variant()+'_'
+				if x.name:
+					cache[vt+x.name]=x
+				else:
+					if isinstance(x.target,str):
+						target=x.target
+					else:
+						target=' '.join(x.target)
+					v=vt+target
+					if not cache.get(v,None):
+						cache[v]=x
+		return cache.get(env.variant()+'_'+name,None)
+	def flush(self,all=1):
+		self.ini=datetime.datetime.now()
+		self.task_gen_cache_names={}
+		self.name_to_obj('',self.env)
+		debug('build: delayed operation TaskGen.flush() called')
+		if Options.options.compile_targets:
+			debug('task_gen: posting objects %r listed in compile_targets',Options.options.compile_targets)
+			mana=self.task_manager
+			to_post=[]
+			min_grp=0
+			target_objects=Utils.DefaultDict(list)
+			for target_name in Options.options.compile_targets.split(','):
+				target_name=target_name.strip()
+				for env in self.all_envs.values():
+					tg=self.name_to_obj(target_name,env)
+					if tg:
+						target_objects[target_name].append(tg)
+						m=mana.group_idx(tg)
+						if m>min_grp:
+							min_grp=m
+							to_post=[tg]
+						elif m==min_grp:
+							to_post.append(tg)
+				if not target_name in target_objects and all:
+					raise Utils.WafError("target '%s' does not exist"%target_name)
+			debug('group: Forcing up to group %s for target %s',mana.group_name(min_grp),Options.options.compile_targets)
+			for i in xrange(len(mana.groups)):
+				mana.current_group=i
+				if i==min_grp:
+					break
+				g=mana.groups[i]
+				debug('group: Forcing group %s',mana.group_name(g))
+				for t in g.tasks_gen:
+					debug('group: Posting %s',t.name or t.target)
+					t.post()
+			for t in to_post:
+				t.post()
+		else:
+			debug('task_gen: posting objects (normal)')
+			ln=self.launch_node()
+			if ln.is_child_of(self.bldnode)or not ln.is_child_of(self.srcnode):
+				ln=self.srcnode
+			proj_node=self.root.find_dir(os.path.split(Utils.g_module.root_path)[0])
+			if proj_node.id!=self.srcnode.id:
+				ln=self.srcnode
+			for i in xrange(len(self.task_manager.groups)):
+				g=self.task_manager.groups[i]
+				self.task_manager.current_group=i
+				if Logs.verbose:
+					groups=[x for x in self.task_manager.groups_names if id(self.task_manager.groups_names[x])==id(g)]
+					name=groups and groups[0]or'unnamed'
+					Logs.debug('group: group',name)
+				for tg in g.tasks_gen:
+					if not tg.path.is_child_of(ln):
+						continue
+					if Logs.verbose:
+						Logs.debug('group: %s'%tg)
+					tg.post()
+	def env_of_name(self,name):
+		try:
+			return self.all_envs[name]
+		except KeyError:
+			error('no such environment: '+name)
+			return None
+	def progress_line(self,state,total,col1,col2):
+		n=len(str(total))
+		Utils.rot_idx+=1
+		ind=Utils.rot_chr[Utils.rot_idx%4]
+		ini=self.ini
+		pc=(100.*state)/total
+		eta=Utils.get_elapsed_time(ini)
+		fs="[%%%dd/%%%dd][%%s%%2d%%%%%%s][%s]["%(n,n,ind)
+		left=fs%(state,total,col1,pc,col2)
+		right='][%s%s%s]'%(col1,eta,col2)
+		cols=Utils.get_term_cols()-len(left)-len(right)+2*len(col1)+2*len(col2)
+		if cols<7:cols=7
+		ratio=int((cols*state)/total)-1
+		bar=('='*ratio+'>').ljust(cols)
+		msg=Utils.indicator%(left,bar,right)
+		return msg
+	def do_install(self,src,tgt,chmod=O644):
+		if self.is_install>0:
+			if not Options.options.force:
+				try:
+					st1=os.stat(tgt)
+					st2=os.stat(src)
+				except OSError:
+					pass
+				else:
+					if st1.st_mtime>=st2.st_mtime and st1.st_size==st2.st_size:
+						return False
+			srclbl=src.replace(self.srcnode.abspath(None)+os.sep,'')
+			info("* installing %s as %s"%(srclbl,tgt))
+			try:os.remove(tgt)
+			except OSError:pass
+			try:
+				shutil.copy2(src,tgt)
+				os.chmod(tgt,chmod)
+			except IOError:
+				try:
+					os.stat(src)
+				except(OSError,IOError):
+					error('File %r does not exist'%src)
+				raise Utils.WafError('Could not install the file %r'%tgt)
+			return True
+		elif self.is_install<0:
+			info("* uninstalling %s"%tgt)
+			self.uninstall.append(tgt)
+			try:
+				os.remove(tgt)
+			except OSError,e:
+				if e.errno!=errno.ENOENT:
+					if not getattr(self,'uninstall_error',None):
+						self.uninstall_error=True
+						Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)')
+					if Logs.verbose>1:
+						Logs.warn('could not remove %s (error code %r)'%(e.filename,e.errno))
+			return True
+	red=re.compile(r"^([A-Za-z]:)?[/\\\\]*")
+	def get_install_path(self,path,env=None):
+		if not env:env=self.env
+		destdir=env.get_destdir()
+		path=path.replace('/',os.sep)
+		destpath=Utils.subst_vars(path,env)
+		if destdir:
+			destpath=os.path.join(destdir,self.red.sub('',destpath))
+		return destpath
+	def install_dir(self,path,env=None):
+		if env:
+			assert isinstance(env,Environment.Environment),"invalid parameter"
+		else:
+			env=self.env
+		if not path:
+			return[]
+		destpath=self.get_install_path(path,env)
+		if self.is_install>0:
+			info('* creating %s'%destpath)
+			Utils.check_dir(destpath)
+		elif self.is_install<0:
+			info('* removing %s'%destpath)
+			self.uninstall.append(destpath+'/xxx')
+	def install_files(self,path,files,env=None,chmod=O644,relative_trick=False,cwd=None):
+		if env:
+			assert isinstance(env,Environment.Environment),"invalid parameter"
+		else:
+			env=self.env
+		if not path:return[]
+		if not cwd:
+			cwd=self.path
+		if isinstance(files,str)and'*'in files:
+			gl=cwd.abspath()+os.sep+files
+			lst=glob.glob(gl)
+		else:
+			lst=Utils.to_list(files)
+		if not getattr(lst,'__iter__',False):
+			lst=[lst]
+		destpath=self.get_install_path(path,env)
+		Utils.check_dir(destpath)
+		installed_files=[]
+		for filename in lst:
+			if isinstance(filename,str)and os.path.isabs(filename):
+				alst=Utils.split_path(filename)
+				destfile=os.path.join(destpath,alst[-1])
+			else:
+				if isinstance(filename,Node.Node):
+					nd=filename
+				else:
+					nd=cwd.find_resource(filename)
+				if not nd:
+					raise Utils.WafError("Unable to install the file %r (not found in %s)"%(filename,cwd))
+				if relative_trick:
+					destfile=os.path.join(destpath,filename)
+					Utils.check_dir(os.path.dirname(destfile))
+				else:
+					destfile=os.path.join(destpath,nd.name)
+				filename=nd.abspath(env)
+			if self.do_install(filename,destfile,chmod):
+				installed_files.append(destfile)
+		return installed_files
+	def install_as(self,path,srcfile,env=None,chmod=O644,cwd=None):
+		if env:
+			assert isinstance(env,Environment.Environment),"invalid parameter"
+		else:
+			env=self.env
+		if not path:
+			raise Utils.WafError("where do you want to install %r? (%r?)"%(srcfile,path))
+		if not cwd:
+			cwd=self.path
+		destpath=self.get_install_path(path,env)
+		dir,name=os.path.split(destpath)
+		Utils.check_dir(dir)
+		if isinstance(srcfile,Node.Node):
+			src=srcfile.abspath(env)
+		else:
+			src=srcfile
+			if not os.path.isabs(srcfile):
+				node=cwd.find_resource(srcfile)
+				if not node:
+					raise Utils.WafError("Unable to install the file %r (not found in %s)"%(srcfile,cwd))
+				src=node.abspath(env)
+		return self.do_install(src,destpath,chmod)
+	def symlink_as(self,path,src,env=None,cwd=None):
+		if sys.platform=='win32':
+			return
+		if not path:
+			raise Utils.WafError("where do you want to install %r? (%r?)"%(src,path))
+		tgt=self.get_install_path(path,env)
+		dir,name=os.path.split(tgt)
+		Utils.check_dir(dir)
+		if self.is_install>0:
+			link=False
+			if not os.path.islink(tgt):
+				link=True
+			elif os.readlink(tgt)!=src:
+				link=True
+			if link:
+				try:os.remove(tgt)
+				except OSError:pass
+				info('* symlink %s (-> %s)'%(tgt,src))
+				os.symlink(src,tgt)
+			return 0
+		else:
+			try:
+				info('* removing %s'%(tgt))
+				os.remove(tgt)
+				return 0
+			except OSError:
+				return 1
+	def exec_command(self,cmd,**kw):
+		debug('runner: system command -> %s',cmd)
+		if self.log:
+			self.log.write('%s\n'%cmd)
+			kw['log']=self.log
+		try:
+			if not kw.get('cwd',None):
+				kw['cwd']=self.cwd
+		except AttributeError:
+			self.cwd=kw['cwd']=self.bldnode.abspath()
+		return Utils.exec_command(cmd,**kw)
+	def printout(self,s):
+		f=self.log or sys.stderr
+		f.write(s)
+		f.flush()
+	def add_subdirs(self,dirs):
+		self.recurse(dirs,'build')
+	def pre_recurse(self,name_or_mod,path,nexdir):
+		if not hasattr(self,'oldpath'):
+			self.oldpath=[]
+		self.oldpath.append(self.path)
+		self.path=self.root.find_dir(nexdir)
+		return{'bld':self,'ctx':self}
+	def post_recurse(self,name_or_mod,path,nexdir):
+		self.path=self.oldpath.pop()
+	def pre_build(self):
+		if hasattr(self,'pre_funs'):
+			for m in self.pre_funs:
+				m(self)
+	def post_build(self):
+		if hasattr(self,'post_funs'):
+			for m in self.post_funs:
+				m(self)
+	def add_pre_fun(self,meth):
+		try:self.pre_funs.append(meth)
+		except AttributeError:self.pre_funs=[meth]
+	def add_post_fun(self,meth):
+		try:self.post_funs.append(meth)
+		except AttributeError:self.post_funs=[meth]
+	def use_the_magic(self):
+		Task.algotype=Task.MAXPARALLEL
+		Task.file_deps=Task.extract_deps
+		self.magic=True
+	install_as=group_method(install_as)
+	install_files=group_method(install_files)
+	symlink_as=group_method(symlink_as)
+
diff --git a/wafadmin/Configure.py b/wafadmin/Configure.py
new file mode 100644
index 0000000..5a2bbcf
--- /dev/null
+++ b/wafadmin/Configure.py
@@ -0,0 +1,310 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+import os,shlex,sys,time
+try:import cPickle
+except ImportError:import pickle as cPickle
+import Environment,Utils,Options,Logs
+from Logs import warn
+from Constants import*
# Compatibility shim: Python 3 exposes urlopen as urllib.request.urlopen,
# Python 2 directly as urllib.urlopen. Only an ImportError can occur here,
# so catch exactly that instead of the previous bare 'except:' which would
# also have hidden unrelated failures (e.g. KeyboardInterrupt).
try:
	from urllib import request
except ImportError:
	from urllib import urlopen
else:
	urlopen=request.urlopen
# Header written at the top of config.log; filled from vars() in post_init.
conf_template='''# project %(app)s configured on %(now)s by
# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s)
# using %(args)s
#
'''
class ConfigurationError(Utils.WscriptError):
	"""Raised by ConfigurationContext.fatal when a configuration test fails."""
	pass
# NOTE(review): consulted by post_recurse below; presumably enables automatic
# reconfiguration when wscript files change -- confirm against Scripting.py.
autoconfig=False
def find_file(filename,path_list):
	"""Return the first directory of *path_list* containing *filename*, else ''."""
	for candidate in Utils.to_list(path_list):
		full=os.path.join(candidate,filename)
		if os.path.exists(full):
			return candidate
	return''
def find_program_impl(env,filename,path_list=[],var=None,environ=None):
	"""Locate program *filename* on *path_list* (or $PATH) and return its
	absolute path, or ''. When *var* is given, a cached env[var] wins and a
	successful lookup is stored back into env[var]."""
	if not environ:
		environ=os.environ
	# a string path list is split on whitespace; a real list is kept as-is
	if hasattr(path_list,'split'):
		path_list=path_list.split()
	if var:
		if env[var]:return env[var]
		if var in environ:env[var]=environ[var]
	if not path_list:
		path_list=environ.get('PATH','').split(os.pathsep)
	# on win32 executables carry one of these suffixes; elsewhere none
	if Options.platform=='win32':
		suffixes=['.exe','.com','.bat','.cmd']
	else:
		suffixes=['']
	for candidate in[filename+s for s in suffixes]:
		for directory in path_list:
			full=os.path.join(directory,candidate)
			if os.path.isfile(full):
				if var:env[var]=full
				return full
	return''
class ConfigurationContext(Utils.Context):
	"""Context object driving the 'configure' step: loads tools, runs
	configuration tests, and stores the resulting Environments in the cache
	directory. Python 2 syntax ('except Exception,e') is used throughout."""
	# registry of test functions added via the conftest decorator below
	tests={}
	error_handlers=[]
	def __init__(self,env=None,blddir='',srcdir=''):
		self.env=None
		self.envname=''
		self.environ=dict(os.environ)
		# column width used to align the "Checking for ..." messages
		self.line_just=40
		self.blddir=blddir
		self.srcdir=srcdir
		self.all_envs={}
		self.cwd=self.curdir=os.getcwd()
		self.tools=[]
		self.setenv(DEFAULT)
		self.lastprog=''
		self.hash=0
		self.files=[]
		# tools already loaded, to avoid double-running their detect functions
		self.tool_cache=[]
		if self.blddir:
			self.post_init()
	def post_init(self):
		# Open config.log in the build directory and write the header.
		self.cachedir=os.path.join(self.blddir,CACHE_DIR)
		path=os.path.join(self.blddir,WAF_CONFIG_LOG)
		try:os.unlink(path)
		except(OSError,IOError):pass
		try:
			self.log=open(path,'w')
		except(OSError,IOError):
			self.fatal('could not open %r for writing'%path)
		app=Utils.g_module.APPNAME
		if app:
			ver=getattr(Utils.g_module,'VERSION','')
			if ver:
				app="%s (%s)"%(app,ver)
		now=time.ctime()
		pyver=sys.hexversion
		systype=sys.platform
		args=" ".join(sys.argv)
		wafver=WAFVERSION
		abi=ABI
		# conf_template is filled from the local variables defined above
		self.log.write(conf_template%vars())
	def __del__(self):
		# best-effort close of config.log
		if hasattr(self,'log')and self.log:
			self.log.close()
	def fatal(self,msg):
		"""Abort the configuration with a ConfigurationError."""
		raise ConfigurationError(msg)
	def check_tool(self,input,tooldir=None,funs=None):
		"""Load the given tool(s) and run their detection; with --download,
		missing tools are fetched from the remote waf repositories."""
		tools=Utils.to_list(input)
		if tooldir:tooldir=Utils.to_list(tooldir)
		for tool in tools:
			# normalize tool names ('g++' -> 'gxx', etc.)
			tool=tool.replace('++','xx')
			if tool=='java':tool='javaw'
			if tool.lower()=='unittest':tool='unittestw'
			# skip tools already processed for this env/funs combination
			mag=(tool,id(self.env),funs)
			if mag in self.tool_cache:
				continue
			self.tool_cache.append(mag)
			module=None
			try:
				module=Utils.load_tool(tool,tooldir)
			except Exception,e:
				ex=e
				if Options.options.download:
					_3rdparty=os.path.normpath(Options.tooldir[0]+os.sep+'..'+os.sep+'3rdparty')
					for x in Utils.to_list(Options.remote_repo):
						for sub in['branches/waf-%s/wafadmin/3rdparty'%WAFVERSION,'trunk/wafadmin/3rdparty']:
							url='/'.join((x,sub,tool+'.py'))
							try:
								web=urlopen(url)
								if web.getcode()!=200:
									continue
							except Exception,e:
								continue
							else:
								# save the downloaded tool next to the others
								loc=None
								try:
									loc=open(_3rdparty+os.sep+tool+'.py','wb')
									loc.write(web.read())
									web.close()
								finally:
									if loc:
										loc.close()
								Logs.warn('downloaded %s from %s'%(tool,url))
								try:
									module=Utils.load_tool(tool,tooldir)
								except:
									Logs.warn('module %s from %s is unusable'%(tool,url))
									try:
										os.unlink(_3rdparty+os.sep+tool+'.py')
									except:
										pass
									continue
						else:
							# for-else: the inner loop ran to completion,
							# stop after the first repository
							break
					if not module:
						Logs.error('Could not load the tool %r or download a suitable replacement from the repository (sys.path %r)\n%s'%(tool,sys.path,e))
						raise ex
				else:
					Logs.error('Could not load the tool %r in %r (try the --download option?):\n%s'%(tool,sys.path,e))
					raise ex
			if funs is not None:
				self.eval_rules(funs)
			else:
				func=getattr(module,'detect',None)
				if func:
					# plain functions are called directly; anything else is
					# treated as a list of method names to evaluate
					if type(func)is type(find_file):func(self)
					else:self.eval_rules(func)
			self.tools.append({'tool':tool,'tooldir':tooldir,'funs':funs})
	def sub_config(self,k):
		# recurse into sub-project 'configure' functions
		self.recurse(k,name='configure')
	def pre_recurse(self,name_or_mod,path,nexdir):
		return{'conf':self,'ctx':self}
	def post_recurse(self,name_or_mod,path,nexdir):
		# record the configured files/hash only when autoconfig is enabled
		if not autoconfig:
			return
		self.hash=hash((self.hash,getattr(name_or_mod,'waf_hash_val',name_or_mod)))
		self.files.append(path)
	def store(self,file=''):
		"""Write build.config.py and one cache file per environment."""
		if not os.path.isdir(self.cachedir):
			os.makedirs(self.cachedir)
		if not file:
			file=open(os.path.join(self.cachedir,'build.config.py'),'w')
		file.write('version = 0x%x\n'%HEXVERSION)
		file.write('tools = %r\n'%self.tools)
		file.close()
		if not self.all_envs:
			self.fatal('nothing to store in the configuration context!')
		for key in self.all_envs:
			tmpenv=self.all_envs[key]
			tmpenv.store(os.path.join(self.cachedir,key+CACHE_SUFFIX))
	def set_env_name(self,name,env):
		"""Register *env* under *name* and return it."""
		self.all_envs[name]=env
		return env
	def retrieve(self,name,fromenv=None):
		"""Return the environment called *name*, creating it on first use."""
		try:
			env=self.all_envs[name]
		except KeyError:
			env=Environment.Environment()
			env['PREFIX']=os.path.abspath(os.path.expanduser(Options.options.prefix))
			self.all_envs[name]=env
		else:
			if fromenv:warn("The environment %s may have been configured already"%name)
		return env
	def setenv(self,name):
		"""Make the environment called *name* the current one (self.env)."""
		self.env=self.retrieve(name)
		self.envname=name
	def add_os_flags(self,var,dest=None):
		# import a variable from os.environ into the env, if present
		try:self.env.append_value(dest or var,Utils.to_list(self.environ[var]))
		except KeyError:pass
	def check_message_1(self,sr):
		# first half of a two-part "Checking for ..." console/log message
		self.line_just=max(self.line_just,len(sr))
		for x in('\n',self.line_just*'-','\n',sr,'\n'):
			self.log.write(x)
		Utils.pprint('NORMAL',"%s :"%sr.ljust(self.line_just),sep='')
	def check_message_2(self,sr,color='GREEN'):
		# second half: the result, colorized
		self.log.write(sr)
		self.log.write('\n')
		Utils.pprint(color,sr)
	def check_message(self,th,msg,state,option=''):
		sr='Checking for %s %s'%(th,msg)
		self.check_message_1(sr)
		p=self.check_message_2
		if state:p('ok '+str(option))
		else:p('not found','YELLOW')
	def check_message_custom(self,th,msg,custom,option='',color='PINK'):
		sr='Checking for %s %s'%(th,msg)
		self.check_message_1(sr)
		self.check_message_2(custom,color)
	def start_msg(self,msg):
		"""Newer message API: begin a check message (pair with end_msg)."""
		try:
			# nested start_msg calls print nothing
			if self.in_msg:
				return
		except:
			self.in_msg=0
		self.in_msg+=1
		self.line_just=max(self.line_just,len(msg))
		for x in('\n',self.line_just*'-','\n',msg,'\n'):
			self.log.write(x)
		Utils.pprint('NORMAL',"%s :"%msg.ljust(self.line_just),sep='')
	def end_msg(self,result):
		"""Finish a start_msg: True -> 'ok', False -> 'not found', else str(result)."""
		self.in_msg-=1
		if self.in_msg:
			return
		color='GREEN'
		if result==True:
			msg='ok'
		elif result==False:
			msg='not found'
			color='YELLOW'
		else:
			msg=str(result)
		self.log.write(msg)
		self.log.write('\n')
		Utils.pprint(color,msg)
	def find_program(self,filename,path_list=[],var=None,mandatory=False):
		"""Find one of the given program name(s); optionally cache in env[var]
		and abort when *mandatory* and nothing is found."""
		ret=None
		if var:
			if self.env[var]:
				ret=self.env[var]
			elif var in os.environ:
				ret=os.environ[var]
		if not isinstance(filename,list):filename=[filename]
		if not ret:
			for x in filename:
				ret=find_program_impl(self.env,x,path_list,var,environ=self.environ)
				if ret:break
		self.check_message_1('Checking for program %s'%' or '.join(filename))
		self.log.write('  find program=%r paths=%r var=%r\n  -> %r\n'%(filename,path_list,var,ret))
		if ret:
			Utils.pprint('GREEN',str(ret))
		else:
			Utils.pprint('YELLOW','not found')
			if mandatory:
				self.fatal('The program %r is required'%filename)
		if var:
			self.env[var]=ret
		return ret
	def cmd_to_list(self,cmd):
		# NOTE(review): cmd.find(' ') is 0 (falsy) when the space is the very
		# first character and -1 (truthy) when there is no space at all; this
		# looks accidental but is kept as-is.
		if isinstance(cmd,str)and cmd.find(' '):
			try:
				os.stat(cmd)
			except OSError:
				return shlex.split(cmd)
			else:
				return[cmd]
		return cmd
	def __getattr__(self,name):
		# 'require_xxx' resolves to 'check_xxx' or 'find_xxx' wrapped so that
		# a falsy result aborts the configuration.
		r=self.__class__.__dict__.get(name,None)
		if r:return r
		if name and name.startswith('require_'):
			for k in['check_','find_']:
				n=name.replace('require_',k)
				ret=self.__class__.__dict__.get(n,None)
				if ret:
					def run(*k,**kw):
						r=ret(self,*k,**kw)
						if not r:
							self.fatal('requirement failure')
						return r
					return run
		self.fatal('No such method %r'%name)
	def eval_rules(self,rules):
		"""Run the named test methods; err_handler decides whether a failure
		breaks, continues, or aborts the configuration."""
		self.rules=Utils.to_list(rules)
		for x in self.rules:
			f=getattr(self,x)
			if not f:self.fatal("No such method '%s'."%x)
			try:
				f()
			except Exception,e:
				ret=self.err_handler(x,e)
				if ret==BREAK:
					break
				elif ret==CONTINUE:
					continue
				else:
					self.fatal(e)
	def err_handler(self,fun,error):
		# default: no special handling, eval_rules will call fatal()
		pass
def conf(f):
	# Decorator: attach *f* as a method of ConfigurationContext.
	setattr(ConfigurationContext,f.__name__,f)
	return f
def conftest(f):
	# Decorator: like conf(), but also register *f* in the table of known tests.
	ConfigurationContext.tests[f.__name__]=f
	return conf(f)
+
diff --git a/wafadmin/Constants.py b/wafadmin/Constants.py
new file mode 100644
index 0000000..c480557
--- /dev/null
+++ b/wafadmin/Constants.py
@@ -0,0 +1,47 @@
#! /usr/bin/env python
# encoding: utf-8

# Shared constants for the wafadmin modules (waf 1.5.x).
# identifiers of the waf release this tree was unpacked from
HEXVERSION=0x10511
WAFVERSION="1.5.17"
WAFREVISION="8002"
ABI=7
# unix permission bits written in decimal: 420 == 0644, 493 == 0755
O644=420
O755=493
# effectively unlimited parallel jobs
MAXJOBS=99999999
# cache directory and file-name pieces used by Configure/Build
CACHE_DIR='c4che'
CACHE_SUFFIX='.cache.py'
DBFILE='.wafpickle-%d'%ABI
WSCRIPT_FILE='wscript'
WSCRIPT_BUILD_FILE='wscript_build'
WAF_CONFIG_LOG='config.log'
WAF_CONFIG_H='config.h'
# placeholder signature used before a real hash has been computed
SIG_NIL='iluvcuteoverload'
VARIANT='_VARIANT_'
DEFAULT='default'
SRCDIR='srcdir'
BLDDIR='blddir'
APPNAME='APPNAME'
VERSION='VERSION'
DEFINES='defines'
UNDEFINED=()
# tokens returned by configuration error handlers (see Configure.eval_rules)
BREAK="break"
CONTINUE="continue"
# scheduling modes -- presumably consumed by Task.py (not visible here)
JOBCONTROL="JOBCONTROL"
MAXPARALLEL="MAXPARALLEL"
NORMAL="NORMAL"
# task completion states
NOT_RUN=0
MISSING=1
CRASHED=2
EXCEPTION=3
SKIPPED=8
SUCCESS=9
# runnable-status codes
ASK_LATER=-1
SKIP_ME=-2
RUN_ME=-3
# logging formats used by Logs.py
LOG_FORMAT="%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s"
HOUR_FORMAT="%H:%M:%S"
TEST_OK=True
CFG_FILES='cfg_files'
# install/uninstall mode markers
INSTALL=1337
UNINSTALL=-1337
diff --git a/wafadmin/Environment.py b/wafadmin/Environment.py
new file mode 100644
index 0000000..7068644
--- /dev/null
+++ b/wafadmin/Environment.py
@@ -0,0 +1,158 @@
#! /usr/bin/env python
# encoding: utf-8
import sys
# set built-in appeared in 2.4; fall back to the sets module on older pythons
if sys.hexversion < 0x020400f0: from sets import Set as set
import os,copy,re
import Logs,Options,Utils
from Constants import*
# Matches one "KEY = value" line of a stored environment file; group 1
# tolerates a leading '#', group 2 is the key, group 3 the repr of the value.
re_imp=re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$',re.M)
class Environment(object):
	"""Dict-like store of configuration values with copy-on-write semantics:
	a copy() shares its parent's table and only records its own changes.
	Missing keys read as [] and attribute access maps onto item access."""
	__slots__=("table","parent")
	def __init__(self,filename=None):
		# own key/value table; lookups fall back to self.parent when set
		self.table={}
		if filename:
			self.load(filename)
	def __contains__(self,key):
		if key in self.table:return True
		# no 'parent' slot set -> AttributeError -> not found
		try:return self.parent.__contains__(key)
		except AttributeError:return False
	def __str__(self):
		# merge the keys of the whole parent chain for display
		keys=set()
		cur=self
		while cur:
			keys.update(cur.table.keys())
			cur=getattr(cur,'parent',None)
		keys=list(keys)
		keys.sort()
		return"\n".join(["%r %r"%(x,self.__getitem__(x))for x in keys])
	def __getitem__(self,key):
		"""Look the key up along the parent chain; missing keys yield []."""
		try:
			while 1:
				x=self.table.get(key,None)
				if not x is None:
					return x
				self=self.parent
		except AttributeError:
			return[]
	def __setitem__(self,key,value):
		self.table[key]=value
	def __delitem__(self,key):
		# only removes the local entry; a parent value may become visible again
		del self.table[key]
	def pop(self,key,*args):
		if len(args):
			return self.table.pop(key,*args)
		return self.table.pop(key)
	def set_variant(self,name):
		self.table[VARIANT]=name
	def variant(self):
		"""Return the variant name, walking up the parent chain (DEFAULT if unset)."""
		try:
			while 1:
				x=self.table.get(VARIANT,None)
				if not x is None:
					return x
				self=self.parent
		except AttributeError:
			return DEFAULT
	def copy(self):
		"""Return a cheap child environment that delegates lookups to self."""
		newenv=Environment()
		newenv.parent=self
		return newenv
	def detach(self):
		"""Break the link to the parent, deep-copying the merged contents."""
		tbl=self.get_merged_dict()
		try:
			delattr(self,'parent')
		except AttributeError:
			pass
		else:
			keys=tbl.keys()
			for x in keys:
				tbl[x]=copy.deepcopy(tbl[x])
			self.table=tbl
	def get_flat(self,key):
		"""Return the value as a single space-joined string."""
		s=self[key]
		if isinstance(s,str):return s
		return' '.join(s)
	def _get_list_value_for_modification(self,key):
		# Return a list stored in self.table for *key*, copying any inherited
		# value first so the parent environment is never mutated.
		try:
			value=self.table[key]
		except KeyError:
			try:value=self.parent[key]
			except AttributeError:value=[]
			if isinstance(value,list):
				value=value[:]
			else:
				value=[value]
		else:
			if not isinstance(value,list):
				value=[value]
		self.table[key]=value
		return value
	def append_value(self,var,value):
		"""Append value (scalar or list) to env[var]."""
		current_value=self._get_list_value_for_modification(var)
		if isinstance(value,list):
			current_value.extend(value)
		else:
			current_value.append(value)
	def prepend_value(self,var,value):
		"""Prepend value (scalar or list) to env[var]."""
		current_value=self._get_list_value_for_modification(var)
		if isinstance(value,list):
			current_value=value+current_value
			self.table[var]=current_value
		else:
			current_value.insert(0,value)
	def append_unique(self,var,value):
		"""Append only the items not already present in env[var]."""
		current_value=self._get_list_value_for_modification(var)
		if isinstance(value,list):
			for value_item in value:
				if value_item not in current_value:
					current_value.append(value_item)
		else:
			if value not in current_value:
				current_value.append(value)
	def get_merged_dict(self):
		"""Flatten the parent chain into a single dict (children win)."""
		table_list=[]
		env=self
		while 1:
			table_list.insert(0,env.table)
			try:env=env.parent
			except AttributeError:break
		merged_table={}
		for table in table_list:
			merged_table.update(table)
		return merged_table
	def store(self,filename):
		"""Serialize the merged environment as 'KEY = repr(value)' lines."""
		# BUGFIX: close the file even when a write fails (the old code leaked
		# the handle on error); also avoid shadowing the 'file' builtin.
		fp=open(filename,'w')
		try:
			merged_table=self.get_merged_dict()
			keys=list(merged_table.keys())
			keys.sort()
			for k in keys:fp.write('%s = %r\n'%(k,merged_table[k]))
		finally:
			fp.close()
	def load(self,filename):
		"""Populate self.table from a file written by store()."""
		tbl=self.table
		code=Utils.readf(filename)
		for m in re_imp.finditer(code):
			g=m.group
			# values were written with repr(), eval() restores them
			tbl[g(2)]=eval(g(3))
		Logs.debug('env: %s',self.table)
	def get_destdir(self):
		if self.__getitem__('NOINSTALL'):return''
		return Options.options.destdir
	def update(self,d):
		# Python 2 only (iteritems); kept for compatibility with this tree
		for k,v in d.iteritems():
			self[k]=v
	def __getattr__(self,name):
		# Called only when normal lookup fails, i.e. for unset slots or
		# arbitrary config keys.
		if name in self.__slots__:
			# BUGFIX: 'object' has no '__getattr__' attribute -- the previous
			# object.__getattr__(self,name) only "worked" because that lookup
			# itself raised AttributeError. Raise it deliberately instead.
			return object.__getattribute__(self,name)
		else:
			return self[name]
	def __setattr__(self,name,value):
		if name in self.__slots__:
			object.__setattr__(self,name,value)
		else:
			self[name]=value
	def __delattr__(self,name):
		if name in self.__slots__:
			object.__delattr__(self,name)
		else:
			del self[name]
+
diff --git a/wafadmin/Logs.py b/wafadmin/Logs.py
new file mode 100644
index 0000000..666de6d
--- /dev/null
+++ b/wafadmin/Logs.py
@@ -0,0 +1,97 @@
#! /usr/bin/env python
# encoding: utf-8

# Console logging for waf: colored output, zone-based debug filtering.
import ansiterm
import os,re,logging,traceback,sys
from Constants import*
# comma-separated list of debug zones to show (set from the command line)
zones=''
# verbosity level (set from the command line)
verbose=0
# ANSI escape sequences; 'USE' toggles colored output globally
colors_lst={'USE':True,'BOLD':'\x1b[01;1m','RED':'\x1b[01;31m','GREEN':'\x1b[32m','YELLOW':'\x1b[33m','PINK':'\x1b[35m','BLUE':'\x1b[01;34m','CYAN':'\x1b[36m','NORMAL':'\x1b[0m','cursor_on':'\x1b[?25h','cursor_off':'\x1b[?25l',}
got_tty=False
term=os.environ.get('TERM','dumb')
if not term in['dumb','emacs']:
	try:
		# stderr may lack isatty (e.g. when replaced by a file-like object)
		got_tty=sys.stderr.isatty()or(sys.platform=='win32'and term in['xterm','msys'])
	except AttributeError:
		pass
import Utils
# disable colors when not writing to a terminal or explicitly requested
if not got_tty or'NOCOLOR'in os.environ:
	colors_lst['USE']=False
def get_color(cl):
	# Return the escape sequence for color name *cl*, '' when disabled/unknown.
	if not colors_lst['USE']:return''
	return colors_lst.get(cl,'')
class foo(object):
	"""Color accessor: both attribute access and calls return the escape
	code, so colors.RED and colors('RED') are interchangeable."""
	def __getattr__(self,name):
		return get_color(name)
	__call__=__getattr__
# module-level singleton: Logs.colors.RED / Logs.colors('RED')
colors=foo()
# matches the "zone: message" prefix convention of debug messages
re_log=re.compile(r'(\w+): (.*)',re.M)
class log_filter(logging.Filter):
	"""Pass INFO and above (setting color fields used by the formatter);
	debug records are filtered by the module-level 'zones'/'verbose' settings."""
	def __init__(self,name=None):
		pass
	def filter(self,rec):
		# default colors, refined by level below
		rec.c1=colors.PINK
		rec.c2=colors.NORMAL
		rec.zone=rec.module
		if rec.levelno>=logging.INFO:
			if rec.levelno>=logging.ERROR:
				rec.c1=colors.RED
			elif rec.levelno>=logging.WARNING:
				rec.c1=colors.YELLOW
			else:
				rec.c1=colors.GREEN
			return True
		# debug records may carry a "zone: message" prefix; strip it and use
		# the zone for filtering
		zone=''
		m=re_log.match(rec.msg)
		if m:
			zone=rec.zone=m.group(1)
			rec.msg=m.group(2)
		if zones:
			return getattr(rec,'zone','')in zones or'*'in zones
		elif not verbose>2:
			return False
		return True
class formatter(logging.Formatter):
	"""Emit warnings/errors/info as a bare colorized message; everything else
	uses the standard LOG_FORMAT/HOUR_FORMAT layout."""
	def __init__(self):
		logging.Formatter.__init__(self,LOG_FORMAT,HOUR_FORMAT)
	def format(self,rec):
		if rec.levelno>=logging.WARNING or rec.levelno==logging.INFO:
			try:
				# Python 2: decode byte strings before mixing with color codes
				return'%s%s%s'%(rec.c1,rec.msg.decode('utf-8'),rec.c2)
			except:
				return rec.c1+rec.msg+rec.c2
		return logging.Formatter.format(self,rec)
def debug(*k,**kw):
	"""Forward a debug message to logging; newlines are flattened to keep
	one record per line. No-op unless verbose mode is on."""
	if not verbose:
		return
	args=list(k)
	args[0]=args[0].replace('\n',' ')
	logging.debug(*args,**kw)
def error(*k,**kw):
	# Log an error; with verbosity > 1 also print a stack trace (using the
	# stack captured by the WafError itself when one is passed).
	logging.error(*k,**kw)
	if verbose>1:
		if isinstance(k[0],Utils.WafError):
			st=k[0].stack
		else:
			st=traceback.extract_stack()
		if st:
			# drop the frame of error() itself
			st=st[:-1]
			buf=[]
			for filename,lineno,name,line in st:
				buf.append('  File "%s", line %d, in %s'%(filename,lineno,name))
				if line:
					buf.append('	%s'%line.strip())
			if buf:logging.error("\n".join(buf))
# convenience aliases onto the root logger
warn=logging.warn
info=logging.info
def init_log():
	"""(Re)install the waf filter and formatter on the root logger."""
	root=logging.getLogger()
	root.handlers=[]
	root.filters=[]
	handler=logging.StreamHandler()
	handler.setFormatter(formatter())
	root.addHandler(handler)
	root.addFilter(log_filter())
	root.setLevel(logging.DEBUG)
# configure logging as soon as the module is imported
init_log()
+
diff --git a/wafadmin/Node.py b/wafadmin/Node.py
new file mode 100644
index 0000000..dab34e0
--- /dev/null
+++ b/wafadmin/Node.py
@@ -0,0 +1,494 @@
#! /usr/bin/env python
# encoding: utf-8
import sys
# set built-in appeared in 2.4; fall back to the sets module on older pythons
if sys.hexversion < 0x020400f0: from sets import Set as set
import os,sys,fnmatch,re,stat
import Utils,Constants
# node types, stored in the two low bits of Node.id
UNDEFINED=0
DIR=1
FILE=2
BUILD=3
type_to_string={UNDEFINED:"unk",DIR:"dir",FILE:"src",BUILD:"bld"}
# directories never descended into by find_iter (version-control metadata)
prune_pats='.git .bzr .hg .svn _MTN _darcs CVS SCCS'.split()
# file names excluded from source scans (editor backups, VCS files, ...)
exclude_pats=prune_pats+'*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split()
# the same exclusions in ant_glob pattern form (default 'excl' argument)
exclude_regs='''
**/*~
**/#*#
**/.#*
**/%*%
**/._*
**/CVS
**/CVS/**
**/.cvsignore
**/SCCS
**/SCCS/**
**/vssver.scc
**/.svn
**/.svn/**
**/.git
**/.git/**
**/.gitignore
**/.bzr
**/.bzr/**
**/.hg
**/.hg/**
**/_MTN
**/_MTN/**
**/_darcs
**/_darcs/**
**/.DS_Store'''
+class Node(object):
+	__slots__=("name","parent","id","childs")
+	def __init__(self,name,parent,node_type=UNDEFINED):
+		self.name=name
+		self.parent=parent
+		self.__class__.bld.id_nodes+=4
+		self.id=self.__class__.bld.id_nodes+node_type
+		if node_type==DIR:self.childs={}
+		if parent and name in parent.childs:
+			raise Utils.WafError('node %s exists in the parent files %r already'%(name,parent))
+		if parent:parent.childs[name]=self
+	def __setstate__(self,data):
+		if len(data)==4:
+			(self.parent,self.name,self.id,self.childs)=data
+		else:
+			(self.parent,self.name,self.id)=data
+	def __getstate__(self):
+		if getattr(self,'childs',None)is None:
+			return(self.parent,self.name,self.id)
+		else:
+			return(self.parent,self.name,self.id,self.childs)
+	def __str__(self):
+		if not self.parent:return''
+		return"%s://%s"%(type_to_string[self.id&3],self.abspath())
+	def __repr__(self):
+		return self.__str__()
+	def __hash__(self):
+		raise Utils.WafError('nodes, you are doing it wrong')
+	def __copy__(self):
+		raise Utils.WafError('nodes are not supposed to be cloned')
+	def get_type(self):
+		return self.id&3
+	def set_type(self,t):
+		self.id=self.id+t-self.id&3
+	def dirs(self):
+		return[x for x in self.childs.values()if x.id&3==DIR]
+	def files(self):
+		return[x for x in self.childs.values()if x.id&3==FILE]
+	def get_dir(self,name,default=None):
+		node=self.childs.get(name,None)
+		if not node or node.id&3!=DIR:return default
+		return node
+	def get_file(self,name,default=None):
+		node=self.childs.get(name,None)
+		if not node or node.id&3!=FILE:return default
+		return node
+	def get_build(self,name,default=None):
+		node=self.childs.get(name,None)
+		if not node or node.id&3!=BUILD:return default
+		return node
+	def find_resource(self,lst):
+		if isinstance(lst,str):
+			lst=Utils.split_path(lst)
+		if len(lst)==1:
+			parent=self
+		else:
+			parent=self.find_dir(lst[:-1])
+			if not parent:return None
+		self.__class__.bld.rescan(parent)
+		name=lst[-1]
+		node=parent.childs.get(name,None)
+		if node:
+			tp=node.id&3
+			if tp==FILE or tp==BUILD:
+				return node
+			else:
+				return None
+		tree=self.__class__.bld
+		if not name in tree.cache_dir_contents[parent.id]:
+			return None
+		path=parent.abspath()+os.sep+name
+		try:
+			st=Utils.h_file(path)
+		except IOError:
+			return None
+		child=self.__class__(name,parent,FILE)
+		tree.node_sigs[0][child.id]=st
+		return child
+	def find_or_declare(self,lst):
+		if isinstance(lst,str):
+			lst=Utils.split_path(lst)
+		if len(lst)==1:
+			parent=self
+		else:
+			parent=self.find_dir(lst[:-1])
+			if not parent:return None
+		self.__class__.bld.rescan(parent)
+		name=lst[-1]
+		node=parent.childs.get(name,None)
+		if node:
+			tp=node.id&3
+			if tp!=BUILD:
+				raise Utils.WafError('find_or_declare found a source file where a build file was expected %r'%'/'.join(lst))
+			return node
+		node=self.__class__(name,parent,BUILD)
+		return node
+	def find_dir(self,lst):
+		if isinstance(lst,str):
+			lst=Utils.split_path(lst)
+		current=self
+		for name in lst:
+			self.__class__.bld.rescan(current)
+			prev=current
+			if not current.parent and name==current.name:
+				continue
+			elif not name:
+				continue
+			elif name=='.':
+				continue
+			elif name=='..':
+				current=current.parent or current
+			else:
+				current=prev.childs.get(name,None)
+				if current is None:
+					dir_cont=self.__class__.bld.cache_dir_contents
+					if prev.id in dir_cont and name in dir_cont[prev.id]:
+						if not prev.name:
+							if os.sep=='/':
+								dirname=os.sep+name
+							else:
+								dirname=name
+						else:
+							dirname=prev.abspath()+os.sep+name
+						if not os.path.isdir(dirname):
+							return None
+						current=self.__class__(name,prev,DIR)
+					elif(not prev.name and len(name)==2 and name[1]==':')or name.startswith('\\\\'):
+						current=self.__class__(name,prev,DIR)
+					else:
+						return None
+				else:
+					if current.id&3!=DIR:
+						return None
+		return current
+	def ensure_dir_node_from_path(self,lst):
+		if isinstance(lst,str):
+			lst=Utils.split_path(lst)
+		current=self
+		for name in lst:
+			if not name:
+				continue
+			elif name=='.':
+				continue
+			elif name=='..':
+				current=current.parent or current
+			else:
+				prev=current
+				current=prev.childs.get(name,None)
+				if current is None:
+					current=self.__class__(name,prev,DIR)
+		return current
+	def exclusive_build_node(self,path):
+		lst=Utils.split_path(path)
+		name=lst[-1]
+		if len(lst)>1:
+			parent=None
+			try:
+				parent=self.find_dir(lst[:-1])
+			except OSError:
+				pass
+			if not parent:
+				parent=self.ensure_dir_node_from_path(lst[:-1])
+				self.__class__.bld.rescan(parent)
+			else:
+				try:
+					self.__class__.bld.rescan(parent)
+				except OSError:
+					pass
+		else:
+			parent=self
+		node=parent.childs.get(name,None)
+		if not node:
+			node=self.__class__(name,parent,BUILD)
+		return node
+	def path_to_parent(self,parent):
+		lst=[]
+		p=self
+		h1=parent.height()
+		h2=p.height()
+		while h2>h1:
+			h2-=1
+			lst.append(p.name)
+			p=p.parent
+		if lst:
+			lst.reverse()
+			ret=os.path.join(*lst)
+		else:
+			ret=''
+		return ret
+	def find_ancestor(self,node):
+		dist=self.height()-node.height()
+		if dist<0:return node.find_ancestor(self)
+		cand=self
+		while dist>0:
+			cand=cand.parent
+			dist-=1
+		if cand==node:return cand
+		cursor=node
+		while cand.parent:
+			cand=cand.parent
+			cursor=cursor.parent
+			if cand==cursor:return cand
+	def relpath_gen(self,from_node):
+		if self==from_node:return'.'
+		if from_node.parent==self:return'..'
+		ancestor=self.find_ancestor(from_node)
+		lst=[]
+		cand=self
+		while not cand.id==ancestor.id:
+			lst.append(cand.name)
+			cand=cand.parent
+		cand=from_node
+		while not cand.id==ancestor.id:
+			lst.append('..')
+			cand=cand.parent
+		lst.reverse()
+		return os.sep.join(lst)
+	def nice_path(self,env=None):
+		tree=self.__class__.bld
+		ln=tree.launch_node()
+		if self.id&3==FILE:return self.relpath_gen(ln)
+		else:return os.path.join(tree.bldnode.relpath_gen(ln),env.variant(),self.relpath_gen(tree.srcnode))
+	def is_child_of(self,node):
+		p=self
+		diff=self.height()-node.height()
+		while diff>0:
+			diff-=1
+			p=p.parent
+		return p.id==node.id
+	def variant(self,env):
+		if not env:return 0
+		elif self.id&3==FILE:return 0
+		else:return env.variant()
+	def height(self):
+		d=self
+		val=-1
+		while d:
+			d=d.parent
+			val+=1
+		return val
+	def abspath(self,env=None):
+		variant=(env and(self.id&3!=FILE)and env.variant())or 0
+		ret=self.__class__.bld.cache_node_abspath[variant].get(self.id,None)
+		if ret:return ret
+		if not variant:
+			if not self.parent:
+				val=os.sep=='/'and os.sep or''
+			elif not self.parent.name:
+				val=(os.sep=='/'and os.sep or'')+self.name
+			else:
+				val=self.parent.abspath()+os.sep+self.name
+		else:
+			val=os.sep.join((self.__class__.bld.bldnode.abspath(),variant,self.path_to_parent(self.__class__.bld.srcnode)))
+		self.__class__.bld.cache_node_abspath[variant][self.id]=val
+		return val
+	def change_ext(self,ext):
+		name=self.name
+		k=name.rfind('.')
+		if k>=0:
+			name=name[:k]+ext
+		else:
+			name=name+ext
+		return self.parent.find_or_declare([name])
+	def src_dir(self,env):
+		return self.parent.srcpath(env)
+	def bld_dir(self,env):
+		return self.parent.bldpath(env)
+	def bld_base(self,env):
+		s=os.path.splitext(self.name)[0]
+		return os.path.join(self.bld_dir(env),s)
+	def bldpath(self,env=None):
+		if self.id&3==FILE:
+			return self.relpath_gen(self.__class__.bld.bldnode)
+		p=self.path_to_parent(self.__class__.bld.srcnode)
+		if p is not'':
+			return env.variant()+os.sep+p
+		return env.variant()
+	def srcpath(self,env=None):
+		if self.id&3==BUILD:
+			return self.bldpath(env)
+		return self.relpath_gen(self.__class__.bld.bldnode)
+	def read(self,env):
+		return Utils.readf(self.abspath(env))
+	def dir(self,env):
+		return self.parent.abspath(env)
+	def file(self):
+		return self.name
+	def file_base(self):
+		return os.path.splitext(self.name)[0]
+	def suffix(self):
+		k=max(0,self.name.rfind('.'))
+		return self.name[k:]
+	def find_iter_impl(self,src=True,bld=True,dir=True,accept_name=None,is_prune=None,maxdepth=25):
+		bld_ctx=self.__class__.bld
+		bld_ctx.rescan(self)
+		for name in bld_ctx.cache_dir_contents[self.id]:
+			if accept_name(self,name):
+				node=self.find_resource(name)
+				if node:
+					if src and node.id&3==FILE:
+						yield node
+				else:
+					node=self.find_dir(name)
+					if node and node.id!=bld_ctx.bldnode.id:
+						if dir:
+							yield node
+						if not is_prune(self,name):
+							if maxdepth:
+								for k in node.find_iter_impl(src,bld,dir,accept_name,is_prune,maxdepth=maxdepth-1):
+									yield k
+			else:
+				if not is_prune(self,name):
+					node=self.find_resource(name)
+					if not node:
+						node=self.find_dir(name)
+						if node and node.id!=bld_ctx.bldnode.id:
+							if maxdepth:
+								for k in node.find_iter_impl(src,bld,dir,accept_name,is_prune,maxdepth=maxdepth-1):
+									yield k
+		if bld:
+			for node in self.childs.values():
+				if node.id==bld_ctx.bldnode.id:
+					continue
+				if node.id&3==BUILD:
+					if accept_name(self,node.name):
+						yield node
+		raise StopIteration
+	def find_iter(self,in_pat=['*'],ex_pat=exclude_pats,prune_pat=prune_pats,src=True,bld=True,dir=False,maxdepth=25,flat=False):
+		if not(src or bld or dir):
+			raise StopIteration
+		if self.id&3!=DIR:
+			raise StopIteration
+		in_pat=Utils.to_list(in_pat)
+		ex_pat=Utils.to_list(ex_pat)
+		prune_pat=Utils.to_list(prune_pat)
+		def accept_name(node,name):
+			for pat in ex_pat:
+				if fnmatch.fnmatchcase(name,pat):
+					return False
+			for pat in in_pat:
+				if fnmatch.fnmatchcase(name,pat):
+					return True
+			return False
+		def is_prune(node,name):
+			for pat in prune_pat:
+				if fnmatch.fnmatchcase(name,pat):
+					return True
+			return False
+		ret=self.find_iter_impl(src,bld,dir,accept_name,is_prune,maxdepth=maxdepth)
+		if flat:
+			return" ".join([x.relpath_gen(self)for x in ret])
+		return ret
+	def ant_glob(self,*k,**kw):
+		src=kw.get('src',1)
+		bld=kw.get('bld',0)
+		dir=kw.get('dir',0)
+		excl=kw.get('excl',exclude_regs)
+		incl=k and k[0]or kw.get('incl','**')
+		def to_pat(s):
+			lst=Utils.to_list(s)
+			ret=[]
+			for x in lst:
+				x=x.replace('//','/')
+				if x.endswith('/'):
+					x+='**'
+				lst2=x.split('/')
+				accu=[]
+				for k in lst2:
+					if k=='**':
+						accu.append(k)
+					else:
+						k=k.replace('.','[.]').replace('*','.*').replace('?','.')
+						k='^%s$'%k
+						accu.append(re.compile(k))
+				ret.append(accu)
+			return ret
+		def filtre(name,nn):
+			ret=[]
+			for lst in nn:
+				if not lst:
+					pass
+				elif lst[0]=='**':
+					ret.append(lst)
+					if len(lst)>1:
+						if lst[1].match(name):
+							ret.append(lst[2:])
+					else:
+						ret.append([])
+				elif lst[0].match(name):
+					ret.append(lst[1:])
+			return ret
+		def accept(name,pats):
+			nacc=filtre(name,pats[0])
+			nrej=filtre(name,pats[1])
+			if[]in nrej:
+				nacc=[]
+			return[nacc,nrej]
+		def ant_iter(nodi,maxdepth=25,pats=[]):
+			nodi.__class__.bld.rescan(nodi)
+			for name in nodi.__class__.bld.cache_dir_contents[nodi.id]:
+				npats=accept(name,pats)
+				if npats and npats[0]:
+					accepted=[]in npats[0]
+					node=nodi.find_resource(name)
+					if node and accepted:
+						if src and node.id&3==FILE:
+							yield node
+					else:
+						node=nodi.find_dir(name)
+						if node and node.id!=nodi.__class__.bld.bldnode.id:
+							if accepted and dir:
+								yield node
+							if maxdepth:
+								for k in ant_iter(node,maxdepth=maxdepth-1,pats=npats):
+									yield k
+			if bld:
+				for node in nodi.childs.values():
+					if node.id==nodi.__class__.bld.bldnode.id:
+						continue
+					if node.id&3==BUILD:
+						npats=accept(node.name,pats)
+						if npats and npats[0]and[]in npats[0]:
+							yield node
+			raise StopIteration
+		ret=[x for x in ant_iter(self,pats=[to_pat(incl),to_pat(excl)])]
+		if kw.get('flat',True):
+			return" ".join([x.relpath_gen(self)for x in ret])
+		return ret
+	def update_build_dir(self,env=None):
+		if not env:
+			for env in bld.all_envs:
+				self.update_build_dir(env)
+			return
+		path=self.abspath(env)
+		lst=Utils.listdir(path)
+		try:
+			self.__class__.bld.cache_dir_contents[self.id].update(lst)
+		except KeyError:
+			self.__class__.bld.cache_dir_contents[self.id]=set(lst)
+		self.__class__.bld.cache_scanned_folders[self.id]=True
+		for k in lst:
+			npath=path+os.sep+k
+			st=os.stat(npath)
+			if stat.S_ISREG(st[stat.ST_MODE]):
+				ick=self.find_or_declare(k)
+				if not(ick.id in self.__class__.bld.node_sigs[env.variant()]):
+					self.__class__.bld.node_sigs[env.variant()][ick.id]=Constants.SIG_NIL
+			elif stat.S_ISDIR(st[stat.ST_MODE]):
+				child=self.find_dir(k)
+				if not child:
+					child=self.ensure_dir_node_from_path(k)
+				child.update_build_dir(env)
class Nodu(Node):
	# NOTE(review): empty subclass used as the concrete node class elsewhere;
	# presumably kept so unpickled trees do not clash with Node itself --
	# TODO confirm against Build.py.
	pass
+
diff --git a/wafadmin/Options.py b/wafadmin/Options.py
new file mode 100644
index 0000000..617ee3a
--- /dev/null
+++ b/wafadmin/Options.py
@@ -0,0 +1,158 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+import os,sys,imp,types,tempfile,optparse
+import Logs,Utils
+from Constants import*
# names of the built-in waf commands
cmds='distclean configure build install clean uninstall check dist distcheck'.split()
commands={}
is_install=False
options={}
arg_line=[]
launch_dir=''
tooldir=''
# the lock file marks a configured project; name overridable through WAFLOCK
lockfile=os.environ.get('WAFLOCK','.lock-wscript')
try:cache_global=os.path.abspath(os.environ['WAFCACHE'])
except KeyError:cache_global=''
platform=Utils.unversioned_sys_platform()
conf_file='conf-runs-%s-%d.pickle'%(platform,ABI)
remote_repo=['http://waf.googlecode.com/svn/']
# installation prefix: $PREFIX, else the temp drive on win32, else /usr/local/
default_prefix=os.environ.get('PREFIX')
if not default_prefix:
	if platform=='win32':
		d=tempfile.gettempdir()
		default_prefix=d[0].upper()+d[1:]
	else:default_prefix='/usr/local/'
# number of parallel jobs: $JOBS, else the detected processor count
# NOTE(review): os.environ.get returns a string when JOBS is set, so the
# '<1' test below only triggers for the -1 default -- confirm intended
default_jobs=os.environ.get('JOBS',-1)
if default_jobs<1:
	try:
		if'SC_NPROCESSORS_ONLN'in os.sysconf_names:
			default_jobs=os.sysconf('SC_NPROCESSORS_ONLN')
		else:
			# BSD/OSX fallback
			default_jobs=int(Utils.cmd_output(['sysctl','-n','hw.ncpu']))
	except:
		if os.name=='java':
			# jython: ask the JVM
			from java.lang import Runtime
			default_jobs=Runtime.getRuntime().availableProcessors()
		else:
			default_jobs=int(os.environ.get('NUMBER_OF_PROCESSORS',1))
default_destdir=os.environ.get('DESTDIR','')
def get_usage(self):
	"""Build the usage text shown by the option parser.

	Collects the documented commands of the main wscript module
	(Utils.g_module) when available, otherwise falls back to the list of
	built-in command names. Installed on optparse.OptionParser below.
	"""
	cmds_str=[]
	module=Utils.g_module
	if module:
		tbl=module.__dict__
		keys=list(tbl.keys())
		keys.sort()
		# give the common commands a default help string
		if'build'in tbl:
			if not module.build.__doc__:
				module.build.__doc__='builds the project'
		if'configure'in tbl:
			if not module.configure.__doc__:
				module.configure.__doc__='configures the project'
		ban=['set_options','init','shutdown']
		# keep only documented public functions
		optlst=[x for x in keys if not x in ban and type(tbl[x])is type(parse_args_impl)and tbl[x].__doc__ and not x.startswith('_')]
		# guard against ValueError from max() when no command is documented
		just=max([len(x)for x in optlst]or[0])
		for x in optlst:
			cmds_str.append('  %s: %s'%(x.ljust(just),tbl[x].__doc__))
		ret='\n'.join(cmds_str)
	else:
		ret=' '.join(cmds)
	return'''waf [command] [options]

Main commands (example: ./waf build -j4)
%s
'''%ret
setattr(optparse.OptionParser,'get_usage',get_usage)
def create_parser(module=None):
	"""Create the optparse parser populated with the standard waf options."""
	Logs.debug('options: create_parser is called')
	parser=optparse.OptionParser(conflict_handler="resolve",version='waf %s (%s)'%(WAFVERSION,WAFREVISION))
	parser.formatter.width=Utils.get_term_cols()
	add_opt=parser.add_option
	add_opt('-j','--jobs',type='int',default=default_jobs,help='amount of parallel jobs (%r)'%default_jobs,dest='jobs')
	add_opt('-k','--keep',action='store_true',default=False,help='keep running happily on independent task groups',dest='keep')
	add_opt('-v','--verbose',action='count',default=0,help='verbosity level -v -vv or -vvv [default: 0]',dest='verbose')
	add_opt('--nocache',action='store_true',default=False,help='ignore the WAFCACHE (if set)',dest='nocache')
	add_opt('--zones',action='store',default='',help='debugging zones (task_gen, deps, tasks, etc)',dest='zones')
	add_opt('-p','--progress',action='count',default=0,help='-p: progress bar; -pp: ide output',dest='progress_bar')
	add_opt('--targets',action='store',default='',help='build given task generators, e.g. "target1,target2"',dest='compile_targets')
	# options relevant at configuration time
	conf_group=optparse.OptionGroup(parser,'configuration options')
	parser.add_option_group(conf_group)
	conf_group.add_option('-b','--blddir',action='store',default='',help='build dir for the project (configuration)',dest='blddir')
	conf_group.add_option('-s','--srcdir',action='store',default='',help='src dir for the project (configuration)',dest='srcdir')
	conf_group.add_option('--prefix',help='installation prefix (configuration) [default: %r]'%default_prefix,default=default_prefix,dest='prefix')
	conf_group.add_option('--download',action='store_true',default=False,help='try to download the tools if missing',dest='download')
	# options relevant at installation time
	inst_group=optparse.OptionGroup(parser,'installation options')
	parser.add_option_group(inst_group)
	inst_group.add_option('--destdir',help='installation root [default: %r]'%default_destdir,default=default_destdir,dest='destdir')
	inst_group.add_option('-f','--force',action='store_true',default=False,help='force file installation',dest='force')
	return parser
def parse_args_impl(parser,_args=None):
	"""Parse the command line and fill the module globals options/commands/arg_line.

	Also normalizes the command list ('build' implied by default and before
	'check', 'init' always first) and configures the logging verbosity.
	"""
	global options,commands,arg_line
	(options,args)=parser.parse_args(args=_args)
	arg_line=args
	commands={}
	for var in cmds:commands[var]=0
	if not args:
		# no command given: default to 'build'
		commands['build']=1
		args.append('build')
	for arg in args:
		commands[arg]=True
	if'check'in args:
		idx=args.index('check')
		try:
			bidx=args.index('build')
			if bidx>idx:
				# exception deliberately used as control flow: fall through
				# to the insert below so 'build' ends up before 'check'
				raise ValueError('build before check')
		except ValueError,e:
			args.insert(idx,'build')
	if args[0]!='init':
		args.insert(0,'init')
	# -k implies sequential execution
	if options.keep:options.jobs=1
	if options.jobs<1:options.jobs=1
	if'install'in sys.argv or'uninstall'in sys.argv:
		options.destdir=options.destdir and os.path.abspath(os.path.expanduser(options.destdir))
	Logs.verbose=options.verbose
	Logs.init_log()
	if options.zones:
		Logs.zones=options.zones.split(',')
		if not Logs.verbose:Logs.verbose=1
	elif Logs.verbose>0:
		Logs.zones=['runner']
	if Logs.verbose>2:
		Logs.zones=['*']
class Handler(Utils.Context):
	"""Command-line options context wrapping an optparse parser."""
	parser=None
	def __init__(self,module=None):
		self.parser=create_parser(module)
		self.cwd=os.getcwd()
		# the most recent instance is kept on the class itself
		Handler.parser=self
	def add_option(self,*args,**kwargs):
		# forward to the underlying optparse parser
		self.parser.add_option(*args,**kwargs)
	def add_option_group(self,*args,**kwargs):
		return self.parser.add_option_group(*args,**kwargs)
	def get_option_group(self,opt_str):
		return self.parser.get_option_group(opt_str)
	def sub_options(self,*k,**kw):
		# execute the set_options function of the wscript in folder k[0]
		if not k:raise Utils.WscriptError('folder expected')
		self.recurse(k[0],name='set_options')
	def tool_options(self,*k,**kw):
		if not k[0]:
			raise Utils.WscriptError('invalid tool_options call %r %r'%(k,kw))
		names=Utils.to_list(k[0])
		search_path=Utils.to_list(kw.get('tdir',kw.get('tooldir',tooldir)))
		for name in names:
			# map a few historical tool names onto the real module names
			name=name.replace('++','xx')
			if name=='java':name='javaw'
			if name.lower()=='unittest':name='unittestw'
			mod=Utils.load_tool(name,search_path)
			try:
				opt_fun=mod.set_options
			except AttributeError:
				pass
			else:
				opt_fun(kw.get('option_group',self))
	def parse_args(self,args=None):
		parse_args_impl(self.parser,args)
+
diff --git a/wafadmin/Runner.py b/wafadmin/Runner.py
new file mode 100644
index 0000000..2edb648
--- /dev/null
+++ b/wafadmin/Runner.py
@@ -0,0 +1,160 @@
+#! /usr/bin/env python
+# encoding: utf-8
+import sys
+if sys.hexversion < 0x020400f0: from sets import Set as set
+import os,sys,random,time,threading,traceback
+try:from Queue import Queue
+except ImportError:from queue import Queue
+import Build,Utils,Logs,Options
+from Logs import debug,error
+from Constants import*
# number of extra in-flight tasks tolerated beyond the job count before the
# scheduler drains finished tasks (see Parallel.refill_task_list)
GAP=15
# monkey-patch threading.Thread.run so that uncaught exceptions in worker
# threads are reported through sys.excepthook instead of being lost
run_old=threading.Thread.run
def run(*args,**kwargs):
	try:
		run_old(*args,**kwargs)
	except(KeyboardInterrupt,SystemExit):
		raise
	except:
		sys.excepthook(*sys.exc_info())
threading.Thread.run=run
class TaskConsumer(threading.Thread):
	"""Daemon worker thread: pops tasks from the shared 'ready' queue,
	executes them and posts them back to their master's 'out' queue."""
	# queue shared by all consumers; fed by Parallel.start
	ready=Queue(0)
	# pool of worker threads, created lazily and reused
	consumers=[]
	def __init__(self):
		threading.Thread.__init__(self)
		self.setDaemon(1)
		self.start()
	def run(self):
		try:
			self.loop()
		except:
			pass
	def loop(self):
		while 1:
			tsk=TaskConsumer.ready.get()
			m=tsk.master
			if m.stop:
				# build aborted: report the task back without running it
				m.out.put(tsk)
				continue
			try:
				tsk.generator.bld.printout(tsk.display())
				# a class-level 'stat' callable replaces the normal run path
				if tsk.__class__.stat:ret=tsk.__class__.stat(tsk)
				else:ret=tsk.call_run()
			except Exception,e:
				tsk.err_msg=Utils.ex_stack()
				tsk.hasrun=EXCEPTION
				m.error_handler(tsk)
				m.out.put(tsk)
				continue
			if ret:
				# non-zero return code -> failure
				tsk.err_code=ret
				tsk.hasrun=CRASHED
			else:
				try:
					tsk.post_run()
				except Utils.WafError:
					pass
				except Exception:
					tsk.err_msg=Utils.ex_stack()
					tsk.hasrun=EXCEPTION
				else:
					tsk.hasrun=SUCCESS
			if tsk.hasrun!=SUCCESS:
				m.error_handler(tsk)
			m.out.put(tsk)
class Parallel(object):
	"""Feeds the TaskConsumer threads and tracks the overall build state.

	Batches of tasks are pulled from the task manager into 'outstanding',
	postponed tasks wait in 'frozen', and finished tasks come back
	through the 'out' queue."""
	def __init__(self,bld,j=2):
		# number of consumer threads to use
		self.numjobs=j
		self.manager=bld.task_manager
		self.manager.current_group=0
		self.total=self.manager.total()
		# tasks that may be given out now
		self.outstanding=[]
		self.maxjobs=MAXJOBS
		# tasks waiting for other tasks to complete (ASK_LATER)
		self.frozen=[]
		# queue of tasks returned by the consumers
		self.out=Queue(0)
		# number of tasks currently handed to the consumers
		self.count=0
		self.processed=1
		self.stop=False
		self.error=False
	def get_next(self):
		if not self.outstanding:
			return None
		return self.outstanding.pop(0)
	def postpone(self,tsk):
		# insert at a random end of the frozen queue
		if random.randint(0,1):
			self.frozen.insert(0,tsk)
		else:
			self.frozen.append(tsk)
	def refill_task_list(self):
		# throttle: drain finished tasks while too many are in flight
		while self.count>self.numjobs+GAP or self.count>=self.maxjobs:
			self.get_out()
		while not self.outstanding:
			if self.count:
				self.get_out()
			if self.frozen:
				self.outstanding+=self.frozen
				self.frozen=[]
			elif not self.count:
				# nothing in flight: fetch the next batch from the manager
				(jobs,tmp)=self.manager.get_next_set()
				if jobs!=None:self.maxjobs=jobs
				if tmp:self.outstanding+=tmp
				break
	def get_out(self):
		# block until one task is finished, then account for it
		ret=self.out.get()
		self.manager.add_finished(ret)
		if not self.stop and getattr(ret,'more_tasks',None):
			# tasks may create additional tasks while running
			self.outstanding+=ret.more_tasks
			self.total+=len(ret.more_tasks)
		self.count-=1
	def error_handler(self,tsk):
		# stop the build on the first error unless -k was given
		if not Options.options.keep:
			self.stop=True
		self.error=True
	def start(self):
		# grow the shared consumer pool if it already exists
		if TaskConsumer.consumers:
			while len(TaskConsumer.consumers)<self.numjobs:
				TaskConsumer.consumers.append(TaskConsumer())
		while not self.stop:
			self.refill_task_list()
			tsk=self.get_next()
			if not tsk:
				if self.count:
					# no runnable task but some are still in flight: wait
					continue
				else:
					break
			if tsk.hasrun:
				self.processed+=1
				self.manager.add_finished(tsk)
				continue
			try:
				st=tsk.runnable_status()
			except Exception,e:
				self.processed+=1
				if self.stop and not Options.options.keep:
					tsk.hasrun=SKIPPED
					self.manager.add_finished(tsk)
					continue
				self.error_handler(tsk)
				self.manager.add_finished(tsk)
				tsk.hasrun=EXCEPTION
				tsk.err_msg=Utils.ex_stack()
				continue
			if st==ASK_LATER:
				self.postpone(tsk)
			elif st==SKIP_ME:
				self.processed+=1
				tsk.hasrun=SKIPPED
				self.manager.add_finished(tsk)
			else:
				# hand the task over to the consumer threads
				tsk.position=(self.processed,self.total)
				self.count+=1
				tsk.master=self
				TaskConsumer.ready.put(tsk)
				self.processed+=1
				# create the consumer pool lazily on first use
				if not TaskConsumer.consumers:
					TaskConsumer.consumers=[TaskConsumer()for i in xrange(self.numjobs)]
		# on error, wait for the in-flight tasks before returning
		while self.error and self.count:
			self.get_out()
		assert(self.count==0 or self.stop)
+
diff --git a/wafadmin/Scripting.py b/wafadmin/Scripting.py
new file mode 100644
index 0000000..4087ba1
--- /dev/null
+++ b/wafadmin/Scripting.py
@@ -0,0 +1,414 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+import os,sys,shutil,traceback,datetime,inspect,errno
+import Utils,Configure,Build,Logs,Options,Environment,Task
+from Logs import error,warn,info
+from Constants import*
# compression used by dist(): 'gz' or 'bz2' make a tarball, anything else a zip
g_gz='bz2'
# commands remaining to execute; filled by main()
commands=[]
def prepare_impl(t,cwd,ver,wafdir):
	"""Locate the project wscript, load it and run main().

	Walks up from *cwd* looking for a wscript file (or a lock file pointing
	to one), installs default command implementations on the main wscript
	module, then parses the options and executes main().
	"""
	Options.tooldir=[t]
	Options.launch_dir=cwd
	if'--version'in sys.argv:
		# --version is handled by the option parser alone, no wscript needed
		opt_obj=Options.Handler()
		opt_obj.curdir=cwd
		opt_obj.parse_args()
		sys.exit(0)
	msg1='Waf: Please run waf from a directory containing a file named "%s" or run distclean'%WSCRIPT_FILE
	build_dir_override=None
	candidate=None
	lst=os.listdir(cwd)
	search_for_candidate=True
	if WSCRIPT_FILE in lst:
		candidate=cwd
	elif'configure'in sys.argv and not WSCRIPT_BUILD_FILE in lst:
		# configuring from outside the source tree: take the wscript next
		# to the waf binary and build in the current directory
		calldir=os.path.abspath(os.path.dirname(sys.argv[0]))
		if WSCRIPT_FILE in os.listdir(calldir):
			candidate=calldir
			search_for_candidate=False
		else:
			error('arg[0] directory does not contain a wscript file')
			sys.exit(1)
		build_dir_override=cwd
	while search_for_candidate:
		# climb up the directory tree; stop near the filesystem root
		if len(cwd)<=3:
			break
		dirlst=os.listdir(cwd)
		if WSCRIPT_FILE in dirlst:
			candidate=cwd
		if'configure'in sys.argv and candidate:
			break
		if Options.lockfile in dirlst:
			# a lock file wins: reuse the folder recorded at configure time
			env=Environment.Environment()
			try:
				env.load(os.path.join(cwd,Options.lockfile))
			except:
				error('could not load %r'%Options.lockfile)
			try:
				os.stat(env['cwd'])
			except:
				candidate=cwd
			else:
				candidate=env['cwd']
			break
		cwd=os.path.dirname(cwd)
	if not candidate:
		if'-h'in sys.argv or'--help'in sys.argv:
			warn('No wscript file found: the help message may be incomplete')
			opt_obj=Options.Handler()
			opt_obj.curdir=cwd
			opt_obj.parse_args()
		else:
			error(msg1)
		sys.exit(0)
	try:
		os.chdir(candidate)
	except OSError:
		raise Utils.WafError("the folder %r is unreadable"%candidate)
	Utils.set_main_module(os.path.join(candidate,WSCRIPT_FILE))
	if build_dir_override:
		d=getattr(Utils.g_module,BLDDIR,None)
		if d:
			msg=' Overriding build directory %s with %s'%(d,build_dir_override)
			warn(msg)
		Utils.g_module.blddir=build_dir_override
	def set_def(obj,name=''):
		# install *obj* on the wscript module unless the user defined it
		n=name or obj.__name__
		if not n in Utils.g_module.__dict__:
			setattr(Utils.g_module,n,obj)
	for k in[dist,distclean,distcheck,clean,install,uninstall]:
		set_def(k)
	set_def(Configure.ConfigurationContext,'configure_context')
	for k in['build','clean','install','uninstall']:
		set_def(Build.BuildContext,k+'_context')
	opt_obj=Options.Handler(Utils.g_module)
	opt_obj.curdir=candidate
	try:
		f=Utils.g_module.set_options
	except AttributeError:
		pass
	else:
		opt_obj.sub_options([''])
	opt_obj.parse_args()
	if not'init'in Utils.g_module.__dict__:
		Utils.g_module.init=Utils.nada
	if not'shutdown'in Utils.g_module.__dict__:
		Utils.g_module.shutdown=Utils.nada
	main()
+def prepare(t,cwd,ver,wafdir):
+	if WAFVERSION!=ver:
+		msg='Version mismatch: waf %s <> wafadmin %s (wafdir %s)'%(ver,WAFVERSION,wafdir)
+		print('\033[91mError: %s\033[0m'%msg)
+		sys.exit(1)
+	try:
+		prepare_impl(t,cwd,ver,wafdir)
+	except Utils.WafError,e:
+		error(str(e))
+		sys.exit(1)
+	except KeyboardInterrupt:
+		Utils.pprint('RED','Interrupted')
+		sys.exit(68)
def main():
	"""Execute the commands collected on the command line, one by one."""
	global commands
	commands=Options.arg_line[:]
	while commands:
		x=commands.pop(0)
		ini=datetime.datetime.now()
		if x=='configure':
			fun=configure
		elif x=='build':
			fun=build
		else:
			fun=getattr(Utils.g_module,x,None)
		if not fun:
			raise Utils.WscriptError('No such command %r'%x)
		ctx=getattr(Utils.g_module,x+'_context',Utils.Context)()
		if x in['init','shutdown','dist','distclean','distcheck']:
			# these commands may be defined with or without a context argument
			try:
				fun(ctx)
			except TypeError:
				fun()
		else:
			fun(ctx)
		ela=''
		if not Options.options.progress_bar:
			ela=' (%s)'%Utils.get_elapsed_time(ini)
		if x!='init'and x!='shutdown':
			info('%r finished successfully%s'%(x,ela))
		if not commands and x!='shutdown':
			# always run 'shutdown' last
			commands.append('shutdown')
def configure(conf):
	"""Implementation of the 'configure' command: resolve srcdir/blddir,
	run the wscript configuration and store the lock/environment files."""
	src=getattr(Options.options,SRCDIR,None)
	if not src:src=getattr(Utils.g_module,SRCDIR,None)
	if not src:src=getattr(Utils.g_module,'top',None)
	if not src:
		src='.'
		incomplete_src=1
	src=os.path.abspath(src)
	bld=getattr(Options.options,BLDDIR,None)
	if not bld:bld=getattr(Utils.g_module,BLDDIR,None)
	if not bld:bld=getattr(Utils.g_module,'out',None)
	if not bld:
		bld='build'
		incomplete_bld=1
	if bld=='.':
		raise Utils.WafError('Setting blddir="." may cause distclean problems')
	bld=os.path.abspath(bld)
	try:os.makedirs(bld)
	except OSError:pass
	# do not build anything while configuring
	targets=Options.options.compile_targets
	Options.options.compile_targets=None
	Options.is_install=False
	conf.srcdir=src
	conf.blddir=bld
	conf.post_init()
	# the 'incomplete_*' names are probed via vars() to detect whether the
	# fallback defaults above were used, without dedicated flags
	if'incomplete_src'in vars():
		conf.check_message_1('Setting srcdir to')
		conf.check_message_2(src)
	if'incomplete_bld'in vars():
		conf.check_message_1('Setting blddir to')
		conf.check_message_2(bld)
	conf.sub_config([''])
	conf.store()
	# record everything needed to replay the configuration (autoconfig)
	env=Environment.Environment()
	env[BLDDIR]=bld
	env[SRCDIR]=src
	env['argv']=sys.argv
	env['commands']=Options.commands
	env['options']=Options.options.__dict__
	env['hash']=conf.hash
	env['files']=conf.files
	env['environ']=dict(conf.environ)
	env['cwd']=os.path.split(Utils.g_module.root_path)[0]
	if Utils.g_module.root_path!=src:
		env.store(os.path.join(src,Options.lockfile))
	env.store(Options.lockfile)
	Options.options.compile_targets=targets
def clean(bld):
	'''removes the build files'''
	# the docstring above is user-visible help text (see Options.get_usage)
	try:
		lock_env=Environment.Environment(Options.lockfile)
	except IOError:
		raise Utils.WafError('Nothing to clean (project not configured)')
	bld.load_dirs(lock_env[SRCDIR],lock_env[BLDDIR])
	bld.load_envs()
	bld.is_install=0
	bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])
	try:
		bld.clean()
	finally:
		# persist the build state even when cleaning fails
		bld.save()
def check_configured(bld):
	"""When autoconfig is enabled, re-run the configuration if it is missing
	or out of date, and return a usable build context."""
	if not Configure.autoconfig:
		return bld
	conf_cls=getattr(Utils.g_module,'configure_context',Utils.Context)
	bld_cls=getattr(Utils.g_module,'build_context',Utils.Context)
	def reconf(proj):
		# replay configure with the options recorded in the lock file,
		# then restore the current option/logging state
		back=(Options.commands,Options.options.__dict__,Logs.zones,Logs.verbose)
		Options.commands=proj['commands']
		Options.options.__dict__=proj['options']
		conf=conf_cls()
		conf.environ=proj['environ']
		configure(conf)
		(Options.commands,Options.options.__dict__,Logs.zones,Logs.verbose)=back
	try:
		proj=Environment.Environment(Options.lockfile)
	except IOError:
		# never configured yet
		conf=conf_cls()
		configure(conf)
	else:
		try:
			bld=bld_cls()
			bld.load_dirs(proj[SRCDIR],proj[BLDDIR])
			bld.load_envs()
		except Utils.WafError:
			# stale or broken configuration: redo it
			reconf(proj)
			return bld_cls()
	try:
		proj=Environment.Environment(Options.lockfile)
	except IOError:
		raise Utils.WafError('Auto-config: project does not configure (bug)')
	# recompute the hash of the configuration inputs and compare
	h=0
	try:
		for file in proj['files']:
			if file.endswith('configure'):
				h=hash((h,Utils.readf(file)))
			else:
				mod=Utils.load_module(file)
				h=hash((h,mod.waf_hash_val))
	except(OSError,IOError):
		warn('Reconfiguring the project: a file is unavailable')
		reconf(proj)
	else:
		if(h!=proj['hash']):
			warn('Reconfiguring the project: the configuration has changed')
			reconf(proj)
	return bld_cls()
def install(bld):
	'''installs the build files'''
	# the docstring above is user-visible help text (see Options.get_usage)
	bld=check_configured(bld)
	Options.commands['install']=True
	Options.commands['uninstall']=False
	Options.is_install=True
	bld.is_install=INSTALL
	build_impl(bld)
	bld.install()
def uninstall(bld):
	'''removes the installed files'''
	Options.commands['install']=False
	Options.commands['uninstall']=True
	Options.is_install=True
	bld.is_install=UNINSTALL
	try:
		# nothing must be rebuilt while uninstalling: temporarily make
		# every task report SKIP_ME from runnable_status
		def runnable_status(self):
			return SKIP_ME
		setattr(Task.Task,'runnable_status_back',Task.Task.runnable_status)
		setattr(Task.Task,'runnable_status',runnable_status)
		build_impl(bld)
		bld.install()
	finally:
		# always restore the original method
		setattr(Task.Task,'runnable_status',Task.Task.runnable_status_back)
def build(bld):
	# implementation of the 'build' command (no docstring on purpose: the
	# wscript 'build' usually provides the user-visible help text)
	bld=check_configured(bld)
	Options.commands['install']=False
	Options.commands['uninstall']=False
	Options.is_install=False
	bld.is_install=0
	return build_impl(bld)
def build_impl(bld):
	"""Common part of build/install/uninstall: load the stored configuration,
	execute the wscript build functions and compile."""
	try:
		proj=Environment.Environment(Options.lockfile)
	except IOError:
		raise Utils.WafError("Project not configured (run 'waf configure' first)")
	bld.load_dirs(proj[SRCDIR],proj[BLDDIR])
	bld.load_envs()
	info("Waf: Entering directory `%s'"%bld.bldnode.abspath())
	bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])
	bld.pre_build()
	try:
		bld.compile()
	finally:
		# terminate the progress bar line before the closing message
		if Options.options.progress_bar:print('')
		info("Waf: Leaving directory `%s'"%bld.bldnode.abspath())
	bld.post_build()
	bld.install()
# folder/file names never included in a source archive (VCS metadata etc.)
excludes='.bzr .bzrignore .git .gitignore .svn CVS .cvsignore .arch-ids {arch} SCCS BitKeeper .hg _MTN _darcs Makefile Makefile.in config.log .gitattributes .hgignore .hgtags'.split()
# file suffixes never included (editor backups, caches, previous archives)
dist_exts='~ .rej .orig .pyc .pyo .bak .tar.bz2 tar.gz .zip .swp'.split()
def dont_dist(name,src,build_dir):
	"""Return True if the file *name* (found in folder *src*) must be
	excluded from the source archive created by dist().

	Excludes VCS metadata, backup/cache files, previous archives, the lock
	file at the top level and the build directory itself.
	"""
	# no 'global' statement needed: excludes/dist_exts are only read here
	if(name.startswith(',,')or name.startswith('++')or name.startswith('.waf')or(src=='.'and name==Options.lockfile)or name in excludes or name==build_dir):
		return True
	# str.endswith accepts a tuple of suffixes: one call replaces the loop
	if name.endswith(tuple(dist_exts)):
		return True
	return False
def copytree(src,dst,build_dir):
	"""Recursively copy *src* into the new folder *dst*, skipping every
	entry rejected by dont_dist()."""
	entries=os.listdir(src)
	os.makedirs(dst)
	for entry in entries:
		if dont_dist(entry,src,build_dir):
			continue
		from_path=os.path.join(src,entry)
		to_path=os.path.join(dst,entry)
		if os.path.isdir(from_path):
			copytree(from_path,to_path,build_dir)
		else:
			shutil.copy2(from_path,to_path)
def distclean(ctx=None):
	'''removes the build directory'''
	global commands
	lst=os.listdir('.')
	for f in lst:
		if f==Options.lockfile:
			# remove the build dir recorded in the lock file, then the lock
			try:
				proj=Environment.Environment(f)
			except:
				Logs.warn('could not read %r'%f)
				continue
			try:
				shutil.rmtree(proj[BLDDIR])
			except IOError:
				pass
			except OSError,e:
				# a missing build dir is fine; report anything else
				if e.errno!=errno.ENOENT:
					Logs.warn('project %r cannot be removed'%proj[BLDDIR])
			try:
				os.remove(f)
			except OSError,e:
				if e.errno!=errno.ENOENT:
					Logs.warn('file %r cannot be removed'%f)
		# also drop the .waf* cache folders, but only when no other
		# command is pending
		if not commands and f.startswith('.waf'):
			shutil.rmtree(f,ignore_errors=True)
def dist(appname='',version=''):
	'''makes a tarball for redistributing the sources'''
	import tarfile
	if not appname:appname=Utils.g_module.APPNAME
	if not version:version=Utils.g_module.VERSION
	tmp_folder=appname+'-'+version
	if g_gz in['gz','bz2']:
		arch_name=tmp_folder+'.tar.'+g_gz
	else:
		arch_name=tmp_folder+'.'+'zip'
	# remove a stale folder/archive from a previous run
	try:
		shutil.rmtree(tmp_folder)
	except(OSError,IOError):
		pass
	try:
		os.remove(arch_name)
	except(OSError,IOError):
		pass
	blddir=getattr(Utils.g_module,BLDDIR,None)
	if not blddir:
		blddir=getattr(Utils.g_module,'out',None)
	# copy the sources into a staging folder, excluding the build dir
	copytree('.',tmp_folder,blddir)
	# let the project tweak the staged folder before archiving
	dist_hook=getattr(Utils.g_module,'dist_hook',None)
	if dist_hook:
		back=os.getcwd()
		os.chdir(tmp_folder)
		try:
			dist_hook()
		finally:
			os.chdir(back)
	if g_gz in['gz','bz2']:
		tar=tarfile.open(arch_name,'w:'+g_gz)
		tar.add(tmp_folder)
		tar.close()
	else:
		Utils.zip_folder(tmp_folder,arch_name,tmp_folder)
	# sha1 of the archive for the final message; fall back silently
	try:from hashlib import sha1 as sha
	except ImportError:from sha import sha
	try:
		digest=" (sha=%r)"%sha(Utils.readf(arch_name)).hexdigest()
	except:
		digest=''
	info('New archive created: %s%s'%(arch_name,digest))
	if os.path.exists(tmp_folder):shutil.rmtree(tmp_folder)
	return arch_name
def distcheck(appname='',version='',subdir=''):
	'''checks if the sources compile (tarball from 'dist')'''
	import tempfile,tarfile
	if not appname:appname=Utils.g_module.APPNAME
	if not version:version=Utils.g_module.VERSION
	waf=os.path.abspath(sys.argv[0])
	tarball=dist(appname,version)
	path=appname+'-'+version
	if os.path.exists(path):
		shutil.rmtree(path)
	t=tarfile.open(tarball)
	for x in t:t.extract(x)
	t.close()
	if subdir:
		build_path=os.path.join(path,subdir)
	else:
		build_path=path
	# configure/build/install/uninstall the extracted sources into a
	# scratch prefix, reusing this very waf binary
	instdir=tempfile.mkdtemp('.inst','%s-%s'%(appname,version))
	ret=Utils.pproc.Popen([waf,'configure','build','install','uninstall','--destdir='+instdir],cwd=build_path).wait()
	if ret:
		raise Utils.WafError('distcheck failed with code %i'%ret)
	# a clean uninstall must leave the scratch prefix empty (and removed)
	if os.path.exists(instdir):
		raise Utils.WafError('distcheck succeeded, but files were left in %s'%instdir)
	shutil.rmtree(path)
def add_subdir(dir,bld):
	# thin wrapper: process the wscript of folder *dir* with the 'build' name
	bld.recurse(dir,'build')
+
diff --git a/wafadmin/Task.py b/wafadmin/Task.py
new file mode 100644
index 0000000..900fafc
--- /dev/null
+++ b/wafadmin/Task.py
@@ -0,0 +1,774 @@
+#! /usr/bin/env python
+# encoding: utf-8
+import sys
+if sys.hexversion < 0x020400f0: from sets import Set as set
+import os,shutil,sys,re,random,datetime,tempfile,shlex
+from Utils import md5
+import Build,Runner,Utils,Node,Logs,Options
+from Logs import debug,warn,error
+from Constants import*
# scheduling algorithm selector (NORMAL/JOBCONTROL/MAXPARALLEL,
# see TaskGroup.get_next_set)
algotype=NORMAL
# source templates compiled into python functions for string rules;
# the %s placeholders receive generated code -- do not alter the text
COMPILE_TEMPLATE_SHELL='''
def f(task):
	env = task.env
	wd = getattr(task, 'cwd', None)
	p = env.get_flat
	cmd = \'\'\' %s \'\'\' % s
	return task.exec_command(cmd, cwd=wd)
'''
COMPILE_TEMPLATE_NOSHELL='''
def f(task):
	env = task.env
	wd = getattr(task, 'cwd', None)
	def to_list(xx):
		if isinstance(xx, str): return [xx]
		return xx
	lst = []
	%s
	lst = [x for x in lst if x]
	return task.exec_command(lst, cwd=wd)
'''
# hook called on the task list in TaskGroup.prepare();
# defaults to Utils.nada (presumably a no-op -- replaceable by tools)
file_deps=Utils.nada
class TaskManager(object):
	"""Holds the task groups and hands out batches of runnable tasks.

	Groups are processed strictly in order; parallelism only happens
	inside the current group."""
	def __init__(self):
		self.groups=[]
		self.tasks_done=[]
		self.current_group=0
		# optional name -> TaskGroup mapping
		self.groups_names={}
	def group_name(self,g):
		# accept either a TaskGroup instance or its index; '' when unnamed
		if not isinstance(g,TaskGroup):
			g=self.groups[g]
		for x in self.groups_names:
			if id(self.groups_names[x])==id(g):
				return x
		return''
	def group_idx(self,tg):
		# index of the group containing the task generator tg, or None
		se=id(tg)
		for i in range(len(self.groups)):
			g=self.groups[i]
			for t in g.tasks_gen:
				if id(t)==se:
					return i
		return None
	def get_next_set(self):
		# return (maximum_jobs, task_list) for the next batch, advancing
		# to the following group when the current one is exhausted
		ret=None
		while not ret and self.current_group<len(self.groups):
			ret=self.groups[self.current_group].get_next_set()
			if ret:return ret
			else:
				# group finished: run its post functions, then move on
				self.groups[self.current_group].process_install()
				self.current_group+=1
		return(None,None)
	def add_group(self,name=None,set=True):
		g=TaskGroup()
		if name and name in self.groups_names:
			error('add_group: name %s already present'%name)
		self.groups_names[name]=g
		self.groups.append(g)
		if set:
			self.current_group=len(self.groups)-1
	def set_group(self,idx):
		# select the current group by name or by index
		if isinstance(idx,str):
			g=self.groups_names[idx]
			for x in xrange(len(self.groups)):
				if id(g)==id(self.groups[x]):
					self.current_group=x
		else:
			self.current_group=idx
	def add_task_gen(self,tgen):
		if not self.groups:self.add_group()
		self.groups[self.current_group].tasks_gen.append(tgen)
	def add_task(self,task):
		if not self.groups:self.add_group()
		self.groups[self.current_group].tasks.append(task)
	def total(self):
		# total number of tasks over all groups
		total=0
		if not self.groups:return 0
		for group in self.groups:
			total+=len(group.tasks)
		return total
	def add_finished(self,tsk):
		self.tasks_done.append(tsk)
		bld=tsk.generator.bld
		if bld.is_install:
			# installing: run the per-task 'install' override when present,
			# otherwise the default install routine
			f=None
			if'install'in tsk.__dict__:
				f=tsk.__dict__['install']
				if f:f(tsk)
			else:
				tsk.install()
class TaskGroup(object):
	"""A set of tasks executed together, ordered by constraints derived
	from before/after attributes and ext_in/ext_out extensions."""
	def __init__(self):
		self.tasks=[]
		self.tasks_gen=[]
		# constraint-hash -> list of tasks sharing those constraints
		self.cstr_groups=Utils.DefaultDict(list)
		# partial order between constraint-group hashes
		self.cstr_order=Utils.DefaultDict(set)
		self.temp_tasks=[]
		self.ready=0
		# functions executed when the group is done (see process_install)
		self.post_funs=[]
	def reset(self):
		# put all tasks back into the main list so prepare() can run again
		for x in self.cstr_groups:
			self.tasks+=self.cstr_groups[x]
		self.tasks=self.temp_tasks+self.tasks
		self.temp_tasks=[]
		self.cstr_groups=Utils.DefaultDict(list)
		self.cstr_order=Utils.DefaultDict(set)
		self.ready=0
	def process_install(self):
		for(f,k,kw)in self.post_funs:
			f(*k,**kw)
	def prepare(self):
		self.ready=1
		file_deps(self.tasks)
		self.make_cstr_groups()
		self.extract_constraints()
	def get_next_set(self):
		# dispatch on the module-level scheduling algorithm
		global algotype
		if algotype==NORMAL:
			tasks=self.tasks_in_parallel()
			maxj=MAXJOBS
		elif algotype==JOBCONTROL:
			(maxj,tasks)=self.tasks_by_max_jobs()
		elif algotype==MAXPARALLEL:
			tasks=self.tasks_with_inner_constraints()
			maxj=MAXJOBS
		else:
			raise Utils.WafError("unknown algorithm type %s"%(algotype))
		if not tasks:return()
		return(maxj,tasks)
	def make_cstr_groups(self):
		# group tasks by identical constraint hashes
		self.cstr_groups=Utils.DefaultDict(list)
		for x in self.tasks:
			h=x.hash_constraints()
			self.cstr_groups[h].append(x)
	def set_order(self,a,b):
		# record "group a runs before group b"
		self.cstr_order[a].add(b)
	def compare_exts(self,t1,t2):
		# -1/0/1 depending on whether t1 consumes what t2 produces
		# (ext_in vs ext_out) or the reverse
		x="ext_in"
		y="ext_out"
		in_=t1.attr(x,())
		out_=t2.attr(y,())
		for k in in_:
			if k in out_:
				return-1
		in_=t2.attr(x,())
		out_=t1.attr(y,())
		for k in in_:
			if k in out_:
				return 1
		return 0
	def compare_partial(self,t1,t2):
		# -1/0/1 from the explicit before/after class-name constraints
		m="after"
		n="before"
		name=t2.__class__.__name__
		if name in Utils.to_list(t1.attr(m,())):return-1
		elif name in Utils.to_list(t1.attr(n,())):return 1
		name=t1.__class__.__name__
		if name in Utils.to_list(t2.attr(m,())):return 1
		elif name in Utils.to_list(t2.attr(n,())):return-1
		return 0
	def extract_constraints(self):
		# pairwise comparison of one representative task per group
		keys=self.cstr_groups.keys()
		max=len(keys)
		for i in xrange(max):
			t1=self.cstr_groups[keys[i]][0]
			for j in xrange(i+1,max):
				t2=self.cstr_groups[keys[j]][0]
				val=(self.compare_exts(t1,t2)or self.compare_partial(t1,t2))
				if val>0:
					self.set_order(keys[i],keys[j])
				elif val<0:
					self.set_order(keys[j],keys[i])
	def tasks_in_parallel(self):
		# return the tasks whose constraint group has no pending predecessor
		if not self.ready:self.prepare()
		keys=self.cstr_groups.keys()
		unconnected=[]
		remainder=[]
		for u in keys:
			for k in self.cstr_order.values():
				if u in k:
					remainder.append(u)
					break
			else:
				unconnected.append(u)
		toreturn=[]
		for y in unconnected:
			toreturn.extend(self.cstr_groups[y])
		# consume the groups that were handed out
		for y in unconnected:
			try:self.cstr_order.__delitem__(y)
			except KeyError:pass
			self.cstr_groups.__delitem__(y)
		if not toreturn and remainder:
			raise Utils.WafError("circular order constraint detected %r"%remainder)
		return toreturn
	def tasks_by_max_jobs(self):
		# JOBCONTROL mode: return a batch of tasks sharing the smallest
		# 'maxjobs' value seen; the rest wait in temp_tasks
		if not self.ready:self.prepare()
		if not self.temp_tasks:self.temp_tasks=self.tasks_in_parallel()
		if not self.temp_tasks:return(None,None)
		maxjobs=MAXJOBS
		ret=[]
		remaining=[]
		for t in self.temp_tasks:
			m=getattr(t,"maxjobs",getattr(self.__class__,"maxjobs",MAXJOBS))
			if m>maxjobs:
				remaining.append(t)
			elif m<maxjobs:
				# stricter limit found: restart the batch with this task
				remaining+=ret
				ret=[t]
				maxjobs=m
			else:
				ret.append(t)
		self.temp_tasks=remaining
		return(maxjobs,ret)
	def tasks_with_inner_constraints(self):
		# MAXPARALLEL mode: translate the group constraints into per-task
		# run_after links, then hand out all the tasks at once
		if not self.ready:self.prepare()
		if getattr(self,"done",None):return None
		for p in self.cstr_order:
			for v in self.cstr_order[p]:
				for m in self.cstr_groups[p]:
					for n in self.cstr_groups[v]:
						n.set_run_after(m)
		self.cstr_order=Utils.DefaultDict(set)
		self.cstr_groups=Utils.DefaultDict(list)
		self.done=1
		return self.tasks[:]
class store_task_type(type):
	"""Metaclass registering every task class into TaskBase.classes,
	under its class name minus a trailing '_task' suffix."""
	def __init__(cls,name,bases,dict):
		super(store_task_type,cls).__init__(name,bases,dict)
		name=cls.__name__
		if name.endswith('_task'):
			name=name.replace('_task','')
		if name!='TaskBase':
			TaskBase.classes[name]=cls
+class TaskBase(object):
+	__metaclass__=store_task_type
+	color="GREEN"
+	maxjobs=MAXJOBS
+	classes={}
+	stat=None
+	def __init__(self,*k,**kw):
+		self.hasrun=NOT_RUN
+		try:
+			self.generator=kw['generator']
+		except KeyError:
+			self.generator=self
+			self.bld=Build.bld
+		if kw.get('normal',1):
+			self.generator.bld.task_manager.add_task(self)
+	def __repr__(self):
+		return'\n\t{task: %s %s}'%(self.__class__.__name__,str(getattr(self,"fun","")))
+	def __str__(self):
+		if hasattr(self,'fun'):
+			return'executing: %s\n'%self.fun.__name__
+		return self.__class__.__name__+'\n'
+	def exec_command(self,*k,**kw):
+		if self.env['env']:
+			kw['env']=self.env['env']
+		return self.generator.bld.exec_command(*k,**kw)
+	def runnable_status(self):
+		return RUN_ME
+	def can_retrieve_cache(self):
+		return False
+	def call_run(self):
+		if self.can_retrieve_cache():
+			return 0
+		return self.run()
+	def run(self):
+		if hasattr(self,'fun'):
+			return self.fun(self)
+		return 0
+	def post_run(self):
+		pass
+	def display(self):
+		col1=Logs.colors(self.color)
+		col2=Logs.colors.NORMAL
+		if Options.options.progress_bar==1:
+			return self.generator.bld.progress_line(self.position[0],self.position[1],col1,col2)
+		if Options.options.progress_bar==2:
+			ela=Utils.get_elapsed_time(self.generator.bld.ini)
+			try:
+				ins=','.join([n.name for n in self.inputs])
+			except AttributeError:
+				ins=''
+			try:
+				outs=','.join([n.name for n in self.outputs])
+			except AttributeError:
+				outs=''
+			return'|Total %s|Current %s|Inputs %s|Outputs %s|Time %s|\n'%(self.position[1],self.position[0],ins,outs,ela)
+		total=self.position[1]
+		n=len(str(total))
+		fs='[%%%dd/%%%dd] %%s%%s%%s'%(n,n)
+		return fs%(self.position[0],self.position[1],col1,str(self),col2)
+	def attr(self,att,default=None):
+		ret=getattr(self,att,self)
+		if ret is self:return getattr(self.__class__,att,default)
+		return ret
+	def hash_constraints(self):
+		# Hash of the ordering constraints; tasks with equal hashes may be
+		# grouped together by the scheduler.
+		a=self.attr
+		sum=hash((self.__class__.__name__,str(a('before','')),str(a('after','')),str(a('ext_in','')),str(a('ext_out','')),self.__class__.maxjobs))
+		return sum
+	def format_error(self):
+		# Return an error message for a failed task, or '' on success.
+		if getattr(self,"err_msg",None):
+			return self.err_msg
+		elif self.hasrun==CRASHED:
+			try:
+				# err_code may be missing when the process never started.
+				return" -> task failed (err #%d): %r"%(self.err_code,self)
+			except AttributeError:
+				return" -> task failed: %r"%self
+		elif self.hasrun==MISSING:
+			return" -> missing files: %r"%self
+		else:
+			return''
+	def install(self):
+		# Install the task outputs (and, with 'src', the inputs) according
+		# to the 'install_path', 'chmod' and 'filename' attributes.
+		bld=self.generator.bld
+		d=self.attr('install')
+		if self.attr('install_path'):
+			lst=[a.relpath_gen(bld.srcnode)for a in self.outputs]
+			perm=self.attr('chmod',O644)
+			if self.attr('src'):
+				# Also install the source files.
+				lst+=[a.relpath_gen(bld.srcnode)for a in self.inputs]
+			if self.attr('filename'):
+				# Install the first file under an explicit target name.
+				dir=self.install_path.rstrip(os.sep)+os.sep+self.attr('filename')
+				bld.install_as(dir,lst[0],self.env,perm)
+			else:
+				bld.install_files(self.install_path,lst,self.env,perm)
+# A task with file inputs/outputs and signatures: it only runs when the
+# signature (explicit deps + implicit deps + environment variables) differs
+# from the one stored during the previous build. Python 2 code: md5 digests
+# are str, hence sig.encode('hex') below.
+class Task(TaskBase):
+	vars=[]
+	def __init__(self,env,**kw):
+		TaskBase.__init__(self,**kw)
+		self.env=env
+		self.inputs=[]
+		self.outputs=[]
+		self.deps_nodes=[]
+		self.run_after=[]
+	def __str__(self):
+		# "link: a.o b.o -> prog"-style description.
+		env=self.env
+		src_str=' '.join([a.nice_path(env)for a in self.inputs])
+		tgt_str=' '.join([a.nice_path(env)for a in self.outputs])
+		if self.outputs:sep=' -> '
+		else:sep=''
+		return'%s: %s%s%s\n'%(self.__class__.__name__.replace('_task',''),src_str,sep,tgt_str)
+	def __repr__(self):
+		return"".join(['\n\t{task: ',self.__class__.__name__," ",",".join([x.name for x in self.inputs])," -> ",",".join([x.name for x in self.outputs]),'}'])
+	def unique_id(self):
+		# Stable identifier: hash of class name, variant and all input/output
+		# node paths; memoized in self.uid.
+		try:
+			return self.uid
+		except AttributeError:
+			m=md5()
+			up=m.update
+			up(self.__class__.__name__)
+			up(self.env.variant())
+			p=None
+			for x in self.inputs+self.outputs:
+				# Hash each parent directory only once per run of siblings.
+				if p!=x.parent.id:
+					p=x.parent.id
+					up(x.parent.abspath())
+				up(x.name)
+			self.uid=m.digest()
+			return self.uid
+	def set_inputs(self,inp):
+		if isinstance(inp,list):self.inputs+=inp
+		else:self.inputs.append(inp)
+	def set_outputs(self,out):
+		if isinstance(out,list):self.outputs+=out
+		else:self.outputs.append(out)
+	def set_run_after(self,task):
+		# Order constraint: this task will not run before 'task' is done.
+		assert isinstance(task,TaskBase)
+		self.run_after.append(task)
+	def add_file_dependency(self,filename):
+		node=self.generator.bld.path.find_resource(filename)
+		self.deps_nodes.append(node)
+	def signature(self):
+		# Compute (and memoize in cache_sig) the task signature from the
+		# explicit deps, environment variables and implicit (scanned) deps.
+		try:return self.cache_sig[0]
+		except AttributeError:pass
+		self.m=md5()
+		exp_sig=self.sig_explicit_deps()
+		var_sig=self.sig_vars()
+		imp_sig=SIG_NIL
+		if self.scan:
+			try:
+				imp_sig=self.sig_implicit_deps()
+			except ValueError:
+				# The implicit deps were stale; rescan and recompute.
+				return self.signature()
+		ret=self.m.digest()
+		self.cache_sig=(ret,exp_sig,imp_sig,var_sig)
+		return ret
+	def runnable_status(self):
+		# RUN_ME / SKIP_ME / ASK_LATER, based on predecessors and signatures.
+		if self.inputs and(not self.outputs):
+			if not getattr(self.__class__,'quiet',None):
+				warn("invalid task (no inputs OR outputs): override in a Task subclass or set the attribute 'quiet' %r"%self)
+		for t in self.run_after:
+			if not t.hasrun:
+				return ASK_LATER
+		env=self.env
+		bld=self.generator.bld
+		new_sig=self.signature()
+		key=self.unique_id()
+		try:
+			prev_sig=bld.task_sigs[key][0]
+		except KeyError:
+			debug("task: task %r must run as it was never run before or the task code changed",self)
+			return RUN_ME
+		for node in self.outputs:
+			variant=node.variant(env)
+			try:
+				if bld.node_sigs[variant][node.id]!=new_sig:
+					return RUN_ME
+			except KeyError:
+				debug("task: task %r must run as the output nodes do not exist",self)
+				return RUN_ME
+		if Logs.verbose:self.debug_why(bld.task_sigs[key])
+		if new_sig!=prev_sig:
+			return RUN_ME
+		return SKIP_ME
+	def post_run(self):
+		# Store the signatures of the produced files and, when enabled, copy
+		# the outputs into the global cache directory.
+		bld=self.generator.bld
+		env=self.env
+		sig=self.signature()
+		ssig=sig.encode('hex')
+		variant=env.variant()
+		for node in self.outputs:
+			try:
+				os.stat(node.abspath(env))
+			except OSError:
+				self.hasrun=MISSING
+				self.err_msg='-> missing file: %r'%node.abspath(env)
+				raise Utils.WafError
+			bld.node_sigs[variant][node.id]=sig
+		bld.task_sigs[self.unique_id()]=self.cache_sig
+		if not Options.cache_global or Options.options.nocache or not self.outputs:
+			return None
+		if getattr(self,'cached',None):
+			return None
+		dname=os.path.join(Options.cache_global,ssig)
+		# Copy into a temp dir first, then rename atomically into the cache.
+		tmpdir=tempfile.mkdtemp(prefix=Options.cache_global)
+		try:
+			shutil.rmtree(dname)
+		except:
+			pass
+		try:
+			for node in self.outputs:
+				variant=node.variant(env)
+				dest=os.path.join(tmpdir,node.name)
+				shutil.copy2(node.abspath(env),dest)
+		except(OSError,IOError):
+			try:
+				shutil.rmtree(tmpdir)
+			except:
+				pass
+		else:
+			try:
+				os.rename(tmpdir,dname)
+			except OSError:
+				# Another process won the race; discard our copy.
+				try:
+					shutil.rmtree(tmpdir)
+				except:
+					pass
+			else:
+				try:
+					os.chmod(dname,O755)
+				except:
+					pass
+	def can_retrieve_cache(self):
+		# Try to restore the outputs from the global cache; returns a true
+		# value only when every output was copied back successfully.
+		if not Options.cache_global or Options.options.nocache or not self.outputs:
+			return None
+		env=self.env
+		sig=self.signature()
+		ssig=sig.encode('hex')
+		dname=os.path.join(Options.cache_global,ssig)
+		try:
+			t1=os.stat(dname).st_mtime
+		except OSError:
+			return None
+		for node in self.outputs:
+			variant=node.variant(env)
+			orig=os.path.join(dname,node.name)
+			try:
+				shutil.copy2(orig,node.abspath(env))
+				# Touch the cache entry to keep it "recently used".
+				os.utime(orig,None)
+			except(OSError,IOError):
+				debug('task: failed retrieving file')
+				return None
+		try:
+			t2=os.stat(dname).st_mtime
+		except OSError:
+			return None
+		if t1!=t2:
+			# The cache entry changed while we were copying; distrust it.
+			return None
+		for node in self.outputs:
+			self.generator.bld.node_sigs[variant][node.id]=sig
+			if Options.options.progress_bar<1:
+				self.generator.bld.printout('restoring from cache %r\n'%node.bldpath(env))
+		self.cached=True
+		return 1
+	def debug_why(self,old_sigs):
+		# Log which signature component (explicit/implicit/env) changed.
+		new_sigs=self.cache_sig
+		def v(x):
+			return x.encode('hex')
+		debug("Task %r",self)
+		msgs=['Task must run','* Source file or manual dependency','* Implicit dependency','* Environment variable']
+		tmp='task: -> %s: %s %s'
+		for x in xrange(len(msgs)):
+			if(new_sigs[x]!=old_sigs[x]):
+				debug(tmp,msgs[x],v(old_sigs[x]),v(new_sigs[x]))
+	def sig_explicit_deps(self):
+		# Hash the signatures of the inputs, dep_nodes and any manual
+		# dependencies registered in bld.deps_man.
+		bld=self.generator.bld
+		up=self.m.update
+		for x in self.inputs+getattr(self,'dep_nodes',[]):
+			if not x.parent.id in bld.cache_scanned_folders:
+				bld.rescan(x.parent)
+			variant=x.variant(self.env)
+			try:
+				up(bld.node_sigs[variant][x.id])
+			except KeyError:
+				raise Utils.WafError('Missing node signature for %r (required by %r)'%(x,self))
+		if bld.deps_man:
+			additional_deps=bld.deps_man
+			for x in self.inputs+self.outputs:
+				try:
+					d=additional_deps[x.id]
+				except KeyError:
+					continue
+				for v in d:
+					if isinstance(v,Node.Node):
+						bld.rescan(v.parent)
+						variant=v.variant(self.env)
+						try:
+							v=bld.node_sigs[variant][v.id]
+						except KeyError:
+							raise Utils.WafError('Missing node signature for %r (required by %r)'%(v,self))
+					elif hasattr(v,'__call__'):
+						# Callable dependency: hash its return value.
+						v=v()
+					up(v)
+		for x in self.deps_nodes:
+			v=bld.node_sigs[x.variant(self.env)][x.id]
+			up(v)
+		return self.m.digest()
+	def sig_vars(self):
+		# Hash the environment variables listed in the class 'vars' plus any
+		# per-instance 'dep_vars'.
+		bld=self.generator.bld
+		env=self.env
+		act_sig=bld.hash_env_vars(env,self.__class__.vars)
+		self.m.update(act_sig)
+		dep_vars=getattr(self,'dep_vars',None)
+		if dep_vars:
+			self.m.update(bld.hash_env_vars(env,dep_vars))
+		return self.m.digest()
+	scan=None
+	def sig_implicit_deps(self):
+		# Hash the dependencies found by the scanner; raises ValueError when
+		# the cached scan results are stale so signature() retries.
+		bld=self.generator.bld
+		key=self.unique_id()
+		prev_sigs=bld.task_sigs.get(key,())
+		if prev_sigs:
+			try:
+				if prev_sigs[2]==self.compute_sig_implicit_deps():
+					return prev_sigs[2]
+			except(KeyError,OSError):
+				pass
+			del bld.task_sigs[key]
+			raise ValueError('rescan')
+		(nodes,names)=self.scan()
+		if Logs.verbose:
+			debug('deps: scanner for %s returned %s %s',str(self),str(nodes),str(names))
+		bld.node_deps[key]=nodes
+		bld.raw_deps[key]=names
+		try:
+			sig=self.compute_sig_implicit_deps()
+		except KeyError:
+			# Build a helpful error listing the nodes that have no signature.
+			try:
+				nodes=[]
+				for k in bld.node_deps.get(self.unique_id(),[]):
+					if k.id&3==2:
+						if not k.id in bld.node_sigs[0]:
+							nodes.append(k)
+					else:
+						if not k.id in bld.node_sigs[self.env.variant()]:
+							nodes.append(k)
+			except:
+				nodes='?'
+			raise Utils.WafError('Missing node signature for %r (for implicit dependencies %r)'%(nodes,self))
+		return sig
+	def compute_sig_implicit_deps(self):
+		# Fold the signatures of the scanned dependency nodes into self.m.
+		upd=self.m.update
+		bld=self.generator.bld
+		tstamp=bld.node_sigs
+		env=self.env
+		for k in bld.node_deps.get(self.unique_id(),[]):
+			if not k.parent.id in bld.cache_scanned_folders:
+				bld.rescan(k.parent)
+			# k.id&3==2 marks source nodes (variant 0); others are build nodes.
+			if k.id&3==2:
+				upd(tstamp[0][k.id])
+			else:
+				upd(tstamp[env.variant()][k.id])
+		return self.m.digest()
+def funex(c):
+	# Compile the generated source 'c' and return the function 'f' it defines.
+	dc={}
+	exec(c,dc)
+	return dc['f']
+# Pattern for rule strings: matches backslashes, literal '$$' and the
+# ${VAR} / ${VAR.method} substitutions expanded by compile_fun_*.
+reg_act=re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})",re.M)
+def compile_fun_shell(name,line):
+	# Turn a rule string such as '${CC} ${SRC} -o ${TGT}' into a python
+	# function that runs it through the shell; returns (function, var names).
+	extr=[]
+	def repl(match):
+		# Replace each ${...} with a %s placeholder and record (var, code).
+		g=match.group
+		if g('dollar'):return"$"
+		elif g('backslash'):return'\\\\'
+		elif g('subst'):extr.append((g('var'),g('code')));return"%s"
+		return None
+	line=reg_act.sub(repl,line)
+	parm=[]
+	dvars=[]
+	app=parm.append
+	for(var,meth)in extr:
+		# SRC/TGT expand to the task nodes; other names read the environment.
+		if var=='SRC':
+			if meth:app('task.inputs%s'%meth)
+			else:app('" ".join([a.srcpath(env) for a in task.inputs])')
+		elif var=='TGT':
+			if meth:app('task.outputs%s'%meth)
+			else:app('" ".join([a.bldpath(env) for a in task.outputs])')
+		else:
+			if not var in dvars:dvars.append(var)
+			app("p('%s')"%var)
+	if parm:parm="%% (%s) "%(',\n\t\t'.join(parm))
+	else:parm=''
+	c=COMPILE_TEMPLATE_SHELL%(line,parm)
+	debug('action: %s',c)
+	return(funex(c),dvars)
+def compile_fun_noshell(name,line):
+	# Like compile_fun_shell, but build an argument list and execute the
+	# command directly (no shell); returns (function, var names).
+	extr=[]
+	def repl(match):
+		g=match.group
+		if g('dollar'):return"$"
+		elif g('subst'):extr.append((g('var'),g('code')));return"<<|@|>>"
+		return None
+	line2=reg_act.sub(repl,line)
+	# The sentinel splits the literal parts from the substitution points.
+	params=line2.split('<<|@|>>')
+	buf=[]
+	dvars=[]
+	app=buf.append
+	for x in xrange(len(extr)):
+		params[x]=params[x].strip()
+		if params[x]:
+			app("lst.extend(%r)"%params[x].split())
+		(var,meth)=extr[x]
+		if var=='SRC':
+			if meth:app('lst.append(task.inputs%s)'%meth)
+			else:app("lst.extend([a.srcpath(env) for a in task.inputs])")
+		elif var=='TGT':
+			if meth:app('lst.append(task.outputs%s)'%meth)
+			else:app("lst.extend([a.bldpath(env) for a in task.outputs])")
+		else:
+			app('lst.extend(to_list(env[%r]))'%var)
+			if not var in dvars:dvars.append(var)
+	if params[-1]:
+		# Trailing literal arguments after the last substitution.
+		app("lst.extend(%r)"%shlex.split(params[-1]))
+	fun=COMPILE_TEMPLATE_NOSHELL%"\n\t".join(buf)
+	debug('action: %s',fun)
+	return(funex(fun),dvars)
+def compile_fun(name,line,shell=None):
+	# Compile a rule string, deciding between shell and direct execution:
+	# redirections or '&&' force the shell; otherwise default per platform.
+	if line.find('<')>0 or line.find('>')>0 or line.find('&&')>0:
+		shell=True
+	if shell is None:
+		if sys.platform=='win32':
+			shell=False
+		else:
+			shell=True
+	if shell:
+		return compile_fun_shell(name,line)
+	else:
+		return compile_fun_noshell(name,line)
+def simple_task_type(name,line,color='GREEN',vars=[],ext_in=[],ext_out=[],before=[],after=[],shell=None):
+	# Create a task class from a rule string; the variables referenced by the
+	# rule become the signature vars unless 'vars' is given explicitly.
+	(fun,dvars)=compile_fun(name,line,shell)
+	fun.code=line
+	return task_type_from_func(name,fun,vars or dvars,color,ext_in,ext_out,before,after)
+def task_type_from_func(name,func,vars=[],color='GREEN',ext_in=[],ext_out=[],before=[],after=[]):
+	# Create and register a new Task subclass whose run() is 'func'.
+	params={'run':func,'vars':vars,'color':color,'name':name,'ext_in':Utils.to_list(ext_in),'ext_out':Utils.to_list(ext_out),'before':Utils.to_list(before),'after':Utils.to_list(after),}
+	cls=type(Task)(name,(Task,),params)
+	TaskBase.classes[name]=cls
+	return cls
+def always_run(cls):
+	# Patch a task class so its instances always run; the original
+	# runnable_status is still called for its side effects (signatures).
+	old=cls.runnable_status
+	def always(self):
+		old(self)
+		return RUN_ME
+	cls.runnable_status=always
+def update_outputs(cls):
+	# Patch a task class to hash its output files after running and to skip
+	# re-running when the outputs already match the stored signature.
+	old_post_run=cls.post_run
+	def post_run(self):
+		old_post_run(self)
+		bld=self.outputs[0].__class__.bld
+		for output in self.outputs:
+			# Record the hash of the file content, not the task signature.
+			bld.node_sigs[self.env.variant()][output.id]=Utils.h_file(output.abspath(self.env))
+	cls.post_run=post_run
+	old_runnable_status=cls.runnable_status
+	def runnable_status(self):
+		status=old_runnable_status(self)
+		if status!=RUN_ME:
+			return status
+		try:
+			bld=self.outputs[0].__class__.bld
+			new_sig=self.signature()
+			prev_sig=bld.task_sigs[self.unique_id()][0]
+			if prev_sig==new_sig:
+				for x in self.outputs:
+					if not x.id in bld.node_sigs[self.env.variant()]:
+						return RUN_ME
+				return SKIP_ME
+		except KeyError:
+			pass
+		except IndexError:
+			pass
+		return RUN_ME
+	cls.runnable_status=runnable_status
+def extract_outputs(tasks):
+	# Add run_after constraints between tasks of the same variant whenever
+	# one task's input is another task's output.
+	v={}
+	for x in tasks:
+		try:
+			(ins,outs)=v[x.env.variant()]
+		except KeyError:
+			ins={}
+			outs={}
+			v[x.env.variant()]=(ins,outs)
+		# Index tasks by the node ids they consume and produce.
+		for a in getattr(x,'inputs',[]):
+			try:ins[a.id].append(x)
+			except KeyError:ins[a.id]=[x]
+		for a in getattr(x,'outputs',[]):
+			try:outs[a.id].append(x)
+			except KeyError:outs[a.id]=[x]
+	for(ins,outs)in v.values():
+		# Nodes both consumed and produced define the ordering edges.
+		links=set(ins.iterkeys()).intersection(outs.iterkeys())
+		for k in links:
+			for a in ins[k]:
+				for b in outs[k]:
+					a.set_run_after(b)
+def extract_deps(tasks):
+	# Like extract_outputs, but also order tasks by the implicit (scanned)
+	# dependencies stored in bld.node_deps.
+	extract_outputs(tasks)
+	out_to_task={}
+	for x in tasks:
+		v=x.env.variant()
+		try:
+			lst=x.outputs
+		except AttributeError:
+			pass
+		else:
+			for node in lst:
+				out_to_task[(v,node.id)]=x
+	dep_to_task={}
+	for x in tasks:
+		try:
+			# Force a scan so node_deps is populated; failures are tolerated.
+			x.signature()
+		except:
+			pass
+		v=x.env.variant()
+		key=x.unique_id()
+		for k in x.generator.bld.node_deps.get(x.unique_id(),[]):
+			try:dep_to_task[(v,k.id)].append(x)
+			except KeyError:dep_to_task[(v,k.id)]=[x]
+	deps=set(dep_to_task.keys()).intersection(set(out_to_task.keys()))
+	for idx in deps:
+		for k in dep_to_task[idx]:
+			k.set_run_after(out_to_task[idx])
+	for x in tasks:
+		# Drop the memoized signatures so they are recomputed at build time.
+		try:
+			delattr(x,'cache_sig')
+		except AttributeError:
+			pass
+
diff --git a/wafadmin/TaskGen.py b/wafadmin/TaskGen.py
new file mode 100644
index 0000000..303eeb1
--- /dev/null
+++ b/wafadmin/TaskGen.py
@@ -0,0 +1,345 @@
+#! /usr/bin/env python
+# encoding: utf-8
+import sys
+if sys.hexversion < 0x020400f0: from sets import Set as set
+import os,traceback,copy
+import Build,Task,Utils,Logs,Options
+from Logs import debug,error,warn
+from Constants import*
+# Common misspellings of task_gen attribute names, silently corrected (with
+# a warning) by task_gen.__setattr__.
+typos={'sources':'source','targets':'target','include':'includes','define':'defines','importpath':'importpaths','install_var':'install_path','install_subdir':'install_path','inst_var':'install_path','inst_dir':'install_path','feature':'features',}
+class register_obj(type):
+	# Metaclass: any subclass named '<something>_taskgen' is registered in
+	# task_gen.classes under '<something>'.
+	def __init__(cls,name,bases,dict):
+		super(register_obj,cls).__init__(name,bases,dict)
+		name=cls.__name__
+		suffix='_taskgen'
+		if name.endswith(suffix):
+			task_gen.classes[name.replace(suffix,'')]=cls
+# Task generator: holds the user-facing build declaration (source, target,
+# features) and, when posted, creates the concrete Task instances by running
+# the methods bound to its features in precedence order. Python 2 code.
+class task_gen(object):
+	__metaclass__=register_obj
+	mappings={}
+	mapped={}
+	prec=Utils.DefaultDict(list)
+	traits=Utils.DefaultDict(set)
+	classes={}
+	def __init__(self,*kw,**kwargs):
+		# Positional arguments are feature names; keyword arguments become
+		# attributes. Registers itself with the build context.
+		self.prec=Utils.DefaultDict(list)
+		self.source=''
+		self.target=''
+		self.meths=[]
+		self.mappings={}
+		self.features=list(kw)
+		self.tasks=[]
+		self.default_chmod=O644
+		self.default_install_path=None
+		self.allnodes=[]
+		self.bld=kwargs.get('bld',Build.bld)
+		self.env=self.bld.env.copy()
+		self.path=self.bld.path
+		self.name=''
+		# Per-directory counter used to derive unique names.
+		self.idx=self.bld.idx[self.path.id]=self.bld.idx.get(self.path.id,0)+1
+		for key,val in kwargs.iteritems():
+			setattr(self,key,val)
+		self.bld.task_manager.add_task_gen(self)
+		self.bld.all_task_gen.append(self)
+	def __str__(self):
+		return("<task_gen '%s' of type %s defined in %s>"%(self.name or self.target,self.__class__.__name__,str(self.path)))
+	def __setattr__(self,name,attr):
+		# Transparently fix known attribute-name typos (see 'typos' above).
+		real=typos.get(name,name)
+		if real!=name:
+			warn('typo %s -> %s'%(name,real))
+			if Logs.verbose>0:
+				traceback.print_stack()
+		object.__setattr__(self,real,attr)
+	def to_list(self,value):
+		# Split whitespace-separated strings; pass lists through unchanged.
+		if isinstance(value,str):return value.split()
+		else:return value
+	def apply(self):
+		# Collect the methods bound to the features, topologically sort them
+		# by the precedence constraints, then call each in order.
+		keys=set(self.meths)
+		self.features=Utils.to_list(self.features)
+		for x in self.features+['*']:
+			st=task_gen.traits[x]
+			if not st:
+				warn('feature %r does not exist - bind at least one method to it'%x)
+			keys.update(st)
+		prec={}
+		prec_tbl=self.prec or task_gen.prec
+		for x in prec_tbl:
+			if x in keys:
+				prec[x]=prec_tbl[x]
+		tmp=[]
+		for a in keys:
+			# Start from the methods with no incoming precedence edge.
+			for x in prec.values():
+				if a in x:break
+			else:
+				tmp.append(a)
+		out=[]
+		while tmp:
+			e=tmp.pop()
+			if e in keys:out.append(e)
+			try:
+				nlst=prec[e]
+			except KeyError:
+				pass
+			else:
+				del prec[e]
+				for x in nlst:
+					for y in prec:
+						if x in prec[y]:
+							break
+					else:
+						tmp.append(x)
+		# Remaining entries mean a circular precedence declaration.
+		if prec:raise Utils.WafError("graph has a cycle %s"%str(prec))
+		out.reverse()
+		self.meths=out
+		debug('task_gen: posting %s %d',self,id(self))
+		for x in out:
+			try:
+				v=getattr(self,x)
+			except AttributeError:
+				raise Utils.WafError("tried to retrieve %s which is not a valid method"%x)
+			debug('task_gen: -> %s (%d)',x,id(self))
+			v()
+	def post(self):
+		# Create the tasks (once): derive a name from the target, then apply().
+		if not self.name:
+			if isinstance(self.target,list):
+				self.name=' '.join(self.target)
+			else:
+				self.name=self.target
+		if getattr(self,'posted',None):
+			return
+		self.apply()
+		self.posted=True
+		debug('task_gen: posted %s',self.name)
+	def get_hook(self,ext):
+		# Per-instance extension mappings shadow the class-level ones.
+		try:return self.mappings[ext]
+		except KeyError:
+			try:return task_gen.mappings[ext]
+			except KeyError:return None
+	def create_task(self,name,src=None,tgt=None,env=None):
+		# Instantiate a registered task class bound to this generator.
+		env=env or self.env
+		task=Task.TaskBase.classes[name](env.copy(),generator=self)
+		if src:
+			task.set_inputs(src)
+		if tgt:
+			task.set_outputs(tgt)
+		self.tasks.append(task)
+		return task
+	def name_to_obj(self,name):
+		return self.bld.name_to_obj(name,self.env)
+	def find_sources_in_dirs(self,dirnames,excludes=[],exts=[]):
+		# Append to self.source all files under 'dirnames' whose extension
+		# matches 'exts' (or any known extension mapping when exts is empty).
+		err_msg="'%s' attribute must be a list"
+		if not isinstance(excludes,list):
+			raise Utils.WscriptError(err_msg%'excludes')
+		if not isinstance(exts,list):
+			raise Utils.WscriptError(err_msg%'exts')
+		lst=[]
+		dirnames=self.to_list(dirnames)
+		ext_lst=exts or list(self.mappings.keys())+list(task_gen.mappings.keys())
+		for name in dirnames:
+			anode=self.path.find_dir(name)
+			if not anode or not anode.is_child_of(self.bld.srcnode):
+				raise Utils.WscriptError("Unable to use '%s' - either because it's not a relative path"", or it's not child of '%s'."%(name,self.bld.srcnode))
+			self.bld.rescan(anode)
+			for name in self.bld.cache_dir_contents[anode.id]:
+				# Skip hidden files.
+				if name.startswith('.'):
+					continue
+				(base,ext)=os.path.splitext(name)
+				if ext in ext_lst and not name in lst and not name in excludes:
+					lst.append((anode.relpath_gen(self.path)or'.')+os.path.sep+name)
+		lst.sort()
+		self.source=self.to_list(self.source)
+		if not self.source:self.source=lst
+		else:self.source+=lst
+	def clone(self,env):
+		# Duplicate this generator for another environment/variant; 'env' is
+		# either an Environment or the name of one in bld.all_envs.
+		newobj=task_gen(bld=self.bld)
+		for x in self.__dict__:
+			if x in['env','bld']:
+				continue
+			elif x in["path","features"]:
+				setattr(newobj,x,getattr(self,x))
+			else:
+				setattr(newobj,x,copy.copy(getattr(self,x)))
+		newobj.__class__=self.__class__
+		if isinstance(env,str):
+			newobj.env=self.bld.all_envs[env].copy()
+		else:
+			newobj.env=env.copy()
+		return newobj
+	def get_inst_path(self):
+		return getattr(self,'_install_path',getattr(self,'default_install_path',''))
+	def set_inst_path(self,val):
+		self._install_path=val
+	install_path=property(get_inst_path,set_inst_path)
+	def get_chmod(self):
+		return getattr(self,'_chmod',getattr(self,'default_chmod',O644))
+	def set_chmod(self,val):
+		self._chmod=val
+	chmod=property(get_chmod,set_chmod)
+def declare_extension(var,func):
+	# Map one or more file extensions (string or list) to a processing
+	# function in the global task_gen.mappings table.
+	try:
+		for x in Utils.to_list(var):
+			task_gen.mappings[x]=func
+	except:
+		raise Utils.WscriptError('declare_extension takes either a list or a string %r'%var)
+	task_gen.mapped[func.__name__]=func
+def declare_order(*k):
+	# Declare that the named methods run in the given order: each method
+	# becomes a prerequisite of its successor in task_gen.prec.
+	assert(len(k)>1)
+	n=len(k)-1
+	for i in xrange(n):
+		f1=k[i]
+		f2=k[i+1]
+		if not f1 in task_gen.prec[f2]:
+			task_gen.prec[f2].append(f1)
+def declare_chain(name='',action='',ext_in='',ext_out='',reentrant=True,color='BLUE',install=0,before=[],after=[],decider=None,rule=None,scan=None):
+	# Declare a file-transformation chain (e.g. .l -> .c): creates a task
+	# class for 'action'/'rule' and binds its input extensions to a handler.
+	action=action or rule
+	if isinstance(action,str):
+		act=Task.simple_task_type(name,action,color=color)
+	else:
+		act=Task.task_type_from_func(name,action,color=color)
+	act.ext_in=tuple(Utils.to_list(ext_in))
+	act.ext_out=tuple(Utils.to_list(ext_out))
+	act.before=Utils.to_list(before)
+	act.after=Utils.to_list(after)
+	act.scan=scan
+	def x_file(self,node):
+		# Per-node handler: compute the output extension(s), create the task
+		# and optionally feed the outputs back for further processing.
+		if decider:
+			ext=decider(self,node)
+		else:
+			ext=ext_out
+		if isinstance(ext,str):
+			out_source=node.change_ext(ext)
+			if reentrant:
+				self.allnodes.append(out_source)
+		elif isinstance(ext,list):
+			out_source=[node.change_ext(x)for x in ext]
+			if reentrant:
+				# reentrant may be an int limiting how many outputs re-enter.
+				for i in xrange((reentrant is True)and len(out_source)or reentrant):
+					self.allnodes.append(out_source[i])
+		else:
+			raise Utils.WafError("do not know how to process %s"%str(ext))
+		tsk=self.create_task(name,node,out_source)
+		if node.__class__.bld.is_install:
+			tsk.install=install
+	declare_extension(act.ext_in,x_file)
+	return x_file
+def bind_feature(name,methods):
+	# Attach one or more method names to a feature name.
+	lst=Utils.to_list(methods)
+	task_gen.traits[name].update(lst)
+def taskgen(func):
+	# Decorator: install 'func' as a task_gen method.
+	setattr(task_gen,func.__name__,func)
+	return func
+def feature(*k):
+	# Decorator: install the function as a task_gen method and bind it to
+	# the given feature names.
+	def deco(func):
+		setattr(task_gen,func.__name__,func)
+		for name in k:
+			task_gen.traits[name].update([func.__name__])
+		return func
+	return deco
+def before(*k):
+	# Decorator: the decorated method must run before the named methods.
+	def deco(func):
+		setattr(task_gen,func.__name__,func)
+		for fun_name in k:
+			if not func.__name__ in task_gen.prec[fun_name]:
+				task_gen.prec[fun_name].append(func.__name__)
+		return func
+	return deco
+def after(*k):
+	# Decorator: the decorated method must run after the named methods.
+	def deco(func):
+		setattr(task_gen,func.__name__,func)
+		for fun_name in k:
+			if not fun_name in task_gen.prec[func.__name__]:
+				task_gen.prec[func.__name__].append(fun_name)
+		return func
+	return deco
+def extension(var):
+	# Decorator form of declare_extension: install the function as a
+	# task_gen method and map the given extension(s) to it.
+	def deco(func):
+		setattr(task_gen,func.__name__,func)
+		try:
+			for x in Utils.to_list(var):
+				task_gen.mappings[x]=func
+		except:
+			raise Utils.WafError('extension takes either a list or a string %r'%var)
+		task_gen.mapped[func.__name__]=func
+		return func
+	return deco
+def apply_core(self):
+	# Default '*' feature method: resolve the source files and dispatch each
+	# node to the handler registered for its extension.
+	find_resource=self.path.find_resource
+	for filename in self.to_list(self.source):
+		# A mapping keyed on the full filename takes priority.
+		x=self.get_hook(filename)
+		if x:
+			x(self,filename)
+		else:
+			node=find_resource(filename)
+			if not node:raise Utils.WafError("source not found: '%s' in '%s'"%(filename,str(self.path)))
+			self.allnodes.append(node)
+	for node in self.allnodes:
+		x=self.get_hook(node.suffix())
+		if not x:
+			raise Utils.WafError("Cannot guess how to process %s (got mappings %r in %r) -> try conf.check_tool(..)?"%(str(node),self.__class__.mappings.keys(),self.__class__))
+		x(self,node)
+feature('*')(apply_core)
+def exec_rule(self):
+	# '*' feature method handling the 'rule' attribute: build an ad-hoc task
+	# class from the rule (string or callable) and create one task from the
+	# declared source/target; replaces apply_core in that case.
+	if not getattr(self,'rule',None):
+		return
+	try:
+		self.meths.remove('apply_core')
+	except ValueError:
+		pass
+	func=self.rule
+	vars2=[]
+	if isinstance(func,str):
+		(func,vars2)=Task.compile_fun('',self.rule,shell=getattr(self,'shell',True))
+		func.code=self.rule
+	name=getattr(self,'name',None)or self.target or self.rule
+	if not isinstance(name,str):
+		name=str(self.idx)
+	cls=Task.task_type_from_func(name,func,getattr(self,'vars',vars2))
+	tsk=self.create_task(name)
+	# 'ruledeps' makes the task re-run when the rule text itself changes.
+	dep_vars=getattr(self,'dep_vars',['ruledeps'])
+	if dep_vars:
+		tsk.dep_vars=dep_vars
+	if isinstance(self.rule,str):
+		tsk.env.ruledeps=self.rule
+	else:
+		tsk.env.ruledeps=Utils.h_fun(self.rule)
+	if getattr(self,'target',None):
+		cls.quiet=True
+		tsk.outputs=[self.path.find_or_declare(x)for x in self.to_list(self.target)]
+	if getattr(self,'source',None):
+		cls.quiet=True
+		tsk.inputs=[]
+		for x in self.to_list(self.source):
+			y=self.path.find_resource(x)
+			if not y:
+				raise Utils.WafError('input file %r could not be found (%r)'%(x,self.path.abspath()))
+			tsk.inputs.append(y)
+	if self.allnodes:
+		tsk.inputs.extend(self.allnodes)
+	if getattr(self,'scan',None):
+		cls.scan=self.scan
+	if getattr(self,'install_path',None):
+		tsk.install_path=self.install_path
+	if getattr(self,'cwd',None):
+		tsk.cwd=self.cwd
+	if getattr(self,'on_results',None):
+		Task.update_outputs(cls)
+	if getattr(self,'always',None):
+		Task.always_run(cls)
+	for x in['after','before','ext_in','ext_out']:
+		setattr(cls,x,getattr(self,x,[]))
+feature('*')(exec_rule)
+before('apply_core')(exec_rule)
+def sequence_order(self):
+	# 'seq' feature: make all tasks of this generator run after every task
+	# of the previously posted 'seq' generator (self.bld.prev).
+	if self.meths and self.meths[-1]!='sequence_order':
+		# Re-queue itself so it runs last among the generator's methods.
+		self.meths.append('sequence_order')
+		return
+	if getattr(self,'seq_start',None):
+		return
+	if getattr(self.bld,'prev',None):
+		self.bld.prev.post()
+		for x in self.bld.prev.tasks:
+			for y in self.tasks:
+				y.set_run_after(x)
+	self.bld.prev=self
+feature('seq')(sequence_order)
+
diff --git a/wafadmin/Tools/__init__.py b/wafadmin/Tools/__init__.py
new file mode 100644
index 0000000..cbc8406
--- /dev/null
+++ b/wafadmin/Tools/__init__.py
@@ -0,0 +1,4 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+
diff --git a/wafadmin/Tools/config_c.py b/wafadmin/Tools/config_c.py
new file mode 100644
index 0000000..7f35318
--- /dev/null
+++ b/wafadmin/Tools/config_c.py
@@ -0,0 +1,531 @@
+#! /usr/bin/env python
+# encoding: utf-8
+import sys
+if sys.hexversion < 0x020400f0: from sets import Set as set
+import os,imp,sys,shlex,shutil
+from Utils import md5
+import Build,Utils,Configure,Task,Options,Logs,TaskGen
+from Constants import*
+from Configure import conf,conftest
+# Map the check_cfg version keywords to pkg-config comparison operators.
+cfg_ver={'atleast-version':'>=','exact-version':'==','max-version':'<=',}
+# C code fragments compiled by the configuration checks below:
+# SNIP1 tests for a function, SNIP2 for a type, SNIP3 is a minimal program.
+SNIP1='''
+	int main() {
+	void *p;
+	p=(void*)(%s);
+	return 0;
+}
+'''
+SNIP2='''
+int main() {
+	if ((%(type_name)s *) 0) return 0;
+	if (sizeof (%(type_name)s)) return 0;
+}
+'''
+SNIP3='''
+int main() {
+	return 0;
+}
+'''
+def parse_flags(line,uselib,env):
+	# Parse compiler/linker flags (typically pkg-config output) and store
+	# them in the environment under the uselib-suffixed variables.
+	lst=shlex.split(line)
+	while lst:
+		x=lst.pop(0)
+		st=x[:2]
+		ot=x[2:]
+		# -I / /I: include path (the value may be the next token).
+		if st=='-I'or st=='/I':
+			if not ot:ot=lst.pop(0)
+			env.append_unique('CPPPATH_'+uselib,ot)
+		elif st=='-D':
+			if not ot:ot=lst.pop(0)
+			env.append_unique('CXXDEFINES_'+uselib,ot)
+			env.append_unique('CCDEFINES_'+uselib,ot)
+		elif st=='-l':
+			if not ot:ot=lst.pop(0)
+			env.append_unique('LIB_'+uselib,ot)
+		elif st=='-L':
+			if not ot:ot=lst.pop(0)
+			env.append_unique('LIBPATH_'+uselib,ot)
+		elif x=='-pthread'or x.startswith('+'):
+			# Applies to compiling and linking alike.
+			env.append_unique('CCFLAGS_'+uselib,x)
+			env.append_unique('CXXFLAGS_'+uselib,x)
+			env.append_unique('LINKFLAGS_'+uselib,x)
+		elif x=='-framework':
+			env.append_unique('FRAMEWORK_'+uselib,lst.pop(0))
+		elif x.startswith('-F'):
+			env.append_unique('FRAMEWORKPATH_'+uselib,x[2:])
+		elif x.startswith('-std'):
+			env.append_unique('CCFLAGS_'+uselib,x)
+			env.append_unique('LINKFLAGS_'+uselib,x)
+		elif x.startswith('-Wl'):
+			env.append_unique('LINKFLAGS_'+uselib,x)
+		elif x.startswith('-m')or x.startswith('-f'):
+			env.append_unique('CCFLAGS_'+uselib,x)
+			env.append_unique('CXXFLAGS_'+uselib,x)
+def ret_msg(self,f,kw):
+	# Messages may be plain strings or callables taking the kw dict.
+	if isinstance(f,str):
+		return f
+	return f(kw)
+def validate_cfg(self,kw):
+	# Fill in the default command path and the 'msg'/'okmsg'/'errmsg' texts
+	# for check_cfg according to which keywords were given.
+	if not'path'in kw:
+		kw['path']='pkg-config --errors-to-stdout --print-errors'
+	if'atleast_pkgconfig_version'in kw:
+		if not'msg'in kw:
+			kw['msg']='Checking for pkg-config version >= %s'%kw['atleast_pkgconfig_version']
+		return
+	if'modversion'in kw:
+		return
+	if'variables'in kw:
+		if not'msg'in kw:
+			kw['msg']='Checking for %s variables'%kw['package']
+		return
+	# Version comparison checks (atleast-version etc.) require a package.
+	for x in cfg_ver.keys():
+		y=x.replace('-','_')
+		if y in kw:
+			if not'package'in kw:
+				raise ValueError('%s requires a package'%x)
+			if not'msg'in kw:
+				kw['msg']='Checking for %s %s %s'%(kw['package'],cfg_ver[x],kw[y])
+			return
+	if not'msg'in kw:
+		kw['msg']='Checking for %s'%(kw['package']or kw['path'])
+	if not'okmsg'in kw:
+		kw['okmsg']='yes'
+	if not'errmsg'in kw:
+		kw['errmsg']='not found'
+def cmd_and_log(self,cmd,kw):
+	# Run a shell command, mirror its output into the configuration log and
+	# return stdout; calls self.fatal on execution failure or non-zero exit.
+	Logs.debug('runner: %s\n'%cmd)
+	if self.log:
+		self.log.write('%s\n'%cmd)
+	try:
+		p=Utils.pproc.Popen(cmd,stdout=Utils.pproc.PIPE,stderr=Utils.pproc.PIPE,shell=True)
+		(out,err)=p.communicate()
+	except OSError,e:
+		self.log.write('error %r'%e)
+		self.fatal(str(e))
+	out=str(out)
+	err=str(err)
+	if self.log:
+		self.log.write(out)
+		self.log.write(err)
+	if p.returncode:
+		# Provide a default error message before aborting the check.
+		if not kw.get('errmsg',''):
+			if kw.get('mandatory',False):
+				kw['errmsg']=out.strip()
+			else:
+				kw['errmsg']='no'
+		self.fatal('fail')
+	return out
+def exec_cfg(self,kw):
+	# Execute the pkg-config invocation(s) described by kw: version checks,
+	# modversion query, variable queries, or full flag retrieval.
+	if'atleast_pkgconfig_version'in kw:
+		cmd='%s --atleast-pkgconfig-version=%s'%(kw['path'],kw['atleast_pkgconfig_version'])
+		self.cmd_and_log(cmd,kw)
+		if not'okmsg'in kw:
+			kw['okmsg']='yes'
+		return
+	for x in cfg_ver:
+		y=x.replace('-','_')
+		if y in kw:
+			self.cmd_and_log('%s --%s=%s %s'%(kw['path'],x,kw[y],kw['package']),kw)
+			if not'okmsg'in kw:
+				kw['okmsg']='yes'
+			self.define(self.have_define(kw.get('uselib_store',kw['package'])),1,0)
+			break
+	if'modversion'in kw:
+		# Record the package version as a *_VERSION define and return it.
+		version=self.cmd_and_log('%s --modversion %s'%(kw['path'],kw['modversion']),kw).strip()
+		self.define('%s_VERSION'%Utils.quote_define_name(kw.get('uselib_store',kw['modversion'])),version)
+		return version
+	if'variables'in kw:
+		# Store each pkg-config variable as <USELIB>_<var> in the env.
+		env=kw.get('env',self.env)
+		uselib=kw.get('uselib_store',kw['package'].upper())
+		vars=Utils.to_list(kw['variables'])
+		for v in vars:
+			val=self.cmd_and_log('%s --variable=%s %s'%(kw['path'],v,kw['package']),kw).strip()
+			var='%s_%s'%(uselib,v)
+			env[var]=val
+		if not'okmsg'in kw:
+			kw['okmsg']='yes'
+		return
+	lst=[kw['path']]
+	defi=kw.get('define_variable',None)
+	if not defi:
+		defi=self.env.PKG_CONFIG_DEFINES or{}
+	for key,val in defi.iteritems():
+		lst.append('--define-variable=%s=%s'%(key,val))
+	lst.append(kw.get('args',''))
+	lst.append(kw['package'])
+	cmd=' '.join(lst)
+	ret=self.cmd_and_log(cmd,kw)
+	if not'okmsg'in kw:
+		kw['okmsg']='yes'
+	self.define(self.have_define(kw.get('uselib_store',kw['package'])),1,0)
+	# Distribute the returned flags into the uselib environment variables.
+	parse_flags(ret,kw.get('uselib_store',kw['package'].upper()),kw.get('env',self.env))
+	return ret
+def check_cfg(self,*k,**kw):
+	# Public entry point: validate the keywords, run the pkg-config check
+	# and print the result; raises only for mandatory failures.
+	self.validate_cfg(kw)
+	if'msg'in kw:
+		self.check_message_1(kw['msg'])
+	ret=None
+	try:
+		ret=self.exec_cfg(kw)
+	except Configure.ConfigurationError,e:
+		if'errmsg'in kw:
+			self.check_message_2(kw['errmsg'],'YELLOW')
+		if'mandatory'in kw and kw['mandatory']:
+			if Logs.verbose>1:
+				raise
+			else:
+				self.fatal('the configuration failed (see %r)'%self.log.name)
+	else:
+		kw['success']=ret
+		if'okmsg'in kw:
+			self.check_message_2(self.ret_msg(kw['okmsg'],kw))
+	return ret
def validate_c(self,kw):
	"""Normalize the keyword arguments of a C/C++ configuration check.

	Fills in defaults (compiler, test code snippet, messages, uselib_store
	and define names) so that check()/run_c_code() can rely on every key
	being present. *kw* is modified in place.
	"""
	if not'env'in kw:
		kw['env']=self.env.copy()
	env=kw['env']
	# prefer the c++ compiler when one is configured and the cxx tool is loaded
	if not'compiler'in kw:
		kw['compiler']='cc'
		if env['CXX_NAME']and Task.TaskBase.classes.get('cxx',None):
			kw['compiler']='cxx'
			if not self.env['CXX']:
				self.fatal('a c++ compiler is required')
		else:
			if not self.env['CC']:
				self.fatal('a c compiler is required')
	if not'type'in kw:
		kw['type']='cprogram'
	# only full programs may be executed after the build
	assert not(kw['type']!='cprogram'and kw.get('execute',0)),'can only execute programs'
	def to_header(dct):
		# build the '#include <...>' preamble from header_name
		if'header_name'in dct:
			dct=Utils.to_list(dct['header_name'])
			return''.join(['#include <%s>\n'%x for x in dct])
		return''
	if not'compile_mode'in kw:
		kw['compile_mode']=(kw['compiler']=='cxx')and'cxx'or'cc'
	if not'compile_filename'in kw:
		kw['compile_filename']='test.c'+((kw['compile_mode']=='cxx')and'pp'or'')
	# OSX framework checks include <Name/Name.h> and link the framework
	if'framework_name'in kw:
		try:TaskGen.task_gen.create_task_macapp
		except AttributeError:self.fatal('frameworks require the osx tool')
		fwkname=kw['framework_name']
		if not'uselib_store'in kw:
			kw['uselib_store']=fwkname.upper()
		if not kw.get('no_header',False):
			if not'header_name'in kw:
				kw['header_name']=[]
			fwk='%s/%s.h'%(fwkname,fwkname)
			if kw.get('remove_dot_h',None):
				fwk=fwk[:-2]
			kw['header_name']=Utils.to_list(kw['header_name'])+[fwk]
		kw['msg']='Checking for framework %s'%fwkname
		kw['framework']=fwkname
	# derive the test code and define/uselib names from what is being checked
	if'function_name'in kw:
		fu=kw['function_name']
		if not'msg'in kw:
			kw['msg']='Checking for function %s'%fu
		kw['code']=to_header(kw)+SNIP1%fu
		if not'uselib_store'in kw:
			kw['uselib_store']=fu.upper()
		if not'define_name'in kw:
			kw['define_name']=self.have_define(fu)
	elif'type_name'in kw:
		tu=kw['type_name']
		if not'msg'in kw:
			kw['msg']='Checking for type %s'%tu
		if not'header_name'in kw:
			kw['header_name']='stdint.h'
		kw['code']=to_header(kw)+SNIP2%{'type_name':tu}
		if not'define_name'in kw:
			kw['define_name']=self.have_define(tu.upper())
	elif'header_name'in kw:
		if not'msg'in kw:
			kw['msg']='Checking for header %s'%kw['header_name']
		l=Utils.to_list(kw['header_name'])
		assert len(l)>0,'list of headers in header_name is empty'
		kw['code']=to_header(kw)+SNIP3
		if not'uselib_store'in kw:
			kw['uselib_store']=l[0].upper()
		if not'define_name'in kw:
			kw['define_name']=self.have_define(l[0])
	if'lib'in kw:
		if not'msg'in kw:
			kw['msg']='Checking for library %s'%kw['lib']
		if not'uselib_store'in kw:
			kw['uselib_store']=kw['lib'].upper()
	if'staticlib'in kw:
		if not'msg'in kw:
			kw['msg']='Checking for static library %s'%kw['staticlib']
		if not'uselib_store'in kw:
			kw['uselib_store']=kw['staticlib'].upper()
	if'fragment'in kw:
		kw['code']=kw['fragment']
		if not'msg'in kw:
			kw['msg']='Checking for custom code'
		if not'errmsg'in kw:
			kw['errmsg']='no'
	for(flagsname,flagstype)in[('cxxflags','compiler'),('cflags','compiler'),('linkflags','linker')]:
		if flagsname in kw:
			if not'msg'in kw:
				kw['msg']='Checking for %s flags %s'%(flagstype,kw[flagsname])
			if not'errmsg'in kw:
				kw['errmsg']='no'
	# remaining defaults for the generic case
	if not'execute'in kw:
		kw['execute']=False
	if not'errmsg'in kw:
		kw['errmsg']='not found'
	if not'okmsg'in kw:
		kw['okmsg']='yes'
	if not'code'in kw:
		kw['code']=SNIP3
	if not kw.get('success'):kw['success']=None
	assert'msg'in kw,'invalid parameters, read http://freehackers.org/~tnagy/wafbook/single.html#config_helpers_c'
def post_check(self,*k,**kw):
	"""Record the outcome of a check: write the define and propagate the
	discovered flags into XXX_<uselib_store> environment variables."""
	is_success=False
	if kw['execute']:
		# executed tests: any non-None result counts as success
		if kw['success']is not None:
			is_success=True
	else:
		# compile-only tests: success is a zero return code
		is_success=(kw['success']==0)
	if'define_name'in kw:
		if'header_name'in kw or'function_name'in kw or'type_name'in kw or'fragment'in kw:
			if kw['execute']:
				key=kw['success']
				if isinstance(key,str):
					if key:
						# define to the program output (quoted by default)
						self.define(kw['define_name'],key,quote=kw.get('quote',1))
					else:
						self.define_cond(kw['define_name'],True)
				else:
					self.define_cond(kw['define_name'],False)
			else:
				self.define_cond(kw['define_name'],is_success)
	if is_success and'uselib_store'in kw:
		import cc,cxx
		# map the flag-style kw arguments (includes, defines, ...) onto the
		# corresponding env variables suffixed with the uselib name
		for k in set(cc.g_cc_flag_vars).union(cxx.g_cxx_flag_vars):
			lk=k.lower()
			if k=='CPPPATH':lk='includes'
			if k=='CXXDEFINES':lk='defines'
			if k=='CCDEFINES':lk='defines'
			if lk in kw:
				val=kw[lk]
				if isinstance(val,str):
					val=val.rstrip(os.path.sep)
				self.env.append_unique(k+'_'+kw['uselib_store'],val)
def check(self,*k,**kw):
	"""Perform a C/C++ configuration check by building (and optionally
	running) a small test program.

	Returns True/False for compile-only checks, the program output for
	executed checks, and None when a non-mandatory check fails.
	"""
	self.validate_c(kw)
	self.check_message_1(kw['msg'])
	ret=None
	try:
		ret=self.run_c_code(*k,**kw)
	except Configure.ConfigurationError,e:
		self.check_message_2(kw['errmsg'],'YELLOW')
		if'mandatory'in kw and kw['mandatory']:
			if Logs.verbose>1:
				raise
			else:
				self.fatal('the configuration failed (see %r)'%self.log.name)
	else:
		kw['success']=ret
		self.check_message_2(self.ret_msg(kw['okmsg'],kw))
	self.post_check(*k,**kw)
	if not kw.get('execute',False):
		# run_c_code returns 0 on a successful build
		return ret==0
	return ret
def run_c_code(self,*k,**kw):
	"""Build the configuration test in a scratch directory.

	Creates .conf_check_N under the build directory, writes the test
	source, runs a nested BuildContext on it and, when kw['execute'] is
	set, runs the produced binary and returns its stdout. Fails fatally
	on build or execution errors.
	"""
	test_f_name=kw['compile_filename']
	k=0
	# find an unused scratch directory name
	while k<10000:
		dir=os.path.join(self.blddir,'.conf_check_%d'%k)
		try:
			shutil.rmtree(dir)
		except OSError:
			pass
		try:
			os.stat(dir)
		except OSError:
			break
		k+=1
	try:
		os.makedirs(dir)
	except:
		self.fatal('cannot create a configuration test folder %r'%dir)
	try:
		os.stat(dir)
	except:
		self.fatal('cannot use the configuration test folder %r'%dir)
	bdir=os.path.join(dir,'testbuild')
	if not os.path.exists(bdir):
		os.makedirs(bdir)
	env=kw['env']
	dest=open(os.path.join(dir,test_f_name),'w')
	dest.write(kw['code'])
	dest.close()
	back=os.path.abspath('.')
	# build the snippet with a private BuildContext sharing our environments
	bld=Build.BuildContext()
	bld.log=self.log
	bld.all_envs.update(self.all_envs)
	bld.all_envs['default']=env
	bld.lst_variants=bld.all_envs.keys()
	bld.load_dirs(dir,bdir)
	os.chdir(dir)
	bld.rescan(bld.srcnode)
	if not'features'in kw:
		kw['features']=[kw['compile_mode'],kw['type']]
	o=bld(features=kw['features'],source=test_f_name,target='testprog')
	# forward every kw argument to the task generator (uselib, defines, ...)
	for k,v in kw.iteritems():
		setattr(o,k,v)
	self.log.write("==>\n%s\n<==\n"%kw['code'])
	try:
		bld.compile()
	except Utils.WafError:
		ret=Utils.ex_stack()
	else:
		ret=0
	os.chdir(back)
	if ret:
		self.log.write('command returned %r'%ret)
		self.fatal(str(ret))
	if kw['execute']:
		# run the freshly built program and capture its output
		lastprog=o.link_task.outputs[0].abspath(env)
		args=Utils.to_list(kw.get('exec_args',[]))
		proc=Utils.pproc.Popen([lastprog]+args,stdout=Utils.pproc.PIPE,stderr=Utils.pproc.PIPE)
		(out,err)=proc.communicate()
		w=self.log.write
		w(str(out))
		w('\n')
		w(str(err))
		w('\n')
		w('returncode %r'%proc.returncode)
		w('\n')
		if proc.returncode:
			self.fatal(Utils.ex_stack())
		ret=out
	return ret
def check_cxx(self,*k,**kw):
	"""Run a configuration check, forcing the C++ compiler."""
	kw['compiler'] = 'cxx'
	return self.check(*k, **kw)
def check_cc(self,*k,**kw):
	"""Run a configuration check, forcing the C compiler."""
	kw['compiler'] = 'cc'
	return self.check(*k, **kw)
def define(self,define,value,quote=1):
	"""Store a preprocessor define for the configuration header.

	String values are quoted and escaped for C unless quote is false;
	int values are written verbatim. The value is also mirrored into
	the environment under the define's own name.
	"""
	assert define and isinstance(define,str)
	tbl=self.env[DEFINES]or Utils.ordered_dict()
	if isinstance(value,str):
		if quote:
			# repr() escapes backslashes; the leading '"' forces repr to use
			# single quotes, then embedded double quotes are escaped for C
			tbl[define]='"%s"'%repr('"'+value)[2:-1].replace('"','\\"')
		else:
			tbl[define]=value
	elif isinstance(value,int):
		tbl[define]=value
	else:
		raise TypeError('define %r -> %r must be a string or an int'%(define,value))
	self.env[DEFINES]=tbl
	self.env[define]=value
def undefine(self,define):
	"""Mark *define* as explicitly undefined in the configuration header."""
	assert define and isinstance(define, str)
	table = self.env[DEFINES] or Utils.ordered_dict()
	table[define] = UNDEFINED
	self.env[DEFINES] = table
	self.env[define] = UNDEFINED
def define_cond(self,name,value):
	"""Define *name* to 1 when *value* is truthy, otherwise undefine it."""
	if not value:
		self.undefine(name)
	else:
		self.define(name, 1)
def is_defined(self,key):
	"""Return True when *key* carries a real value (not UNDEFINED) in the defines table."""
	table = self.env[DEFINES]
	if not table:
		return False
	if key not in table:
		return False
	return table[key] != UNDEFINED
def get_define(self,define):
	"""Return the stored value for *define*, or None when it was never set."""
	try:return self.env[DEFINES][define]
	except KeyError:return None
def have_define(self,name):
	"""Return the HAVE_XXX macro name for *name* (pattern overridable via a HAVE_PAT attribute)."""
	return self.__dict__.get('HAVE_PAT','HAVE_%s')%Utils.quote_define_name(name)
def write_config_header(self,configfile='',env='',guard='',top=False):
	"""Write the accumulated defines to a C configuration header.

	The file is written in the build directory (at the top level when
	*top* is True) and wrapped in an include guard derived from its name.
	"""
	if not configfile:configfile=WAF_CONFIG_H
	waf_guard=guard or'_%s_WAF'%Utils.quote_define_name(configfile)
	if not env:env=self.env
	if top:
		diff=''
	else:
		diff=Utils.diff_path(self.srcdir,self.curdir)
	full=os.sep.join([self.blddir,env.variant(),diff,configfile])
	full=os.path.normpath(full)
	(dir,base)=os.path.split(full)
	try:os.makedirs(dir)
	except:pass
	dest=open(full,'w')
	dest.write('/* Configuration header created by Waf - do not edit */\n')
	dest.write('#ifndef %s\n#define %s\n\n'%(waf_guard,waf_guard))
	dest.write(self.get_config_header())
	# remember the header so it can be regenerated on reconfiguration
	env.append_unique(CFG_FILES,os.path.join(diff,configfile))
	dest.write('\n#endif /* %s */\n'%waf_guard)
	dest.close()
def get_config_header(self):
	"""Return the #define/#undef lines for all recorded defines, in insertion order."""
	config_header=[]
	tbl=self.env[DEFINES]or Utils.ordered_dict()
	for key in tbl.allkeys:
		value=tbl[key]
		if value is None:
			config_header.append('#define %s'%key)
		elif value is UNDEFINED:
			config_header.append('/* #undef %s */'%key)
		else:
			config_header.append('#define %s %s'%(key,value))
	return"\n".join(config_header)
def find_cpp(conf):
	"""Locate a C preprocessor: env CPP, then the OS environment, then
	'cpp' on PATH, finally falling back to the C or C++ compiler."""
	env = conf.env
	cpp = env['CPP'] or conf.environ.get('CPP', None)
	if not cpp:
		cpp = conf.find_program('cpp', var='CPP')
	for fallback in ('CC', 'CXX'):
		if not cpp:
			cpp = env[fallback]
	env['CPP'] = cpp
def cc_add_flags(conf):
	"""Import CFLAGS (as CCFLAGS) and CPPFLAGS from the OS environment."""
	for args in (('CFLAGS', 'CCFLAGS'), ('CPPFLAGS',)):
		conf.add_os_flags(*args)
def cxx_add_flags(conf):
	"""Import CXXFLAGS and CPPFLAGS from the OS environment."""
	for var in ('CXXFLAGS', 'CPPFLAGS'):
		conf.add_os_flags(var)
def link_add_flags(conf):
	"""Import LINKFLAGS and LDFLAGS (mapped onto LINKFLAGS) from the OS environment."""
	for args in (('LINKFLAGS',), ('LDFLAGS', 'LINKFLAGS')):
		conf.add_os_flags(*args)
def cc_load_tools(conf):
	"""Load the C compiler support tool into the configuration context."""
	conf.check_tool('cc')
def cxx_load_tools(conf):
	"""Load the C++ compiler support tool into the configuration context."""
	conf.check_tool('cxx')
+
+conf(ret_msg)
+conf(validate_cfg)
+conf(cmd_and_log)
+conf(exec_cfg)
+conf(check_cfg)
+conf(validate_c)
+conf(post_check)
+conf(check)
+conf(run_c_code)
+conf(check_cxx)
+conf(check_cc)
+conf(define)
+conf(undefine)
+conf(define_cond)
+conf(is_defined)
+conf(get_define)
+conf(have_define)
+conf(write_config_header)
+conf(get_config_header)
+conftest(find_cpp)
+conftest(cc_add_flags)
+conftest(cxx_add_flags)
+conftest(link_add_flags)
+conftest(cc_load_tools)
+conftest(cxx_load_tools)
diff --git a/wafadmin/Tools/dbus.py b/wafadmin/Tools/dbus.py
new file mode 100644
index 0000000..449bdc0
--- /dev/null
+++ b/wafadmin/Tools/dbus.py
@@ -0,0 +1,24 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+import Task,Utils
+from TaskGen import taskgen,before,after,feature
def add_dbus_file(self,filename,prefix,mode):
	"""Queue a D-Bus binding generation; the task itself is created later
	by process_dbus."""
	try:
		queue = self.dbus_lst
	except AttributeError:
		queue = self.dbus_lst = []
	self.meths.append('process_dbus')
	queue.append([filename, prefix, mode])
def process_dbus(self):
	"""Create one dbus_binding_tool task per file queued via add_dbus_file."""
	for filename,prefix,mode in getattr(self,'dbus_lst',[]):
		node=self.path.find_resource(filename)
		if not node:
			raise Utils.WafError('file not found '+filename)
		tsk=self.create_task('dbus_binding_tool',node,node.change_ext('.h'))
		# prefix/mode reach the tool through task-local env variables
		tsk.env.DBUS_BINDING_TOOL_PREFIX=prefix
		tsk.env.DBUS_BINDING_TOOL_MODE=mode
+Task.simple_task_type('dbus_binding_tool','${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}',color='BLUE',before='cc')
def detect(conf):
	"""Find dbus-binding-tool and store its path in env['DBUS_BINDING_TOOL']."""
	dbus_binding_tool=conf.find_program('dbus-binding-tool',var='DBUS_BINDING_TOOL')
+
+taskgen(add_dbus_file)
+before('apply_core')(process_dbus)
diff --git a/wafadmin/Tools/gdc.py b/wafadmin/Tools/gdc.py
new file mode 100644
index 0000000..72ed66c
--- /dev/null
+++ b/wafadmin/Tools/gdc.py
@@ -0,0 +1,36 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+import sys
+import Utils,ar
+from Configure import conftest
def find_gdc(conf):
	"""Locate the gdc binary on PATH and record it as D_COMPILER (fatal when absent)."""
	conf.find_program('gdc', var='D_COMPILER', mandatory=True)
def common_flags_gdc(conf):
	"""Fill the environment with the default gdc compile/link flag patterns."""
	env = conf.env
	defaults = {
		'DFLAGS': [],
		'D_SRC_F': '',
		'D_TGT_F': ['-c', '-o', ''],
		'DPATH_ST': '-I%s',
		'DLNK_SRC_F': '',
		'DLNK_TGT_F': ['-o', ''],
		'DLIB_ST': '-l%s',
		'DLIBPATH_ST': '-L%s',
		'DLINKFLAGS': [],
		'DFLAGS_OPTIMIZED': ['-O3'],
		'DFLAGS_DEBUG': ['-O0'],
		'DFLAGS_ULTRADEBUG': ['-O0'],
		'D_shlib_DFLAGS': [],
		'D_shlib_LINKFLAGS': ['-shared'],
		'DHEADER_ext': '.di',
		'D_HDR_F': '-fintfc -fintfc-file=',
	}
	for key, val in defaults.items():
		env[key] = val
	# the D linker is the compiler driver itself
	env['D_LINKER'] = env['D_COMPILER']
def detect(conf):
	"""Configure the gdc D compiler: locate it, load the ar and d tools,
	then install the default flags and platform settings."""
	conf.find_gdc()
	conf.check_tool('ar')
	conf.check_tool('d')
	conf.common_flags_gdc()
	conf.d_platform_flags()
+
+conftest(find_gdc)
+conftest(common_flags_gdc)
diff --git a/wafadmin/Tools/glib2.py b/wafadmin/Tools/glib2.py
new file mode 100644
index 0000000..d9574c8
--- /dev/null
+++ b/wafadmin/Tools/glib2.py
@@ -0,0 +1,83 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+import Task,Utils
+from TaskGen import taskgen,before,after,feature
def add_marshal_file(self,filename,prefix):
	"""Queue a glib-genmarshal run; the task is created later by process_marshal."""
	try:
		queue = self.marshal_list
	except AttributeError:
		queue = self.marshal_list = []
	self.meths.append('process_marshal')
	queue.append((filename, prefix))
def process_marshal(self):
	"""Create glib_genmarshal tasks for each queued (file, prefix) pair.

	Each source produces a .h/.c pair; every generated .c file is fed
	back into the build (self.allnodes) so it gets compiled and linked.
	"""
	for f,prefix in getattr(self,'marshal_list',[]):
		node=self.path.find_resource(f)
		if not node:
			raise Utils.WafError('file not found %r'%f)
		h_node=node.change_ext('.h')
		c_node=node.change_ext('.c')
		task=self.create_task('glib_genmarshal',node,[h_node,c_node])
		task.env.GLIB_GENMARSHAL_PREFIX=prefix
		# bugfix: append inside the loop so EVERY generated .c is compiled,
		# not only the one from the last marshal file (the original also
		# raised NameError when the list was empty)
		self.allnodes.append(c_node)
def genmarshal_func(self):
	"""Task body: run glib-genmarshal twice, once with --header for the .h
	output and once with --body (appended after an #include of that
	header) for the .c output. Returns a non-zero code on failure."""
	bld=self.inputs[0].__class__.bld
	get=self.env.get_flat
	cmd1="%s %s --prefix=%s --header > %s"%(get('GLIB_GENMARSHAL'),self.inputs[0].srcpath(self.env),get('GLIB_GENMARSHAL_PREFIX'),self.outputs[0].abspath(self.env))
	ret=bld.exec_command(cmd1)
	if ret:return ret
	# the generated body must include the generated header
	f=open(self.outputs[1].abspath(self.env),'wb')
	c='''#include "%s"\n'''%self.outputs[0].name
	f.write(c)
	f.close()
	cmd2="%s %s --prefix=%s --body >> %s"%(get('GLIB_GENMARSHAL'),self.inputs[0].srcpath(self.env),get('GLIB_GENMARSHAL_PREFIX'),self.outputs[1].abspath(self.env))
	ret=Utils.exec_command(cmd2)
	if ret:return ret
def add_enums_from_template(self,source='',target='',template='',comments=''):
	"""Queue a glib-mkenums run driven by a template file; processed by process_enums."""
	try:
		queue = self.enums_list
	except AttributeError:
		queue = self.enums_list = []
	self.meths.append('process_enums')
	entry = {'source': source, 'target': target, 'template': template, 'comments': comments}
	for part in ('file-head', 'file-prod', 'file-tail', 'enum-prod', 'value-head', 'value-prod', 'value-tail'):
		entry[part] = ''
	queue.append(entry)
def add_enums(self,source='',target='',file_head='',file_prod='',file_tail='',enum_prod='',value_head='',value_prod='',value_tail='',comments=''):
	"""Queue a glib-mkenums run configured by explicit production snippets."""
	try:
		queue = self.enums_list
	except AttributeError:
		queue = self.enums_list = []
	self.meths.append('process_enums')
	queue.append({
		'source': source,
		'template': '',
		'target': target,
		'file-head': file_head,
		'file-prod': file_prod,
		'file-tail': file_tail,
		'enum-prod': enum_prod,
		'value-head': value_head,
		'value-prod': value_prod,
		'value-tail': value_tail,
		'comments': comments,
	})
def process_enums(self):
	"""Create one glib_mkenums task per queued enum description (see
	add_enums/add_enums_from_template), wiring sources, template and
	production options into the task environment."""
	for enum in getattr(self,'enums_list',[]):
		task=self.create_task('glib_mkenums')
		env=task.env
		inputs=[]
		source_list=self.to_list(enum['source'])
		if not source_list:
			raise Utils.WafError('missing source '+str(enum))
		source_list=[self.path.find_resource(k)for k in source_list]
		inputs+=source_list
		env['GLIB_MKENUMS_SOURCE']=[k.srcpath(env)for k in source_list]
		if not enum['target']:
			raise Utils.WafError('missing target '+str(enum))
		tgt_node=self.path.find_or_declare(enum['target'])
		if tgt_node.name.endswith('.c'):
			# generated C files are compiled as part of this build
			self.allnodes.append(tgt_node)
		env['GLIB_MKENUMS_TARGET']=tgt_node.abspath(env)
		options=[]
		if enum['template']:
			template_node=self.path.find_resource(enum['template'])
			options.append('--template %s'%(template_node.abspath(env)))
			inputs.append(template_node)
		# map the dict keys onto glib-mkenums command-line options
		params={'file-head':'--fhead','file-prod':'--fprod','file-tail':'--ftail','enum-prod':'--eprod','value-head':'--vhead','value-prod':'--vprod','value-tail':'--vtail','comments':'--comments'}
		for param,option in params.iteritems():
			if enum[param]:
				options.append('%s %r'%(option,enum[param]))
		env['GLIB_MKENUMS_OPTIONS']=' '.join(options)
		task.set_inputs(inputs)
		task.set_outputs(tgt_node)
+Task.task_type_from_func('glib_genmarshal',func=genmarshal_func,vars=['GLIB_GENMARSHAL_PREFIX','GLIB_GENMARSHAL'],color='BLUE',before='cc cxx')
+Task.simple_task_type('glib_mkenums','${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}',color='PINK',before='cc cxx')
def detect(conf):
	"""Find the glib-genmarshal and glib-mkenums code generators."""
	glib_genmarshal=conf.find_program('glib-genmarshal',var='GLIB_GENMARSHAL')
	mk_enums_tool=conf.find_program('glib-mkenums',var='GLIB_MKENUMS')
+
+taskgen(add_marshal_file)
+before('apply_core')(process_marshal)
+taskgen(add_enums_from_template)
+taskgen(add_enums)
+before('apply_core')(process_enums)
diff --git a/wafadmin/Tools/gnome.py b/wafadmin/Tools/gnome.py
new file mode 100644
index 0000000..07cb9d3
--- /dev/null
+++ b/wafadmin/Tools/gnome.py
@@ -0,0 +1,162 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+import os,re
+import TaskGen,Utils,Runner,Task,Build,Options,Logs
+from Logs import error
+from TaskGen import taskgen,before,after,feature
# Extract the man page title and section number from a DocBook refentry.
n1_regexp=re.compile('<refentrytitle>(.*)</refentrytitle>',re.M)
n2_regexp=re.compile('<manvolnum>(.*)</manvolnum>',re.M)
def postinstall_schemas(prog_name):
	"""Install the GConf schema after 'waf install'; when a destdir is
	used only print the gconftool-2 command the user must run."""
	if Build.bld.is_install:
		dir=Build.bld.get_install_path('${SYSCONFDIR}/gconf/schemas/%s.schemas'%prog_name)
		if not Options.options.destdir:
			Utils.pprint('YELLOW','Installing GConf schema')
			command='gconftool-2 --install-schema-file=%s 1> /dev/null'%dir
			ret=Utils.exec_command(command)
		else:
			Utils.pprint('YELLOW','GConf schema not installed. After install, run this:')
			Utils.pprint('YELLOW','gconftool-2 --install-schema-file=%s'%dir)
def postinstall_icons():
	"""Refresh the Gtk icon cache after 'waf install'; when a destdir is
	used only print the command the user must run."""
	dir=Build.bld.get_install_path('${DATADIR}/icons/hicolor')
	if Build.bld.is_install:
		if not Options.options.destdir:
			Utils.pprint('YELLOW',"Updating Gtk icon cache.")
			command='gtk-update-icon-cache -q -f -t %s'%dir
			ret=Utils.exec_command(command)
		else:
			Utils.pprint('YELLOW','Icon cache not updated. After install, run this:')
			Utils.pprint('YELLOW','gtk-update-icon-cache -q -f -t %s'%dir)
def postinstall_scrollkeeper(prog_name):
	"""Update the scrollkeeper documentation catalog after installation
	(only when the scrollkeeper log file is writable)."""
	if Build.bld.is_install:
		if os.access('/var/log/scrollkeeper.log',os.W_OK):
			dir1=Build.bld.get_install_path('${PREFIX}/var/scrollkeeper')
			dir2=Build.bld.get_install_path('${DATADIR}/omf/%s'%prog_name)
			command='scrollkeeper-update -q -p %s -o %s'%(dir1,dir2)
			ret=Utils.exec_command(command)
def postinstall(prog_name='myapp',schemas=1,icons=1,scrollkeeper=1):
	"""Run the selected post-install hooks: GConf schemas, Gtk icon cache
	and the scrollkeeper catalog."""
	if schemas:
		postinstall_schemas(prog_name)
	if icons:
		postinstall_icons()
	if scrollkeeper:
		postinstall_scrollkeeper(prog_name)
class gnome_doc_taskgen(TaskGen.task_gen):
	"""Task generator for the 'gnome_doc' feature (translated DocBook help)."""
	def __init__(self,*k,**kw):
		TaskGen.task_gen.__init__(self,*k,**kw)
def init_gnome_doc(self):
	"""Set the default installation prefix for gnome_doc output."""
	self.default_install_path = '${PREFIX}/share'
def apply_gnome_doc(self):
	"""Build and install gnome documentation: xml2po-translate the DocBook
	sources for each lingua in doc_linguas and generate the .omf files.

	NOTE(review): when doc_linguas is empty the first iteration is 'C'
	and tsk2.run_after.append(tsk) references an unbound tsk; this
	mirrors the original behaviour -- confirm before relying on it.
	"""
	self.env['APPNAME']=self.doc_module
	lst=self.to_list(self.doc_linguas)
	bld=self.bld
	lst.append('C')
	for x in lst:
		if not x=='C':
			# translate the DocBook source with xml2po for this lingua
			tsk=self.create_task('xml2po')
			node=self.path.find_resource(x+'/'+x+'.po')
			src=self.path.find_resource('C/%s.xml'%self.doc_module)
			out=self.path.find_or_declare('%s/%s.xml'%(x,self.doc_module))
			tsk.set_inputs([node,src])
			tsk.set_outputs(out)
		else:
			out=self.path.find_resource('%s/%s.xml'%(x,self.doc_module))
		# generate the .omf metadata file for this lingua
		tsk2=self.create_task('xsltproc2po')
		out2=self.path.find_or_declare('%s/%s-%s.omf'%(x,self.doc_module,x))
		tsk2.set_outputs(out2)
		node=self.path.find_resource(self.doc_module+".omf.in")
		tsk2.inputs=[node,out]
		tsk2.run_after.append(tsk)
		if bld.is_install:
			path=self.install_path+'/gnome/help/%s/%s'%(self.doc_module,x)
			bld.install_files(self.install_path+'/omf',out2,env=self.env)
			# figures fall back to the C originals when not translated
			for y in self.to_list(self.doc_figures):
				try:
					os.stat(self.path.abspath()+'/'+x+'/'+y)
					bld.install_as(path+'/'+y,self.path.abspath()+'/'+x+'/'+y)
				except:
					bld.install_as(path+'/'+y,self.path.abspath()+'/C/'+y)
			bld.install_as(path+'/%s.xml'%self.doc_module,out.abspath(self.env))
			if x=='C':
				xmls=self.to_list(self.doc_includes)
				xmls.append(self.doc_entities)
				for z in xmls:
					out=self.path.find_resource('%s/%s'%(x,z))
					bld.install_as(path+'/%s'%z,out.abspath(self.env))
class xml_to_taskgen(TaskGen.task_gen):
	"""Task generator for the 'xml_to' feature (xsltproc html generation)."""
	def __init__(self,*k,**kw):
		TaskGen.task_gen.__init__(self,*k,**kw)
def init_xml_to(self):
	"""Set the default attributes read by apply_xml_to."""
	Utils.def_attrs(self,source='xmlfile',xslt='xlsltfile',target='hey',default_install_path='${PREFIX}',task_created=None)
def apply_xml_to(self):
	"""Create an xmlto task turning self.source (plus the XSL stylesheet) into html."""
	xmlfile=self.path.find_resource(self.source)
	xsltfile=self.path.find_resource(self.xslt)
	tsk=self.create_task('xmlto',[xmlfile,xsltfile],xmlfile.change_ext('html'))
	tsk.install_path=self.install_path
def sgml_scan(self):
	"""Scanner for sgml2man tasks: read the refentry title and section
	from the sgml input to determine the man page name (e.g. foo.1)."""
	node=self.inputs[0]
	env=self.env
	variant=node.variant(env)
	fi=open(node.abspath(env),'r')
	content=fi.read()
	fi.close()
	# the first refentrytitle/manvolnum pair names the output file
	name=n1_regexp.findall(content)[0]
	num=n2_regexp.findall(content)[0]
	doc_name=name+'.'+num
	if not self.outputs:
		self.outputs=[self.generator.path.find_or_declare(doc_name)]
	return([],[doc_name])
class gnome_sgml2man_taskgen(TaskGen.task_gen):
	"""Task generator for the 'gnome_sgml2man' feature (man page generation)."""
	def __init__(self,*k,**kw):
		TaskGen.task_gen.__init__(self,*k,**kw)
def apply_gnome_sgml2man(self):
	"""Create one sgml2man task per .sgml file in the current directory and
	install the generated man pages under ${DATADIR}/man/manN."""
	assert(getattr(self,'appname',None))
	def install_result(task):
		# install into the man section taken from the output's extension
		out=task.outputs[0]
		name=out.name
		ext=name[-1]
		env=task.env
		self.bld.install_files('${DATADIR}/man/man%s/'%ext,out,env)
	self.bld.rescan(self.path)
	for name in self.bld.cache_dir_contents[self.path.id]:
		base,ext=os.path.splitext(name)
		if ext!='.sgml':continue
		task=self.create_task('sgml2man')
		task.set_inputs(self.path.find_resource(name))
		task.task_generator=self
		if self.bld.is_install:task.install=install_result
		# run the scanner now so the output file name is known
		task.scan()
# Task rules for the documentation generators; the sgml2man output name
# is computed by the sgml_scan scanner above.
cls=Task.simple_task_type('sgml2man','${SGML2MAN} -o ${TGT[0].bld_dir(env)} ${SRC}  > /dev/null',color='BLUE')
cls.scan=sgml_scan
cls.quiet=1
Task.simple_task_type('xmlto','${XMLTO} html -m ${SRC[1].abspath(env)} ${SRC[0].abspath(env)}')
Task.simple_task_type('xml2po','${XML2PO} ${XML2POFLAGS} ${SRC} > ${TGT}',color='BLUE')
# xsltproc command producing the .omf metadata from the .omf.in template
xslt_magic="""${XSLTPROC2PO} -o ${TGT[0].abspath(env)} \
--stringparam db2omf.basename ${APPNAME} \
--stringparam db2omf.format docbook \
--stringparam db2omf.lang ${TGT[0].abspath(env)[:-4].split('-')[-1]} \
--stringparam db2omf.dtd '-//OASIS//DTD DocBook XML V4.3//EN' \
--stringparam db2omf.omf_dir ${PREFIX}/share/omf \
--stringparam db2omf.help_dir ${PREFIX}/share/gnome/help \
--stringparam db2omf.omf_in ${SRC[0].abspath(env)} \
--stringparam db2omf.scrollkeeper_cl ${SCROLLKEEPER_DATADIR}/Templates/C/scrollkeeper_cl.xml \
${DB2OMF} ${SRC[1].abspath(env)}"""
Task.simple_task_type('xsltproc2po',xslt_magic,color='BLUE')
def detect(conf):
	"""Configure the gnome documentation tool chain: load the helper
	tools, locate the generators and record the scrollkeeper and
	db2omf settings in the environment.

	find_program() stores each tool path via its var= argument, so the
	return values are not needed; the unused inner getstr() helper and
	dead local bindings of the original were removed.
	"""
	conf.check_tool('gnu_dirs glib2 dbus')
	conf.find_program('docbook2man',var='SGML2MAN')
	conf.define('GNOMELOCALEDIR',os.path.join(conf.env['DATADIR'],'locale'))
	conf.find_program('xml2po',var='XML2PO')
	conf.find_program('xsltproc',var='XSLTPROC2PO')
	conf.env['XML2POFLAGS']='-e -p'
	conf.env['SCROLLKEEPER_DATADIR']=Utils.cmd_output("scrollkeeper-config --pkgdatadir",silent=1).strip()
	conf.env['DB2OMF']=Utils.cmd_output("/usr/bin/pkg-config --variable db2omf gnome-doc-utils",silent=1).strip()
def set_options(opt):
	"""Add the --want-rpath command-line option."""
	opt.add_option('--want-rpath',type='int',default=1,dest='want_rpath',help='set rpath to 1 or 0 [Default 1]')
+
+feature('gnome_doc')(init_gnome_doc)
+feature('gnome_doc')(apply_gnome_doc)
+after('init_gnome_doc')(apply_gnome_doc)
+feature('xml_to')(init_xml_to)
+feature('xml_to')(apply_xml_to)
+after('init_xml_to')(apply_xml_to)
+feature('gnome_sgml2man')(apply_gnome_sgml2man)
diff --git a/wafadmin/Tools/gnu_dirs.py b/wafadmin/Tools/gnu_dirs.py
new file mode 100644
index 0000000..daa9415
--- /dev/null
+++ b/wafadmin/Tools/gnu_dirs.py
@@ -0,0 +1,63 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+import Utils,Options
# Table of GNU-standard installation directories: each row is
# (option name, description, default path with ${VAR} substitutions),
# parsed from the literal below.
_options=[x.split(', ')for x in'''
bindir, user executables, ${EXEC_PREFIX}/bin
sbindir, system admin executables, ${EXEC_PREFIX}/sbin
libexecdir, program executables, ${EXEC_PREFIX}/libexec
sysconfdir, read-only single-machine data, ${PREFIX}/etc
sharedstatedir, modifiable architecture-independent data, ${PREFIX}/com
localstatedir, modifiable single-machine data, ${PREFIX}/var
libdir, object code libraries, ${EXEC_PREFIX}/lib
includedir, C header files, ${PREFIX}/include
oldincludedir, C header files for non-gcc, /usr/include
datarootdir, read-only arch.-independent data root, ${PREFIX}/share
datadir, read-only architecture-independent data, ${DATAROOTDIR}
infodir, info documentation, ${DATAROOTDIR}/info
localedir, locale-dependent data, ${DATAROOTDIR}/locale
mandir, man documentation, ${DATAROOTDIR}/man
docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE}
htmldir, html documentation, ${DOCDIR}
dvidir, dvi documentation, ${DOCDIR}
pdfdir, pdf documentation, ${DOCDIR}
psdir, ps documentation, ${DOCDIR}
'''.split('\n')if x]
def detect(conf):
	"""Resolve every GNU directory variable, iterating so that variables
	referencing other variables (e.g. ${DATAROOTDIR}) converge.

	Values come from the command-line options when given, otherwise
	from the defaults in _options. Fails when substitution does not
	converge. (The original locals 'iter' and 'help' shadowed builtins;
	they were renamed without any behaviour change.)
	"""
	def get_param(varname,default):
		return getattr(Options.options,varname,'')or default
	env=conf.env
	env['EXEC_PREFIX']=get_param('EXEC_PREFIX',env['PREFIX'])
	env['PACKAGE']=Utils.g_module.APPNAME
	complete=False
	attempt=0
	# each pass can resolve variables that depend on ones resolved earlier
	while not complete and attempt<len(_options)+1:
		attempt+=1
		complete=True
		for name,_,default in _options:
			name=name.upper()
			if not env[name]:
				try:
					env[name]=Utils.subst_vars(get_param(name,default),env)
				except TypeError:
					complete=False
	if not complete:
		lst=[name for name,_,_ in _options if not env[name.upper()]]
		raise Utils.WafError('Variable substitution failure %r'%lst)
def set_options(opt):
	"""Move --prefix/--destdir into an 'Installation directories' option
	group and add one option per GNU directory variable."""
	inst_dir=opt.add_option_group('Installation directories','By default, "waf install" will put the files in\
 "/usr/local/bin", "/usr/local/lib" etc. An installation prefix other\
 than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"')
	for k in('--prefix','--destdir'):
		option=opt.parser.get_option(k)
		if option:
			opt.parser.remove_option(k)
			inst_dir.add_option(option)
	inst_dir.add_option('--exec-prefix',help='installation prefix [Default: ${PREFIX}]',default='',dest='EXEC_PREFIX')
	dirs_options=opt.add_option_group('Pre-defined installation directories','')
	for name,help,default in _options:
		option_name='--'+name
		str_default=default
		str_help='%s [Default: %s]'%(help,str_default)
		dirs_options.add_option(option_name,help=str_help,default='',dest=name.upper())
+
diff --git a/wafadmin/Tools/intltool.py b/wafadmin/Tools/intltool.py
new file mode 100644
index 0000000..3f374b9
--- /dev/null
+++ b/wafadmin/Tools/intltool.py
@@ -0,0 +1,95 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+import os,re
+import Configure,TaskGen,Task,Utils,Runner,Options,Build,config_c
+from TaskGen import feature,before,taskgen
+from Logs import error
class intltool_in_taskgen(TaskGen.task_gen):
	"""Task generator for the 'intltool_in' feature (intltool-merge of .in files)."""
	def __init__(self,*k,**kw):
		TaskGen.task_gen.__init__(self,*k,**kw)
def iapply_intltool_in_f(self):
	"""Create one intltool-merge task per source file; apply_core is
	removed since the .in files are not compiled."""
	try:self.meths.remove('apply_core')
	except ValueError:pass
	for i in self.to_list(self.source):
		node=self.path.find_resource(i)
		podir=getattr(self,'podir','po')
		podirnode=self.path.find_dir(podir)
		if not podirnode:
			error("could not find the podir %r"%podir)
			continue
		cache=getattr(self,'intlcache','.intlcache')
		# the tool arguments are passed through env variables (see the task rule)
		self.env['INTLCACHE']=os.path.join(self.path.bldpath(self.env),podir,cache)
		self.env['INTLPODIR']=podirnode.srcpath(self.env)
		self.env['INTLFLAGS']=getattr(self,'flags',['-q','-u','-c'])
		task=self.create_task('intltool',node,node.change_ext(''))
		task.install_path=self.install_path
class intltool_po_taskgen(TaskGen.task_gen):
	"""Task generator for the 'intltool_po' feature (.po compilation to .mo)."""
	def __init__(self,*k,**kw):
		TaskGen.task_gen.__init__(self,*k,**kw)
def apply_intltool_po(self):
	"""Compile the .po files listed in the po/LINGUAS file to .mo and
	install them as ${LOCALEDIR}/<lang>/LC_MESSAGES/<appname>.mo."""
	try:self.meths.remove('apply_core')
	except ValueError:pass
	self.default_install_path='${LOCALEDIR}'
	appname=getattr(self,'appname','set_your_app_name')
	podir=getattr(self,'podir','')
	def install_translation(task):
		out=task.outputs[0]
		filename=out.name
		(langname,ext)=os.path.splitext(filename)
		inst_file=langname+os.sep+'LC_MESSAGES'+os.sep+appname+'.mo'
		self.bld.install_as(os.path.join(self.install_path,inst_file),out,self.env,self.chmod)
	linguas=self.path.find_resource(os.path.join(podir,'LINGUAS'))
	if linguas:
		# read the language list, skipping comment lines
		file=open(linguas.abspath())
		langs=[]
		for line in file.readlines():
			if not line.startswith('#'):
				langs+=line.split()
		file.close()
		re_linguas=re.compile('[-a-zA-Z_  ]+')
		for lang in langs:
			if re_linguas.match(lang):
				node=self.path.find_resource(os.path.join(podir,re_linguas.match(lang).group()+'.po'))
				task=self.create_task('po')
				task.set_inputs(node)
				task.set_outputs(node.change_ext('.mo'))
				if self.bld.is_install:task.install=install_translation
	else:
		Utils.pprint('RED',"Error no LINGUAS file found in po directory")
+Task.simple_task_type('po','${POCOM} -o ${TGT} ${SRC}',color='BLUE',shell=False)
+Task.simple_task_type('intltool','${INTLTOOL} ${INTLFLAGS} ${INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}',color='BLUE',after="cc_link cxx_link",shell=False)
def detect(conf):
	"""Configure gettext/intltool: find msgfmt and intltool-merge (with a
	perl-based fallback on win32) and define LOCALEDIR/DATADIR."""
	pocom=conf.find_program('msgfmt')
	if not pocom:
		conf.fatal('The program msgfmt (gettext) is mandatory!')
	conf.env['POCOM']=pocom
	intltool=conf.find_program('intltool-merge',var='INTLTOOL')
	if not intltool:
		if Options.platform=='win32':
			# intltool-merge is a perl script; on win32 run it through perl
			perl=conf.find_program('perl',var='PERL')
			if not perl:
				conf.fatal('The program perl (required by intltool) could not be found')
			intltooldir=Configure.find_file('intltool-merge',os.environ['PATH'].split(os.pathsep))
			if not intltooldir:
				conf.fatal('The program intltool-merge (intltool, gettext-devel) is mandatory!')
			conf.env['INTLTOOL']=Utils.to_list(conf.env['PERL'])+[intltooldir+os.sep+'intltool-merge']
			conf.check_message('intltool','',True,' '.join(conf.env['INTLTOOL']))
		else:
			conf.fatal('The program intltool-merge (intltool, gettext-devel) is mandatory!')
	def getstr(varname):
		return getattr(Options.options,varname,'')
	prefix=conf.env['PREFIX']
	datadir=getstr('datadir')
	if not datadir:datadir=os.path.join(prefix,'share')
	conf.define('LOCALEDIR',os.path.join(datadir,'locale'))
	conf.define('DATADIR',datadir)
	if conf.env['CC']or conf.env['CXX']:
		# make sure locale.h is available when a compiler is configured
		conf.check(header_name='locale.h')
def set_options(opt):
	"""Add the --want-rpath and --datadir command-line options."""
	opt.add_option('--want-rpath',type='int',default=1,dest='want_rpath',help='set rpath to 1 or 0 [Default 1]')
	opt.add_option('--datadir',type='string',default='',dest='datadir',help='read-only application data')
+
+before('apply_core')(iapply_intltool_in_f)
+feature('intltool_in')(iapply_intltool_in_f)
+feature('intltool_po')(apply_intltool_po)
diff --git a/wafadmin/Tools/libtool.py b/wafadmin/Tools/libtool.py
new file mode 100644
index 0000000..5107f3d
--- /dev/null
+++ b/wafadmin/Tools/libtool.py
@@ -0,0 +1,239 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+import sys,re,os,optparse
+import TaskGen,Task,Utils,preproc
+from Logs import error,debug,warn
+from TaskGen import taskgen,after,before,feature
# version of this libtool emulation; env vars that invalidate fakelibtool tasks
REVISION="0.1.3"
fakelibtool_vardeps=['CXX','PREFIX']
def fakelibtool_build(task):
	"""Write a fake libtool .la descriptor for the task's output library.

	Emits dlname/library_names (derived from env['vnum'] when present),
	the recorded dependency link flags and the install libdir.
	Returns 0 so the task is treated as successful.
	"""
	env=task.env
	sname=task.inputs[0].name
	# 'with' guarantees the .la file is closed even if a write fails
	with open(task.outputs[0].abspath(env),'w') as dest:
		fu=dest.write
		fu("# Generated by ltmain.sh - GNU libtool 1.5.18 - (pwn3d by BKsys II code name WAF)\n")
		if env['vnum']:
			# version triple x.y.z -> libfoo.x.y.z / libfoo.x / libfoo
			nums=env['vnum'].split('.')
			libname=task.inputs[0].name
			name3=libname+'.'+env['vnum']
			name2=libname+'.'+nums[0]
			name1=libname
			fu("dlname='%s'\n"%name2)
			strn=" ".join([name3,name2,name1])
			fu("library_names='%s'\n"%(strn))
		else:
			fu("dlname='%s'\n"%sname)
			fu("library_names='%s %s %s'\n"%(sname,sname,sname))
		fu("old_library=''\n")
		# renamed from 'vars' to avoid shadowing the builtin
		flags=' '.join(env['libtoolvars']+env['LINKFLAGS'])
		fu("dependency_libs='%s'\n"%flags)
		fu("current=0\n")
		fu("age=0\nrevision=0\ninstalled=yes\nshouldnotlink=no\n")
		fu("dlopen=''\ndlpreopen=''\n")
		fu("libdir='%s/lib'\n"%env['PREFIX'])
	return 0
def read_la_file(path):
	"""Parse a libtool .la file into a dict.

	Only lines of the exact form key='value' are kept; comments, blanks
	and anything else are silently ignored.
	"""
	sp=re.compile(r'^([^=]+)=\'(.*)\'$')
	dc={}
	# 'with' closes the handle even on error; avoid shadowing builtin 'file'
	with open(path,"r") as f:
		for line in f:
			try:
				_,left,right,_=sp.split(line.strip())
				dc[left]=right
			except ValueError:
				# line does not match key='value'
				pass
	return dc
def apply_link_libtool(self):
	"""Create the 'fakelibtool' task producing a .la next to the link output,
	and schedule installation of the link outputs under ${PREFIX}/lib.

	Programs get no .la file and nothing installed here.  The original code
	read 'linktask' in the install branch even for programs, where it was
	never bound, raising NameError on install.
	"""
	if self.type!='program':
		linktask=self.link_task
		self.latask=self.create_task('fakelibtool',linktask.outputs,linktask.outputs[0].change_ext('.la'))
		if self.bld.is_install:
			self.bld.install_files('${PREFIX}/lib',linktask.outputs[0],self.env)
def apply_libtool(self):
	"""Expand .la files referenced through LINKFLAGS into concrete flags.

	Records .la dependencies in env['libtoolvars'] and appends their
	dependency_libs entries to LINKFLAGS, transitively following nested
	.la references.
	"""
	self.env['vnum']=self.vnum
	paths=[]
	libs=[]
	libtool_files=[]
	libtool_vars=[]
	# split LINKFLAGS into search paths (-L) and library names (-l)
	for l in self.env['LINKFLAGS']:
		if l[:2]=='-L':
			paths.append(l[2:])
		elif l[:2]=='-l':
			libs.append(l[2:])
	for l in libs:
		for p in paths:
			dict=read_la_file(p+'/lib'+l+'.la')
			linkflags2=dict.get('dependency_libs','')
			# NOTE(review): 'break' stops after the first non-.la flag found;
			# looks suspicious but is kept as-is
			for v in linkflags2.split():
				if v.endswith('.la'):
					libtool_files.append(v)
					libtool_vars.append(v)
					continue
				self.env.append_unique('LINKFLAGS',v)
				break
	self.env['libtoolvars']=libtool_vars
	# transitively follow .la files referenced from other .la files
	while libtool_files:
		file=libtool_files.pop()
		dict=read_la_file(file)
		for v in dict['dependency_libs'].split():
			if v[-3:]=='.la':
				libtool_files.append(v)
				continue
			self.env.append_unique('LINKFLAGS',v)
+Task.task_type_from_func('fakelibtool',vars=fakelibtool_vardeps,func=fakelibtool_build,color='BLUE',after="cc_link cxx_link static_link")
class libtool_la_file:
	"""Parsed representation of a libtool .la archive descriptor."""
	def __init__(self,la_filename):
		self.__la_filename=la_filename
		# "libfoo.la" -> link name "foo"
		self.linkname=str(os.path.split(la_filename)[-1])[:-3]
		if self.linkname.startswith("lib"):
			self.linkname=self.linkname[3:]
		# fields filled in by __parse from the .la file
		self.dlname=None
		self.library_names=None
		self.old_library=None
		self.dependency_libs=None
		self.current=None
		self.age=None
		self.revision=None
		self.installed=None
		self.shouldnotlink=None
		self.dlopen=None
		self.dlpreopen=None
		self.libdir='/usr/lib'
		if not self.__parse():
			# the original raised a bare string, which is not a valid exception
			raise IOError("file %s not found!!"%(la_filename))
	def __parse(self):
		"""Read key=value pairs from the .la file; return 1 on success, 0 if missing."""
		if not os.path.isfile(self.__la_filename):return 0
		with open(self.__la_filename,'r') as la_file:
			for line in la_file:
				ln=line.strip()
				if not ln or ln[0]=='#':continue
				(key,value)=str(ln).split('=',1)
				key=key.strip()
				value=value.strip()
				# yes/no -> bool, numbers -> int, everything else unquoted
				if value=="no":value=False
				elif value=="yes":value=True
				else:
					try:value=int(value)
					except ValueError:value=value.strip("'")
				setattr(self,key,value)
		return 1
	def get_libs(self):
		"""Return linker flags: -L<libdir> -l<name> plus recorded dependency_libs."""
		libs=[]
		if self.dependency_libs:
			libs=str(self.dependency_libs).strip().split()
		# (dead 'libs==None' check removed: libs is always a list here)
		libs.insert(0,"-l%s"%self.linkname.strip())
		libs.insert(0,"-L%s"%self.libdir.strip())
		return libs
	def __str__(self):
		return'''\
dlname = "%(dlname)s"
library_names = "%(library_names)s"
old_library = "%(old_library)s"
dependency_libs = "%(dependency_libs)s"
version = %(current)s.%(age)s.%(revision)s
installed = "%(installed)s"
shouldnotlink = "%(shouldnotlink)s"
dlopen = "%(dlopen)s"
dlpreopen = "%(dlpreopen)s"
libdir = "%(libdir)s"'''%self.__dict__
class libtool_config:
	"""High-level view over a .la file: version comparison plus the
	transitive closure of its link flags."""
	def __init__(self,la_filename):
		self.__libtool_la_file=libtool_la_file(la_filename)
		tmp=self.__libtool_la_file
		self.__version=[int(tmp.current),int(tmp.age),int(tmp.revision)]
		self.__sub_la_files=[]
		self.__sub_la_files.append(la_filename)
		self.__libs=None
	def __cmp__(self,other):
		# NOTE(review): Python 2 only -- __cmp__ and cmp() are gone in Python 3
		if not other:
			return 1
		othervers=[int(s)for s in str(other).split(".")]
		selfvers=self.__version
		return cmp(selfvers,othervers)
	def __str__(self):
		return"\n".join([str(self.__libtool_la_file),' '.join(self.__libtool_la_file.get_libs()),'* New getlibs:',' '.join(self.get_libs())])
	def __get_la_libs(self,la_filename):
		# flags contributed by one referenced .la file
		return libtool_la_file(la_filename).get_libs()
	def get_libs(self):
		"""Return the unique link flags of this library and all nested .la files."""
		libs_list=list(self.__libtool_la_file.get_libs())
		libs_map={}
		while len(libs_list)>0:
			entry=libs_list.pop(0)
			if entry:
				if str(entry).endswith(".la"):
					# recurse into each .la reference exactly once
					if entry not in self.__sub_la_files:
						self.__sub_la_files.append(entry)
						libs_list.extend(self.__get_la_libs(entry))
				else:
					libs_map[entry]=1
		self.__libs=libs_map.keys()
		return self.__libs
	def get_libs_only_L(self):
		if not self.__libs:self.get_libs()
		libs=self.__libs
		libs=[s for s in libs if str(s).startswith('-L')]
		return libs
	def get_libs_only_l(self):
		if not self.__libs:self.get_libs()
		libs=self.__libs
		libs=[s for s in libs if str(s).startswith('-l')]
		return libs
	def get_libs_only_other(self):
		if not self.__libs:self.get_libs()
		libs=self.__libs
		libs=[s for s in libs if not(str(s).startswith('-L')or str(s).startswith('-l'))]
		return libs
def useCmdLine():
	"""Command-line front end mimicking the libtool --libs/--version queries."""
	usage='''Usage: %prog [options] PathToFile.la
example: %prog --atleast-version=2.0.0 /usr/lib/libIlmImf.la
nor: %prog --libs /usr/lib/libamarok.la'''
	parser=optparse.OptionParser(usage)
	a=parser.add_option
	a("--version",dest="versionNumber",action="store_true",default=False,help="output version of libtool-config")
	a("--debug",dest="debug",action="store_true",default=False,help="enable debug")
	a("--libs",dest="libs",action="store_true",default=False,help="output all linker flags")
	a("--libs-only-l",dest="libs_only_l",action="store_true",default=False,help="output -l flags")
	a("--libs-only-L",dest="libs_only_L",action="store_true",default=False,help="output -L flags")
	a("--libs-only-other",dest="libs_only_other",action="store_true",default=False,help="output other libs (e.g. -pthread)")
	a("--atleast-version",dest="atleast_version",default=None,help="return 0 if the module is at least version ATLEAST_VERSION")
	a("--exact-version",dest="exact_version",default=None,help="return 0 if the module is exactly version EXACT_VERSION")
	a("--max-version",dest="max_version",default=None,help="return 0 if the module is at no newer than version MAX_VERSION")
	(options,args)=parser.parse_args()
	if len(args)!=1 and not options.versionNumber:
		parser.error("incorrect number of arguments")
	if options.versionNumber:
		print("libtool-config version %s"%REVISION)
		return 0
	ltf=libtool_config(args[0])
	if options.debug:
		print(ltf)
	# version predicates: return 0 on success, exit(1) on failure
	if options.atleast_version:
		if ltf>=options.atleast_version:return 0
		sys.exit(1)
	if options.exact_version:
		if ltf==options.exact_version:return 0
		sys.exit(1)
	if options.max_version:
		if ltf<=options.max_version:return 0
		sys.exit(1)
	def p(x):
		print(" ".join(x))
	if options.libs:p(ltf.get_libs())
	elif options.libs_only_l:p(ltf.get_libs_only_l())
	elif options.libs_only_L:p(ltf.get_libs_only_L())
	elif options.libs_only_other:p(ltf.get_libs_only_other())
	return 0
if __name__=='__main__':
	useCmdLine()
+
+feature("libtool")(apply_link_libtool)
+after('apply_link')(apply_link_libtool)
+feature("libtool")(apply_libtool)
+before('apply_core')(apply_libtool)
diff --git a/wafadmin/Tools/misc.py b/wafadmin/Tools/misc.py
new file mode 100644
index 0000000..8851fb8
--- /dev/null
+++ b/wafadmin/Tools/misc.py
@@ -0,0 +1,302 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+import shutil,re,os
+import TaskGen,Node,Task,Utils,Build,Constants
+from TaskGen import feature,taskgen,after,before
+from Logs import debug
def copy_func(tsk):
	"""Copy the task's single input file to its output location.

	Preserves metadata via shutil.copy2 and applies tsk.chmod when set.
	Returns 0 on success, 1 when the copy fails.
	"""
	src=tsk.inputs[0].abspath(tsk.env)
	dst=tsk.outputs[0].abspath(tsk.env)
	try:
		shutil.copy2(src,dst)
	except(OSError,IOError):
		return 1
	if tsk.chmod:
		os.chmod(dst,tsk.chmod)
	return 0
def action_process_file_func(tsk):
	"""Run the callable attached to the task (used by the 'copy' task type)."""
	fun=tsk.fun
	if fun:
		return fun(tsk)
	raise Utils.WafError('task must have a function attached to it for copy_func to work!')
class cmd_taskgen(TaskGen.task_gen):
	"""Task generator for the 'cmd' feature (runs an arbitrary function)."""
	def __init__(self,*k,**kw):
		TaskGen.task_gen.__init__(self,*k,**kw)
def apply_cmd(self):
	"""Attach a bare TaskBase that simply executes self.fun ('cmd' feature)."""
	if not self.fun:raise Utils.WafError('cmdobj needs a function!')
	tsk=Task.TaskBase()
	tsk.fun=self.fun
	tsk.env=self.env
	tsk.install_path=self.install_path
	self.tasks.append(tsk)
class copy_taskgen(TaskGen.task_gen):
	"""Task generator for the 'copy' feature."""
	def __init__(self,*k,**kw):
		TaskGen.task_gen.__init__(self,*k,**kw)
def apply_copy(self):
	"""Create one 'copy' task per source file ('copy' feature).

	self.fun (default copy_func) becomes the task body; an explicit target
	name is honoured only when there is exactly one source file.
	"""
	Utils.def_attrs(self,fun=copy_func)
	self.default_install_path=0
	lst=self.to_list(self.source)
	# replace the default build logic entirely
	self.meths.remove('apply_core')
	for filename in lst:
		node=self.path.find_resource(filename)
		if not node:raise Utils.WafError('cannot find input file %s for processing'%filename)
		target=self.target
		if not target or len(lst)>1:target=node.name
		newnode=self.path.find_or_declare(target)
		tsk=self.create_task('copy',node,newnode)
		tsk.fun=self.fun
		tsk.chmod=self.chmod
		tsk.install_path=self.install_path
		if not tsk.env:
			tsk.debug()
			raise Utils.WafError('task without an environment')
def subst_func(tsk):
	"""Substitute @VAR@ placeholders in the input file and write the result.

	Values come from tsk.dict when given, otherwise from the environment
	(variable name or its upper-case form).  Applies tsk.chmod when set.
	"""
	m4_re=re.compile(r'@(\w+)@',re.M)
	env=tsk.env
	infile=tsk.inputs[0].abspath(env)
	outfile=tsk.outputs[0].abspath(env)
	code=Utils.readf(infile)
	# escape '%' so the %-formatting below only sees our placeholders
	code=code.replace('%','%%')
	s=m4_re.sub(r'%(\1)s',code)
	di=tsk.dict or{}
	if not di:
		names=m4_re.findall(code)
		for i in names:
			di[i]=env.get_flat(i)or env.get_flat(i.upper())
	# 'with' closes the output even if the substitution raises
	with open(outfile,'w') as f:
		f.write(s%di)
	if tsk.chmod:os.chmod(outfile,tsk.chmod)
class subst_taskgen(TaskGen.task_gen):
	"""Task generator for the 'subst' feature."""
	def __init__(self,*k,**kw):
		TaskGen.task_gen.__init__(self,*k,**kw)
def apply_subst(self):
	"""Create 'copy' tasks running subst_func over each source ('subst' feature).

	When a substitution dict is provided, a DICT_HASH is stored in a copied
	environment so tasks re-run when the dict changes.
	"""
	Utils.def_attrs(self,fun=subst_func)
	self.default_install_path=0
	lst=self.to_list(self.source)
	self.meths.remove('apply_core')
	self.dict=getattr(self,'dict',{})
	for filename in lst:
		node=self.path.find_resource(filename)
		if not node:raise Utils.WafError('cannot find input file %s for processing'%filename)
		if self.target:
			newnode=self.path.find_or_declare(self.target)
		else:
			newnode=node.change_ext('')
		try:
			self.dict=self.dict.get_merged_dict()
		except AttributeError:
			pass
		if self.dict and not self.env['DICT_HASH']:
			self.env=self.env.copy()
			keys=list(self.dict.keys())
			keys.sort()
			# do not rebind 'lst' (the iteration source) as the original did
			values=[self.dict[x]for x in keys]
			self.env['DICT_HASH']=str(Utils.h_list(values))
		tsk=self.create_task('copy',node,newnode)
		tsk.fun=self.fun
		tsk.dict=self.dict
		tsk.dep_vars=['DICT_HASH']
		tsk.install_path=self.install_path
		tsk.chmod=self.chmod
		if not tsk.env:
			tsk.debug()
			raise Utils.WafError('task without an environment')
class cmd_arg(object):
	"""Base class for typed command-line arguments.

	'template' wraps the resolved path; 'node' is filled in later by
	find_node() in the subclasses.
	"""
	def __init__(self,name,template='%s'):
		self.node=None
		self.name=name
		self.template=template
class input_file(cmd_arg):
	"""Command argument naming an existing source file."""
	def find_node(self,base_path):
		assert isinstance(base_path,Node.Node)
		self.node=base_path.find_resource(self.name)
		if self.node is None:
			# original had one %s placeholder for a two-value tuple (TypeError)
			raise Utils.WafError("Input file %s not found in %s"%(self.name,base_path))
	def get_path(self,env,absolute):
		"""Return the formatted path, absolute or source-relative."""
		if absolute:
			return self.template%self.node.abspath(env)
		else:
			return self.template%self.node.srcpath(env)
class output_file(cmd_arg):
	"""Command argument naming a file produced by the command."""
	def find_node(self,base_path):
		assert isinstance(base_path,Node.Node)
		self.node=base_path.find_or_declare(self.name)
		if self.node is None:
			# original had one %s placeholder for a two-value tuple (TypeError)
			raise Utils.WafError("Output file %s not found in %s"%(self.name,base_path))
	def get_path(self,env,absolute):
		"""Return the formatted path, absolute or build-relative."""
		if absolute:
			return self.template%self.node.abspath(env)
		else:
			return self.template%self.node.bldpath(env)
class cmd_dir_arg(cmd_arg):
	"""Command argument naming a directory."""
	def find_node(self,base_path):
		assert isinstance(base_path,Node.Node)
		self.node=base_path.find_dir(self.name)
		if self.node is None:
			# original had one %s placeholder for a two-value tuple (TypeError)
			raise Utils.WafError("Directory %s not found in %s"%(self.name,base_path))
class input_dir(cmd_dir_arg):
	"""Directory argument; always formatted as an absolute path."""
	def get_path(self,dummy_env,dummy_absolute):
		return self.template%self.node.abspath()
class output_dir(cmd_dir_arg):
	"""Output directory argument; formatted with the variant-aware abspath."""
	def get_path(self,env,dummy_absolute):
		return self.template%self.node.abspath(env)
+class command_output(Task.Task):
+	color="BLUE"
+	def __init__(self,env,command,command_node,command_args,stdin,stdout,cwd,os_env,stderr):
+		Task.Task.__init__(self,env,normal=1)
+		assert isinstance(command,(str,Node.Node))
+		self.command=command
+		self.command_args=command_args
+		self.stdin=stdin
+		self.stdout=stdout
+		self.cwd=cwd
+		self.os_env=os_env
+		self.stderr=stderr
+		if command_node is not None:self.dep_nodes=[command_node]
+		self.dep_vars=[]
+	def run(self):
+		task=self
+		def input_path(node,template):
+			if task.cwd is None:
+				return template%node.bldpath(task.env)
+			else:
+				return template%node.abspath()
+		def output_path(node,template):
+			fun=node.abspath
+			if task.cwd is None:fun=node.bldpath
+			return template%fun(task.env)
+		if isinstance(task.command,Node.Node):
+			argv=[input_path(task.command,'%s')]
+		else:
+			argv=[task.command]
+		for arg in task.command_args:
+			if isinstance(arg,str):
+				argv.append(arg)
+			else:
+				assert isinstance(arg,cmd_arg)
+				argv.append(arg.get_path(task.env,(task.cwd is not None)))
+		if task.stdin:
+			stdin=open(input_path(task.stdin,'%s'))
+		else:
+			stdin=None
+		if task.stdout:
+			stdout=open(output_path(task.stdout,'%s'),"w")
+		else:
+			stdout=None
+		if task.stderr:
+			stderr=open(output_path(task.stderr,'%s'),"w")
+		else:
+			stderr=None
+		if task.cwd is None:
+			cwd=('None (actually %r)'%os.getcwd())
+		else:
+			cwd=repr(task.cwd)
+		debug("command-output: cwd=%s, stdin=%r, stdout=%r, argv=%r"%(cwd,stdin,stdout,argv))
+		if task.os_env is None:
+			os_env=os.environ
+		else:
+			os_env=task.os_env
+		command=Utils.pproc.Popen(argv,stdin=stdin,stdout=stdout,stderr=stderr,cwd=task.cwd,env=os_env)
+		return command.wait()
class cmd_output_taskgen(TaskGen.task_gen):
	"""Task generator for the 'command-output' feature."""
	def __init__(self,*k,**kw):
		TaskGen.task_gen.__init__(self,*k,**kw)
def init_cmd_output(self):
	"""Set the default attributes used by the 'command-output' feature."""
	Utils.def_attrs(self,stdin=None,stdout=None,stderr=None,command=None,command_is_external=False,argv=[],dependencies=[],dep_vars=[],hidden_inputs=[],hidden_outputs=[],cwd=None,os_env=None)
def apply_cmd_output(self):
	"""Turn the 'command-output' attributes into a command_output task.

	Resolves the command and every cmd_arg against the source tree, wires
	stdin/stdout/stderr and hidden inputs/outputs, and chains dependency
	task generators.
	"""
	if self.command is None:
		raise Utils.WafError("command-output missing command")
	if self.command_is_external:
		cmd=self.command
		cmd_node=None
	else:
		cmd_node=self.path.find_resource(self.command)
		assert cmd_node is not None,('''Could not find command '%s' in source tree.
Hint: if this is an external command,
use command_is_external=True''')%(self.command,)
		cmd=cmd_node
	if self.cwd is None:
		cwd=None
	else:
		# bug fix: the original asserted on the unbound name 'cwd' and the
		# undefined class 'CmdDirArg', and never assigned cwd in this branch
		assert isinstance(self.cwd,cmd_dir_arg)
		self.cwd.find_node(self.path)
		# NOTE(review): this value reaches Popen(cwd=...); confirm callers
		# supply something path-like here
		cwd=self.cwd
	args=[]
	inputs=[]
	outputs=[]
	for arg in self.argv:
		if isinstance(arg,cmd_arg):
			arg.find_node(self.path)
			if isinstance(arg,input_file):
				inputs.append(arg.node)
			if isinstance(arg,output_file):
				outputs.append(arg.node)
	if self.stdout is None:
		stdout=None
	else:
		assert isinstance(self.stdout,str)
		stdout=self.path.find_or_declare(self.stdout)
		if stdout is None:
			raise Utils.WafError("File %s not found"%(self.stdout,))
		outputs.append(stdout)
	if self.stderr is None:
		stderr=None
	else:
		assert isinstance(self.stderr,str)
		stderr=self.path.find_or_declare(self.stderr)
		if stderr is None:
			raise Utils.WafError("File %s not found"%(self.stderr,))
		outputs.append(stderr)
	if self.stdin is None:
		stdin=None
	else:
		assert isinstance(self.stdin,str)
		stdin=self.path.find_resource(self.stdin)
		if stdin is None:
			raise Utils.WafError("File %s not found"%(self.stdin,))
		inputs.append(stdin)
	for hidden_input in self.to_list(self.hidden_inputs):
		node=self.path.find_resource(hidden_input)
		if node is None:
			raise Utils.WafError("File %s not found in dir %s"%(hidden_input,self.path))
		inputs.append(node)
	for hidden_output in self.to_list(self.hidden_outputs):
		node=self.path.find_or_declare(hidden_output)
		if node is None:
			raise Utils.WafError("File %s not found in dir %s"%(hidden_output,self.path))
		outputs.append(node)
	if not(inputs or getattr(self,'no_inputs',None)):
		raise Utils.WafError('command-output objects must have at least one input file or give self.no_inputs')
	if not(outputs or getattr(self,'no_outputs',None)):
		raise Utils.WafError('command-output objects must have at least one output file or give self.no_outputs')
	task=command_output(self.env,cmd,cmd_node,self.argv,stdin,stdout,cwd,self.os_env,stderr)
	Utils.copy_attrs(self,task,'before after ext_in ext_out',only_if_set=True)
	self.tasks.append(task)
	task.inputs=inputs
	task.outputs=outputs
	task.dep_vars=self.to_list(self.dep_vars)
	for dep in self.dependencies:
		assert dep is not self
		dep.post()
		for dep_task in dep.tasks:
			task.set_run_after(dep_task)
	if not task.inputs:
		# inputless tasks always run and must self-sign their outputs
		task.runnable_status=type(Task.TaskBase.run)(runnable_status,task,task.__class__)
		task.post_run=type(Task.TaskBase.run)(post_run,task,task.__class__)
def post_run(self):
	# record output-file signatures so the build considers them up to date
	for x in self.outputs:
		h=Utils.h_file(x.abspath(self.env))
		self.generator.bld.node_sigs[self.env.variant()][x.id]=h
def runnable_status(self):
	# tasks patched with this method must always run
	return Constants.RUN_ME
# task-type and feature registrations for this module
Task.task_type_from_func('copy',vars=[],func=action_process_file_func)
TaskGen.task_gen.classes['command-output']=cmd_output_taskgen

feature('cmd')(apply_cmd)
feature('copy')(apply_copy)
before('apply_core')(apply_copy)
feature('subst')(apply_subst)
before('apply_core')(apply_subst)
feature('command-output')(init_cmd_output)
feature('command-output')(apply_cmd_output)
after('init_cmd_output')(apply_cmd_output)
diff --git a/wafadmin/Tools/preproc.py b/wafadmin/Tools/preproc.py
new file mode 100644
index 0000000..d64b55b
--- /dev/null
+++ b/wafadmin/Tools/preproc.py
@@ -0,0 +1,598 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+import re,sys,os,string
+import Logs,Build,Utils
+from Logs import debug,error
+import traceback
class PreprocError(Utils.WafError):
	"""Raised for any C-preprocessor parsing problem."""
	pass
# marker pushed on the line queue when a file has been fully processed
POPFILE='-'
# maximum include nesting depth
recursion_limit=100
go_absolute=0
standard_includes=['/usr/include']
if sys.platform=="win32":
	standard_includes=[]
use_trigraphs=0
'apply the trigraph rules first'
strict_quotes=0
# iso646-style alternative operator spellings
g_optrans={'not':'!','and':'&&','bitand':'&','and_eq':'&=','or':'||','bitor':'|','or_eq':'|=','xor':'^','xor_eq':'^=','compl':'~',}
# one match per preprocessor directive line: (hash, keyword, rest)
re_lines=re.compile('^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',re.IGNORECASE|re.MULTILINE)
re_mac=re.compile("^[a-zA-Z_]\w*")
re_fun=re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]')
re_pragma_once=re.compile('^\s*once\s*',re.IGNORECASE)
# backslash line continuations
re_nl=re.compile('\\\\\r*\n',re.MULTILINE)
# comments and string/char literals (used to strip comments)
re_cpp=re.compile(r"""(/\*[^*]*\*+([^/*][^*]*\*+)*/)|//[^\n]*|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^/"'\\]*)""",re.MULTILINE)
trig_def=[('??'+a,b)for a,b in zip("=-/!'()<>",r'#~\|^[]{}')]
chr_esc={'0':0,'a':7,'b':8,'t':9,'n':10,'f':11,'v':12,'r':13,'\\':92,"'":39}
# token kind tags used throughout the parser
NUM='i'
OP='O'
IDENT='T'
STR='s'
CHAR='c'
tok_types=[NUM,STR,IDENT,OP]
exp_types=[r"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""",r'L?"([^"\\]|\\.)*"',r'[a-zA-Z_]\w*',r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',]
re_clexer=re.compile('|'.join(["(?P<%s>%s)"%(name,part)for name,part in zip(tok_types,exp_types)]),re.M)
# conditional-compilation states for the #if/#else stack
accepted='a'
ignored='i'
undefined='u'
skipped='s'
def repl(m):
	"""re_cpp replacement: comments collapse to a space, code/strings are kept."""
	comment=m.group(1)
	if comment is not None:
		return' '
	code=m.group(3)
	if code is None:
		return''
	return code
def filter_comments(filename):
	"""Return the preprocessor directives of *filename* as (keyword, rest) pairs.

	Strips comments, joins continuation lines and (optionally) decodes
	trigraphs first.
	"""
	code=Utils.readf(filename)
	if use_trigraphs:
		# bug fix: the original 'code.split(a).join(b)' raised AttributeError
		# (a list has no .join); replace each trigraph with its character
		for(a,b)in trig_def:code=b.join(code.split(a))
	code=re_nl.sub('',code)
	code=re_cpp.sub(repl,code)
	return[(m.group(2),m.group(3))for m in re.finditer(re_lines,code)]
# operator precedence table: lower row index = tighter binding
prec={}
ops=['* / %','+ -','<< >>','< <= >= >','== !=','& | ^','&& ||',',']
for x in range(len(ops)):
	syms=ops[x]
	for u in syms.split():
		prec[u]=x
+def reduce_nums(val_1,val_2,val_op):
+	try:a=0+val_1
+	except TypeError:a=int(val_1)
+	try:b=0+val_2
+	except TypeError:b=int(val_2)
+	d=val_op
+	if d=='%':c=a%b
+	elif d=='+':c=a+b
+	elif d=='-':c=a-b
+	elif d=='*':c=a*b
+	elif d=='/':c=a/b
+	elif d=='^':c=a^b
+	elif d=='|':c=a|b
+	elif d=='||':c=int(a or b)
+	elif d=='&':c=a&b
+	elif d=='&&':c=int(a and b)
+	elif d=='==':c=int(a==b)
+	elif d=='!=':c=int(a!=b)
+	elif d=='<=':c=int(a<=b)
+	elif d=='<':c=int(a<b)
+	elif d=='>':c=int(a>b)
+	elif d=='>=':c=int(a>=b)
+	elif d=='^':c=int(a^b)
+	elif d=='<<':c=a<<b
+	elif d=='>>':c=a>>b
+	else:c=0
+	return c
def get_num(lst):
	"""Consume one operand (number, parenthesised expression or unary op)
	from the token list; return (value, remaining tokens)."""
	if not lst:raise PreprocError("empty list for get_num")
	(p,v)=lst[0]
	if p==OP:
		if v=='(':
			# locate the matching closing parenthesis
			count_par=1
			i=1
			while i<len(lst):
				(p,v)=lst[i]
				if p==OP:
					if v==')':
						count_par-=1
						if count_par==0:
							break
					elif v=='(':
						count_par+=1
				i+=1
			else:
				raise PreprocError("rparen expected %r"%lst)
			(num,_)=get_term(lst[1:i])
			return(num,lst[i+1:])
		elif v=='+':
			return get_num(lst[1:])
		elif v=='-':
			num,lst=get_num(lst[1:])
			return(reduce_nums('-1',num,'*'),lst)
		elif v=='!':
			num,lst=get_num(lst[1:])
			return(int(not int(num)),lst)
		elif v=='~':
			# bug fix: the original read 'num' before assigning it here
			num,lst=get_num(lst[1:])
			return(~int(num),lst)
		else:
			raise PreprocError("invalid op token %r for get_num"%lst)
	elif p==NUM:
		return v,lst[1:]
	elif p==IDENT:
		# unknown identifiers evaluate to 0 in preprocessor expressions
		return 0,lst[1:]
	else:
		raise PreprocError("invalid token %r for get_num"%lst)
def get_term(lst):
	"""Reduce a full token list to a single value, handling operator
	precedence, short-circuit &&/||, the comma and ?: operators."""
	if not lst:raise PreprocError("empty list for get_term")
	num,lst=get_num(lst)
	if not lst:
		return(num,[])
	(p,v)=lst[0]
	if p==OP:
		if v=='&&'and not num:
			return(num,[])
		elif v=='||'and num:
			return(num,[])
		elif v==',':
			# comma operator: discard the left value
			return get_term(lst[1:])
		elif v=='?':
			# locate the ':' of this conditional at parenthesis depth 0
			count_par=0
			i=1
			while i<len(lst):
				(p,v)=lst[i]
				if p==OP:
					if v==')':
						count_par-=1
					elif v=='(':
						count_par+=1
					elif v==':':
						if count_par==0:
							break
				i+=1
			else:
				raise PreprocError("rparen expected %r"%lst)
			if int(num):
				return get_term(lst[1:i])
			else:
				return get_term(lst[i+1:])
		else:
			num2,lst=get_num(lst[1:])
			if not lst:
				num2=reduce_nums(num,num2,v)
				return get_term([(NUM,num2)]+lst)
			p2,v2=lst[0]
			if p2!=OP:
				raise PreprocError("op expected %r"%lst)
			# reduce now or defer depending on the next operator's precedence
			if prec[v2]>=prec[v]:
				num2=reduce_nums(num,num2,v)
				return get_term([(NUM,num2)]+lst)
			else:
				num3,lst=get_num(lst[1:])
				num3=reduce_nums(num2,num3,v2)
				return get_term([(NUM,num),(p,v),(NUM,num3)]+lst)
	raise PreprocError("cannot reduce %r"%lst)
def reduce_eval(lst):
	"""Fully evaluate a token list; return a single NUM token."""
	num,lst=get_term(lst)
	return(NUM,num)
def stringize(lst):
	"""Concatenate the token values of *lst* into one string (the '#' operator)."""
	return"".join(str(val)for _,val in lst)
def paste_tokens(t1,t2):
	"""Merge two tokens for the '##' operator; raise when the kinds clash."""
	kind=None
	a,b=t1[0],t2[0]
	if a==OP and b==OP:
		kind=OP
	elif a==IDENT and b in(IDENT,NUM):
		kind=IDENT
	elif a==NUM and b==NUM:
		kind=NUM
	if not kind:
		raise PreprocError('tokens do not make a valid paste %r and %r'%(t1,t2))
	return(kind,t1[1]+t2[1])
def reduce_tokens(lst,defs,ban=[]):
	"""Expand macros and defined() occurrences in *lst* in place.

	defs maps macro names to their definitions (lazily parsed from strings);
	ban carries the names already being expanded.
	NOTE(review): 'ban' is passed down but never consulted here, and it is a
	mutable default argument -- kept as-is.
	"""
	i=0
	while i<len(lst):
		(p,v)=lst[i]
		if p==IDENT and v=="defined":
			# defined X / defined(X) -> 1 or 0
			del lst[i]
			if i<len(lst):
				(p2,v2)=lst[i]
				if p2==IDENT:
					if v2 in defs:
						lst[i]=(NUM,1)
					else:
						lst[i]=(NUM,0)
				elif p2==OP and v2=='(':
					del lst[i]
					(p2,v2)=lst[i]
					del lst[i]
					if v2 in defs:
						lst[i]=(NUM,1)
					else:
						lst[i]=(NUM,0)
				else:
					raise PreprocError("invalid define expression %r"%lst)
		elif p==IDENT and v in defs:
			# lazily turn the raw #define line into a parsed macro
			if isinstance(defs[v],str):
				a,b=extract_macro(defs[v])
				defs[v]=b
			macro_def=defs[v]
			to_add=macro_def[1]
			if isinstance(macro_def[0],list):
				# object-like macro: splice the replacement tokens in place
				del lst[i]
				for x in xrange(len(to_add)):
					lst.insert(i,to_add[x])
					i+=1
			else:
				# function-like macro: collect the call arguments first
				args=[]
				del lst[i]
				if i>=len(lst):
					raise PreprocError("expected '(' after %r (got nothing)"%v)
				(p2,v2)=lst[i]
				if p2!=OP or v2!='(':
					raise PreprocError("expected '(' after %r"%v)
				del lst[i]
				one_param=[]
				count_paren=0
				while i<len(lst):
					p2,v2=lst[i]
					del lst[i]
					if p2==OP and count_paren==0:
						if v2=='(':
							one_param.append((p2,v2))
							count_paren+=1
						elif v2==')':
							if one_param:args.append(one_param)
							break
						elif v2==',':
							if not one_param:raise PreprocError("empty param in funcall %s"%p)
							args.append(one_param)
							one_param=[]
						else:
							one_param.append((p2,v2))
					else:
						one_param.append((p2,v2))
						if v2=='(':count_paren+=1
						elif v2==')':count_paren-=1
				else:
					raise PreprocError('malformed macro')
				# substitute parameters into the macro body, handling # and ##
				accu=[]
				arg_table=macro_def[0]
				j=0
				while j<len(to_add):
					(p2,v2)=to_add[j]
					if p2==OP and v2=='#':
						# stringification of a parameter
						if j+1<len(to_add)and to_add[j+1][0]==IDENT and to_add[j+1][1]in arg_table:
							toks=args[arg_table[to_add[j+1][1]]]
							accu.append((STR,stringize(toks)))
							j+=1
						else:
							accu.append((p2,v2))
					elif p2==OP and v2=='##':
						# token pasting, including the __VA_ARGS__ case
						if accu and j+1<len(to_add):
							t1=accu[-1]
							if to_add[j+1][0]==IDENT and to_add[j+1][1]in arg_table:
								toks=args[arg_table[to_add[j+1][1]]]
								if toks:
									accu[-1]=paste_tokens(t1,toks[0])
									accu.extend(toks[1:])
								else:
									accu.append((p2,v2))
									accu.extend(toks)
							elif to_add[j+1][0]==IDENT and to_add[j+1][1]=='__VA_ARGS__':
								va_toks=[]
								st=len(macro_def[0])
								pt=len(args)
								for x in args[pt-st+1:]:
									va_toks.extend(x)
									va_toks.append((OP,','))
								if va_toks:va_toks.pop()
								if len(accu)>1:
									(p3,v3)=accu[-1]
									(p4,v4)=accu[-2]
									if v3=='##':
										accu.pop()
										if v4==','and pt<st:
											accu.pop()
								accu+=va_toks
							else:
								accu[-1]=paste_tokens(t1,to_add[j+1])
							j+=1
						else:
							accu.append((p2,v2))
					elif p2==IDENT and v2 in arg_table:
						# plain parameter: expand its argument tokens first
						toks=args[arg_table[v2]]
						reduce_tokens(toks,defs,ban+[v])
						accu.extend(toks)
					else:
						accu.append((p2,v2))
					j+=1
				# rescan the fully substituted body, then splice it in
				reduce_tokens(accu,defs,ban+[v])
				for x in xrange(len(accu)-1,-1,-1):
					lst.insert(i,accu[x])
		i+=1
def eval_macro(lst,adefs):
	"""Expand macros in *lst* then evaluate it; True when the result is non-zero."""
	reduce_tokens(lst,adefs,[])
	if not lst:raise PreprocError("missing tokens to evaluate")
	(p,v)=reduce_eval(lst)
	return int(v)!=0
def extract_macro(txt):
	"""Parse a #define body.

	Returns (name, [param_table, token_list]) for function-like macros,
	or (name, [[], token_list]) for object-like macros.
	"""
	t=tokenize(txt)
	if re_fun.search(txt):
		p,name=t[0]
		p,v=t[1]
		if p!=OP:raise PreprocError("expected open parenthesis")
		i=1
		pindex=0
		params={}
		prev='('
		# small state machine over "( IDENT , IDENT ... )"
		while 1:
			i+=1
			p,v=t[i]
			if prev=='(':
				if p==IDENT:
					params[v]=pindex
					pindex+=1
					prev=p
				elif p==OP and v==')':
					break
				else:
					raise PreprocError("unexpected token (3)")
			elif prev==IDENT:
				if p==OP and v==',':
					prev=v
				elif p==OP and v==')':
					break
				else:
					raise PreprocError("comma or ... expected")
			elif prev==',':
				if p==IDENT:
					params[v]=pindex
					pindex+=1
					prev=p
				elif p==OP and v=='...':
					raise PreprocError("not implemented (1)")
				else:
					raise PreprocError("comma or ... expected (2)")
			elif prev=='...':
				raise PreprocError("not implemented (2)")
			else:
				raise PreprocError("unexpected else")
		return(name,[params,t[i+1:]])
	else:
		(p,v)=t[0]
		# object-like macro: no parameter table
		return(v,[[],t[1:]])
re_include=re.compile(r'^\s*(<(?P<a>.*)>|"(?P<b>.*)")')
def extract_include(txt,defs):
	"""Return (kind, path) for an include directive body; kind is '<' or '"'."""
	m=re_include.search(txt)
	if m:
		if m.group('a'):return'<',m.group('a')
		if m.group('b'):return'"',m.group('b')
	# computed include: expand macros then re-inspect the tokens
	toks=tokenize(txt)
	reduce_tokens(toks,defs,['waf_include'])
	if not toks:
		raise PreprocError("could not parse include %s"%txt)
	if len(toks)==1:
		if toks[0][0]==STR:
			return'"',toks[0][1]
	else:
		if toks[0][1]=='<'and toks[-1][1]=='>':
			# bug fix: callers unpack (kind, path); the kind was missing here
			return'<',stringize(toks).lstrip('<').rstrip('>')
	raise PreprocError("could not parse include %s."%txt)
def parse_char(txt):
	"""Return the numeric value of a C character-literal body (quotes removed).

	Plain characters map to ord(); '\\x..' hex and octal escapes are decoded;
	named escapes come from chr_esc.
	NOTE(review): the octal branch returns a (length, value) tuple unlike the
	other branches -- kept as-is in case callers rely on it; confirm upstream.
	"""
	if not txt:raise PreprocError("attempted to parse a null char")
	if txt[0]!='\\':
		return ord(txt)
	c=txt[1]
	if c=='x':
		# the original special-cased len==4 with an identical return; folded
		return int(txt[2:],16)
	elif c.isdigit():
		if c=='0'and len(txt)==2:return 0
		for i in 3,2,1:
			if len(txt)>i and txt[1:1+i].isdigit():
				return(1+i,int(txt[1:1+i],8))
	else:
		try:return chr_esc[c]
		except KeyError:raise PreprocError("could not parse char literal '%s'"%txt)
def tokenize(s):
	"""Convert a C expression string into a list of (kind, value) tokens."""
	ret=[]
	for match in re_clexer.finditer(s):
		m=match.group
		for name in tok_types:
			v=m(name)
			if v:
				if name==IDENT:
					# iso646 alternate operators and true/false literals
					try:v=g_optrans[v];name=OP
					except KeyError:
						if v.lower()=="true":
							v=1
							name=NUM
						elif v.lower()=="false":
							v=0
							name=NUM
				elif name==NUM:
					# normalise numeric literals; char literals become codes
					if m('oct'):v=int(v,8)
					elif m('hex'):v=int(m('hex'),16)
					elif m('n0'):v=m('n0')
					else:
						v=m('char')
						if v:v=parse_char(v)
						else:v=m('n2')or m('n4')
				elif name==OP:
					# digraph spellings
					if v=='%:':v='#'
					elif v=='%:%:':v='##'
				elif name==STR:
					# strip the surrounding quotes
					v=v[1:-1]
				ret.append((name,v))
				break
	return ret
class c_parser(object):
	"""Scanner building the #include dependency graph of a C/C++ file."""
	def __init__(self,nodepaths=None,defines=None):
		self.lines=[]
		# macro table; copied so the caller's dict is not mutated
		if defines is None:
			self.defs={}
		else:
			self.defs=dict(defines)
		self.state=[]
		self.env=None
		# guards against runaway recursive inclusion
		self.count_files=0
		self.currentnode_stack=[]
		self.nodepaths=nodepaths or[]
		# results: resolved dependency nodes and unresolved include names
		self.nodes=[]
		self.names=[]
		self.curfile=''
		self.ban_includes=[]
	def tryfind(self,filename):
		"""Resolve an include: current directory first, then the search paths."""
		self.curfile=filename
		found=self.currentnode_stack[-1].find_resource(filename)
		for n in self.nodepaths:
			if found:
				break
			found=n.find_resource(filename)
		if not found:
			if not filename in self.names:
				self.names.append(filename)
		else:
			self.nodes.append(found)
			# .moc files are generated later; do not scan them
			if filename[-4:]!='.moc':
				self.addlines(found)
		return found
	def addlines(self,node):
		"""Queue the directives of *node* (cached per build) for processing."""
		self.currentnode_stack.append(node.parent)
		filepath=node.abspath(self.env)
		self.count_files+=1
		if self.count_files>recursion_limit:raise PreprocError("recursion limit exceeded")
		pc=self.parse_cache
		debug('preproc: reading file %r',filepath)
		try:
			lns=pc[filepath]
		except KeyError:
			pass
		else:
			# already parsed once during this build
			self.lines=lns+self.lines
			return
		try:
			lines=filter_comments(filepath)
			lines.append((POPFILE,''))
			pc[filepath]=lines
			self.lines=lines+self.lines
		except IOError:
			raise PreprocError("could not read the file %s"%filepath)
		except Exception:
			if Logs.verbose>0:
				error("parsing %s failed"%filepath)
				traceback.print_exc()
	def start(self,node,env):
		"""Run the scan from *node*; results land in self.nodes / self.names."""
		debug('preproc: scanning %s (in %s)',node.name,node.parent.name)
		self.env=env
		variant=node.variant(env)
		bld=node.__class__.bld
		try:
			self.parse_cache=bld.parse_cache
		except AttributeError:
			bld.parse_cache={}
			self.parse_cache=bld.parse_cache
		self.addlines(node)
		# command-line defines are processed before any file content
		if env['DEFLINES']:
			self.lines=[('define',x)for x in env['DEFLINES']]+self.lines
		while self.lines:
			(kind,line)=self.lines.pop(0)
			if kind==POPFILE:
				self.currentnode_stack.pop()
				continue
			try:
				self.process_line(kind,line)
			# NOTE(review): Python 2-only except syntax
			except Exception,e:
				if Logs.verbose:
					debug('preproc: line parsing failed (%s): %s %s',e,line,Utils.ex_stack())
	def process_line(self,token,line):
		"""Interpret one directive, tracking #if/#else nesting in self.state."""
		ve=Logs.verbose
		if ve:debug('preproc: line is %s - %s state is %s',token,line,self.state)
		state=self.state
		if token in['ifdef','ifndef','if']:
			state.append(undefined)
		elif token=='endif':
			state.pop()
		# inside a skipped/ignored region only branch keywords matter
		if not token in['else','elif','endif']:
			if skipped in self.state or ignored in self.state:
				return
		if token=='if':
			ret=eval_macro(tokenize(line),self.defs)
			if ret:state[-1]=accepted
			else:state[-1]=ignored
		elif token=='ifdef':
			m=re_mac.search(line)
			if m and m.group(0)in self.defs:state[-1]=accepted
			else:state[-1]=ignored
		elif token=='ifndef':
			m=re_mac.search(line)
			if m and m.group(0)in self.defs:state[-1]=ignored
			else:state[-1]=accepted
		elif token=='include'or token=='import':
			(kind,inc)=extract_include(line,self.defs)
			if inc in self.ban_includes:return
			if token=='import':self.ban_includes.append(inc)
			if ve:debug('preproc: include found %s    (%s) ',inc,kind)
			if kind=='"'or not strict_quotes:
				self.tryfind(inc)
		elif token=='elif':
			if state[-1]==accepted:
				state[-1]=skipped
			elif state[-1]==ignored:
				if eval_macro(tokenize(line),self.defs):
					state[-1]=accepted
		elif token=='else':
			if state[-1]==accepted:state[-1]=skipped
			elif state[-1]==ignored:state[-1]=accepted
		elif token=='define':
			m=re_mac.search(line)
			if m:
				name=m.group(0)
				if ve:debug('preproc: define %s   %s',name,line)
				# stored raw; parsed lazily by reduce_tokens/extract_macro
				self.defs[name]=line
			else:
				raise PreprocError("invalid define line %s"%line)
		elif token=='undef':
			m=re_mac.search(line)
			if m and m.group(0)in self.defs:
				self.defs.__delitem__(m.group(0))
		elif token=='pragma':
			if re_pragma_once.search(line.lower()):
				self.ban_includes.append(self.curfile)
def get_deps(node,env,nodepaths=[]):
	"""Scan *node* and return (dependency nodes, unresolved include names)."""
	gruik=c_parser(nodepaths)
	gruik.start(node,env)
	return(gruik.nodes,gruik.names)
re_inc=re.compile('^[ \t]*(#|%:)[ \t]*(include)[ \t]*(.*)\r*$',re.IGNORECASE|re.MULTILINE)
def lines_includes(filename):
	"""Return only the #include directives of *filename* as (keyword, rest) pairs."""
	code=Utils.readf(filename)
	if use_trigraphs:
		# bug fix: the original 'code.split(a).join(b)' raised AttributeError
		# (a list has no .join); replace each trigraph with its character
		for(a,b)in trig_def:code=b.join(code.split(a))
	code=re_nl.sub('',code)
	code=re_cpp.sub(repl,code)
	return[(m.group(2),m.group(3))for m in re.finditer(re_inc,code)]
def get_deps_simple(node,env,nodepaths=[],defines={}):
	"""Collect include dependencies of *node* without conditional evaluation.

	Returns (nodes, names): resolved nodes and unresolved include names.
	NOTE: the mutable default arguments are shared across calls; callers
	are expected to pass them explicitly.
	"""
	nodes=[]
	names=[]
	def find_deps(node):
		lst=lines_includes(node.abspath(env))
		for(_,line)in lst:
			(t,filename)=extract_include(line,defines)
			if filename in names:
				continue
			if filename.endswith('.moc'):
				names.append(filename)
			found=None
			for n in nodepaths:
				if found:
					break
				found=n.find_resource(filename)
			if not found:
				if not filename in names:
					names.append(filename)
			elif not found in nodes:
				nodes.append(found)
				# bug fix: recurse into the file just found; the original
				# re-scanned the same node, so headers' own includes were
				# never visited
				find_deps(found)
	find_deps(node)
	return(nodes,names)
+
diff --git a/wafadmin/Tools/python.py b/wafadmin/Tools/python.py
new file mode 100644
index 0000000..3ec444b
--- /dev/null
+++ b/wafadmin/Tools/python.py
@@ -0,0 +1,278 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+import os,sys
+import TaskGen,Utils,Utils,Runner,Options,Build
+from Logs import debug,warn,info
+from TaskGen import extension,taskgen,before,after,feature
+from Configure import conf
+EXT_PY=['.py']
# minimal C program used by check_python_headers to verify that the
# python embedding headers and libraries are usable (it must compile
# and link against Py_Initialize/Py_Finalize)
FRAG_2='''
#include "Python.h"
#ifdef __cplusplus
extern "C" {
#endif
	void Py_Initialize(void);
	void Py_Finalize(void);
#ifdef __cplusplus
}
#endif
int main()
{
   Py_Initialize();
   Py_Finalize();
   return 0;
}
'''
def init_pyext(self):
	"""Prepare a task generator building a python extension: install
	under ${PYTHONDIR}, add the PYEXT uselib variable and enable the
	osx bundle mode."""
	self.default_install_path='${PYTHONDIR}'
	uselib=self.to_list(getattr(self,'uselib',''))
	if'PYEXT'not in uselib:
		uselib.append('PYEXT')
	self.uselib=uselib
	self.env['MACBUNDLE']=True
def pyext_shlib_ext(self):
	"""Give shared libraries the python extension pattern (for example
	'%s.so' or '%s.pyd') instead of the default shlib pattern."""
	pattern=self.env['pyext_PATTERN']
	self.env['shlib_PATTERN']=pattern
def init_pyembed(self):
	"""Add the PYEMBED uselib variable to task generators that embed a
	python interpreter."""
	lst=self.to_list(getattr(self,'uselib',''))
	if'PYEMBED'not in lst:
		lst.append('PYEMBED')
	self.uselib=lst
def process_py(self,node):
	"""Extension handler for .py files: when installing, defer the
	installation (and byte-compilation) of the file until after the
	build has completed."""
	if self.bld.is_install and self.install_path:
		def inst_py(ctx):
			# executed as a post-build function
			install_pyfile(self,node)
		self.bld.add_post_fun(inst_py)
def _byte_compile(self,path,flags,ext):
	"""Byte-compile the installed file *path* to path+ext by running the
	configured python interpreter with the given extra *flags*."""
	program=("""
import sys, py_compile
for pyfile in sys.argv[1:]:
	py_compile.compile(pyfile, pyfile + '%s')
"""%ext)
	argv=[self.env['PYTHON']]+flags+['-c',program,path]
	ret=Utils.pproc.Popen(argv).wait()
	if ret:
		raise Utils.WafError('bytecode compilation failed %r'%path)
def install_pyfile(self,node):
	"""Install a .py source file, then byte-compile it (.pyc/.pyo) on
	installation, or remove the compiled files on uninstallation.

	@param node: the .py source node to install
	@raise Utils.WafError: when byte compilation fails
	"""
	path=self.bld.get_install_path(self.install_path+os.sep+node.name,self.env)
	self.bld.install_files(self.install_path,[node],self.env,self.chmod,postpone=False)
	if self.bld.is_install<0:
		# uninstalling: drop the compiled files next to the source
		info("* removing byte compiled python files")
		for x in'co':
			try:
				os.remove(path+x)
			except OSError:
				pass
	if self.bld.is_install>0:
		if self.env['PYC']or self.env['PYO']:
			info("* byte compiling %r"%path)
		# the two compilation steps were duplicated inline before; the
		# helper builds the same interpreter command for both
		if self.env['PYC']:
			_byte_compile(self,path,[],'c')
		if self.env['PYO']:
			_byte_compile(self,path,[self.env['PYFLAGS_OPT']],'o')
class py_taskgen(TaskGen.task_gen):
	# task generator class backing the 'py' feature; it adds nothing to
	# the base class, the subclass only provides the name used by wscripts
	def __init__(self,*k,**kw):
		TaskGen.task_gen.__init__(self,*k,**kw)
def init_py(self):
	"""Set the default installation directory for python source files."""
	self.default_install_path='${PYTHONDIR}'
def _get_python_variables(python_exe,variables,imports=['import sys']):
	"""Run *python_exe* to print repr() of each expression in *variables*
	and parse the printed values back.

	@param python_exe: path of the python interpreter to query
	@param variables: list of expressions to evaluate in that interpreter
	@param imports: statements executed before the expressions
	@return: list of parsed values (str, int or None)
	@raise RuntimeError: when the interpreter exits with an error
	"""
	program=list(imports)
	program.append('')
	for v in variables:
		program.append("print(repr(%s))"%v)
	os_env=dict(os.environ)
	try:
		# this variable confuses the distutils queries on osx
		del os_env['MACOSX_DEPLOYMENT_TARGET']
	except KeyError:
		pass
	proc=Utils.pproc.Popen([python_exe,"-c",'\n'.join(program)],stdout=Utils.pproc.PIPE,env=os_env)
	output=proc.communicate()[0].split("\n")
	if proc.returncode:
		if Options.options.verbose:
			warn("Python program to extract python configuration variables failed:\n%s"%'\n'.join(["line %03i: %s"%(lineno+1,line)for lineno,line in enumerate(program)]))
		raise RuntimeError
	return_values=[]
	for s in output:
		s=s.strip()
		if not s:
			continue
		# each line was printed with repr(): None, a 'string' or an int
		if s=='None':
			return_values.append(None)
		elif s[0]=="'"and s[-1]=="'":
			return_values.append(s[1:-1])
		elif s[0].isdigit():
			return_values.append(int(s))
		else:break
	return return_values
def check_python_headers(conf,mandatory=True):
	"""Check for the python development headers and libraries, and set
	the PYEXT/PYEMBED uselib variables (include paths, library paths,
	libraries and compiler flags).

	@param conf: the configuration context
	@param mandatory: fail the configuration if the headers are missing
	"""
	if not conf.env['CC_NAME']and not conf.env['CXX_NAME']:
		conf.fatal('load a compiler first (gcc, g++, ..)')
	if not conf.env['PYTHON_VERSION']:
		conf.check_python_version()
	env=conf.env
	python=env['PYTHON']
	if not python:
		conf.fatal('could not find the python executable')
	if Options.platform=='darwin':
		conf.check_tool('osx')
	# query distutils for the build configuration of the interpreter
	try:
		v='prefix SO SYSLIBS LDFLAGS SHLIBS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET'.split()
		(python_prefix,python_SO,python_SYSLIBS,python_LDFLAGS,python_SHLIBS,python_LIBDIR,python_LIBPL,INCLUDEPY,Py_ENABLE_SHARED,python_MACOSX_DEPLOYMENT_TARGET)=_get_python_variables(python,["get_config_var('%s')"%x for x in v],['from distutils.sysconfig import get_config_var'])
	except RuntimeError:
		conf.fatal("Python development headers not found (-v for details).")
	conf.log.write("""Configuration returned from %r:
python_prefix = %r
python_SO = %r
python_SYSLIBS = %r
python_LDFLAGS = %r
python_SHLIBS = %r
python_LIBDIR = %r
python_LIBPL = %r
INCLUDEPY = %r
Py_ENABLE_SHARED = %r
MACOSX_DEPLOYMENT_TARGET = %r
"""%(python,python_prefix,python_SO,python_SYSLIBS,python_LDFLAGS,python_SHLIBS,python_LIBDIR,python_LIBPL,INCLUDEPY,Py_ENABLE_SHARED,python_MACOSX_DEPLOYMENT_TARGET))
	if python_MACOSX_DEPLOYMENT_TARGET:
		conf.env['MACOSX_DEPLOYMENT_TARGET']=python_MACOSX_DEPLOYMENT_TARGET
		conf.environ['MACOSX_DEPLOYMENT_TARGET']=python_MACOSX_DEPLOYMENT_TARGET
	# python extensions use the interpreter's shared-object suffix
	env['pyext_PATTERN']='%s'+python_SO
	if python_SYSLIBS is not None:
		for lib in python_SYSLIBS.split():
			if lib.startswith('-l'):
				lib=lib[2:]
			env.append_value('LIB_PYEMBED',lib)
	if python_SHLIBS is not None:
		for lib in python_SHLIBS.split():
			if lib.startswith('-l'):
				env.append_value('LIB_PYEMBED',lib[2:])
			else:
				env.append_value('LINKFLAGS_PYEMBED',lib)
	if Options.platform!='darwin'and python_LDFLAGS:
		env.append_value('LINKFLAGS_PYEMBED',python_LDFLAGS.split())
	# locate the python library, trying several directories and names
	result=False
	name='python'+env['PYTHON_VERSION']
	if python_LIBDIR is not None:
		path=[python_LIBDIR]
		conf.log.write("\n\n# Trying LIBDIR: %r\n"%path)
		result=conf.check(lib=name,uselib='PYEMBED',libpath=path)
	if not result and python_LIBPL is not None:
		conf.log.write("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n")
		path=[python_LIBPL]
		result=conf.check(lib=name,uselib='PYEMBED',libpath=path)
	if not result:
		conf.log.write("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n")
		path=[os.path.join(python_prefix,"libs")]
		name='python'+env['PYTHON_VERSION'].replace('.','')
		result=conf.check(lib=name,uselib='PYEMBED',libpath=path)
	if result:
		env['LIBPATH_PYEMBED']=path
		env.append_value('LIB_PYEMBED',name)
	else:
		conf.log.write("\n\n### LIB NOT FOUND\n")
	# extensions must link against libpython on these platforms
	if(sys.platform=='win32'or sys.platform.startswith('os2')or sys.platform=='darwin'or Py_ENABLE_SHARED):
		env['LIBPATH_PYEXT']=env['LIBPATH_PYEMBED']
		env['LIB_PYEXT']=env['LIB_PYEMBED']
	# prefer python-config for the include paths when it is available
	python_config=conf.find_program('python%s-config'%('.'.join(env['PYTHON_VERSION'].split('.')[:2])),var='PYTHON_CONFIG')
	if not python_config:
		python_config=conf.find_program('python-config-%s'%('.'.join(env['PYTHON_VERSION'].split('.')[:2])),var='PYTHON_CONFIG')
	includes=[]
	if python_config:
		for incstr in Utils.cmd_output("%s %s --includes"%(python,python_config)).strip().split():
			if(incstr.startswith('-I')or incstr.startswith('/I')):
				incstr=incstr[2:]
			if incstr not in includes:
				includes.append(incstr)
		conf.log.write("Include path for Python extensions ""(found via python-config --includes): %r\n"%(includes,))
		env['CPPPATH_PYEXT']=includes
		env['CPPPATH_PYEMBED']=includes
	else:
		conf.log.write("Include path for Python extensions ""(found via distutils module): %r\n"%(INCLUDEPY,))
		env['CPPPATH_PYEXT']=[INCLUDEPY]
		env['CPPPATH_PYEMBED']=[INCLUDEPY]
	if env['CC_NAME']=='gcc':
		env.append_value('CCFLAGS_PYEMBED','-fno-strict-aliasing')
		env.append_value('CCFLAGS_PYEXT','-fno-strict-aliasing')
	if env['CXX_NAME']=='gcc':
		env.append_value('CXXFLAGS_PYEMBED','-fno-strict-aliasing')
		env.append_value('CXXFLAGS_PYEXT','-fno-strict-aliasing')
	# final sanity check: compile and link a tiny embedding program
	conf.check(define_name='HAVE_PYTHON_H',uselib='PYEMBED',fragment=FRAG_2,errmsg='Could not find the python development headers',mandatory=mandatory)
def check_python_version(conf,minver=None):
	"""Check that the configured python interpreter is recent enough and
	compute PYTHON_VERSION and PYTHONDIR.

	@param conf: the configuration context
	@param minver: minimum version as a tuple, e.g. (2,4), or None
	"""
	assert minver is None or isinstance(minver,tuple)
	python=conf.env['PYTHON']
	if not python:
		conf.fatal('could not find the python executable')
	# ask the interpreter itself; sys.version_info has 5 components
	cmd=[python,"-c","import sys\nfor x in sys.version_info: print(str(x))"]
	debug('python: Running python command %r'%cmd)
	proc=Utils.pproc.Popen(cmd,stdout=Utils.pproc.PIPE)
	lines=proc.communicate()[0].split()
	assert len(lines)==5,"found %i lines, expected 5: %r"%(len(lines),lines)
	pyver_tuple=(int(lines[0]),int(lines[1]),int(lines[2]),lines[3],int(lines[4]))
	result=(minver is None)or(pyver_tuple>=minver)
	if result:
		pyver='.'.join([str(x)for x in pyver_tuple[:2]])
		conf.env['PYTHON_VERSION']=pyver
		# PYTHONDIR may be forced from the environment, otherwise ask
		# distutils for the site-packages location under PREFIX
		if'PYTHONDIR'in conf.environ:
			pydir=conf.environ['PYTHONDIR']
		else:
			if sys.platform=='win32':
				(python_LIBDEST,pydir)=_get_python_variables(python,["get_config_var('LIBDEST')","get_python_lib(standard_lib=0, prefix=%r)"%conf.env['PREFIX']],['from distutils.sysconfig import get_config_var, get_python_lib'])
			else:
				python_LIBDEST=None
				(pydir,)=_get_python_variables(python,["get_python_lib(standard_lib=0, prefix=%r)"%conf.env['PREFIX']],['from distutils.sysconfig import get_config_var, get_python_lib'])
			if python_LIBDEST is None:
				if conf.env['LIBDIR']:
					python_LIBDEST=os.path.join(conf.env['LIBDIR'],"python"+pyver)
				else:
					python_LIBDEST=os.path.join(conf.env['PREFIX'],"lib","python"+pyver)
		if hasattr(conf,'define'):
			conf.define('PYTHONDIR',pydir)
		conf.env['PYTHONDIR']=pydir
	pyver_full='.'.join(map(str,pyver_tuple[:3]))
	if minver is None:
		conf.check_message_custom('Python version','',pyver_full)
	else:
		minver_str='.'.join(map(str,minver))
		conf.check_message('Python version',">= %s"%minver_str,result,option=pyver_full)
	if not result:
		conf.fatal('The python version is too old (%r)'%pyver_full)
def check_python_module(conf,module_name):
	"""Check whether *module_name* can be imported by the configured
	python interpreter; raise a configuration error when it cannot."""
	cmd=[conf.env['PYTHON'],"-c","import %s"%module_name]
	proc=Utils.pproc.Popen(cmd,stderr=Utils.pproc.PIPE,stdout=Utils.pproc.PIPE)
	result=not proc.wait()
	conf.check_message('Python module',module_name,result)
	if not result:
		conf.fatal('Could not find the python module %r'%module_name)
def detect(conf):
	"""Find the python interpreter and initialize the variables used for
	installing and byte-compiling python files."""
	if not conf.env.PYTHON:
		conf.env.PYTHON=sys.executable
	python=conf.find_program('python',var='PYTHON')
	if not python:
		conf.fatal('Could not find the path of the python executable')
	env=conf.env
	env['PYCMD']='"import sys, py_compile;py_compile.compile(sys.argv[1], sys.argv[2])"'
	env['PYFLAGS']=''
	env['PYFLAGS_OPT']='-O'
	# install .pyc/.pyo unless --nopyc/--nopyo were given
	env['PYC']=getattr(Options.options,'pyc',1)
	env['PYO']=getattr(Options.options,'pyo',1)
def set_options(opt):
	"""Add the --nopyc/--nopyo command-line options (installing the
	compiled files is the default for both)."""
	for flag,kind,dest in(('--nopyc','bytecode compiled .pyc','pyc'),('--nopyo','optimised compiled .pyo','pyo')):
		opt.add_option(flag,action='store_false',default=1,help='Do not install %s files (configuration) [Default:install]'%kind,dest=dest)
+
+before('apply_incpaths','apply_lib_vars','apply_type_vars')(init_pyext)
+feature('pyext')(init_pyext)
+before('apply_bundle')(init_pyext)
+before('apply_link','apply_lib_vars','apply_type_vars')(pyext_shlib_ext)
+after('apply_bundle')(pyext_shlib_ext)
+feature('pyext')(pyext_shlib_ext)
+before('apply_incpaths','apply_lib_vars','apply_type_vars')(init_pyembed)
+feature('pyembed')(init_pyembed)
+extension(EXT_PY)(process_py)
+before('apply_core')(init_py)
+after('vars_target_cprogram','vars_target_cshlib')(init_py)
+feature('py')(init_py)
+conf(check_python_headers)
+conf(check_python_version)
+conf(check_python_module)
diff --git a/wafadmin/Utils.py b/wafadmin/Utils.py
new file mode 100644
index 0000000..fd826ad
--- /dev/null
+++ b/wafadmin/Utils.py
@@ -0,0 +1,520 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+import os,sys,imp,string,errno,traceback,inspect,re,shutil,datetime,gc
# UserDict moved to collections in python 3
try:from UserDict import UserDict
except ImportError:from collections import UserDict
# use the standard subprocess module when recent enough, otherwise the
# bundled pproc backport
if sys.hexversion>=0x2060000 or os.name=='java':
	import subprocess as pproc
else:
	import pproc
import Logs
from Constants import*
# deque appeared in python 2.4; provide a list-based fallback
try:
	from collections import deque
except ImportError:
	class deque(list):
		def popleft(self):
			return self.pop(0)
is_win32=sys.platform=='win32'
# defaultdict appeared in python 2.5; provide a minimal fallback
try:
	from collections import defaultdict as DefaultDict
except ImportError:
	class DefaultDict(dict):
		def __init__(self,default_factory):
			super(DefaultDict,self).__init__()
			self.default_factory=default_factory
		def __getitem__(self,key):
			try:
				return super(DefaultDict,self).__getitem__(key)
			except KeyError:
				# create, store and return the default value
				value=self.default_factory()
				self[key]=value
				return value
class WafError(Exception):
	"""Base class for the errors raised by waf; it records the stack at
	the point of creation to help locate the faulty wscript."""
	def __init__(self,*args):
		self.args=args
		try:
			self.stack=traceback.extract_stack()
		except:
			pass
		Exception.__init__(self,*args)
	def __str__(self):
		# show a single argument bare, several as a tuple
		if len(self.args)==1 and self.args[0]:
			return str(self.args[0])
		return str(self.args)
class WscriptError(WafError):
	"""Error raised from a wscript; tries to determine the file and the
	line where the problem occurred."""
	def __init__(self,message,wscript_file=None):
		if wscript_file:
			self.wscript_file=wscript_file
			self.wscript_line=None
		else:
			try:
				self.wscript_file,self.wscript_line=self.locate_error()
			except:
				self.wscript_file=self.wscript_line=None
		# build a 'file:line:' prefix from whatever could be located
		prefix=''
		if self.wscript_file:
			prefix="%s:"%self.wscript_file
			if self.wscript_line:
				prefix+="%s:"%self.wscript_line
		WafError.__init__(self,"%s error: %s"%(prefix,message))
	def locate_error(self):
		# walk the stack from the innermost frame outwards and report
		# the first frame that belongs to a wscript file
		stack=traceback.extract_stack()
		stack.reverse()
		for frame in stack:
			file_name=os.path.basename(frame[0])
			if file_name==WSCRIPT_FILE or file_name==WSCRIPT_BUILD_FILE:
				return(frame[0],frame[1])
		return(None,None)
# progress line template: clear the console line before printing; on
# win32 the cursor is also moved up one line first
indicator=is_win32 and'\x1b[A\x1b[K%s%s%s\r'or'\x1b[K%s%s%s\r'
# select a file-hashing implementation: the optional fnv module when
# present, otherwise md5 from hashlib (or the old md5 module)
try:
	from fnv import new as md5
	import Constants
	Constants.SIG_NIL='signofnv'
	def h_file(filename):
		"""Return the fnv hash of the given file."""
		m=md5()
		try:
			m.hfile(filename)
			x=m.digest()
			if x is None:raise OSError("not a file")
			return x
		except SystemError:
			raise OSError("not a file"+filename)
except ImportError:
	try:
		try:
			from hashlib import md5
		except ImportError:
			from md5 import md5
		def h_file(filename):
			"""Return the md5 digest of the given file, read in chunks."""
			f=open(filename,'rb')
			m=md5()
			# the parameter is reused as the read buffer; the loop stops
			# on the first empty read
			while(filename):
				filename=f.read(100000)
				m.update(filename)
			f.close()
			return m.digest()
	except ImportError:
		md5=None
class ordered_dict(UserDict):
	"""Dictionary remembering the order in which keys were first set;
	the insertion order is available as the 'allkeys' list."""
	def __init__(self,dict=None):
		self.allkeys=[]
		UserDict.__init__(self,dict)
	def __delitem__(self,key):
		self.allkeys.remove(key)
		UserDict.__delitem__(self,key)
	def __setitem__(self,key,value):
		# record the key on first insertion only
		if key not in self.allkeys:
			self.allkeys.append(key)
		UserDict.__setitem__(self,key,value)
def exec_command(s,**kw):
	"""Run a command and return its exit status, or -1 when the program
	could not be started.

	@param s: command - a list of arguments, or a shell string
	@param kw: keyword arguments for Popen; the special 'log' value
		redirects both stdout and stderr
	"""
	if'log'in kw:
		kw['stdout']=kw['stderr']=kw.pop('log')
	# strings go through the shell, argument lists do not
	kw['shell']=isinstance(s,str)
	try:
		return pproc.Popen(s,**kw).wait()
	except OSError:
		return-1
if is_win32:
	# windows replacement for exec_command: very long command lines need
	# the STARTUPINFO workaround, and the output is drained through
	# pipes and re-printed to avoid console handle issues
	def exec_command(s,**kw):
		if'log'in kw:
			kw['stdout']=kw['stderr']=kw['log']
			del(kw['log'])
		kw['shell']=isinstance(s,str)
		if len(s)>2000:
			startupinfo=pproc.STARTUPINFO()
			startupinfo.dwFlags|=pproc.STARTF_USESHOWWINDOW
			kw['startupinfo']=startupinfo
		try:
			if'stdout'not in kw:
				# capture and forward the output ourselves
				kw['stdout']=pproc.PIPE
				kw['stderr']=pproc.PIPE
				proc=pproc.Popen(s,**kw)
				(stdout,stderr)=proc.communicate()
				Logs.info(stdout)
				if stderr:
					Logs.error(stderr)
				return proc.returncode
			else:
				proc=pproc.Popen(s,**kw)
				return proc.wait()
		except OSError:
			return-1
listdir=os.listdir
if is_win32:
	def listdir_win32(s):
		# 'c:' alone means the current directory of that drive; append
		# the separator to list the drive root instead
		if re.match('^[A-Za-z]:$',s):
			s+=os.sep
		if not os.path.isdir(s):
			# mimic the posix behaviour: raise ENOENT for missing dirs
			e=OSError()
			e.errno=errno.ENOENT
			raise e
		return os.listdir(s)
	listdir=listdir_win32
def waf_version(mini=0x010000,maxi=0x100000):
	"""Exit when the waf version is outside [mini, maxi]; the bounds may
	be given as hex integers or as 'x.y.z' strings."""
	ver=HEXVERSION
	# accept ints directly; convert dotted strings to the hex scheme
	try:min_val=mini+0
	except TypeError:min_val=int(mini.replace('.','0'),16)
	if min_val>ver:
		Logs.error("waf version should be at least %s (%s found)"%(mini,ver))
		sys.exit(1)
	try:max_val=maxi+0
	except TypeError:max_val=int(maxi.replace('.','0'),16)
	if max_val<ver:
		Logs.error("waf version should be at most %s (%s found)"%(maxi,ver))
		sys.exit(1)
def python_24_guard():
	"""Raise ImportError on interpreters this unpacked waf source cannot
	run on (it needs 2.4 <= python < 3.0)."""
	version=sys.hexversion
	if version<0x20400f0 or version>=0x3000000:
		raise ImportError("Waf requires Python >= 2.3 but the raw source requires Python 2.4, 2.5 or 2.6")
def ex_stack():
	"""Return the current exception formatted as a string; the full
	traceback is included only when running in verbose mode."""
	exc_type,exc_value,tb=sys.exc_info()
	if Logs.verbose>1:
		return''.join(traceback.format_exception(exc_type,exc_value,tb))
	return str(exc_value)
def to_list(sth):
	"""Return a list from a space-separated string; any other value is
	returned unchanged (assumed to already be a sequence)."""
	if isinstance(sth,str):
		return sth.split()
	return sth
# cache of wscript modules already loaded, keyed by file path
g_loaded_modules={}
# the main wscript module, set by set_main_module
g_module=None
def load_module(file_path,name=WSCRIPT_FILE):
	"""Load a wscript file as a python module (cached).

	@param file_path: path of the wscript file
	@param name: name given to the new module
	@raise WscriptError: when the file cannot be read or executed
	"""
	try:
		return g_loaded_modules[file_path]
	except KeyError:
		pass
	module=imp.new_module(name)
	try:
		code=readf(file_path,m='rU')
	except(IOError,OSError):
		raise WscriptError('Could not read the file %r'%file_path)
	module.waf_hash_val=code
	# make imports relative to the wscript directory work while the
	# module code executes
	dt=os.path.dirname(file_path)
	sys.path.insert(0,dt)
	try:
		exec(compile(code,file_path,'exec'),module.__dict__)
	except Exception:
		exc_type,exc_value,tb=sys.exc_info()
		raise WscriptError("".join(traceback.format_exception(exc_type,exc_value,tb)),file_path)
	sys.path.remove(dt)
	g_loaded_modules[file_path]=module
	return module
def set_main_module(file_path):
	"""Load the top-level wscript as the main module and make sure the
	APPNAME/VERSION globals exist (defaults: 'noname' and '1.0').

	@param file_path: path of the main wscript file
	"""
	global g_module
	g_module=load_module(file_path,'wscript_main')
	g_module.root_path=file_path
	# hasattr instead of a bare except: so that unrelated errors are
	# not silently swallowed
	if not hasattr(g_module,'APPNAME'):
		g_module.APPNAME='noname'
	if not hasattr(g_module,'VERSION'):
		g_module.VERSION='1.0'
def to_hashtable(s):
	"""Parse 'key=value' lines into a dict.

	@param s: multi-line string of assignments
	@return: dict mapping each key to its (string) value
	"""
	tbl={}
	for line in s.split('\n'):
		if not line:continue
		# split on the first '=' only, so values may contain '='
		mems=line.split('=',1)
		tbl[mems[0]]=mems[1]
	return tbl
def get_term_cols():
	# fallback width used when the terminal size cannot be determined
	return 80
try:
	import struct,fcntl,termios
except ImportError:
	pass
else:
	if Logs.got_tty:
		def myfun():
			# query the terminal width with the TIOCGWINSZ ioctl
			dummy_lines,cols=struct.unpack("HHHH",fcntl.ioctl(sys.stderr.fileno(),termios.TIOCGWINSZ,struct.pack("HHHH",0,0,0,0)))[:2]
			return cols
		try:
			# probe once; keep the ioctl version only if it works here
			myfun()
		except:
			pass
		else:
			get_term_cols=myfun
# spinner state for the progress display: rot_idx indexes into rot_chr
rot_idx=0
rot_chr=['\\','|','/','-']
def split_path(path):
	"""Split a path on '/' (posix flavour)."""
	return path.split('/')
def split_path_cygwin(path):
	"""Split a path on '/', keeping '//network' prefixes together."""
	if path.startswith('//'):
		parts=path.split('/')[2:]
		parts[0]='/'+parts[0]
		return parts
	return path.split('/')
re_sp=re.compile('[/\\\\]')
def split_path_win32(path):
	"""Split a path on '/' or '\\', keeping '\\\\unc' prefixes together."""
	if path.startswith('\\\\'):
		parts=re.split(re_sp,path)[2:]
		parts[0]='\\'+parts[0]
		return parts
	return re.split(re_sp,path)
# choose the path-splitting implementation once, at import time
if sys.platform=='cygwin':
	split_path=split_path_cygwin
elif is_win32:
	split_path=split_path_win32
def copy_attrs(orig,dest,names,only_if_set=False):
	"""Copy the attributes listed in *names* from *orig* to *dest*.

	Missing attributes are copied as empty tuples; when only_if_set is
	true, falsy or missing values are skipped entirely.
	"""
	# accept a space-separated string as well as a sequence of names
	if isinstance(names,str):
		attrs=names.split()
	else:
		attrs=names
	for name in attrs:
		val=getattr(orig,name,())
		if val or not only_if_set:
			setattr(dest,name,val)
def def_attrs(cls,**kw):
	'''
	set attributes for class.
	@param cls [any class]: the class to update the given attributes in.
	@param kw [dictionary]: dictionary of attributes names and values.

	if the given class hasn't one (or more) of these attributes, add the attribute with its value to the class.
	'''
	# items() works identically here on python 2 and 3 (iteritems is
	# py2-only and would break forward compatibility)
	for k,v in kw.items():
		if not hasattr(cls,k):
			setattr(cls,k,v)
def quote_define_name(path):
	"""Convert a path into an upper-case identifier usable as a
	preprocessor define name (non-alphanumerics become '_')."""
	return re.sub("[^a-zA-Z0-9]","_",path).upper()
def quote_whitespace(path):
	"""Wrap *path* in double quotes when it contains an inner space,
	then collapse any doubled quotes."""
	if path.strip().find(' ')>0:
		quoted='"%s"'%path
	else:
		quoted=path
	return quoted.replace('""','"')
def trimquotes(s):
	"""Strip trailing whitespace and remove one pair of surrounding
	single quotes, if present."""
	if not s:
		return''
	s=s.rstrip()
	if s[0]==s[-1]=="'":
		return s[1:-1]
	return s
def h_list(lst):
	"""Return the md5 digest of the string representation of *lst*
	(used as a cheap stable hash of small lists)."""
	m=md5()
	# NOTE(review): update() is given a str; fine on python 2, would
	# need an explicit .encode() on python 3
	m.update(str(lst))
	return m.digest()
def h_fun(fun):
	"""Return a stable signature for a function: the cached value of
	fun.code when present, otherwise its source text (cached on first
	use, or 'nocode' when the source is unavailable)."""
	try:
		return fun.code
	except AttributeError:
		pass
	try:
		code=inspect.getsource(fun)
	except IOError:
		code="nocode"
	try:
		fun.code=code
	except AttributeError:
		# some callables cannot take new attributes; recompute next time
		pass
	return code
def pprint(col,str,label='',sep=os.linesep):
	"""Write *str* to stderr in the color *col*, followed by *label*
	and the separator."""
	sys.stderr.write("%s%s%s %s%s"%(Logs.colors(col),str,Logs.colors.NORMAL,label,sep))
def check_dir(dir):
	"""Create the folder *dir* (and its parents) if it does not exist.

	@raise WafError: when the folder cannot be created
	"""
	try:
		os.stat(dir)
	except OSError:
		try:
			os.makedirs(dir)
		except OSError,e:
			raise WafError("Cannot create folder '%s' (original error: %s)"%(dir,e))
def cmd_output(cmd,**kw):
	"""Run a command and return its standard output as a string.

	@param cmd: command - a list of arguments, or a shell string
	@param kw: keyword arguments for Popen; 'silent' suppresses the
		error on a non-zero exit status, 'e' is an alias for 'env'
	@raise ValueError: when the program cannot be run, or exits with a
		non-zero status (unless silent)
	"""
	silent=False
	if'silent'in kw:
		silent=kw['silent']
		del(kw['silent'])
	if'e'in kw:
		tmp=kw['e']
		del(kw['e'])
		kw['env']=tmp
	kw['shell']=isinstance(cmd,str)
	kw['stdout']=pproc.PIPE
	if silent:
		# hide stderr too when failures are tolerated
		kw['stderr']=pproc.PIPE
	try:
		p=pproc.Popen(cmd,**kw)
		output=p.communicate()[0]
	except OSError,e:
		raise ValueError(str(e))
	if p.returncode:
		if not silent:
			msg="command execution failed: %s -> %r"%(cmd,str(output))
			raise ValueError(msg)
		output=''
	return output
# matches '\\', '$$' (escapes) or a '${NAME}' variable reference
reg_subst=re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}")
def subst_vars(expr,params):
	"""Substitute ${VAR} occurrences in *expr* using *params*, which is
	either an Environment (get_flat) or a plain mapping; '\\\\' and '$$'
	are escapes producing a backslash and a dollar sign."""
	def repl_var(m):
		if m.group(1):
			return'\\'
		if m.group(2):
			return'$'
		try:
			return params.get_flat(m.group(3))
		except AttributeError:
			# plain dictionaries have no get_flat method
			return params[m.group(3)]
	return reg_subst.sub(repl_var,expr)
def unversioned_sys_platform_to_binary_format(unversioned_sys_platform):
	"""Map an unversioned platform name to its executable format
	('elf', 'mac-o' or 'pe'); unknown platforms default to 'elf'."""
	if unversioned_sys_platform in('win32','cygwin','uwin','msys'):
		return'pe'
	if unversioned_sys_platform=='darwin':
		return'mac-o'
	# linux, the BSDs, sunos, gnu ... and anything unknown
	return'elf'
def unversioned_sys_platform():
	"""Return sys.platform stripped of its trailing version digits
	(e.g. 'linux2' -> 'linux'); jython platform names are mapped onto
	the usual python values first."""
	s=sys.platform
	if s=='java':
		from java.lang import System
		s=System.getProperty('os.name')
		if s=='Mac OS X':
			return'darwin'
		if s.startswith('Windows '):
			return'win32'
		if s=='OS/2':
			return'os2'
		if s=='HP-UX':
			return'hpux'
		if s in('SunOS','Solaris'):
			return'sunos'
		s=s.lower()
	# keep win32/os2 as-is (their digits are part of the name)
	if s=='win32'or s.endswith('os2')and s!='sunos2':
		return s
	return re.split(r'\d+$',s)[0]
def detect_platform():
	"""Guess a canonical platform name from sys.platform, falling back
	to os.name and finally to sys.platform itself."""
	s=sys.platform
	for known in'cygwin linux irix sunos hpux aix darwin gnu'.split():
		if known in s:
			return known
	if os.name in('posix','java','os2'):
		return os.name
	return s
def load_tool(tool,tooldir=None):
	"""Import a python module (a waf tool), optionally searching extra
	directories first.

	@param tool: name of the tool module to import
	@param tooldir: list of directories to search, or None
	@return: the loaded module

	Warning: not thread-safe (it modifies sys.path temporarily).
	"""
	if not tooldir:
		tooldir=[]
	else:
		assert isinstance(tooldir,list)
		sys.path=tooldir+sys.path
	try:
		return __import__(tool)
	finally:
		# always restore sys.path, even when the import fails
		for entry in tooldir:
			sys.path.remove(entry)
def readf(fname,m='r'):
	"""Read and return the whole content of the file *fname* opened with
	mode *m*, always closing the file handle."""
	handle=open(fname,m)
	try:
		return handle.read()
	finally:
		handle.close()
def nada(*k,**kw):
	"""Do nothing: a placeholder callback accepting any arguments."""
	pass
def diff_path(top,subdir):
	"""Return *subdir* relative to *top* ('' when the two are equal);
	both paths are normalized and compared component-wise."""
	top_parts=os.path.normpath(top).replace('\\','/').split('/')
	sub_parts=os.path.normpath(subdir).replace('\\','/').split('/')
	if len(top_parts)==len(sub_parts):
		return''
	rel=sub_parts[len(top_parts)-len(sub_parts):]
	return os.path.join(*rel)
class Context(object):
	"""Base class for command contexts; tracks a current directory and
	knows how to recurse into subdirectory wscript files."""
	def set_curdir(self,dir):
		self.curdir_=dir
	def get_curdir(self):
		try:
			return self.curdir_
		except AttributeError:
			# lazily initialized to the process working directory
			self.curdir_=os.getcwd()
			return self.get_curdir()
	curdir=property(get_curdir,set_curdir)
	def recurse(self,dirs,name=''):
		"""Execute the function *name* from the wscript of each folder
		in *dirs*; without a name, the caller's function name is used.

		A file 'wscript_<name>' takes precedence over the function
		<name> defined in 'wscript'.
		"""
		if not name:
			# default to the name of the calling function
			name=inspect.stack()[1][3]
		if isinstance(dirs,str):
			dirs=to_list(dirs)
		for x in dirs:
			if os.path.isabs(x):
				nexdir=x
			else:
				nexdir=os.path.join(self.curdir,x)
			base=os.path.join(nexdir,WSCRIPT_FILE)
			file_path=base+'_'+name
			try:
				txt=readf(file_path,m='rU')
			except(OSError,IOError):
				# no wscript_<name> file: call <name> from the wscript module
				try:
					module=load_module(base)
				except OSError:
					raise WscriptError('No such script %s'%base)
				try:
					f=module.__dict__[name]
				except KeyError:
					raise WscriptError('No function %s defined in %s'%(name,base))
				if getattr(self.__class__,'pre_recurse',None):
					self.pre_recurse(f,base,nexdir)
				old=self.curdir
				self.curdir=nexdir
				try:
					f(self)
				finally:
					self.curdir=old
				if getattr(self.__class__,'post_recurse',None):
					self.post_recurse(module,base,nexdir)
			else:
				# execute the wscript_<name> file body with 'ctx' bound
				dc={'ctx':self}
				if getattr(self.__class__,'pre_recurse',None):
					dc=self.pre_recurse(txt,file_path,nexdir)
				old=self.curdir
				self.curdir=nexdir
				try:
					try:
						exec(compile(txt,file_path,'exec'),dc)
					except Exception:
						exc_type,exc_value,tb=sys.exc_info()
						raise WscriptError("".join(traceback.format_exception(exc_type,exc_value,tb)),base)
				finally:
					self.curdir=old
				if getattr(self.__class__,'post_recurse',None):
					self.post_recurse(txt,file_path,nexdir)
if is_win32:
	old=shutil.copy2
	def copy2(src,dst):
		"""shutil.copy2 replacement for win32: copy the file, then copy
		its stat information to the destination."""
		old(src,dst)
		# the previous code called copystat(src,src), a no-op that
		# silently dropped the timestamps on the copy; they must be
		# applied to dst
		shutil.copystat(src,dst)
	setattr(shutil,'copy2',copy2)
def zip_folder(dir,zip_file_name,prefix):
	"""Create the deflated zip archive *zip_file_name* with the files of
	the folder *dir*; every archived name is prepended with *prefix*."""
	import zipfile
	archive=zipfile.ZipFile(zip_file_name,'w',compression=zipfile.ZIP_DEFLATED)
	base=os.path.abspath(dir)
	# make sure the prefix ends with a separator
	if prefix and prefix[-1]!=os.sep:
		prefix+=os.sep
	n=len(base)
	for root,dirs,files in os.walk(base):
		for f in files:
			archive.write(root+os.sep+f,prefix+root[n:]+os.sep+f,zipfile.ZIP_DEFLATED)
	archive.close()
def get_elapsed_time(start):
	"""Format the time elapsed since *start* (a datetime) as a compact
	string like '2d3h4m5.678s'; larger units appear only when needed."""
	delta=datetime.datetime.now()-start
	days=delta.days
	hours,rem=divmod(delta.seconds,3600)
	minutes,whole_secs=divmod(rem,60)
	seconds=whole_secs+float(delta.microseconds)/1000/1000
	parts=''
	if days:
		parts+='%dd'%days
	if days or hours:
		parts+='%dh'%hours
	if days or hours or minutes:
		parts+='%dm'%minutes
	return'%s%.3fs'%(parts,seconds)
if os.name=='java':
	# jython's gc may not implement disable(); in that case alias
	# disable() to enable() so that later calls are harmless no-ops
	try:
		gc.disable()
		gc.enable()
	except NotImplementedError:
		gc.disable=gc.enable
diff --git a/wafadmin/__init__.py b/wafadmin/__init__.py
new file mode 100644
index 0000000..cbc8406
--- /dev/null
+++ b/wafadmin/__init__.py
@@ -0,0 +1,4 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+
diff --git a/wafadmin/ansiterm.py b/wafadmin/ansiterm.py
new file mode 100644
index 0000000..f9d13b3
--- /dev/null
+++ b/wafadmin/ansiterm.py
@@ -0,0 +1,144 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+import sys,os
# Probe for a usable Win32 console.  Any failure - the streams are not
# ttys, ctypes/windll is unavailable (non-Windows), or the console is
# too small to be worth driving - aborts the probe: the except clause
# swallows the error and the else-branch below (which installs the
# ANSI-emulating terminal) is skipped, leaving sys.stdout/sys.stderr
# untouched.
try:
	if(not sys.stderr.isatty())or(not sys.stdout.isatty()):
		raise ValueError('not a tty')
	from ctypes import*
	# ctypes mirrors of the Win32 console structures consumed by the
	# kernel32 console API calls below.
	class COORD(Structure):
		_fields_=[("X",c_short),("Y",c_short)]
	class SMALL_RECT(Structure):
		_fields_=[("Left",c_short),("Top",c_short),("Right",c_short),("Bottom",c_short)]
	class CONSOLE_SCREEN_BUFFER_INFO(Structure):
		_fields_=[("Size",COORD),("CursorPosition",COORD),("Attributes",c_short),("Window",SMALL_RECT),("MaximumWindowSize",COORD)]
	class CONSOLE_CURSOR_INFO(Structure):
		_fields_=[('dwSize',c_ulong),('bVisible',c_int)]
	sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
	csinfo=CONSOLE_CURSOR_INFO()
	# -11 is the Win32 STD_OUTPUT_HANDLE constant
	hconsole=windll.kernel32.GetStdHandle(-11)
	windll.kernel32.GetConsoleScreenBufferInfo(hconsole,byref(sbinfo))
	if sbinfo.Size.X<10 or sbinfo.Size.Y<10:raise Exception('small console')
	windll.kernel32.GetConsoleCursorInfo(hconsole,byref(csinfo))
except Exception:
	# No usable Win32 console: keep the default streams.
	pass
else:
	import re,threading
	# to_int('', d) -> d; to_int('5', d) -> 5  (empty CSI params default)
	to_int=lambda number,default:number and int(number)or default
	# serializes AnsiTerm.write() across threads
	wlock=threading.Lock()
	STD_OUTPUT_HANDLE=-11
	STD_ERROR_HANDLE=-12
+	class AnsiTerm(object):
+		def __init__(self):
+			self.hconsole=windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
+			self.cursor_history=[]
+		def screen_buffer_info(self):
+			sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
+			windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(sbinfo))
+			return sbinfo
+		def clear_line(self,param):
+			mode=param and int(param)or 0
+			sbinfo=self.screen_buffer_info()
+			if mode==1:
+				line_start=COORD(0,sbinfo.CursorPosition.Y)
+				line_length=sbinfo.Size.X
+			elif mode==2:
+				line_start=COORD(sbinfo.CursorPosition.X,sbinfo.CursorPosition.Y)
+				line_length=sbinfo.Size.X-sbinfo.CursorPosition.X
+			else:
+				line_start=sbinfo.CursorPosition
+				line_length=sbinfo.Size.X-sbinfo.CursorPosition.X
+			chars_written=c_int()
+			windll.kernel32.FillConsoleOutputCharacterA(self.hconsole,c_char(' '),line_length,line_start,byref(chars_written))
+			windll.kernel32.FillConsoleOutputAttribute(self.hconsole,sbinfo.Attributes,line_length,line_start,byref(chars_written))
+		def clear_screen(self,param):
+			mode=to_int(param,0)
+			sbinfo=self.screen_buffer_info()
+			if mode==1:
+				clear_start=COORD(0,0)
+				clear_length=sbinfo.CursorPosition.X*sbinfo.CursorPosition.Y
+			elif mode==2:
+				clear_start=COORD(0,0)
+				clear_length=sbinfo.Size.X*sbinfo.Size.Y
+				windll.kernel32.SetConsoleCursorPosition(self.hconsole,clear_start)
+			else:
+				clear_start=sbinfo.CursorPosition
+				clear_length=((sbinfo.Size.X-sbinfo.CursorPosition.X)+sbinfo.Size.X*(sbinfo.Size.Y-sbinfo.CursorPosition.Y))
+			chars_written=c_int()
+			windll.kernel32.FillConsoleOutputCharacterA(self.hconsole,c_char(' '),clear_length,clear_start,byref(chars_written))
+			windll.kernel32.FillConsoleOutputAttribute(self.hconsole,sbinfo.Attributes,clear_length,clear_start,byref(chars_written))
+		def push_cursor(self,param):
+			sbinfo=self.screen_buffer_info()
+			self.cursor_history.push(sbinfo.CursorPosition)
+		def pop_cursor(self,param):
+			if self.cursor_history:
+				old_pos=self.cursor_history.pop()
+				windll.kernel32.SetConsoleCursorPosition(self.hconsole,old_pos)
+		def set_cursor(self,param):
+			x,sep,y=param.partition(';')
+			x=to_int(x,1)-1
+			y=to_int(y,1)-1
+			sbinfo=self.screen_buffer_info()
+			new_pos=COORD(min(max(0,x),sbinfo.Size.X),min(max(0,y),sbinfo.Size.Y))
+			windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos)
+		def set_column(self,param):
+			x=to_int(param,1)-1
+			sbinfo=self.screen_buffer_info()
+			new_pos=COORD(min(max(0,x),sbinfo.Size.X),sbinfo.CursorPosition.Y)
+			windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos)
+		def move_cursor(self,x_offset=0,y_offset=0):
+			sbinfo=self.screen_buffer_info()
+			new_pos=COORD(min(max(0,sbinfo.CursorPosition.X+x_offset),sbinfo.Size.X),min(max(0,sbinfo.CursorPosition.Y+y_offset),sbinfo.Size.Y))
+			windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos)
+		def move_up(self,param):
+			self.move_cursor(y_offset=-to_int(param,1))
+		def move_down(self,param):
+			self.move_cursor(y_offset=to_int(param,1))
+		def move_left(self,param):
+			self.move_cursor(x_offset=-to_int(param,1))
+		def move_right(self,param):
+			self.move_cursor(x_offset=to_int(param,1))
+		def next_line(self,param):
+			sbinfo=self.screen_buffer_info()
+			self.move_cursor(x_offset=-sbinfo.CursorPosition.X,y_offset=to_int(param,1))
+		def prev_line(self,param):
+			sbinfo=self.screen_buffer_info()
+			self.move_cursor(x_offset=-sbinfo.CursorPosition.X,y_offset=-to_int(param,1))
+		escape_to_color={(0,30):0x0,(0,31):0x4,(0,32):0x2,(0,33):0x4+0x2,(0,34):0x1,(0,35):0x1+0x4,(0,36):0x2+0x4,(0,37):0x1+0x2+0x4,(1,30):0x1+0x2+0x4,(1,31):0x4+0x8,(1,32):0x2+0x8,(1,33):0x4+0x2+0x8,(1,34):0x1+0x8,(1,35):0x1+0x4+0x8,(1,36):0x1+0x2+0x8,(1,37):0x1+0x2+0x4+0x8,}
+		def set_color(self,param):
+			intensity,sep,color=param.partition(';')
+			intensity=to_int(intensity,0)
+			color=to_int(color,0)
+			if intensity and not color:
+				color,intensity=intensity,color
+			attrib=self.escape_to_color.get((intensity,color),0x7)
+			windll.kernel32.SetConsoleTextAttribute(self.hconsole,attrib)
+		def show_cursor(self,param):
+			csinfo.bVisible=1
+			windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(csinfo))
+		def hide_cursor(self,param):
+			csinfo.bVisible=0
+			windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(csinfo))
+		ansi_command_table={'A':move_up,'B':move_down,'C':move_right,'D':move_left,'E':next_line,'F':prev_line,'G':set_column,'H':set_cursor,'f':set_cursor,'J':clear_screen,'K':clear_line,'h':show_cursor,'l':hide_cursor,'m':set_color,'s':push_cursor,'u':pop_cursor,}
+		ansi_tokans=re.compile('(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))')
+		def write(self,text):
+			wlock.acquire()
+			for param,cmd,txt in self.ansi_tokans.findall(text):
+				if cmd:
+					cmd_func=self.ansi_command_table.get(cmd)
+					if cmd_func:
+						cmd_func(self,param)
+				else:
+					chars_written=c_int()
+					if isinstance(txt,unicode):
+						windll.kernel32.WriteConsoleW(self.hconsole,txt,len(txt),byref(chars_written),None)
+					else:
+						windll.kernel32.WriteConsoleA(self.hconsole,txt,len(txt),byref(chars_written),None)
+			wlock.release()
+		def flush(self):
+			pass
+		def isatty(self):
+			return True
	# Install the emulator as both standard streams and advertise a
	# vt100 terminal so child tools emit the ANSI sequences it parses.
	sys.stderr=sys.stdout=AnsiTerm()
	os.environ['TERM']='vt100'
+



[Date Prev][Date Next]   [Thread Prev][Thread Next]   [Thread Index] [Date Index] [Author Index]