[kupfer: 1/6] Add waf-light and waflib from waf-1.6.11



commit 684a1e79e28fa3d9e346bdf2c1659e7d2c6e7718
Author: Ulrik Sverdrup <ulrik sverdrup gmail com>
Date:   Sat Feb 25 18:38:05 2012 +0100

    Add waf-light and waflib from waf-1.6.11
    
    http://code.google.com/p/waf/
    Acquired from: http://waf.googlecode.com/files/waf-1.6.11.tar.bz2
    
    """
    Redistribution and use in source and binary forms, with or without
    modification, are permitted provided that the following conditions
    are met:
    
    1. Redistributions of source code must retain the above copyright
       notice, this list of conditions and the following disclaimer.
    
    2. Redistributions in binary form must reproduce the above copyright
       notice, this list of conditions and the following disclaimer in the
       documentation and/or other materials provided with the distribution.
    
    3. The name of the author may not be used to endorse or promote products
       derived from this software without specific prior written permission.
    
    THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
    IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
    WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
    DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
    INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
    (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
    SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
    HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
    STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
    IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
    POSSIBILITY OF SUCH DAMAGE.
    """

 Waf.ChangeLog                   |  250 ++++++++
 waf                             |  164 +++++
 waflib/Build.py                 | 1293 +++++++++++++++++++++++++++++++++++++++
 waflib/ConfigSet.py             |  337 ++++++++++
 waflib/Configure.py             |  570 +++++++++++++++++
 waflib/Context.py               |  596 ++++++++++++++++++
 waflib/Errors.py                |   70 +++
 waflib/Logs.py                  |  274 +++++++++
 waflib/Node.py                  |  850 +++++++++++++++++++++++++
 waflib/Options.py               |  252 ++++++++
 waflib/Runner.py                |  361 +++++++++++
 waflib/Scripting.py             |  577 +++++++++++++++++
 waflib/Task.py                  | 1238 +++++++++++++++++++++++++++++++++++++
 waflib/TaskGen.py               |  757 +++++++++++++++++++++++
 waflib/Tools/__init__.py        |    3 +
 waflib/Tools/c_aliases.py       |  128 ++++
 waflib/Tools/c_config.py        | 1198 ++++++++++++++++++++++++++++++++++++
 waflib/Tools/c_osx.py           |  188 ++++++
 waflib/Tools/c_preproc.py       | 1030 +++++++++++++++++++++++++++++++
 waflib/Tools/c_tests.py         |  218 +++++++
 waflib/Tools/ccroot.py          |  608 ++++++++++++++++++
 waflib/Tools/gnu_dirs.py        |  128 ++++
 waflib/Tools/intltool.py        |  176 ++++++
 waflib/Tools/python.py          |  524 ++++++++++++++++
 waflib/Utils.py                 |  602 ++++++++++++++++++
 waflib/__init__.py              |    3 +
 waflib/ansiterm.py              |  246 ++++++++
 waflib/extras/__init__.py       |    3 +
 waflib/extras/compat15.py       |  298 +++++++++
 waflib/extras/local_rpath.py    |   19 +
 waflib/extras/lru_cache.py      |   98 +++
 waflib/extras/make.py           |  142 +++++
 waflib/extras/md5_tstamp.py     |   69 +++
 waflib/extras/misc.py           |  416 +++++++++++++
 waflib/extras/objcopy.py        |   54 ++
 waflib/extras/ocaml.py          |  326 ++++++++++
 waflib/extras/package.py        |   76 +++
 waflib/extras/parallel_debug.py |  342 +++++++++++
 waflib/extras/pep8.py           |  106 ++++
 waflib/extras/print_commands.py |   46 ++
 waflib/extras/proc.py           |   56 ++
 waflib/extras/relocation.py     |   85 +++
 waflib/extras/review.py         |  328 ++++++++++
 waflib/extras/smart_continue.py |   81 +++
 waflib/extras/subprocess.py     |  620 +++++++++++++++++++
 waflib/extras/syms.py           |   71 +++
 waflib/fixpy2.py                |   67 ++
 wscript                         |    2 +-
 48 files changed, 15945 insertions(+), 1 deletions(-)
---
diff --git a/Waf.ChangeLog b/Waf.ChangeLog
new file mode 100644
index 0000000..4d84ca4
--- /dev/null
+++ b/Waf.ChangeLog
@@ -0,0 +1,250 @@
+NEW IN WAF 1.6.11
+-----------------
+* Enable custom variables for the boost detection #1089
+* Disable the config test execution when detecting boost #1090
+* Process moc classes in .cpp files by default #1095
+* Apply the chmod attribute to the versioned libraries (vnum) #1097
+* Fixed the python detection on OSX #1098
+* Changed the win32 color settings for Windows 7 #1099
+* Set the default fortran linker for ifort to xiar #1104
+
+NEW IN WAF 1.6.10
+-----------------
+* Fixed the 'remove' attribute propagation in ant_glob #1086
+* Fixed the behavior of recurse(name=xyz) when looking in existing folders
+* Fixed a problem with include paths in the relocation tool #1078
+* Improved the pgicc compiler detection #1080
+* Fixed the behavior of 'waf options' #1076
+* Process larger java projects #1074
+* Remove the ':' from the drives when dealing with foreign files and folders on Win32
+* Let the 'subst' feature process a chmod attribute
+* Added a hook for distutils variable query #1083
+
+NEW IN WAF 1.6.9
+----------------
+* Fixed the duplicate moc file creation in slow_qt4 #1047
+* Fixed the Visual Studio 2008 projects creation #1033
+* Added a workaround to avoid creating include folders not under the build directory #1049
+* Added a default virtual folder structure for out-of-tree build files #1053
+* Added a way to set variants containing /, for example linux/debug
+* Added a more intuitive behaviour for conf.setenv() #1062
+* Fixed the multiple bibliography processing for tex #1040
+* Windows CE detection improvements #1065
+* Fixed the library installation on OSX
+* Fixed the Powerpc/IPhone platform detection
+* Added an Xcode project generator
+
+NEW IN WAF 1.6.8
+----------------
+* Fixed a typo in Utils.py affecting Win32 platforms (copystat) #1029
+* Fixed a minor bug in the Eclipse project generator
+* Fixed a typo that prevented Waf from running on Pypy-trunk
+* Make the xlc/xlc++ compiler detection more accurate by looking at the version number #1022
+* Minor perl, python and ruby tool improvements
+* Better logs for the boost detection #1036
+* Fixed a performance issue in Runner.py #1039
+* Changed the position of the linker flags #1025
+
+NEW IN WAF 1.6.7
+----------------
+
+* Provide more diagnostic for invalid build groups #914
+* Various enhancements to msvs.py
+* Read MSVC_VERSIONS and MSVC_TARGETS from the command-line
+* Minor cross-compiler detection fix on msvc.py
+* Fix the redirections with pipes (waf configure > log)
+* Do not display runnable_status exceptions when running with -k
+* Let -k stop at the first runnable_status error and -kk run even further
+* Merge the add_object extension in the main line (source='file.o')
+* Make update_outputs more robust with changes in the task definition #1017
+* Qt4 detection on Win32
+
+NEW IN WAF 1.6.6
+----------------
+
+* Fix the performance regression related to #974
+
+NEW IN WAF 1.6.5
+----------------
+
+* More documentation
+* Re-enable the colors for msys
+* Add the .ui files for the qt4 translations
+* Fix the conf.check_large_file() test
+* Fix the conf.check_library() in C++ mode #955
+* Improve the latex scanner to avoid depending on generated files #943
+* Remove the @file processing from the winrc tasks
+* Fix the python detection using python-config
+* Add the missing default includes and defines to the moc command
+* Improve support for hierarchical go-lang packages #953
+* Fix the gfortran verbose flag detection on Windows
+* Fix the support of fortran import libraries #950
+* Added a fix for running with Python 2.4 on Windows #949
+* Limited support for IronPython
+* Support for older Visual Studio versions (VC6) #952
+* New waf.bat file #964
+* New method ConfigSet.keys
+* New Visual Studio and Eclipse CTD project generators (waflib/extras)
+* New lru_cache tool for use with WAFCACHE (waflib/extras)
+
+NEW IN WAF 1.6.4
+----------------
+
+* Fix the Python detection on win32 #883
+* Optimize the Python file installation #892
+* Force +x permissions on installed fortran programs #893
+* Qt library detection fixes #895
+* Ensure that unit tests are executed only after the symlinks are created
+* Fix the precedence constraints for classes ending in _task #896
+* Support the install_path attribute with add_pcfile #913
+* Make the goprograms executable when installed #928
+* Allow non-python data files in the waf executable #927
+* Enforce a build order based on the scanner results #777, #922
+* Multiple msvc detection fixes #907 #910 #923 #924 #926
+* Fix the -of flag append with dmd #917
+* Boost detection fixes #920
+* Support newer javac compilers #921
+* Fix the execution on python 2.3 for: "waf update", msvc.py, fc.py
+* Improve the support for mac applications (demos/mac_app)
+* Better default regexps in "waf step"
+* New error check for tasks creating the same nodes or having the same identifiers (waf -v)
+* New variables conf.env.NO_LOCK_IN_TOP/OUT/RUN for special projects (top='..')
+* New example on avoiding rebuilds when moving a project (playground/relocate)
+* Improve go-lang support for cgo-packages (fixes #932)
+* Fix the progress bar on cmd and msys
+
+NEW IN WAF 1.6.3
+----------------
+
+* Fixed the interaction of Fortran configuration tests and WAFCACHE #842
+* Various documentation fixes
+* Set the PYTHONARCHDIR variable for installing python extensions
+* Fixed the Waf file creation with --zip-type=gz (bunzip2 was not replaced by gzip -d)
+* Fixed multiple problems in the call to TaskGen.declare_chain(...) #850
+* Fixed the task attribute 'vars' which might cause unnecessary rebuilds #852
+* Return the value of post_check(...) in conf.check(...) #857
+* Rewrite the boost tool (waflib/extras/boost.py) #814, #454, #424
+* More fortran file extensions: .for, .FOR #867
+* Searching above the root nodes no longer raise exceptions #868
+* Msvc detection fixes for non-utf8 encodings #873
+* Fixed the destdir processing on Windows #874
+* Stop changing the flags on waf -v (make the errors more visible) #875
+* Fixed the resource file compilation on Windows #876
+* Fixed the vala file installation #881
+* New system of plugins for C/C++/Fortran compilers (files named c_* in waflib/extras/)
+* New examples of interaction between Waf and existing makefiles (playground/)
+* New names for @before/@after: @before_method/@after_method
+
+NEW IN WAF 1.6.2
+----------------
+
+* Support for C# debugging files #797
+* Add -relocation-model=pic for shared libraries on ldc
+* Fixed 'waf dist' for tar files on python 3 #799
+* Make the latex scanner recursive #798
+* Enable the packing of non-python files in the waf file #802
+* Improve the feature sniffing for hybrid programs/libraries #800
+* New apidocs + tutorial in Sphinx format
+* Add the CPPFLAGS from os.environ #803
+* Create the java output directory anywhere #806
+* Enable the .luac file installation
+* Process Qt translation files
+* Detect when the folders were copied and prompt for "waf configure"
+* Parse_flags for the *-config outputs on windows (backslashes) #811
+* Fix the doxygen task build order + improve the dependency scanner #821
+* Various msvc fixes #819, #826, #825
+* Ported the batch executor to waf 1.6 (batched_cc)
+* New tools: erlang, scala
+* Moved conf.multicheck(..) from playground to the library
+* New parameter to avoid reading the same scripts: bld.recurse(dir, once=True)
+* Detect invalid method calls in 'waf -v' such as env.append/env.add/env.prepend
+* New manifest option for jar targets #832
+
+NEW IN WAF 1.6.1
+----------------
+
+* Fixed the method check_waf_version  #764
+* Fixed the name in ctx.recurse(name) #769
+* Stop caching the install tasks and tasks that have no outputs #770
+* Fix the log in Context.cmd_and_log() when calling with "quiet" #778
+* c_preproc exception when a file has the same name as a directory #777
+* 'intltool_po' does not install the .mo files #782
+* 'intltool_in' was broken #792
+* Bind stderr and stdout to the exception in Context.cmd_and_log #779
+* Tasks not rebuilding properly when the 'run' method changes #786
+* Print the progress bar information as late as possible #787
+* Fix for the FRAMEWORK value processing
+* Verbose mode should not require the compat15 tools #790
+* Let static libraries use other static libraries as in 1.5 #768
+* Fix for the boost tool #776
+* boost tool update (in playground) #780
+* Updated the java tool and examples
+* New gcj tool in playground
+* Update the c# tool and examples (playground) #796
+* Read external c# libraries #774
+* Xelatex support #785
+* Rebuild fortran files when .mod files change #766
+* docs #781
+* Improve the ant_glob behaviour on ctx.root + absolute paths
+* Fix for glib_mkenums and dbus-binding-tool #795
+* New feature 'subst' (see demos/subst)
+
+NEW IN WAF 1.6.0
+----------------
+
+General:
+* Python 3 syntax by default (runs unmodified for 2.6, 2.7, 3.0 and 3.1)
+* Environment -> ConfigSet
+* only lists are allowed in ConfigSet
+* Better Node apis
+* Utils.load_tool -> Context.load_tool
+* set_options becomes options
+* only the build-related commands require a configured project
+* new variant system + build context commands
+* removed the pseudo glob in installation methods
+* eliminate find_sources_in_dirs
+* node.__class__.bld -> node.ctx
+* bld.new_task_gen(...) disappears, use bld(...)
+* network updates for waf tools
+* accept node objects in the source and includes attributes
+* remove task_gen.allnodes: modify self.source directly
+* merge the include system for c, c++, d, gas and nasm
+* allow top == out (no build directory)
+* merge the Tool/wscript system (detect->configure, set_options->options)
+* new command "waf list" to list the x for "waf build --targets=x"
+* rename apply_core -> process_source
+* rename apply_rule -> process_rule
+* rename Task.TaskBase.classes -> Task.classes
+* the modules Utils.py and Logs.py are now independent from the rest of waf (imports)
+* remove Task.TaskManager and Build.BuildContext.all_task_gen to improve the build group handling
+* remove program_USELIB, shlib_USELIB staticlib_USELIB support
+* use tasks for target installation
+* improve the exception handling (WscriptError was removed, use WafError)
+* let the commands access node objects
+* infer the build directory from the lock filename
+* waf step --file=main.c
+* post task generators in a lazy manner
+
+preview 3:
+* remove the /W3 flag from msvc default flags
+* opt.tool_options -> opt.load (now all commands inherit the 'tool' method)
+* conf.check_tool -> conf.load
+* do not copy files when creating tar files in 'waf dist'
+* add zip files in 'waf dist'
+* fix the behaviour of 'waf distcheck'
+* preprocessor optimizations
+* python 2 fixes
+
+release candidate:
+* cache fixes
+* fortran fixes
+* python 2 and 3 fixes
+* docs and docstrings
+* support for custom waf files and preludes
+* fix in waflib.Context for overriding command classes
+* port the doxygen tool
+* Utils.to_hashtable -> Utils.str2dict
+* change the thread pool to enable thread pool sharing
+* fixed a regression on win32 + ansiterm.py + python 3 -> thanks to kesselhaus :-)
+* various msvc fixes (thanks to Nicolas Mercier)
+
diff --git a/waf b/waf
new file mode 100755
index 0000000..90dbd92
--- /dev/null
+++ b/waf
@@ -0,0 +1,164 @@
+#!/usr/bin/env python
+# encoding: ISO8859-1
+# Thomas Nagy, 2005-2011
+
+"""
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+1. Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+3. The name of the author may not be used to endorse or promote products
+   derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+"""
+
+import os, sys
+
+VERSION="1.6.11"
+REVISION="x"
+INSTALL="x"
+C1='x'
+C2='x'
+cwd = os.getcwd()
+join = os.path.join
+
+if sys.hexversion<0x206000f:
+	raise ImportError('Python >= 2.6 is required to create the waf file')
+
+WAF='waf'
+def b(x):
+	return x
+if sys.hexversion>0x300000f:
+	WAF='waf3'
+	def b(x):
+		return x.encode()
+
+def err(m):
+	print(('\033[91mError: %s\033[0m' % m))
+	sys.exit(1)
+
+def unpack_wafdir(dir):
+	f = open(sys.argv[0],'rb')
+	c = 'corrupt archive (%d)'
+	while 1:
+		line = f.readline()
+		if not line: err('run waf-light from a folder containing waflib')
+		if line == b('#==>\n'):
+			txt = f.readline()
+			if not txt: err(c % 1)
+			if f.readline() != b('#<==\n'): err(c % 2)
+			break
+	if not txt: err(c % 3)
+	txt = txt[1:-1].replace(b(C1), b('\n')).replace(b(C2), b('\r'))
+
+	import shutil, tarfile
+	try: shutil.rmtree(dir)
+	except OSError: pass
+	try:
+		for x in ['Tools', 'extras']:
+			os.makedirs(join(dir, 'waflib', x))
+	except OSError:
+		err("Cannot unpack waf lib into %s\nMove waf into a writeable directory" % dir)
+
+	os.chdir(dir)
+	tmp = 't.bz2'
+	t = open(tmp,'wb')
+	t.write(txt)
+	t.close()
+
+	try:
+		t = tarfile.open(tmp)
+	except:
+		try:
+			os.system('bunzip2 t.bz2')
+			t = tarfile.open('t')
+			tmp = 't'
+		except:
+			os.chdir(cwd)
+			try: shutil.rmtree(dir)
+			except OSError: pass
+			err("Waf cannot be unpacked, check that bzip2 support is present")
+
+	for x in t: t.extract(x)
+	t.close()
+
+	for x in ['Tools', 'extras']:
+		os.chmod(join('waflib',x), 493)
+
+	if sys.hexversion<0x300000f:
+		sys.path = [join(dir, 'waflib')] + sys.path
+		import fixpy2
+		fixpy2.fixdir(dir)
+
+	os.unlink(tmp)
+	os.chdir(cwd)
+
+	try: dir = unicode(dir, 'mbcs')
+	except: pass
+	try:
+		from ctypes import windll
+		windll.kernel32.SetFileAttributesW(dir, 2)
+	except:
+		pass
+
+def test(dir):
+	try:
+		os.stat(join(dir, 'waflib'))
+		return os.path.abspath(dir)
+	except OSError:
+		pass
+
+def find_lib():
+	name = sys.argv[0]
+	base = os.path.dirname(os.path.abspath(name))
+
+	#devs use $WAFDIR
+	w=test(os.environ.get('WAFDIR', ''))
+	if w: return w
+
+	#waf-light
+	if name.endswith('waf-light'):
+		w = test(base)
+		if w: return w
+		err('waf-light requires waflib -> export WAFDIR=/folder')
+
+	dirname = '%s-%s-%s' % (WAF, VERSION, REVISION)
+	for i in [INSTALL,'/usr','/usr/local','/opt']:
+		w = test(i + '/lib/' + dirname)
+		if w: return w
+
+	#waf-local
+	dir = join(base, (sys.platform != 'win32' and '.' or '') + dirname)
+	w = test(dir)
+	if w: return w
+
+	#unpack
+	unpack_wafdir(dir)
+	return dir
+
+wafdir = find_lib()
+sys.path.insert(0, wafdir)
+
+if __name__ == '__main__':
+	import waflib.extras.compat15#PRELUDE
+	from waflib import Scripting
+	Scripting.waf_entry_point(cwd, VERSION, wafdir)
+
diff --git a/waflib/Build.py b/waflib/Build.py
new file mode 100644
index 0000000..7c2a6a3
--- /dev/null
+++ b/waflib/Build.py
@@ -0,0 +1,1293 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2010 (ita)
+
+"""
+Classes related to the build phase (build, clean, install, step, etc)
+
+The inheritance tree is the following:
+
+"""
+
+import os, sys, errno, re, shutil
+try: import cPickle
+except: import pickle as cPickle
+from waflib import Runner, TaskGen, Utils, ConfigSet, Task, Logs, Options, Context, Errors
+import waflib.Node
+
+CACHE_DIR = 'c4che'
+"""Location of the cache files"""
+
+CACHE_SUFFIX = '_cache.py'
+"""Suffix for the cache files"""
+
+INSTALL = 1337
+"""Positive value '->' install, see :py:attr:`waflib.Build.BuildContext.is_install`"""
+
+UNINSTALL = -1337
+"""Negative value '<-' uninstall, see :py:attr:`waflib.Build.BuildContext.is_install`"""
+
+SAVED_ATTRS = 'root node_deps raw_deps task_sigs'.split()
+"""Build class members to save between the runs (root, node_deps, raw_deps, task_sigs)"""
+
+CFG_FILES = 'cfg_files'
+"""Files from the build directory to hash before starting the build (``config.h`` written during the configuration)"""
+
+POST_AT_ONCE = 0
+"""Post mode: all task generators are posted before the build really starts"""
+
+POST_LAZY = 1
+"""Post mode: post the task generators group after group"""
+
+POST_BOTH = 2
+"""Post mode: post the task generators at once, then re-check them for each group"""
+
+class BuildContext(Context.Context):
+	'''executes the build'''
+
+	cmd = 'build'
+	variant = ''
+
+	def __init__(self, **kw):
+		super(BuildContext, self).__init__(**kw)
+
+		self.is_install = 0
+		"""Non-zero value when installing or uninstalling file"""
+
+		self.top_dir = kw.get('top_dir', Context.top_dir)
+
+		self.run_dir = kw.get('run_dir', Context.run_dir)
+
+		self.post_mode = POST_AT_ONCE
+		"""post the task generators at once, group-by-group, or both"""
+
+		# output directory - may be set until the nodes are considered
+		self.out_dir = kw.get('out_dir', Context.out_dir)
+
+		self.cache_dir = kw.get('cache_dir', None)
+		if not self.cache_dir:
+			self.cache_dir = self.out_dir + os.sep + CACHE_DIR
+
+		# map names to environments, the '' must be defined
+		self.all_envs = {}
+
+		# ======================================= #
+		# cache variables
+
+		self.task_sigs = {}
+		"""Signatures of the tasks (persists between build executions)"""
+
+		self.node_deps = {}
+		"""Dict of node dependencies found by :py:meth:`waflib.Task.Task.scan` (persists between build executions)"""
+
+		self.raw_deps = {}
+		"""Dict of custom data returned by :py:meth:`waflib.Task.Task.scan` (persists between build executions)"""
+
+		# list of folders that are already scanned
+		# so that we do not need to stat them one more time
+		self.cache_dir_contents = {}
+
+		self.task_gen_cache_names = {}
+
+		self.launch_dir = Context.launch_dir
+
+		self.jobs = Options.options.jobs
+		self.targets = Options.options.targets
+		self.keep = Options.options.keep
+		self.cache_global = Options.cache_global
+		self.nocache = Options.options.nocache
+		self.progress_bar = Options.options.progress_bar
+
+		############ stuff below has not been reviewed
+
+		# Manual dependencies.
+		self.deps_man = Utils.defaultdict(list)
+		"""Manual dependencies set by :py:meth:`waflib.Build.BuildContext.add_manual_dependency`"""
+
+		# just the structure here
+		self.current_group = 0
+		"""
+		Current build group
+		"""
+
+		self.groups = []
+		"""
+		List containing lists of task generators
+		"""
+		self.group_names = {}
+		"""
+		Map group names to the group lists. See :py:meth:`waflib.Build.BuildContext.add_group`
+		"""
+
+	def get_variant_dir(self):
+		"""Getter for the variant_dir attribute"""
+		if not self.variant:
+			return self.out_dir
+		return os.path.join(self.out_dir, self.variant)
+	variant_dir = property(get_variant_dir, None)
+
+	def __call__(self, *k, **kw):
+		"""
+		Create a task generator and add it to the current build group. The following forms are equivalent::
+
+			def build(bld):
+				tg = bld(a=1, b=2)
+
+			def build(bld):
+				tg = bld()
+				tg.a = 1
+				tg.b = 2
+
+			def build(bld):
+				tg = TaskGen.task_gen(a=1, b=2)
+				bld.add_to_group(tg, None)
+
+		:param group: group name to add the task generator to
+		:type group: string
+		"""
+		kw['bld'] = self
+		ret = TaskGen.task_gen(*k, **kw)
+		self.task_gen_cache_names = {} # reset the cache, each time
+		self.add_to_group(ret, group=kw.get('group', None))
+		return ret
+
+	def __copy__(self):
+		"""Implemented to prevents copies of build contexts (raises an exception)"""
+		raise Errors.WafError('build contexts are not supposed to be copied')
+
+	def install_files(self, *k, **kw):
+		"""Actual implementation provided by :py:meth:`waflib.Build.InstallContext.install_files`"""
+		pass
+
+	def install_as(self, *k, **kw):
+		"""Actual implementation provided by :py:meth:`waflib.Build.InstallContext.install_as`"""
+		pass
+
+	def symlink_as(self, *k, **kw):
+		"""Actual implementation provided by :py:meth:`waflib.Build.InstallContext.symlink_as`"""
+		pass
+
+	def load_envs(self):
+		"""
+		The configuration command creates files of the form ``build/c4che/NAMEcache.py``. This method
+		creates a :py:class:`waflib.ConfigSet.ConfigSet` instance for each ``NAME`` by reading those
+		files. The config sets are then stored in the dict :py:attr:`waflib.Build.BuildContext.allenvs`.
+		"""
+		node = self.root.find_node(self.cache_dir)
+		if not node:
+			raise Errors.WafError('The project was not configured: run "waf configure" first!')
+		lst = node.ant_glob('**/*%s' % CACHE_SUFFIX, quiet=True)
+
+		if not lst:
+			raise Errors.WafError('The cache directory is empty: reconfigure the project')
+
+		for x in lst:
+			name = x.path_from(node).replace(CACHE_SUFFIX, '').replace('\\', '/')
+			env = ConfigSet.ConfigSet(x.abspath())
+			self.all_envs[name] = env
+			for f in env[CFG_FILES]:
+				newnode = self.root.find_resource(f)
+				try:
+					h = Utils.h_file(newnode.abspath())
+				except (IOError, AttributeError):
+					Logs.error('cannot find %r' % f)
+					h = Utils.SIG_NIL
+				newnode.sig = h
+
+	def init_dirs(self):
+		"""
+		Initialize the project directory and the build directory by creating the nodes
+		:py:attr:`waflib.Build.BuildContext.srcnode` and :py:attr:`waflib.Build.BuildContext.bldnode`
+		corresponding to ``top_dir`` and ``variant_dir`` respectively. The ``bldnode`` directory will be
+		created if it does not exist.
+		"""
+
+		if not (os.path.isabs(self.top_dir) and os.path.isabs(self.out_dir)):
+			raise Errors.WafError('The project was not configured: run "waf configure" first!')
+
+		self.path = self.srcnode = self.root.find_dir(self.top_dir)
+		self.bldnode = self.root.make_node(self.variant_dir)
+		self.bldnode.mkdir()
+
+	def execute(self):
+		"""
+		Restore the data from previous builds and call :py:meth:`waflib.Build.BuildContext.execute_build`. Overrides from :py:func:`waflib.Context.Context.execute`
+		"""
+		self.restore()
+		if not self.all_envs:
+			self.load_envs()
+
+		self.execute_build()
+
+	def execute_build(self):
+		"""
+		Execute the build by:
+
+		* reading the scripts (see :py:meth:`waflib.Context.Context.recurse`)
+		* calling :py:meth:`waflib.Build.BuildContext.pre_build` to call user build functions
+		* calling :py:meth:`waflib.Build.BuildContext.compile` to process the tasks
+		* calling :py:meth:`waflib.Build.BuildContext.post_build` to call user build functions
+		"""
+
+		Logs.info("Waf: Entering directory `%s'" % self.variant_dir)
+		self.recurse([self.run_dir])
+		self.pre_build()
+
+		# display the time elapsed in the progress bar
+		self.timer = Utils.Timer()
+
+		if self.progress_bar:
+			sys.stderr.write(Logs.colors.cursor_off)
+		try:
+			self.compile()
+		finally:
+			if self.progress_bar == 1:
+				c = len(self.returned_tasks) or 1
+				self.to_log(self.progress_line(c, c, Logs.colors.BLUE, Logs.colors.NORMAL))
+				print('')
+				sys.stdout.flush()
+				sys.stderr.write(Logs.colors.cursor_on)
+			Logs.info("Waf: Leaving directory `%s'" % self.variant_dir)
+		self.post_build()
+
+	def restore(self):
+		"""
+		Load the data from a previous run, sets the attributes listed in :py:const:`waflib.Build.SAVED_ATTRS`
+		"""
+		try:
+			env = ConfigSet.ConfigSet(os.path.join(self.cache_dir, 'build.config.py'))
+		except (IOError, OSError):
+			pass
+		else:
+			if env['version'] < Context.HEXVERSION:
+				raise Errors.WafError('Version mismatch! reconfigure the project')
+			for t in env['tools']:
+				self.setup(**t)
+
+		f = None
+		try:
+			dbfn = os.path.join(self.variant_dir, Context.DBFILE)
+			try:
+				f = open(dbfn, 'rb')
+			except (IOError, EOFError):
+				# handle missing file/empty file
+				Logs.debug('build: could not load the build cache %s (missing)' % dbfn)
+			else:
+				try:
+					waflib.Node.pickle_lock.acquire()
+					waflib.Node.Nod3 = self.node_class
+					try:
+						data = cPickle.load(f)
+					except Exception as e:
+						Logs.debug('build: could not pickle the build cache %s: %r' % (dbfn, e))
+					else:
+						for x in SAVED_ATTRS:
+							setattr(self, x, data[x])
+				finally:
+					waflib.Node.pickle_lock.release()
+		finally:
+			if f:
+				f.close()
+
+		self.init_dirs()
+
+	def store(self):
+		"""
+		Store the data for next runs, sets the attributes listed in :py:const:`waflib.Build.SAVED_ATTRS`. Uses a temporary
+		file to avoid problems on ctrl+c.
+		"""
+
+		data = {}
+		for x in SAVED_ATTRS:
+			data[x] = getattr(self, x)
+		db = os.path.join(self.variant_dir, Context.DBFILE)
+
+		try:
+			waflib.Node.pickle_lock.acquire()
+			waflib.Node.Nod3 = self.node_class
+
+			f = None
+			try:
+				f = open(db + '.tmp', 'wb')
+				cPickle.dump(data, f)
+			finally:
+				if f:
+					f.close()
+		finally:
+			waflib.Node.pickle_lock.release()
+
+		try:
+			st = os.stat(db)
+			os.unlink(db)
+			if not Utils.is_win32: # win32 has no chown but we're paranoid
+				os.chown(db + '.tmp', st.st_uid, st.st_gid)
+		except (AttributeError, OSError):
+			pass
+
+		# do not use shutil.move (copy is not thread-safe)
+		os.rename(db + '.tmp', db)
+
+	def compile(self):
+		"""
+		Run the build by creating an instance of :py:class:`waflib.Runner.Parallel`
+		The cache file is not written if the build is up to date (no task executed).
+		"""
+		Logs.debug('build: compile()')
+
+		# use another object to perform the producer-consumer logic (reduce the complexity)
+		self.producer = Runner.Parallel(self, self.jobs)
+		self.producer.biter = self.get_build_iterator()
+		self.returned_tasks = [] # not part of the API yet
+		try:
+			self.producer.start()
+		except KeyboardInterrupt:
+			self.store()
+			raise
+		else:
+			if self.producer.dirty:
+				self.store()
+
+		if self.producer.error:
+			raise Errors.BuildError(self.producer.error)
+
+	def setup(self, tool, tooldir=None, funs=None):
+		"""
+		Import waf tools, used to import those accessed during the configuration::
+
+			def configure(conf):
+				conf.load('glib2')
+
+			def build(bld):
+				pass # glib2 is imported implicitly
+
+		:param tool: tool list
+		:type tool: list
+		:param tooldir: optional tool directory (sys.path)
+		:type tooldir: list of string
+		:param funs: unused variable
+		"""
+		if isinstance(tool, list):
+			for i in tool: self.setup(i, tooldir)
+			return
+
+		module = Context.load_tool(tool, tooldir)
+		if hasattr(module, "setup"): module.setup(self)
+
+	def get_env(self):
+		"""Getter for the env property"""
+		try:
+			return self.all_envs[self.variant]
+		except KeyError:
+			return self.all_envs['']
+	def set_env(self, val):
+		"""Setter for the env property"""
+		self.all_envs[self.variant] = val
+
+	env = property(get_env, set_env)
+
+	def add_manual_dependency(self, path, value):
+		"""
+		Adds a dependency from a node object to a value::
+
+			def build(bld):
+				bld.add_manual_dependency(
+					bld.path.find_resource('wscript'),
+					bld.root.find_resource('/etc/fstab'))
+
+		:param path: file path
+		:type path: string or :py:class:`waflib.Node.Node`
+		:param value: value to depend on
+		:type value: :py:class:`waflib.Node.Node`, string, or function returning a string
+		"""
+		if isinstance(path, waflib.Node.Node):
+			node = path
+		elif os.path.isabs(path):
+			node = self.root.find_resource(path)
+		else:
+			node = self.path.find_resource(path)
+		# NOTE(review): if the path cannot be resolved, node is None and the value is
+		# keyed on id(None) - presumably callers always pass existing paths; verify
+		self.deps_man[id(node)].append(value)
+
+	def launch_node(self):
+		"""Returns the launch directory as a :py:class:`waflib.Node.Node` object"""
+		try:
+			# private cache - the launch directory does not change during a run
+			return self.p_ln
+		except AttributeError:
+			self.p_ln = self.root.find_dir(self.launch_dir)
+			return self.p_ln
+
+	def hash_env_vars(self, env, vars_lst):
+		"""
+		Hash configuration set variables::
+
+			def build(bld):
+				bld.hash_env_vars(bld.env, ['CXX', 'CC'])
+
+		:param env: Configuration Set
+		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
+		:param vars_lst: list of variables
+		:type vars_lst: list of string
+		"""
+
+		# an empty ConfigSet delegates its values to its parent
+		if not env.table:
+			env = env.parent
+			if not env:
+				return Utils.SIG_NIL
+
+		# memoize by (ConfigSet identity, variable list)
+		idx = str(id(env)) + str(vars_lst)
+		try:
+			cache = self.cache_env
+		except AttributeError:
+			cache = self.cache_env = {}
+		else:
+			try:
+				return self.cache_env[idx]
+			except KeyError:
+				pass
+
+		lst = [env[a] for a in vars_lst]
+		ret = Utils.h_list(lst)
+		Logs.debug('envhash: %s %r', Utils.to_hex(ret), lst)
+
+		cache[idx] = ret
+
+		return ret
+
+	def get_tgen_by_name(self, name):
+		"""
+		Retrieves a task generator from its name or its target name
+		the name must be unique::
+
+			def build(bld):
+				tg = bld(name='foo')
+				tg == bld.get_tgen_by_name('foo')
+
+		:param name: task generator name or target name
+		:raises: :py:class:`waflib.Errors.WafError` when no generator matches
+		"""
+		cache = self.task_gen_cache_names
+		if not cache:
+			# create the index lazily
+			for g in self.groups:
+				for tg in g:
+					try:
+						cache[tg.name] = tg
+					except AttributeError:
+						# raised if not a task generator, which should be uncommon
+						pass
+		try:
+			return cache[name]
+		except KeyError:
+			raise Errors.WafError('Could not find a task generator for the name %r' % name)
+
+	def progress_line(self, state, total, col1, col2):
+		"""
+		Compute the progress bar used by ``waf -p``
+
+		:param state: number of tasks executed so far
+		:param total: total task count
+		:param col1: color escape sequence opening the highlighted parts
+		:param col2: color escape sequence closing them
+		"""
+		n = len(str(total))
+
+		# advance the rotating progress indicator (characters kept in Utils.rot_chr)
+		Utils.rot_idx += 1
+		ind = Utils.rot_chr[Utils.rot_idx % 4]
+
+		pc = (100.*state)/total
+		eta = str(self.timer)
+		fs = "[%%%dd/%%%dd][%%s%%2d%%%%%%s][%s][" % (n, n, ind)
+		left = fs % (state, total, col1, pc, col2)
+		right = '][%s%s%s]' % (col1, eta, col2)
+
+		# the color escapes take no screen columns, hence the compensation terms
+		cols = Logs.get_term_cols() - len(left) - len(right) + 2*len(col1) + 2*len(col2)
+		if cols < 7: cols = 7
+
+		ratio = ((cols*state)//total) - 1
+
+		bar = ('='*ratio+'>').ljust(cols)
+		msg = Utils.indicator % (left, bar, right)
+
+		return msg
+
+	def declare_chain(self, *k, **kw):
+		"""
+		Wrapper for :py:func:`waflib.TaskGen.declare_chain` provided for convenience;
+		all arguments are forwarded unchanged and its return value is passed through.
+		"""
+		return TaskGen.declare_chain(*k, **kw)
+
+	def pre_build(self):
+		"""Execute user-defined methods before the build starts, see :py:meth:`waflib.Build.BuildContext.add_pre_fun`"""
+		# pre_funs is created lazily by add_pre_fun, hence the getattr default
+		for m in getattr(self, 'pre_funs', []):
+			m(self)
+
+	def post_build(self):
+		"""Executes the user-defined methods after the build is successful, see :py:meth:`waflib.Build.BuildContext.add_post_fun`"""
+		# post_funs is created lazily by add_post_fun, hence the getattr default
+		for m in getattr(self, 'post_funs', []):
+			m(self)
+
+	def add_pre_fun(self, meth):
+		"""
+		Bind a method to execute after the scripts are read and before the build starts::
+
+			def mycallback(bld):
+				print("Hello, world!")
+
+			def build(bld):
+				bld.add_pre_fun(mycallback)
+		"""
+		# EAFP: the list is created on first use
+		try:
+			self.pre_funs.append(meth)
+		except AttributeError:
+			self.pre_funs = [meth]
+
+	def add_post_fun(self, meth):
+		"""
+		Bind a method to execute immediately after the build is successful::
+
+			def call_ldconfig(bld):
+				bld.exec_command('/sbin/ldconfig')
+
+			def build(bld):
+				if bld.cmd == 'install':
+					bld.add_post_fun(call_ldconfig)
+		"""
+		# EAFP: the list is created on first use
+		try:
+			self.post_funs.append(meth)
+		except AttributeError:
+			self.post_funs = [meth]
+
+	def get_group(self, x):
+		"""
+		Get the group x, or return the current group if x is None
+
+		:param x: name or number or None
+		:type x: string, int or None
+		"""
+		if not self.groups:
+			self.add_group()
+		if x is None:
+			return self.groups[self.current_group]
+		# x may be a registered group name or a plain list index
+		if x in self.group_names:
+			return self.group_names[x]
+		return self.groups[x]
+
+	def add_to_group(self, tgen, group=None):
+		"""add a task or a task generator for the build"""
+		# paranoid
+		assert(isinstance(tgen, TaskGen.task_gen) or isinstance(tgen, Task.TaskBase))
+		# bind the build context so the task generator can reach it later
+		tgen.bld = self
+		self.get_group(group).append(tgen)
+
+	def get_group_name(self, g):
+		"""name for the group g (utility)"""
+		if not isinstance(g, list):
+			g = self.groups[g]
+		# reverse lookup in group_names by list identity; '' for anonymous groups
+		for x in self.group_names:
+			if id(self.group_names[x]) == id(g):
+				return x
+		return ''
+
+	def get_group_idx(self, tg):
+		"""
+		Index of the group containing the task generator given as argument::
+
+			def build(bld):
+				tg = bld(name='nada')
+				0 == bld.get_group_idx(tg)
+
+		:param tg: Task generator object
+		:type tg: :py:class:`waflib.TaskGen.task_gen`
+		:return: group index, or None when the generator is in no group
+		"""
+		se = id(tg)
+		for i in range(len(self.groups)):
+			for t in self.groups[i]:
+				if id(t) == se:
+					return i
+		return None
+
+	def add_group(self, name=None, move=True):
+		"""
+		Add a new group of tasks/task generators. By default the new group becomes the default group for new task generators.
+
+		:param name: name for this group
+		:type name: string
+		:param move: set the group created as default group (True by default)
+		:type move: bool
+		"""
+		#if self.groups and not self.groups[0].tasks:
+		#	error('add_group: an empty group is already present')
+		if name and name in self.group_names:
+			# a duplicate name only logs an error; the name is rebound below anyway
+			Logs.error('add_group: name %s already present' % name)
+		g = []
+		self.group_names[name] = g
+		self.groups.append(g)
+		if move:
+			self.current_group = len(self.groups) - 1
+
+	def set_group(self, idx):
+		"""
+		Set the current group to be idx: now new task generators will be added to this group by default::
+
+			def build(bld):
+				bld(rule='touch ${TGT}', target='foo.txt')
+				bld.add_group() # now the current group is 1
+				bld(rule='touch ${TGT}', target='bar.txt')
+				bld.set_group(0) # now the current group is 0
+				bld(rule='touch ${TGT}', target='truc.txt') # build truc.txt before bar.txt
+
+		:param idx: group name or group index
+		:type idx: string or int
+		"""
+		if isinstance(idx, str):
+			# resolve the name to an index by list identity
+			g = self.group_names[idx]
+			for i in range(len(self.groups)):
+				if id(g) == id(self.groups[i]):
+					self.current_group = i
+		else:
+			self.current_group = idx
+
+	def total(self):
+		"""
+		Approximate task count: this value may be inaccurate if task generators are posted lazily (see :py:attr:`waflib.Build.BuildContext.post_mode`).
+		The value :py:attr:`waflib.Runner.Parallel.total` is updated during the task execution.
+		"""
+		total = 0
+		for group in self.groups:
+			for tg in group:
+				try:
+					total += len(tg.tasks)
+				except AttributeError:
+					# objects without a 'tasks' attribute (e.g. plain tasks) count as one
+					total += 1
+		return total
+
+	def get_targets(self):
+		"""
+		Return the task generator corresponding to the 'targets' list, used by :py:meth:`waflib.Build.BuildContext.get_build_iterator`::
+
+			$ waf --targets=myprogram,myshlib
+		"""
+		to_post = []
+		# despite its name, min_grp ends up holding the *highest* group index among
+		# the requested targets; to_post collects the generators of that group
+		min_grp = 0
+		for name in self.targets.split(','):
+			tg = self.get_tgen_by_name(name)
+			# NOTE(review): get_tgen_by_name raises for unknown names (see above),
+			# so this check presumably only guards against falsy cache entries
+			if not tg:
+				raise Errors.WafError('target %r does not exist' % name)
+
+			m = self.get_group_idx(tg)
+			if m > min_grp:
+				min_grp = m
+				to_post = [tg]
+			elif m == min_grp:
+				to_post.append(tg)
+		return (min_grp, to_post)
+
+	def post_group(self):
+		"""
+		Post the task generators from the group indexed by self.cur, used by :py:meth:`waflib.Build.BuildContext.get_build_iterator`
+		"""
+		if self.targets == '*':
+			# post every task generator of the group
+			for tg in self.groups[self.cur]:
+				try:
+					f = tg.post
+				except AttributeError:
+					pass
+				else:
+					f()
+		elif self.targets:
+			# specific targets: post whole groups before the last needed one,
+			# then only the requested generators (computed by get_targets)
+			if self.cur < self._min_grp:
+				for tg in self.groups[self.cur]:
+					try:
+						f = tg.post
+					except AttributeError:
+						pass
+					else:
+						f()
+			else:
+				for tg in self._exact_tg:
+					tg.post()
+		else:
+			# default: post only the generators below the launch directory
+			ln = self.launch_node()
+			for tg in self.groups[self.cur]:
+				try:
+					f = tg.post
+				except AttributeError:
+					pass
+				else:
+					if tg.path.is_child_of(ln):
+						f()
+
+	def get_tasks_group(self, idx):
+		"""
+		Return all the tasks for the group of num idx, used by :py:meth:`waflib.Build.BuildContext.get_build_iterator`
+
+		:param idx: group index
+		:type idx: int
+		"""
+		tasks = []
+		for tg in self.groups[idx]:
+			# TODO a try-except might be more efficient
+			if isinstance(tg, Task.TaskBase):
+				# plain tasks added directly to the group
+				tasks.append(tg)
+			else:
+				tasks.extend(tg.tasks)
+		return tasks
+
+	def get_build_iterator(self):
+		"""
+		Creates a generator object that returns lists of tasks executable in parallel (yield)
+
+		:return: tasks which can be executed immediately
+		:rtype: list of :py:class:`waflib.Task.TaskBase`
+		"""
+		self.cur = 0
+
+		if self.targets and self.targets != '*':
+			(self._min_grp, self._exact_tg) = self.get_targets()
+
+		global lazy_post # NOTE(review): dead statement - 'lazy_post' is never defined or used in this module
+		if self.post_mode != POST_LAZY:
+			# post everything up front, then restart the group counter
+			while self.cur < len(self.groups):
+				self.post_group()
+				self.cur += 1
+			self.cur = 0
+
+		while self.cur < len(self.groups):
+			# first post the task generators for the group
+			if self.post_mode != POST_AT_ONCE:
+				self.post_group()
+
+			# then extract the tasks
+			tasks = self.get_tasks_group(self.cur)
+			# if the constraints are set properly (ext_in/ext_out, before/after)
+			# the call to set_file_constraints may be removed (can be a 15% penalty on no-op rebuilds)
+			# (but leave set_file_constraints for the installation step)
+			#
+			# if the tasks have only files, set_file_constraints is required but set_precedence_constraints is not necessary
+			#
+			Task.set_file_constraints(tasks)
+			Task.set_precedence_constraints(tasks)
+
+			self.cur_tasks = tasks
+			self.cur += 1
+			if not tasks: # return something else the build will stop
+				continue
+			yield tasks
+		# keep yielding empty lists so the consumer never sees StopIteration
+		while 1:
+			yield []
+
+
+	#def install_dir(self, path, env=None):
+	#	"""
+	#	Create empty folders for the installation (very rarely used) TODO
+	#	"""
+	#	return
+
+class inst(Task.Task):
+	"""
+	Special task used for installing files and symlinks, it behaves both like a task
+	and like a task generator
+	"""
+	color = 'CYAN'
+
+	def post(self):
+		"""
+		Same interface as in :py:meth:`waflib.TaskGen.task_gen.post`
+		"""
+		buf = []
+		for x in self.source:
+			if isinstance(x, waflib.Node.Node):
+				y = x
+			else:
+				y = self.path.find_resource(x)
+				if not y:
+					if Logs.verbose:
+						Logs.warn('Could not find %s immediately (may cause broken builds)' % x)
+					# the file may be created by another generator of the same
+					# group: post the siblings and retry the lookup
+					idx = self.generator.bld.get_group_idx(self)
+					for tg in self.generator.bld.groups[idx]:
+						if not isinstance(tg, inst) and id(tg) != id(self):
+							tg.post()
+						y = self.path.find_resource(x)
+						if y:
+							break
+					else:
+						# for/else: no break means the file was never found
+						raise Errors.WafError('could not find %r in %r' % (x, self.path))
+			buf.append(y)
+		self.inputs = buf
+
+	def runnable_status(self):
+		"""
+		Installation tasks are always executed, so this method returns either :py:const:`waflib.Task.ASK_LATER` or :py:const:`waflib.Task.RUN_ME`.
+		"""
+		ret = super(inst, self).runnable_status()
+		if ret == Task.SKIP_ME:
+			return Task.RUN_ME
+		return ret
+
+	def __str__(self):
+		"""Return an empty string to disable the display"""
+		return ''
+
+	def run(self):
+		"""The attribute 'exec_task' holds the method to execute"""
+		return self.generator.exec_task()
+
+	def get_install_path(self, destdir=True):
+		"""
+		Installation path obtained from ``self.dest`` and prefixed by the destdir.
+		The variables such as '${PREFIX}/bin' are substituted.
+		"""
+		dest = Utils.subst_vars(self.dest, self.env)
+		dest = dest.replace('/', os.sep)
+		if destdir and Options.options.destdir:
+			# splitdrive strips a possible Windows drive letter so the path
+			# can be re-rooted under the destdir
+			dest = os.path.join(Options.options.destdir, os.path.splitdrive(dest)[1].lstrip(os.sep))
+		return dest
+
+	def exec_install_files(self):
+		"""
+		Predefined method for installing files
+		"""
+		destpath = self.get_install_path()
+		if not destpath:
+			raise Errors.WafError('unknown installation path %r' % self.generator)
+		for x, y in zip(self.source, self.inputs):
+			if self.relative_trick:
+				# preserve the directory hierarchy relative to self.path
+				destfile = os.path.join(destpath, y.path_from(self.path))
+				Utils.check_dir(os.path.dirname(destfile))
+			else:
+				destfile = os.path.join(destpath, y.name)
+			self.generator.bld.do_install(y.abspath(), destfile, self.chmod)
+
+	def exec_install_as(self):
+		"""
+		Predefined method for installing one file with a given name
+		"""
+		destfile = self.get_install_path()
+		self.generator.bld.do_install(self.inputs[0].abspath(), destfile, self.chmod)
+
+	def exec_symlink_as(self):
+		"""
+		Predefined method for installing a symlink
+		"""
+		destfile = self.get_install_path()
+		self.generator.bld.do_link(self.link, destfile)
+
+class InstallContext(BuildContext):
+	'''installs the targets on the system'''
+	cmd = 'install'
+
+	def __init__(self, **kw):
+		super(InstallContext, self).__init__(**kw)
+
+		# list of targets to uninstall for removing the empty folders after uninstalling
+		self.uninstall = []
+		self.is_install = INSTALL
+
+	def do_install(self, src, tgt, chmod=Utils.O644):
+		"""
+		Copy a file from src to tgt with given file permissions. The actual copy is not performed
+		if the source and target file have the same size and the same timestamps. When the copy occurs,
+		the file is first removed and then copied (prevent stale inodes).
+
+		This method is overridden in :py:meth:`waflib.Build.UninstallContext.do_install` to remove the file.
+
+		:param src: file name as absolute path
+		:type src: string
+		:param tgt: file destination, as absolute path
+		:type tgt: string
+		:param chmod: installation mode
+		:type chmod: int
+		"""
+		d, _ = os.path.split(tgt)
+		if not d:
+			raise Errors.WafError('Invalid installation given %r->%r' % (src, tgt))
+		Utils.check_dir(d)
+
+		srclbl = src.replace(self.srcnode.abspath() + os.sep, '')
+		if not Options.options.force:
+			# check if the file is already there to avoid a copy
+			try:
+				st1 = os.stat(tgt)
+				st2 = os.stat(src)
+			except OSError:
+				pass
+			else:
+				# same size and identical timestamps -> make no copy
+				if st1.st_mtime + 2 >= st2.st_mtime and st1.st_size == st2.st_size:
+					if not self.progress_bar:
+						Logs.info('- install %s (from %s)' % (tgt, srclbl))
+					return False
+
+		if not self.progress_bar:
+			Logs.info('+ install %s (from %s)' % (tgt, srclbl))
+
+		# following is for shared libs and stale inodes (-_-)
+		try:
+			os.remove(tgt)
+		except OSError:
+			pass
+
+		try:
+			shutil.copy2(src, tgt)
+			os.chmod(tgt, chmod)
+		except IOError:
+			try:
+				os.stat(src)
+			except (OSError, IOError):
+				Logs.error('File %r does not exist' % src)
+			raise Errors.WafError('Could not install the file %r' % tgt)
+
+	def do_link(self, src, tgt):
+		"""
+		Create a symlink from tgt to src.
+
+		This method is overridden in :py:meth:`waflib.Build.UninstallContext.do_link` to remove the symlink.
+
+		:param src: file name as absolute path
+		:type src: string
+		:param tgt: file destination, as absolute path
+		:type tgt: string
+		"""
+		d, _ = os.path.split(tgt)
+		Utils.check_dir(d)
+
+		link = False
+		if not os.path.islink(tgt):
+			link = True
+		elif os.readlink(tgt) != src:
+			link = True
+
+		if link:
+			try: os.remove(tgt)
+			except OSError: pass
+			if not self.progress_bar:
+				Logs.info('+ symlink %s (to %s)' % (tgt, src))
+			os.symlink(src, tgt)
+		else:
+			if not self.progress_bar:
+				Logs.info('- symlink %s (to %s)' % (tgt, src))
+
+	def run_task_now(self, tsk, postpone):
+		"""
+		This method is called by :py:meth:`waflib.Build.InstallContext.install_files`,
+		:py:meth:`waflib.Build.InstallContext.install_as` and :py:meth:`waflib.Build.InstallContext.symlink_as` immediately
+		after the installation task is created. Its role is to force the immediate execution if necessary, that is when
+		``postpone=False`` was given.
+		"""
+		tsk.post()
+		if not postpone:
+			if tsk.runnable_status() == Task.ASK_LATER:
+				raise self.WafError('cannot post the task %r' % tsk)
+			tsk.run()
+
+	def install_files(self, dest, files, env=None, chmod=Utils.O644, relative_trick=False, cwd=None, add=True, postpone=True):
+		"""
+		Create a task to install files on the system::
+
+			def build(bld):
+				bld.install_files('${DATADIR}', self.path.find_resource('wscript'))
+
+		:param dest: absolute path of the destination directory
+		:type dest: string
+		:param files: input files
+		:type files: list of strings or list of nodes
+		:param env: configuration set for performing substitutions in dest
+		:type env: Configuration set
+		:param relative_trick: preserve the folder hierarchy when installing whole folders
+		:type relative_trick: bool
+		:param cwd: parent node for searching srcfile, when srcfile is not a :py:class:`waflib.Node.Node`
+		:type cwd: :py:class:`waflib.Node.Node`
+		:param add: add the task created to a build group - set ``False`` only if the installation task is created after the build has started
+		:type add: bool
+		:param postpone: execute the task immediately to perform the installation
+		:type postpone: bool
+		"""
+		tsk = inst(env=env or self.env)
+		tsk.bld = self
+		tsk.path = cwd or self.path
+		tsk.chmod = chmod
+		if isinstance(files, waflib.Node.Node):
+			tsk.source =  [files]
+		else:
+			tsk.source = Utils.to_list(files)
+		tsk.dest = dest
+		tsk.exec_task = tsk.exec_install_files
+		tsk.relative_trick = relative_trick
+		if add: self.add_to_group(tsk)
+		self.run_task_now(tsk, postpone)
+		return tsk
+
+	def install_as(self, dest, srcfile, env=None, chmod=Utils.O644, cwd=None, add=True, postpone=True):
+		"""
+		Create a task to install a file on the system with a different name::
+
+			def build(bld):
+				bld.install_as('${PREFIX}/bin', 'myapp', chmod=Utils.O755)
+
+		:param dest: absolute path of the destination file
+		:type dest: string
+		:param srcfile: input file
+		:type srcfile: string or node
+		:param cwd: parent node for searching srcfile, when srcfile is not a :py:class:`waflib.Node.Node`
+		:type cwd: :py:class:`waflib.Node.Node`
+		:param env: configuration set for performing substitutions in dest
+		:type env: Configuration set
+		:param add: add the task created to a build group - set ``False`` only if the installation task is created after the build has started
+		:type add: bool
+		:param postpone: execute the task immediately to perform the installation
+		:type postpone: bool
+		"""
+		tsk = inst(env=env or self.env)
+		tsk.bld = self
+		tsk.path = cwd or self.path
+		tsk.chmod = chmod
+		tsk.source = [srcfile]
+		tsk.dest = dest
+		tsk.exec_task = tsk.exec_install_as
+		if add: self.add_to_group(tsk)
+		self.run_task_now(tsk, postpone)
+		return tsk
+
+	def symlink_as(self, dest, src, env=None, cwd=None, add=True, postpone=True):
+		"""
+		Create a task to install a symlink::
+
+			def build(bld):
+				bld.symlink_as('${PREFIX}/lib/libfoo.so', 'libfoo.so.1.2.3')
+
+		:param dest: absolute path of the symlink
+		:type dest: string
+		:param src: absolute or relative path of the link
+		:type src: string
+		:param env: configuration set for performing substitutions in dest
+		:type env: Configuration set
+		:param add: add the task created to a build group - set ``False`` only if the installation task is created after the build has started
+		:type add: bool
+		:param postpone: execute the task immediately to perform the installation
+		:type postpone: bool
+		"""
+
+		if Utils.is_win32:
+			# symlinks *cannot* work on that platform
+			return
+
+		tsk = inst(env=env or self.env)
+		tsk.bld = self
+		tsk.dest = dest
+		tsk.path = cwd or self.path
+		tsk.source = []
+		tsk.link = src
+		tsk.exec_task = tsk.exec_symlink_as
+		if add: self.add_to_group(tsk)
+		self.run_task_now(tsk, postpone)
+		return tsk
+
+class UninstallContext(InstallContext):
+	'''removes the targets installed'''
+	cmd = 'uninstall'
+
+	def __init__(self, **kw):
+		super(UninstallContext, self).__init__(**kw)
+		self.is_install = UNINSTALL
+
+	def do_install(self, src, tgt, chmod=Utils.O644):
+		"""See :py:meth:`waflib.Build.InstallContext.do_install`"""
+		if not self.progress_bar:
+			Logs.info('- remove %s' % tgt)
+
+		self.uninstall.append(tgt)
+		try:
+			os.remove(tgt)
+		except OSError as e:
+			# NOTE(review): 'errno' must be imported at the top of this module - verify
+			if e.errno != errno.ENOENT:
+				if not getattr(self, 'uninstall_error', None):
+					self.uninstall_error = True
+					Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)')
+				if Logs.verbose > 1:
+					Logs.warn('could not remove %s (error code %r)' % (e.filename, e.errno))
+
+		# TODO ita refactor this into a post build action to uninstall the folders (optimization)
+		# remove now-empty parent directories; rmdir fails on the first non-empty one
+		while tgt:
+			tgt = os.path.dirname(tgt)
+			try:
+				os.rmdir(tgt)
+			except OSError:
+				break
+
+	def do_link(self, src, tgt):
+		"""See :py:meth:`waflib.Build.InstallContext.do_link`"""
+		try:
+			if not self.progress_bar:
+				Logs.info('- unlink %s' % tgt)
+			os.remove(tgt)
+		except OSError:
+			pass
+
+		# TODO ita refactor this into a post build action to uninstall the folders (optimization)?
+		while tgt:
+			tgt = os.path.dirname(tgt)
+			try:
+				os.rmdir(tgt)
+			except OSError:
+				break
+
+	def execute(self):
+		"""
+		See :py:func:`waflib.Context.Context.execute`
+		"""
+		try:
+			# do not execute any tasks: temporarily monkey-patch Task.Task so every
+			# task reports SKIP_ME (inst.runnable_status turns that into RUN_ME)
+			def runnable_status(self):
+				return Task.SKIP_ME
+			setattr(Task.Task, 'runnable_status_back', Task.Task.runnable_status)
+			setattr(Task.Task, 'runnable_status', runnable_status)
+
+			super(UninstallContext, self).execute()
+		finally:
+			# always restore the original method, even on error
+			setattr(Task.Task, 'runnable_status', Task.Task.runnable_status_back)
+
+class CleanContext(BuildContext):
+	'''cleans the project'''
+	cmd = 'clean'
+	def execute(self):
+		"""
+		See :py:func:`waflib.Context.Context.execute`
+		"""
+		self.restore()
+		if not self.all_envs:
+			self.load_envs()
+
+		self.recurse([self.run_dir])
+		try:
+			self.clean()
+		finally:
+			# always persist the (now reset) caches
+			self.store()
+
+	def clean(self):
+		"""clean the data and some files in the build dir .. well, TODO"""
+		Logs.debug('build: clean called')
+
+		if self.bldnode != self.srcnode:
+			# would lead to a disaster if top == out
+			# CFG_FILES lists configuration outputs that must survive a clean
+			lst = [self.root.find_or_declare(f) for f in self.env[CFG_FILES]]
+			for n in self.bldnode.ant_glob('**/*', excl='lock* *conf_check_*/** config.log c4che/*', quiet=True):
+				if n in lst:
+					continue
+				n.delete()
+		self.root.children = {}
+
+		# reset the dependency/signature caches
+		for v in 'node_deps task_sigs raw_deps'.split():
+			setattr(self, v, {})
+
+class ListContext(BuildContext):
+	'''lists the targets to execute'''
+
+	cmd = 'list'
+	def execute(self):
+		"""
+		See :py:func:`waflib.Context.Context.execute`.
+		"""
+		self.restore()
+		if not self.all_envs:
+			self.load_envs()
+
+		self.recurse([self.run_dir])
+		self.pre_build()
+
+		# display the time elapsed in the progress bar
+		self.timer = Utils.Timer()
+
+		for g in self.groups:
+			for tg in g:
+				try:
+					f = tg.post
+				except AttributeError:
+					pass
+				else:
+					f()
+
+		try:
+			# force the cache initialization
+			self.get_tgen_by_name('')
+		except:
+			pass
+		lst = list(self.task_gen_cache_names.keys())
+		lst.sort()
+		for k in lst:
+			Logs.pprint('GREEN', k)
+
+class StepContext(BuildContext):
+	'''executes tasks in a step-by-step fashion, for debugging'''
+	cmd = 'step'
+
+	def __init__(self, **kw):
+		super(StepContext, self).__init__(**kw)
+		# comma-separated patterns from 'waf step --files=...'
+		self.files = Options.options.files
+
+	def compile(self):
+		"""
+		Compile the tasks matching the input/output files given (regular expression matching). Derived from :py:meth:`waflib.Build.BuildContext.compile`::
+
+			$ waf step --files=foo.c,bar.c,in:truc.c,out:bar.o
+			$ waf step --files=in:foo.cpp.1.o # link task only
+
+		"""
+		if not self.files:
+			Logs.warn('Add a pattern for the debug build, for example "waf step --files=main.c,app"')
+			# no pattern: fall back to the normal build
+			BuildContext.compile(self)
+			return
+
+		for g in self.groups:
+			for tg in g:
+				try:
+					f = tg.post
+				except AttributeError:
+					pass
+				else:
+					f()
+
+			# run matching tasks group by group, pattern by pattern
+			for pat in self.files.split(','):
+				matcher = self.get_matcher(pat)
+				for tg in g:
+					if isinstance(tg, Task.TaskBase):
+						lst = [tg]
+					else:
+						lst = tg.tasks
+					for tsk in lst:
+						do_exec = False
+						for node in getattr(tsk, 'inputs', []):
+							if matcher(node, output=False):
+								do_exec = True
+								break
+						for node in getattr(tsk, 'outputs', []):
+							if matcher(node, output=True):
+								do_exec = True
+								break
+						if do_exec:
+							ret = tsk.run()
+							Logs.info('%s -> exit %r' % (str(tsk), ret))
+
+	def get_matcher(self, pat):
+		# this returns a function deciding whether a node matches the pattern;
+		# 'in:'/'out:' prefixes restrict matching to task inputs/outputs
+		inn = True
+		out = True
+		if pat.startswith('in:'):
+			out = False
+			# NOTE(review): str.replace removes *every* occurrence, not just the
+			# prefix - harmless unless the pattern itself contains 'in:'
+			pat = pat.replace('in:', '')
+		elif pat.startswith('out:'):
+			inn = False
+			pat = pat.replace('out:', '')
+
+		# exact node match when the pattern names an existing file, regex otherwise
+		anode = self.root.find_node(pat)
+		pattern = None
+		if not anode:
+			if not pat.startswith('^'):
+				pat = '^.+?%s' % pat
+			if not pat.endswith('$'):
+				pat = '%s$' % pat
+			pattern = re.compile(pat)
+
+		def match(node, output):
+			if output == True and not out:
+				return False
+			if output == False and not inn:
+				return False
+
+			if anode:
+				return anode == node
+			else:
+				return pattern.match(node.abspath())
+		return match
+
+# wrap (un)pickling of the caches - Utils.nogc presumably disables the garbage
+# collector around the call for speed; see waflib.Utils
+BuildContext.store = Utils.nogc(BuildContext.store)
+BuildContext.restore = Utils.nogc(BuildContext.restore)
+
diff --git a/waflib/ConfigSet.py b/waflib/ConfigSet.py
new file mode 100644
index 0000000..052c053
--- /dev/null
+++ b/waflib/ConfigSet.py
@@ -0,0 +1,337 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2010 (ita)
+
+"""
+
+ConfigSet: a special dict
+
+The values put in :py:class:`ConfigSet` must be lists
+"""
+
+import copy, re, os
+from waflib import Logs, Utils
+re_imp = re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$', re.M)
+
+class ConfigSet(object):
+	"""
+	A dict that honor serialization and parent relationships. The serialization format
+	is human-readable (python-like) and performed by using eval() and repr().
+	For high performance prefer pickle. Do not store functions as they are not serializable.
+
+	The values can be accessed by attributes or by keys::
+
+		from waflib.ConfigSet import ConfigSet
+		env = ConfigSet()
+		env.FOO = 'test'
+		env['FOO'] = 'test'
+	"""
+	__slots__ = ('table', 'parent')
+	def __init__(self, filename=None):
+		self.table = {}
+		"""
+		Internal dict holding the object values
+		"""
+		#self.parent = None
+
+		if filename:
+			self.load(filename)
+
+	def __contains__(self, key):
+		"""
+		Enable the *in* syntax::
+
+			if 'foo' in env:
+				print env['foo']
+		"""
+		if key in self.table: return True
+		try: return self.parent.__contains__(key)
+		except AttributeError: return False # parent may not exist
+
+	def keys(self):
+		"""Dict interface (unknown purpose)"""
+		# union of the keys of self and all its ancestors, sorted
+		keys = set()
+		cur = self
+		while cur:
+			keys.update(cur.table.keys())
+			cur = getattr(cur, 'parent', None)
+		keys = list(keys)
+		keys.sort()
+		return keys
+
+	def __str__(self):
+		"""Text representation of the ConfigSet (for debugging purposes)"""
+		return "\n".join(["%r %r" % (x, self.__getitem__(x)) for x in self.keys()])
+
+	def __getitem__(self, key):
+		"""
+		Dictionary interface: get value from key::
+
+			def configure(conf):
+				conf.env['foo'] = {}
+				print(env['foo'])
+		"""
+		# walk up the parent chain; a missing key yields [] rather than raising
+		try:
+			while 1:
+				x = self.table.get(key, None)
+				if not x is None:
+					return x
+				self = self.parent
+		except AttributeError:
+			return []
+
+	def __setitem__(self, key, value):
+		"""
+		Dictionary interface: set the value for a key (always on self, never the parent)
+		"""
+		self.table[key] = value
+
+	def __delitem__(self, key):
+		"""
+		Dictionary interface: reset a value to the empty list (entries are never truly removed)
+		"""
+		self[key] = []
+
+	def __getattr__(self, name):
+		"""
+		Attribute access provided for convenience. The following forms are equivalent::
+
+			def configure(conf):
+				conf.env.value
+				conf.env['value']
+		"""
+		if name in self.__slots__:
+			# NOTE(review): object has no __getattr__, so this lookup itself raises
+			# AttributeError - the desired net effect for an unset slot, but the
+			# error message is misleading; verify against later waf versions
+			return object.__getattr__(self, name)
+		else:
+			return self[name]
+
+	def __setattr__(self, name, value):
+		"""
+		Attribute access provided for convenience. The following forms are equivalent::
+
+			def configure(conf):
+				conf.env.value = x
+				env['value'] = x
+		"""
+		if name in self.__slots__:
+			object.__setattr__(self, name, value)
+		else:
+			self[name] = value
+
+	def __delattr__(self, name):
+		"""
+		Attribute access provided for convenience. The following forms are equivalent::
+
+			def configure(conf):
+				del env.value
+				del env['value']
+		"""
+		if name in self.__slots__:
+			object.__delattr__(self, name)
+		else:
+			del self[name]
+
+	def derive(self):
+		"""
+		Returns a new ConfigSet deriving from self. The copy returned
+		will be a shallow copy::
+
+			from waflib.ConfigSet import ConfigSet
+			env = ConfigSet()
+			env.append_value('CFLAGS', ['-O2'])
+			child = env.derive()
+			child.CFLAGS.append('test') # warning! this will modify 'env'
+			child.CFLAGS = ['-O3'] # new list, ok
+			child.append_value('CFLAGS', ['-O3']) # ok
+
+		Use :py:func:`ConfigSet.detach` to detach the child from the parent.
+		"""
+		newenv = ConfigSet()
+		newenv.parent = self
+		return newenv
+
+	def detach(self):
+		"""
+		Detach self from its parent (if existing)
+
+		Modifying the parent :py:class:`ConfigSet` will not change the current object
+		Modifying this :py:class:`ConfigSet` will not modify the parent one.
+		"""
+		tbl = self.get_merged_dict()
+		try:
+			delattr(self, 'parent')
+		except AttributeError:
+			pass
+		else:
+			# deep-copy the merged values so they no longer alias the parent's
+			keys = tbl.keys()
+			for x in keys:
+				tbl[x] = copy.deepcopy(tbl[x])
+			self.table = tbl
+
+	def get_flat(self, key):
+		"""
+		Return a value as a string. If the input is a list, the value returned is space-separated.
+
+		:param key: key to use
+		:type key: string
+		"""
+		s = self[key]
+		if isinstance(s, str): return s
+		return ' '.join(s)
+
+	def _get_list_value_for_modification(self, key):
+		"""
+		Return a list value for further modification.
+
+		The list may be modified inplace and there is no need to do this afterwards::
+
+			self.table[var] = value
+		"""
+		try:
+			value = self.table[key]
+		except KeyError:
+			# the value comes from a parent (or is absent): copy it into self
+			# so in-place modification does not leak into the parent
+			try: value = self.parent[key]
+			except AttributeError: value = []
+			if isinstance(value, list):
+				value = value[:]
+			else:
+				value = [value]
+		else:
+			if not isinstance(value, list):
+				value = [value]
+		self.table[key] = value
+		return value
+
+	def append_value(self, var, val):
+		"""
+		Appends a value to the specified config key::
+
+			def build(bld):
+				bld.env.append_value('CFLAGS', ['-O2'])
+
+		The value must be a list or a tuple
+		"""
+		current_value = self._get_list_value_for_modification(var)
+		if isinstance(val, str): # if there were string everywhere we could optimize this
+			val = [val]
+		current_value.extend(val)
+
+	def prepend_value(self, var, val):
+		"""
+		Prepends a value to the specified item::
+
+			def configure(conf):
+				conf.env.prepend_value('CFLAGS', ['-O2'])
+
+		The value must be a list or a tuple
+		"""
+		if isinstance(val, str):
+			val = [val]
+		self.table[var] =  val + self._get_list_value_for_modification(var)
+
+	def append_unique(self, var, val):
+		"""
+		Append a value to the specified item only if it's not already present::
+
+			def build(bld):
+				bld.env.append_unique('CFLAGS', ['-O2', '-g'])
+
+		The value must be a list or a tuple
+		"""
+		if isinstance(val, str):
+			val = [val]
+		current_value = self._get_list_value_for_modification(var)
+
+		for x in val:
+			if x not in current_value:
+				current_value.append(x)
+
+	def get_merged_dict(self):
+		"""
+		Compute the merged dictionary from the fusion of self and all its parent
+
+		:rtype: a ConfigSet object
+		"""
+		# ancestors first so that children override their parents
+		table_list = []
+		env = self
+		while 1:
+			table_list.insert(0, env.table)
+			try: env = env.parent
+			except AttributeError: break
+		merged_table = {}
+		for table in table_list:
+			merged_table.update(table)
+		return merged_table
+
+	def store(self, filename):
+		"""
+		Write the :py:class:`ConfigSet` data into a file. See :py:meth:`ConfigSet.load` for reading such files.
+
+		:param filename: file to use
+		:type filename: string
+		"""
+		try:
+			os.makedirs(os.path.split(filename)[0])
+		except OSError:
+			pass
+
+		f = None
+		try:
+			f = open(filename, 'w')
+			merged_table = self.get_merged_dict()
+			keys = list(merged_table.keys())
+			keys.sort()
+			for k in keys:
+				# 'undo_stack' is transient state kept by stash() - never serialize it
+				if k != 'undo_stack':
+					f.write('%s = %r\n' % (k, merged_table[k]))
+		finally:
+			if f:
+				f.close()
+
+	def load(self, filename):
+		"""
+		Retrieve the :py:class:`ConfigSet` data from a file. See :py:meth:`ConfigSet.store` for writing such files
+
+		:param filename: file to use
+		:type filename: string
+		"""
+		tbl = self.table
+		code = Utils.readf(filename)
+		for m in re_imp.finditer(code):
+			g = m.group
+			# SECURITY NOTE: the stored values are rebuilt with eval();
+			# only ever load files written by a trusted store()
+			tbl[g(2)] = eval(g(3))
+		Logs.debug('env: %s' % str(self.table))
+
+	def update(self, d):
+		"""
+		Dictionary interface: replace values from another dict
+
+		:param d: object to use the value from
+		:type d: dict-like object
+		"""
+		for k, v in d.items():
+			self[k] = v
+
+	def stash(self):
+		"""
+		Store the object state, to provide a kind of transaction support::
+
+			env = ConfigSet()
+			env.stash()
+			try:
+				env.append_value('CFLAGS', '-O3')
+				call_some_method(env)
+			finally:
+				env.revert()
+
+		The history is kept in a stack, and is lost during the serialization by :py:meth:`ConfigSet.store`
+		"""
+		# 'undo_stack' is not in __slots__, so __setattr__/__getattr__ route it
+		# through self.table; store() skips that key on purpose (see above)
+		self.undo_stack = self.undo_stack + [self.table]
+		self.table = self.table.copy()
+
+	def revert(self):
+		"""
+		Reverts the object to a previous state. See :py:meth:`ConfigSet.stash`
+		"""
+		self.table = self.undo_stack.pop(-1)
+
diff --git a/waflib/Configure.py b/waflib/Configure.py
new file mode 100644
index 0000000..eb1e4d4
--- /dev/null
+++ b/waflib/Configure.py
@@ -0,0 +1,570 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2010 (ita)
+
+"""
+Configuration system
+
+A :py:class:`waflib.Configure.ConfigurationContext` instance is created when ``waf configure`` is called, it is used to:
+
+* create data dictionaries (ConfigSet instances)
+* store the list of modules to import
+* hold configuration routines such as ``find_program``, etc
+"""
+
+import os, shlex, sys, time
+from waflib import ConfigSet, Utils, Options, Logs, Context, Build, Errors
+
+try:
+	from urllib import request
+except:
+	from urllib import urlopen
+else:
+	urlopen = request.urlopen
+
+BREAK    = 'break'
+"""In case of a configuration error, break"""
+
+CONTINUE = 'continue'
+"""In case of a configuration error, continue"""
+
+WAF_CONFIG_LOG = 'config.log'
+"""Name of the configuration log file"""
+
+autoconfig = False
+"""Execute the configuration automatically"""
+
+conf_template = '''# project %(app)s configured on %(now)s by
+# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s)
+# using %(args)s
+#'''
+
+def download_check(node):
+	"""
+	Hook to check for the tools which are downloaded. Replace with your function if necessary.
+	"""
+	# intentionally a no-op; override to validate downloaded files (checksums, signatures)
+	pass
+
+def download_tool(tool, force=False, ctx=None):
+	"""
+	Download a Waf tool from the remote repository defined in :py:const:`waflib.Context.remote_repo`::
+
+		$ waf configure --download
+	"""
+	# try every (repository, subdirectory) combination until one import works
+	for x in Utils.to_list(Context.remote_repo):
+		for sub in Utils.to_list(Context.remote_locs):
+			url = '/'.join((x, sub, tool + '.py'))
+			try:
+				web = urlopen(url)
+				try:
+					if web.getcode() != 200:
+						continue
+				except AttributeError:
+					pass
+			except Exception:
+				# on python3 urlopen throws an exception
+				# python 2.3 does not have getcode and throws an exception to fail
+				continue
+			else:
+				# write the tool into the running waf's own waflib/extras directory
+				tmp = ctx.root.make_node(os.sep.join((Context.waf_dir, 'waflib', 'extras', tool + '.py')))
+				tmp.write(web.read())
+				Logs.warn('Downloaded %s from %s' % (tool, url))
+				download_check(tmp)
+				try:
+					module = Context.load_tool(tool)
+				except:
+					# not importable: remove the broken download and try the next location
+					Logs.warn('The tool %s from %s is unusable' % (tool, url))
+					try:
+						tmp.delete()
+					except:
+						pass
+					continue
+				return module
+	raise Errors.WafError('Could not load the Waf tool')
+
+class ConfigurationContext(Context.Context):
+	'''configures the project'''
+
+	cmd = 'configure'
+
+	error_handlers = []
+	"""
+	Additional functions to handle configuration errors
+	"""
+
+	def __init__(self, **kw):
+		super(ConfigurationContext, self).__init__(**kw)
+		self.environ = dict(os.environ)
+		self.all_envs = {}
+
+		self.top_dir = None
+		self.out_dir = None
+
+		self.tools = [] # tools loaded in the configuration, and that will be loaded when building
+
+		self.hash = 0
+		self.files = []
+
+		self.tool_cache = []
+
+		self.setenv('')
+
+	def setenv(self, name, env=None):
+		"""
+		Set a new config set for conf.env. If a config set of that name already exists,
+		recall it without modification.
+
+		The name is the filename prefix to save to ``c4che/NAME_cache.py``, and it
+		is also used as *variants* by the build commands.
+		Though related to variants, whatever kind of data may be stored in the config set::
+
+			def configure(cfg):
+				cfg.env.ONE = 1
+				cfg.setenv('foo')
+				cfg.env.ONE = 2
+
+			def build(bld):
+				2 == bld.env_of_name('foo').ONE
+
+		:param name: name of the configuration set
+		:type name: string
+		:param env: ConfigSet to copy, or an empty ConfigSet is created
+		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
+		"""
+		if name not in self.all_envs or env:
+			if not env:
+				env = ConfigSet.ConfigSet()
+				self.prepare_env(env)
+			else:
+				env = env.derive()
+			self.all_envs[name] = env
+		self.variant = name
+
+	def get_env(self):
+		"""Getter for the env property"""
+		return self.all_envs[self.variant]
+	def set_env(self, val):
+		"""Setter for the env property"""
+		self.all_envs[self.variant] = val
+
+	env = property(get_env, set_env)
+
+	def init_dirs(self):
+		"""
+		Initialize the project directory and the build directory
+		"""
+
+		top = self.top_dir
+		if not top:
+			top = Options.options.top
+		if not top:
+			top = getattr(Context.g_module, Context.TOP, None)
+		if not top:
+			top = self.path.abspath()
+		top = os.path.abspath(top)
+
+		self.srcnode = (os.path.isabs(top) and self.root or self.path).find_dir(top)
+		assert(self.srcnode)
+
+		out = self.out_dir
+		if not out:
+			out = Options.options.out
+		if not out:
+			out = getattr(Context.g_module, Context.OUT, None)
+		if not out:
+			out = Options.lockfile.replace('.lock-waf_%s_' % sys.platform, '').replace('.lock-waf', '')
+
+		self.bldnode = (os.path.isabs(out) and self.root or self.path).make_node(out)
+		self.bldnode.mkdir()
+
+		if not os.path.isdir(self.bldnode.abspath()):
+			# was 'conf.fatal' in upstream waf 1.6.11: 'conf' is undefined in this
+			# scope, so the error path itself raised a NameError
+			self.fatal('could not create the build directory %s' % self.bldnode.abspath())
+
+	def execute(self):
+		"""
+		See :py:func:`waflib.Context.Context.execute`
+		"""
+		self.init_dirs()
+
+		self.cachedir = self.bldnode.make_node(Build.CACHE_DIR)
+		self.cachedir.mkdir()
+
+		path = os.path.join(self.bldnode.abspath(), WAF_CONFIG_LOG)
+		self.logger = Logs.make_logger(path, 'cfg')
+
+		app = getattr(Context.g_module, 'APPNAME', '')
+		if app:
+			ver = getattr(Context.g_module, 'VERSION', '')
+			if ver:
+				app = "%s (%s)" % (app, ver)
+
+		now = time.ctime()
+		pyver = sys.hexversion
+		systype = sys.platform
+		args = " ".join(sys.argv)
+		wafver = Context.WAFVERSION
+		abi = Context.ABI
+		self.to_log(conf_template % vars())
+
+		self.msg('Setting top to', self.srcnode.abspath())
+		self.msg('Setting out to', self.bldnode.abspath())
+
+		if id(self.srcnode) == id(self.bldnode):
+			Logs.warn('Setting top == out (remember to use "update_outputs")')
+		elif id(self.path) != id(self.srcnode):
+			if self.srcnode.is_child_of(self.path):
+				Logs.warn('Are you certain that you do not want to set top="." ?')
+
+		super(ConfigurationContext, self).execute()
+
+		self.store()
+
+		Context.top_dir = self.srcnode.abspath()
+		Context.out_dir = self.bldnode.abspath()
+
+		# this will write a configure lock so that subsequent builds will
+		# consider the current path as the root directory (see prepare_impl).
+		# to remove: use 'waf distclean'
+		env = ConfigSet.ConfigSet()
+		env['argv'] = sys.argv
+		env['options'] = Options.options.__dict__
+
+		env.run_dir = Context.run_dir
+		env.top_dir = Context.top_dir
+		env.out_dir = Context.out_dir
+
+		# conf.hash & conf.files hold wscript files paths and hash
+		# (used only by Configure.autoconfig)
+		env['hash'] = self.hash
+		env['files'] = self.files
+		env['environ'] = dict(self.environ)
+
+		if not self.env.NO_LOCK_IN_RUN:
+			env.store(Context.run_dir + os.sep + Options.lockfile)
+		if not self.env.NO_LOCK_IN_TOP:
+			env.store(Context.top_dir + os.sep + Options.lockfile)
+		if not self.env.NO_LOCK_IN_OUT:
+			env.store(Context.out_dir + os.sep + Options.lockfile)
+
+	def prepare_env(self, env):
+		"""
+		Insert *PREFIX*, *BINDIR* and *LIBDIR* values into ``env``
+
+		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
+		:param env: a ConfigSet, usually ``conf.env``
+		"""
+		if not env.PREFIX:
+			env.PREFIX = os.path.abspath(os.path.expanduser(Options.options.prefix))
+		if not env.BINDIR:
+			env.BINDIR = Utils.subst_vars('${PREFIX}/bin', env)
+		if not env.LIBDIR:
+			env.LIBDIR = Utils.subst_vars('${PREFIX}/lib', env)
+
+	def store(self):
+		"""Save the config results into the cache file"""
+		n = self.cachedir.make_node('build.config.py')
+		n.write('version = 0x%x\ntools = %r\n' % (Context.HEXVERSION, self.tools))
+
+		if not self.all_envs:
+			self.fatal('nothing to store in the configuration context!')
+
+		for key in self.all_envs:
+			tmpenv = self.all_envs[key]
+			tmpenv.store(os.path.join(self.cachedir.abspath(), key + Build.CACHE_SUFFIX))
+
+	def load(self, input, tooldir=None, funs=None, download=True):
+		"""
+		Load Waf tools, which will be imported whenever a build is started.
+
+		:param input: waf tools to import
+		:type input: list of string
+		:param tooldir: paths for the imports
+		:type tooldir: list of string
+		:param funs: functions to execute from the waf tools
+		:type funs: list of string
+		:param download: whether to download the tool from the waf repository
+		:type download: bool
+		"""
+
+		tools = Utils.to_list(input)
+		if tooldir: tooldir = Utils.to_list(tooldir)
+		for tool in tools:
+			# avoid loading the same tool more than once with the same functions
+			# used by composite projects
+
+			mag = (tool, id(self.env), funs)
+			if mag in self.tool_cache:
+				self.to_log('(tool %s is already loaded, skipping)' % tool)
+				continue
+			self.tool_cache.append(mag)
+
+			module = None
+			try:
+				module = Context.load_tool(tool, tooldir)
+			except ImportError as e:
+				if Options.options.download:
+					module = download_tool(tool, ctx=self)
+					if not module:
+						self.fatal('Could not load the Waf tool %r or download a suitable replacement from the repository (sys.path %r)\n%s' % (tool, sys.path, e))
+				else:
+					self.fatal('Could not load the Waf tool %r from %r (try the --download option?):\n%s' % (tool, sys.path, e))
+			except Exception as e:
+				self.to_log('imp %r (%r & %r)' % (tool, tooldir, funs))
+				self.to_log(Utils.ex_stack())
+				raise
+
+			if funs is not None:
+				self.eval_rules(funs)
+			else:
+				func = getattr(module, 'configure', None)
+				if func:
+					if type(func) is type(Utils.readf): func(self)
+					else: self.eval_rules(func)
+
+			self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs})
+
+	def post_recurse(self, node):
+		"""
+		Records the path and a hash of the scripts visited, see :py:meth:`waflib.Context.Context.post_recurse`
+
+		:param node: script
+		:type node: :py:class:`waflib.Node.Node`
+		"""
+		super(ConfigurationContext, self).post_recurse(node)
+		self.hash = hash((self.hash, node.read('rb')))
+		self.files.append(node.abspath())
+
+	def eval_rules(self, rules):
+		"""
+		Execute the configuration tests. The method :py:meth:`waflib.Configure.ConfigurationContext.err_handler`
+		is used to process the eventual exceptions
+
+		:param rules: list of configuration method names
+		:type rules: list of string
+		"""
+		self.rules = Utils.to_list(rules)
+		for x in self.rules:
+			f = getattr(self, x)
+			if not f: self.fatal("No such method '%s'." % x)
+			try:
+				f()
+			except Exception as e:
+				ret = self.err_handler(x, e)
+				if ret == BREAK:
+					break
+				elif ret == CONTINUE:
+					continue
+				else:
+					raise
+
+	def err_handler(self, fun, error):
+		"""
+		Error handler for the configuration tests, the default is to let the exception raise
+
+		:param fun: configuration test
+		:type fun: method
+		:param error: exception
+		:type error: exception
+		"""
+		# returning None (neither BREAK nor CONTINUE) re-raises in eval_rules
+		pass
+
+def conf(f):
+	"""
+	Decorator: attach new configuration functions to :py:class:`waflib.Build.BuildContext` and
+	:py:class:`waflib.Configure.ConfigurationContext`. The methods bound will accept a parameter
+	named 'mandatory' to disable the configuration errors::
+
+		def configure(conf):
+			conf.find_program('abc', mandatory=False)
+
+	:param f: method to bind
+	:type f: function
+	"""
+	def fun(*k, **kw):
+		# 'mandatory' is consumed here and never passed to the wrapped function
+		mandatory = True
+		if 'mandatory' in kw:
+			mandatory = kw['mandatory']
+			del kw['mandatory']
+
+		try:
+			return f(*k, **kw)
+		except Errors.ConfigurationError as e:
+			# non-mandatory failures are swallowed; the wrapper returns None
+			if mandatory:
+				raise e
+
+	setattr(ConfigurationContext, f.__name__, fun)
+	setattr(Build.BuildContext, f.__name__, fun)
+	return f
+
+@conf
+def add_os_flags(self, var, dest=None):
+	"""
+	Import operating system environment values into ``conf.env`` dict::
+
+		def configure(conf):
+			conf.add_os_flags('CFLAGS')
+
+	:param var: variable to use
+	:type var: string
+	:param dest: destination variable, by default the same as var
+	:type dest: string
+	"""
+	# do not use 'get' to make certain the variable is not defined
+	try: self.env.append_value(dest or var, shlex.split(self.environ[var]))
+	except KeyError: pass
+
+@conf
+def cmd_to_list(self, cmd):
+	"""
+	Detect if a command is written in pseudo shell like ``ccache g++`` and return a list.
+
+	:param cmd: command
+	:type cmd: a string or a list of string
+	"""
+	# NOTE(review): cmd.find(' ') returns 0 (falsy) when the space is the first
+	# character -- kept as in upstream waf 1.6.11
+	if isinstance(cmd, str) and cmd.find(' '):
+		try:
+			os.stat(cmd)
+		except OSError:
+			return shlex.split(cmd)
+		else:
+			# the whole string is an existing path (may contain spaces)
+			return [cmd]
+	return cmd
+
+@conf
+def check_waf_version(self, mini='1.6.0', maxi='1.7.0'):
+	"""
+	check for the waf version
+
+	Versions should be supplied as hex. 0x01000000 means 1.0.0,
+	0x010408 means 1.4.8, etc.
+
+	:type  mini: number, tuple or string
+	:param mini: Minimum required version
+	:type  maxi: number, tuple or string
+	:param maxi: Maximum allowed version
+	"""
+	self.start_msg('Checking for waf version in %s-%s' % (str(mini), str(maxi)))
+	ver = Context.HEXVERSION
+	if Utils.num2ver(mini) > ver:
+		self.fatal('waf version should be at least %r (%r found)' % (Utils.num2ver(mini), ver))
+
+	if Utils.num2ver(maxi) < ver:
+		self.fatal('waf version should be at most %r (%r found)' % (Utils.num2ver(maxi), ver))
+	self.end_msg('ok')
+
+@conf
+def find_file(self, filename, path_list=[]):
+	"""
+	Find a file in a list of paths
+
+	:param filename: name of the file to search for
+	:param path_list: list of directories to search
+	:return: the first occurrence filename or '' if filename could not be found
+	"""
+	# path_list=[] default is never mutated here, so the shared-default pitfall
+	# does not apply (kept as in upstream waf)
+	for n in Utils.to_list(filename):
+		for d in Utils.to_list(path_list):
+			p = os.path.join(d, n)
+			if os.path.exists(p):
+				return p
+	self.fatal('Could not find %r' % filename)
+
+@conf
+def find_program(self, filename, **kw):
+	"""
+	Search for a program on the operating system
+
+	When var is used, you may set os.environ[var] to help find a specific program version, for example::
+
+		$ VALAC=/usr/bin/valac_test waf configure
+
+	:param path_list: paths to use for searching
+	:type param_list: list of string
+	:param var: store the result to conf.env[var], by default use filename.upper()
+	:type var: string
+	:param ext: list of extensions for the binary (do not add an extension for portability)
+	:type ext: list of string
+	"""
+
+	exts = kw.get('exts', Utils.is_win32 and '.exe,.com,.bat,.cmd' or ',.sh,.pl,.py')
+
+	environ = kw.get('environ', os.environ)
+
+	ret = ''
+	filename = Utils.to_list(filename)
+
+	var = kw.get('var', '')
+	if not var:
+		var = filename[0].upper()
+
+	# precedence: previously configured env value, then OS environment override
+	if self.env[var]:
+		ret = self.env[var]
+	elif var in environ:
+		ret = environ[var]
+
+	path_list = kw.get('path_list', '')
+	if not ret:
+		if path_list:
+			path_list = Utils.to_list(path_list)
+		else:
+			path_list = environ.get('PATH', '').split(os.pathsep)
+
+		if not isinstance(filename, list):
+			filename = [filename]
+
+		# try each extension/name/path combination, first hit wins
+		for a in exts.split(','):
+			if ret:
+				break
+			for b in filename:
+				if ret:
+					break
+				for c in path_list:
+					if ret:
+						break
+					x = os.path.expanduser(os.path.join(c, b + a))
+					if os.path.isfile(x):
+						ret = x
+
+	# windows fallback: registry "App Paths" lookups
+	if not ret and Utils.winreg:
+		ret = Utils.get_registry_app_path(Utils.winreg.HKEY_CURRENT_USER, filename)
+	if not ret and Utils.winreg:
+		ret = Utils.get_registry_app_path(Utils.winreg.HKEY_LOCAL_MACHINE, filename)
+
+	self.msg('Checking for program ' + ','.join(filename), ret or False)
+	self.to_log('find program=%r paths=%r var=%r -> %r' % (filename, path_list, var, ret))
+
+	if not ret:
+		self.fatal(kw.get('errmsg', '') or 'Could not find the program %s' % ','.join(filename))
+
+	if var:
+		self.env[var] = ret
+	return ret
+
+
+@conf
+def find_perl_program(self, filename, path_list=[], var=None, environ=None, exts=''):
+	"""
+	Search for a perl program on the operating system
+
+	:param filename: file to search for
+	:type filename: string
+	:param path_list: list of paths to look into
+	:type path_list: list of string
+	:param var: store the results into *conf.env.var*
+	:type var: string
+	:param environ: operating system environment to pass to :py:func:`waflib.Configure.find_program`
+	:type environ: dict
+	:param exts: extensions given to :py:func:`waflib.Configure.find_program`
+	:type exts: list
+	"""
+
+	try:
+		app = self.find_program(filename, path_list=path_list, var=var, environ=environ, exts=exts)
+	except:
+		# not directly executable: run it through the perl interpreter instead
+		self.find_program('perl', var='PERL')
+		app = self.find_file(filename, os.environ['PATH'].split(os.pathsep))
+		if not app:
+			raise
+		if var:
+			self.env[var] = Utils.to_list(self.env['PERL']) + [app]
+	self.msg('Checking for %r' % filename, app)
+
diff --git a/waflib/Context.py b/waflib/Context.py
new file mode 100644
index 0000000..cfaf754
--- /dev/null
+++ b/waflib/Context.py
@@ -0,0 +1,596 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2010 (ita)
+
+"""
+Classes and functions required for waf commands
+"""
+
+import os, imp, sys
+from waflib import Utils, Errors, Logs
+import waflib.Node
+
+# the following 3 constants are updated on each new release (do not touch)
+HEXVERSION=0x1060b00
+"""Constant updated on new releases"""
+
+WAFVERSION="1.6.11"
+"""Constant updated on new releases"""
+
+WAFREVISION="a7e69d6b81b04729804754c4d5214da063779a65"
+"""Constant updated on new releases"""
+
+ABI = 98
+"""Version of the build data cache file format (used in :py:const:`waflib.Context.DBFILE`)"""
+
+DBFILE = '.wafpickle-%d' % ABI
+"""Name of the pickle file for storing the build data"""
+
+APPNAME = 'APPNAME'
+"""Default application name (used by ``waf dist``)"""
+
+VERSION = 'VERSION'
+"""Default application version (used by ``waf dist``)"""
+
+TOP  = 'top'
+"""The variable name for the top-level directory in wscript files"""
+
+OUT  = 'out'
+"""The variable name for the output directory in wscript files"""
+
+WSCRIPT_FILE = 'wscript'
+"""Name of the waf script files"""
+
+
+launch_dir = ''
+"""Directory from which waf has been called"""
+run_dir = ''
+"""Location of the wscript file to use as the entry point"""
+top_dir = ''
+"""Location of the project directory (top), if the project was configured"""
+out_dir = ''
+"""Location of the build directory (out), if the project was configured"""
+waf_dir = ''
+"""Directory containing the waf modules"""
+
+local_repo = ''
+"""Local repository containing additional Waf tools (plugins)"""
+remote_repo = 'http://waf.googlecode.com/git/'
+"""
+Remote directory containing downloadable waf tools. The missing tools can be downloaded by using::
+
+	$ waf configure --download
+"""
+
+remote_locs = ['waflib/extras', 'waflib/Tools']
+"""
+Remote directories for use with :py:const:`waflib.Context.remote_repo`
+"""
+
+g_module = None
+"""
+Module representing the main wscript file (see :py:const:`waflib.Context.run_dir`)
+"""
+
+STDOUT = 1
+STDERR = -1
+BOTH   = 0
+
+classes = []
+"""
+List of :py:class:`waflib.Context.Context` subclasses that can be used as waf commands. The classes
+are added automatically by a metaclass.
+"""
+
+
+def create_context(cmd_name, *k, **kw):
+	"""
+	Create a new :py:class:`waflib.Context.Context` instance corresponding to the given command.
+	Used in particular by :py:func:`waflib.Scripting.run_command`
+
+	:param cmd_name: command
+	:type cmd_name: string
+	:param k: arguments to give to the context class initializer
+	:type k: list
+	:param kw: keyword arguments to give to the context class initializer
+	:type kw: dict
+	"""
+	global classes
+	for x in classes:
+		if x.cmd == cmd_name:
+			return x(*k, **kw)
+	# unknown command: fall back to a plain Context running function cmd_name
+	ctx = Context(*k, **kw)
+	ctx.fun = cmd_name
+	return ctx
+
+class store_context(type):
+	"""
+	Metaclass for storing the command classes into the list :py:const:`waflib.Context.classes`
+	Context classes must provide an attribute 'cmd' representing the command to execute
+	"""
+	def __init__(cls, name, bases, dict):
+		super(store_context, cls).__init__(name, bases, dict)
+		name = cls.__name__
+
+		# the two base classes themselves are not commands
+		if name == 'ctx' or name == 'Context':
+			return
+
+		try:
+			cls.cmd
+		except AttributeError:
+			raise Errors.WafError('Missing command for the context class %r (cmd)' % name)
+
+		if not getattr(cls, 'fun', None):
+			cls.fun = cls.cmd
+
+		global classes
+		# prepend so that subclasses shadow earlier registrations in create_context
+		classes.insert(0, cls)
+
+ctx = store_context('ctx', (object,), {})
+"""Base class for the :py:class:`waflib.Context.Context` classes"""
+
+class Context(ctx):
+	"""
+	Default context for waf commands, and base class for new command contexts.
+
+	Context objects are passed to top-level functions::
+
+		def foo(ctx):
+			print(ctx.__class__.__name__) # waflib.Context.Context
+
+	Subclasses must define the attribute 'cmd':
+
+	:param cmd: command to execute as in ``waf cmd``
+	:type cmd: string
+	:param fun: function name to execute when the command is called
+	:type fun: string
+
+	.. inheritance-diagram:: waflib.Context.Context waflib.Build.BuildContext waflib.Build.InstallContext waflib.Build.UninstallContext waflib.Build.StepContext waflib.Build.ListContext waflib.Configure.ConfigurationContext waflib.Scripting.Dist waflib.Scripting.DistCheck waflib.Build.CleanContext
+
+	"""
+
+	errors = Errors
+	"""
+	Shortcut to :py:mod:`waflib.Errors` provided for convenience
+	"""
+
+	tools = {}
+	"""
+	A cache for modules (wscript files) read by :py:meth:`Context.Context.load`
+	"""
+
+	def __init__(self, **kw):
+		try:
+			rd = kw['run_dir']
+		except KeyError:
+			global run_dir
+			rd = run_dir
+
+		# binds the context to the nodes in use to avoid a context singleton
+		class node_class(waflib.Node.Node):
+			pass
+		self.node_class = node_class
+		self.node_class.__module__ = "waflib.Node"
+		self.node_class.__name__ = "Nod3"
+		self.node_class.ctx = self
+
+		self.root = self.node_class('', None)
+		self.cur_script = None
+		self.path = self.root.find_dir(rd)
+
+		self.stack_path = []
+		# names injected into the namespace in which wscript code is exec'd
+		self.exec_dict = {'ctx':self, 'conf':self, 'bld':self, 'opt':self}
+		self.logger = None
+
+	def __hash__(self):
+		"""
+		Return a hash value for storing context objects in dicts or sets. The value is not persistent.
+
+		:return: hash value
+		:rtype: int
+		"""
+		return id(self)
+
+	def load(self, tool_list, *k, **kw):
+		"""
+		Load a Waf tool as a module, and try calling the function named :py:const:`waflib.Context.Context.fun` from it.
+		A ``tooldir`` value may be provided as a list of module paths.
+
+		:type tool_list: list of string or space-separated string
+		:param tool_list: list of Waf tools to use
+		"""
+		tools = Utils.to_list(tool_list)
+		path = Utils.to_list(kw.get('tooldir', ''))
+
+		for t in tools:
+			module = load_tool(t, path)
+			fun = getattr(module, kw.get('name', self.fun), None)
+			if fun:
+				fun(self)
+
+	def execute(self):
+		"""
+		Execute the command. Redefine this method in subclasses.
+		"""
+		global g_module
+		self.recurse([os.path.dirname(g_module.root_path)])
+
+	def pre_recurse(self, node):
+		"""
+		Method executed immediately before a folder is read by :py:meth:`waflib.Context.Context.recurse`. The node given is set
+		as an attribute ``self.cur_script``, and as the current path ``self.path``
+
+		:param node: script
+		:type node: :py:class:`waflib.Node.Node`
+		"""
+		self.stack_path.append(self.cur_script)
+
+		self.cur_script = node
+		self.path = node.parent
+
+	def post_recurse(self, node):
+		"""
+		Restore ``self.cur_script`` and ``self.path`` right after :py:meth:`waflib.Context.Context.recurse` terminates.
+
+		:param node: script
+		:type node: :py:class:`waflib.Node.Node`
+		"""
+		self.cur_script = self.stack_path.pop()
+		if self.cur_script:
+			self.path = self.cur_script.parent
+
+	def recurse(self, dirs, name=None, mandatory=True, once=True):
+		"""
+		Run user code from the supplied list of directories.
+		The directories can be either absolute, or relative to the directory
+		of the wscript file. The methods :py:meth:`waflib.Context.Context.pre_recurse` and :py:meth:`waflib.Context.Context.post_recurse`
+		are called immediately before and after a script has been executed.
+
+		:param dirs: List of directories to visit
+		:type dirs: list of string or space-separated string
+		:param name: Name of function to invoke from the wscript
+		:type  name: string
+		:param mandatory: whether sub wscript files are required to exist
+		:type  mandatory: bool
+		:param once: read the script file once for a particular context
+		:type once: bool
+		"""
+		try:
+			cache = self.recurse_cache
+		except:
+			cache = self.recurse_cache = {}
+
+		for d in Utils.to_list(dirs):
+
+			if not os.path.isabs(d):
+				# absolute paths only
+				d = os.path.join(self.path.abspath(), d)
+
+			WSCRIPT     = os.path.join(d, WSCRIPT_FILE)
+			WSCRIPT_FUN = WSCRIPT + '_' + (name or self.fun)
+
+			# a file named 'wscript_<function>' takes precedence over a plain
+			# 'wscript'; its whole body is exec'd instead of calling a function
+			node = self.root.find_node(WSCRIPT_FUN)
+			if node and (not once or node not in cache):
+				cache[node] = True
+				self.pre_recurse(node)
+				try:
+					function_code = node.read('rU')
+					exec(compile(function_code, node.abspath(), 'exec'), self.exec_dict)
+				finally:
+					self.post_recurse(node)
+			elif not node:
+				node = self.root.find_node(WSCRIPT)
+				tup = (node, name or self.fun)
+				if node and (not once or tup not in cache):
+					cache[tup] = True
+					self.pre_recurse(node)
+					try:
+						wscript_module = load_module(node.abspath())
+						user_function = getattr(wscript_module, (name or self.fun), None)
+						if not user_function:
+							if not mandatory:
+								continue
+							raise Errors.WafError('No function %s defined in %s' % (name or self.fun, node.abspath()))
+						user_function(self)
+					finally:
+						self.post_recurse(node)
+				elif not node:
+					if not mandatory:
+						continue
+					raise Errors.WafError('No wscript file in directory %s' % d)
+
+	def exec_command(self, cmd, **kw):
+		"""
+		Execute a command and return the exit status. If the context has the attribute 'log',
+		capture and log the process stderr/stdout for logging purposes::
+
+			def run(tsk):
+				ret = tsk.generator.bld.exec_command('touch foo.txt')
+				return ret
+
+		Do not confuse this method with :py:meth:`waflib.Context.Context.cmd_and_log` which is used to
+		return the standard output/error values.
+
+		:param cmd: command argument for subprocess.Popen
+		:param kw: keyword arguments for subprocess.Popen
+		"""
+		subprocess = Utils.subprocess
+		kw['shell'] = isinstance(cmd, str)
+		Logs.debug('runner: %r' % cmd)
+		Logs.debug('runner_env: kw=%s' % kw)
+
+		try:
+			if self.logger:
+				# warning: may deadlock with a lot of output (subprocess limitation)
+
+				self.logger.info(cmd)
+
+				kw['stdout'] = kw['stderr'] = subprocess.PIPE
+				p = subprocess.Popen(cmd, **kw)
+				(out, err) = p.communicate()
+				if out:
+					self.logger.debug('out: %s' % out.decode(sys.stdout.encoding or 'iso8859-1'))
+				if err:
+					self.logger.error('err: %s' % err.decode(sys.stdout.encoding or 'iso8859-1'))
+				return p.returncode
+			else:
+				p = subprocess.Popen(cmd, **kw)
+				return p.wait()
+		except OSError:
+			# e.g. program not found; -1 signals failure to the caller
+			return -1
+
+	def cmd_and_log(self, cmd, **kw):
+		"""
+		Execute a command and return stdout if the execution is successful.
+		An exception is thrown when the exit status is non-0. In that case, both stderr and stdout
+		will be bound to the WafError object::
+
+			def configure(conf):
+				out = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.STDOUT, quiet=waflib.Context.BOTH)
+				(out, err) = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.BOTH)
+				try:
+					conf.cmd_and_log(['which', 'someapp'], output=waflib.Context.BOTH)
+				except Exception as e:
+					print(e.stdout, e.stderr)
+
+		:param cmd: args for subprocess.Popen
+		:param kw: keyword arguments for subprocess.Popen
+		"""
+		subprocess = Utils.subprocess
+		kw['shell'] = isinstance(cmd, str)
+		Logs.debug('runner: %r' % cmd)
+
+		if 'quiet' in kw:
+			quiet = kw['quiet']
+			del kw['quiet']
+		else:
+			quiet = None
+
+		if 'output' in kw:
+			to_ret = kw['output']
+			del kw['output']
+		else:
+			to_ret = STDOUT
+
+		kw['stdout'] = kw['stderr'] = subprocess.PIPE
+		if quiet is None:
+			self.to_log(cmd)
+		try:
+			p = subprocess.Popen(cmd, **kw)
+			(out, err) = p.communicate()
+		except Exception as e:
+			raise Errors.WafError('Execution failure: %s' % str(e), ex=e)
+
+		if not isinstance(out, str):
+			out = out.decode(sys.stdout.encoding or 'iso8859-1')
+		if not isinstance(err, str):
+			err = err.decode(sys.stdout.encoding or 'iso8859-1')
+
+		if out and quiet != STDOUT and quiet != BOTH:
+			self.to_log('out: %s' % out)
+		if err and quiet != STDERR and quiet != BOTH:
+			self.to_log('err: %s' % err)
+
+		if p.returncode:
+			e = Errors.WafError('Command %r returned %r' % (cmd, p.returncode))
+			e.returncode = p.returncode
+			e.stderr = err
+			e.stdout = out
+			raise e
+
+		if to_ret == BOTH:
+			return (out, err)
+		elif to_ret == STDERR:
+			return err
+		return out
+
+	def fatal(self, msg, ex=None):
+		"""
+		Raise a configuration error to interrupt the execution immediately::
+
+			def configure(conf):
+				conf.fatal('a requirement is missing')
+
+		:param msg: message to display
+		:type msg: string
+		:param ex: optional exception object
+		:type ex: exception
+		"""
+		if self.logger:
+			self.logger.info('from %s: %s' % (self.path.abspath(), msg))
+		try:
+			msg = '%s\n(complete log in %s)' % (msg, self.logger.handlers[0].baseFilename)
+		except:
+			# no logger or no file handler: keep the plain message
+			pass
+		raise self.errors.ConfigurationError(msg, ex=ex)
+
+	def to_log(self, msg):
+		"""
+		Log some information to the logger (if present), or to stderr. If the message is empty,
+		it is not printed::
+
+			def build(bld):
+				bld.to_log('starting the build')
+
+		When in doubt, override this method, or provide a logger on the context class.
+
+		:param msg: message
+		:type msg: string
+		"""
+		if not msg:
+			return
+		if self.logger:
+			self.logger.info(msg)
+		else:
+			sys.stderr.write(str(msg))
+			sys.stderr.flush()
+
+
+	def msg(self, msg, result, color=None):
+		"""
+		Print a configuration message of the form ``msg: result``.
+		The second part of the message will be in colors. The output
+		can be disabled easly by setting ``in_msg`` to a positive value::
+
+			def configure(conf):
+				self.in_msg = 1
+				conf.msg('Checking for library foo', 'ok')
+				# no output
+
+		:param msg: message to display to the user
+		:type msg: string
+		:param result: result to display
+		:type result: string or boolean
+		:param color: color to use, see :py:const:`waflib.Logs.colors_lst`
+		:type color: string
+		"""
+		self.start_msg(msg)
+
+		if not isinstance(color, str):
+			color = result and 'GREEN' or 'YELLOW'
+
+		self.end_msg(result, color)
+
+	def start_msg(self, msg):
+		"""
+		Print the beginning of a 'Checking for xxx' message. See :py:meth:`waflib.Context.Context.msg`
+		"""
+		try:
+			# nested start_msg calls only increment the counter (no output)
+			if self.in_msg:
+				self.in_msg += 1
+				return
+		except:
+			self.in_msg = 0
+		self.in_msg += 1
+
+		try:
+			# widen the justification column to the longest message seen so far
+			self.line_just = max(self.line_just, len(msg))
+		except AttributeError:
+			self.line_just = max(40, len(msg))
+		for x in (self.line_just * '-', msg):
+			self.to_log(x)
+		Logs.pprint('NORMAL', "%s :" % msg.ljust(self.line_just), sep='')
+
+	def end_msg(self, result, color=None):
+		"""Print the end of a 'Checking for' message. See :py:meth:`waflib.Context.Context.msg`"""
+		self.in_msg -= 1
+		if self.in_msg:
+			return
+
+		# '== True' / '== False' on purpose: result may also be an arbitrary value
+		defcolor = 'GREEN'
+		if result == True:
+			msg = 'ok'
+		elif result == False:
+			msg = 'not found'
+			defcolor = 'YELLOW'
+		else:
+			msg = str(result)
+
+		self.to_log(msg)
+		Logs.pprint(color or defcolor, msg)
+
+
+	def load_special_tools(self, var, ban=[]):
+		"""Load the tools from waflib/extras whose file names match the pattern *var*, skipping names listed in *ban*"""
+		global waf_dir
+		lst = self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var)
+		for x in lst:
+			if not x.name in ban:
+				load_tool(x.name.replace('.py', ''))
+
+cache_modules = {}
+"""
+Dictionary holding already loaded modules, keyed by their absolute path.
+The modules are added automatically by :py:func:`waflib.Context.load_module`
+"""
+
+def load_module(path):
+	"""
+	Load a source file as a python module.
+
+	:param path: file path
+	:type path: string
+	:return: Loaded Python module
+	:rtype: module
+	"""
+	try:
+		return cache_modules[path]
+	except KeyError:
+		pass
+
+	module = imp.new_module(WSCRIPT_FILE)
+	try:
+		code = Utils.readf(path, m='rU')
+	except (IOError, OSError):
+		raise Errors.WafError('Could not read the file %r' % path)
+
+	module_dir = os.path.dirname(path)
+	sys.path.insert(0, module_dir)
+
+	# NOTE(review): if the exec raises, module_dir stays on sys.path
+	# (no try/finally) -- behaviour inherited from upstream waf 1.6.11
+	exec(compile(code, path, 'exec'), module.__dict__)
+	sys.path.remove(module_dir)
+
+	cache_modules[path] = module
+
+	return module
+
+def load_tool(tool, tooldir=None):
+	"""
+	Import a Waf tool (python module), and store it in the dict :py:const:`waflib.Context.Context.tools`
+
+	:type  tool: string
+	:param tool: Name of the tool
+	:type  tooldir: list
+	:param tooldir: List of directories to search for the tool module
+	"""
+	# historical name aliases: 'g++'/'c++' -> 'gxx'/'cxx', 'java' -> 'javaw',
+	# 'compiler_cc' -> 'compiler_c'
+	tool = tool.replace('++', 'xx')
+	tool = tool.replace('java', 'javaw')
+	tool = tool.replace('compiler_cc', 'compiler_c')
+
+	if tooldir:
+		assert isinstance(tooldir, list)
+		sys.path = tooldir + sys.path
+		try:
+			__import__(tool)
+			ret = sys.modules[tool]
+			# cache on the Context class attribute (shared by all contexts)
+			Context.tools[tool] = ret
+			return ret
+		finally:
+			for d in tooldir:
+				sys.path.remove(d)
+	else:
+		global waf_dir
+		# prefer waflib/extras, then waflib/Tools, then whatever sys.path finds
+		try:
+			os.stat(os.path.join(waf_dir, 'waflib', 'extras', tool + '.py'))
+			d = 'waflib.extras.%s' % tool
+		except:
+			try:
+				os.stat(os.path.join(waf_dir, 'waflib', 'Tools', tool + '.py'))
+				d = 'waflib.Tools.%s' % tool
+			except:
+				d = tool # user has messed with sys.path
+
+		__import__(d)
+		ret = sys.modules[d]
+		Context.tools[tool] = ret
+		return ret
+
diff --git a/waflib/Errors.py b/waflib/Errors.py
new file mode 100644
index 0000000..104f7d8
--- /dev/null
+++ b/waflib/Errors.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2010 (ita)
+
+"""
+Exceptions used in the Waf code
+"""
+
+import traceback, sys
+
+class WafError(Exception):
+	"""Base class for all Waf errors"""
+	def __init__(self, msg='', ex=None):
+		"""
+		:param msg: error message
+		:type msg: string
+		:param ex: exception causing this error (optional)
+		:type ex: exception
+		"""
+		self.msg = msg
+		assert not isinstance(msg, Exception)
+
+		self.stack = []
+		if ex:
+			if not msg:
+				self.msg = str(ex)
+			if isinstance(ex, WafError):
+				self.stack = ex.stack
+			else:
+				self.stack = traceback.extract_tb(sys.exc_info()[2])
+		self.stack += traceback.extract_stack()[:-1]
+		self.verbose_msg = ''.join(traceback.format_list(self.stack))
+
+	def __str__(self):
+		return str(self.msg)
+
+class BuildError(WafError):
+	"""
+	Errors raised during the build and install phases
+	"""
+	def __init__(self, error_tasks=[]):
+		"""
+		:param error_tasks: tasks that could not complete normally
+		:type error_tasks: list of task objects
+		"""
+		self.tasks = error_tasks
+		WafError.__init__(self, self.format_error())
+
+	def format_error(self):
+		"""format the error messages from the tasks that failed"""
+		lst = ['Build failed']
+		for tsk in self.tasks:
+			txt = tsk.format_error()
+			if txt: lst.append(txt)
+		return '\n'.join(lst)
+
+class ConfigurationError(WafError):
+	"""
+	Configuration exception raised in particular by :py:meth:`waflib.Context.Context.fatal`
+	"""
+	pass
+
+class TaskRescan(WafError):
+	"""task-specific exception type, trigger a signature recomputation"""
+	pass
+
+class TaskNotReady(WafError):
+	"""task-specific exception type, raised when the task signature cannot be computed"""
+	pass
+
diff --git a/waflib/Logs.py b/waflib/Logs.py
new file mode 100644
index 0000000..a88ef0a
--- /dev/null
+++ b/waflib/Logs.py
@@ -0,0 +1,274 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2010 (ita)
+
+"""
+logging, colors, terminal width and pretty-print
+"""
+
+import os, re, traceback, sys
+
+_nocolor = os.environ.get('NOCOLOR', 'no') not in ('no', '0', 'false')
+try:
+	if not _nocolor:
+		import waflib.ansiterm
+except:
+	# optional module for colors on win32, just ignore if it cannot be imported
+	pass
+
+import logging # do it after
+
+LOG_FORMAT = "%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s"
+HOUR_FORMAT = "%H:%M:%S"
+
+zones = ''
+verbose = 0
+
+colors_lst = {
+'USE' : True,
+'BOLD'  :'\x1b[01;1m',
+'RED'   :'\x1b[01;31m',
+'GREEN' :'\x1b[32m',
+'YELLOW':'\x1b[33m',
+'PINK'  :'\x1b[35m',
+'BLUE'  :'\x1b[01;34m',
+'CYAN'  :'\x1b[36m',
+'NORMAL':'\x1b[0m',
+'cursor_on'  :'\x1b[?25h',
+'cursor_off' :'\x1b[?25l',
+}
+
+got_tty = not os.environ.get('TERM', 'dumb') in ['dumb', 'emacs']
+if got_tty:
+	try:
+		got_tty = sys.stderr.isatty()
+	except AttributeError:
+		got_tty = False
+
+if (not got_tty and os.environ.get('TERM', 'dumb') != 'msys') or _nocolor:
+	colors_lst['USE'] = False
+
+def get_term_cols():
+	return 80
+
+# If console packages are available, replace the dummy function with a real
+# implementation
+try:
+	import struct, fcntl, termios
+except ImportError:
+	pass
+else:
+	if got_tty:
+		def get_term_cols_real():
+			"""
+			Private use only.
+			"""
+
+			dummy_lines, cols = struct.unpack("HHHH", \
+			fcntl.ioctl(sys.stderr.fileno(),termios.TIOCGWINSZ , \
+			struct.pack("HHHH", 0, 0, 0, 0)))[:2]
+			return cols
+		# try the function once to see if it really works
+		try:
+			get_term_cols_real()
+		except:
+			pass
+		else:
+			get_term_cols = get_term_cols_real
+
+get_term_cols.__doc__ = """
+	Get the console width in characters.
+
+	:return: the number of characters per line
+	:rtype: int
+	"""
+
+def get_color(cl):
+	if not colors_lst['USE']: return ''
+	return colors_lst.get(cl, '')
+
+class color_dict(object):
+	"""attribute-based color access, eg: colors.PINK"""
+	def __getattr__(self, a):
+		return get_color(a)
+	def __call__(self, a):
+		return get_color(a)
+
+colors = color_dict()
+
+re_log = re.compile(r'(\w+): (.*)', re.M)
+class log_filter(logging.Filter):
+	"""
+	The waf logs are of the form 'name: message', and can be filtered by 'waf --zones=name'.
+	For example, the following::
+
+		from waflib import Logs
+		Logs.debug('test: here is a message')
+
+	Will be displayed only when executing::
+
+		$ waf --zones=test
+	"""
+	def __init__(self, name=None):
+		pass
+
+	def filter(self, rec):
+		"""
+		filter a record, adding the colors automatically
+
+		* error: red
+		* warning: yellow
+
+		:param rec: message to record
+		"""
+
+		rec.c1 = colors.PINK
+		rec.c2 = colors.NORMAL
+		rec.zone = rec.module
+		if rec.levelno >= logging.INFO:
+			if rec.levelno >= logging.ERROR:
+				rec.c1 = colors.RED
+			elif rec.levelno >= logging.WARNING:
+				rec.c1 = colors.YELLOW
+			else:
+				rec.c1 = colors.GREEN
+			return True
+
+		m = re_log.match(rec.msg)
+		if m:
+			rec.zone = m.group(1)
+			rec.msg = m.group(2)
+
+		if zones:
+			return getattr(rec, 'zone', '') in zones or '*' in zones
+		elif not verbose > 2:
+			return False
+		return True
+
+class formatter(logging.Formatter):
+	"""Simple log formatter which handles colors"""
+	def __init__(self):
+		logging.Formatter.__init__(self, LOG_FORMAT, HOUR_FORMAT)
+
+	def format(self, rec):
+		"""Messages in warning, error or info mode are displayed in color by default"""
+		if rec.levelno >= logging.WARNING or rec.levelno == logging.INFO:
+			try:
+				msg = rec.msg.decode('utf-8')
+			except:
+				msg = rec.msg
+			return '%s%s%s' % (rec.c1, msg, rec.c2)
+		return logging.Formatter.format(self, rec)
+
+log = None
+"""global logger for Logs.debug, Logs.error, etc"""
+
+def debug(*k, **kw):
+	"""
+	Wrap logging.debug, the output is filtered for performance reasons
+	"""
+	if verbose:
+		k = list(k)
+		k[0] = k[0].replace('\n', ' ')
+		global log
+		log.debug(*k, **kw)
+
+def error(*k, **kw):
+	"""
+	Wrap logging.error; display the origin of the message when verbosity is above 2 ('-vvv')
+	"""
+	global log
+	log.error(*k, **kw)
+	if verbose > 2:
+		st = traceback.extract_stack()
+		if st:
+			st = st[:-1]
+			buf = []
+			for filename, lineno, name, line in st:
+				buf.append('  File "%s", line %d, in %s' % (filename, lineno, name))
+				if line:
+					buf.append('	%s' % line.strip())
+			if buf: log.error("\n".join(buf))
+
+def warn(*k, **kw):
+	"""
+	Wrap logging.warn
+	"""
+	global log
+	log.warn(*k, **kw)
+
+def info(*k, **kw):
+	"""
+	Wrap logging.info
+	"""
+	global log
+	log.info(*k, **kw)
+
+def init_log():
+	"""
+	Initialize the loggers globally
+	"""
+	global log
+	log = logging.getLogger('waflib')
+	log.handlers = []
+	log.filters = []
+	hdlr = logging.StreamHandler()
+	hdlr.setFormatter(formatter())
+	log.addHandler(hdlr)
+	log.addFilter(log_filter())
+	log.setLevel(logging.DEBUG)
+
+def make_logger(path, name):
+	"""
+	Create a simple logger, which is often used to redirect the context command output::
+
+		from waflib import Logs
+		bld.logger = Logs.make_logger('test.log', 'build')
+		bld.check(header_name='sadlib.h', features='cxx cprogram', mandatory=False)
+		bld.logger = None
+
+	:param path: file name to write the log output to
+	:type path: string
+	:param name: logger name (loggers are reused)
+	:type name: string
+	"""
+	logger = logging.getLogger(name)
+	hdlr = logging.FileHandler(path, 'w')
+	formatter = logging.Formatter('%(message)s')
+	hdlr.setFormatter(formatter)
+	logger.addHandler(hdlr)
+	logger.setLevel(logging.DEBUG)
+	return logger
+
+def make_mem_logger(name, to_log, size=10000):
+	"""
+	Create a memory logger to avoid writing concurrently to the main logger
+	"""
+	from logging.handlers import MemoryHandler
+	logger = logging.getLogger(name)
+	hdlr = MemoryHandler(size, target=to_log)
+	formatter = logging.Formatter('%(message)s')
+	hdlr.setFormatter(formatter)
+	logger.addHandler(hdlr)
+	logger.memhandler = hdlr
+	logger.setLevel(logging.DEBUG)
+	return logger
+
+def pprint(col, str, label='', sep='\n'):
+	"""
+	Print messages in color immediately on stderr::
+
+		from waflib import Logs
+		Logs.pprint('RED', 'Something bad just happened')
+
+	:param col: color name to use in :py:const:`Logs.colors_lst`
+	:type col: string
+	:param str: message to display
+	:type str: string or a value that can be printed by %s
+	:param label: a message to add after the colored output
+	:type label: string
+	:param sep: a string to append at the end (line separator)
+	:type sep: string
+	"""
+	sys.stderr.write("%s%s%s %s%s" % (colors(col), str, colors.NORMAL, label, sep))
+
diff --git a/waflib/Node.py b/waflib/Node.py
new file mode 100644
index 0000000..c86c4c1
--- /dev/null
+++ b/waflib/Node.py
@@ -0,0 +1,850 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2010 (ita)
+
+"""
+Node: filesystem structure, contains lists of nodes
+
+#. Each file/folder is represented by exactly one node.
+
+#. Some potential class properties are stored on :py:class:`waflib.Build.BuildContext` : nodes to depend on, etc.
+   Unused class members can increase the `.wafpickle` file size noticeably.
+
+#. Node objects should never be created directly, use
+   the methods :py:func:`Node.make_node` or :py:func:`Node.find_node`
+
+#. The methods :py:func:`Node.find_resource`, :py:func:`Node.find_dir` :py:func:`Node.find_or_declare` should be
+   used when a build context is present
+
+#. Each instance of :py:class:`waflib.Context.Context` has a unique :py:class:`Node` subclass.
+   (:py:class:`waflib.Node.Nod3`, see the :py:class:`waflib.Context.Context` initializer). A reference to the context owning a node is held as self.ctx
+"""
+
+import os, re, sys, shutil
+from waflib import Utils, Errors
+
+exclude_regs = '''
+**/*~
+**/#*#
+**/.#*
+**/%*%
+**/._*
+**/CVS
+**/CVS/**
+**/.cvsignore
+**/SCCS
+**/SCCS/**
+**/vssver.scc
+**/.svn
+**/.svn/**
+**/BitKeeper
+**/.git
+**/.git/**
+**/.gitignore
+**/.bzr
+**/.bzrignore
+**/.bzr/**
+**/.hg
+**/.hg/**
+**/_MTN
+**/_MTN/**
+**/.arch-ids
+**/{arch}
+**/_darcs
+**/_darcs/**
+**/.DS_Store'''
+"""
+Ant patterns for files and folders to exclude while doing the
+recursive traversal in :py:meth:`waflib.Node.Node.ant_glob`
+"""
+
+# TODO optimize split_path by performing a replacement when unpacking?
+
+def split_path(path):
+	"""
+	Split a path by os.sep (This is not os.path.split)
+
+	:param path: path to split
+	:type path: string
+	:rtype: list of string
+	:return: the path, split
+	"""
+	return path.split('/')
+
+def split_path_cygwin(path):
+	if path.startswith('//'):
+		ret = path.split('/')[2:]
+		ret[0] = '/' + ret[0]
+		return ret
+	return path.split('/')
+
+re_sp = re.compile('[/\\\\]')
+def split_path_win32(path):
+	if path.startswith('\\\\'):
+		ret = re.split(re_sp, path)[2:]
+		ret[0] = '\\' + ret[0]
+		return ret
+	return re.split(re_sp, path)
+
+if sys.platform == 'cygwin':
+	split_path = split_path_cygwin
+elif Utils.is_win32:
+	split_path = split_path_win32
+
+class Node(object):
+	"""
+	This class is organized in two parts
+
+	* The basic methods meant for filesystem access (compute paths, create folders, etc)
+	* The methods bound to a :py:class:`waflib.Build.BuildContext` (require ``bld.srcnode`` and ``bld.bldnode``)
+
+	The Node objects are not thread safe in any way.
+	"""
+
+	__slots__ = ('name', 'sig', 'children', 'parent', 'cache_abspath', 'cache_isdir')
+	def __init__(self, name, parent):
+		self.name = name
+		self.parent = parent
+
+		if parent:
+			if name in parent.children:
+				raise Errors.WafError('node %s exists in the parent files %r already' % (name, parent))
+			parent.children[name] = self
+
+	def __setstate__(self, data):
+		"Deserializes from data"
+		self.name = data[0]
+		self.parent = data[1]
+		if data[2] is not None:
+			self.children = data[2]
+		if data[3] is not None:
+			self.sig = data[3]
+
+	def __getstate__(self):
+		"Serialize the node info"
+		return (self.name, self.parent, getattr(self, 'children', None), getattr(self, 'sig', None))
+
+	def __str__(self):
+		"String representation (name), for debugging purposes"
+		return self.name
+
+	def __repr__(self):
+		"String representation (abspath), for debugging purposes"
+		return self.abspath()
+
+	def __hash__(self):
+		"Node hash, used for storage in dicts. This hash is not persistent."
+		return id(self)
+
+	def __eq__(self, node):
+		"Node comparison, based on the IDs"
+		return id(self) == id(node)
+
+	def __copy__(self):
+		"Implemented to prevent nodes from being copied (raises an exception)"
+		raise Errors.WafError('nodes are not supposed to be copied')
+
+	def read(self, flags='r'):
+		"""
+		Return the contents of the file represented by this node::
+
+			def build(bld):
+				bld.path.find_node('wscript').read()
+
+		:type  flags: string
+		:param flags: Open mode for the file,
+		              for example 'r' (text) or 'rb' (binary);
+		              passed through to Utils.readf
+		:rtype: string
+		:return: File contents
+		"""
+		return Utils.readf(self.abspath(), flags)
+
+	def write(self, data, flags='w'):
+		"""
+		Write some text to the physical file represented by this node::
+
+			def build(bld):
+				bld.path.make_node('foo.txt').write('Hello, world!')
+
+		:type  data: string
+		:param data: data to write
+		:type  flags: string
+		:param flags: Write mode
+		"""
+		f = None
+		try:
+			f = open(self.abspath(), flags)
+			f.write(data)
+		finally:
+			if f:
+				f.close()
+
+	def chmod(self, val):
+		"""
+		Change file/dir permissions::
+
+			def build(bld):
+				bld.path.chmod(493) # 0755
+		"""
+		os.chmod(self.abspath(), val)
+
+	def delete(self):
+		"""Delete the file/folder physically (but not the node)"""
+		try:
+			if getattr(self, 'children', None):
+				shutil.rmtree(self.abspath())
+			else:
+				os.unlink(self.abspath())
+		except:
+			pass
+
+		try:
+			delattr(self, 'children')
+		except:
+			pass
+
+	def suffix(self):
+		"""Return the file extension"""
+		k = max(0, self.name.rfind('.'))
+		return self.name[k:]
+
+	def height(self):
+		"""Depth in the folder hierarchy from the filesystem root or from all the file drives"""
+		d = self
+		val = -1
+		while d:
+			d = d.parent
+			val += 1
+		return val
+
+	def listdir(self):
+		"""List the folder contents"""
+		lst = Utils.listdir(self.abspath())
+		lst.sort()
+		return lst
+
+	def mkdir(self):
+		"""
+		Create a folder represented by this node, creating intermediate nodes as needed
+		An exception will be raised only when the folder cannot possibly exist there
+		"""
+		if getattr(self, 'cache_isdir', None):
+			return
+
+		try:
+			self.parent.mkdir()
+		except:
+			pass
+
+		if self.name:
+			try:
+				os.makedirs(self.abspath())
+			except OSError:
+				pass
+
+			if not os.path.isdir(self.abspath()):
+				raise Errors.WafError('Could not create the directory %s' % self.abspath())
+
+			try:
+				self.children
+			except:
+				self.children = {}
+
+		self.cache_isdir = True
+
+	def find_node(self, lst):
+		"""
+		Find a node on the file system (files or folders), create intermediate nodes as needed
+
+		:param lst: path
+		:type lst: string or list of string
+		"""
+
+		if isinstance(lst, str):
+			lst = [x for x in split_path(lst) if x and x != '.']
+
+		cur = self
+		for x in lst:
+			if x == '..':
+				cur = cur.parent or cur
+				continue
+
+			try:
+				if x in cur.children:
+					cur = cur.children[x]
+					continue
+			except:
+				cur.children = {}
+
+			# optimistic: create the node first then look if it was correct to do so
+			cur = self.__class__(x, cur)
+			try:
+				os.stat(cur.abspath())
+			except:
+				del cur.parent.children[x]
+				return None
+
+		ret = cur
+
+		try:
+			os.stat(ret.abspath())
+		except:
+			del ret.parent.children[ret.name]
+			return None
+
+		try:
+			while not getattr(cur.parent, 'cache_isdir', None):
+				cur = cur.parent
+				cur.cache_isdir = True
+		except AttributeError:
+			pass
+
+		return ret
+
+	def make_node(self, lst):
+		"""
+		Find or create a node without looking on the filesystem
+
+		:param lst: path
+		:type lst: string or list of string
+		"""
+		if isinstance(lst, str):
+			lst = [x for x in split_path(lst) if x and x != '.']
+
+		cur = self
+		for x in lst:
+			if x == '..':
+				cur = cur.parent or cur
+				continue
+
+			if getattr(cur, 'children', {}):
+				if x in cur.children:
+					cur = cur.children[x]
+					continue
+			else:
+				cur.children = {}
+			cur = self.__class__(x, cur)
+		return cur
+
+	def search(self, lst):
+		"""
+		Search for a node without looking on the filesystem
+
+		:param lst: path
+		:type lst: string or list of string
+		"""
+		if isinstance(lst, str):
+			lst = [x for x in split_path(lst) if x and x != '.']
+
+		cur = self
+		try:
+			for x in lst:
+				if x == '..':
+					cur = cur.parent or cur
+				else:
+					cur = cur.children[x]
+			return cur
+		except:
+			pass
+
+	def path_from(self, node):
+		"""
+		Path of this node seen from the other::
+
+			def build(bld):
+				n1 = bld.path.find_node('foo/bar/xyz.txt')
+				n2 = bld.path.find_node('foo/stuff/')
+				n1.path_from(n2) # '../bar/xyz.txt'
+
+		:param node: path to use as a reference
+		:type node: :py:class:`waflib.Node.Node`
+		"""
+
+		c1 = self
+		c2 = node
+
+		c1h = c1.height()
+		c2h = c2.height()
+
+		lst = []
+		up = 0
+
+		while c1h > c2h:
+			lst.append(c1.name)
+			c1 = c1.parent
+			c1h -= 1
+
+		while c2h > c1h:
+			up += 1
+			c2 = c2.parent
+			c2h -= 1
+
+		while id(c1) != id(c2):
+			lst.append(c1.name)
+			up += 1
+
+			c1 = c1.parent
+			c2 = c2.parent
+
+		for i in range(up):
+			lst.append('..')
+		lst.reverse()
+		return os.sep.join(lst) or '.'
+
+	def abspath(self):
+		"""
+		Absolute path. A cache is kept in the context as ``cache_node_abspath``
+		"""
+		try:
+			return self.cache_abspath
+		except:
+			pass
+		# think twice before touching this (performance + complexity + correctness)
+
+		if os.sep == '/':
+			if not self.parent:
+				val = os.sep
+			elif not self.parent.name:
+				val = os.sep + self.name
+			else:
+				val = self.parent.abspath() + os.sep + self.name
+		else:
+			if not self.parent:
+				val = ''
+			elif not self.parent.name:
+				val = self.name + os.sep
+			else:
+				val = self.parent.abspath().rstrip(os.sep) + os.sep + self.name
+
+		self.cache_abspath = val
+		return val
+
+	def is_child_of(self, node):
+		"""
+		Does this node belong to the subtree node?::
+
+			def build(bld):
+				node = bld.path.find_node('wscript')
+				node.is_child_of(bld.path) # True
+
+		:param node: path to use as a reference
+		:type node: :py:class:`waflib.Node.Node`
+		"""
+		p = self
+		diff = self.height() - node.height()
+		while diff > 0:
+			diff -= 1
+			p = p.parent
+		return id(p) == id(node)
+
+	def ant_iter(self, accept=None, maxdepth=25, pats=[], dir=False, src=True, remove=True):
+		"""
+		Semi-private and recursive method used by ant_glob.
+
+		:param accept: function used for accepting/rejecting a node, returns the patterns that can be still accepted in recursion
+		:type accept: function
+		:param maxdepth: maximum depth in the filesystem (25)
+		:type maxdepth: int
+		:param pats: list of patterns to accept and list of patterns to exclude
+		:type pats: tuple
+		:param dir: return folders too (False by default)
+		:type dir: bool
+		:param src: return files (True by default)
+		:type src: bool
+		:param remove: remove files/folders that do not exist (True by default)
+		:type remove: bool
+		"""
+		dircont = self.listdir()
+		dircont.sort()
+
+		try:
+			lst = set(self.children.keys())
+			if remove:
+				for x in lst - set(dircont):
+					del self.children[x]
+		except:
+			self.children = {}
+
+		for name in dircont:
+			npats = accept(name, pats)
+			if npats and npats[0]:
+				accepted = [] in npats[0]
+
+				node = self.make_node([name])
+
+				isdir = os.path.isdir(node.abspath())
+				if accepted:
+					if isdir:
+						if dir:
+							yield node
+					else:
+						if src:
+							yield node
+
+				if getattr(node, 'cache_isdir', None) or isdir:
+					node.cache_isdir = True
+					if maxdepth:
+						for k in node.ant_iter(accept=accept, maxdepth=maxdepth - 1, pats=npats, dir=dir, src=src, remove=remove):
+							yield k
+		raise StopIteration
+
+	def ant_glob(self, *k, **kw):
+		"""
+		This method is used for finding files across folders. It behaves like ant patterns:
+
+		* ``**/*`` find all files recursively
+		* ``**/*.class`` find all files ending by .class
+		* ``..`` find files having two dot characters
+
+		For example::
+
+			def configure(cfg):
+				cfg.path.ant_glob('**/*.cpp') # find all .cpp files
+				cfg.root.ant_glob('etc/*.txt') # using the filesystem root can be slow
+				cfg.path.ant_glob('*.cpp', excl=['*.c'], src=True, dir=False)
+
+		For more information see http://ant.apache.org/manual/dirtasks.html
+
+		The nodes that correspond to files and folders that do not exist will be removed. To prevent this
+		behaviour, pass 'remove=False'
+
+		:param incl: ant patterns or list of patterns to include
+		:type incl: string or list of strings
+		:param excl: ant patterns or list of patterns to exclude
+		:type excl: string or list of strings
+		:param dir: return folders too (False by default)
+		:type dir: bool
+		:param src: return files (True by default)
+		:type src: bool
+		:param remove: remove files/folders that do not exist (True by default)
+		:type remove: bool
+		:param maxdepth: maximum depth of recursion
+		:type maxdepth: int
+		"""
+
+		src = kw.get('src', True)
+		dir = kw.get('dir', False)
+
+		excl = kw.get('excl', exclude_regs)
+		incl = k and k[0] or kw.get('incl', '**')
+
+		def to_pat(s):
+			lst = Utils.to_list(s)
+			ret = []
+			for x in lst:
+				x = x.replace('\\', '/').replace('//', '/')
+				if x.endswith('/'):
+					x += '**'
+				lst2 = x.split('/')
+				accu = []
+				for k in lst2:
+					if k == '**':
+						accu.append(k)
+					else:
+						k = k.replace('.', '[.]').replace('*','.*').replace('?', '.').replace('+', '\\+')
+						k = '^%s$' % k
+						try:
+							#print "pattern", k
+							accu.append(re.compile(k))
+						except Exception as e:
+							raise Errors.WafError("Invalid pattern: %s" % k, e)
+				ret.append(accu)
+			return ret
+
+		def filtre(name, nn):
+			ret = []
+			for lst in nn:
+				if not lst:
+					pass
+				elif lst[0] == '**':
+					ret.append(lst)
+					if len(lst) > 1:
+						if lst[1].match(name):
+							ret.append(lst[2:])
+					else:
+						ret.append([])
+				elif lst[0].match(name):
+					ret.append(lst[1:])
+			return ret
+
+		def accept(name, pats):
+			nacc = filtre(name, pats[0])
+			nrej = filtre(name, pats[1])
+			if [] in nrej:
+				nacc = []
+			return [nacc, nrej]
+
+		ret = [x for x in self.ant_iter(accept=accept, pats=[to_pat(incl), to_pat(excl)], maxdepth=25, dir=dir, src=src, remove=kw.get('remove', True))]
+		if kw.get('flat', False):
+			return ' '.join([x.path_from(self) for x in ret])
+
+		return ret
+
+	def find_nodes(self, find_dirs=True, find_files=True, match_fun=lambda x: True):
+		# FIXME not part of the stable API: find_node vs find_nodes? consistency with argument names on other functions?
+		x = """
+		Recursively finds nodes::
+
+			def configure(cnf):
+				cnf.find_nodes()
+
+		:param find_dirs: whether to return directories
+		:param find_files: whether to return files
+		:param match_fun: matching function, taking a node as parameter
+		:rtype: generator
+		:return: a generator that iterates over all the requested files
+		"""
+		files = self.listdir()
+		for f in files:
+			node = self.make_node([f])
+			if os.path.isdir(node.abspath()):
+				if find_dirs and match_fun(node):
+					yield node
+				gen = node.find_nodes(find_dirs, find_files, match_fun)
+				for g in gen:
+					yield g
+			else:
+				if find_files and match_fun(node):
+					yield node
+
+
+	# --------------------------------------------------------------------------------
+	# the following methods require the source/build folders (bld.srcnode/bld.bldnode)
+	# using a subclass is a possibility, but is that really necessary?
+	# --------------------------------------------------------------------------------
+
+	def is_src(self):
+		"""
+		True if the node is below the source directory
+		note: !is_src does not imply is_bld()
+
+		:rtype: bool
+		"""
+		cur = self
+		x = id(self.ctx.srcnode)
+		y = id(self.ctx.bldnode)
+		while cur.parent:
+			if id(cur) == y:
+				return False
+			if id(cur) == x:
+				return True
+			cur = cur.parent
+		return False
+
+	def is_bld(self):
+		"""
+		True if the node is below the build directory
+		note: !is_bld does not imply is_src
+
+		:rtype: bool
+		"""
+		cur = self
+		y = id(self.ctx.bldnode)
+		while cur.parent:
+			if id(cur) == y:
+				return True
+			cur = cur.parent
+		return False
+
+	def get_src(self):
+		"""
+		Return the equivalent src node (or self if not possible)
+
+		:rtype: :py:class:`waflib.Node.Node`
+		"""
+		cur = self
+		x = id(self.ctx.srcnode)
+		y = id(self.ctx.bldnode)
+		lst = []
+		while cur.parent:
+			if id(cur) == y:
+				lst.reverse()
+				return self.ctx.srcnode.make_node(lst)
+			if id(cur) == x:
+				return self
+			lst.append(cur.name)
+			cur = cur.parent
+		return self
+
+	def get_bld(self):
+		"""
+		Return the equivalent bld node (or self if not possible)
+
+		:rtype: :py:class:`waflib.Node.Node`
+		"""
+		cur = self
+		x = id(self.ctx.srcnode)
+		y = id(self.ctx.bldnode)
+		lst = []
+		while cur.parent:
+			if id(cur) == y:
+				return self
+			if id(cur) == x:
+				lst.reverse()
+				return self.ctx.bldnode.make_node(lst)
+			lst.append(cur.name)
+			cur = cur.parent
+		# the file is external to the current project, make a fake root in the current build directory
+		lst.reverse()
+		if lst and Utils.is_win32 and len(lst[0]) == 2 and lst[0].endswith(':'):
+			lst[0] = lst[0][0]
+		return self.ctx.bldnode.make_node(['__root__'] + lst)
+
+	def find_resource(self, lst):
+		"""
+		Try to find a declared build node or a source file
+
+		:param lst: path
+		:type lst: string or list of string
+		"""
+		if isinstance(lst, str):
+			lst = [x for x in split_path(lst) if x and x != '.']
+
+		node = self.get_bld().search(lst)
+		if not node:
+			self = self.get_src()
+			node = self.find_node(lst)
+		try:
+			pat = node.abspath()
+			if os.path.isdir(pat):
+				return None
+		except:
+			pass
+		return node
+
+	def find_or_declare(self, lst):
+		"""
+		If 'self' is in the build directory, try to return an existing node.
+		If no node is found there, look for an existing node in the
+		source directory; if that fails as well, create the node in the
+		build directory (intermediate folders are created as needed).
+
+		:param lst: path
+		:type lst: string or list of string
+		"""
+		if isinstance(lst, str):
+			lst = [x for x in split_path(lst) if x and x != '.']
+
+		node = self.get_bld().search(lst)
+		if node:
+			if not os.path.isfile(node.abspath()):
+				node.sig = None
+				try:
+					node.parent.mkdir()
+				except:
+					pass
+			return node
+		self = self.get_src()
+		node = self.find_node(lst)
+		if node:
+			if not os.path.isfile(node.abspath()):
+				node.sig = None
+				try:
+					node.parent.mkdir()
+				except:
+					pass
+			return node
+		node = self.get_bld().make_node(lst)
+		node.parent.mkdir()
+		return node
+
+	def find_dir(self, lst):
+		"""
+		Search for a folder in the filesystem
+
+		:param lst: path
+		:type lst: string or list of string
+		"""
+		if isinstance(lst, str):
+			lst = [x for x in split_path(lst) if x and x != '.']
+
+		node = self.find_node(lst)
+		try:
+			if not os.path.isdir(node.abspath()):
+				return None
+		except (OSError, AttributeError):
+			# the node might be None, and raise an AttributeError
+			return None
+		return node
+
+	# helpers for building things
+	def change_ext(self, ext, ext_in=None):
+		"""
+		:return: A build node of the same path, but with a different extension
+		:rtype: :py:class:`waflib.Node.Node`
+		"""
+		name = self.name
+		if ext_in is None:
+			k = name.rfind('.')
+			if k >= 0:
+				name = name[:k] + ext
+			else:
+				name = name + ext
+		else:
+			name = name[:- len(ext_in)] + ext
+
+		return self.parent.find_or_declare([name])
+
+	def nice_path(self, env=None):
+		"""
+		Return the path seen from the launch directory. It is often used for printing nodes in the console to open
+		files easily.
+
+		:param env: unused, left for compatibility with waf 1.5
+		"""
+		return self.path_from(self.ctx.launch_node())
+
+	def bldpath(self):
+		"Path seen from the build directory default/src/foo.cpp"
+		return self.path_from(self.ctx.bldnode)
+
+	def srcpath(self):
+		"Path seen from the source directory ../src/foo.cpp"
+		return self.path_from(self.ctx.srcnode)
+
+	def relpath(self):
+		"If a file in the build directory, bldpath, else srcpath"
+		cur = self
+		x = id(self.ctx.bldnode)
+		while cur.parent:
+			if id(cur) == x:
+				return self.bldpath()
+			cur = cur.parent
+		return self.srcpath()
+
+	def bld_dir(self):
+		"Build path without the file name"
+		return self.parent.bldpath()
+
+	def bld_base(self):
+		"Build path without the extension: src/dir/foo(.cpp)"
+		s = os.path.splitext(self.name)[0]
+		return self.bld_dir() + os.sep + s
+
+	def get_bld_sig(self):
+		"""
+		Node signature, assuming the file is in the build directory
+		"""
+		try:
+			ret = self.ctx.hash_cache[id(self)]
+		except KeyError:
+			pass
+		except AttributeError:
+			self.ctx.hash_cache = {}
+		else:
+			return ret
+
+		if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode:
+			self.sig = Utils.h_file(self.abspath())
+		self.ctx.hash_cache[id(self)] = ret = self.sig
+		return ret
+
+pickle_lock = Utils.threading.Lock()
+"""Lock mandatory for thread-safe node serialization"""
+
+class Nod3(Node):
+	"""Mandatory subclass for thread-safe node serialization"""
+	pass # do not remove
+
+
diff --git a/waflib/Options.py b/waflib/Options.py
new file mode 100644
index 0000000..19d8360
--- /dev/null
+++ b/waflib/Options.py
@@ -0,0 +1,252 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Scott Newton, 2005 (scottn)
+# Thomas Nagy, 2006-2010 (ita)
+
+"""
+Support for waf command-line options
+
+Provides default command-line options,
+as well as custom ones, used by the ``options`` wscript function.
+
+"""
+
+import os, tempfile, optparse, sys, re
+from waflib import Logs, Utils, Context
+
+cmds = 'distclean configure build install clean uninstall check dist distcheck'.split()
+"""
+Constant representing the default waf commands displayed in::
+
+	$ waf --help
+
+"""
+
+options = {}
+"""
+A dictionary representing the command-line options::
+
+	$ waf --foo=bar
+
+"""
+
+commands = []
+"""
+List of commands to execute extracted from the command-line. This list is consumed during the execution, see :py:func:`waflib.Scripting.run_commands`.
+"""
+
+# name of the lock file written at configure time (overridable through $WAFLOCK)
+lockfile = os.environ.get('WAFLOCK', '.lock-waf_%s_build' % sys.platform)
+# absolute path of the global build cache, empty string when $WAFCACHE is unset
+try: cache_global = os.path.abspath(os.environ['WAFCACHE'])
+except KeyError: cache_global = ''
+# normalized platform string (e.g. 'linux', 'win32')
+platform = Utils.unversioned_sys_platform()
+
+
+class opt_parser(optparse.OptionParser):
+	"""
+	Command-line options parser.
+	"""
+	def __init__(self, ctx):
+		"""
+		:param ctx: context owning this parser (an :py:class:`OptionsContext`)
+		"""
+		optparse.OptionParser.__init__(self, conflict_handler="resolve", version='waf %s (%s)' % (Context.WAFVERSION, Context.WAFREVISION))
+
+		# wrap the help output to the terminal width
+		self.formatter.width = Logs.get_term_cols()
+		p = self.add_option # shorthand used below
+		self.ctx = ctx
+
+		jobs = ctx.jobs()
+		p('-j', '--jobs',     dest='jobs',    default=jobs, type='int', help='amount of parallel jobs (%r)' % jobs)
+		p('-k', '--keep',     dest='keep',    default=0,     action='count', help='keep running happily even if errors are found')
+		p('-v', '--verbose',  dest='verbose', default=0,     action='count', help='verbosity level -v -vv or -vvv [default: 0]')
+		p('--nocache',        dest='nocache', default=False, action='store_true', help='ignore the WAFCACHE (if set)')
+		p('--zones',          dest='zones',   default='',    action='store', help='debugging zones (task_gen, deps, tasks, etc)')
+
+		gr = optparse.OptionGroup(self, 'configure options')
+		self.add_option_group(gr)
+
+		gr.add_option('-o', '--out', action='store', default='', help='build dir for the project', dest='out')
+		gr.add_option('-t', '--top', action='store', default='', help='src dir for the project', dest='top')
+
+		# default installation prefix: $PREFIX, else a platform-dependent fallback
+		default_prefix = os.environ.get('PREFIX')
+		if not default_prefix:
+			if platform == 'win32':
+				d = tempfile.gettempdir()
+				default_prefix = d[0].upper() + d[1:]
+				# win32 preserves the case, but gettempdir does not
+			else:
+				default_prefix = '/usr/local/'
+		gr.add_option('--prefix', dest='prefix', default=default_prefix, help='installation prefix [default: %r]' % default_prefix)
+		gr.add_option('--download', dest='download', default=False, action='store_true', help='try to download the tools if missing')
+
+
+		gr = optparse.OptionGroup(self, 'build and install options')
+		self.add_option_group(gr)
+
+		gr.add_option('-p', '--progress', dest='progress_bar', default=0, action='count', help= '-p: progress bar; -pp: ide output')
+		gr.add_option('--targets',        dest='targets', default='', action='store', help='task generators, e.g. "target1,target2"')
+
+		gr = optparse.OptionGroup(self, 'step options')
+		self.add_option_group(gr)
+		gr.add_option('--files',          dest='files', default='', action='store', help='files to process, by regexp, e.g. "*/main.c,*/test/main.o"')
+
+		default_destdir = os.environ.get('DESTDIR', '')
+		gr = optparse.OptionGroup(self, 'install/uninstall options')
+		self.add_option_group(gr)
+		gr.add_option('--destdir', help='installation root [default: %r]' % default_destdir, default=default_destdir, dest='destdir')
+		gr.add_option('-f', '--force', dest='force', default=False, action='store_true', help='force file installation')
+
+	def get_usage(self):
+		"""
+		Return the message to print on ``waf --help``
+		"""
+		# collect one-line descriptions: first from registered Context subclasses,
+		# then from callable attributes of the top-level wscript module
+		cmds_str = {}
+		for cls in Context.classes:
+			if not cls.cmd or cls.cmd == 'options':
+				continue
+
+			s = cls.__doc__ or ''
+			cmds_str[cls.cmd] = s
+
+		if Context.g_module:
+			for (k, v) in Context.g_module.__dict__.items():
+				if k in ['options', 'init', 'shutdown']:
+					continue
+
+				# keep plain functions only (same type as a known function)
+				if type(v) is type(Context.create_context):
+					if v.__doc__ and not k.startswith('_'):
+						cmds_str[k] = v.__doc__
+
+		# align the command names in a single column
+		just = 0
+		for k in cmds_str:
+			just = max(just, len(k))
+
+		lst = ['  %s: %s' % (k.ljust(just), v) for (k, v) in cmds_str.items()]
+		lst.sort()
+		ret = '\n'.join(lst)
+
+		return '''waf [commands] [options]
+
+Main commands (example: ./waf build -j4)
+%s
+''' % ret
+
+
+class OptionsContext(Context.Context):
+	"""
+	Collect custom options from wscript files and parses the command line.
+	Set the global :py:const:`waflib.Options.commands` and :py:const:`waflib.Options.options` values.
+	"""
+
+	cmd = 'options'
+	fun = 'options'
+
+	def __init__(self, **kw):
+		super(OptionsContext, self).__init__(**kw)
+
+		self.parser = opt_parser(self)
+		"""Instance of :py:class:`waflib.Options.opt_parser`"""
+
+		# maps group title -> optparse.OptionGroup, see add_option_group()
+		self.option_groups = {}
+
+	def jobs(self):
+		"""
+		Find the amount of cpu cores to set the default amount of tasks executed in parallel. At
+		runtime the options can be obtained from :py:const:`waflib.Options.options` ::
+
+			from waflib.Options import options
+			njobs = options.jobs
+
+		:return: the amount of cpu cores
+		:rtype: int
+		"""
+		# $JOBS overrides any detection
+		count = int(os.environ.get('JOBS', 0))
+		if count < 1:
+			if 'NUMBER_OF_PROCESSORS' in os.environ:
+				# on Windows, use the NUMBER_OF_PROCESSORS environment variable
+				count = int(os.environ.get('NUMBER_OF_PROCESSORS', 1))
+			else:
+				# on everything else, first try the POSIX sysconf values
+				if hasattr(os, 'sysconf_names'):
+					if 'SC_NPROCESSORS_ONLN' in os.sysconf_names:
+						count = int(os.sysconf('SC_NPROCESSORS_ONLN'))
+					elif 'SC_NPROCESSORS_CONF' in os.sysconf_names:
+						count = int(os.sysconf('SC_NPROCESSORS_CONF'))
+				# last resort on BSD-like systems: ask sysctl
+				if not count and os.name not in ('nt', 'java'):
+					try:
+						tmp = self.cmd_and_log(['sysctl', '-n', 'hw.ncpu'], quiet=0)
+					except Exception:
+						pass
+					else:
+						if re.match('^[0-9]+$', tmp):
+							count = int(tmp)
+		# clamp to a sane range
+		if count < 1:
+			count = 1
+		elif count > 1024:
+			count = 1024
+		return count
+
+	def add_option(self, *k, **kw):
+		"""
+		Wrapper for optparse.add_option::
+
+			def options(ctx):
+				ctx.add_option('-u', '--use', dest='use', default=False, action='store_true',
+					help='a boolean option')
+		"""
+		self.parser.add_option(*k, **kw)
+
+	def add_option_group(self, *k, **kw):
+		"""
+		Wrapper for optparse.add_option_group::
+
+			def options(ctx):
+				ctx.add_option_group('some options')
+				gr.add_option('-u', '--use', dest='use', default=False, action='store_true')
+		"""
+		# reuse an existing group with the same title, else create a new one
+		try:
+			gr = self.option_groups[k[0]]
+		except:
+			gr = self.parser.add_option_group(*k, **kw)
+		self.option_groups[k[0]] = gr
+		return gr
+
+	def get_option_group(self, opt_str):
+		"""
+		Wrapper for optparse.get_option_group::
+
+			def options(ctx):
+				gr = ctx.get_option_group('configure options')
+				gr.add_option('-o', '--out', action='store', default='',
+					help='build dir for the project', dest='out')
+
+		"""
+		try:
+			return self.option_groups[opt_str]
+		except KeyError:
+			# fall back to the groups registered directly on the parser
+			for group in self.parser.option_groups:
+				if group.title == opt_str:
+					return group
+			return None
+
+	def parse_args(self, _args=None):
+		"""
+		Parse arguments from a list (not bound to the command-line).
+
+		:param _args: arguments
+		:type _args: list of strings
+		"""
+		# publish the results in the module-level globals
+		global options, commands
+		(options, leftover_args) = self.parser.parse_args(args=_args)
+		commands = leftover_args
+
+		if options.destdir:
+			options.destdir = os.path.abspath(os.path.expanduser(options.destdir))
+
+		# verbose runs load the extra error-checking tool
+		if options.verbose >= 1:
+			self.load('errcheck')
+
+	def execute(self):
+		"""
+		See :py:func:`waflib.Context.Context.execute`
+		"""
+		super(OptionsContext, self).execute()
+		self.parse_args()
+
diff --git a/waflib/Runner.py b/waflib/Runner.py
new file mode 100644
index 0000000..3d37308
--- /dev/null
+++ b/waflib/Runner.py
@@ -0,0 +1,361 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2010 (ita)
+
+"""
+Runner.py: Task scheduling and execution
+
+"""
+
+import random, atexit
+try:
+	from queue import Queue
+except:
+	from Queue import Queue
+from waflib import Utils, Task, Errors, Logs
+
+# back-pressure factor for the producer loop in Parallel.refill_task_list
+GAP = 10
+"""
+Wait for free tasks if there are at least ``GAP * njobs`` in queue
+"""
+
<parameter name="new_str">+class TaskConsumer(Utils.threading.Thread):
+	"""
+	Task consumers belong to a pool of workers
+
+	They wait for tasks in the queue and then use ``task.process(...)``
+	"""
+	def __init__(self):
+		Utils.threading.Thread.__init__(self)
+		self.ready = Queue()
+		"""
+		Obtain :py:class:`waflib.Task.TaskBase` instances from this queue.
+		"""
+		# daemon thread: does not prevent interpreter exit; started immediately
+		self.setDaemon(1)
+		self.start()
+
+	def run(self):
+		"""
+		Loop over the tasks to execute
+		"""
+		# swallow any exception so a worker dying (e.g. at interpreter
+		# shutdown) does not print a traceback
+		try:
+			self.loop()
+		except:
+			pass
+
+	def loop(self):
+		"""
+		Obtain tasks from :py:attr:`waflib.Runner.TaskConsumer.ready` and call
+		:py:meth:`waflib.Task.TaskBase.process`. If the object is a function, execute it.
+		"""
+		# infinite loop; callables posted to the queue (see Parallel) are
+		# invoked with this consumer as argument, e.g. to swap queues
+		while 1:
+			tsk = self.ready.get()
+			if not isinstance(tsk, Task.TaskBase):
+				tsk(self)
+			else:
+				tsk.process()
+
+pool = Queue()
+"""
+Pool of task consumer objects
+"""
+
+def get_pool():
+	"""
+	Obtain a task consumer from :py:attr:`waflib.Runner.pool`.
+	Do not forget to put it back by using :py:func:`waflib.Runner.put_pool`
+	and reset properly (original waiting queue).
+
+	:rtype: :py:class:`waflib.Runner.TaskConsumer`
+	"""
+	# non-blocking get; spawn a fresh consumer thread when the pool is empty
+	try:
+		return pool.get(False)
+	except:
+		return TaskConsumer()
+
+def put_pool(x):
+	"""
+	Return a task consumer to the thread pool :py:attr:`waflib.Runner.pool`
+
+	:param x: task consumer object
+	:type x: :py:class:`waflib.Runner.TaskConsumer`
+	"""
+	pool.put(x)
+
+def _free_resources():
+	# atexit hook: drain the pool, wake each consumer with a None "task"
+	# (which raises inside loop() and ends the thread), then join them
+	global pool
+	lst = []
+	while pool.qsize():
+		lst.append(pool.get())
+	for x in lst:
+		x.ready.put(None)
+	for x in lst:
+		x.join()
+	pool = None
+atexit.register(_free_resources)
+
+class Parallel(object):
+	"""
+	Schedule the tasks obtained from the build context for execution.
+	"""
+	def __init__(self, bld, j=2):
+		"""
+		The initialization requires a build context reference
+		for computing the total number of jobs.
+		"""
+
+		self.numjobs = j
+		"""
+		Number of consumers in the pool
+		"""
+
+		self.bld = bld
+		"""
+		Instance of :py:class:`waflib.Build.BuildContext`
+		"""
+
+		self.outstanding = []
+		"""List of :py:class:`waflib.Task.TaskBase` that may be ready to be executed"""
+
+		self.frozen = []
+		"""List of :py:class:`waflib.Task.TaskBase` that cannot be executed immediately"""
+
+		self.out = Queue(0)
+		"""List of :py:class:`waflib.Task.TaskBase` returned by the task consumers"""
+
+		self.count = 0
+		"""Amount of tasks that may be processed by :py:class:`waflib.Runner.TaskConsumer`"""
+
+		self.processed = 1
+		"""Amount of tasks processed"""
+
+		self.stop = False
+		"""Error flag to stop the build"""
+
+		self.error = []
+		"""Tasks that could not be executed"""
+
+		self.biter = None
+		"""Task iterator which must give groups of parallelizable tasks when calling ``next()``"""
+
+		self.dirty = False
+		"""Flag to indicate that tasks have been executed, and that the build cache must be saved (call :py:meth:`waflib.Build.BuildContext.store`)"""
+
+	def get_next_task(self):
+		"""
+		Obtain the next task to execute.
+
+		:rtype: :py:class:`waflib.Task.TaskBase`
+		"""
+		if not self.outstanding:
+			return None
+		return self.outstanding.pop(0)
+
+	def postpone(self, tsk):
+		"""
+		A task cannot be executed at this point, put it in the list :py:attr:`waflib.Runner.Parallel.frozen`.
+
+		:param tsk: task
+		:type tsk: :py:class:`waflib.Task.TaskBase`
+		"""
+		# randomize the insertion point to avoid pathological re-ordering
+		if random.randint(0, 1):
+			self.frozen.insert(0, tsk)
+		else:
+			self.frozen.append(tsk)
+
+	def refill_task_list(self):
+		"""
+		Put the next group of tasks to execute in :py:attr:`waflib.Runner.Parallel.outstanding`.
+		"""
+		# back-pressure: do not let too many tasks pile up in the consumers
+		while self.count > self.numjobs * GAP:
+			self.get_out()
+
+		while not self.outstanding:
+			if self.count:
+				self.get_out()
+			elif self.frozen:
+				# deadlock detection: if only frozen tasks remain and no task
+				# was processed since the last time we were here, the
+				# run_after constraints can never be satisfied
+				try:
+					cond = self.deadlock == self.processed
+				except:
+					pass
+				else:
+					if cond:
+						msg = 'check the build order for the tasks'
+						for tsk in self.frozen:
+							if not tsk.run_after:
+								msg = 'check the methods runnable_status'
+								break
+						lst = []
+						for tsk in self.frozen:
+							lst.append('%s\t-> %r' % (repr(tsk), [id(x) for x in tsk.run_after]))
+						raise Errors.WafError('Deadlock detected: %s%s' % (msg, ''.join(lst)))
+				self.deadlock = self.processed
+
+			if self.frozen:
+				# retry the postponed tasks
+				self.outstanding += self.frozen
+				self.frozen = []
+			elif not self.count:
+				# fetch the next group of parallelizable tasks
+				self.outstanding.extend(next(self.biter))
+				self.total = self.bld.total()
+				break
+
+	def add_more_tasks(self, tsk):
+		"""
+		Tasks may be added dynamically during the build by binding them to the task :py:attr:`waflib.Task.TaskBase.more_tasks`
+
+		:param tsk: task
+		:type tsk: :py:attr:`waflib.Task.TaskBase`
+		"""
+		if getattr(tsk, 'more_tasks', None):
+			self.outstanding += tsk.more_tasks
+			self.total += len(tsk.more_tasks)
+
+	def get_out(self):
+		"""
+		Obtain one task returned from the task consumers, and update the task count. Add more tasks if necessary through
+		:py:attr:`waflib.Runner.Parallel.add_more_tasks`.
+
+		:rtype: :py:attr:`waflib.Task.TaskBase`
+		"""
+		# blocking get: waits for a consumer to post a finished task
+		tsk = self.out.get()
+		if not self.stop:
+			self.add_more_tasks(tsk)
+		self.count -= 1
+		self.dirty = True
+		return tsk
+
+	def error_handler(self, tsk):
+		"""
+		Called when a task cannot be executed. The flag :py:attr:`waflib.Runner.Parallel.stop` is set, unless
+		the build is executed with::
+
+			$ waf build -k
+
+		:param tsk: task
+		:type tsk: :py:attr:`waflib.Task.TaskBase`
+		"""
+		if not self.bld.keep:
+			self.stop = True
+		self.error.append(tsk)
+
+	def add_task(self, tsk):
+		"""
+		Pass a task to a consumer.
+
+		:param tsk: task
+		:type tsk: :py:attr:`waflib.Task.TaskBase`
+		"""
+		# lazily create the consumer pool on first use
+		try:
+			self.pool
+		except AttributeError:
+			self.init_task_pool()
+		self.ready.put(tsk)
+
+	def init_task_pool(self):
+		# lazy creation, and set a common pool for all task consumers
+		pool = self.pool = [get_pool() for i in range(self.numjobs)]
+		self.ready = Queue(0)
+		# make every consumer read from our shared 'ready' queue by posting
+		# a callable (executed by TaskConsumer.loop with the consumer as arg)
+		def setq(consumer):
+			consumer.ready = self.ready
+		for x in pool:
+			x.ready.put(setq)
+		return pool
+
+	def free_task_pool(self):
+		# return the consumers, setting a different queue for each of them
+		def setq(consumer):
+			consumer.ready = Queue(0)
+			# posting self lets get_out() below account for each consumer
+			self.out.put(self)
+		try:
+			pool = self.pool
+		except:
+			pass
+		else:
+			for x in pool:
+				self.ready.put(setq)
+			for x in pool:
+				self.get_out()
+			for x in pool:
+				put_pool(x)
+			self.pool = []
+
+	def start(self):
+		"""
+		Give tasks to :py:class:`waflib.Runner.TaskConsumer` instances until the build finishes or the ``stop`` flag is set.
+		If only one job is used, then execute the tasks one by one, without consumers.
+		"""
+
+		self.total = self.bld.total()
+
+		while not self.stop:
+
+			self.refill_task_list()
+
+			# consider the next task
+			tsk = self.get_next_task()
+			if not tsk:
+				if self.count:
+					# tasks may add new ones after they are run
+					continue
+				else:
+					# no tasks to run, no tasks running, time to exit
+					break
+
+			if tsk.hasrun:
+				# if the task is marked as "run", just skip it
+				self.processed += 1
+				continue
+
+			if self.stop: # stop immediately after a failure was detected
+				break
+
+			try:
+				st = tsk.runnable_status()
+			except Exception:
+				self.processed += 1
+				# TODO waf 1.7 this piece of code should go in the error_handler
+				tsk.err_msg = Utils.ex_stack()
+				if not self.stop and self.bld.keep:
+					tsk.hasrun = Task.SKIPPED
+					if self.bld.keep == 1:
+						# if -k stop at the first exception, if -kk try to go as far as possible
+						if Logs.verbose > 1 or not self.error:
+							self.error.append(tsk)
+						self.stop = True
+					else:
+						if Logs.verbose > 1:
+							self.error.append(tsk)
+					continue
+				tsk.hasrun = Task.EXCEPTION
+				self.error_handler(tsk)
+				continue
+
+			if st == Task.ASK_LATER:
+				self.postpone(tsk)
+			elif st == Task.SKIP_ME:
+				self.processed += 1
+				tsk.hasrun = Task.SKIPPED
+				self.add_more_tasks(tsk)
+			else:
+				# run me: put the task in ready queue
+				tsk.position = (self.processed, self.total)
+				self.count += 1
+				tsk.master = self
+				self.processed += 1
+
+				if self.numjobs == 1:
+					tsk.process()
+				else:
+					self.add_task(tsk)
+
+		# self.count represents the tasks that have been made available to the consumer threads
+		# collect all the tasks after an error else the message may be incomplete
+		while self.error and self.count:
+			self.get_out()
+
+		#print loop
+		assert (self.count == 0 or self.stop)
+
+		# free the task pool, if any
+		self.free_task_pool()
+
diff --git a/waflib/Scripting.py b/waflib/Scripting.py
new file mode 100644
index 0000000..3443689
--- /dev/null
+++ b/waflib/Scripting.py
@@ -0,0 +1,577 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2010 (ita)
+
+"Module called for configuring, compiling and installing targets"
+
+import os, shutil, traceback, errno, sys, stat
+from waflib import Utils, Configure, Logs, Options, ConfigSet, Context, Errors, Build, Node
+
+# set by tools to force a specific build directory (None = use the wscript value)
+build_dir_override = None
+
+# commands for which waf must NOT climb up to find a configured project
+no_climb_commands = ['configure']
+
+# command executed when none is given on the command line
+default_cmd = "build"
+
+def waf_entry_point(current_directory, version, wafdir):
+	"""
+	This is the main entry point, all Waf execution starts here.
+
+	:param current_directory: absolute path representing the current directory
+	:type current_directory: string
+	:param version: version number
+	:type version: string
+	:param wafdir: absolute path representing the directory of the waf library
+	:type wafdir: string
+	"""
+
+	Logs.init_log()
+
+	# refuse to run when the waf script and the waflib versions differ
+	if Context.WAFVERSION != version:
+		Logs.error('Waf script %r and library %r do not match (directory %r)' % (version, Context.WAFVERSION, wafdir))
+		sys.exit(1)
+
+	# --version does not need a project: print and leave
+	if '--version' in sys.argv:
+		Context.run_dir = current_directory
+		ctx = Context.create_context('options')
+		ctx.curdir = current_directory
+		ctx.parse_args()
+		sys.exit(0)
+
+	Context.waf_dir = wafdir
+	Context.launch_dir = current_directory
+
+	# if 'configure' is in the commands, do not search any further
+	no_climb = os.environ.get('NOCLIMB', None)
+	if not no_climb:
+		for k in no_climb_commands:
+			if k in sys.argv:
+				no_climb = True
+				break
+
+	# try to find a lock file (if the project was configured)
+	# at the same time, store the first wscript file seen
+	cur = current_directory
+	while cur:
+		lst = os.listdir(cur)
+		if Options.lockfile in lst:
+			env = ConfigSet.ConfigSet()
+			try:
+				env.load(os.path.join(cur, Options.lockfile))
+				ino = os.stat(cur)[stat.ST_INO]
+			except Exception:
+				pass
+			else:
+				# check if the folder was not moved
+				for x in [env.run_dir, env.top_dir, env.out_dir]:
+					if Utils.is_win32:
+						if cur == x:
+							load = True
+							break
+					else:
+						# if the filesystem features symlinks, compare the inode numbers
+						try:
+							ino2 = os.stat(x)[stat.ST_INO]
+						except:
+							pass
+						else:
+							if ino == ino2:
+								load = True
+								break
+				else:
+					# for/else: no directory matched -> the lock file is stale
+					Logs.warn('invalid lock file in %s' % cur)
+					load = False
+
+				if load:
+					Context.run_dir = env.run_dir
+					Context.top_dir = env.top_dir
+					Context.out_dir = env.out_dir
+					break
+
+		if not Context.run_dir:
+			if Context.WSCRIPT_FILE in lst:
+				Context.run_dir = cur
+
+		# climb one directory up; stop at the filesystem root
+		next = os.path.dirname(cur)
+		if next == cur:
+			break
+		cur = next
+
+		if no_climb:
+			break
+
+	if not Context.run_dir:
+		if '-h' in sys.argv or '--help' in sys.argv:
+			Logs.warn('No wscript file found: the help message may be incomplete')
+			Context.run_dir = current_directory
+			ctx = Context.create_context('options')
+			ctx.curdir = current_directory
+			ctx.parse_args()
+			sys.exit(0)
+		Logs.error('Waf: Run from a directory containing a file named %r' % Context.WSCRIPT_FILE)
+		sys.exit(1)
+
+	try:
+		os.chdir(Context.run_dir)
+	except OSError:
+		Logs.error('Waf: The folder %r is unreadable' % Context.run_dir)
+		sys.exit(1)
+
+	try:
+		set_main_module(Context.run_dir + os.sep + Context.WSCRIPT_FILE)
+	except Errors.WafError as e:
+		Logs.pprint('RED', e.verbose_msg)
+		Logs.error(str(e))
+		sys.exit(1)
+	except Exception as e:
+		# NOTE(review): Logs.error receives a second positional argument here —
+		# confirm its signature accepts it
+		Logs.error('Waf: The wscript in %r is unreadable' % Context.run_dir, e)
+		traceback.print_exc(file=sys.stdout)
+		sys.exit(2)
+
+	"""
+	import cProfile, pstats
+	cProfile.runctx("import Scripting; Scripting.run_commands()", {}, {}, 'profi.txt')
+	p = pstats.Stats('profi.txt')
+	p.sort_stats('time').print_stats(25)
+	"""
+	try:
+		run_commands()
+	except Errors.WafError as e:
+		if Logs.verbose > 1:
+			Logs.pprint('RED', e.verbose_msg)
+		Logs.error(e.msg)
+		sys.exit(1)
+	except Exception as e:
+		traceback.print_exc(file=sys.stdout)
+		sys.exit(2)
+	except KeyboardInterrupt:
+		Logs.pprint('RED', 'Interrupted')
+		sys.exit(68)
+	#"""
+
+def set_main_module(file_path):
+	"""
+	Read the main wscript file into :py:const:`waflib.Context.Context.g_module` and
+	bind default functions such as ``init``, ``dist``, ``distclean`` if not defined.
+	Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization.
+
+	:param file_path: absolute path representing the top-level wscript file
+	:type file_path: string
+	"""
+	Context.g_module = Context.load_module(file_path)
+	Context.g_module.root_path = file_path
+
+	# note: to register the module globally, use the following:
+	# sys.modules['wscript_main'] = g_module
+
+	# install a default only when the wscript does not define its own
+	def set_def(obj):
+		name = obj.__name__
+		if not name in Context.g_module.__dict__:
+			setattr(Context.g_module, name, obj)
+	# NOTE: 'update' appears twice in this list; the duplication is harmless
+	for k in [update, dist, distclean, distcheck, update]:
+		set_def(k)
+	# add dummy init and shutdown functions if they're not defined
+	if not 'init' in Context.g_module.__dict__:
+		Context.g_module.init = Utils.nada
+	if not 'shutdown' in Context.g_module.__dict__:
+		Context.g_module.shutdown = Utils.nada
+	if not 'options' in Context.g_module.__dict__:
+		Context.g_module.options = Utils.nada
+
+def parse_options():
+	"""
+	Parse the command-line options and initialize the logging system.
+	Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization.
+	"""
+	Context.create_context('options').execute()
+
+	# default to 'build' when no command was given
+	if not Options.commands:
+		Options.commands = [default_cmd]
+	Options.commands = [x for x in Options.commands if x != 'options'] # issue 1076
+
+	# process some internal Waf options
+	Logs.verbose = Options.options.verbose
+	Logs.init_log()
+
+	# enable debugging zones; --zones implies at least -v,
+	# and plain -v enables the 'runner' zone
+	if Options.options.zones:
+		Logs.zones = Options.options.zones.split(',')
+		if not Logs.verbose:
+			Logs.verbose = 1
+	elif Logs.verbose > 0:
+		Logs.zones = ['runner']
+
+	if Logs.verbose > 2:
+		Logs.zones = ['*']
+
+def run_command(cmd_name):
+	"""
+	Execute a single command. Called by :py:func:`waflib.Scripting.run_commands`.
+
+	:param cmd_name: command to execute, like ``build``
+	:type cmd_name: string
+	:return: the context that executed the command
+	"""
+	ctx = Context.create_context(cmd_name)
+	ctx.options = Options.options # provided for convenience
+	ctx.cmd = cmd_name
+	ctx.execute()
+	return ctx
+
+def run_commands():
+	"""
+	Execute the commands that were given on the command-line, and the other options
+	Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization, and executed
+	after :py:func:`waflib.Scripting.parse_options`.
+	"""
+	parse_options()
+	# 'init' and 'shutdown' bracket the user-requested commands
+	run_command('init')
+	while Options.commands:
+		cmd_name = Options.commands.pop(0)
+
+		# time each command, printed unless a progress bar is active
+		timer = Utils.Timer()
+		run_command(cmd_name)
+		if not Options.options.progress_bar:
+			elapsed = ' (%s)' % str(timer)
+			Logs.info('%r finished successfully%s' % (cmd_name, elapsed))
+	run_command('shutdown')
+
+###########################################################################################
+
+def _can_distclean(name):
+	# WARNING: this method may disappear anytime
+	# only files with these extensions are removed when top == out
+	for k in '.o .moc .exe'.split():
+		if name.endswith(k):
+			return True
+	return False
+
+def distclean_dir(dirname):
+	"""
+	Distclean function called in the particular case when::
+
+		top == out
+
+	:param dirname: absolute path of the folder to clean
+	:type dirname: string
+	"""
+	# remove only recognized build artifacts, never the sources
+	for (root, dirs, files) in os.walk(dirname):
+		for f in files:
+			if _can_distclean(f):
+				fname = root + os.sep + f
+				try:
+					os.unlink(fname)
+				except:
+					Logs.warn('could not remove %r' % fname)
+
+	# NOTE(review): these paths are relative — assumes the current working
+	# directory is the project directory; confirm against the caller
+	for x in [Context.DBFILE, 'config.log']:
+		try:
+			os.unlink(x)
+		except:
+			pass
+
+	try:
+		shutil.rmtree('c4che')
+	except:
+		pass
+
+def distclean(ctx):
+	'''removes the build directory'''
+	lst = os.listdir('.')
+	for f in lst:
+		if f == Options.lockfile:
+			# read the lock file to locate the configured out/top/run dirs
+			try:
+				proj = ConfigSet.ConfigSet(f)
+			except:
+				Logs.warn('could not read %r' % f)
+				continue
+
+			if proj['out_dir'] != proj['top_dir']:
+				try:
+					shutil.rmtree(proj['out_dir'])
+				except IOError:
+					pass
+				except OSError as e:
+					if e.errno != errno.ENOENT:
+						Logs.warn('project %r cannot be removed' % proj[Context.OUT])
+			else:
+				# top == out: remove build artifacts selectively
+				distclean_dir(proj['out_dir'])
+
+			# drop the lock file from every known project directory
+			for k in (proj['out_dir'], proj['top_dir'], proj['run_dir']):
+				try:
+					os.remove(os.path.join(k, Options.lockfile))
+				except OSError as e:
+					if e.errno != errno.ENOENT:
+						Logs.warn('file %r cannot be removed' % f)
+
+		# remove the local waf cache
+		if f.startswith('.waf') and not Options.commands:
+			shutil.rmtree(f, ignore_errors=True)
+
+class Dist(Context.Context):
+	"""
+	Create an archive containing the project source code::
+
+		$ waf dist
+	"""
+	cmd = 'dist'
+	fun = 'dist'
+	# archive format; see ext_algo for extension overrides per algorithm
+	algo = 'tar.bz2'
+	ext_algo = {}
+
+	def execute(self):
+		"""
+		See :py:func:`waflib.Context.Context.execute`
+		"""
+		self.recurse([os.path.dirname(Context.g_module.root_path)])
+		self.archive()
+
+	def archive(self):
+		"""
+		Create the archive.
+		"""
+		import tarfile
+
+		arch_name = self.get_arch_name()
+
+		# the wscript 'dist' function may have set base_path already
+		try:
+			self.base_path
+		except:
+			self.base_path = self.path
+
+		# remove a stale archive of the same name, if any
+		node = self.base_path.make_node(arch_name)
+		try:
+			node.delete()
+		except:
+			pass
+
+		files = self.get_files()
+
+		if self.algo.startswith('tar.'):
+			# e.g. 'tar.bz2' -> mode 'w:bz2'
+			tar = tarfile.open(arch_name, 'w:' + self.algo.replace('tar.', ''))
+
+			for x in files:
+				self.add_tar_file(x, tar)
+			tar.close()
+		elif self.algo == 'zip':
+			import zipfile
+			zip = zipfile.ZipFile(arch_name, 'w', compression=zipfile.ZIP_DEFLATED)
+
+			for x in files:
+				archive_name = self.get_base_name() + '/' + x.path_from(self.base_path)
+				zip.write(x.abspath(), archive_name, zipfile.ZIP_DEFLATED)
+			zip.close()
+		else:
+			self.fatal('Valid algo types are tar.bz2, tar.gz or zip')
+
+		# print a sha1 digest of the archive for release verification
+		try:
+			from hashlib import sha1 as sha
+		except ImportError:
+			from sha import sha
+		try:
+			digest = " (sha=%r)" % sha(node.read()).hexdigest()
+		except:
+			digest = ''
+
+		Logs.info('New archive created: %s%s' % (self.arch_name, digest))
+
+	def get_tar_path(self, node):
+		"""
+		return the path to use for a node in the tar archive, the purpose of this
+		is to let subclasses resolve symbolic links or to change file names
+		"""
+		return node.abspath()
+
+	def add_tar_file(self, x, tar):
+		"""
+		Add a file to the tar archive. Transform symlinks into files if the files lie out of the project tree.
+		"""
+		p = self.get_tar_path(x)
+		tinfo = tar.gettarinfo(name=p, arcname=self.get_tar_prefix() + '/' + x.path_from(self.base_path))
+		# normalize ownership so the archive is reproducible across machines
+		tinfo.uid   = 0
+		tinfo.gid   = 0
+		tinfo.uname = 'root'
+		tinfo.gname = 'root'
+
+		fu = None
+		try:
+			fu = open(p, 'rb')
+			tar.addfile(tinfo, fileobj=fu)
+		finally:
+			if fu:
+				fu.close()
+
+	def get_tar_prefix(self):
+		# directory prefix inside the tar archive; defaults to the base name
+		try:
+			return self.tar_prefix
+		except:
+			return self.get_base_name()
+
+	def get_arch_name(self):
+		"""
+		Return the name of the archive to create. Change the default value by setting *arch_name*::
+
+			def dist(ctx):
+				ctx.arch_name = 'ctx.tar.bz2'
+
+		:rtype: string
+		"""
+		try:
+			self.arch_name
+		except:
+			self.arch_name = self.get_base_name() + '.' + self.ext_algo.get(self.algo, self.algo)
+		return self.arch_name
+
+	def get_base_name(self):
+		"""
+		Return the default name of the main directory in the archive, which is set to *appname-version*.
+		Set the attribute *base_name* to change the default value::
+
+			def dist(ctx):
+				ctx.base_name = 'files'
+
+		:rtype: string
+		"""
+		try:
+			self.base_name
+		except:
+			appname = getattr(Context.g_module, Context.APPNAME, 'noname')
+			version = getattr(Context.g_module, Context.VERSION, '1.0')
+			self.base_name = appname + '-' + version
+		return self.base_name
+
+	def get_excl(self):
+		"""
+		Return the patterns to exclude for finding the files in the top-level directory. Set the attribute *excl*
+		to change the default value::
+
+			def dist(ctx):
+				ctx.excl = 'build **/*.o **/*.class'
+
+		:rtype: string
+		"""
+		try:
+			return self.excl
+		except:
+			self.excl = Node.exclude_regs + ' **/waf-1.6.* **/.waf-1.6* **/waf3-1.6.* **/.waf3-1.6* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*'
+			# also exclude the build directory, when known
+			nd = self.root.find_node(Context.out_dir)
+			if nd:
+				self.excl += ' ' + nd.path_from(self.base_path)
+			return self.excl
+
+	def get_files(self):
+		"""
+		The files to package are searched automatically by :py:func:`waflib.Node.Node.ant_glob`. Set
+		*files* to prevent this behaviour::
+
+			def dist(ctx):
+				ctx.files = ctx.path.find_node('wscript')
+
+		The files are searched from the directory 'base_path', to change it, set::
+
+			def dist(ctx):
+				ctx.base_path = path
+
+		:rtype: list of :py:class:`waflib.Node.Node`
+		"""
+		try:
+			files = self.files
+		except:
+			files = self.base_path.ant_glob('**/*', excl=self.get_excl())
+		return files
+
+
+def dist(ctx):
+	'''makes a tarball for redistributing the sources'''
+	# placeholder: the work is done by the Dist context class above
+	pass
+
+class DistCheck(Dist):
+	"""
+	Create an archive of the project, and try to build the project in a temporary directory::
+
+		$ waf distcheck
+	"""
+
+	fun = 'distcheck'
+	cmd = 'distcheck'
+
+	def execute(self):
+		"""
+		See :py:func:`waflib.Context.Context.execute`
+		"""
+		self.recurse([os.path.dirname(Context.g_module.root_path)])
+		self.archive()
+		self.check()
+
+	def check(self):
+		"""
+		Create the archive, uncompress it and try to build the project
+		"""
+		import tempfile, tarfile
+
+		# extract the freshly built archive into the current directory
+		t = None
+		try:
+			t = tarfile.open(self.get_arch_name())
+			for x in t:
+				t.extract(x)
+		finally:
+			if t:
+				t.close()
+
+		# configure/install/uninstall the extracted tree with a scratch destdir
+		instdir = tempfile.mkdtemp('.inst', self.get_base_name())
+		ret = Utils.subprocess.Popen([sys.argv[0], 'configure', 'install', 'uninstall', '--destdir=' + instdir], cwd=self.get_base_name()).wait()
+		if ret:
+			raise Errors.WafError('distcheck failed with code %i' % ret)
+
+		# a correct uninstall leaves nothing behind
+		if os.path.exists(instdir):
+			raise Errors.WafError('distcheck succeeded, but files were left in %s' % instdir)
+
+		shutil.rmtree(self.get_base_name())
+
+
+def distcheck(ctx):
+	'''checks if the project compiles (tarball from 'dist')'''
+	# placeholder: the work is done by the DistCheck context class above
+	pass
+
+def update(ctx):
+	'''updates the plugins from the *waflib/extras* directory'''
+	# NOTE(review): ''.split(',') yields [''], so the fallback branch below is
+	# never taken when --files is empty — confirm intended behavior
+	lst = Options.options.files.split(',')
+	if not lst:
+		lst = [x for x in Utils.listdir(Context.waf_dir + '/waflib/extras') if x.endswith('.py')]
+	for x in lst:
+		tool = x.replace('.py', '')
+		try:
+			Configure.download_tool(tool, force=True, ctx=ctx)
+		except Errors.WafError:
+			Logs.error('Could not find the tool %s in the remote repository' % x)
+
+def autoconfigure(execute_method):
+	"""
+	Decorator used to set the commands that can be configured automatically
+	"""
+	def execute(self):
+		# pass-through unless the project opted into autoconfig
+		if not Configure.autoconfig:
+			return execute_method(self)
+
+		# reconfigure when the lock file is missing/unreadable, the project
+		# directory moved, or any file recorded at configure time changed
+		env = ConfigSet.ConfigSet()
+		do_config = False
+		try:
+			env.load(os.path.join(Context.top_dir, Options.lockfile))
+		except Exception:
+			Logs.warn('Configuring the project')
+			do_config = True
+		else:
+			if env.run_dir != Context.run_dir:
+				do_config = True
+			else:
+				# compare a combined hash of the tracked files
+				h = 0
+				for f in env['files']:
+					h = hash((h, Utils.readf(f, 'rb')))
+				do_config = h != env.hash
+
+		if do_config:
+			# re-queue: configure first, then retry this command
+			Options.commands.insert(0, self.cmd)
+			Options.commands.insert(0, 'configure')
+			return
+
+		return execute_method(self)
+	return execute
+Build.BuildContext.execute = autoconfigure(Build.BuildContext.execute)
+
diff --git a/waflib/Task.py b/waflib/Task.py
new file mode 100644
index 0000000..97c9d2f
--- /dev/null
+++ b/waflib/Task.py
@@ -0,0 +1,1238 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2010 (ita)
+
+"""
+Tasks represent atomic operations such as processes.
+"""
+
+import os, shutil, re, tempfile
+from waflib import Utils, Logs, Errors
+
+# task states
+# task states (stored in TaskBase.hasrun after execution)
+NOT_RUN = 0
+"""The task was not executed yet"""
+
+MISSING = 1
+"""The task has been executed but the files have not been created"""
+
+CRASHED = 2
+"""The task execution returned a non-zero exit status"""
+
+EXCEPTION = 3
+"""An exception occured in the task execution"""
+
+SKIPPED = 8
+"""The task did not have to be executed"""
+
+SUCCESS = 9
+"""The task was successfully executed"""
+
+# scheduler verdicts (returned by runnable_status)
+ASK_LATER = -1
+"""The task is not ready to be executed"""
+
+SKIP_ME = -2
+"""The task does not need to be executed"""
+
+RUN_ME = -3
+"""The task must be executed"""
+
+# Scriptlet templates: compile_fun_shell/compile_fun_noshell substitute the
+# expanded command expression into these strings and exec the result (funex).
+# Note: the shell template body contains TWO printf-style conversion specs,
+# '%s' and '% s' (space flag) -- both are consumed by the two-element tuple
+# substitution in compile_fun_shell; '% s' is not a typo.
+COMPILE_TEMPLATE_SHELL = '''
+def f(tsk):
+	env = tsk.env
+	gen = tsk.generator
+	bld = gen.bld
+	wd = getattr(tsk, 'cwd', None)
+	p = env.get_flat
+	tsk.last_cmd = cmd = \'\'\' %s \'\'\' % s
+	return tsk.exec_command(cmd, cwd=wd, env=env.env or None)
+'''
+
+COMPILE_TEMPLATE_NOSHELL = '''
+def f(tsk):
+	env = tsk.env
+	gen = tsk.generator
+	bld = gen.bld
+	wd = getattr(tsk, 'cwd', None)
+	def to_list(xx):
+		if isinstance(xx, str): return [xx]
+		return xx
+	tsk.last_cmd = lst = []
+	%s
+	lst = [x for x in lst if x]
+	return tsk.exec_command(lst, cwd=wd, env=env.env or None)
+'''
+
+def cache_outputs(cls):
+	"""
+	Task class decorator applied to all task classes by default unless they define the attribute 'nocache'::
+
+		from waflib import Task
+		class foo(Task.Task):
+			nocache = True
+
+	If bld.cache_global is defined and if the task instances produces output nodes,
+	the files will be copied into a folder in the cache directory
+
+	The files may also be retrieved from that folder, if it exists
+	"""
+	# wrap run(): short-circuit to success when the outputs can be fetched
+	# from the global cache (see Task.can_retrieve_cache)
+	m1 = cls.run
+	def run(self):
+		bld = self.generator.bld
+		if bld.cache_global and not bld.nocache:
+			if self.can_retrieve_cache():
+				return 0
+		return m1(self)
+	cls.run = run
+
+	# wrap post_run(): after a real run, push the outputs into the cache
+	m2 = cls.post_run
+	def post_run(self):
+		bld = self.generator.bld
+		ret = m2(self)
+		if bld.cache_global and not bld.nocache:
+			self.put_files_cache()
+		return ret
+	cls.post_run = post_run
+
+	return cls
+
+
+classes = {}
+"class tasks created by user scripts or Waf tools are kept in this dict name -> class object"
+
+class store_task_type(type):
+	"""
+	Metaclass: store the task types into :py:const:`waflib.Task.classes`.
+	The attribute 'run_str' will be processed to compute a method 'run' on the task class
+	The decorator :py:func:`waflib.Task.cache_outputs` is also applied to the class
+	"""
+	def __init__(cls, name, bases, dict):
+		super(store_task_type, cls).__init__(name, bases, dict)
+		name = cls.__name__
+
+		# classes named 'foo_task' are registered under the short name 'foo'
+		if name.endswith('_task'):
+			name = name.replace('_task', '')
+		if name != 'evil' and name != 'TaskBase':
+			global classes
+
+			if getattr(cls, 'run_str', None):
+				# if a string is provided, convert it to a method
+				(f, dvars) = compile_fun(cls.run_str, cls.shell)
+				# keep the original string as the class hash contribution
+				cls.hcode = cls.run_str
+				cls.run_str = None
+				cls.run = f
+				# the variables referenced by the scriptlet become signature deps
+				cls.vars = list(set(cls.vars + dvars))
+				cls.vars.sort()
+			elif getattr(cls, 'run', None) and not 'hcode' in cls.__dict__:
+				# getattr(cls, 'hcode') would look in the upper classes
+				cls.hcode = Utils.h_fun(cls.run)
+
+			if not getattr(cls, 'nocache', None):
+				cls = cache_outputs(cls)
+
+			classes[name] = cls
+
+evil = store_task_type('evil', (object,), {})
+"Base class provided to avoid writing a metaclass, so the code can run in python 2.6 and 3.x unmodified"
+
+class TaskBase(evil):
+	"""
+	Base class for all Waf tasks, which should be seen as an interface.
+	For illustration purposes, instances of this class will execute the attribute
+	'fun' in :py:meth:`waflib.Task.TaskBase.run`. When in doubt, create
+	subclasses of :py:class:`waflib.Task.Task` instead.
+
+	Subclasses should override these methods:
+
+	#. __str__: string to display to the user
+	#. runnable_status: ask the task if it should be run, skipped, or if we have to ask later
+	#. run: let threads execute the task
+	#. post_run: let threads update the data regarding the task (cache)
+	"""
+
+	color = 'GREEN'
+	"""Color for the console display, see :py:const:`waflib.Logs.colors_lst`"""
+
+	ext_in = []
+	"""File extensions that objects of this task class might use"""
+
+	ext_out = []
+	"""File extensions that objects of this task class might create"""
+
+	before = []
+	"""List of task class names to execute before instances of this class"""
+
+	after = []
+	"""List of task class names to execute after instances of this class"""
+
+	hcode = ''
+	"""String representing an additional hash for the class representation"""
+
+	def __init__(self, *k, **kw):
+		"""
+		The base task class requires a task generator, which will be itself if missing
+		"""
+		self.hasrun = NOT_RUN
+		try:
+			self.generator = kw['generator']
+		except KeyError:
+			self.generator = self
+
+	def __repr__(self):
+		"for debugging purposes"
+		return '\n\t{task %r: %s %s}' % (self.__class__.__name__, id(self), str(getattr(self, 'fun', '')))
+
+	def __str__(self):
+		"string to display to the user"
+		if hasattr(self, 'fun'):
+			return 'executing: %s\n' % self.fun.__name__
+		return self.__class__.__name__ + '\n'
+
+	def __hash__(self):
+		"Very fast hashing scheme but not persistent (replace/implement in subclasses and see :py:meth:`waflib.Task.Task.uid`)"
+		return id(self)
+
+	def exec_command(self, cmd, **kw):
+		"""
+		Wrapper for :py:meth:`waflib.Context.Context.exec_command` which sets a current working directory to ``build.variant_dir``
+
+		:return: the return code
+		:rtype: int
+		"""
+		bld = self.generator.bld
+		# the working directory is cached on the build context (bld.cwd) the
+		# first time any task runs a command; the AttributeError branch below
+		# performs that initialization
+		try:
+			if not kw.get('cwd', None):
+				kw['cwd'] = bld.cwd
+		except AttributeError:
+			bld.cwd = kw['cwd'] = bld.variant_dir
+		return bld.exec_command(cmd, **kw)
+
+	def runnable_status(self):
+		"""
+		State of the task
+
+		:return: a task state in :py:const:`waflib.Task.RUN_ME`, :py:const:`waflib.Task.SKIP_ME` or :py:const:`waflib.Task.ASK_LATER`.
+		:rtype: int
+		"""
+		return RUN_ME
+
+	def process(self):
+		"""
+		Assume that the task has had a new attribute ``master`` which is an instance of :py:class:`waflib.Runner.Parallel`.
+		Execute the task and then put it back in the queue :py:attr:`waflib.Runner.Parallel.out` (may be replaced by subclassing).
+		"""
+		m = self.master
+		if m.stop:
+			m.out.put(self)
+			return
+
+		# remove the task signature immediately before it is executed
+		# in case of failure the task will be executed again
+		# (bare except: the uid key may simply not exist yet)
+		try:
+			del self.generator.bld.task_sigs[self.uid()]
+		except:
+			pass
+
+		try:
+			self.generator.bld.returned_tasks.append(self)
+			self.log_display(self.generator.bld)
+			ret = self.run()
+		except Exception:
+			self.err_msg = Utils.ex_stack()
+			self.hasrun = EXCEPTION
+
+			# TODO cleanup
+			m.error_handler(self)
+			m.out.put(self)
+			return
+
+		# a non-zero return code from run() marks the task as crashed
+		if ret:
+			self.err_code = ret
+			self.hasrun = CRASHED
+		else:
+			try:
+				self.post_run()
+			except Errors.WafError:
+				pass
+			except Exception:
+				self.err_msg = Utils.ex_stack()
+				self.hasrun = EXCEPTION
+			else:
+				self.hasrun = SUCCESS
+		if self.hasrun != SUCCESS:
+			m.error_handler(self)
+
+		m.out.put(self)
+
+	def run(self):
+		"""
+		Called by threads to execute the tasks. The default is empty and meant to be overridden in subclasses.
+		It is a bad idea to create nodes in this method (so, no node.ant_glob)
+
+		:rtype: int
+		"""
+		if hasattr(self, 'fun'):
+			return self.fun(self)
+		return 0
+
+	def post_run(self):
+		"Update the cache files (executed by threads). Override in subclasses."
+		pass
+
+	def log_display(self, bld):
+		"Write the execution status on the context logger"
+		bld.to_log(self.display())
+
+	def display(self):
+		"""
+		Return an execution status for the console, the progress bar, or the IDE output.
+
+		:rtype: string
+		"""
+		col1 = Logs.colors(self.color)
+		col2 = Logs.colors.NORMAL
+		master = self.master
+
+		def cur():
+			# the current task position, computed as late as possible
+			tmp = -1
+			if hasattr(master, 'ready'):
+				tmp -= master.ready.qsize()
+			return master.processed + tmp
+
+		if self.generator.bld.progress_bar == 1:
+			return self.generator.bld.progress_line(cur(), master.total, col1, col2)
+
+		if self.generator.bld.progress_bar == 2:
+			# machine-parseable progress format (one record per task)
+			ela = str(self.generator.bld.timer)
+			try:
+				ins  = ','.join([n.name for n in self.inputs])
+			except AttributeError:
+				ins = ''
+			try:
+				outs = ','.join([n.name for n in self.outputs])
+			except AttributeError:
+				outs = ''
+			return '|Total %s|Current %s|Inputs %s|Outputs %s|Time %s|\n' % (master.total, cur(), ins, outs, ela)
+
+		s = str(self)
+		if not s:
+			return None
+
+		# pad the counter to the width of the total, e.g. [ 12/100]
+		total = master.total
+		n = len(str(total))
+		fs = '[%%%dd/%%%dd] %%s%%s%%s' % (n, n)
+		return fs % (cur(), total, col1, s, col2)
+
+	def attr(self, att, default=None):
+		"""
+		Retrieve an attribute from the instance or from the class.
+
+		:param att: variable name
+		:type att: string
+		:param default: default value
+		"""
+		# self is used as a sentinel to detect a missing instance attribute
+		ret = getattr(self, att, self)
+		if ret is self: return getattr(self.__class__, att, default)
+		return ret
+
+	def hash_constraints(self):
+		"""
+		Identify a task type for all the constraints relevant for the scheduler: precedence, file production
+
+		:return: a hash value
+		:rtype: string
+		"""
+		cls = self.__class__
+		tup = (str(cls.before), str(cls.after), str(cls.ext_in), str(cls.ext_out), cls.__name__, cls.hcode)
+		h = hash(tup)
+		return h
+
+	def format_error(self):
+		"""
+		Error message to display to the user when a build fails
+
+		:rtype: string
+		"""
+		msg = getattr(self, 'last_cmd', '')
+		name = getattr(self.generator, 'name', '')
+		if getattr(self, "err_msg", None):
+			return self.err_msg
+		elif not self.hasrun:
+			return 'task in %r was not executed for some reason: %r' % (name, self)
+		elif self.hasrun == CRASHED:
+			try:
+				return ' -> task in %r failed (exit status %r): %r\n%r' % (name, self.err_code, self, msg)
+			except AttributeError:
+				return ' -> task in %r failed: %r\n%r' % (name, self, msg)
+		elif self.hasrun == MISSING:
+			return ' -> missing files in %r: %r\n%r' % (name, self, msg)
+		else:
+			return 'invalid status for task in %r: %r' % (name, self.hasrun)
+
+	def colon(self, var1, var2):
+		"""
+		private function for the moment
+
+		used for scriptlet expressions such as ${FOO_ST:FOO}, for example, if
+		env.FOO_ST = ['-a', '-b']
+		env.FOO    = ['1', '2']
+		then the result will be ['-a', '-b', '1', '-a', '-b', '2']
+		"""
+		tmp = self.env[var1]
+		if isinstance(var2, str):
+			it = self.env[var2]
+		else:
+			it = var2
+		if isinstance(tmp, str):
+			# string prefix: apply it as a format to each element
+			return [tmp % x for x in it]
+		else:
+			if Logs.verbose and not tmp and it:
+				Logs.warn('Missing env variable %r for task %r (generator %r)' % (var1, self, self.generator))
+			# list prefix: interleave the prefix before each element
+			lst = []
+			for y in it:
+				lst.extend(tmp)
+				lst.append(y)
+			return lst
+
+class Task(TaskBase):
+	"""
+	This class deals with the filesystem (:py:class:`waflib.Node.Node`). The method :py:class:`waflib.Task.Task.runnable_status`
+	uses a hash value (from :py:class:`waflib.Task.Task.signature`) which is persistent from build to build. When the value changes,
+	the task has to be executed. The method :py:class:`waflib.Task.Task.post_run` will assign the task signature to the output
+	nodes (if present).
+	"""
+	vars = []
+	"""Variables to depend on (class attribute used for :py:meth:`waflib.Task.Task.sig_vars`)"""
+
+	shell = False
+	"""Execute the command with the shell (class attribute)"""
+
+	def __init__(self, *k, **kw):
+		TaskBase.__init__(self, *k, **kw)
+
+		self.env = kw['env']
+		"""ConfigSet object (make sure to provide one)"""
+
+		self.inputs  = []
+		"""List of input nodes, which represent the files used by the task instance"""
+
+		self.outputs = []
+		"""List of output nodes, which represent the files created by the task instance"""
+
+		self.dep_nodes = []
+		"""List of additional nodes to depend on"""
+
+		self.run_after = set([])
+		"""Set of tasks that must be executed before this one"""
+
+		# Additionally, you may define the following
+		#self.dep_vars  = 'PREFIX DATADIR'
+
+	def __str__(self):
+		"string to display to the user"
+		env = self.env
+		src_str = ' '.join([a.nice_path(env) for a in self.inputs])
+		tgt_str = ' '.join([a.nice_path(env) for a in self.outputs])
+		if self.outputs: sep = ' -> '
+		else: sep = ''
+		return '%s: %s%s%s\n' % (self.__class__.__name__.replace('_task', ''), src_str, sep, tgt_str)
+
+	def __repr__(self):
+		"for debugging purposes"
+		return "".join(['\n\t{task %r: ' % id(self), self.__class__.__name__, " ", ",".join([x.name for x in self.inputs]), " -> ", ",".join([x.name for x in self.outputs]), '}'])
+
+	def uid(self):
+		"""
+		Return an identifier used to determine if tasks are up-to-date. Since the
+		identifier will be stored between executions, it must be:
+
+			- unique: no two tasks return the same value (for a given build context)
+			- the same for a given task instance
+
+		By default, the node paths, the class name, and the function are used
+		as inputs to compute a hash.
+
+		The pointer to the object (python built-in 'id') will change between build executions,
+		and must be avoided in such hashes.
+
+		:return: hash value
+		:rtype: string
+		"""
+		try:
+			return self.uid_
+		except AttributeError:
+			# this is not a real hot zone, but we want to avoid surprizes here
+			m = Utils.md5()
+			up = m.update
+			up(self.__class__.__name__.encode())
+			for x in self.inputs + self.outputs:
+				up(x.abspath().encode())
+			self.uid_ = m.digest()
+			return self.uid_
+
+	def set_inputs(self, inp):
+		"""
+		Append the nodes to the *inputs*
+
+		:param inp: input nodes
+		:type inp: node or list of nodes
+		"""
+		if isinstance(inp, list): self.inputs += inp
+		else: self.inputs.append(inp)
+
+	def set_outputs(self, out):
+		"""
+		Append the nodes to the *outputs*
+
+		:param out: output nodes
+		:type out: node or list of nodes
+		"""
+		if isinstance(out, list): self.outputs += out
+		else: self.outputs.append(out)
+
+	def set_run_after(self, task):
+		"""
+		Run this task only after *task*. Affect :py:meth:`waflib.Task.runnable_status`
+
+		:param task: task
+		:type task: :py:class:`waflib.Task.Task`
+		"""
+		# TODO: handle lists too?
+		assert isinstance(task, TaskBase)
+		self.run_after.add(task)
+
+	def signature(self):
+		"""
+		Task signatures are stored between build executions, they are use to track the changes
+		made to the input nodes (not to the outputs!). The signature hashes data from various sources:
+
+		* explicit dependencies: files listed in the inputs (list of node objects) :py:meth:`waflib.Task.Task.sig_explicit_deps`
+		* implicit dependencies: list of nodes returned by scanner methods (when present) :py:meth:`waflib.Task.Task.sig_implicit_deps`
+		* hashed data: variables/values read from task.__class__.vars/task.env :py:meth:`waflib.Task.Task.sig_vars`
+
+		If the signature is expected to give a different result, clear the cache kept in ``self.cache_sig``::
+
+			from waflib import Task
+			class cls(Task.Task):
+				def signature(self):
+					sig = super(Task.Task, self).signature()
+					delattr(self, 'cache_sig')
+					return super(Task.Task, self).signature()
+		"""
+		try: return self.cache_sig
+		except AttributeError: pass
+
+		self.m = Utils.md5()
+		self.m.update(self.hcode.encode())
+
+		# explicit deps
+		self.sig_explicit_deps()
+
+		# env vars
+		self.sig_vars()
+
+		# implicit deps / scanner results
+		if self.scan:
+			try:
+				self.sig_implicit_deps()
+			except Errors.TaskRescan:
+				# a dependency changed: restart with a fresh scan
+				return self.signature()
+
+		ret = self.cache_sig = self.m.digest()
+		return ret
+
+	def runnable_status(self):
+		"""
+		Override :py:meth:`waflib.Task.TaskBase.runnable_status` to determine if the task is ready
+		to be run (:py:attr:`waflib.Task.Task.run_after`)
+		"""
+		#return 0 # benchmarking
+
+		# wait until all predecessor tasks have completed
+		for t in self.run_after:
+			if not t.hasrun:
+				return ASK_LATER
+
+		bld = self.generator.bld
+
+		# first compute the signature
+		try:
+			new_sig = self.signature()
+		except Errors.TaskNotReady:
+			return ASK_LATER
+
+		# compare the signature to a signature computed previously
+		key = self.uid()
+		try:
+			prev_sig = bld.task_sigs[key]
+		except KeyError:
+			Logs.debug("task: task %r must run as it was never run before or the task code changed" % self)
+			return RUN_ME
+
+		# compare the signatures of the outputs
+		for node in self.outputs:
+			try:
+				if node.sig != new_sig:
+					return RUN_ME
+			except AttributeError:
+				Logs.debug("task: task %r must run as the output nodes do not exist" % self)
+				return RUN_ME
+
+		if new_sig != prev_sig:
+			return RUN_ME
+		return SKIP_ME
+
+	def post_run(self):
+		"""
+		Called after successful execution to update the cache data :py:class:`waflib.Node.Node` sigs
+		and :py:attr:`waflib.Build.BuildContext.task_sigs`.
+
+		The node signature is obtained from the task signature, but the output nodes may also get the signature
+		of their contents. See the class decorator :py:func:`waflib.Task.update_outputs` if you need this behaviour.
+		"""
+		bld = self.generator.bld
+		sig = self.signature()
+
+		for node in self.outputs:
+			# check if the node exists ..
+			try:
+				os.stat(node.abspath())
+			except OSError:
+				self.hasrun = MISSING
+				self.err_msg = '-> missing file: %r' % node.abspath()
+				raise Errors.WafError(self.err_msg)
+
+			# important, store the signature for the next run
+			node.sig = sig
+
+		bld.task_sigs[self.uid()] = self.cache_sig
+
+	def sig_explicit_deps(self):
+		"""
+		Used by :py:meth:`waflib.Task.Task.signature`, hash :py:attr:`waflib.Task.Task.inputs`
+		and :py:attr:`waflib.Task.Task.dep_nodes` signatures.
+
+		:rtype: hash value
+		"""
+		bld = self.generator.bld
+		upd = self.m.update
+
+		# the inputs
+		for x in self.inputs + self.dep_nodes:
+			try:
+				upd(x.get_bld_sig())
+			except (AttributeError, TypeError):
+				raise Errors.WafError('Missing node signature for %r (required by %r)' % (x, self))
+
+		# manual dependencies, they can slow down the builds
+		if bld.deps_man:
+			additional_deps = bld.deps_man
+			for x in self.inputs + self.outputs:
+				try:
+					d = additional_deps[id(x)]
+				except KeyError:
+					continue
+
+				for v in d:
+					if isinstance(v, bld.root.__class__):
+						try:
+							v = v.get_bld_sig()
+						except AttributeError:
+							raise Errors.WafError('Missing node signature for %r (required by %r)' % (v, self))
+					elif hasattr(v, '__call__'):
+						v = v() # dependency is a function, call it
+					upd(v)
+
+		return self.m.digest()
+
+	def sig_vars(self):
+		"""
+		Used by :py:meth:`waflib.Task.Task.signature`, hash :py:attr:`waflib.Task.Task.env` variables/values
+
+		:rtype: hash value
+		"""
+		bld = self.generator.bld
+		env = self.env
+		upd = self.m.update
+
+		# dependencies on the environment vars
+		act_sig = bld.hash_env_vars(env, self.__class__.vars)
+		upd(act_sig)
+
+		# additional variable dependencies, if provided
+		dep_vars = getattr(self, 'dep_vars', None)
+		if dep_vars:
+			upd(bld.hash_env_vars(env, dep_vars))
+
+		return self.m.digest()
+
+	scan = None
+	"""
+	This method, when provided, returns a tuple containing:
+
+	* a list of nodes corresponding to real files
+	* a list of names for files not found in path_lst
+
+	For example::
+
+		from waflib.Task import Task
+		class mytask(Task):
+			def scan(self, node):
+				return ((), ())
+
+	The first and second lists are stored in :py:attr:`waflib.Build.BuildContext.node_deps` and
+	:py:attr:`waflib.Build.BuildContext.raw_deps` respectively.
+	"""
+
+	def sig_implicit_deps(self):
+		"""
+		Used by :py:meth:`waflib.Task.Task.signature` hashes node signatures obtained by scanning for dependencies (:py:meth:`waflib.Task.Task.scan`).
+
+		The exception :py:class:`waflib.Errors.TaskRescan` is thrown
+		when a file has changed. When this occurs, :py:meth:`waflib.Task.Task.signature` is called
+		once again, and this method will be executed once again, this time calling :py:meth:`waflib.Task.Task.scan`
+		for searching the dependencies.
+
+		:rtype: hash value
+		"""
+
+		bld = self.generator.bld
+
+		# get the task signatures from previous runs
+		key = self.uid()
+		prev = bld.task_sigs.get((key, 'imp'), [])
+
+		# for issue #379
+		if prev:
+			try:
+				if prev == self.compute_sig_implicit_deps():
+					return prev
+			except:
+				# when a file was renamed (IOError usually), remove the stale nodes (headers in folders without source files)
+				# this will break the order calculation for headers created during the build in the source directory (should be uncommon)
+				# the behaviour will differ when top != out
+				for x in bld.node_deps.get(self.uid(), []):
+					if x.is_child_of(bld.srcnode):
+						try:
+							os.stat(x.abspath())
+						except:
+							try:
+								del x.parent.children[x.name]
+							except:
+								pass
+			del bld.task_sigs[(key, 'imp')]
+			raise Errors.TaskRescan('rescan')
+
+		# no previous run or the signature of the dependencies has changed, rescan the dependencies
+		(nodes, names) = self.scan()
+		if Logs.verbose:
+			Logs.debug('deps: scanner for %s returned %s %s' % (str(self), str(nodes), str(names)))
+
+		# store the dependencies in the cache
+		bld.node_deps[key] = nodes
+		bld.raw_deps[key] = names
+
+		# might happen
+		self.are_implicit_nodes_ready()
+
+		# recompute the signature and return it
+		try:
+			bld.task_sigs[(key, 'imp')] = sig = self.compute_sig_implicit_deps()
+		except:
+			# NOTE(review): a failure here returns None implicitly (no sig);
+			# the broad except mirrors waf's -k "keep going" behaviour
+			if Logs.verbose:
+				for k in bld.node_deps.get(self.uid(), []):
+					try:
+						k.get_bld_sig()
+					except:
+						Logs.warn('Missing signature for node %r (may cause rebuilds)' % k)
+		else:
+			return sig
+
+	def compute_sig_implicit_deps(self):
+		"""
+		Used by :py:meth:`waflib.Task.Task.sig_implicit_deps` for computing the actual hash of the
+		:py:class:`waflib.Node.Node` returned by the scanner.
+
+		:return: hash value
+		:rtype: string
+		"""
+
+		upd = self.m.update
+
+		bld = self.generator.bld
+
+		self.are_implicit_nodes_ready()
+
+		# scanner returns a node that does not have a signature
+		# just *ignore* the error and let them figure out from the compiler output
+		# waf -k behaviour
+		for k in bld.node_deps.get(self.uid(), []):
+			upd(k.get_bld_sig())
+		return self.m.digest()
+
+	def are_implicit_nodes_ready(self):
+		"""
+		For each node returned by the scanner, see if there is a task behind it, and force the build order
+
+		The performance impact on null builds is nearly invisible (1.66s->1.86s), but this is due to
+		agressive caching (1.86s->28s)
+		"""
+		bld = self.generator.bld
+		try:
+			cache = bld.dct_implicit_nodes
+		except:
+			bld.dct_implicit_nodes = cache = {}
+
+		# map output node -> producing task, built once per build group (bld.cur)
+		try:
+			dct = cache[bld.cur]
+		except KeyError:
+			dct = cache[bld.cur] = {}
+			for tsk in bld.cur_tasks:
+				for x in tsk.outputs:
+					dct[x] = tsk
+
+		modified = False
+		for x in bld.node_deps.get(self.uid(), []):
+			if x in dct:
+				self.run_after.add(dct[x])
+				modified = True
+
+		if modified:
+			for tsk in self.run_after:
+				if not tsk.hasrun:
+					#print "task is not ready..."
+					raise Errors.TaskNotReady('not ready')
+
+	def can_retrieve_cache(self):
+		"""
+		Used by :py:meth:`waflib.Task.cache_outputs`
+
+		Retrieve build nodes from the cache
+		update the file timestamps to help cleaning the least used entries from the cache
+		additionally, set an attribute 'cached' to avoid re-creating the same cache files
+
+		Suppose there are files in `cache/dir1/file1` and `cache/dir2/file2`:
+
+		#. read the timestamp of dir1
+		#. try to copy the files
+		#. look at the timestamp again, if it has changed, the data may have been corrupt (cache update by another process)
+		#. should an exception occur, ignore the data
+		"""
+
+		if not getattr(self, 'outputs', None):
+			return None
+
+		sig = self.signature()
+		ssig = Utils.to_hex(self.uid()) + Utils.to_hex(sig)
+
+		# first try to access the cache folder for the task
+		dname = os.path.join(self.generator.bld.cache_global, ssig)
+		try:
+			t1 = os.stat(dname).st_mtime
+		except OSError:
+			return None
+
+		for node in self.outputs:
+			orig = os.path.join(dname, node.name)
+			try:
+				shutil.copy2(orig, node.abspath())
+				# mark the cache file as used recently (modified)
+				os.utime(orig, None)
+			except (OSError, IOError):
+				Logs.debug('task: failed retrieving file')
+				return None
+
+		# is it the same folder?
+		try:
+			t2 = os.stat(dname).st_mtime
+		except OSError:
+			return None
+
+		# mtime changed between the two stats: a concurrent cache update may
+		# have corrupted what we copied, so discard the retrieval
+		if t1 != t2:
+			return None
+
+		for node in self.outputs:
+			node.sig = sig
+			if self.generator.bld.progress_bar < 1:
+				self.generator.bld.to_log('restoring from cache %r\n' % node.abspath())
+
+		self.cached = True
+		return True
+
+	def put_files_cache(self):
+		"""
+		Used by :py:func:`waflib.Task.cache_outputs` to store the build files in the cache
+		"""
+
+		# file caching, if possible
+		# try to avoid data corruption as much as possible
+		if getattr(self, 'cached', None):
+			return None
+		if not getattr(self, 'outputs', None):
+			return None
+
+		sig = self.signature()
+		ssig = Utils.to_hex(self.uid()) + Utils.to_hex(sig)
+		dname = os.path.join(self.generator.bld.cache_global, ssig)
+		tmpdir = tempfile.mkdtemp(prefix=self.generator.bld.cache_global + os.sep + 'waf')
+
+		try:
+			shutil.rmtree(dname)
+		except:
+			pass
+
+		# copy into a temp dir first, then rename into place: the rename is a
+		# single filesystem operation, which keeps concurrent readers from
+		# seeing a half-written cache entry
+		try:
+			for node in self.outputs:
+				dest = os.path.join(tmpdir, node.name)
+				shutil.copy2(node.abspath(), dest)
+		except (OSError, IOError):
+			try:
+				shutil.rmtree(tmpdir)
+			except:
+				pass
+		else:
+			try:
+				os.rename(tmpdir, dname)
+			except OSError:
+				try:
+					shutil.rmtree(tmpdir)
+				except:
+					pass
+			else:
+				try:
+					os.chmod(dname, Utils.O755)
+				except:
+					pass
+
+def is_before(t1, t2):
+	"""
+	Return a non-zero value if task t1 is to be executed before task t2::
+
+		t1.ext_out = '.h'
+		t2.ext_in = '.h'
+		t2.after = ['t1']
+		t1.before = ['t2']
+		waflib.Task.is_before(t1, t2) # True
+
+	:param t1: task
+	:type t1: :py:class:`waflib.Task.TaskBase`
+	:param t2: task
+	:type t2: :py:class:`waflib.Task.TaskBase`
+	"""
+	to_list = Utils.to_list
+	# file extension constraint: t1 produces what t2 consumes
+	for k in to_list(t2.ext_in):
+		if k in to_list(t1.ext_out):
+			return 1
+
+	# explicit before/after constraints by class name
+	if t1.__class__.__name__ in to_list(t2.after):
+		return 1
+
+	if t2.__class__.__name__ in to_list(t1.before):
+		return 1
+
+	return 0
+
+def set_file_constraints(tasks):
+	"""
+	Adds tasks to the task 'run_after' attribute based on the task inputs and outputs
+
+	:param tasks: tasks
+	:type tasks: list of :py:class:`waflib.Task.TaskBase`
+	"""
+	# node id -> set of tasks reading (ins) / producing (outs) that node
+	ins = Utils.defaultdict(set)
+	outs = Utils.defaultdict(set)
+	for x in tasks:
+		for a in getattr(x, 'inputs', []) + getattr(x, 'dep_nodes', []):
+			ins[id(a)].add(x)
+		for a in getattr(x, 'outputs', []):
+			outs[id(a)].add(x)
+
+	# nodes that are both produced and consumed create ordering edges
+	links = set(ins.keys()).intersection(outs.keys())
+	for k in links:
+		for a in ins[k]:
+			a.run_after.update(outs[k])
+
+def set_precedence_constraints(tasks):
+	"""
+	Add tasks to the task 'run_after' attribute based on the after/before/ext_out/ext_in attributes
+
+	:param tasks: tasks
+	:type tasks: list of :py:class:`waflib.Task.TaskBase`
+	"""
+	# group tasks by their constraint hash so each pair of *groups* is
+	# compared once instead of each pair of tasks
+	cstr_groups = Utils.defaultdict(list)
+	for x in tasks:
+		h = x.hash_constraints()
+		cstr_groups[h].append(x)
+
+	keys = list(cstr_groups.keys())
+	maxi = len(keys)
+
+	# this list should be short
+	for i in range(maxi):
+		t1 = cstr_groups[keys[i]][0]
+		for j in range(i + 1, maxi):
+			t2 = cstr_groups[keys[j]][0]
+
+			# add the constraints based on the comparisons
+			if is_before(t1, t2):
+				a = i
+				b = j
+			elif is_before(t2, t1):
+				a = j
+				b = i
+			else:
+				continue
+			# every task in group b runs after every task in group a
+			for x in cstr_groups[keys[b]]:
+				x.run_after.update(cstr_groups[keys[a]])
+
+def funex(c):
+	"""
+	Compile a function by 'exec'
+
+	:param c: function to compile
+	:type c: string
+	:return: the function 'f' declared in the input string
+	:rtype: function
+	"""
+	# exec into a private namespace; the generated code always defines 'f'
+	# (see COMPILE_TEMPLATE_SHELL / COMPILE_TEMPLATE_NOSHELL)
+	dc = {}
+	exec(c, dc)
+	return dc['f']
+
+# matches: a literal backslash, an escaped '$$', or a '${VAR...}' substitution
+# (group 'var' is the variable name, group 'code' any trailing accessor code)
+reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})", re.M)
+def compile_fun_shell(line):
+	"""
+	Create a compiled function to execute a process with the shell
+	WARNING: this method may disappear anytime, so use compile_fun instead
+	"""
+
+	extr = []
+	def repl(match):
+		g = match.group
+		if g('dollar'): return "$"
+		elif g('backslash'): return '\\\\'
+		# record the substitution and leave a '%s' placeholder in the line
+		elif g('subst'): extr.append((g('var'), g('code'))); return "%s"
+		return None
+
+	line = reg_act.sub(repl, line) or line
+
+	# build one python expression per recorded substitution
+	parm = []
+	dvars = []
+	app = parm.append
+	for (var, meth) in extr:
+		if var == 'SRC':
+			if meth: app('tsk.inputs%s' % meth)
+			else: app('" ".join([a.path_from(bld.bldnode) for a in tsk.inputs])')
+		elif var == 'TGT':
+			if meth: app('tsk.outputs%s' % meth)
+			else: app('" ".join([a.path_from(bld.bldnode) for a in tsk.outputs])')
+		elif meth:
+			if meth.startswith(':'):
+				# ${VAR:OTHER} -> tsk.colon(VAR, OTHER), see TaskBase.colon
+				m = meth[1:]
+				if m == 'SRC':
+					m = '[a.path_from(bld.bldnode) for a in tsk.inputs]'
+				elif m == 'TGT':
+					m = '[a.path_from(bld.bldnode) for a in tsk.outputs]'
+				elif m[:3] not in ('tsk', 'gen', 'bld'):
+					dvars.extend([var, meth[1:]])
+					m = '%r' % m
+				app('" ".join(tsk.colon(%r, %s))' % (var, m))
+			else:
+				app('%s%s' % (var, meth))
+		else:
+			# plain env variable: flattened at run time, tracked as a dep
+			if not var in dvars: dvars.append(var)
+			app("p('%s')" % var)
+	if parm: parm = "%% (%s) " % (',\n\t\t'.join(parm))
+	else: parm = ''
+
+	c = COMPILE_TEMPLATE_SHELL % (line, parm)
+
+	Logs.debug('action: %s' % c)
+	return (funex(c), dvars)
+
+def compile_fun_noshell(line):
+	"""
+	Create a compiled function to execute a process without the shell
+	WARNING: this method may disappear anytime, so use compile_fun instead
+	"""
+	extr = []
+	def repl(match):
+		g = match.group
+		if g('dollar'): return "$"
+		# mark each substitution with a separator token to split on below
+		elif g('subst'): extr.append((g('var'), g('code'))); return "<<|@|>>"
+		return None
+
+	line2 = reg_act.sub(repl, line)
+	params = line2.split('<<|@|>>')
+	assert(extr)
+
+	# emit one 'lst.extend/append' statement per literal chunk + substitution
+	buf = []
+	dvars = []
+	app = buf.append
+	for x in range(len(extr)):
+		params[x] = params[x].strip()
+		if params[x]:
+			app("lst.extend(%r)" % params[x].split())
+		(var, meth) = extr[x]
+		if var == 'SRC':
+			if meth: app('lst.append(tsk.inputs%s)' % meth)
+			else: app("lst.extend([a.path_from(bld.bldnode) for a in tsk.inputs])")
+		elif var == 'TGT':
+			if meth: app('lst.append(tsk.outputs%s)' % meth)
+			else: app("lst.extend([a.path_from(bld.bldnode) for a in tsk.outputs])")
+		elif meth:
+			if meth.startswith(':'):
+				# ${VAR:OTHER} -> tsk.colon(VAR, OTHER), see TaskBase.colon
+				m = meth[1:]
+				if m == 'SRC':
+					m = '[a.path_from(bld.bldnode) for a in tsk.inputs]'
+				elif m == 'TGT':
+					m = '[a.path_from(bld.bldnode) for a in tsk.outputs]'
+				elif m[:3] not in ('tsk', 'gen', 'bld'):
+					dvars.extend([var, m])
+					m = '%r' % m
+				app('lst.extend(tsk.colon(%r, %s))' % (var, m))
+			else:
+				app('lst.extend(gen.to_list(%s%s))' % (var, meth))
+		else:
+			app('lst.extend(to_list(env[%r]))' % var)
+			if not var in dvars: dvars.append(var)
+
+	# trailing literal chunk after the last substitution
+	if extr:
+		if params[-1]:
+			app("lst.extend(%r)" % params[-1].split())
+	fun = COMPILE_TEMPLATE_NOSHELL % "\n\t".join(buf)
+	Logs.debug('action: %s' % fun)
+	return (funex(fun), dvars)
+
+def compile_fun(line, shell=False):
+	"""
+	Parse a string expression such as "${CC} ${SRC} -o ${TGT}" and return a pair containing:
+
+	* the function created (compiled) for use as :py:meth:`waflib.Task.TaskBase.run`
+	* the list of variables that imply a dependency from self.env
+
+	for example::
+
+		from waflib.Task import compile_fun
+		compile_fun('cxx', '${CXX} -o ${TGT[0]} ${SRC} -I ${SRC[0].parent.bldpath()}')
+
+		def build(bld):
+			bld(source='wscript', rule='echo "foo\\${SRC[0].name}\\bar"')
+
+	The env variables (CXX, ..) on the task must not hold dicts (order)
+	The reserved keywords *TGT* and *SRC* represent the task input and output nodes
+
+	"""
+	# force shell mode when the command uses redirection or chaining.
+	# NOTE(review): find(...) > 0 misses an operator at position 0 (find
+	# returns 0 there); probably harmless since commands start with a program
+	# name, but '>= 0' would be the literal intent -- confirm upstream.
+	if line.find('<') > 0 or line.find('>') > 0 or line.find('&&') > 0:
+		shell = True
+
+	if shell:
+		return compile_fun_shell(line)
+	else:
+		return compile_fun_noshell(line)
+
+def task_factory(name, func=None, vars=None, color='GREEN', ext_in=[], ext_out=[], before=[], after=[], shell=False, scan=None):
+	"""
+	Return a new task subclass with the function ``run`` compiled from the line given.
+	Provided for compatibility with waf 1.4-1.5, when we did not use metaclasses to register new objects.
+
+	:param func: method run
+	:type func: string or function
+	:param vars: list of variables to hash
+	:type vars: list of string
+	:param color: color to use
+	:type color: string
+	:param shell: when *func* is a string, enable/disable the use of the shell
+	:type shell: bool
+	:param scan: method scan
+	:type scan: function
+	:rtype: :py:class:`waflib.Task.Task`
+	"""
+
+	params = {
+		'vars': vars or [], # function arguments are static, and this one may be modified by the class
+		'color': color,
+		'name': name,
+		'ext_in': Utils.to_list(ext_in),
+		'ext_out': Utils.to_list(ext_out),
+		'before': Utils.to_list(before),
+		'after': Utils.to_list(after),
+		'shell': shell,
+		'scan': scan,
+	}
+
+	# strings go through the run_str machinery of the metaclass
+	if isinstance(func, str):
+		params['run_str'] = func
+	else:
+		params['run'] = func
+
+	# type(Task) is the store_task_type metaclass, so creating the class
+	# also registers it; the assignment below makes the registration explicit
+	cls = type(Task)(name, (Task,), params)
+	global classes
+	classes[name] = cls
+	return cls
+
+
+def always_run(cls):
+	"""
+	Task class decorator
+
+	Set all task instances of this class to be executed whenever a build is started
+	The task signature is calculated, but the result of the comparation between
+	task signatures is bypassed
+	"""
+	old = cls.runnable_status
+	def always(self):
+		# SKIP_ME is upgraded to RUN_ME; ASK_LATER passes through unchanged
+		ret = old(self)
+		if ret == SKIP_ME:
+			ret = RUN_ME
+		return ret
+	cls.runnable_status = always
+	return cls
+
+def update_outputs(cls):
+	"""
+	Task class decorator
+
+	If you want to create files in the source directory. For example, to keep *foo.txt* in the source
+	directory, create it first and declare::
+
+		def build(bld):
+			bld(rule='cp ${SRC} ${TGT}', source='wscript', target='foo.txt', update_outputs=True)
+	"""
+	# after a run, sign the outputs with the hash of their *contents*
+	# (instead of the task signature) and remember which task produced them
+	old_post_run = cls.post_run
+	def post_run(self):
+		old_post_run(self)
+		for node in self.outputs:
+			node.sig = Utils.h_file(node.abspath())
+			self.generator.bld.task_sigs[node.abspath()] = self.uid() # issue #1017
+	cls.post_run = post_run
+
+
+	old_runnable_status = cls.runnable_status
+	def runnable_status(self):
+		status = old_runnable_status(self)
+		if status != RUN_ME:
+			return status
+
+		try:
+			# by default, we check that the output nodes have the signature of the task
+			# perform a second check, returning 'SKIP_ME' as we are expecting that
+			# the signatures do not match
+			bld = self.generator.bld
+			prev_sig = bld.task_sigs[self.uid()]
+			if prev_sig == self.signature():
+				for x in self.outputs:
+					if not x.sig or bld.task_sigs[x.abspath()] != self.uid():
+						return RUN_ME
+				return SKIP_ME
+		except KeyError:
+			pass
+		except IndexError:
+			pass
+		except AttributeError:
+			pass
+		return RUN_ME
+	cls.runnable_status = runnable_status
+
+	return cls
+
+
diff --git a/waflib/TaskGen.py b/waflib/TaskGen.py
new file mode 100644
index 0000000..b5def00
--- /dev/null
+++ b/waflib/TaskGen.py
@@ -0,0 +1,757 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2010 (ita)
+
+"""
+Task generators
+
+The class :py:class:`waflib.TaskGen.task_gen` encapsulates the creation of task objects (low-level code)
+The instances can have various parameters, but the creation of task nodes (Task.py)
+is always postponed. To achieve this, various methods are called from the method "apply"
+
+
+"""
+
+import copy, re, os
+from waflib import Task, Utils, Logs, Errors, ConfigSet
+
+feats = Utils.defaultdict(set)
+"""remember the methods declaring features"""
+
+class task_gen(object):
+	"""
+	Instances of this class create :py:class:`waflib.Task.TaskBase` when
+	calling the method :py:meth:`waflib.TaskGen.task_gen.post` from the main thread.
+	A few notes:
+
+	* The methods to call (*self.meths*) can be specified dynamically (removing, adding, ..)
+	* The 'features' are used to add methods to self.meths and then execute them
+	* The attribute 'path' is a node representing the location of the task generator
+	* The tasks created are added to the attribute *tasks*
+	* The attribute 'idx' is a counter of task generators in the same path
+	"""
+
+	mappings = {}
+	prec = Utils.defaultdict(list)
+
+	def __init__(self, *k, **kw):
+		"""
+		The task generator objects predefine various attributes (source, target) for possible
+		processing by process_rule (make-like rules) or process_source (extensions, misc methods)
+
+		The tasks are stored on the attribute 'tasks'. They are created by calling methods
+		listed in self.meths *or* referenced in the attribute features
+		A topological sort is performed to ease the method re-use.
+
+		The extra key/value elements passed in kw are set as attributes
+		"""
+
+		# so we will have to play with directed acyclic graphs
+		# detect cycles, etc
+		self.source = ''
+		self.target = ''
+
+		self.meths = []
+		"""
+		List of method names to execute (it is usually a good idea to avoid touching this)
+		"""
+
+		self.prec = Utils.defaultdict(list)
+		"""
+		Precedence table for sorting the methods in self.meths
+		"""
+
+		self.mappings = {}
+		"""
+		List of mappings {extension -> function} for processing files by extension
+		"""
+
+		self.features = []
+		"""
+		List of feature names for bringing new methods in
+		"""
+
+		self.tasks = []
+		"""
+		List of tasks created.
+		"""
+
+		if not 'bld' in kw:
+			# task generators without a build context :-/
+			self.env = ConfigSet.ConfigSet()
+			self.idx = 0
+			self.path = None
+		else:
+			self.bld = kw['bld']
+			self.env = self.bld.env.derive()
+			self.path = self.bld.path # emulate chdir when reading scripts
+
+			# provide a unique id
+			try:
+				self.idx = self.bld.idx[id(self.path)] = self.bld.idx.get(id(self.path), 0) + 1
+			except AttributeError:
+				self.bld.idx = {}
+				self.idx = self.bld.idx[id(self.path)] = 1
+
+		for key, val in kw.items():
+			setattr(self, key, val)
+
+	def __str__(self):
+		"""for debugging purposes"""
+		return "<task_gen %r declared in %s>" % (self.name, self.path.abspath())
+
+	def __repr__(self):
+		"""for debugging purposes"""
+		lst = []
+		for x in self.__dict__.keys():
+			if x not in ['env', 'bld', 'compiled_tasks', 'tasks']:
+				lst.append("%s=%s" % (x, repr(getattr(self, x))))
+		return "bld(%s) in %s" % (", ".join(lst), self.path.abspath())
+
+	def get_name(self):
+		"""
+		If not set, the name is computed from the target name::
+
+			def build(bld):
+				x = bld(name='foo')
+				x.get_name() # foo
+				y = bld(target='bar')
+				y.get_name() # bar
+
+		:rtype: string
+		:return: name of this task generator
+		"""
+		try:
+			return self._name
+		except AttributeError:
+			if isinstance(self.target, list):
+				lst = [str(x) for x in self.target]
+				name = self._name = ','.join(lst)
+			else:
+				name = self._name = str(self.target)
+			return name
+	def set_name(self, name):
+		self._name = name
+
+	name = property(get_name, set_name)
+
+	def to_list(self, val):
+		"""
+		Ensure that a parameter is a list
+
+		:type val: string or list of string
+		:param val: input to return as a list
+		:rtype: list
+		"""
+		if isinstance(val, str): return val.split()
+		else: return val
+
+	def post(self):
+		"""
+		Create task objects. The following operations are performed:
+
+		#. The body of this method is called only once and sets the attribute ``posted``
+		#. The attribute ``features`` is used to add more methods in ``self.meths``
+		#. The methods are sorted by the precedence table ``self.prec`` or `:waflib:attr:waflib.TaskGen.task_gen.prec`
+		#. The methods are then executed in order
+		#. The tasks created are added to :py:attr:`waflib.TaskGen.task_gen.tasks`
+		"""
+
+		# we could add a decorator to let the task run once, but then python 2.3 will be difficult to support
+		if getattr(self, 'posted', None):
+			#error("OBJECT ALREADY POSTED" + str( self))
+			return False
+		self.posted = True
+
+		keys = set(self.meths)
+
+		# add the methods listed in the features
+		self.features = Utils.to_list(self.features)
+		for x in self.features + ['*']:
+			st = feats[x]
+			if not st:
+				if not x in Task.classes:
+					Logs.warn('feature %r does not exist - bind at least one method to it' % x)
+			keys.update(list(st)) # ironpython 2.7 wants the cast to list
+
+		# copy the precedence table
+		prec = {}
+		prec_tbl = self.prec or task_gen.prec
+		for x in prec_tbl:
+			if x in keys:
+				prec[x] = prec_tbl[x]
+
+		# elements disconnected
+		tmp = []
+		for a in keys:
+			for x in prec.values():
+				if a in x: break
+			else:
+				tmp.append(a)
+
+		# TODO waf 1.7
+		#tmp.sort()
+
+		# topological sort
+		out = []
+		while tmp:
+			e = tmp.pop()
+			if e in keys: out.append(e)
+			try:
+				nlst = prec[e]
+			except KeyError:
+				pass
+			else:
+				del prec[e]
+				for x in nlst:
+					for y in prec:
+						if x in prec[y]:
+							break
+					else:
+						tmp.append(x)
+
+		if prec:
+			raise Errors.WafError('Cycle detected in the method execution %r' % prec)
+		out.reverse()
+		self.meths = out
+
+		# then we run the methods in order
+		Logs.debug('task_gen: posting %s %d' % (self, id(self)))
+		for x in out:
+			try:
+				v = getattr(self, x)
+			except AttributeError:
+				raise Errors.WafError('%r is not a valid task generator method' % x)
+			Logs.debug('task_gen: -> %s (%d)' % (x, id(self)))
+			v()
+
+		Logs.debug('task_gen: posted %s' % self.name)
+		return True
+
+	def get_hook(self, node):
+		"""
+		:param node: Input file to process
+		:type node: :py:class:`waflib.Tools.Node.Node`
+		:return: A method able to process the input node by looking at the extension
+		:rtype: function
+		"""
+		name = node.name
+		for k in self.mappings:
+			if name.endswith(k):
+				return self.mappings[k]
+		for k in task_gen.mappings:
+			if name.endswith(k):
+				return task_gen.mappings[k]
+		raise Errors.WafError("File %r has no mapping in %r (did you forget to load a waf tool?)" % (node, task_gen.mappings.keys()))
+
+	def create_task(self, name, src=None, tgt=None):
+		"""
+		Wrapper for creating task objects easily
+
+		:param name: task class name
+		:type name: string
+		:param src: input nodes
+		:type src: list of :py:class:`waflib.Tools.Node.Node`
+		:param tgt: output nodes
+		:type tgt: list of :py:class:`waflib.Tools.Node.Node`
+		:return: A task object
+		:rtype: :py:class:`waflib.Task.TaskBase`
+		"""
+		task = Task.classes[name](env=self.env.derive(), generator=self)
+		if src:
+			task.set_inputs(src)
+		if tgt:
+			task.set_outputs(tgt)
+		self.tasks.append(task)
+		return task
+
+	def clone(self, env):
+		"""
+		Make a copy of a task generator. Once the copy is made, it is necessary to ensure that the
+		task generator does not create the same output files as the original, or the same files may
+		be compiled twice.
+
+		:param env: A configuration set
+		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
+		:return: A copy
+		:rtype: :py:class:`waflib.TaskGen.task_gen`
+		"""
+		newobj = self.bld()
+		for x in self.__dict__:
+			if x in ['env', 'bld']:
+				continue
+			elif x in ['path', 'features']:
+				setattr(newobj, x, getattr(self, x))
+			else:
+				setattr(newobj, x, copy.copy(getattr(self, x)))
+
+		newobj.posted = False
+		if isinstance(env, str):
+			newobj.env = self.bld.all_envs[env].derive()
+		else:
+			newobj.env = env.derive()
+
+		return newobj
+
+def declare_chain(name='', rule=None, reentrant=None, color='BLUE',
+	ext_in=[], ext_out=[], before=[], after=[], decider=None, scan=None, install_path=None, shell=False):
+	"""
+	Create a new mapping and a task class for processing files by extension.
+	See Tools/flex.py for an example.
+
+	:param name: name for the task class
+	:type name: string
+	:param rule: function to execute or string to be compiled in a function
+	:type rule: string or function
+	:param reentrant: re-inject the output file in the process (done automatically, set to 0 to disable)
+	:type reentrant: int
+	:param color: color for the task output
+	:type color: string
+	:param ext_in: execute the task only after the files of such extensions are created
+	:type ext_in: list of string
+	:param ext_out: execute the task only before files of such extensions are processed
+	:type ext_out: list of string
+	:param before: execute instances of this task before classes of the given names
+	:type before: list of string
+	:param after: execute instances of this task after classes of the given names
+	:type after: list of string
+	:param decider: if present, use it to create the output nodes for the task
+	:type decider: function
+	:param scan: scanner function for the task
+	:type scan: function
+	:param install_path: installation path for the output nodes
+	:type install_path: string
+	"""
+	ext_in = Utils.to_list(ext_in)
+	ext_out = Utils.to_list(ext_out)
+	if not name:
+		name = rule
+	cls = Task.task_factory(name, rule, color=color, ext_in=ext_in, ext_out=ext_out, before=before, after=after, scan=scan, shell=shell)
+
+	def x_file(self, node):
+		ext = decider and decider(self, node) or cls.ext_out
+		if ext_in:
+			_ext_in = ext_in[0]
+
+		tsk = self.create_task(name, node)
+		cnt = 0
+
+		keys = self.mappings.keys() + self.__class__.mappings.keys()
+		for x in ext:
+			k = node.change_ext(x, ext_in=_ext_in)
+			tsk.outputs.append(k)
+
+			if reentrant != None:
+				if cnt < int(reentrant):
+					self.source.append(k)
+			else:
+				for y in keys: # ~ nfile * nextensions :-/
+					if k.name.endswith(y):
+						self.source.append(k)
+						break
+			cnt += 1
+
+		if install_path:
+			self.bld.install_files(install_path, tsk.outputs)
+		return tsk
+
+	for x in cls.ext_in:
+		task_gen.mappings[x] = x_file
+	return x_file
+
+def taskgen_method(func):
+	"""
+	Decorator: register a method as a task generator method.
+	The function must accept a task generator as first parameter::
+
+		from waflib.TaskGen import taskgen_method
+		@taskgen_method
+		def mymethod(self):
+			pass
+
+	:param func: task generator method to add
+	:type func: function
+	:rtype: function
+	"""
+	setattr(task_gen, func.__name__, func)
+	return func
+
+def feature(*k):
+	"""
+	Decorator: register a task generator method that will be executed when the
+	object attribute 'feature' contains the corresponding key(s)::
+
+		from waflib.Task import feature
+		@feature('myfeature')
+		def myfunction(self):
+			print('that is my feature!')
+		def build(bld):
+			bld(features='myfeature')
+
+	:param k: feature names
+	:type k: list of string
+	"""
+	def deco(func):
+		setattr(task_gen, func.__name__, func)
+		for name in k:
+			feats[name].update([func.__name__])
+		return func
+	return deco
+
+def before_method(*k):
+	"""
+	Decorator: register a task generator method which will be executed
+	before the functions of given name(s)::
+
+		from waflib.TaskGen import feature, before
+		@feature('myfeature')
+		@before_method('fun2')
+		def fun1(self):
+			print('feature 1!')
+		@feature('myfeature')
+		def fun2(self):
+			print('feature 2!')
+		def build(bld):
+			bld(features='myfeature')
+
+	:param k: method names
+	:type k: list of string
+	"""
+	def deco(func):
+		setattr(task_gen, func.__name__, func)
+		for fun_name in k:
+			if not func.__name__ in task_gen.prec[fun_name]:
+				task_gen.prec[fun_name].append(func.__name__)
+				#task_gen.prec[fun_name].sort()
+		return func
+	return deco
+before = before_method
+
+def after_method(*k):
+	"""
+	Decorator: register a task generator method which will be executed
+	after the functions of given name(s)::
+
+		from waflib.TaskGen import feature, after
+		@feature('myfeature')
+		@after_method('fun2')
+		def fun1(self):
+			print('feature 1!')
+		@feature('myfeature')
+		def fun2(self):
+			print('feature 2!')
+		def build(bld):
+			bld(features='myfeature')
+
+	:param k: method names
+	:type k: list of string
+	"""
+	def deco(func):
+		setattr(task_gen, func.__name__, func)
+		for fun_name in k:
+			if not fun_name in task_gen.prec[func.__name__]:
+				task_gen.prec[func.__name__].append(fun_name)
+				#task_gen.prec[func.__name__].sort()
+		return func
+	return deco
+after = after_method
+
+def extension(*k):
+	"""
+	Decorator: register a task generator method which will be invoked during
+	the processing of source files for the extension given::
+
+		from waflib import Task
+		class mytask(Task):
+			run_str = 'cp ${SRC} ${TGT}'
+		@extension('.moo')
+		def create_maa_file(self, node):
+			self.create_task('mytask', node, node.change_ext('.maa'))
+		def build(bld):
+			bld(source='foo.moo')
+	"""
+	def deco(func):
+		setattr(task_gen, func.__name__, func)
+		for x in k:
+			task_gen.mappings[x] = func
+		return func
+	return deco
+
+# ---------------------------------------------------------------
+# The following methods are task generator methods commonly used
+# they are almost examples, the rest of waf core does not depend on them
+
+@taskgen_method
+def to_nodes(self, lst, path=None):
+	"""
+	Convert the input list into a list of nodes.
+	It is used by :py:func:`waflib.TaskGen.process_source` and :py:func:`waflib.TaskGen.process_rule`.
+	It is designed for source files, for folders, see :py:func:`waflib.Tools.ccroot.to_incnodes`:
+
+	:param lst: input list
+	:type lst: list of string and nodes
+	:param path: path from which to search the nodes (by default, :py:attr:`waflib.TaskGen.task_gen.path`)
+	:type path: :py:class:`waflib.Tools.Node.Node`
+	:rtype: list of :py:class:`waflib.Tools.Node.Node`
+	"""
+	tmp = []
+	path = path or self.path
+	find = path.find_resource
+
+	if isinstance(lst, self.path.__class__):
+		lst = [lst]
+
+	# either a list or a string, convert to a list of nodes
+	for x in Utils.to_list(lst):
+		if isinstance(x, str):
+			node = find(x)
+		else:
+			node = x
+		if not node:
+			raise Errors.WafError("source not found: %r in %r" % (x, self))
+		tmp.append(node)
+	return tmp
+
+@feature('*')
+def process_source(self):
+	"""
+	Process each element in the attribute ``source`` by extension.
+
+	#. The *source* list is converted through :py:meth:`waflib.TaskGen.to_nodes` to a list of :py:class:`waflib.Node.Node` first.
+	#. File extensions are mapped to methods having the signature: ``def meth(self, node)`` by :py:meth:`waflib.TaskGen.extension`
+	#. The method is retrieved through :py:meth:`waflib.TaskGen.task_gen.get_hook`
+	#. When called, the methods may modify self.source to append more source to process
+	#. The mappings can map an extension or a filename (see the code below)
+	"""
+	self.source = self.to_nodes(getattr(self, 'source', []))
+	for node in self.source:
+		self.get_hook(node)(self, node)
+
+@feature('*')
+@before_method('process_source')
+def process_rule(self):
+	"""
+	Process the attribute ``rule``. When present, :py:meth:`waflib.TaskGen.process_source` is disabled::
+
+		def build(bld):
+			bld(rule='cp ${SRC} ${TGT}', source='wscript', target='bar.txt')
+	"""
+	if not getattr(self, 'rule', None):
+		return
+
+	# create the task class
+	name = str(getattr(self, 'name', None) or self.target or self.rule)
+	cls = Task.task_factory(name, self.rule,
+		getattr(self, 'vars', []),
+		shell=getattr(self, 'shell', True), color=getattr(self, 'color', 'BLUE'))
+
+	# now create one instance
+	tsk = self.create_task(name)
+
+	if getattr(self, 'target', None):
+		if isinstance(self.target, str):
+			self.target = self.target.split()
+		if not isinstance(self.target, list):
+			self.target = [self.target]
+		for x in self.target:
+			if isinstance(x, str):
+				tsk.outputs.append(self.path.find_or_declare(x))
+			else:
+				x.parent.mkdir() # if a node was given, create the required folders
+				tsk.outputs.append(x)
+		if getattr(self, 'install_path', None):
+			# from waf 1.5
+			# although convenient, it does not 1. allow to name the target file and 2. symlinks
+			# TODO remove in waf 1.7
+			self.bld.install_files(self.install_path, tsk.outputs)
+
+	if getattr(self, 'source', None):
+		tsk.inputs = self.to_nodes(self.source)
+		# bypass the execution of process_source by setting the source to an empty list
+		self.source = []
+
+	if getattr(self, 'scan', None):
+		cls.scan = self.scan
+	elif getattr(self, 'deps', None):
+		def scan(self):
+			nodes = []
+			for x in self.generator.to_list(self.generator.deps):
+				node = self.generator.path.find_resource(x)
+				if not node:
+					self.generator.bld.fatal('Could not find %r (was it declared?)' % x)
+				nodes.append(node)
+			return [nodes, []]
+		cls.scan = scan
+
+	if getattr(self, 'cwd', None):
+		tsk.cwd = self.cwd
+
+	# TODO remove on_results in waf 1.7
+	if getattr(self, 'update_outputs', None) or getattr(self, 'on_results', None):
+		Task.update_outputs(cls)
+
+	if getattr(self, 'always', None):
+		Task.always_run(cls)
+
+	for x in ['after', 'before', 'ext_in', 'ext_out']:
+		setattr(cls, x, getattr(self, x, []))
+
+@feature('seq')
+def sequence_order(self):
+	"""
+	Add a strict sequential constraint between the tasks generated by task generators.
+	It works because task generators are posted in order.
+	It will not post objects which belong to other folders.
+
+	Example::
+
+		bld(features='javac seq')
+		bld(features='jar seq')
+
+	To start a new sequence, set the attribute seq_start, for example::
+
+		obj = bld(features='seq')
+		obj.seq_start = True
+
+	Note that the method is executed in last position. This is more an
+	example than a widely-used solution.
+	"""
+	if self.meths and self.meths[-1] != 'sequence_order':
+		self.meths.append('sequence_order')
+		return
+
+	if getattr(self, 'seq_start', None):
+		return
+
+	# all the tasks previously declared must be run before these
+	if getattr(self.bld, 'prev', None):
+		self.bld.prev.post()
+		for x in self.bld.prev.tasks:
+			for y in self.tasks:
+				y.set_run_after(x)
+
+	self.bld.prev = self
+
+
+re_m4 = re.compile('@(\w+)@', re.M)
+
+class subst_pc(Task.Task):
+	"""
+	Create *.pc* files from *.pc.in*. The task is executed whenever an input variable used
+	in the substitution changes.
+	"""
+
+	def run(self):
+		"Substitutes variables in a .in file"
+
+		code = self.inputs[0].read()
+
+		# replace all % by %% to prevent errors by % signs
+		code = code.replace('%', '%%')
+
+		# extract the vars foo into lst and replace @foo@ by %(foo)s
+		lst = []
+		def repl(match):
+			g = match.group
+			if g(1):
+				lst.append(g(1))
+				return "%%(%s)s" % g(1)
+			return ''
+		code = re_m4.sub(repl, code)
+
+		try:
+			d = self.generator.dct
+		except AttributeError:
+			d = {}
+			for x in lst:
+				tmp = getattr(self.generator, x, '') or self.env.get_flat(x) or self.env.get_flat(x.upper())
+				d[x] = str(tmp)
+
+		self.outputs[0].write(code % d)
+		self.generator.bld.raw_deps[self.uid()] = self.dep_vars = lst
+
+		# make sure the signature is updated
+		try: delattr(self, 'cache_sig')
+		except AttributeError: pass
+
+		if getattr(self.generator, 'chmod', None):
+			os.chmod(self.outputs[0].abspath(), self.generator.chmod)
+
+	def sig_vars(self):
+		"""
+		Compute a hash (signature) of the variables used in the substitution
+		"""
+		bld = self.generator.bld
+		env = self.env
+		upd = self.m.update
+
+		# raw_deps: persistent custom values returned by the scanner
+		vars = self.generator.bld.raw_deps.get(self.uid(), [])
+
+		# hash both env vars and task generator attributes
+		act_sig = bld.hash_env_vars(env, vars)
+		upd(act_sig)
+
+		lst = [getattr(self.generator, x, '') for x in vars]
+		upd(Utils.h_list(lst))
+
+		return self.m.digest()
+
+@extension('.pc.in')
+def add_pcfile(self, node):
+	"""
+	Process *.pc.in* files to *.pc*. Install the results to ``${PREFIX}/lib/pkgconfig/``
+
+		def build(bld):
+			bld(source='foo.pc.in', install_path='${LIBDIR}/pkgconfig/')
+	"""
+	tsk = self.create_task('subst_pc', node, node.change_ext('.pc', '.pc.in'))
+	self.bld.install_files(getattr(self, 'install_path', '${LIBDIR}/pkgconfig/'), tsk.outputs)
+
+class subst(subst_pc):
+	pass
+
+@feature('subst')
+@before_method('process_source', 'process_rule')
+def process_subst(self):
+	"""
+	Define a transformation that substitutes the contents of *source* files to *target* files::
+
+		def build(bld):
+			bld(
+				features='subst',
+				source='foo.c.in',
+				target='foo.c',
+				install_path='${LIBDIR}/pkgconfig',
+				VAR = 'val'
+			)
+
+	The input files are supposed to contain macros of the form *@VAR@*, where *VAR* is an argument
+	of the task generator object.
+
+	This method overrides the processing by :py:meth:`waflib.TaskGen.process_source`.
+	"""
+	src = self.to_nodes(getattr(self, 'source', []))
+	tgt = getattr(self, 'target', [])
+	if isinstance(tgt, self.path.__class__):
+		tgt = [tgt]
+	tgt = [isinstance(x, self.path.__class__) and x or self.path.find_or_declare(x) for x in Utils.to_list(tgt)]
+
+	if len(src) != len(tgt):
+		raise Errors.WafError('invalid source or target for %r' % self)
+
+	for x, y in zip(src, tgt):
+		if not (x and y):
+			raise Errors.WafError('invalid source or target for %r' % self)
+		tsk = self.create_task('subst', x, y)
+		for a in ('after', 'before', 'ext_in', 'ext_out'):
+			val = getattr(self, a, None)
+			if val:
+				setattr(tsk, a, val)
+
+	inst_to = getattr(self, 'install_path', None)
+	if inst_to:
+		self.bld.install_files(inst_to, tgt, chmod=getattr(self, 'chmod', Utils.O644))
+
+	self.source = []
+
diff --git a/waflib/Tools/__init__.py b/waflib/Tools/__init__.py
new file mode 100644
index 0000000..c8a3c34
--- /dev/null
+++ b/waflib/Tools/__init__.py
@@ -0,0 +1,3 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2010 (ita)
diff --git a/waflib/Tools/c_aliases.py b/waflib/Tools/c_aliases.py
new file mode 100644
index 0000000..0e4059c
--- /dev/null
+++ b/waflib/Tools/c_aliases.py
@@ -0,0 +1,128 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2010 (ita)
+
+"base for all c/c++ programs and libraries"
+
+import os, sys, re
+from waflib import Utils, Build
+from waflib.Configure import conf
+
+def get_extensions(lst):
+	"""
+	:param lst: files to process
+	:list lst: list of string or :py:class:`waflib.Node.Node`
+	:return: list of file extensions
+	:rtype: list of string
+	"""
+	ret = []
+	for x in Utils.to_list(lst):
+		try:
+			if not isinstance(x, str):
+				x = x.name
+			ret.append(x[x.rfind('.') + 1:])
+		except:
+			pass
+	return ret
+
+def sniff_features(**kw):
+	"""
+	Look at the source files and return the features for a task generator (mainly cc and cxx)::
+
+		sniff_features(source=['foo.c', 'foo.cxx'], type='shlib')
+		# returns  ['cxx', 'c', 'cxxshlib', 'cshlib']
+
+	:param source: source files to process
+	:type source: list of string or :py:class:`waflib.Node.Node`
+	:param type: object type in *program*, *shlib* or *stlib*
+	:type type: string
+	:return: the list of features for a task generator processing the source files
+	:rtype: list of string
+	"""
+	exts = get_extensions(kw['source'])
+	type = kw['_type']
+	feats = []
+
+	# watch the order, cxx will have the precedence
+	if 'cxx' in exts or 'cpp' in exts or 'c++' in exts or 'cc' in exts or 'C' in exts:
+		feats.append('cxx')
+
+	if 'c' in exts or 'vala' in exts:
+		feats.append('c')
+
+	if 'd' in exts:
+		feats.append('d')
+
+	if 'java' in exts:
+		feats.append('java')
+
+	if 'java' in exts:
+		return 'java'
+
+	if type in ['program', 'shlib', 'stlib']:
+		for x in feats:
+			if x in ['cxx', 'd', 'c']:
+				feats.append(x + type)
+
+	return feats
+
+def set_features(kw, _type):
+	kw['_type'] = _type
+	kw['features'] = Utils.to_list(kw.get('features', [])) + Utils.to_list(sniff_features(**kw))
+
+@conf
+def program(bld, *k, **kw):
+	"""
+	Alias for creating programs by looking at the file extensions::
+
+		def build(bld):
+			bld.program(source='foo.c', target='app')
+			# equivalent to:
+			# bld(features='c cprogram', source='foo.c', target='app')
+
+	"""
+	set_features(kw, 'program')
+	return bld(*k, **kw)
+
+@conf
+def shlib(bld, *k, **kw):
+	"""
+	Alias for creating shared libraries by looking at the file extensions::
+
+		def build(bld):
+			bld.shlib(source='foo.c', target='app')
+			# equivalent to:
+			# bld(features='c cshlib', source='foo.c', target='app')
+
+	"""
+	set_features(kw, 'shlib')
+	return bld(*k, **kw)
+
+@conf
+def stlib(bld, *k, **kw):
+	"""
+	Alias for creating static libraries by looking at the file extensions::
+
+		def build(bld):
+			bld.stlib(source='foo.cpp', target='app')
+			# equivalent to:
+			# bld(features='cxx cxxstlib', source='foo.cpp', target='app')
+
+	"""
+	set_features(kw, 'stlib')
+	return bld(*k, **kw)
+
+@conf
+def objects(bld, *k, **kw):
+	"""
+	Alias for creating object files by looking at the file extensions::
+
+		def build(bld):
+			bld.objects(source='foo.c', target='app')
+			# equivalent to:
+			# bld(features='c', source='foo.c', target='app')
+
+	"""
+	set_features(kw, 'objects')
+	return bld(*k, **kw)
+
diff --git a/waflib/Tools/c_config.py b/waflib/Tools/c_config.py
new file mode 100644
index 0000000..ee1c5c2
--- /dev/null
+++ b/waflib/Tools/c_config.py
@@ -0,0 +1,1198 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2010 (ita)
+
+"""
+C/C++/D configuration helpers
+"""
+
+import os, imp, sys, re, shlex, shutil
+from waflib import Build, Utils, Configure, Task, Options, Logs, TaskGen, Errors, ConfigSet, Runner
+from waflib.TaskGen import before_method, after_method, feature
+from waflib.Configure import conf
+
+WAF_CONFIG_H   = 'config.h'
+"""default name for the config.h file"""
+
+DEFKEYS = 'define_key'
+INCKEYS = 'include_key'
+
+cfg_ver = {
+	'atleast-version': '>=',
+	'exact-version': '==',
+	'max-version': '<=',
+}
+
+SNIP_FUNCTION = '''
+	int main() {
+	void *p;
+	p=(void*)(%s);
+	return 0;
+}
+'''
+"""Code template for checking for functions"""
+
+SNIP_TYPE = '''
+int main() {
+	if ((%(type_name)s *) 0) return 0;
+	if (sizeof (%(type_name)s)) return 0;
+}
+'''
+"""Code template for checking for types"""
+
+SNIP_CLASS = '''
+int main() {
+	if (
+}
+'''
+
+SNIP_EMPTY_PROGRAM = '''
+int main() {
+	return 0;
+}
+'''
+
+SNIP_FIELD = '''
+int main() {
+	char *off;
+	off = (char*) &((%(type_name)s*)0)->%(field_name)s;
+	return (size_t) off < sizeof(%(type_name)s);
+}
+'''
+
+MACRO_TO_DESTOS = {
+'__linux__'                                      : 'linux',
+'__GNU__'                                        : 'gnu', # hurd
+'__FreeBSD__'                                    : 'freebsd',
+'__NetBSD__'                                     : 'netbsd',
+'__OpenBSD__'                                    : 'openbsd',
+'__sun'                                          : 'sunos',
+'__hpux'                                         : 'hpux',
+'__sgi'                                          : 'irix',
+'_AIX'                                           : 'aix',
+'__CYGWIN__'                                     : 'cygwin',
+'__MSYS__'                                       : 'msys',
+'_UWIN'                                          : 'uwin',
+'_WIN64'                                         : 'win32',
+'_WIN32'                                         : 'win32',
+# Note about darwin: this is also tested with 'defined __APPLE__ && defined __MACH__' somewhere below in this file.
+'__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__'  : 'darwin', 
+'__ENVIRONMENT_IPHONE_OS_VERSION_MIN_REQUIRED__' : 'darwin', # iphone
+'__QNX__'                                        : 'qnx',
+'__native_client__'                              : 'nacl' # google native client platform
+}
+
+MACRO_TO_DEST_CPU = {
+'__x86_64__'  : 'x86_64',
+'__i386__'    : 'x86',
+'__ia64__'    : 'ia',
+'__mips__'    : 'mips',
+'__sparc__'   : 'sparc',
+'__alpha__'   : 'alpha',
+'__arm__'     : 'arm',
+'__hppa__'    : 'hppa',
+'__powerpc__' : 'powerpc',
+}
+
+@conf
+def parse_flags(self, line, uselib, env=None, force_static=False):
+	"""
+	Parse the flags from the input lines, and add them to the relevant use variables::
+
+		def configure(conf):
+			conf.parse_flags('-O3', uselib_store='FOO')
+			# conf.env.CXXFLAGS_FOO = ['-O3']
+			# conf.env.CFLAGS_FOO = ['-O3']
+
+	:param line: flags
+	:type line: string
+	:param uselib: where to add the flags
+	:type uselib: string
+	:param env: config set or conf.env by default
+	:type env: :py:class:`waflib.ConfigSet.ConfigSet`
+	"""
+
+	assert(isinstance(line, str))
+
+	env = env or self.env
+
+	# append_unique is not always possible
+	# for example, apple flags may require both -arch i386 and -arch ppc
+
+	app = env.append_value
+	appu = env.append_unique
+	#lst = shlex.split(line)
+	# issue #811
+	lex = shlex.shlex(line, posix=False)
+	lex.whitespace_split = True
+	lex.commenters = ''
+	lst = list(lex)
+
+	while lst:
+		x = lst.pop(0)
+		st = x[:2]
+		ot = x[2:]
+
+		if st == '-I' or st == '/I':
+			if not ot: ot = lst.pop(0)
+			appu('INCLUDES_' + uselib, [ot])
+		elif st == '-include':
+			tmp = [x, lst.pop(0)]
+			app('CFLAGS', tmp)
+			app('CXXFLAGS', tmp)
+		elif st == '-D' or (self.env.CXX_NAME == 'msvc' and st == '/D'): # not perfect but..
+			if not ot: ot = lst.pop(0)
+			app('DEFINES_' + uselib, [ot])
+		elif st == '-l':
+			if not ot: ot = lst.pop(0)
+			prefix = force_static and 'STLIB_' or 'LIB_'
+			appu(prefix + uselib, [ot])
+		elif st == '-L':
+			if not ot: ot = lst.pop(0)
+			appu('LIBPATH_' + uselib, [ot])
+		elif x == '-pthread' or x.startswith('+') or x.startswith('-std'):
+			app('CFLAGS_' + uselib, [x])
+			app('CXXFLAGS_' + uselib, [x])
+			app('LINKFLAGS_' + uselib, [x])
+		elif x == '-framework':
+			appu('FRAMEWORK_' + uselib, [lst.pop(0)])
+		elif x.startswith('-F'):
+			appu('FRAMEWORKPATH_' + uselib, [x[2:]])
+		elif x.startswith('-Wl'):
+			app('LINKFLAGS_' + uselib, [x])
+		elif x.startswith('-m') or x.startswith('-f') or x.startswith('-dynamic'):
+			app('CFLAGS_' + uselib, [x])
+			app('CXXFLAGS_' + uselib, [x])
+		elif x.startswith('-bundle'):
+			app('LINKFLAGS_' + uselib, [x])
+		elif x.startswith('-undefined'):
+			arg = lst.pop(0)
+			app('LINKFLAGS_' + uselib, [x, arg])
+		elif x.startswith('-arch') or x.startswith('-isysroot'):
+			tmp = [x, lst.pop(0)]
+			app('CFLAGS_' + uselib, tmp)
+			app('CXXFLAGS_' + uselib, tmp)
+			app('LINKFLAGS_' + uselib, tmp)
+		elif x.endswith('.a') or x.endswith('.so') or x.endswith('.dylib'):
+			appu('LINKFLAGS_' + uselib, [x]) # not cool, #762
+
+@conf
+def ret_msg(self, f, kw):
+	if isinstance(f, str):
+		return f
+	return f(kw)
+
+@conf
+def validate_cfg(self, kw):
+	"""
+	Search for the program *pkg-config* if missing, and validate the parameters to pass to
+	:py:func:`waflib.Tools.c_config.exec_cfg`.
+
+	:param path: the **-config program to use** (default is *pkg-config*)
+	:type path: list of string
+	:param msg: message to display to describe the test executed
+	:type msg: string
+	:param okmsg: message to display when the test is successful
+	:type okmsg: string
+	:param errmsg: message to display in case of error
+	:type errmsg: string
+	"""
+	if not 'path' in kw:
+		if not self.env.PKGCONFIG:
+			self.find_program('pkg-config', var='PKGCONFIG')
+		kw['path'] = self.env.PKGCONFIG
+
+	# pkg-config version
+	if 'atleast_pkgconfig_version' in kw:
+		if not 'msg' in kw:
+			kw['msg'] = 'Checking for pkg-config version >= %r' % kw['atleast_pkgconfig_version']
+		return
+
+	if not 'okmsg' in kw:
+		kw['okmsg'] = 'yes'
+	if not 'errmsg' in kw:
+		kw['errmsg'] = 'not found'
+
+	if 'modversion' in kw:
+		if not 'msg' in kw:
+			kw['msg'] = 'Checking for %r version' % kw['modversion']
+		return
+
+	# checking for the version of a module, for the moment, one thing at a time
+	for x in cfg_ver.keys():
+		y = x.replace('-', '_')
+		if y in kw:
+			if not 'package' in kw:
+				raise ValueError('%s requires a package' % x)
+
+			if not 'msg' in kw:
+				kw['msg'] = 'Checking for %r %s %s' % (kw['package'], cfg_ver[x], kw[y])
+			return
+
+	if not 'msg' in kw:
+		kw['msg'] = 'Checking for %r' % (kw['package'] or kw['path'])
+
+@conf
+def exec_cfg(self, kw):
+	"""
+	Execute the program *pkg-config*:
+
+	* if atleast_pkgconfig_version is given, check that pkg-config has the version n and return
+	* if modversion is given, then return the module version
+	* else, execute the *-config* program with the *args* and *variables* given, and set the flags on the *conf.env.FLAGS_name* variable
+
+	:param atleast_pkgconfig_version: minimum pkg-config version to use (disable other tests)
+	:type atleast_pkgconfig_version: string
+	:param package: package name, for example *gtk+-2.0*
+	:type package: string
+	:param uselib_store: if the test is successful, define HAVE\_*name*. It is also used to define *conf.env.FLAGS_name* variables.
+	:type uselib_store: string
+	:param modversion: if provided, return the version of the given module and define *name*\_VERSION
+	:type modversion: string
+	:param args: arguments to give to *package* when retrieving flags
+	:type args: list of string
+	:param variables: return the values of particular variables
+	:type variables: list of string
+	:param define_variable: additional variables to define (also in conf.env.PKG_CONFIG_DEFINES)
+	:type define_variable: dict(string: string)
+	"""
+
+	# pkg-config version
+	if 'atleast_pkgconfig_version' in kw:
+		cmd = [kw['path'], '--atleast-pkgconfig-version=%s' % kw['atleast_pkgconfig_version']]
+		self.cmd_and_log(cmd)
+		if not 'okmsg' in kw:
+			kw['okmsg'] = 'yes'
+		return
+
+	# checking for the version of a module
+	for x in cfg_ver:
+		y = x.replace('-', '_')
+		if y in kw:
+			self.cmd_and_log([kw['path'], '--%s=%s' % (x, kw[y]), kw['package']])
+			if not 'okmsg' in kw:
+				kw['okmsg'] = 'yes'
+			self.define(self.have_define(kw.get('uselib_store', kw['package'])), 1, 0)
+			break
+
+	# retrieving the version of a module
+	if 'modversion' in kw:
+		version = self.cmd_and_log([kw['path'], '--modversion', kw['modversion']]).strip()
+		self.define('%s_VERSION' % Utils.quote_define_name(kw.get('uselib_store', kw['modversion'])), version)
+		return version
+
+	lst = [kw['path']]
+
+	defi = kw.get('define_variable', None)
+	if not defi:
+		defi = self.env.PKG_CONFIG_DEFINES or {}
+	for key, val in defi.items():
+		lst.append('--define-variable=%s=%s' % (key, val))
+
+	if kw['package']:
+		lst.extend(Utils.to_list(kw['package']))
+
+	# retrieving variables of a module
+	if 'variables' in kw:
+		env = kw.get('env', self.env)
+		uselib = kw.get('uselib_store', kw['package'].upper())
+		vars = Utils.to_list(kw['variables'])
+		for v in vars:
+			val = self.cmd_and_log(lst + ['--variable=' + v]).strip()
+			var = '%s_%s' % (uselib, v)
+			env[var] = val
+		if not 'okmsg' in kw:
+			kw['okmsg'] = 'yes'
+		return
+
+	static = False
+	if 'args' in kw:
+		args = Utils.to_list(kw['args'])
+		if '--static' in args or '--static-libs' in args:
+			static = True
+		lst += args
+	# so we assume the command-line will output flags to be parsed afterwards
+	ret = self.cmd_and_log(lst)
+	if not 'okmsg' in kw:
+		kw['okmsg'] = 'yes'
+
+	self.define(self.have_define(kw.get('uselib_store', kw['package'])), 1, 0)
+	self.parse_flags(ret, kw.get('uselib_store', kw['package'].upper()), kw.get('env', self.env), force_static=static)
+	return ret
+
+@conf
+def check_cfg(self, *k, **kw):
+	"""
+	Check for configuration flags using a **-config**-like program (pkg-config, sdl-config, etc).
+	Encapsulate the calls to :py:func:`waflib.Tools.c_config.validate_cfg` and :py:func:`waflib.Tools.c_config.exec_cfg`
+
+	A few examples::
+
+		def configure(conf):
+			conf.load('compiler_c')
+			conf.check_cfg(package='glib-2.0', args='--libs --cflags')
+			conf.check_cfg(package='glib-2.0', uselib_store='GLIB', atleast_version='2.10.0',
+				args='--cflags --libs')
+			conf.check_cfg(package='pango')
+			conf.check_cfg(package='pango', uselib_store='MYPANGO', args=['--cflags', '--libs'])
+			conf.check_cfg(package='pango',
+				args=['pango >= 0.1.0', 'pango < 9.9.9', '--cflags', '--libs'],
+				msg="Checking for 'pango 0.1.0'")
+			conf.check_cfg(path='sdl-config', args='--cflags --libs', package='', uselib_store='SDL')
+			conf.check_cfg(path='mpicc', args='--showme:compile --showme:link',
+				package='', uselib_store='OPEN_MPI', mandatory=False)
+
+	"""
+	if k:
+		lst = k[0].split()
+		kw['package'] = lst[0]
+		kw['args'] = ' '.join(lst[1:])
+
+	self.validate_cfg(kw)
+	if 'msg' in kw:
+		self.start_msg(kw['msg'])
+	ret = None
+	try:
+		ret = self.exec_cfg(kw)
+	except self.errors.WafError as e:
+		if 'errmsg' in kw:
+			self.end_msg(kw['errmsg'], 'YELLOW')
+		if Logs.verbose > 1:
+			raise
+		else:
+			self.fatal('The configuration failed')
+	else:
+		kw['success'] = ret
+		if 'okmsg' in kw:
+			self.end_msg(self.ret_msg(kw['okmsg'], kw))
+
+	return ret
+
+@conf
+def validate_c(self, kw):
+	"""
+	pre-check the parameters that will be given to run_c_code
+
+	:param env: an optional environment (modified -> provide a copy)
+	:type env: :py:class:`waflib.ConfigSet.ConfigSet`
+	:param compiler: c or cxx (tries to guess what is best)
+	:type compiler: string
+	:param type: cprogram, cshlib, cstlib - not required if *features are given directly*
+	:type type: binary to create
+	:param feature: desired features for the task generator that will execute the test, for example ``cxx cxxstlib``
+	:type feature: list of string
+	:param fragment: provide a piece of code for the test (default is to let the system create one)
+	:type fragment: string
+	:param uselib_store: define variables after the test is executed (IMPORTANT!)
+	:type uselib_store: string
+	:param use: parameters to use for building (just like the normal *use* keyword)
+	:type use: list of string
+	:param define_name: define to set when the check is over
+	:type define_name: string
+	:param execute: execute the resulting binary
+	:type execute: bool
+	:param define_ret: if execute is set to True, use the execution output in both the define and the return value
+	:type define_ret: bool
+	:param header_name: check for a particular header
+	:type header_name: string
+	:param auto_add_header_name: if header_name was set, add the headers in env.INCKEYS so the next tests will include these headers
+	:type auto_add_header_name: bool
+	"""
+
+	if not 'env' in kw:
+		kw['env'] = self.env.derive()
+	env = kw['env']
+
+	if not 'compiler' in kw and not 'features' in kw:
+		kw['compiler'] = 'c'
+		if env['CXX_NAME'] and Task.classes.get('cxx', None):
+			kw['compiler'] = 'cxx'
+			if not self.env['CXX']:
+				self.fatal('a c++ compiler is required')
+		else:
+			if not self.env['CC']:
+				self.fatal('a c compiler is required')
+
+	if not 'compile_mode' in kw:
+		kw['compile_mode'] = 'c'
+		if 'cxx' in Utils.to_list(kw.get('features',[])) or kw.get('compiler', '') == 'cxx':
+			kw['compile_mode'] = 'cxx'
+
+	if not 'type' in kw:
+		kw['type'] = 'cprogram'
+
+	if not 'features' in kw:
+		kw['features'] = [kw['compile_mode'], kw['type']] # "cprogram c"
+	else:
+		kw['features'] = Utils.to_list(kw['features'])
+
+	if not 'compile_filename' in kw:
+		kw['compile_filename'] = 'test.c' + ((kw['compile_mode'] == 'cxx') and 'pp' or '')
+
+
+	def to_header(dct):
+		if 'header_name' in dct:
+			dct = Utils.to_list(dct['header_name'])
+			return ''.join(['#include <%s>\n' % x for x in dct])
+		return ''
+
+	#OSX
+	if 'framework_name' in kw:
+		fwkname = kw['framework_name']
+		if not 'uselib_store' in kw:
+			kw['uselib_store'] = fwkname.upper()
+
+		if not kw.get('no_header', False):
+			if not 'header_name' in kw:
+				kw['header_name'] = []
+			fwk = '%s/%s.h' % (fwkname, fwkname)
+			if kw.get('remove_dot_h', None):
+				fwk = fwk[:-2]
+			kw['header_name'] = Utils.to_list(kw['header_name']) + [fwk]
+
+		kw['msg'] = 'Checking for framework %s' % fwkname
+		kw['framework'] = fwkname
+		#kw['frameworkpath'] = set it yourself
+
+	if 'function_name' in kw:
+		fu = kw['function_name']
+		if not 'msg' in kw:
+			kw['msg'] = 'Checking for function %s' % fu
+		kw['code'] = to_header(kw) + SNIP_FUNCTION % fu
+		if not 'uselib_store' in kw:
+			kw['uselib_store'] = fu.upper()
+		if not 'define_name' in kw:
+			kw['define_name'] = self.have_define(fu)
+
+	elif 'type_name' in kw:
+		tu = kw['type_name']
+		if not 'header_name' in kw:
+			kw['header_name'] = 'stdint.h'
+		if 'field_name' in kw:
+			field = kw['field_name']
+			kw['code'] = to_header(kw) + SNIP_FIELD % {'type_name' : tu, 'field_name' : field}
+			if not 'msg' in kw:
+				kw['msg'] = 'Checking for field %s in %s' % (field, tu)
+			if not 'define_name' in kw:
+				kw['define_name'] = self.have_define((tu + '_' + field).upper())
+		else:
+			kw['code'] = to_header(kw) + SNIP_TYPE % {'type_name' : tu}
+			if not 'msg' in kw:
+				kw['msg'] = 'Checking for type %s' % tu
+			if not 'define_name' in kw:
+				kw['define_name'] = self.have_define(tu.upper())
+
+	elif 'header_name' in kw:
+		if not 'msg' in kw:
+			kw['msg'] = 'Checking for header %s' % kw['header_name']
+
+		l = Utils.to_list(kw['header_name'])
+		assert len(l)>0, 'list of headers in header_name is empty'
+
+		kw['code'] = to_header(kw) + SNIP_EMPTY_PROGRAM
+
+		if not 'uselib_store' in kw:
+			kw['uselib_store'] = l[0].upper()
+
+		if not 'define_name' in kw:
+			kw['define_name'] = self.have_define(l[0])
+
+	if 'lib' in kw:
+		if not 'msg' in kw:
+			kw['msg'] = 'Checking for library %s' % kw['lib']
+		if not 'uselib_store' in kw:
+			kw['uselib_store'] = kw['lib'].upper()
+
+	if 'stlib' in kw:
+		if not 'msg' in kw:
+			kw['msg'] = 'Checking for static library %s' % kw['stlib']
+		if not 'uselib_store' in kw:
+			kw['uselib_store'] = kw['stlib'].upper()
+
+	if 'fragment' in kw:
+		# an additional code fragment may be provided to replace the predefined code
+		# in custom headers
+		kw['code'] = kw['fragment']
+		if not 'msg' in kw:
+			kw['msg'] = 'Checking for code snippet'
+		if not 'errmsg' in kw:
+			kw['errmsg'] = 'no'
+
+	for (flagsname,flagstype) in [('cxxflags','compiler'), ('cflags','compiler'), ('linkflags','linker')]:
+		if flagsname in kw:
+			if not 'msg' in kw:
+				kw['msg'] = 'Checking for %s flags %s' % (flagstype, kw[flagsname])
+			if not 'errmsg' in kw:
+				kw['errmsg'] = 'no'
+
+	if not 'execute' in kw:
+		kw['execute'] = False
+	if kw['execute']:
+		kw['features'].append('test_exec')
+
+	if not 'errmsg' in kw:
+		kw['errmsg'] = 'not found'
+
+	if not 'okmsg' in kw:
+		kw['okmsg'] = 'yes'
+
+	if not 'code' in kw:
+		kw['code'] = SNIP_EMPTY_PROGRAM
+
+	# if there are headers to append automatically to the next tests
+	if self.env[INCKEYS]:
+		kw['code'] = '\n'.join(['#include <%s>' % x for x in self.env[INCKEYS]]) + '\n' + kw['code']
+
+	if not kw.get('success'): kw['success'] = None
+
+	if 'define_name' in kw:
+		self.undefine(kw['define_name'])
+
+	assert 'msg' in kw, 'invalid parameters, read http://freehackers.org/~tnagy/wafbook/single.html#config_helpers_c'
+
+@conf
+def post_check(self, *k, **kw):
+	"Set the variables after a test executed in :py:func:`waflib.Tools.c_config.check` was run successfully"
+
+	is_success = 0
+	if kw['execute']:
+		if kw['success'] is not None:
+			if kw.get('define_ret', False):
+				is_success = kw['success']
+			else:
+				is_success = (kw['success'] == 0)
+	else:
+		is_success = (kw['success'] == 0)
+
+	if 'define_name' in kw:
+		# TODO simplify?
+		if 'header_name' in kw or 'function_name' in kw or 'type_name' in kw or 'fragment' in kw:
+			nm = kw['define_name']
+			if kw['execute'] and kw.get('define_ret', None) and isinstance(is_success, str):
+				self.define(kw['define_name'], is_success, quote=kw.get('quote', 1))
+			else:
+				self.define_cond(kw['define_name'], is_success)
+		else:
+			self.define_cond(kw['define_name'], is_success)
+
+	if 'header_name' in kw:
+		if kw.get('auto_add_header_name', False):
+			self.env.append_value(INCKEYS, Utils.to_list(kw['header_name']))
+
+	if is_success and 'uselib_store' in kw:
+		from waflib.Tools import ccroot
+
+		# TODO see get_uselib_vars from ccroot.py
+		_vars = set([])
+		for x in kw['features']:
+			if x in ccroot.USELIB_VARS:
+				_vars |= ccroot.USELIB_VARS[x]
+
+		for k in _vars:
+			lk = k.lower()
+			if k == 'INCLUDES': lk = 'includes'
+			if k == 'DEFINES': lk = 'defines'
+			if lk in kw:
+				val = kw[lk]
+				# remove trailing slash
+				if isinstance(val, str):
+					val = val.rstrip(os.path.sep)
+				self.env.append_unique(k + '_' + kw['uselib_store'], val)
+	return is_success
+
+@conf
+def check(self, *k, **kw):
+	"""
+	Perform a configuration test by calling :py:func:`waflib.Tools.c_config.run_c_code`.
+	For the complete list of parameters, see :py:func:`waflib.Tools.c_config.validate_c`.
+	To force a specific compiler, prefer the methods :py:func:`waflib.Tools.c_config.check_cxx` or :py:func:`waflib.Tools.c_config.check_cc`
+	"""
+	self.validate_c(kw)
+	self.start_msg(kw['msg'])
+	ret = None
+	try:
+		ret = self.run_c_code(*k, **kw)
+	except self.errors.ConfigurationError as e:
+		self.end_msg(kw['errmsg'], 'YELLOW')
+		if Logs.verbose > 1:
+			raise
+		else:
+			self.fatal('The configuration failed')
+	else:
+		kw['success'] = ret
+		self.end_msg(self.ret_msg(kw['okmsg'], kw))
+
+	ret = self.post_check(*k, **kw)
+	if not ret:
+		self.fatal('The configuration failed %r' % ret)
+	return ret
+
+class test_exec(Task.Task):
+	"""
+	A task for executing a programs after they are built. See :py:func:`waflib.Tools.c_config.test_exec_fun`.
+	"""
+	color = 'PINK'
+	def run(self):
+		if getattr(self.generator, 'rpath', None):
+			if getattr(self.generator, 'define_ret', False):
+				self.generator.bld.retval = self.generator.bld.cmd_and_log([self.inputs[0].abspath()])
+			else:
+				self.generator.bld.retval = self.generator.bld.exec_command([self.inputs[0].abspath()])
+		else:
+			env = self.env.env or {}
+			env.update(dict(os.environ))
+			for var in ('LD_LIBRARY_PATH', 'DYLD_LIBRARY_PATH', 'PATH'):
+				env[var] = self.inputs[0].parent.abspath() + os.path.pathsep + env.get(var, '')
+			if getattr(self.generator, 'define_ret', False):
+				self.generator.bld.retval = self.generator.bld.cmd_and_log([self.inputs[0].abspath()], env=env)
+			else:
+				self.generator.bld.retval = self.generator.bld.exec_command([self.inputs[0].abspath()], env=env)
+
+@feature('test_exec')
+@after_method('apply_link')
+def test_exec_fun(self):
+	"""
+	The feature **test_exec** is used to create a task that will to execute the binary
+	created (link task output) during the build. The exit status will be set
+	on the build context, so only one program may have the feature *test_exec*.
+	This is used by configuration tests::
+
+		def configure(conf):
+			conf.check(execute=True)
+	"""
+	self.create_task('test_exec', self.link_task.outputs[0])
+
+CACHE_RESULTS = 1
+COMPILE_ERRORS = 2
+
+@conf
+def run_c_code(self, *k, **kw):
+	"""
+	Create a temporary build context to execute a build. A reference to that build
+	context is kept on self.test_bld for debugging purposes, and you should not rely
+	on it too much (read the note on the cache below).
+	The parameters given in the arguments to this function are passed as arguments for
+	a single task generator created in the build. Only three parameters are obligatory:
+
+	:param features: features to pass to a task generator created in the build
+	:type features: list of string
+	:param compile_filename: file to create for the compilation (default: *test.c*)
+	:type compile_filename: string
+	:param code: code to write in the filename to compile
+	:type code: string
+
+	Though this function returns *0* by default, the build may set an attribute named *retval* on the
+	build context object to return a particular value. See :py:func:`waflib.Tools.c_config.test_exec_fun` for example.
+
+	This function also provides a limited cache. To use it, provide the following option::
+
+		def options(opt):
+			opt.add_option('--confcache', dest='confcache', default=0,
+				action='count', help='Use a configuration cache')
+
+	And execute the configuration with the following command-line::
+
+		$ waf configure --confcache
+
+	"""
+
+	lst = [str(v) for (p, v) in kw.items() if p != 'env']
+	h = Utils.h_list(lst)
+	dir = self.bldnode.abspath() + os.sep + (not Utils.is_win32 and '.' or '') + 'conf_check_' + Utils.to_hex(h)
+
+	try:
+		os.makedirs(dir)
+	except:
+		pass
+
+	try:
+		os.stat(dir)
+	except:
+		self.fatal('cannot use the configuration test folder %r' % dir)
+
+	cachemode = getattr(Options.options, 'confcache', None)
+	if cachemode == CACHE_RESULTS:
+		try:
+			proj = ConfigSet.ConfigSet(os.path.join(dir, 'cache_run_c_code'))
+			ret = proj['cache_run_c_code']
+		except:
+			pass
+		else:
+			if isinstance(ret, str) and ret.startswith('Test does not build'):
+				self.fatal(ret)
+			return ret
+
+	bdir = os.path.join(dir, 'testbuild')
+
+	if not os.path.exists(bdir):
+		os.makedirs(bdir)
+
+	self.test_bld = bld = Build.BuildContext(top_dir=dir, out_dir=bdir)
+	bld.init_dirs()
+	bld.progress_bar = 0
+	bld.targets = '*'
+
+	if kw['compile_filename']:
+		node = bld.srcnode.make_node(kw['compile_filename'])
+		node.write(kw['code'])
+
+	bld.logger = self.logger
+	bld.all_envs.update(self.all_envs) # not really necessary
+	bld.env = kw['env']
+
+	o = bld(features=kw['features'], source=kw['compile_filename'], target='testprog')
+
+	for k, v in kw.items():
+		setattr(o, k, v)
+
+	self.to_log("==>\n%s\n<==" % kw['code'])
+
+	# compile the program
+	bld.targets = '*'
+
+	ret = -1
+	try:
+		try:
+			bld.compile()
+		except Errors.WafError:
+			ret = 'Test does not build: %s' % Utils.ex_stack()
+			self.fatal(ret)
+		else:
+			ret = getattr(bld, 'retval', 0)
+	finally:
+		# cache the results each time
+		proj = ConfigSet.ConfigSet()
+		proj['cache_run_c_code'] = ret
+		proj.store(os.path.join(dir, 'cache_run_c_code'))
+
+	return ret
+
+@conf
+def check_cxx(self, *k, **kw):
+	"""
+	Same as :py:func:`waflib.Tools.c_config.check` but default to the *c++* programming language
+	"""
+	kw['compiler'] = 'cxx'
+	return self.check(*k, **kw)
+
+@conf
+def check_cc(self, *k, **kw):
+	"""
+	Same as :py:func:`waflib.Tools.c_config.check` but default to the *c* programming language
+	"""
+	kw['compiler'] = 'c'
+	return self.check(*k, **kw)
+
+@conf
+def define(self, key, val, quote=True):
+	"""
+	Store a single define and its state into conf.env.DEFINES
+
+	:param key: define name
+	:type key: string
+	:param val: value
+	:type val: int or string
+	:param quote: enclose strings in quotes (yes by default)
+	:type quote: bool
+	"""
+	assert key and isinstance(key, str)
+
+	if isinstance(val, int) or isinstance(val, float):
+		s = '%s=%s'
+	else:
+		s = quote and '%s="%s"' or '%s=%s'
+	app = s % (key, str(val))
+
+	ban = key + '='
+	lst = self.env['DEFINES']
+	for x in lst:
+		if x.startswith(ban):
+			lst[lst.index(x)] = app
+			break
+	else:
+		self.env.append_value('DEFINES', app)
+
+	self.env.append_unique(DEFKEYS, key)
+
+@conf
+def undefine(self, key):
+	"""
+	Remove a define from conf.env.DEFINES
+
+	:param key: define name
+	:type key: string
+	"""
+	assert key and isinstance(key, str)
+
+	ban = key + '='
+	lst = [x for x in self.env['DEFINES'] if not x.startswith(ban)]
+	self.env['DEFINES'] = lst
+	self.env.append_unique(DEFKEYS, key)
+
+@conf
+def define_cond(self, key, val):
+	"""
+	Conditionally define a name::
+
+		def configure(conf):
+			conf.define_cond('A', True)
+			# equivalent to:
+			# if val: conf.define('A', 1)
+			# else: conf.undefine('A')
+
+	:param key: define name
+	:type key: string
+	:param val: value
+	:type val: int or string
+	"""
+	assert key and isinstance(key, str)
+
+	if val:
+		self.define(key, 1)
+	else:
+		self.undefine(key)
+
+@conf
+def is_defined(self, key):
+	"""
+	:param key: define name
+	:type key: string
+	:return: True if the define is set
+	:rtype: bool
+	"""
+	assert key and isinstance(key, str)
+
+	ban = key + '='
+	for x in self.env['DEFINES']:
+		if x.startswith(ban):
+			return True
+	return False
+
+@conf
+def get_define(self, key):
+	"""
+	:param key: define name
+	:type key: string
+	:return: the value of a previously stored define or None if it is not set
+	"""
+	assert key and isinstance(key, str)
+
+	ban = key + '='
+	for x in self.env['DEFINES']:
+		if x.startswith(ban):
+			return x[len(ban):]
+	return None
+
+@conf
+def have_define(self, key):
+	"""
+	:param key: define name
+	:type key: string
+	:return: the input key prefixed by *HAVE_* and substitute any invalid characters.
+	:rtype: string
+	"""
+	return self.__dict__.get('HAVE_PAT', 'HAVE_%s') % Utils.quote_define_name(key)
+
+@conf
+def write_config_header(self, configfile='', guard='', top=False, env=None, defines=True, headers=False, remove=True):
+	"""
+	Write a configuration header containing defines and includes::
+
+		def configure(cnf):
+			cnf.define('A', 1)
+			cnf.write_config_header('config.h')
+
+	:param configfile: relative path to the file to create
+	:type configfile: string
+	:param env: config set to read the definitions from (default is conf.env)
+	:type env: :py:class:`waflib.ConfigSet.ConfigSet`
+	:param top: write the configuration header from the build directory (default is from the current path)
+	:type top: bool
+	:param defines: add the defines (yes by default)
+	:type defines: bool
+	:param headers: add #include in the file
+	:type headers: bool
+	:param remove: remove the defines after they are added (yes by default)
+	:type remove: bool
+	"""
+	if not configfile: configfile = WAF_CONFIG_H
+	waf_guard = guard or '_%s_WAF' % Utils.quote_define_name(configfile)
+
+	node = top and self.bldnode or self.path.get_bld()
+	node = node.make_node(configfile)
+	node.parent.mkdir()
+
+	lst = ['/* WARNING! All changes made to this file will be lost! */\n']
+	lst.append('#ifndef %s\n#define %s\n' % (waf_guard, waf_guard))
+	lst.append(self.get_config_header(defines, headers))
+	lst.append('\n#endif /* %s */\n' % waf_guard)
+
+	node.write('\n'.join(lst))
+
+	env = env or self.env
+
+	# config files are not removed on "waf clean"
+	env.append_unique(Build.CFG_FILES, [node.abspath()])
+
+	if remove:
+		for key in self.env[DEFKEYS]:
+			self.undefine(key)
+		self.env[DEFKEYS] = []
+
+@conf
+def get_config_header(self, defines=True, headers=False):
+	"""
+	Create the contents of a ``config.h`` file from the defines and includes
+	set in conf.env.define_key / conf.env.include_key. No include guards are added.
+
+	:param defines: write the defines values
+	:type defines: bool
+	:param headers: write the headers
+	:type headers: bool
+	:return: the contents of a ``config.h`` file
+	:rtype: string
+	"""
+	lst = []
+	if headers:
+		for x in self.env[INCKEYS]:
+			lst.append('#include <%s>' % x)
+
+	if defines:
+		for x in self.env[DEFKEYS]:
+			if self.is_defined(x):
+				val = self.get_define(x)
+				lst.append('#define %s %s' % (x, val))
+			else:
+				lst.append('/* #undef %s */' % x)
+	return "\n".join(lst)
+
+@conf
+def cc_add_flags(conf):
+	"""
+	Read the CFLAGS/CPPFLAGS from os.environ and add to conf.env.CFLAGS
+	"""
+	conf.add_os_flags('CPPFLAGS', 'CFLAGS')
+	conf.add_os_flags('CFLAGS')
+
+@conf
+def cxx_add_flags(conf):
+	"""
+	Read the CXXFLAGS/CPPFLAGS and add to conf.env.CXXFLAGS
+	"""
+	conf.add_os_flags('CPPFLAGS', 'CXXFLAGS')
+	conf.add_os_flags('CXXFLAGS')
+
+@conf
+def link_add_flags(conf):
+	"""
+	Read the LINKFLAGS/LDFLAGS and add to conf.env.LDFLAGS
+	"""
+	conf.add_os_flags('LINKFLAGS')
+	conf.add_os_flags('LDFLAGS', 'LINKFLAGS')
+
+@conf
+def cc_load_tools(conf):
+	"""
+	Load the c tool
+	"""
+	if not conf.env.DEST_OS:
+		conf.env.DEST_OS = Utils.unversioned_sys_platform()
+	conf.load('c')
+
+@conf
+def cxx_load_tools(conf):
+	"""
+	Load the cxx tool
+	"""
+	if not conf.env.DEST_OS:
+		conf.env.DEST_OS = Utils.unversioned_sys_platform()
+	conf.load('cxx')
+
+@conf
+def get_cc_version(conf, cc, gcc=False, icc=False):
+	"""
+	Run the preprocessor to determine the compiler version
+
+	The variables CC_VERSION, DEST_OS, DEST_BINFMT and DEST_CPU will be set in *conf.env*
+	"""
+	cmd = cc + ['-dM', '-E', '-']
+	env = conf.env.env or None
+	try:
+		p = Utils.subprocess.Popen(cmd, stdin=Utils.subprocess.PIPE, stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE, env=env)
+		p.stdin.write('\n'.encode())
+		out = p.communicate()[0]
+	except:
+		conf.fatal('Could not determine the compiler version %r' % cmd)
+
+	if not isinstance(out, str):
+		out = out.decode(sys.stdout.encoding)
+
+	if gcc:
+		if out.find('__INTEL_COMPILER') >= 0:
+			conf.fatal('The intel compiler pretends to be gcc')
+		if out.find('__GNUC__') < 0:
+			conf.fatal('Could not determine the compiler type')
+
+	if icc and out.find('__INTEL_COMPILER') < 0:
+		conf.fatal('Not icc/icpc')
+
+	k = {}
+	if icc or gcc:
+		out = out.split('\n')
+		for line in out:
+			lst = shlex.split(line)
+			if len(lst)>2:
+				key = lst[1]
+				val = lst[2]
+				k[key] = val
+
+		def isD(var):
+			return var in k
+
+		def isT(var):
+			return var in k and k[var] != '0'
+
+		# Some documentation is available at http://predef.sourceforge.net
+		# The names given to DEST_OS must match what Utils.unversioned_sys_platform() returns.
+		if not conf.env.DEST_OS:
+			conf.env.DEST_OS = ''
+		for i in MACRO_TO_DESTOS:
+			if isD(i):
+				conf.env.DEST_OS = MACRO_TO_DESTOS[i]
+				break
+		else:
+			if isD('__APPLE__') and isD('__MACH__'):
+				conf.env.DEST_OS = 'darwin'
+			elif isD('__unix__'): # unix must be tested last as it's a generic fallback
+				conf.env.DEST_OS = 'generic'
+
+		if isD('__ELF__'):
+			conf.env.DEST_BINFMT = 'elf'
+		elif isD('__WINNT__') or isD('__CYGWIN__'):
+			conf.env.DEST_BINFMT = 'pe'
+			conf.env.LIBDIR = conf.env['PREFIX'] + '/bin'
+		elif isD('__APPLE__'):
+			conf.env.DEST_BINFMT = 'mac-o'
+
+		if not conf.env.DEST_BINFMT:
+			# Infer the binary format from the os name.
+			conf.env.DEST_BINFMT = Utils.destos_to_binfmt(conf.env.DEST_OS)
+
+		for i in MACRO_TO_DEST_CPU:
+			if isD(i):
+				conf.env.DEST_CPU = MACRO_TO_DEST_CPU[i]
+				break
+
+		Logs.debug('ccroot: dest platform: ' + ' '.join([conf.env[x] or '?' for x in ('DEST_OS', 'DEST_BINFMT', 'DEST_CPU')]))
+		if icc:
+			ver = k['__INTEL_COMPILER']
+			conf.env['CC_VERSION'] = (ver[:-2], ver[-2], ver[-1])
+		else:
+			conf.env['CC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'], k['__GNUC_PATCHLEVEL__'])
+	return k
+
+@conf
+def get_xlc_version(conf, cc):
+	"""Get the compiler version"""
+
+	version_re = re.compile(r"IBM XL C/C\+\+.*, V(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
+	cmd = cc + ['-qversion']
+
+	try:
+		out, err = conf.cmd_and_log(cmd, output=0)
+	except Errors.WafError:
+		conf.fatal('Could not find xlc %r' % cmd)
+	if out: match = version_re(out)
+	else: match = version_re(err)
+	if not match:
+		conf.fatal('Could not determine the XLC version.')
+	k = match.groupdict()
+	conf.env['CC_VERSION'] = (k['major'], k['minor'])
+
+# ============ the --as-needed flag should be added during the configuration, not at runtime =========
+
+@conf
+def add_as_needed(self):
+	"""
+	Add ``--as-needed`` to the *LINKFLAGS*
+	"""
+	if self.env.DEST_BINFMT == 'elf' and 'gcc' in (self.env.CXX_NAME, self.env.CC_NAME):
+		self.env.append_unique('LINKFLAGS', '--as-needed')
+
+# ============ parallel configuration
+
+class cfgtask(Task.TaskBase):
+	"""
+	A task that executes configuration tests
+	make sure that the checks write to conf.env in a thread-safe manner
+
+	for the moment it only executes conf.check
+	"""
+	def display(self):
+		return ''
+
+	def runnable_status(self):
+		return Task.RUN_ME
+
+	def run(self):
+		conf = self.conf
+		bld = Build.BuildContext(top_dir=conf.srcnode.abspath(), out_dir=conf.bldnode.abspath())
+		bld.env = conf.env
+		bld.init_dirs()
+		bld.in_msg = 1 # suppress top-level start_msg
+		bld.logger = self.logger
+		try:
+			bld.check(**self.args)
+		except:
+			return 1
+
+@conf
+def multicheck(self, *k, **kw):
+	"""
+	Use tuples to perform parallel configuration tests
+	"""
+	self.start_msg(kw.get('msg', 'Executing %d configuration tests' % len(k)))
+
+	class par(object):
+		def __init__(self):
+			self.keep = False
+			self.cache_global = Options.cache_global
+			self.nocache = Options.options.nocache
+			self.returned_tasks = []
+		def total(self):
+			return len(tasks)
+		def to_log(self, *k, **kw):
+			return
+
+	bld = par()
+	tasks = []
+	for dct in k:
+		x = cfgtask(bld=bld)
+		tasks.append(x)
+		x.args = dct
+		x.bld = bld
+		x.conf = self
+		x.args = dct
+
+		# bind a logger that will keep the info in memory
+		x.logger = Logs.make_mem_logger(str(id(x)), self.logger)
+
+	def it():
+		yield tasks
+		while 1:
+			yield []
+	p = Runner.Parallel(bld, Options.options.jobs)
+	p.biter = it()
+	p.start()
+
+	# flush the logs in order into the config.log
+	for x in tasks:
+		x.logger.memhandler.flush()
+
+	for x in tasks:
+		if x.hasrun != Task.SUCCESS:
+			self.end_msg(kw.get('errmsg', 'no'), color='YELLOW')
+			self.fatal(kw.get('fatalmsg', None) or 'One of the tests has failed, see the config.log for more information')
+
+	self.end_msg('ok')
+
diff --git a/waflib/Tools/c_osx.py b/waflib/Tools/c_osx.py
new file mode 100644
index 0000000..a2f6241
--- /dev/null
+++ b/waflib/Tools/c_osx.py
@@ -0,0 +1,188 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy 2008-2010
+
+"""
+MacOSX related tools
+"""
+
+import os, shutil, sys, platform
+from waflib import TaskGen, Task, Build, Options, Utils, Errors
+from waflib.TaskGen import taskgen_method, feature, after_method, before_method
+
+app_info = '''
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist SYSTEM "file://localhost/System/Library/DTDs/PropertyList.dtd">
+<plist version="0.9">
+<dict>
+	<key>CFBundlePackageType</key>
+	<string>APPL</string>
+	<key>CFBundleGetInfoString</key>
+	<string>Created by Waf</string>
+	<key>CFBundleSignature</key>
+	<string>????</string>
+	<key>NOTE</key>
+	<string>THIS IS A GENERATED FILE, DO NOT MODIFY</string>
+	<key>CFBundleExecutable</key>
+	<string>%s</string>
+</dict>
+</plist>
+'''
+"""
+plist template
+"""
+
+@feature('c', 'cxx')
+def set_macosx_deployment_target(self):
+	"""
+	see WAF issue 285 and also and also http://trac.macports.org/ticket/17059
+	"""
+	if self.env['MACOSX_DEPLOYMENT_TARGET']:
+		os.environ['MACOSX_DEPLOYMENT_TARGET'] = self.env['MACOSX_DEPLOYMENT_TARGET']
+	elif 'MACOSX_DEPLOYMENT_TARGET' not in os.environ:
+		if Utils.unversioned_sys_platform() == 'darwin':
+			os.environ['MACOSX_DEPLOYMENT_TARGET'] = '.'.join(platform.mac_ver()[0].split('.')[:2])
+
+@taskgen_method
+def create_bundle_dirs(self, name, out):
+	"""
+	Create bundle folders, used by :py:func:`create_task_macplist` and :py:func:`create_task_macapp`
+	"""
+	bld = self.bld
+	dir = out.parent.find_or_declare(name)
+	dir.mkdir()
+	macos = dir.find_or_declare(['Contents', 'MacOS'])
+	macos.mkdir()
+	return dir
+
+def bundle_name_for_output(out):
+	name = out.name
+	k = name.rfind('.')
+	if k >= 0:
+		name = name[:k] + '.app'
+	else:
+		name = name + '.app'
+	return name
+
+@feature('cprogram', 'cxxprogram')
+@after_method('apply_link')
+def create_task_macapp(self):
+	"""
+	To compile an executable into a Mac application (a .app), set its *mac_app* attribute::
+
+		def build(bld):
+			bld.shlib(source='a.c', target='foo', mac_app = True)
+
+	To force *all* executables to be transformed into Mac applications::
+
+		def build(bld):
+			bld.env.MACAPP = True
+			bld.shlib(source='a.c', target='foo')
+	"""
+	if self.env['MACAPP'] or getattr(self, 'mac_app', False):
+		out = self.link_task.outputs[0]
+
+		name = bundle_name_for_output(out)
+		dir = self.create_bundle_dirs(name, out)
+
+		n1 = dir.find_or_declare(['Contents', 'MacOS', out.name])
+
+		self.apptask = self.create_task('macapp', self.link_task.outputs, n1)
+		inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/MacOS/' % name
+		self.bld.install_files(inst_to, n1, chmod=Utils.O755)
+
+		if getattr(self, 'mac_resources', None):
+			res_dir = n1.parent.parent.make_node('Resources')
+			inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Resources' % name
+			for x in self.to_list(self.mac_resources):
+				node = self.path.find_node(x)
+				if not node:
+					raise Errors.WafError('Missing mac_resource %r in %r' % (x, self))
+
+				parent = node.parent
+				if os.path.isdir(node.abspath()):
+					nodes = node.ant_glob('**')
+				else:
+					nodes = [node]
+				for node in nodes:
+					rel = node.path_from(parent)
+					tsk = self.create_task('macapp', node, res_dir.make_node(rel))
+					self.bld.install_as(inst_to + '/%s' % rel, node)
+
+		if getattr(self.bld, 'is_install', None):
+			# disable the normal binary installation
+			self.install_task.hasrun = Task.SKIP_ME
+
+@feature('cprogram', 'cxxprogram')
+@after_method('apply_link')
+def create_task_macplist(self):
+	"""
+	Create a :py:class:`waflib.Tools.c_osx.macplist` instance.
+	"""
+	if  self.env['MACAPP'] or getattr(self, 'mac_app', False):
+		out = self.link_task.outputs[0]
+
+		name = bundle_name_for_output(out)
+
+		dir = self.create_bundle_dirs(name, out)
+		n1 = dir.find_or_declare(['Contents', 'Info.plist'])
+		self.plisttask = plisttask = self.create_task('macplist', [], n1)
+
+		if getattr(self, 'mac_plist', False):
+			node = self.path.find_resource(self.mac_plist)
+			if node:
+				plisttask.inputs.append(node)
+			else:
+				plisttask.code = self.mac_plist
+		else:
+			plisttask.code = app_info % self.link_task.outputs[0].name
+
+		inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/' % name
+		self.bld.install_files(inst_to, n1)
+
+@feature('cshlib', 'cxxshlib')
+@before_method('apply_link', 'propagate_uselib_vars')
+def apply_bundle(self):
+	"""
+	To make a bundled shared library (a ``.bundle``), set the *mac_bundle* attribute::
+
+		def build(bld):
+			bld.shlib(source='a.c', target='foo', mac_bundle = True)
+
+	To force *all* executables to be transformed into bundles::
+
+		def build(bld):
+			bld.env.MACBUNDLE = True
+			bld.shlib(source='a.c', target='foo')
+	"""
+	if self.env['MACBUNDLE'] or getattr(self, 'mac_bundle', False):
+		self.env['LINKFLAGS_cshlib'] = self.env['LINKFLAGS_cxxshlib'] = [] # disable the '-dynamiclib' flag
+		self.env['cshlib_PATTERN'] = self.env['cxxshlib_PATTERN'] = self.env['macbundle_PATTERN']
+		use = self.use = self.to_list(getattr(self, 'use', []))
+		if not 'MACBUNDLE' in use:
+			use.append('MACBUNDLE')
+
+app_dirs = ['Contents', 'Contents/MacOS', 'Contents/Resources']
+
+class macapp(Task.Task):
+	"""
+	Create mac applications
+	"""
+	color = 'PINK'
+	def run(self):
+		self.outputs[0].parent.mkdir()
+		shutil.copy2(self.inputs[0].srcpath(), self.outputs[0].abspath())
+
+class macplist(Task.Task):
+	"""
+	Create plist files
+	"""
+	color = 'PINK'
+	ext_in = ['.bin']
+	def run(self):
+		if getattr(self, 'code', None):
+			txt = self.code
+		else:
+			txt = self.inputs[0].read()
+		self.outputs[0].write(txt)
+
diff --git a/waflib/Tools/c_preproc.py b/waflib/Tools/c_preproc.py
new file mode 100644
index 0000000..79eef46
--- /dev/null
+++ b/waflib/Tools/c_preproc.py
@@ -0,0 +1,1030 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2010 (ita)
+
+"""
+C/C++ preprocessor for finding dependencies
+
+Reasons for using the Waf preprocessor by default
+
+#. Some c/c++ extensions (Qt) require a custom preprocessor for obtaining the dependencies (.moc files)
+#. Not all compilers provide .d files for obtaining the dependencies (portability)
+#. A naive file scanner will not catch the constructs such as "#include foo()"
+#. A naive file scanner will catch unnecessary dependencies (change an unused header -> recompile everything)
+
+Regarding the speed concerns:
+
+* the preprocessing is performed only when files must be compiled
+* the macros are evaluated only for #if/#elif/#include
+* system headers are not scanned by default
+
+Now if you do not want the Waf preprocessor, the tool +gccdeps* uses the .d files produced
+during the compilation to track the dependencies (useful when used with the boost libraries).
+It only works with gcc >= 4.4 though.
+
+A dumb preprocessor is also available in the tool *c_dumbpreproc*
+"""
+# TODO: more varargs, pragma once
+
+import re, sys, os, string, traceback
+from waflib import Logs, Build, Utils, Errors
+from waflib.Logs import debug, error
+
+class PreprocError(Errors.WafError):
+	pass
+
+POPFILE = '-'
+"Constant representing a special token used in :py:meth:`waflib.Tools.c_preproc.c_parser.start` iteration to switch to a header read previously"
+
+recursion_limit = 150
+"Limit on the amount of files to read in the dependency scanner"
+
+go_absolute = False
+"Set to True to track headers on files in /usr/include, else absolute paths are ignored (but it becomes very slow)"
+
+standard_includes = ['/usr/include']
+if Utils.is_win32:
+	standard_includes = []
+
+use_trigraphs = 0
+"""Apply trigraph rules (False by default)"""
+
+strict_quotes = 0
+"""Reserve the "#include <>" quotes for system includes (do not search for those includes). False by default."""
+
+g_optrans = {
+'not':'!',
+'and':'&&',
+'bitand':'&',
+'and_eq':'&=',
+'or':'||',
+'bitor':'|',
+'or_eq':'|=',
+'xor':'^',
+'xor_eq':'^=',
+'compl':'~',
+}
+"""Operators such as and/or/xor for c++. Set an empty dict to disable."""
+
+# ignore #warning and #error
+re_lines = re.compile(
+	'^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',
+	re.IGNORECASE | re.MULTILINE)
+"""Match #include lines"""
+
+re_mac = re.compile("^[a-zA-Z_]\w*")
+"""Match macro definitions"""
+
+re_fun = re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]')
+"""Match macro functions"""
+
+re_pragma_once = re.compile('^\s*once\s*', re.IGNORECASE)
+"""Match #pragma once statements"""
+
+re_nl = re.compile('\\\\\r*\n', re.MULTILINE)
+"""Match newlines"""
+
+re_cpp = re.compile(
+	r"""(/\*[^*]*\*+([^/*][^*]*\*+)*/)|//[^\n]*|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^/"'\\]*)""",
+	re.MULTILINE)
+"""Filter C/C++ comments"""
+
+trig_def = [('??'+a, b) for a, b in zip("=-/!'()<>", r'#~\|^[]{}')]
+"""Trigraph definitions"""
+
+chr_esc = {'0':0, 'a':7, 'b':8, 't':9, 'n':10, 'f':11, 'v':12, 'r':13, '\\':92, "'":39}
+"""Escape characters"""
+
+NUM   = 'i'
+"""Number token"""
+
+OP    = 'O'
+"""Operator token"""
+
+IDENT = 'T'
+"""Identifier token"""
+
+STR   = 's'
+"""String token"""
+
+CHAR  = 'c'
+"""Character token"""
+
+tok_types = [NUM, STR, IDENT, OP]
+"""Token types"""
+
+exp_types = [
+	r"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""",
+	r'L?"([^"\\]|\\.)*"',
+	r'[a-zA-Z_]\w*',
+	r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',
+]
+"""Expression types"""
+
+re_clexer = re.compile('|'.join(["(?P<%s>%s)" % (name, part) for name, part in zip(tok_types, exp_types)]), re.M)
+"""Match expressions into tokens"""
+
+accepted  = 'a'
+"""Parser state is *accepted*"""
+
+ignored   = 'i'
+"""Parser state is *ignored*, for example preprocessor lines in an #if 0 block"""
+
+undefined = 'u'
+"""Parser state is *undefined* at the moment"""
+
+skipped   = 's'
+"""Parser state is *skipped*, for example preprocessor lines in a #elif 0 block"""
+
+def repl(m):
+	"""Replace function used with :py:attr:`waflib.Tools.c_preproc.re_cpp`"""
+	s = m.group(1)
+	if s:
+		return ' '
+	return m.group(3) or ''
+
+def filter_comments(filename):
+	"""
+	Filter the comments from a c/h file, and return the preprocessor lines.
+	The regexps :py:attr:`waflib.Tools.c_preproc.re_cpp`, :py:attr:`waflib.Tools.c_preproc.re_nl` and :py:attr:`waflib.Tools.c_preproc.re_lines` are used internally.
+
+	:return: the preprocessor directives as a list of (keyword, line)
+	:rtype: a list of string pairs
+	"""
+	# return a list of tuples : keyword, line
+	code = Utils.readf(filename)
+	if use_trigraphs:
+		for (a, b) in trig_def: code = code.split(a).join(b)
+	code = re_nl.sub('', code)
+	code = re_cpp.sub(repl, code)
+	return [(m.group(2), m.group(3)) for m in re.finditer(re_lines, code)]
+
+prec = {}
+"""
+Operator precendence rules required for parsing expressions of the form::
+
+	#if 1 && 2 != 0
+"""
+ops = ['* / %', '+ -', '<< >>', '< <= >= >', '== !=', '& | ^', '&& ||', ',']
+for x in range(len(ops)):
+	syms = ops[x]
+	for u in syms.split():
+		prec[u] = x
+
+def trimquotes(s):
+	"""
+	Remove the single quotes around an expression::
+
+		trimquotes("'test'") == "test"
+
+	:param s: expression to transform
+	:type s: string
+	:rtype: string
+	"""
+	if not s: return ''
+	s = s.rstrip()
+	if s[0] == "'" and s[-1] == "'": return s[1:-1]
+	return s
+
+def reduce_nums(val_1, val_2, val_op):
+	"""
+	Apply arithmetic rules to compute a result
+
+	:param val1: input parameter
+	:type val1: int or string
+	:param val2: input parameter
+	:type val2: int or string
+	:param val_op: C operator in *+*, */*, *-*, etc
+	:type val_op: string
+	:rtype: int
+	"""
+	#print val_1, val_2, val_op
+
+	# now perform the operation, make certain a and b are numeric
+	try:    a = 0 + val_1
+	except TypeError: a = int(val_1)
+	try:    b = 0 + val_2
+	except TypeError: b = int(val_2)
+
+	d = val_op
+	if d == '%':  c = a%b
+	elif d=='+':  c = a+b
+	elif d=='-':  c = a-b
+	elif d=='*':  c = a*b
+	elif d=='/':  c = a/b
+	elif d=='^':  c = a^b
+	elif d=='|':  c = a|b
+	elif d=='||': c = int(a or b)
+	elif d=='&':  c = a&b
+	elif d=='&&': c = int(a and b)
+	elif d=='==': c = int(a == b)
+	elif d=='!=': c = int(a != b)
+	elif d=='<=': c = int(a <= b)
+	elif d=='<':  c = int(a < b)
+	elif d=='>':  c = int(a > b)
+	elif d=='>=': c = int(a >= b)
+	elif d=='^':  c = int(a^b)
+	elif d=='<<': c = a<<b
+	elif d=='>>': c = a>>b
+	else: c = 0
+	return c
+
+def get_num(lst):
+	"""
+	Try to obtain a number from a list of tokens. The token types are defined in :py:attr:`waflib.Tools.ccroot.tok_types`.
+
+	:param lst: list of preprocessor tokens
+	:type lst: list of tuple (tokentype, value)
+	:return: a pair containing the number and the rest of the list
+	:rtype: tuple(value, list)
+	"""
+	if not lst: raise PreprocError("empty list for get_num")
+	(p, v) = lst[0]
+	if p == OP:
+		if v == '(':
+			count_par = 1
+			i = 1
+			while i < len(lst):
+				(p, v) = lst[i]
+
+				if p == OP:
+					if v == ')':
+						count_par -= 1
+						if count_par == 0:
+							break
+					elif v == '(':
+						count_par += 1
+				i += 1
+			else:
+				raise PreprocError("rparen expected %r" % lst)
+
+			(num, _) = get_term(lst[1:i])
+			return (num, lst[i+1:])
+
+		elif v == '+':
+			return get_num(lst[1:])
+		elif v == '-':
+			num, lst = get_num(lst[1:])
+			return (reduce_nums('-1', num, '*'), lst)
+		elif v == '!':
+			num, lst = get_num(lst[1:])
+			return (int(not int(num)), lst)
+		elif v == '~':
+			return (~ int(num), lst)
+		else:
+			raise PreprocError("Invalid op token %r for get_num" % lst)
+	elif p == NUM:
+		return v, lst[1:]
+	elif p == IDENT:
+		# all macros should have been replaced, remaining identifiers eval to 0
+		return 0, lst[1:]
+	else:
+		raise PreprocError("Invalid token %r for get_num" % lst)
+
+def get_term(lst):
+	"""
+	Evaluate an expression recursively, for example::
+
+		1+1+1 -> 2+1 -> 3
+
+	:param lst: list of tokens
+	:type lst: list of tuple(token, value)
+	:return: the value and the remaining tokens
+	:rtype: value, list
+	"""
+
+	if not lst: raise PreprocError("empty list for get_term")
+	num, lst = get_num(lst)
+	if not lst:
+		return (num, [])
+	(p, v) = lst[0]
+	if p == OP:
+		if v == '&&' and not num:
+			return (num, [])
+		elif v == '||' and num:
+			return (num, [])
+		elif v == ',':
+			# skip
+			return get_term(lst[1:])
+		elif v == '?':
+			count_par = 0
+			i = 1
+			while i < len(lst):
+				(p, v) = lst[i]
+
+				if p == OP:
+					if v == ')':
+						count_par -= 1
+					elif v == '(':
+						count_par += 1
+					elif v == ':':
+						if count_par == 0:
+							break
+				i += 1
+			else:
+				raise PreprocError("rparen expected %r" % lst)
+
+			if int(num):
+				return get_term(lst[1:i])
+			else:
+				return get_term(lst[i+1:])
+
+		else:
+			num2, lst = get_num(lst[1:])
+
+			if not lst:
+				# no more tokens to process
+				num2 = reduce_nums(num, num2, v)
+				return get_term([(NUM, num2)] + lst)
+
+			# operator precedence
+			p2, v2 = lst[0]
+			if p2 != OP:
+				raise PreprocError("op expected %r" % lst)
+
+			if prec[v2] >= prec[v]:
+				num2 = reduce_nums(num, num2, v)
+				return get_term([(NUM, num2)] + lst)
+			else:
+				num3, lst = get_num(lst[1:])
+				num3 = reduce_nums(num2, num3, v2)
+				return get_term([(NUM, num), (p, v), (NUM, num3)] + lst)
+
+
+	raise PreprocError("cannot reduce %r" % lst)
+
+def reduce_eval(lst):
+	"""
+	Take a list of tokens and output true or false for #if/#elif conditions.
+
+	:param lst: a list of tokens
+	:type lst: list of tuple(token, value)
+	:return: a token
+	:rtype: tuple(NUM, int)
+	"""
+	num, lst = get_term(lst)
+	return (NUM, num)
+
+def stringize(lst):
+	"""
+	Merge a list of tokens into a string
+
+	:param lst: a list of tokens
+	:type lst: list of tuple(token, value)
+	:rtype: string
+	"""
+	lst = [str(v2) for (p2, v2) in lst]
+	return "".join(lst)
+
+def paste_tokens(t1, t2):
+	"""
+	Token pasting works between identifiers, particular operators, and identifiers and numbers::
+
+		a ## b  ->  ab
+		> ## =  ->  >=
+		a ## 2  ->  a2
+
+	:param t1: token
+	:type t1: tuple(type, value)
+	:param t2: token
+	:type t2: tuple(type, value)
+	"""
+	p1 = None
+	if t1[0] == OP and t2[0] == OP:
+		p1 = OP
+	elif t1[0] == IDENT and (t2[0] == IDENT or t2[0] == NUM):
+		p1 = IDENT
+	elif t1[0] == NUM and t2[0] == NUM:
+		p1 = NUM
+	if not p1:
+		raise PreprocError('tokens do not make a valid paste %r and %r' % (t1, t2))
+	return (p1, t1[1] + t2[1])
+
+def reduce_tokens(lst, defs, ban=[]):
+	"""
+	Replace the tokens in lst, using the macros provided in defs, and a list of macros that cannot be re-applied
+
+	:param lst: list of tokens
+	:type lst: list of tuple(token, value)
+	:param defs: macro definitions
+	:type defs: dict
+	:param ban: macros that cannot be substituted (recursion is not allowed)
+	:type ban: list of string
+	:return: the new list of tokens
+	:rtype: value, list
+	"""
+	i = 0
+
+	while i < len(lst):
+		(p, v) = lst[i]
+
+		if p == IDENT and v == "defined":
+			del lst[i]
+			if i < len(lst):
+				(p2, v2) = lst[i]
+				if p2 == IDENT:
+					if v2 in defs:
+						lst[i] = (NUM, 1)
+					else:
+						lst[i] = (NUM, 0)
+				elif p2 == OP and v2 == '(':
+					del lst[i]
+					(p2, v2) = lst[i]
+					del lst[i] # remove the ident, and change the ) for the value
+					if v2 in defs:
+						lst[i] = (NUM, 1)
+					else:
+						lst[i] = (NUM, 0)
+				else:
+					raise PreprocError("Invalid define expression %r" % lst)
+
+		elif p == IDENT and v in defs:
+
+			if isinstance(defs[v], str):
+				a, b = extract_macro(defs[v])
+				defs[v] = b
+			macro_def = defs[v]
+			to_add = macro_def[1]
+
+			if isinstance(macro_def[0], list):
+				# macro without arguments
+				del lst[i]
+				for x in range(len(to_add)):
+					lst.insert(i, to_add[x])
+					i += 1
+			else:
+				# collect the arguments for the funcall
+
+				args = []
+				del lst[i]
+
+				if i >= len(lst):
+					raise PreprocError("expected '(' after %r (got nothing)" % v)
+
+				(p2, v2) = lst[i]
+				if p2 != OP or v2 != '(':
+					raise PreprocError("expected '(' after %r" % v)
+
+				del lst[i]
+
+				one_param = []
+				count_paren = 0
+				while i < len(lst):
+					p2, v2 = lst[i]
+
+					del lst[i]
+					if p2 == OP and count_paren == 0:
+						if v2 == '(':
+							one_param.append((p2, v2))
+							count_paren += 1
+						elif v2 == ')':
+							if one_param: args.append(one_param)
+							break
+						elif v2 == ',':
+							if not one_param: raise PreprocError("empty param in funcall %s" % p)
+							args.append(one_param)
+							one_param = []
+						else:
+							one_param.append((p2, v2))
+					else:
+						one_param.append((p2, v2))
+						if   v2 == '(': count_paren += 1
+						elif v2 == ')': count_paren -= 1
+				else:
+					raise PreprocError('malformed macro')
+
+				# substitute the arguments within the define expression
+				accu = []
+				arg_table = macro_def[0]
+				j = 0
+				while j < len(to_add):
+					(p2, v2) = to_add[j]
+
+					if p2 == OP and v2 == '#':
+						# stringize is for arguments only
+						if j+1 < len(to_add) and to_add[j+1][0] == IDENT and to_add[j+1][1] in arg_table:
+							toks = args[arg_table[to_add[j+1][1]]]
+							accu.append((STR, stringize(toks)))
+							j += 1
+						else:
+							accu.append((p2, v2))
+					elif p2 == OP and v2 == '##':
+						# token pasting, how can man invent such a complicated system?
+						if accu and j+1 < len(to_add):
+							# we have at least two tokens
+
+							t1 = accu[-1]
+
+							if to_add[j+1][0] == IDENT and to_add[j+1][1] in arg_table:
+								toks = args[arg_table[to_add[j+1][1]]]
+
+								if toks:
+									accu[-1] = paste_tokens(t1, toks[0]) #(IDENT, accu[-1][1] + toks[0][1])
+									accu.extend(toks[1:])
+								else:
+									# error, case "a##"
+									accu.append((p2, v2))
+									accu.extend(toks)
+							elif to_add[j+1][0] == IDENT and to_add[j+1][1] == '__VA_ARGS__':
+								# TODO not sure
+								# first collect the tokens
+								va_toks = []
+								st = len(macro_def[0])
+								pt = len(args)
+								for x in args[pt-st+1:]:
+									va_toks.extend(x)
+									va_toks.append((OP, ','))
+								if va_toks: va_toks.pop() # extra comma
+								if len(accu)>1:
+									(p3, v3) = accu[-1]
+									(p4, v4) = accu[-2]
+									if v3 == '##':
+										# remove the token paste
+										accu.pop()
+										if v4 == ',' and pt < st:
+											# remove the comma
+											accu.pop()
+								accu += va_toks
+							else:
+								accu[-1] = paste_tokens(t1, to_add[j+1])
+
+							j += 1
+						else:
+							# Invalid paste, case    "##a" or "b##"
+							accu.append((p2, v2))
+
+					elif p2 == IDENT and v2 in arg_table:
+						toks = args[arg_table[v2]]
+						reduce_tokens(toks, defs, ban+[v])
+						accu.extend(toks)
+					else:
+						accu.append((p2, v2))
+
+					j += 1
+
+
+				reduce_tokens(accu, defs, ban+[v])
+
+				for x in range(len(accu)-1, -1, -1):
+					lst.insert(i, accu[x])
+
+		i += 1
+
+
+def eval_macro(lst, defs):
+	"""
+	Reduce the tokens by :py:func:`waflib.Tools.c_preproc.reduce_tokens` and try to return a 0/1 result by :py:func:`waflib.Tools.c_preproc.reduce_eval`.
+
+	:param lst: list of tokens
+	:type lst: list of tuple(token, value)
+	:param defs: macro definitions
+	:type defs: dict
+	:rtype: int
+	"""
+	reduce_tokens(lst, defs, [])
+	if not lst: raise PreprocError("missing tokens to evaluate")
+	(p, v) = reduce_eval(lst)
+	return int(v) != 0
+
+def extract_macro(txt):
+	"""
+	Process a macro definition of the form::
+		 #define f(x, y) x * y
+
+	into a function or a simple macro without arguments
+
+	:param txt: expression to exact a macro definition from
+	:type txt: string
+	:return: a tuple containing the name, the list of arguments and the replacement
+	:rtype: tuple(string, [list, list])
+	"""
+	t = tokenize(txt)
+	if re_fun.search(txt):
+		p, name = t[0]
+
+		p, v = t[1]
+		if p != OP: raise PreprocError("expected open parenthesis")
+
+		i = 1
+		pindex = 0
+		params = {}
+		prev = '('
+
+		while 1:
+			i += 1
+			p, v = t[i]
+
+			if prev == '(':
+				if p == IDENT:
+					params[v] = pindex
+					pindex += 1
+					prev = p
+				elif p == OP and v == ')':
+					break
+				else:
+					raise PreprocError("unexpected token (3)")
+			elif prev == IDENT:
+				if p == OP and v == ',':
+					prev = v
+				elif p == OP and v == ')':
+					break
+				else:
+					raise PreprocError("comma or ... expected")
+			elif prev == ',':
+				if p == IDENT:
+					params[v] = pindex
+					pindex += 1
+					prev = p
+				elif p == OP and v == '...':
+					raise PreprocError("not implemented (1)")
+				else:
+					raise PreprocError("comma or ... expected (2)")
+			elif prev == '...':
+				raise PreprocError("not implemented (2)")
+			else:
+				raise PreprocError("unexpected else")
+
+		#~ print (name, [params, t[i+1:]])
+		return (name, [params, t[i+1:]])
+	else:
+		(p, v) = t[0]
+		return (v, [[], t[1:]])
+
+re_include = re.compile('^\s*(<(?P<a>.*)>|"(?P<b>.*)")')
+def extract_include(txt, defs):
+	"""
+	Process a line in the form::
+
+		#include foo
+
+	:param txt: include line to process
+	:type txt: string
+	:param defs: macro definitions
+	:type defs: dict
+	:return: the file name
+	:rtype: string
+	"""
+	m = re_include.search(txt)
+	if m:
+		if m.group('a'): return '<', m.group('a')
+		if m.group('b'): return '"', m.group('b')
+
+	# perform preprocessing and look at the result, it must match an include
+	toks = tokenize(txt)
+	reduce_tokens(toks, defs, ['waf_include'])
+
+	if not toks:
+		raise PreprocError("could not parse include %s" % txt)
+
+	if len(toks) == 1:
+		if toks[0][0] == STR:
+			return '"', toks[0][1]
+	else:
+		if toks[0][1] == '<' and toks[-1][1] == '>':
+			return stringize(toks).lstrip('<').rstrip('>')
+
+	raise PreprocError("could not parse include %s." % txt)
+
+def parse_char(txt):
+	"""
+	Parse a c character
+
+	:param txt: character to parse
+	:type txt: string
+	:return: a character literal
+	:rtype: string
+	"""
+
+	if not txt: raise PreprocError("attempted to parse a null char")
+	if txt[0] != '\\':
+		return ord(txt)
+	c = txt[1]
+	if c == 'x':
+		if len(txt) == 4 and txt[3] in string.hexdigits: return int(txt[2:], 16)
+		return int(txt[2:], 16)
+	elif c.isdigit():
+		if c == '0' and len(txt)==2: return 0
+		for i in 3, 2, 1:
+			if len(txt) > i and txt[1:1+i].isdigit():
+				return (1+i, int(txt[1:1+i], 8))
+	else:
+		try: return chr_esc[c]
+		except KeyError: raise PreprocError("could not parse char literal '%s'" % txt)
+
+@Utils.run_once
+def tokenize(s):
+	"""
+	Convert a string into a list of tokens (shlex.split does not apply to c/c++/d)
+
+	:param s: input to tokenize
+	:type s: string
+	:return: a list of tokens
+	:rtype: list of tuple(token, value)
+	"""
+	# the same headers are read again and again - 10% improvement on preprocessing the samba headers
+	ret = []
+	for match in re_clexer.finditer(s):
+		m = match.group
+		for name in tok_types:
+			v = m(name)
+			if v:
+				if name == IDENT:
+					try: v = g_optrans[v]; name = OP
+					except KeyError:
+						# c++ specific
+						if v.lower() == "true":
+							v = 1
+							name = NUM
+						elif v.lower() == "false":
+							v = 0
+							name = NUM
+				elif name == NUM:
+					if m('oct'): v = int(v, 8)
+					elif m('hex'): v = int(m('hex'), 16)
+					elif m('n0'): v = m('n0')
+					else:
+						v = m('char')
+						if v: v = parse_char(v)
+						else: v = m('n2') or m('n4')
+				elif name == OP:
+					if v == '%:': v = '#'
+					elif v == '%:%:': v = '##'
+				elif name == STR:
+					# remove the quotes around the string
+					v = v[1:-1]
+				ret.append((name, v))
+				break
+	return ret
+
+@Utils.run_once
+def define_name(line):
+	"""
+	:param line: define line
+	:type line: string
+	:rtype: string
+	:return: the define name
+	"""
+	return re_mac.match(line).group(0)
+
+class c_parser(object):
+	"""
+	Used by :py:func:`waflib.Tools.c_preproc.scan` to parse c/h files. Note that by default,
+	only project headers are parsed.
+	"""
+	def __init__(self, nodepaths=None, defines=None):
+		self.lines = []
+		"""list of lines read"""
+
+		if defines is None:
+			self.defs  = {}
+		else:
+			self.defs  = dict(defines) # make a copy
+		self.state = []
+
+		self.count_files = 0
+		self.currentnode_stack = []
+
+		self.nodepaths = nodepaths or []
+		"""Include paths"""
+
+		self.nodes = []
+		"""List of :py:class:`waflib.Node.Node` found so far"""
+
+		self.names = []
+		"""List of file names that could not be matched by any file"""
+
+		self.curfile = ''
+		"""Current file"""
+
+		self.ban_includes = set([])
+		"""Includes that must not be read (#pragma once)"""
+
+	def cached_find_resource(self, node, filename):
+		"""
+		Find a file from the input directory
+
+		:param node: directory
+		:type node: :py:class:`waflib.Node.Node`
+		:param filename: header to find
+		:type filename: string
+		:return: the node if found, or None
+		:rtype: :py:class:`waflib.Node.Node`
+		"""
+		try:
+			nd = node.ctx.cache_nd
+		except:
+			nd = node.ctx.cache_nd = {}
+
+		tup = (node, filename)
+		try:
+			return nd[tup]
+		except KeyError:
+			ret = node.find_resource(filename)
+			if ret:
+				if getattr(ret, 'children', None):
+					ret = None
+				elif ret.is_child_of(node.ctx.bldnode):
+					tmp = node.ctx.srcnode.search(ret.path_from(node.ctx.bldnode))
+					if tmp and getattr(tmp, 'children', None):
+						ret = None
+			nd[tup] = ret
+			return ret
+
+	def tryfind(self, filename):
+		"""
+		Try to obtain a node from the filename based from the include paths. Will add
+		the node found to :py:attr:`waflib.Tools.c_preproc.c_parser.nodes` or the file name to
+		:py:attr:`waflib.Tools.c_preproc.c_parser.names` if no corresponding file is found. Called by
+		:py:attr:`waflib.Tools.c_preproc.c_parser.start`.
+
+		:param filename: header to find
+		:type filename: string
+		:return: the node if found
+		:rtype: :py:class:`waflib.Node.Node`
+		"""
+		self.curfile = filename
+
+		# for msvc it should be a for loop on the whole stack
+		found = self.cached_find_resource(self.currentnode_stack[-1], filename)
+
+		for n in self.nodepaths:
+			if found:
+				break
+			found = self.cached_find_resource(n, filename)
+
+		if found:
+			# TODO the duplicates do not increase the no-op build times too much, but they may be worth removing
+			self.nodes.append(found)
+			if filename[-4:] != '.moc':
+				self.addlines(found)
+		else:
+			if not filename in self.names:
+				self.names.append(filename)
+		return found
+
+	def addlines(self, node):
+		"""
+		Add the lines from a header in the list of preprocessor lines to parse
+
+		:param node: header
+		:type node: :py:class:`waflib.Node.Node`
+		"""
+
+		self.currentnode_stack.append(node.parent)
+		filepath = node.abspath()
+
+		self.count_files += 1
+		if self.count_files > recursion_limit:
+			# issue #812
+			raise PreprocError("recursion limit exceeded")
+		pc = self.parse_cache
+		debug('preproc: reading file %r', filepath)
+		try:
+			lns = pc[filepath]
+		except KeyError:
+			pass
+		else:
+			self.lines.extend(lns)
+			return
+
+		try:
+			lines = filter_comments(filepath)
+			lines.append((POPFILE, ''))
+			lines.reverse()
+			pc[filepath] = lines # cache the lines filtered
+			self.lines.extend(lines)
+		except IOError:
+			raise PreprocError("could not read the file %s" % filepath)
+		except Exception:
+			if Logs.verbose > 0:
+				error("parsing %s failed" % filepath)
+				traceback.print_exc()
+
+	def start(self, node, env):
+		"""
+		Preprocess a source file to obtain the dependencies, which are accumulated to :py:attr:`waflib.Tools.c_preproc.c_parser.nodes`
+		and :py:attr:`waflib.Tools.c_preproc.c_parser.names`.
+
+		:param node: source file
+		:type node: :py:class:`waflib.Node.Node`
+		:param env: config set containing additional defines to take into account
+		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
+		"""
+
+		debug('preproc: scanning %s (in %s)', node.name, node.parent.name)
+
+		bld = node.ctx
+		try:
+			self.parse_cache = bld.parse_cache
+		except AttributeError:
+			bld.parse_cache = {}
+			self.parse_cache = bld.parse_cache
+
+		self.addlines(node)
+
+		# macros may be defined on the command-line, so they must be parsed as if they were part of the file
+		if env['DEFINES']:
+			try:
+				lst = ['%s %s' % (x[0], trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in env['DEFINES']]]
+				lst.reverse()
+				self.lines.extend([('define', x) for x in lst])
+			except AttributeError:
+				# if the defines are invalid the compiler will tell the user
+				pass
+
+		while self.lines:
+			(token, line) = self.lines.pop()
+			if token == POPFILE:
+				self.count_files -= 1
+				self.currentnode_stack.pop()
+				continue
+
+			try:
+				ve = Logs.verbose
+				if ve: debug('preproc: line is %s - %s state is %s', token, line, self.state)
+				state = self.state
+
+				# make certain we define the state if we are about to enter in an if block
+				if token[:2] == 'if':
+					state.append(undefined)
+				elif token == 'endif':
+					state.pop()
+
+				# skip lines when in a dead 'if' branch, wait for the endif
+				if token[0] != 'e':
+					if skipped in self.state or ignored in self.state:
+						continue
+
+				if token == 'if':
+					ret = eval_macro(tokenize(line), self.defs)
+					if ret: state[-1] = accepted
+					else: state[-1] = ignored
+				elif token == 'ifdef':
+					m = re_mac.match(line)
+					if m and m.group(0) in self.defs: state[-1] = accepted
+					else: state[-1] = ignored
+				elif token == 'ifndef':
+					m = re_mac.match(line)
+					if m and m.group(0) in self.defs: state[-1] = ignored
+					else: state[-1] = accepted
+				elif token == 'include' or token == 'import':
+					(kind, inc) = extract_include(line, self.defs)
+					if inc in self.ban_includes:
+						continue
+					if token == 'import': self.ban_includes.add(inc)
+					if ve: debug('preproc: include found %s    (%s) ', inc, kind)
+					if kind == '"' or not strict_quotes:
+						self.tryfind(inc)
+				elif token == 'elif':
+					if state[-1] == accepted:
+						state[-1] = skipped
+					elif state[-1] == ignored:
+						if eval_macro(tokenize(line), self.defs):
+							state[-1] = accepted
+				elif token == 'else':
+					if state[-1] == accepted: state[-1] = skipped
+					elif state[-1] == ignored: state[-1] = accepted
+				elif token == 'define':
+					try:
+						self.defs[define_name(line)] = line
+					except:
+						raise PreprocError("Invalid define line %s" % line)
+				elif token == 'undef':
+					m = re_mac.match(line)
+					if m and m.group(0) in self.defs:
+						self.defs.__delitem__(m.group(0))
+						#print "undef %s" % name
+				elif token == 'pragma':
+					if re_pragma_once.match(line.lower()):
+						self.ban_includes.add(self.curfile)
+			except Exception as e:
+				if Logs.verbose:
+					debug('preproc: line parsing failed (%s): %s %s', e, line, Utils.ex_stack())
+
+def scan(task):
+	"""
+	Get the dependencies using a c/c++ preprocessor, this is required for finding dependencies of the kind::
+
+		#include some_macro()
+
+	This function is bound as a task method on :py:class:`waflib.Tools.c.c` and :py:class:`waflib.Tools.cxx.cxx` for example
+	"""
+
+	global go_absolute
+
+	try:
+		incn = task.generator.includes_nodes
+	except AttributeError:
+		raise Errors.WafError('%r is missing a feature such as "c", "cxx" or "includes": ' % task.generator)
+
+	if go_absolute:
+		nodepaths = incn + standard_includes
+	else:
+		nodepaths = [x for x in incn if x.is_child_of(x.ctx.srcnode) or x.is_child_of(x.ctx.bldnode)]
+
+	tmp = c_parser(nodepaths)
+	tmp.start(task.inputs[0], task.env)
+	if Logs.verbose:
+		debug('deps: deps for %r: %r; unresolved %r' % (task.inputs, tmp.nodes, tmp.names))
+	return (tmp.nodes, tmp.names)
+
diff --git a/waflib/Tools/c_tests.py b/waflib/Tools/c_tests.py
new file mode 100644
index 0000000..a2a1d08
--- /dev/null
+++ b/waflib/Tools/c_tests.py
@@ -0,0 +1,218 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2010 (ita)
+
+"""
+Various configuration tests.
+"""
+
+from waflib import Task
+from waflib.Configure import conf
+from waflib.TaskGen import feature, before_method, after_method
+import sys
+
+LIB_CODE = '''
+#ifdef _MSC_VER
+#define testEXPORT __declspec(dllexport)
+#else
+#define testEXPORT
+#endif
+testEXPORT int lib_func(void) { return 9; }
+'''
+
+MAIN_CODE = '''
+#ifdef _MSC_VER
+#define testEXPORT __declspec(dllimport)
+#else
+#define testEXPORT
+#endif
+testEXPORT int lib_func(void);
+int main(void) {return !(lib_func() == 9);}
+'''
+
+ feature('link_lib_test')
+ before_method('process_source')
+def link_lib_test_fun(self):
+	"""
+	The configuration test :py:func:`waflib.Tools.ccroot.run_c_code` declares a unique task generator,
+	so we need to create other task generators from here to check if the linker is able to link libraries.
+	"""
+	def write_test_file(task):
+		task.outputs[0].write(task.generator.code)
+
+	rpath = []
+	if getattr(self, 'add_rpath', False):
+		rpath = [self.bld.path.get_bld().abspath()]
+
+	mode = self.mode
+	m = '%s %s' % (mode, mode)
+	ex = self.test_exec and 'test_exec' or ''
+	bld = self.bld
+	bld(rule=write_test_file, target='test.' + mode, code=LIB_CODE)
+	bld(rule=write_test_file, target='main.' + mode, code=MAIN_CODE)
+	bld(features='%sshlib' % m, source='test.' + mode, target='test')
+	bld(features='%sprogram %s' % (m, ex), source='main.' + mode, target='app', use='test', rpath=rpath)
+
+ conf
+def check_library(self, mode=None, test_exec=True):
+	"""
+	Check if libraries can be linked with the current linker. Uses :py:func:`waflib.Tools.c_tests.link_lib_test_fun`.
+
+	:param mode: c or cxx or d
+	:type mode: string
+	"""
+	if not mode:
+		mode = 'c'
+		if self.env.CXX:
+			mode = 'cxx'
+	self.check(
+		compile_filename = [],
+		features = 'link_lib_test',
+		msg = 'Checking for libraries',
+		mode = mode,
+		test_exec = test_exec,
+		)
+
+########################################################################################
+
+INLINE_CODE = '''
+typedef int foo_t;
+static %s foo_t static_foo () {return 0; }
+%s foo_t foo () {
+	return 0;
+}
+'''
+INLINE_VALUES = ['inline', '__inline__', '__inline']
+
+ conf
+def check_inline(self, **kw):
+	"""
+	Check for the right value for inline macro.
+	Define INLINE_MACRO to 1 if the define is found.
+	If the inline macro is not 'inline', add a define to the ``config.h`` (#define inline __inline__)
+
+	:param define_name: define INLINE_MACRO by default to 1 if the macro is defined
+	:type define_name: string
+	:param features: by default *c* or *cxx* depending on the compiler present
+	:type features: list of string
+	"""
+
+	self.start_msg('Checking for inline')
+
+	if not 'define_name' in kw:
+		kw['define_name'] = 'INLINE_MACRO'
+	if not 'features' in kw:
+		if self.env.CXX:
+			kw['features'] = ['cxx']
+		else:
+			kw['features'] = ['c']
+
+	for x in INLINE_VALUES:
+		kw['fragment'] = INLINE_CODE % (x, x)
+
+		try:
+			self.check(**kw)
+		except self.errors.ConfigurationError:
+			continue
+		else:
+			self.end_msg(x)
+			if x != 'inline':
+				self.define('inline', x, quote=False)
+			return x
+	self.fatal('could not use inline functions')
+
+########################################################################################
+
+LARGE_FRAGMENT = '#include <unistd.h>\nint main() { return !(sizeof(off_t) >= 8); }\n'
+
+ conf
+def check_large_file(self, **kw):
+	"""
+	Check for large file support and define the macro HAVE_LARGEFILE
+	The test is skipped on win32 systems (DEST_BINFMT == pe).
+
+	:param define_name: define to set, by default *HAVE_LARGEFILE*
+	:type define_name: string
+	:param execute: execute the test (yes by default)
+	:type execute: bool
+	"""
+
+	if not 'define_name' in kw:
+		kw['define_name'] = 'HAVE_LARGEFILE'
+	if not 'execute' in kw:
+		kw['execute'] = True
+
+	if not 'features' in kw:
+		if self.env.CXX:
+			kw['features'] = ['cxx', 'cxxprogram']
+		else:
+			kw['features'] = ['c', 'cprogram']
+
+	kw['fragment'] = LARGE_FRAGMENT
+
+	kw['msg'] = 'Checking for large file support'
+	ret = True
+	try:
+		if self.env.DEST_BINFMT != 'pe':
+			ret = self.check(**kw)
+	except self.errors.ConfigurationError:
+		pass
+	else:
+		if ret:
+			return True
+
+	kw['msg'] = 'Checking for -D_FILE_OFFSET_BITS=64'
+	kw['defines'] = ['_FILE_OFFSET_BITS=64']
+	try:
+		ret = self.check(**kw)
+	except self.errors.ConfigurationError:
+		pass
+	else:
+		self.define('_FILE_OFFSET_BITS', 64)
+		return ret
+
+	self.fatal('There is no support for large files')
+
+########################################################################################
+
+ENDIAN_FRAGMENT = '''
+short int ascii_mm[] = { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 };
+short int ascii_ii[] = { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 };
+int use_ascii (int i) {
+	return ascii_mm[i] + ascii_ii[i];
+}
+short int ebcdic_ii[] = { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 };
+short int ebcdic_mm[] = { 0xC2C9, 0xC785, 0x95C4, 0x8981, 0x95E2, 0xA8E2, 0 };
+int use_ebcdic (int i) {
+	return ebcdic_mm[i] + ebcdic_ii[i];
+}
+extern int foo;
+'''
+
+class grep_for_endianness(Task.Task):
+	color = 'PINK'
+	def run(self):
+		txt = self.inputs[0].read(flags='rb').decode('iso8859-1')
+		if txt.find('LiTTleEnDian') > -1:
+			self.generator.tmp.append('little')
+		elif txt.find('BIGenDianSyS') > -1:
+			self.generator.tmp.append('big')
+		else:
+			return -1
+
+ feature('grep_for_endianness')
+ after_method('process_source')
+def grep_for_endianness_fun(self):
+	self.create_task('grep_for_endianness', self.compiled_tasks[0].outputs[0])
+
+ conf
+def check_endianness(self):
+	"""
+	Execute a configuration test to determine the endianness
+	"""
+	tmp = []
+	def check_msg(self):
+		return tmp[0]
+	self.check(fragment=ENDIAN_FRAGMENT, features='c grep_for_endianness', msg="Checking for endianness", define='ENDIANNESS', tmp=tmp, okmsg=check_msg)
+	return tmp[0]
+
diff --git a/waflib/Tools/ccroot.py b/waflib/Tools/ccroot.py
new file mode 100644
index 0000000..32bdc36
--- /dev/null
+++ b/waflib/Tools/ccroot.py
@@ -0,0 +1,608 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2010 (ita)
+
+"""
+Classes and methods shared by tools providing support for C-like language such
+as C/C++/D/Assembly/Go (this support module is almost never used alone).
+"""
+
+import os, sys, re
+from waflib import TaskGen, Task, Utils, Logs, Build, Options, Node, Errors
+from waflib.Logs import error, debug, warn
+from waflib.TaskGen import after_method, before_method, feature, taskgen_method, extension
+from waflib.Tools import c_aliases, c_preproc, c_config, c_osx, c_tests
+from waflib.Configure import conf
+
+USELIB_VARS = Utils.defaultdict(set)
+"""
+Mapping for features to :py:class:`waflib.ConfigSet.ConfigSet` variables. See :py:func:`waflib.Tools.ccroot.propagate_uselib_vars`.
+"""
+
+USELIB_VARS['c']   = set(['INCLUDES', 'FRAMEWORKPATH', 'DEFINES', 'CPPFLAGS', 'CCDEPS', 'CFLAGS', 'ARCH'])
+USELIB_VARS['cxx'] = set(['INCLUDES', 'FRAMEWORKPATH', 'DEFINES', 'CPPFLAGS', 'CXXDEPS', 'CXXFLAGS', 'ARCH'])
+USELIB_VARS['d']   = set(['INCLUDES', 'DFLAGS'])
+
+USELIB_VARS['cprogram'] = USELIB_VARS['cxxprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH'])
+USELIB_VARS['cshlib']   = USELIB_VARS['cxxshlib']   = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH'])
+USELIB_VARS['cstlib']   = USELIB_VARS['cxxstlib']   = set(['ARFLAGS', 'LINKDEPS'])
+
+USELIB_VARS['dprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
+USELIB_VARS['dshlib']   = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
+USELIB_VARS['dstlib']   = set(['ARFLAGS', 'LINKDEPS'])
+
+USELIB_VARS['go'] = set(['GOCFLAGS'])
+USELIB_VARS['goprogram'] = set(['GOLFLAGS'])
+
+USELIB_VARS['asm'] = set(['ASFLAGS'])
+
+# =================================================================================================
+
+ taskgen_method
+def create_compiled_task(self, name, node):
+	"""
+	Create the compilation task: c, cxx, asm, etc. The output node is created automatically (object file with a typical **.o** extension).
+	The task is appended to the list *compiled_tasks* which is then used by :py:func:`waflib.Tools.ccroot.apply_link`
+
+	:param name: name of the task class
+	:type name: string
+	:param node: the file to compile
+	:type node: :py:class:`waflib.Node.Node`
+	:return: The task created
+	:rtype: :py:class:`waflib.Task.Task`
+	"""
+	out = '%s.%d.o' % (node.name, self.idx)
+	task = self.create_task(name, node, node.parent.find_or_declare(out))
+	try:
+		self.compiled_tasks.append(task)
+	except AttributeError:
+		self.compiled_tasks = [task]
+	return task
+
+ taskgen_method
+def to_incnodes(self, inlst):
+	"""
+	Task generator method provided to convert a list of string/nodes into a list of includes folders.
+
+	The paths are assumed to be relative to the task generator path, except if they begin by **#**
+	in which case they are searched from the top-level directory (``bld.srcnode``).
+	The folders are simply assumed to be existing.
+
+	The node objects in the list are returned in the output list. The strings are converted
+	into node objects if possible. The node is searched from the source directory, and if a match is found,
+	the equivalent build directory is created and added to the returned list too. When a folder cannot be found, it is ignored.
+
+	:param inlst: list of folders
+	:type inlst: space-delimited string or a list of string/nodes
+	:rtype: list of :py:class:`waflib.Node.Node`
+	:return: list of include folders as nodes
+	"""
+	lst = []
+	seen = set([])
+	for x in self.to_list(inlst):
+		if x in seen or not x:
+			continue
+		seen.add(x)
+
+		if isinstance(x, Node.Node):
+			lst.append(x)
+		else:
+			if os.path.isabs(x):
+				lst.append(self.bld.root.make_node(x) or x)
+			else:
+				if x[0] == '#':
+					p = self.bld.bldnode.make_node(x[1:])
+					v = self.bld.srcnode.make_node(x[1:])
+				else:
+					p = self.path.get_bld().make_node(x)
+					v = self.path.make_node(x)
+				if p.is_child_of(self.bld.bldnode):
+					p.mkdir()
+				lst.append(p)
+				lst.append(v)
+	return lst
+
+ feature('c', 'cxx', 'd', 'go', 'asm', 'fc', 'includes')
+ after_method('propagate_uselib_vars', 'process_source')
+def apply_incpaths(self):
+	"""
+	Task generator method that processes the attribute *includes*::
+
+		tg = bld(features='includes', includes='.')
+
+	The folders only need to be relative to the current directory, the equivalent build directory is
+	added automatically (for headers created in the build directory). This enables using a build directory
+	or not (``top == out``).
+
+	This method will add a list of nodes read by :py:func:`waflib.Tools.ccroot.to_incnodes` in ``tg.env.INCPATHS``,
+	and the list of include paths in ``tg.env.INCLUDES``.
+	"""
+
+	lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env['INCLUDES'])
+	self.includes_nodes = lst
+	self.env['INCPATHS'] = [x.abspath() for x in lst]
+
+class link_task(Task.Task):
+	"""
+	Base class for all link tasks. A task generator is supposed to have at most one link task bound in the attribute *link_task*. See :py:func:`waflib.Tools.ccroot.apply_link`.
+
+	.. inheritance-diagram:: waflib.Tools.ccroot.stlink_task waflib.Tools.c.cprogram waflib.Tools.c.cshlib waflib.Tools.cxx.cxxstlib  waflib.Tools.cxx.cxxprogram waflib.Tools.cxx.cxxshlib waflib.Tools.d.dprogram waflib.Tools.d.dshlib waflib.Tools.d.dstlib waflib.Tools.ccroot.fake_shlib waflib.Tools.ccroot.fake_stlib waflib.Tools.asm.asmprogram waflib.Tools.asm.asmshlib waflib.Tools.asm.asmstlib
+	"""
+	color   = 'YELLOW'
+
+	inst_to = None
+	"""Default installation path for the link task outputs, or None to disable"""
+
+	chmod   = Utils.O644
+	"""Default installation mode for the link task outputs"""
+
+	def add_target(self, target):
+		"""
+		Process the *target* attribute to add the platform-specific prefix/suffix such as *.so* or *.exe*.
+		The settings are retrieved from ``env.clsname_PATTERN``
+		"""
+		if isinstance(target, str):
+			pattern = self.env[self.__class__.__name__ + '_PATTERN']
+			if not pattern:
+				pattern = '%s'
+			folder, name = os.path.split(target)
+
+			if self.__class__.__name__.find('shlib') > 0:
+				if self.env.DEST_BINFMT == 'pe' and getattr(self.generator, 'vnum', None):
+					# include the version in the dll file name,
+					# the import lib file name stays unversioned.
+					name = name + '-' + self.generator.vnum.split('.')[0]
+
+			tmp = folder + os.sep + pattern % name
+			target = self.generator.path.find_or_declare(tmp)
+		self.set_outputs(target)
+
+class stlink_task(link_task):
+	"""
+	Base for static link tasks, which use *ar* most of the time.
+	The target is always removed before being written.
+	"""
+	run_str = '${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'
+
+def rm_tgt(cls):
+	old = cls.run
+	def wrap(self):
+		try: os.remove(self.outputs[0].abspath())
+		except OSError: pass
+		return old(self)
+	setattr(cls, 'run', wrap)
+rm_tgt(stlink_task)
+
+ feature('c', 'cxx', 'd', 'go', 'fc', 'asm')
+ after_method('process_source')
+def apply_link(self):
+	"""
+	Collect the tasks stored in ``compiled_tasks`` (created by :py:func:`waflib.Tools.ccroot.create_compiled_task`), and
+	use the outputs for a new instance of :py:class:`waflib.Tools.ccroot.link_task`. The class to use is the first link task
+	matching a name from the attribute *features*, for example::
+
+			def build(bld):
+				tg = bld(features='cxx cxxprogram cprogram', source='main.c', target='app')
+
+	will create the task ``tg.link_task`` as a new instance of :py:class:`waflib.Tools.cxx.cxxprogram`
+	"""
+
+	for x in self.features:
+		if x == 'cprogram' and 'cxx' in self.features: # limited compat
+			x = 'cxxprogram'
+		elif x == 'cshlib' and 'cxx' in self.features:
+			x = 'cxxshlib'
+
+		if x in Task.classes:
+			if issubclass(Task.classes[x], link_task):
+				link = x
+				break
+	else:
+		return
+
+	objs = [t.outputs[0] for t in getattr(self, 'compiled_tasks', [])]
+	self.link_task = self.create_task(link, objs)
+	self.link_task.add_target(self.target)
+
+	# remember that the install paths are given by the task generators
+	# we need to define install_task even during the build phase because others might need the installation path
+	try:
+		inst_to = self.install_path
+	except AttributeError:
+		inst_to = self.link_task.__class__.inst_to
+	if inst_to:
+		# install a copy of the node list we have at this moment (implib not added)
+		self.install_task = self.bld.install_files(inst_to, self.link_task.outputs[:], env=self.env, chmod=self.link_task.chmod)
+
+ taskgen_method
+def use_rec(self, name, **kw):
+	"""
+	Processes the ``use`` keyword recursively. This method is kind of private and only meant to be used from ``process_use``
+	"""
+
+	if name in self.tmp_use_not or name in self.tmp_use_seen:
+		return
+
+	try:
+		y = self.bld.get_tgen_by_name(name)
+	except Errors.WafError:
+		self.uselib.append(name)
+		self.tmp_use_not.add(name)
+		return
+
+	self.tmp_use_seen.append(name)
+	y.post()
+
+	# bind temporary attributes on the task generator
+	y.tmp_use_objects = objects = kw.get('objects', True)
+	y.tmp_use_stlib   = stlib   = kw.get('stlib', True)
+	try:
+		link_task = y.link_task
+	except AttributeError:
+		y.tmp_use_var = ''
+	else:
+		objects = False
+		if not isinstance(y.link_task, stlink_task):
+			stlib = False
+			y.tmp_use_var = 'LIB'
+		else:
+			y.tmp_use_var = 'STLIB'
+
+	p = self.tmp_use_prec
+	for x in self.to_list(getattr(y, 'use', [])):
+		try:
+			p[x].append(name)
+		except:
+			p[x] = [name]
+		self.use_rec(x, objects=objects, stlib=stlib)
+
+ feature('c', 'cxx', 'd', 'use', 'fc')
+ before_method('apply_incpaths', 'propagate_uselib_vars')
+ after_method('apply_link', 'process_source')
+def process_use(self):
+	"""
+	Process the ``use`` attribute which contains a list of task generator names::
+
+		def build(bld):
+			bld.shlib(source='a.c', target='lib1')
+			bld.program(source='main.c', target='app', use='lib1')
+
+	See :py:func:`waflib.Tools.ccroot.use_rec`.
+	"""
+
+	use_not = self.tmp_use_not = set([])
+	use_seen = self.tmp_use_seen = [] # we would like an ordered set
+	use_prec = self.tmp_use_prec = {}
+	self.uselib = self.to_list(getattr(self, 'uselib', []))
+	self.includes = self.to_list(getattr(self, 'includes', []))
+	names = self.to_list(getattr(self, 'use', []))
+
+	for x in names:
+		self.use_rec(x)
+
+	for x in use_not:
+		if x in use_prec:
+			del use_prec[x]
+
+	# topological sort
+	out = []
+	tmp = []
+	for x in self.tmp_use_seen:
+		for k in use_prec.values():
+			if x in k:
+				break
+		else:
+			tmp.append(x)
+
+	while tmp:
+		e = tmp.pop()
+		out.append(e)
+		try:
+			nlst = use_prec[e]
+		except KeyError:
+			pass
+		else:
+			del use_prec[e]
+			for x in nlst:
+				for y in use_prec:
+					if x in use_prec[y]:
+						break
+				else:
+					tmp.append(x)
+	if use_prec:
+		raise Errors.WafError('Cycle detected in the use processing %r' % use_prec)
+	out.reverse()
+
+	link_task = getattr(self, 'link_task', None)
+	for x in out:
+		y = self.bld.get_tgen_by_name(x)
+		var = y.tmp_use_var
+		if var and link_task:
+			if var == 'LIB' or y.tmp_use_stlib:
+				self.env.append_value(var, [y.target[y.target.rfind(os.sep) + 1:]])
+				self.link_task.dep_nodes.extend(y.link_task.outputs)
+				tmp_path = y.link_task.outputs[0].parent.path_from(self.bld.bldnode)
+				self.env.append_value(var + 'PATH', [tmp_path])
+		else:
+			if y.tmp_use_objects:
+				self.add_objects_from_tgen(y)
+
+		if getattr(y, 'export_includes', None):
+			self.includes.extend(y.to_incnodes(y.export_includes))
+
+	# and finally, add the uselib variables (no recursion needed)
+	for x in names:
+		try:
+			y = self.bld.get_tgen_by_name(x)
+		except:
+			if not self.env['STLIB_' + x] and not x in self.uselib:
+				self.uselib.append(x)
+		else:
+			for k in self.to_list(getattr(y, 'uselib', [])):
+				if not self.env['STLIB_' + k] and not k in self.uselib:
+					self.uselib.append(k)
+
+ taskgen_method
+def add_objects_from_tgen(self, tg):
+	# Not public yet, wait for waf 1.6.7 at least - the purpose of this is to add pdb files to the compiled
+	# tasks but not to the link tasks (to avoid errors)
+	try:
+		link_task = self.link_task
+	except AttributeError:
+		pass
+	else:
+		for tsk in getattr(tg, 'compiled_tasks', []):
+			for x in tsk.outputs:
+				if x.name.endswith('.o') or x.name.endswith('.obj'):
+					link_task.inputs.append(x)
+
+ taskgen_method
+def get_uselib_vars(self):
+	"""
+	:return: the *uselib* variables associated to the *features* attribute (see :py:attr:`waflib.Tools.ccroot.USELIB_VARS`)
+	:rtype: list of string
+	"""
+	_vars = set([])
+	for x in self.features:
+		if x in USELIB_VARS:
+			_vars |= USELIB_VARS[x]
+	return _vars
+
+ feature('c', 'cxx', 'd', 'fc', 'javac', 'cs', 'uselib')
+ after_method('process_use')
+def propagate_uselib_vars(self):
+	"""
+	Process uselib variables for adding flags. For example, the following target::
+
+		def build(bld):
+			bld.env.AFLAGS_aaa = ['bar']
+			from waflib.Tools.ccroot import USELIB_VARS
+			USELIB_VARS['aaa'] = set('AFLAGS')
+
+			tg = bld(features='aaa', aflags='test')
+
+	The *aflags* attribute will be processed and this method will set::
+
+			tg.env.AFLAGS = ['bar', 'test']
+	"""
+	_vars = self.get_uselib_vars()
+	env = self.env
+
+	for x in _vars:
+		y = x.lower()
+		env.append_unique(x, self.to_list(getattr(self, y, [])))
+
+	for x in self.features:
+		for var in _vars:
+			compvar = '%s_%s' % (var, x)
+			env.append_value(var, env[compvar])
+
+	for x in self.to_list(getattr(self, 'uselib', [])):
+		for v in _vars:
+			env.append_value(v, env[v + '_' + x])
+
+# ============ the code above must not know anything about import libs ==========
+
+ feature('cshlib', 'cxxshlib', 'fcshlib')
+ after_method('apply_link')
+def apply_implib(self):
+	"""
+	Handle dlls and their import libs on Windows-like systems.
+
+	A ``.dll.a`` file called *import library* is generated.
+	It must be installed as it is required for linking the library.
+	"""
+	if not self.env.DEST_BINFMT == 'pe':
+		return
+
+	dll = self.link_task.outputs[0]
+	if isinstance(self.target, Node.Node):
+		name = self.target.name
+	else:
+		name = os.path.split(self.target)[1]
+	implib = self.env['implib_PATTERN'] % name
+	implib = dll.parent.find_or_declare(implib)
+	self.env.append_value('LINKFLAGS', self.env['IMPLIB_ST'] % implib.bldpath())
+	self.link_task.outputs.append(implib)
+
+	if getattr(self, 'defs', None) and self.env.DEST_BINFMT == 'pe':
+		node = self.path.find_resource(self.defs)
+		if not node:
+			raise Errors.WafError('invalid def file %r' % self.defs)
+		if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
+			self.env.append_value('LINKFLAGS', '/def:%s' % node.path_from(self.bld.bldnode))
+			self.link_task.dep_nodes.append(node)
+		else:
+			# gcc for windows takes a *.def file as an input without any special flag
+			self.link_task.inputs.append(node)
+
+	try:
+		inst_to = self.install_path
+	except AttributeError:
+		inst_to = self.link_task.__class__.inst_to
+	if not inst_to:
+		return
+
+	self.implib_install_task = self.bld.install_as('${PREFIX}/lib/%s' % implib.name, implib, self.env)
+
+# ============ the code above must not know anything about vnum processing on unix platforms =========
+
+ feature('cshlib', 'cxxshlib', 'dshlib', 'fcshlib', 'vnum')
+ after_method('apply_link')
+def apply_vnum(self):
+	"""
+	Enforce version numbering on shared libraries. The valid version numbers must have at most two dots::
+
+		def build(bld):
+			bld.shlib(source='a.c', target='foo', vnum='14.15.16')
+
+	In this example, ``libfoo.so`` is installed as ``libfoo.so.14.15.16``, and the following symbolic links are created:
+
+	* ``libfoo.so    → libfoo.so.14.15.16``
+	* ``libfoo.so.14 → libfoo.so.14.15.16``
+	"""
+	if not getattr(self, 'vnum', '') or os.name != 'posix' or self.env.DEST_BINFMT not in ('elf', 'mac-o'):
+		return
+
+	link = self.link_task
+	nums = self.vnum.split('.')
+	node = link.outputs[0]
+
+	libname = node.name
+	if libname.endswith('.dylib'):
+		name3 = libname.replace('.dylib', '.%s.dylib' % self.vnum)
+		name2 = libname.replace('.dylib', '.%s.dylib' % nums[0])
+	else:
+		name3 = libname + '.' + self.vnum
+		name2 = libname + '.' + nums[0]
+
+	# add the so name for the ld linker - to disable, just unset env.SONAME_ST
+	if self.env.SONAME_ST:
+		v = self.env.SONAME_ST % name2
+		self.env.append_value('LINKFLAGS', v.split())
+
+	# the following task is just to enable execution from the build dir :-/
+	tsk = self.create_task('vnum', node, [node.parent.find_or_declare(name2), node.parent.find_or_declare(name3)])
+
+	if getattr(self.bld, 'is_install', None):
+		self.install_task.hasrun = Task.SKIP_ME
+		bld = self.bld
+		path = self.install_task.dest
+		t1 = bld.install_as(path + os.sep + name3, node, env=self.env, chmod=self.link_task.chmod)
+		t2 = bld.symlink_as(path + os.sep + name2, name3)
+		t3 = bld.symlink_as(path + os.sep + libname, name3)
+		self.vnum_install_task = (t1, t2, t3)
+
+	if '-dynamiclib' in self.env['LINKFLAGS'] and getattr(self, 'install_task', None):
+		path = os.path.join(self.install_task.get_install_path(), self.link_task.outputs[0].name)
+		self.env.append_value('LINKFLAGS', ['-install_name', path])
+
+class vnum(Task.Task):
+	"""
+	Create the symbolic links for a versioned shared library. Instances are created by :py:func:`waflib.Tools.ccroot.apply_vnum`
+	"""
+	color = 'CYAN'
+	quiet = True
+	ext_in = ['.bin']
+	def run(self):
+		for x in self.outputs:
+			path = x.abspath()
+			try:
+				os.remove(path)
+			except OSError:
+				pass
+
+			try:
+				os.symlink(self.inputs[0].name, path)
+			except OSError:
+				return 1
+
+class fake_shlib(link_task):
+	"""
+	Task used for reading a system library and adding the dependency on it
+	"""
+	def runnable_status(self):
+		for t in self.run_after:
+			if not t.hasrun:
+				return Task.ASK_LATER
+
+		for x in self.outputs:
+			x.sig = Utils.h_file(x.abspath())
+		return Task.SKIP_ME
+
+class fake_stlib(stlink_task):
+	"""
+	Task used for reading a system library and adding the dependency on it
+	"""
+	def runnable_status(self):
+		for t in self.run_after:
+			if not t.hasrun:
+				return Task.ASK_LATER
+
+		for x in self.outputs:
+			x.sig = Utils.h_file(x.abspath())
+		return Task.SKIP_ME
+
+ conf
+def read_shlib(self, name, paths=[]):
+	"""
+	Read a system shared library, enabling its use as a local library. Will trigger a rebuild if the file changes::
+
+		def build(bld):
+			bld.read_shlib('m')
+			bld.program(source='main.c', use='m')
+	"""
+	return self(name=name, features='fake_lib', lib_paths=paths, lib_type='shlib')
+
+ conf
+def read_stlib(self, name, paths=[]):
+	"""
+	Read a system static library, enabling its use as a local library. Will trigger a rebuild if the file changes.
+	"""
+	return self(name=name, features='fake_lib', lib_paths=paths, lib_type='stlib')
+
+lib_patterns = {
+	'shlib' : ['lib%s.so', '%s.so', 'lib%s.dll', '%s.dll'],
+	'stlib' : ['lib%s.a', '%s.a', 'lib%s.dll', '%s.dll', 'lib%s.lib', '%s.lib'],
+}
+
+ feature('fake_lib')
+def process_lib(self):
+	"""
+	Find the location of a foreign library. Used by :py:class:`waflib.Tools.ccroot.read_shlib` and :py:class:`waflib.Tools.ccroot.read_stlib`.
+	"""
+	node = None
+
+	names = [x % self.name for x in lib_patterns[self.lib_type]]
+	for x in self.lib_paths + [self.path, '/usr/lib64', '/usr/lib', '/usr/local/lib64', '/usr/local/lib']:
+		if not isinstance(x, Node.Node):
+			x = self.bld.root.find_node(x) or self.path.find_node(x)
+			if not x:
+				continue
+
+		for y in names:
+			node = x.find_node(y)
+			if node:
+				node.sig = Utils.h_file(node.abspath())
+				break
+		else:
+			continue
+		break
+	else:
+		raise Errors.WafError('could not find library %r' % self.name)
+	self.link_task = self.create_task('fake_%s' % self.lib_type, [], [node])
+	self.target = self.name
+
+
+class fake_o(Task.Task):
+	def runnable_status(self):
+		return Task.SKIP_ME
+
+ extension('.o', '.obj')
+def add_those_o_files(self, node):
+	tsk = self.create_task('fake_o', [], node)
+	try:
+		self.compiled_tasks.append(tsk)
+	except AttributeError:
+		self.compiled_tasks = [tsk]
+
diff --git a/waflib/Tools/gnu_dirs.py b/waflib/Tools/gnu_dirs.py
new file mode 100644
index 0000000..c9fbe5c
--- /dev/null
+++ b/waflib/Tools/gnu_dirs.py
@@ -0,0 +1,128 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Ali Sabil, 2007
+
+"""
+Sets various standard variables such as INCLUDEDIR, SBINDIR and others. To use this module just call::
+
+	opt.load('gnu_dirs')
+
+and::
+
+	conf.load('gnu_dirs')
+
+Add options for the standard GNU directories, this tool will add the options
+found in autotools, and will update the environment with the following
+installation variables:
+
+============== ========================================= =======================
+Variable       Description                               Value
+============== ========================================= =======================
+PREFIX         architecture-independent files            /usr/local
+EXEC_PREFIX    architecture-dependent files              PREFIX
+BINDIR         user executables                          EXEC_PREFIX/bin
+SBINDIR        user executables                          EXEC_PREFIX/sbin
+LIBEXECDIR     program executables                       EXEC_PREFIX/libexec
+SYSCONFDIR     read-only single-machine data             PREFIX/etc
+SHAREDSTATEDIR modifiable architecture-independent data  PREFIX/com
+LOCALSTATEDIR  modifiable single-machine data            PREFIX/var
+LIBDIR         object code libraries                     EXEC_PREFIX/lib
+INCLUDEDIR     C header files                            PREFIX/include
+OLDINCLUDEDIR  C header files for non-gcc                /usr/include
+DATAROOTDIR    read-only arch.-independent data root     PREFIX/share
+DATADIR        read-only architecture-independent data   DATAROOTDIR
+INFODIR        info documentation                        DATAROOTDIR/info
+LOCALEDIR      locale-dependent data                     DATAROOTDIR/locale
+MANDIR         man documentation                         DATAROOTDIR/man
+DOCDIR         documentation root                        DATAROOTDIR/doc/APPNAME
+HTMLDIR        html documentation                        DOCDIR
+DVIDIR         dvi documentation                         DOCDIR
+PDFDIR         pdf documentation                         DOCDIR
+PSDIR          ps documentation                          DOCDIR
+============== ========================================= =======================
+"""
+
+import os
+from waflib import Utils, Options, Context
+
+_options = [x.split(', ') for x in '''
+bindir, user executables, ${EXEC_PREFIX}/bin
+sbindir, system admin executables, ${EXEC_PREFIX}/sbin
+libexecdir, program executables, ${EXEC_PREFIX}/libexec
+sysconfdir, read-only single-machine data, ${PREFIX}/etc
+sharedstatedir, modifiable architecture-independent data, ${PREFIX}/com
+localstatedir, modifiable single-machine data, ${PREFIX}/var
+libdir, object code libraries, ${EXEC_PREFIX}/lib
+includedir, C header files, ${PREFIX}/include
+oldincludedir, C header files for non-gcc, /usr/include
+datarootdir, read-only arch.-independent data root, ${PREFIX}/share
+datadir, read-only architecture-independent data, ${DATAROOTDIR}
+infodir, info documentation, ${DATAROOTDIR}/info
+localedir, locale-dependent data, ${DATAROOTDIR}/locale
+mandir, man documentation, ${DATAROOTDIR}/man
+docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE}
+htmldir, html documentation, ${DOCDIR}
+dvidir, dvi documentation, ${DOCDIR}
+pdfdir, pdf documentation, ${DOCDIR}
+psdir, ps documentation, ${DOCDIR}
+'''.split('\n') if x]
+
+def configure(conf):
+	"""
+	Read the command-line options to set lots of variables in *conf.env*. The variables
+	BINDIR and LIBDIR will be overwritten.
+	"""
+	def get_param(varname, default):
+		return getattr(Options.options, varname, '') or default
+
+	env = conf.env
+	conf.env.LIBDIR = conf.env.BINDIR = []
+	env['EXEC_PREFIX'] = get_param('EXEC_PREFIX', env['PREFIX'])
+	env['PACKAGE'] = getattr(Context.g_module, 'APPNAME', None) or env['PACKAGE']
+
+	complete = False
+	iter = 0
+	while not complete and iter < len(_options) + 1:
+		iter += 1
+		complete = True
+		for name, help, default in _options:
+			name = name.upper()
+			if not env[name]:
+				try:
+					env[name] = Utils.subst_vars(get_param(name, default).replace('/', os.sep), env)
+				except TypeError:
+					complete = False
+	if not complete:
+		lst = [name for name, _, _ in _options if not env[name.upper()]]
+		raise conf.errors.WafError('Variable substitution failure %r' % lst)
+
+def options(opt):
+	"""
+	Add lots of command-line options, for example::
+
+		--exec-prefix: EXEC_PREFIX
+	"""
+	inst_dir = opt.add_option_group('Installation directories',
+'By default, "waf install" will put the files in\
+ "/usr/local/bin", "/usr/local/lib" etc. An installation prefix other\
+ than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"')
+
+	for k in ('--prefix', '--destdir'):
+		option = opt.parser.get_option(k)
+		if option:
+			opt.parser.remove_option(k)
+			inst_dir.add_option(option)
+
+	inst_dir.add_option('--exec-prefix',
+		help = 'installation prefix [Default: ${PREFIX}]',
+		default = '',
+		dest = 'EXEC_PREFIX')
+
+	dirs_options = opt.add_option_group('Pre-defined installation directories', '')
+
+	for name, help, default in _options:
+		option_name = '--' + name
+		str_default = default
+		str_help = '%s [Default: %s]' % (help, str_default)
+		dirs_options.add_option(option_name, help=str_help, default='', dest=name.upper())
+
diff --git a/waflib/Tools/intltool.py b/waflib/Tools/intltool.py
new file mode 100644
index 0000000..207508d
--- /dev/null
+++ b/waflib/Tools/intltool.py
@@ -0,0 +1,176 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2010 (ita)
+
+"""
+Support for translation tools such as msgfmt and intltool
+
+Usage::
+
+	def configure(conf):
+		conf.load('gnu_dirs intltool')
+
+	def build(bld):
+		# process the .po files into .gmo files, and install them in LOCALEDIR
+		bld(features='intltool_po', appname='myapp', podir='po', install_path="${LOCALEDIR}")
+
+		# process an input file, substituting the translations from the po dir
+		bld(
+			features  = "intltool_in",
+			podir     = "../po",
+			flags     = ["-d", "-q", "-u", "-c"],
+			source    = 'kupfer.desktop.in',
+			install_path = "${DATADIR}/applications",
+		)
+
+Usage of the :py:mod:`waflib.Tools.gnu_dirs` is recommended, but not obligatory.
+"""
+
+import os, re
+from waflib import Configure, TaskGen, Task, Utils, Runner, Options, Build, Logs
+import waflib.Tools.ccroot
+from waflib.TaskGen import feature, before_method
+from waflib.Logs import error
+
+ before_method('process_source')
+ feature('intltool_in')
+def apply_intltool_in_f(self):
+	"""
+	Create tasks to translate files by intltool-merge::
+
+		def build(bld):
+			bld(
+				features  = "intltool_in",
+				podir     = "../po",
+				flags     = ["-d", "-q", "-u", "-c"],
+				source    = 'kupfer.desktop.in',
+				install_path = "${DATADIR}/applications",
+			)
+
+	:param podir: location of the .po files
+	:type podir: string
+	:param source: source files to process
+	:type source: list of string
+	:param flags: compilation flags ("-quc" by default)
+	:type flags: list of string
+	:param install_path: installation path
+	:type install_path: string
+	"""
+	try: self.meths.remove('process_source')
+	except ValueError: pass
+
+	if not self.env.LOCALEDIR:
+		self.env.LOCALEDIR = self.env.PREFIX + '/share/locale'
+
+	for i in self.to_list(self.source):
+		node = self.path.find_resource(i)
+
+		podir = getattr(self, 'podir', 'po')
+		podirnode = self.path.find_dir(podir)
+		if not podirnode:
+			error("could not find the podir %r" % podir)
+			continue
+
+		cache = getattr(self, 'intlcache', '.intlcache')
+		self.env['INTLCACHE'] = os.path.join(self.path.bldpath(), podir, cache)
+		self.env['INTLPODIR'] = podirnode.bldpath()
+		self.env['INTLFLAGS'] = getattr(self, 'flags', ['-q', '-u', '-c'])
+
+		task = self.create_task('intltool', node, node.change_ext(''))
+		inst = getattr(self, 'install_path', '${LOCALEDIR}')
+		if inst:
+			self.bld.install_files(inst, task.outputs)
+
+ feature('intltool_po')
+def apply_intltool_po(self):
+	"""
+	Create tasks to process po files::
+
+		def build(bld):
+			bld(features='intltool_po', appname='myapp', podir='po', install_path="${LOCALEDIR}")
+
+	The relevant task generator arguments are:
+
+	:param podir: directory of the .po files
+	:type podir: string
+	:param appname: name of the application
+	:type appname: string
+	:param install_path: installation directory
+	:type install_path: string
+
+	The file LINGUAS must be present in the directory pointed by *podir* and list the translation files to process.
+	"""
+	try: self.meths.remove('process_source')
+	except ValueError: pass
+
+	if not self.env.LOCALEDIR:
+		self.env.LOCALEDIR = self.env.PREFIX + '/share/locale'
+
+	appname = getattr(self, 'appname', 'set_your_app_name')
+	podir = getattr(self, 'podir', '')
+	inst = getattr(self, 'install_path', '${LOCALEDIR}')
+
+	linguas = self.path.find_node(os.path.join(podir, 'LINGUAS'))
+	if linguas:
+		# scan LINGUAS file for locales to process
+		file = open(linguas.abspath())
+		langs = []
+		for line in file.readlines():
+			# ignore lines containing comments
+			if not line.startswith('#'):
+				langs += line.split()
+		file.close()
+		re_linguas = re.compile('[-a-zA-Z_  ]+')
+		for lang in langs:
+			# Make sure that we only process lines which contain locales
+			if re_linguas.match(lang):
+				node = self.path.find_resource(os.path.join(podir, re_linguas.match(lang).group() + '.po'))
+				task = self.create_task('po', node, node.change_ext('.mo'))
+
+				if inst:
+					filename = task.outputs[0].name
+					(langname, ext) = os.path.splitext(filename)
+					inst_file = inst + os.sep + langname + os.sep + 'LC_MESSAGES' + os.sep + appname + '.mo'
+					self.bld.install_as(inst_file, task.outputs[0], chmod=getattr(self, 'chmod', Utils.O644), env=task.env)
+
+	else:
+		Logs.pprint('RED', "Error no LINGUAS file found in po directory")
+
+class po(Task.Task):
+	"""
+	Compile .po files into .gmo files with msgfmt
+	(MSGFMT is detected in :py:func:`configure` below)
+	"""
+	run_str = '${MSGFMT} -o ${TGT} ${SRC}'
+	color   = 'BLUE'
+
+class intltool(Task.Task):
+	"""
+	Let intltool-merge translate an input file
+	(INTLFLAGS/INTLCACHE/INTLPODIR are set per-generator by apply_intltool_in_f)
+	"""
+	run_str = '${INTLTOOL} ${INTLFLAGS} ${INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}'
+	color   = 'BLUE'
+
+def configure(conf):
+	"""
+	Detect the program *msgfmt* and set *conf.env.MSGFMT*.
+	Detect the program *intltool-merge* and set *conf.env.INTLTOOL*.
+	It is possible to set INTLTOOL in the environment, but it must not have spaces in it::
+
+		$ INTLTOOL="/path/to/the program/intltool" waf configure
+
+	If a C/C++ compiler is present, execute a compilation test to find the header *locale.h*.
+	"""
+	conf.find_program('msgfmt', var='MSGFMT')
+	conf.find_perl_program('intltool-merge', var='INTLTOOL')
+
+	prefix  = conf.env.PREFIX
+	datadir = conf.env.DATADIR
+	if not datadir:
+		datadir = os.path.join(prefix,'share')
+
+	# backslashes are doubled so the values survive being written into C defines
+	conf.define('LOCALEDIR', os.path.join(datadir, 'locale').replace('\\', '\\\\'))
+	conf.define('DATADIR', datadir.replace('\\', '\\\\'))
+
+	if conf.env.CC or conf.env.CXX:
+		conf.check(header_name='locale.h')
+
diff --git a/waflib/Tools/python.py b/waflib/Tools/python.py
new file mode 100644
index 0000000..8d7f4d5
--- /dev/null
+++ b/waflib/Tools/python.py
@@ -0,0 +1,524 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2007-2010 (ita)
+# Gustavo Carneiro (gjc), 2007
+
+"""
+Support for Python, detect the headers and libraries and provide
+*use* variables to link C/C++ programs against them::
+
+	def options(opt):
+		opt.load('compiler_c python')
+	def configure(conf):
+		conf.load('compiler_c python')
+		conf.check_python_version((2,4,2))
+		conf.check_python_headers()
+	def build(bld):
+		bld.program(features='pyembed', source='a.c', target='myprog')
+		bld.shlib(features='pyext', source='b.c', target='mylib')
+"""
+
+import os, sys
+from waflib import Utils, Options, Errors
+from waflib.Logs import debug, warn, info, error
+from waflib.TaskGen import extension, before_method, after_method, feature
+from waflib.Configure import conf
+
+FRAG = '''
+#include <Python.h>
+#ifdef __cplusplus
+extern "C" {
+#endif
+	void Py_Initialize(void);
+	void Py_Finalize(void);
+#ifdef __cplusplus
+}
+#endif
+int main()
+{
+   Py_Initialize();
+   Py_Finalize();
+   return 0;
+}
+'''
+"""
+Piece of C/C++ code used in :py:func:`waflib.Tools.python.check_python_headers`
+"""
+
+INST = '''
+import sys, py_compile
+py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3])
+'''
+"""
+Piece of Python code used in :py:func:`waflib.Tools.python.install_pyfile` for installing python files
+"""
+
+DISTUTILS_IMP = ['from distutils.sysconfig import get_config_var, get_python_lib']
+
+ extension('.py')
+def process_py(self, node):
+	"""
+	Add a callback using :py:func:`waflib.Tools.python.install_pyfile` to install a python file
+	"""
+	try:
+		if not self.bld.is_install:
+			return
+	except:
+		return
+
+	try:
+		if not self.install_path:
+			return
+	except AttributeError:
+		self.install_path = '${PYTHONDIR}'
+
+	# i wonder now why we wanted to do this after the build is over
+	# issue #901: people want to preserve the structure of installed files
+	def inst_py(ctx):
+		install_from = getattr(self, 'install_from', None)
+		if install_from:
+			install_from = self.path.find_dir(install_from)
+		install_pyfile(self, node, install_from)
+	self.bld.add_post_fun(inst_py)
+
+def install_pyfile(self, node, install_from=None):
+	"""
+	Execute the installation of a python file
+
+	:param node: python file
+	:type node: :py:class:`waflib.Node.Node`
+	"""
+
+	from_node = install_from or node.parent
+	tsk = self.bld.install_as(self.install_path + '/' + node.path_from(from_node), node, postpone=False)
+	path = tsk.get_install_path()
+
+	if self.bld.is_install < 0:
+		info("+ removing byte compiled python files")
+		for x in 'co':
+			try:
+				os.remove(path + x)
+			except OSError:
+				pass
+
+	if self.bld.is_install > 0:
+		try:
+			st1 = os.stat(path)
+		except:
+			error('The python file is missing, this should not happen')
+
+		for x in ['c', 'o']:
+			do_inst = self.env['PY' + x.upper()]
+			try:
+				st2 = os.stat(path + x)
+			except OSError:
+				pass
+			else:
+				if st1.st_mtime <= st2.st_mtime:
+					do_inst = False
+
+			if do_inst:
+				lst = (x == 'o') and [self.env['PYFLAGS_OPT']] or []
+				(a, b, c) = (path, path + x, tsk.get_install_path(destdir=False) + x)
+				argv = self.env['PYTHON'] + lst + ['-c', INST, a, b, c]
+				info('+ byte compiling %r' % (path + x))
+				env = self.env.env or None
+				ret = Utils.subprocess.Popen(argv, env=env).wait()
+				if ret:
+					raise Errors.WafError('py%s compilation failed %r' % (x, path))
+
+ feature('py')
+def feature_py(self):
+	"""
+	Dummy feature which does nothing
+	"""
+	pass
+
+ feature('pyext')
+ before_method('propagate_uselib_vars', 'apply_link')
+ after_method('apply_bundle')
+def init_pyext(self):
+	"""
+	Change the values of *cshlib_PATTERN* and *cxxshlib_PATTERN* to remove the
+	*lib* prefix from library names.
+	"""
+	try:
+		if not self.install_path:
+			return
+	except AttributeError:
+		self.install_path = '${PYTHONARCHDIR}'
+	self.uselib = self.to_list(getattr(self, 'uselib', []))
+	if not 'PYEXT' in self.uselib:
+		self.uselib.append('PYEXT')
+	# override shlib_PATTERN set by the osx module
+	self.env['cshlib_PATTERN'] = self.env['cxxshlib_PATTERN'] = self.env['macbundle_PATTERN'] = self.env['pyext_PATTERN']
+
+ feature('pyext')
+ before_method('apply_link', 'apply_bundle')
+def set_bundle(self):
+	if Utils.unversioned_sys_platform() == 'darwin':
+		self.mac_bundle = True
+
+ before_method('propagate_uselib_vars')
+ feature('pyembed')
+def init_pyembed(self):
+	"""
+	Add the PYEMBED variable.
+	"""
+	self.uselib = self.to_list(getattr(self, 'uselib', []))
+	if not 'PYEMBED' in self.uselib:
+		self.uselib.append('PYEMBED')
+
+ conf
+def get_python_variables(self, variables, imports=None):
+	"""
+	Spawn a new python process to dump configuration variables
+
+	:param variables: variables to print
+	:type variables: list of string
+	:param imports: one import by element
+	:type imports: list of string
+	:return: the variable values
+	:rtype: list of string
+	"""
+	if not imports:
+		try:
+			imports = self.python_imports
+		except AttributeError:
+			imports = DISTUTILS_IMP
+
+	program = list(imports) # copy
+	program.append('')
+	for v in variables:
+		program.append("print(repr(%s))" % v)
+	os_env = dict(os.environ)
+	try:
+		del os_env['MACOSX_DEPLOYMENT_TARGET'] # see comments in the OSX tool
+	except KeyError:
+		pass
+
+	try:
+		out = self.cmd_and_log(self.env.PYTHON + ['-c', '\n'.join(program)], env=os_env)
+	except Errors.WafError:
+		self.fatal('The distutils module is unusable: install "python-devel"?')
+	return_values = []
+	for s in out.split('\n'):
+		s = s.strip()
+		if not s:
+			continue
+		if s == 'None':
+			return_values.append(None)
+		elif s[0] == "'" and s[-1] == "'":
+			return_values.append(s[1:-1])
+		elif s[0].isdigit():
+			return_values.append(int(s))
+		else: break
+	return return_values
+
+ conf
+def check_python_headers(conf):
+	"""
+	Check for headers and libraries necessary to extend or embed python by using the module *distutils*.
+	On success the environment variables xxx_PYEXT and xxx_PYEMBED are added:
+
+	* PYEXT: for compiling python extensions
+	* PYEMBED: for embedding a python interpreter
+	"""
+
+	# FIXME rewrite
+
+	if not conf.env['CC_NAME'] and not conf.env['CXX_NAME']:
+		conf.fatal('load a compiler first (gcc, g++, ..)')
+
+	if not conf.env['PYTHON_VERSION']:
+		conf.check_python_version()
+
+	env = conf.env
+	pybin = conf.env.PYTHON
+	if not pybin:
+		conf.fatal('could not find the python executable')
+
+	v = 'prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS'.split()
+	try:
+		lst = conf.get_python_variables(["get_config_var('%s') or ''" % x for x in v])
+	except RuntimeError:
+		conf.fatal("Python development headers not found (-v for details).")
+
+	vals = ['%s = %r' % (x, y) for (x, y) in zip(v, lst)]
+	conf.to_log("Configuration returned from %r:\n%r\n" % (pybin, '\n'.join(vals)))
+
+	dct = dict(zip(v, lst))
+	x = 'MACOSX_DEPLOYMENT_TARGET'
+	if dct[x]:
+		conf.env[x] = conf.environ[x] = dct[x]
+
+	env['pyext_PATTERN'] = '%s' + dct['SO'] # not a mistake
+
+	# Check for python libraries for embedding
+
+	all_flags = dct['LDFLAGS'] + ' ' + dct['CFLAGS']
+	conf.parse_flags(all_flags, 'PYEMBED')
+
+	all_flags = dct['LDFLAGS'] + ' ' + dct['LDSHARED'] + ' ' + dct['CFLAGS']
+	conf.parse_flags(all_flags, 'PYEXT')
+
+	result = None
+	#name = 'python' + env['PYTHON_VERSION']
+
+	# TODO simplify this
+	for name in ('python' + env['PYTHON_VERSION'], 'python' + env['PYTHON_VERSION'].replace('.', '')):
+
+		# LIBPATH_PYEMBED is already set; see if it works.
+		if not result and env['LIBPATH_PYEMBED']:
+			path = env['LIBPATH_PYEMBED']
+			conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n" % path)
+			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBPATH_PYEMBED' % name)
+
+		if not result and dct['LIBDIR']:
+			path = [dct['LIBDIR']]
+			conf.to_log("\n\n# try again with -L$python_LIBDIR: %r\n" % path)
+			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBDIR' % name)
+
+		if not result and dct['LIBPL']:
+			path = [dct['LIBPL']]
+			conf.to_log("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n")
+			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in python_LIBPL' % name)
+
+		if not result:
+			path = [os.path.join(dct['prefix'], "libs")]
+			conf.to_log("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n")
+			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in $prefix/libs' % name)
+
+		if result:
+			break # do not forget to set LIBPATH_PYEMBED
+
+	if result:
+		env['LIBPATH_PYEMBED'] = path
+		env.append_value('LIB_PYEMBED', [name])
+	else:
+		conf.to_log("\n\n### LIB NOT FOUND\n")
+
+	# under certain conditions, python extensions must link to
+	# python libraries, not just python embedding programs.
+	if (Utils.is_win32 or sys.platform.startswith('os2')
+		or dct['Py_ENABLE_SHARED']):
+		env['LIBPATH_PYEXT'] = env['LIBPATH_PYEMBED']
+		env['LIB_PYEXT'] = env['LIB_PYEMBED']
+
+	# We check that pythonX.Y-config exists, and if it exists we
+	# use it to get only the includes, else fall back to distutils.
+	num = '.'.join(env['PYTHON_VERSION'].split('.')[:2])
+	conf.find_program(['python%s-config' % num, 'python-config-%s' % num, 'python%sm-config' % num], var='PYTHON_CONFIG', mandatory=False)
+
+	includes = []
+	if conf.env.PYTHON_CONFIG:
+		for incstr in conf.cmd_and_log([ conf.env.PYTHON_CONFIG, '--includes']).strip().split():
+			# strip the -I or /I
+			if (incstr.startswith('-I') or incstr.startswith('/I')):
+				incstr = incstr[2:]
+			# append include path, unless already given
+			if incstr not in includes:
+				includes.append(incstr)
+		conf.to_log("Include path for Python extensions (found via python-config --includes): %r\n" % (includes,))
+		env['INCLUDES_PYEXT'] = includes
+		env['INCLUDES_PYEMBED'] = includes
+	else:
+		conf.to_log("Include path for Python extensions "
+			       "(found via distutils module): %r\n" % (dct['INCLUDEPY'],))
+		env['INCLUDES_PYEXT'] = [dct['INCLUDEPY']]
+		env['INCLUDES_PYEMBED'] = [dct['INCLUDEPY']]
+
+	# Code using the Python API needs to be compiled with -fno-strict-aliasing
+	if env['CC_NAME'] == 'gcc':
+		env.append_value('CFLAGS_PYEMBED', ['-fno-strict-aliasing'])
+		env.append_value('CFLAGS_PYEXT', ['-fno-strict-aliasing'])
+	if env['CXX_NAME'] == 'gcc':
+		env.append_value('CXXFLAGS_PYEMBED', ['-fno-strict-aliasing'])
+		env.append_value('CXXFLAGS_PYEXT', ['-fno-strict-aliasing'])
+
+	if env.CC_NAME == "msvc":
+		from distutils.msvccompiler import MSVCCompiler
+		dist_compiler = MSVCCompiler()
+		dist_compiler.initialize()
+		env.append_value('CFLAGS_PYEXT', dist_compiler.compile_options)
+		env.append_value('CXXFLAGS_PYEXT', dist_compiler.compile_options)
+		env.append_value('LINKFLAGS_PYEXT', dist_compiler.ldflags_shared)
+
+	# See if it compiles
+	try:
+		conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H',
+		   uselib='PYEMBED', fragment=FRAG,
+		   errmsg='Could not find the python development headers')
+	except conf.errors.ConfigurationError:
+		# python3.2, oh yeah
+		conf.check_cfg(path=conf.env.PYTHON_CONFIG, package='', uselib_store='PYEMBED', args=['--cflags', '--libs'])
+		conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H', msg='Getting the python flags from python-config',
+			uselib='PYEMBED', fragment=FRAG, errmsg='Could not find the python development headers elsewhere')
+
+ conf
+def check_python_version(conf, minver=None):
+	"""
+	Check if the python interpreter is found matching a given minimum version.
+	minver should be a tuple, eg. to check for python >= 2.4.2 pass (2,4,2) as minver.
+
+	If successful, PYTHON_VERSION is defined as 'MAJOR.MINOR'
+	(eg. '2.4') of the actual python version found, and PYTHONDIR is
+	defined, pointing to the site-packages directory appropriate for
+	this python version, where modules/packages/extensions should be
+	installed.
+
+	:param minver: minimum version
+	:type minver: tuple of int
+	"""
+	assert minver is None or isinstance(minver, tuple)
+	pybin = conf.env['PYTHON']
+	if not pybin:
+		conf.fatal('could not find the python executable')
+
+	# Get python version string
+	cmd = pybin + ['-c', 'import sys\nfor x in sys.version_info: print(str(x))']
+	debug('python: Running python command %r' % cmd)
+	lines = conf.cmd_and_log(cmd).split()
+	assert len(lines) == 5, "found %i lines, expected 5: %r" % (len(lines), lines)
+	pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4]))
+
+	# compare python version with the minimum required
+	result = (minver is None) or (pyver_tuple >= minver)
+
+	if result:
+		# define useful environment variables
+		pyver = '.'.join([str(x) for x in pyver_tuple[:2]])
+		conf.env['PYTHON_VERSION'] = pyver
+
+		if 'PYTHONDIR' in conf.environ:
+			pydir = conf.environ['PYTHONDIR']
+		else:
+			if Utils.is_win32:
+				(python_LIBDEST, pydir) = conf.get_python_variables(
+					  ["get_config_var('LIBDEST') or ''",
+					   "get_python_lib(standard_lib=0, prefix=%r) or ''" % conf.env['PREFIX']])
+			else:
+				python_LIBDEST = None
+				(pydir,) = conf.get_python_variables( ["get_python_lib(standard_lib=0, prefix=%r) or ''" % conf.env['PREFIX']])
+			if python_LIBDEST is None:
+				if conf.env['LIBDIR']:
+					python_LIBDEST = os.path.join(conf.env['LIBDIR'], "python" + pyver)
+				else:
+					python_LIBDEST = os.path.join(conf.env['PREFIX'], "lib", "python" + pyver)
+
+
+		if 'PYTHONARCHDIR' in conf.environ:
+			pyarchdir = conf.environ['PYTHONARCHDIR']
+		else:
+			(pyarchdir, ) = conf.get_python_variables( ["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''" % conf.env['PREFIX']])
+			if not pyarchdir:
+				pyarchdir = pydir
+
+		if hasattr(conf, 'define'): # conf.define is added by the C tool, so may not exist
+			conf.define('PYTHONDIR', pydir)
+			conf.define('PYTHONARCHDIR', pyarchdir)
+
+		conf.env['PYTHONDIR'] = pydir
+		conf.env['PYTHONARCHDIR'] = pyarchdir
+
+	# Feedback
+	pyver_full = '.'.join(map(str, pyver_tuple[:3]))
+	if minver is None:
+		conf.msg('Checking for python version', pyver_full)
+	else:
+		minver_str = '.'.join(map(str, minver))
+		conf.msg('Checking for python version', pyver_tuple, ">= %s" % (minver_str,) and 'GREEN' or 'YELLOW')
+
+	if not result:
+		conf.fatal('The python version is too old, expecting %r' % (minver,))
+
+# Python snippet run by check_python_module below: prints the module's
+# __version__ (or 'unknown version') so the version can be tested
+PYTHON_MODULE_TEMPLATE = '''
+import %s as current_module
+version = getattr(current_module, '__version__', None)
+if version is not None:
+    print(str(version))
+else:
+    print('unknown version')
+'''
+
+ conf
+def check_python_module(conf, module_name, condition=''):
+	"""
+	Check if the selected python interpreter can import the given python module::
+
+		def configure(conf):
+			conf.check_python_module('pygccxml')
+			conf.check_python_module('re', condition="ver > num(2, 0, 4) and ver <= num(3, 0, 0)")
+
+	:param module_name: module
+	:type module_name: string
+	"""
+	msg = 'Python module %s' % module_name
+	if condition:
+		msg = '%s (%s)' % (msg, condition)
+	conf.start_msg(msg)
+	try:
+		ret = conf.cmd_and_log(conf.env['PYTHON'] + ['-c', PYTHON_MODULE_TEMPLATE % module_name])
+	except Exception:
+		conf.end_msg(False)
+		conf.fatal('Could not find the python module %r' % module_name)
+
+	ret = ret.strip()
+	if condition:
+		conf.end_msg(ret)
+		if ret == 'unknown version':
+			conf.fatal('Could not check the %s version' % module_name)
+
+		from distutils.version import LooseVersion
+		def num(*k):
+			if isinstance(k[0], int):
+				return LooseVersion('.'.join([str(x) for x in k]))
+			else:
+				return LooseVersion(k[0])
+		d = {'num': num, 'ver': LooseVersion(ret)}
+		ev = eval(condition, {}, d)
+		if not ev:
+			conf.fatal('The %s version does not satisfy the requirements' % module_name)
+	else:
+		if ret == 'unknown version':
+			conf.end_msg(True)
+		else:
+			conf.end_msg(ret)
+
+def configure(conf):
+	"""
+	Detect the python interpreter and set PYTHON, PYCMD, PYFLAGS,
+	PYC/PYO in *conf.env*
+	"""
+	try:
+		conf.find_program('python', var='PYTHON')
+	except conf.errors.ConfigurationError:
+		# fall back to the interpreter running waf itself
+		warn("could not find a python executable, setting to sys.executable '%s'" % sys.executable)
+		conf.env.PYTHON = sys.executable
+
+	if conf.env.PYTHON != sys.executable:
+		warn("python executable '%s' different from sys.executable '%s'" % (conf.env.PYTHON, sys.executable))
+	# PYTHON is used as an argv prefix, hence the list form
+	conf.env.PYTHON = conf.cmd_to_list(conf.env.PYTHON)
+
+	v = conf.env
+	v['PYCMD'] = '"import sys, py_compile;py_compile.compile(sys.argv[1], sys.argv[2])"'
+	v['PYFLAGS'] = ''
+	v['PYFLAGS_OPT'] = '-O'
+
+	# options() may not have been called, hence the getattr defaults
+	v['PYC'] = getattr(Options.options, 'pyc', 1)
+	v['PYO'] = getattr(Options.options, 'pyo', 1)
+
+def options(opt):
+	"""
+	Add the options ``--nopyc`` and ``--nopyo``
+	(store_false: passing the flag disables the corresponding install step)
+	"""
+	opt.add_option('--nopyc',
+			action='store_false',
+			default=1,
+			help = 'Do not install bytecode compiled .pyc files (configuration) [Default:install]',
+			dest = 'pyc')
+	opt.add_option('--nopyo',
+			action='store_false',
+			default=1,
+			help='Do not install optimised compiled .pyo files (configuration) [Default:install]',
+			dest='pyo')
+
diff --git a/waflib/Utils.py b/waflib/Utils.py
new file mode 100644
index 0000000..16b9737
--- /dev/null
+++ b/waflib/Utils.py
@@ -0,0 +1,602 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2010 (ita)
+
+"""
+Utilities and platform-specific fixes
+
+The portability fixes try to provide a consistent behavior of the Waf API
+through Python versions 2.3 to 3.X and across different platforms (win32, linux, etc)
+"""
+
+import os, sys, errno, traceback, inspect, re, shutil, datetime, gc
+try:
+	import subprocess
+except:
+	try:
+		import waflib.extras.subprocess as subprocess
+	except:
+		print("The subprocess module is missing (python2.3?):\n try calling 'waf update --files=subprocess'\n or add a copy of subprocess.py to the python libraries")
+
+try:
+	from collections import deque
+except ImportError:
+	class deque(list):
+		"""A deque for Python 2.3 which does not have one"""
+		def popleft(self):
+			return self.pop(0)
+try:
+	import _winreg as winreg
+except:
+	try:
+		import winreg
+	except:
+		winreg = None
+
+from waflib import Errors
+
+try:
+	from collections import UserDict
+except:
+	from UserDict import UserDict
+
+try:
+	from hashlib import md5
+except:
+	try:
+		from md5 import md5
+	except:
+		# never fail to enable fixes from another module
+		pass
+
+try:
+	import threading
+except:
+	class threading(object):
+		"""
+			A fake threading class for platforms lacking the threading module.
+			Use ``waf -j1`` on those platforms
+		"""
+		pass
+	class Lock(object):
+		"""Fake Lock class"""
+		def acquire(self):
+			pass
+		def release(self):
+			pass
+	threading.Lock = threading.Thread = Lock
+else:
+	run_old = threading.Thread.run
+	def run(*args, **kwargs):
+		try:
+			run_old(*args, **kwargs)
+		except (KeyboardInterrupt, SystemExit):
+			raise
+		except:
+			sys.excepthook(*sys.exc_info())
+	threading.Thread.run = run
+
+SIG_NIL = 'iluvcuteoverload'.encode()
+"""Arbitrary null value for a md5 hash. This value must be changed when the hash value is replaced (size)"""
+
+O644 = 420
+"""Constant representing the permissions for regular files (0644 raises a syntax error on python 3)"""
+
+O755 = 493
+"""Constant representing the permissions for executable files (0755 raises a syntax error on python 3)"""
+
+rot_chr = ['\\', '|', '/', '-']
+"List of characters to use when displaying the throbber (progress bar)"
+
+rot_idx = 0
+"Index of the current throbber character (progress bar)"
+
+try:
+	from collections import defaultdict
+except ImportError:
+	class defaultdict(dict):
+		"""
+		defaultdict was introduced in python 2.5, so we leave it for python 2.4 and 2.3
+		"""
+		def __init__(self, default_factory):
+			super(defaultdict, self).__init__()
+			self.default_factory = default_factory
+		def __getitem__(self, key):
+			try:
+				return super(defaultdict, self).__getitem__(key)
+			except KeyError:
+				value = self.default_factory()
+				self[key] = value
+				return value
+
+is_win32 = sys.platform in ('win32', 'cli')
+
+# we should have put this in the Logs.py file instead :-/
+indicator = '\x1b[K%s%s%s\r'
+if is_win32 and 'NOCOLOR' in os.environ:
+	indicator = '%s%s%s\r'
+
+def readf(fname, m='r'):
+	"""
+	Read an entire file into a string, in practice the wrapper
+	node.read(..) should be used instead of this method::
+
+		def build(ctx):
+			from waflib import Utils
+			txt = Utils.readf(self.path.find_node('wscript').abspath())
+			txt = ctx.path.find_node('wscript').read()
+
+	:type  fname: string
+	:param fname: Path to file
+	:type  m: string
+	:param m: Open mode (e.g. 'rb' for binary reads)
+	:rtype: string
+	:return: Content of the file
+	"""
+	# try/finally rather than 'with' to keep python 2.3/2.4 compatibility
+	f = open(fname, m)
+	try:
+		txt = f.read()
+	finally:
+		f.close()
+	return txt
+
+def h_file(filename):
+	"""
+	Compute a hash value for a file by using md5. This method may be replaced by
+	a faster version if necessary. The following uses the file size and the timestamp value::
+
+		import stat
+		from waflib import Utils
+		def h_file(filename):
+			st = os.stat(filename)
+			if stat.S_ISDIR(st[stat.ST_MODE]): raise IOError('not a file')
+			m = Utils.md5()
+			m.update(str(st.st_mtime))
+			m.update(str(st.st_size))
+			m.update(filename)
+			return m.digest()
+		Utils.h_file = h_file
+
+	:type filename: string
+	:param filename: path to the file to hash
+	:return: hash of the file contents
+	"""
+	f = open(filename, 'rb')
+	m = md5()
+	try:
+		# read in 100000-byte chunks; 'filename' is deliberately reused as
+		# the chunk variable (loop ends on the empty read at EOF)
+		while filename:
+			filename = f.read(100000)
+			m.update(filename)
+	finally:
+		f.close()
+	return m.digest()
+
+# python 2 supports the 'hex' string codec; python 3 does not, so fall
+# back to binascii there (and decode, since hexlify returns bytes)
+try:
+	x = ''.encode('hex')
+except:
+	import binascii
+	def to_hex(s):
+		ret = binascii.hexlify(s)
+		if not isinstance(ret, str):
+			ret = ret.decode('utf-8')
+		return ret
+else:
+	def to_hex(s):
+		return s.encode('hex')
+
+to_hex.__doc__ = """
+Return the hexadecimal representation of a string
+
+:param s: string to convert
+:type s: string
+"""
+
+listdir = os.listdir
+if is_win32:
+	def listdir_win32(s):
+		"""
+		List the contents of a folder in a portable manner.
+		On Win32, return the list of drive letters when *s* is empty.
+
+		:type s: string
+		:param s: a string, which can be empty on Windows for listing the drive letters
+		"""
+		if not s:
+			try:
+				import ctypes
+			except:
+				# there is nothing much we can do
+				return [x + ':\\' for x in list('ABCDEFGHIJKLMNOPQRSTUVWXYZ')]
+			else:
+				dlen = 4 # length of "?:\\x00"
+				maxdrives = 26
+				buf = ctypes.create_string_buffer(maxdrives * dlen)
+				ndrives = ctypes.windll.kernel32.GetLogicalDriveStringsA(maxdrives, ctypes.byref(buf))
+				return [ buf.raw[4*i:4*i+3].decode('ascii') for i in range(int(ndrives/dlen)) ]
+
+		# "c:" alone refers to the drive's current directory; force the root
+		if len(s) == 2 and s[1] == ":":
+			s += os.sep
+
+		if not os.path.isdir(s):
+			e = OSError()
+			e.errno = errno.ENOENT
+			raise e
+		return os.listdir(s)
+	listdir = listdir_win32
+
+def num2ver(ver):
+	"""
+	Convert a string, tuple or version number into an integer. The number is supposed to have at most 4 digits::
+
+		from waflib.Utils import num2ver
+		num2ver('1.3.2') == num2ver((1,3,2)) == num2ver((1,3,2,0))
+
+	Non-string, non-tuple inputs are returned unchanged.
+
+	:type ver: string or tuple of numbers
+	:param ver: a version number
+	"""
+	if isinstance(ver, str):
+		ver = tuple(ver.split('.'))
+	if isinstance(ver, tuple):
+		# pack up to 4 components into one base-256 integer (big-endian)
+		ret = 0
+		for i in range(4):
+			if i < len(ver):
+				ret += 256**(3 - i) * int(ver[i])
+		return ret
+	return ver
+
+def ex_stack():
+	"""
+	Extract the stack to display exceptions
+
+	:return: a string representing the last exception
+	"""
+	exc_type, exc_value, tb = sys.exc_info()
+	exc_lines = traceback.format_exception(exc_type, exc_value, tb)
+	return ''.join(exc_lines)
+
+def to_list(sth):
+	"""
+	Convert a string argument to a list by splitting on spaces, and pass
+	through a list argument unchanged::
+
+		from waflib.Utils import to_list
+		lst = to_list("a b c d")
+
+	:param sth: List or a string of items separated by spaces
+	:rtype: list
+	:return: Argument converted to list
+
+	"""
+	if isinstance(sth, str):
+		return sth.split()
+	else:
+		return sth
+
+re_nl = re.compile('\r*\n', re.M)
+def str_to_dict(txt):
+	"""
+	Parse a string with key = value pairs into a dictionary::
+
+		from waflib import Utils
+		x = Utils.str_to_dict('''
+			a = 1
+			b = test
+		''')
+
+	:type  txt: string
+	:param txt: String to parse
+	:rtype: dict
+	:return: Dictionary containing parsed key-value pairs
+	"""
+	tbl = {}
+
+	lines = re_nl.split(txt)
+	for x in lines:
+		x = x.strip()
+		# skip blanks, comments and lines without '='
+		if not x or x.startswith('#') or x.find('=') < 0:
+			continue
+		tmp = x.split('=')
+		# the value may itself contain '=' characters
+		tbl[tmp[0].strip()] = '='.join(tmp[1:]).strip()
+	return tbl
+
+def split_path(path):
+	# default (posix) implementation
+	return path.split('/')
+
+def split_path_cygwin(path):
+	# keep the leading '/' of //server/share UNC-style paths
+	if path.startswith('//'):
+		ret = path.split('/')[2:]
+		ret[0] = '/' + ret[0]
+		return ret
+	return path.split('/')
+
+re_sp = re.compile('[/\\\\]')
+def split_path_win32(path):
+	# keep the leading '\\' of \\server\share UNC paths
+	if path.startswith('\\\\'):
+		ret = re.split(re_sp, path)[2:]
+		ret[0] = '\\' + ret[0]
+		return ret
+	return re.split(re_sp, path)
+
+# pick the platform-appropriate implementation at import time
+if sys.platform == 'cygwin':
+	split_path = split_path_cygwin
+elif is_win32:
+	split_path = split_path_win32
+
+split_path.__doc__ = """
+Split a path by / or \\. This function is not like os.path.split
+
+:type  path: string
+:param path: path to split
+:return:     list of strings
+"""
+
+def check_dir(path):
+	"""
+	Ensure that a directory exists (similar to ``mkdir -p``).
+
+	:type  path: string
+	:param path: Path to directory
+	:raises Errors.WafError: when the folder cannot be created
+	"""
+	if not os.path.isdir(path):
+		try:
+			os.makedirs(path)
+		except OSError as e:
+			# re-check: another process may have created it concurrently
+			if not os.path.isdir(path):
+				raise Errors.WafError('Cannot create the folder %r' % path, ex=e)
+
+def def_attrs(cls, **kw):
+	"""
+	Set default attributes on a class instance
+	(existing attributes are never overwritten)
+
+	:type cls: class
+	:param cls: the class to update the given attributes in.
+	:type kw: dict
+	:param kw: dictionary of attributes names and values.
+	"""
+	for k, v in kw.items():
+		if not hasattr(cls, k):
+			setattr(cls, k, v)
+
+def quote_define_name(s):
+	"""
+	Convert a string to an identifier suitable for C defines
+	(non-alphanumeric characters become '_', letters are upper-cased).
+
+	:type  s: string
+	:param s: String to convert
+	:rtype: string
+	:return: Identifier suitable for C defines
+	"""
+	fu = re.compile("[^a-zA-Z0-9]").sub("_", s)
+	fu = fu.upper()
+	return fu
+
+def h_list(lst):
+	"""
+	Hash lists. For tuples, using hash(tup) is much more efficient
+
+	:param lst: list to hash
+	:type lst: list of strings
+	:return: hash of the list
+	"""
+	# hashes the repr of the list, so element order matters
+	m = md5()
+	m.update(str(lst).encode())
+	return m.digest()
+
+def h_fun(fun):
+	"""
+	Hash functions
+
+	:param fun: function to hash
+	:type  fun: function
+	:return: hash of the function
+	"""
+	try:
+		# the source is cached on the function object after the first call
+		return fun.code
+	except AttributeError:
+		try:
+			h = inspect.getsource(fun)
+		except IOError:
+			# no source available (e.g. built-in or C function)
+			h = "nocode"
+		try:
+			fun.code = h
+		except AttributeError:
+			# some callables do not accept attribute assignment; recompute next time
+			pass
+		return h
+
+reg_subst = re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}")
+def subst_vars(expr, params):
+	"""
+	Replace ${VAR} with the value of VAR taken from a dict or a config set::
+
+		from waflib import Utils
+		s = Utils.subst_vars('${PREFIX}/bin', env)
+
+	:type  expr: string
+	:param expr: String to perform substitution on
+	:param params: Dictionary or config set to look up variable values.
+	"""
+	def repl_var(m):
+		# '\\' and '$$' are escapes for a literal backslash and dollar sign
+		if m.group(1):
+			return '\\'
+		if m.group(2):
+			return '$'
+		try:
+			# ConfigSet instances may contain lists
+			return params.get_flat(m.group(3))
+		except AttributeError:
+			return params[m.group(3)]
+	return reg_subst.sub(repl_var, expr)
+
+def destos_to_binfmt(key):
+	"""
+	Return the binary format based on the unversioned platform name.
+
+	:param key: platform name
+	:type  key: string
+	:return: string representing the binary format
+	"""
+	# NOTE(review): 'mac-o' (not 'mach-o') is the spelling used throughout waf —
+	# callers compare against this exact string, so it is kept verbatim
+	if key == 'darwin':
+		return 'mac-o'
+	elif key in ('win32', 'cygwin', 'uwin', 'msys'):
+		return 'pe'
+	return 'elf'
+
+def unversioned_sys_platform():
+	"""
+	Return the unversioned platform name.
+	Some Python platform names contain versions, that depend on
+	the build environment, e.g. linux2, freebsd6, etc.
+	This returns the name without the version number. Exceptions are
+	os2 and win32, which are returned verbatim.
+
+	:rtype: string
+	:return: Unversioned platform name
+	"""
+	s = sys.platform
+	if s == 'java':
+		# The real OS is hidden under the JVM.
+		from java.lang import System
+		s = System.getProperty('os.name')
+		# see http://lopica.sourceforge.net/os.html for a list of possible values
+		if s == 'Mac OS X':
+			return 'darwin'
+		elif s.startswith('Windows '):
+			return 'win32'
+		elif s == 'OS/2':
+			return 'os2'
+		elif s == 'HP-UX':
+			return 'hpux'
+		elif s in ('SunOS', 'Solaris'):
+			return 'sunos'
+		else: s = s.lower()
+	
+	# powerpc == darwin for our purposes
+	if s == 'powerpc':
+		return 'darwin'
+	# 'and' binds tighter: win32, or any os2 variant except sunos2, is returned verbatim
+	if s == 'win32' or s.endswith('os2') and s != 'sunos2': return s
+	# strip the trailing version digits (linux2 -> linux, freebsd6 -> freebsd)
+	return re.split('\d+$', s)[0]
+
+def nada(*k, **kw):
+	"""
+	A function that does nothing
+
+	:return: None
+	"""
+	# accepts any arguments so it can stand in for arbitrary callbacks
+	pass
+
+class Timer(object):
+	"""
+	Simple object for timing the execution of commands.
+	Its string representation is the current time::
+
+		from waflib.Utils import Timer
+		timer = Timer()
+		a_few_operations()
+		s = str(timer)
+	"""
+	def __init__(self):
+		# record the creation time; __str__ reports the elapsed delta
+		self.start_time = datetime.datetime.utcnow()
+
+	def __str__(self):
+		delta = datetime.datetime.utcnow() - self.start_time
+		days = int(delta.days)
+		hours = delta.seconds // 3600
+		minutes = (delta.seconds - hours * 3600) // 60
+		seconds = delta.seconds - hours * 3600 - minutes * 60 + float(delta.microseconds) / 1000 / 1000
+		# only show the larger units when they are non-zero, e.g. "2m3.400s"
+		result = ''
+		if days:
+			result += '%dd' % days
+		if days or hours:
+			result += '%dh' % hours
+		if days or hours or minutes:
+			result += '%dm' % minutes
+		return '%s%.3fs' % (result, seconds)
+
+if is_win32:
+	old = shutil.copy2
+	def copy2(src, dst):
+		"""
+		shutil.copy2 does not copy the file attributes on windows, so we
+		hack into the shutil module to fix the problem
+		"""
+		old(src, dst)
+		shutil.copystat(src, dst)
+	# monkey-patch the module-level function so all callers get the fix
+	setattr(shutil, 'copy2', copy2)
+
+if os.name == 'java':
+	# Jython cannot disable the gc but they can enable it ... wtf?
+	try:
+		gc.disable()
+		gc.enable()
+	except NotImplementedError:
+		# make gc.disable a no-op so the nogc decorator below still works
+		gc.disable = gc.enable
+
+def read_la_file(path):
+	"""
+	Read property files, used by msvc.py
+
+	:param path: file to read
+	:type path: string
+	"""
+	sp = re.compile(r'^([^=]+)=\'(.*)\'$')
+	dc = {}
+	for line in readf(path).splitlines():
+		try:
+			# sp.split on a matching "key='value'" line yields ['', key, value, '']
+			_, left, right, _ = sp.split(line.strip())
+			dc[left] = right
+		except ValueError:
+			# non-matching lines produce fewer parts; skip them
+			pass
+	return dc
+
+def nogc(fun):
+	"""
+	Decorator: let a function disable the garbage collector during its execution.
+	It is used in the build context when storing/loading the build cache file (pickle)
+
+	:param fun: function to execute
+	:type fun: function
+	:return: the return value of the function executed
+	"""
+	def f(*k, **kw):
+		try:
+			gc.disable()
+			ret = fun(*k, **kw)
+		finally:
+			# always re-enable the collector, even if fun raised
+			gc.enable()
+		return ret
+	f.__doc__ = fun.__doc__
+	return f
+
+def run_once(fun):
+	"""
+	Decorator: let a function cache its results, use like this::
+
+		@run_once
+		def foo(k):
+			return 345*2343
+
+	:param fun: function to execute
+	:type fun: function
+	:return: the return value of the function executed
+	"""
+	cache = {}
+	def wrap(k):
+		try:
+			return cache[k]
+		except KeyError:
+			ret = fun(k)
+			cache[k] = ret
+			return ret
+	wrap.__cache__ = cache
+	return wrap
+
+def get_registry_app_path(key, filename):
+	"""
+	Look up an executable in the Windows "App Paths" registry key.
+	Returns the path when the registry entry exists and points to a real file,
+	else None (also when winreg is unavailable).
+	"""
+	if not winreg:
+		return None
+	try:
+		# NOTE(review): filename[0] suggests callers pass a list whose first
+		# element is the program name — confirm against call sites
+		result = winreg.QueryValue(key, "Software\\Microsoft\\Windows\\CurrentVersion\\App Paths\\%s.exe" % filename[0])
+	except WindowsError:
+		pass
+	else:
+		if os.path.isfile(result):
+			return result
+
diff --git a/waflib/__init__.py b/waflib/__init__.py
new file mode 100644
index 0000000..c8a3c34
--- /dev/null
+++ b/waflib/__init__.py
@@ -0,0 +1,3 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2010 (ita)
diff --git a/waflib/ansiterm.py b/waflib/ansiterm.py
new file mode 100644
index 0000000..a534e49
--- /dev/null
+++ b/waflib/ansiterm.py
@@ -0,0 +1,246 @@
+import sys, os
+# probe for a usable Windows console; on any failure (not a tty, no ctypes/kernel32,
+# console too small) the except branch leaves the standard streams untouched
+try:
+	if not (sys.stderr.isatty() and sys.stdout.isatty()):
+		raise ValueError('not a tty')
+
+	from ctypes import *
+
+	class COORD(Structure):
+		_fields_ = [("X", c_short), ("Y", c_short)]
+
+	class SMALL_RECT(Structure):
+		_fields_ = [("Left", c_short), ("Top", c_short), ("Right", c_short), ("Bottom", c_short)]
+
+	class CONSOLE_SCREEN_BUFFER_INFO(Structure):
+		_fields_ = [("Size", COORD), ("CursorPosition", COORD), ("Attributes", c_short), ("Window", SMALL_RECT), ("MaximumWindowSize", COORD)]
+
+	class CONSOLE_CURSOR_INFO(Structure):
+		_fields_ = [('dwSize',c_ulong), ('bVisible', c_int)]
+
+	sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
+	csinfo = CONSOLE_CURSOR_INFO()
+	hconsole = windll.kernel32.GetStdHandle(-11)
+	windll.kernel32.GetConsoleScreenBufferInfo(hconsole, byref(sbinfo))
+	if sbinfo.Size.X < 9 or sbinfo.Size.Y < 9: raise ValueError('small console')
+	windll.kernel32.GetConsoleCursorInfo(hconsole, byref(csinfo))
+except Exception:
+	pass
+else:
+	import re, threading
+
+	# Vista+ consoles accept a wider range of color codes (see set_color below)
+	is_vista = getattr(sys, "getwindowsversion", None) and sys.getwindowsversion()[0] >= 6
+
+	try:
+		_type = unicode
+	except:
+		# Python 3: all strings are unicode
+		_type = str
+
+	to_int = lambda number, default: number and int(number) or default
+	wlock = threading.Lock()
+
+	STD_OUTPUT_HANDLE = -11
+	STD_ERROR_HANDLE = -12
+
+	class AnsiTerm(object):
+		"""
+		emulate a vt100 terminal in cmd.exe
+		"""
+		def __init__(self):
+			self.encoding = sys.stdout.encoding
+			self.hconsole = windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
+			self.cursor_history = []
+			self.orig_sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
+			self.orig_csinfo = CONSOLE_CURSOR_INFO()
+			windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(self.orig_sbinfo))
+			# NOTE(review): this reads the module-level "hconsole", not self.hconsole — confirm intended
+			windll.kernel32.GetConsoleCursorInfo(hconsole, byref(self.orig_csinfo))
+
+		def screen_buffer_info(self):
+			# fetch a fresh snapshot of the console state (size, cursor, attributes)
+			sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
+			windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(sbinfo))
+			return sbinfo
+
+		def clear_line(self, param):
+			mode = param and int(param) or 0
+			sbinfo = self.screen_buffer_info()
+			# NOTE(review): the branches below do not match standard VT100 "EL" semantics;
+			# as written, mode 1 clears the whole line and mode 2 clears cursor-to-end
+			if mode == 1: # clears the entire line (starts at column 0, full width)
+				line_start = COORD(0, sbinfo.CursorPosition.Y)
+				line_length = sbinfo.Size.X
+			elif mode == 2: # clears from the cursor position to the end of the line
+				line_start = COORD(sbinfo.CursorPosition.X, sbinfo.CursorPosition.Y)
+				line_length = sbinfo.Size.X - sbinfo.CursorPosition.X
+			else: # Clear from cursor position to end of line
+				line_start = sbinfo.CursorPosition
+				line_length = sbinfo.Size.X - sbinfo.CursorPosition.X
+			chars_written = c_int()
+			windll.kernel32.FillConsoleOutputCharacterA(self.hconsole, c_wchar(' '), line_length, line_start, byref(chars_written))
+			windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, line_length, line_start, byref(chars_written))
+
+		def clear_screen(self, param):
+			mode = to_int(param, 0)
+			sbinfo = self.screen_buffer_info()
+			if mode == 1: # Clear from begining of screen to cursor position
+				clear_start = COORD(0, 0)
+				clear_length = sbinfo.CursorPosition.X * sbinfo.CursorPosition.Y
+			elif mode == 2: # Clear entire screen and return cursor to home
+				clear_start = COORD(0, 0)
+				clear_length = sbinfo.Size.X * sbinfo.Size.Y
+				windll.kernel32.SetConsoleCursorPosition(self.hconsole, clear_start)
+			else: # Clear from cursor position to end of screen
+				clear_start = sbinfo.CursorPosition
+				clear_length = ((sbinfo.Size.X - sbinfo.CursorPosition.X) + sbinfo.Size.X * (sbinfo.Size.Y - sbinfo.CursorPosition.Y))
+			chars_written = c_int()
+			windll.kernel32.FillConsoleOutputCharacterA(self.hconsole, c_wchar(' '), clear_length, clear_start, byref(chars_written))
+			windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, clear_length, clear_start, byref(chars_written))
+
+		def push_cursor(self, param):
+			# ANSI 's': save the cursor position on a stack
+			sbinfo = self.screen_buffer_info()
+			self.cursor_history.append(sbinfo.CursorPosition)
+
+		def pop_cursor(self, param):
+			# ANSI 'u': restore the most recently saved cursor position
+			if self.cursor_history:
+				old_pos = self.cursor_history.pop()
+				windll.kernel32.SetConsoleCursorPosition(self.hconsole, old_pos)
+
+		def set_cursor(self, param):
+			# ANSI 'H'/'f': "row;col", both 1-based (converted to 0-based, clamped)
+			y, sep, x = param.partition(';')
+			x = to_int(x, 1) - 1
+			y = to_int(y, 1) - 1
+			sbinfo = self.screen_buffer_info()
+			new_pos = COORD(
+				min(max(0, x), sbinfo.Size.X),
+				min(max(0, y), sbinfo.Size.Y)
+			)
+			windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
+
+		def set_column(self, param):
+			# ANSI 'G': move to the given (1-based) column on the current row
+			x = to_int(param, 1) - 1
+			sbinfo = self.screen_buffer_info()
+			new_pos = COORD(
+				min(max(0, x), sbinfo.Size.X),
+				sbinfo.CursorPosition.Y
+			)
+			windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
+
+		def move_cursor(self, x_offset=0, y_offset=0):
+			# relative cursor movement, clamped to the screen buffer bounds
+			sbinfo = self.screen_buffer_info()
+			new_pos = COORD(
+				min(max(0, sbinfo.CursorPosition.X + x_offset), sbinfo.Size.X),
+				min(max(0, sbinfo.CursorPosition.Y + y_offset), sbinfo.Size.Y)
+			)
+			windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
+
+		def move_up(self, param):
+			self.move_cursor(y_offset = -to_int(param, 1))
+
+		def move_down(self, param):
+			self.move_cursor(y_offset = to_int(param, 1))
+
+		def move_left(self, param):
+			self.move_cursor(x_offset = -to_int(param, 1))
+
+		def move_right(self, param):
+			self.move_cursor(x_offset = to_int(param, 1))
+
+		def next_line(self, param):
+			# ANSI 'E': down N lines, back to column 0
+			sbinfo = self.screen_buffer_info()
+			self.move_cursor(
+				x_offset = -sbinfo.CursorPosition.X,
+				y_offset = to_int(param, 1)
+			)
+
+		def prev_line(self, param):
+			# ANSI 'F': up N lines, back to column 0
+			sbinfo = self.screen_buffer_info()
+			self.move_cursor(
+				x_offset = -sbinfo.CursorPosition.X,
+				y_offset = -to_int(param, 1)
+			)
+
+		def rgb2bgr(self, c):
+			# ANSI color bits are RGB; the console attribute bits are BGR — swap bit 0 and bit 2
+			return ((c&1) << 2) | (c&2) | ((c&4)>>2)
+
+		def set_color(self, param):
+			# ANSI 'm' (SGR): parse ';'-separated codes into console text attributes
+			cols = param.split(';')
+			sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
+			windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(sbinfo))
+			attr = sbinfo.Attributes
+			for c in cols:
+				if is_vista:
+					c = int(c)
+				else:
+					c = to_int(c, 0)
+				if c in range(30,38): # fgcolor
+					attr = (attr & 0xfff0) | self.rgb2bgr(c-30)
+				elif c in range(40,48): # bgcolor
+					attr = (attr & 0xff0f) | (self.rgb2bgr(c-40) << 4)
+				elif c == 0: # reset
+					attr = self.orig_sbinfo.Attributes
+				elif c == 1: # strong
+					attr |= 0x08
+				elif c == 4: # blink not available -> bg intensity
+					attr |= 0x80
+				elif c == 7: # negative
+					attr = (attr & 0xff88) | ((attr & 0x70) >> 4) | ((attr & 0x07) << 4)
+			windll.kernel32.SetConsoleTextAttribute(self.hconsole, attr)
+
+		def show_cursor(self,param):
+			csinfo.bVisible = 1
+			windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(csinfo))
+
+		def hide_cursor(self,param):
+			csinfo.bVisible = 0
+			windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(csinfo))
+
+		# dispatch table: final ANSI command letter -> handler method
+		ansi_command_table = {
+			'A': move_up,
+			'B': move_down,
+			'C': move_right,
+			'D': move_left,
+			'E': next_line,
+			'F': prev_line,
+			'G': set_column,
+			'H': set_cursor,
+			'f': set_cursor,
+			'J': clear_screen,
+			'K': clear_line,
+			'h': show_cursor,
+			'l': hide_cursor,
+			'm': set_color,
+			's': push_cursor,
+			'u': pop_cursor,
+		}
+		# Match either the escape sequence or text not containing escape sequence
+		ansi_tokens = re.compile('(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))')
+		def write(self, text):
+			try:
+				# serialize writes: stdout and stderr share this object
+				wlock.acquire()
+				for param, cmd, txt in self.ansi_tokens.findall(text):
+					if cmd:
+						cmd_func = self.ansi_command_table.get(cmd)
+						if cmd_func:
+							cmd_func(self, param)
+					else:
+						self.writeconsole(txt)
+			finally:
+				wlock.release()
+
+		def writeconsole(self, txt):
+			chars_written = c_int()
+			writeconsole = windll.kernel32.WriteConsoleA
+			if isinstance(txt, _type):
+				writeconsole = windll.kernel32.WriteConsoleW
+
+			TINY_STEP = 3000
+			for x in range(0, len(txt), TINY_STEP):
+			    # According MSDN, size should NOT exceed 64 kb (issue #746)
+			    tiny = txt[x : x + TINY_STEP]
+			    writeconsole(self.hconsole, tiny, len(tiny), byref(chars_written), None)
+
+		def flush(self):
+			# output is unbuffered; nothing to do
+			pass
+
+		def isatty(self):
+			return True
+
+	# replace the standard streams and advertise vt100 so waf emits ANSI colors
+	sys.stderr = sys.stdout = AnsiTerm()
+	os.environ['TERM'] = 'vt100'
+
diff --git a/waflib/extras/__init__.py b/waflib/extras/__init__.py
new file mode 100644
index 0000000..c8a3c34
--- /dev/null
+++ b/waflib/extras/__init__.py
@@ -0,0 +1,3 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2010 (ita)
diff --git a/waflib/extras/compat15.py b/waflib/extras/compat15.py
new file mode 100644
index 0000000..6e3f7fe
--- /dev/null
+++ b/waflib/extras/compat15.py
@@ -0,0 +1,298 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2010 (ita)
+
+"""
+This file is provided to enable compatibility with waf 1.5, it will be removed in waf 1.7
+"""
+
+import sys
+from waflib import ConfigSet, Logs, Options, Scripting, Task, Build, Configure, Node, Runner, TaskGen, Utils, Errors, Context
+
+# the following is to bring some compatibility with waf 1.5: "import waflib.Configure" -> "import Configure"
+# register the new modules under their waf 1.5 top-level names
+sys.modules['Environment'] = ConfigSet
+ConfigSet.Environment = ConfigSet.ConfigSet
+
+sys.modules['Logs'] = Logs
+sys.modules['Options'] = Options
+sys.modules['Scripting'] = Scripting
+sys.modules['Task'] = Task
+sys.modules['Build'] = Build
+sys.modules['Configure'] = Configure
+sys.modules['Node'] = Node
+sys.modules['Runner'] = Runner
+sys.modules['TaskGen'] = TaskGen
+sys.modules['Utils'] = Utils
+
+from waflib.Tools import c_preproc
+sys.modules['preproc'] = c_preproc
+
+from waflib.Tools import c_config
+sys.modules['config_c'] = c_config
+
+# map renamed/removed 1.5 methods onto their 1.6 equivalents
+ConfigSet.ConfigSet.copy = ConfigSet.ConfigSet.derive
+ConfigSet.ConfigSet.set_variant = Utils.nada
+
+Build.BuildContext.add_subdirs = Build.BuildContext.recurse
+Build.BuildContext.new_task_gen = Build.BuildContext.__call__
+Build.BuildContext.is_install = 0
+Node.Node.relpath_gen = Node.Node.path_from
+
+def name_to_obj(self, s, env=None):
+	# compat shim: the env argument is ignored in waf 1.6
+	Logs.warn('compat: change "name_to_obj(name, env)" by "get_tgen_by_name(name)"')
+	return self.get_tgen_by_name(s)
+Build.BuildContext.name_to_obj = name_to_obj
+
+def env_of_name(self, name):
+	# compat shim: return the named ConfigSet, or None after logging an error
+	try:
+		return self.all_envs[name]
+	except KeyError:
+		Logs.error('no such environment: '+name)
+		return None
+Build.BuildContext.env_of_name = env_of_name
+
+
+def set_env_name(self, name, env):
+	# compat shim: register a ConfigSet under a variant name
+	self.all_envs[name] = env
+	return env
+Configure.ConfigurationContext.set_env_name = set_env_name
+
+def retrieve(self, name, fromenv=None):
+	# compat shim: fetch the named ConfigSet, creating and initializing it on first use
+	try:
+		env = self.all_envs[name]
+	except KeyError:
+		env = ConfigSet.ConfigSet()
+		self.prepare_env(env)
+		self.all_envs[name] = env
+	else:
+		if fromenv: Logs.warn("The environment %s may have been configured already" % name)
+	return env
+Configure.ConfigurationContext.retrieve = retrieve
+
+# more 1.5 -> 1.6 renames (configuration, options and task APIs)
+Configure.ConfigurationContext.sub_config = Configure.ConfigurationContext.recurse
+Configure.ConfigurationContext.check_tool = Configure.ConfigurationContext.load
+Configure.conftest = Configure.conf
+Configure.ConfigurationError = Errors.ConfigurationError
+
+Options.OptionsContext.sub_options = Options.OptionsContext.recurse
+Options.OptionsContext.tool_options = Context.Context.load
+Options.Handler = Options.OptionsContext
+
+Task.simple_task_type = Task.task_type_from_func = Task.task_factory
+Task.TaskBase.classes = Task.classes
+
+def setitem(self, key, value):
+	# compat: waf 1.5 used CCFLAGS*, waf 1.6 uses CFLAGS* — drop the first 'C'
+	if key.startswith('CCFLAGS'):
+		key = key[1:]
+	self.table[key] = value
+ConfigSet.ConfigSet.__setitem__ = setitem
+
+ TaskGen feature('d')
+ TaskGen before('apply_incpaths')
+def old_importpaths(self):
+	if getattr(self, 'importpaths', []):
+		self.includes = self.importpaths
+
+from waflib import Context
+eld = Context.load_tool
+def load_tool(*k, **kw):
+	# wrap Context.load_tool to alias the waf 1.5 hook names on loaded tools
+	ret = eld(*k, **kw)
+	if 'set_options' in ret.__dict__:
+		Logs.warn('compat: rename "set_options" to options')
+		ret.options = ret.set_options
+	if 'detect' in ret.__dict__:
+		Logs.warn('compat: rename "detect" to "configure"')
+		ret.configure = ret.detect
+	return ret
+Context.load_tool = load_tool
+
+rev = Context.load_module
+def load_module(path):
+	# wrap Context.load_module to alias waf 1.5 wscript names (set_options/srcdir/blddir)
+	ret = rev(path)
+	if 'set_options' in ret.__dict__:
+		Logs.warn('compat: rename "set_options" to "options" (%r)' % path)
+		ret.options = ret.set_options
+	if 'srcdir' in ret.__dict__:
+		Logs.warn('compat: rename "srcdir" to "top" (%r)' % path)
+		ret.top = ret.srcdir
+	if 'blddir' in ret.__dict__:
+		Logs.warn('compat: rename "blddir" to "out" (%r)' % path)
+		ret.out = ret.blddir
+	return ret
+Context.load_module = load_module
+
+old_post = TaskGen.task_gen.post
+def post(self):
+	# wrap task_gen.post to translate waf 1.5 feature names before processing
+	self.features = self.to_list(self.features)
+	if 'cc' in self.features:
+		Logs.warn('compat: the feature cc does not exist anymore (use "c")')
+		self.features.remove('cc')
+		self.features.append('c')
+	if 'cstaticlib' in self.features:
+		Logs.warn('compat: the feature cstaticlib does not exist anymore (use "cstlib" or "cxxstlib")')
+		self.features.remove('cstaticlib')
+		self.features.append(('cxx' in self.features) and 'cxxstlib' or 'cstlib')
+	if getattr(self, 'ccflags', None):
+		Logs.warn('compat: "ccflags" was renamed to "cflags"')
+		self.cflags = self.ccflags
+	return old_post(self)
+TaskGen.task_gen.post = post
+
+def waf_version(*k, **kw):
+	# stub for the removed 1.5 API: warn instead of failing
+	Logs.warn('wrong version (waf_version was removed in waf 1.6)')
+Utils.waf_version = waf_version
+
+
+import os
+ TaskGen feature('c', 'cxx', 'd')
+ TaskGen before('apply_incpaths', 'propagate_uselib_vars')
+ TaskGen after('apply_link', 'process_source')
+def apply_uselib_local(self):
+	"""
+	process the uselib_local attribute
+	execute after apply_link because of the execution order set on 'link_task'
+	"""
+	env = self.env
+	from waflib.Tools.ccroot import stlink_task
+
+	# 1. the case of the libs defined in the project (visit ancestors first)
+	# the ancestors external libraries (uselib) will be prepended
+	self.uselib = self.to_list(getattr(self, 'uselib', []))
+	self.includes = self.to_list(getattr(self, 'includes', []))
+	names = self.to_list(getattr(self, 'uselib_local', []))
+	get = self.bld.get_tgen_by_name
+	seen = set([])
+	tmp = Utils.deque(names) # consume a copy of the list of names
+	if tmp:
+		Logs.warn('compat: "uselib_local" is deprecated, replace by "use"')
+	while tmp:
+		lib_name = tmp.popleft()
+		# visit dependencies only once
+		if lib_name in seen:
+			continue
+
+		y = get(lib_name)
+		y.post()
+		seen.add(lib_name)
+
+		# object has ancestors to process (shared libraries): add them to the end of the list
+		if getattr(y, 'uselib_local', None):
+			for x in self.to_list(getattr(y, 'uselib_local', [])):
+				obj = get(x)
+				obj.post()
+				if getattr(obj, 'link_task', None):
+					if not isinstance(obj.link_task, stlink_task):
+						tmp.append(x)
+
+		# link task and flags
+		if getattr(y, 'link_task', None):
+
+			link_name = y.target[y.target.rfind(os.sep) + 1:]
+			if isinstance(y.link_task, stlink_task):
+				env.append_value('STLIB', [link_name])
+			else:
+				# some linkers can link against programs
+				env.append_value('LIB', [link_name])
+
+			# the order
+			self.link_task.set_run_after(y.link_task)
+
+			# for the recompilation
+			self.link_task.dep_nodes += y.link_task.outputs
+
+			# add the link path too
+			tmp_path = y.link_task.outputs[0].parent.bldpath()
+			if not tmp_path in env['LIBPATH']:
+				env.prepend_value('LIBPATH', [tmp_path])
+
+		# add ancestors uselib too - but only propagate those that have no staticlib defined
+		for v in self.to_list(getattr(y, 'uselib', [])):
+			if not env['STLIB_' + v]:
+				if not v in self.uselib:
+					self.uselib.insert(0, v)
+
+		# if the library task generator provides 'export_includes', add to the include path
+		# the export_includes must be a list of paths relative to the other library
+		if getattr(y, 'export_includes', None):
+			self.includes.extend(y.to_incnodes(y.export_includes))
+
+ TaskGen feature('cprogram', 'cxxprogram', 'cstlib', 'cxxstlib', 'cshlib', 'cxxshlib', 'dprogram', 'dstlib', 'dshlib')
+ TaskGen after('apply_link')
+def apply_objdeps(self):
+	"add the .o files produced by some other object files in the same manner as uselib_local"
+	names = getattr(self, 'add_objects', [])
+	if not names:
+		return
+	names = self.to_list(names)
+
+	get = self.bld.get_tgen_by_name
+	seen = []
+	while names:
+		x = names[0]
+
+		# visit dependencies only once
+		if x in seen:
+			names = names[1:]
+			continue
+
+		# object does not exist ?
+		y = get(x)
+
+		# object has ancestors to process first ? update the list of names
+		if getattr(y, 'add_objects', None):
+			added = 0
+			lst = y.to_list(y.add_objects)
+			lst.reverse()
+			for u in lst:
+				if u in seen: continue
+				added = 1
+				names = [u]+names
+			if added: continue # list of names modified, loop
+
+		# safe to process the current object
+		y.post()
+		seen.append(x)
+
+		for t in getattr(y, 'compiled_tasks', []):
+			self.link_task.inputs.extend(t.outputs)
+
+ TaskGen after('apply_link')
+def process_obj_files(self):
+	if not hasattr(self, 'obj_files'):
+		return
+	for x in self.obj_files:
+		node = self.path.find_resource(x)
+		self.link_task.inputs.append(node)
+
+ TaskGen taskgen_method
+def add_obj_file(self, file):
+	"""Small example on how to link object files as if they were source
+	obj = bld.create_obj('cc')
+	obj.add_obj_file('foo.o')"""
+	if not hasattr(self, 'obj_files'): self.obj_files = []
+	if not 'process_obj_files' in self.meths: self.meths.append('process_obj_files')
+	self.obj_files.append(file)
+
+
+old_define = Configure.ConfigurationContext.__dict__['define']
+
+ Configure conf
+def define(self, key, val, quote=True):
+	old_define(self, key, val, quote)
+	if key.startswith('HAVE_'):
+		self.env[key] = 1
+
+old_undefine = Configure.ConfigurationContext.__dict__['undefine']
+
+ Configure conf
+def undefine(self, key):
+	old_undefine(self, key)
+	if key.startswith('HAVE_'):
+		self.env[key] = 0
+
+# some people might want to use export_incdirs, but it was renamed
+def set_incdirs(self, val):
+	# write-only property: assigning "export_incdirs" forwards to "export_includes"
+	Logs.warn('compat: change "export_incdirs" by "export_includes"')
+	self.export_includes = val
+TaskGen.task_gen.export_incdirs = property(None, set_incdirs)
+
diff --git a/waflib/extras/local_rpath.py b/waflib/extras/local_rpath.py
new file mode 100644
index 0000000..b2507e1
--- /dev/null
+++ b/waflib/extras/local_rpath.py
@@ -0,0 +1,19 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011 (ita)
+
+from waflib.TaskGen import after_method, feature
+
+ after_method('propagate_uselib_vars')
+ feature('cprogram', 'cshlib', 'cxxprogram', 'cxxshlib', 'fcprogram', 'fcshlib')
+def add_rpath_stuff(self):
+	all = self.to_list(getattr(self, 'use', []))
+	while all:
+		name = all.pop()
+		try:
+			tg = self.bld.get_tgen_by_name(name)
+		except:
+			continue
+		self.env.append_value('RPATH', tg.link_task.outputs[0].parent.abspath())
+		all.extend(self.to_list(getattr(tg, 'use', [])))
+
diff --git a/waflib/extras/lru_cache.py b/waflib/extras/lru_cache.py
new file mode 100644
index 0000000..2bc9644
--- /dev/null
+++ b/waflib/extras/lru_cache.py
@@ -0,0 +1,98 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy 2011
+
+import os, shutil, re
+from waflib import Options, Build, Logs
+
+"""
+Apply a least recently used policy to the Waf cache.
+
+For performance reasons, it is called after the build is complete.
+
+We assume that the folders are written atomically
+
+Do export WAFCACHE=/tmp/foo_xyz where xyz represents the cache size in bytes
+If missing, the default cache size will be set to 10GB
+"""
+
+# pattern used to extract the cache size suffix from the WAFCACHE folder name
+re_num = re.compile('[a-zA-Z_-]+(\d+)')
+
+CACHESIZE = 10*1024*1024*1024 # in bytes
+CLEANRATIO = 0.8
+DIRSIZE = 4096
+
+def compile(self):
+	# replacement for BuildContext.compile: run the real build, then trim the cache
+	if Options.cache_global and not Options.options.nocache:
+		try:
+			os.makedirs(Options.cache_global)
+		except:
+			# best-effort: the folder probably exists already
+			pass
+
+	try:
+		self.raw_compile()
+	finally:
+		# sweep even when the build fails, so the cache never grows unbounded
+		if Options.cache_global and not Options.options.nocache:
+			self.sweep()
+
+def sweep(self):
+	"""
+	Apply the LRU policy: when the cache exceeds CACHESIZE, delete the
+	oldest entries until usage drops below CACHESIZE * CLEANRATIO.
+	"""
+	global CACHESIZE
+	CACHEDIR = Options.cache_global
+
+	# get the cache max size from the WAFCACHE filename
+	# NOTE(review): this local re_num (no '-' in the class) shadows the module-level one
+	re_num = re.compile('[a-zA-Z_]+(\d+)')
+	val = re_num.sub('\\1', os.path.basename(Options.cache_global))
+	try:
+		CACHESIZE = int(val)
+	except:
+		# no numeric suffix: keep the default size
+		pass
+
+	# map folder names to timestamps
+	flist = {}
+	for x in os.listdir(CACHEDIR):
+		j = os.path.join(CACHEDIR, x)
+		if os.path.isdir(j) and len(x) == 64: # dir names are md5 hexdigests
+			flist[x] = [os.stat(j).st_mtime, 0]
+
+	# second pass: compute the size of each entry
+	for (x, v) in flist.items():
+		cnt = DIRSIZE # each entry takes 4kB
+		d = os.path.join(CACHEDIR, x)
+		for k in os.listdir(d):
+			cnt += os.stat(os.path.join(d, k)).st_size
+		flist[x][1] = cnt
+
+	total = sum([x[1] for x in flist.values()])
+	Logs.debug('lru: Cache size is %r' % total)
+
+	if total >= CACHESIZE:
+		Logs.debug('lru: Trimming the cache since %r > %r' % (total, CACHESIZE))
+
+		# make a list to sort the folders by timestamp
+		lst = [(p, v[0], v[1]) for (p, v) in flist.items()]
+		lst.sort(key=lambda x: x[1]) # sort by timestamp
+		lst.reverse()
+
+		while total >= CACHESIZE * CLEANRATIO:
+			# pop() takes from the end of the reversed list => oldest entry first
+			(k, t, s) = lst.pop()
+			p = os.path.join(CACHEDIR, k)
+			v = p + '.del'
+			try:
+				# rename first so concurrent processes do not read a half-deleted entry
+				os.rename(p, v)
+			except:
+				# someone already did it
+				pass
+			else:
+				try:
+					shutil.rmtree(v)
+				except:
+					# this should not happen, but who knows?
+					Logs.warn('If you ever see this message, report it (%r)' % v)
+			total -= s
+			del flist[k]
+
+	Logs.debug('lru: Total at the end %r' % total)
+
+# monkey-patch the build context: keep the original compile as raw_compile
+Build.BuildContext.raw_compile = Build.BuildContext.compile
+Build.BuildContext.compile = compile
+Build.BuildContext.sweep = sweep
+
diff --git a/waflib/extras/make.py b/waflib/extras/make.py
new file mode 100644
index 0000000..57e841d
--- /dev/null
+++ b/waflib/extras/make.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011 (ita)
+
+"""
+A make-like way of executing the build, following the relationships between inputs/outputs
+
+This algorithm will lead to slower builds, will not be as flexible as "waf build", but
+it might be useful for building data files (?)
+
+It is likely to break in the following cases:
+- files are created dynamically (no inputs or outputs)
+- headers
+- building two files from different groups
+"""
+
+import re
+from waflib import Options, Task, Logs
+from waflib.Build import BuildContext
+
+class MakeContext(BuildContext):
+	'''executes tasks in a step-by-step manner, following dependencies between inputs/outputs'''
+	cmd = 'make'
+	fun = 'build'
+
+	def __init__(self, **kw):
+		super(MakeContext, self).__init__(**kw)
+		# comma-separated file patterns from the command line (--files)
+		self.files = Options.options.files
+
+	def get_build_iterator(self):
+		if not self.files:
+			# no patterns given: defer to the regular build
+			# NOTE(review): this yields the parent's iterator object itself, not
+			# its items — confirm the runner consumes it as intended
+			while 1:
+				yield super(MakeContext, self).get_build_iterator()
+
+		for g in self.groups:
+			# post all task generators in the group so their tasks exist
+			for tg in g:
+				try:
+					f = tg.post
+				except AttributeError:
+					pass
+				else:
+					f()
+
+			# provides: node -> tasks creating it; uses: node -> tasks reading it
+			provides = {}
+			uses = {}
+			all_tasks = []
+			tasks = []
+			for pat in self.files.split(','):
+				matcher = self.get_matcher(pat)
+				for tg in g:
+					if isinstance(tg, Task.TaskBase):
+						lst = [tg]
+					else:
+						lst = tg.tasks
+					for tsk in lst:
+						all_tasks.append(tsk)
+
+						do_exec = False
+						for node in getattr(tsk, 'inputs', []):
+							try:
+								uses[node].append(tsk)
+							except:
+								uses[node] = [tsk]
+
+							if matcher(node, output=False):
+								do_exec = True
+								break
+
+						for node in getattr(tsk, 'outputs', []):
+							try:
+								provides[node].append(tsk)
+							except:
+								provides[node] = [tsk]
+
+							if matcher(node, output=True):
+								do_exec = True
+								break
+						if do_exec:
+							tasks.append(tsk)
+
+			# so we have the tasks that we need to process, the list of all tasks,
+			# the map of the tasks providing nodes, and the map of tasks using nodes
+
+			if not tasks:
+				# if there are no tasks matching, return everything in the current group
+				result = all_tasks
+			else:
+				# this is like a big filter...
+				result = set([])
+				seen = set([])
+				cur = set(tasks)
+				while cur:
+					# pull in the producers of every input node, transitively
+					result |= cur
+					tosee = set([])
+					for tsk in cur:
+						for node in getattr(tsk, 'inputs', []):
+							if node in seen:
+								continue
+							seen.add(node)
+							tosee |= set(provides.get(node, []))
+					cur = tosee
+				result = list(result)
+
+			Task.set_file_constraints(result)
+			Task.set_precedence_constraints(result)
+			yield result
+
+		# keep yielding empty batches once all groups are done
+		while 1:
+			yield []
+
+	def get_matcher(self, pat):
+		# this returns a function
+		# 'in:'/'out:' prefixes restrict matching to task inputs or outputs
+		inn = True
+		out = True
+		if pat.startswith('in:'):
+			out = False
+			pat = pat.replace('in:', '')
+		elif pat.startswith('out:'):
+			inn = False
+			pat = pat.replace('out:', '')
+
+		# exact node if the pattern resolves to a file, else a regex on abspath
+		anode = self.root.find_node(pat)
+		pattern = None
+		if not anode:
+			if not pat.startswith('^'):
+				pat = '^.+?%s' % pat
+			if not pat.endswith('$'):
+				pat = '%s$' % pat
+			pattern = re.compile(pat)
+
+		def match(node, output):
+			if output == True and not out:
+				return False
+			if output == False and not inn:
+				return False
+
+			if anode:
+				return anode == node
+			else:
+				return pattern.match(node.abspath())
+		return match
+
diff --git a/waflib/extras/md5_tstamp.py b/waflib/extras/md5_tstamp.py
new file mode 100644
index 0000000..8c67bcc
--- /dev/null
+++ b/waflib/extras/md5_tstamp.py
@@ -0,0 +1,69 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+"""
+Store some values on the buildcontext mapping file paths to
+stat values and md5 values (timestamp + md5)
+this way the md5 hashes are computed only when timestamp change (can be faster)
+There is usually little or no gain from enabling this, but it can be used to enable
+the second level cache with timestamps (WAFCACHE)
+
+You may have to run distclean or to remove the build directory before enabling/disabling
+this hashing scheme
+"""
+
+import os, stat
+try: import cPickle
+except: import pickle as cPickle
+from waflib import Utils, Build, Context
+
+STRONGEST = True
+Context.DBFILE += '_md5tstamp'
+
+Build.hashes_md5_tstamp = {}
+Build.SAVED_ATTRS.append('hashes_md5_tstamp')
+def store(self):
+	# save the hash cache as part of the default pickle file
+	self.hashes_md5_tstamp = Build.hashes_md5_tstamp
+	self.store_real()
+Build.BuildContext.store_real = Build.BuildContext.store
+Build.BuildContext.store      = store
+
+def restore(self):
+	# we need a module variable for h_file below
+	self.restore_real()
+	try:
+		Build.hashes_md5_tstamp = self.hashes_md5_tstamp or {}
+	except Exception as e:
+		Build.hashes_md5_tstamp = {}
+Build.BuildContext.restore_real = Build.BuildContext.restore
+Build.BuildContext.restore      = restore
+
+def h_file(filename):
+	st = os.stat(filename)
+	if stat.S_ISDIR(st[stat.ST_MODE]): raise IOError('not a file')
+
+	if filename in Build.hashes_md5_tstamp:
+		if Build.hashes_md5_tstamp[filename][0] == str(st.st_mtime):
+			return Build.hashes_md5_tstamp[filename][1]
+	m = Utils.md5()
+
+	if STRONGEST:
+		f = open(filename, 'rb')
+		read = 1
+		try:
+			while read:
+				read = f.read(100000)
+				m.update(read)
+		finally:
+			f.close()
+	else:
+		m.update(str(st.st_mtime))
+		m.update(str(st.st_size))
+		m.update(filename)
+
+	# ensure that the cache is overwritten
+	Build.hashes_md5_tstamp[filename] = (str(st.st_mtime), m.digest())
+	return m.digest()
+Utils.h_file = h_file
+
diff --git a/waflib/extras/misc.py b/waflib/extras/misc.py
new file mode 100644
index 0000000..e8620fb
--- /dev/null
+++ b/waflib/extras/misc.py
@@ -0,0 +1,416 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2010 (ita)
+
+"""
+This tool is totally deprecated
+
+Try using:
+	.pc.in files for .pc files
+	the feature intltool_in - see demos/intltool
+	make-like rules
+"""
+
+import shutil, re, os
+from waflib import TaskGen, Node, Task, Utils, Build, Errors
+from waflib.TaskGen import feature, after_method, before_method
+from waflib.Logs import debug
+
+def copy_attrs(orig, dest, names, only_if_set=False):
+	"""
+	copy class attributes from an object to another
+	"""
+	for a in Utils.to_list(names):
+		u = getattr(orig, a, ())
+		if u or not only_if_set:
+			setattr(dest, a, u)
+
+def copy_func(tsk):
+	"Make a file copy. This might be used to make other kinds of file processing (even calling a compiler is possible)"
+	env = tsk.env
+	infile = tsk.inputs[0].abspath()
+	outfile = tsk.outputs[0].abspath()
+	try:
+		shutil.copy2(infile, outfile)
+	except (OSError, IOError):
+		return 1
+	else:
+		if tsk.chmod: os.chmod(outfile, tsk.chmod)
+		return 0
+
+def action_process_file_func(tsk):
+	"Ask the function attached to the task to process it"
+	if not tsk.fun: raise Errors.WafError('task must have a function attached to it for copy_func to work!')
+	return tsk.fun(tsk)
+
+@feature('cmd')
+def apply_cmd(self):
+	"call a command everytime"
+	if not self.fun: raise Errors.WafError('cmdobj needs a function!')
+	tsk = Task.TaskBase()
+	tsk.fun = self.fun
+	tsk.env = self.env
+	self.tasks.append(tsk)
+	tsk.install_path = self.install_path
+
+@feature('copy')
+@before_method('process_source')
+def apply_copy(self):
+	Utils.def_attrs(self, fun=copy_func)
+	self.default_install_path = 0
+
+	lst = self.to_list(self.source)
+	self.meths.remove('process_source')
+
+	for filename in lst:
+		node = self.path.find_resource(filename)
+		if not node: raise Errors.WafError('cannot find input file %s for processing' % filename)
+
+		target = self.target
+		if not target or len(lst)>1: target = node.name
+
+		# TODO the file path may be incorrect
+		newnode = self.path.find_or_declare(target)
+
+		tsk = self.create_task('copy', node, newnode)
+		tsk.fun = self.fun
+		tsk.chmod = getattr(self, 'chmod', Utils.O644)
+
+		if not tsk.env:
+			tsk.debug()
+			raise Errors.WafError('task without an environment')
+
+def subst_func(tsk):
+	"Substitutes variables in a .in file"
+
+	m4_re = re.compile('@(\w+)@', re.M)
+
+	code = tsk.inputs[0].read() #Utils.readf(infile)
+
+	# replace all % by %% to prevent errors by % signs in the input file while string formatting
+	code = code.replace('%', '%%')
+
+	s = m4_re.sub(r'%(\1)s', code)
+
+	env = tsk.env
+	di = getattr(tsk, 'dict', {}) or getattr(tsk.generator, 'dict', {})
+	if not di:
+		names = m4_re.findall(code)
+		for i in names:
+			di[i] = env.get_flat(i) or env.get_flat(i.upper())
+
+	tsk.outputs[0].write(s % di)
+
+@feature('subst')
+@before_method('process_source')
+def apply_subst(self):
+	Utils.def_attrs(self, fun=subst_func)
+	lst = self.to_list(self.source)
+	self.meths.remove('process_source')
+
+	self.dict = getattr(self, 'dict', {})
+
+	for filename in lst:
+		node = self.path.find_resource(filename)
+		if not node: raise Errors.WafError('cannot find input file %s for processing' % filename)
+
+		if self.target:
+			newnode = self.path.find_or_declare(self.target)
+		else:
+			newnode = node.change_ext('')
+
+		try:
+			self.dict = self.dict.get_merged_dict()
+		except AttributeError:
+			pass
+
+		if self.dict and not self.env['DICT_HASH']:
+			self.env = self.env.derive()
+			keys = list(self.dict.keys())
+			keys.sort()
+			lst = [self.dict[x] for x in keys]
+			self.env['DICT_HASH'] = str(Utils.h_list(lst))
+
+		tsk = self.create_task('copy', node, newnode)
+		tsk.fun = self.fun
+		tsk.dict = self.dict
+		tsk.dep_vars = ['DICT_HASH']
+		tsk.chmod = getattr(self, 'chmod', Utils.O644)
+
+		if not tsk.env:
+			tsk.debug()
+			raise Errors.WafError('task without an environment')
+
+####################
+## command-output ####
+####################
+
+class cmd_arg(object):
+	"""command-output arguments for representing files or folders"""
+	def __init__(self, name, template='%s'):
+		self.name = name
+		self.template = template
+		self.node = None
+
+class input_file(cmd_arg):
+	def find_node(self, base_path):
+		assert isinstance(base_path, Node.Node)
+		self.node = base_path.find_resource(self.name)
+		if self.node is None:
+			raise Errors.WafError("Input file %s not found in %s" % (self.name, base_path))  # fixed: format string had one %s for two arguments (raised TypeError, not WafError)
+
+	def get_path(self, env, absolute):
+		if absolute:
+			return self.template % self.node.abspath()
+		else:
+			return self.template % self.node.srcpath()
+
+class output_file(cmd_arg):
+	def find_node(self, base_path):
+		assert isinstance(base_path, Node.Node)
+		self.node = base_path.find_or_declare(self.name)
+		if self.node is None:
+			raise Errors.WafError("Output file %s not found in %s" % (self.name, base_path))  # fixed: format string had one %s for two arguments (raised TypeError, not WafError)
+
+	def get_path(self, env, absolute):
+		if absolute:
+			return self.template % self.node.abspath()
+		else:
+			return self.template % self.node.bldpath()
+
+class cmd_dir_arg(cmd_arg):
+	def find_node(self, base_path):
+		assert isinstance(base_path, Node.Node)
+		self.node = base_path.find_dir(self.name)
+		if self.node is None:
+			raise Errors.WafError("Directory %s not found in %s" % (self.name, base_path))  # fixed: format string had one %s for two arguments (raised TypeError, not WafError)
+
+class input_dir(cmd_dir_arg):
+	def get_path(self, dummy_env, dummy_absolute):
+		return self.template % self.node.abspath()
+
+class output_dir(cmd_dir_arg):
+	def get_path(self, env, dummy_absolute):
+		return self.template % self.node.abspath()
+
+
+class command_output(Task.Task):
+	color = "BLUE"
+	def __init__(self, env, command, command_node, command_args, stdin, stdout, cwd, os_env, stderr):
+		Task.Task.__init__(self, env=env)
+		assert isinstance(command, (str, Node.Node))
+		self.command = command
+		self.command_args = command_args
+		self.stdin = stdin
+		self.stdout = stdout
+		self.cwd = cwd
+		self.os_env = os_env
+		self.stderr = stderr
+
+		if command_node is not None: self.dep_nodes = [command_node]
+		self.dep_vars = [] # additional environment variables to look
+
+	def run(self):
+		task = self
+		#assert len(task.inputs) > 0
+
+		def input_path(node, template):
+			if task.cwd is None:
+				return template % node.bldpath()
+			else:
+				return template % node.abspath()
+		def output_path(node, template):
+			fun = node.abspath
+			if task.cwd is None: fun = node.bldpath
+			return template % fun()
+
+		if isinstance(task.command, Node.Node):
+			argv = [input_path(task.command, '%s')]
+		else:
+			argv = [task.command]
+
+		for arg in task.command_args:
+			if isinstance(arg, str):
+				argv.append(arg)
+			else:
+				assert isinstance(arg, cmd_arg)
+				argv.append(arg.get_path(task.env, (task.cwd is not None)))
+
+		if task.stdin:
+			stdin = open(input_path(task.stdin, '%s'))
+		else:
+			stdin = None
+
+		if task.stdout:
+			stdout = open(output_path(task.stdout, '%s'), "w")
+		else:
+			stdout = None
+
+		if task.stderr:
+			stderr = open(output_path(task.stderr, '%s'), "w")
+		else:
+			stderr = None
+
+		if task.cwd is None:
+			cwd = ('None (actually %r)' % os.getcwd())
+		else:
+			cwd = repr(task.cwd)
+		debug("command-output: cwd=%s, stdin=%r, stdout=%r, argv=%r" %
+			     (cwd, stdin, stdout, argv))
+
+		if task.os_env is None:
+			os_env = os.environ
+		else:
+			os_env = task.os_env
+		command = Utils.subprocess.Popen(argv, stdin=stdin, stdout=stdout, stderr=stderr, cwd=task.cwd, env=os_env)
+		return command.wait()
+
+@feature('command-output')
+def init_cmd_output(self):
+	Utils.def_attrs(self,
+		stdin = None,
+		stdout = None,
+		stderr = None,
+		# the command to execute
+		command = None,
+
+		# whether it is an external command; otherwise it is assumed
+		# to be an executable binary or script that lives in the
+		# source or build tree.
+		command_is_external = False,
+
+		# extra parameters (argv) to pass to the command (excluding
+		# the command itself)
+		argv = [],
+
+		# dependencies to other objects -> this is probably not what you want (ita)
+		# values must be 'task_gen' instances (not names!)
+		dependencies = [],
+
+		# dependencies on env variable contents
+		dep_vars = [],
+
+		# input files that are implicit, i.e. they are not
+		# stdin, nor are they mentioned explicitly in argv
+		hidden_inputs = [],
+
+		# output files that are implicit, i.e. they are not
+		# stdout, nor are they mentioned explicitly in argv
+		hidden_outputs = [],
+
+		# change the subprocess to this cwd (must use obj.input_dir() or output_dir() here)
+		cwd = None,
+
+		# OS environment variables to pass to the subprocess
+		# if None, use the default environment variables unchanged
+		os_env = None)
+
+@feature('command-output')
+@after_method('init_cmd_output')
+def apply_cmd_output(self):
+	if self.command is None:
+		raise Errors.WafError("command-output missing command")
+	if self.command_is_external:
+		cmd = self.command
+		cmd_node = None
+	else:
+		cmd_node = self.path.find_resource(self.command)
+		assert cmd_node is not None, ('''Could not find command '%s' in source tree.
+Hint: if this is an external command,
+use command_is_external=True''') % (self.command,)
+		cmd = cmd_node
+
+	if self.cwd is None:
+		cwd = None
+	else:
+		assert isinstance(self.cwd, cmd_dir_arg)  # fixed: was `isinstance(cwd, CmdDirArg)` -- unbound local and undefined class name
+		self.cwd.find_node(self.path)
+
+	args = []
+	inputs = []
+	outputs = []
+
+	for arg in self.argv:
+		if isinstance(arg, cmd_arg):
+			arg.find_node(self.path)
+			if isinstance(arg, input_file):
+				inputs.append(arg.node)
+			if isinstance(arg, output_file):
+				outputs.append(arg.node)
+
+	if self.stdout is None:
+		stdout = None
+	else:
+		assert isinstance(self.stdout, str)
+		stdout = self.path.find_or_declare(self.stdout)
+		if stdout is None:
+			raise Errors.WafError("File %s not found" % (self.stdout,))
+		outputs.append(stdout)
+
+	if self.stderr is None:
+		stderr = None
+	else:
+		assert isinstance(self.stderr, str)
+		stderr = self.path.find_or_declare(self.stderr)
+		if stderr is None:
+			raise Errors.WafError("File %s not found" % (self.stderr,))
+		outputs.append(stderr)
+
+	if self.stdin is None:
+		stdin = None
+	else:
+		assert isinstance(self.stdin, str)
+		stdin = self.path.find_resource(self.stdin)
+		if stdin is None:
+			raise Errors.WafError("File %s not found" % (self.stdin,))
+		inputs.append(stdin)
+
+	for hidden_input in self.to_list(self.hidden_inputs):
+		node = self.path.find_resource(hidden_input)
+		if node is None:
+			raise Errors.WafError("File %s not found in dir %s" % (hidden_input, self.path))
+		inputs.append(node)
+
+	for hidden_output in self.to_list(self.hidden_outputs):
+		node = self.path.find_or_declare(hidden_output)
+		if node is None:
+			raise Errors.WafError("File %s not found in dir %s" % (hidden_output, self.path))
+		outputs.append(node)
+
+	if not (inputs or getattr(self, 'no_inputs', None)):
+		raise Errors.WafError('command-output objects must have at least one input file or give self.no_inputs')
+	if not (outputs or getattr(self, 'no_outputs', None)):
+		raise Errors.WafError('command-output objects must have at least one output file or give self.no_outputs')
+
+	cwd = self.bld.variant_dir
+	task = command_output(self.env, cmd, cmd_node, self.argv, stdin, stdout, cwd, self.os_env, stderr)
+	task.generator = self
+	copy_attrs(self, task, 'before after ext_in ext_out', only_if_set=True)
+	self.tasks.append(task)
+
+	task.inputs = inputs
+	task.outputs = outputs
+	task.dep_vars = self.to_list(self.dep_vars)
+
+	for dep in self.dependencies:
+		assert dep is not self
+		dep.post()
+		for dep_task in dep.tasks:
+			task.set_run_after(dep_task)
+
+	if not task.inputs:
+		# the case for svnversion, always run, and update the output nodes
+		task.runnable_status = type(Task.TaskBase.run)(runnable_status, task, task.__class__) # always run
+		task.post_run = type(Task.TaskBase.run)(post_run, task, task.__class__)
+
+	# TODO the case with no outputs?
+
+def post_run(self):
+	for x in self.outputs:
+		x.sig = Utils.h_file(x.abspath())
+
+def runnable_status(self):
+	return self.RUN_ME
+
+Task.task_factory('copy', vars=[], func=action_process_file_func)
+
diff --git a/waflib/extras/objcopy.py b/waflib/extras/objcopy.py
new file mode 100644
index 0000000..923a7f2
--- /dev/null
+++ b/waflib/extras/objcopy.py
@@ -0,0 +1,54 @@
+#!/usr/bin/python
+# Grygoriy Fuchedzhy 2010
+
+"""
+Support for converting linked targets to ihex, srec or binary files using
+objcopy. Use the 'objcopy' feature in conjuction with the 'cc' or 'cxx'
+feature. The 'objcopy' feature uses the following attributes:
+
+objcopy_bfdname		Target object format name (eg. ihex, srec, binary).
+					   Defaults to ihex.
+objcopy_target		 File name used for objcopy output. This defaults to the
+					   target name with objcopy_bfdname as extension.
+objcopy_install_path   Install path for objcopy_target file. Defaults to ${PREFIX}/fw.
+objcopy_flags		  Additional flags passed to objcopy.
+"""
+
+from waflib.Utils import def_attrs
+from waflib import Task
+from waflib.TaskGen import feature, after_method
+
+class objcopy(Task.Task):
+	run_str = '${OBJCOPY} -O ${TARGET_BFDNAME} ${OBJCOPYFLAGS} ${SRC} ${TGT}'
+	color   = 'CYAN'
+
+@feature('objcopy')
+@after_method('apply_link')
+def objcopy(self):
+	def_attrs(self,
+	   objcopy_bfdname = 'ihex',
+	   objcopy_target = None,
+	   objcopy_install_path = "${PREFIX}/firmware",
+	   objcopy_flags = '')
+
+	link_output = self.link_task.outputs[0]
+	if not self.objcopy_target:
+		self.objcopy_target = link_output.change_ext('.' + self.objcopy_bfdname).name
+	task = self.create_task('objcopy',
+							src=link_output,
+							tgt=self.path.find_or_declare(self.objcopy_target))
+
+	task.env.append_unique('TARGET_BFDNAME', self.objcopy_bfdname)
+	try:
+		task.env.append_unique('OBJCOPYFLAGS', getattr(self, 'objcopy_flags'))
+	except AttributeError:
+		pass
+
+	if self.objcopy_install_path:
+		self.bld.install_files(self.objcopy_install_path,
+							   task.outputs[0],
+							   env=task.env.derive())
+
+def configure(ctx):
+	objcopy = ctx.find_program('objcopy', var='OBJCOPY', mandatory=True)
+
diff --git a/waflib/extras/ocaml.py b/waflib/extras/ocaml.py
new file mode 100644
index 0000000..5cbe5b7
--- /dev/null
+++ b/waflib/extras/ocaml.py
@@ -0,0 +1,326 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2010 (ita)
+
+"ocaml support"
+
+import os, re
+from waflib import TaskGen, Utils, Task, Build
+from waflib.Logs import error
+from waflib.TaskGen import feature, before_method, after_method, extension
+
+EXT_MLL = ['.mll']
+EXT_MLY = ['.mly']
+EXT_MLI = ['.mli']
+EXT_MLC = ['.c']
+EXT_ML  = ['.ml']
+
+open_re = re.compile('^\s*open\s+([a-zA-Z]+)(;;){0,1}$', re.M)
+foo = re.compile(r"""(\(\*)|(\*\))|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^()*"'\\]*)""", re.M)
+def filter_comments(txt):
+	meh = [0]
+	def repl(m):
+		if m.group(1): meh[0] += 1
+		elif m.group(2): meh[0] -= 1
+		elif not meh[0]: return m.group(0)
+		return ''
+	return foo.sub(repl, txt)
+
+def scan(self):
+	node = self.inputs[0]
+	code = filter_comments(node.read())
+
+	global open_re
+	names = []
+	import_iterator = open_re.finditer(code)
+	if import_iterator:
+		for import_match in import_iterator:
+			names.append(import_match.group(1))
+	found_lst = []
+	raw_lst = []
+	for name in names:
+		nd = None
+		for x in self.incpaths:
+			nd = x.find_resource(name.lower()+'.ml')
+			if not nd: nd = x.find_resource(name+'.ml')
+			if nd:
+				found_lst.append(nd)
+				break
+		else:
+			raw_lst.append(name)
+
+	return (found_lst, raw_lst)
+
+native_lst=['native', 'all', 'c_object']
+bytecode_lst=['bytecode', 'all']
+
+@feature('ocaml')
+def init_ml(self):
+	Utils.def_attrs(self,
+		type = 'all',
+		incpaths_lst = [],
+		bld_incpaths_lst = [],
+		mlltasks = [],
+		mlytasks = [],
+		mlitasks = [],
+		native_tasks = [],
+		bytecode_tasks = [],
+		linktasks = [],
+		bytecode_env = None,
+		native_env = None,
+		compiled_tasks = [],
+		includes = '',
+		uselib = '',
+		are_deps_set = 0)
+
+@feature('ocaml')
+@after_method('init_ml')
+def init_envs_ml(self):
+
+	self.islibrary = getattr(self, 'islibrary', False)
+
+	global native_lst, bytecode_lst
+	self.native_env = None
+	if self.type in native_lst:
+		self.native_env = self.env.derive()
+		if self.islibrary: self.native_env['OCALINKFLAGS']   = '-a'
+
+	self.bytecode_env = None
+	if self.type in bytecode_lst:
+		self.bytecode_env = self.env.derive()
+		if self.islibrary: self.bytecode_env['OCALINKFLAGS'] = '-a'
+
+	if self.type == 'c_object':
+		self.native_env.append_unique('OCALINKFLAGS_OPT', '-output-obj')
+
+@feature('ocaml')
+@before_method('apply_vars_ml')
+@after_method('init_envs_ml')
+def apply_incpaths_ml(self):
+	inc_lst = self.includes.split()
+	lst = self.incpaths_lst
+	for dir in inc_lst:
+		node = self.path.find_dir(dir)
+		if not node:
+			error("node not found: " + str(dir))
+			continue
+		if not node in lst:
+			lst.append(node)
+		self.bld_incpaths_lst.append(node)
+	# now the nodes are added to self.incpaths_lst
+
+@feature('ocaml')
+@before_method('process_source')
+def apply_vars_ml(self):
+	for i in self.incpaths_lst:
+		if self.bytecode_env:
+			app = self.bytecode_env.append_value
+			app('OCAMLPATH', ['-I', i.bldpath(), '-I', i.srcpath()])
+
+		if self.native_env:
+			app = self.native_env.append_value
+			app('OCAMLPATH', ['-I', i.bldpath(), '-I', i.srcpath()])
+
+	varnames = ['INCLUDES', 'OCAMLFLAGS', 'OCALINKFLAGS', 'OCALINKFLAGS_OPT']
+	for name in self.uselib.split():
+		for vname in varnames:
+			cnt = self.env[vname+'_'+name]
+			if cnt:
+				if self.bytecode_env: self.bytecode_env.append_value(vname, cnt)
+				if self.native_env: self.native_env.append_value(vname, cnt)
+
+@feature('ocaml')
+@after_method('process_source')
+def apply_link_ml(self):
+
+	if self.bytecode_env:
+		ext = self.islibrary and '.cma' or '.run'
+
+		linktask = self.create_task('ocalink')
+		linktask.bytecode = 1
+		linktask.set_outputs(self.path.find_or_declare(self.target + ext))
+		linktask.env = self.bytecode_env
+		self.linktasks.append(linktask)
+
+	if self.native_env:
+		if self.type == 'c_object': ext = '.o'
+		elif self.islibrary: ext = '.cmxa'
+		else: ext = ''
+
+		linktask = self.create_task('ocalinkx')
+		linktask.set_outputs(self.path.find_or_declare(self.target + ext))
+		linktask.env = self.native_env
+		self.linktasks.append(linktask)
+
+		# we produce a .o file to be used by gcc
+		self.compiled_tasks.append(linktask)
+
+@extension(*EXT_MLL)
+def mll_hook(self, node):
+	mll_task = self.create_task('ocamllex', node, node.change_ext('.ml'))
+	mll_task.env = self.native_env.derive()
+	self.mlltasks.append(mll_task)
+
+	self.source.append(mll_task.outputs[0])
+
+@extension(*EXT_MLY)
+def mly_hook(self, node):
+	mly_task = self.create_task('ocamlyacc', node, [node.change_ext('.ml'), node.change_ext('.mli')])
+	mly_task.env = self.native_env.derive()
+	self.mlytasks.append(mly_task)
+	self.source.append(mly_task.outputs[0])
+
+	task = self.create_task('ocamlcmi', mly_task.outputs[1], mly_task.outputs[1].change_ext('.cmi'))
+	task.env = self.native_env.derive()
+
+@extension(*EXT_MLI)
+def mli_hook(self, node):
+	task = self.create_task('ocamlcmi', node, node.change_ext('.cmi'))
+	task.env = self.native_env.derive()
+	self.mlitasks.append(task)
+
+@extension(*EXT_MLC)
+def mlc_hook(self, node):
+	task = self.create_task('ocamlcc', node, node.change_ext('.o'))
+	task.env = self.native_env.derive()
+	self.compiled_tasks.append(task)
+
+@extension(*EXT_ML)
+def ml_hook(self, node):
+	if self.native_env:
+		task = self.create_task('ocamlx', node, node.change_ext('.cmx'))
+		task.env = self.native_env.derive()
+		task.incpaths = self.bld_incpaths_lst
+		self.native_tasks.append(task)
+
+	if self.bytecode_env:
+		task = self.create_task('ocaml', node, node.change_ext('.cmo'))
+		task.env = self.bytecode_env.derive()
+		task.bytecode = 1
+		task.incpaths = self.bld_incpaths_lst
+		self.bytecode_tasks.append(task)
+
+def compile_may_start(self):
+
+	if not getattr(self, 'flag_deps', ''):
+		self.flag_deps = 1
+
+		# the evil part is that we can only compute the dependencies after the
+		# source files can be read (this means actually producing the source files)
+		if getattr(self, 'bytecode', ''): alltasks = self.generator.bytecode_tasks
+		else: alltasks = self.generator.native_tasks
+
+		self.signature() # ensure that files are scanned - unfortunately
+		tree = self.generator.bld
+		env = self.env
+		for node in self.inputs:
+			lst = tree.node_deps[self.uid()]
+			for depnode in lst:
+				for t in alltasks:
+					if t == self: continue
+					if depnode in t.inputs:
+						self.set_run_after(t)
+
+		# TODO necessary to get the signature right - for now
+		delattr(self, 'cache_sig')
+		self.signature()
+
+	return Task.Task.runnable_status(self)
+
+class ocamlx(Task.Task):
+	"""native caml compilation"""
+	color   = 'GREEN'
+	run_str = '${OCAMLOPT} ${OCAMLPATH} ${OCAMLFLAGS} ${OCAMLINCLUDES} -c -o ${TGT} ${SRC}'
+	scan    = scan
+	runnable_status = compile_may_start
+
+class ocaml(Task.Task):
+	"""bytecode caml compilation"""
+	color   = 'GREEN'
+	run_str = '${OCAMLC} ${OCAMLPATH} ${OCAMLFLAGS} ${OCAMLINCLUDES} -c -o ${TGT} ${SRC}'
+	scan    = scan
+	runnable_status = compile_may_start
+
+class ocamlcmi(Task.Task):
+	"""interface generator (the .i files?)"""
+	color   = 'BLUE'
+	run_str = '${OCAMLC} ${OCAMLPATH} ${OCAMLINCLUDES} -o ${TGT} -c ${SRC}'
+	before  = ['ocamlcc', 'ocaml', 'ocamlcc']
+
+class ocamlcc(Task.Task):
+	"""ocaml to c interfaces"""
+	color   = 'GREEN'
+	run_str = 'cd ${TGT[0].bld_dir()} && ${OCAMLOPT} ${OCAMLFLAGS} ${OCAMLPATH} ${OCAMLINCLUDES} -c ${SRC[0].abspath()}'
+
+class ocamllex(Task.Task):
+	"""lexical generator"""
+	color   = 'BLUE'
+	run_str = '${OCAMLLEX} ${SRC} -o ${TGT}'
+	before  = ['ocamlcmi', 'ocaml', 'ocamlcc']
+
+class ocamlyacc(Task.Task):
+	"""parser generator"""
+	color   = 'BLUE'
+	run_str = '${OCAMLYACC} -b ${TGT[0].bld_base(env)} ${SRC}'
+	before  = ['ocamlcmi', 'ocaml', 'ocamlcc']
+
+def link_may_start(self):
+
+	if getattr(self, 'bytecode', 0): alltasks = self.generator.bytecode_tasks
+	else: alltasks = self.generator.native_tasks
+
+	for x in alltasks:
+		if not x.hasrun:
+			return Task.ASK_LATER
+
+	if not getattr(self, 'order', ''):
+
+		# now reorder the inputs given the task dependencies
+		# this part is difficult, we do not have a total order on the tasks
+		# if the dependencies are wrong, this may not stop
+		seen = []
+		pendant = []+alltasks
+		while pendant:
+			task = pendant.pop(0)
+			if task in seen: continue
+			for x in task.run_after:
+				if not x in seen:
+					pendant.append(task)
+					break
+			else:
+				seen.append(task)
+		self.inputs = [x.outputs[0] for x in seen]
+		self.order = 1
+	return Task.Task.runnable_status(self)
+
+class ocalink(Task.Task):
+	"""bytecode caml link"""
+	color   = 'YELLOW'
+	run_str = '${OCAMLC} -o ${TGT} ${OCAMLINCLUDES} ${OCALINKFLAGS} ${SRC}'
+	runnable_status = link_may_start
+	after = ['ocaml', 'ocamlcc']
+
+class ocalinkx(Task.Task):
+	"""native caml link"""
+	color   = 'YELLOW'
+	run_str = '${OCAMLOPT} -o ${TGT} ${OCAMLINCLUDES} ${OCALINKFLAGS_OPT} ${SRC}'
+	runnable_status = link_may_start
+	after = ['ocamlx', 'ocamlcc']
+
+def configure(conf):
+	opt = conf.find_program('ocamlopt', var='OCAMLOPT', mandatory=False)
+	occ = conf.find_program('ocamlc', var='OCAMLC', mandatory=False)
+	if (not opt) or (not occ):
+		conf.fatal('The objective caml compiler was not found:\ninstall it or make it available in your PATH')
+
+	v = conf.env
+	v['OCAMLC']       = occ
+	v['OCAMLOPT']     = opt
+	v['OCAMLLEX']     = conf.find_program('ocamllex', var='OCAMLLEX', mandatory=False)
+	v['OCAMLYACC']    = conf.find_program('ocamlyacc', var='OCAMLYACC', mandatory=False)
+	v['OCAMLFLAGS']   = ''
+	v['OCAMLLIB']     = conf.cmd_and_log(conf.env['OCAMLC']+' -where').strip()+os.sep
+	v['LIBPATH_OCAML'] = conf.cmd_and_log(conf.env['OCAMLC']+' -where').strip()+os.sep
+	v['INCLUDES_OCAML'] = conf.cmd_and_log(conf.env['OCAMLC']+' -where').strip()+os.sep
+	v['LIB_OCAML'] = 'camlrun'
+
diff --git a/waflib/extras/package.py b/waflib/extras/package.py
new file mode 100644
index 0000000..b03c95c
--- /dev/null
+++ b/waflib/extras/package.py
@@ -0,0 +1,76 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011
+
+"""
+Obtain packages, unpack them in a location, and add associated uselib variables
+(CFLAGS_pkgname, LIBPATH_pkgname, etc).
+
+The default is use a Dependencies.txt file in the source directory.
+
+This is a work in progress.
+
+Usage:
+
+def options(opt):
+	opt.load('package')
+
+def configure(conf):
+    conf.load_packages()
+"""
+
+from waflib import Logs
+from waflib.Configure import conf
+
+try:
+	from urllib import request
+except:
+	from urllib import urlopen
+else:
+	urlopen = request.urlopen
+
+
+CACHEVAR = 'WAFCACHE_PACKAGE'
+
+@conf
+def get_package_cache_dir(self):
+	cache = None
+	if CACHEVAR in self.environ:  # fixed: was `conf.environ` -- `conf` here is the imported decorator, not the context
+		cache = self.environ[CACHEVAR]
+		cache = self.root.make_node(cache)
+	elif self.env[CACHEVAR]:
+		cache = self.env[CACHEVAR]
+		cache = self.root.make_node(cache)
+	else:
+		cache = self.srcnode.make_node('.wafcache_package')
+	cache.mkdir()
+	return cache
+
+@conf
+def download_archive(self, src, dst):
+	for x in self.env.PACKAGE_REPO:
+		url = '/'.join((x, src))
+		try:
+			web = urlopen(url)
+			try:
+				if web.getcode() != 200:
+					continue
+			except AttributeError:
+				pass
+		except Exception:
+			# on python3 urlopen throws an exception
+			# python 2.3 does not have getcode and throws an exception to fail
+			continue
+		else:
+			tmp = self.root.make_node(dst)
+			tmp.write(web.read())
+			Logs.warn('Downloaded %s from %s' % (tmp.abspath(), url))
+			break
+	else:
+		self.fatal('Could not get the package %s' % src)
+
+@conf
+def load_packages(self):
+	cache = self.get_package_cache_dir()
+	# read the dependencies, get the archives, ..
+
diff --git a/waflib/extras/parallel_debug.py b/waflib/extras/parallel_debug.py
new file mode 100644
index 0000000..3f02b97
--- /dev/null
+++ b/waflib/extras/parallel_debug.py
@@ -0,0 +1,342 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2007-2010 (ita)
+
+"""
+Debugging helper for parallel compilation, outputs
+a file named pdebug.svg in the source directory::
+
+	def options(opt):
+		opt.load('parallel_debug')
+	def configure(conf):
+		conf.load('parallel_debug')
+	def build(bld):
+		...
+"""
+
+import os, time, sys
+try: from Queue import Queue
+except: from queue import Queue
+from waflib import Runner, Options, Utils, Task, Logs, Errors
+
+#import random
+#random.seed(100)
+
+def options(opt):
+	opt.add_option('--dtitle', action='store', default='Parallel build representation for %r' % ' '.join(sys.argv),
+		help='title for the svg diagram', dest='dtitle')
+	opt.add_option('--dwidth', action='store', type='int', help='diagram width', default=800, dest='dwidth')
+	opt.add_option('--dtime', action='store', type='float', help='recording interval in seconds', default=0.009, dest='dtime')
+	opt.add_option('--dband', action='store', type='int', help='band width', default=22, dest='dband')
+	opt.add_option('--dmaxtime', action='store', type='float', help='maximum time, for drawing fair comparisons', default=0, dest='dmaxtime')
+
+# red   #ff4d4d
+# green #4da74d
+# lila  #a751ff
+
+color2code = {
+	'GREEN'  : '#4da74d',
+	'YELLOW' : '#fefe44',
+	'PINK'   : '#a751ff',
+	'RED'    : '#cc1d1d',
+	'BLUE'   : '#6687bb',
+	'CYAN'   : '#34e2e2',
+}
+
+mp = {}
+info = [] # list of (text,color)
+
+def map_to_color(name):
+	if name in mp:
+		return mp[name]
+	try:
+		cls = Task.classes[name]
+	except KeyError:
+		return color2code['RED']
+	if cls.color in mp:
+		return mp[cls.color]
+	if cls.color in color2code:
+		return color2code[cls.color]
+	return color2code['RED']
+
+def process(self):
+	m = self.master
+	if m.stop:
+		m.out.put(self)
+		return
+
+	self.master.set_running(1, id(Utils.threading.currentThread()), self)
+
+	# remove the task signature immediately before it is executed
+	# in case of failure the task will be executed again
+	try:
+		del self.generator.bld.task_sigs[self.uid()]
+	except:
+		pass
+
+	try:
+		self.generator.bld.returned_tasks.append(self)
+		self.log_display(self.generator.bld)
+		ret = self.run()
+	except Exception:
+		self.err_msg = Utils.ex_stack()
+		self.hasrun = Task.EXCEPTION
+
+		# TODO cleanup
+		m.error_handler(self)
+		m.out.put(self)
+		return
+
+	if ret:
+		self.err_code = ret
+		self.hasrun = Task.CRASHED
+	else:
+		try:
+			self.post_run()
+		except Errors.WafError:
+			pass
+		except Exception:
+			self.err_msg = Utils.ex_stack()
+			self.hasrun = Task.EXCEPTION
+		else:
+			self.hasrun = Task.SUCCESS
+	if self.hasrun != Task.SUCCESS:
+		m.error_handler(self)
+
+	self.master.set_running(-1, id(Utils.threading.currentThread()), self)
+	m.out.put(self)
+Task.TaskBase.process_back = Task.TaskBase.process
+Task.TaskBase.process = process
+
+old_start = Runner.Parallel.start
+def do_start(self):
+	try:
+		Options.options.dband
+	except AttributeError:
+		self.bld.fatal('use def options(opt): opt.load("parallel_debug")!')
+
+	self.taskinfo = Queue()
+	old_start(self)
+	if self.dirty:
+		process_colors(self)
+Runner.Parallel.start = do_start
+
+def set_running(self, by, i, tsk):
+	self.taskinfo.put( (i, id(tsk), time.time(), tsk.__class__.__name__, self.processed, self.count, by)  )
+Runner.Parallel.set_running = set_running
+
+def name2class(name):
+	return name.replace(' ', '_').replace('.', '_')
+
+def process_colors(producer):
+	# first, cast the parameters
+	tmp = []
+	try:
+		while True:
+			tup = producer.taskinfo.get(False)
+			tmp.append(list(tup))
+	except:
+		pass
+
+	try:
+		ini = float(tmp[0][2])
+	except:
+		return
+
+	if not info:
+		seen = []
+		for x in tmp:
+			name = x[3]
+			if not name in seen:
+				seen.append(name)
+			else:
+				continue
+
+			info.append((name, map_to_color(name)))
+		info.sort(key=lambda x: x[0])
+
+	thread_count = 0
+	acc = []
+	for x in tmp:
+		thread_count += x[6]
+		acc.append("%d %d %f %r %d %d %d" % (x[0], x[1], x[2] - ini, x[3], x[4], x[5], thread_count))
+	data_node = producer.bld.path.make_node('pdebug.dat')
+	data_node.write('\n'.join(acc))
+
+	tmp = [lst[:2] + [float(lst[2]) - ini] + lst[3:] for lst in tmp]
+
+	st = {}
+	for l in tmp:
+		if not l[0] in st:
+			st[l[0]] = len(st.keys())
+	tmp = [  [st[lst[0]]] + lst[1:] for lst in tmp ]
+	THREAD_AMOUNT = len(st.keys())
+
+	st = {}
+	for l in tmp:
+		if not l[1] in st:
+			st[l[1]] = len(st.keys())
+	tmp = [  [lst[0]] + [st[lst[1]]] + lst[2:] for lst in tmp ]
+
+
+	BAND = Options.options.dband
+
+	seen = {}
+	acc = []
+	for x in range(len(tmp)):
+		line = tmp[x]
+		id = line[1]
+
+		if id in seen:
+			continue
+		seen[id] = True
+
+		begin = line[2]
+		thread_id = line[0]
+		for y in range(x + 1, len(tmp)):
+			line = tmp[y]
+			if line[1] == id:
+				end = line[2]
+				#print id, thread_id, begin, end
+				#acc.append(  ( 10*thread_id, 10*(thread_id+1), 10*begin, 10*end ) )
+				acc.append( (BAND * begin, BAND*thread_id, BAND*end - BAND*begin, BAND, line[3]) )
+				break
+
+	if Options.options.dmaxtime < 0.1:
+		gwidth = 1
+		for x in tmp:
+			m = BAND * x[2]
+			if m > gwidth:
+				gwidth = m
+	else:
+		gwidth = BAND * Options.options.dmaxtime
+
+	ratio = float(Options.options.dwidth) / gwidth
+	gwidth = Options.options.dwidth
+
+	gheight = BAND * (THREAD_AMOUNT + len(info) + 1.5)
+
+	out = []
+
+	out.append("""<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>
+<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.0//EN\"
+\"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd\">
+<svg xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" version=\"1.0\"
+   x=\"%r\" y=\"%r\" width=\"%r\" height=\"%r\"
+   id=\"svg602\" xml:space=\"preserve\">
+
+<style type='text/css' media='screen'>
+	g.over rect  { stroke:#FF0000; fill-opacity:0.4 }
+</style>
+
+<script type='text/javascript'><![CDATA[
+var svg  = document.getElementsByTagName('svg')[0];
+
+svg.addEventListener('mouseover', function(e) {
+	var g = e.target.parentNode;
+	var x = document.getElementById('r_' + g.id);
+	if (x) {
+		g.setAttribute('class', g.getAttribute('class') + ' over');
+		x.setAttribute('class', x.getAttribute('class') + ' over');
+		showInfo(e, g.id);
+	}
+}, false);
+
+svg.addEventListener('mouseout', function(e) {
+		var g = e.target.parentNode;
+		var x = document.getElementById('r_' + g.id);
+		if (x) {
+			g.setAttribute('class', g.getAttribute('class').replace(' over', ''));
+			x.setAttribute('class', x.getAttribute('class').replace(' over', ''));
+			hideInfo(e);
+		}
+}, false);
+
+function showInfo(evt, txt) {
+	tooltip = document.getElementById('tooltip');
+
+	var t = document.getElementById('tooltiptext');
+	t.firstChild.data = txt;
+
+	var x = evt.clientX + 9;
+	if (x > 250) { x -= t.getComputedTextLength() + 16; }
+	var y = evt.clientY + 20;
+	tooltip.setAttribute("transform", "translate(" + x + "," + y + ")");
+	tooltip.setAttributeNS(null, "visibility", "visible");
+
+	var r = document.getElementById('tooltiprect');
+	r.setAttribute('width', t.getComputedTextLength() + 6);
+}
+
+function hideInfo(evt) {
+	var tooltip = document.getElementById('tooltip');
+	tooltip.setAttributeNS(null,"visibility","hidden");
+}
+]]></script>
+
+<!-- inkscape requires a big rectangle or it will not export the pictures properly -->
+<rect
+   x='%r' y='%r'
+   width='%r' height='%r'
+   style=\"font-size:10;fill:#ffffff;fill-opacity:0.01;fill-rule:evenodd;stroke:#ffffff;\"
+   />\n
+
+""" % (0, 0, gwidth + 4, gheight + 4,   0, 0, gwidth + 4, gheight + 4))
+
+	# main title
+	if Options.options.dtitle:
+		out.append("""<text x="%d" y="%d" style="font-size:15px; text-anchor:middle; font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans">%s</text>
+""" % (gwidth/2, gheight - 5, Options.options.dtitle))
+
+	# the rectangles
+
+	groups = {}
+	for (x, y, w, h, clsname) in acc:
+		try:
+			groups[clsname].append((x, y, w, h))
+		except:
+			groups[clsname] = [(x, y, w, h)]
+	for cls in groups:
+		out.append("<g id='%s'>\n" % name2class(cls))
+
+		for (x, y, w, h) in groups[cls]:
+			out.append("""<rect
+   x='%r' y='%r'
+   width='%r' height='%r'
+   style=\"font-size:10;fill:%s;fill-rule:evenodd;stroke:#000000;stroke-width:0.4;\"
+   />\n""" % (2 + x*ratio, 2 + y, w*ratio, h, map_to_color(cls)))
+		out.append("</g>\n")
+
+	# output the caption
+	cnt = THREAD_AMOUNT
+
+	for (text, color) in info:
+		# caption box
+		b = BAND/2
+		out.append("""<g id='r_%s'><rect
+		x='%r' y='%r'
+		width='%r' height='%r'
+		style=\"font-size:10;fill:%s;fill-rule:evenodd;stroke:#000000;stroke-width:0.4;\"
+  />\n""" %                       (name2class(text), 2 + BAND,     5 + (cnt + 0.5) * BAND, b, b, color))
+
+		# caption text
+		out.append("""<text
+   style="font-size:12px;font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans"
+   x="%r" y="%d">%s</text></g>\n""" % (2 + 2 * BAND, 5 + (cnt + 0.5) * BAND + 10, text))
+		cnt += 1
+
+	out.append("""
+<g transform="translate(0,0)" visibility="hidden" id="tooltip">
+  <rect id="tooltiprect" y="-15" x="-3" width="1" height="20" style="stroke:black;fill:#edefc2;stroke-width:1"/>
+  <text id="tooltiptext" style="font-family:Arial; font-size:12;fill:black;"> </text>
+</g>""")
+
+	out.append("\n</svg>")
+
+	node = producer.bld.path.make_node('pdebug.svg')
+	node.write("".join(out))
+	Logs.warn('Created the diagram %r' % node.abspath())
+
+	p = node.parent.abspath()
+	producer.bld.exec_command(['convert', p + os.sep + 'pdebug.svg', p + os.sep + 'pdebug.png'])
+
diff --git a/waflib/extras/pep8.py b/waflib/extras/pep8.py
new file mode 100644
index 0000000..5cace1d
--- /dev/null
+++ b/waflib/extras/pep8.py
@@ -0,0 +1,106 @@
+#! /usr/bin/env python
+# encoding: utf-8
+#
+# written by Sylvain Rouquette, 2011
+
+'''
+Install pep8 module:
+$ easy_install pep8
+	or
+$ pip install pep8
+
+To add the boost tool to the waf file:
+$ ./waf-light --tools=compat15,pep8
+	or, if you have waf >= 1.6.2
+$ ./waf update --files=pep8
+
+
+Then add this to your wscript:
+
+@extension('.py', 'wscript')
+def run_pep8(self, node):
+	self.create_task('Pep8', node)
+
+'''
+
+import threading
+from waflib import TaskGen, Task, Options
+
+pep8 = __import__('pep8')
+
+
+class Pep8(Task.Task):
+	color = 'PINK'
+	lock = threading.Lock()
+
+	def check_options(self):
+		if pep8.options:
+			return
+		pep8.options = Options.options
+		pep8.options.prog = 'pep8'
+		excl = pep8.options.exclude.split(',')
+		pep8.options.exclude = [s.rstrip('/') for s in excl]
+		if pep8.options.filename:
+			pep8.options.filename = pep8.options.filename.split(',')
+		if pep8.options.select:
+			pep8.options.select = pep8.options.select.split(',')
+		else:
+			pep8.options.select = []
+		if pep8.options.ignore:
+			pep8.options.ignore = pep8.options.ignore.split(',')
+		elif pep8.options.select:
+			# Ignore all checks which are not explicitly selected
+			pep8.options.ignore = ['']
+		elif pep8.options.testsuite or pep8.options.doctest:
+			# For doctest and testsuite, all checks are required
+			pep8.options.ignore = []
+		else:
+			# The default choice: ignore controversial checks
+			pep8.options.ignore = pep8.DEFAULT_IGNORE.split(',')
+		pep8.options.physical_checks = pep8.find_checks('physical_line')
+		pep8.options.logical_checks = pep8.find_checks('logical_line')
+		pep8.options.counters = dict.fromkeys(pep8.BENCHMARK_KEYS, 0)
+		pep8.options.messages = {}
+
+	def run(self):
+		with Pep8.lock:
+			self.check_options()
+		pep8.input_file(self.inputs[0].abspath())
+		return 0 if not pep8.get_count() else -1
+
+
+def options(opt):
+	opt.add_option('-q', '--quiet', default=0, action='count',
+				   help="report only file names, or nothing with -qq")
+	opt.add_option('-r', '--repeat', action='store_true',
+				   help="show all occurrences of the same error")
+	opt.add_option('--exclude', metavar='patterns',
+				   default=pep8.DEFAULT_EXCLUDE,
+				   help="exclude files or directories which match these "
+				   "comma separated patterns (default: %s)" %
+				   pep8.DEFAULT_EXCLUDE,
+				   dest='exclude')
+	opt.add_option('--filename', metavar='patterns', default='*.py',
+				   help="when parsing directories, only check filenames "
+				   "matching these comma separated patterns (default: "
+				   "*.py)")
+	opt.add_option('--select', metavar='errors', default='',
+				   help="select errors and warnings (e.g. E,W6)")
+	opt.add_option('--ignore', metavar='errors', default='',
+				   help="skip errors and warnings (e.g. E4,W)")
+	opt.add_option('--show-source', action='store_true',
+				   help="show source code for each error")
+	opt.add_option('--show-pep8', action='store_true',
+				   help="show text of PEP 8 for each error")
+	opt.add_option('--statistics', action='store_true',
+				   help="count errors and warnings")
+	opt.add_option('--count', action='store_true',
+				   help="print total number of errors and warnings "
+				   "to standard error and set exit code to 1 if "
+				   "total is not null")
+	opt.add_option('--benchmark', action='store_true',
+				   help="measure processing speed")
+	opt.add_option('--testsuite', metavar='dir',
+				   help="run regression tests from dir")
+	opt.add_option('--doctest', action='store_true',
+				   help="run doctest on myself")
diff --git a/waflib/extras/print_commands.py b/waflib/extras/print_commands.py
new file mode 100644
index 0000000..3005c3e
--- /dev/null
+++ b/waflib/extras/print_commands.py
@@ -0,0 +1,46 @@
+#! /usr/bin/env python
+
+"""
+Illustrate how to override a class method to do something
+
+In this case, print the commands being executed as strings
+(the commands are usually lists, so this can be misleading)
+"""
+
+import sys
+from waflib import Context, Utils, Logs
+
+def exec_command(self, cmd, **kw):
+	subprocess = Utils.subprocess
+	kw['shell'] = isinstance(cmd, str)
+
+	txt = cmd
+	if isinstance(cmd, list):
+		txt = ' '.join(cmd)
+
+	print(txt)
+	Logs.debug('runner_env: kw=%s' % kw)
+
+	try:
+		if self.logger:
+			# warning: may deadlock with a lot of output (subprocess limitation)
+
+			self.logger.info(cmd)
+
+			kw['stdout'] = kw['stderr'] = subprocess.PIPE
+			p = subprocess.Popen(cmd, **kw)
+			(out, err) = p.communicate()
+			if out:
+				self.logger.debug('out: %s' % out.decode(sys.stdout.encoding or 'iso8859-1'))
+			if err:
+				self.logger.error('err: %s' % err.decode(sys.stdout.encoding or 'iso8859-1'))
+			return p.returncode
+		else:
+			p = subprocess.Popen(cmd, **kw)
+			return p.wait()
+	except OSError:
+		return -1
+
+Context.Context.exec_command = exec_command
+
+
diff --git a/waflib/extras/proc.py b/waflib/extras/proc.py
new file mode 100644
index 0000000..f97adef
--- /dev/null
+++ b/waflib/extras/proc.py
@@ -0,0 +1,56 @@
+#! /usr/bin/env python
+# per rosengren 2011
+
+from os import environ, path
+from waflib import TaskGen, Utils
+
+def options(opt):
+	grp = opt.add_option_group('Oracle ProC Options')
+	grp.add_option('--oracle_home', action='store', default=environ.get('PROC_ORACLE'), help='Path to Oracle installation home (has bin/lib)')
+	grp.add_option('--tns_admin', action='store', default=environ.get('TNS_ADMIN'), help='Directory containing server list (TNS_NAMES.ORA)')
+	grp.add_option('--connection', action='store', default='dummy-user/dummy-password dummy-server', help='Format: user/password server')
+
+def configure(cnf):
+	env = cnf.env
+	if not env.PROC_ORACLE:
+		env.PROC_ORACLE = cnf.options.oracle_home
+	if not env.PROC_TNS_ADMIN:
+		env.PROC_TNS_ADMIN = cnf.options.tns_admin
+	if not env.PROC_CONNECTION:
+		env.PROC_CONNECTION = cnf.options.connection
+	cnf.find_program('proc', var='PROC', path_list=env.PROC_ORACLE + path.sep + 'bin')
+
+def proc(tsk):
+	env = tsk.env
+	gen = tsk.generator
+	bld = gen.bld
+	inc_nodes = gen.to_incnodes(Utils.to_list(getattr(gen,'includes',[])) + env['INCLUDES'])
+
+	# FIXME the if-else construct will not work in python 2
+	cmd = (
+		[env.PROC] +
+		['SQLCHECK=SEMANTICS'] +
+		(['SYS_INCLUDE=(' + ','.join(env.PROC_INCLUDES) + ')']
+			if env.PROC_INCLUDES else []) +
+		['INCLUDE=(' + ','.join(
+			[i.bldpath() for i in inc_nodes]
+		) + ')'] +
+		['userid=' + env.PROC_CONNECTION] +
+		['INAME=' + tsk.inputs[0].bldpath()] +
+		['ONAME=' + tsk.outputs[0].bldpath()]
+	)
+	exec_env = {
+		'ORACLE_HOME': env.PROC_ORACLE,
+		'LD_LIBRARY_PATH': env.PROC_ORACLE + path.sep + 'lib',
+	}
+	if env.PROC_TNS_ADMIN:
+		exec_env['TNS_ADMIN'] = env.PROC_TNS_ADMIN
+	return tsk.exec_command(cmd, env=exec_env)
+
+TaskGen.declare_chain(
+	name = 'proc',
+	rule = proc,
+	ext_in = '.pc',
+	ext_out = '.c',
+)
+
diff --git a/waflib/extras/relocation.py b/waflib/extras/relocation.py
new file mode 100644
index 0000000..aa758b4
--- /dev/null
+++ b/waflib/extras/relocation.py
@@ -0,0 +1,85 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+"""
+Waf 1.6
+
+Try to detect if the project directory was relocated, and if it was,
+change the node representing the project directory. Just call:
+
+ waf configure build
+
+Note that if the project directory name changes, the signatures for the tasks using
+files in that directory will change, causing a partial build.
+"""
+
+import os
+from waflib import Build, ConfigSet, Task, Utils, Errors
+from waflib.TaskGen import feature, before_method, after_method
+
+EXTRA_LOCK = '.old_srcdir'
+
+old1 = Build.BuildContext.store
+def store(self):
+	old1(self)
+	db = os.path.join(self.variant_dir, EXTRA_LOCK)
+	env = ConfigSet.ConfigSet()
+	env.SRCDIR = self.srcnode.abspath()
+	env.store(db)
+Build.BuildContext.store = store
+
+old2 = Build.BuildContext.init_dirs
+def init_dirs(self):
+
+	if not (os.path.isabs(self.top_dir) and os.path.isabs(self.out_dir)):
+		raise Errors.WafError('The project was not configured: run "waf configure" first!')
+
+	srcdir = None
+	db = os.path.join(self.variant_dir, EXTRA_LOCK)
+	env = ConfigSet.ConfigSet()
+	try:
+		env.load(db)
+		srcdir = env.SRCDIR
+	except:
+		pass
+
+	if srcdir:
+		d = self.root.find_node(srcdir)
+		if d and srcdir != self.top_dir and getattr(d, 'children', ''):
+			srcnode = self.root.make_node(self.top_dir)
+			print("relocating the source directory %r -> %r" % (srcdir, self.top_dir))
+			srcnode.children = {}
+
+			for (k, v) in d.children.items():
+				srcnode.children[k] = v
+				v.parent = srcnode
+			d.children = {}
+
+	old2(self)
+
+Build.BuildContext.init_dirs = init_dirs
+
+
+def uid(self):
+	try:
+		return self.uid_
+	except AttributeError:
+		# this is not a real hot zone, but we want to avoid surprizes here
+		m = Utils.md5()
+		up = m.update
+		up(self.__class__.__name__.encode())
+		for x in self.inputs + self.outputs:
+			up(x.path_from(x.ctx.srcnode).encode())
+		self.uid_ = m.digest()
+		return self.uid_
+Task.Task.uid = uid
+
+@feature('c', 'cxx', 'd', 'go', 'asm', 'fc', 'includes')
+@after_method('propagate_uselib_vars', 'process_source')
+def apply_incpaths(self):
+	lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env['INCLUDES'])
+	self.includes_nodes = lst
+	bld = self.bld
+	self.env['INCPATHS'] = [x.is_child_of(bld.srcnode) and x.path_from(bld.bldnode) or x.abspath() for x in lst]
+
+
diff --git a/waflib/extras/review.py b/waflib/extras/review.py
new file mode 100644
index 0000000..4a7ad2f
--- /dev/null
+++ b/waflib/extras/review.py
@@ -0,0 +1,328 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Laurent Birtz, 2011
+# moved the code into a separate tool (ita)
+
+"""
+There are several things here:
+- a different command-line option management making options persistent
+- the review command to display the options set
+
+Assumptions:
+- configuration options are not always added to the right group (and do not count on the users to do it...)
+- the options are persistent between the executions (waf options are NOT persistent by design), even for the configuration
+- when the options change, the build is invalidated (forcing a reconfiguration)
+"""
+
+import os, textwrap, shutil
+from waflib import Logs, Context, ConfigSet, Options, Build, Configure
+
+class Odict(dict):
+	"""Ordered dictionary"""
+	def __init__(self, data=None):
+		self._keys = []
+		dict.__init__(self)
+		if data:
+			# we were provided a regular dict
+			if isinstance(data, dict):
+				self.append_from_dict(data)
+
+			# we were provided a tuple list
+			elif type(data) == list:
+				self.append_from_plist(data)
+
+			# we were provided invalid input
+			else:
+				raise Exception("expected a dict or a tuple list")
+
+	def append_from_dict(self, dict):
+		map(self.__setitem__, dict.keys(), dict.values())
+
+	def append_from_plist(self, plist):
+		for pair in plist:
+			if len(pair) != 2:
+				raise Exception("invalid pairs list")
+		for (k, v) in plist:
+			self.__setitem__(k, v)
+
+	def __delitem__(self, key):
+		if not key in self._keys:
+			raise KeyError(key)
+		dict.__delitem__(self, key)
+		self._keys.remove(key)
+
+	def __setitem__(self, key, item):
+		dict.__setitem__(self, key, item)
+		if key not in self._keys:
+			self._keys.append(key)
+
+	def clear(self):
+		dict.clear(self)
+		self._keys = []
+
+	def copy(self):
+		return Odict(self.plist())
+
+	def items(self):
+		return zip(self._keys, self.values())
+
+	def keys(self):
+		return list(self._keys) # return a copy of the list
+
+	def values(self):
+		return map(self.get, self._keys)
+
+	def plist(self):
+		p = []
+		for k, v in self.items():
+			p.append( (k, v) )
+		return p
+
+	def __str__(self):
+		s = "{"
+		l = len(self._keys)
+		for k, v in self.items():
+			l -= 1
+			strkey = str(k)
+			if isinstance(k, basestring): strkey = "'"+strkey+"'"
+			strval = str(v)
+			if isinstance(v, basestring): strval = "'"+strval+"'"
+			s += strkey + ":" + strval
+			if l > 0: s += ", "
+		s += "}"
+		return s
+
+review_options = Odict()
+"""
+Ordered dictionary mapping configuration option names to their optparse option.
+"""
+
+review_defaults = {}
+"""
+Dictionary mapping configuration option names to their default value.
+"""
+
+old_review_set = None
+"""
+Review set containing the configuration values before parsing the command line.
+"""
+
+new_review_set = None
+"""
+Review set containing the configuration values after parsing the command line.
+"""
+
+class OptionsReview(Options.OptionsContext):
+	def __init__(self, **kw):
+		super(self.__class__, self).__init__(**kw)
+
+	def prepare_config_review(self):
+		"""
+		Find the configuration options that are reviewable, detach
+		their default value from their optparse object and store them
+		into the review dictionaries.
+		"""
+		gr = self.get_option_group('configure options')
+		for opt in gr.option_list:
+			if opt.action != 'store' or opt.dest in ("out", "top"):
+				continue
+			review_options[opt.dest] = opt
+			review_defaults[opt.dest] = opt.default
+			if gr.defaults.has_key(opt.dest):
+				del gr.defaults[opt.dest]
+			opt.default = None
+
+	def parse_args(self):
+		self.prepare_config_review()
+		self.parser.get_option('--prefix').help = 'installation prefix'
+		super(OptionsReview, self).parse_args()
+		Context.create_context('review').refresh_review_set()
+
+class ReviewContext(Context.Context):
+	'''reviews the configuration values'''
+
+	cmd = 'review'
+
+	def __init__(self, **kw):
+		super(self.__class__, self).__init__(**kw)
+
+		out = Options.options.out
+		if not out:
+			out = getattr(Context.g_module, Context.OUT, None)
+		if not out:
+			out = Options.lockfile.replace('.lock-waf', '')
+		self.build_path = (os.path.isabs(out) and self.root or self.path).make_node(out).abspath()
+		"""Path to the build directory"""
+
+		self.cache_path = os.path.join(self.build_path, Build.CACHE_DIR)
+		"""Path to the cache directory"""
+
+		self.review_path = os.path.join(self.cache_path, 'review.cache')
+		"""Path to the review cache file"""
+
+	def execute(self):
+		"""
+		Display and store the review set. Invalidate the cache as required.
+		"""
+		if not self.compare_review_set(old_review_set, new_review_set):
+			self.invalidate_cache()
+		self.store_review_set(new_review_set)
+		print(self.display_review_set(new_review_set))
+
+	def invalidate_cache(self):
+		"""Invalidate the cache to prevent bad builds."""
+		try:
+			Logs.warn("Removing the cached configuration since the options have changed")
+			shutil.rmtree(self.cache_path)
+		except:
+			pass
+
+	def refresh_review_set(self):
+		"""
+		Obtain the old review set and the new review set, and import the new set.
+		"""
+		global old_review_set, new_review_set
+		old_review_set = self.load_review_set()
+		new_review_set = self.update_review_set(old_review_set)
+		self.import_review_set(new_review_set)
+
+	def load_review_set(self):
+		"""
+		Load and return the review set from the cache if it exists.
+		Otherwise, return an empty set.
+		"""
+		if os.path.isfile(self.review_path):
+			return ConfigSet.ConfigSet(self.review_path)
+		return ConfigSet.ConfigSet()
+
+	def store_review_set(self, review_set):
+		"""
+		Store the review set specified in the cache.
+		"""
+		if not os.path.isdir(self.cache_path):
+			os.makedirs(self.cache_path)
+		review_set.store(self.review_path)
+
+	def update_review_set(self, old_set):
+		"""
+		Merge the options passed on the command line with those imported
+		from the previous review set and return the corresponding
+		preview set.
+		"""
+
+		# Convert value to string. It's important that 'None' maps to
+		# the empty string.
+		def val_to_str(val):
+			if val == None or val == '':
+				return ''
+			return str(val)
+
+		new_set = ConfigSet.ConfigSet()
+		opt_dict = Options.options.__dict__
+
+		for name in review_options.keys():
+			# the option is specified explicitly on the command line
+			if name in opt_dict:
+				# if the option is the default, pretend it was never specified
+				if val_to_str(opt_dict[name]) != val_to_str(review_defaults[name]):
+					new_set[name] = opt_dict[name]
+			# the option was explicitly specified in a previous command
+			elif name in old_set:
+				new_set[name] = old_set[name]
+
+		return new_set
+
+	def import_review_set(self, review_set):
+		"""
+		Import the actual value of the reviewable options in the option
+		dictionary, given the current review set.
+		"""
+		for name in review_options.keys():
+			if name in review_set:
+				value = review_set[name]
+			else:
+				value = review_defaults[name]
+			setattr(Options.options, name, value)
+
+	def compare_review_set(self, set1, set2):
+		"""
+		Return true if the review sets specified are equal.
+		"""
+		if len(set1.keys()) != len(set2.keys()): return False
+		for key in set1.keys():
+			if not key in set2 or set1[key] != set2[key]:
+				return False
+		return True
+
+	def display_review_set(self, review_set):
+		"""
+		Return the string representing the review set specified.
+		"""
+		term_width = Logs.get_term_cols()
+		lines = []
+		for dest in review_options.keys():
+			opt = review_options[dest]
+			name = ", ".join(opt._short_opts + opt._long_opts)
+			help = opt.help
+			actual = None
+			if dest in review_set: actual = review_set[dest]
+			default = review_defaults[dest]
+			lines.append(self.format_option(name, help, actual, default, term_width))
+		return "Configuration:\n\n" + "\n\n".join(lines) + "\n"
+
+	def format_option(self, name, help, actual, default, term_width):
+		"""
+		Return the string representing the option specified.
+		"""
+		def val_to_str(val):
+			if val == None or val == '':
+				return "(void)"
+			return str(val)
+
+		max_name_len = 20
+		sep_len = 2
+
+		w = textwrap.TextWrapper()
+		w.width = term_width - 1
+		if w.width < 60: w.width = 60
+
+		out = ""
+
+		# format the help
+		out += w.fill(help) + "\n"
+
+		# format the name
+		name_len = len(name)
+		out += Logs.colors.CYAN + name + Logs.colors.NORMAL
+
+		# set the indentation used when the value wraps to the next line
+		w.subsequent_indent = " ".rjust(max_name_len + sep_len)
+		w.width -= (max_name_len + sep_len)
+
+		# the name string is too long, switch to the next line
+		if name_len > max_name_len:
+			out += "\n" + w.subsequent_indent
+
+		# fill the remaining of the line with spaces
+		else:
+			out += " ".rjust(max_name_len + sep_len - name_len)
+
+		# format the actual value, if there is one
+		if actual != None:
+			out += Logs.colors.BOLD + w.fill(val_to_str(actual)) + Logs.colors.NORMAL + "\n" + w.subsequent_indent
+
+		# format the default value
+		default_fmt = val_to_str(default)
+		if actual != None:
+			default_fmt = "default: " + default_fmt
+		out += Logs.colors.NORMAL + w.fill(default_fmt) + Logs.colors.NORMAL
+
+		return out
+
+# Monkey-patch ConfigurationContext.execute() to have it store the review set.
+old_configure_execute = Configure.ConfigurationContext.execute
+def new_configure_execute(self):
+	old_configure_execute(self)
+	Context.create_context('review').store_review_set(new_review_set)
+Configure.ConfigurationContext.execute = new_configure_execute
+
diff --git a/waflib/extras/smart_continue.py b/waflib/extras/smart_continue.py
new file mode 100644
index 0000000..3af7b1f
--- /dev/null
+++ b/waflib/extras/smart_continue.py
@@ -0,0 +1,81 @@
+#! /usr/bin/env python
+# Thomas Nagy, 2011
+
+# Try to cancel the tasks that cannot run with the option -k when an error occurs:
+# 1 direct file dependencies
+# 2 tasks listed in the before/after/ext_in/ext_out attributes
+
+from waflib import Task, Runner
+
+Task.CANCELED = 4
+
+def cancel_next(self, tsk):
+	if not isinstance(tsk, Task.TaskBase):
+		return
+	if tsk.hasrun >= Task.SKIPPED:
+		# normal execution, no need to do anything here
+		return
+
+	try:
+		canceled_tasks, canceled_nodes = self.canceled_tasks, self.canceled_nodes
+	except AttributeError:
+		canceled_tasks = self.canceled_tasks = set([])
+		canceled_nodes = self.canceled_nodes = set([])
+
+	try:
+		canceled_nodes.update(tsk.outputs)
+	except AttributeError:
+		pass
+
+	try:
+		canceled_tasks.add(tsk)
+	except AttributeError:
+		pass
+
+def get_out(self):
+	tsk = self.out.get()
+	if not self.stop:
+		self.add_more_tasks(tsk)
+	self.count -= 1
+	self.dirty = True
+	self.cancel_next(tsk) # new code
+
+def error_handler(self, tsk):
+	if not self.bld.keep:
+		self.stop = True
+	self.error.append(tsk)
+	self.cancel_next(tsk) # new code
+
+Runner.Parallel.cancel_next = cancel_next
+Runner.Parallel.get_out = get_out
+Runner.Parallel.error_handler = error_handler
+
+def get_next_task(self):
+	tsk = self.get_next_task_smart_continue()
+	if not tsk:
+		return tsk
+
+	try:
+		canceled_tasks, canceled_nodes = self.canceled_tasks, self.canceled_nodes
+	except AttributeError:
+		pass
+	else:
+		# look in the tasks that this one is waiting on
+		# if one of them was canceled, cancel this one too
+		for x in tsk.run_after:
+			if x in canceled_tasks:
+				tsk.hasrun = Task.CANCELED
+				self.cancel_next(tsk)
+				break
+		else:
+			# so far so good, now consider the nodes
+			for x in getattr(tsk, 'inputs', []) + getattr(tsk, 'deps', []):
+				if x in canceled_nodes:
+					tsk.hasrun = Task.CANCELED
+					self.cancel_next(tsk)
+					break
+	return tsk
+
+Runner.Parallel.get_next_task_smart_continue = Runner.Parallel.get_next_task
+Runner.Parallel.get_next_task = get_next_task
+
diff --git a/waflib/extras/subprocess.py b/waflib/extras/subprocess.py
new file mode 100644
index 0000000..cb15178
--- /dev/null
+++ b/waflib/extras/subprocess.py
@@ -0,0 +1,620 @@
+# borrowed from python 2.5.2c1
+# Copyright (c) 2003-2005 by Peter Astrand <astrand lysator liu se>
+# Licensed to PSF under a Contributor Agreement.
+
+import sys
+mswindows = (sys.platform == "win32")
+
+import os
+import types
+import traceback
+import gc
+
+class CalledProcessError(Exception):
+    def __init__(self, returncode, cmd):
+        self.returncode = returncode
+        self.cmd = cmd
+    def __str__(self):
+        return "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode)
+
+if mswindows:
+    import threading
+    import msvcrt
+    if 0:
+        import pywintypes
+        from win32api import GetStdHandle, STD_INPUT_HANDLE, \
+                             STD_OUTPUT_HANDLE, STD_ERROR_HANDLE
+        from win32api import GetCurrentProcess, DuplicateHandle, \
+                             GetModuleFileName, GetVersion
+        from win32con import DUPLICATE_SAME_ACCESS, SW_HIDE
+        from win32pipe import CreatePipe
+        from win32process import CreateProcess, STARTUPINFO, \
+                                 GetExitCodeProcess, STARTF_USESTDHANDLES, \
+                                 STARTF_USESHOWWINDOW, CREATE_NEW_CONSOLE
+        from win32event import WaitForSingleObject, INFINITE, WAIT_OBJECT_0
+    else:
+        from _subprocess import *
+        class STARTUPINFO:
+            dwFlags = 0
+            hStdInput = None
+            hStdOutput = None
+            hStdError = None
+            wShowWindow = 0
+        class pywintypes:
+            error = IOError
+else:
+    import select
+    import errno
+    import fcntl
+    import pickle
+
+__all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "CalledProcessError"]
+
+try:
+    MAXFD = os.sysconf("SC_OPEN_MAX")
+except:
+    MAXFD = 256
+
+try:
+    False
+except NameError:
+    False = 0
+    True = 1
+
+_active = []
+
+def _cleanup():
+    for inst in _active[:]:
+        if inst.poll(_deadstate=sys.maxint) >= 0:
+            try:
+                _active.remove(inst)
+            except ValueError:
+                pass
+
+PIPE = -1
+STDOUT = -2
+
+
+def call(*popenargs, **kwargs):
+    return Popen(*popenargs, **kwargs).wait()
+
+def check_call(*popenargs, **kwargs):
+    retcode = call(*popenargs, **kwargs)
+    cmd = kwargs.get("args")
+    if cmd is None:
+        cmd = popenargs[0]
+    if retcode:
+        raise CalledProcessError(retcode, cmd)
+    return retcode
+
+
+def list2cmdline(seq):
+    result = []
+    needquote = False
+    for arg in seq:
+        bs_buf = []
+
+        if result:
+            result.append(' ')
+
+        needquote = (" " in arg) or ("\t" in arg) or arg == ""
+        if needquote:
+            result.append('"')
+
+        for c in arg:
+            if c == '\\':
+                bs_buf.append(c)
+            elif c == '"':
+                result.append('\\' * len(bs_buf)*2)
+                bs_buf = []
+                result.append('\\"')
+            else:
+                if bs_buf:
+                    result.extend(bs_buf)
+                    bs_buf = []
+                result.append(c)
+
+        if bs_buf:
+            result.extend(bs_buf)
+
+        if needquote:
+            result.extend(bs_buf)
+            result.append('"')
+
+    return ''.join(result)
+
+class Popen(object):
+    def __init__(self, args, bufsize=0, executable=None,
+                 stdin=None, stdout=None, stderr=None,
+                 preexec_fn=None, close_fds=False, shell=False,
+                 cwd=None, env=None, universal_newlines=False,
+                 startupinfo=None, creationflags=0):
+        _cleanup()
+
+        self._child_created = False
+        if not isinstance(bufsize, (int, long)):
+            raise TypeError("bufsize must be an integer")
+
+        if mswindows:
+            if preexec_fn is not None:
+                raise ValueError("preexec_fn is not supported on Windows platforms")
+            if close_fds:
+                raise ValueError("close_fds is not supported on Windows platforms")
+        else:
+            if startupinfo is not None:
+                raise ValueError("startupinfo is only supported on Windows platforms")
+            if creationflags != 0:
+                raise ValueError("creationflags is only supported on Windows platforms")
+
+        self.stdin = None
+        self.stdout = None
+        self.stderr = None
+        self.pid = None
+        self.returncode = None
+        self.universal_newlines = universal_newlines
+
+        (p2cread, p2cwrite,
+         c2pread, c2pwrite,
+         errread, errwrite) = self._get_handles(stdin, stdout, stderr)
+
+        self._execute_child(args, executable, preexec_fn, close_fds,
+                            cwd, env, universal_newlines,
+                            startupinfo, creationflags, shell,
+                            p2cread, p2cwrite,
+                            c2pread, c2pwrite,
+                            errread, errwrite)
+
+        if mswindows:
+            if stdin is None and p2cwrite is not None:
+                os.close(p2cwrite)
+                p2cwrite = None
+            if stdout is None and c2pread is not None:
+                os.close(c2pread)
+                c2pread = None
+            if stderr is None and errread is not None:
+                os.close(errread)
+                errread = None
+
+        if p2cwrite:
+            self.stdin = os.fdopen(p2cwrite, 'wb', bufsize)
+        if c2pread:
+            if universal_newlines:
+                self.stdout = os.fdopen(c2pread, 'rU', bufsize)
+            else:
+                self.stdout = os.fdopen(c2pread, 'rb', bufsize)
+        if errread:
+            if universal_newlines:
+                self.stderr = os.fdopen(errread, 'rU', bufsize)
+            else:
+                self.stderr = os.fdopen(errread, 'rb', bufsize)
+
+
+    def _translate_newlines(self, data):
+        data = data.replace("\r\n", "\n")
+        data = data.replace("\r", "\n")
+        return data
+
+
+    def __del__(self, sys=sys):
+        if not self._child_created:
+            return
+        self.poll(_deadstate=sys.maxint)
+        if self.returncode is None and _active is not None:
+            _active.append(self)
+
+
+    def communicate(self, input=None):
+        if [self.stdin, self.stdout, self.stderr].count(None) >= 2:
+            stdout = None
+            stderr = None
+            if self.stdin:
+                if input:
+                    self.stdin.write(input)
+                self.stdin.close()
+            elif self.stdout:
+                stdout = self.stdout.read()
+            elif self.stderr:
+                stderr = self.stderr.read()
+            self.wait()
+            return (stdout, stderr)
+
+        return self._communicate(input)
+
+
+    if mswindows:
+        def _get_handles(self, stdin, stdout, stderr):
+            if stdin is None and stdout is None and stderr is None:
+                return (None, None, None, None, None, None)
+
+            p2cread, p2cwrite = None, None
+            c2pread, c2pwrite = None, None
+            errread, errwrite = None, None
+
+            if stdin is None:
+                p2cread = GetStdHandle(STD_INPUT_HANDLE)
+            if p2cread is not None:
+                pass
+            elif stdin is None or stdin == PIPE:
+                p2cread, p2cwrite = CreatePipe(None, 0)
+                p2cwrite = p2cwrite.Detach()
+                p2cwrite = msvcrt.open_osfhandle(p2cwrite, 0)
+            elif isinstance(stdin, int):
+                p2cread = msvcrt.get_osfhandle(stdin)
+            else:
+                p2cread = msvcrt.get_osfhandle(stdin.fileno())
+            p2cread = self._make_inheritable(p2cread)
+
+            if stdout is None:
+                c2pwrite = GetStdHandle(STD_OUTPUT_HANDLE)
+            if c2pwrite is not None:
+                pass
+            elif stdout is None or stdout == PIPE:
+                c2pread, c2pwrite = CreatePipe(None, 0)
+                c2pread = c2pread.Detach()
+                c2pread = msvcrt.open_osfhandle(c2pread, 0)
+            elif isinstance(stdout, int):
+                c2pwrite = msvcrt.get_osfhandle(stdout)
+            else:
+                c2pwrite = msvcrt.get_osfhandle(stdout.fileno())
+            c2pwrite = self._make_inheritable(c2pwrite)
+
+            if stderr is None:
+                errwrite = GetStdHandle(STD_ERROR_HANDLE)
+            if errwrite is not None:
+                pass
+            elif stderr is None or stderr == PIPE:
+                errread, errwrite = CreatePipe(None, 0)
+                errread = errread.Detach()
+                errread = msvcrt.open_osfhandle(errread, 0)
+            elif stderr == STDOUT:
+                errwrite = c2pwrite
+            elif isinstance(stderr, int):
+                errwrite = msvcrt.get_osfhandle(stderr)
+            else:
+                errwrite = msvcrt.get_osfhandle(stderr.fileno())
+            errwrite = self._make_inheritable(errwrite)
+
+            return (p2cread, p2cwrite,
+                    c2pread, c2pwrite,
+                    errread, errwrite)
+        def _make_inheritable(self, handle):
+            return DuplicateHandle(GetCurrentProcess(), handle, GetCurrentProcess(), 0, 1, DUPLICATE_SAME_ACCESS)
+
+        def _find_w9xpopen(self):
+            w9xpopen = os.path.join(os.path.dirname(GetModuleFileName(0)), "w9xpopen.exe")
+            if not os.path.exists(w9xpopen):
+                w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix), "w9xpopen.exe")
+                if not os.path.exists(w9xpopen):
+                    raise RuntimeError("Cannot locate w9xpopen.exe, which is needed for Popen to work with your shell or platform.")
+            return w9xpopen
+
+        def _execute_child(self, args, executable, preexec_fn, close_fds,
+                           cwd, env, universal_newlines,
+                           startupinfo, creationflags, shell,
+                           p2cread, p2cwrite,
+                           c2pread, c2pwrite,
+                           errread, errwrite):
+
+            if not isinstance(args, types.StringTypes):
+                args = list2cmdline(args)
+
+            if startupinfo is None:
+                startupinfo = STARTUPINFO()
+            if None not in (p2cread, c2pwrite, errwrite):
+                startupinfo.dwFlags |= STARTF_USESTDHANDLES
+                startupinfo.hStdInput = p2cread
+                startupinfo.hStdOutput = c2pwrite
+                startupinfo.hStdError = errwrite
+
+            if shell:
+                startupinfo.dwFlags |= STARTF_USESHOWWINDOW
+                startupinfo.wShowWindow = SW_HIDE
+                comspec = os.environ.get("COMSPEC", "cmd.exe")
+                args = comspec + " /c " + args
+                if (GetVersion() >= 0x80000000L or
+                        os.path.basename(comspec).lower() == "command.com"):
+                    w9xpopen = self._find_w9xpopen()
+                    args = '"%s" %s' % (w9xpopen, args)
+                    creationflags |= CREATE_NEW_CONSOLE
+
+            try:
+                hp, ht, pid, tid = CreateProcess(executable, args, None, None, 1, creationflags, env, cwd, startupinfo)
+            except pywintypes.error, e:
+                raise WindowsError(*e.args)
+
+            self._child_created = True
+            self._handle = hp
+            self.pid = pid
+            ht.Close()
+
+            if p2cread is not None:
+                p2cread.Close()
+            if c2pwrite is not None:
+                c2pwrite.Close()
+            if errwrite is not None:
+                errwrite.Close()
+
+
+        def poll(self, _deadstate=None):
+            if self.returncode is None:
+                if WaitForSingleObject(self._handle, 0) == WAIT_OBJECT_0:
+                    self.returncode = GetExitCodeProcess(self._handle)
+            return self.returncode
+
+
+        def wait(self):
+            if self.returncode is None:
+                obj = WaitForSingleObject(self._handle, INFINITE)
+                self.returncode = GetExitCodeProcess(self._handle)
+            return self.returncode
+
+        def _readerthread(self, fh, buffer):
+            buffer.append(fh.read())
+
+        def _communicate(self, input):
+            stdout = None
+            stderr = None
+
+            if self.stdout:
+                stdout = []
+                stdout_thread = threading.Thread(target=self._readerthread, args=(self.stdout, stdout))
+                stdout_thread.setDaemon(True)
+                stdout_thread.start()
+            if self.stderr:
+                stderr = []
+                stderr_thread = threading.Thread(target=self._readerthread, args=(self.stderr, stderr))
+                stderr_thread.setDaemon(True)
+                stderr_thread.start()
+
+            if self.stdin:
+                if input is not None:
+                    self.stdin.write(input)
+                self.stdin.close()
+
+            if self.stdout:
+                stdout_thread.join()
+            if self.stderr:
+                stderr_thread.join()
+
+            if stdout is not None:
+                stdout = stdout[0]
+            if stderr is not None:
+                stderr = stderr[0]
+
+            if self.universal_newlines and hasattr(file, 'newlines'):
+                if stdout:
+                    stdout = self._translate_newlines(stdout)
+                if stderr:
+                    stderr = self._translate_newlines(stderr)
+
+            self.wait()
+            return (stdout, stderr)
+
+    else:
+        def _get_handles(self, stdin, stdout, stderr):
+            p2cread, p2cwrite = None, None
+            c2pread, c2pwrite = None, None
+            errread, errwrite = None, None
+
+            if stdin is None:
+                pass
+            elif stdin == PIPE:
+                p2cread, p2cwrite = os.pipe()
+            elif isinstance(stdin, int):
+                p2cread = stdin
+            else:
+                p2cread = stdin.fileno()
+
+            if stdout is None:
+                pass
+            elif stdout == PIPE:
+                c2pread, c2pwrite = os.pipe()
+            elif isinstance(stdout, int):
+                c2pwrite = stdout
+            else:
+                c2pwrite = stdout.fileno()
+
+            if stderr is None:
+                pass
+            elif stderr == PIPE:
+                errread, errwrite = os.pipe()
+            elif stderr == STDOUT:
+                errwrite = c2pwrite
+            elif isinstance(stderr, int):
+                errwrite = stderr
+            else:
+                errwrite = stderr.fileno()
+
+            return (p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite)
+
+        def _set_cloexec_flag(self, fd):
+            try:
+                cloexec_flag = fcntl.FD_CLOEXEC
+            except AttributeError:
+                cloexec_flag = 1
+
+            old = fcntl.fcntl(fd, fcntl.F_GETFD)
+            fcntl.fcntl(fd, fcntl.F_SETFD, old | cloexec_flag)
+
+        def _close_fds(self, but):
+            for i in xrange(3, MAXFD):
+                if i == but:
+                    continue
+                try:
+                    os.close(i)
+                except:
+                    pass
+
+        def _execute_child(self, args, executable, preexec_fn, close_fds,
+                           cwd, env, universal_newlines, startupinfo, creationflags, shell,
+                           p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite):
+
+            if isinstance(args, types.StringTypes):
+                args = [args]
+            else:
+                args = list(args)
+
+            if shell:
+                args = ["/bin/sh", "-c"] + args
+
+            if executable is None:
+                executable = args[0]
+
+            errpipe_read, errpipe_write = os.pipe()
+            self._set_cloexec_flag(errpipe_write)
+
+            gc_was_enabled = gc.isenabled()
+            gc.disable()
+            try:
+                self.pid = os.fork()
+            except:
+                if gc_was_enabled:
+                    gc.enable()
+                raise
+            self._child_created = True
+            if self.pid == 0:
+                try:
+                    if p2cwrite:
+                        os.close(p2cwrite)
+                    if c2pread:
+                        os.close(c2pread)
+                    if errread:
+                        os.close(errread)
+                    os.close(errpipe_read)
+
+                    if p2cread:
+                        os.dup2(p2cread, 0)
+                    if c2pwrite:
+                        os.dup2(c2pwrite, 1)
+                    if errwrite:
+                        os.dup2(errwrite, 2)
+
+                    if p2cread and p2cread not in (0,):
+                        os.close(p2cread)
+                    if c2pwrite and c2pwrite not in (p2cread, 1):
+                        os.close(c2pwrite)
+                    if errwrite and errwrite not in (p2cread, c2pwrite, 2):
+                        os.close(errwrite)
+
+                    if close_fds:
+                        self._close_fds(but=errpipe_write)
+
+                    if cwd is not None:
+                        os.chdir(cwd)
+
+                    if preexec_fn:
+                        apply(preexec_fn)
+
+                    if env is None:
+                        os.execvp(executable, args)
+                    else:
+                        os.execvpe(executable, args, env)
+
+                except:
+                    exc_type, exc_value, tb = sys.exc_info()
+                    exc_lines = traceback.format_exception(exc_type, exc_value, tb)
+                    exc_value.child_traceback = ''.join(exc_lines)
+                    os.write(errpipe_write, pickle.dumps(exc_value))
+
+                os._exit(255)
+
+            if gc_was_enabled:
+                gc.enable()
+            os.close(errpipe_write)
+            if p2cread and p2cwrite:
+                os.close(p2cread)
+            if c2pwrite and c2pread:
+                os.close(c2pwrite)
+            if errwrite and errread:
+                os.close(errwrite)
+
+            data = os.read(errpipe_read, 1048576)
+            os.close(errpipe_read)
+            if data != "":
+                os.waitpid(self.pid, 0)
+                child_exception = pickle.loads(data)
+                raise child_exception
+
+        def _handle_exitstatus(self, sts):
+            if os.WIFSIGNALED(sts):
+                self.returncode = -os.WTERMSIG(sts)
+            elif os.WIFEXITED(sts):
+                self.returncode = os.WEXITSTATUS(sts)
+            else:
+                raise RuntimeError("Unknown child exit status!")
+
+        def poll(self, _deadstate=None):
+            if self.returncode is None:
+                try:
+                    pid, sts = os.waitpid(self.pid, os.WNOHANG)
+                    if pid == self.pid:
+                        self._handle_exitstatus(sts)
+                except os.error:
+                    if _deadstate is not None:
+                        self.returncode = _deadstate
+            return self.returncode
+
+        def wait(self):
+            if self.returncode is None:
+                pid, sts = os.waitpid(self.pid, 0)
+                self._handle_exitstatus(sts)
+            return self.returncode
+
+        def _communicate(self, input):
+            read_set = []
+            write_set = []
+            stdout = None
+            stderr = None
+
+            if self.stdin:
+                self.stdin.flush()
+                if input:
+                    write_set.append(self.stdin)
+                else:
+                    self.stdin.close()
+            if self.stdout:
+                read_set.append(self.stdout)
+                stdout = []
+            if self.stderr:
+                read_set.append(self.stderr)
+                stderr = []
+
+            input_offset = 0
+            while read_set or write_set:
+                rlist, wlist, xlist = select.select(read_set, write_set, [])
+
+                if self.stdin in wlist:
+                    bytes_written = os.write(self.stdin.fileno(), buffer(input, input_offset, 512))
+                    input_offset += bytes_written
+                    if input_offset >= len(input):
+                        self.stdin.close()
+                        write_set.remove(self.stdin)
+
+                if self.stdout in rlist:
+                    data = os.read(self.stdout.fileno(), 1024)
+                    if data == "":
+                        self.stdout.close()
+                        read_set.remove(self.stdout)
+                    stdout.append(data)
+
+                if self.stderr in rlist:
+                    data = os.read(self.stderr.fileno(), 1024)
+                    if data == "":
+                        self.stderr.close()
+                        read_set.remove(self.stderr)
+                    stderr.append(data)
+
+            if stdout is not None:
+                stdout = ''.join(stdout)
+            if stderr is not None:
+                stderr = ''.join(stderr)
+
+            if self.universal_newlines and hasattr(file, 'newlines'):
+                if stdout:
+                    stdout = self._translate_newlines(stdout)
+                if stderr:
+                    stderr = self._translate_newlines(stderr)
+
+            self.wait()
+            return (stdout, stderr)
+
diff --git a/waflib/extras/syms.py b/waflib/extras/syms.py
new file mode 100644
index 0000000..39ee7d8
--- /dev/null
+++ b/waflib/extras/syms.py
@@ -0,0 +1,71 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+"""
+this tool supports the export_symbols_regex to export the symbols in a shared library.
+by default, all symbols are exported by gcc, and nothing by msvc.
+to use the tool, do something like:
+
+def build(ctx):
+	ctx(features='c cshlib syms', source='a.c b.c', export_symbols_regex='mylib_.*', target='testlib')
+
+only the symbols starting with 'mylib_' will be exported.
+"""
+
+import re
+from waflib.Context import STDOUT
+from waflib.Task import Task
+from waflib.Errors import WafError
+from waflib.TaskGen import feature, after_method
+
+class gen_sym(Task):
+	def run(self):
+		obj = self.inputs[0]
+		if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
+			re_nm = re.compile(r'External\s+\|\s+_(' + self.generator.export_symbols_regex + r')\b')
+			cmd = ['dumpbin', '/symbols', obj.abspath()]
+		else:
+			if self.env.DEST_BINFMT == 'pe': #gcc uses nm, and has a preceding _ on windows
+				re_nm = re.compile(r'T\s+_(' + self.generator.export_symbols_regex + r')\b')
+			else:
+				re_nm = re.compile(r'T\s+(' + self.generator.export_symbols_regex + r')\b')
+			cmd = ['nm', '-g', obj.abspath()]
+		syms = re_nm.findall(self.generator.bld.cmd_and_log(cmd, quiet=STDOUT))
+		self.outputs[0].write('%r' % syms)
+
+class compile_sym(Task):
+	def run(self):
+		syms = {}
+		for x in self.inputs:
+			slist = eval(x.read())
+			for s in slist:
+				syms[s] = 1
+		lsyms = syms.keys()
+		lsyms.sort()
+		if self.env.DEST_BINFMT == 'pe':
+			self.outputs[0].write('EXPORTS\n' + '\n'.join(lsyms))
+		elif self.env.DEST_BINFMT == 'elf':
+			self.outputs[0].write('{ global:\n' + ';\n'.join(lsyms) + ";\nlocal: *; };\n")
+		else:
+			raise WafError('NotImplemented')
+
+@feature('syms')
+@after_method('process_source', 'process_use', 'apply_link', 'process_uselib_local')
+def do_the_symbol_stuff(self):
+	ins = [x.outputs[0] for x in self.compiled_tasks]
+	self.gen_sym_tasks = [self.create_task('gen_sym', x, x.change_ext('.%d.sym' % self.idx)) for x in ins]
+
+	tsk = self.create_task('compile_sym',
+			       [x.outputs[0] for x in self.gen_sym_tasks],
+			       self.path.find_or_declare(getattr(self, 'sym_filename', self.target + '.def')))
+	self.link_task.set_run_after(tsk)
+	self.link_task.dep_nodes = [tsk.outputs[0]]
+	if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
+		self.link_task.env.append_value('LINKFLAGS', ['/def:' + tsk.outputs[0].bldpath()])
+	elif self.env.DEST_BINFMT == 'pe': #gcc on windows takes *.def as an additional input
+		self.link_task.inputs.append(tsk.outputs[0])
+	elif self.env.DEST_BINFMT == 'elf':
+		self.link_task.env.append_value('LINKFLAGS', ['-Wl,-version-script', '-Wl,' + tsk.outputs[0].bldpath()])
+	else:
+		raise WafError('NotImplemented')
+
diff --git a/waflib/fixpy2.py b/waflib/fixpy2.py
new file mode 100644
index 0000000..2896962
--- /dev/null
+++ b/waflib/fixpy2.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2010 (ita)
+
+"""
+burn a book, save a tree
+"""
+
+import os
+all_modifs = {}
+
+def fixdir(dir):
+	"""call all the substitution functions on the waf folders"""
+	global all_modifs
+	for k in all_modifs:
+		for v in all_modifs[k]:
+			modif(os.path.join(dir, 'waflib'), k, v)
+
+def modif(dir, name, fun):
+	"""execute a substitution function"""
+	if name == '*':
+		lst = []
+		for y in '. Tools extras'.split():
+			for x in os.listdir(os.path.join(dir, y)):
+				if x.endswith('.py'):
+					lst.append(y + os.sep + x)
+		for x in lst:
+			modif(dir, x, fun)
+		return
+
+	filename = os.path.join(dir, name)
+	f = open(filename, 'r')
+	txt = f.read()
+	f.close()
+
+	txt = fun(txt)
+
+	f = open(filename, 'w')
+	f.write(txt)
+	f.close()
+
+def subst(*k):
+	"""register a substitution function"""
+	def do_subst(fun):
+		global all_modifs
+		for x in k:
+			try:
+				all_modifs[x].append(fun)
+			except KeyError:
+				all_modifs[x] = [fun]
+		return fun
+	return do_subst
+
+@subst('*')
+def r1(code):
+	"utf-8 fixes for python < 2.6"
+	code = code.replace('as e:', ',e:')
+	code = code.replace(".decode(sys.stdout.encoding or 'iso8859-1')", '')
+	code = code.replace('.encode()', '')
+	return code
+
+@subst('Runner.py')
+def r4(code):
+	"generator syntax"
+	code = code.replace('next(self.biter)', 'self.biter.next()')
+	return code
+
diff --git a/wscript b/wscript
index 66645b4..4991e4a 100644
--- a/wscript
+++ b/wscript
@@ -60,7 +60,7 @@ config_subdirs = "auxdata extras help"
 build_subdirs = "auxdata data po extras help"
 
 EXTRA_DIST = [
-	"waf",
+	#"waf",
 	"GIT_VERSION",
 ]
 



[Date Prev][Date Next]   [Thread Prev][Thread Next]   [Thread Index] [Date Index] [Author Index]