Include waf as an extracted source directory, rather than as a single-file script.
author: Jelmer Vernooij <jelmer@samba.org>
Tue, 3 Jan 2012 23:31:27 +0000 (00:31 +0100)
committer: Jelmer Vernooij <jelmer@samba.org>
Wed, 4 Jan 2012 21:34:20 +0000 (22:34 +0100)
79 files changed:
buildtools/README [new file with mode: 0644]
buildtools/bin/README [deleted file]
buildtools/bin/waf [changed from symlink to file mode: 0755]
buildtools/bin/waf-svn [deleted file]
buildtools/update-waf.sh [new file with mode: 0755]
buildtools/wafadmin/3rdparty/ParallelDebug.py [new file with mode: 0644]
buildtools/wafadmin/3rdparty/batched_cc.py [new file with mode: 0644]
buildtools/wafadmin/3rdparty/boost.py [new file with mode: 0644]
buildtools/wafadmin/3rdparty/fluid.py [new file with mode: 0644]
buildtools/wafadmin/3rdparty/gccdeps.py [new file with mode: 0644]
buildtools/wafadmin/3rdparty/go.py [new file with mode: 0644]
buildtools/wafadmin/3rdparty/lru_cache.py [new file with mode: 0644]
buildtools/wafadmin/3rdparty/paranoid.py [new file with mode: 0644]
buildtools/wafadmin/3rdparty/swig.py [new file with mode: 0644]
buildtools/wafadmin/3rdparty/valadoc.py [new file with mode: 0644]
buildtools/wafadmin/Build.py [new file with mode: 0644]
buildtools/wafadmin/Configure.py [new file with mode: 0644]
buildtools/wafadmin/Constants.py [new file with mode: 0644]
buildtools/wafadmin/Environment.py [new file with mode: 0644]
buildtools/wafadmin/Logs.py [new file with mode: 0644]
buildtools/wafadmin/Node.py [new file with mode: 0644]
buildtools/wafadmin/Options.py [new file with mode: 0644]
buildtools/wafadmin/Runner.py [new file with mode: 0644]
buildtools/wafadmin/Scripting.py [new file with mode: 0644]
buildtools/wafadmin/Task.py [new file with mode: 0644]
buildtools/wafadmin/TaskGen.py [new file with mode: 0644]
buildtools/wafadmin/Tools/__init__.py [new file with mode: 0644]
buildtools/wafadmin/Tools/ar.py [new file with mode: 0644]
buildtools/wafadmin/Tools/bison.py [new file with mode: 0644]
buildtools/wafadmin/Tools/cc.py [new file with mode: 0644]
buildtools/wafadmin/Tools/ccroot.py [new file with mode: 0644]
buildtools/wafadmin/Tools/compiler_cc.py [new file with mode: 0644]
buildtools/wafadmin/Tools/compiler_cxx.py [new file with mode: 0644]
buildtools/wafadmin/Tools/compiler_d.py [new file with mode: 0644]
buildtools/wafadmin/Tools/config_c.py [new file with mode: 0644]
buildtools/wafadmin/Tools/cs.py [new file with mode: 0644]
buildtools/wafadmin/Tools/cxx.py [new file with mode: 0644]
buildtools/wafadmin/Tools/d.py [new file with mode: 0644]
buildtools/wafadmin/Tools/dbus.py [new file with mode: 0644]
buildtools/wafadmin/Tools/dmd.py [new file with mode: 0644]
buildtools/wafadmin/Tools/flex.py [new file with mode: 0644]
buildtools/wafadmin/Tools/gas.py [new file with mode: 0644]
buildtools/wafadmin/Tools/gcc.py [new file with mode: 0644]
buildtools/wafadmin/Tools/gdc.py [new file with mode: 0644]
buildtools/wafadmin/Tools/glib2.py [new file with mode: 0644]
buildtools/wafadmin/Tools/gnome.py [new file with mode: 0644]
buildtools/wafadmin/Tools/gnu_dirs.py [new file with mode: 0644]
buildtools/wafadmin/Tools/gob2.py [new file with mode: 0644]
buildtools/wafadmin/Tools/gxx.py [new file with mode: 0644]
buildtools/wafadmin/Tools/icc.py [new file with mode: 0644]
buildtools/wafadmin/Tools/icpc.py [new file with mode: 0644]
buildtools/wafadmin/Tools/intltool.py [new file with mode: 0644]
buildtools/wafadmin/Tools/javaw.py [new file with mode: 0644]
buildtools/wafadmin/Tools/kde4.py [new file with mode: 0644]
buildtools/wafadmin/Tools/libtool.py [new file with mode: 0644]
buildtools/wafadmin/Tools/lua.py [new file with mode: 0644]
buildtools/wafadmin/Tools/misc.py [new file with mode: 0644]
buildtools/wafadmin/Tools/msvc.py [new file with mode: 0644]
buildtools/wafadmin/Tools/nasm.py [new file with mode: 0644]
buildtools/wafadmin/Tools/ocaml.py [new file with mode: 0644]
buildtools/wafadmin/Tools/osx.py [new file with mode: 0644]
buildtools/wafadmin/Tools/perl.py [new file with mode: 0644]
buildtools/wafadmin/Tools/preproc.py [new file with mode: 0644]
buildtools/wafadmin/Tools/python.py [new file with mode: 0644]
buildtools/wafadmin/Tools/qt4.py [new file with mode: 0644]
buildtools/wafadmin/Tools/ruby.py [new file with mode: 0644]
buildtools/wafadmin/Tools/suncc.py [new file with mode: 0644]
buildtools/wafadmin/Tools/suncxx.py [new file with mode: 0644]
buildtools/wafadmin/Tools/tex.py [new file with mode: 0644]
buildtools/wafadmin/Tools/unittestw.py [new file with mode: 0644]
buildtools/wafadmin/Tools/vala.py [new file with mode: 0644]
buildtools/wafadmin/Tools/winres.py [new file with mode: 0644]
buildtools/wafadmin/Tools/xlc.py [new file with mode: 0644]
buildtools/wafadmin/Tools/xlcxx.py [new file with mode: 0644]
buildtools/wafadmin/Utils.py [new file with mode: 0644]
buildtools/wafadmin/__init__.py [new file with mode: 0644]
buildtools/wafadmin/ansiterm.py [new file with mode: 0644]
buildtools/wafadmin/pproc.py [new file with mode: 0644]
buildtools/wafadmin/py3kfixes.py [new file with mode: 0644]

diff --git a/buildtools/README b/buildtools/README
new file mode 100644 (file)
index 0000000..eab0382
--- /dev/null
@@ -0,0 +1,12 @@
+See http://code.google.com/p/waf/ for more information on waf
+
+You can get a svn copy of the upstream source with:
+
+  svn checkout http://waf.googlecode.com/svn/trunk/ waf-read-only
+
+Samba currently uses waf 1.5, which can be found at:
+
+  http://waf.googlecode.com/svn/branches/waf-1.5
+
+To update the current copy of waf, use the update-waf.sh script in this
+directory.
diff --git a/buildtools/bin/README b/buildtools/bin/README
deleted file mode 100644 (file)
index 9ef8a1f..0000000
+++ /dev/null
@@ -1,16 +0,0 @@
-This copy of waf-svn is taken from the git mirror of waf
-at:
-
-  git://git.samba.org/tridge/waf-svn.git
-
-using the waf-samba branch
-
-It was built using the command:
-
-  ./waf-light --zip-type=gz --make-waf
-
-See http://code.google.com/p/waf/ for more information on waf
-
-You can get a svn copy of the upstream source with:
-
-  svn checkout http://waf.googlecode.com/svn/trunk/ waf-read-only
diff --git a/buildtools/bin/waf b/buildtools/bin/waf
deleted file mode 120000 (symlink)
index 1e5b2420622f14571e519f48eee1edb983900a3a..0000000000000000000000000000000000000000
+++ /dev/null
@@ -1 +0,0 @@
-waf-svn
\ No newline at end of file
diff --git a/buildtools/bin/waf b/buildtools/bin/waf
new file mode 100755 (executable)
index 0000000000000000000000000000000000000000..db6a7d3386a137602901b9a55560b469f76d5e35
--- /dev/null
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+# encoding: ISO-8859-1
+# Thomas Nagy, 2005-2010
+
+"""
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+1. Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+3. The name of the author may not be used to endorse or promote products
+   derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+"""
+
+import os, sys
+if sys.hexversion<0x203000f: raise ImportError("Waf requires Python >= 2.3")
+
+if 'PSYCOWAF' in os.environ:
+       try:import psyco;psyco.full()
+       except:pass
+
+VERSION="1.5.19"
+REVISION="x"
+INSTALL="x"
+C1='x'
+C2='x'
+cwd = os.getcwd()
+join = os.path.join
+
+WAF='waf'
+def b(x):
+       return x
+
+if sys.hexversion>0x300000f:
+       WAF='waf3'
+       def b(x):
+               return x.encode()
+
+def err(m):
+       print(('\033[91mError: %s\033[0m' % m))
+       sys.exit(1)
+
+def test(dir):
+       try: os.stat(join(dir, 'wafadmin')); return os.path.abspath(dir)
+       except OSError: pass
+
+def find_lib():
+       return os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
+
+wafdir = find_lib()
+w = join(wafdir, 'wafadmin')
+t = join(w, 'Tools')
+f = join(w, '3rdparty')
+sys.path = [w, t, f] + sys.path
+
+if __name__ == '__main__':
+       import Scripting
+       Scripting.prepare(t, cwd, VERSION, wafdir)
+
diff --git a/buildtools/bin/waf-svn b/buildtools/bin/waf-svn
deleted file mode 100755 (executable)
index 6d54d5f..0000000
Binary files a/buildtools/bin/waf-svn and /dev/null differ
diff --git a/buildtools/update-waf.sh b/buildtools/update-waf.sh
new file mode 100755 (executable)
index 0000000..bb3a4bf
--- /dev/null
@@ -0,0 +1,13 @@
+#!/bin/sh
+# Update our copy of waf
+
+TARGETDIR="`dirname $0`"
+WORKDIR="`mktemp -d`"
+
+mkdir -p "$WORKDIR"
+
+svn checkout http://waf.googlecode.com/svn/branches/waf-1.5/wafadmin "$WORKDIR/wafadmin"
+
+rsync -C -avz --delete "$WORKDIR/wafadmin/" "$TARGETDIR/wafadmin/"
+
+rm -rf "$WORKDIR"
diff --git a/buildtools/wafadmin/3rdparty/ParallelDebug.py b/buildtools/wafadmin/3rdparty/ParallelDebug.py
new file mode 100644 (file)
index 0000000..9d0493e
--- /dev/null
@@ -0,0 +1,299 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2007-2010 (ita)
+
+"""
+debugging helpers for parallel compilation, outputs
+a svg file in the build directory
+"""
+
+import os, time, sys, threading
+try: from Queue import Queue
+except: from queue import Queue
+import Runner, Options, Utils, Task, Logs
+from Constants import *
+
+#import random
+#random.seed(100)
+
+def set_options(opt):
+       opt.add_option('--dtitle', action='store', default='Parallel build representation for %r' % ' '.join(sys.argv),
+               help='title for the svg diagram', dest='dtitle')
+       opt.add_option('--dwidth', action='store', type='int', help='diagram width', default=1000, dest='dwidth')
+       opt.add_option('--dtime', action='store', type='float', help='recording interval in seconds', default=0.009, dest='dtime')
+       opt.add_option('--dband', action='store', type='int', help='band width', default=22, dest='dband')
+       opt.add_option('--dmaxtime', action='store', type='float', help='maximum time, for drawing fair comparisons', default=0, dest='dmaxtime')
+
+# red   #ff4d4d
+# green #4da74d
+# lila  #a751ff
+
+color2code = {
+       'GREEN'  : '#4da74d',
+       'YELLOW' : '#fefe44',
+       'PINK'   : '#a751ff',
+       'RED'    : '#cc1d1d',
+       'BLUE'   : '#6687bb',
+       'CYAN'   : '#34e2e2',
+
+}
+
+mp = {}
+info = [] # list of (text,color)
+
+def map_to_color(name):
+       if name in mp:
+               return mp[name]
+       try:
+               cls = Task.TaskBase.classes[name]
+       except KeyError:
+               return color2code['RED']
+       if cls.color in mp:
+               return mp[cls.color]
+       if cls.color in color2code:
+               return color2code[cls.color]
+       return color2code['RED']
+
+def loop(self):
+       while 1:
+               tsk=Runner.TaskConsumer.ready.get()
+               tsk.master.set_running(1, id(threading.currentThread()), tsk)
+               Runner.process_task(tsk)
+               tsk.master.set_running(-1, id(threading.currentThread()), tsk)
+Runner.TaskConsumer.loop = loop
+
+
+old_start = Runner.Parallel.start
+def do_start(self):
+        print Options.options
+       try:
+               Options.options.dband
+       except AttributeError:
+               raise ValueError('use def options(opt): opt.load("parallel_debug")!')
+
+       self.taskinfo = Queue()
+       old_start(self)
+       process_colors(self)
+Runner.Parallel.start = do_start
+
+def set_running(self, by, i, tsk):
+       self.taskinfo.put( (i, id(tsk), time.time(), tsk.__class__.__name__, self.processed, self.count, by)  )
+Runner.Parallel.set_running = set_running
+
+def name2class(name):
+       return name.replace(' ', '_').replace('.', '_')
+
+def process_colors(producer):
+       # first, cast the parameters
+       tmp = []
+       try:
+               while True:
+                       tup = producer.taskinfo.get(False)
+                       tmp.append(list(tup))
+       except:
+               pass
+
+       try:
+               ini = float(tmp[0][2])
+       except:
+               return
+
+       if not info:
+               seen = []
+               for x in tmp:
+                       name = x[3]
+                       if not name in seen:
+                               seen.append(name)
+                       else:
+                               continue
+
+                       info.append((name, map_to_color(name)))
+               info.sort(key=lambda x: x[0])
+
+       thread_count = 0
+       acc = []
+       for x in tmp:
+               thread_count += x[6]
+               acc.append("%d %d %f %r %d %d %d" % (x[0], x[1], x[2] - ini, x[3], x[4], x[5], thread_count))
+       f = open('pdebug.dat', 'w')
+       #Utils.write('\n'.join(acc))
+       f.write('\n'.join(acc))
+
+       tmp = [lst[:2] + [float(lst[2]) - ini] + lst[3:] for lst in tmp]
+
+       st = {}
+       for l in tmp:
+               if not l[0] in st:
+                       st[l[0]] = len(st.keys())
+       tmp = [  [st[lst[0]]] + lst[1:] for lst in tmp ]
+       THREAD_AMOUNT = len(st.keys())
+
+       st = {}
+       for l in tmp:
+               if not l[1] in st:
+                       st[l[1]] = len(st.keys())
+       tmp = [  [lst[0]] + [st[lst[1]]] + lst[2:] for lst in tmp ]
+
+
+       BAND = Options.options.dband
+
+       seen = {}
+       acc = []
+       for x in range(len(tmp)):
+               line = tmp[x]
+               id = line[1]
+
+               if id in seen:
+                       continue
+               seen[id] = True
+
+               begin = line[2]
+               thread_id = line[0]
+               for y in range(x + 1, len(tmp)):
+                       line = tmp[y]
+                       if line[1] == id:
+                               end = line[2]
+                               #print id, thread_id, begin, end
+                               #acc.append(  ( 10*thread_id, 10*(thread_id+1), 10*begin, 10*end ) )
+                               acc.append( (BAND * begin, BAND*thread_id, BAND*end - BAND*begin, BAND, line[3]) )
+                               break
+
+       if Options.options.dmaxtime < 0.1:
+               gwidth = 1
+               for x in tmp:
+                       m = BAND * x[2]
+                       if m > gwidth:
+                               gwidth = m
+       else:
+               gwidth = BAND * Options.options.dmaxtime
+
+       ratio = float(Options.options.dwidth) / gwidth
+       gwidth = Options.options.dwidth
+
+       gheight = BAND * (THREAD_AMOUNT + len(info) + 1.5)
+
+       out = []
+
+       out.append("""<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>
+<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.0//EN\"
+\"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd\">
+<svg xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" version=\"1.0\"
+   x=\"%r\" y=\"%r\" width=\"%r\" height=\"%r\"
+   id=\"svg602\" xml:space=\"preserve\">
+
+<style type='text/css' media='screen'>
+    g.over rect  { stroke:#FF0000; fill-opacity:0.4 }
+</style>
+
+<script type='text/javascript'><![CDATA[
+    var svg  = document.getElementsByTagName('svg')[0];
+    var svgNS = svg.getAttribute('xmlns');
+    svg.addEventListener('mouseover',function(e){
+      var g = e.target.parentNode;
+      var x = document.getElementById('r_'+g.id);
+      if (x) {
+         g.setAttribute('class', g.getAttribute('class')+' over');
+         x.setAttribute('class', x.getAttribute('class')+' over');
+         showInfo(e, g.id);
+      }
+    },false);
+    svg.addEventListener('mouseout',function(e){
+      var g = e.target.parentNode;
+      var x = document.getElementById('r_'+g.id);
+      if (x) {
+         g.setAttribute('class',g.getAttribute('class').replace(' over',''));
+         x.setAttribute('class',x.getAttribute('class').replace(' over',''));
+         hideInfo(e);
+      }
+    },false);
+
+function showInfo(evt, txt) {
+    tooltip = document.getElementById('tooltip');
+
+    var t = document.getElementById('tooltiptext');
+    t.firstChild.data = txt;
+
+    var x = evt.clientX+10;
+    if (x > 200) { x -= t.getComputedTextLength() + 16; }
+    var y = evt.clientY+30;
+    tooltip.setAttribute("transform", "translate(" + x + "," + y + ")");
+    tooltip.setAttributeNS(null,"visibility","visible");
+
+    var r = document.getElementById('tooltiprect');
+    r.setAttribute('width', t.getComputedTextLength()+6)
+}
+
+
+function hideInfo(evt) {
+    tooltip = document.getElementById('tooltip');
+    tooltip.setAttributeNS(null,"visibility","hidden");
+}
+
+]]></script>
+
+<!-- inkscape requires a big rectangle or it will not export the pictures properly -->
+<rect
+   x='%r' y='%r'
+   width='%r' height='%r' z-index='10'
+   style=\"font-size:10;fill:#ffffff;fill-opacity:0.01;fill-rule:evenodd;stroke:#ffffff;\"
+   />\n
+
+""" % (0, 0, gwidth + 4, gheight + 4,   0, 0, gwidth + 4, gheight + 4))
+
+       # main title
+       if Options.options.dtitle:
+               out.append("""<text x="%d" y="%d" style="font-size:15px; text-anchor:middle; font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans">%s</text>
+""" % (gwidth/2, gheight - 5, Options.options.dtitle))
+
+       # the rectangles
+       groups = {}
+       for (x, y, w, h, clsname) in acc:
+               try:
+                       groups[clsname].append((x, y, w, h))
+               except:
+                       groups[clsname] = [(x, y, w, h)]
+
+       for cls in groups:
+
+               out.append("<g id='%s'>\n" % name2class(cls))
+
+               for (x, y, w, h) in groups[cls]:
+                       out.append("""   <rect
+   x='%r' y='%r'
+   width='%r' height='%r' z-index='11'
+   style=\"font-size:10;fill:%s;fill-rule:evenodd;stroke:#000000;stroke-width:0.2px;\"
+   />\n""" % (2 + x*ratio, 2 + y, w*ratio, h, map_to_color(cls)))
+
+               out.append("</g>\n")
+
+       # output the caption
+       cnt = THREAD_AMOUNT
+
+       for (text, color) in info:
+               # caption box
+               b = BAND/2
+               out.append("""<g id='r_%s'><rect
+               x='%r' y='%r'
+               width='%r' height='%r'
+               style=\"font-size:10;fill:%s;fill-rule:evenodd;stroke:#000000;stroke-width:0.2px;\"
+  />\n""" %                       (name2class(text), 2 + BAND,     5 + (cnt + 0.5) * BAND, b, b, color))
+
+               # caption text
+               out.append("""<text
+   style="font-size:12px;font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans"
+   x="%r" y="%d">%s</text></g>\n""" % (2 + 2 * BAND, 5 + (cnt + 0.5) * BAND + 10, text))
+               cnt += 1
+
+       out.append("""
+<g transform="translate(0,0)" visibility="hidden" id="tooltip">
+  <rect id="tooltiprect" y="-15" x="-3" width="1" height="20" style="stroke:black;fill:#edefc2;stroke-width:1"/>
+  <text id="tooltiptext" style="font-family:Arial; font-size:12;fill:black;"> </text>
+</g>""")
+
+       out.append("\n</svg>")
+
+       #node = producer.bld.path.make_node('pdebug.svg')
+       f = open('pdebug.svg', 'w')
+       f.write("".join(out))
+
+
diff --git a/buildtools/wafadmin/3rdparty/batched_cc.py b/buildtools/wafadmin/3rdparty/batched_cc.py
new file mode 100644 (file)
index 0000000..8e31074
--- /dev/null
@@ -0,0 +1,183 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006 (ita)
+
+"""
+Batched builds - compile faster
+instead of compiling object files one by one, c/c++ compilers are often able to compile at once:
+cc -c ../file1.c ../file2.c ../file3.c
+
+Files are output on the directory where the compiler is called, and dependencies are more difficult
+to track (do not run the command on all source files if only one file changes)
+
+As such, we do as if the files were compiled one by one, but no command is actually run:
+replace each cc/cpp Task by a TaskSlave
+A new task called TaskMaster collects the signatures from each slave and finds out the command-line
+to run.
+
+To set this up, the method ccroot::create_task is replaced by a new version, to enable batched builds
+it is only necessary to import this module in the configuration (no other change required)
+"""
+
+MAX_BATCH = 50
+MAXPARALLEL = False
+
+EXT_C = ['.c', '.cc', '.cpp', '.cxx']
+
+import os, threading
+import TaskGen, Task, ccroot, Build, Logs
+from TaskGen import extension, feature, before
+from Constants import *
+
+cc_str = '${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} -c ${SRCLST}'
+cc_fun = Task.compile_fun_noshell('batched_cc', cc_str)[0]
+
+cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${_CXXINCFLAGS} ${_CXXDEFFLAGS} -c ${SRCLST}'
+cxx_fun = Task.compile_fun_noshell('batched_cxx', cxx_str)[0]
+
+count = 70000
+class batch_task(Task.Task):
+       color = 'RED'
+
+       after = 'cc cxx'
+       before = 'cc_link cxx_link static_link'
+
+       def __str__(self):
+               return '(batch compilation for %d slaves)\n' % len(self.slaves)
+
+       def __init__(self, *k, **kw):
+               Task.Task.__init__(self, *k, **kw)
+               self.slaves = []
+               self.inputs = []
+               self.hasrun = 0
+
+               global count
+               count += 1
+               self.idx = count
+
+       def add_slave(self, slave):
+               self.slaves.append(slave)
+               self.set_run_after(slave)
+
+       def runnable_status(self):
+               for t in self.run_after:
+                       if not t.hasrun:
+                               return ASK_LATER
+
+               for t in self.slaves:
+                       #if t.executed:
+                       if t.hasrun != SKIPPED:
+                               return RUN_ME
+
+               return SKIP_ME
+
+       def run(self):
+               outputs = []
+               self.outputs = []
+
+               srclst = []
+               slaves = []
+               for t in self.slaves:
+                       if t.hasrun != SKIPPED:
+                               slaves.append(t)
+                               srclst.append(t.inputs[0].abspath(self.env))
+
+               self.env.SRCLST = srclst
+               self.cwd = slaves[0].inputs[0].parent.abspath(self.env)
+
+               env = self.env
+               app = env.append_unique
+               cpppath_st = env['CPPPATH_ST']
+               env._CCINCFLAGS = env.CXXINCFLAGS = []
+
+               # local flags come first
+               # set the user-defined includes paths
+               for i in env['INC_PATHS']:
+                       app('_CCINCFLAGS', cpppath_st % i.abspath())
+                       app('_CXXINCFLAGS', cpppath_st % i.abspath())
+                       app('_CCINCFLAGS', cpppath_st % i.abspath(env))
+                       app('_CXXINCFLAGS', cpppath_st % i.abspath(env))
+
+               # set the library include paths
+               for i in env['CPPPATH']:
+                       app('_CCINCFLAGS', cpppath_st % i)
+                       app('_CXXINCFLAGS', cpppath_st % i)
+
+               if self.slaves[0].__class__.__name__ == 'cc':
+                       ret = cc_fun(self)
+               else:
+                       ret = cxx_fun(self)
+
+               if ret:
+                       return ret
+
+               for t in slaves:
+                       t.old_post_run()
+
+from TaskGen import extension, feature, after
+
+import cc, cxx
+def wrap(fun):
+       def foo(self, node):
+               # we cannot control the extension, this sucks
+               self.obj_ext = '.o'
+
+               task = fun(self, node)
+               if not getattr(self, 'masters', None):
+                       self.masters = {}
+                       self.allmasters = []
+
+               if not node.parent.id in self.masters:
+                       m = self.masters[node.parent.id] = self.master = self.create_task('batch')
+                       self.allmasters.append(m)
+               else:
+                       m = self.masters[node.parent.id]
+                       if len(m.slaves) > MAX_BATCH:
+                               m = self.masters[node.parent.id] = self.master = self.create_task('batch')
+                               self.allmasters.append(m)
+
+               m.add_slave(task)
+               return task
+       return foo
+
+c_hook = wrap(cc.c_hook)
+extension(cc.EXT_CC)(c_hook)
+
+cxx_hook = wrap(cxx.cxx_hook)
+extension(cxx.EXT_CXX)(cxx_hook)
+
+
+@feature('cprogram', 'cshlib', 'cstaticlib')
+@after('apply_link')
+def link_after_masters(self):
+       if getattr(self, 'allmasters', None):
+               for m in self.allmasters:
+                       self.link_task.set_run_after(m)
+
+for c in ['cc', 'cxx']:
+       t = Task.TaskBase.classes[c]
+       def run(self):
+               pass
+
+       def post_run(self):
+               #self.executed=1
+               pass
+
+       def can_retrieve_cache(self):
+               if self.old_can_retrieve_cache():
+                       for m in self.generator.allmasters:
+                               try:
+                                       m.slaves.remove(self)
+                               except ValueError:
+                                       pass    #this task wasn't included in that master
+                       return 1
+               else:
+                       return None
+
+       setattr(t, 'oldrun', t.__dict__['run'])
+       setattr(t, 'run', run)
+       setattr(t, 'old_post_run', t.post_run)
+       setattr(t, 'post_run', post_run)
+       setattr(t, 'old_can_retrieve_cache', t.can_retrieve_cache)
+       setattr(t, 'can_retrieve_cache', can_retrieve_cache)
+
diff --git a/buildtools/wafadmin/3rdparty/boost.py b/buildtools/wafadmin/3rdparty/boost.py
new file mode 100644 (file)
index 0000000..e690a4e
--- /dev/null
@@ -0,0 +1,343 @@
+#!/usr/bin/env python
+# encoding: utf-8
+#
+# partially based on boost.py written by Gernot Vormayr
+# written by Ruediger Sonderfeld <ruediger@c-plusplus.de>, 2008
+# modified by Bjoern Michaelsen, 2008
+# modified by Luca Fossati, 2008
+# rewritten for waf 1.5.1, Thomas Nagy, 2008
+#
+#def set_options(opt):
+#      opt.tool_options('boost')
+#      # ...
+#
+#def configure(conf):
+#      # ... (e.g. conf.check_tool('g++'))
+#      conf.check_tool('boost')
+#   conf.check_boost(lib='signals filesystem', static='onlystatic', score_version=(-1000, 1000), tag_minscore=1000)
+#
+#def build(bld):
+#   bld(source='main.c', target='bar', uselib="BOOST BOOST_SYSTEM")
+#
+#ISSUES:
+# * find_includes should be called only once!
+# * support mandatory
+
+######## boost update ###########
+## ITA: * the method get_boost_version_number does work
+##      * the rest of the code has not really been tried
+#       * make certain a demo is provided (in demos/adv for example)
+
+# TODO: bad and underdocumented code -> boost.py will be removed in waf 1.6 to be rewritten later
+
+import os.path, glob, types, re, sys
+import Configure, config_c, Options, Utils, Logs
+from Logs import warn, debug
+from Configure import conf
+
# snippet compiled and run to print the detected boost version
boost_code = '''
#include <iostream>
#include <boost/version.hpp>
int main() { std::cout << BOOST_VERSION << std::endl; }
'''

# default search paths for boost libraries and headers
boost_libpath = ['/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib']
boost_cpppath = ['/usr/include', '/usr/local/include', '/opt/local/include', '/sw/include']

# values accepted by the 'static' parameter of check_boost
STATIC_NOSTATIC = 'nostatic'
STATIC_BOTH = 'both'
STATIC_ONLYSTATIC = 'onlystatic'

# regexps matching the tag components of a boost library file name,
# e.g. libboost_thread-gcc43-mt-1_38.so
# (raw strings so '\d' is not an invalid string escape)
is_versiontag = re.compile(r'^\d+_\d+_?\d*$')
is_threadingtag = re.compile(r'^mt$')
is_abitag = re.compile(r'^[sgydpn]+$')
is_toolsettag = re.compile(r'^(acc|borland|como|cw|dmc|darwin|gcc|hp_cxx|intel|kylix|vc|mgw|qcc|sun|vacpp)\d*$')
is_pythontag = re.compile(r'^py[0-9]{2}$')
+
def set_options(opt):
	"""Register the boost command line options."""
	for flag, dest, msg in (
			('--boost-includes', 'boostincludes', 'path to the boost directory where the includes are e.g. /usr/local/include/boost-1_35'),
			('--boost-libs', 'boostlibs', 'path to the directory where the boost libs are e.g. /usr/local/lib')):
		opt.add_option(flag, type='string', default='', dest=dest, help=msg)
+
def string_to_version(s):
	"""Encode a dotted version string such as '1.38.0' as an integer
	(major*100000 + minor*100 + patch); 0 when fewer than three components."""
	parts = s.split('.')
	if len(parts) < 3:
		return 0
	return int(parts[0]) * 100000 + int(parts[1]) * 100 + int(parts[2])
+
def version_string(version):
	"""Return the boost directory-tag form 'major_minor[_patch]' of an
	integer version number as produced by string_to_version."""
	# use floor division explicitly: '/' on ints floors under python 2 but
	# yields floats under python 3, which would break the %d formatting intent
	major = version // 100000
	minor = version // 100 % 1000
	minor_minor = version % 100
	if minor_minor == 0:
		return "%d_%d" % (major, minor)
	else:
		return "%d_%d_%d" % (major, minor, minor_minor)
+
def libfiles(lib, pattern, lib_paths):
	"""Glob each directory in lib_paths for files of the boost library *lib*;
	*pattern* is the platform's shlib/staticlib filename pattern."""
	libname = pattern % ('boost_%s[!_]*' % lib)
	found = []
	for directory in lib_paths:
		found.extend(glob.glob(os.path.join(directory, libname)))
	return found
+
@conf
def get_boost_version_number(self, dir):
	"""silently retrieve the boost version number

	Compile and run the boost_code snippet with *dir* on the include path;
	returns the program output (BOOST_VERSION, e.g. 103800) or -1 when the
	check fails to build or run.
	"""
	try:
		return self.run_c_code(compiler='cxx', code=boost_code, includes=dir, execute=1, env=self.env.copy(), type='cprogram', compile_mode='cxx', compile_filename='test.cpp')
	except Configure.ConfigurationError, e:
		return -1
+
def set_default(kw, var, val):
	"""Store val under kw[var] unless the key is already present."""
	kw.setdefault(var, val)
+
def tags_score(tags, kw):
	"""
	checks library tags

	Score the tag components of a library file name against the tag_*
	regexps in kw; each kw['score_*'] entry is a (match, nomatch) tuple.

	see http://www.boost.org/doc/libs/1_35_0/more/getting_started/unix-variants.html 6.1
	"""
	score = 0
	needed_tags = {
		'threading': kw['tag_threading'],
		'abi':       kw['tag_abi'],
		'toolset':   kw['tag_toolset'],
		'version':   kw['tag_version'],
		'python':    kw['tag_python']
	}

	# no toolset requested: derive one from the compiler name/version, e.g. 'gcc43'
	if kw['tag_toolset'] is None:
		v = kw['env']
		toolset = v['CXX_NAME']
		if v['CXX_VERSION']:
			version_no = v['CXX_VERSION'].split('.')
			toolset += version_no[0]
			if len(version_no) > 1:
				toolset += version_no[1]
		needed_tags['toolset'] = toolset

	found_tags = {}
	for tag in tags:
		if is_versiontag.match(tag): found_tags['version'] = tag
		if is_threadingtag.match(tag): found_tags['threading'] = tag
		if is_abitag.match(tag): found_tags['abi'] = tag
		if is_toolsettag.match(tag): found_tags['toolset'] = tag
		if is_pythontag.match(tag): found_tags['python'] = tag

	# iterate the dict directly instead of iterkeys() (works on python 2 and 3)
	for tagname in needed_tags:
		if needed_tags[tagname] is not None and tagname in found_tags:
			if re.compile(needed_tags[tagname]).match(found_tags[tagname]):
				score += kw['score_' + tagname][0]
			else:
				score += kw['score_' + tagname][1]
	return score
+
@conf
def validate_boost(self, kw):
	"""Fill in default values for every keyword argument of check_boost."""
	ver = kw.get('version', '')

	# a plain 'version' doubles as both bounds
	for x in 'min_version max_version version'.split():
		set_default(kw, x, ver)

	set_default(kw, 'lib', '')
	kw['lib'] = Utils.to_list(kw['lib'])

	set_default(kw, 'env', self.env)

	set_default(kw, 'libpath', boost_libpath)
	set_default(kw, 'cpppath', boost_cpppath)

	for x in 'tag_threading tag_version tag_toolset'.split():
		set_default(kw, x, None)
	# by default, reject debug-tagged ('d') libraries
	set_default(kw, 'tag_abi', '^[^d]*$')

	# match the running interpreter, e.g. tag 'py26'
	set_default(kw, 'python', str(sys.version_info[0]) + str(sys.version_info[1]) )
	set_default(kw, 'tag_python', '^py' + kw['python'] + '$')

	# (match, nomatch) score increments consumed by tags_score
	set_default(kw, 'score_threading', (10, -10))
	set_default(kw, 'score_abi', (10, -10))
	set_default(kw, 'score_python', (10,-10))
	set_default(kw, 'score_toolset', (1, -1))
	set_default(kw, 'score_version', (100, -100))

	# NOTE(review): both 'score_min' and 'min_score' are set but only
	# 'min_score' is read (find_boost_library); 'score_min' looks vestigial
	set_default(kw, 'score_min', 0)
	set_default(kw, 'static', STATIC_NOSTATIC)
	set_default(kw, 'found_includes', False)
	set_default(kw, 'min_score', 0)

	set_default(kw, 'errmsg', 'not found')
	set_default(kw, 'okmsg', 'ok')
+
@conf
def find_boost_includes(self, kw):
	"""
	check every path in kw['cpppath'] for subdir
	that either starts with boost- or is named boost.

	Then the version is checked and selected accordingly to
	min_version/max_version. The highest possible version number is
	selected!

	If no versiontag is set the versiontag is set accordingly to the
	selected library and CPPPATH_BOOST is set.
	"""
	# --boost-includes on the command line overrides the default search path
	boostPath = getattr(Options.options, 'boostincludes', '')
	if boostPath:
		boostPath = [os.path.normpath(os.path.expandvars(os.path.expanduser(boostPath)))]
	else:
		boostPath = Utils.to_list(kw['cpppath'])

	min_version = string_to_version(kw.get('min_version', ''))
	# no upper bound requested: accept anything (sys.maxint: python 2 only)
	max_version = string_to_version(kw.get('max_version', '')) or (sys.maxint - 1)

	version = 0
	for include_path in boostPath:
		boost_paths = [p for p in glob.glob(os.path.join(include_path, 'boost*')) if os.path.isdir(p)]
		debug('BOOST Paths: %r' % boost_paths)
		for path in boost_paths:
			pathname = os.path.split(path)[-1]
			ret = -1
			if pathname == 'boost':
				# unversioned layout: headers live directly under include_path/boost
				path = include_path
				ret = self.get_boost_version_number(path)
			elif pathname.startswith('boost-'):
				ret = self.get_boost_version_number(path)
			ret = int(ret)

			# keep the highest version found inside the requested bounds
			if ret != -1 and ret >= min_version and ret <= max_version and ret > version:
				boost_path = path
				version = ret
	if not version:
		self.fatal('boost headers not found! (required version min: %s max: %s)'
			  % (kw['min_version'], kw['max_version']))
		return False	# NOTE(review): unreachable if fatal() raises -- confirm

	found_version = version_string(version)
	versiontag = '^' + found_version + '$'
	if kw['tag_version'] is None:
		kw['tag_version'] = versiontag
	elif kw['tag_version'] != versiontag:
		warn('boost header version %r and tag_version %r do not match!' % (versiontag, kw['tag_version']))
	env = self.env
	env['CPPPATH_BOOST'] = boost_path
	env['BOOST_VERSION'] = found_version
	self.found_includes = 1
	ret = 'Version %s (%s)' % (found_version, boost_path)
	return ret
+
@conf
def find_boost_library(self, lib, kw):
	"""Locate the best-scoring shared or static boost library *lib* and record
	LIBPATH_BOOST_<LIB> and LIB/STATICLIB _BOOST_<LIB> in the env; fatal when
	no candidate is found."""

	def find_library_from_list(lib, files):
		# keep the candidate whose filename tags score highest (see tags_score)
		lib_pattern = re.compile('.*boost_(.*?)\..*')
		result = (None, None)
		resultscore = kw['min_score'] - 1
		for file in files:
			m = lib_pattern.search(file, 1)
			if m:
				libname = m.group(1)
				libtags = libname.split('-')[1:]
				currentscore = tags_score(libtags, kw)
				if currentscore > resultscore:
					result = (libname, file)
					resultscore = currentscore
		return result

	# --boost-libs on the command line overrides the default search path
	lib_paths = getattr(Options.options, 'boostlibs', '')
	if lib_paths:
		lib_paths = [os.path.normpath(os.path.expandvars(os.path.expanduser(lib_paths)))]
	else:
		lib_paths = Utils.to_list(kw['libpath'])

	v = kw.get('env', self.env)

	(libname, file) = (None, None)
	# try shared libraries first, unless 'onlystatic' was requested
	if kw['static'] in [STATIC_NOSTATIC, STATIC_BOTH]:
		st_env_prefix = 'LIB'
		files = libfiles(lib, v['shlib_PATTERN'], lib_paths)
		(libname, file) = find_library_from_list(lib, files)
	if libname is None and kw['static'] in [STATIC_ONLYSTATIC, STATIC_BOTH]:
		st_env_prefix = 'STATICLIB'
		staticLibPattern = v['staticlib_PATTERN']
		if self.env['CC_NAME'] == 'msvc':
			staticLibPattern = 'lib' + staticLibPattern
		files = libfiles(lib, staticLibPattern, lib_paths)
		(libname, file) = find_library_from_list(lib, files)
	if libname is not None:
		v['LIBPATH_BOOST_' + lib.upper()] = [os.path.split(file)[0]]
		# msvc links against the import .lib file, which keeps its lib prefix
		if self.env['CC_NAME'] == 'msvc' and os.path.splitext(file)[1] == '.lib':
			v[st_env_prefix + '_BOOST_' + lib.upper()] = ['libboost_'+libname]
		else:
			v[st_env_prefix + '_BOOST_' + lib.upper()] = ['boost_'+libname]
		return
	self.fatal('lib boost_' + lib + ' not found!')
+
@conf
def check_boost(self, *k, **kw):
	"""
	This should be the main entry point

- min_version
- max_version
- version
- include_path
- lib_path
- lib
- toolsettag   - None or a regexp
- threadingtag - None or a regexp
- abitag       - None or a regexp
- versiontag   - WARNING: you should rather use version or min_version/max_version
- static       - look for static libs (values:
         'nostatic'   or STATIC_NOSTATIC   - ignore static libs (default)
         'both'       or STATIC_BOTH       - find static libs, too
         'onlystatic' or STATIC_ONLYSTATIC - find only static libs
- score_version
- score_abi
- score_threading
- score_toolset
 * the scores are tuples (match_score, nomatch_score)
   match_score is added to the score if the tag is matched
   nomatch_score is added when a tag is found and does not match
- min_score
	"""

	if not self.env['CXX']:
		self.fatal('load a c++ compiler tool first, for example conf.check_tool("g++")')
	# fill in the defaults for every keyword (see validate_boost)
	self.validate_boost(kw)
	ret = None
	try:
		# headers are only searched once per configuration run
		if not kw.get('found_includes', None):
			self.check_message_1(kw.get('msg_includes', 'boost headers'))
			ret = self.find_boost_includes(kw)

	except Configure.ConfigurationError, e:
		if 'errmsg' in kw:
			self.check_message_2(kw['errmsg'], 'YELLOW')
		if 'mandatory' in kw:
			if Logs.verbose > 1:
				raise
			else:
				self.fatal('the configuration failed (see %r)' % self.log.name)
	else:
		if 'okmsg' in kw:
			self.check_message_2(kw.get('okmsg_includes', ret))

	# then locate each requested library individually
	for lib in kw['lib']:
		self.check_message_1('library boost_'+lib)
		try:
			self.find_boost_library(lib, kw)
		except Configure.ConfigurationError, e:
			ret = False
			if 'errmsg' in kw:
				self.check_message_2(kw['errmsg'], 'YELLOW')
			if 'mandatory' in kw:
				if Logs.verbose > 1:
					raise
				else:
					self.fatal('the configuration failed (see %r)' % self.log.name)
		else:
			if 'okmsg' in kw:
				self.check_message_2(kw['okmsg'])

	return ret
+
diff --git a/buildtools/wafadmin/3rdparty/fluid.py b/buildtools/wafadmin/3rdparty/fluid.py
new file mode 100644 (file)
index 0000000..117edef
--- /dev/null
@@ -0,0 +1,27 @@
+#!/usr/bin/python
+# encoding: utf-8
+# Grygoriy Fuchedzhy 2009
+
+"""
+Compile fluid files (fltk graphic library). Use the 'fluid' feature in conjunction with the 'cxx' feature.
+"""
+
import Task
from TaskGen import extension

# fluid turns a .fl file into a .cxx/.hpp pair: -c compiles, -o/-h name the outputs
Task.simple_task_type('fluid', '${FLUID} -c -o ${TGT[0].abspath(env)} -h ${TGT[1].abspath(env)} ${SRC}', 'BLUE', shell=False, ext_out='.cxx')
+
@extension('.fl')
def fluid(self, node):
	"""add the .fl to the source list; the cxx file generated will be compiled when possible"""
	outputs = [node.change_ext('.cpp'), node.change_ext('.hpp')]
	self.create_task('fluid', node, outputs)

	if 'cxx' in self.features:
		self.allnodes.append(outputs[0])
+
+       if 'cxx' in self.features:
+               self.allnodes.append(cpp)
+
def detect(conf):
	"""Find the fluid program and the fltk compile/link flags (FLTK uselib)."""
	# the return value was previously stored in an unused local named 'fluid',
	# which shadowed the extension hook above at module level; drop it
	conf.find_program('fluid', var='FLUID', mandatory=True)
	conf.check_cfg(path='fltk-config', package='', args='--cxxflags --ldflags', uselib_store='FLTK', mandatory=True)
+
diff --git a/buildtools/wafadmin/3rdparty/gccdeps.py b/buildtools/wafadmin/3rdparty/gccdeps.py
new file mode 100644 (file)
index 0000000..6600c9c
--- /dev/null
@@ -0,0 +1,128 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2008-2010 (ita)
+
+"""
+Execute the tasks with gcc -MD, read the dependencies from the .d file
+and prepare the dependency calculation for the next run
+"""
+
+import os, re, threading
+import Task, Logs, Utils, preproc
+from TaskGen import before, after, feature
+
+lock = threading.Lock()
+
+preprocessor_flag = '-MD'
+
@feature('cc')
@before('apply_core')
def add_mmd_cc(self):
	"""Add the -MD flag to the C compiler flags, exactly once."""
	if preprocessor_flag not in self.env.get_flat('CCFLAGS'):
		self.env.append_value('CCFLAGS', preprocessor_flag)
+
@feature('cxx')
@before('apply_core')
def add_mmd_cxx(self):
	"""Add the -MD flag to the C++ compiler flags, exactly once."""
	if preprocessor_flag not in self.env.get_flat('CXXFLAGS'):
		self.env.append_value('CXXFLAGS', preprocessor_flag)
+
def scan(self):
	"""Initial scanner: reuse the dependencies stored by post_run on a previous build, if any."""
	stored = self.generator.bld.node_deps.get(self.unique_id(), [])
	return (stored, [])
+
# object-file suffix, and "../path" entries found in the generated .d file
# (raw strings so the backslash escapes are not interpreted by python)
re_o = re.compile(r"\.o$")
re_src = re.compile(r"^(\.\.)[\\/](.*)$")
+
def post_run(self):
	"""Parse the .d file written by gcc -MD and store the dependency nodes for scan()."""
	# The following code is executed by threads, it is not safe, so a lock is needed...

	if getattr(self, 'cached', None):
		return Task.Task.post_run(self)

	# the .d file sits next to the .o output
	name = self.outputs[0].abspath(self.env)
	name = re_o.sub('.d', name)
	txt = Utils.readf(name)
	#os.unlink(name)

	# rejoin the backslash-continued lines of the makefile fragment
	txt = txt.replace('\\\n', '')

	# "target: dep1 dep2 ..." -- keep only the right-hand side
	lst = txt.strip().split(':')
	val = ":".join(lst[1:])
	val = val.split()

	nodes = []
	bld = self.generator.bld

	# matches paths under the build variant or "../" relative paths
	f = re.compile("^("+self.env.variant()+"|\.\.)[\\/](.*)$")
	for x in val:
		if os.path.isabs(x):

			# absolute dependencies are ignored unless preproc asks for them
			if not preproc.go_absolute:
				continue

			lock.acquire()
			try:
				node = bld.root.find_resource(x)
			finally:
				lock.release()
		else:
			g = re.search(re_src, x)
			if g:
				# "../path": resolve against the parent of the build dir
				x = g.group(2)
				lock.acquire()
				try:
					node = bld.bldnode.parent.find_resource(x)
				finally:
					lock.release()
			else:
				# "variant/path": resolve against the source dir
				g = re.search(f, x)
				if g:
					x = g.group(2)
					lock.acquire()
					try:
						node = bld.srcnode.find_resource(x)
					finally:
						lock.release()
				# NOTE(review): when neither regexp matches, 'node' keeps its
				# value from the previous iteration (or is unbound on the
				# first one) -- confirm whether that case can occur

		if id(node) == id(self.inputs[0]):
			# ignore the source file, it is already in the dependencies
			# this way, successful config tests may be retrieved from the cache
			continue

		if not node:
			raise ValueError('could not find %r for %r' % (x, self))
		else:
			nodes.append(node)

	Logs.debug('deps: real scanner for %s returned %s' % (str(self), str(nodes)))

	# store the result for the scan() of the next build
	bld.node_deps[self.unique_id()] = nodes
	bld.raw_deps[self.unique_id()] = []

	try:
		del self.cache_sig
	except:
		pass

	Task.Task.post_run(self)
+
import Constants, Utils
def sig_implicit_deps(self):
	"""Fall back to SIG_NIL when dependency resolution fails, forcing a
	rebuild instead of aborting with an error."""
	try:
		return Task.Task.sig_implicit_deps(self)
	except Utils.WafError:
		return Constants.SIG_NIL
+
# install the gcc -MD based scanner on the cc/cxx task classes, when they exist
for name in 'cc cxx'.split():
	cls = Task.TaskBase.classes.get(name)
	if cls is not None:
		cls.post_run = post_run
		cls.scan = scan
		cls.sig_implicit_deps = sig_implicit_deps
+
diff --git a/buildtools/wafadmin/3rdparty/go.py b/buildtools/wafadmin/3rdparty/go.py
new file mode 100644 (file)
index 0000000..2d8df0d
--- /dev/null
@@ -0,0 +1,111 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# go.py - Waf tool for the Go programming language
+# By: Tom Wambold <tom5760@gmail.com>
+
+import platform, os
+
+import Task
+import Utils
+from TaskGen import feature, extension, after
+
# compile all .go sources in one step, archive the package, link the program
Task.simple_task_type('gocompile', '${GOC} ${GOCFLAGS} -o ${TGT} ${SRC}', shell=False)
Task.simple_task_type('gopack', '${GOP} grc ${TGT} ${SRC}', shell=False)
Task.simple_task_type('golink', '${GOL} ${GOLFLAGS} -o ${TGT} ${SRC}', shell=False)
+
def detect(conf):
	"""Map GOARCH (or the host machine) to the 6g/8g/5g toolchain and locate the programs."""

	def set_def(var, val):
		# only set when not already provided in the environment
		if not conf.env[var]:
			conf.env[var] = val

	goarch = os.getenv("GOARCH")

	if goarch == '386':
		set_def('GO_PLATFORM', 'i386')
	elif goarch == 'amd64':
		set_def('GO_PLATFORM', 'x86_64')
	elif goarch == 'arm':
		set_def('GO_PLATFORM', 'arm')
	else:
		# no GOARCH set: fall back to the build machine architecture
		set_def('GO_PLATFORM', platform.machine())

	if conf.env.GO_PLATFORM == 'x86_64':
		set_def('GO_COMPILER', '6g')
		set_def('GO_LINKER', '6l')
		set_def('GO_EXTENSION', '.6')
	elif conf.env.GO_PLATFORM in ['i386', 'i486', 'i586', 'i686']:
		set_def('GO_COMPILER', '8g')
		set_def('GO_LINKER', '8l')
		set_def('GO_EXTENSION', '.8')
	elif conf.env.GO_PLATFORM == 'arm':
		set_def('GO_COMPILER', '5g')
		set_def('GO_LINKER', '5l')
		set_def('GO_EXTENSION', '.5')

	if not (conf.env.GO_COMPILER or conf.env.GO_LINKER or conf.env.GO_EXTENSION):
		# NOTE(review): conf.fatal presumably raises by itself; 'raise' on its
		# return value looks redundant -- confirm against Configure.fatal
		raise conf.fatal('Unsupported platform ' + platform.machine())

	set_def('GO_PACK', 'gopack')
	set_def('GO_PACK_EXTENSION', '.a')

	conf.find_program(conf.env.GO_COMPILER, var='GOC', mandatory=True)
	conf.find_program(conf.env.GO_LINKER,   var='GOL', mandatory=True)
	conf.find_program(conf.env.GO_PACK,     var='GOP', mandatory=True)
	conf.find_program('cgo',                var='CGO', mandatory=True)
+
@extension('.go')
def compile_go(self, node):
	"""Collect every .go source node on the task generator for a single gocompile task."""
	collected = getattr(self, 'go_nodes', None)
	if collected is None:
		self.go_nodes = [node]
	else:
		collected.append(node)
+
@feature('go')
@after('apply_core')
def apply_compile_go(self):
	"""Create one gocompile task over all collected .go nodes (None when there are none)."""
	nodes = getattr(self, 'go_nodes', None)
	if nodes is None:
		self.go_compile_task = None
	else:
		out = self.path.find_or_declare(self.target + self.env.GO_EXTENSION)
		self.go_compile_task = self.create_task('gocompile', nodes, [out])
+
@feature('gopackage', 'goprogram')
@after('apply_compile_go')
def apply_goinc(self):
	"""Order this generator's compile task after the go packages named in
	uselib_local, and propagate their include/link search paths."""
	if not getattr(self, 'go_compile_task', None):
		return

	names = self.to_list(getattr(self, 'uselib_local', []))
	for name in names:
		obj = self.name_to_obj(name)
		if not obj:
			# bugfix: the original interpolated the undefined name 'lib_name'
			# here, which raised NameError instead of the intended WafError
			raise Utils.WafError('object %r was not found in uselib_local '
					'(required by %r)' % (name, self.name))
		obj.post()
		self.go_compile_task.set_run_after(obj.go_package_task)
		self.go_compile_task.dep_nodes.extend(obj.go_package_task.outputs)
		self.env.append_unique('GOCFLAGS', '-I' + obj.path.abspath(obj.env))
		self.env.append_unique('GOLFLAGS', '-L' + obj.path.abspath(obj.env))
+
@feature('gopackage')
@after('apply_goinc')
def apply_gopackage(self):
	"""Archive the compiled package object into a gopack .a file."""
	compiled = self.go_compile_task.outputs[0]
	archive = self.path.find_or_declare(self.target + self.env.GO_PACK_EXTENSION)
	task = self.create_task('gopack', compiled, archive)
	task.set_run_after(self.go_compile_task)
	task.dep_nodes.extend(self.go_compile_task.outputs)
	self.go_package_task = task
+
@feature('goprogram')
@after('apply_goinc')
def apply_golink(self):
	"""Link the compiled object into the final program with golink."""
	compiled = self.go_compile_task.outputs[0]
	program = self.path.find_or_declare(self.target)
	task = self.create_task('golink', compiled, program)
	task.set_run_after(self.go_compile_task)
	task.dep_nodes.extend(self.go_compile_task.outputs)
	self.go_link_task = task
+
diff --git a/buildtools/wafadmin/3rdparty/lru_cache.py b/buildtools/wafadmin/3rdparty/lru_cache.py
new file mode 100644 (file)
index 0000000..5b00abc
--- /dev/null
@@ -0,0 +1,97 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy 2011
+
+import os, shutil, re
+import Options, Build, Logs
+
+"""
+Apply a least recently used policy to the Waf cache.
+
+For performance reasons, it is called after the build is complete.
+
+We assume that the folders are written atomically
+
+Do export WAFCACHE=/tmp/foo-xyz where xyz represents the cache size in megabytes
+If missing, the default cache size will be set to 10GB
+"""
+
# matches the trailing number in the cache folder name, e.g. WAFCACHE=/tmp/foo-2000
# (raw string so '\d' is not an invalid string escape)
re_num = re.compile(r'[a-zA-Z_]+(\d+)')

CACHESIZE = 10*1024*1024*1024 # default cache size cap, in bytes (10GB)
CLEANRATIO = 0.8 # trim entries until the total drops below CACHESIZE * CLEANRATIO
DIRSIZE = 4096 # assumed filesystem overhead per cache directory entry
+
def compile(self):
	"""Replacement for BuildContext.compile: ensure the global cache directory
	exists, run the original compile, then sweep the cache (LRU trim)."""
	if Options.cache_global and not Options.options.nocache:
		try:
			os.makedirs(Options.cache_global)
		except OSError:
			# best effort: the directory usually exists already
			# (narrowed from a bare except, which also swallowed SystemExit etc.)
			pass

	try:
		self.raw_compile()
	finally:
		# sweep even when the build failed, so the cache cannot grow unbounded
		if Options.cache_global and not Options.options.nocache:
			self.sweep()
+
def sweep(self):
	"""Trim the least recently used cache folders until the total size drops
	below CACHESIZE * CLEANRATIO."""
	global CACHESIZE
	CACHEDIR = Options.cache_global

	# get the cache max size from the WAFCACHE filename
	# NOTE(review): the module docstring says the suffix is in megabytes, but
	# the value is used directly against byte totals below -- confirm
	re_num = re.compile('[a-zA-Z_]+(\d+)')
	val = re_num.sub('\\1', os.path.basename(Options.cache_global))
	try:
		CACHESIZE = int(val)
	except:
		pass

	# map folder names to timestamps
	flist = {}
	for x in os.listdir(CACHEDIR):
		j = os.path.join(CACHEDIR, x)
		if os.path.isdir(j) and len(x) == 32: # dir names are md5 hexdigests
			flist[x] = [os.stat(j).st_mtime, 0]

	# second pass: accumulate the size of each entry's files
	for (x, v) in flist.items():
		cnt = DIRSIZE # each entry takes 4kB
		d = os.path.join(CACHEDIR, x)
		for k in os.listdir(d):
			cnt += os.stat(os.path.join(d, k)).st_size
		flist[x][1] = cnt

	total = sum([x[1] for x in flist.values()])
	Logs.debug('lru: Cache size is %r' % total)

	if total >= CACHESIZE:
		Logs.debug('lru: Trimming the cache since %r > %r' % (total, CACHESIZE))

		# make a list to sort the folders by timestamp
		lst = [(p, v[0], v[1]) for (p, v) in flist.items()]
		lst.sort(key=lambda x: x[1]) # sort by timestamp
		lst.reverse()

		# pop() takes from the end, i.e. the oldest entry first
		while total >= CACHESIZE * CLEANRATIO:
			(k, t, s) = lst.pop()
			p = os.path.join(CACHEDIR, k)
			# rename first so concurrent processes do not delete twice
			v = p + '.del'
			try:
				os.rename(p, v)
			except:
				# someone already did it
				pass
			else:
				try:
					shutil.rmtree(v)
				except:
					# this should not happen, but who knows?
					Logs.warn('If you ever see this message, report it (%r)' % v)
			total -= s
			del flist[k]
	Logs.debug('lru: Total at the end %r' % total)
+
# keep the original compile available as raw_compile, then install the
# wrapper that sweeps the cache after each build
Build.BuildContext.raw_compile = Build.BuildContext.compile
Build.BuildContext.compile = compile
Build.BuildContext.sweep = sweep
+
diff --git a/buildtools/wafadmin/3rdparty/paranoid.py b/buildtools/wafadmin/3rdparty/paranoid.py
new file mode 100644 (file)
index 0000000..ead64ea
--- /dev/null
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# ita 2010
+
+import Logs, Utils, Build, Task
+
def say(txt):
	"""Print a warning; replaced below by a cowsay-backed version when available."""
	Logs.warn("^o^: %s" % txt)

try:
	# locate cowsay; on any failure keep the plain Logs.warn version above
	ret = Utils.cmd_output('which cowsay 2> /dev/null').strip()
except Exception, e:
	pass
else:
	def say(txt):
		f = Utils.cmd_output([ret, txt])
		Utils.pprint('PINK', f)

say('you make the errors, we detect them')
+
def check_task_classes(self):
	"""Warn about task classes that declare no precedence constraints.

	Bugfixes vs the original: Task.TaskBase.classes maps names to classes, so
	iterate items() (iterating the dict yields name strings); use issubclass
	on the class objects instead of isinstance; reference the loop variable
	instead of the undefined name 'cls'; parenthesize the boolean tests (the
	original 'not a or b' warned whenever 'before'/'after' WAS set); and
	actually interpolate the class name into the message.
	"""
	for name, klass in Task.TaskBase.classes.items():
		if issubclass(klass, Task.Task):
			if not (getattr(klass, 'ext_in', None) or getattr(klass, 'before', None)):
				say('class %s has no precedence constraints (ext_in/before)' % name)
			if not (getattr(klass, 'ext_out', None) or getattr(klass, 'after', None)):
				say('class %s has no precedence constraints (ext_out/after)' % name)
+
# wrap BuildContext.compile so the sanity check runs once before each build;
# setting a truthy 'magic' attribute on the context silences the check
comp = Build.BuildContext.compile
def compile(self):
	if not getattr(self, 'magic', None):
		check_task_classes(self)
	return comp(self)
Build.BuildContext.compile = compile
+
diff --git a/buildtools/wafadmin/3rdparty/swig.py b/buildtools/wafadmin/3rdparty/swig.py
new file mode 100644 (file)
index 0000000..c0a4108
--- /dev/null
@@ -0,0 +1,190 @@
+#! /usr/bin/env python
+# encoding: UTF-8
+# Petar Forai
+# Thomas Nagy 2008
+
+import re
+import Task, Utils, Logs
+from TaskGen import extension
+from Configure import conf
+import preproc
+
+"""
+Welcome in the hell of adding tasks dynamically
+
+swig interface files may be created at runtime, the module name may be unknown in advance
+
+rev 5859 is much more simple
+"""
+
+SWIG_EXTS = ['.swig', '.i']
+
+swig_str = '${SWIG} ${SWIGFLAGS} ${_CCINCFLAGS} ${_CXXINCFLAGS} ${_CCDEFFLAGS} ${_CXXDEFFLAGS} ${SRC}'
+cls = Task.simple_task_type('swig', swig_str, color='BLUE', ext_in='.i .h', ext_out='.o .c .cxx', shell=False)
+
def runnable_status(self):
    """Set up the swig outputs lazily, then defer to the default scheduling.

    The module name and the generated files are only known once the
    predecessors have run, so they are computed here on first call.
    """
    for t in self.run_after:
        if not t.hasrun:
            # fixed: ASK_LATER is not imported in this module; Task
            # re-exports the Constants names (from Constants import *)
            return Task.ASK_LATER

    if not getattr(self, 'init_outputs', None):
        self.init_outputs = True
        if not getattr(self, 'module', None):
            # search the module name in the interface file
            txt = self.inputs[0].read(self.env)
            m = re_module.search(txt)
            if not m:
                raise ValueError("could not find the swig module name")
            self.module = m.group(1)

        swig_c(self)

        # add the language-specific output files as nodes
        # call funs in the dict swig_langs
        for x in self.env['SWIGFLAGS']:
            # obtain the language from flags such as '-python'
            x = x[1:]
            try:
                fun = swig_langs[x]
            except KeyError:
                pass
            else:
                fun(self)

    return Task.Task.runnable_status(self)
setattr(cls, 'runnable_status', runnable_status)
+
# matches '%module name', optionally with an option list in parentheses
re_module = re.compile('%module(?:\s*\(.*\))?\s+(.+)', re.M)

# stricter module matcher (not referenced in this file) and the
# %include / #include scanners used by scan() below
re_1 = re.compile(r'^%module.*?\s+([\w]+)\s*?$', re.M)
re_2 = re.compile('%include "(.*)"', re.M)
re_3 = re.compile('#include "(.*)"', re.M)
+
def scan(self):
    "scan for swig dependencies, climb the .i files"
    env = self.env

    lst_src = []

    seen = []
    to_see = [self.inputs[0]]

    # breadth-first walk over the %include / #include graph
    while to_see:
        node = to_see.pop(0)
        if node.id in seen:
            continue
        seen.append(node.id)
        lst_src.append(node)

        # read the file
        code = node.read(env)
        code = preproc.re_nl.sub('', code)
        code = preproc.re_cpp.sub(preproc.repl, code)

        # find .i files and project headers
        names = re_2.findall(code) + re_3.findall(code)
        for n in names:
            # look in the include paths, then next to the current file
            for d in self.generator.env.INC_PATHS + [node.parent]:
                u = d.find_resource(n)
                if u:
                    to_see.append(u)
                    break
            else:
                Logs.warn('could not find %r' % n)

    # list of nodes this one depends on, and module name if present
    if Logs.verbose:
        Logs.debug('deps: deps for %s: %s' % (str(self), str(lst_src)))
    return (lst_src, [])
cls.scan = scan
+
# registry of language-specific post-processing callbacks
swig_langs = {}
def swig(fun):
    """Decorator: register *fun* under the language name derived from
    its function name (swig_python -> 'python')."""
    lang = fun.__name__.replace('swig_', '')
    swig_langs[lang] = fun
+
def swig_c(self):
    """Declare the generated C/C++ wrapper node and schedule its compilation."""
    ext = '.swigwrap_%d.c' % self.generator.idx
    flags = self.env['SWIGFLAGS']
    if '-c++' in flags:
        # swig -c++ emits a C++ wrapper -> use a .cxx extension
        ext += 'xx'
    out_node = self.inputs[0].parent.find_or_declare(self.module + ext)

    try:
        if '-c++' in flags:
            fun = self.generator.cxx_hook
        else:
            fun = self.generator.c_hook
    except AttributeError:
        raise Utils.WafError('No c%s compiler was found to process swig files' % ('-c++' in flags and '++' or ''))

    # compile the wrapper, after this swig task has produced it
    task = fun(out_node)
    task.set_run_after(self)

    # inject the new task into the scheduler that is already running
    ge = self.generator.bld.generator
    ge.outstanding.insert(0, task)
    ge.total += 1

    try:
        ltask = self.generator.link_task
    except AttributeError:
        pass
    else:
        # link the wrapper object into the final target
        ltask.inputs.append(task.outputs[0])

    self.outputs.append(out_node)

    if not '-o' in self.env['SWIGFLAGS']:
        self.env.append_value('SWIGFLAGS', '-o')
        self.env.append_value('SWIGFLAGS', self.outputs[0].abspath(self.env))
+
@swig
def swig_python(tsk):
    # the python backend additionally produces <module>.py
    tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.py'))
+
@swig
def swig_ocaml(tsk):
    # the ocaml backend additionally produces <module>.ml and <module>.mli
    tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.ml'))
    tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.mli'))
+
@extension(SWIG_EXTS)
def i_file(self, node):
    """Task generator hook: create a swig task for each .i/.swig file."""
    # the task instance
    tsk = self.create_task('swig')
    tsk.set_inputs(node)
    tsk.module = getattr(self, 'swig_module', None)

    flags = self.to_list(getattr(self, 'swig_flags', []))
    self.env.append_value('SWIGFLAGS', flags)

    if not '-outdir' in flags:
        flags.append('-outdir')
        flags.append(node.parent.abspath(self.env))
        # NOTE(review): these values are appended to the local list after
        # SWIGFLAGS was already updated from it - confirm they actually
        # reach the environment (depends on append_value semantics)
+
@conf
def check_swig_version(conf, minver=None):
    """Check for a minimum swig version like conf.check_swig_version('1.3.28')
    or conf.check_swig_version((1,3,28)).

    Returns True when no minimum was requested or the installed swig
    satisfies it; stores the detected version in env['SWIG_VERSION'].
    """
    reg_swig = re.compile(r'SWIG Version\s(.*)', re.M)

    swig_out = Utils.cmd_output('%s -version' % conf.env['SWIG'])

    swigver = [int(s) for s in reg_swig.findall(swig_out)[0].split('.')]
    if isinstance(minver, basestring):
        minver = [int(s) for s in minver.split(".")]
    if isinstance(minver, tuple):
        minver = [int(s) for s in minver]
    result = (minver is None) or (minver[:3] <= swigver[:3])
    swigver_full = '.'.join(map(str, swigver))
    if result:
        conf.env['SWIG_VERSION'] = swigver_full
    if minver is None:
        conf.check_message_custom('swig version', '', swigver_full)
    else:
        # fixed: only build the requested-version string when a minimum was
        # given - joining over None raised a TypeError for plain calls
        minver_str = '.'.join(map(str, minver))
        conf.check_message('swig version', '>= %s' % (minver_str,), result, option=swigver_full)
    return result
+
def detect(conf):
    """Configuration hook: locate the swig binary (configuration fails
    when it is missing since mandatory=True)."""
    # the return value is stored in env['SWIG'] by find_program itself,
    # so no local binding is needed
    conf.find_program('swig', var='SWIG', mandatory=True)
+
diff --git a/buildtools/wafadmin/3rdparty/valadoc.py b/buildtools/wafadmin/3rdparty/valadoc.py
new file mode 100644 (file)
index 0000000..d0a9fe8
--- /dev/null
@@ -0,0 +1,113 @@
+#! /usr/bin/env python
+# encoding: UTF-8
+# Nicolas Joseph 2009
+
+from fnmatch import fnmatchcase
+import os, os.path, re, stat
+import Task, Utils, Node, Constants
+from TaskGen import feature, extension, after
+from Logs import debug, warn, error
+
# command template: just the valadoc binary; all flags are appended in run()
VALADOC_STR = '${VALADOC}'
+
class valadoc_task(Task.Task):
  """Run valadoc over the vala sources to generate API documentation."""

  vars = ['VALADOC', 'VALADOCFLAGS']
  color = 'BLUE'
  after = 'cxx_link cc_link'
  quiet = True

  # class-level defaults; process_valadoc copies the user values onto
  # the task instance
  output_dir = ''
  doclet = ''
  package_name = ''
  package_version = ''
  files = []
  protected = True
  private = False
  inherit = False
  deps = False
  enable_non_null_experimental = False
  force = False

  def runnable_status(self):
    # always regenerate the documentation
    # NOTE(review): Task subclasses normally return a scheduling constant
    # here - confirm a bare True is treated like RUN_ME by the runner
    return True

  def run(self):
    """Assemble and execute the valadoc command line; return its exit code."""
    if self.env['VALADOC']:
      if not self.env['VALADOCFLAGS']:
        self.env['VALADOCFLAGS'] = ''
      cmd = [Utils.subst_vars(VALADOC_STR, self.env)]
      cmd.append ('-o %s' % self.output_dir)
      if getattr(self, 'doclet', None):
        cmd.append ('--doclet %s' % self.doclet)
      cmd.append ('--package-name %s' % self.package_name)
      # fixed: process_valadoc sets 'package_version', not 'version', so
      # the old getattr(self, 'version') check could never see the value
      if getattr(self, 'package_version', None):
        cmd.append ('--package-version %s' % self.package_version)
      if getattr(self, 'packages', None):
        for package in self.packages:
          cmd.append ('--pkg %s' % package)
      if getattr(self, 'vapi_dirs', None):
        for vapi_dir in self.vapi_dirs:
          cmd.append ('--vapidir %s' % vapi_dir)
      if not getattr(self, 'protected', None):
        cmd.append ('--no-protected')
      if getattr(self, 'private', None):
        cmd.append ('--private')
      if getattr(self, 'inherit', None):
        cmd.append ('--inherit')
      if getattr(self, 'deps', None):
        cmd.append ('--deps')
      if getattr(self, 'enable_non_null_experimental', None):
        cmd.append ('--enable-non-null-experimental')
      if getattr(self, 'force', None):
        cmd.append ('--force')
      cmd.append (' '.join ([x.relpath_gen (self.generator.bld.bldnode) for x in self.files]))
      return self.generator.bld.exec_command(' '.join(cmd))
    else:
      error ('You must install valadoc <http://live.gnome.org/Valadoc> for generate the API documentation')
      return -1
+
@feature('valadoc')
def process_valadoc(self):
  """Create and configure the valadoc task from the generator attributes.

  Raises Utils.WafError when a mandatory attribute (output_dir, doclet,
  package_name, files) is missing.
  """
  task = getattr(self, 'task', None)
  if not task:
    task = self.create_task('valadoc')
    self.task = task
    if getattr(self, 'output_dir', None):
      task.output_dir = self.output_dir
    else:
      # fixed: the WafError objects were constructed but never raised,
      # so missing mandatory attributes went unnoticed
      raise Utils.WafError('no output directory')
    if getattr(self, 'doclet', None):
      task.doclet = self.doclet
    else:
      raise Utils.WafError('no doclet directory')
    if getattr(self, 'package_name', None):
      task.package_name = self.package_name
    else:
      raise Utils.WafError('no package name')
    if getattr(self, 'package_version', None):
      task.package_version = self.package_version
    if getattr(self, 'packages', None):
      task.packages = Utils.to_list(self.packages)
    if getattr(self, 'vapi_dirs', None):
      task.vapi_dirs = Utils.to_list(self.vapi_dirs)
    if getattr(self, 'files', None):
      task.files = self.files
    else:
      raise Utils.WafError('no input file')
    if getattr(self, 'protected', None):
      task.protected = self.protected
    if getattr(self, 'private', None):
      task.private = self.private
    if getattr(self, 'inherit', None):
      task.inherit = self.inherit
    if getattr(self, 'deps', None):
      task.deps = self.deps
    if getattr(self, 'enable_non_null_experimental', None):
      task.enable_non_null_experimental = self.enable_non_null_experimental
    if getattr(self, 'force', None):
      task.force = self.force
+
def detect(conf):
  # valadoc is optional here; run() prints an error when it is missing
  conf.find_program('valadoc', var='VALADOC', mandatory=False)
+
diff --git a/buildtools/wafadmin/Build.py b/buildtools/wafadmin/Build.py
new file mode 100644 (file)
index 0000000..8e7c72c
--- /dev/null
@@ -0,0 +1,1033 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005 (ita)
+
+"""
+Dependency tree holder
+
+The class Build holds all the info related to a build:
+* file system representation (tree of Node instances)
+* various cached objects (task signatures, file scan results, ..)
+
+There is only one Build object at a time (bld singleton)
+"""
+
+import os, sys, errno, re, glob, gc, datetime, shutil
+try: import cPickle
+except: import pickle as cPickle
+import Runner, TaskGen, Node, Scripting, Utils, Environment, Task, Logs, Options
+from Logs import debug, error, info
+from Constants import *
+
# attributes of the build context pickled to the cache file (see save/load)
SAVED_ATTRS = 'root srcnode bldnode node_sigs node_deps raw_deps task_sigs id_nodes'.split()
"Build class members to save"

bld = None
"singleton - safe to use when Waf is not used as a library"
+
class BuildError(Utils.WafError):
    """Raised when one or more tasks failed during the build."""

    def __init__(self, b=None, t=[]):
        self.bld = b
        self.tasks = t
        self.ret = 1
        Utils.WafError.__init__(self, self.format_error())

    def format_error(self):
        """Aggregate the error messages of all failed tasks into one string."""
        messages = ['Build failed:']
        for task in self.tasks:
            err = task.format_error()
            if err:
                messages.append(err)
        # use one line per error when there are several of them
        if len(messages) > 2:
            joiner = '\n'
        else:
            joiner = ' '
        return joiner.join(messages)
+
def group_method(fun):
    """
    sets a build context method to execute after the current group has finished executing
    this is useful for installing build files:
    * calling install_files/install_as will fail if called too early
    * people do not want to define install method in their task classes

    TODO: try it
    """
    def f(*k, **kw):
        # k[0] is the build context instance
        if not k[0].is_install:
            return False

        postpone = True
        if 'postpone' in kw:
            postpone = kw['postpone']
            del kw['postpone']

        # TODO waf 1.6 in theory there should be no reference to the TaskManager internals here
        if postpone:
            m = k[0].task_manager
            if not m.groups: m.add_group()
            m.groups[m.current_group].post_funs.append((fun, k, kw))
            if not 'cwd' in kw:
                kw['cwd'] = k[0].path
        else:
            # execute immediately
            fun(*k, **kw)
    return f
+
+class BuildContext(Utils.Context):
+       "holds the dependency tree"
+       def __init__(self):
+
+               # not a singleton, but provided for compatibility
+               global bld
+               bld = self
+
+               self.task_manager = Task.TaskManager()
+
+               # instead of hashing the nodes, we assign them a unique id when they are created
+               self.id_nodes = 0
+               self.idx = {}
+
+               # map names to environments, the 'default' must be defined
+               self.all_envs = {}
+
+               # ======================================= #
+               # code for reading the scripts
+
+               # project build directory - do not reset() from load_dirs()
+               self.bdir = ''
+
+               # the current directory from which the code is run
+               # the folder changes everytime a wscript is read
+               self.path = None
+
+               # Manual dependencies.
+               self.deps_man = Utils.DefaultDict(list)
+
+               # ======================================= #
+               # cache variables
+
+               # local cache for absolute paths - cache_node_abspath[variant][node]
+               self.cache_node_abspath = {}
+
+               # list of folders that are already scanned
+               # so that we do not need to stat them one more time
+               self.cache_scanned_folders = {}
+
+               # list of targets to uninstall for removing the empty folders after uninstalling
+               self.uninstall = []
+
+               # ======================================= #
+               # tasks and objects
+
+               # build dir variants (release, debug, ..)
+               for v in 'cache_node_abspath task_sigs node_deps raw_deps node_sigs'.split():
+                       var = {}
+                       setattr(self, v, var)
+
+               self.cache_dir_contents = {}
+
+               self.all_task_gen = []
+               self.task_gen_cache_names = {}
+               self.cache_sig_vars = {}
+               self.log = None
+
+               self.root = None
+               self.srcnode = None
+               self.bldnode = None
+
+               # bind the build context to the nodes in use
+               # this means better encapsulation and no build context singleton
+               class node_class(Node.Node):
+                       pass
+               self.node_class = node_class
+               self.node_class.__module__ = "Node"
+               self.node_class.__name__ = "Nodu"
+               self.node_class.bld = self
+
+               self.is_install = None
+
+       def __copy__(self):
+               "nodes are not supposed to be copied"
+               raise Utils.WafError('build contexts are not supposed to be cloned')
+
    def load(self):
        "load the cache from the disk"
        try:
            env = Environment.Environment(os.path.join(self.cachedir, 'build.config.py'))
        except (IOError, OSError):
            # no saved configuration; tools are not re-initialized
            pass
        else:
            if env['version'] < HEXVERSION:
                raise Utils.WafError('Version mismatch! reconfigure the project')
            for t in env['tools']:
                self.setup(**t)

        try:
            # unpickling a large node tree is much faster with the gc disabled
            gc.disable()
            f = data = None

            Node.Nodu = self.node_class

            try:
                f = open(os.path.join(self.bdir, DBFILE), 'rb')
            except (IOError, EOFError):
                # handle missing file/empty file
                pass

            try:
                if f: data = cPickle.load(f)
            except AttributeError:
                # handle file of an old Waf version
                # that has an attribute which no longer exist
                # (e.g. AttributeError: 'module' object has no attribute 'BuildDTO')
                if Logs.verbose > 1: raise

            if data:
                for x in SAVED_ATTRS: setattr(self, x, data[x])
            else:
                debug('build: Build cache loading failed')

        finally:
            if f: f.close()
            gc.enable()
+
+       def save(self):
+               "store the cache on disk, see self.load"
+               gc.disable()
+               self.root.__class__.bld = None
+
+               # some people are very nervous with ctrl+c so we have to make a temporary file
+               Node.Nodu = self.node_class
+               db = os.path.join(self.bdir, DBFILE)
+               file = open(db + '.tmp', 'wb')
+               data = {}
+               for x in SAVED_ATTRS: data[x] = getattr(self, x)
+               cPickle.dump(data, file, -1)
+               file.close()
+
+               # do not use shutil.move
+               try: os.unlink(db)
+               except OSError: pass
+               os.rename(db + '.tmp', db)
+               self.root.__class__.bld = self
+               gc.enable()
+
+       # ======================================= #
+
    def clean(self):
        """Remove the build files, keeping the files created at configuration time."""
        debug('build: clean called')

        # does not clean files created during the configuration
        precious = set([])
        for env in self.all_envs.values():
            for x in env[CFG_FILES]:
                node = self.srcnode.find_resource(x)
                if node:
                    precious.add(node.id)

        def clean_rec(node):
            # iterate over a copy of the keys: the dict is modified in the loop
            for x in list(node.childs.keys()):
                nd = node.childs[x]

                tp = nd.id & 3
                if tp == Node.DIR:
                    clean_rec(nd)
                elif tp == Node.BUILD:
                    if nd.id in precious: continue
                    for env in self.all_envs.values():
                        try: os.remove(nd.abspath(env))
                        except OSError: pass
                    node.childs.__delitem__(x)

        clean_rec(self.srcnode)

        # drop the cached signatures and dependencies
        for v in 'node_sigs node_deps task_sigs raw_deps cache_node_abspath'.split():
            setattr(self, v, {})
+
    def compile(self):
        """The cache file is not written if nothing was build at all (build is up to date)"""
        debug('build: compile called')

        """
        import cProfile, pstats
        cProfile.run("import Build\nBuild.bld.flush()", 'profi.txt')
        p = pstats.Stats('profi.txt')
        p.sort_stats('cumulative').print_stats(80)
        """
        self.flush()
        #"""

        # the parallel scheduler that will consume the tasks
        self.generator = Runner.Parallel(self, Options.options.jobs)

        def dw(on=True):
            # toggle the terminal cursor when a progress bar is displayed
            if Options.options.progress_bar:
                if on: sys.stderr.write(Logs.colors.cursor_on)
                else: sys.stderr.write(Logs.colors.cursor_off)

        debug('build: executor starting')

        back = os.getcwd()
        os.chdir(self.bldnode.abspath())

        try:
            try:
                dw(on=False)
                self.generator.start()
            except KeyboardInterrupt:
                dw()
                # if self.generator.processed != 1: TODO
                self.save()
                raise
            except Exception:
                dw()
                # do not store anything, for something bad happened
                raise
            else:
                dw()
                #if self.generator.processed != 1: TODO
                self.save()

            if self.generator.error:
                raise BuildError(self, self.task_manager.tasks_done)

        finally:
            os.chdir(back)
+
+       def install(self):
+               "this function is called for both install and uninstall"
+               debug('build: install called')
+
+               self.flush()
+
+               # remove empty folders after uninstalling
+               if self.is_install < 0:
+                       lst = []
+                       for x in self.uninstall:
+                               dir = os.path.dirname(x)
+                               if not dir in lst: lst.append(dir)
+                       lst.sort()
+                       lst.reverse()
+
+                       nlst = []
+                       for y in lst:
+                               x = y
+                               while len(x) > 4:
+                                       if not x in nlst: nlst.append(x)
+                                       x = os.path.dirname(x)
+
+                       nlst.sort()
+                       nlst.reverse()
+                       for x in nlst:
+                               try: os.rmdir(x)
+                               except OSError: pass
+
    def new_task_gen(self, *k, **kw):
        """Create a task generator; a name as first argument selects a
        registered task_gen subclass."""
        if self.task_gen_cache_names:
            # a new task generator invalidates the name -> generator cache
            self.task_gen_cache_names = {}

        kw['bld'] = self
        if len(k) == 0:
            ret = TaskGen.task_gen(*k, **kw)
        else:
            cls_name = k[0]

            try: cls = TaskGen.task_gen.classes[cls_name]
            except KeyError: raise Utils.WscriptError('%s is not a valid task generator -> %s' %
                (cls_name, [x for x in TaskGen.task_gen.classes]))
            ret = cls(*k, **kw)
        return ret
+
    def __call__(self, *k, **kw):
        """Shortcut: bld(...) creates a plain task generator bound to this context."""
        if self.task_gen_cache_names:
            # a new task generator invalidates the name -> generator cache
            self.task_gen_cache_names = {}

        kw['bld'] = self
        return TaskGen.task_gen(*k, **kw)
+
    def load_envs(self):
        """Load the environments written to the cache directory at configuration time."""
        try:
            lst = Utils.listdir(self.cachedir)
        except OSError, e:
            if e.errno == errno.ENOENT:
                raise Utils.WafError('The project was not configured: run "waf configure" first!')
            else:
                raise

        if not lst:
            raise Utils.WafError('The cache directory is empty: reconfigure the project')

        for file in lst:
            if file.endswith(CACHE_SUFFIX):
                env = Environment.Environment(os.path.join(self.cachedir, file))
                # the variant name is the file name without the suffix
                name = file[:-len(CACHE_SUFFIX)]

                self.all_envs[name] = env

        self.init_variants()

        # hash the files created during configuration
        for env in self.all_envs.values():
            for f in env[CFG_FILES]:
                newnode = self.path.find_or_declare(f)
                try:
                    hash = Utils.h_file(newnode.abspath(env))
                except (IOError, AttributeError):
                    error("cannot find "+f)
                    hash = SIG_NIL
                self.node_sigs[env.variant()][newnode.id] = hash

        # TODO: hmmm, these nodes are removed from the tree when calling rescan()
        self.bldnode = self.root.find_dir(self.bldnode.abspath())
        self.path = self.srcnode = self.root.find_dir(self.srcnode.abspath())
        self.cwd = self.bldnode.abspath()
+
+       def setup(self, tool, tooldir=None, funs=None):
+               "setup tools for build process"
+               if isinstance(tool, list):
+                       for i in tool: self.setup(i, tooldir)
+                       return
+
+               if not tooldir: tooldir = Options.tooldir
+
+               module = Utils.load_tool(tool, tooldir)
+               if hasattr(module, "setup"): module.setup(self)
+
+       def init_variants(self):
+               debug('build: init variants')
+
+               lstvariants = []
+               for env in self.all_envs.values():
+                       if not env.variant() in lstvariants:
+                               lstvariants.append(env.variant())
+               self.lst_variants = lstvariants
+
+               debug('build: list of variants is %r', lstvariants)
+
+               for name in lstvariants+[0]:
+                       for v in 'node_sigs cache_node_abspath'.split():
+                               var = getattr(self, v)
+                               if not name in var:
+                                       var[name] = {}
+
+       # ======================================= #
+       # node and folder handling
+
+       # this should be the main entry point
    def load_dirs(self, srcdir, blddir, load_cache=1):
        "this functions should be the start of everything"
        # load_cache is not used in this body (kept for compatibility)

        assert(os.path.isabs(srcdir))
        assert(os.path.isabs(blddir))

        self.cachedir = os.path.join(blddir, CACHE_DIR)

        if srcdir == blddir:
            raise Utils.WafError("build dir must be different from srcdir: %s <-> %s " % (srcdir, blddir))

        self.bdir = blddir

        # try to load the cache file, if it does not exist, nothing happens
        self.load()

        if not self.root:
            Node.Nodu = self.node_class
            self.root = Node.Nodu('', None, Node.DIR)

        if not self.srcnode:
            self.srcnode = self.root.ensure_dir_node_from_path(srcdir)
        debug('build: srcnode is %s and srcdir %s', self.srcnode.name, srcdir)

        self.path = self.srcnode

        # create this build dir if necessary
        try: os.makedirs(blddir)
        except OSError: pass

        if not self.bldnode:
            self.bldnode = self.root.ensure_dir_node_from_path(blddir)

        self.init_variants()
+
    def rescan(self, src_dir_node):
        """
        look the contents of a (folder)node and update its list of childs

        The intent is to perform the following steps
        * remove the nodes for the files that have disappeared
        * remove the signatures for the build files that have disappeared
        * cache the results of os.listdir
        * create the build folder equivalent (mkdir) for each variant
        src/bar -> build/default/src/bar, build/release/src/bar

        when a folder in the source directory is removed, we do not check recursively
        to remove the unused nodes. To do that, call 'waf clean' and build again.
        """

        # do not rescan over and over again
        # TODO use a single variable in waf 1.6
        if self.cache_scanned_folders.get(src_dir_node.id, None): return
        self.cache_scanned_folders[src_dir_node.id] = True

        # TODO remove in waf 1.6
        if hasattr(self, 'repository'): self.repository(src_dir_node)

        if not src_dir_node.name and sys.platform == 'win32':
            # the root has no name, contains drive letters, and cannot be listed
            return


        # first, take the case of the source directory
        parent_path = src_dir_node.abspath()
        try:
            lst = set(Utils.listdir(parent_path))
        except OSError:
            lst = set([])

        # TODO move this at the bottom
        self.cache_dir_contents[src_dir_node.id] = lst

        # hash the existing source files, remove the others
        cache = self.node_sigs[0]
        for x in src_dir_node.childs.values():
            if x.id & 3 != Node.FILE: continue
            if x.name in lst:
                try:
                    cache[x.id] = Utils.h_file(x.abspath())
                except IOError:
                    raise Utils.WafError('The file %s is not readable or has become a dir' % x.abspath())
            else:
                # the file disappeared: drop its signature and its node
                try: del cache[x.id]
                except KeyError: pass

                del src_dir_node.childs[x.name]


        # first obtain the differences between srcnode and src_dir_node
        h1 = self.srcnode.height()
        h2 = src_dir_node.height()

        # build the relative path from srcnode to src_dir_node
        lst = []
        child = src_dir_node
        while h2 > h1:
            lst.append(child.name)
            child = child.parent
            h2 -= 1
        lst.reverse()

        # list the files in the build dirs
        try:
            for variant in self.lst_variants:
                sub_path = os.path.join(self.bldnode.abspath(), variant , *lst)
                self.listdir_bld(src_dir_node, sub_path, variant)
        except OSError:

            # listdir failed, remove the build node signatures for all variants
            for node in src_dir_node.childs.values():
                if node.id & 3 != Node.BUILD:
                    continue

                for dct in self.node_sigs.values():
                    if node.id in dct:
                        dct.__delitem__(node.id)

                # the policy is to avoid removing nodes representing directories
                src_dir_node.childs.__delitem__(node.name)

            # create the build directories for all variants
            for variant in self.lst_variants:
                sub_path = os.path.join(self.bldnode.abspath(), variant , *lst)
                try:
                    os.makedirs(sub_path)
                except OSError:
                    pass
+
+       # ======================================= #
+	def listdir_src(self, parent_node):
+		"""Compatibility stub; the source scanning now happens elsewhere. Do not use."""
+		pass
+
+	def remove_node(self, node):
+		"""Compatibility stub kept so older callers do not break. Do not use."""
+		pass
+
+	def listdir_bld(self, parent_node, path, variant):
+		"""in this method we do not add timestamps but we remove them
+		when the files no longer exist (file removed in the build dir)"""
+
+		# children flagged as build nodes (low two bits of the id encode the kind)
+		i_existing_nodes = [x for x in parent_node.childs.values() if x.id & 3 == Node.BUILD]
+
+		lst = set(Utils.listdir(path))
+		node_names = set([x.name for x in i_existing_nodes])
+		# names known to the node tree but no longer present on disk
+		remove_names = node_names - lst
+
+		# remove the stamps of the build nodes that no longer exist on the filesystem
+		ids_to_remove = [x.id for x in i_existing_nodes if x.name in remove_names]
+		cache = self.node_sigs[variant]
+		for nid in ids_to_remove:
+			if nid in cache:
+				cache.__delitem__(nid)
+
+	def get_env(self):
+		# the 'env' property reads the default environment
+		return self.env_of_name('default')
+	def set_env(self, name, val):
+		# NOTE(review): as a property setter this is invoked as set_env(self, value),
+		# so 'bld.env = x' would raise TypeError (missing 'val') — confirm whether the
+		# setter is only meant to be called directly with two arguments
+		self.all_envs[name] = val
+
+	env = property(get_env, set_env)
+
+	def add_manual_dependency(self, path, value):
+		"""Attach an extra dependency 'value' to the node designated by 'path'
+		(a Node, an absolute path, or a path relative to the current dir)."""
+		if isinstance(path, Node.Node):
+			node = path
+		elif os.path.isabs(path):
+			node = self.root.find_resource(path)
+		else:
+			node = self.path.find_resource(path)
+		# NOTE(review): if find_resource returns None this raises AttributeError
+		# on 'node.id' instead of a clear error message — confirm intended
+		self.deps_man[node.id].append(value)
+
+	def launch_node(self):
+		"""return the launch directory as a node"""
+		# p_ln caches the node; computed lazily on first access
+		try:
+			return self.p_ln
+		except AttributeError:
+			self.p_ln = self.root.find_dir(Options.launch_dir)
+			return self.p_ln
+
+	def glob(self, pattern, relative=True):
+		"""files matching the pattern, seen from the current folder
+
+		returns paths relative to self.path when 'relative' is True,
+		absolute paths otherwise; unresolved matches are silently dropped"""
+		path = self.path.abspath()
+		files = [self.root.find_resource(x) for x in glob.glob(path+os.sep+pattern)]
+		if relative:
+			files = [x.path_to_parent(self.path) for x in files if x]
+		else:
+			files = [x.abspath() for x in files if x]
+		return files
+
+       ## the following methods are candidates for the stable apis ##
+
+	def add_group(self, *k):
+		# delegate build-group creation to the task manager
+		self.task_manager.add_group(*k)
+
+	def set_group(self, *k, **kw):
+		# delegate current-group selection to the task manager
+		self.task_manager.set_group(*k, **kw)
+
+	def hash_env_vars(self, env, vars_lst):
+		"""hash environment variables
+		['CXX', ..] -> [env['CXX'], ..] -> md5()"""
+
+		# ccroot objects use the same environment for building the .o at once
+		# the same environment and the same variables are used
+
+		# memoize on (environment identity, variable list)
+		idx = str(id(env)) + str(vars_lst)
+		try: return self.cache_sig_vars[idx]
+		except KeyError: pass
+
+		lst = [str(env[a]) for a in vars_lst]
+		ret = Utils.h_list(lst)
+		debug('envhash: %r %r', ret, lst)
+
+		# next time
+		self.cache_sig_vars[idx] = ret
+		return ret
+
+	def name_to_obj(self, name, env):
+		"""retrieve a task generator from its name or its target name
+		remember that names must be unique
+
+		the lookup key is prefixed by the environment variant so that the
+		same name may exist once per variant"""
+		cache = self.task_gen_cache_names
+		if not cache:
+			# create the index lazily
+			for x in self.all_task_gen:
+				vt = x.env.variant() + '_'
+				if x.name:
+					cache[vt + x.name] = x
+				else:
+					if isinstance(x.target, str):
+						target = x.target
+					else:
+						target = ' '.join(x.target)
+					v = vt + target
+					# first generator with a given target wins
+					if not cache.get(v, None):
+						cache[v] = x
+		return cache.get(env.variant() + '_' + name, None)
+
+	def flush(self, all=1):
+		"""tell the task generators to create the tasks
+
+		when --targets was given (Options.options.compile_targets) only the
+		listed generators plus every generator in earlier groups are posted;
+		otherwise all generators below the launch directory are posted"""
+
+		self.ini = datetime.datetime.now()
+		# force the initialization of the mapping name->object in flush
+		# name_to_obj can be used in userland scripts, in that case beware of incomplete mapping
+		self.task_gen_cache_names = {}
+		self.name_to_obj('', self.env)
+
+		debug('build: delayed operation TaskGen.flush() called')
+
+		if Options.options.compile_targets:
+			debug('task_gen: posting objects %r listed in compile_targets', Options.options.compile_targets)
+
+			mana = self.task_manager
+			to_post = []
+			min_grp = 0
+
+			# ensure the target names exist, fail before any post()
+			target_objects = Utils.DefaultDict(list)
+			for target_name in Options.options.compile_targets.split(','):
+				# trim target_name (handle cases when the user added spaces to targets)
+				target_name = target_name.strip()
+				for env in self.all_envs.values():
+					tg = self.name_to_obj(target_name, env)
+					if tg:
+						target_objects[target_name].append(tg)
+
+						# keep only the generators of the highest group seen so far
+						m = mana.group_idx(tg)
+						if m > min_grp:
+							min_grp = m
+							to_post = [tg]
+						elif m == min_grp:
+							to_post.append(tg)
+
+				if not target_name in target_objects and all:
+					raise Utils.WafError("target '%s' does not exist" % target_name)
+
+			debug('group: Forcing up to group %s for target %s', mana.group_name(min_grp), Options.options.compile_targets)
+
+			# post all the task generators in previous groups
+			for i in xrange(len(mana.groups)):
+				mana.current_group = i
+				if i == min_grp:
+					break
+				g = mana.groups[i]
+				debug('group: Forcing group %s', mana.group_name(g))
+				for t in g.tasks_gen:
+					debug('group: Posting %s', t.name or t.target)
+					t.post()
+
+			# then post the task generators listed in compile_targets in the last group
+			for t in to_post:
+				t.post()
+
+		else:
+			debug('task_gen: posting objects (normal)')
+			ln = self.launch_node()
+			# if the build is started from the build directory, do as if it was started from the top-level
+			# for the pretty-printing (Node.py), the two lines below cannot be moved to Build::launch_node
+			if ln.is_child_of(self.bldnode) or not ln.is_child_of(self.srcnode):
+				ln = self.srcnode
+
+			# if the project file is located under the source directory, build all targets by default
+			# else 'waf configure build' does nothing
+			proj_node = self.root.find_dir(os.path.split(Utils.g_module.root_path)[0])
+			if proj_node.id != self.srcnode.id:
+				ln = self.srcnode
+
+			for i in xrange(len(self.task_manager.groups)):
+				g = self.task_manager.groups[i]
+				self.task_manager.current_group = i
+				if Logs.verbose:
+					groups = [x for x in self.task_manager.groups_names if id(self.task_manager.groups_names[x]) == id(g)]
+					name = groups and groups[0] or 'unnamed'
+					# NOTE(review): the message has no placeholder for 'name', so the
+					# group name is likely dropped or mis-formatted — confirm against Logs.debug
+					Logs.debug('group: group', name)
+				for tg in g.tasks_gen:
+					if not tg.path.is_child_of(ln):
+						continue
+					if Logs.verbose:
+						Logs.debug('group: %s' % tg)
+					tg.post()
+
+	def env_of_name(self, name):
+		"""return the environment stored under 'name', or None after logging an error"""
+		try:
+			return self.all_envs[name]
+		except KeyError:
+			error('no such environment: '+name)
+			return None
+
+	def progress_line(self, state, total, col1, col2):
+		"""Build the one-line progress bar: counters, percentage, a spinner,
+		the elapsed time and an '='-bar padded to the terminal width.
+		col1/col2 are the color escape sequences framing the highlighted parts."""
+		n = len(str(total))
+
+		# advance the spinner character
+		Utils.rot_idx += 1
+		ind = Utils.rot_chr[Utils.rot_idx % 4]
+
+		ini = self.ini
+
+		pc = (100.*state)/total
+		eta = Utils.get_elapsed_time(ini)
+		fs = "[%%%dd/%%%dd][%%s%%2d%%%%%%s][%s][" % (n, n, ind)
+		left = fs % (state, total, col1, pc, col2)
+		right = '][%s%s%s]' % (col1, eta, col2)
+
+		# color escapes take no screen space, so add their lengths back
+		cols = Utils.get_term_cols() - len(left) - len(right) + 2*len(col1) + 2*len(col2)
+		if cols < 7: cols = 7
+
+		ratio = int((cols*state)/total) - 1
+
+		bar = ('='*ratio+'>').ljust(cols)
+		msg = Utils.indicator % (left, bar, right)
+
+		return msg
+
+
+	# despite this note, do_install is called by install_files/install_as below
+	def do_install(self, src, tgt, chmod=O644):
+		"""returns true if the file was effectively installed or uninstalled, false otherwise
+
+		is_install > 0 means install, < 0 means uninstall; chmod is the mode
+		applied to the installed copy (O644 from Constants)"""
+		if self.is_install > 0:
+			if not Options.options.force:
+				# check if the file is already there to avoid a copy
+				try:
+					st1 = os.stat(tgt)
+					st2 = os.stat(src)
+				except OSError:
+					pass
+				else:
+					# same size and identical timestamps -> make no copy
+					if st1.st_mtime >= st2.st_mtime and st1.st_size == st2.st_size:
+						return False
+
+			srclbl = src.replace(self.srcnode.abspath(None)+os.sep, '')
+			info("* installing %s as %s" % (srclbl, tgt))
+
+			# following is for shared libs and stale inodes (-_-)
+			try: os.remove(tgt)
+			except OSError: pass
+
+			try:
+				shutil.copy2(src, tgt)
+				os.chmod(tgt, chmod)
+			except IOError:
+				# distinguish a missing source from an unwritable destination
+				try:
+					os.stat(src)
+				except (OSError, IOError):
+					error('File %r does not exist' % src)
+				raise Utils.WafError('Could not install the file %r' % tgt)
+			return True
+
+		elif self.is_install < 0:
+			info("* uninstalling %s" % tgt)
+
+			self.uninstall.append(tgt)
+
+			try:
+				os.remove(tgt)
+			except OSError, e:
+				# a missing file is fine on uninstall; anything else is reported once
+				if e.errno != errno.ENOENT:
+					if not getattr(self, 'uninstall_error', None):
+						self.uninstall_error = True
+						Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)')
+					if Logs.verbose > 1:
+						Logs.warn('could not remove %s (error code %r)' % (e.filename, e.errno))
+			return True
+
+	# strips a leading drive letter and any leading (back)slashes so the path
+	# can be re-rooted under DESTDIR
+	red = re.compile(r"^([A-Za-z]:)?[/\\\\]*")
+	def get_install_path(self, path, env=None):
+		"installation path prefixed by the destdir, the variables like in '${PREFIX}/bin' are substituted"
+		if not env: env = self.env
+		destdir = env.get_destdir()
+		path = path.replace('/', os.sep)
+		destpath = Utils.subst_vars(path, env)
+		if destdir:
+			destpath = os.path.join(destdir, self.red.sub('', destpath))
+		return destpath
+
+	def install_dir(self, path, env=None):
+		"""
+		create empty folders for the installation (very rarely used)
+
+		'path' may contain variables such as ${PREFIX}; on uninstall the
+		directory is scheduled for removal via the self.uninstall list
+		"""
+		if env:
+			assert isinstance(env, Environment.Environment), "invalid parameter"
+		else:
+			env = self.env
+
+		if not path:
+			return []
+
+		destpath = self.get_install_path(path, env)
+
+		if self.is_install > 0:
+			info('* creating %s' % destpath)
+			Utils.check_dir(destpath)
+		elif self.is_install < 0:
+			info('* removing %s' % destpath)
+			self.uninstall.append(destpath + '/xxx') # yes, ugly
+
+	def install_files(self, path, files, env=None, chmod=O644, relative_trick=False, cwd=None):
+		"""To install files only after they have been built, put the calls in a method named
+		post_build on the top-level wscript
+
+		The files must be a list and contain paths as strings or as Nodes
+
+		The relative_trick flag can be set to install folders, use bld.path.ant_glob() with it
+
+		returns the list of destination paths that were effectively installed
+		"""
+		if env:
+			assert isinstance(env, Environment.Environment), "invalid parameter"
+		else:
+			env = self.env
+
+		if not path: return []
+
+		if not cwd:
+			cwd = self.path
+
+		# a string containing a wildcard is expanded with glob
+		if isinstance(files, str) and '*' in files:
+			gl = cwd.abspath() + os.sep + files
+			lst = glob.glob(gl)
+		else:
+			lst = Utils.to_list(files)
+
+		if not getattr(lst, '__iter__', False):
+			lst = [lst]
+
+		destpath = self.get_install_path(path, env)
+
+		Utils.check_dir(destpath)
+
+		installed_files = []
+		for filename in lst:
+			if isinstance(filename, str) and os.path.isabs(filename):
+				# absolute paths are installed under their basename
+				alst = Utils.split_path(filename)
+				destfile = os.path.join(destpath, alst[-1])
+			else:
+				if isinstance(filename, Node.Node):
+					nd = filename
+				else:
+					nd = cwd.find_resource(filename)
+				if not nd:
+					raise Utils.WafError("Unable to install the file %r (not found in %s)" % (filename, cwd))
+
+				if relative_trick:
+					# preserve the relative folder structure below destpath
+					destfile = os.path.join(destpath, filename)
+					Utils.check_dir(os.path.dirname(destfile))
+				else:
+					destfile = os.path.join(destpath, nd.name)
+
+				filename = nd.abspath(env)
+
+			if self.do_install(filename, destfile, chmod):
+				installed_files.append(destfile)
+		return installed_files
+
+	def install_as(self, path, srcfile, env=None, chmod=O644, cwd=None):
+		"""
+		srcfile may be a string or a Node representing the file to install
+
+		returns True if the file was effectively installed, False otherwise
+		"""
+		if env:
+			assert isinstance(env, Environment.Environment), "invalid parameter"
+		else:
+			env = self.env
+
+		if not path:
+			raise Utils.WafError("where do you want to install %r? (%r?)" % (srcfile, path))
+
+		if not cwd:
+			cwd = self.path
+
+		destpath = self.get_install_path(path, env)
+
+		# make sure the parent directory of the destination exists
+		dir, name = os.path.split(destpath)
+		Utils.check_dir(dir)
+
+		# the source path
+		if isinstance(srcfile, Node.Node):
+			src = srcfile.abspath(env)
+		else:
+			src = srcfile
+			if not os.path.isabs(srcfile):
+				node = cwd.find_resource(srcfile)
+				if not node:
+					raise Utils.WafError("Unable to install the file %r (not found in %s)" % (srcfile, cwd))
+				src = node.abspath(env)
+
+		return self.do_install(src, destpath, chmod)
+
+	def symlink_as(self, path, src, env=None, cwd=None):
+		"""example:  bld.symlink_as('${PREFIX}/lib/libfoo.so', 'libfoo.so.1.2.3')
+
+		returns 0 on success, 1 when the uninstall removal fails; no-op on win32"""
+
+		if sys.platform == 'win32':
+			# well, this *cannot* work
+			return
+
+		if not path:
+			raise Utils.WafError("where do you want to install %r? (%r?)" % (src, path))
+
+		tgt = self.get_install_path(path, env)
+
+		dir, name = os.path.split(tgt)
+		Utils.check_dir(dir)
+
+		if self.is_install > 0:
+			# only (re)create the link when missing or pointing elsewhere
+			link = False
+			if not os.path.islink(tgt):
+				link = True
+			elif os.readlink(tgt) != src:
+				link = True
+
+			if link:
+				try: os.remove(tgt)
+				except OSError: pass
+
+				info('* symlink %s (-> %s)' % (tgt, src))
+				os.symlink(src, tgt)
+			return 0
+
+		else: # UNINSTALL
+			try:
+				info('* removing %s' % (tgt))
+				os.remove(tgt)
+				return 0
+			except OSError:
+				return 1
+
+	def exec_command(self, cmd, **kw):
+		"""Run a system command through Utils.exec_command, logging it and
+		defaulting the working directory to the build dir (cached in self.cwd)."""
+		# 'runner' zone is printed out for waf -v, see wafadmin/Options.py
+		debug('runner: system command -> %s', cmd)
+		if self.log:
+			self.log.write('%s\n' % cmd)
+			kw['log'] = self.log
+		try:
+			if not kw.get('cwd', None):
+				kw['cwd'] = self.cwd
+		except AttributeError:
+			# first call: initialize the default working directory
+			self.cwd = kw['cwd'] = self.bldnode.abspath()
+		return Utils.exec_command(cmd, **kw)
+
+	def printout(self, s):
+		"""Write 's' to the build log if one is open, else to stderr, and flush."""
+		f = self.log or sys.stderr
+		f.write(s)
+		f.flush()
+
+	def add_subdirs(self, dirs):
+		# recurse into the given wscript directories for the 'build' phase
+		self.recurse(dirs, 'build')
+
+	def pre_recurse(self, name_or_mod, path, nexdir):
+		"""Before recursing into a wscript: push the current path and expose
+		this context as 'bld'/'ctx' in the script namespace."""
+		if not hasattr(self, 'oldpath'):
+			self.oldpath = []
+		self.oldpath.append(self.path)
+		self.path = self.root.find_dir(nexdir)
+		return {'bld': self, 'ctx': self}
+
+	def post_recurse(self, name_or_mod, path, nexdir):
+		"""After recursing: restore the previous current path."""
+		self.path = self.oldpath.pop()
+
+       ###### user-defined behaviour
+
+	def pre_build(self):
+		"""Run the user callbacks registered with add_pre_fun, if any."""
+		if hasattr(self, 'pre_funs'):
+			for m in self.pre_funs:
+				m(self)
+
+	def post_build(self):
+		"""Run the user callbacks registered with add_post_fun, if any."""
+		if hasattr(self, 'post_funs'):
+			for m in self.post_funs:
+				m(self)
+
+	def add_pre_fun(self, meth):
+		# lazily create the callback list on first registration
+		try: self.pre_funs.append(meth)
+		except AttributeError: self.pre_funs = [meth]
+
+	def add_post_fun(self, meth):
+		# lazily create the callback list on first registration
+		try: self.post_funs.append(meth)
+		except AttributeError: self.post_funs = [meth]
+
+	def use_the_magic(self):
+		"""Switch the Task module to maximum-parallelism scheduling and
+		automatic dependency extraction."""
+		Task.algotype = Task.MAXPARALLEL
+		Task.file_deps = Task.extract_deps
+		self.magic = True
+
+	# re-bind the install methods through group_method (defined earlier in this
+	# file) so that the calls are handled per build group
+	install_as = group_method(install_as)
+	install_files = group_method(install_files)
+	symlink_as = group_method(symlink_as)
+
diff --git a/buildtools/wafadmin/Configure.py b/buildtools/wafadmin/Configure.py
new file mode 100644 (file)
index 0000000..35b4e51
--- /dev/null
@@ -0,0 +1,444 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2008 (ita)
+
+"""
+Configuration system
+
+A configuration instance is created when "waf configure" is called, it is used to:
+* create data dictionaries (Environment instances)
+* store the list of modules to import
+
+The old model (copied from Scons) was to store logic (mapping file extensions to functions)
+along with the data. In Waf a way was found to separate that logic by adding an indirection
+layer (storing the names in the Environment instances)
+
+In the new model, the logic is more object-oriented, and the user scripts provide the
+logic. The data files (Environments) must contain configuration data only (flags, ..).
+
+Note: the c/c++ related code is in the module config_c
+"""
+
+import os, shlex, sys, time
+try: import cPickle
+except ImportError: import pickle as cPickle
+import Environment, Utils, Options, Logs
+from Logs import warn
+from Constants import *
+
+# python 2/3 compatibility: urlopen lives in urllib.request on py3, urllib on py2
+try:
+	from urllib import request
+except:
+	from urllib import urlopen
+else:
+	urlopen = request.urlopen
+
+# header written at the top of the config log (filled via vars() in post_init)
+conf_template = '''# project %(app)s configured on %(now)s by
+# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s)
+# using %(args)s
+#
+'''
+
+class ConfigurationError(Utils.WscriptError):
+	"""Raised when the configuration fails (see ConfigurationContext.fatal)."""
+	pass
+
+autoconfig = False
+"reconfigure the project automatically"
+
+def find_file(filename, path_list):
+	"""find a file in a list of paths
+	@param filename: name of the file to search for
+	@param path_list: list of directories to search (or a string, split by Utils.to_list)
+	@return: the first directory containing filename, or '' if it could not be found
+"""
+	for directory in Utils.to_list(path_list):
+		if os.path.exists(os.path.join(directory, filename)):
+			return directory
+	return ''
+
+def find_program_impl(env, filename, path_list=[], var=None, environ=None):
+	"""find a program in folders path_lst, and sets env[var]
+	@param env: environment
+	@param filename: name of the program to search for
+	@param path_list: list of directories to search for filename
+	@param var: environment value to be checked for in env or os.environ
+	@return: either the value that is referenced with [var] in env or os.environ
+	  or the first occurrence filename or '' if filename could not be found
+"""
+
+	if not environ:
+		environ = os.environ
+
+	try: path_list = path_list.split()
+	except AttributeError: pass
+
+	# an existing env[var] or environ[var] short-circuits the search
+	if var:
+		if env[var]: return env[var]
+		if var in environ: env[var] = environ[var]
+
+	if not path_list: path_list = environ.get('PATH', '').split(os.pathsep)
+
+	# on win32 also try the usual executable extensions
+	ext = (Options.platform == 'win32') and '.exe,.com,.bat,.cmd' or ''
+	for y in [filename+x for x in ext.split(',')]:
+		for directory in path_list:
+			x = os.path.join(directory, y)
+			if os.path.isfile(x):
+				if var: env[var] = x
+				return x
+	return ''
+
+class ConfigurationContext(Utils.Context):
+	# class-level registries; presumably filled by the conf helpers elsewhere — TODO confirm
+	tests = {}
+	error_handlers = []
+	def __init__(self, env=None, blddir='', srcdir=''):
+		"""Create the configuration context; post_init() completes the setup
+		once a build directory is known.
+
+		NOTE(review): the 'env' parameter is accepted but never used here
+		(self.env is reset to None) — confirm whether it can be dropped."""
+		self.env = None
+		self.envname = ''
+
+		self.environ = dict(os.environ)
+
+		# column used to align the 'Checking for ...' messages
+		self.line_just = 40
+
+		self.blddir = blddir
+		self.srcdir = srcdir
+		self.all_envs = {}
+
+		# curdir: necessary for recursion
+		self.cwd = self.curdir = os.getcwd()
+
+		self.tools = [] # tools loaded in the configuration, and that will be loaded when building
+
+		self.setenv(DEFAULT)
+
+		self.lastprog = ''
+
+		# hash/files track the configuration inputs for autoconfig
+		self.hash = 0
+		self.files = []
+
+		self.tool_cache = []
+
+		if self.blddir:
+			self.post_init()
+
+	def post_init(self):
+		"""Finish the setup once blddir is known: compute the cache dir and
+		open a fresh config log, writing the conf_template header."""
+
+		self.cachedir = os.path.join(self.blddir, CACHE_DIR)
+
+		# start from an empty config log on each configuration
+		path = os.path.join(self.blddir, WAF_CONFIG_LOG)
+		try: os.unlink(path)
+		except (OSError, IOError): pass
+
+		try:
+			self.log = open(path, 'w')
+		except (OSError, IOError):
+			self.fatal('could not open %r for writing' % path)
+
+		app = Utils.g_module.APPNAME
+		if app:
+			ver = getattr(Utils.g_module, 'VERSION', '')
+			if ver:
+				app = "%s (%s)" % (app, ver)
+
+		# the template is substituted from the local variables below
+		now = time.ctime()
+		pyver = sys.hexversion
+		systype = sys.platform
+		args = " ".join(sys.argv)
+		wafver = WAFVERSION
+		abi = ABI
+		self.log.write(conf_template % vars())
+
+	def __del__(self):
+		"""cleanup function: close config.log"""
+
+		# may be ran by the gc, not always after initialization
+		if hasattr(self, 'log') and self.log:
+			self.log.close()
+
+	def fatal(self, msg):
+		"""Abort the configuration by raising a ConfigurationError."""
+		raise ConfigurationError(msg)
+
+	def check_tool(self, input, tooldir=None, funs=None):
+		"""load a waf tool
+
+		'input' is a tool name or list of names; 'tooldir' restricts the search
+		path; 'funs' names detection functions to run instead of the tool's
+		'detect'. With --download, a missing tool is fetched from the remote
+		repositories into the 3rdparty folder and loaded from there."""
+
+		tools = Utils.to_list(input)
+		if tooldir: tooldir = Utils.to_list(tooldir)
+		for tool in tools:
+			# normalize the common tool name aliases
+			tool = tool.replace('++', 'xx')
+			if tool == 'java': tool = 'javaw'
+			if tool.lower() == 'unittest': tool = 'unittestw'
+			# avoid loading the same tool more than once with the same functions
+			# used by composite projects
+
+			mag = (tool, id(self.env), funs)
+			if mag in self.tool_cache:
+				continue
+			self.tool_cache.append(mag)
+
+			module = None
+			try:
+				module = Utils.load_tool(tool, tooldir)
+			except Exception, e:
+				ex = e
+				if Options.options.download:
+					_3rdparty = os.path.normpath(Options.tooldir[0] + os.sep + '..' + os.sep + '3rdparty')
+
+					# try to download the tool from the repository then
+					# the default is set to false
+					for x in Utils.to_list(Options.remote_repo):
+						for sub in ['branches/waf-%s/wafadmin/3rdparty' % WAFVERSION, 'trunk/wafadmin/3rdparty']:
+							url = '/'.join((x, sub, tool + '.py'))
+							try:
+								web = urlopen(url)
+								if web.getcode() != 200:
+									continue
+							except Exception, e:
+								# on python3 urlopen throws an exception
+								continue
+							else:
+								loc = None
+								try:
+									loc = open(_3rdparty + os.sep + tool + '.py', 'wb')
+									loc.write(web.read())
+									web.close()
+								finally:
+									if loc:
+										loc.close()
+								Logs.warn('downloaded %s from %s' % (tool, url))
+								try:
+									module = Utils.load_tool(tool, tooldir)
+								except:
+									# the downloaded file does not load: discard it and try the next source
+									Logs.warn('module %s from %s is unusable' % (tool, url))
+									try:
+										os.unlink(_3rdparty + os.sep + tool + '.py')
+									except:
+										pass
+									continue
+						# NOTE(review): this for-else fires whenever the inner loop
+						# finishes without 'break' — and the inner loop never breaks,
+						# so only the first remote repository is ever tried; confirm
+						else:
+							break
+
+					if not module:
+						Logs.error('Could not load the tool %r or download a suitable replacement from the repository (sys.path %r)\n%s' % (tool, sys.path, e))
+						raise ex
+				else:
+					Logs.error('Could not load the tool %r in %r (try the --download option?):\n%s' % (tool, sys.path, e))
+					raise ex
+
+			# run either the requested functions or the tool's 'detect'
+			if funs is not None:
+				self.eval_rules(funs)
+			else:
+				func = getattr(module, 'detect', None)
+				if func:
+					if type(func) is type(find_file): func(self)
+					else: self.eval_rules(func)
+
+			self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs})
+
+       def sub_config(self, k):
+               "executes the configure function of a wscript module"
+               self.recurse(k, name='configure')
+
+       def pre_recurse(self, name_or_mod, path, nexdir):
+               return {'conf': self, 'ctx': self}
+
+       def post_recurse(self, name_or_mod, path, nexdir):
+               if not autoconfig:
+                       return
+               self.hash = hash((self.hash, getattr(name_or_mod, 'waf_hash_val', name_or_mod)))
+               self.files.append(path)
+
+       def store(self, file=''):
+               "save the config results into the cache file"
+               if not os.path.isdir(self.cachedir):
+                       os.makedirs(self.cachedir)
+
+               if not file:
+                       file = open(os.path.join(self.cachedir, 'build.config.py'), 'w')
+               file.write('version = 0x%x\n' % HEXVERSION)
+               file.write('tools = %r\n' % self.tools)
+               file.close()
+
+               if not self.all_envs:
+                       self.fatal('nothing to store in the configuration context!')
+               for key in self.all_envs:
+                       tmpenv = self.all_envs[key]
+                       tmpenv.store(os.path.join(self.cachedir, key + CACHE_SUFFIX))
+
+       def set_env_name(self, name, env):
+               "add a new environment called name"
+               self.all_envs[name] = env
+               return env
+
+       def retrieve(self, name, fromenv=None):
+               "retrieve an environment called name"
+               try:
+                       env = self.all_envs[name]
+               except KeyError:
+                       env = Environment.Environment()
+                       env['PREFIX'] = os.path.abspath(os.path.expanduser(Options.options.prefix))
+                       self.all_envs[name] = env
+               else:
+                       if fromenv: warn("The environment %s may have been configured already" % name)
+               return env
+
+       def setenv(self, name):
+               "enable the environment called name"
+               self.env = self.retrieve(name)
+               self.envname = name
+
+       def add_os_flags(self, var, dest=None):
+               # do not use 'get' to make certain the variable is not defined
+               try: self.env.append_value(dest or var, Utils.to_list(self.environ[var]))
+               except KeyError: pass
+
+       def check_message_1(self, sr):
+               self.line_just = max(self.line_just, len(sr))
+               for x in ('\n', self.line_just * '-', '\n', sr, '\n'):
+                       self.log.write(x)
+               Utils.pprint('NORMAL', "%s :" % sr.ljust(self.line_just), sep='')
+
+       def check_message_2(self, sr, color='GREEN'):
+               self.log.write(sr)
+               self.log.write('\n')
+               Utils.pprint(color, sr)
+
+       def check_message(self, th, msg, state, option=''):
+               sr = 'Checking for %s %s' % (th, msg)
+               self.check_message_1(sr)
+               p = self.check_message_2
+               if state: p('ok ' + str(option))
+               else: p('not found', 'YELLOW')
+
+       # FIXME remove in waf 1.6
+       # the parameter 'option' is not used (kept for compatibility)
+       def check_message_custom(self, th, msg, custom, option='', color='PINK'):
+               sr = 'Checking for %s %s' % (th, msg)
+               self.check_message_1(sr)
+               self.check_message_2(custom, color)
+
+       def msg(self, msg, result, color=None):
+               """Prints a configuration message 'Checking for xxx: ok'"""
+               self.start_msg('Checking for ' + msg)
+
+               if not isinstance(color, str):
+                       color = result and 'GREEN' or 'YELLOW'
+
+               self.end_msg(result, color)
+
+       def start_msg(self, msg):
+               try:
+                       if self.in_msg:
+                               return
+               except:
+                       self.in_msg = 0
+               self.in_msg += 1
+
+               self.line_just = max(self.line_just, len(msg))
+               for x in ('\n', self.line_just * '-', '\n', msg, '\n'):
+                       self.log.write(x)
+               Utils.pprint('NORMAL', "%s :" % msg.ljust(self.line_just), sep='')
+
+       def end_msg(self, result, color):
+               self.in_msg -= 1
+               if self.in_msg:
+                       return
+
+               if not color:
+                       color = 'GREEN'
+               if result == True:
+                       msg = 'ok'
+               elif result == False:
+                       msg = 'not found'
+                       color = 'YELLOW'
+               else:
+                       msg = str(result)
+
+               self.log.write(msg)
+               self.log.write('\n')
+               Utils.pprint(color, msg)
+
+       def find_program(self, filename, path_list=[], var=None, mandatory=False):
+               "wrapper that adds a configuration message"
+
+               ret = None
+               if var:
+                       if self.env[var]:
+                               ret = self.env[var]
+                       elif var in os.environ:
+                               ret = os.environ[var]
+
+               if not isinstance(filename, list): filename = [filename]
+               if not ret:
+                       for x in filename:
+                               ret = find_program_impl(self.env, x, path_list, var, environ=self.environ)
+                               if ret: break
+
+               self.check_message_1('Checking for program %s' % ' or '.join(filename))
+               self.log.write('  find program=%r paths=%r var=%r\n  -> %r\n' % (filename, path_list, var, ret))
+               if ret:
+                       Utils.pprint('GREEN', str(ret))
+               else:
+                       Utils.pprint('YELLOW', 'not found')
+                       if mandatory:
+                               self.fatal('The program %r is required' % filename)
+
+               if var:
+                       self.env[var] = ret
+               return ret
+
+       def cmd_to_list(self, cmd):
+               "commands may be written in pseudo shell like 'ccache g++'"
+               if isinstance(cmd, str) and cmd.find(' '):
+                       try:
+                               os.stat(cmd)
+                       except OSError:
+                               return shlex.split(cmd)
+                       else:
+                               return [cmd]
+               return cmd
+
+       def __getattr__(self, name):
+               r = self.__class__.__dict__.get(name, None)
+               if r: return r
+               if name and name.startswith('require_'):
+
+                       for k in ['check_', 'find_']:
+                               n = name.replace('require_', k)
+                               ret = self.__class__.__dict__.get(n, None)
+                               if ret:
+                                       def run(*k, **kw):
+                                               r = ret(self, *k, **kw)
+                                               if not r:
+                                                       self.fatal('requirement failure')
+                                               return r
+                                       return run
+               self.fatal('No such method %r' % name)
+
+       def eval_rules(self, rules):
+               self.rules = Utils.to_list(rules)
+               for x in self.rules:
+                       f = getattr(self, x)
+                       if not f: self.fatal("No such method '%s'." % x)
+                       try:
+                               f()
+                       except Exception, e:
+                               ret = self.err_handler(x, e)
+                               if ret == BREAK:
+                                       break
+                               elif ret == CONTINUE:
+                                       continue
+                               else:
+                                       self.fatal(e)
+
+       def err_handler(self, fun, error):
+               pass
+
+def conf(f):
+       "decorator: attach new configuration functions"
+       setattr(ConfigurationContext, f.__name__, f)
+       return f
+
+def conftest(f):
+       "decorator: attach new configuration tests (registered as strings)"
+       ConfigurationContext.tests[f.__name__] = f
+       return conf(f)
+
+
diff --git a/buildtools/wafadmin/Constants.py b/buildtools/wafadmin/Constants.py
new file mode 100644 (file)
index 0000000..e67dda6
--- /dev/null
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Yinon dot me gmail 2008
+
+"""
+these constants are somewhat public, try not to mess them
+
+maintainer: the version number is updated from the top-level wscript file
+"""
+
+# do not touch these three lines, they are updated automatically
+HEXVERSION=0x105019
+WAFVERSION="1.5.19"
+WAFREVISION = "9709M"
+ABI = 7
+
+# permissions
+O644 = 420
+O755 = 493
+
+MAXJOBS = 99999999
+
+CACHE_DIR          = 'c4che'
+CACHE_SUFFIX       = '.cache.py'
+DBFILE             = '.wafpickle-%d' % ABI
+WSCRIPT_FILE       = 'wscript'
+WSCRIPT_BUILD_FILE = 'wscript_build'
+WAF_CONFIG_LOG     = 'config.log'
+WAF_CONFIG_H       = 'config.h'
+
+SIG_NIL = 'iluvcuteoverload'
+
+VARIANT = '_VARIANT_'
+DEFAULT = 'default'
+
+SRCDIR  = 'srcdir'
+BLDDIR  = 'blddir'
+APPNAME = 'APPNAME'
+VERSION = 'VERSION'
+
+DEFINES = 'defines'
+UNDEFINED = ()
+
+BREAK = "break"
+CONTINUE = "continue"
+
+# task scheduler options
+JOBCONTROL = "JOBCONTROL"
+MAXPARALLEL = "MAXPARALLEL"
+NORMAL = "NORMAL"
+
+# task state
+NOT_RUN = 0
+MISSING = 1
+CRASHED = 2
+EXCEPTION = 3
+SKIPPED = 8
+SUCCESS = 9
+
+ASK_LATER = -1
+SKIP_ME = -2
+RUN_ME = -3
+
+
+LOG_FORMAT = "%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s"
+HOUR_FORMAT = "%H:%M:%S"
+
+TEST_OK = True
+
+CFG_FILES = 'cfg_files'
+
+# positive '->' install
+# negative '<-' uninstall
+INSTALL = 1337
+UNINSTALL = -1337
+
diff --git a/buildtools/wafadmin/Environment.py b/buildtools/wafadmin/Environment.py
new file mode 100644 (file)
index 0000000..52c83b4
--- /dev/null
@@ -0,0 +1,210 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005 (ita)
+
+"""Environment representation
+
+There is one gotcha: getitem returns [] if the contents evals to False
+This means env['foo'] = {}; print env['foo'] will print [] not {}
+"""
+
+import os, copy, re
+import Logs, Options, Utils
+from Constants import *
+re_imp = re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$', re.M)
+
+class Environment(object):
+       """A safe-to-use dictionary, but do not attach functions to it please (break cPickle)
+       An environment instance can be stored into a file and loaded easily
+       """
+       __slots__ = ("table", "parent")
+       def __init__(self, filename=None):
+               self.table = {}
+               #self.parent = None
+
+               if filename:
+                       self.load(filename)
+
+       def __contains__(self, key):
+               if key in self.table: return True
+               try: return self.parent.__contains__(key)
+               except AttributeError: return False # parent may not exist
+
+       def __str__(self):
+               keys = set()
+               cur = self
+               while cur:
+                       keys.update(cur.table.keys())
+                       cur = getattr(cur, 'parent', None)
+               keys = list(keys)
+               keys.sort()
+               return "\n".join(["%r %r" % (x, self.__getitem__(x)) for x in keys])
+
+       def __getitem__(self, key):
+               try:
+                       while 1:
+                               x = self.table.get(key, None)
+                               if not x is None:
+                                       return x
+                               self = self.parent
+               except AttributeError:
+                       return []
+
+       def __setitem__(self, key, value):
+               self.table[key] = value
+
+       def __delitem__(self, key):
+               del self.table[key]
+
+       def pop(self, key, *args):
+               if len(args):
+                       return self.table.pop(key, *args)
+               return self.table.pop(key)
+
+       def set_variant(self, name):
+               self.table[VARIANT] = name
+
+       def variant(self):
+               try:
+                       while 1:
+                               x = self.table.get(VARIANT, None)
+                               if not x is None:
+                                       return x
+                               self = self.parent
+               except AttributeError:
+                       return DEFAULT
+
+       def copy(self):
+               # TODO waf 1.6 rename this method derive, #368
+               newenv = Environment()
+               newenv.parent = self
+               return newenv
+
+       def detach(self):
+               """TODO try it
+               modifying the original env will not change the copy"""
+               tbl = self.get_merged_dict()
+               try:
+                       delattr(self, 'parent')
+               except AttributeError:
+                       pass
+               else:
+                       keys = tbl.keys()
+                       for x in keys:
+                               tbl[x] = copy.deepcopy(tbl[x])
+                       self.table = tbl
+
+       def get_flat(self, key):
+               s = self[key]
+               if isinstance(s, str): return s
+               return ' '.join(s)
+
+       def _get_list_value_for_modification(self, key):
+               """Gets a value that must be a list for further modification.  The
+               list may be modified inplace and there is no need to
+               "self.table[var] = value" afterwards.
+               """
+               try:
+                       value = self.table[key]
+               except KeyError:
+                       try: value = self.parent[key]
+                       except AttributeError: value = []
+                       if isinstance(value, list):
+                               value = value[:]
+                       else:
+                               value = [value]
+               else:
+                       if not isinstance(value, list):
+                               value = [value]
+               self.table[key] = value
+               return value
+
+       def append_value(self, var, value):
+               current_value = self._get_list_value_for_modification(var)
+
+               if isinstance(value, list):
+                       current_value.extend(value)
+               else:
+                       current_value.append(value)
+
+       def prepend_value(self, var, value):
+               current_value = self._get_list_value_for_modification(var)
+
+               if isinstance(value, list):
+                       current_value = value + current_value
+                       # a new list: update the dictionary entry
+                       self.table[var] = current_value
+               else:
+                       current_value.insert(0, value)
+
+       # prepend unique would be ambiguous
+       def append_unique(self, var, value):
+               current_value = self._get_list_value_for_modification(var)
+
+               if isinstance(value, list):
+                       for value_item in value:
+                               if value_item not in current_value:
+                                       current_value.append(value_item)
+               else:
+                       if value not in current_value:
+                               current_value.append(value)
+
+       def get_merged_dict(self):
+               """compute a merged table"""
+               table_list = []
+               env = self
+               while 1:
+                       table_list.insert(0, env.table)
+                       try: env = env.parent
+                       except AttributeError: break
+               merged_table = {}
+               for table in table_list:
+                       merged_table.update(table)
+               return merged_table
+
+       def store(self, filename):
+               "Write the variables into a file"
+               file = open(filename, 'w')
+               merged_table = self.get_merged_dict()
+               keys = list(merged_table.keys())
+               keys.sort()
+               for k in keys: file.write('%s = %r\n' % (k, merged_table[k]))
+               file.close()
+
+       def load(self, filename):
+               "Retrieve the variables from a file"
+               tbl = self.table
+               code = Utils.readf(filename)
+               for m in re_imp.finditer(code):
+                       g = m.group
+                       tbl[g(2)] = eval(g(3))
+               Logs.debug('env: %s', self.table)
+
+       def get_destdir(self):
+               "return the destdir, useful for installing"
+               if self.__getitem__('NOINSTALL'): return ''
+               return Options.options.destdir
+
+       def update(self, d):
+               for k, v in d.iteritems():
+                       self[k] = v
+
+
+       def __getattr__(self, name):
+               if name in self.__slots__:
+                       return object.__getattr__(self, name)
+               else:
+                       return self[name]
+
+       def __setattr__(self, name, value):
+               if name in self.__slots__:
+                       object.__setattr__(self, name, value)
+               else:
+                       self[name] = value
+
+       def __delattr__(self, name):
+               if name in self.__slots__:
+                       object.__delattr__(self, name)
+               else:
+                       del self[name]
+
diff --git a/buildtools/wafadmin/Logs.py b/buildtools/wafadmin/Logs.py
new file mode 100644 (file)
index 0000000..c160b37
--- /dev/null
@@ -0,0 +1,134 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005 (ita)
+
+import ansiterm
+import os, re, logging, traceback, sys
+from Constants import *
+
+zones = ''
+verbose = 0
+
+colors_lst = {
+'USE' : True,
+'BOLD'  :'\x1b[01;1m',
+'RED'   :'\x1b[01;31m',
+'GREEN' :'\x1b[32m',
+'YELLOW':'\x1b[33m',
+'PINK'  :'\x1b[35m',
+'BLUE'  :'\x1b[01;34m',
+'CYAN'  :'\x1b[36m',
+'NORMAL':'\x1b[0m',
+'cursor_on'  :'\x1b[?25h',
+'cursor_off' :'\x1b[?25l',
+}
+
+got_tty = False
+term = os.environ.get('TERM', 'dumb')
+if not term in ['dumb', 'emacs']:
+       try:
+               got_tty = sys.stderr.isatty() or (sys.platform == 'win32' and term in ['xterm', 'msys'])
+       except AttributeError:
+               pass
+
+import Utils
+
+if not got_tty or 'NOCOLOR' in os.environ:
+       colors_lst['USE'] = False
+
+# test
+#if sys.platform == 'win32':
+#      colors_lst['USE'] = True
+
+def get_color(cl):
+       if not colors_lst['USE']: return ''
+       return colors_lst.get(cl, '')
+
+class foo(object):
+       def __getattr__(self, a):
+               return get_color(a)
+       def __call__(self, a):
+               return get_color(a)
+
+colors = foo()
+
+re_log = re.compile(r'(\w+): (.*)', re.M)
+class log_filter(logging.Filter):
+       def __init__(self, name=None):
+               pass
+
+       def filter(self, rec):
+               rec.c1 = colors.PINK
+               rec.c2 = colors.NORMAL
+               rec.zone = rec.module
+               if rec.levelno >= logging.INFO:
+                       if rec.levelno >= logging.ERROR:
+                               rec.c1 = colors.RED
+                       elif rec.levelno >= logging.WARNING:
+                               rec.c1 = colors.YELLOW
+                       else:
+                               rec.c1 = colors.GREEN
+                       return True
+
+               zone = ''
+               m = re_log.match(rec.msg)
+               if m:
+                       zone = rec.zone = m.group(1)
+                       rec.msg = m.group(2)
+
+               if zones:
+                       return getattr(rec, 'zone', '') in zones or '*' in zones
+               elif not verbose > 2:
+                       return False
+               return True
+
+class formatter(logging.Formatter):
+       def __init__(self):
+               logging.Formatter.__init__(self, LOG_FORMAT, HOUR_FORMAT)
+
+       def format(self, rec):
+               if rec.levelno >= logging.WARNING or rec.levelno == logging.INFO:
+                       try:
+                               return '%s%s%s' % (rec.c1, rec.msg.decode('utf-8'), rec.c2)
+                       except:
+                               return rec.c1+rec.msg+rec.c2
+               return logging.Formatter.format(self, rec)
+
+def debug(*k, **kw):
+       if verbose:
+               k = list(k)
+               k[0] = k[0].replace('\n', ' ')
+               logging.debug(*k, **kw)
+
+def error(*k, **kw):
+       logging.error(*k, **kw)
+       if verbose > 1:
+               if isinstance(k[0], Utils.WafError):
+                       st = k[0].stack
+               else:
+                       st = traceback.extract_stack()
+               if st:
+                       st = st[:-1]
+                       buf = []
+                       for filename, lineno, name, line in st:
+                               buf.append('  File "%s", line %d, in %s' % (filename, lineno, name))
+                               if line:
+                                       buf.append('    %s' % line.strip())
+                       if buf: logging.error("\n".join(buf))
+
+warn = logging.warn
+info = logging.info
+
+def init_log():
+       log = logging.getLogger()
+       log.handlers = []
+       log.filters = []
+       hdlr = logging.StreamHandler()
+       hdlr.setFormatter(formatter())
+       log.addHandler(hdlr)
+       log.addFilter(log_filter())
+       log.setLevel(logging.DEBUG)
+
+# may be initialized more than once
+init_log()
+
diff --git a/buildtools/wafadmin/Node.py b/buildtools/wafadmin/Node.py
new file mode 100644 (file)
index 0000000..236dd0d
--- /dev/null
@@ -0,0 +1,695 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005 (ita)
+
+"""
+Node: filesystem structure, contains lists of nodes
+
+IMPORTANT:
+1. Each file/folder is represented by exactly one node.
+
+2. Most would-be class properties are stored in Build: nodes to depend on, signature, flags, ..
+unused class members increase the .wafpickle file size sensibly with lots of objects.
+
+3. The build is launched from the top of the build dir (for example, in _build_/).
+
+4. Node should not be instantiated directly.
+Each instance of Build.BuildContext has a Node subclass.
+(aka: 'Nodu', see BuildContext initializer)
+The BuildContext is referenced here as self.__class__.bld
+Its Node class is referenced here as self.__class__
+
+The public and advertised apis are the following:
+${TGT}                 -> dir/to/file.ext
+${TGT[0].base()}       -> dir/to/file
+${TGT[0].dir(env)}     -> dir/to
+${TGT[0].file()}       -> file.ext
+${TGT[0].file_base()}   -> file
+${TGT[0].suffix()}     -> .ext
+${TGT[0].abspath(env)} -> /path/to/dir/to/file.ext
+
+"""
+
+import os, sys, fnmatch, re, stat
+import Utils, Constants
+
+UNDEFINED = 0
+DIR = 1
+FILE = 2
+BUILD = 3
+
+type_to_string = {UNDEFINED: "unk", DIR: "dir", FILE: "src", BUILD: "bld"}
+
+# These fnmatch expressions are used by default to prune the directory tree
+# while doing the recursive traversal in the find_iter method of the Node class.
+prune_pats = '.git .bzr .hg .svn _MTN _darcs CVS SCCS'.split()
+
+# These fnmatch expressions are used by default to exclude files and dirs
+# while doing the recursive traversal in the find_iter method of the Node class.
+exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split()
+
+# These Utils.jar_regexp expressions are used by default to exclude files and dirs and also prune the directory tree
+# while doing the recursive traversal in the ant_glob method of the Node class.
+exclude_regs = '''
+**/*~
+**/#*#
+**/.#*
+**/%*%
+**/._*
+**/CVS
+**/CVS/**
+**/.cvsignore
+**/SCCS
+**/SCCS/**
+**/vssver.scc
+**/.svn
+**/.svn/**
+**/.git
+**/.git/**
+**/.gitignore
+**/.bzr
+**/.bzr/**
+**/.hg
+**/.hg/**
+**/_MTN
+**/_MTN/**
+**/_darcs
+**/_darcs/**
+**/.DS_Store'''
+
+class Node(object):
+       __slots__ = ("name", "parent", "id", "childs")
+       def __init__(self, name, parent, node_type = UNDEFINED):
+               self.name = name
+               self.parent = parent
+
+               # assumption: one build object at a time
+               self.__class__.bld.id_nodes += 4
+               self.id = self.__class__.bld.id_nodes + node_type
+
+               if node_type == DIR: self.childs = {}
+
+               # We do not want to add another type attribute (memory)
+               # use the id to find out: type = id & 3
+               # for setting: new type = type + x - type & 3
+
+               if parent and name in parent.childs:
+                       raise Utils.WafError('node %s exists in the parent files %r already' % (name, parent))
+
+               if parent: parent.childs[name] = self
+
+       def __setstate__(self, data):
+               if len(data) == 4:
+                       (self.parent, self.name, self.id, self.childs) = data
+               else:
+                       (self.parent, self.name, self.id) = data
+
+       def __getstate__(self):
+               if getattr(self, 'childs', None) is None:
+                       return (self.parent, self.name, self.id)
+               else:
+                       return (self.parent, self.name, self.id, self.childs)
+
+       def __str__(self):
+               if not self.parent: return ''
+               return "%s://%s" % (type_to_string[self.id & 3], self.abspath())
+
+       def __repr__(self):
+               return self.__str__()
+
+       def __hash__(self):
+               "expensive, make certain it is not used"
+               raise Utils.WafError('nodes, you are doing it wrong')
+
+       def __copy__(self):
+               "nodes are not supposed to be copied"
+               raise Utils.WafError('nodes are not supposed to be cloned')
+
+       def get_type(self):
+               return self.id & 3
+
+       def set_type(self, t):
+               "dangerous, you are not supposed to use this"
+               self.id = self.id + t - self.id & 3
+
+       def dirs(self):
+               return [x for x in self.childs.values() if x.id & 3 == DIR]
+
+       def files(self):
+               return [x for x in self.childs.values() if x.id & 3 == FILE]
+
+       def get_dir(self, name, default=None):
+               node = self.childs.get(name, None)
+               if not node or node.id & 3 != DIR: return default
+               return  node
+
+       def get_file(self, name, default=None):
+               node = self.childs.get(name, None)
+               if not node or node.id & 3 != FILE: return default
+               return node
+
+       def get_build(self, name, default=None):
+               node = self.childs.get(name, None)
+               if not node or node.id & 3 != BUILD: return default
+               return node
+
+       def find_resource(self, lst):
+               "Find an existing input file: either a build node declared previously or a source node"
+               if isinstance(lst, str):
+                       lst = Utils.split_path(lst)
+
+               if len(lst) == 1:
+                       parent = self
+               else:
+                       parent = self.find_dir(lst[:-1])
+                       if not parent: return None
+               self.__class__.bld.rescan(parent)
+
+               name = lst[-1]
+               node = parent.childs.get(name, None)
+               if node:
+                       tp = node.id & 3
+                       if tp == FILE or tp == BUILD:
+                               return node
+                       else:
+                               return None
+
+               tree = self.__class__.bld
+               if not name in tree.cache_dir_contents[parent.id]:
+                       return None
+
+               path = parent.abspath() + os.sep + name
+               try:
+                       st = Utils.h_file(path)
+               except IOError:
+                       return None
+
+               child = self.__class__(name, parent, FILE)
+               tree.node_sigs[0][child.id] = st
+               return child
+
+       def find_or_declare(self, lst):
+               "Used for declaring a build node representing a file being built"
+               if isinstance(lst, str):
+                       lst = Utils.split_path(lst)
+
+               if len(lst) == 1:
+                       parent = self
+               else:
+                       parent = self.find_dir(lst[:-1])
+                       if not parent: return None
+               self.__class__.bld.rescan(parent)
+
+               name = lst[-1]
+               node = parent.childs.get(name, None)
+               if node:
+                       tp = node.id & 3
+                       if tp != BUILD:
+                               raise Utils.WafError('find_or_declare found a source file where a build file was expected %r' % '/'.join(lst))
+                       return node
+               node = self.__class__(name, parent, BUILD)
+               return node
+
+	def find_dir(self, lst):
+		"""Search for a folder in the filesystem and return its node.
+
+		lst may be a path string (split with Utils.split_path) or a list of
+		components.  Returns None when a component is missing on disk or an
+		existing node is not a directory.
+		"""
+
+		if isinstance(lst, str):
+			lst = Utils.split_path(lst)
+
+		current = self
+		for name in lst:
+			# refresh the cached directory listing before each lookup
+			self.__class__.bld.rescan(current)
+			prev = current
+
+			if not current.parent and name == current.name:
+				# absolute path restarting at the filesystem root node
+				continue
+			elif not name:
+				continue
+			elif name == '.':
+				continue
+			elif name == '..':
+				# the root node has no parent, so '..' stays in place
+				current = current.parent or current
+			else:
+				current = prev.childs.get(name, None)
+				if current is None:
+					# no node yet: check the scanned directory contents
+					dir_cont = self.__class__.bld.cache_dir_contents
+					if prev.id in dir_cont and name in dir_cont[prev.id]:
+						if not prev.name:
+							if os.sep == '/':
+								# cygwin //machine/share
+								dirname = os.sep + name
+							else:
+								# windows c:
+								dirname = name
+						else:
+							# regular path
+							dirname = prev.abspath() + os.sep + name
+						if not os.path.isdir(dirname):
+							return None
+						current = self.__class__(name, prev, DIR)
+					elif (not prev.name and len(name) == 2 and name[1] == ':') or name.startswith('\\\\'):
+						# drive letter or \\ path for windows
+						current = self.__class__(name, prev, DIR)
+					else:
+						return None
+				else:
+					# an existing node of another kind is not a directory match
+					if current.id & 3 != DIR:
+						return None
+		return current
+
+	def ensure_dir_node_from_path(self, lst):
+		"""Force-create the branch of directory nodes for lst (used very rarely).
+
+		Unlike find_dir, no filesystem check is performed: missing components
+		are instantiated as DIR nodes unconditionally.
+		"""
+
+		if isinstance(lst, str):
+			lst = Utils.split_path(lst)
+
+		current = self
+		for name in lst:
+			if not name:
+				continue
+			elif name == '.':
+				continue
+			elif name == '..':
+				# the root has no parent: '..' stays at the root
+				current = current.parent or current
+			else:
+				prev = current
+				current = prev.childs.get(name, None)
+				if current is None:
+					# missing: create the directory node without touching the disk
+					current = self.__class__(name, prev, DIR)
+		return current
+
+	def exclusive_build_node(self, path):
+		"""
+		create a hierarchy in the build dir (no source folders) for ill-behaving compilers
+		the node is not hashed, so you must do it manually
+
+		after declaring such a node, find_dir and find_resource should work as expected
+		"""
+		lst = Utils.split_path(path)
+		name = lst[-1]
+		if len(lst) > 1:
+			parent = None
+			try:
+				# the parent folder may already exist as a node
+				parent = self.find_dir(lst[:-1])
+			except OSError:
+				pass
+			if not parent:
+				# no node yet: build the branch without a filesystem check
+				parent = self.ensure_dir_node_from_path(lst[:-1])
+				self.__class__.bld.rescan(parent)
+			else:
+				try:
+					self.__class__.bld.rescan(parent)
+				except OSError:
+					# the folder need not exist on disk for a build-only node
+					pass
+		else:
+			parent = self
+
+		node = parent.childs.get(name, None)
+		if not node:
+			node = self.__class__(name, parent, BUILD)
+
+		return node
+
+	def path_to_parent(self, parent):
+		"""Path relative to a direct ancestor, as a string.
+
+		Only the height difference is walked; no check is made that parent
+		really is an ancestor.  Returns '' when both are at the same height.
+		"""
+		lst = []
+		p = self
+		h1 = parent.height()
+		h2 = p.height()
+		# collect the names while climbing from self up to parent's level
+		while h2 > h1:
+			h2 -= 1
+			lst.append(p.name)
+			p = p.parent
+		if lst:
+			lst.reverse()
+			ret = os.path.join(*lst)
+		else:
+			ret = ''
+		return ret
+
+	def find_ancestor(self, node):
+		"""Find a common ancestor for two nodes - for the shortest path in hierarchy.
+
+		Returns None implicitly if the loop exhausts without a match.
+		"""
+		dist = self.height() - node.height()
+		# normalize so that self is the deeper node
+		if dist < 0: return node.find_ancestor(self)
+		# now the real code
+		cand = self
+		while dist > 0:
+			cand = cand.parent
+			dist -= 1
+		if cand == node: return cand
+		# climb both sides in lockstep until they meet
+		cursor = node
+		while cand.parent:
+			cand = cand.parent
+			cursor = cursor.parent
+			if cand == cursor: return cand
+
+	def relpath_gen(self, from_node):
+		"string representing a relative path between self to another node"
+
+		if self == from_node: return '.'
+		if from_node.parent == self: return '..'
+
+		# up_path is '../../../' and down_path is 'dir/subdir/subdir/file'
+		ancestor = self.find_ancestor(from_node)
+		lst = []
+		cand = self
+		# record the component names from self up to the common ancestor
+		while not cand.id == ancestor.id:
+			lst.append(cand.name)
+			cand = cand.parent
+		cand = from_node
+		# then one '..' per level from from_node up to the ancestor
+		while not cand.id == ancestor.id:
+			lst.append('..')
+			cand = cand.parent
+		lst.reverse()
+		return os.sep.join(lst)
+
+	def nice_path(self, env=None):
+		"printed in the console, open files easily from the launch directory"
+		tree = self.__class__.bld
+		ln = tree.launch_node()
+
+		# NOTE(review): env is required for non-FILE nodes (env.variant() below)
+		if self.id & 3 == FILE: return self.relpath_gen(ln)
+		else: return os.path.join(tree.bldnode.relpath_gen(ln), env.variant(), self.relpath_gen(tree.srcnode))
+
+	def is_child_of(self, node):
+		"does this node belong to the subtree node"
+		# climb to the same height as node, then compare ids
+		# (a node counts as a child of itself: diff == 0 compares ids directly)
+		p = self
+		diff = self.height() - node.height()
+		while diff > 0:
+			diff -= 1
+			p = p.parent
+		return p.id == node.id
+
+	def variant(self, env):
+		"variant, or output directory for this node, a source has for variant 0"
+		# 0 marks a source node (or no environment given)
+		if not env: return 0
+		elif self.id & 3 == FILE: return 0
+		else: return env.variant()
+
+	def height(self):
+		"amount of parents"
+		# README a cache can be added here if necessary
+		# the root node (no parent) has height 0
+		d = self
+		val = -1
+		while d:
+			d = d.parent
+			val += 1
+		return val
+
+       # helpers for building things
+
+	def abspath(self, env=None):
+		"""
+		absolute path
+		@param env [Environment]:
+			* obligatory for build nodes: build/variant/src/dir/bar.o
+			* optional for dirs: get either src/dir or build/variant/src/dir
+			* excluded for source nodes: src/dir/bar.c
+
+		Instead of computing the absolute path each time again,
+		store the already-computed absolute paths in one of (variants+1) dictionaries:
+		bld.cache_node_abspath[0] holds absolute paths for source nodes.
+		bld.cache_node_abspath[variant] holds the absolute path for the build nodes
+		which reside in the variant given by env.
+		"""
+		## absolute path - hot zone, so do not touch
+
+		# less expensive
+		# variant 0 <=> source node or no env given
+		variant = (env and (self.id & 3 != FILE) and env.variant()) or 0
+
+		ret = self.__class__.bld.cache_node_abspath[variant].get(self.id, None)
+		if ret: return ret
+
+		if not variant:
+			# source directory
+			if not self.parent:
+				# filesystem root: '/' on posix, '' otherwise
+				val = os.sep == '/' and os.sep or ''
+			elif not self.parent.name: # root
+				val = (os.sep == '/' and os.sep or '') + self.name
+			else:
+				val = self.parent.abspath() + os.sep + self.name
+		else:
+			# build directory
+			val = os.sep.join((self.__class__.bld.bldnode.abspath(), variant, self.path_to_parent(self.__class__.bld.srcnode)))
+		self.__class__.bld.cache_node_abspath[variant][self.id] = val
+		return val
+
+	def change_ext(self, ext):
+		"node of the same path, but with a different extension - hot zone so do not touch"
+		name = self.name
+		k = name.rfind('.')
+		if k >= 0:
+			# replace everything from the last dot on
+			name = name[:k] + ext
+		else:
+			# no extension: simply append
+			name = name + ext
+
+		return self.parent.find_or_declare([name])
+
+	def src_dir(self, env):
+		"src path without the file name"
+		# delegates to the parent directory's srcpath
+		return self.parent.srcpath(env)
+
+	def bld_dir(self, env):
+		"build path without the file name"
+		# delegates to the parent directory's bldpath
+		return self.parent.bldpath(env)
+
+	def bld_base(self, env):
+		"build path without the extension: src/dir/foo(.cpp)"
+		# strip the extension from the name, keep the build directory
+		s = os.path.splitext(self.name)[0]
+		return os.path.join(self.bld_dir(env), s)
+
+	def bldpath(self, env=None):
+		"path seen from the build dir default/src/foo.cpp"
+		if self.id & 3 == FILE:
+			return self.relpath_gen(self.__class__.bld.bldnode)
+		p = self.path_to_parent(self.__class__.bld.srcnode)
+		# NOTE(review): 'is not' compares identity and only works because
+		# CPython interns the empty string; p != '' would be the safe test
+		if p is not '':
+			return env.variant() + os.sep + p
+		return env.variant()
+
+	def srcpath(self, env=None):
+		"path in the srcdir from the build dir ../src/foo.cpp"
+		# build nodes have no source path: fall back to the build path
+		if self.id & 3 == BUILD:
+			return self.bldpath(env)
+		return self.relpath_gen(self.__class__.bld.bldnode)
+
+	def read(self, env):
+		"get the contents of a file, it is not used anywhere for the moment"
+		# delegates to Utils.readf on the absolute path for the given env
+		return Utils.readf(self.abspath(env))
+
+	def dir(self, env):
+		"scons-like"
+		# absolute path of the containing directory
+		return self.parent.abspath(env)
+
+	def file(self):
+		"scons-like"
+		# the bare file name
+		return self.name
+
+	def file_base(self):
+		"scons-like"
+		# file name without the extension
+		return os.path.splitext(self.name)[0]
+
+	def suffix(self):
+		"scons-like - hot zone so do not touch"
+		# '.ext' when a dot exists; without one, rfind yields -1 and
+		# max(0, -1) makes this return the whole name
+		k = max(0, self.name.rfind('.'))
+		return self.name[k:]
+
+	def find_iter_impl(self, src=True, bld=True, dir=True, accept_name=None, is_prune=None, maxdepth=25):
+		"""find nodes in the filesystem hierarchy, try to instanciate the nodes passively; same gotcha as ant_glob"""
+		bld_ctx = self.__class__.bld
+		bld_ctx.rescan(self)
+		for name in bld_ctx.cache_dir_contents[self.id]:
+			if accept_name(self, name):
+				node = self.find_resource(name)
+				if node:
+					if src and node.id & 3 == FILE:
+						yield node
+				else:
+					# not a source file: may be a directory
+					node = self.find_dir(name)
+					if node and node.id != bld_ctx.bldnode.id:
+						if dir:
+							yield node
+						if not is_prune(self, name):
+							if maxdepth:
+								for k in node.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth - 1):
+									yield k
+			else:
+				# name rejected: still recurse into directories unless pruned
+				if not is_prune(self, name):
+					node = self.find_resource(name)
+					if not node:
+						# not a file, it is a dir
+						node = self.find_dir(name)
+						if node and node.id != bld_ctx.bldnode.id:
+							if maxdepth:
+								for k in node.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth - 1):
+									yield k
+
+		if bld:
+			# also yield build nodes declared in this folder
+			for node in self.childs.values():
+				if node.id == bld_ctx.bldnode.id:
+					continue
+				if node.id & 3 == BUILD:
+					if accept_name(self, node.name):
+						yield node
+		# Python 2 idiom to end a generator; invalid under PEP 479 (Python 3.7+)
+		raise StopIteration
+
+	def find_iter(self, in_pat=['*'], ex_pat=exclude_pats, prune_pat=prune_pats, src=True, bld=True, dir=False, maxdepth=25, flat=False):
+		"""find nodes recursively, this returns everything but folders by default; same gotcha as ant_glob"""
+
+		# NOTE(review): this function is not itself a generator (it returns
+		# find_iter_impl's generator), so these raises propagate to the
+		# caller - verify call sites expect that
+		if not (src or bld or dir):
+			raise StopIteration
+
+		if self.id & 3 != DIR:
+			raise StopIteration
+
+		# the mutable default arguments are only read, never mutated
+		in_pat = Utils.to_list(in_pat)
+		ex_pat = Utils.to_list(ex_pat)
+		prune_pat = Utils.to_list(prune_pat)
+
+		def accept_name(node, name):
+			# exclusion patterns take precedence over inclusion patterns
+			for pat in ex_pat:
+				if fnmatch.fnmatchcase(name, pat):
+					return False
+			for pat in in_pat:
+				if fnmatch.fnmatchcase(name, pat):
+					return True
+			return False
+
+		def is_prune(node, name):
+			# prune patterns stop the recursion into matching folders
+			for pat in prune_pat:
+				if fnmatch.fnmatchcase(name, pat):
+					return True
+			return False
+
+		ret = self.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth)
+		if flat:
+			# flat output: a space-separated string of relative paths
+			return " ".join([x.relpath_gen(self) for x in ret])
+
+		return ret
+
+	def ant_glob(self, *k, **kw):
+		"""
+		known gotcha: will enumerate the files, but only if the folder exists in the source directory
+		"""
+
+		src=kw.get('src', 1)
+		bld=kw.get('bld', 0)
+		dir=kw.get('dir', 0)
+		excl = kw.get('excl', exclude_regs)
+		incl = k and k[0] or kw.get('incl', '**')
+
+		def to_pat(s):
+			# compile ant-style patterns into lists of components where each
+			# component is either the literal '**' or an anchored regex
+			lst = Utils.to_list(s)
+			ret = []
+			for x in lst:
+				x = x.replace('//', '/')
+				if x.endswith('/'):
+					x += '**'
+				lst2 = x.split('/')
+				accu = []
+				for k in lst2:
+					if k == '**':
+						accu.append(k)
+					else:
+						# translate the glob component to a regex
+						k = k.replace('.', '[.]').replace('*', '.*').replace('?', '.')
+						k = '^%s$' % k
+						#print "pattern", k
+						accu.append(re.compile(k))
+				ret.append(accu)
+			return ret
+
+		def filtre(name, nn):
+			# advance each pattern list by one path component; an empty list
+			# in the result means that pattern matched completely
+			ret = []
+			for lst in nn:
+				if not lst:
+					pass
+				elif lst[0] == '**':
+					# '**' may match zero or more components
+					ret.append(lst)
+					if len(lst) > 1:
+						if lst[1].match(name):
+							ret.append(lst[2:])
+					else:
+						ret.append([])
+				elif lst[0].match(name):
+					ret.append(lst[1:])
+			return ret
+
+		def accept(name, pats):
+			# a complete match on the exclusion side cancels the acceptance
+			nacc = filtre(name, pats[0])
+			nrej = filtre(name, pats[1])
+			if [] in nrej:
+				nacc = []
+			return [nacc, nrej]
+
+		def ant_iter(nodi, maxdepth=25, pats=[]):
+			# recursive generator over the (sorted) directory contents
+			nodi.__class__.bld.rescan(nodi)
+			tmp = list(nodi.__class__.bld.cache_dir_contents[nodi.id])
+			tmp.sort()
+			for name in tmp:
+				npats = accept(name, pats)
+				if npats and npats[0]:
+					accepted = [] in npats[0]
+					#print accepted, nodi, name
+
+					node = nodi.find_resource(name)
+					if node and accepted:
+						if src and node.id & 3 == FILE:
+							yield node
+					else:
+						node = nodi.find_dir(name)
+						if node and node.id != nodi.__class__.bld.bldnode.id:
+							if accepted and dir:
+								yield node
+							if maxdepth:
+								for k in ant_iter(node, maxdepth=maxdepth - 1, pats=npats):
+									yield k
+			if bld:
+				# build nodes declared in this folder
+				for node in nodi.childs.values():
+					if node.id == nodi.__class__.bld.bldnode.id:
+						continue
+					if node.id & 3 == BUILD:
+						npats = accept(node.name, pats)
+						if npats and npats[0] and [] in npats[0]:
+							yield node
+			# Python 2 idiom to end a generator; invalid under PEP 479 (Python 3.7+)
+			raise StopIteration
+
+		ret = [x for x in ant_iter(self, pats=[to_pat(incl), to_pat(excl)])]
+
+		if kw.get('flat', True):
+			# default: a space-separated string of relative paths
+			return " ".join([x.relpath_gen(self) for x in ret])
+
+		return ret
+
+	def update_build_dir(self, env=None):
+		# refresh the cached directory contents from the build dir on disk,
+		# registering new files and folders recursively
+
+		if not env:
+			# NOTE(review): 'bld' is not defined in this method's scope; this
+			# branch looks like it should use self.__class__.bld.all_envs -
+			# verify whether the env=None path is ever taken
+			for env in bld.all_envs:
+				self.update_build_dir(env)
+			return
+
+		path = self.abspath(env)
+
+		lst = Utils.listdir(path)
+		try:
+			self.__class__.bld.cache_dir_contents[self.id].update(lst)
+		except KeyError:
+			# first scan of this folder
+			self.__class__.bld.cache_dir_contents[self.id] = set(lst)
+		self.__class__.bld.cache_scanned_folders[self.id] = True
+
+		for k in lst:
+			npath = path + os.sep + k
+			st = os.stat(npath)
+			if stat.S_ISREG(st[stat.ST_MODE]):
+				# make sure each regular file has a signature entry
+				ick = self.find_or_declare(k)
+				if not (ick.id in self.__class__.bld.node_sigs[env.variant()]):
+					self.__class__.bld.node_sigs[env.variant()][ick.id] = Constants.SIG_NIL
+			elif stat.S_ISDIR(st[stat.ST_MODE]):
+				child = self.find_dir(k)
+				if not child:
+					child = self.ensure_dir_node_from_path(k)
+				child.update_build_dir(env)
+
+
+class Nodu(Node):
+	# presumably a backward-compatibility alias for code referring to
+	# 'Nodu' - TODO confirm against the call sites
+	pass
+
diff --git a/buildtools/wafadmin/Options.py b/buildtools/wafadmin/Options.py
new file mode 100644 (file)
index 0000000..c9ddcfe
--- /dev/null
@@ -0,0 +1,288 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Scott Newton, 2005 (scottn)
+# Thomas Nagy, 2006 (ita)
+
+"Custom command-line options"
+
+import os, sys, imp, types, tempfile, optparse
+import Logs, Utils
+from Constants import *
+
+# recognized top-level waf commands
+cmds = 'distclean configure build install clean uninstall check dist distcheck'.split()
+
+# TODO remove in waf 1.6 the following two
+commands = {}
+is_install = False
+
+# parsed option values and leftover positional arguments (set by parse_args_impl)
+options = {}
+arg_line = []
+launch_dir = ''
+tooldir = ''
+# configuration lock file name, overridable through the environment
+lockfile = os.environ.get('WAFLOCK', '.lock-wscript')
+try: cache_global = os.path.abspath(os.environ['WAFCACHE'])
+except KeyError: cache_global = ''
+platform = Utils.unversioned_sys_platform()
+conf_file = 'conf-runs-%s-%d.pickle' % (platform, ABI)
+
+remote_repo = ['http://waf.googlecode.com/svn/']
+"""remote directory for the plugins"""
+
+
+# Such a command-line should work:  JOBS=4 PREFIX=/opt/ DESTDIR=/tmp/ahoj/ waf configure
+default_prefix = os.environ.get('PREFIX')
+if not default_prefix:
+	if platform == 'win32':
+		d = tempfile.gettempdir()
+		default_prefix = d[0].upper() + d[1:]
+		# win32 preserves the case, but gettempdir does not
+	else: default_prefix = '/usr/local/'
+
+# NOTE(review): os.environ.get returns a *string* when JOBS is set, so the
+# '< 1' comparison below only behaves under Python 2's mixed-type ordering
+default_jobs = os.environ.get('JOBS', -1)
+if default_jobs < 1:
+	try:
+		if 'SC_NPROCESSORS_ONLN' in os.sysconf_names:
+			default_jobs = os.sysconf('SC_NPROCESSORS_ONLN')
+		else:
+			# BSD/OSX fallback
+			default_jobs = int(Utils.cmd_output(['sysctl', '-n', 'hw.ncpu']))
+	except:
+		# bare except: deliberate best-effort CPU detection
+		if os.name == 'java': # platform.system() == 'Java'
+			from java.lang import Runtime
+			default_jobs = Runtime.getRuntime().availableProcessors()
+		else:
+			# environment var defined on win32
+			default_jobs = int(os.environ.get('NUMBER_OF_PROCESSORS', 1))
+
+default_destdir = os.environ.get('DESTDIR', '')
+
+def get_usage(self):
+	# build the usage text shown by 'waf --help', listing the documented
+	# commands found in the project's wscript module
+	cmds_str = []
+	module = Utils.g_module
+	if module:
+		# create the help messages for commands
+		tbl = module.__dict__
+		keys = list(tbl.keys())
+		keys.sort()
+
+		if 'build' in tbl:
+			if not module.build.__doc__:
+				module.build.__doc__ = 'builds the project'
+		if 'configure' in tbl:
+			if not module.configure.__doc__:
+				module.configure.__doc__ = 'configures the project'
+
+		ban = ['set_options', 'init', 'shutdown']
+
+		# keep only documented public plain functions
+		optlst = [x for x in keys if not x in ban
+			and type(tbl[x]) is type(parse_args_impl)
+			and tbl[x].__doc__
+			and not x.startswith('_')]
+
+		# NOTE(review): max() raises ValueError if optlst is empty
+		just = max([len(x) for x in optlst])
+
+		for x in optlst:
+			cmds_str.append('  %s: %s' % (x.ljust(just), tbl[x].__doc__))
+		ret = '\n'.join(cmds_str)
+	else:
+		# no wscript module loaded yet: show the generic command list
+		ret = ' '.join(cmds)
+	return '''waf [command] [options]
+
+Main commands (example: ./waf build -j4)
+%s
+''' % ret
+
+
+# monkey-patch the usage text onto optparse so '--help' lists waf commands
+setattr(optparse.OptionParser, 'get_usage', get_usage)
+
+def create_parser(module=None):
+	# build the optparse parser carrying waf's standard options
+	Logs.debug('options: create_parser is called')
+	parser = optparse.OptionParser(conflict_handler="resolve", version = 'waf %s (%s)' % (WAFVERSION, WAFREVISION))
+
+	# wrap the help output to the terminal width
+	parser.formatter.width = Utils.get_term_cols()
+	p = parser.add_option
+
+	p('-j', '--jobs',
+		type    = 'int',
+		default = default_jobs,
+		help    = 'amount of parallel jobs (%r)' % default_jobs,
+		dest    = 'jobs')
+
+	p('-k', '--keep',
+		action  = 'store_true',
+		default = False,
+		help    = 'keep running happily on independent task groups',
+		dest    = 'keep')
+
+	p('-v', '--verbose',
+		action  = 'count',
+		default = 0,
+		help    = 'verbosity level -v -vv or -vvv [default: 0]',
+		dest    = 'verbose')
+
+	p('--nocache',
+		action  = 'store_true',
+		default = False,
+		help    = 'ignore the WAFCACHE (if set)',
+		dest    = 'nocache')
+
+	p('--zones',
+		action  = 'store',
+		default = '',
+		help    = 'debugging zones (task_gen, deps, tasks, etc)',
+		dest    = 'zones')
+
+	p('-p', '--progress',
+		action  = 'count',
+		default = 0,
+		help    = '-p: progress bar; -pp: ide output',
+		dest    = 'progress_bar')
+
+	p('--targets',
+		action  = 'store',
+		default = '',
+		help    = 'build given task generators, e.g. "target1,target2"',
+		dest    = 'compile_targets')
+
+	# options only meaningful at configuration time
+	gr = optparse.OptionGroup(parser, 'configuration options')
+	parser.add_option_group(gr)
+	gr.add_option('-b', '--blddir',
+		action  = 'store',
+		default = '',
+		help    = 'out dir for the project (configuration)',
+		dest    = 'blddir')
+	gr.add_option('-s', '--srcdir',
+		action  = 'store',
+		default = '',
+		help    = 'top dir for the project (configuration)',
+		dest    = 'srcdir')
+	gr.add_option('--prefix',
+		help    = 'installation prefix (configuration) [default: %r]' % default_prefix,
+		default = default_prefix,
+		dest    = 'prefix')
+
+	gr.add_option('--download',
+		action  = 'store_true',
+		default = False,
+		help    = 'try to download the tools if missing',
+		dest    = 'download')
+
+	# options only meaningful at installation time
+	gr = optparse.OptionGroup(parser, 'installation options')
+	parser.add_option_group(gr)
+	gr.add_option('--destdir',
+		help    = 'installation root [default: %r]' % default_destdir,
+		default = default_destdir,
+		dest    = 'destdir')
+	gr.add_option('-f', '--force',
+		action  = 'store_true',
+		default = False,
+		help    = 'force file installation',
+		dest    = 'force')
+
+	return parser
+
+def parse_args_impl(parser, _args=None):
+	# parse the command line and populate the module-level globals
+	# 'options', 'commands' and 'arg_line'
+	global options, commands, arg_line
+	(options, args) = parser.parse_args(args=_args)
+
+	arg_line = args
+	#arg_line = args[:] # copy
+
+	# By default, 'waf' is equivalent to 'waf build'
+	commands = {}
+	for var in cmds: commands[var] = 0
+	if not args:
+		commands['build'] = 1
+		args.append('build')
+
+	# Parse the command arguments
+	for arg in args:
+		commands[arg] = True
+
+	# the check thing depends on the build
+	if 'check' in args:
+		idx = args.index('check')
+		try:
+			bidx = args.index('build')
+			if bidx > idx:
+				raise ValueError('build before check')
+		except ValueError, e:
+			# 'build' missing or placed after 'check': insert it before
+			# (Python 2 except syntax; 'e' is unused)
+			args.insert(idx, 'build')
+
+	if args[0] != 'init':
+		args.insert(0, 'init')
+
+	# TODO -k => -j0
+	if options.keep: options.jobs = 1
+	if options.jobs < 1: options.jobs = 1
+
+	if 'install' in sys.argv or 'uninstall' in sys.argv:
+		# absolute path only if set
+		options.destdir = options.destdir and os.path.abspath(os.path.expanduser(options.destdir))
+
+	Logs.verbose = options.verbose
+	Logs.init_log()
+
+	if options.zones:
+		Logs.zones = options.zones.split(',')
+		if not Logs.verbose: Logs.verbose = 1
+	elif Logs.verbose > 0:
+		Logs.zones = ['runner']
+	if Logs.verbose > 2:
+		# maximum verbosity: enable all debugging zones
+		Logs.zones = ['*']
+
+# TODO waf 1.6
+# 1. rename the class to OptionsContext
+# 2. instead of a class attribute, use a module (static 'parser')
+# 3. parse_args_impl was made in times when we did not know about binding new methods to classes
+
+class Handler(Utils.Context):
+	"""loads wscript modules in folders for adding options
+	This class should be named 'OptionsContext'
+	A method named 'recurse' is bound when used by the module Scripting"""
+
+	parser = None
+	# make it possible to access the reference, like Build.bld
+	# NOTE(review): __init__ stores the Handler *instance* here, not the
+	# optparse parser, despite the attribute name
+
+	def __init__(self, module=None):
+		self.parser = create_parser(module)
+		self.cwd = os.getcwd()
+		Handler.parser = self
+
+	def add_option(self, *k, **kw):
+		# delegate to the underlying optparse.OptionParser
+		self.parser.add_option(*k, **kw)
+
+	def add_option_group(self, *k, **kw):
+		return self.parser.add_option_group(*k, **kw)
+
+	def get_option_group(self, opt_str):
+		return self.parser.get_option_group(opt_str)
+
+	def sub_options(self, *k, **kw):
+		# recurse into a subfolder's wscript and run its set_options()
+		if not k: raise Utils.WscriptError('folder expected')
+		self.recurse(k[0], name='set_options')
+
+	def tool_options(self, *k, **kw):
+		# load waf tools and let each one register its own options
+		Utils.python_24_guard()
+
+		if not k[0]:
+			raise Utils.WscriptError('invalid tool_options call %r %r' % (k, kw))
+		tools = Utils.to_list(k[0])
+
+		# TODO waf 1.6 remove the global variable tooldir
+		path = Utils.to_list(kw.get('tdir', kw.get('tooldir', tooldir)))
+
+		for tool in tools:
+			# historical aliases for tool names
+			tool = tool.replace('++', 'xx')
+			if tool == 'java': tool = 'javaw'
+			if tool.lower() == 'unittest': tool = 'unittestw'
+			module = Utils.load_tool(tool, path)
+			try:
+				fun = module.set_options
+			except AttributeError:
+				# a tool without options is acceptable
+				pass
+			else:
+				fun(kw.get('option_group', self))
+
+	def parse_args(self, args=None):
+		parse_args_impl(self.parser, args)
+
diff --git a/buildtools/wafadmin/Runner.py b/buildtools/wafadmin/Runner.py
new file mode 100644 (file)
index 0000000..94db0fb
--- /dev/null
@@ -0,0 +1,236 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2008 (ita)
+
+"Execute the tasks"
+
+import os, sys, random, time, threading, traceback
+try: from Queue import Queue
+except ImportError: from queue import Queue
+import Build, Utils, Logs, Options
+from Logs import debug, error
+from Constants import *
+
+GAP = 15
+
# Wrap threading.Thread.run so that uncaught exceptions in worker threads
# are reported through sys.excepthook instead of threading's own traceback
# printer, keeping the error output consistent with the main thread.
run_old = threading.Thread.run
def run(*args, **kwargs):
	try:
		run_old(*args, **kwargs)
	except (KeyboardInterrupt, SystemExit):
		raise
	except:
		sys.excepthook(*sys.exc_info())
threading.Thread.run = run
+
def process_task(tsk):
	"""Execute one task and report the outcome to its master (a Parallel).

	The task is always put back on master.out, whatever happens, so the
	producer's in-flight counter (Parallel.count) stays consistent.
	"""
	m = tsk.master
	if m.stop:
		# the build is being aborted: do not run, just hand the task back
		m.out.put(tsk)
		return

	try:
		tsk.generator.bld.printout(tsk.display())
		if tsk.__class__.stat: ret = tsk.__class__.stat(tsk)
		# actual call to task's run() function
		else: ret = tsk.call_run()
	except Exception, e:
		tsk.err_msg = Utils.ex_stack()
		tsk.hasrun = EXCEPTION

		# TODO cleanup
		m.error_handler(tsk)
		m.out.put(tsk)
		return

	if ret:
		# non-zero exit status from the command -> failure
		tsk.err_code = ret
		tsk.hasrun = CRASHED
	else:
		try:
			tsk.post_run()
		except Utils.WafError:
			pass
		except Exception:
			tsk.err_msg = Utils.ex_stack()
			tsk.hasrun = EXCEPTION
		else:
			tsk.hasrun = SUCCESS
	if tsk.hasrun != SUCCESS:
		m.error_handler(tsk)

	m.out.put(tsk)
+
class TaskConsumer(threading.Thread):
	"""Daemon worker thread: pulls tasks from the shared queue forever."""

	# queue shared by every worker; fed by Parallel.start
	ready = Queue(0)
	# pool of workers created so far (managed by Parallel.start)
	consumers = []

	def __init__(self):
		threading.Thread.__init__(self)
		self.setDaemon(1)
		self.start()

	def run(self):
		# a worker must never take the process down, swallow everything
		try:
			self.loop()
		except:
			pass

	def loop(self):
		# block on the shared queue and execute tasks as they arrive
		while True:
			process_task(TaskConsumer.ready.get())
+
class Parallel(object):
	"""
	keep the consumer threads busy, and avoid consuming cpu cycles
	when no more tasks can be added (end of the build, etc)

	Producer/consumer scheduler: this object (the producer) feeds tasks to
	the TaskConsumer threads through TaskConsumer.ready, and collects the
	executed tasks back from self.out.
	"""
	def __init__(self, bld, j=2):

		# number of consumers
		self.numjobs = j

		self.manager = bld.task_manager
		self.manager.current_group = 0

		self.total = self.manager.total()

		# tasks waiting to be processed - IMPORTANT
		self.outstanding = []
		self.maxjobs = MAXJOBS

		# tasks that are awaiting for another task to complete
		self.frozen = []

		# tasks returned by the consumers
		self.out = Queue(0)

		self.count = 0 # tasks not in the producer area

		self.processed = 1 # progress indicator

		self.stop = False # error condition to stop the build
		self.error = False # error flag

	def get_next(self):
		"override this method to schedule the tasks in a particular order"
		if not self.outstanding:
			return None
		return self.outstanding.pop(0)

	def postpone(self, tsk):
		"override this method to schedule the tasks in a particular order"
		# TODO consider using a deque instead
		# random insertion spreads out the retries of postponed tasks
		if random.randint(0, 1):
			self.frozen.insert(0, tsk)
		else:
			self.frozen.append(tsk)

	def refill_task_list(self):
		"called to set the next group of tasks"

		# throttle: collect results while too many tasks are in flight
		while self.count > self.numjobs + GAP or self.count >= self.maxjobs:
			self.get_out()

		while not self.outstanding:
			if self.count:
				self.get_out()

			if self.frozen:
				# retry tasks that previously returned ASK_LATER
				self.outstanding += self.frozen
				self.frozen = []
			elif not self.count:
				# nothing in flight and nothing frozen: advance to next group
				(jobs, tmp) = self.manager.get_next_set()
				if jobs != None: self.maxjobs = jobs
				if tmp: self.outstanding += tmp
				break

	def get_out(self):
		"the tasks that are put to execute are all collected using get_out"
		ret = self.out.get()
		self.manager.add_finished(ret)
		if not self.stop and getattr(ret, 'more_tasks', None):
			# tasks may create new tasks while running
			self.outstanding += ret.more_tasks
			self.total += len(ret.more_tasks)
		self.count -= 1

	def error_handler(self, tsk):
		"by default, errors make the build stop (not thread safe so be careful)"
		if not Options.options.keep:
			self.stop = True
		self.error = True

	def start(self):
		"execute the tasks"

		if TaskConsumer.consumers:
			# the worker pool is usually loaded lazily (see below)
			# in case it is re-used with a different value of numjobs:
			while len(TaskConsumer.consumers) < self.numjobs:
				TaskConsumer.consumers.append(TaskConsumer())

		while not self.stop:

			self.refill_task_list()

			# consider the next task
			tsk = self.get_next()
			if not tsk:
				if self.count:
					# tasks may add new ones after they are run
					continue
				else:
					# no tasks to run, no tasks running, time to exit
					break

			if tsk.hasrun:
				# if the task is marked as "run", just skip it
				self.processed += 1
				self.manager.add_finished(tsk)
				continue

			try:
				st = tsk.runnable_status()
			except Exception, e:
				self.processed += 1
				if self.stop and not Options.options.keep:
					# already stopping: do not pile up more errors
					tsk.hasrun = SKIPPED
					self.manager.add_finished(tsk)
					continue
				self.error_handler(tsk)
				self.manager.add_finished(tsk)
				tsk.hasrun = EXCEPTION
				tsk.err_msg = Utils.ex_stack()
				continue

			if st == ASK_LATER:
				self.postpone(tsk)
			elif st == SKIP_ME:
				self.processed += 1
				tsk.hasrun = SKIPPED
				self.manager.add_finished(tsk)
			else:
				# run me: put the task in ready queue
				tsk.position = (self.processed, self.total)
				self.count += 1
				tsk.master = self
				self.processed += 1

				if self.numjobs == 1:
					# single-job mode: run inline, no threads involved
					process_task(tsk)
				else:
					TaskConsumer.ready.put(tsk)
					# create the consumer threads only if there is something to consume
					if not TaskConsumer.consumers:
						TaskConsumer.consumers = [TaskConsumer() for i in xrange(self.numjobs)]

		# self.count represents the tasks that have been made available to the consumer threads
		# collect all the tasks after an error else the message may be incomplete
		while self.error and self.count:
			self.get_out()

		#print loop
		assert (self.count == 0 or self.stop)
+
diff --git a/buildtools/wafadmin/Scripting.py b/buildtools/wafadmin/Scripting.py
new file mode 100644 (file)
index 0000000..d975bd9
--- /dev/null
@@ -0,0 +1,586 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005 (ita)
+
+"Module called for configuring, compiling and installing targets"
+
+import os, sys, shutil, traceback, datetime, inspect, errno
+
+import Utils, Configure, Build, Logs, Options, Environment, Task
+from Logs import error, warn, info
+from Constants import *
+
+g_gz = 'bz2'
+commands = []
+
def prepare_impl(t, cwd, ver, wafdir):
	"""Locate the project wscript, bind the default commands, then run main().

	t: tool directory list entry; cwd: directory waf was launched from;
	ver: waf version string; wafdir: waf installation directory.
	"""
	Options.tooldir = [t]
	Options.launch_dir = cwd

	# some command-line options can be processed immediately
	if '--version' in sys.argv:
		opt_obj = Options.Handler()
		opt_obj.curdir = cwd
		opt_obj.parse_args()
		sys.exit(0)

	# now find the wscript file
	msg1 = 'Waf: Please run waf from a directory containing a file named "%s" or run distclean' % WSCRIPT_FILE

	# in theory projects can be configured in an autotool-like manner:
	# mkdir build && cd build && ../waf configure && ../waf
	build_dir_override = None
	candidate = None

	lst = os.listdir(cwd)

	search_for_candidate = True
	if WSCRIPT_FILE in lst:
		candidate = cwd

	elif 'configure' in sys.argv and not WSCRIPT_BUILD_FILE in lst:
		# autotool-like configuration
		calldir = os.path.abspath(os.path.dirname(sys.argv[0]))
		if WSCRIPT_FILE in os.listdir(calldir):
			candidate = calldir
			search_for_candidate = False
		else:
			error('arg[0] directory does not contain a wscript file')
			sys.exit(1)
		build_dir_override = cwd

	# climb up to find a script if it is not found
	while search_for_candidate:
		if len(cwd) <= 3:
			break # stop at / or c:
		dirlst = os.listdir(cwd)
		if WSCRIPT_FILE in dirlst:
			candidate = cwd
		if 'configure' in sys.argv and candidate:
			break
		if Options.lockfile in dirlst:
			env = Environment.Environment()
			try:
				env.load(os.path.join(cwd, Options.lockfile))
			except:
				# best effort: a stale or corrupt lockfile only gets a warning
				error('could not load %r' % Options.lockfile)
			try:
				os.stat(env['cwd'])
			except:
				# the recorded project directory vanished; fall back to here
				candidate = cwd
			else:
				candidate = env['cwd']
			break
		cwd = os.path.dirname(cwd) # climb up

	if not candidate:
		# check if the user only wanted to display the help
		if '-h' in sys.argv or '--help' in sys.argv:
			warn('No wscript file found: the help message may be incomplete')
			opt_obj = Options.Handler()
			opt_obj.curdir = cwd
			opt_obj.parse_args()
		else:
			error(msg1)
		sys.exit(0)

	# We have found wscript, but there is no guarantee that it is valid
	try:
		os.chdir(candidate)
	except OSError:
		raise Utils.WafError("the folder %r is unreadable" % candidate)

	# define the main module containing the functions init, shutdown, ..
	Utils.set_main_module(os.path.join(candidate, WSCRIPT_FILE))

	if build_dir_override:
		d = getattr(Utils.g_module, BLDDIR, None)
		if d:
			# test if user has set the blddir in wscript.
			msg = ' Overriding build directory %s with %s' % (d, build_dir_override)
			warn(msg)
		Utils.g_module.blddir = build_dir_override

	# bind a few methods and classes by default

	def set_def(obj, name=''):
		# install *obj* on the wscript module unless the user defined his own
		n = name or obj.__name__
		if not n in Utils.g_module.__dict__:
			setattr(Utils.g_module, n, obj)

	for k in [dist, distclean, distcheck, clean, install, uninstall]:
		set_def(k)

	set_def(Configure.ConfigurationContext, 'configure_context')

	for k in ['build', 'clean', 'install', 'uninstall']:
		set_def(Build.BuildContext, k + '_context')

	# now parse the options from the user wscript file
	opt_obj = Options.Handler(Utils.g_module)
	opt_obj.curdir = candidate
	try:
		f = Utils.g_module.set_options
	except AttributeError:
		pass
	else:
		opt_obj.sub_options([''])
	opt_obj.parse_args()

	if not 'init' in Utils.g_module.__dict__:
		Utils.g_module.init = Utils.nada
	if not 'shutdown' in Utils.g_module.__dict__:
		Utils.g_module.shutdown = Utils.nada

	main()
+
def prepare(t, cwd, ver, wafdir):
	"""Entry point called by the waf launcher: version check, then prepare_impl.

	Exits with status 1 on a build error and 68 on keyboard interrupt.
	"""
	if WAFVERSION != ver:
		msg = 'Version mismatch: waf %s <> wafadmin %s (wafdir %s)' % (ver, WAFVERSION, wafdir)
		print('\033[91mError: %s\033[0m' % msg)
		sys.exit(1)

	#""" (swap the comment markers below to enable the profiling variant)
	try:
		prepare_impl(t, cwd, ver, wafdir)
	except Utils.WafError, e:
		error(str(e))
		sys.exit(1)
	except KeyboardInterrupt:
		Utils.pprint('RED', 'Interrupted')
		sys.exit(68)
	"""
	import cProfile, pstats
	cProfile.runctx("import Scripting; Scripting.prepare_impl(t, cwd, ver, wafdir)", {},
		{'t': t, 'cwd':cwd, 'ver':ver, 'wafdir':wafdir},
		 'profi.txt')
	p = pstats.Stats('profi.txt')
	p.sort_stats('time').print_stats(45)
	#"""
+
def main():
	"""Run the commands collected on the command line, one after the other."""
	global commands
	commands = Options.arg_line[:]

	while commands:
		cmd = commands.pop(0)
		started = datetime.datetime.now()

		# 'configure' and 'build' are implemented here; everything else is
		# looked up on the user's wscript module
		if cmd == 'configure':
			fun = configure
		elif cmd == 'build':
			fun = build
		else:
			fun = getattr(Utils.g_module, cmd, None)
		if not fun:
			raise Utils.WscriptError('No such command %r' % cmd)

		ctx = getattr(Utils.g_module, cmd + '_context', Utils.Context)()

		if cmd in ('init', 'shutdown', 'dist', 'distclean', 'distcheck'):
			# compatibility TODO remove in waf 1.6
			try:
				fun(ctx)
			except TypeError:
				fun()
		else:
			fun(ctx)

		elapsed = ''
		if not Options.options.progress_bar:
			elapsed = ' (%s)' % Utils.get_elapsed_time(started)

		if cmd not in ('init', 'shutdown'):
			info('%r finished successfully%s' % (cmd, elapsed))

		# always end the run with a 'shutdown' pseudo-command
		if not commands and cmd != 'shutdown':
			commands.append('shutdown')
+
def configure(conf):
	# Default 'configure' command: resolve srcdir/blddir (command line, then
	# wscript attributes, then defaults), run the wscript configure section
	# and write the lockfile used by the subsequent build commands.

	src = getattr(Options.options, SRCDIR, None)
	if not src: src = getattr(Utils.g_module, SRCDIR, None)
	if not src: src = getattr(Utils.g_module, 'top', None)
	if not src:
		src = '.'
		incomplete_src = 1
	src = os.path.abspath(src)

	bld = getattr(Options.options, BLDDIR, None)
	if not bld: bld = getattr(Utils.g_module, BLDDIR, None)
	if not bld: bld = getattr(Utils.g_module, 'out', None)
	if not bld:
		bld = 'build'
		incomplete_bld = 1
	if bld == '.':
		raise Utils.WafError('Setting blddir="." may cause distclean problems')
	bld = os.path.abspath(bld)

	try: os.makedirs(bld)
	except OSError: pass

	# It is not possible to compile specific targets in the configuration
	# this may cause configuration errors if autoconfig is set
	targets = Options.options.compile_targets
	Options.options.compile_targets = None
	Options.is_install = False

	conf.srcdir = src
	conf.blddir = bld
	conf.post_init()

	# 'incomplete_src'/'incomplete_bld' exist only when the defaults were
	# used above; vars() exposes the locals assigned so far
	if 'incomplete_src' in vars():
		conf.check_message_1('Setting srcdir to')
		conf.check_message_2(src)
	if 'incomplete_bld' in vars():
		conf.check_message_1('Setting blddir to')
		conf.check_message_2(bld)

	# calling to main wscript's configure()
	conf.sub_config([''])

	conf.store()

	# this will write a configure lock so that subsequent builds will
	# consider the current path as the root directory (see prepare_impl).
	# to remove: use 'waf distclean'
	env = Environment.Environment()
	env[BLDDIR] = bld
	env[SRCDIR] = src
	env['argv'] = sys.argv
	env['commands'] = Options.commands
	env['options'] = Options.options.__dict__

	# conf.hash & conf.files hold wscript files paths and hash
	# (used only by Configure.autoconfig)
	env['hash'] = conf.hash
	env['files'] = conf.files
	env['environ'] = dict(conf.environ)
	env['cwd'] = os.path.split(Utils.g_module.root_path)[0]

	if Utils.g_module.root_path != src:
		# in case the source dir is somewhere else
		env.store(os.path.join(src, Options.lockfile))

	env.store(Options.lockfile)

	# restore the target list saved above
	Options.options.compile_targets = targets
+
def clean(bld):
	'''removes the build files'''
	# the lockfile tells us where the configured project lives
	try:
		lock = Environment.Environment(Options.lockfile)
	except IOError:
		raise Utils.WafError('Nothing to clean (project not configured)')

	bld.load_dirs(lock[SRCDIR], lock[BLDDIR])
	bld.load_envs()
	bld.is_install = 0 # False

	# read the scripts, starting from the directory holding the top-level
	# wscript (useful for srcdir='/foo/bar')
	bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])

	# persist the build state even if the clean pass fails half-way
	try:
		bld.clean()
	finally:
		bld.save()
+
def check_configured(bld):
	# When Configure.autoconfig is enabled, re-run the configuration
	# automatically if the lockfile is missing or if one of the recorded
	# wscript/configure files changed, then return a fresh build context.
	if not Configure.autoconfig:
		return bld

	conf_cls = getattr(Utils.g_module, 'configure_context', Utils.Context)
	bld_cls = getattr(Utils.g_module, 'build_context', Utils.Context)

	def reconf(proj):
		# re-run configure() with the options recorded in the lockfile,
		# then restore the current command-line state
		back = (Options.commands, Options.options.__dict__, Logs.zones, Logs.verbose)

		Options.commands = proj['commands']
		Options.options.__dict__ = proj['options']
		conf = conf_cls()
		conf.environ = proj['environ']
		configure(conf)

		(Options.commands, Options.options.__dict__, Logs.zones, Logs.verbose) = back

	try:
		proj = Environment.Environment(Options.lockfile)
	except IOError:
		# never configured: configure from scratch
		conf = conf_cls()
		configure(conf)
	else:
		try:
			bld = bld_cls()
			bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
			bld.load_envs()
		except Utils.WafError:
			# the recorded build dir is unusable: reconfigure and start over
			reconf(proj)
			return bld_cls()

	try:
		proj = Environment.Environment(Options.lockfile)
	except IOError:
		raise Utils.WafError('Auto-config: project does not configure (bug)')

	# compare the combined hash of the recorded files with the stored one
	h = 0
	try:
		for file in proj['files']: # NOTE(review): 'file' shadows the python 2 builtin
			if file.endswith('configure'):
				h = hash((h, Utils.readf(file)))
			else:
				mod = Utils.load_module(file)
				h = hash((h, mod.waf_hash_val))
	except (OSError, IOError):
		warn('Reconfiguring the project: a file is unavailable')
		reconf(proj)
	else:
		if (h != proj['hash']):
			warn('Reconfiguring the project: the configuration has changed')
			reconf(proj)

	return bld_cls()
+
def install(bld):
	'''installs the build files'''
	bld = check_configured(bld)

	# record the requested mode for the rest of the build machinery
	Options.is_install = True
	Options.commands['install'] = True
	Options.commands['uninstall'] = False
	bld.is_install = INSTALL

	# build first, then copy the produced files to their destination
	build_impl(bld)
	bld.install()
+
def uninstall(bld):
	'''removes the installed files'''
	# record the requested mode for the rest of the build machinery
	Options.is_install = True
	Options.commands['install'] = False
	Options.commands['uninstall'] = True
	bld.is_install = UNINSTALL

	# pretend every task can be skipped so nothing is rebuilt: only the
	# installation bookkeeping runs, which removes the installed files
	def runnable_status(self):
		return SKIP_ME
	Task.Task.runnable_status_back = Task.Task.runnable_status
	Task.Task.runnable_status = runnable_status
	try:
		build_impl(bld)
		bld.install()
	finally:
		# always restore the original method
		Task.Task.runnable_status = Task.Task.runnable_status_back
+
def build(bld):
	# default 'build' command: plain compilation, no installation
	bld = check_configured(bld)

	# record the requested mode for the rest of the build machinery
	Options.is_install = False
	Options.commands['install'] = False
	Options.commands['uninstall'] = False
	bld.is_install = 0 # False

	return build_impl(bld)
+
def build_impl(bld):
	# compile the project and/or install the files
	try:
		proj = Environment.Environment(Options.lockfile)
	except IOError:
		raise Utils.WafError("Project not configured (run 'waf configure' first)")

	bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
	bld.load_envs()

	info("Waf: Entering directory `%s'" % bld.bldnode.abspath())
	# read the scripts, starting from the directory holding the top wscript
	bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])

	# execute something immediately before the build starts
	bld.pre_build()

	try:
		bld.compile()
	finally:
		# terminate the progress-bar line before printing anything else
		if Options.options.progress_bar: print('')
		info("Waf: Leaving directory `%s'" % bld.bldnode.abspath())

	# execute something immediately after a successful build
	bld.post_build()

	bld.install()
+
excludes = '.bzr .bzrignore .git .gitignore .svn CVS .cvsignore .arch-ids {arch} SCCS BitKeeper .hg _MTN _darcs Makefile Makefile.in config.log .gitattributes .hgignore .hgtags'.split()
dist_exts = '~ .rej .orig .pyc .pyo .bak .tar.bz2 tar.gz .zip .swp'.split()
# NOTE(review): 'tar.gz' above lacks a leading dot, so any name merely ending
# in 'tar.gz' (e.g. 'avatar.gz') is excluded too - confirm this is intended.

def dont_dist(name, src, build_dir):
	"""Return True if the entry *name* (found in directory *src*) must be
	kept out of the distribution archive: VCS metadata, waf caches, the
	build directory, editor backups and generated files.
	"""
	# no 'global' statement needed: excludes/dist_exts are only read here,
	# and module-level reads resolve without declaring them global
	if (name.startswith(',,')
		or name.startswith('++')
		or name.startswith('.waf')
		or (src == '.' and name == Options.lockfile)
		or name in excludes
		or name == build_dir
		):
		return True

	for ext in dist_exts:
		if name.endswith(ext):
			return True

	return False
+
# like shutil.copytree
# exclude files and to raise exceptions immediately
def copytree(src, dst, build_dir):
	"""Recursively copy *src* into *dst*, skipping entries rejected by
	dont_dist; unlike shutil.copytree, errors propagate immediately."""
	# list first, create second: keeps a dst nested under src out of the listing
	entries = os.listdir(src)
	os.makedirs(dst)
	for entry in entries:
		if dont_dist(entry, src, build_dir):
			continue

		src_path = os.path.join(src, entry)
		dst_path = os.path.join(dst, entry)
		if os.path.isdir(src_path):
			copytree(src_path, dst_path, build_dir)
		else:
			shutil.copy2(src_path, dst_path)
+
+# TODO in waf 1.6, change this method if "srcdir == blddir" is allowed
+def distclean(ctx=None):
+       '''removes the build directory'''
+       global commands
+       lst = os.listdir('.')
+       for f in lst:
+               if f == Options.lockfile:
+                       try:
+                               proj = Environment.Environment(f)
+                       except:
+                               Logs.warn('could not read %r' % f)
+                               continue
+
+                       try:
+                               shutil.rmtree(proj[BLDDIR])
+                       except IOError:
+                               pass
+                       except OSError, e:
+                               if e.errno != errno.ENOENT:
+                                       Logs.warn('project %r cannot be removed' % proj[BLDDIR])
+
+                       try:
+                               os.remove(f)
+                       except OSError, e:
+                               if e.errno != errno.ENOENT:
+                                       Logs.warn('file %r cannot be removed' % f)
+
+               # remove the local waf cache
+               if not commands and f.startswith('.waf'):
+                       shutil.rmtree(f, ignore_errors=True)
+
+# FIXME waf 1.6 a unique ctx parameter, and remove the optional appname and version
+def dist(appname='', version=''):
+       '''makes a tarball for redistributing the sources'''
+       # return return (distdirname, tarballname)
+       import tarfile
+
+       if not appname: appname = Utils.g_module.APPNAME
+       if not version: version = Utils.g_module.VERSION
+
+       tmp_folder = appname + '-' + version
+       if g_gz in ['gz', 'bz2']:
+               arch_name = tmp_folder + '.tar.' + g_gz
+       else:
+               arch_name = tmp_folder + '.' + 'zip'
+
+       # remove the previous dir
+       try:
+               shutil.rmtree(tmp_folder)
+       except (OSError, IOError):
+               pass
+
+       # remove the previous archive
+       try:
+               os.remove(arch_name)
+       except (OSError, IOError):
+               pass
+
+       # copy the files into the temporary folder
+       blddir = getattr(Utils.g_module, BLDDIR, None)
+       if not blddir:
+               blddir = getattr(Utils.g_module, 'out', None)
+       copytree('.', tmp_folder, blddir)
+
+       # undocumented hook for additional cleanup
+       dist_hook = getattr(Utils.g_module, 'dist_hook', None)
+       if dist_hook:
+               back = os.getcwd()
+               os.chdir(tmp_folder)
+               try:
+                       dist_hook()
+               finally:
+                       # go back to the root directory
+                       os.chdir(back)
+
+       if g_gz in ['gz', 'bz2']:
+               tar = tarfile.open(arch_name, 'w:' + g_gz)
+               tar.add(tmp_folder)
+               tar.close()
+       else:
+               Utils.zip_folder(tmp_folder, arch_name, tmp_folder)
+
+       try: from hashlib import sha1 as sha
+       except ImportError: from sha import sha
+       try:
+               digest = " (sha=%r)" % sha(Utils.readf(arch_name)).hexdigest()
+       except:
+               digest = ''
+
+       info('New archive created: %s%s' % (arch_name, digest))
+
+       if os.path.exists(tmp_folder): shutil.rmtree(tmp_folder)
+       return arch_name
+
+# FIXME waf 1.6 a unique ctx parameter, and remove the optional appname and version
+def distcheck(appname='', version='', subdir=''):
+       '''checks if the sources compile (tarball from 'dist')'''
+       import tempfile, tarfile
+
+       if not appname: appname = Utils.g_module.APPNAME
+       if not version: version = Utils.g_module.VERSION
+
+       waf = os.path.abspath(sys.argv[0])
+       tarball = dist(appname, version)
+
+       path = appname + '-' + version
+
+       # remove any previous instance
+       if os.path.exists(path):
+               shutil.rmtree(path)
+
+       t = tarfile.open(tarball)
+       for x in t: t.extract(x)
+       t.close()
+
+       # build_path is the directory for the waf invocation
+       if subdir:
+               build_path = os.path.join(path, subdir)
+       else:
+               build_path = path
+
+       instdir = tempfile.mkdtemp('.inst', '%s-%s' % (appname, version))
+       ret = Utils.pproc.Popen([waf, 'configure', 'build', 'install', 'uninstall', '--destdir=' + instdir], cwd=build_path).wait()
+       if ret:
+               raise Utils.WafError('distcheck failed with code %i' % ret)
+
+       if os.path.exists(instdir):
+               raise Utils.WafError('distcheck succeeded, but files were left in %s' % instdir)
+
+       shutil.rmtree(path)
+
+# FIXME remove in Waf 1.6 (kept for compatibility)
def add_subdir(dir, bld):
	"""Deprecated compatibility wrapper: recurse into *dir* running 'build'."""
	bld.recurse(dir, 'build')
+
diff --git a/buildtools/wafadmin/Task.py b/buildtools/wafadmin/Task.py
new file mode 100644 (file)
index 0000000..5cda2ec
--- /dev/null
@@ -0,0 +1,1200 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2008 (ita)
+
+"""
+Running tasks in parallel is a simple problem, but in practice it is more complicated:
+* dependencies discovered during the build (dynamic task creation)
+* dependencies discovered after files are compiled
+* the amount of tasks and dependencies (graph size) can be huge
+
+This is why the dependency management is split on three different levels:
+1. groups of tasks that run all after another group of tasks
+2. groups of tasks that can be run in parallel
+3. tasks that can run in parallel, but with possible unknown ad-hoc dependencies
+
+The point #1 represents a strict sequential order between groups of tasks, for example a compiler is produced
+and used to compile the rest, whereas #2 and #3 represent partial order constraints where #2 applies to the kind of task
+and #3 applies to the task instances.
+
+#1 is held by the task manager: ordered list of TaskGroups (see bld.add_group)
+#2 is held by the task groups and the task types: precedence after/before (topological sort),
+   and the constraints extracted from file extensions
+#3 is held by the tasks individually (attribute run_after),
+   and the scheduler (Runner.py) use Task::runnable_status to reorder the tasks
+
+--
+
+To try, use something like this in your code:
+import Constants, Task
+Task.algotype = Constants.MAXPARALLEL
+
+--
+
+There are two concepts with the tasks (individual units of change):
+* dependency (if 1 is recompiled, recompile 2)
+* order (run 2 after 1)
+
+example 1: if t1 depends on t2 and t2 depends on t3 it is not necessary to make t1 depend on t3 (dependency is transitive)
+example 2: if t1 depends on a node produced by t2, it is not immediately obvious that t1 must run after t2 (order is not obvious)
+
+The role of the Task Manager is to give the tasks in order (groups of task that may be run in parallel one after the other)
+
+"""
+
+import os, shutil, sys, re, random, datetime, tempfile, shlex
+from Utils import md5
+import Build, Runner, Utils, Node, Logs, Options
+from Logs import debug, warn, error
+from Constants import *
+
+# scheduling algorithm selector used by TaskGroup.get_next_set (constants from Constants.py)
+algotype = NORMAL
+#algotype = JOBCONTROL
+#algotype = MAXPARALLEL
+
+# code template for rule-based tasks executed through a shell;
+# the %s placeholder receives the generated command-format expression
+COMPILE_TEMPLATE_SHELL = '''
+def f(task):
+	env = task.env
+	wd = getattr(task, 'cwd', None)
+	p = env.get_flat
+	cmd = \'\'\' %s \'\'\' % s
+	return task.exec_command(cmd, cwd=wd)
+'''
+
+# code template for rule-based tasks executed without a shell (argv list);
+# the %s placeholder receives the generated list-building statements
+COMPILE_TEMPLATE_NOSHELL = '''
+def f(task):
+	env = task.env
+	wd = getattr(task, 'cwd', None)
+	def to_list(xx):
+		if isinstance(xx, str): return [xx]
+		return xx
+	lst = []
+	%s
+	lst = [x for x in lst if x]
+	return task.exec_command(lst, cwd=wd)
+'''
+
+
+"""
+Enable different kind of dependency algorithms:
+1 make groups: first compile all cpps and then compile all links (NORMAL)
+2 parallelize all (each link task run after its dependencies) (MAXPARALLEL)
+3 like 1 but provide additional constraints for the parallelization (MAXJOBS)
+
+In theory 1. will be faster than 2 for waf, but might be slower for builds
+The scheme 2 will not allow for running tasks one by one so it can cause disk thrashing on huge builds
+"""
+
+# hook called on the task list before scheduling; Utils.nada is a no-op default
+file_deps = Utils.nada
+"""
+Additional dependency pre-check may be added by replacing the function file_deps.
+e.g. extract_outputs, extract_deps below.
+"""
+
+class TaskManager(object):
+	"""The manager is attached to the build object, it holds a list of TaskGroup
+
+	Groups run strictly one after another; only tasks within the current
+	group may run in parallel (level #1 of the module docstring).
+	"""
+	def __init__(self):
+		self.groups = [] # ordered list of TaskGroup objects
+		self.tasks_done = [] # tasks that have finished (see add_finished)
+		self.current_group = 0 # index of the group receiving new tasks / being executed
+		self.groups_names = {} # name -> TaskGroup mapping for named groups
+
+	def group_name(self, g):
+		"""name for the group g (utility), '' if the group is unnamed"""
+		if not isinstance(g, TaskGroup):
+			g = self.groups[g]
+		for x in self.groups_names:
+			if id(self.groups_names[x]) == id(g):
+				return x
+		return ''
+
+	def group_idx(self, tg):
+		"""index of the group the task generator tg is in, or None if not found"""
+		se = id(tg)
+		for i in range(len(self.groups)):
+			g = self.groups[i]
+			for t in g.tasks_gen:
+				if id(t) == se:
+					return i
+		return None
+
+	def get_next_set(self):
+		"""return the next set of tasks to execute
+		the first parameter is the maximum amount of parallelization that may occur"""
+		ret = None
+		while not ret and self.current_group < len(self.groups):
+			ret = self.groups[self.current_group].get_next_set()
+			if ret: return ret
+			else:
+				# the group is exhausted: run its post-functions and advance
+				self.groups[self.current_group].process_install()
+				self.current_group += 1
+		return (None, None)
+
+	def add_group(self, name=None, set=True):
+		"""append a new TaskGroup; when 'set' is true it becomes the current group"""
+		#if self.groups and not self.groups[0].tasks:
+		#	error('add_group: an empty group is already present')
+		g = TaskGroup()
+
+		if name and name in self.groups_names:
+			error('add_group: name %s already present' % name)
+		self.groups_names[name] = g
+		self.groups.append(g)
+		if set:
+			self.current_group = len(self.groups) - 1
+
+	def set_group(self, idx):
+		"""make the group identified by idx (group name or integer index) current"""
+		if isinstance(idx, str):
+			g = self.groups_names[idx]
+			for x in xrange(len(self.groups)):
+				if id(g) == id(self.groups[x]):
+					self.current_group = x
+		else:
+			self.current_group = idx
+
+	def add_task_gen(self, tgen):
+		"""add a task generator to the current group (creating a group if needed)"""
+		if not self.groups: self.add_group()
+		self.groups[self.current_group].tasks_gen.append(tgen)
+
+	def add_task(self, task):
+		"""add a task to the current group (creating a group if needed)"""
+		if not self.groups: self.add_group()
+		self.groups[self.current_group].tasks.append(task)
+
+	def total(self):
+		"""total number of tasks over all groups (used for the progress display)"""
+		total = 0
+		if not self.groups: return 0
+		for group in self.groups:
+			total += len(group.tasks)
+		return total
+
+	def add_finished(self, tsk):
+		"""record a finished task and trigger its installation when installing"""
+		self.tasks_done.append(tsk)
+		bld = tsk.generator.bld
+		if bld.is_install:
+			f = None
+			if 'install' in tsk.__dict__:
+				# an instance-level 'install' overrides the method
+				f = tsk.__dict__['install']
+				# install=0 to prevent installation
+				if f: f(tsk)
+			else:
+				tsk.install()
+
+class TaskGroup(object):
+	"the compilation of one group does not begin until the previous group has finished (in the manager)"
+	def __init__(self):
+		self.tasks = [] # this list will be consumed
+		self.tasks_gen = []
+
+		self.cstr_groups = Utils.DefaultDict(list) # tasks having equivalent constraints
+		self.cstr_order = Utils.DefaultDict(set) # partial order between the cstr groups
+		self.temp_tasks = [] # tasks put on hold
+		self.ready = 0
+		self.post_funs = [] # (function, args, kwargs) tuples run when the group finishes
+
+	def reset(self):
+		"clears the state of the object (put back the tasks into self.tasks)"
+		for x in self.cstr_groups:
+			self.tasks += self.cstr_groups[x]
+		self.tasks = self.temp_tasks + self.tasks
+		self.temp_tasks = []
+		self.cstr_groups = Utils.DefaultDict(list)
+		self.cstr_order = Utils.DefaultDict(set)
+		self.ready = 0
+
+	def process_install(self):
+		# run the functions registered for execution after the group completes
+		for (f, k, kw) in self.post_funs:
+			f(*k, **kw)
+
+	def prepare(self):
+		"prepare the scheduling"
+		self.ready = 1
+		file_deps(self.tasks) # optional pre-check hook (see module-level file_deps)
+		self.make_cstr_groups()
+		self.extract_constraints()
+
+	def get_next_set(self):
+		"next list of tasks to execute using max job settings, returns (maxjobs, task_list)"
+		global algotype
+		if algotype == NORMAL:
+			tasks = self.tasks_in_parallel()
+			maxj = MAXJOBS
+		elif algotype == JOBCONTROL:
+			(maxj, tasks) = self.tasks_by_max_jobs()
+		elif algotype == MAXPARALLEL:
+			tasks = self.tasks_with_inner_constraints()
+			maxj = MAXJOBS
+		else:
+			raise Utils.WafError("unknown algorithm type %s" % (algotype))
+
+		if not tasks: return ()
+		return (maxj, tasks)
+
+	def make_cstr_groups(self):
+		"unite the tasks that have similar constraints"
+		self.cstr_groups = Utils.DefaultDict(list)
+		for x in self.tasks:
+			h = x.hash_constraints()
+			self.cstr_groups[h].append(x)
+
+	def set_order(self, a, b):
+		# record that constraint group 'b' runs after constraint group 'a'
+		self.cstr_order[a].add(b)
+
+	def compare_exts(self, t1, t2):
+		"extension production"
+		# -1 when t1 consumes what t2 produces (t1 after t2), 1 for the converse, 0 otherwise
+		x = "ext_in"
+		y = "ext_out"
+		in_ = t1.attr(x, ())
+		out_ = t2.attr(y, ())
+		for k in in_:
+			if k in out_:
+				return -1
+		in_ = t2.attr(x, ())
+		out_ = t1.attr(y, ())
+		for k in in_:
+			if k in out_:
+				return 1
+		return 0
+
+	def compare_partial(self, t1, t2):
+		"partial relations after/before"
+		# same sign convention as compare_exts, based on class names
+		m = "after"
+		n = "before"
+		name = t2.__class__.__name__
+		if name in Utils.to_list(t1.attr(m, ())): return -1
+		elif name in Utils.to_list(t1.attr(n, ())): return 1
+		name = t1.__class__.__name__
+		if name in Utils.to_list(t2.attr(m, ())): return 1
+		elif name in Utils.to_list(t2.attr(n, ())): return -1
+		return 0
+
+	def extract_constraints(self):
+		"extract the parallelization constraints from the tasks with different constraints"
+		keys = self.cstr_groups.keys()
+		max = len(keys)
+		# hopefully the length of this list is short
+		for i in xrange(max):
+			t1 = self.cstr_groups[keys[i]][0]
+			for j in xrange(i + 1, max):
+				t2 = self.cstr_groups[keys[j]][0]
+
+				# add the constraints based on the comparisons
+				val = (self.compare_exts(t1, t2)
+					or self.compare_partial(t1, t2)
+					)
+				if val > 0:
+					self.set_order(keys[i], keys[j])
+				elif val < 0:
+					self.set_order(keys[j], keys[i])
+
+	def tasks_in_parallel(self):
+		"(NORMAL) next list of tasks that may be executed in parallel"
+
+		if not self.ready: self.prepare()
+
+		keys = self.cstr_groups.keys()
+
+		unconnected = []
+		remainder = []
+
+		# split constraint groups into those with no pending predecessor
+		# (unconnected) and those that must still wait (remainder)
+		for u in keys:
+			for k in self.cstr_order.values():
+				if u in k:
+					remainder.append(u)
+					break
+			else:
+				unconnected.append(u)
+
+		toreturn = []
+		for y in unconnected:
+			toreturn.extend(self.cstr_groups[y])
+
+		# remove stuff only after
+		for y in unconnected:
+				try: self.cstr_order.__delitem__(y)
+				except KeyError: pass
+				self.cstr_groups.__delitem__(y)
+
+		# nothing runnable but groups remain: the constraint graph has a cycle
+		if not toreturn and remainder:
+			raise Utils.WafError("circular order constraint detected %r" % remainder)
+
+		return toreturn
+
+	def tasks_by_max_jobs(self):
+		"(JOBCONTROL) returns the tasks that can run in parallel with the max amount of jobs"
+		if not self.ready: self.prepare()
+		if not self.temp_tasks: self.temp_tasks = self.tasks_in_parallel()
+		if not self.temp_tasks: return (None, None)
+
+		maxjobs = MAXJOBS
+		ret = []
+		remaining = []
+		for t in self.temp_tasks:
+			m = getattr(t, "maxjobs", getattr(self.__class__, "maxjobs", MAXJOBS))
+			if m > maxjobs:
+				remaining.append(t)
+			elif m < maxjobs:
+				# a more restrictive job limit was found: put the current batch back
+				remaining += ret
+				ret = [t]
+				maxjobs = m
+			else:
+				ret.append(t)
+		self.temp_tasks = remaining
+		return (maxjobs, ret)
+
+	def tasks_with_inner_constraints(self):
+		"""(MAXPARALLEL) returns all tasks in this group, but add the constraints on each task instance
+		as an optimization, it might be desirable to discard the tasks which do not have to run"""
+		if not self.ready: self.prepare()
+
+		if getattr(self, "done", None): return None
+
+		# translate the group-level partial order into per-task run_after links
+		for p in self.cstr_order:
+			for v in self.cstr_order[p]:
+				for m in self.cstr_groups[p]:
+					for n in self.cstr_groups[v]:
+						n.set_run_after(m)
+		self.cstr_order = Utils.DefaultDict(set)
+		self.cstr_groups = Utils.DefaultDict(list)
+		self.done = 1
+		return self.tasks[:] # make a copy
+
+class store_task_type(type):
+	"store the task types that have a name ending in _task into a map (remember the existing task types)"
+	def __init__(cls, name, bases, dict):
+		super(store_task_type, cls).__init__(name, bases, dict)
+		name = cls.__name__
+
+		# register the subclass under its short name, e.g. 'cc_task' -> 'cc'
+		if name.endswith('_task'):
+			name = name.replace('_task', '')
+		if name != 'TaskBase':
+			TaskBase.classes[name] = cls
+
+class TaskBase(object):
+	"""Base class for all Waf tasks
+
+	The most important methods are (by usual order of call):
+	1 runnable_status: ask the task if it should be run, skipped, or if we have to ask later
+	2 __str__: string to display to the user
+	3 run: execute the task
+	4 post_run: after the task is run, update the cache about the task
+
+	This class should be seen as an interface, it provides the very minimum necessary for the scheduler
+	so it does not do much.
+
+	For illustration purposes, TaskBase instances try to execute self.fun (if provided)
+	"""
+
+	__metaclass__ = store_task_type # registers subclasses into TaskBase.classes (Python 2 syntax)
+
+	color = "GREEN" # console color used by display()
+	maxjobs = MAXJOBS # per-type parallelization limit (see TaskGroup.tasks_by_max_jobs)
+	classes = {} # short name -> task class, filled by the metaclass
+	stat = None
+
+	def __init__(self, *k, **kw):
+		self.hasrun = NOT_RUN
+
+		try:
+			self.generator = kw['generator']
+		except KeyError:
+			# standalone task: act as its own generator
+			self.generator = self
+			self.bld = Build.bld
+
+		if kw.get('normal', 1):
+			# normal tasks register themselves with the scheduler immediately
+			self.generator.bld.task_manager.add_task(self)
+
+	def __repr__(self):
+		"used for debugging"
+		return '\n\t{task: %s %s}' % (self.__class__.__name__, str(getattr(self, "fun", "")))
+
+	def __str__(self):
+		"string to display to the user"
+		if hasattr(self, 'fun'):
+			return 'executing: %s\n' % self.fun.__name__
+		return self.__class__.__name__ + '\n'
+
+	def exec_command(self, *k, **kw):
+		"use this for executing commands from tasks"
+		# TODO in waf 1.6, eliminate bld.exec_command, and move the cwd processing to here
+		if self.env['env']:
+			kw['env'] = self.env['env']
+		return self.generator.bld.exec_command(*k, **kw)
+
+	def runnable_status(self):
+		"RUN_ME SKIP_ME or ASK_LATER"
+		return RUN_ME
+
+	def can_retrieve_cache(self):
+		# the base class has no cache support; Task overrides this
+		return False
+
+	def call_run(self):
+		# scheduler entry point: try the cache first, then run for real
+		if self.can_retrieve_cache():
+			return 0
+		return self.run()
+
+	def run(self):
+		"called if the task must run"
+		if hasattr(self, 'fun'):
+			return self.fun(self)
+		return 0
+
+	def post_run(self):
+		"update the dependency tree (node stats)"
+		pass
+
+	def display(self):
+		"print either the description (using __str__) or the progress bar or the ide output"
+		col1 = Logs.colors(self.color)
+		col2 = Logs.colors.NORMAL
+
+		if Options.options.progress_bar == 1:
+			return self.generator.bld.progress_line(self.position[0], self.position[1], col1, col2)
+
+		if Options.options.progress_bar == 2:
+			# machine-readable output for IDE integration
+			ela = Utils.get_elapsed_time(self.generator.bld.ini)
+			try:
+				ins  = ','.join([n.name for n in self.inputs])
+			except AttributeError:
+				ins = ''
+			try:
+				outs = ','.join([n.name for n in self.outputs])
+			except AttributeError:
+				outs = ''
+			return '|Total %s|Current %s|Inputs %s|Outputs %s|Time %s|\n' % (self.position[1], self.position[0], ins, outs, ela)
+
+		# default: '[ 12/100] description' padded to the width of the total
+		total = self.position[1]
+		n = len(str(total))
+		fs = '[%%%dd/%%%dd] %%s%%s%%s' % (n, n)
+		return fs % (self.position[0], self.position[1], col1, str(self), col2)
+
+	def attr(self, att, default=None):
+		"retrieve an attribute from the instance or from the class (microoptimization here)"
+		ret = getattr(self, att, self)
+		if ret is self: return getattr(self.__class__, att, default)
+		return ret
+
+	def hash_constraints(self):
+		"identify a task type for all the constraints relevant for the scheduler: precedence, file production"
+		a = self.attr
+		sum = hash((self.__class__.__name__,
+			str(a('before', '')),
+			str(a('after', '')),
+			str(a('ext_in', '')),
+			str(a('ext_out', '')),
+			self.__class__.maxjobs))
+		return sum
+
+	def format_error(self):
+		"error message to display to the user (when a build fails)"
+		if getattr(self, "err_msg", None):
+			return self.err_msg
+		elif self.hasrun == CRASHED:
+			try:
+				return " -> task failed (err #%d): %r" % (self.err_code, self)
+			except AttributeError:
+				return " -> task failed: %r" % self
+		elif self.hasrun == MISSING:
+			return " -> missing files: %r" % self
+		else:
+			return ''
+
+	def install(self):
+		"""
+		installation is performed by looking at the task attributes:
+		* install_path: installation path like "${PREFIX}/bin"
+		* filename: install the first node in the outputs as a file with a particular name, be certain to give os.sep
+		* chmod: permissions
+		"""
+		bld = self.generator.bld
+		d = self.attr('install') # NOTE(review): 'd' is never used below
+
+		if self.attr('install_path'):
+			lst = [a.relpath_gen(bld.srcnode) for a in self.outputs]
+			perm = self.attr('chmod', O644)
+			if self.attr('src'):
+				# if src is given, install the sources too
+				lst += [a.relpath_gen(bld.srcnode) for a in self.inputs]
+			if self.attr('filename'):
+				dir = self.install_path.rstrip(os.sep) + os.sep + self.attr('filename')
+				bld.install_as(dir, lst[0], self.env, perm)
+			else:
+				bld.install_files(self.install_path, lst, self.env, perm)
+
+class Task(TaskBase):
+       """The parent class is quite limited, in this version:
+       * file system interaction: input and output nodes
+       * persistence: do not re-execute tasks that have already run
+       * caching: same files can be saved and retrieved from a cache directory
+       * dependencies:
+               implicit, like .c files depending on .h files
+               explicit, like the input nodes or the dep_nodes
+               environment variables, like the CXXFLAGS in self.env
+       """
+       vars = []
+	def __init__(self, env, **kw):
+		"""bind the task to a configuration set 'env'; other keyword
+		arguments (generator, normal) are forwarded to TaskBase"""
+		TaskBase.__init__(self, **kw)
+		self.env = env
+
+		# inputs and outputs are nodes
+		# use setters when possible
+		self.inputs  = []
+		self.outputs = []
+
+		self.dep_nodes = []
+		self.run_after = []
+
+		# Additionally, you may define the following
+		#self.dep_vars  = 'PREFIX DATADIR'
+
+	def __str__(self):
+		"string to display to the user: 'name: inputs -> outputs'"
+		env = self.env
+		src_str = ' '.join([a.nice_path(env) for a in self.inputs])
+		tgt_str = ' '.join([a.nice_path(env) for a in self.outputs])
+		if self.outputs: sep = ' -> '
+		else: sep = ''
+		return '%s: %s%s%s\n' % (self.__class__.__name__.replace('_task', ''), src_str, sep, tgt_str)
+
+	def __repr__(self):
+		"debugging representation with input and output node names"
+		return "".join(['\n\t{task: ', self.__class__.__name__, " ", ",".join([x.name for x in self.inputs]), " -> ", ",".join([x.name for x in self.outputs]), '}'])
+
+	def unique_id(self):
+		"get a unique id: hash the node paths, the variant, the class, the function"
+		try:
+			return self.uid
+		except AttributeError:
+			"this is not a real hot zone, but we want to avoid surprizes here"
+			m = md5()
+			up = m.update
+			up(self.__class__.__name__)
+			up(self.env.variant())
+			p = None
+			for x in self.inputs + self.outputs:
+				# hash each parent path only once per run of consecutive nodes
+				if p != x.parent.id:
+					p = x.parent.id
+					up(x.parent.abspath())
+				up(x.name)
+			self.uid = m.digest() # cached for subsequent calls
+			return self.uid
+
+	def set_inputs(self, inp):
+		"append a node or a list of nodes to self.inputs"
+		if isinstance(inp, list): self.inputs += inp
+		else: self.inputs.append(inp)
+
+	def set_outputs(self, out):
+		"append a node or a list of nodes to self.outputs"
+		if isinstance(out, list): self.outputs += out
+		else: self.outputs.append(out)
+
+	def set_run_after(self, task):
+		"set (scheduler) order on another task"
+		# TODO: handle list or object
+		assert isinstance(task, TaskBase)
+		self.run_after.append(task)
+
+	def add_file_dependency(self, filename):
+		"TODO user-provided file dependencies"
+		# NOTE(review): find_resource may return None; the None would be appended as-is
+		node = self.generator.bld.path.find_resource(filename)
+		self.dep_nodes.append(node)
+
+	def signature(self):
+		"""compute the task signature from explicit deps, env vars and implicit deps
+		the result is cached in self.cache_sig as (sig, exp_sig, imp_sig, var_sig)"""
+		# compute the result one time, and suppose the scan_signature will give the good result
+		try: return self.cache_sig[0]
+		except AttributeError: pass
+
+		self.m = md5()
+
+		# explicit deps
+		exp_sig = self.sig_explicit_deps()
+
+		# env vars
+		var_sig = self.sig_vars()
+
+		# implicit deps
+
+		imp_sig = SIG_NIL
+		if self.scan:
+			try:
+				imp_sig = self.sig_implicit_deps()
+			except ValueError:
+				# the implicit deps changed while scanning: restart the computation
+				return self.signature()
+
+		# we now have the signature (first element) and the details (for debugging)
+		ret = self.m.digest()
+		self.cache_sig = (ret, exp_sig, imp_sig, var_sig)
+		return ret
+
+	def runnable_status(self):
+		"SKIP_ME RUN_ME or ASK_LATER"
+		#return 0 # benchmarking
+
+		if self.inputs and (not self.outputs):
+			if not getattr(self.__class__, 'quiet', None):
+				warn("invalid task (no inputs OR outputs): override in a Task subclass or set the attribute 'quiet' %r" % self)
+
+		# wait until the tasks this one is ordered after have all run
+		for t in self.run_after:
+			if not t.hasrun:
+				return ASK_LATER
+
+		env = self.env
+		bld = self.generator.bld
+
+		# first compute the signature
+		new_sig = self.signature()
+
+		# compare the signature to a signature computed previously
+		key = self.unique_id()
+		try:
+			prev_sig = bld.task_sigs[key][0]
+		except KeyError:
+			debug("task: task %r must run as it was never run before or the task code changed", self)
+			return RUN_ME
+
+		# compare the signatures of the outputs
+		for node in self.outputs:
+			variant = node.variant(env)
+			try:
+				if bld.node_sigs[variant][node.id] != new_sig:
+					return RUN_ME
+			except KeyError:
+				debug("task: task %r must run as the output nodes do not exist", self)
+				return RUN_ME
+
+		# debug if asked to
+		if Logs.verbose: self.debug_why(bld.task_sigs[key])
+
+		if new_sig != prev_sig:
+			return RUN_ME
+		return SKIP_ME
+
+	def post_run(self):
+		"called after a successful task run"
+		bld = self.generator.bld
+		env = self.env
+		sig = self.signature()
+		ssig = sig.encode('hex') # hex digest used as the cache folder name (Python 2 idiom)
+
+		variant = env.variant()
+		for node in self.outputs:
+			# check if the node exists ..
+			try:
+				os.stat(node.abspath(env))
+			except OSError:
+				self.hasrun = MISSING
+				self.err_msg = '-> missing file: %r' % node.abspath(env)
+				raise Utils.WafError
+
+			# important, store the signature for the next run
+			bld.node_sigs[variant][node.id] = sig
+		bld.task_sigs[self.unique_id()] = self.cache_sig
+
+		# file caching, if possible
+		# try to avoid data corruption as much as possible
+		if not Options.cache_global or Options.options.nocache or not self.outputs:
+			return None
+
+		if getattr(self, 'cached', None):
+			return None
+
+		dname = os.path.join(Options.cache_global, ssig)
+		tmpdir = tempfile.mkdtemp(prefix=Options.cache_global + os.sep + 'waf')
+
+		# best-effort: remove a stale cache entry, ignoring any error
+		try:
+			shutil.rmtree(dname)
+		except:
+			pass
+
+		try:
+			# stage the outputs into the temporary folder, numbered to avoid name clashes
+			i = 0
+			for node in self.outputs:
+				variant = node.variant(env)
+				dest = os.path.join(tmpdir, str(i) + node.name)
+				shutil.copy2(node.abspath(env), dest)
+				i += 1
+		except (OSError, IOError):
+			try:
+				shutil.rmtree(tmpdir)
+			except:
+				pass
+		else:
+			try:
+				# rename so the complete cache entry appears at once
+				os.rename(tmpdir, dname)
+			except OSError:
+				# another process won the race: discard our copy
+				try:
+					shutil.rmtree(tmpdir)
+				except:
+					pass
+			else:
+				try:
+					os.chmod(dname, O755)
+				except:
+					pass
+
+	def can_retrieve_cache(self):
+		"""
+		Retrieve build nodes from the cache
+		update the file timestamps to help cleaning the least used entries from the cache
+		additionally, set an attribute 'cached' to avoid re-creating the same cache files
+
+		suppose there are files in cache/dir1/file1 and cache/dir2/file2
+		first, read the timestamp of dir1
+		then try to copy the files
+		then look at the timestamp again, if it has changed, the data may have been corrupt (cache update by another process)
+		should an exception occur, ignore the data
+		"""
+		if not Options.cache_global or Options.options.nocache or not self.outputs:
+			return None
+
+		env = self.env
+		sig = self.signature()
+		ssig = sig.encode('hex') # Python 2 hex digest, matches the folder name written by post_run
+
+		# first try to access the cache folder for the task
+		dname = os.path.join(Options.cache_global, ssig)
+		try:
+			t1 = os.stat(dname).st_mtime
+		except OSError:
+			return None
+
+		i = 0
+		for node in self.outputs:
+			variant = node.variant(env)
+
+			orig = os.path.join(dname, str(i) + node.name)
+			try:
+				shutil.copy2(orig, node.abspath(env))
+				# mark the cache file as used recently (modified)
+				os.utime(orig, None)
+			except (OSError, IOError):
+				debug('task: failed retrieving file')
+				return None
+			i += 1
+
+		# is it the same folder?
+		try:
+			t2 = os.stat(dname).st_mtime
+		except OSError:
+			return None
+
+		if t1 != t2:
+			return None
+
+		# NOTE(review): 'variant' here is the leftover value from the loop above,
+		# i.e. the last output's variant is applied to every node — confirm all
+		# outputs share one variant
+		for node in self.outputs:
+			self.generator.bld.node_sigs[variant][node.id] = sig
+			if Options.options.progress_bar < 1:
+				self.generator.bld.printout('restoring from cache %r\n' % node.bldpath(env))
+
+		self.cached = True
+		return 1
+
+	def debug_why(self, old_sigs):
+		"explains why a task is run"
+
+		# self.cache_sig holds the sub-signatures computed for the current run,
+		# indexed in the same order as the 'msgs' list below
+		new_sigs = self.cache_sig
+		def v(x):
+			# hex dump of a raw signature (Python 2 str.encode('hex'))
+			return x.encode('hex')
+
+		debug("Task %r", self)
+		msgs = ['Task must run', '* Source file or manual dependency', '* Implicit dependency', '* Environment variable']
+		tmp = 'task: -> %s: %s %s'
+		for x in xrange(len(msgs)):
+			if (new_sigs[x] != old_sigs[x]):
+				debug(tmp, msgs[x], v(old_sigs[x]), v(new_sigs[x]))
+
+	def sig_explicit_deps(self):
+		"""hash the input nodes, the manual dependencies (bld.deps_man) and
+		self.dep_nodes into self.m, and return the digest"""
+		bld = self.generator.bld
+		up = self.m.update
+
+		# the inputs
+		for x in self.inputs + getattr(self, 'dep_nodes', []):
+			if not x.parent.id in bld.cache_scanned_folders:
+				bld.rescan(x.parent)
+
+			variant = x.variant(self.env)
+			try:
+				up(bld.node_sigs[variant][x.id])
+			except KeyError:
+				raise Utils.WafError('Missing node signature for %r (required by %r)' % (x, self))
+
+		# manual dependencies, they can slow down the builds
+		if bld.deps_man:
+			additional_deps = bld.deps_man
+			for x in self.inputs + self.outputs:
+				try:
+					d = additional_deps[x.id]
+				except KeyError:
+					continue
+
+				for v in d:
+					if isinstance(v, Node.Node):
+						bld.rescan(v.parent)
+						variant = v.variant(self.env)
+						try:
+							v = bld.node_sigs[variant][v.id]
+						except KeyError:
+							raise Utils.WafError('Missing node signature for %r (required by %r)' % (v, self))
+					elif hasattr(v, '__call__'):
+						v = v() # dependency is a function, call it
+					up(v)
+
+		# NOTE(review): dep_nodes were already hashed in the first loop above,
+		# so they are folded into the digest a second time here; harmless for a
+		# cumulative hash, but presumably unintended - confirm against upstream
+		for x in self.dep_nodes:
+			v = bld.node_sigs[x.variant(self.env)][x.id]
+			up(v)
+
+		return self.m.digest()
+
+	def sig_vars(self):
+		"""hash the environment variables this task depends on
+		(self.__class__.vars plus the optional self.dep_vars) into self.m
+		and return the digest"""
+		bld = self.generator.bld
+		env = self.env
+
+		# dependencies on the environment vars
+		act_sig = bld.hash_env_vars(env, self.__class__.vars)
+		self.m.update(act_sig)
+
+		# additional variable dependencies, if provided
+		dep_vars = getattr(self, 'dep_vars', None)
+		if dep_vars:
+			self.m.update(bld.hash_env_vars(env, dep_vars))
+
+		return self.m.digest()
+
+	#def scan(self, node):
+	#	"""this method returns a tuple containing:
+	#	* a list of nodes corresponding to real files
+	#	* a list of names for files not found in path_lst
+	#	the input parameters may have more parameters than the ones used below
+	#	"""
+	#	return ((), ())
+	# subclasses set 'scan' to a function (signature above) to enable implicit
+	# dependency scanning; None disables it
+	scan = None
+
+	# compute the signature, recompute it if there is no match in the cache
+	def sig_implicit_deps(self):
+		"the signature obtained may not be the one if the files have changed, we do it in two steps"
+
+		bld = self.generator.bld
+
+		# get the task signatures from previous runs
+		key = self.unique_id()
+		prev_sigs = bld.task_sigs.get(key, ())
+		if prev_sigs:
+			try:
+				# for issue #379
+				# prev_sigs[2] is the implicit-dependency sub-signature stored previously
+				if prev_sigs[2] == self.compute_sig_implicit_deps():
+					return prev_sigs[2]
+			except (KeyError, OSError):
+				pass
+			del bld.task_sigs[key]
+			# NOTE(review): presumably caught by the caller to force a rescan - confirm
+			raise ValueError('rescan')
+
+		# no previous run or the signature of the dependencies has changed, rescan the dependencies
+		(nodes, names) = self.scan()
+		if Logs.verbose:
+			debug('deps: scanner for %s returned %s %s', str(self), str(nodes), str(names))
+
+		# store the dependencies in the cache
+		bld.node_deps[key] = nodes
+		bld.raw_deps[key] = names
+
+		# recompute the signature and return it
+		try:
+			sig = self.compute_sig_implicit_deps()
+		except KeyError:
+			# collect the dependency nodes lacking a signature, for the error message
+			try:
+				nodes = []
+				for k in bld.node_deps.get(self.unique_id(), []):
+					if k.id & 3 == 2: # Node.FILE:
+						if not k.id in bld.node_sigs[0]:
+							nodes.append(k)
+					else:
+						if not k.id in bld.node_sigs[self.env.variant()]:
+							nodes.append(k)
+			except:
+				nodes = '?'
+			raise Utils.WafError('Missing node signature for %r (for implicit dependencies %r)' % (nodes, self))
+
+		return sig
+
+	def compute_sig_implicit_deps(self):
+		"""it is intended for .cpp and inferred .h files
+		there is a single list (no tree traversal)
+		this is the hot spot so ... do not touch"""
+		upd = self.m.update
+
+		bld = self.generator.bld
+		tstamp = bld.node_sigs
+		env = self.env
+
+		for k in bld.node_deps.get(self.unique_id(), []):
+			# unlikely but necessary if it happens
+			if not k.parent.id in bld.cache_scanned_folders:
+				# if the parent folder is removed, an OSError may be thrown
+				bld.rescan(k.parent)
+
+			# if the parent folder is removed, a KeyError will be thrown
+			# 'k.id & 3 == 2' tests the node-type bits (file nodes, see Node.FILE)
+			if k.id & 3 == 2: # Node.FILE:
+				upd(tstamp[0][k.id])
+			else:
+				upd(tstamp[env.variant()][k.id])
+
+		return self.m.digest()
+
+def funex(c):
+	"""compile and execute the generated source 'c' and return the function
+	'f' it defines (the input is generated by waf itself, not user data)"""
+	dc = {}
+	exec(c, dc)
+	return dc['f']
+
+# matches '\', '$$' and '${VAR...}' substitutions in rule strings
+reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})", re.M)
+def compile_fun_shell(name, line):
+	"""Compiles a string (once) into a function, eg:
+	simple_task_type('c++', '${CXX} -o ${TGT[0]} ${SRC} -I ${SRC[0].parent.bldpath()}')
+
+	The env variables (CXX, ..) on the task must not hold dicts (order)
+	The reserved keywords TGT and SRC represent the task input and output nodes
+
+	quick test:
+	bld(source='wscript', rule='echo "foo\\${SRC[0].name}\\bar"')
+	"""
+
+	extr = []
+	def repl(match):
+		# rewrite each ${VAR...} as a '%s' placeholder, recording (var, code)
+		g = match.group
+		if g('dollar'): return "$"
+		elif g('backslash'): return '\\\\'
+		elif g('subst'): extr.append((g('var'), g('code'))); return "%s"
+		return None
+
+	line = reg_act.sub(repl, line) or line
+
+	parm = []
+	dvars = []
+	app = parm.append
+	for (var, meth) in extr:
+		if var == 'SRC':
+			if meth: app('task.inputs%s' % meth)
+			else: app('" ".join([a.srcpath(env) for a in task.inputs])')
+		elif var == 'TGT':
+			if meth: app('task.outputs%s' % meth)
+			else: app('" ".join([a.bldpath(env) for a in task.outputs])')
+		else:
+			# plain variable: read from the task env, remember it as a dependency
+			if not var in dvars: dvars.append(var)
+			app("p('%s')" % var)
+	if parm: parm = "%% (%s) " % (',\n\t\t'.join(parm))
+	else: parm = ''
+
+	c = COMPILE_TEMPLATE_SHELL % (line, parm)
+
+	debug('action: %s', c)
+	return (funex(c), dvars)
+
+def compile_fun_noshell(name, line):
+	"""same as compile_fun_shell, but the generated function builds an
+	argument list instead of a shell command line
+	returns (function, list of env variable names the rule depends on)"""
+
+	extr = []
+	def repl(match):
+		g = match.group
+		if g('dollar'): return "$"
+		elif g('subst'): extr.append((g('var'), g('code'))); return "<<|@|>>"
+		return None
+
+	# split the rule on the substitution markers
+	line2 = reg_act.sub(repl, line)
+	params = line2.split('<<|@|>>')
+
+	buf = []
+	dvars = []
+	app = buf.append
+	for x in xrange(len(extr)):
+		params[x] = params[x].strip()
+		if params[x]:
+			app("lst.extend(%r)" % params[x].split())
+		(var, meth) = extr[x]
+		if var == 'SRC':
+			if meth: app('lst.append(task.inputs%s)' % meth)
+			else: app("lst.extend([a.srcpath(env) for a in task.inputs])")
+		elif var == 'TGT':
+			if meth: app('lst.append(task.outputs%s)' % meth)
+			else: app("lst.extend([a.bldpath(env) for a in task.outputs])")
+		else:
+			app('lst.extend(to_list(env[%r]))' % var)
+			if not var in dvars: dvars.append(var)
+
+	if params[-1]:
+		app("lst.extend(%r)" % shlex.split(params[-1]))
+
+	fun = COMPILE_TEMPLATE_NOSHELL % "\n\t".join(buf)
+	debug('action: %s', fun)
+	return (funex(fun), dvars)
+
+def compile_fun(name, line, shell=None):
+	"commands can be launched by the shell or not"
+	# NOTE(review): find(...) > 0 ignores a match at column 0, so a rule
+	# *starting* with '<' or '>' would not force shell mode - confirm intended
+	if line.find('<') > 0 or line.find('>') > 0 or line.find('&&') > 0:
+		shell = True
+	#else:
+	#	shell = False
+
+	# default: no shell on win32, shell elsewhere
+	if shell is None:
+		if sys.platform == 'win32':
+			shell = False
+		else:
+			shell = True
+
+	if shell:
+		return compile_fun_shell(name, line)
+	else:
+		return compile_fun_noshell(name, line)
+
+def simple_task_type(name, line, color='GREEN', vars=[], ext_in=[], ext_out=[], before=[], after=[], shell=None):
+	"""return a new Task subclass with the function run compiled from the line given"""
+	(fun, dvars) = compile_fun(name, line, shell)
+	fun.code = line
+	# vars: explicit override, otherwise the env variables detected in the rule
+	return task_type_from_func(name, fun, vars or dvars, color, ext_in, ext_out, before, after)
+
+def task_type_from_func(name, func, vars=[], color='GREEN', ext_in=[], ext_out=[], before=[], after=[]):
+	"""return a new Task subclass with 'func' as its run method, and register
+	it in TaskBase.classes
+	(the mutable [] defaults are shared across calls - safe here only because
+	they are never mutated)"""
+	params = {
+		'run': func,
+		'vars': vars,
+		'color': color,
+		'name': name,
+		'ext_in': Utils.to_list(ext_in),
+		'ext_out': Utils.to_list(ext_out),
+		'before': Utils.to_list(before),
+		'after': Utils.to_list(after),
+	}
+
+	# type(Task) so the new class is built with Task's own metaclass
+	cls = type(Task)(name, (Task,), params)
+	TaskBase.classes[name] = cls
+	return cls
+
+def always_run(cls):
+	"""Set all task instances of this class to be executed whenever a build is started
+	The task signature is calculated, but the result of the comparison between
+	task signatures is bypassed
+	"""
+	old = cls.runnable_status
+	def always(self):
+		# turn 'nothing to do' into 'run anyway'; other statuses pass through
+		ret = old(self)
+		if ret == SKIP_ME:
+			return RUN_ME
+		return ret
+	cls.runnable_status = always
+
+def update_outputs(cls):
+	"""When a command is always run, it is possible that the output only change
+	sometimes. By default the build node have as a hash the signature of the task
+	which may not change. With this, the output nodes (produced) are hashed,
+	and the hashes are set to the build nodes
+
+	This may avoid unnecessary recompilations, but it uses more resources
+	(hashing the output files) so it is not used by default
+	"""
+	old_post_run = cls.post_run
+	def post_run(self):
+		old_post_run(self)
+		bld = self.generator.bld
+		for output in self.outputs:
+			# hash the produced file contents instead of reusing the task signature
+			bld.node_sigs[self.env.variant()][output.id] = Utils.h_file(output.abspath(self.env))
+			# remember which task produced this output
+			# NOTE(review): this stores node ids in bld.task_sigs, the same dict
+			# keyed by task unique ids elsewhere - confirm the id spaces cannot collide
+			bld.task_sigs[output.id] = self.unique_id()
+	cls.post_run = post_run
+
+	old_runnable_status = cls.runnable_status
+	def runnable_status(self):
+		status = old_runnable_status(self)
+		if status != RUN_ME:
+			return status
+
+		uid = self.unique_id()
+		try:
+			bld = self.outputs[0].__class__.bld
+			new_sig  = self.signature()
+			# the stored value is indexable; [0] is the total task signature
+			prev_sig = bld.task_sigs[uid][0]
+			if prev_sig == new_sig:
+				for x in self.outputs:
+					if not x.id in bld.node_sigs[self.env.variant()]:
+						return RUN_ME
+					if bld.task_sigs[x.id] != uid: # ensure the outputs are associated with *this* task
+						return RUN_ME
+				return SKIP_ME
+		except KeyError:
+			pass
+		except IndexError:
+			pass
+		return RUN_ME
+	cls.runnable_status = runnable_status
+
+def extract_outputs(tasks):
+	"""file_deps: Infer additional dependencies from task input and output nodes
+	"""
+	# per-variant maps: node id -> tasks reading it (ins) / producing it (outs)
+	v = {}
+	for x in tasks:
+		try:
+			(ins, outs) = v[x.env.variant()]
+		except KeyError:
+			ins = {}
+			outs = {}
+			v[x.env.variant()] = (ins, outs)
+
+		for a in getattr(x, 'inputs', []):
+			try: ins[a.id].append(x)
+			except KeyError: ins[a.id] = [x]
+		for a in getattr(x, 'outputs', []):
+			try: outs[a.id].append(x)
+			except KeyError: outs[a.id] = [x]
+
+	for (ins, outs) in v.values():
+		# nodes both consumed and produced: order the consumers after the producers
+		# (iterkeys is Python 2 only)
+		links = set(ins.iterkeys()).intersection(outs.iterkeys())
+		for k in links:
+			for a in ins[k]:
+				for b in outs[k]:
+					a.set_run_after(b)
+
+def extract_deps(tasks):
+	"""file_deps: Infer additional dependencies from task input and output nodes and from implicit dependencies
+	returned by the scanners - that will only work if all tasks are created
+
+	this is aimed at people who have pathological builds and who do not care enough
+	to implement the build dependencies properly
+
+	with two loops over the list of tasks, do not expect this to be really fast
+	"""
+
+	# first reuse the function above
+	extract_outputs(tasks)
+
+	# map the output nodes to the tasks producing them
+	out_to_task = {}
+	for x in tasks:
+		v = x.env.variant()
+		try:
+			lst = x.outputs
+		except AttributeError:
+			pass
+		else:
+			for node in lst:
+				out_to_task[(v, node.id)] = x
+
+	# map the dependencies found to the tasks compiled
+	dep_to_task = {}
+	for x in tasks:
+		try:
+			# force the scanners to run so bld.node_deps gets populated
+			x.signature()
+		except: # this is on purpose
+			pass
+
+		v = x.env.variant()
+		# NOTE(review): 'key' below is computed but never used
+		key = x.unique_id()
+		for k in x.generator.bld.node_deps.get(x.unique_id(), []):
+			try: dep_to_task[(v, k.id)].append(x)
+			except KeyError: dep_to_task[(v, k.id)] = [x]
+
+	# now get the intersection
+	deps = set(dep_to_task.keys()).intersection(set(out_to_task.keys()))
+
+	# and add the dependencies from task to task
+	for idx in deps:
+		for k in dep_to_task[idx]:
+			k.set_run_after(out_to_task[idx])
+
+	# cleanup, remove the signatures
+	for x in tasks:
+		try:
+			delattr(x, 'cache_sig')
+		except AttributeError:
+			pass
+
diff --git a/buildtools/wafadmin/TaskGen.py b/buildtools/wafadmin/TaskGen.py
new file mode 100644 (file)
index 0000000..ae1834a
--- /dev/null
@@ -0,0 +1,612 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2008 (ita)
+
+"""
+The class task_gen encapsulates the creation of task objects (low-level code)
+The instances can have various parameters, but the creation of task nodes (Task.py)
+is delayed. To achieve this, various methods are called from the method "apply"
+
+The class task_gen contains lots of methods, and a configuration table:
+* the methods to call (self.meths) can be specified dynamically (removing, adding, ..)
+* the order of the methods (self.prec or by default task_gen.prec) is configurable
+* new methods can be inserted dynamically without pasting old code
+
+Additionally, task_gen provides the method apply_core
+* file extensions are mapped to methods: def meth(self, name_or_node)
+* if a mapping is not found in self.mappings, it is searched in task_gen.mappings
+* when called, the functions may modify self.allnodes to re-add source to process
+* the mappings can map an extension or a filename (see the code below)
+
+WARNING: subclasses must reimplement the clone method
+"""
+
+import os, traceback, copy
+import Build, Task, Utils, Logs, Options
+from Logs import debug, error, warn
+from Constants import *
+
+# common misspellings of task_gen attribute names; task_gen.__setattr__
+# silently maps the wrong name to the right one (with a warning)
+typos = {
+'sources':'source',
+'targets':'target',
+'include':'includes',
+'define':'defines',
+'importpath':'importpaths',
+'install_var':'install_path',
+'install_subdir':'install_path',
+'inst_var':'install_path',
+'inst_dir':'install_path',
+'feature':'features',
+}
+
+class register_obj(type):
+	"""no decorators for classes, so we use a metaclass
+	we store into task_gen.classes the classes that inherit task_gen
+	and whose names end in '_taskgen'
+	"""
+	def __init__(cls, name, bases, dict):
+		super(register_obj, cls).__init__(name, bases, dict)
+		name = cls.__name__
+		suffix = '_taskgen'
+		if name.endswith(suffix):
+			# NOTE(review): replace() removes the suffix anywhere it occurs in
+			# the name, not only at the end - fine for conventional names
+			task_gen.classes[name.replace(suffix, '')] = cls
+
+class task_gen(object):
+       """
+       Most methods are of the form 'def meth(self):' without any parameters
+       there are many of them, and they do many different things:
+       * task creation
+       * task results installation
+       * environment modification
+       * attribute addition/removal
+
+       The inheritance approach is complicated
+       * mixing several languages at once
+       * subclassing is needed even for small changes
+       * inserting new methods is complicated
+
+       This new class uses a configuration table:
+       * adding new methods easily
+       * obtaining the order in which to call the methods
+       * postponing the method calls (post() -> apply)
+
+       Additionally, a 'traits' static attribute is provided:
+       * this list contains methods
+       * the methods can remove or add methods from self.meths
+       Example1: the attribute 'staticlib' is set on an instance
+       a method set in the list of traits is executed when the
+       instance is posted, it finds that flag and adds another method for execution
+       Example2: a method set in the list of traits finds the msvc
+       compiler (from self.env['MSVC']==1); more methods are added to self.meths
+       """
+
+	# Python 2 metaclass hook: registers '*_taskgen' subclasses (see register_obj)
+	__metaclass__ = register_obj
+	# class-level extension -> method mappings, fallback for self.mappings
+	mappings = {}
+	mapped = {}
+	# class-level method precedence table (fallback for self.prec)
+	prec = Utils.DefaultDict(list)
+	# feature name -> set of method names to execute (see 'traits' in the docstring)
+	traits = Utils.DefaultDict(set)
+	# name -> class, filled by the register_obj metaclass
+	classes = {}
+
+	def __init__(self, *kw, **kwargs):
+		"""positional arguments are feature names; keyword arguments are set
+		as attributes on the instance (passing through the 'typos' check)"""
+		self.prec = Utils.DefaultDict(list)
+		"map precedence of function names to call"
+		# so we will have to play with directed acyclic graphs
+		# detect cycles, etc
+
+		self.source = ''
+		self.target = ''
+
+		# list of methods to execute - do not touch it by hand unless you know
+		self.meths = []
+
+		# list of mappings extension -> function
+		self.mappings = {}
+
+		# list of features (see the documentation on traits)
+		self.features = list(kw)
+
+		# not always a good idea
+		self.tasks = []
+
+		self.default_chmod = O644
+		self.default_install_path = None
+
+		# kind of private, beware of what you put in it, also, the contents are consumed
+		self.allnodes = []
+
+		# the build context may be passed in via kwargs; defaults to the global one
+		self.bld = kwargs.get('bld', Build.bld)
+		self.env = self.bld.env.copy()
+
+		self.path = self.bld.path # emulate chdir when reading scripts
+		self.name = '' # give a name to the target (static+shlib with the same targetname ambiguity)
+
+		# provide a unique id
+		self.idx = self.bld.idx[self.path.id] = self.bld.idx.get(self.path.id, 0) + 1
+
+		# iteritems is Python 2 only
+		for key, val in kwargs.iteritems():
+			setattr(self, key, val)
+
+		self.bld.task_manager.add_task_gen(self)
+		self.bld.all_task_gen.append(self)
+
+	def __str__(self):
+		"""human-readable description of this task generator"""
+		return ("<task_gen '%s' of type %s defined in %s>"
+			% (self.name or self.target, self.__class__.__name__, str(self.path)))
+
+	def __setattr__(self, name, attr):
+		"""intercept attribute assignment to correct common misspellings
+		(see the module-level 'typos' table), warning when one is found"""
+		real = typos.get(name, name)
+		if real != name:
+			warn('typo %s -> %s' % (name, real))
+			if Logs.verbose > 0:
+				traceback.print_stack()
+		object.__setattr__(self, real, attr)
+
+	def to_list(self, value):
+		"helper: returns a list"
+		# whitespace-separated strings are split; anything else is returned as-is
+		if isinstance(value, str): return value.split()
+		else: return value
+
+       def apply(self):
+               "order the methods to execute using self.prec or task_gen.prec"
+               keys = set(self.meths)
+
+               # add the methods listed in the features
+               self.features = Utils.to_list(self.features)
+               for x in self.features + ['*']:
+                       st = task_gen.traits[x]
+                       if not st:
+                               warn('feature %r does not exist - bind at least one method to it' % x)
+                       keys.update(st)
+
+               # copy the precedence table
+               prec = {}
+               prec_tbl = self.prec or task_gen.prec
+               for x in prec_tbl:
+                       if x in keys:
+                               prec[x] = prec_tbl[x]
+
+               # elements disconnected
+               tmp = []
+               for a in keys:
+                       for x in prec.values():
+                               if a in x: break
+                       else:
+                               tmp.append(a)
+
+               # topological sort
+               out = []
+               while tmp:
+                       e = tmp.pop()
+                       if e in keys: out.append(e)
+                       try:
+                               nlst = prec[e]
+                       except KeyError:
+                               pass
+                       else:
+                               del prec[e]
+                               for x in nlst:
+                                       for y in prec:
+                                               if x in prec[y]:
+                                                       break
+                                       else:
+                                               tmp.append(x)
+
+               if prec: raise Utils.WafError("graph has a cycle %s" % str(prec))
+               out.reverse()
+               self.meths = out
+
+               # then we run the methods in order
+  &n