thirdparty:waf: New files for waf 1.9.10
authorThomas Nagy <tnagy@waf.io>
Sat, 26 Mar 2016 12:32:11 +0000 (13:32 +0100)
committerAndrew Bartlett <abartlet@samba.org>
Wed, 5 Sep 2018 04:37:22 +0000 (06:37 +0200)
Signed-off-by: Thomas Nagy <tnagy@waf.io>
Reviewed-by: Alexander Bokovoy <ab@samba.org>
Reviewed-by: Andrew Bartlett <abartlet@samba.org>
221 files changed:
third_party/waf/wafadmin/3rdparty/ParallelDebug.py [deleted file]
third_party/waf/wafadmin/3rdparty/batched_cc.py [deleted file]
third_party/waf/wafadmin/3rdparty/boost.py [deleted file]
third_party/waf/wafadmin/3rdparty/build_file_tracker.py [deleted file]
third_party/waf/wafadmin/3rdparty/fluid.py [deleted file]
third_party/waf/wafadmin/3rdparty/gccdeps.py [deleted file]
third_party/waf/wafadmin/3rdparty/go.py [deleted file]
third_party/waf/wafadmin/3rdparty/lru_cache.py [deleted file]
third_party/waf/wafadmin/3rdparty/paranoid.py [deleted file]
third_party/waf/wafadmin/3rdparty/prefork.py [deleted file]
third_party/waf/wafadmin/3rdparty/print_commands.py [deleted file]
third_party/waf/wafadmin/3rdparty/swig.py [deleted file]
third_party/waf/wafadmin/3rdparty/valadoc.py [deleted file]
third_party/waf/wafadmin/Build.py [deleted file]
third_party/waf/wafadmin/Configure.py [deleted file]
third_party/waf/wafadmin/Constants.py [deleted file]
third_party/waf/wafadmin/Environment.py [deleted file]
third_party/waf/wafadmin/Logs.py [deleted file]
third_party/waf/wafadmin/Node.py [deleted file]
third_party/waf/wafadmin/Options.py [deleted file]
third_party/waf/wafadmin/Runner.py [deleted file]
third_party/waf/wafadmin/Scripting.py [deleted file]
third_party/waf/wafadmin/Task.py [deleted file]
third_party/waf/wafadmin/TaskGen.py [deleted file]
third_party/waf/wafadmin/Tools/__init__.py [deleted file]
third_party/waf/wafadmin/Tools/ar.py [deleted file]
third_party/waf/wafadmin/Tools/bison.py [deleted file]
third_party/waf/wafadmin/Tools/cc.py [deleted file]
third_party/waf/wafadmin/Tools/ccroot.py [deleted file]
third_party/waf/wafadmin/Tools/compiler_cc.py [deleted file]
third_party/waf/wafadmin/Tools/compiler_cxx.py [deleted file]
third_party/waf/wafadmin/Tools/compiler_d.py [deleted file]
third_party/waf/wafadmin/Tools/config_c.py [deleted file]
third_party/waf/wafadmin/Tools/cs.py [deleted file]
third_party/waf/wafadmin/Tools/cxx.py [deleted file]
third_party/waf/wafadmin/Tools/d.py [deleted file]
third_party/waf/wafadmin/Tools/dbus.py [deleted file]
third_party/waf/wafadmin/Tools/dmd.py [deleted file]
third_party/waf/wafadmin/Tools/flex.py [deleted file]
third_party/waf/wafadmin/Tools/gas.py [deleted file]
third_party/waf/wafadmin/Tools/gcc.py [deleted file]
third_party/waf/wafadmin/Tools/gdc.py [deleted file]
third_party/waf/wafadmin/Tools/glib2.py [deleted file]
third_party/waf/wafadmin/Tools/gnome.py [deleted file]
third_party/waf/wafadmin/Tools/gnu_dirs.py [deleted file]
third_party/waf/wafadmin/Tools/gob2.py [deleted file]
third_party/waf/wafadmin/Tools/gxx.py [deleted file]
third_party/waf/wafadmin/Tools/icc.py [deleted file]
third_party/waf/wafadmin/Tools/icpc.py [deleted file]
third_party/waf/wafadmin/Tools/intltool.py [deleted file]
third_party/waf/wafadmin/Tools/javaw.py [deleted file]
third_party/waf/wafadmin/Tools/kde4.py [deleted file]
third_party/waf/wafadmin/Tools/libtool.py [deleted file]
third_party/waf/wafadmin/Tools/lua.py [deleted file]
third_party/waf/wafadmin/Tools/msvc.py [deleted file]
third_party/waf/wafadmin/Tools/nasm.py [deleted file]
third_party/waf/wafadmin/Tools/ocaml.py [deleted file]
third_party/waf/wafadmin/Tools/osx.py [deleted file]
third_party/waf/wafadmin/Tools/perl.py [deleted file]
third_party/waf/wafadmin/Tools/preproc.py [deleted file]
third_party/waf/wafadmin/Tools/python.py [deleted file]
third_party/waf/wafadmin/Tools/qt4.py [deleted file]
third_party/waf/wafadmin/Tools/ruby.py [deleted file]
third_party/waf/wafadmin/Tools/suncc.py [deleted file]
third_party/waf/wafadmin/Tools/suncxx.py [deleted file]
third_party/waf/wafadmin/Tools/tex.py [deleted file]
third_party/waf/wafadmin/Tools/unittestw.py [deleted file]
third_party/waf/wafadmin/Tools/vala.py [deleted file]
third_party/waf/wafadmin/Tools/winres.py [deleted file]
third_party/waf/wafadmin/Tools/xlc.py [deleted file]
third_party/waf/wafadmin/Tools/xlcxx.py [deleted file]
third_party/waf/wafadmin/Utils.py [deleted file]
third_party/waf/wafadmin/__init__.py [deleted file]
third_party/waf/wafadmin/ansiterm.py [deleted file]
third_party/waf/wafadmin/pproc.py [deleted file]
third_party/waf/wafadmin/py3kfixes.py [deleted file]
third_party/waf/waflib/Build.py [new file with mode: 0644]
third_party/waf/waflib/ConfigSet.py [new file with mode: 0644]
third_party/waf/waflib/Configure.py [new file with mode: 0644]
third_party/waf/waflib/Context.py [new file with mode: 0644]
third_party/waf/waflib/Errors.py [new file with mode: 0644]
third_party/waf/waflib/Logs.py [new file with mode: 0644]
third_party/waf/waflib/Node.py [new file with mode: 0644]
third_party/waf/waflib/Options.py [new file with mode: 0644]
third_party/waf/waflib/Runner.py [new file with mode: 0644]
third_party/waf/waflib/Scripting.py [new file with mode: 0644]
third_party/waf/waflib/Task.py [new file with mode: 0644]
third_party/waf/waflib/TaskGen.py [new file with mode: 0644]
third_party/waf/waflib/Tools/__init__.py [new file with mode: 0644]
third_party/waf/waflib/Tools/ar.py [new file with mode: 0644]
third_party/waf/waflib/Tools/asm.py [new file with mode: 0644]
third_party/waf/waflib/Tools/bison.py [new file with mode: 0644]
third_party/waf/waflib/Tools/c.py [new file with mode: 0644]
third_party/waf/waflib/Tools/c_aliases.py [new file with mode: 0644]
third_party/waf/waflib/Tools/c_config.py [new file with mode: 0644]
third_party/waf/waflib/Tools/c_osx.py [new file with mode: 0644]
third_party/waf/waflib/Tools/c_preproc.py [new file with mode: 0644]
third_party/waf/waflib/Tools/c_tests.py [new file with mode: 0644]
third_party/waf/waflib/Tools/ccroot.py [new file with mode: 0644]
third_party/waf/waflib/Tools/clang.py [new file with mode: 0644]
third_party/waf/waflib/Tools/clangxx.py [new file with mode: 0644]
third_party/waf/waflib/Tools/compiler_c.py [new file with mode: 0644]
third_party/waf/waflib/Tools/compiler_cxx.py [new file with mode: 0644]
third_party/waf/waflib/Tools/compiler_d.py [new file with mode: 0644]
third_party/waf/waflib/Tools/compiler_fc.py [new file with mode: 0644]
third_party/waf/waflib/Tools/cs.py [new file with mode: 0644]
third_party/waf/waflib/Tools/cxx.py [new file with mode: 0644]
third_party/waf/waflib/Tools/d.py [new file with mode: 0644]
third_party/waf/waflib/Tools/d_config.py [new file with mode: 0644]
third_party/waf/waflib/Tools/d_scan.py [new file with mode: 0644]
third_party/waf/waflib/Tools/dbus.py [new file with mode: 0644]
third_party/waf/waflib/Tools/dmd.py [new file with mode: 0644]
third_party/waf/waflib/Tools/errcheck.py [new file with mode: 0644]
third_party/waf/waflib/Tools/fc.py [new file with mode: 0644]
third_party/waf/waflib/Tools/fc_config.py [new file with mode: 0644]
third_party/waf/waflib/Tools/fc_scan.py [new file with mode: 0644]
third_party/waf/waflib/Tools/flex.py [new file with mode: 0644]
third_party/waf/waflib/Tools/g95.py [new file with mode: 0644]
third_party/waf/waflib/Tools/gas.py [new file with mode: 0644]
third_party/waf/waflib/Tools/gcc.py [new file with mode: 0644]
third_party/waf/waflib/Tools/gdc.py [new file with mode: 0644]
third_party/waf/waflib/Tools/gfortran.py [new file with mode: 0644]
third_party/waf/waflib/Tools/glib2.py [new file with mode: 0644]
third_party/waf/waflib/Tools/gnu_dirs.py [new file with mode: 0644]
third_party/waf/waflib/Tools/gxx.py [new file with mode: 0644]
third_party/waf/waflib/Tools/icc.py [new file with mode: 0644]
third_party/waf/waflib/Tools/icpc.py [new file with mode: 0644]
third_party/waf/waflib/Tools/ifort.py [new file with mode: 0644]
third_party/waf/waflib/Tools/intltool.py [new file with mode: 0644]
third_party/waf/waflib/Tools/irixcc.py [new file with mode: 0644]
third_party/waf/waflib/Tools/javaw.py [new file with mode: 0644]
third_party/waf/waflib/Tools/kde4.py [new file with mode: 0644]
third_party/waf/waflib/Tools/ldc2.py [new file with mode: 0644]
third_party/waf/waflib/Tools/lua.py [new file with mode: 0644]
third_party/waf/waflib/Tools/md5_tstamp.py [new file with mode: 0644]
third_party/waf/waflib/Tools/msvc.py [new file with mode: 0644]
third_party/waf/waflib/Tools/nasm.py [new file with mode: 0644]
third_party/waf/waflib/Tools/nobuild.py [new file with mode: 0644]
third_party/waf/waflib/Tools/perl.py [new file with mode: 0644]
third_party/waf/waflib/Tools/python.py [new file with mode: 0644]
third_party/waf/waflib/Tools/qt4.py [new file with mode: 0644]
third_party/waf/waflib/Tools/qt5.py [new file with mode: 0644]
third_party/waf/waflib/Tools/ruby.py [new file with mode: 0644]
third_party/waf/waflib/Tools/suncc.py [new file with mode: 0644]
third_party/waf/waflib/Tools/suncxx.py [new file with mode: 0644]
third_party/waf/waflib/Tools/tex.py [new file with mode: 0644]
third_party/waf/waflib/Tools/vala.py [new file with mode: 0644]
third_party/waf/waflib/Tools/waf_unit_test.py [new file with mode: 0644]
third_party/waf/waflib/Tools/winres.py [new file with mode: 0644]
third_party/waf/waflib/Tools/xlc.py [new file with mode: 0644]
third_party/waf/waflib/Tools/xlcxx.py [new file with mode: 0644]
third_party/waf/waflib/Utils.py [new file with mode: 0644]
third_party/waf/waflib/__init__.py [new file with mode: 0644]
third_party/waf/waflib/ansiterm.py [new file with mode: 0644]
third_party/waf/waflib/extras/__init__.py [new file with mode: 0644]
third_party/waf/waflib/extras/add_objects.py [new file with mode: 0644]
third_party/waf/waflib/extras/batched_cc.py [new file with mode: 0644]
third_party/waf/waflib/extras/build_file_tracker.py [new file with mode: 0644]
third_party/waf/waflib/extras/build_logs.py [new file with mode: 0644]
third_party/waf/waflib/extras/c_bgxlc.py [new file with mode: 0644]
third_party/waf/waflib/extras/c_dumbpreproc.py [new file with mode: 0644]
third_party/waf/waflib/extras/c_emscripten.py [new file with mode: 0644]
third_party/waf/waflib/extras/c_nec.py [new file with mode: 0644]
third_party/waf/waflib/extras/cfg_altoptions.py [new file with mode: 0644]
third_party/waf/waflib/extras/cfg_cross_gnu.py [new file with mode: 0644]
third_party/waf/waflib/extras/clang_compilation_database.py [new file with mode: 0644]
third_party/waf/waflib/extras/codelite.py [new file with mode: 0644]
third_party/waf/waflib/extras/color_gcc.py [new file with mode: 0644]
third_party/waf/waflib/extras/color_rvct.py [new file with mode: 0644]
third_party/waf/waflib/extras/compat15.py [new file with mode: 0644]
third_party/waf/waflib/extras/cppcheck.py [new file with mode: 0644]
third_party/waf/waflib/extras/cpplint.py [new file with mode: 0644]
third_party/waf/waflib/extras/cython.py [new file with mode: 0644]
third_party/waf/waflib/extras/dcc.py [new file with mode: 0644]
third_party/waf/waflib/extras/distnet.py [new file with mode: 0644]
third_party/waf/waflib/extras/doxygen.py [new file with mode: 0644]
third_party/waf/waflib/extras/dpapi.py [new file with mode: 0644]
third_party/waf/waflib/extras/file_to_object.py [new file with mode: 0644]
third_party/waf/waflib/extras/freeimage.py [new file with mode: 0644]
third_party/waf/waflib/extras/fsb.py [new file with mode: 0644]
third_party/waf/waflib/extras/gccdeps.py [new file with mode: 0644]
third_party/waf/waflib/extras/go.py [new file with mode: 0644]
third_party/waf/waflib/extras/gob2.py [new file with mode: 0644]
third_party/waf/waflib/extras/halide.py [new file with mode: 0644]
third_party/waf/waflib/extras/local_rpath.py [new file with mode: 0644]
third_party/waf/waflib/extras/make.py [new file with mode: 0644]
third_party/waf/waflib/extras/md5_tstamp.py [new file with mode: 0644]
third_party/waf/waflib/extras/mem_reducer.py [new file with mode: 0644]
third_party/waf/waflib/extras/misc.py [moved from third_party/waf/wafadmin/Tools/misc.py with 70% similarity]
third_party/waf/waflib/extras/msvcdeps.py [new file with mode: 0644]
third_party/waf/waflib/extras/msvs.py [new file with mode: 0644]
third_party/waf/waflib/extras/netcache_client.py [new file with mode: 0644]
third_party/waf/waflib/extras/nobuild.py [new file with mode: 0644]
third_party/waf/waflib/extras/objcopy.py [new file with mode: 0644]
third_party/waf/waflib/extras/package.py [new file with mode: 0644]
third_party/waf/waflib/extras/parallel_debug.py [new file with mode: 0644]
third_party/waf/waflib/extras/pch.py [new file with mode: 0644]
third_party/waf/waflib/extras/pep8.py [new file with mode: 0644]
third_party/waf/waflib/extras/prefork.py [new file with mode: 0755]
third_party/waf/waflib/extras/preforkjava.py [new file with mode: 0644]
third_party/waf/waflib/extras/preforkunix.py [new file with mode: 0644]
third_party/waf/waflib/extras/print_commands.py [new file with mode: 0644]
third_party/waf/waflib/extras/proc.py [new file with mode: 0644]
third_party/waf/waflib/extras/protoc.py [new file with mode: 0644]
third_party/waf/waflib/extras/relocation.py [new file with mode: 0644]
third_party/waf/waflib/extras/remote.py [new file with mode: 0644]
third_party/waf/waflib/extras/review.py [new file with mode: 0644]
third_party/waf/waflib/extras/rst.py [new file with mode: 0644]
third_party/waf/waflib/extras/smart_continue.py [new file with mode: 0644]
third_party/waf/waflib/extras/stale.py [new file with mode: 0644]
third_party/waf/waflib/extras/stracedeps.py [new file with mode: 0644]
third_party/waf/waflib/extras/swig.py [new file with mode: 0644]
third_party/waf/waflib/extras/syms.py [new file with mode: 0644]
third_party/waf/waflib/extras/sync_exec.py [new file with mode: 0644]
third_party/waf/waflib/extras/unc.py [new file with mode: 0644]
third_party/waf/waflib/extras/unity.py [new file with mode: 0644]
third_party/waf/waflib/extras/use_config.py [new file with mode: 0644]
third_party/waf/waflib/extras/why.py [new file with mode: 0644]
third_party/waf/waflib/extras/win32_opts.py [new file with mode: 0644]
third_party/waf/waflib/fixpy2.py [new file with mode: 0644]
third_party/waf/waflib/processor.py [new file with mode: 0755]

diff --git a/third_party/waf/wafadmin/3rdparty/ParallelDebug.py b/third_party/waf/wafadmin/3rdparty/ParallelDebug.py
deleted file mode 100644 (file)
index 0ff580e..0000000
+++ /dev/null
@@ -1,297 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2007-2010 (ita)
-
-"""
-debugging helpers for parallel compilation, outputs
-a svg file in the build directory
-"""
-
-import os, time, sys, threading
-try: from Queue import Queue
-except: from queue import Queue
-import Runner, Options, Utils, Task, Logs
-from Constants import *
-
-#import random
-#random.seed(100)
-
-def set_options(opt):
-       opt.add_option('--dtitle', action='store', default='Parallel build representation for %r' % ' '.join(sys.argv),
-               help='title for the svg diagram', dest='dtitle')
-       opt.add_option('--dwidth', action='store', type='int', help='diagram width', default=1000, dest='dwidth')
-       opt.add_option('--dtime', action='store', type='float', help='recording interval in seconds', default=0.009, dest='dtime')
-       opt.add_option('--dband', action='store', type='int', help='band width', default=22, dest='dband')
-       opt.add_option('--dmaxtime', action='store', type='float', help='maximum time, for drawing fair comparisons', default=0, dest='dmaxtime')
-
-# red   #ff4d4d
-# green #4da74d
-# lila  #a751ff
-
-color2code = {
-       'GREEN'  : '#4da74d',
-       'YELLOW' : '#fefe44',
-       'PINK'   : '#a751ff',
-       'RED'    : '#cc1d1d',
-       'BLUE'   : '#6687bb',
-       'CYAN'   : '#34e2e2',
-
-}
-
-mp = {}
-info = [] # list of (text,color)
-
-def map_to_color(name):
-       if name in mp:
-               return mp[name]
-       try:
-               cls = Task.TaskBase.classes[name]
-       except KeyError:
-               return color2code['RED']
-       if cls.color in mp:
-               return mp[cls.color]
-       if cls.color in color2code:
-               return color2code[cls.color]
-       return color2code['RED']
-
-def loop(self):
-       while 1:
-               tsk=Runner.TaskConsumer.ready.get()
-               tsk.master.set_running(1, id(threading.currentThread()), tsk)
-               Runner.process_task(tsk)
-               tsk.master.set_running(-1, id(threading.currentThread()), tsk)
-Runner.TaskConsumer.loop = loop
-
-
-old_start = Runner.Parallel.start
-def do_start(self):
-        print Options.options
-       try:
-               Options.options.dband
-       except AttributeError:
-               raise ValueError('use def options(opt): opt.load("parallel_debug")!')
-
-       self.taskinfo = Queue()
-       old_start(self)
-       process_colors(self)
-Runner.Parallel.start = do_start
-
-def set_running(self, by, i, tsk):
-       self.taskinfo.put( (i, id(tsk), time.time(), tsk.__class__.__name__, self.processed, self.count, by)  )
-Runner.Parallel.set_running = set_running
-
-def name2class(name):
-       return name.replace(' ', '_').replace('.', '_')
-
-def process_colors(producer):
-       # first, cast the parameters
-       tmp = []
-       try:
-               while True:
-                       tup = producer.taskinfo.get(False)
-                       tmp.append(list(tup))
-       except:
-               pass
-
-       try:
-               ini = float(tmp[0][2])
-       except:
-               return
-
-       if not info:
-               seen = []
-               for x in tmp:
-                       name = x[3]
-                       if not name in seen:
-                               seen.append(name)
-                       else:
-                               continue
-
-                       info.append((name, map_to_color(name)))
-               info.sort(key=lambda x: x[0])
-
-       thread_count = 0
-       acc = []
-       for x in tmp:
-               thread_count += x[6]
-               acc.append("%d %d %f %r %d %d %d" % (x[0], x[1], x[2] - ini, x[3], x[4], x[5], thread_count))
-       f = open('pdebug.dat', 'w')
-       #Utils.write('\n'.join(acc))
-       f.write('\n'.join(acc))
-
-       tmp = [lst[:2] + [float(lst[2]) - ini] + lst[3:] for lst in tmp]
-
-       st = {}
-       for l in tmp:
-               if not l[0] in st:
-                       st[l[0]] = len(st.keys())
-       tmp = [  [st[lst[0]]] + lst[1:] for lst in tmp ]
-       THREAD_AMOUNT = len(st.keys())
-
-       st = {}
-       for l in tmp:
-               if not l[1] in st:
-                       st[l[1]] = len(st.keys())
-       tmp = [  [lst[0]] + [st[lst[1]]] + lst[2:] for lst in tmp ]
-
-
-       BAND = Options.options.dband
-
-       seen = {}
-       acc = []
-       for x in range(len(tmp)):
-               line = tmp[x]
-               id = line[1]
-
-               if id in seen:
-                       continue
-               seen[id] = True
-
-               begin = line[2]
-               thread_id = line[0]
-               for y in range(x + 1, len(tmp)):
-                       line = tmp[y]
-                       if line[1] == id:
-                               end = line[2]
-                               #print id, thread_id, begin, end
-                               #acc.append(  ( 10*thread_id, 10*(thread_id+1), 10*begin, 10*end ) )
-                               acc.append( (BAND * begin, BAND*thread_id, BAND*end - BAND*begin, BAND, line[3]) )
-                               break
-
-       if Options.options.dmaxtime < 0.1:
-               gwidth = 1
-               for x in tmp:
-                       m = BAND * x[2]
-                       if m > gwidth:
-                               gwidth = m
-       else:
-               gwidth = BAND * Options.options.dmaxtime
-
-       ratio = float(Options.options.dwidth) / gwidth
-       gwidth = Options.options.dwidth
-
-       gheight = BAND * (THREAD_AMOUNT + len(info) + 1.5)
-
-       out = []
-
-       out.append("""<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>
-<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.0//EN\"
-\"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd\">
-<svg xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" version=\"1.0\"
-   x=\"%r\" y=\"%r\" width=\"%r\" height=\"%r\"
-   id=\"svg602\" xml:space=\"preserve\">
-
-<style type='text/css' media='screen'>
-    g.over rect  { stroke:#FF0000; fill-opacity:0.4 }
-</style>
-
-<script type='text/javascript'><![CDATA[
-    var svg  = document.getElementsByTagName('svg')[0];
-    var svgNS = svg.getAttribute('xmlns');
-    svg.addEventListener('mouseover',function(e){
-      var g = e.target.parentNode;
-      var x = document.getElementById('r_'+g.id);
-      if (x) {
-         g.setAttribute('class', g.getAttribute('class')+' over');
-         x.setAttribute('class', x.getAttribute('class')+' over');
-         showInfo(e, g.id);
-      }
-    },false);
-    svg.addEventListener('mouseout',function(e){
-      var g = e.target.parentNode;
-      var x = document.getElementById('r_'+g.id);
-      if (x) {
-         g.setAttribute('class',g.getAttribute('class').replace(' over',''));
-         x.setAttribute('class',x.getAttribute('class').replace(' over',''));
-         hideInfo(e);
-      }
-    },false);
-
-function showInfo(evt, txt) {
-    tooltip = document.getElementById('tooltip');
-
-    var t = document.getElementById('tooltiptext');
-    t.firstChild.data = txt;
-
-    var x = evt.clientX+10;
-    if (x > 200) { x -= t.getComputedTextLength() + 16; }
-    var y = evt.clientY+30;
-    tooltip.setAttribute("transform", "translate(" + x + "," + y + ")");
-    tooltip.setAttributeNS(null,"visibility","visible");
-
-    var r = document.getElementById('tooltiprect');
-    r.setAttribute('width', t.getComputedTextLength()+6)
-}
-
-
-function hideInfo(evt) {
-    tooltip = document.getElementById('tooltip');
-    tooltip.setAttributeNS(null,"visibility","hidden");
-}
-
-]]></script>
-
-<!-- inkscape requires a big rectangle or it will not export the pictures properly -->
-<rect
-   x='%r' y='%r'
-   width='%r' height='%r' z-index='10'
-   style=\"font-size:10;fill:#ffffff;fill-opacity:0.01;fill-rule:evenodd;stroke:#ffffff;\"
-   />\n
-
-""" % (0, 0, gwidth + 4, gheight + 4,   0, 0, gwidth + 4, gheight + 4))
-
-       # main title
-       if Options.options.dtitle:
-               out.append("""<text x="%d" y="%d" style="font-size:15px; text-anchor:middle; font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans">%s</text>
-""" % (gwidth/2, gheight - 5, Options.options.dtitle))
-
-       # the rectangles
-       groups = {}
-       for (x, y, w, h, clsname) in acc:
-               try:
-                       groups[clsname].append((x, y, w, h))
-               except:
-                       groups[clsname] = [(x, y, w, h)]
-
-       for cls in groups:
-
-               out.append("<g id='%s'>\n" % name2class(cls))
-
-               for (x, y, w, h) in groups[cls]:
-                       out.append("""   <rect
-   x='%r' y='%r'
-   width='%r' height='%r' z-index='11'
-   style=\"font-size:10;fill:%s;fill-rule:evenodd;stroke:#000000;stroke-width:0.2px;\"
-   />\n""" % (2 + x*ratio, 2 + y, w*ratio, h, map_to_color(cls)))
-
-               out.append("</g>\n")
-
-       # output the caption
-       cnt = THREAD_AMOUNT
-
-       for (text, color) in info:
-               # caption box
-               b = BAND/2
-               out.append("""<g id='r_%s'><rect
-               x='%r' y='%r'
-               width='%r' height='%r'
-               style=\"font-size:10;fill:%s;fill-rule:evenodd;stroke:#000000;stroke-width:0.2px;\"
-  />\n""" %                       (name2class(text), 2 + BAND,     5 + (cnt + 0.5) * BAND, b, b, color))
-
-               # caption text
-               out.append("""<text
-   style="font-size:12px;font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans"
-   x="%r" y="%d">%s</text></g>\n""" % (2 + 2 * BAND, 5 + (cnt + 0.5) * BAND + 10, text))
-               cnt += 1
-
-       out.append("""
-<g transform="translate(0,0)" visibility="hidden" id="tooltip">
-  <rect id="tooltiprect" y="-15" x="-3" width="1" height="20" style="stroke:black;fill:#edefc2;stroke-width:1"/>
-  <text id="tooltiptext" style="font-family:Arial; font-size:12;fill:black;"> </text>
-</g>""")
-
-       out.append("\n</svg>")
-
-       #node = producer.bld.path.make_node('pdebug.svg')
-       f = open('pdebug.svg', 'w')
-       f.write("".join(out))
diff --git a/third_party/waf/wafadmin/3rdparty/batched_cc.py b/third_party/waf/wafadmin/3rdparty/batched_cc.py
deleted file mode 100644 (file)
index 7ed569c..0000000
+++ /dev/null
@@ -1,182 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006 (ita)
-
-"""
-Batched builds - compile faster
-instead of compiling object files one by one, c/c++ compilers are often able to compile at once:
-cc -c ../file1.c ../file2.c ../file3.c
-
-Files are output on the directory where the compiler is called, and dependencies are more difficult
-to track (do not run the command on all source files if only one file changes)
-
-As such, we do as if the files were compiled one by one, but no command is actually run:
-replace each cc/cpp Task by a TaskSlave
-A new task called TaskMaster collects the signatures from each slave and finds out the command-line
-to run.
-
-To set this up, the method ccroot::create_task is replaced by a new version, to enable batched builds
-it is only necessary to import this module in the configuration (no other change required)
-"""
-
-MAX_BATCH = 50
-MAXPARALLEL = False
-
-EXT_C = ['.c', '.cc', '.cpp', '.cxx']
-
-import os, threading
-import TaskGen, Task, ccroot, Build, Logs
-from TaskGen import extension, feature, before
-from Constants import *
-
-cc_str = '${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} -c ${SRCLST}'
-cc_fun = Task.compile_fun_noshell('batched_cc', cc_str)[0]
-
-cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${_CXXINCFLAGS} ${_CXXDEFFLAGS} -c ${SRCLST}'
-cxx_fun = Task.compile_fun_noshell('batched_cxx', cxx_str)[0]
-
-count = 70000
-class batch_task(Task.Task):
-       color = 'RED'
-
-       after = 'cc cxx'
-       before = 'cc_link cxx_link static_link'
-
-       def __str__(self):
-               return '(batch compilation for %d slaves)\n' % len(self.slaves)
-
-       def __init__(self, *k, **kw):
-               Task.Task.__init__(self, *k, **kw)
-               self.slaves = []
-               self.inputs = []
-               self.hasrun = 0
-
-               global count
-               count += 1
-               self.idx = count
-
-       def add_slave(self, slave):
-               self.slaves.append(slave)
-               self.set_run_after(slave)
-
-       def runnable_status(self):
-               for t in self.run_after:
-                       if not t.hasrun:
-                               return ASK_LATER
-
-               for t in self.slaves:
-                       #if t.executed:
-                       if t.hasrun != SKIPPED:
-                               return RUN_ME
-
-               return SKIP_ME
-
-       def run(self):
-               outputs = []
-               self.outputs = []
-
-               srclst = []
-               slaves = []
-               for t in self.slaves:
-                       if t.hasrun != SKIPPED:
-                               slaves.append(t)
-                               srclst.append(t.inputs[0].abspath(self.env))
-
-               self.env.SRCLST = srclst
-               self.cwd = slaves[0].inputs[0].parent.abspath(self.env)
-
-               env = self.env
-               app = env.append_unique
-               cpppath_st = env['CPPPATH_ST']
-               env._CCINCFLAGS = env.CXXINCFLAGS = []
-
-               # local flags come first
-               # set the user-defined includes paths
-               for i in env['INC_PATHS']:
-                       app('_CCINCFLAGS', cpppath_st % i.abspath())
-                       app('_CXXINCFLAGS', cpppath_st % i.abspath())
-                       app('_CCINCFLAGS', cpppath_st % i.abspath(env))
-                       app('_CXXINCFLAGS', cpppath_st % i.abspath(env))
-
-               # set the library include paths
-               for i in env['CPPPATH']:
-                       app('_CCINCFLAGS', cpppath_st % i)
-                       app('_CXXINCFLAGS', cpppath_st % i)
-
-               if self.slaves[0].__class__.__name__ == 'cc':
-                       ret = cc_fun(self)
-               else:
-                       ret = cxx_fun(self)
-
-               if ret:
-                       return ret
-
-               for t in slaves:
-                       t.old_post_run()
-
-from TaskGen import extension, feature, after
-
-import cc, cxx
-def wrap(fun):
-       def foo(self, node):
-               # we cannot control the extension, this sucks
-               self.obj_ext = '.o'
-
-               task = fun(self, node)
-               if not getattr(self, 'masters', None):
-                       self.masters = {}
-                       self.allmasters = []
-
-               if not node.parent.id in self.masters:
-                       m = self.masters[node.parent.id] = self.master = self.create_task('batch')
-                       self.allmasters.append(m)
-               else:
-                       m = self.masters[node.parent.id]
-                       if len(m.slaves) > MAX_BATCH:
-                               m = self.masters[node.parent.id] = self.master = self.create_task('batch')
-                               self.allmasters.append(m)
-
-               m.add_slave(task)
-               return task
-       return foo
-
-c_hook = wrap(cc.c_hook)
-extension(cc.EXT_CC)(c_hook)
-
-cxx_hook = wrap(cxx.cxx_hook)
-extension(cxx.EXT_CXX)(cxx_hook)
-
-
-@feature('cprogram', 'cshlib', 'cstaticlib')
-@after('apply_link')
-def link_after_masters(self):
-       if getattr(self, 'allmasters', None):
-               for m in self.allmasters:
-                       self.link_task.set_run_after(m)
-
-for c in ['cc', 'cxx']:
-       t = Task.TaskBase.classes[c]
-       def run(self):
-               pass
-
-       def post_run(self):
-               #self.executed=1
-               pass
-
-       def can_retrieve_cache(self):
-               if self.old_can_retrieve_cache():
-                       for m in self.generator.allmasters:
-                               try:
-                                       m.slaves.remove(self)
-                               except ValueError:
-                                       pass    #this task wasn't included in that master
-                       return 1
-               else:
-                       return None
-
-       setattr(t, 'oldrun', t.__dict__['run'])
-       setattr(t, 'run', run)
-       setattr(t, 'old_post_run', t.post_run)
-       setattr(t, 'post_run', post_run)
-       setattr(t, 'old_can_retrieve_cache', t.can_retrieve_cache)
-       setattr(t, 'can_retrieve_cache', can_retrieve_cache)
diff --git a/third_party/waf/wafadmin/3rdparty/boost.py b/third_party/waf/wafadmin/3rdparty/boost.py
deleted file mode 100644 (file)
index 1cbbf7e..0000000
+++ /dev/null
@@ -1,342 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-#
-# partially based on boost.py written by Gernot Vormayr
-# written by Ruediger Sonderfeld <ruediger@c-plusplus.de>, 2008
-# modified by Bjoern Michaelsen, 2008
-# modified by Luca Fossati, 2008
-# rewritten for waf 1.5.1, Thomas Nagy, 2008
-#
-#def set_options(opt):
-#      opt.tool_options('boost')
-#      # ...
-#
-#def configure(conf):
-#      # ... (e.g. conf.check_tool('g++'))
-#      conf.check_tool('boost')
-#   conf.check_boost(lib='signals filesystem', static='onlystatic', score_version=(-1000, 1000), tag_minscore=1000)
-#
-#def build(bld):
-#   bld(source='main.c', target='bar', uselib="BOOST BOOST_SYSTEM")
-#
-#ISSUES:
-# * find_includes should be called only once!
-# * support mandatory
-
-######## boost update ###########
-## ITA: * the method get_boost_version_number does work
-##      * the rest of the code has not really been tried
-#       * make certain a demo is provided (in demos/adv for example)
-
-# TODO: bad and underdocumented code -> boost.py will be removed in waf 1.6 to be rewritten later
-
-import os.path, glob, types, re, sys
-import Configure, config_c, Options, Utils, Logs
-from Logs import warn, debug
-from Configure import conf
-
-boost_code = '''
-#include <iostream>
-#include <boost/version.hpp>
-int main() { std::cout << BOOST_VERSION << std::endl; }
-'''
-
-boost_libpath = ['/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib']
-boost_cpppath = ['/usr/include', '/usr/local/include', '/opt/local/include', '/sw/include']
-
-STATIC_NOSTATIC = 'nostatic'
-STATIC_BOTH = 'both'
-STATIC_ONLYSTATIC = 'onlystatic'
-
-is_versiontag = re.compile('^\d+_\d+_?\d*$')
-is_threadingtag = re.compile('^mt$')
-is_abitag = re.compile('^[sgydpn]+$')
-is_toolsettag = re.compile('^(acc|borland|como|cw|dmc|darwin|gcc|hp_cxx|intel|kylix|vc|mgw|qcc|sun|vacpp)\d*$')
-is_pythontag=re.compile('^py[0-9]{2}$')
-
-def set_options(opt):
-       opt.add_option('--boost-includes', type='string', default='', dest='boostincludes', help='path to the boost directory where the includes are e.g. /usr/local/include/boost-1_35')
-       opt.add_option('--boost-libs', type='string', default='', dest='boostlibs', help='path to the directory where the boost libs are e.g. /usr/local/lib')
-
-def string_to_version(s):
-       version = s.split('.')
-       if len(version) < 3: return 0
-       return int(version[0])*100000 + int(version[1])*100 + int(version[2])
-
-def version_string(version):
-       major = version / 100000
-       minor = version / 100 % 1000
-       minor_minor = version % 100
-       if minor_minor == 0:
-               return "%d_%d" % (major, minor)
-       else:
-               return "%d_%d_%d" % (major, minor, minor_minor)
-
-def libfiles(lib, pattern, lib_paths):
-       result = []
-       for lib_path in lib_paths:
-               libname = pattern % ('boost_%s[!_]*' % lib)
-               result += glob.glob(os.path.join(lib_path, libname))
-       return result
-
-@conf
-def get_boost_version_number(self, dir):
-       """silently retrieve the boost version number"""
-       try:
-               return self.run_c_code(compiler='cxx', code=boost_code, includes=dir, execute=1, env=self.env.copy(), type='cprogram', compile_mode='cxx', compile_filename='test.cpp')
-       except Configure.ConfigurationError, e:
-               return -1
-
-def set_default(kw, var, val):
-       if not var in kw:
-               kw[var] = val
-
-def tags_score(tags, kw):
-       """
-       checks library tags
-
-       see http://www.boost.org/doc/libs/1_35_0/more/getting_started/unix-variants.html 6.1
-       """
-       score = 0
-       needed_tags = {
-               'threading': kw['tag_threading'],
-               'abi':       kw['tag_abi'],
-               'toolset':   kw['tag_toolset'],
-               'version':   kw['tag_version'],
-               'python':    kw['tag_python']
-       }
-
-       if kw['tag_toolset'] is None:
-               v = kw['env']
-               toolset = v['CXX_NAME']
-               if v['CXX_VERSION']:
-                       version_no = v['CXX_VERSION'].split('.')
-                       toolset += version_no[0]
-                       if len(version_no) > 1:
-                               toolset += version_no[1]
-               needed_tags['toolset'] = toolset
-
-       found_tags = {}
-       for tag in tags:
-               if is_versiontag.match(tag): found_tags['version'] = tag
-               if is_threadingtag.match(tag): found_tags['threading'] = tag
-               if is_abitag.match(tag): found_tags['abi'] = tag
-               if is_toolsettag.match(tag): found_tags['toolset'] = tag
-               if is_pythontag.match(tag): found_tags['python'] = tag
-
-       for tagname in needed_tags.iterkeys():
-               if needed_tags[tagname] is not None and tagname in found_tags:
-                       if re.compile(needed_tags[tagname]).match(found_tags[tagname]):
-                               score += kw['score_' + tagname][0]
-                       else:
-                               score += kw['score_' + tagname][1]
-       return score
-
-@conf
-def validate_boost(self, kw):
-       ver = kw.get('version', '')
-
-       for x in 'min_version max_version version'.split():
-               set_default(kw, x, ver)
-
-       set_default(kw, 'lib', '')
-       kw['lib'] = Utils.to_list(kw['lib'])
-
-       set_default(kw, 'env', self.env)
-
-       set_default(kw, 'libpath', boost_libpath)
-       set_default(kw, 'cpppath', boost_cpppath)
-
-       for x in 'tag_threading tag_version tag_toolset'.split():
-               set_default(kw, x, None)
-       set_default(kw, 'tag_abi', '^[^d]*$')
-
-       set_default(kw, 'python', str(sys.version_info[0]) + str(sys.version_info[1]) )
-       set_default(kw, 'tag_python', '^py' + kw['python'] + '$')
-
-       set_default(kw, 'score_threading', (10, -10))
-       set_default(kw, 'score_abi', (10, -10))
-       set_default(kw, 'score_python', (10,-10))
-       set_default(kw, 'score_toolset', (1, -1))
-       set_default(kw, 'score_version', (100, -100))
-
-       set_default(kw, 'score_min', 0)
-       set_default(kw, 'static', STATIC_NOSTATIC)
-       set_default(kw, 'found_includes', False)
-       set_default(kw, 'min_score', 0)
-
-       set_default(kw, 'errmsg', 'not found')
-       set_default(kw, 'okmsg', 'ok')
-
-@conf
-def find_boost_includes(self, kw):
-       """
-       check every path in kw['cpppath'] for subdir
-       that either starts with boost- or is named boost.
-
-       Then the version is checked and selected accordingly to
-       min_version/max_version. The highest possible version number is
-       selected!
-
-       If no versiontag is set the versiontag is set accordingly to the
-       selected library and CPPPATH_BOOST is set.
-       """
-       boostPath = getattr(Options.options, 'boostincludes', '')
-       if boostPath:
-               boostPath = [os.path.normpath(os.path.expandvars(os.path.expanduser(boostPath)))]
-       else:
-               boostPath = Utils.to_list(kw['cpppath'])
-
-       min_version = string_to_version(kw.get('min_version', ''))
-       max_version = string_to_version(kw.get('max_version', '')) or (sys.maxint - 1)
-
-       version = 0
-       for include_path in boostPath:
-               boost_paths = [p for p in glob.glob(os.path.join(include_path, 'boost*')) if os.path.isdir(p)]
-               debug('BOOST Paths: %r' % boost_paths)
-               for path in boost_paths:
-                       pathname = os.path.split(path)[-1]
-                       ret = -1
-                       if pathname == 'boost':
-                               path = include_path
-                               ret = self.get_boost_version_number(path)
-                       elif pathname.startswith('boost-'):
-                               ret = self.get_boost_version_number(path)
-                       ret = int(ret)
-
-                       if ret != -1 and ret >= min_version and ret <= max_version and ret > version:
-                               boost_path = path
-                               version = ret
-       if not version:
-               self.fatal('boost headers not found! (required version min: %s max: %s)'
-                         % (kw['min_version'], kw['max_version']))
-               return False
-
-       found_version = version_string(version)
-       versiontag = '^' + found_version + '$'
-       if kw['tag_version'] is None:
-               kw['tag_version'] = versiontag
-       elif kw['tag_version'] != versiontag:
-               warn('boost header version %r and tag_version %r do not match!' % (versiontag, kw['tag_version']))
-       env = self.env
-       env['CPPPATH_BOOST'] = boost_path
-       env['BOOST_VERSION'] = found_version
-       self.found_includes = 1
-       ret = 'Version %s (%s)' % (found_version, boost_path)
-       return ret
-
-@conf
-def find_boost_library(self, lib, kw):
-
-       def find_library_from_list(lib, files):
-               lib_pattern = re.compile('.*boost_(.*?)\..*')
-               result = (None, None)
-               resultscore = kw['min_score'] - 1
-               for file in files:
-                       m = lib_pattern.search(file, 1)
-                       if m:
-                               libname = m.group(1)
-                               libtags = libname.split('-')[1:]
-                               currentscore = tags_score(libtags, kw)
-                               if currentscore > resultscore:
-                                       result = (libname, file)
-                                       resultscore = currentscore
-               return result
-
-       lib_paths = getattr(Options.options, 'boostlibs', '')
-       if lib_paths:
-               lib_paths = [os.path.normpath(os.path.expandvars(os.path.expanduser(lib_paths)))]
-       else:
-               lib_paths = Utils.to_list(kw['libpath'])
-
-       v = kw.get('env', self.env)
-
-       (libname, file) = (None, None)
-       if kw['static'] in [STATIC_NOSTATIC, STATIC_BOTH]:
-               st_env_prefix = 'LIB'
-               files = libfiles(lib, v['shlib_PATTERN'], lib_paths)
-               (libname, file) = find_library_from_list(lib, files)
-       if libname is None and kw['static'] in [STATIC_ONLYSTATIC, STATIC_BOTH]:
-               st_env_prefix = 'STATICLIB'
-               staticLibPattern = v['staticlib_PATTERN']
-               if self.env['CC_NAME'] == 'msvc':
-                       staticLibPattern = 'lib' + staticLibPattern
-               files = libfiles(lib, staticLibPattern, lib_paths)
-               (libname, file) = find_library_from_list(lib, files)
-       if libname is not None:
-               v['LIBPATH_BOOST_' + lib.upper()] = [os.path.split(file)[0]]
-               if self.env['CC_NAME'] == 'msvc' and os.path.splitext(file)[1] == '.lib':
-                       v[st_env_prefix + '_BOOST_' + lib.upper()] = ['libboost_'+libname]
-               else:
-                       v[st_env_prefix + '_BOOST_' + lib.upper()] = ['boost_'+libname]
-               return
-       self.fatal('lib boost_' + lib + ' not found!')
-
-@conf
-def check_boost(self, *k, **kw):
-       """
-       This should be the main entry point
-
-- min_version
-- max_version
-- version
-- include_path
-- lib_path
-- lib
-- toolsettag   - None or a regexp
-- threadingtag - None or a regexp
-- abitag       - None or a regexp
-- versiontag   - WARNING: you should rather use version or min_version/max_version
-- static       - look for static libs (values:
-         'nostatic'   or STATIC_NOSTATIC   - ignore static libs (default)
-         'both'       or STATIC_BOTH       - find static libs, too
-         'onlystatic' or STATIC_ONLYSTATIC - find only static libs
-- score_version
-- score_abi
-- scores_threading
-- score_toolset
- * the scores are tuples (match_score, nomatch_score)
-   match_score is the added to the score if the tag is matched
-   nomatch_score is added when a tag is found and does not match
-- min_score
-       """
-
-       if not self.env['CXX']:
-               self.fatal('load a c++ compiler tool first, for example conf.check_tool("g++")')
-       self.validate_boost(kw)
-       ret = None
-       try:
-               if not kw.get('found_includes', None):
-                       self.check_message_1(kw.get('msg_includes', 'boost headers'))
-                       ret = self.find_boost_includes(kw)
-
-       except Configure.ConfigurationError, e:
-               if 'errmsg' in kw:
-                       self.check_message_2(kw['errmsg'], 'YELLOW')
-               if 'mandatory' in kw:
-                       if Logs.verbose > 1:
-                               raise
-                       else:
-                               self.fatal('the configuration failed (see %r)' % self.log.name)
-       else:
-               if 'okmsg' in kw:
-                       self.check_message_2(kw.get('okmsg_includes', ret))
-
-       for lib in kw['lib']:
-               self.check_message_1('library boost_'+lib)
-               try:
-                       self.find_boost_library(lib, kw)
-               except Configure.ConfigurationError, e:
-                       ret = False
-                       if 'errmsg' in kw:
-                               self.check_message_2(kw['errmsg'], 'YELLOW')
-                       if 'mandatory' in kw:
-                               if Logs.verbose > 1:
-                                       raise
-                               else:
-                                       self.fatal('the configuration failed (see %r)' % self.log.name)
-               else:
-                       if 'okmsg' in kw:
-                               self.check_message_2(kw['okmsg'])
-
-       return ret
diff --git a/third_party/waf/wafadmin/3rdparty/build_file_tracker.py b/third_party/waf/wafadmin/3rdparty/build_file_tracker.py
deleted file mode 100644 (file)
index 5fc7358..0000000
+++ /dev/null
@@ -1,53 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2015
-
-"""
-Force tasks to use file timestamps to force partial rebuilds when touch-ing build files
-
-touch out/libfoo.a
-... rebuild what depends on libfoo.a
-
-to use::
-    def options(opt):
-        opt.tool_options('build_file_tracker')
-"""
-
-import os
-import Task, Utils
-
-def signature(self):
-       try: return self.cache_sig[0]
-       except AttributeError: pass
-
-       self.m = Utils.md5()
-
-       # explicit deps
-       exp_sig = self.sig_explicit_deps()
-
-       # env vars
-       var_sig = self.sig_vars()
-
-       # implicit deps
-       imp_sig = Task.SIG_NIL
-       if self.scan:
-               try:
-                       imp_sig = self.sig_implicit_deps()
-               except ValueError:
-                       return self.signature()
-
-       # timestamp dependency on build files only (source files are hashed)
-       buf = []
-       for k in self.inputs + getattr(self, 'dep_nodes', []) + self.generator.bld.node_deps.get(self.unique_id(), []):
-               if k.id & 3 == 3:
-                       t = os.stat(k.abspath(self.env)).st_mtime
-                       buf.append(t)
-       self.m.update(str(buf))
-
-       # we now have the signature (first element) and the details (for debugging)
-       ret = self.m.digest()
-       self.cache_sig = (ret, exp_sig, imp_sig, var_sig)
-       return ret
-
-Task.Task.signature_bak = Task.Task.signature # unused, kept just in case
-Task.Task.signature = signature # overridden
diff --git a/third_party/waf/wafadmin/3rdparty/fluid.py b/third_party/waf/wafadmin/3rdparty/fluid.py
deleted file mode 100644 (file)
index c858fe3..0000000
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/python
-# encoding: utf-8
-# Grygoriy Fuchedzhy 2009
-
-"""
-Compile fluid files (fltk graphic library). Use the 'fluid' feature in conjuction with the 'cxx' feature.
-"""
-
-import Task
-from TaskGen import extension
-
-Task.simple_task_type('fluid', '${FLUID} -c -o ${TGT[0].abspath(env)} -h ${TGT[1].abspath(env)} ${SRC}', 'BLUE', shell=False, ext_out='.cxx')
-
-@extension('.fl')
-def fluid(self, node):
-       """add the .fl to the source list; the cxx file generated will be compiled when possible"""
-       cpp = node.change_ext('.cpp')
-       hpp = node.change_ext('.hpp')
-       self.create_task('fluid', node, [cpp, hpp])
-
-       if 'cxx' in self.features:
-               self.allnodes.append(cpp)
-
-def detect(conf):
-    fluid = conf.find_program('fluid', var='FLUID', mandatory=True)
-    conf.check_cfg(path='fltk-config', package='', args='--cxxflags --ldflags', uselib_store='FLTK', mandatory=True)
diff --git a/third_party/waf/wafadmin/3rdparty/gccdeps.py b/third_party/waf/wafadmin/3rdparty/gccdeps.py
deleted file mode 100644 (file)
index 55cd515..0000000
+++ /dev/null
@@ -1,127 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2008-2010 (ita)
-
-"""
-Execute the tasks with gcc -MD, read the dependencies from the .d file
-and prepare the dependency calculation for the next run
-"""
-
-import os, re, threading
-import Task, Logs, Utils, preproc
-from TaskGen import before, after, feature
-
-lock = threading.Lock()
-
-preprocessor_flag = '-MD'
-
-@feature('cc', 'c')
-@before('apply_core')
-def add_mmd_cc(self):
-       if self.env.get_flat('CCFLAGS').find(preprocessor_flag) < 0:
-               self.env.append_value('CCFLAGS', preprocessor_flag)
-
-@feature('cxx')
-@before('apply_core')
-def add_mmd_cxx(self):
-       if self.env.get_flat('CXXFLAGS').find(preprocessor_flag) < 0:
-               self.env.append_value('CXXFLAGS', preprocessor_flag)
-
-def scan(self):
-       "the scanner does not do anything initially"
-       nodes = self.generator.bld.node_deps.get(self.unique_id(), [])
-       names = []
-       return (nodes, names)
-
-re_o = re.compile("\.o$")
-re_src = re.compile("^(\.\.)[\\/](.*)$")
-
-def post_run(self):
-       # The following code is executed by threads, it is not safe, so a lock is needed...
-
-       if getattr(self, 'cached', None):
-               return Task.Task.post_run(self)
-
-       name = self.outputs[0].abspath(self.env)
-       name = re_o.sub('.d', name)
-       txt = Utils.readf(name)
-       #os.unlink(name)
-
-       txt = txt.replace('\\\n', '')
-
-       lst = txt.strip().split(':')
-       val = ":".join(lst[1:])
-       val = val.split()
-
-       nodes = []
-       bld = self.generator.bld
-
-       f = re.compile("^("+self.env.variant()+"|\.\.)[\\/](.*)$")
-       for x in val:
-               if os.path.isabs(x):
-
-                       if not preproc.go_absolute:
-                               continue
-
-                       lock.acquire()
-                       try:
-                               node = bld.root.find_resource(x)
-                       finally:
-                               lock.release()
-               else:
-                       g = re.search(re_src, x)
-                       if g:
-                               x = g.group(2)
-                               lock.acquire()
-                               try:
-                                       node = bld.bldnode.parent.find_resource(x)
-                               finally:
-                                       lock.release()
-                       else:
-                               g = re.search(f, x)
-                               if g:
-                                       x = g.group(2)
-                                       lock.acquire()
-                                       try:
-                                               node = bld.srcnode.find_resource(x)
-                                       finally:
-                                               lock.release()
-
-               if id(node) == id(self.inputs[0]):
-                       # ignore the source file, it is already in the dependencies
-                       # this way, successful config tests may be retrieved from the cache
-                       continue
-
-               if not node:
-                       raise ValueError('could not find %r for %r' % (x, self))
-               else:
-                       nodes.append(node)
-
-       Logs.debug('deps: real scanner for %s returned %s' % (str(self), str(nodes)))
-
-       bld.node_deps[self.unique_id()] = nodes
-       bld.raw_deps[self.unique_id()] = []
-
-       try:
-               del self.cache_sig
-       except:
-               pass
-
-       Task.Task.post_run(self)
-
-import Constants, Utils
-def sig_implicit_deps(self):
-       try:
-               return Task.Task.sig_implicit_deps(self)
-       except Utils.WafError:
-               return Constants.SIG_NIL
-
-for name in 'cc cxx'.split():
-       try:
-               cls = Task.TaskBase.classes[name]
-       except KeyError:
-               pass
-       else:
-               cls.post_run = post_run
-               cls.scan = scan
-               cls.sig_implicit_deps = sig_implicit_deps
diff --git a/third_party/waf/wafadmin/3rdparty/go.py b/third_party/waf/wafadmin/3rdparty/go.py
deleted file mode 100644 (file)
index f8397c7..0000000
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# go.py - Waf tool for the Go programming language
-# By: Tom Wambold <tom5760@gmail.com>
-
-import platform, os
-
-import Task
-import Utils
-from TaskGen import feature, extension, after
-
-Task.simple_task_type('gocompile', '${GOC} ${GOCFLAGS} -o ${TGT} ${SRC}', shell=False)
-Task.simple_task_type('gopack', '${GOP} grc ${TGT} ${SRC}', shell=False)
-Task.simple_task_type('golink', '${GOL} ${GOLFLAGS} -o ${TGT} ${SRC}', shell=False)
-
-def detect(conf):
-
-       def set_def(var, val):
-               if not conf.env[var]:
-                       conf.env[var] = val
-
-       goarch = os.getenv("GOARCH")
-
-       if goarch == '386':
-               set_def('GO_PLATFORM', 'i386')
-       elif goarch == 'amd64':
-               set_def('GO_PLATFORM', 'x86_64')
-       elif goarch == 'arm':
-               set_def('GO_PLATFORM', 'arm')
-       else:
-               set_def('GO_PLATFORM', platform.machine())
-
-       if conf.env.GO_PLATFORM == 'x86_64':
-               set_def('GO_COMPILER', '6g')
-               set_def('GO_LINKER', '6l')
-               set_def('GO_EXTENSION', '.6')
-       elif conf.env.GO_PLATFORM in ['i386', 'i486', 'i586', 'i686']:
-               set_def('GO_COMPILER', '8g')
-               set_def('GO_LINKER', '8l')
-               set_def('GO_EXTENSION', '.8')
-       elif conf.env.GO_PLATFORM == 'arm':
-               set_def('GO_COMPILER', '5g')
-               set_def('GO_LINKER', '5l')
-               set_def('GO_EXTENSION', '.5')
-
-       if not (conf.env.GO_COMPILER or conf.env.GO_LINKER or conf.env.GO_EXTENSION):
-               raise conf.fatal('Unsupported platform ' + platform.machine())
-
-       set_def('GO_PACK', 'gopack')
-       set_def('GO_PACK_EXTENSION', '.a')
-
-       conf.find_program(conf.env.GO_COMPILER, var='GOC', mandatory=True)
-       conf.find_program(conf.env.GO_LINKER,   var='GOL', mandatory=True)
-       conf.find_program(conf.env.GO_PACK,     var='GOP', mandatory=True)
-       conf.find_program('cgo',                var='CGO', mandatory=True)
-
-@extension('.go')
-def compile_go(self, node):
-       try:
-               self.go_nodes.append(node)
-       except AttributeError:
-               self.go_nodes = [node]
-
-@feature('go')
-@after('apply_core')
-def apply_compile_go(self):
-       try:
-               nodes = self.go_nodes
-       except AttributeError:
-               self.go_compile_task = None
-       else:
-               self.go_compile_task = self.create_task('gocompile',
-                       nodes,
-                       [self.path.find_or_declare(self.target + self.env.GO_EXTENSION)])
-
-@feature('gopackage', 'goprogram')
-@after('apply_compile_go')
-def apply_goinc(self):
-       if not getattr(self, 'go_compile_task', None):
-               return
-
-       names = self.to_list(getattr(self, 'uselib_local', []))
-       for name in names:
-               obj = self.name_to_obj(name)
-               if not obj:
-                       raise Utils.WafError('object %r was not found in uselib_local '
-                                       '(required by %r)' % (lib_name, self.name))
-               obj.post()
-               self.go_compile_task.set_run_after(obj.go_package_task)
-               self.go_compile_task.dep_nodes.extend(obj.go_package_task.outputs)
-               self.env.append_unique('GOCFLAGS', '-I' + obj.path.abspath(obj.env))
-               self.env.append_unique('GOLFLAGS', '-L' + obj.path.abspath(obj.env))
-
-@feature('gopackage')
-@after('apply_goinc')
-def apply_gopackage(self):
-       self.go_package_task = self.create_task('gopack',
-                       self.go_compile_task.outputs[0],
-                       self.path.find_or_declare(self.target + self.env.GO_PACK_EXTENSION))
-       self.go_package_task.set_run_after(self.go_compile_task)
-       self.go_package_task.dep_nodes.extend(self.go_compile_task.outputs)
-
-@feature('goprogram')
-@after('apply_goinc')
-def apply_golink(self):
-       self.go_link_task = self.create_task('golink',
-                       self.go_compile_task.outputs[0],
-                       self.path.find_or_declare(self.target))
-       self.go_link_task.set_run_after(self.go_compile_task)
-       self.go_link_task.dep_nodes.extend(self.go_compile_task.outputs)
diff --git a/third_party/waf/wafadmin/3rdparty/lru_cache.py b/third_party/waf/wafadmin/3rdparty/lru_cache.py
deleted file mode 100644 (file)
index 96f0e6c..0000000
+++ /dev/null
@@ -1,96 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy 2011
-
-import os, shutil, re
-import Options, Build, Logs
-
-"""
-Apply a least recently used policy to the Waf cache.
-
-For performance reasons, it is called after the build is complete.
-
-We assume that the the folders are written atomically
-
-Do export WAFCACHE=/tmp/foo-xyz where xyz represents the cache size in megabytes
-If missing, the default cache size will be set to 10GB
-"""
-
-re_num = re.compile('[a-zA-Z_]+(\d+)')
-
-CACHESIZE = 10*1024*1024*1024 # in bytes
-CLEANRATIO = 0.8
-DIRSIZE = 4096
-
-def compile(self):
-       if Options.cache_global and not Options.options.nocache:
-               try:
-                       os.makedirs(Options.cache_global)
-               except:
-                       pass
-
-       try:
-               self.raw_compile()
-       finally:
-               if Options.cache_global and not Options.options.nocache:
-                       self.sweep()
-
-def sweep(self):
-       global CACHESIZE
-       CACHEDIR = Options.cache_global
-
-       # get the cache max size from the WAFCACHE filename
-       re_num = re.compile('[a-zA-Z_]+(\d+)')
-       val = re_num.sub('\\1', os.path.basename(Options.cache_global))
-       try:
-               CACHESIZE = int(val)
-       except:
-               pass
-
-       # map folder names to timestamps
-       flist = {}
-       for x in os.listdir(CACHEDIR):
-               j = os.path.join(CACHEDIR, x)
-               if os.path.isdir(j) and len(x) == 32: # dir names are md5 hexdigests
-                       flist[x] = [os.stat(j).st_mtime, 0]
-
-       for (x, v) in flist.items():
-               cnt = DIRSIZE # each entry takes 4kB
-               d = os.path.join(CACHEDIR, x)
-               for k in os.listdir(d):
-                       cnt += os.stat(os.path.join(d, k)).st_size
-               flist[x][1] = cnt
-
-       total = sum([x[1] for x in flist.values()])
-       Logs.debug('lru: Cache size is %r' % total)
-
-       if total >= CACHESIZE:
-               Logs.debug('lru: Trimming the cache since %r > %r' % (total, CACHESIZE))
-
-               # make a list to sort the folders by timestamp
-               lst = [(p, v[0], v[1]) for (p, v) in flist.items()]
-               lst.sort(key=lambda x: x[1]) # sort by timestamp
-               lst.reverse()
-
-               while total >= CACHESIZE * CLEANRATIO:
-                       (k, t, s) = lst.pop()
-                       p = os.path.join(CACHEDIR, k)
-                       v = p + '.del'
-                       try:
-                               os.rename(p, v)
-                       except:
-                               # someone already did it
-                               pass
-                       else:
-                               try:
-                                       shutil.rmtree(v)
-                               except:
-                                       # this should not happen, but who knows?
-                                       Logs.warn('If you ever see this message, report it (%r)' % v)
-                       total -= s
-                       del flist[k]
-       Logs.debug('lru: Total at the end %r' % total)
-
-Build.BuildContext.raw_compile = Build.BuildContext.compile
-Build.BuildContext.compile = compile
-Build.BuildContext.sweep = sweep
diff --git a/third_party/waf/wafadmin/3rdparty/paranoid.py b/third_party/waf/wafadmin/3rdparty/paranoid.py
deleted file mode 100644 (file)
index 13dfb68..0000000
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# ita 2010
-
-import Logs, Utils, Build, Task
-
-def say(txt):
-       Logs.warn("^o^: %s" % txt)
-
-try:
-       ret = Utils.cmd_output('which cowsay 2> /dev/null').strip()
-except Exception, e:
-       pass
-else:
-       def say(txt):
-               f = Utils.cmd_output([ret, txt])
-               Utils.pprint('PINK', f)
-
-say('you make the errors, we detect them')
-
-def check_task_classes(self):
-       for x in Task.TaskBase.classes:
-               if isinstance(x, Task.Task):
-                       if not getattr(x, 'ext_in', None) or getattr(x, 'before', None):
-                               say('class %s has no precedence constraints (ext_in/before)')
-                       if not getattr(x, 'ext_out', None) or getattr(x, 'after', None):
-                               say('class %s has no precedence constraints (ext_out/after)')
-
-comp = Build.BuildContext.compile
-def compile(self):
-       if not getattr(self, 'magic', None):
-               check_task_classes(self)
-       return comp(self)
-Build.BuildContext.compile = compile
diff --git a/third_party/waf/wafadmin/3rdparty/prefork.py b/third_party/waf/wafadmin/3rdparty/prefork.py
deleted file mode 100755 (executable)
index 88fb4e4..0000000
+++ /dev/null
@@ -1,275 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2015 (ita)
-#
-# prefer the waf 1.8 version
-
-"""
-The full samba build can be faster by ~10%, but there are a few limitations:
-* only one build process should be run at a time as the servers would use the same ports
-* only one build command is going to be called ("waf build configure build" would not work)
-
-def build(bld):
-
-    mod = Utils.load_tool('prefork')
-    mod.build(bld)
-    ...
-    (build declarations after)
-"""
-
-import os, re, socket, threading, sys, subprocess, time, atexit, traceback
-try:
-       import SocketServer
-except ImportError:
-       import socketserver as SocketServer
-try:
-       from queue import Queue
-except ImportError:
-       from Queue import Queue
-try:
-       import cPickle
-except ImportError:
-       import pickle as cPickle
-
-DEFAULT_PORT = 51200
-
-HEADER_SIZE = 128
-
-REQ = 'REQ'
-RES = 'RES'
-BYE = 'BYE'
-
-def make_header(params):
-       header = ','.join(params)
-       if sys.hexversion > 0x3000000:
-               header = header.encode('iso8859-1')
-       header = header.ljust(HEADER_SIZE)
-       assert(len(header) == HEADER_SIZE)
-       return header
-
-
-re_valid_query = re.compile('^[a-zA-Z0-9_, ]+$')
-class req(SocketServer.StreamRequestHandler):
-       def handle(self):
-               while 1:
-                       try:
-                               self.process_command()
-                       except Exception as e:
-                               print(e)
-                               break
-
-       def process_command(self):
-               query = self.rfile.read(HEADER_SIZE)
-               if not query:
-                       return
-               #print(len(query))
-               assert(len(query) == HEADER_SIZE)
-               if sys.hexversion > 0x3000000:
-                       query = query.decode('iso8859-1')
-               #print "%r" % query
-               if not re_valid_query.match(query):
-                       raise ValueError('Invalid query %r' % query)
-
-               query = query.strip().split(',')
-
-               if query[0] == REQ:
-                       self.run_command(query[1:])
-               elif query[0] == BYE:
-                       raise ValueError('Exit')
-               else:
-                       raise ValueError('Invalid query %r' % query)
-
-       def run_command(self, query):
-
-               size = int(query[0])
-               data = self.rfile.read(size)
-               assert(len(data) == size)
-               kw = cPickle.loads(data)
-
-               # run command
-               ret = out = err = exc = None
-               cmd = kw['cmd']
-               del kw['cmd']
-               #print(cmd)
-
-               try:
-                       if kw['stdout'] or kw['stderr']:
-                               p = subprocess.Popen(cmd, **kw)
-                               (out, err) = p.communicate()
-                               ret = p.returncode
-                       else:
-                               ret = subprocess.Popen(cmd, **kw).wait()
-               except Exception as e:
-                       ret = -1
-                       exc = str(e) + traceback.format_exc()
-
-               # write the results
-               if out or err or exc:
-                       data = (out, err, exc)
-                       data = cPickle.dumps(data, -1)
-               else:
-                       data = ''
-
-               params = [RES, str(ret), str(len(data))]
-
-               self.wfile.write(make_header(params))
-
-               if data:
-                       self.wfile.write(data)
-
-def create_server(conn, cls):
-       #SocketServer.ThreadingTCPServer.allow_reuse_address = True
-       #server = SocketServer.ThreadingTCPServer(conn, req)
-
-       SocketServer.TCPServer.allow_reuse_address = True
-       server = SocketServer.TCPServer(conn, req)
-       #server.timeout = 6000 # seconds
-       server.serve_forever(poll_interval=0.001)
-
-if __name__ == '__main__':
-       if len(sys.argv) > 1:
-               port = int(sys.argv[1])
-       else:
-               port = DEFAULT_PORT
-       #conn = (socket.gethostname(), port)
-       conn = ("127.0.0.1", port)
-       #print("listening - %r %r\n" % conn)
-       create_server(conn, req)
-else:
-
-       import Runner, Utils
-
-       def init_task_pool(self):
-               # lazy creation, and set a common pool for all task consumers
-               pool = self.pool = []
-               for i in range(self.numjobs):
-                       consumer = Runner.get_pool()
-                       pool.append(consumer)
-                       consumer.idx = i
-               self.ready = Queue(0)
-               def setq(consumer):
-                       consumer.ready = self.ready
-                       try:
-                               threading.current_thread().idx = consumer.idx
-                       except Exception as e:
-                               print(e)
-               for x in pool:
-                       x.ready.put(setq)
-               return pool
-       Runner.Parallel.init_task_pool = init_task_pool
-
-       PORT = 51200
-
-       def make_server(idx):
-               port = PORT + idx
-               cmd = [sys.executable, os.path.abspath(__file__), str(port)]
-               proc = subprocess.Popen(cmd)
-               proc.port = port
-               return proc
-
-       def make_conn(srv):
-               #port = PORT + idx
-               port = srv.port
-               conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-               conn.connect(('127.0.0.1', port))
-               return conn
-
-       SERVERS = []
-       CONNS = []
-       def close_all():
-               while CONNS:
-                       conn = CONNS.pop()
-                       try:
-                               conn.close()
-                       except:
-                               pass
-               while SERVERS:
-                       srv = SERVERS.pop()
-                       try:
-                               srv.kill()
-                       except:
-                               pass
-       atexit.register(close_all)
-
-       def put_data(conn, data):
-               conn.send(data)
-
-       def read_data(conn, siz):
-               ret = conn.recv(siz)
-               if not ret:
-                       print("closed connection?")
-
-               assert(len(ret) == siz)
-               return ret
-
-       def exec_command(cmd, **kw):
-               if 'log' in kw:
-                       log = kw['log']
-                       kw['stdout'] = kw['stderr'] = subprocess.PIPE
-                       del(kw['log'])
-               else:
-                       kw['stdout'] = kw['stderr'] = None
-               kw['shell'] = isinstance(cmd, str)
-
-               idx = threading.current_thread().idx
-               kw['cmd'] = cmd
-
-               data = cPickle.dumps(kw, -1)
-               params = [REQ, str(len(data))]
-               header = make_header(params)
-
-               conn = CONNS[idx]
-
-               put_data(conn, header)
-               put_data(conn, data)
-
-               data = read_data(conn, HEADER_SIZE)
-               if sys.hexversion > 0x3000000:
-                       data = data.decode('iso8859-1')
-
-               lst = data.split(',')
-               ret = int(lst[1])
-               dlen = int(lst[2])
-
-               out = err = None
-               if dlen:
-                       data = read_data(conn, dlen)
-                       (out, err, exc) = cPickle.loads(data)
-                       if exc:
-                               raise Utils.WafError('Execution failure: %s' % exc)
-
-               if out:
-                       log.write(out)
-               if err:
-                       log.write(err)
-
-               return ret
-
-       def __init__(self):
-               threading.Thread.__init__(self)
-
-               # identifier of the current thread
-               self.idx = len(SERVERS)
-
-               # create a server and wait for the connection
-               srv = make_server(self.idx)
-               SERVERS.append(srv)
-
-               conn = None
-               for x in range(30):
-                       try:
-                               conn = make_conn(srv)
-                               break
-                       except socket.error:
-                               time.sleep(0.01)
-               if not conn:
-                       raise ValueError('Could not start the server!')
-               CONNS.append(conn)
-
-               self.setDaemon(1)
-               self.start()
-       Runner.TaskConsumer.__init__ = __init__
-
-       def build(bld):
-               # dangerous, there is no other command hopefully
-               Utils.exec_command = exec_command
diff --git a/third_party/waf/wafadmin/3rdparty/print_commands.py b/third_party/waf/wafadmin/3rdparty/print_commands.py
deleted file mode 100644 (file)
index 3b12aa3..0000000
+++ /dev/null
@@ -1,25 +0,0 @@
-#! /usr/bin/env python
-
-"""
-In this case, print the commands being executed as strings
-(the commands are usually lists, so this can be misleading)
-"""
-
-import Build, Utils, Logs
-
-def exec_command(self, cmd, **kw):
-       txt = cmd
-       if isinstance(cmd, list):
-               txt = ' '.join(cmd)
-       Logs.debug('runner: %s' % txt)
-       if self.log:
-               self.log.write('%s\n' % cmd)
-               kw['log'] = self.log
-       try:
-               if not kw.get('cwd', None):
-                       kw['cwd'] = self.cwd
-       except AttributeError:
-               self.cwd = kw['cwd'] = self.bldnode.abspath()
-       return Utils.exec_command(cmd, **kw)
-Build.BuildContext.exec_command = exec_command
-
diff --git a/third_party/waf/wafadmin/3rdparty/swig.py b/third_party/waf/wafadmin/3rdparty/swig.py
deleted file mode 100644 (file)
index 393e8e1..0000000
+++ /dev/null
@@ -1,189 +0,0 @@
-#! /usr/bin/env python
-# encoding: UTF-8
-# Petar Forai
-# Thomas Nagy 2008
-
-import re
-import Task, Utils, Logs
-from TaskGen import extension
-from Configure import conf
-import preproc
-
-"""
-Welcome in the hell of adding tasks dynamically
-
-swig interface files may be created at runtime, the module name may be unknown in advance
-
-rev 5859 is much more simple
-"""
-
-SWIG_EXTS = ['.swig', '.i']
-
-swig_str = '${SWIG} ${SWIGFLAGS} ${_CCINCFLAGS} ${_CXXINCFLAGS} ${_CCDEFFLAGS} ${_CXXDEFFLAGS} ${SRC}'
-cls = Task.simple_task_type('swig', swig_str, color='BLUE', ext_in='.i .h', ext_out='.o .c .cxx', shell=False)
-
-def runnable_status(self):
-       for t in self.run_after:
-               if not t.hasrun:
-                       return ASK_LATER
-
-       if not getattr(self, 'init_outputs', None):
-               self.init_outputs = True
-               if not getattr(self, 'module', None):
-                       # search the module name
-                       txt = self.inputs[0].read(self.env)
-                       m = re_module.search(txt)
-                       if not m:
-                               raise ValueError("could not find the swig module name")
-                       self.module = m.group(1)
-
-               swig_c(self)
-
-               # add the language-specific output files as nodes
-               # call funs in the dict swig_langs
-               for x in self.env['SWIGFLAGS']:
-                       # obtain the language
-                       x = x[1:]
-                       try:
-                               fun = swig_langs[x]
-                       except KeyError:
-                               pass
-                       else:
-                               fun(self)
-
-       return Task.Task.runnable_status(self)
-setattr(cls, 'runnable_status', runnable_status)
-
-re_module = re.compile('%module(?:\s*\(.*\))?\s+(.+)', re.M)
-
-re_1 = re.compile(r'^%module.*?\s+([\w]+)\s*?$', re.M)
-re_2 = re.compile('%include "(.*)"', re.M)
-re_3 = re.compile('#include "(.*)"', re.M)
-
-def scan(self):
-       "scan for swig dependencies, climb the .i files"
-       env = self.env
-
-       lst_src = []
-
-       seen = []
-       to_see = [self.inputs[0]]
-
-       while to_see:
-               node = to_see.pop(0)
-               if node.id in seen:
-                       continue
-               seen.append(node.id)
-               lst_src.append(node)
-
-               # read the file
-               code = node.read(env)
-               code = preproc.re_nl.sub('', code)
-               code = preproc.re_cpp.sub(preproc.repl, code)
-
-               # find .i files and project headers
-               names = re_2.findall(code) + re_3.findall(code)
-               for n in names:
-                       for d in self.generator.env.INC_PATHS + [node.parent]:
-                               u = d.find_resource(n)
-                               if u:
-                                       to_see.append(u)
-                                       break
-                       else:
-                               Logs.warn('could not find %r' % n)
-
-       # list of nodes this one depends on, and module name if present
-       if Logs.verbose:
-               Logs.debug('deps: deps for %s: %s' % (str(self), str(lst_src)))
-       return (lst_src, [])
-cls.scan = scan
-
-# provide additional language processing
-swig_langs = {}
-def swig(fun):
-       swig_langs[fun.__name__.replace('swig_', '')] = fun
-
-def swig_c(self):
-       ext = '.swigwrap_%d.c' % self.generator.idx
-       flags = self.env['SWIGFLAGS']
-       if '-c++' in flags:
-               ext += 'xx'
-       out_node = self.inputs[0].parent.find_or_declare(self.module + ext)
-
-       try:
-               if '-c++' in flags:
-                       fun = self.generator.cxx_hook
-               else:
-                       fun = self.generator.c_hook
-       except AttributeError:
-               raise Utils.WafError('No c%s compiler was found to process swig files' % ('-c++' in flags and '++' or ''))
-
-       task = fun(out_node)
-       task.set_run_after(self)
-
-       ge = self.generator.bld.generator
-       ge.outstanding.insert(0, task)
-       ge.total += 1
-
-       try:
-               ltask = self.generator.link_task
-       except AttributeError:
-               pass
-       else:
-               ltask.inputs.append(task.outputs[0])
-
-       self.outputs.append(out_node)
-
-       if not '-o' in self.env['SWIGFLAGS']:
-               self.env.append_value('SWIGFLAGS', '-o')
-               self.env.append_value('SWIGFLAGS', self.outputs[0].abspath(self.env))
-
-@swig
-def swig_python(tsk):
-       tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.py'))
-
-@swig
-def swig_ocaml(tsk):
-       tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.ml'))
-       tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.mli'))
-
-@extension(SWIG_EXTS)
-def i_file(self, node):
-       # the task instance
-       tsk = self.create_task('swig')
-       tsk.set_inputs(node)
-       tsk.module = getattr(self, 'swig_module', None)
-
-       flags = self.to_list(getattr(self, 'swig_flags', []))
-       self.env.append_value('SWIGFLAGS', flags)
-
-       if not '-outdir' in flags:
-               flags.append('-outdir')
-               flags.append(node.parent.abspath(self.env))
-
-@conf
-def check_swig_version(conf, minver=None):
-       """Check for a minimum swig version like conf.check_swig_version('1.3.28')
-       or conf.check_swig_version((1,3,28)) """
-       reg_swig = re.compile(r'SWIG Version\s(.*)', re.M)
-
-       swig_out = Utils.cmd_output('%s -version' % conf.env['SWIG'])
-
-       swigver = [int(s) for s in reg_swig.findall(swig_out)[0].split('.')]
-       if isinstance(minver, basestring):
-               minver = [int(s) for s in minver.split(".")]
-       if isinstance(minver, tuple):
-               minver = [int(s) for s in minver]
-       result = (minver is None) or (minver[:3] <= swigver[:3])
-       swigver_full = '.'.join(map(str, swigver))
-       if result:
-               conf.env['SWIG_VERSION'] = swigver_full
-       minver_str = '.'.join(map(str, minver))
-       if minver is None:
-               conf.check_message_custom('swig version', '', swigver_full)
-       else:
-               conf.check_message('swig version', '>= %s' % (minver_str,), result, option=swigver_full)
-       return result
-
-def detect(conf):
-       swig = conf.find_program('swig', var='SWIG', mandatory=True)
diff --git a/third_party/waf/wafadmin/3rdparty/valadoc.py b/third_party/waf/wafadmin/3rdparty/valadoc.py
deleted file mode 100644 (file)
index bdb0c6b..0000000
+++ /dev/null
@@ -1,112 +0,0 @@
-#! /usr/bin/env python
-# encoding: UTF-8
-# Nicolas Joseph 2009
-
-from fnmatch import fnmatchcase
-import os, os.path, re, stat
-import Task, Utils, Node, Constants
-from TaskGen import feature, extension, after
-from Logs import debug, warn, error
-
-VALADOC_STR = '${VALADOC}'
-
-class valadoc_task(Task.Task):
-
-  vars = ['VALADOC', 'VALADOCFLAGS']
-  color = 'BLUE'
-  after = 'cxx_link cc_link'
-  quiet = True
-
-  output_dir = ''
-  doclet = ''
-  package_name = ''
-  package_version = ''
-  files = []
-  protected = True
-  private = False
-  inherit = False
-  deps = False
-  enable_non_null_experimental = False
-  force = False
-
-  def runnable_status(self):
-    return True
-
-  def run(self):
-    if self.env['VALADOC']:
-      if not self.env['VALADOCFLAGS']:
-        self.env['VALADOCFLAGS'] = ''
-      cmd = [Utils.subst_vars(VALADOC_STR, self.env)]
-      cmd.append ('-o %s' % self.output_dir)
-      if getattr(self, 'doclet', None):
-        cmd.append ('--doclet %s' % self.doclet)
-      cmd.append ('--package-name %s' % self.package_name)
-      if getattr(self, 'version', None):
-        cmd.append ('--package-version %s' % self.package_version)
-      if getattr(self, 'packages', None):
-        for package in self.packages:
-          cmd.append ('--pkg %s' % package)
-      if getattr(self, 'vapi_dirs', None):
-        for vapi_dir in self.vapi_dirs:
-          cmd.append ('--vapidir %s' % vapi_dir)
-      if not getattr(self, 'protected', None):
-        cmd.append ('--no-protected')
-      if getattr(self, 'private', None):
-        cmd.append ('--private')
-      if getattr(self, 'inherit', None):
-        cmd.append ('--inherit')
-      if getattr(self, 'deps', None):
-        cmd.append ('--deps')
-      if getattr(self, 'enable_non_null_experimental', None):
-        cmd.append ('--enable-non-null-experimental')
-      if getattr(self, 'force', None):
-        cmd.append ('--force')
-      cmd.append (' '.join ([x.relpath_gen (self.generator.bld.bldnode) for x in self.files]))
-      return self.generator.bld.exec_command(' '.join(cmd))
-    else:
-      error ('You must install valadoc <http://live.gnome.org/Valadoc> for generate the API documentation')
-      return -1
-
-@feature('valadoc')
-def process_valadoc(self):
-  task = getattr(self, 'task', None)
-  if not task:
-    task = self.create_task('valadoc')
-    self.task = task
-    if getattr(self, 'output_dir', None):
-      task.output_dir = self.output_dir
-    else:
-      Utils.WafError('no output directory')
-    if getattr(self, 'doclet', None):
-      task.doclet = self.doclet
-    else:
-      Utils.WafError('no doclet directory')
-    if getattr(self, 'package_name', None):
-      task.package_name = self.package_name
-    else:
-      Utils.WafError('no package name')
-    if getattr(self, 'package_version', None):
-      task.package_version = self.package_version
-    if getattr(self, 'packages', None):
-      task.packages = Utils.to_list(self.packages)
-    if getattr(self, 'vapi_dirs', None):
-      task.vapi_dirs = Utils.to_list(self.vapi_dirs)
-    if getattr(self, 'files', None):
-      task.files = self.files
-    else:
-      Utils.WafError('no input file')
-    if getattr(self, 'protected', None):
-      task.protected = self.protected
-    if getattr(self, 'private', None):
-      task.private = self.private
-    if getattr(self, 'inherit', None):
-      task.inherit = self.inherit
-    if getattr(self, 'deps', None):
-      task.deps = self.deps
-    if getattr(self, 'enable_non_null_experimental', None):
-      task.enable_non_null_experimental = self.enable_non_null_experimental
-    if getattr(self, 'force', None):
-      task.force = self.force
-
-def detect(conf):
-  conf.find_program('valadoc', var='VALADOC', mandatory=False)
diff --git a/third_party/waf/wafadmin/Build.py b/third_party/waf/wafadmin/Build.py
deleted file mode 100644 (file)
index d36d3df..0000000
+++ /dev/null
@@ -1,1036 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005 (ita)
-
-"""
-Dependency tree holder
-
-The class Build holds all the info related to a build:
-* file system representation (tree of Node instances)
-* various cached objects (task signatures, file scan results, ..)
-
-There is only one Build object at a time (bld singleton)
-"""
-
-import os, sys, errno, re, glob, gc, datetime, shutil
-try: import cPickle
-except: import pickle as cPickle
-import Runner, TaskGen, Node, Scripting, Utils, Environment, Task, Logs, Options
-from Logs import debug, error, info
-from Constants import *
-
-SAVED_ATTRS = 'root srcnode bldnode node_sigs node_deps raw_deps task_sigs id_nodes'.split()
-"Build class members to save"
-
-bld = None
-"singleton - safe to use when Waf is not used as a library"
-
-class BuildError(Utils.WafError):
-       def __init__(self, b=None, t=[]):
-               self.bld = b
-               self.tasks = t
-               self.ret = 1
-               Utils.WafError.__init__(self, self.format_error())
-
-       def format_error(self):
-               lst = ['Build failed:']
-               for tsk in self.tasks:
-                       txt = tsk.format_error()
-                       if txt: lst.append(txt)
-               sep = ' '
-               if len(lst) > 2:
-                       sep = '\n'
-               return sep.join(lst)
-
-def group_method(fun):
-       """
-       sets a build context method to execute after the current group has finished executing
-       this is useful for installing build files:
-       * calling install_files/install_as will fail if called too early
-       * people do not want to define install method in their task classes
-
-       TODO: try it
-       """
-       def f(*k, **kw):
-               if not k[0].is_install:
-                       return False
-
-               postpone = True
-               if 'postpone' in kw:
-                       postpone = kw['postpone']
-                       del kw['postpone']
-
-               # TODO waf 1.6 in theory there should be no reference to the TaskManager internals here
-               if postpone:
-                       m = k[0].task_manager
-                       if not m.groups: m.add_group()
-                       m.groups[m.current_group].post_funs.append((fun, k, kw))
-                       if not 'cwd' in kw:
-                               kw['cwd'] = k[0].path
-               else:
-                       fun(*k, **kw)
-       return f
-
-class BuildContext(Utils.Context):
-       "holds the dependency tree"
-       def __init__(self):
-
-               # not a singleton, but provided for compatibility
-               global bld
-               bld = self
-
-               self.task_manager = Task.TaskManager()
-
-               # instead of hashing the nodes, we assign them a unique id when they are created
-               self.id_nodes = 0
-               self.idx = {}
-
-               # map names to environments, the 'default' must be defined
-               self.all_envs = {}
-
-               # ======================================= #
-               # code for reading the scripts
-
-               # project build directory - do not reset() from load_dirs()
-               self.bdir = ''
-
-               # the current directory from which the code is run
-               # the folder changes everytime a wscript is read
-               self.path = None
-
-               # Manual dependencies.
-               self.deps_man = Utils.DefaultDict(list)
-
-               # ======================================= #
-               # cache variables
-
-               # local cache for absolute paths - cache_node_abspath[variant][node]
-               self.cache_node_abspath = {}
-
-               # list of folders that are already scanned
-               # so that we do not need to stat them one more time
-               self.cache_scanned_folders = {}
-
-               # list of targets to uninstall for removing the empty folders after uninstalling
-               self.uninstall = []
-
-               # ======================================= #
-               # tasks and objects
-
-               # build dir variants (release, debug, ..)
-               for v in 'cache_node_abspath task_sigs node_deps raw_deps node_sigs'.split():
-                       var = {}
-                       setattr(self, v, var)
-
-               self.cache_dir_contents = {}
-
-               self.all_task_gen = []
-               self.task_gen_cache_names = {}
-               self.cache_sig_vars = {}
-               self.log = None
-
-               self.root = None
-               self.srcnode = None
-               self.bldnode = None
-
-               # bind the build context to the nodes in use
-               # this means better encapsulation and no build context singleton
-               class node_class(Node.Node):
-                       pass
-               self.node_class = node_class
-               self.node_class.__module__ = "Node"
-               self.node_class.__name__ = "Nodu"
-               self.node_class.bld = self
-
-               self.is_install = None
-
-       def __copy__(self):
-               "nodes are not supposed to be copied"
-               raise Utils.WafError('build contexts are not supposed to be cloned')
-
-       def load(self):
-               "load the cache from the disk"
-               try:
-                       env = Environment.Environment(os.path.join(self.cachedir, 'build.config.py'))
-               except (IOError, OSError):
-                       pass
-               else:
-                       if env['version'] < HEXVERSION:
-                               raise Utils.WafError('Version mismatch! reconfigure the project')
-                       for t in env['tools']:
-                               self.setup(**t)
-
-               try:
-                       gc.disable()
-                       f = data = None
-
-                       Node.Nodu = self.node_class
-
-                       try:
-                               f = open(os.path.join(self.bdir, DBFILE), 'rb')
-                       except (IOError, EOFError):
-                               # handle missing file/empty file
-                               pass
-
-                       try:
-                               if f: data = cPickle.load(f)
-                       except AttributeError:
-                               # handle file of an old Waf version
-                               # that has an attribute which no longer exist
-                               # (e.g. AttributeError: 'module' object has no attribute 'BuildDTO')
-                               if Logs.verbose > 1: raise
-
-                       if data:
-                               for x in SAVED_ATTRS: setattr(self, x, data[x])
-                       else:
-                               debug('build: Build cache loading failed')
-
-               finally:
-                       if f: f.close()
-                       gc.enable()
-
-       def save(self):
-               "store the cache on disk, see self.load"
-               gc.disable()
-               self.root.__class__.bld = None
-
-               # some people are very nervous with ctrl+c so we have to make a temporary file
-               Node.Nodu = self.node_class
-               db = os.path.join(self.bdir, DBFILE)
-               file = open(db + '.tmp', 'wb')
-               data = {}
-               for x in SAVED_ATTRS: data[x] = getattr(self, x)
-               cPickle.dump(data, file, -1)
-               file.close()
-
-               # do not use shutil.move
-               try: os.unlink(db)
-               except OSError: pass
-               os.rename(db + '.tmp', db)
-               self.root.__class__.bld = self
-               gc.enable()
-
-       # ======================================= #
-
-       def clean(self):
-               debug('build: clean called')
-
-               # does not clean files created during the configuration
-               precious = set([])
-               for env in self.all_envs.values():
-                       for x in env[CFG_FILES]:
-                               node = self.srcnode.find_resource(x)
-                               if node:
-                                       precious.add(node.id)
-
-               def clean_rec(node):
-                       for x in list(node.childs.keys()):
-                               nd = node.childs[x]
-
-                               tp = nd.id & 3
-                               if tp == Node.DIR:
-                                       clean_rec(nd)
-                               elif tp == Node.BUILD:
-                                       if nd.id in precious: continue
-                                       for env in self.all_envs.values():
-                                               try: os.remove(nd.abspath(env))
-                                               except OSError: pass
-                                       node.childs.__delitem__(x)
-
-               clean_rec(self.srcnode)
-
-               for v in 'node_sigs node_deps task_sigs raw_deps cache_node_abspath'.split():
-                       setattr(self, v, {})
-
-       def compile(self):
-               """The cache file is not written if nothing was build at all (build is up to date)"""
-               debug('build: compile called')
-
-               """
-               import cProfile, pstats
-               cProfile.run("import Build\nBuild.bld.flush()", 'profi.txt')
-               p = pstats.Stats('profi.txt')
-               p.sort_stats('cumulative').print_stats(80)
-               """
-               self.flush()
-               #"""
-
-               self.generator = Runner.Parallel(self, Options.options.jobs)
-
-               def dw(on=True):
-                       if Options.options.progress_bar:
-                               if on: sys.stderr.write(Logs.colors.cursor_on)
-                               else: sys.stderr.write(Logs.colors.cursor_off)
-
-               debug('build: executor starting')
-
-               back = os.getcwd()
-               os.chdir(self.bldnode.abspath())
-
-               try:
-                       try:
-                               dw(on=False)
-                               self.generator.start()
-                       except KeyboardInterrupt:
-                               dw()
-                               # if self.generator.processed != 1: TODO
-                               self.save()
-                               raise
-                       except Exception:
-                               dw()
-                               # do not store anything, for something bad happened
-                               raise
-                       else:
-                               dw()
-                               #if self.generator.processed != 1: TODO
-                               self.save()
-
-                       if self.generator.error:
-                               raise BuildError(self, self.task_manager.tasks_done)
-
-               finally:
-                       os.chdir(back)
-
-       def install(self):
-               "this function is called for both install and uninstall"
-               debug('build: install called')
-
-               self.flush()
-
-               # remove empty folders after uninstalling
-               if self.is_install < 0:
-                       lst = []
-                       for x in self.uninstall:
-                               dir = os.path.dirname(x)
-                               if not dir in lst: lst.append(dir)
-                       lst.sort()
-                       lst.reverse()
-
-                       nlst = []
-                       for y in lst:
-                               x = y
-                               while len(x) > 4:
-                                       if not x in nlst: nlst.append(x)
-                                       x = os.path.dirname(x)
-
-                       nlst.sort()
-                       nlst.reverse()
-                       for x in nlst:
-                               try: os.rmdir(x)
-                               except OSError: pass
-
-       def new_task_gen(self, *k, **kw):
-               if self.task_gen_cache_names:
-                       self.task_gen_cache_names = {}
-
-               kw['bld'] = self
-               if len(k) == 0:
-                       ret = TaskGen.task_gen(*k, **kw)
-               else:
-                       cls_name = k[0]
-
-                       try: cls = TaskGen.task_gen.classes[cls_name]
-                       except KeyError: raise Utils.WscriptError('%s is not a valid task generator -> %s' %
-                               (cls_name, [x for x in TaskGen.task_gen.classes]))
-                       ret = cls(*k, **kw)
-               return ret
-
-       def __call__(self, *k, **kw):
-               if self.task_gen_cache_names:
-                       self.task_gen_cache_names = {}
-
-               kw['bld'] = self
-               return TaskGen.task_gen(*k, **kw)
-
-       def load_envs(self):
-               try:
-                       lst = Utils.listdir(self.cachedir)
-               except OSError, e:
-                       if e.errno == errno.ENOENT:
-                               raise Utils.WafError('The project was not configured: run "waf configure" first!')
-                       else:
-                               raise
-
-               if not lst:
-                       raise Utils.WafError('The cache directory is empty: reconfigure the project')
-
-               for file in lst:
-                       if file.endswith(CACHE_SUFFIX):
-                               env = Environment.Environment(os.path.join(self.cachedir, file))
-                               name = file[:-len(CACHE_SUFFIX)]
-
-                               self.all_envs[name] = env
-
-               self.init_variants()
-
-               for env in self.all_envs.values():
-                       for f in env[CFG_FILES]:
-                               newnode = self.path.find_or_declare(f)
-                               try:
-                                       hash = Utils.h_file(newnode.abspath(env))
-                               except (IOError, AttributeError):
-                                       error("cannot find "+f)
-                                       hash = SIG_NIL
-                               self.node_sigs[env.variant()][newnode.id] = hash
-
-               # TODO: hmmm, these nodes are removed from the tree when calling rescan()
-               self.bldnode = self.root.find_dir(self.bldnode.abspath())
-               self.path = self.srcnode = self.root.find_dir(self.srcnode.abspath())
-               self.cwd = self.bldnode.abspath()
-
-       def setup(self, tool, tooldir=None, funs=None):
-               "setup tools for build process"
-               if isinstance(tool, list):
-                       for i in tool: self.setup(i, tooldir)
-                       return
-
-               if not tooldir: tooldir = Options.tooldir
-
-               module = Utils.load_tool(tool, tooldir)
-               if hasattr(module, "setup"): module.setup(self)
-
-       def init_variants(self):
-               debug('build: init variants')
-
-               lstvariants = []
-               for env in self.all_envs.values():
-                       if not env.variant() in lstvariants:
-                               lstvariants.append(env.variant())
-               self.lst_variants = lstvariants
-
-               debug('build: list of variants is %r', lstvariants)
-
-               for name in lstvariants+[0]:
-                       for v in 'node_sigs cache_node_abspath'.split():
-                               var = getattr(self, v)
-                               if not name in var:
-                                       var[name] = {}
-
-       # ======================================= #
-       # node and folder handling
-
-       # this should be the main entry point
-       def load_dirs(self, srcdir, blddir, load_cache=1):
-               "this functions should be the start of everything"
-
-               assert(os.path.isabs(srcdir))
-               assert(os.path.isabs(blddir))
-
-               self.cachedir = os.path.join(blddir, CACHE_DIR)
-
-               if srcdir == blddir:
-                       raise Utils.WafError("build dir must be different from srcdir: %s <-> %s " % (srcdir, blddir))
-
-               self.bdir = blddir
-
-               # try to load the cache file, if it does not exist, nothing happens
-               self.load()
-
-               if not self.root:
-                       Node.Nodu = self.node_class
-                       self.root = Node.Nodu('', None, Node.DIR)
-
-               if not self.srcnode:
-                       self.srcnode = self.root.ensure_dir_node_from_path(srcdir)
-               debug('build: srcnode is %s and srcdir %s', self.srcnode.name, srcdir)
-
-               self.path = self.srcnode
-
-               # create this build dir if necessary
-               try: os.makedirs(blddir)
-               except OSError: pass
-
-               if not self.bldnode:
-                       self.bldnode = self.root.ensure_dir_node_from_path(blddir)
-
-               self.init_variants()
-
-       def rescan(self, src_dir_node):
-               """
-               look the contents of a (folder)node and update its list of childs
-
-               The intent is to perform the following steps
-               * remove the nodes for the files that have disappeared
-               * remove the signatures for the build files that have disappeared
-               * cache the results of os.listdir
-               * create the build folder equivalent (mkdir) for each variant
-               src/bar -> build/default/src/bar, build/release/src/bar
-
-               when a folder in the source directory is removed, we do not check recursively
-               to remove the unused nodes. To do that, call 'waf clean' and build again.
-               """
-
-               # do not rescan over and over again
-               # TODO use a single variable in waf 1.6
-               if self.cache_scanned_folders.get(src_dir_node.id, None): return
-               self.cache_scanned_folders[src_dir_node.id] = True
-
-               # TODO remove in waf 1.6
-               if hasattr(self, 'repository'): self.repository(src_dir_node)
-
-               if not src_dir_node.name and sys.platform == 'win32':
-                       # the root has no name, contains drive letters, and cannot be listed
-                       return
-
-
-               # first, take the case of the source directory
-               parent_path = src_dir_node.abspath()
-               try:
-                       lst = set(Utils.listdir(parent_path))
-               except OSError:
-                       lst = set([])
-
-               # TODO move this at the bottom
-               self.cache_dir_contents[src_dir_node.id] = lst
-
-               # hash the existing source files, remove the others
-               cache = self.node_sigs[0]
-               for x in src_dir_node.childs.values():
-                       if x.id & 3 != Node.FILE: continue
-                       if x.name in lst:
-                               try:
-                                       cache[x.id] = Utils.h_file(x.abspath())
-                               except IOError:
-                                       raise Utils.WafError('The file %s is not readable or has become a dir' % x.abspath())
-                       else:
-                               try: del cache[x.id]
-                               except KeyError: pass
-
-                               del src_dir_node.childs[x.name]
-
-
-               # first obtain the differences between srcnode and src_dir_node
-               h1 = self.srcnode.height()
-               h2 = src_dir_node.height()
-
-               lst = []
-               child = src_dir_node
-               while h2 > h1:
-                       lst.append(child.name)
-                       child = child.parent
-                       h2 -= 1
-               lst.reverse()
-
-               # list the files in the build dirs
-               try:
-                       for variant in self.lst_variants:
-                               sub_path = os.path.join(self.bldnode.abspath(), variant , *lst)
-                               self.listdir_bld(src_dir_node, sub_path, variant)
-               except OSError:
-
-                       # listdir failed, remove the build node signatures for all variants
-                       for node in src_dir_node.childs.values():
-                               if node.id & 3 != Node.BUILD:
-                                       continue
-
-                               for dct in self.node_sigs.values():
-                                       if node.id in dct:
-                                               dct.__delitem__(node.id)
-
-                               # the policy is to avoid removing nodes representing directories
-                               src_dir_node.childs.__delitem__(node.name)
-
-                       for variant in self.lst_variants:
-                               sub_path = os.path.join(self.bldnode.abspath(), variant , *lst)
-                               try:
-                                       os.makedirs(sub_path)
-                               except OSError:
-                                       pass
-
-       # ======================================= #
-       def listdir_src(self, parent_node):
-               """do not use, kept for compatibility"""
-               pass
-
-       def remove_node(self, node):
-               """do not use, kept for compatibility"""
-               pass
-
-       def listdir_bld(self, parent_node, path, variant):
-               """in this method we do not add timestamps but we remove them
-               when the files no longer exist (file removed in the build dir)"""
-
-               i_existing_nodes = [x for x in parent_node.childs.values() if x.id & 3 == Node.BUILD]
-
-               lst = set(Utils.listdir(path))
-               node_names = set([x.name for x in i_existing_nodes])
-               remove_names = node_names - lst
-
-               # remove the stamps of the build nodes that no longer exist on the filesystem
-               ids_to_remove = [x.id for x in i_existing_nodes if x.name in remove_names]
-               cache = self.node_sigs[variant]
-               for nid in ids_to_remove:
-                       if nid in cache:
-                               cache.__delitem__(nid)
-
-       def get_env(self):
-               return self.env_of_name('default')
-       def set_env(self, name, val):
-               self.all_envs[name] = val
-
-       env = property(get_env, set_env)
-
-       def add_manual_dependency(self, path, value):
-               if isinstance(path, Node.Node):
-                       node = path
-               elif os.path.isabs(path):
-                       node = self.root.find_resource(path)
-               else:
-                       node = self.path.find_resource(path)
-               self.deps_man[node.id].append(value)
-
-       def launch_node(self):
-               """return the launch directory as a node"""
-               # p_ln is kind of private, but public in case if
-               try:
-                       return self.p_ln
-               except AttributeError:
-                       self.p_ln = self.root.find_dir(Options.launch_dir)
-                       return self.p_ln
-
-       def glob(self, pattern, relative=True):
-               "files matching the pattern, seen from the current folder"
-               path = self.path.abspath()
-               files = [self.root.find_resource(x) for x in glob.glob(path+os.sep+pattern)]
-               if relative:
-                       files = [x.path_to_parent(self.path) for x in files if x]
-               else:
-                       files = [x.abspath() for x in files if x]
-               return files
-
-       ## the following methods are candidates for the stable apis ##
-
-       def add_group(self, *k):
-               self.task_manager.add_group(*k)
-
-       def set_group(self, *k, **kw):
-               self.task_manager.set_group(*k, **kw)
-
-       def hash_env_vars(self, env, vars_lst):
-               """hash environment variables
-               ['CXX', ..] -> [env['CXX'], ..] -> md5()"""
-
-               # ccroot objects use the same environment for building the .o at once
-               # the same environment and the same variables are used
-
-               idx = str(id(env)) + str(vars_lst)
-               try: return self.cache_sig_vars[idx]
-               except KeyError: pass
-
-               lst = [str(env[a]) for a in vars_lst]
-               ret = Utils.h_list(lst)
-               debug('envhash: %r %r', ret, lst)
-
-               # next time
-               self.cache_sig_vars[idx] = ret
-               return ret
-
-       def name_to_obj(self, name, env):
-               """retrieve a task generator from its name or its target name
-               remember that names must be unique"""
-               cache = self.task_gen_cache_names
-               if not cache:
-                       # create the index lazily
-                       for x in self.all_task_gen:
-                               vt = x.env.variant() + '_'
-                               if x.name:
-                                       cache[vt + x.name] = x
-                               else:
-                                       if isinstance(x.target, str):
-                                               target = x.target
-                                       else:
-                                               target = ' '.join(x.target)
-                                       v = vt + target
-                                       if not cache.get(v, None):
-                                               cache[v] = x
-               return cache.get(env.variant() + '_' + name, None)
-
-       def get_tgen_by_name(self, name):
-               """waf 1.8 api"""
-               return self.name_to_obj(name, self.env)
-
-       def flush(self, all=1):
-               """tell the task generators to create the tasks"""
-
-               self.ini = datetime.datetime.now()
-               # force the initialization of the mapping name->object in flush
-               # name_to_obj can be used in userland scripts, in that case beware of incomplete mapping
-               self.task_gen_cache_names = {}
-               self.name_to_obj('', self.env)
-
-               debug('build: delayed operation TaskGen.flush() called')
-
-               if Options.options.compile_targets:
-                       debug('task_gen: posting objects %r listed in compile_targets', Options.options.compile_targets)
-
-                       mana = self.task_manager
-                       to_post = []
-                       min_grp = 0
-
-                       # ensure the target names exist, fail before any post()
-                       target_objects = Utils.DefaultDict(list)
-                       for target_name in Options.options.compile_targets.split(','):
-                               # trim target_name (handle cases when the user added spaces to targets)
-                               target_name = target_name.strip()
-                               for env in self.all_envs.values():
-                                       tg = self.name_to_obj(target_name, env)
-                                       if tg:
-                                               target_objects[target_name].append(tg)
-
-                                               m = mana.group_idx(tg)
-                                               if m > min_grp:
-                                                       min_grp = m
-                                                       to_post = [tg]
-                                               elif m == min_grp:
-                                                       to_post.append(tg)
-
-                               if not target_name in target_objects and all:
-                                       raise Utils.WafError("target '%s' does not exist" % target_name)
-
-                       debug('group: Forcing up to group %s for target %s', mana.group_name(min_grp), Options.options.compile_targets)
-
-                       # post all the task generators in previous groups
-                       for i in xrange(len(mana.groups)):
-                               mana.current_group = i
-                               if i == min_grp:
-                                       break
-                               g = mana.groups[i]
-                               debug('group: Forcing group %s', mana.group_name(g))
-                               for t in g.tasks_gen:
-                                       debug('group: Posting %s', t.name or t.target)
-                                       t.post()
-
-                       # then post the task generators listed in compile_targets in the last group
-                       for t in to_post:
-                               t.post()
-
-               else:
-                       debug('task_gen: posting objects (normal)')
-                       ln = self.launch_node()
-                       # if the build is started from the build directory, do as if it was started from the top-level
-                       # for the pretty-printing (Node.py), the two lines below cannot be moved to Build::launch_node
-                       if ln.is_child_of(self.bldnode) or not ln.is_child_of(self.srcnode):
-                               ln = self.srcnode
-
-                       # if the project file is located under the source directory, build all targets by default
-                       # else 'waf configure build' does nothing
-                       proj_node = self.root.find_dir(os.path.split(Utils.g_module.root_path)[0])
-                       if proj_node.id != self.srcnode.id:
-                               ln = self.srcnode
-
-                       for i in xrange(len(self.task_manager.groups)):
-                               g = self.task_manager.groups[i]
-                               self.task_manager.current_group = i
-                               if Logs.verbose:
-                                       groups = [x for x in self.task_manager.groups_names if id(self.task_manager.groups_names[x]) == id(g)]
-                                       name = groups and groups[0] or 'unnamed'
-                                       Logs.debug('group: group', name)
-                               for tg in g.tasks_gen:
-                                       if not tg.path.is_child_of(ln):
-                                               continue
-                                       if Logs.verbose:
-                                               Logs.debug('group: %s' % tg)
-                                       tg.post()
-
-       def env_of_name(self, name):
-               try:
-                       return self.all_envs[name]
-               except KeyError:
-                       error('no such environment: '+name)
-                       return None
-
-       def progress_line(self, state, total, col1, col2):
-               n = len(str(total))
-
-               Utils.rot_idx += 1
-               ind = Utils.rot_chr[Utils.rot_idx % 4]
-
-               ini = self.ini
-
-               pc = (100.*state)/total
-               eta = Utils.get_elapsed_time(ini)
-               fs = "[%%%dd/%%%dd][%%s%%2d%%%%%%s][%s][" % (n, n, ind)
-               left = fs % (state, total, col1, pc, col2)
-               right = '][%s%s%s]' % (col1, eta, col2)
-
-               cols = Utils.get_term_cols() - len(left) - len(right) + 2*len(col1) + 2*len(col2)
-               if cols < 7: cols = 7
-
-               ratio = int((cols*state)/total) - 1
-
-               bar = ('='*ratio+'>').ljust(cols)
-               msg = Utils.indicator % (left, bar, right)
-
-               return msg
-
-
-       # do_install is not used anywhere
-       def do_install(self, src, tgt, chmod=O644):
-               """returns true if the file was effectively installed or uninstalled, false otherwise"""
-               if self.is_install > 0:
-                       if not Options.options.force:
-                               # check if the file is already there to avoid a copy
-                               try:
-                                       st1 = os.stat(tgt)
-                                       st2 = os.stat(src)
-                               except OSError:
-                                       pass
-                               else:
-                                       # same size and identical timestamps -> make no copy
-                                       if st1.st_mtime >= st2.st_mtime and st1.st_size == st2.st_size:
-                                               return False
-
-                       srclbl = src.replace(self.srcnode.abspath(None)+os.sep, '')
-                       info("* installing %s as %s" % (srclbl, tgt))
-
-                       # following is for shared libs and stale inodes (-_-)
-                       try: os.remove(tgt)
-                       except OSError: pass
-
-                       try:
-                               shutil.copy2(src, tgt)
-                               os.chmod(tgt, chmod)
-                       except IOError:
-                               try:
-                                       os.stat(src)
-                               except (OSError, IOError):
-                                       error('File %r does not exist' % src)
-                               raise Utils.WafError('Could not install the file %r' % tgt)
-                       return True
-
-               elif self.is_install < 0:
-                       info("* uninstalling %s" % tgt)
-
-                       self.uninstall.append(tgt)
-
-                       try:
-                               os.remove(tgt)
-                       except OSError, e:
-                               if e.errno != errno.ENOENT:
-                                       if not getattr(self, 'uninstall_error', None):
-                                               self.uninstall_error = True
-                                               Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)')
-                                       if Logs.verbose > 1:
-                                               Logs.warn('could not remove %s (error code %r)' % (e.filename, e.errno))
-                       return True
-
-       red = re.compile(r"^([A-Za-z]:)?[/\\\\]*")
-       def get_install_path(self, path, env=None):
-               "installation path prefixed by the destdir, the variables like in '${PREFIX}/bin' are substituted"
-               if not env: env = self.env
-               destdir = env.get_destdir()
-               path = path.replace('/', os.sep)
-               destpath = Utils.subst_vars(path, env)
-               if destdir:
-                       destpath = os.path.join(destdir, self.red.sub('', destpath))
-               return destpath
-
-       def install_dir(self, path, env=None):
-               """
-               create empty folders for the installation (very rarely used)
-               """
-               if env:
-                       assert isinstance(env, Environment.Environment), "invalid parameter"
-               else:
-                       env = self.env
-
-               if not path:
-                       return []
-
-               destpath = self.get_install_path(path, env)
-
-               if self.is_install > 0:
-                       info('* creating %s' % destpath)
-                       Utils.check_dir(destpath)
-               elif self.is_install < 0:
-                       info('* removing %s' % destpath)
-                       self.uninstall.append(destpath + '/xxx') # yes, ugly
-
-       def install_files(self, path, files, env=None, chmod=O644, relative_trick=False, cwd=None):
-               """To install files only after they have been built, put the calls in a method named
-               post_build on the top-level wscript
-
-               The files must be a list and contain paths as strings or as Nodes
-
-               The relative_trick flag can be set to install folders, use bld.path.ant_glob() with it
-               """
-               if env:
-                       assert isinstance(env, Environment.Environment), "invalid parameter"
-               else:
-                       env = self.env
-
-               if not path: return []
-
-               if not cwd:
-                       cwd = self.path
-
-               if isinstance(files, str) and '*' in files:
-                       gl = cwd.abspath() + os.sep + files
-                       lst = glob.glob(gl)
-               else:
-                       lst = Utils.to_list(files)
-
-               if not getattr(lst, '__iter__', False):
-                       lst = [lst]
-
-               destpath = self.get_install_path(path, env)
-
-               Utils.check_dir(destpath)
-
-               installed_files = []
-               for filename in lst:
-                       if isinstance(filename, str) and os.path.isabs(filename):
-                               alst = Utils.split_path(filename)
-                               destfile = os.path.join(destpath, alst[-1])
-                       else:
-                               if isinstance(filename, Node.Node):
-                                       nd = filename
-                               else:
-                                       nd = cwd.find_resource(filename)
-                               if not nd:
-                                       raise Utils.WafError("Unable to install the file %r (not found in %s)" % (filename, cwd))
-
-                               if relative_trick:
-                                       destfile = os.path.join(destpath, filename)
-                                       Utils.check_dir(os.path.dirname(destfile))
-                               else:
-                                       destfile = os.path.join(destpath, nd.name)
-
-                               filename = nd.abspath(env)
-
-                       if self.do_install(filename, destfile, chmod):
-                               installed_files.append(destfile)
-               return installed_files
-
-       def install_as(self, path, srcfile, env=None, chmod=O644, cwd=None):
-               """
-               srcfile may be a string or a Node representing the file to install
-
-               returns True if the file was effectively installed, False otherwise
-               """
-               if env:
-                       assert isinstance(env, Environment.Environment), "invalid parameter"
-               else:
-                       env = self.env
-
-               if not path:
-                       raise Utils.WafError("where do you want to install %r? (%r?)" % (srcfile, path))
-
-               if not cwd:
-                       cwd = self.path
-
-               destpath = self.get_install_path(path, env)
-
-               dir, name = os.path.split(destpath)
-               Utils.check_dir(dir)
-
-               # the source path
-               if isinstance(srcfile, Node.Node):
-                       src = srcfile.abspath(env)
-               else:
-                       src = srcfile
-                       if not os.path.isabs(srcfile):
-                               node = cwd.find_resource(srcfile)
-                               if not node:
-                                       raise Utils.WafError("Unable to install the file %r (not found in %s)" % (srcfile, cwd))
-                               src = node.abspath(env)
-
-               return self.do_install(src, destpath, chmod)
-
-       def symlink_as(self, path, src, env=None, cwd=None):
-               """example:  bld.symlink_as('${PREFIX}/lib/libfoo.so', 'libfoo.so.1.2.3') """
-
-               if sys.platform == 'win32':
-                       # well, this *cannot* work
-                       return
-
-               if not path:
-                       raise Utils.WafError("where do you want to install %r? (%r?)" % (src, path))
-
-               tgt = self.get_install_path(path, env)
-
-               dir, name = os.path.split(tgt)
-               Utils.check_dir(dir)
-
-               if self.is_install > 0:
-                       link = False
-                       if not os.path.islink(tgt):
-                               link = True
-                       elif os.readlink(tgt) != src:
-                               link = True
-
-                       if link:
-                               try: os.remove(tgt)
-                               except OSError: pass
-
-                               info('* symlink %s (-> %s)' % (tgt, src))
-                               os.symlink(src, tgt)
-                       return 0
-
-               else: # UNINSTALL
-                       try:
-                               info('* removing %s' % (tgt))
-                               os.remove(tgt)
-                               return 0
-                       except OSError:
-                               return 1
-
-       def exec_command(self, cmd, **kw):
-               # 'runner' zone is printed out for waf -v, see wafadmin/Options.py
-               debug('runner: system command -> %s', cmd)
-               if self.log:
-                       self.log.write('%s\n' % cmd)
-                       kw['log'] = self.log
-               try:
-                       if not kw.get('cwd', None):
-                               kw['cwd'] = self.cwd
-               except AttributeError:
-                       self.cwd = kw['cwd'] = self.bldnode.abspath()
-               return Utils.exec_command(cmd, **kw)
-
-       def printout(self, s):
-               f = self.log or sys.stderr
-               f.write(s)
-               f.flush()
-
-       def add_subdirs(self, dirs):
-               self.recurse(dirs, 'build')
-
-       def pre_recurse(self, name_or_mod, path, nexdir):
-               if not hasattr(self, 'oldpath'):
-                       self.oldpath = []
-               self.oldpath.append(self.path)
-               self.path = self.root.find_dir(nexdir)
-               return {'bld': self, 'ctx': self}
-
-       def post_recurse(self, name_or_mod, path, nexdir):
-               self.path = self.oldpath.pop()
-
-       ###### user-defined behaviour
-
-       def pre_build(self):
-               if hasattr(self, 'pre_funs'):
-                       for m in self.pre_funs:
-                               m(self)
-
-       def post_build(self):
-               if hasattr(self, 'post_funs'):
-                       for m in self.post_funs:
-                               m(self)
-
-       def add_pre_fun(self, meth):
-               try: self.pre_funs.append(meth)
-               except AttributeError: self.pre_funs = [meth]
-
-       def add_post_fun(self, meth):
-               try: self.post_funs.append(meth)
-               except AttributeError: self.post_funs = [meth]
-
-       def use_the_magic(self):
-               Task.algotype = Task.MAXPARALLEL
-               Task.file_deps = Task.extract_deps
-               self.magic = True
-
-       install_as = group_method(install_as)
-       install_files = group_method(install_files)
-       symlink_as = group_method(symlink_as)
diff --git a/third_party/waf/wafadmin/Configure.py b/third_party/waf/wafadmin/Configure.py
deleted file mode 100644 (file)
index 7575cef..0000000
+++ /dev/null
@@ -1,442 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005-2008 (ita)
-
-"""
-Configuration system
-
-A configuration instance is created when "waf configure" is called, it is used to:
-* create data dictionaries (Environment instances)
-* store the list of modules to import
-
-The old model (copied from Scons) was to store logic (mapping file extensions to functions)
-along with the data. In Waf a way was found to separate that logic by adding an indirection
-layer (storing the names in the Environment instances)
-
-In the new model, the logic is more object-oriented, and the user scripts provide the
-logic. The data files (Environments) must contain configuration data only (flags, ..).
-
-Note: the c/c++ related code is in the module config_c
-"""
-
-import os, shlex, sys, time
-try: import cPickle
-except ImportError: import pickle as cPickle
-import Environment, Utils, Options, Logs
-from Logs import warn
-from Constants import *
-
-try:
-       from urllib import request
-except:
-       from urllib import urlopen
-else:
-       urlopen = request.urlopen
-
-conf_template = '''# project %(app)s configured on %(now)s by
-# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s)
-# using %(args)s
-#
-'''
-
-class ConfigurationError(Utils.WscriptError):
-       pass
-
-autoconfig = False
-"reconfigure the project automatically"
-
-def find_file(filename, path_list):
-       """find a file in a list of paths
-       @param filename: name of the file to search for
-       @param path_list: list of directories to search
-       @return: the first occurrence filename or '' if filename could not be found
-"""
-       for directory in Utils.to_list(path_list):
-               if os.path.exists(os.path.join(directory, filename)):
-                       return directory
-       return ''
-
-def find_program_impl(env, filename, path_list=[], var=None, environ=None):
-       """find a program in folders path_lst, and sets env[var]
-       @param env: environment
-       @param filename: name of the program to search for
-       @param path_list: list of directories to search for filename
-       @param var: environment value to be checked for in env or os.environ
-       @return: either the value that is referenced with [var] in env or os.environ
-         or the first occurrence filename or '' if filename could not be found
-"""
-
-       if not environ:
-               environ = os.environ
-
-       try: path_list = path_list.split()
-       except AttributeError: pass
-
-       if var:
-               if env[var]: return env[var]
-               if var in environ: env[var] = environ[var]
-
-       if not path_list: path_list = environ.get('PATH', '').split(os.pathsep)
-
-       ext = (Options.platform == 'win32') and '.exe,.com,.bat,.cmd' or ''
-       for y in [filename+x for x in ext.split(',')]:
-               for directory in path_list:
-                       x = os.path.join(directory, y)
-                       if os.path.isfile(x):
-                               if var: env[var] = x
-                               return x
-       return ''
-
-class ConfigurationContext(Utils.Context):
-       tests = {}
-       error_handlers = []
-       def __init__(self, env=None, blddir='', srcdir=''):
-               self.env = None
-               self.envname = ''
-
-               self.environ = dict(os.environ)
-
-               self.line_just = 40
-
-               self.blddir = blddir
-               self.srcdir = srcdir
-               self.all_envs = {}
-
-               # curdir: necessary for recursion
-               self.cwd = self.curdir = os.getcwd()
-
-               self.tools = [] # tools loaded in the configuration, and that will be loaded when building
-
-               self.setenv(DEFAULT)
-
-               self.lastprog = ''
-
-               self.hash = 0
-               self.files = []
-
-               self.tool_cache = []
-
-               if self.blddir:
-                       self.post_init()
-
-       def post_init(self):
-
-               self.cachedir = os.path.join(self.blddir, CACHE_DIR)
-
-               path = os.path.join(self.blddir, WAF_CONFIG_LOG)
-               try: os.unlink(path)
-               except (OSError, IOError): pass
-
-               try:
-                       self.log = open(path, 'w')
-               except (OSError, IOError):
-                       self.fatal('could not open %r for writing' % path)
-
-               app = Utils.g_module.APPNAME
-               if app:
-                       ver = getattr(Utils.g_module, 'VERSION', '')
-                       if ver:
-                               app = "%s (%s)" % (app, ver)
-
-               now = time.ctime()
-               pyver = sys.hexversion
-               systype = sys.platform
-               args = " ".join(sys.argv)
-               wafver = WAFVERSION
-               abi = ABI
-               self.log.write(conf_template % vars())
-
-       def __del__(self):
-               """cleanup function: close config.log"""
-
-               # may be ran by the gc, not always after initialization
-               if hasattr(self, 'log') and self.log:
-                       self.log.close()
-
-       def fatal(self, msg):
-               raise ConfigurationError(msg)
-
-       def check_tool(self, input, tooldir=None, funs=None):
-               "load a waf tool"
-
-               tools = Utils.to_list(input)
-               if tooldir: tooldir = Utils.to_list(tooldir)
-               for tool in tools:
-                       tool = tool.replace('++', 'xx')
-                       if tool == 'java': tool = 'javaw'
-                       if tool.lower() == 'unittest': tool = 'unittestw'
-                       # avoid loading the same tool more than once with the same functions
-                       # used by composite projects
-
-                       mag = (tool, id(self.env), funs)
-                       if mag in self.tool_cache:
-                               continue
-                       self.tool_cache.append(mag)
-
-                       module = None
-                       try:
-                               module = Utils.load_tool(tool, tooldir)
-                       except Exception, e:
-                               ex = e
-                               if Options.options.download:
-                                       _3rdparty = os.path.normpath(Options.tooldir[0] + os.sep + '..' + os.sep + '3rdparty')
-
-                                       # try to download the tool from the repository then
-                                       # the default is set to false
-                                       for x in Utils.to_list(Options.remote_repo):
-                                               for sub in ['branches/waf-%s/wafadmin/3rdparty' % WAFVERSION, 'trunk/wafadmin/3rdparty']:
-                                                       url = '/'.join((x, sub, tool + '.py'))
-                                                       try:
-                                                               web = urlopen(url)
-                                                               if web.getcode() != 200:
-                                                                       continue
-                                                       except Exception, e:
-                                                               # on python3 urlopen throws an exception
-                                                               continue
-                                                       else:
-                                                               loc = None
-                                                               try:
-                                                                       loc = open(_3rdparty + os.sep + tool + '.py', 'wb')
-                                                                       loc.write(web.read())
-                                                                       web.close()
-                                                               finally:
-                                                                       if loc:
-                                                                               loc.close()
-                                                               Logs.warn('downloaded %s from %s' % (tool, url))
-                                                               try:
-                                                                       module = Utils.load_tool(tool, tooldir)
-                                                               except:
-                                                                       Logs.warn('module %s from %s is unusable' % (tool, url))
-                                                                       try:
-                                                                               os.unlink(_3rdparty + os.sep + tool + '.py')
-                                                                       except:
-                                                                               pass
-                                                                       continue
-                                               else:
-                                                       break
-
-                                       if not module:
-                                               Logs.error('Could not load the tool %r or download a suitable replacement from the repository (sys.path %r)\n%s' % (tool, sys.path, e))
-                                               raise ex
-                               else:
-                                       Logs.error('Could not load the tool %r in %r (try the --download option?):\n%s' % (tool, sys.path, e))
-                                       raise ex
-
-                       if funs is not None:
-                               self.eval_rules(funs)
-                       else:
-                               func = getattr(module, 'detect', None)
-                               if func:
-                                       if type(func) is type(find_file): func(self)
-                                       else: self.eval_rules(func)
-
-                       self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs})
-
-       def sub_config(self, k):
-               "executes the configure function of a wscript module"
-               self.recurse(k, name='configure')
-
-       def pre_recurse(self, name_or_mod, path, nexdir):
-               return {'conf': self, 'ctx': self}
-
-       def post_recurse(self, name_or_mod, path, nexdir):
-               if not autoconfig:
-                       return
-               self.hash = hash((self.hash, getattr(name_or_mod, 'waf_hash_val', name_or_mod)))
-               self.files.append(path)
-
-       def store(self, file=''):
-               "save the config results into the cache file"
-               if not os.path.isdir(self.cachedir):
-                       os.makedirs(self.cachedir)
-
-               if not file:
-                       file = open(os.path.join(self.cachedir, 'build.config.py'), 'w')
-               file.write('version = 0x%x\n' % HEXVERSION)
-               file.write('tools = %r\n' % self.tools)
-               file.close()
-
-               if not self.all_envs:
-                       self.fatal('nothing to store in the configuration context!')
-               for key in self.all_envs:
-                       tmpenv = self.all_envs[key]
-                       tmpenv.store(os.path.join(self.cachedir, key + CACHE_SUFFIX))
-
-       def set_env_name(self, name, env):
-               "add a new environment called name"
-               self.all_envs[name] = env
-               return env
-
-       def retrieve(self, name, fromenv=None):
-               "retrieve an environment called name"
-               try:
-                       env = self.all_envs[name]
-               except KeyError:
-                       env = Environment.Environment()
-                       env['PREFIX'] = os.path.abspath(os.path.expanduser(Options.options.prefix))
-                       self.all_envs[name] = env
-               else:
-                       if fromenv: warn("The environment %s may have been configured already" % name)
-               return env
-
-       def setenv(self, name):
-               "enable the environment called name"
-               self.env = self.retrieve(name)
-               self.envname = name
-
-       def add_os_flags(self, var, dest=None):
-               # do not use 'get' to make certain the variable is not defined
-               try: self.env.append_value(dest or var, Utils.to_list(self.environ[var]))
-               except KeyError: pass
-
-       def check_message_1(self, sr):
-               self.line_just = max(self.line_just, len(sr))
-               for x in ('\n', self.line_just * '-', '\n', sr, '\n'):
-                       self.log.write(x)
-               Utils.pprint('NORMAL', "%s :" % sr.ljust(self.line_just), sep='')
-
-       def check_message_2(self, sr, color='GREEN'):
-               self.log.write(sr)
-               self.log.write('\n')
-               Utils.pprint(color, sr)
-
-       def check_message(self, th, msg, state, option=''):
-               sr = 'Checking for %s %s' % (th, msg)
-               self.check_message_1(sr)
-               p = self.check_message_2
-               if state: p('ok ' + str(option))
-               else: p('not found', 'YELLOW')
-
-       # FIXME remove in waf 1.6
-       # the parameter 'option' is not used (kept for compatibility)
-       def check_message_custom(self, th, msg, custom, option='', color='PINK'):
-               sr = 'Checking for %s %s' % (th, msg)
-               self.check_message_1(sr)
-               self.check_message_2(custom, color)
-
-       def msg(self, msg, result, color=None):
-               """Prints a configuration message 'Checking for xxx: ok'"""
-               self.start_msg('Checking for ' + msg)
-
-               if not isinstance(color, str):
-                       color = result and 'GREEN' or 'YELLOW'
-
-               self.end_msg(result, color)
-
-       def start_msg(self, msg):
-               try:
-                       if self.in_msg:
-                               return
-               except:
-                       self.in_msg = 0
-               self.in_msg += 1
-
-               self.line_just = max(self.line_just, len(msg))
-               for x in ('\n', self.line_just * '-', '\n', msg, '\n'):
-                       self.log.write(x)
-               Utils.pprint('NORMAL', "%s :" % msg.ljust(self.line_just), sep='')
-
-       def end_msg(self, result, color):
-               self.in_msg -= 1
-               if self.in_msg:
-                       return
-
-               if not color:
-                       color = 'GREEN'
-               if result == True:
-                       msg = 'ok'
-               elif result == False:
-                       msg = 'not found'
-                       color = 'YELLOW'
-               else:
-                       msg = str(result)
-
-               self.log.write(msg)
-               self.log.write('\n')
-               Utils.pprint(color, msg)
-
-       def find_program(self, filename, path_list=[], var=None, mandatory=False):
-               "wrapper that adds a configuration message"
-
-               ret = None
-               if var:
-                       if self.env[var]:
-                               ret = self.env[var]
-                       elif var in os.environ:
-                               ret = os.environ[var]
-
-               if not isinstance(filename, list): filename = [filename]
-               if not ret:
-                       for x in filename:
-                               ret = find_program_impl(self.env, x, path_list, var, environ=self.environ)
-                               if ret: break
-
-               self.check_message_1('Checking for program %s' % ' or '.join(filename))
-               self.log.write('  find program=%r paths=%r var=%r\n  -> %r\n' % (filename, path_list, var, ret))
-               if ret:
-                       Utils.pprint('GREEN', str(ret))
-               else:
-                       Utils.pprint('YELLOW', 'not found')
-                       if mandatory:
-                               self.fatal('The program %r is required' % filename)
-
-               if var:
-                       self.env[var] = ret
-               return ret
-
-       def cmd_to_list(self, cmd):
-               "commands may be written in pseudo shell like 'ccache g++'"
-               if isinstance(cmd, str) and cmd.find(' '):
-                       try:
-                               os.stat(cmd)
-                       except OSError:
-                               return shlex.split(cmd)
-                       else:
-                               return [cmd]
-               return cmd
-
-       def __getattr__(self, name):
-               r = self.__class__.__dict__.get(name, None)
-               if r: return r
-               if name and name.startswith('require_'):
-
-                       for k in ['check_', 'find_']:
-                               n = name.replace('require_', k)
-                               ret = self.__class__.__dict__.get(n, None)
-                               if ret:
-                                       def run(*k, **kw):
-                                               r = ret(self, *k, **kw)
-                                               if not r:
-                                                       self.fatal('requirement failure')
-                                               return r
-                                       return run
-               self.fatal('No such method %r' % name)
-
-       def eval_rules(self, rules):
-               self.rules = Utils.to_list(rules)
-               for x in self.rules:
-                       f = getattr(self, x)
-                       if not f: self.fatal("No such method '%s'." % x)
-                       try:
-                               f()
-                       except Exception, e:
-                               ret = self.err_handler(x, e)
-                               if ret == BREAK:
-                                       break
-                               elif ret == CONTINUE:
-                                       continue
-                               else:
-                                       self.fatal(e)
-
-       def err_handler(self, fun, error):
-               pass
-
-def conf(f):
-       "decorator: attach new configuration functions"
-       setattr(ConfigurationContext, f.__name__, f)
-       return f
-
-def conftest(f):
-       "decorator: attach new configuration tests (registered as strings)"
-       ConfigurationContext.tests[f.__name__] = f
-       return conf(f)
diff --git a/third_party/waf/wafadmin/Constants.py b/third_party/waf/wafadmin/Constants.py
deleted file mode 100644 (file)
index 30960b9..0000000
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Yinon dot me gmail 2008
-
-"""
-these constants are somewhat public, try not to mess them
-
-maintainer: the version number is updated from the top-level wscript file
-"""
-
-# do not touch these three lines, they are updated automatically
-HEXVERSION=0x105019
-WAFVERSION="1.5.19"
-WAFREVISION = "9709M"
-ABI = 7
-
-# permissions
-O644 = 420
-O755 = 493
-
-MAXJOBS = 99999999
-
-CACHE_DIR          = 'c4che'
-CACHE_SUFFIX       = '.cache.py'
-DBFILE             = '.wafpickle-%d' % ABI
-WSCRIPT_FILE       = 'wscript'
-WSCRIPT_BUILD_FILE = 'wscript_build'
-WAF_CONFIG_LOG     = 'config.log'
-WAF_CONFIG_H       = 'config.h'
-
-SIG_NIL = 'iluvcuteoverload'
-
-VARIANT = '_VARIANT_'
-DEFAULT = 'default'
-
-SRCDIR  = 'srcdir'
-BLDDIR  = 'blddir'
-APPNAME = 'APPNAME'
-VERSION = 'VERSION'
-
-DEFINES = 'defines'
-UNDEFINED = ()
-
-BREAK = "break"
-CONTINUE = "continue"
-
-# task scheduler options
-JOBCONTROL = "JOBCONTROL"
-MAXPARALLEL = "MAXPARALLEL"
-NORMAL = "NORMAL"
-
-# task state
-NOT_RUN = 0
-MISSING = 1
-CRASHED = 2
-EXCEPTION = 3
-SKIPPED = 8
-SUCCESS = 9
-
-ASK_LATER = -1
-SKIP_ME = -2
-RUN_ME = -3
-
-
-LOG_FORMAT = "%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s"
-HOUR_FORMAT = "%H:%M:%S"
-
-TEST_OK = True
-
-CFG_FILES = 'cfg_files'
-
-# positive '->' install
-# negative '<-' uninstall
-INSTALL = 1337
-UNINSTALL = -1337
diff --git a/third_party/waf/wafadmin/Environment.py b/third_party/waf/wafadmin/Environment.py
deleted file mode 100644 (file)
index bea4146..0000000
+++ /dev/null
@@ -1,209 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005 (ita)
-
-"""Environment representation
-
-There is one gotcha: getitem returns [] if the contents evals to False
-This means env['foo'] = {}; print env['foo'] will print [] not {}
-"""
-
-import os, copy, re
-import Logs, Options, Utils
-from Constants import *
-re_imp = re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$', re.M)
-
-class Environment(object):
-       """A safe-to-use dictionary, but do not attach functions to it please (break cPickle)
-       An environment instance can be stored into a file and loaded easily
-       """
-       __slots__ = ("table", "parent")
-       def __init__(self, filename=None):
-               self.table = {}
-               #self.parent = None
-
-               if filename:
-                       self.load(filename)
-
-       def __contains__(self, key):
-               if key in self.table: return True
-               try: return self.parent.__contains__(key)
-               except AttributeError: return False # parent may not exist
-
-       def __str__(self):
-               keys = set()
-               cur = self
-               while cur:
-                       keys.update(cur.table.keys())
-                       cur = getattr(cur, 'parent', None)
-               keys = list(keys)
-               keys.sort()
-               return "\n".join(["%r %r" % (x, self.__getitem__(x)) for x in keys])
-
-       def __getitem__(self, key):
-               try:
-                       while 1:
-                               x = self.table.get(key, None)
-                               if not x is None:
-                                       return x
-                               self = self.parent
-               except AttributeError:
-                       return []
-
-       def __setitem__(self, key, value):
-               self.table[key] = value
-
-       def __delitem__(self, key):
-               del self.table[key]
-
-       def pop(self, key, *args):
-               if len(args):
-                       return self.table.pop(key, *args)
-               return self.table.pop(key)
-
-       def set_variant(self, name):
-               self.table[VARIANT] = name
-
-       def variant(self):
-               try:
-                       while 1:
-                               x = self.table.get(VARIANT, None)
-                               if not x is None:
-                                       return x
-                               self = self.parent
-               except AttributeError:
-                       return DEFAULT
-
-       def copy(self):
-               # TODO waf 1.6 rename this method derive, #368
-               newenv = Environment()
-               newenv.parent = self
-               return newenv
-
-       def detach(self):
-               """TODO try it
-               modifying the original env will not change the copy"""
-               tbl = self.get_merged_dict()
-               try:
-                       delattr(self, 'parent')
-               except AttributeError:
-                       pass
-               else:
-                       keys = tbl.keys()
-                       for x in keys:
-                               tbl[x] = copy.deepcopy(tbl[x])
-                       self.table = tbl
-
-       def get_flat(self, key):
-               s = self[key]
-               if isinstance(s, str): return s
-               return ' '.join(s)
-
-       def _get_list_value_for_modification(self, key):
-               """Gets a value that must be a list for further modification.  The
-               list may be modified inplace and there is no need to
-               "self.table[var] = value" afterwards.
-               """
-               try:
-                       value = self.table[key]
-               except KeyError:
-                       try: value = self.parent[key]
-                       except AttributeError: value = []
-                       if isinstance(value, list):
-                               value = value[:]
-                       else:
-                               value = [value]
-               else:
-                       if not isinstance(value, list):
-                               value = [value]
-               self.table[key] = value
-               return value
-
-       def append_value(self, var, value):
-               current_value = self._get_list_value_for_modification(var)
-
-               if isinstance(value, list):
-                       current_value.extend(value)
-               else:
-                       current_value.append(value)
-
-       def prepend_value(self, var, value):
-               current_value = self._get_list_value_for_modification(var)
-
-               if isinstance(value, list):
-                       current_value = value + current_value
-                       # a new list: update the dictionary entry
-                       self.table[var] = current_value
-               else:
-                       current_value.insert(0, value)
-
-       # prepend unique would be ambiguous
-       def append_unique(self, var, value):
-               current_value = self._get_list_value_for_modification(var)
-
-               if isinstance(value, list):
-                       for value_item in value:
-                               if value_item not in current_value:
-                                       current_value.append(value_item)
-               else:
-                       if value not in current_value:
-                               current_value.append(value)
-
-       def get_merged_dict(self):
-               """compute a merged table"""
-               table_list = []
-               env = self
-               while 1:
-                       table_list.insert(0, env.table)
-                       try: env = env.parent
-                       except AttributeError: break
-               merged_table = {}
-               for table in table_list:
-                       merged_table.update(table)
-               return merged_table
-
-       def store(self, filename):
-               "Write the variables into a file"
-               file = open(filename, 'w')
-               merged_table = self.get_merged_dict()
-               keys = list(merged_table.keys())
-               keys.sort()
-               for k in keys: file.write('%s = %r\n' % (k, merged_table[k]))
-               file.close()
-
-       def load(self, filename):
-               "Retrieve the variables from a file"
-               tbl = self.table
-               code = Utils.readf(filename)
-               for m in re_imp.finditer(code):
-                       g = m.group
-                       tbl[g(2)] = eval(g(3))
-               Logs.debug('env: %s', self.table)
-
-       def get_destdir(self):
-               "return the destdir, useful for installing"
-               if self.__getitem__('NOINSTALL'): return ''
-               return Options.options.destdir
-
-       def update(self, d):
-               for k, v in d.iteritems():
-                       self[k] = v
-
-
-       def __getattr__(self, name):
-               if name in self.__slots__:
-                       return object.__getattr__(self, name)
-               else:
-                       return self[name]
-
-       def __setattr__(self, name, value):
-               if name in self.__slots__:
-                       object.__setattr__(self, name, value)
-               else:
-                       self[name] = value
-
-       def __delattr__(self, name):
-               if name in self.__slots__:
-                       object.__delattr__(self, name)
-               else:
-                       del self[name]
diff --git a/third_party/waf/wafadmin/Logs.py b/third_party/waf/wafadmin/Logs.py
deleted file mode 100644 (file)
index f67e87c..0000000
+++ /dev/null
@@ -1,133 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005 (ita)
-
-import ansiterm
-import os, re, logging, traceback, sys
-from Constants import *
-
-zones = ''
-verbose = 0
-
-colors_lst = {
-'USE' : True,
-'BOLD'  :'\x1b[01;1m',
-'RED'   :'\x1b[01;31m',
-'GREEN' :'\x1b[32m',
-'YELLOW':'\x1b[33m',
-'PINK'  :'\x1b[35m',
-'BLUE'  :'\x1b[01;34m',
-'CYAN'  :'\x1b[36m',
-'NORMAL':'\x1b[0m',
-'cursor_on'  :'\x1b[?25h',
-'cursor_off' :'\x1b[?25l',
-}
-
-got_tty = False
-term = os.environ.get('TERM', 'dumb')
-if not term in ['dumb', 'emacs']:
-       try:
-               got_tty = sys.stderr.isatty() or (sys.platform == 'win32' and term in ['xterm', 'msys'])
-       except AttributeError:
-               pass
-
-import Utils
-
-if not got_tty or 'NOCOLOR' in os.environ:
-       colors_lst['USE'] = False
-
-# test
-#if sys.platform == 'win32':
-#      colors_lst['USE'] = True
-
-def get_color(cl):
-       if not colors_lst['USE']: return ''
-       return colors_lst.get(cl, '')
-
-class foo(object):
-       def __getattr__(self, a):
-               return get_color(a)
-       def __call__(self, a):
-               return get_color(a)
-
-colors = foo()
-
-re_log = re.compile(r'(\w+): (.*)', re.M)
-class log_filter(logging.Filter):
-       def __init__(self, name=None):
-               pass
-
-       def filter(self, rec):
-               rec.c1 = colors.PINK
-               rec.c2 = colors.NORMAL
-               rec.zone = rec.module
-               if rec.levelno >= logging.INFO:
-                       if rec.levelno >= logging.ERROR:
-                               rec.c1 = colors.RED
-                       elif rec.levelno >= logging.WARNING:
-                               rec.c1 = colors.YELLOW
-                       else:
-                               rec.c1 = colors.GREEN
-                       return True
-
-               zone = ''
-               m = re_log.match(rec.msg)
-               if m:
-                       zone = rec.zone = m.group(1)
-                       rec.msg = m.group(2)
-
-               if zones:
-                       return getattr(rec, 'zone', '') in zones or '*' in zones
-               elif not verbose > 2:
-                       return False
-               return True
-
-class formatter(logging.Formatter):
-       def __init__(self):
-               logging.Formatter.__init__(self, LOG_FORMAT, HOUR_FORMAT)
-
-       def format(self, rec):
-               if rec.levelno >= logging.WARNING or rec.levelno == logging.INFO:
-                       try:
-                               return '%s%s%s' % (rec.c1, rec.msg.decode('utf-8'), rec.c2)
-                       except:
-                               return rec.c1+rec.msg+rec.c2
-               return logging.Formatter.format(self, rec)
-
-def debug(*k, **kw):
-       if verbose:
-               k = list(k)
-               k[0] = k[0].replace('\n', ' ')
-               logging.debug(*k, **kw)
-
-def error(*k, **kw):
-       logging.error(*k, **kw)
-       if verbose > 1:
-               if isinstance(k[0], Utils.WafError):
-                       st = k[0].stack
-               else:
-                       st = traceback.extract_stack()
-               if st:
-                       st = st[:-1]
-                       buf = []
-                       for filename, lineno, name, line in st:
-                               buf.append('  File "%s", line %d, in %s' % (filename, lineno, name))
-                               if line:
-                                       buf.append('    %s' % line.strip())
-                       if buf: logging.error("\n".join(buf))
-
-warn = logging.warn
-info = logging.info
-
-def init_log():
-       log = logging.getLogger()
-       log.handlers = []
-       log.filters = []
-       hdlr = logging.StreamHandler()
-       hdlr.setFormatter(formatter())
-       log.addHandler(hdlr)
-       log.addFilter(log_filter())
-       log.setLevel(logging.DEBUG)
-
-# may be initialized more than once
-init_log()
diff --git a/third_party/waf/wafadmin/Node.py b/third_party/waf/wafadmin/Node.py
deleted file mode 100644 (file)
index 6b03726..0000000
+++ /dev/null
@@ -1,701 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005 (ita)
-
-"""
-Node: filesystem structure, contains lists of nodes
-
-IMPORTANT:
-1. Each file/folder is represented by exactly one node.
-
-2. Most would-be class properties are stored in Build: nodes to depend on, signature, flags, ..
-unused class members increase the .wafpickle file size sensibly with lots of objects.
-
-3. The build is launched from the top of the build dir (for example, in _build_/).
-
-4. Node should not be instantiated directly.
-Each instance of Build.BuildContext has a Node subclass.
-(aka: 'Nodu', see BuildContext initializer)
-The BuildContext is referenced here as self.__class__.bld
-Its Node class is referenced here as self.__class__
-
-The public and advertised apis are the following:
-${TGT}                 -> dir/to/file.ext
-${TGT[0].base()}       -> dir/to/file
-${TGT[0].dir(env)}     -> dir/to
-${TGT[0].file()}       -> file.ext
-${TGT[0].file_base()}   -> file
-${TGT[0].suffix()}     -> .ext
-${TGT[0].abspath(env)} -> /path/to/dir/to/file.ext
-
-"""
-
-import os, sys, fnmatch, re, stat
-import Utils, Constants
-
-UNDEFINED = 0
-DIR = 1
-FILE = 2
-BUILD = 3
-
-type_to_string = {UNDEFINED: "unk", DIR: "dir", FILE: "src", BUILD: "bld"}
-
-# These fnmatch expressions are used by default to prune the directory tree
-# while doing the recursive traversal in the find_iter method of the Node class.
-prune_pats = '.git .bzr .hg .svn _MTN _darcs CVS SCCS'.split()
-
-# These fnmatch expressions are used by default to exclude files and dirs
-# while doing the recursive traversal in the find_iter method of the Node class.
-exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split()
-
-# These Utils.jar_regexp expressions are used by default to exclude files and dirs and also prune the directory tree
-# while doing the recursive traversal in the ant_glob method of the Node class.
-exclude_regs = '''
-**/*~
-**/#*#
-**/.#*
-**/%*%
-**/._*
-**/CVS
-**/CVS/**
-**/.cvsignore
-**/SCCS
-**/SCCS/**
-**/vssver.scc
-**/.svn
-**/.svn/**
-**/.git
-**/.git/**
-**/.gitignore
-**/.bzr
-**/.bzr/**
-**/.hg
-**/.hg/**
-**/_MTN
-**/_MTN/**
-**/_darcs
-**/_darcs/**
-**/.DS_Store'''
-
-class Node(object):
-       __slots__ = ("name", "parent", "id", "childs")
-       def __init__(self, name, parent, node_type = UNDEFINED):
-               self.name = name
-               self.parent = parent
-
-               # assumption: one build object at a time
-               self.__class__.bld.id_nodes += 4
-               self.id = self.__class__.bld.id_nodes + node_type
-
-               if node_type == DIR: self.childs = {}
-
-               # We do not want to add another type attribute (memory)
-               # use the id to find out: type = id & 3
-               # for setting: new type = type + x - type & 3
-
-               if parent and name in parent.childs:
-                       raise Utils.WafError('node %s exists in the parent files %r already' % (name, parent))
-
-               if parent: parent.childs[name] = self
-
-       def __setstate__(self, data):
-               if len(data) == 4:
-                       (self.parent, self.name, self.id, self.childs) = data
-               else:
-                       (self.parent, self.name, self.id) = data
-
-       def __getstate__(self):
-               if getattr(self, 'childs', None) is None:
-                       return (self.parent, self.name, self.id)
-               else:
-                       return (self.parent, self.name, self.id, self.childs)
-
-       def __str__(self):
-               if not self.parent: return ''
-               return "%s://%s" % (type_to_string[self.id & 3], self.abspath())
-
-       def __repr__(self):
-               return self.__str__()
-
-       def __hash__(self):
-               "expensive, make certain it is not used"
-               raise Utils.WafError('nodes, you are doing it wrong')
-
-       def __copy__(self):
-               "nodes are not supposed to be copied"
-               raise Utils.WafError('nodes are not supposed to be cloned')
-
-       def get_type(self):
-               return self.id & 3
-
-       def set_type(self, t):
-               "dangerous, you are not supposed to use this"
-               self.id = self.id + t - self.id & 3
-
-       def dirs(self):
-               return [x for x in self.childs.values() if x.id & 3 == DIR]
-
-       def files(self):
-               return [x for x in self.childs.values() if x.id & 3 == FILE]
-
-       def get_dir(self, name, default=None):
-               node = self.childs.get(name, None)
-               if not node or node.id & 3 != DIR: return default
-               return  node
-
-       def get_file(self, name, default=None):
-               node = self.childs.get(name, None)
-               if not node or node.id & 3 != FILE: return default
-               return node
-
-       def get_build(self, name, default=None):
-               node = self.childs.get(name, None)
-               if not node or node.id & 3 != BUILD: return default
-               return node
-
-       def find_resource(self, lst):
-               "Find an existing input file: either a build node declared previously or a source node"
-               if isinstance(lst, str):
-                       lst = Utils.split_path(lst)
-
-               if len(lst) == 1:
-                       parent = self
-               else:
-                       parent = self.find_dir(lst[:-1])
-                       if not parent: return None
-               self.__class__.bld.rescan(parent)
-
-               name = lst[-1]
-               node = parent.childs.get(name, None)
-               if node:
-                       tp = node.id & 3
-                       if tp == FILE or tp == BUILD:
-                               return node
-                       else:
-                               return None
-
-               tree = self.__class__.bld
-               if not name in tree.cache_dir_contents[parent.id]:
-                       return None
-
-               path = parent.abspath() + os.sep + name
-               try:
-                       st = Utils.h_file(path)
-               except IOError:
-                       return None
-
-               child = self.__class__(name, parent, FILE)
-               tree.node_sigs[0][child.id] = st
-               return child
-
-       def find_or_declare(self, lst):
-               "Used for declaring a build node representing a file being built"
-               if isinstance(lst, str):
-                       lst = Utils.split_path(lst)
-
-               if len(lst) == 1:
-                       parent = self
-               else:
-                       parent = self.find_dir(lst[:-1])
-                       if not parent: return None
-               self.__class__.bld.rescan(parent)
-
-               name = lst[-1]
-               node = parent.childs.get(name, None)
-               if node:
-                       tp = node.id & 3
-                       if tp != BUILD:
-                               raise Utils.WafError('find_or_declare found a source file where a build file was expected %r' % '/'.join(lst))
-                       return node
-               node = self.__class__(name, parent, BUILD)
-               return node
-
-       def find_dir(self, lst):
-               "search a folder in the filesystem"
-
-               if isinstance(lst, str):
-                       lst = Utils.split_path(lst)
-
-               current = self
-               for name in lst:
-                       self.__class__.bld.rescan(current)
-                       prev = current
-
-                       if not current.parent and name == current.name:
-                               continue
-                       elif not name:
-                               continue
-                       elif name == '.':
-                               continue
-                       elif name == '..':
-                               current = current.parent or current
-                       else:
-                               current = prev.childs.get(name, None)
-                               if current is None:
-                                       dir_cont = self.__class__.bld.cache_dir_contents
-                                       if prev.id in dir_cont and name in dir_cont[prev.id]:
-                                               if not prev.name:
-                                                       if os.sep == '/':
-                                                               # cygwin //machine/share
-                                                               dirname = os.sep + name
-                                                       else:
-                                                               # windows c:
-                                                               dirname = name
-                                               else:
-                                                       # regular path
-                                                       dirname = prev.abspath() + os.sep + name
-                                               if not os.path.isdir(dirname):
-                                                       return None
-                                               current = self.__class__(name, prev, DIR)
-                                       elif (not prev.name and len(name) == 2 and name[1] == ':') or name.startswith('\\\\'):
-                                               # drive letter or \\ path for windows
-                                               current = self.__class__(name, prev, DIR)
-                                       else:
-                                               return None
-                               else:
-                                       if current.id & 3 != DIR:
-                                               return None
-               return current
-
-       def ensure_dir_node_from_path(self, lst):
-               "used very rarely, force the construction of a branch of node instance for representing folders"
-
-               if isinstance(lst, str):
-                       lst = Utils.split_path(lst)
-
-               current = self
-               for name in lst:
-                       if not name:
-                               continue
-                       elif name == '.':
-                               continue
-                       elif name == '..':
-                               current = current.parent or current
-                       else:
-                               prev = current
-                               current = prev.childs.get(name, None)
-                               if current is None:
-                                       current = self.__class__(name, prev, DIR)
-               return current
-
-       def exclusive_build_node(self, path):
-               """
-               create a hierarchy in the build dir (no source folders) for ill-behaving compilers
-               the node is not hashed, so you must do it manually
-
-               after declaring such a node, find_dir and find_resource should work as expected
-               """
-               lst = Utils.split_path(path)
-               name = lst[-1]
-               if len(lst) > 1:
-                       parent = None
-                       try:
-                               parent = self.find_dir(lst[:-1])
-                       except OSError:
-                               pass
-                       if not parent:
-                               parent = self.ensure_dir_node_from_path(lst[:-1])
-                               self.__class__.bld.rescan(parent)
-                       else:
-                               try:
-                                       self.__class__.bld.rescan(parent)
-                               except OSError:
-                                       pass
-               else:
-                       parent = self
-
-               node = parent.childs.get(name, None)
-               if not node:
-                       node = self.__class__(name, parent, BUILD)
-
-               return node
-
-       def path_to_parent(self, parent):
-               "path relative to a direct ancestor, as string"
-               lst = []
-               p = self
-               h1 = parent.height()
-               h2 = p.height()
-               while h2 > h1:
-                       h2 -= 1
-                       lst.append(p.name)
-                       p = p.parent
-               if lst:
-                       lst.reverse()
-                       ret = os.path.join(*lst)
-               else:
-                       ret = ''
-               return ret
-
-       def find_ancestor(self, node):
-               "find a common ancestor for two nodes - for the shortest path in hierarchy"
-               dist = self.height() - node.height()
-               if dist < 0: return node.find_ancestor(self)
-               # now the real code
-               cand = self
-               while dist > 0:
-                       cand = cand.parent
-                       dist -= 1
-               if cand == node: return cand
-               cursor = node
-               while cand.parent:
-                       cand = cand.parent
-                       cursor = cursor.parent
-                       if cand == cursor: return cand
-
-       def relpath_gen(self, from_node):
-               "string representing a relative path between self to another node"
-
-               if self == from_node: return '.'
-               if from_node.parent == self: return '..'
-
-               # up_path is '../../../' and down_path is 'dir/subdir/subdir/file'
-               ancestor = self.find_ancestor(from_node)
-               lst = []
-               cand = self
-               while not cand.id == ancestor.id:
-                       lst.append(cand.name)
-                       cand = cand.parent
-               cand = from_node
-               while not cand.id == ancestor.id:
-                       lst.append('..')
-                       cand = cand.parent
-               lst.reverse()
-               return os.sep.join(lst)
-
-       def nice_path(self, env=None):
-               "printed in the console, open files easily from the launch directory"
-               tree = self.__class__.bld
-               ln = tree.launch_node()
-
-               if self.id & 3 == FILE: return self.relpath_gen(ln)
-               else: return os.path.join(tree.bldnode.relpath_gen(ln), env.variant(), self.relpath_gen(tree.srcnode))
-
-       def is_child_of(self, node):
-               "does this node belong to the subtree node"
-               p = self
-               diff = self.height() - node.height()
-               while diff > 0:
-                       diff -= 1
-                       p = p.parent
-               return p.id == node.id
-
-       def variant(self, env):
-               "variant, or output directory for this node, a source has for variant 0"
-               if not env: return 0
-               elif self.id & 3 == FILE: return 0
-               else: return env.variant()
-
-       def height(self):
-               "amount of parents"
-               # README a cache can be added here if necessary
-               d = self
-               val = -1
-               while d:
-                       d = d.parent
-                       val += 1
-               return val
-
-       # helpers for building things
-
-       def abspath(self, env=None):
-               """
-               absolute path
-               @param env [Environment]:
-                       * obligatory for build nodes: build/variant/src/dir/bar.o
-                       * optional for dirs: get either src/dir or build/variant/src/dir
-                       * excluded for source nodes: src/dir/bar.c
-
-               Instead of computing the absolute path each time again,
-               store the already-computed absolute paths in one of (variants+1) dictionaries:
-               bld.cache_node_abspath[0] holds absolute paths for source nodes.
-               bld.cache_node_abspath[variant] holds the absolute path for the build nodes
-               which reside in the variant given by env.
-               """
-               ## absolute path - hot zone, so do not touch
-
-               # less expensive
-               variant = (env and (self.id & 3 != FILE) and env.variant()) or 0
-
-               ret = self.__class__.bld.cache_node_abspath[variant].get(self.id, None)
-               if ret: return ret
-
-               if not variant:
-                       # source directory
-                       if not self.parent:
-                               val = os.sep == '/' and os.sep or ''
-                       elif not self.parent.name: # root
-                               val = (os.sep == '/' and os.sep or '') + self.name
-                       else:
-                               val = self.parent.abspath() + os.sep + self.name
-               else:
-                       # build directory
-                       val = os.sep.join((self.__class__.bld.bldnode.abspath(), variant, self.path_to_parent(self.__class__.bld.srcnode)))
-               self.__class__.bld.cache_node_abspath[variant][self.id] = val
-               return val
-
-       def change_ext(self, ext):
-               "node of the same path, but with a different extension - hot zone so do not touch"
-               name = self.name
-               k = name.rfind('.')
-               if k >= 0:
-                       name = name[:k] + ext
-               else:
-                       name = name + ext
-
-               return self.parent.find_or_declare([name])
-
-       def src_dir(self, env):
-               "src path without the file name"
-               return self.parent.srcpath(env)
-
-       def bld_dir(self, env):
-               "build path without the file name"
-               return self.parent.bldpath(env)
-
-       def bld_base(self, env):
-               "build path without the extension: src/dir/foo(.cpp)"
-               s = os.path.splitext(self.name)[0]
-               return os.path.join(self.bld_dir(env), s)
-
-       def bldpath(self, env=None):
-               "path seen from the build dir default/src/foo.cpp"
-               if self.id & 3 == FILE:
-                       return self.relpath_gen(self.__class__.bld.bldnode)
-               p = self.path_to_parent(self.__class__.bld.srcnode)
-               if p is not '':
-                       return env.variant() + os.sep + p
-               return env.variant()
-
-       def srcpath(self, env=None):
-               "path in the srcdir from the build dir ../src/foo.cpp"
-               if self.id & 3 == BUILD:
-                       return self.bldpath(env)
-               return self.relpath_gen(self.__class__.bld.bldnode)
-
-       def read(self, env):
-               "get the contents of a file, it is not used anywhere for the moment"
-               return Utils.readf(self.abspath(env))
-
-       def dir(self, env):
-               "scons-like"
-               return self.parent.abspath(env)
-
-       def file(self):
-               "scons-like"
-               return self.name
-
-       def file_base(self):
-               "scons-like"
-               return os.path.splitext(self.name)[0]
-
-       def suffix(self):
-               "scons-like - hot zone so do not touch"
-               k = max(0, self.name.rfind('.'))
-               return self.name[k:]
-
-       def find_iter_impl(self, src=True, bld=True, dir=True, accept_name=None, is_prune=None, maxdepth=25):
-               """find nodes in the filesystem hierarchy, try to instanciate the nodes passively; same gotcha as ant_glob"""
-               bld_ctx = self.__class__.bld
-               bld_ctx.rescan(self)
-               for name in bld_ctx.cache_dir_contents[self.id]:
-                       if accept_name(self, name):
-                               node = self.find_resource(name)
-                               if node:
-                                       if src and node.id & 3 == FILE:
-                                               yield node
-                               else:
-                                       node = self.find_dir(name)
-                                       if node and node.id != bld_ctx.bldnode.id:
-                                               if dir:
-                                                       yield node
-                                               if not is_prune(self, name):
-                                                       if maxdepth:
-                                                               for k in node.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth - 1):
-                                                                       yield k
-                       else:
-                               if not is_prune(self, name):
-                                       node = self.find_resource(name)
-                                       if not node:
-                                               # not a file, it is a dir
-                                               node = self.find_dir(name)
-                                               if node and node.id != bld_ctx.bldnode.id:
-                                                       if maxdepth:
-                                                               for k in node.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth - 1):
-                                                                       yield k
-
-               if bld:
-                       for node in self.childs.values():
-                               if node.id == bld_ctx.bldnode.id:
-                                       continue
-                               if node.id & 3 == BUILD:
-                                       if accept_name(self, node.name):
-                                               yield node
-               raise StopIteration
-
-       def find_iter(self, in_pat=['*'], ex_pat=exclude_pats, prune_pat=prune_pats, src=True, bld=True, dir=False, maxdepth=25, flat=False):
-               """find nodes recursively, this returns everything but folders by default; same gotcha as ant_glob"""
-
-               if not (src or bld or dir):
-                       raise StopIteration
-
-               if self.id & 3 != DIR:
-                       raise StopIteration
-
-               in_pat = Utils.to_list(in_pat)
-               ex_pat = Utils.to_list(ex_pat)
-               prune_pat = Utils.to_list(prune_pat)
-
-               def accept_name(node, name):
-                       for pat in ex_pat:
-                               if fnmatch.fnmatchcase(name, pat):
-                                       return False
-                       for pat in in_pat:
-                               if fnmatch.fnmatchcase(name, pat):
-                                       return True
-                       return False
-
-               def is_prune(node, name):
-                       for pat in prune_pat:
-                               if fnmatch.fnmatchcase(name, pat):
-                                       return True
-                       return False
-
-               ret = self.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth)
-               if flat:
-                       return " ".join([x.relpath_gen(self) for x in ret])
-
-               return ret
-
-       def ant_glob(self, *k, **kw):
-               """
-               known gotcha: will enumerate the files, but only if the folder exists in the source directory
-               """
-
-               src=kw.get('src', 1)
-               bld=kw.get('bld', 0)
-               dir=kw.get('dir', 0)
-               excl = kw.get('excl', exclude_regs)
-               incl = k and k[0] or kw.get('incl', '**')
-
-               def to_pat(s):
-                       lst = Utils.to_list(s)
-                       ret = []
-                       for x in lst:
-                               x = x.replace('//', '/')
-                               if x.endswith('/'):
-                                       x += '**'
-                               lst2 = x.split('/')
-                               accu = []
-                               for k in lst2:
-                                       if k == '**':
-                                               accu.append(k)
-                                       else:
-                                               k = k.replace('.', '[.]').replace('*', '.*').replace('?', '.')
-                                               k = '^%s$' % k
-                                               #print "pattern", k
-                                               accu.append(re.compile(k))
-                               ret.append(accu)
-                       return ret
-
-               def filtre(name, nn):
-                       ret = []
-                       for lst in nn:
-                               if not lst:
-                                       pass
-                               elif lst[0] == '**':
-                                       ret.append(lst)
-                                       if len(lst) > 1:
-                                               if lst[1].match(name):
-                                                       ret.append(lst[2:])
-                                       else:
-                                               ret.append([])
-                               elif lst[0].match(name):
-                                       ret.append(lst[1:])
-                       return ret
-
-               def accept(name, pats):
-                       nacc = filtre(name, pats[0])
-                       nrej = filtre(name, pats[1])
-                       if [] in nrej:
-                               nacc = []
-                       return [nacc, nrej]
-
-               def ant_iter(nodi, maxdepth=25, pats=[]):
-                       nodi.__class__.bld.rescan(nodi)
-                       tmp = list(nodi.__class__.bld.cache_dir_contents[nodi.id])
-                       tmp.sort()
-                       for name in tmp:
-                               npats = accept(name, pats)
-                               if npats and npats[0]:
-                                       accepted = [] in npats[0]
-                                       #print accepted, nodi, name
-
-                                       node = nodi.find_resource(name)
-                                       if node and accepted:
-                                               if src and node.id & 3 == FILE:
-                                                       yield node
-                                       else:
-                                               node = nodi.find_dir(name)
-                                               if node and node.id != nodi.__class__.bld.bldnode.id:
-                                                       if accepted and dir:
-                                                               yield node
-                                                       if maxdepth:
-                                                               for k in ant_iter(node, maxdepth=maxdepth - 1, pats=npats):
-                                                                       yield k
-                       if bld:
-                               for node in nodi.childs.values():
-                                       if node.id == nodi.__class__.bld.bldnode.id:
-                                               continue
-                                       if node.id & 3 == BUILD:
-                                               npats = accept(node.name, pats)
-                                               if npats and npats[0] and [] in npats[0]:
-                                                       yield node
-                       raise StopIteration
-
-               ret = [x for x in ant_iter(self, pats=[to_pat(incl), to_pat(excl)])]
-
-               if kw.get('flat', True):
-                       return " ".join([x.relpath_gen(self) for x in ret])
-
-               return ret
-
-       def update_build_dir(self, env=None):
-
-               if not env:
-                       for env in self.bld.all_envs:
-                               self.update_build_dir(env)
-                       return
-
-               path = self.abspath(env)
-
-               lst = Utils.listdir(path)
-               try:
-                       self.__class__.bld.cache_dir_contents[self.id].update(lst)
-               except KeyError:
-                       self.__class__.bld.cache_dir_contents[self.id] = set(lst)
-               self.__class__.bld.cache_scanned_folders[self.id] = True
-
-               for k in lst:
-                       npath = path + os.sep + k
-                       st = os.stat(npath)
-                       if stat.S_ISREG(st[stat.ST_MODE]):
-                               ick = self.find_or_declare(k)
-                               if not (ick.id in self.__class__.bld.node_sigs[env.variant()]):
-                                       self.__class__.bld.node_sigs[env.variant()][ick.id] = Constants.SIG_NIL
-                       elif stat.S_ISDIR(st[stat.ST_MODE]):
-                               child = self.find_dir(k)
-                               if not child:
-                                       child = self.ensure_dir_node_from_path(k)
-                               child.update_build_dir(env)
-
-       def read(self, flags='r', encoding='ISO8859-1'):
-               """backported from waf 1.8"""
-               return Utils.readf(self.abspath(), flags, encoding)
-
-       def write(self, data, flags='w', encoding='ISO8859-1'):
-               """backported from waf 1.8"""
-               Utils.writef(self.abspath(self.bld.env), data, flags, encoding)
-
-class Nodu(Node):
-       pass
diff --git a/third_party/waf/wafadmin/Options.py b/third_party/waf/wafadmin/Options.py
deleted file mode 100644 (file)
index 7e87c11..0000000
+++ /dev/null
@@ -1,287 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Scott Newton, 2005 (scottn)
-# Thomas Nagy, 2006 (ita)
-
-"Custom command-line options"
-
-import os, sys, imp, types, tempfile, optparse
-import Logs, Utils
-from Constants import *
-
-cmds = 'distclean configure build install clean uninstall check dist distcheck'.split()
-
-# TODO remove in waf 1.6 the following two
-commands = {}
-is_install = False
-
-options = {}
-arg_line = []
-launch_dir = ''
-tooldir = ''
-lockfile = os.environ.get('WAFLOCK', '.lock-wscript')
-try: cache_global = os.path.abspath(os.environ['WAFCACHE'])
-except KeyError: cache_global = ''
-platform = Utils.unversioned_sys_platform()
-conf_file = 'conf-runs-%s-%d.pickle' % (platform, ABI)
-
-remote_repo = ['http://waf.googlecode.com/svn/']
-"""remote directory for the plugins"""
-
-
-# Such a command-line should work:  JOBS=4 PREFIX=/opt/ DESTDIR=/tmp/ahoj/ waf configure
-default_prefix = os.environ.get('PREFIX')
-if not default_prefix:
-       if platform == 'win32':
-               d = tempfile.gettempdir()
-               default_prefix = d[0].upper() + d[1:]
-               # win32 preserves the case, but gettempdir does not
-       else: default_prefix = '/usr/local/'
-
-default_jobs = os.environ.get('JOBS', -1)
-if default_jobs < 1:
-       try:
-               if 'SC_NPROCESSORS_ONLN' in os.sysconf_names:
-                       default_jobs = os.sysconf('SC_NPROCESSORS_ONLN')
-               else:
-                       default_jobs = int(Utils.cmd_output(['sysctl', '-n', 'hw.ncpu']))
-       except:
-               if os.name == 'java': # platform.system() == 'Java'
-                       from java.lang import Runtime
-                       default_jobs = Runtime.getRuntime().availableProcessors()
-               else:
-                       # environment var defined on win32
-                       default_jobs = int(os.environ.get('NUMBER_OF_PROCESSORS', 1))
-
-default_destdir = os.environ.get('DESTDIR', '')
-
-def get_usage(self):
-       cmds_str = []
-       module = Utils.g_module
-       if module:
-               # create the help messages for commands
-               tbl = module.__dict__
-               keys = list(tbl.keys())
-               keys.sort()
-
-               if 'build' in tbl:
-                       if not module.build.__doc__:
-                               module.build.__doc__ = 'builds the project'
-               if 'configure' in tbl:
-                       if not module.configure.__doc__:
-                               module.configure.__doc__ = 'configures the project'
-
-               ban = ['set_options', 'init', 'shutdown']
-
-               optlst = [x for x in keys if not x in ban
-                       and type(tbl[x]) is type(parse_args_impl)
-                       and tbl[x].__doc__
-                       and not x.startswith('_')]
-
-               just = max([len(x) for x in optlst])
-
-               for x in optlst:
-                       cmds_str.append('  %s: %s' % (x.ljust(just), tbl[x].__doc__))
-               ret = '\n'.join(cmds_str)
-       else:
-               ret = ' '.join(cmds)
-       return '''waf [command] [options]
-
-Main commands (example: ./waf build -j4)
-%s
-''' % ret
-
-
-setattr(optparse.OptionParser, 'get_usage', get_usage)
-
-def create_parser(module=None):
-       Logs.debug('options: create_parser is called')
-       parser = optparse.OptionParser(conflict_handler="resolve", version = 'waf %s (%s)' % (WAFVERSION, WAFREVISION))
-
-       parser.formatter.width = Utils.get_term_cols()
-       p = parser.add_option
-
-       p('-j', '--jobs',
-               type    = 'int',
-               default = default_jobs,
-               help    = 'amount of parallel jobs (%r)' % default_jobs,
-               dest    = 'jobs')
-
-       p('-k', '--keep',
-               action  = 'store_true',
-               default = False,
-               help    = 'keep running happily on independent task groups',
-               dest    = 'keep')
-
-       p('-v', '--verbose',
-               action  = 'count',
-               default = 0,
-               help    = 'verbosity level -v -vv or -vvv [default: 0]',
-               dest    = 'verbose')
-
-       p('--nocache',
-               action  = 'store_true',
-               default = False,
-               help    = 'ignore the WAFCACHE (if set)',
-               dest    = 'nocache')
-
-       p('--zones',
-               action  = 'store',
-               default = '',
-               help    = 'debugging zones (task_gen, deps, tasks, etc)',
-               dest    = 'zones')
-
-       p('-p', '--progress',
-               action  = 'count',
-               default = 0,
-               help    = '-p: progress bar; -pp: ide output',
-               dest    = 'progress_bar')
-
-       p('--targets',
-               action  = 'store',
-               default = '',
-               help    = 'build given task generators, e.g. "target1,target2"',
-               dest    = 'compile_targets')
-
-       gr = optparse.OptionGroup(parser, 'configuration options')
-       parser.add_option_group(gr)
-       gr.add_option('-b', '--blddir',
-               action  = 'store',
-               default = '',
-               help    = 'out dir for the project (configuration)',
-               dest    = 'blddir')
-       gr.add_option('-s', '--srcdir',
-               action  = 'store',
-               default = '',
-               help    = 'top dir for the project (configuration)',
-               dest    = 'srcdir')
-       gr.add_option('--prefix',
-               help    = 'installation prefix (configuration) [default: %r]' % default_prefix,
-               default = default_prefix,
-               dest    = 'prefix')
-
-       gr.add_option('--download',
-               action  = 'store_true',
-               default = False,
-               help    = 'try to download the tools if missing',
-               dest    = 'download')
-
-       gr = optparse.OptionGroup(parser, 'installation options')
-       parser.add_option_group(gr)
-       gr.add_option('--destdir',
-               help    = 'installation root [default: %r]' % default_destdir,
-               default = default_destdir,
-               dest    = 'destdir')
-       gr.add_option('-f', '--force',
-               action  = 'store_true',
-               default = False,
-               help    = 'force file installation',
-               dest    = 'force')
-
-       return parser
-
-def parse_args_impl(parser, _args=None):
-       global options, commands, arg_line
-       (options, args) = parser.parse_args(args=_args)
-
-       arg_line = args
-       #arg_line = args[:] # copy
-
-       # By default, 'waf' is equivalent to 'waf build'
-       commands = {}
-       for var in cmds: commands[var] = 0
-       if not args:
-               commands['build'] = 1
-               args.append('build')
-
-       # Parse the command arguments
-       for arg in args:
-               commands[arg] = True
-
-       # the check thing depends on the build
-       if 'check' in args:
-               idx = args.index('check')
-               try:
-                       bidx = args.index('build')
-                       if bidx > idx:
-                               raise ValueError('build before check')
-               except ValueError, e:
-                       args.insert(idx, 'build')
-
-       if args[0] != 'init':
-               args.insert(0, 'init')
-
-       # TODO -k => -j0
-       if options.keep: options.jobs = 1
-       if options.jobs < 1: options.jobs = 1
-
-       if 'install' in sys.argv or 'uninstall' in sys.argv:
-               # absolute path only if set
-               options.destdir = options.destdir and os.path.abspath(os.path.expanduser(options.destdir))
-
-       Logs.verbose = options.verbose
-       Logs.init_log()
-
-       if options.zones:
-               Logs.zones = options.zones.split(',')
-               if not Logs.verbose: Logs.verbose = 1
-       elif Logs.verbose > 0:
-               Logs.zones = ['runner']
-       if Logs.verbose > 2:
-               Logs.zones = ['*']
-
-# TODO waf 1.6
-# 1. rename the class to OptionsContext
-# 2. instead of a class attribute, use a module (static 'parser')
-# 3. parse_args_impl was made in times when we did not know about binding new methods to classes
-
-class Handler(Utils.Context):
-       """loads wscript modules in folders for adding options
-       This class should be named 'OptionsContext'
-       A method named 'recurse' is bound when used by the module Scripting"""
-
-       parser = None
-       # make it possible to access the reference, like Build.bld
-
-       def __init__(self, module=None):
-               self.parser = create_parser(module)
-               self.cwd = os.getcwd()
-               Handler.parser = self
-
-       def add_option(self, *k, **kw):
-               self.parser.add_option(*k, **kw)
-
-       def add_option_group(self, *k, **kw):
-               return self.parser.add_option_group(*k, **kw)
-
-       def get_option_group(self, opt_str):
-               return self.parser.get_option_group(opt_str)
-
-       def sub_options(self, *k, **kw):
-               if not k: raise Utils.WscriptError('folder expected')
-               self.recurse(k[0], name='set_options')
-
-       def tool_options(self, *k, **kw):
-               Utils.python_24_guard()
-
-               if not k[0]:
-                       raise Utils.WscriptError('invalid tool_options call %r %r' % (k, kw))
-               tools = Utils.to_list(k[0])
-
-               # TODO waf 1.6 remove the global variable tooldir
-               path = Utils.to_list(kw.get('tdir', kw.get('tooldir', tooldir)))
-
-               for tool in tools:
-                       tool = tool.replace('++', 'xx')
-                       if tool == 'java': tool = 'javaw'
-                       if tool.lower() == 'unittest': tool = 'unittestw'
-                       module = Utils.load_tool(tool, path)
-                       try:
-                               fun = module.set_options
-                       except AttributeError:
-                               pass
-                       else:
-                               fun(kw.get('option_group', self))
-
-       def parse_args(self, args=None):
-               parse_args_impl(self.parser, args)
diff --git a/third_party/waf/wafadmin/Runner.py b/third_party/waf/wafadmin/Runner.py
deleted file mode 100644 (file)
index 0d2dea5..0000000
+++ /dev/null
@@ -1,235 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005-2008 (ita)
-
-"Execute the tasks"
-
-import os, sys, random, time, threading, traceback
-try: from Queue import Queue
-except ImportError: from queue import Queue
-import Build, Utils, Logs, Options
-from Logs import debug, error
-from Constants import *
-
-GAP = 15
-
-run_old = threading.Thread.run
-def run(*args, **kwargs):
-       try:
-               run_old(*args, **kwargs)
-       except (KeyboardInterrupt, SystemExit):
-               raise
-       except:
-               sys.excepthook(*sys.exc_info())
-threading.Thread.run = run
-
-def process_task(tsk):
-
-       m = tsk.master
-       if m.stop:
-               m.out.put(tsk)
-               return
-
-       try:
-               tsk.generator.bld.printout(tsk.display())
-               if tsk.__class__.stat: ret = tsk.__class__.stat(tsk)
-               # actual call to task's run() function
-               else: ret = tsk.call_run()
-       except Exception, e:
-               tsk.err_msg = Utils.ex_stack()
-               tsk.hasrun = EXCEPTION
-
-               # TODO cleanup
-               m.error_handler(tsk)
-               m.out.put(tsk)
-               return
-
-       if ret:
-               tsk.err_code = ret
-               tsk.hasrun = CRASHED
-       else:
-               try:
-                       tsk.post_run()
-               except Utils.WafError:
-                       pass
-               except Exception:
-                       tsk.err_msg = Utils.ex_stack()
-                       tsk.hasrun = EXCEPTION
-               else:
-                       tsk.hasrun = SUCCESS
-       if tsk.hasrun != SUCCESS:
-               m.error_handler(tsk)
-
-       m.out.put(tsk)
-
-class TaskConsumer(threading.Thread):
-       ready = Queue(0)
-       consumers = []
-
-       def __init__(self):
-               threading.Thread.__init__(self)
-               self.setDaemon(1)
-               self.start()
-
-       def run(self):
-               try:
-                       self.loop()
-               except:
-                       pass
-
-       def loop(self):
-               while 1:
-                       tsk = TaskConsumer.ready.get()
-                       process_task(tsk)
-
-class Parallel(object):
-       """
-       keep the consumer threads busy, and avoid consuming cpu cycles
-       when no more tasks can be added (end of the build, etc)
-       """
-       def __init__(self, bld, j=2):
-
-               # number of consumers
-               self.numjobs = j
-
-               self.manager = bld.task_manager
-               self.manager.current_group = 0
-
-               self.total = self.manager.total()
-
-               # tasks waiting to be processed - IMPORTANT
-               self.outstanding = []
-               self.maxjobs = MAXJOBS
-
-               # tasks that are awaiting for another task to complete
-               self.frozen = []
-
-               # tasks returned by the consumers
-               self.out = Queue(0)
-
-               self.count = 0 # tasks not in the producer area
-
-               self.processed = 1 # progress indicator
-
-               self.stop = False # error condition to stop the build
-               self.error = False # error flag
-
-       def get_next(self):
-               "override this method to schedule the tasks in a particular order"
-               if not self.outstanding:
-                       return None
-               return self.outstanding.pop(0)
-
-       def postpone(self, tsk):
-               "override this method to schedule the tasks in a particular order"
-               # TODO consider using a deque instead
-               if random.randint(0, 1):
-                       self.frozen.insert(0, tsk)
-               else:
-                       self.frozen.append(tsk)
-
-       def refill_task_list(self):
-               "called to set the next group of tasks"
-
-               while self.count > self.numjobs + GAP or self.count >= self.maxjobs:
-                       self.get_out()
-
-               while not self.outstanding:
-                       if self.count:
-                               self.get_out()
-
-                       if self.frozen:
-                               self.outstanding += self.frozen
-                               self.frozen = []
-                       elif not self.count:
-                               (jobs, tmp) = self.manager.get_next_set()
-                               if jobs != None: self.maxjobs = jobs
-                               if tmp: self.outstanding += tmp
-                               break
-
-       def get_out(self):
-               "the tasks that are put to execute are all collected using get_out"
-               ret = self.out.get()
-               self.manager.add_finished(ret)
-               if not self.stop and getattr(ret, 'more_tasks', None):
-                       self.outstanding += ret.more_tasks
-                       self.total += len(ret.more_tasks)
-               self.count -= 1
-
-       def error_handler(self, tsk):
-               "by default, errors make the build stop (not thread safe so be careful)"
-               if not Options.options.keep:
-                       self.stop = True
-               self.error = True
-
-       def start(self):
-               "execute the tasks"
-
-               if TaskConsumer.consumers:
-                       # the worker pool is usually loaded lazily (see below)
-                       # in case it is re-used with a different value of numjobs:
-                       while len(TaskConsumer.consumers) < self.numjobs:
-                               TaskConsumer.consumers.append(TaskConsumer())
-
-               while not self.stop:
-
-                       self.refill_task_list()
-
-                       # consider the next task
-                       tsk = self.get_next()
-                       if not tsk:
-                               if self.count:
-                                       # tasks may add new ones after they are run
-                                       continue
-                               else:
-                                       # no tasks to run, no tasks running, time to exit
-                                       break
-
-                       if tsk.hasrun:
-                               # if the task is marked as "run", just skip it
-                               self.processed += 1
-                               self.manager.add_finished(tsk)
-                               continue
-
-                       try:
-                               st = tsk.runnable_status()
-                       except Exception, e:
-                               self.processed += 1
-                               if self.stop and not Options.options.keep:
-                                       tsk.hasrun = SKIPPED
-                                       self.manager.add_finished(tsk)
-                                       continue
-                               self.error_handler(tsk)
-                               self.manager.add_finished(tsk)
-                               tsk.hasrun = EXCEPTION
-                               tsk.err_msg = Utils.ex_stack()
-                               continue
-
-                       if st == ASK_LATER:
-                               self.postpone(tsk)
-                       elif st == SKIP_ME:
-                               self.processed += 1
-                               tsk.hasrun = SKIPPED
-                               self.manager.add_finished(tsk)
-                       else:
-                               # run me: put the task in ready queue
-                               tsk.position = (self.processed, self.total)
-                               self.count += 1
-                               tsk.master = self
-                               self.processed += 1
-
-                               if self.numjobs == 1:
-                                       process_task(tsk)
-                               else:
-                                       TaskConsumer.ready.put(tsk)
-                                       # create the consumer threads only if there is something to consume
-                                       if not TaskConsumer.consumers:
-                                               TaskConsumer.consumers = [TaskConsumer() for i in xrange(self.numjobs)]
-
-               # self.count represents the tasks that have been made available to the consumer threads
-               # collect all the tasks after an error else the message may be incomplete
-               while self.error and self.count:
-                       self.get_out()
-
-               #print loop
-               assert (self.count == 0 or self.stop)
diff --git a/third_party/waf/wafadmin/Scripting.py b/third_party/waf/wafadmin/Scripting.py
deleted file mode 100644 (file)
index 6f61104..0000000
+++ /dev/null
@@ -1,585 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005 (ita)
-
-"Module called for configuring, compiling and installing targets"
-
-import os, sys, shutil, traceback, datetime, inspect, errno
-
-import Utils, Configure, Build, Logs, Options, Environment, Task
-from Logs import error, warn, info
-from Constants import *
-
-g_gz = 'bz2'
-commands = []
-
-def prepare_impl(t, cwd, ver, wafdir):
-       Options.tooldir = [t]
-       Options.launch_dir = cwd
-
-       # some command-line options can be processed immediately
-       if '--version' in sys.argv:
-               opt_obj = Options.Handler()
-               opt_obj.curdir = cwd
-               opt_obj.parse_args()
-               sys.exit(0)
-
-       # now find the wscript file
-       msg1 = 'Waf: Please run waf from a directory containing a file named "%s" or run distclean' % WSCRIPT_FILE
-
-       # in theory projects can be configured in an autotool-like manner:
-       # mkdir build && cd build && ../waf configure && ../waf
-       build_dir_override = None
-       candidate = None
-
-       lst = os.listdir(cwd)
-
-       search_for_candidate = True
-       if WSCRIPT_FILE in lst:
-               candidate = cwd
-
-       elif 'configure' in sys.argv and not WSCRIPT_BUILD_FILE in lst:
-               # autotool-like configuration
-               calldir = os.path.abspath(os.path.dirname(sys.argv[0]))
-               if WSCRIPT_FILE in os.listdir(calldir):
-                       candidate = calldir
-                       search_for_candidate = False
-               else:
-                       error('arg[0] directory does not contain a wscript file')
-                       sys.exit(1)
-               build_dir_override = cwd
-
-       # climb up to find a script if it is not found
-       while search_for_candidate:
-               if len(cwd) <= 3:
-                       break # stop at / or c:
-               dirlst = os.listdir(cwd)
-               if WSCRIPT_FILE in dirlst:
-                       candidate = cwd
-               if 'configure' in sys.argv and candidate:
-                       break
-               if Options.lockfile in dirlst:
-                       env = Environment.Environment()
-                       try:
-                               env.load(os.path.join(cwd, Options.lockfile))
-                       except:
-                               error('could not load %r' % Options.lockfile)
-                       try:
-                               os.stat(env['cwd'])
-                       except:
-                               candidate = cwd
-                       else:
-                               candidate = env['cwd']
-                       break
-               cwd = os.path.dirname(cwd) # climb up
-
-       if not candidate:
-               # check if the user only wanted to display the help
-               if '-h' in sys.argv or '--help' in sys.argv:
-                       warn('No wscript file found: the help message may be incomplete')
-                       opt_obj = Options.Handler()
-                       opt_obj.curdir = cwd
-                       opt_obj.parse_args()
-               else:
-                       error(msg1)
-               sys.exit(0)
-
-       # We have found wscript, but there is no guarantee that it is valid
-       try:
-               os.chdir(candidate)
-       except OSError:
-               raise Utils.WafError("the folder %r is unreadable" % candidate)
-
-       # define the main module containing the functions init, shutdown, ..
-       Utils.set_main_module(os.path.join(candidate, WSCRIPT_FILE))
-
-       if build_dir_override:
-               d = getattr(Utils.g_module, BLDDIR, None)
-               if d:
-                       # test if user has set the blddir in wscript.
-                       msg = ' Overriding build directory %s with %s' % (d, build_dir_override)
-                       warn(msg)
-               Utils.g_module.blddir = build_dir_override
-
-       # bind a few methods and classes by default
-
-       def set_def(obj, name=''):
-               n = name or obj.__name__
-               if not n in Utils.g_module.__dict__:
-                       setattr(Utils.g_module, n, obj)
-
-       for k in [dist, distclean, distcheck, clean, install, uninstall]:
-               set_def(k)
-
-       set_def(Configure.ConfigurationContext, 'configure_context')
-
-       for k in ['build', 'clean', 'install', 'uninstall']:
-               set_def(Build.BuildContext, k + '_context')
-
-       # now parse the options from the user wscript file
-       opt_obj = Options.Handler(Utils.g_module)
-       opt_obj.curdir = candidate
-       try:
-               f = Utils.g_module.set_options
-       except AttributeError:
-               pass
-       else:
-               opt_obj.sub_options([''])
-       opt_obj.parse_args()
-
-       if not 'init' in Utils.g_module.__dict__:
-               Utils.g_module.init = Utils.nada
-       if not 'shutdown' in Utils.g_module.__dict__:
-               Utils.g_module.shutdown = Utils.nada
-
-       main()
-
-def prepare(t, cwd, ver, wafdir):
-       if WAFVERSION != ver:
-               msg = 'Version mismatch: waf %s <> wafadmin %s (wafdir %s)' % (ver, WAFVERSION, wafdir)
-               print('\033[91mError: %s\033[0m' % msg)
-               sys.exit(1)
-
-       #"""
-       try:
-               prepare_impl(t, cwd, ver, wafdir)
-       except Utils.WafError, e:
-               error(str(e))
-               sys.exit(1)
-       except KeyboardInterrupt:
-               Utils.pprint('RED', 'Interrupted')
-               sys.exit(68)
-       """
-       import cProfile, pstats
-       cProfile.runctx("import Scripting; Scripting.prepare_impl(t, cwd, ver, wafdir)", {},
-               {'t': t, 'cwd':cwd, 'ver':ver, 'wafdir':wafdir},
-                'profi.txt')
-       p = pstats.Stats('profi.txt')
-       p.sort_stats('time').print_stats(45)
-       #"""
-
-def main():
-       global commands
-       commands = Options.arg_line[:]
-
-       while commands:
-               x = commands.pop(0)
-
-               ini = datetime.datetime.now()
-               if x == 'configure':
-                       fun = configure
-               elif x == 'build':
-                       fun = build
-               else:
-                       fun = getattr(Utils.g_module, x, None)
-
-               if not fun:
-                       raise Utils.WscriptError('No such command %r' % x)
-
-               ctx = getattr(Utils.g_module, x + '_context', Utils.Context)()
-
-               if x in ['init', 'shutdown', 'dist', 'distclean', 'distcheck']:
-                       # compatibility TODO remove in waf 1.6
-                       try:
-                               fun(ctx)
-                       except TypeError:
-                               fun()
-               else:
-                       fun(ctx)
-
-               ela = ''
-               if not Options.options.progress_bar:
-                       ela = ' (%s)' % Utils.get_elapsed_time(ini)
-
-               if x != 'init' and x != 'shutdown':
-                       info('%r finished successfully%s' % (x, ela))
-
-               if not commands and x != 'shutdown':
-                       commands.append('shutdown')
-
-def configure(conf):
-
-       src = getattr(Options.options, SRCDIR, None)
-       if not src: src = getattr(Utils.g_module, SRCDIR, None)
-       if not src: src = getattr(Utils.g_module, 'top', None)
-       if not src:
-               src = '.'
-               incomplete_src = 1
-       src = os.path.abspath(src)
-
-       bld = getattr(Options.options, BLDDIR, None)
-       if not bld: bld = getattr(Utils.g_module, BLDDIR, None)
-       if not bld: bld = getattr(Utils.g_module, 'out', None)
-       if not bld:
-               bld = 'build'
-               incomplete_bld = 1
-       if bld == '.':
-               raise Utils.WafError('Setting blddir="." may cause distclean problems')
-       bld = os.path.abspath(bld)
-
-       try: os.makedirs(bld)
-       except OSError: pass
-
-       # It is not possible to compile specific targets in the configuration
-       # this may cause configuration errors if autoconfig is set
-       targets = Options.options.compile_targets
-       Options.options.compile_targets = None
-       Options.is_install = False
-
-       conf.srcdir = src
-       conf.blddir = bld
-       conf.post_init()
-
-       if 'incomplete_src' in vars():
-               conf.check_message_1('Setting srcdir to')
-               conf.check_message_2(src)
-       if 'incomplete_bld' in vars():
-               conf.check_message_1('Setting blddir to')
-               conf.check_message_2(bld)
-
-       # calling to main wscript's configure()
-       conf.sub_config([''])
-
-       conf.store()
-
-       # this will write a configure lock so that subsequent builds will
-       # consider the current path as the root directory (see prepare_impl).
-       # to remove: use 'waf distclean'
-       env = Environment.Environment()
-       env[BLDDIR] = bld
-       env[SRCDIR] = src
-       env['argv'] = sys.argv
-       env['commands'] = Options.commands
-       env['options'] = Options.options.__dict__
-
-       # conf.hash & conf.files hold wscript files paths and hash
-       # (used only by Configure.autoconfig)
-       env['hash'] = conf.hash
-       env['files'] = conf.files
-       env['environ'] = dict(conf.environ)
-       env['cwd'] = os.path.split(Utils.g_module.root_path)[0]
-
-       if Utils.g_module.root_path != src:
-               # in case the source dir is somewhere else
-               env.store(os.path.join(src, Options.lockfile))
-
-       env.store(Options.lockfile)
-
-       Options.options.compile_targets = targets
-
-def clean(bld):
-       '''removes the build files'''
-       try:
-               proj = Environment.Environment(Options.lockfile)
-       except IOError:
-               raise Utils.WafError('Nothing to clean (project not configured)')
-
-       bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
-       bld.load_envs()
-
-       bld.is_install = 0 # False
-
-       # read the scripts - and set the path to the wscript path (useful for srcdir='/foo/bar')
-       bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])
-
-       try:
-               bld.clean()
-       finally:
-               bld.save()
-
-def check_configured(bld):
-       if not Configure.autoconfig:
-               return bld
-
-       conf_cls = getattr(Utils.g_module, 'configure_context', Utils.Context)
-       bld_cls = getattr(Utils.g_module, 'build_context', Utils.Context)
-
-       def reconf(proj):
-               back = (Options.commands, Options.options.__dict__, Logs.zones, Logs.verbose)
-
-               Options.commands = proj['commands']
-               Options.options.__dict__ = proj['options']
-               conf = conf_cls()
-               conf.environ = proj['environ']
-               configure(conf)
-
-               (Options.commands, Options.options.__dict__, Logs.zones, Logs.verbose) = back
-
-       try:
-               proj = Environment.Environment(Options.lockfile)
-       except IOError:
-               conf = conf_cls()
-               configure(conf)
-       else:
-               try:
-                       bld = bld_cls()
-                       bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
-                       bld.load_envs()
-               except Utils.WafError:
-                       reconf(proj)
-                       return bld_cls()
-
-       try:
-               proj = Environment.Environment(Options.lockfile)
-       except IOError:
-               raise Utils.WafError('Auto-config: project does not configure (bug)')
-
-       h = 0
-       try:
-               for file in proj['files']:
-                       if file.endswith('configure'):
-                               h = hash((h, Utils.readf(file)))
-                       else:
-                               mod = Utils.load_module(file)
-                               h = hash((h, mod.waf_hash_val))
-       except (OSError, IOError):
-               warn('Reconfiguring the project: a file is unavailable')
-               reconf(proj)
-       else:
-               if (h != proj['hash']):
-                       warn('Reconfiguring the project: the configuration has changed')
-                       reconf(proj)
-
-       return bld_cls()
-
-def install(bld):
-       '''installs the build files'''
-       bld = check_configured(bld)
-
-       Options.commands['install'] = True
-       Options.commands['uninstall'] = False
-       Options.is_install = True
-
-       bld.is_install = INSTALL
-
-       build_impl(bld)
-       bld.install()
-
-def uninstall(bld):
-       '''removes the installed files'''
-       Options.commands['install'] = False
-       Options.commands['uninstall'] = True
-       Options.is_install = True
-
-       bld.is_install = UNINSTALL
-
-       try:
-               def runnable_status(self):
-                       return SKIP_ME
-               setattr(Task.Task, 'runnable_status_back', Task.Task.runnable_status)
-               setattr(Task.Task, 'runnable_status', runnable_status)
-
-               build_impl(bld)
-               bld.install()
-       finally:
-               setattr(Task.Task, 'runnable_status', Task.Task.runnable_status_back)
-
-def build(bld):
-       bld = check_configured(bld)
-
-       Options.commands['install'] = False
-       Options.commands['uninstall'] = False
-       Options.is_install = False
-
-       bld.is_install = 0 # False
-
-       return build_impl(bld)
-
-def build_impl(bld):
-       # compile the project and/or install the files
-       try:
-               proj = Environment.Environment(Options.lockfile)
-       except IOError:
-               raise Utils.WafError("Project not configured (run 'waf configure' first)")
-
-       bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
-       bld.load_envs()
-
-       info("Waf: Entering directory `%s'" % bld.bldnode.abspath())
-       bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])
-
-       # execute something immediately before the build starts
-       bld.pre_build()
-
-       try:
-               bld.compile()
-       finally:
-               if Options.options.progress_bar: print('')
-               info("Waf: Leaving directory `%s'" % bld.bldnode.abspath())
-
-       # execute something immediately after a successful build
-       bld.post_build()
-
-       bld.install()
-
-excludes = '.bzr .bzrignore .git .gitignore .svn CVS .cvsignore .arch-ids {arch} SCCS BitKeeper .hg _MTN _darcs Makefile Makefile.in config.log .gitattributes .hgignore .hgtags'.split()
-dist_exts = '~ .rej .orig .pyc .pyo .bak .tar.bz2 tar.gz .zip .swp'.split()
-def dont_dist(name, src, build_dir):
-       global excludes, dist_exts
-
-       if (name.startswith(',,')
-               or name.startswith('++')
-               or name.startswith('.waf')
-               or (src == '.' and name == Options.lockfile)
-               or name in excludes
-               or name == build_dir
-               ):
-               return True
-
-       for ext in dist_exts:
-               if name.endswith(ext):
-                       return True
-
-       return False
-
-# like shutil.copytree
-# exclude files and to raise exceptions immediately
-def copytree(src, dst, build_dir):
-       names = os.listdir(src)
-       os.makedirs(dst)
-       for name in names:
-               srcname = os.path.join(src, name)
-               dstname = os.path.join(dst, name)
-
-               if dont_dist(name, src, build_dir):
-                       continue
-
-               if os.path.isdir(srcname):
-                       copytree(srcname, dstname, build_dir)
-               else:
-                       shutil.copy2(srcname, dstname)
-
-# TODO in waf 1.6, change this method if "srcdir == blddir" is allowed
-def distclean(ctx=None):
-       '''removes the build directory'''
-       global commands
-       lst = os.listdir('.')
-       for f in lst:
-               if f == Options.lockfile:
-                       try:
-                               proj = Environment.Environment(f)
-                       except:
-                               Logs.warn('could not read %r' % f)
-                               continue
-
-                       try:
-                               shutil.rmtree(proj[BLDDIR])
-                       except IOError:
-                               pass
-                       except OSError, e:
-                               if e.errno != errno.ENOENT:
-                                       Logs.warn('project %r cannot be removed' % proj[BLDDIR])
-
-                       try:
-                               os.remove(f)
-                       except OSError, e:
-                               if e.errno != errno.ENOENT:
-                                       Logs.warn('file %r cannot be removed' % f)
-
-               # remove the local waf cache
-               if not commands and f.startswith('.waf'):
-                       shutil.rmtree(f, ignore_errors=True)
-
-# FIXME waf 1.6 a unique ctx parameter, and remove the optional appname and version
-def dist(appname='', version=''):
-       '''makes a tarball for redistributing the sources'''
-       # return return (distdirname, tarballname)
-       import tarfile
-
-       if not appname: appname = Utils.g_module.APPNAME
-       if not version: version = Utils.g_module.VERSION
-
-       tmp_folder = appname + '-' + version
-       if g_gz in ['gz', 'bz2']:
-               arch_name = tmp_folder + '.tar.' + g_gz
-       else:
-               arch_name = tmp_folder + '.' + 'zip'
-
-       # remove the previous dir
-       try:
-               shutil.rmtree(tmp_folder)
-       except (OSError, IOError):
-               pass
-
-       # remove the previous archive
-       try:
-               os.remove(arch_name)
-       except (OSError, IOError):
-               pass
-
-       # copy the files into the temporary folder
-       blddir = getattr(Utils.g_module, BLDDIR, None)
-       if not blddir:
-               blddir = getattr(Utils.g_module, 'out', None)
-       copytree('.', tmp_folder, blddir)
-
-       # undocumented hook for additional cleanup
-       dist_hook = getattr(Utils.g_module, 'dist_hook', None)
-       if dist_hook:
-               back = os.getcwd()
-               os.chdir(tmp_folder)
-               try:
-                       dist_hook()
-               finally:
-                       # go back to the root directory
-                       os.chdir(back)
-
-       if g_gz in ['gz', 'bz2']:
-               tar = tarfile.open(arch_name, 'w:' + g_gz)
-               tar.add(tmp_folder)
-               tar.close()
-       else:
-               Utils.zip_folder(tmp_folder, arch_name, tmp_folder)
-
-       try: from hashlib import sha1 as sha
-       except ImportError: from sha import sha
-       try:
-               digest = " (sha=%r)" % sha(Utils.readf(arch_name)).hexdigest()
-       except:
-               digest = ''
-
-       info('New archive created: %s%s' % (arch_name, digest))
-
-       if os.path.exists(tmp_folder): shutil.rmtree(tmp_folder)
-       return arch_name
-
-# FIXME waf 1.6 a unique ctx parameter, and remove the optional appname and version
-def distcheck(appname='', version='', subdir=''):
-       '''checks if the sources compile (tarball from 'dist')'''
-       import tempfile, tarfile
-
-       if not appname: appname = Utils.g_module.APPNAME
-       if not version: version = Utils.g_module.VERSION
-
-       waf = os.path.abspath(sys.argv[0])
-       tarball = dist(appname, version)
-
-       path = appname + '-' + version
-
-       # remove any previous instance
-       if os.path.exists(path):
-               shutil.rmtree(path)
-
-       t = tarfile.open(tarball)
-       for x in t: t.extract(x)
-       t.close()
-
-       # build_path is the directory for the waf invocation
-       if subdir:
-               build_path = os.path.join(path, subdir)
-       else:
-               build_path = path
-
-       instdir = tempfile.mkdtemp('.inst', '%s-%s' % (appname, version))
-       ret = Utils.pproc.Popen([waf, 'configure', 'build', 'install', 'uninstall', '--destdir=' + instdir], cwd=build_path).wait()
-       if ret:
-               raise Utils.WafError('distcheck failed with code %i' % ret)
-
-       if os.path.exists(instdir):
-               raise Utils.WafError('distcheck succeeded, but files were left in %s' % instdir)
-
-       shutil.rmtree(path)
-
-# FIXME remove in Waf 1.6 (kept for compatibility)
-def add_subdir(dir, bld):
-       bld.recurse(dir, 'build')
diff --git a/third_party/waf/wafadmin/Task.py b/third_party/waf/wafadmin/Task.py
deleted file mode 100644 (file)
index 59d1020..0000000
+++ /dev/null
@@ -1,1199 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005-2008 (ita)
-
-"""
-Running tasks in parallel is a simple problem, but in practice it is more complicated:
-* dependencies discovered during the build (dynamic task creation)
-* dependencies discovered after files are compiled
-* the amount of tasks and dependencies (graph size) can be huge
-
-This is why the dependency management is split on three different levels:
-1. groups of tasks that run all after another group of tasks
-2. groups of tasks that can be run in parallel
-3. tasks that can run in parallel, but with possible unknown ad-hoc dependencies
-
-The point #1 represents a strict sequential order between groups of tasks, for example a compiler is produced
-and used to compile the rest, whereas #2 and #3 represent partial order constraints where #2 applies to the kind of task
-and #3 applies to the task instances.
-
-#1 is held by the task manager: ordered list of TaskGroups (see bld.add_group)
-#2 is held by the task groups and the task types: precedence after/before (topological sort),
-   and the constraints extracted from file extensions
-#3 is held by the tasks individually (attribute run_after),
-   and the scheduler (Runner.py) use Task::runnable_status to reorder the tasks
-
---
-
-To try, use something like this in your code:
-import Constants, Task
-Task.algotype = Constants.MAXPARALLEL
-
---
-
-There are two concepts with the tasks (individual units of change):
-* dependency (if 1 is recompiled, recompile 2)
-* order (run 2 after 1)
-
-example 1: if t1 depends on t2 and t2 depends on t3 it is not necessary to make t1 depend on t3 (dependency is transitive)
-example 2: if t1 depends on a node produced by t2, it is not immediately obvious that t1 must run after t2 (order is not obvious)
-
-The role of the Task Manager is to give the tasks in order (groups of task that may be run in parallel one after the other)
-
-"""
-
-import os, shutil, sys, re, random, datetime, tempfile, shlex
-from Utils import md5
-import Build, Runner, Utils, Node, Logs, Options
-from Logs import debug, warn, error
-from Constants import *
-
-algotype = NORMAL
-#algotype = JOBCONTROL
-#algotype = MAXPARALLEL
-
-COMPILE_TEMPLATE_SHELL = '''
-def f(task):
-       env = task.env
-       wd = getattr(task, 'cwd', None)
-       p = env.get_flat
-       cmd = \'\'\' %s \'\'\' % s
-       return task.exec_command(cmd, cwd=wd)
-'''
-
-COMPILE_TEMPLATE_NOSHELL = '''
-def f(task):
-       env = task.env
-       wd = getattr(task, 'cwd', None)
-       def to_list(xx):
-               if isinstance(xx, str): return [xx]
-               return xx
-       lst = []
-       %s
-       lst = [x for x in lst if x]
-       return task.exec_command(lst, cwd=wd)
-'''
-
-
-"""
-Enable different kind of dependency algorithms:
-1 make groups: first compile all cpps and then compile all links (NORMAL)
-2 parallelize all (each link task run after its dependencies) (MAXPARALLEL)
-3 like 1 but provide additional constraints for the parallelization (MAXJOBS)
-
-In theory 1. will be faster than 2 for waf, but might be slower for builds
-The scheme 2 will not allow for running tasks one by one so it can cause disk thrashing on huge builds
-"""
-
-file_deps = Utils.nada
-"""
-Additional dependency pre-check may be added by replacing the function file_deps.
-e.g. extract_outputs, extract_deps below.
-"""
-
-class TaskManager(object):
-       """The manager is attached to the build object, it holds a list of TaskGroup"""
-       def __init__(self):
-               self.groups = []
-               self.tasks_done = []
-               self.current_group = 0
-               self.groups_names = {}
-
-       def group_name(self, g):
-               """name for the group g (utility)"""
-               if not isinstance(g, TaskGroup):
-                       g = self.groups[g]
-               for x in self.groups_names:
-                       if id(self.groups_names[x]) == id(g):
-                               return x
-               return ''
-
-       def group_idx(self, tg):
-               """group the task generator tg is in"""
-               se = id(tg)
-               for i in range(len(self.groups)):
-                       g = self.groups[i]
-                       for t in g.tasks_gen:
-                               if id(t) == se:
-                                       return i
-               return None
-
-       def get_next_set(self):
-               """return the next set of tasks to execute
-               the first parameter is the maximum amount of parallelization that may occur"""
-               ret = None
-               while not ret and self.current_group < len(self.groups):
-                       ret = self.groups[self.current_group].get_next_set()
-                       if ret: return ret
-                       else:
-                               self.groups[self.current_group].process_install()
-                               self.current_group += 1
-               return (None, None)
-
-       def add_group(self, name=None, set=True):
-               #if self.groups and not self.groups[0].tasks:
-               #       error('add_group: an empty group is already present')
-               g = TaskGroup()
-
-               if name and name in self.groups_names:
-                       error('add_group: name %s already present' % name)
-               self.groups_names[name] = g
-               self.groups.append(g)
-               if set:
-                       self.current_group = len(self.groups) - 1
-
-       def set_group(self, idx):
-               if isinstance(idx, str):
-                       g = self.groups_names[idx]
-                       for x in xrange(len(self.groups)):
-                               if id(g) == id(self.groups[x]):
-                                       self.current_group = x
-               else:
-                       self.current_group = idx
-
-       def add_task_gen(self, tgen):
-               if not self.groups: self.add_group()
-               self.groups[self.current_group].tasks_gen.append(tgen)
-
-       def add_task(self, task):
-               if not self.groups: self.add_group()
-               self.groups[self.current_group].tasks.append(task)
-
-       def total(self):
-               total = 0
-               if not self.groups: return 0
-               for group in self.groups:
-                       total += len(group.tasks)
-               return total
-
-       def add_finished(self, tsk):
-               self.tasks_done.append(tsk)
-               bld = tsk.generator.bld
-               if bld.is_install:
-                       f = None
-                       if 'install' in tsk.__dict__:
-                               f = tsk.__dict__['install']
-                               # install=0 to prevent installation
-                               if f: f(tsk)
-                       else:
-                               tsk.install()
-
-class TaskGroup(object):
-       "the compilation of one group does not begin until the previous group has finished (in the manager)"
-       def __init__(self):
-               self.tasks = [] # this list will be consumed
-               self.tasks_gen = []
-
-               self.cstr_groups = Utils.DefaultDict(list) # tasks having equivalent constraints
-               self.cstr_order = Utils.DefaultDict(set) # partial order between the cstr groups
-               self.temp_tasks = [] # tasks put on hold
-               self.ready = 0
-               self.post_funs = []
-
-       def reset(self):
-               "clears the state of the object (put back the tasks into self.tasks)"
-               for x in self.cstr_groups:
-                       self.tasks += self.cstr_groups[x]
-               self.tasks = self.temp_tasks + self.tasks
-               self.temp_tasks = []
-               self.cstr_groups = Utils.DefaultDict(list)
-               self.cstr_order = Utils.DefaultDict(set)
-               self.ready = 0
-
-       def process_install(self):
-               for (f, k, kw) in self.post_funs:
-                       f(*k, **kw)
-
-       def prepare(self):
-               "prepare the scheduling"
-               self.ready = 1
-               file_deps(self.tasks)
-               self.make_cstr_groups()
-               self.extract_constraints()
-
-       def get_next_set(self):
-               "next list of tasks to execute using max job settings, returns (maxjobs, task_list)"
-               global algotype
-               if algotype == NORMAL:
-                       tasks = self.tasks_in_parallel()
-                       maxj = MAXJOBS
-               elif algotype == JOBCONTROL:
-                       (maxj, tasks) = self.tasks_by_max_jobs()
-               elif algotype == MAXPARALLEL:
-                       tasks = self.tasks_with_inner_constraints()
-                       maxj = MAXJOBS
-               else:
-                       raise Utils.WafError("unknown algorithm type %s" % (algotype))
-
-               if not tasks: return ()
-               return (maxj, tasks)
-
-       def make_cstr_groups(self):
-               "unite the tasks that have similar constraints"
-               self.cstr_groups = Utils.DefaultDict(list)
-               for x in self.tasks:
-                       h = x.hash_constraints()
-                       self.cstr_groups[h].append(x)
-
-       def set_order(self, a, b):
-               self.cstr_order[a].add(b)
-
-       def compare_exts(self, t1, t2):
-               "extension production"
-               x = "ext_in"
-               y = "ext_out"
-               in_ = t1.attr(x, ())
-               out_ = t2.attr(y, ())
-               for k in in_:
-                       if k in out_:
-                               return -1
-               in_ = t2.attr(x, ())
-               out_ = t1.attr(y, ())
-               for k in in_:
-                       if k in out_:
-                               return 1
-               return 0
-
-       def compare_partial(self, t1, t2):
-               "partial relations after/before"
-               m = "after"
-               n = "before"
-               name = t2.__class__.__name__
-               if name in Utils.to_list(t1.attr(m, ())): return -1
-               elif name in Utils.to_list(t1.attr(n, ())): return 1
-               name = t1.__class__.__name__
-               if name in Utils.to_list(t2.attr(m, ())): return 1
-               elif name in Utils.to_list(t2.attr(n, ())): return -1
-               return 0
-
-       def extract_constraints(self):
-               "extract the parallelization constraints from the tasks with different constraints"
-               keys = self.cstr_groups.keys()
-               max = len(keys)
-               # hopefully the length of this list is short
-               for i in xrange(max):
-                       t1 = self.cstr_groups[keys[i]][0]
-                       for j in xrange(i + 1, max):
-                               t2 = self.cstr_groups[keys[j]][0]
-
-                               # add the constraints based on the comparisons
-                               val = (self.compare_exts(t1, t2)
-                                       or self.compare_partial(t1, t2)
-                                       )
-                               if val > 0:
-                                       self.set_order(keys[i], keys[j])
-                               elif val < 0:
-                                       self.set_order(keys[j], keys[i])
-
-       def tasks_in_parallel(self):
-               "(NORMAL) next list of tasks that may be executed in parallel"
-
-               if not self.ready: self.prepare()
-
-               keys = self.cstr_groups.keys()
-
-               unconnected = []
-               remainder = []
-
-               for u in keys:
-                       for k in self.cstr_order.values():
-                               if u in k:
-                                       remainder.append(u)
-                                       break
-                       else:
-                               unconnected.append(u)
-
-               toreturn = []
-               for y in unconnected:
-                       toreturn.extend(self.cstr_groups[y])
-
-               # remove stuff only after
-               for y in unconnected:
-                               try: self.cstr_order.__delitem__(y)
-                               except KeyError: pass
-                               self.cstr_groups.__delitem__(y)
-
-               if not toreturn and remainder:
-                       raise Utils.WafError("circular order constraint detected %r" % remainder)
-
-               return toreturn
-
-       def tasks_by_max_jobs(self):
-               "(JOBCONTROL) returns the tasks that can run in parallel with the max amount of jobs"
-               if not self.ready: self.prepare()
-               if not self.temp_tasks: self.temp_tasks = self.tasks_in_parallel()
-               if not self.temp_tasks: return (None, None)
-
-               maxjobs = MAXJOBS
-               ret = []
-               remaining = []
-               for t in self.temp_tasks:
-                       m = getattr(t, "maxjobs", getattr(self.__class__, "maxjobs", MAXJOBS))
-                       if m > maxjobs:
-                               remaining.append(t)
-                       elif m < maxjobs:
-                               remaining += ret
-                               ret = [t]
-                               maxjobs = m
-                       else:
-                               ret.append(t)
-               self.temp_tasks = remaining
-               return (maxjobs, ret)
-
-       def tasks_with_inner_constraints(self):
-               """(MAXPARALLEL) returns all tasks in this group, but add the constraints on each task instance
-               as an optimization, it might be desirable to discard the tasks which do not have to run"""
-               if not self.ready: self.prepare()
-
-               if getattr(self, "done", None): return None
-
-               for p in self.cstr_order:
-                       for v in self.cstr_order[p]:
-                               for m in self.cstr_groups[p]:
-                                       for n in self.cstr_groups[v]:
-                                               n.set_run_after(m)
-               self.cstr_order = Utils.DefaultDict(set)
-               self.cstr_groups = Utils.DefaultDict(list)
-               self.done = 1
-               return self.tasks[:] # make a copy
-
-class store_task_type(type):
-       "store the task types that have a name ending in _task into a map (remember the existing task types)"
-       def __init__(cls, name, bases, dict):
-               super(store_task_type, cls).__init__(name, bases, dict)
-               name = cls.__name__
-
-               if name.endswith('_task'):
-                       name = name.replace('_task', '')
-               if name != 'TaskBase':
-                       TaskBase.classes[name] = cls
-
-class TaskBase(object):
-       """Base class for all Waf tasks
-
-       The most important methods are (by usual order of call):
-       1 runnable_status: ask the task if it should be run, skipped, or if we have to ask later
-       2 __str__: string to display to the user
-       3 run: execute the task
-       4 post_run: after the task is run, update the cache about the task
-
-       This class should be seen as an interface, it provides the very minimum necessary for the scheduler
-       so it does not do much.
-
-       For illustration purposes, TaskBase instances try to execute self.fun (if provided)
-       """
-
-       __metaclass__ = store_task_type
-
-       color = "GREEN"
-       maxjobs = MAXJOBS
-       classes = {}
-       stat = None
-
-       def __init__(self, *k, **kw):
-               self.hasrun = NOT_RUN
-
-               try:
-                       self.generator = kw['generator']
-               except KeyError:
-                       self.generator = self
-                       self.bld = Build.bld
-
-               if kw.get('normal', 1):
-                       self.generator.bld.task_manager.add_task(self)
-
-       def __repr__(self):
-               "used for debugging"
-               return '\n\t{task: %s %s}' % (self.__class__.__name__, str(getattr(self, "fun", "")))
-
-       def __str__(self):
-               "string to display to the user"
-               if hasattr(self, 'fun'):
-                       return 'executing: %s\n' % self.fun.__name__
-               return self.__class__.__name__ + '\n'
-
-       def exec_command(self, *k, **kw):
-               "use this for executing commands from tasks"
-               # TODO in waf 1.6, eliminate bld.exec_command, and move the cwd processing to here
-               if self.env['env']:
-                       kw['env'] = self.env['env']
-               return self.generator.bld.exec_command(*k, **kw)
-
-       def runnable_status(self):
-               "RUN_ME SKIP_ME or ASK_LATER"
-               return RUN_ME
-
-       def can_retrieve_cache(self):
-               return False
-
-       def call_run(self):
-               if self.can_retrieve_cache():
-                       return 0
-               return self.run()
-
-       def run(self):
-               "called if the task must run"
-               if hasattr(self, 'fun'):
-                       return self.fun(self)
-               return 0
-
-       def post_run(self):
-               "update the dependency tree (node stats)"
-               pass
-
-       def display(self):
-               "print either the description (using __str__) or the progress bar or the ide output"
-               col1 = Logs.colors(self.color)
-               col2 = Logs.colors.NORMAL
-
-               if Options.options.progress_bar == 1:
-                       return self.generator.bld.progress_line(self.position[0], self.position[1], col1, col2)
-
-               if Options.options.progress_bar == 2:
-                       ela = Utils.get_elapsed_time(self.generator.bld.ini)
-                       try:
-                               ins  = ','.join([n.name for n in self.inputs])
-                       except AttributeError:
-                               ins = ''
-                       try:
-                               outs = ','.join([n.name for n in self.outputs])
-                       except AttributeError:
-                               outs = ''
-                       return '|Total %s|Current %s|Inputs %s|Outputs %s|Time %s|\n' % (self.position[1], self.position[0], ins, outs, ela)
-
-               total = self.position[1]
-               n = len(str(total))
-               fs = '[%%%dd/%%%dd] %%s%%s%%s' % (n, n)
-               return fs % (self.position[0], self.position[1], col1, str(self), col2)
-
-       def attr(self, att, default=None):
-               "retrieve an attribute from the instance or from the class (microoptimization here)"
-               ret = getattr(self, att, self)
-               if ret is self: return getattr(self.__class__, att, default)
-               return ret
-
-       def hash_constraints(self):
-               "identify a task type for all the constraints relevant for the scheduler: precedence, file production"
-               a = self.attr
-               sum = hash((self.__class__.__name__,
-                       str(a('before', '')),
-                       str(a('after', '')),
-                       str(a('ext_in', '')),
-                       str(a('ext_out', '')),
-                       self.__class__.maxjobs))
-               return sum
-
-       def format_error(self):
-               "error message to display to the user (when a build fails)"
-               if getattr(self, "err_msg", None):
-                       return self.err_msg
-               elif self.hasrun == CRASHED:
-                       try:
-                               return " -> task failed (err #%d): %r" % (self.err_code, self)
-                       except AttributeError:
-                               return " -> task failed: %r" % self
-               elif self.hasrun == MISSING:
-                       return " -> missing files: %r" % self
-               else:
-                       return ''
-
-       def install(self):
-               """
-               installation is performed by looking at the task attributes:
-               * install_path: installation path like "${PREFIX}/bin"
-               * filename: install the first node in the outputs as a file with a particular name, be certain to give os.sep
-               * chmod: permissions
-               """
-               bld = self.generator.bld
-               d = self.attr('install')
-
-               if self.attr('install_path'):
-                       lst = [a.relpath_gen(bld.srcnode) for a in self.outputs]
-                       perm = self.attr('chmod', O644)
-                       if self.attr('src'):
-                               # if src is given, install the sources too
-                               lst += [a.relpath_gen(bld.srcnode) for a in self.inputs]
-                       if self.attr('filename'):
-                               dir = self.install_path.rstrip(os.sep) + os.sep + self.attr('filename')
-                               bld.install_as(dir, lst[0], self.env, perm)
-                       else:
-                               bld.install_files(self.install_path, lst, self.env, perm)
-
-class Task(TaskBase):
-       """The parent class is quite limited, in this version:
-       * file system interaction: input and output nodes
-       * persistence: do not re-execute tasks that have already run
-       * caching: same files can be saved and retrieved from a cache directory
-       * dependencies:
-               implicit, like .c files depending on .h files
-               explicit, like the input nodes or the dep_nodes
-               environment variables, like the CXXFLAGS in self.env
-       """
-       vars = []
-       def __init__(self, env, **kw):
-               TaskBase.__init__(self, **kw)
-               self.env = env
-
-               # inputs and outputs are nodes
-               # use setters when possible
-               self.inputs  = []
-               self.outputs = []
-
-               self.dep_nodes = []
-               self.run_after = []
-
-               # Additionally, you may define the following
-               #self.dep_vars  = 'PREFIX DATADIR'
-
-       def __str__(self):
-               "string to display to the user"
-               env = self.env
-               src_str = ' '.join([a.nice_path(env) for a in self.inputs])
-               tgt_str = ' '.join([a.nice_path(env) for a in self.outputs])
-               if self.outputs: sep = ' -> '
-               else: sep = ''
-               return '%s: %s%s%s\n' % (self.__class__.__name__.replace('_task', ''), src_str, sep, tgt_str)
-
-       def __repr__(self):
-               return "".join(['\n\t{task: ', self.__class__.__name__, " ", ",".join([x.name for x in self.inputs]), " -> ", ",".join([x.name for x in self.outputs]), '}'])
-
-       def unique_id(self):
-               "get a unique id: hash the node paths, the variant, the class, the function"
-               try:
-                       return self.uid
-               except AttributeError:
-                       "this is not a real hot zone, but we want to avoid surprizes here"
-                       m = md5()
-                       up = m.update
-                       up(self.__class__.__name__)
-                       up(self.env.variant())
-                       p = None
-                       for x in self.inputs + self.outputs:
-                               if p != x.parent.id:
-                                       p = x.parent.id
-                                       up(x.parent.abspath())
-                               up(x.name)
-                       self.uid = m.digest()
-                       return self.uid
-
-       def set_inputs(self, inp):
-               if isinstance(inp, list): self.inputs += inp
-               else: self.inputs.append(inp)
-
-       def set_outputs(self, out):
-               if isinstance(out, list): self.outputs += out
-               else: self.outputs.append(out)
-
-       def set_run_after(self, task):
-               "set (scheduler) order on another task"
-               # TODO: handle list or object
-               assert isinstance(task, TaskBase)
-               self.run_after.append(task)
-
-       def add_file_dependency(self, filename):
-               "TODO user-provided file dependencies"
-               node = self.generator.bld.path.find_resource(filename)
-               self.dep_nodes.append(node)
-
-       def signature(self):
-               # compute the result one time, and suppose the scan_signature will give the good result
-               try: return self.cache_sig[0]
-               except AttributeError: pass
-
-               self.m = md5()
-
-               # explicit deps
-               exp_sig = self.sig_explicit_deps()
-
-               # env vars
-               var_sig = self.sig_vars()
-
-               # implicit deps
-
-               imp_sig = SIG_NIL
-               if self.scan:
-                       try:
-                               imp_sig = self.sig_implicit_deps()
-                       except ValueError:
-                               return self.signature()
-
-               # we now have the signature (first element) and the details (for debugging)
-               ret = self.m.digest()
-               self.cache_sig = (ret, exp_sig, imp_sig, var_sig)
-               return ret
-
-       def runnable_status(self):
-               "SKIP_ME RUN_ME or ASK_LATER"
-               #return 0 # benchmarking
-
-               if self.inputs and (not self.outputs):
-                       if not getattr(self.__class__, 'quiet', None):
-                               warn("invalid task (no inputs OR outputs): override in a Task subclass or set the attribute 'quiet' %r" % self)
-
-               for t in self.run_after:
-                       if not t.hasrun:
-                               return ASK_LATER
-
-               env = self.env
-               bld = self.generator.bld
-
-               # first compute the signature
-               new_sig = self.signature()
-
-               # compare the signature to a signature computed previously
-               key = self.unique_id()
-               try:
-                       prev_sig = bld.task_sigs[key][0]
-               except KeyError:
-                       debug("task: task %r must run as it was never run before or the task code changed", self)
-                       return RUN_ME
-
-               # compare the signatures of the outputs
-               for node in self.outputs:
-                       variant = node.variant(env)
-                       try:
-                               if bld.node_sigs[variant][node.id] != new_sig:
-                                       return RUN_ME
-                       except KeyError:
-                               debug("task: task %r must run as the output nodes do not exist", self)
-                               return RUN_ME
-
-               # debug if asked to
-               if Logs.verbose: self.debug_why(bld.task_sigs[key])
-
-               if new_sig != prev_sig:
-                       return RUN_ME
-               return SKIP_ME
-
-       def post_run(self):
-               "called after a successful task run"
-               bld = self.generator.bld
-               env = self.env
-               sig = self.signature()
-               ssig = sig.encode('hex')
-
-               variant = env.variant()
-               for node in self.outputs:
-                       # check if the node exists ..
-                       try:
-                               os.stat(node.abspath(env))
-                       except OSError:
-                               self.hasrun = MISSING
-                               self.err_msg = '-> missing file: %r' % node.abspath(env)
-                               raise Utils.WafError
-
-                       # important, store the signature for the next run
-                       bld.node_sigs[variant][node.id] = sig
-               bld.task_sigs[self.unique_id()] = self.cache_sig
-
-               # file caching, if possible
-               # try to avoid data corruption as much as possible
-               if not Options.cache_global or Options.options.nocache or not self.outputs:
-                       return None
-
-               if getattr(self, 'cached', None):
-                       return None
-
-               dname = os.path.join(Options.cache_global, ssig)
-               tmpdir = tempfile.mkdtemp(prefix=Options.cache_global + os.sep + 'waf')
-
-               try:
-                       shutil.rmtree(dname)
-               except:
-                       pass
-
-               try:
-                       i = 0
-                       for node in self.outputs:
-                               variant = node.variant(env)
-                               dest = os.path.join(tmpdir, str(i) + node.name)
-                               shutil.copy2(node.abspath(env), dest)
-                               i += 1
-               except (OSError, IOError):
-                       try:
-                               shutil.rmtree(tmpdir)
-                       except:
-                               pass
-               else:
-                       try:
-                               os.rename(tmpdir, dname)
-                       except OSError:
-                               try:
-                                       shutil.rmtree(tmpdir)
-                               except:
-                                       pass
-                       else:
-                               try:
-                                       os.chmod(dname, O755)
-                               except:
-                                       pass
-
-       def can_retrieve_cache(self):
-               """
-               Retrieve build nodes from the cache
-               update the file timestamps to help cleaning the least used entries from the cache
-               additionally, set an attribute 'cached' to avoid re-creating the same cache files
-
-               suppose there are files in cache/dir1/file1 and cache/dir2/file2
-               first, read the timestamp of dir1
-               then try to copy the files
-               then look at the timestamp again, if it has changed, the data may have been corrupt (cache update by another process)
-               should an exception occur, ignore the data
-               """
-               if not Options.cache_global or Options.options.nocache or not self.outputs:
-                       return None
-
-               env = self.env
-               sig = self.signature()
-               ssig = sig.encode('hex')
-
-               # first try to access the cache folder for the task
-               dname = os.path.join(Options.cache_global, ssig)
-               try:
-                       t1 = os.stat(dname).st_mtime
-               except OSError:
-                       return None
-
-               i = 0
-               for node in self.outputs:
-                       variant = node.variant(env)
-
-                       orig = os.path.join(dname, str(i) + node.name)
-                       try:
-                               shutil.copy2(orig, node.abspath(env))
-                               # mark the cache file as used recently (modified)
-                               os.utime(orig, None)
-                       except (OSError, IOError):
-                               debug('task: failed retrieving file')
-                               return None
-                       i += 1
-
-               # is it the same folder?
-               try:
-                       t2 = os.stat(dname).st_mtime
-               except OSError:
-                       return None
-
-               if t1 != t2:
-                       return None
-
-               for node in self.outputs:
-                       self.generator.bld.node_sigs[variant][node.id] = sig
-                       if Options.options.progress_bar < 1:
-                               self.generator.bld.printout('restoring from cache %r\n' % node.bldpath(env))
-
-               self.cached = True
-               return 1
-
-       def debug_why(self, old_sigs):
-               "explains why a task is run"
-
-               new_sigs = self.cache_sig
-               def v(x):
-                       return x.encode('hex')
-
-               debug("Task %r", self)
-               msgs = ['Task must run', '* Source file or manual dependency', '* Implicit dependency', '* Environment variable']
-               tmp = 'task: -> %s: %s %s'
-               for x in xrange(len(msgs)):
-                       if (new_sigs[x] != old_sigs[x]):
-                               debug(tmp, msgs[x], v(old_sigs[x]), v(new_sigs[x]))
-
-       def sig_explicit_deps(self):
-               bld = self.generator.bld
-               up = self.m.update
-
-               # the inputs
-               for x in self.inputs + getattr(self, 'dep_nodes', []):
-                       if not x.parent.id in bld.cache_scanned_folders:
-                               bld.rescan(x.parent)
-
-                       variant = x.variant(self.env)
-                       try:
-                               up(bld.node_sigs[variant][x.id])
-                       except KeyError:
-                               raise Utils.WafError('Missing node signature for %r (required by %r)' % (x, self))
-
-               # manual dependencies, they can slow down the builds
-               if bld.deps_man:
-                       additional_deps = bld.deps_man
-                       for x in self.inputs + self.outputs:
-                               try:
-                                       d = additional_deps[x.id]
-                               except KeyError:
-                                       continue
-
-                               for v in d:
-                                       if isinstance(v, Node.Node):
-                                               bld.rescan(v.parent)
-                                               variant = v.variant(self.env)
-                                               try:
-                                                       v = bld.node_sigs[variant][v.id]
-                                               except KeyError:
-                                                       raise Utils.WafError('Missing node signature for %r (required by %r)' % (v, self))
-                                       elif hasattr(v, '__call__'):
-                                               v = v() # dependency is a function, call it
-                                       up(v)
-
-               for x in self.dep_nodes:
-                       v = bld.node_sigs[x.variant(self.env)][x.id]
-                       up(v)
-
-               return self.m.digest()
-
-       def sig_vars(self):
-               bld = self.generator.bld
-               env = self.env
-
-               # dependencies on the environment vars
-               act_sig = bld.hash_env_vars(env, self.__class__.vars)
-               self.m.update(act_sig)
-
-               # additional variable dependencies, if provided
-               dep_vars = getattr(self, 'dep_vars', None)
-               if dep_vars:
-                       self.m.update(bld.hash_env_vars(env, dep_vars))
-
-               return self.m.digest()
-
-       #def scan(self, node):
-       #       """this method returns a tuple containing:
-       #       * a list of nodes corresponding to real files
-       #       * a list of names for files not found in path_lst
-       #       the input parameters may have more parameters that the ones used below
-       #       """
-       #       return ((), ())
-       scan = None
-
-       # compute the signature, recompute it if there is no match in the cache
-       def sig_implicit_deps(self):
-               "the signature obtained may not be the one if the files have changed, we do it in two steps"
-
-               bld = self.generator.bld
-
-               # get the task signatures from previous runs
-               key = self.unique_id()
-               prev_sigs = bld.task_sigs.get(key, ())
-               if prev_sigs:
-                       try:
-                               # for issue #379
-                               if prev_sigs[2] == self.compute_sig_implicit_deps():
-                                       return prev_sigs[2]
-                       except (KeyError, OSError):
-                               pass
-                       del bld.task_sigs[key]
-                       raise ValueError('rescan')
-
-               # no previous run or the signature of the dependencies has changed, rescan the dependencies
-               (nodes, names) = self.scan()
-               if Logs.verbose:
-                       debug('deps: scanner for %s returned %s %s', str(self), str(nodes), str(names))
-
-               # store the dependencies in the cache
-               bld.node_deps[key] = nodes
-               bld.raw_deps[key] = names
-
-               # recompute the signature and return it
-               try:
-                       sig = self.compute_sig_implicit_deps()
-               except KeyError:
-                       try:
-                               nodes = []
-                               for k in bld.node_deps.get(self.unique_id(), []):
-                                       if k.id & 3 == 2: # Node.FILE:
-                                               if not k.id in bld.node_sigs[0]:
-                                                       nodes.append(k)
-                                       else:
-                                               if not k.id in bld.node_sigs[self.env.variant()]:
-                                                       nodes.append(k)
-                       except:
-                               nodes = '?'
-                       raise Utils.WafError('Missing node signature for %r (for implicit dependencies %r)' % (nodes, self))
-
-               return sig
-
-       def compute_sig_implicit_deps(self):
-               """it is intended for .cpp and inferred .h files
-               there is a single list (no tree traversal)
-               this is the hot spot so ... do not touch"""
-               upd = self.m.update
-
-               bld = self.generator.bld
-               tstamp = bld.node_sigs
-               env = self.env
-
-               for k in bld.node_deps.get(self.unique_id(), []):
-                       # unlikely but necessary if it happens
-                       if not k.parent.id in bld.cache_scanned_folders:
-                               # if the parent folder is removed, an OSError may be thrown
-                               bld.rescan(k.parent)
-
-                       # if the parent folder is removed, a KeyError will be thrown
-                       if k.id & 3 == 2: # Node.FILE:
-                               upd(tstamp[0][k.id])
-                       else:
-                               upd(tstamp[env.variant()][k.id])
-
-               return self.m.digest()
-
-def funex(c):
-       dc = {}
-       exec(c, dc)
-       return dc['f']
-
-reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})", re.M)
-def compile_fun_shell(name, line):
-       """Compiles a string (once) into a function, eg:
-       simple_task_type('c++', '${CXX} -o ${TGT[0]} ${SRC} -I ${SRC[0].parent.bldpath()}')
-
-       The env variables (CXX, ..) on the task must not hold dicts (order)
-       The reserved keywords TGT and SRC represent the task input and output nodes
-
-       quick test:
-       bld(source='wscript', rule='echo "foo\\${SRC[0].name}\\bar"')
-       """
-
-       extr = []
-       def repl(match):
-               g = match.group
-               if g('dollar'): return "$"
-               elif g('backslash'): return '\\\\'
-               elif g('subst'): extr.append((g('var'), g('code'))); return "%s"
-               return None
-
-       line = reg_act.sub(repl, line) or line
-
-       parm = []
-       dvars = []
-       app = parm.append
-       for (var, meth) in extr:
-               if var == 'SRC':
-                       if meth: app('task.inputs%s' % meth)
-                       else: app('" ".join([a.srcpath(env) for a in task.inputs])')
-               elif var == 'TGT':
-                       if meth: app('task.outputs%s' % meth)
-                       else: app('" ".join([a.bldpath(env) for a in task.outputs])')
-               else:
-                       if not var in dvars: dvars.append(var)
-                       app("p('%s')" % var)
-       if parm: parm = "%% (%s) " % (',\n\t\t'.join(parm))
-       else: parm = ''
-
-       c = COMPILE_TEMPLATE_SHELL % (line, parm)
-
-       debug('action: %s', c)
-       return (funex(c), dvars)
-
-def compile_fun_noshell(name, line):
-
-       extr = []
-       def repl(match):
-               g = match.group
-               if g('dollar'): return "$"
-               elif g('subst'): extr.append((g('var'), g('code'))); return "<<|@|>>"
-               return None
-
-       line2 = reg_act.sub(repl, line)
-       params = line2.split('<<|@|>>')
-
-       buf = []
-       dvars = []
-       app = buf.append
-       for x in xrange(len(extr)):
-               params[x] = params[x].strip()
-               if params[x]:
-                       app("lst.extend(%r)" % params[x].split())
-               (var, meth) = extr[x]
-               if var == 'SRC':
-                       if meth: app('lst.append(task.inputs%s)' % meth)
-                       else: app("lst.extend([a.srcpath(env) for a in task.inputs])")
-               elif var == 'TGT':
-                       if meth: app('lst.append(task.outputs%s)' % meth)
-                       else: app("lst.extend([a.bldpath(env) for a in task.outputs])")
-               else:
-                       app('lst.extend(to_list(env[%r]))' % var)
-                       if not var in dvars: dvars.append(var)
-
-       if params[-1]:
-               app("lst.extend(%r)" % shlex.split(params[-1]))
-
-       fun = COMPILE_TEMPLATE_NOSHELL % "\n\t".join(buf)
-       debug('action: %s', fun)
-       return (funex(fun), dvars)
-
-def compile_fun(name, line, shell=None):
-       "commands can be launched by the shell or not"
-       if line.find('<') > 0 or line.find('>') > 0 or line.find('&&') > 0:
-               shell = True
-       #else:
-       #       shell = False
-
-       if shell is None:
-               if sys.platform == 'win32':
-                       shell = False
-               else:
-                       shell = True
-
-       if shell:
-               return compile_fun_shell(name, line)
-       else:
-               return compile_fun_noshell(name, line)
-
-def simple_task_type(name, line, color='GREEN', vars=[], ext_in=[], ext_out=[], before=[], after=[], shell=None):
-       """return a new Task subclass with the function run compiled from the line given"""
-       (fun, dvars) = compile_fun(name, line, shell)
-       fun.code = line
-       return task_type_from_func(name, fun, vars or dvars, color, ext_in, ext_out, before, after)
-
-def task_type_from_func(name, func, vars=[], color='GREEN', ext_in=[], ext_out=[], before=[], after=[]):
-       """return a new Task subclass with the function run compiled from the line given"""
-       params = {
-               'run': func,
-               'vars': vars,
-               'color': color,
-               'name': name,
-               'ext_in': Utils.to_list(ext_in),
-               'ext_out': Utils.to_list(ext_out),
-               'before': Utils.to_list(before),
-               'after': Utils.to_list(after),
-       }
-
-       cls = type(Task)(name, (Task,), params)
-       TaskBase.classes[name] = cls
-       return cls
-
-def always_run(cls):
-       """Set all task instances of this class to be executed whenever a build is started
-       The task signature is calculated, but the result of the comparation between
-       task signatures is bypassed
-       """
-       old = cls.runnable_status
-       def always(self):
-               ret = old(self)
-               if ret == SKIP_ME:
-                       return RUN_ME
-               return ret
-       cls.runnable_status = always
-
-def update_outputs(cls):
-       """When a command is always run, it is possible that the output only change
-       sometimes. By default the build node have as a hash the signature of the task
-       which may not change. With this, the output nodes (produced) are hashed,
-       and the hashes are set to the build nodes
-
-       This may avoid unnecessary recompilations, but it uses more resources
-       (hashing the output files) so it is not used by default
-       """
-       old_post_run = cls.post_run
-       def post_run(self):
-               old_post_run(self)
-               bld = self.generator.bld
-               for output in self.outputs:
-                       bld.node_sigs[self.env.variant()][output.id] = Utils.h_file(output.abspath(self.env))
-                       bld.task_sigs[output.id] = self.unique_id()
-       cls.post_run = post_run
-
-       old_runnable_status = cls.runnable_status
-       def runnable_status(self):
-               status = old_runnable_status(self)
-               if status != RUN_ME:
-                       return status
-
-               uid = self.unique_id()
-               try:
-                       bld = self.outputs[0].__class__.bld
-                       new_sig  = self.signature()
-                       prev_sig = bld.task_sigs[uid][0]
-                       if prev_sig == new_sig:
-                               for x in self.outputs:
-                                       if not x.id in bld.node_sigs[self.env.variant()]:
-                                               return RUN_ME
-                                       if bld.task_sigs[x.id] != uid: # ensure the outputs are associated with *this* task
-                                               return RUN_ME
-                               return SKIP_ME
-               except KeyError:
-                       pass
-               except IndexError:
-                       pass
-               return RUN_ME
-       cls.runnable_status = runnable_status
-
-def extract_outputs(tasks):
-       """file_deps: Infer additional dependencies from task input and output nodes
-       """
-       v = {}
-       for x in tasks:
-               try:
-                       (ins, outs) = v[x.env.variant()]
-               except KeyError:
-                       ins = {}
-                       outs = {}
-                       v[x.env.variant()] = (ins, outs)
-
-               for a in getattr(x, 'inputs', []):
-                       try: ins[a.id].append(x)
-                       except KeyError: ins[a.id] = [x]
-               for a in getattr(x, 'outputs', []):
-                       try: outs[a.id].append(x)
-                       except KeyError: outs[a.id] = [x]
-
-       for (ins, outs) in v.values():
-               links = set(ins.iterkeys()).intersection(outs.iterkeys())
-               for k in links:
-                       for a in ins[k]:
-                               for b in outs[k]:
-                                       a.set_run_after(b)
-
-def extract_deps(tasks):
-       """file_deps: Infer additional dependencies from task input and output nodes and from implicit dependencies
-       returned by the scanners - that will only work if all tasks are created
-
-       this is aimed at people who have pathological builds and who do not care enough
-       to implement the build dependencies properly
-
-       with two loops over the list of tasks, do not expect this to be really fast
-       """
-
-       # first reuse the function above
-       extract_outputs(tasks)
-
-       # map the output nodes to the tasks producing them
-       out_to_task = {}
-       for x in tasks:
-               v = x.env.variant()
-               try:
-                       lst = x.outputs
-               except AttributeError:
-                       pass
-               else:
-                       for node in lst:
-                               out_to_task[(v, node.id)] = x
-
-       # map the dependencies found to the tasks compiled
-       dep_to_task = {}
-       for x in tasks:
-               try:
-                       x.signature()
-               except: # this is on purpose
-                       pass
-
-               v = x.env.variant()
-               key = x.unique_id()
-               for k in x.generator.bld.node_deps.get(x.unique_id(), []):
-                       try: dep_to_task[(v, k.id)].append(x)
-                       except KeyError: dep_to_task[(v, k.id)] = [x]
-
-       # now get the intersection
-       deps = set(dep_to_task.keys()).intersection(set(out_to_task.keys()))
-
-       # and add the dependencies from task to task
-       for idx in deps:
-               for k in dep_to_task[idx]:
-                       k.set_run_after(out_to_task[idx])
-
-       # cleanup, remove the signatures
-       for x in tasks:
-               try:
-                       delattr(x, 'cache_sig')
-               except AttributeError:
-                       pass
diff --git a/third_party/waf/wafadmin/TaskGen.py b/third_party/waf/wafadmin/TaskGen.py
deleted file mode 100644 (file)
index 386798f..0000000
+++ /dev/null
@@ -1,614 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005-2008 (ita)
-
-"""
-The class task_gen encapsulates the creation of task objects (low-level code)
-The instances can have various parameters, but the creation of task nodes (Task.py)
-is delayed. To achieve this, various methods are called from the method "apply"
-
-The class task_gen contains lots of methods, and a configuration table:
-* the methods to call (self.meths) can be specified dynamically (removing, adding, ..)
-* the order of the methods (self.prec or by default task_gen.prec) is configurable
-* new methods can be inserted dynamically without pasting old code
-
-Additionally, task_gen provides the method apply_core
-* file extensions are mapped to methods: def meth(self, name_or_node)
-* if a mapping is not found in self.mappings, it is searched in task_gen.mappings
-* when called, the functions may modify self.allnodes to re-add source to process
-* the mappings can map an extension or a filename (see the code below)
-
-WARNING: subclasses must reimplement the clone method
-"""
-
-import os, traceback, copy
-import Build, Task, Utils, Logs, Options
-from Logs import debug, error, warn
-from Constants import *
-
-typos = {
-'sources':'source',
-'targets':'target',
-'include':'includes',
-'define':'defines',
-'importpath':'importpaths',
-'install_var':'install_path',
-'install_subdir':'install_path',
-'inst_var':'install_path',
-'inst_dir':'install_path',
-'feature':'features',
-}
-
-class register_obj(type):
-       """no decorators for classes, so we use a metaclass
-       we store into task_gen.classes the classes that inherit task_gen
-       and whose names end in '_taskgen'
-       """
-       def __init__(cls, name, bases, dict):
-               super(register_obj, cls).__init__(name, bases, dict)
-               name = cls.__name__
-               suffix = '_taskgen'
-               if name.endswith(suffix):
-                       task_gen.classes[name.replace(suffix, '')] = cls
-
-class task_gen(object):
-       """
-       Most methods are of the form 'def meth(self):' without any parameters
-       there are many of them, and they do many different things:
-       * task creation
-       * task results installation
-       * environment modification
-       * attribute addition/removal
-
-       The inheritance approach is complicated
-       * mixing several languages at once
-       * subclassing is needed even for small changes
-       * inserting new methods is complicated
-
-       This new class uses a configuration table:
-       * adding new methods easily
-       * obtaining the order in which to call the methods
-       * postponing the method calls (post() -> apply)
-
-       Additionally, a 'traits' static attribute is provided:
-       * this list contains methods
-       * the methods can remove or add methods from self.meths
-       Example1: the attribute 'staticlib' is set on an instance
-       a method set in the list of traits is executed when the
-       instance is posted, it finds that flag and adds another method for execution
-       Example2: a method set in the list of traits finds the msvc
-       compiler (from self.env['MSVC']==1); more methods are added to self.meths
-       """
-
-       __metaclass__ = register_obj
-       mappings = {}
-       mapped = {}
-       prec = Utils.DefaultDict(list)
-       traits = Utils.DefaultDict(set)
-       classes = {}
-
-       def __init__(self, *kw, **kwargs):
-               self.prec = Utils.DefaultDict(list)
-               "map precedence of function names to call"
-               # so we will have to play with directed acyclic graphs
-               # detect cycles, etc
-
-               self.source = ''
-               self.target = ''
-
-               # list of methods to execute - does not touch it by hand unless you know
-               self.meths = []
-
-               # list of mappings extension -> function
-               self.mappings = {}
-
-               # list of features (see the documentation on traits)
-               self.features = list(kw)
-
-               # not always a good idea
-               self.tasks = []
-
-               self.default_chmod = O644
-               self.default_install_path = None
-
-               # kind of private, beware of what you put in it, also, the contents are consumed
-               self.allnodes = []
-
-               self.bld = kwargs.get('bld', Build.bld)
-               self.env = self.bld.env.copy()
-
-               self.path = self.bld.path # emulate chdir when reading scripts
-               self.name = '' # give a name to the target (static+shlib with the same targetname ambiguity)
-
-               # provide a unique id
-               self.idx = self.bld.idx[self.path.id] = self.bld.idx.get(self.path.id, 0) + 1
-
-               for key, val in kwargs.iteritems():
-                       setattr(self, key, val)
-
-               self.bld.task_manager.add_task_gen(self)
-               self.bld.all_task_gen.append(self)
-
-       def __str__(self):
-               return ("<task_gen '%s' of type %s defined in %s>"
-                       % (self.name or self.target, self.__class__.__name__, str(self.path)))
-
-       def __setattr__(self, name, attr):
-               real = typos.get(name, name)
-               if real != name:
-                       warn('typo %s -> %s' % (name, real))
-                       if Logs.verbose > 0:
-                               traceback.print_stack()
-               object.__setattr__(self, real, attr)
-
-       def to_list(self, value):
-               "helper: returns a list"
-               if isinstance(value, str): return value.split()
-               else: return value
-
-       def apply(self):
-               "order the methods to execute using self.prec or task_gen.prec"
-               keys = set(self.meths)
-
-               # add the methods listed in the features
-               self.features = Utils.to_list(self.features)
-               for x in self.features + ['*']:
-                       st = task_gen.traits[x]
-                       if not st:
-                               warn('feature %r does not exist - bind at least one method to it' % x)
-                       keys.update(st)
-
-               # copy the precedence table
-               prec = {}
-               prec_tbl = self.prec or task_gen.prec
-               for x in prec_tbl:
-                       if x in keys:
-                               prec[x] = prec_tbl[x]
-
-               # elements disconnected
-               tmp = []
-               for a in keys:
-                       for x in prec.values():
-                               if a in x: break
-                       else:
-                               tmp.append(a)
-
-               # topological sort
-               out = []
-               while tmp:
-                       e = tmp.pop()
-                       if e in keys: out.append(e)
-                       try:
-                               nlst = prec[e]
-                       except KeyError:
-                               pass
-                       else:
-                               del prec[e]
-                               for x in nlst:
-                                       for y in prec:
-                                               if x in prec[y]:
-                                                       break
-                                       else:
-                                               tmp.append(x)
-
-               if prec: raise Utils.WafError("graph has a cycle %s" % str(prec))
-               out.reverse()
-               self.meths = out
-
-               # then we run the methods in order
-               debug('task_gen: posting %s %d', self, id(self))
-               for x in out:
-                       try:
-                               v = getattr(self, x)
-                       except AttributeError:
-                               raise Utils.WafError("tried to retrieve %s which is not a valid method" % x)
-                       debug('task_gen: -> %s (%d)', x, id(self))
-                       v()
-
-       def post(self):
-               "runs the code to create the tasks, do not subclass"
-               if not self.name:
-                       if isinstance(self.target, list):
-                               self.name = ' '.join(self.target)
-                       else:
-                               self.name = self.target
-
-               if getattr(self, 'posted', None):
-                       #error("OBJECT ALREADY POSTED" + str( self))
-                       return
-
-               self.apply()
-               self.posted = True
-               debug('task_gen: posted %s', self.name)
-
-       def get_hook(self, ext):
-               try: return self.mappings[ext]
-               except KeyError:
-                       try: return task_gen.mappings[ext]
-                       except KeyError: return None
-
-       # TODO waf 1.6: always set the environment
-       # TODO waf 1.6: create_task(self, name, inputs, outputs)
-       def create_task(self, name, src=None, tgt=None, env=None):
-               env = env or self.env
-               task = Task.TaskBase.classes[name](env.copy(), generator=self)
-               if src:
-                       task.set_inputs(src)
-               if tgt:
-                       task.set_outputs(tgt)
-               self.tasks.append(task)
-               return task
-
-       def name_to_obj(self, name):
-               return self.bld.name_to_obj(name, self.env)
-
-       def get_tgen_by_name(self, name):
-               return self.bld.get_tgen_by_name(name)
-
-       def find_sources_in_dirs(self, dirnames, excludes=[], exts=[]):
-               """
-               The attributes "excludes" and "exts" must be lists to avoid the confusion
-               find_sources_in_dirs('a', 'b', 'c') <-> find_sources_in_dirs('a b c')
-
-               do not use absolute paths
-               do not use paths outside of the source tree
-               the files or folder beginning by . are not returned
-
-               # TODO: remove in Waf 1.6
-               """
-
-               err_msg = "'%s' attribute must be a list"
-               if not isinstance(excludes, list):
-                       raise Utils.WscriptError(err_msg % 'excludes')
-               if not isinstance(exts, list):
-                       raise Utils.WscriptError(err_msg % 'exts')
-
-               lst = []
-
-               #make sure dirnames is a list helps with dirnames with spaces
-               dirnames = self.to_list(dirnames)
-
-               ext_lst = exts or list(self.mappings.keys()) + list(task_gen.mappings.keys())
-
-               for name in dirnames:
-                       anode = self.path.find_dir(name)
-
-                       if not anode or not anode.is_child_of(self.bld.srcnode):
-                               raise Utils.WscriptError("Unable to use '%s' - either because it's not a relative path" \
-                                        ", or it's not child of '%s'." % (name, self.bld.srcnode))
-
-                       self.bld.rescan(anode)
-                       for name in self.bld.cache_dir_contents[anode.id]:
-
-                               # ignore hidden files
-                               if name.startswith('.'):
-                                       continue
-
-                               (base, ext) = os.path.splitext(name)
-                               if ext in ext_lst and not name in lst and not name in excludes:
-                                       lst.append((anode.relpath_gen(self.path) or '.') + os.path.sep + name)
-
-               lst.sort()
-               self.source = self.to_list(self.source)
-               if not self.source: self.source = lst
-               else: self.source += lst
-
-       def clone(self, env):
-               """when creating a clone in a task generator method,
-               make sure to set posted=False on the clone
-               else the other task generator will not create its tasks"""
-               newobj = task_gen(bld=self.bld)
-               for x in self.__dict__:
-                       if x in ['env', 'bld']:
-                               continue
-                       elif x in ["path", "features"]:
-                               setattr(newobj, x, getattr(self, x))
-                       else:
-                               setattr(newobj, x, copy.copy(getattr(self, x)))
-
-               newobj.__class__ = self.__class__
-               if isinstance(env, str):
-                       newobj.env = self.bld.all_envs[env].copy()
-               else:
-                       newobj.env = env.copy()
-
-               return newobj
-
-       def get_inst_path(self):
-               return getattr(self, '_install_path', getattr(self, 'default_install_path', ''))
-
-       def set_inst_path(self, val):
-               self._install_path = val
-
-       install_path = property(get_inst_path, set_inst_path)
-
-
-       def get_chmod(self):
-               return getattr(self, '_chmod', getattr(self, 'default_chmod', O644))
-
-       def set_chmod(self, val):
-               self._chmod = val
-
-       chmod = property(get_chmod, set_chmod)
-
-def declare_extension(var, func):
-       try:
-               for x in Utils.to_list(var):
-                       task_gen.mappings[x] = func
-       except:
-               raise Utils.WscriptError('declare_extension takes either a list or a string %r' % var)
-       task_gen.mapped[func.__name__] = func
-
-def declare_order(*k):
-       assert(len(k) > 1)
-       n = len(k) - 1
-       for i in xrange(n):
-               f1 = k[i]
-               f2 = k[i+1]
-               if not f1 in task_gen.prec[f2]:
-                       task_gen.prec[f2].append(f1)
-
-def declare_chain(name='', action='', ext_in='', ext_out='', reentrant=True, color='BLUE',
-       install=0, before=[], after=[], decider=None, rule=None, scan=None):
-       """
-       see Tools/flex.py for an example
-       while i do not like such wrappers, some people really do
-       """
-
-       action = action or rule
-       if isinstance(action, str):
-               act = Task.simple_task_type(name, action, color=color)
-       else:
-               act = Task.task_type_from_func(name, action, color=color)
-       act.ext_in = tuple(Utils.to_list(ext_in))
-       act.ext_out = tuple(Utils.to_list(ext_out))
-       act.before = Utils.to_list(before)
-       act.after = Utils.to_list(after)
-       act.scan = scan
-
-       def x_file(self, node):
-               if decider:
-                       ext = decider(self, node)
-               else:
-                       ext = ext_out
-
-               if isinstance(ext, str):
-                       out_source = node.change_ext(ext)
-                       if reentrant:
-                               self.allnodes.append(out_source)
-               elif isinstance(ext, list):
-                       out_source = [node.change_ext(x) for x in ext]
-                       if reentrant:
-                               for i in xrange((reentrant is True) and len(out_source) or reentrant):
-                                       self.allnodes.append(out_source[i])
-               else:
-                       # XXX: useless: it will fail on Utils.to_list above...
-                       raise Utils.WafError("do not know how to process %s" % str(ext))
-
-               tsk = self.create_task(name, node, out_source)
-
-               if node.__class__.bld.is_install:
-                       tsk.install = install
-
-       declare_extension(act.ext_in, x_file)
-       return x_file
-
-def bind_feature(name, methods):
-       lst = Utils.to_list(methods)
-       task_gen.traits[name].update(lst)
-
-"""
-All the following decorators are registration decorators, i.e add an attribute to current class
- (task_gen and its derivatives), with same name as func, which points to func itself.
-For example:
-   @taskgen
-   def sayHi(self):
-        print("hi")
-Now taskgen.sayHi() may be called
-
-If python were really smart, it could infer itself the order of methods by looking at the
-attributes. A prerequisite for execution is to have the attribute set before.
-Intelligent compilers binding aspect-oriented programming and parallelization, what a nice topic for studies.
-"""
-def taskgen(func):
-       """
-       register a method as a task generator method
-       """
-       setattr(task_gen, func.__name__, func)
-       return func
-
-def feature(*k):
-       """
-       declare a task generator method that will be executed when the
-       object attribute 'feature' contains the corresponding key(s)
-       """
-       def deco(func):
-               setattr(task_gen, func.__name__, func)
-               for name in k:
-                       task_gen.traits[name].update([func.__name__])
-               return func
-       return deco
-
-def before(*k):
-       """
-       declare a task generator method which will be executed
-       before the functions of given name(s)
-       """
-       def deco(func):
-               setattr(task_gen, func.__name__, func)
-               for fun_name in k:
-                       if not func.__name__ in task_gen.prec[fun_name]:
-                               task_gen.prec[fun_name].append(func.__name__)
-               return func
-       return deco
-
-def after(*k):
-       """
-       declare a task generator method which will be executed
-       after the functions of given name(s)
-       """
-       def deco(func):
-               setattr(task_gen, func.__name__, func)
-               for fun_name in k:
-                       if not fun_name in task_gen.prec[func.__name__]:
-                               task_gen.prec[func.__name__].append(fun_name)
-               return func
-       return deco
-
-def extension(var):
-       """
-       declare a task generator method which will be invoked during
-       the processing of source files for the extension given
-       """
-       def deco(func):
-               setattr(task_gen, func.__name__, func)
-               try:
-                       for x in Utils.to_list(var):
-                               task_gen.mappings[x] = func
-               except:
-                       raise Utils.WafError('extension takes either a list or a string %r' % var)
-               task_gen.mapped[func.__name__] = func
-               return func
-       return deco
-
-# TODO make certain the decorators may be used here
-
-def apply_core(self):
-       """Process the attribute source
-       transform the names into file nodes
-       try to process the files by name first, later by extension"""
-       # get the list of folders to use by the scanners
-       # all our objects share the same include paths anyway
-       find_resource = self.path.find_resource
-
-       for filename in self.to_list(self.source):
-               # if self.mappings or task_gen.mappings contains a file of the same name
-               x = self.get_hook(filename)
-               if x:
-                       x(self, filename)
-               else:
-                       node = find_resource(filename)
-                       if not node: raise Utils.WafError("source not found: '%s' in '%s'" % (filename, str(self.path)))
-                       self.allnodes.append(node)
-
-       for node in self.allnodes:
-               # self.mappings or task_gen.mappings map the file extension to a function
-               x = self.get_hook(node.suffix())
-
-               if not x:
-                       raise Utils.WafError("Cannot guess how to process %s (got mappings %r in %r) -> try conf.check_tool(..)?" % \
-                               (str(node), self.__class__.mappings.keys(), self.__class__))
-               x(self, node)
-feature('*')(apply_core)
-
-def exec_rule(self):
-       """Process the attribute rule, when provided the method apply_core will be disabled
-       """
-       if not getattr(self, 'rule', None):
-               return
-
-       # someone may have removed it already
-       try:
-               self.meths.remove('apply_core')
-       except ValueError:
-               pass
-
-       # get the function and the variables
-       func = self.rule
-
-       vars2 = []
-       if isinstance(func, str):
-               # use the shell by default for user-defined commands
-               (func, vars2) = Task.compile_fun('', self.rule, shell=getattr(self, 'shell', True))
-               func.code = self.rule
-
-       # create the task class
-       name = getattr(self, 'name', None) or self.target or self.rule
-       if not isinstance(name, str):
-               name = str(self.idx)
-       cls = Task.task_type_from_func(name, func, getattr(self, 'vars', vars2))
-       cls.color = getattr(self, 'color', 'BLUE')
-
-       # now create one instance
-       tsk = self.create_task(name)
-
-       dep_vars = getattr(self, 'dep_vars', ['ruledeps'])
-       if dep_vars:
-               tsk.dep_vars = dep_vars
-       if isinstance(self.rule, str):
-               tsk.env.ruledeps = self.rule
-       else:
-               # only works if the function is in a global module such as a waf tool
-               tsk.env.ruledeps = Utils.h_fun(self.rule)
-
-       # we assume that the user knows that without inputs or outputs
-       #if not getattr(self, 'target', None) and not getattr(self, 'source', None):
-       #       cls.quiet = True
-
-       if getattr(self, 'target', None):
-               cls.quiet = True
-               tsk.outputs = [self.path.find_or_declare(x) for x in self.to_list(self.target)]
-
-       if getattr(self, 'source', None):
-               cls.quiet = True
-               tsk.inputs = []
-               for x in self.to_list(self.source):
-                       y = self.path.find_resource(x)
-                       if not y:
-                               raise Utils.WafError('input file %r could not be found (%r)' % (x, self.path.abspath()))
-                       tsk.inputs.append(y)
-
-       if self.allnodes:
-               tsk.inputs.extend(self.allnodes)
-
-       if getattr(self, 'scan', None):
-               cls.scan = self.scan
-
-       if getattr(self, 'install_path', None):
-               tsk.install_path = self.install_path
-
-       if getattr(self, 'cwd', None):
-               tsk.cwd = self.cwd
-
-       if getattr(self, 'on_results', None) or getattr(self, 'update_outputs', None):
-               Task.update_outputs(cls)
-
-       if getattr(self, 'always', None):
-               Task.always_run(cls)
-
-       for x in ['after', 'before', 'ext_in', 'ext_out']:
-               setattr(cls, x, getattr(self, x, []))
-feature('*')(exec_rule)
-before('apply_core')(exec_rule)
-
-def sequence_order(self):
-       """
-       add a strict sequential constraint between the tasks generated by task generators
-       it uses the fact that task generators are posted in order
-       it will not post objects which belong to other folders
-       there is also an awesome trick for executing the method in last position
-
-       to use:
-       bld(features='javac seq')
-       bld(features='jar seq')
-
-       to start a new sequence, set the attribute seq_start, for example:
-       obj.seq_start = True
-       """
-       if self.meths and self.meths[-1] != 'sequence_order':
-               self.meths.append('sequence_order')
-               return
-
-       if getattr(self, 'seq_start', None):
-               return
-
-       # all the tasks previously declared must be run before these
-       if getattr(self.bld, 'prev', None):
-               self.bld.prev.post()
-               for x in self.bld.prev.tasks:
-                       for y in self.tasks:
-                               y.set_run_after(x)
-
-       self.bld.prev = self
-
-feature('seq')(sequence_order)
diff --git a/third_party/waf/wafadmin/Tools/__init__.py b/third_party/waf/wafadmin/Tools/__init__.py
deleted file mode 100644 (file)
index 8f026e1..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006 (ita)
diff --git a/third_party/waf/wafadmin/Tools/ar.py b/third_party/waf/wafadmin/Tools/ar.py
deleted file mode 100644 (file)
index 3571670..0000000
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2008 (ita)
-# Ralf Habacker, 2006 (rh)
-
-"ar and ranlib"
-
-import os, sys
-import Task, Utils
-from Configure import conftest
-
-ar_str = '${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'
-cls = Task.simple_task_type('static_link', ar_str, color='YELLOW', ext_in='.o', ext_out='.bin', shell=False)
-cls.maxjobs = 1
-cls.install = Utils.nada
-
-# remove the output in case it already exists
-old = cls.run
-def wrap(self):
-       try: os.remove(self.outputs[0].abspath(self.env))
-       except OSError: pass
-       return old(self)
-setattr(cls, 'run', wrap)
-
-def detect(conf):
-       conf.find_program('ar', var='AR')
-       conf.find_program('ranlib', var='RANLIB')
-       conf.env.ARFLAGS = 'rcs'
-
-@conftest
-def find_ar(conf):
-       v = conf.env
-       conf.check_tool('ar')
-       if not v['AR']: conf.fatal('ar is required for static libraries - not found')
diff --git a/third_party/waf/wafadmin/Tools/bison.py b/third_party/waf/wafadmin/Tools/bison.py
deleted file mode 100644 (file)
index c281e61..0000000
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# John O'Meara, 2006
-# Thomas Nagy 2009
-
-"Bison processing"
-
-import Task
-from TaskGen import extension
-
-bison = '${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}'
-cls = Task.simple_task_type('bison', bison, 'GREEN', ext_in='.yc .y .yy', ext_out='.c .cxx .h .l', shell=False)
-
-@extension(['.y', '.yc', '.yy'])
-def big_bison(self, node):
-       """when it becomes complicated (unlike flex), the old recipes work better (cwd)"""
-       has_h = '-d' in self.env['BISONFLAGS']
-
-       outs = []
-       if node.name.endswith('.yc'):
-               outs.append(node.change_ext('.tab.cc'))
-               if has_h:
-                       outs.append(node.change_ext('.tab.hh'))
-       else:
-               outs.append(node.change_ext('.tab.c'))
-               if has_h:
-                       outs.append(node.change_ext('.tab.h'))
-
-       tsk = self.create_task('bison', node, outs)
-       tsk.cwd = node.bld_dir(tsk.env)
-
-       # and the c/cxx file must be compiled too
-       self.allnodes.append(outs[0])
-
-def detect(conf):
-       bison = conf.find_program('bison', var='BISON', mandatory=True)
-       conf.env['BISONFLAGS'] = '-d'
diff --git a/third_party/waf/wafadmin/Tools/cc.py b/third_party/waf/wafadmin/Tools/cc.py
deleted file mode 100644 (file)
index 7eb5272..0000000
+++ /dev/null
@@ -1,99 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006 (ita)
-
-"Base for c programs/libraries"
-
-import os
-import TaskGen, Build, Utils, Task
-from Logs import debug
-import ccroot
-from TaskGen import feature, before, extension, after
-
-g_cc_flag_vars = [
-'CCDEPS', 'FRAMEWORK', 'FRAMEWORKPATH',
-'STATICLIB', 'LIB', 'LIBPATH', 'LINKFLAGS', 'RPATH',
-'CCFLAGS', 'CPPPATH', 'CPPFLAGS', 'CCDEFINES']
-
-EXT_CC = ['.c']
-
-g_cc_type_vars = ['CCFLAGS', 'LINKFLAGS']
-
-# TODO remove in waf 1.6
-class cc_taskgen(ccroot.ccroot_abstract):
-       pass
-
-@feature('c', 'cc')
-@before('apply_type_vars')
-@after('default_cc')
-def init_cc(self):
-       self.p_flag_vars = set(self.p_flag_vars).union(g_cc_flag_vars)
-       self.p_type_vars = set(self.p_type_vars).union(g_cc_type_vars)
-
-       if not self.env['CC_NAME']:
-               raise Utils.WafError("At least one compiler (gcc, ..) must be selected")
-
-@feature('c', 'cc')
-@after('apply_incpaths')
-def apply_obj_vars_cc(self):
-       """after apply_incpaths for INC_PATHS"""
-       env = self.env
-       app = env.append_unique
-       cpppath_st = env['CPPPATH_ST']
-
-       # local flags come first
-       # set the user-defined includes paths
-       for i in env['INC_PATHS']:
-               app('_CCINCFLAGS', cpppath_st % i.bldpath(env))
-               app('_CCINCFLAGS', cpppath_st % i.srcpath(env))
-
-       # set the library include paths
-       for i in env['CPPPATH']:
-               app('_CCINCFLAGS', cpppath_st % i)
-
-@feature('c', 'cc')
-@after('apply_lib_vars')
-def apply_defines_cc(self):
-       """after uselib is set for CCDEFINES"""
-       self.defines = getattr(self, 'defines', [])
-       lst = self.to_list(self.defines) + self.to_list(self.env['CCDEFINES'])
-       milst = []
-
-       # now process the local defines
-       for defi in lst:
-               if not defi in milst:
-                       milst.append(defi)
-
-       # CCDEFINES_
-       libs = self.to_list(self.uselib)
-       for l in libs:
-               val = self.env['CCDEFINES_'+l]
-               if val: milst += val
-       self.env['DEFLINES'] = ["%s %s" % (x[0], Utils.trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in milst]]
-       y = self.env['CCDEFINES_ST']
-       self.env.append_unique('_CCDEFFLAGS', [y%x for x in milst])
-
-@extension(EXT_CC)
-def c_hook(self, node):
-       # create the compilation task: cpp or cc
-       if getattr(self, 'obj_ext', None):
-               obj_ext = self.obj_ext
-       else:
-               obj_ext = '_%d.o' % self.idx
-
-       task = self.create_task('cc', node, node.change_ext(obj_ext))
-       try:
-               self.compiled_tasks.append(task)
-       except AttributeError:
-               raise Utils.WafError('Have you forgotten to set the feature "cc" on %s?' % str(self))
-       return task
-
-cc_str = '${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT}'
-cls = Task.simple_task_type('cc', cc_str, 'GREEN', ext_out='.o', ext_in='.c', shell=False)
-cls.scan = ccroot.scan
-cls.vars.append('CCDEPS')
-
-link_str = '${LINK_CC} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath(env)} ${LINKFLAGS}'
-cls = Task.simple_task_type('cc_link', link_str, color='YELLOW', ext_in='.o', ext_out='.bin', shell=False)
-cls.maxjobs = 1
-cls.install = Utils.nada
diff --git a/third_party/waf/wafadmin/Tools/ccroot.py b/third_party/waf/wafadmin/Tools/ccroot.py
deleted file mode 100644 (file)
index 2240b2f..0000000
+++ /dev/null
@@ -1,639 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005-2008 (ita)
-
-"base for all c/c++ programs and libraries"
-
-import os, sys, re
-import TaskGen, Task, Utils, preproc, Logs, Build, Options
-from Logs import error, debug, warn
-from Utils import md5
-from TaskGen import taskgen, after, before, feature
-from Constants import *
-from Configure import conftest
-try:
-       from cStringIO import StringIO
-except ImportError:
-       from io import StringIO
-
-import config_c # <- necessary for the configuration, do not touch
-
-USE_TOP_LEVEL = False
-
-def get_cc_version(conf, cc, gcc=False, icc=False):
-
-       cmd = cc + ['-dM', '-E', '-']
-       try:
-               p = Utils.pproc.Popen(cmd, stdin=Utils.pproc.PIPE, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE)
-               p.stdin.write('\n')
-               out = p.communicate()[0]
-       except:
-               conf.fatal('could not determine the compiler version %r' % cmd)
-
-       # PY3K: do not touch
-       out = str(out)
-
-       if gcc:
-               if out.find('__INTEL_COMPILER') >= 0:
-                       conf.fatal('The intel compiler pretends to be gcc')
-               if out.find('__GNUC__') < 0:
-                       conf.fatal('Could not determine the compiler type')
-
-       if icc and out.find('__INTEL_COMPILER') < 0:
-               conf.fatal('Not icc/icpc')
-
-       k = {}
-       if icc or gcc:
-               out = out.split('\n')
-               import shlex
-
-               for line in out:
-                       lst = shlex.split(line)
-                       if len(lst)>2:
-                               key = lst[1]
-                               val = lst[2]
-                               k[key] = val
-
-               def isD(var):
-                       return var in k
-
-               def isT(var):
-                       return var in k and k[var] != '0'
-
-               # Some documentation is available at http://predef.sourceforge.net
-               # The names given to DEST_OS must match what Utils.unversioned_sys_platform() returns.
-               mp1 = {
-                       '__linux__'   : 'linux',
-                       '__GNU__'     : 'gnu',
-                       '__FreeBSD__' : 'freebsd',
-                       '__NetBSD__'  : 'netbsd',
-                       '__OpenBSD__' : 'openbsd',
-                       '__sun'       : 'sunos',
-                       '__hpux'      : 'hpux',
-                       '__sgi'       : 'irix',
-                       '_AIX'        : 'aix',
-                       '__CYGWIN__'  : 'cygwin',
-                       '__MSYS__'    : 'msys',
-                       '_UWIN'       : 'uwin',
-                       '_WIN64'      : 'win32',
-                       '_WIN32'      : 'win32',
-                       '__POWERPC__' : 'powerpc',
-                       }
-
-               for i in mp1:
-                       if isD(i):
-                               conf.env.DEST_OS = mp1[i]
-                               break
-               else:
-                       if isD('__APPLE__') and isD('__MACH__'):
-                               conf.env.DEST_OS = 'darwin'
-                       elif isD('__unix__'): # unix must be tested last as it's a generic fallback
-                               conf.env.DEST_OS = 'generic'
-
-               if isD('__ELF__'):
-                       conf.env.DEST_BINFMT = 'elf'
-               elif isD('__WINNT__') or isD('__CYGWIN__'):
-                       conf.env.DEST_BINFMT = 'pe'
-               elif isD('__APPLE__'):
-                       conf.env.DEST_BINFMT = 'mac-o'
-
-               mp2 = {
-                               '__x86_64__'  : 'x86_64',
-                               '__i386__'    : 'x86',
-                               '__ia64__'    : 'ia',
-                               '__mips__'    : 'mips',
-                               '__sparc__'   : 'sparc',
-                               '__alpha__'   : 'alpha',
-                               '__arm__'     : 'arm',
-                               '__hppa__'    : 'hppa',
-                               '__powerpc__' : 'powerpc',
-                               }
-               for i in mp2:
-                       if isD(i):
-                               conf.env.DEST_CPU = mp2[i]
-                               break
-
-               debug('ccroot: dest platform: ' + ' '.join([conf.env[x] or '?' for x in ('DEST_OS', 'DEST_BINFMT', 'DEST_CPU')]))
-               conf.env['CC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'], k['__GNUC_PATCHLEVEL__'])
-       return k
-
-class DEBUG_LEVELS:
-       """Will disappear in waf 1.6"""
-       ULTRADEBUG = "ultradebug"
-       DEBUG = "debug"
-       RELEASE = "release"
-       OPTIMIZED = "optimized"
-       CUSTOM = "custom"
-
-       ALL = [ULTRADEBUG, DEBUG, RELEASE, OPTIMIZED, CUSTOM]
-
-def scan(self):
-       "look for .h the .cpp need"
-       debug('ccroot: _scan_preprocessor(self, node, env, path_lst)')
-
-       # TODO waf 1.6 - assume the default input has exactly one file
-
-       if len(self.inputs) == 1:
-               node = self.inputs[0]
-               (nodes, names) = preproc.get_deps(node, self.env, nodepaths = self.env['INC_PATHS'])
-               if Logs.verbose:
-                       debug('deps: deps for %s: %r; unresolved %r', str(node), nodes, names)
-               return (nodes, names)
-
-       all_nodes = []
-       all_names = []
-       seen = set()
-       for node in self.inputs:
-               (nodes, names) = preproc.get_deps(node, self.env, nodepaths = self.env['INC_PATHS'])
-               if Logs.verbose:
-                       debug('deps: deps for %s: %r; unresolved %r', str(node), nodes, names)
-               for x in nodes:
-                       if id(x) in seen: continue
-                       seen.add(id(x))
-                       all_nodes.append(x)
-               for x in names:
-                       if not x in all_names:
-                               all_names.append(x)
-       return (all_nodes, all_names)
-
-class ccroot_abstract(TaskGen.task_gen):
-       "Parent class for programs and libraries in languages c, c++ and moc (Qt)"
-       def __init__(self, *k, **kw):
-               # COMPAT remove in waf 1.6 TODO
-               if len(k) > 1:
-                       k = list(k)
-                       if k[1][0] != 'c':
-                               k[1] = 'c' + k[1]
-               TaskGen.task_gen.__init__(self, *k, **kw)
-
-def get_target_name(self):
-       tp = 'program'
-       for x in self.features:
-               if x in ['cshlib', 'cstaticlib']:
-                       tp = x.lstrip('c')
-
-       pattern = self.env[tp + '_PATTERN']
-       if not pattern: pattern = '%s'
-
-       dir, name = os.path.split(self.target)
-
-       if 'cshlib' in self.features and getattr(self, 'vnum', None):
-               nums = self.vnum.split('.')
-               if self.env.DEST_BINFMT == 'pe':
-                       # include the version in the dll file name,
-                       # the import lib file name stays unversionned.
-                       name = name + '-' + nums[0]
-               elif self.env.DEST_OS == 'openbsd':
-                       pattern = '%s.%s' % (pattern, nums[0])
-                       if len(nums) >= 2:
-                               pattern += '.%s' % nums[1]
-
-       return os.path.join(dir, pattern % name)
-
-@feature('c', 'cc', 'cxx')
-@before('apply_core')
-def default_cc(self):
-       """compiled_tasks attribute must be set before the '.c->.o' tasks can be created"""
-       Utils.def_attrs(self,
-               includes = '',
-               defines= '',
-               rpaths = '',
-               uselib = '',
-               uselib_local = '',
-               add_objects = '',
-               p_flag_vars = [],
-               p_type_vars = [],
-               compiled_tasks = [],
-               link_task = None)
-
-       # The only thing we need for cross-compilation is DEST_BINFMT.
-       # At some point, we may reach a case where DEST_BINFMT is not enough, but for now it's sufficient.
-       # Currently, cross-compilation is auto-detected only for the gnu and intel compilers.
-       if not self.env.DEST_BINFMT:
-               # Infer the binary format from the os name.
-               self.env.DEST_BINFMT = Utils.unversioned_sys_platform_to_binary_format(
-                       self.env.DEST_OS or Utils.unversioned_sys_platform())
-
-       if not self.env.BINDIR: self.env.BINDIR = Utils.subst_vars('${PREFIX}/bin', self.env)
-       if not self.env.LIBDIR: self.env.LIBDIR = Utils.subst_vars('${PREFIX}/lib${LIB_EXT}', self.env)
-
-@feature('cprogram', 'dprogram', 'cstaticlib', 'dstaticlib', 'cshlib', 'dshlib')
-def apply_verif(self):
-       """no particular order, used for diagnostic"""
-       if not (self.source or getattr(self, 'add_objects', None) or getattr(self, 'uselib_local', None) or getattr(self, 'obj_files', None)):
-               raise Utils.WafError('no source files specified for %s' % self)
-       if not self.target:
-               raise Utils.WafError('no target for %s' % self)
-
-# TODO reference the d programs, shlibs in d.py, not here
-
-@feature('cprogram', 'dprogram')
-@after('default_cc')
-@before('apply_core')
-def vars_target_cprogram(self):
-       self.default_install_path = self.env.BINDIR
-       self.default_chmod = O755
-
-@after('default_cc')
-@feature('cshlib', 'dshlib')
-@before('apply_core')
-def vars_target_cshlib(self):
-       if self.env.DEST_BINFMT == 'pe':
-               #   set execute bit on libs to avoid 'permission denied' (issue 283)
-               self.default_chmod = O755
-               self.default_install_path = self.env.BINDIR
-       else:
-               self.default_install_path = self.env.LIBDIR
-
-@feature('cprogram', 'dprogram', 'cstaticlib', 'dstaticlib', 'cshlib', 'dshlib')
-@after('apply_link', 'vars_target_cprogram', 'vars_target_cshlib')
-def default_link_install(self):
-       """you may kill this method to inject your own installation for the first element
-       any other install should only process its own nodes and not those from the others"""
-       if self.install_path:
-               self.bld.install_files(self.install_path, self.link_task.outputs[0], env=self.env, chmod=self.chmod)
-
-@feature('c', 'cc', 'cxx')
-@after('apply_type_vars', 'apply_lib_vars', 'apply_core')
-def apply_incpaths(self):
-       """used by the scanner
-       after processing the uselib for CPPPATH
-       after apply_core because some processing may add include paths
-       """
-       lst = []
-       # TODO move the uselib processing out of here
-       for lib in self.to_list(self.uselib):
-               for path in self.env['CPPPATH_' + lib]:
-                       if not path in lst:
-                               lst.append(path)
-       if preproc.go_absolute:
-               for path in preproc.standard_includes:
-                       if not path in lst:
-                               lst.append(path)
-
-       for path in self.to_list(self.includes):
-               if not path in lst:
-                       if preproc.go_absolute or not os.path.isabs(path):
-                               lst.append(path)
-                       else:
-                               self.env.prepend_value('CPPPATH', path)
-
-       for path in lst:
-               node = None
-               if os.path.isabs(path):
-                       if preproc.go_absolute:
-                               node = self.bld.root.find_dir(path)
-               elif path[0] == '#':
-                       node = self.bld.srcnode
-                       if len(path) > 1:
-                               node = node.find_dir(path[1:])
-               else:
-                       node = self.path.find_dir(path)
-
-               if node:
-                       self.env.append_value('INC_PATHS', node)
-
-       # TODO WAF 1.6
-       if USE_TOP_LEVEL:
-               self.env.append_value('INC_PATHS', self.bld.srcnode)
-
-@feature('c', 'cc', 'cxx')
-@after('init_cc', 'init_cxx')
-@before('apply_lib_vars')
-def apply_type_vars(self):
-       """before apply_lib_vars because we modify uselib
-       after init_cc and init_cxx because web need p_type_vars
-       """
-       for x in self.features:
-               if not x in ['cprogram', 'cstaticlib', 'cshlib']:
-                       continue
-               x = x.lstrip('c')
-
-               # if the type defines uselib to add, add them
-               st = self.env[x + '_USELIB']
-               if st: self.uselib = self.uselib + ' ' + st
-
-               # each compiler defines variables like 'shlib_CXXFLAGS', 'shlib_LINKFLAGS', etc
-               # so when we make a task generator of the type shlib, CXXFLAGS are modified accordingly
-               for var in self.p_type_vars:
-                       compvar = '%s_%s' % (x, var)
-                       #print compvar
-                       value = self.env[compvar]
-                       if value: self.env.append_value(var, value)
-
-@feature('cprogram', 'cshlib', 'cstaticlib')
-@after('apply_core')
-def apply_link(self):
-       """executes after apply_core for collecting 'compiled_tasks'
-       use a custom linker if specified (self.link='name-of-custom-link-task')"""
-       link = getattr(self, 'link', None)
-       if not link:
-               if 'cstaticlib' in self.features: link = 'static_link'
-               elif 'cxx' in self.features: link = 'cxx_link'
-               else: link = 'cc_link'
-
-       tsk = self.create_task(link)
-       outputs = [t.outputs[0] for t in self.compiled_tasks]
-       tsk.set_inputs(outputs)
-       tsk.set_outputs(self.path.find_or_declare(get_target_name(self)))
-
-       self.link_task = tsk
-
-@feature('c', 'cc', 'cxx')
-@after('apply_link', 'init_cc', 'init_cxx', 'apply_core')
-def apply_lib_vars(self):
-       """after apply_link because of 'link_task'
-       after default_cc because of the attribute 'uselib'"""
-
-       # after 'apply_core' in case if 'cc' if there is no link
-
-       env = self.env
-
-       # 1. the case of the libs defined in the project (visit ancestors first)
-       # the ancestors external libraries (uselib) will be prepended
-       self.uselib = self.to_list(self.uselib)
-       names = self.to_list(self.uselib_local)
-
-       seen = set([])
-       tmp = Utils.deque(names) # consume a copy of the list of names
-       while tmp:
-               lib_name = tmp.popleft()
-               # visit dependencies only once
-               if lib_name in seen:
-                       continue
-
-               y = self.name_to_obj(lib_name)
-               if not y:
-                       raise Utils.WafError('object %r was not found in uselib_local (required by %r)' % (lib_name, self.name))
-               y.post()
-               seen.add(lib_name)
-
-               # object has ancestors to process (shared libraries): add them to the end of the list
-               if getattr(y, 'uselib_local', None):
-                       lst = y.to_list(y.uselib_local)
-                       if 'cshlib' in y.features or 'cprogram' in y.features:
-                               lst = [x for x in lst if not 'cstaticlib' in self.name_to_obj(x).features]
-                       tmp.extend(lst)
-
-               # link task and flags
-               if getattr(y, 'link_task', None):
-
-                       link_name = y.target[y.target.rfind(os.sep) + 1:]
-                       if 'cstaticlib' in y.features:
-                               env.append_value('STATICLIB', link_name)
-                       elif 'cshlib' in y.features or 'cprogram' in y.features:
-                               # WARNING some linkers can link against programs
-                               env.append_value('LIB', link_name)
-
-                       # the order
-                       self.link_task.set_run_after(y.link_task)
-
-                       # for the recompilation
-                       dep_nodes = getattr(self.link_task, 'dep_nodes', [])
-                       self.link_task.dep_nodes = dep_nodes + y.link_task.outputs
-
-                       # add the link path too
-                       tmp_path = y.link_task.outputs[0].parent.bldpath(self.env)
-                       if not tmp_path in env['LIBPATH']: env.prepend_value('LIBPATH', tmp_path)
-
-               # add ancestors uselib too - but only propagate those that have no staticlib
-               for v in self.to_list(y.uselib):
-                       if not env['STATICLIB_' + v]:
-                               if not v in self.uselib:
-                                       self.uselib.insert(0, v)
-
-               # if the library task generator provides 'export_incdirs', add to the include path
-               # the export_incdirs must be a list of paths relative to the other library
-               if getattr(y, 'export_incdirs', None):
-                       for x in self.to_list(y.export_incdirs):
-                               node = y.path.find_dir(x)
-                               if not node:
-                                       raise Utils.WafError('object %r: invalid folder %r in export_incdirs' % (y.target, x))
-                               self.env.append_unique('INC_PATHS', node)
-
-       # 2. the case of the libs defined outside
-       for x in self.uselib:
-               for v in self.p_flag_vars:
-                       val = self.env[v + '_' + x]
-                       if val: self.env.append_value(v, val)
-
-@feature('cprogram', 'cstaticlib', 'cshlib')
-@after('init_cc', 'init_cxx', 'apply_link')
-def apply_objdeps(self):
-       "add the .o files produced by some other object files in the same manner as uselib_local"
-       if not getattr(self, 'add_objects', None): return
-
-       seen = []
-       names = self.to_list(self.add_objects)
-       while names:
-               x = names[0]
-
-               # visit dependencies only once
-               if x in seen:
-                       names = names[1:]
-                       continue
-
-               # object does not exist ?
-               y = self.name_to_obj(x)
-               if not y:
-                       raise Utils.WafError('object %r was not found in uselib_local (required by add_objects %r)' % (x, self.name))
-
-               # object has ancestors to process first ? update the list of names
-               if getattr(y, 'add_objects', None):
-                       added = 0
-                       lst = y.to_list(y.add_objects)
-                       lst.reverse()
-                       for u in lst:
-                               if u in seen: continue
-                               added = 1
-                               names = [u]+names
-                       if added: continue # list of names modified, loop
-
-               # safe to process the current object
-               y.post()
-               seen.append(x)
-
-               for t in y.compiled_tasks:
-                       self.link_task.inputs.extend(t.outputs)
-
-@feature('cprogram', 'cshlib', 'cstaticlib')
-@after('apply_lib_vars')
-def apply_obj_vars(self):
-       """after apply_lib_vars for uselib"""
-       v = self.env
-       lib_st           = v['LIB_ST']
-       staticlib_st     = v['STATICLIB_ST']
-       libpath_st       = v['LIBPATH_ST']
-       staticlibpath_st = v['STATICLIBPATH_ST']
-       rpath_st         = v['RPATH_ST']
-
-       app = v.append_unique
-
-       if v['FULLSTATIC']:
-               v.append_value('LINKFLAGS', v['FULLSTATIC_MARKER'])
-
-       for i in v['RPATH']:
-               if i and rpath_st:
-                       app('LINKFLAGS', rpath_st % i)
-
-       for i in v['LIBPATH']:
-               app('LINKFLAGS', libpath_st % i)
-               app('LINKFLAGS', staticlibpath_st % i)
-
-       if v['STATICLIB']:
-               v.append_value('LINKFLAGS', v['STATICLIB_MARKER'])
-               k = [(staticlib_st % i) for i in v['STATICLIB']]
-               app('LINKFLAGS', k)
-
-       # fully static binaries ?
-       if not v['FULLSTATIC']:
-               if v['STATICLIB'] or v['LIB']:
-                       v.append_value('LINKFLAGS', v['SHLIB_MARKER'])
-
-       app('LINKFLAGS', [lib_st % i for i in v['LIB']])
-
-@after('apply_link')
-def process_obj_files(self):
-       if not hasattr(self, 'obj_files'): return
-       for x in self.obj_files:
-               node = self.path.find_resource(x)
-               self.link_task.inputs.append(node)
-
-@taskgen
-def add_obj_file(self, file):
-       """Small example on how to link object files as if they were source
-       obj = bld.create_obj('cc')
-       obj.add_obj_file('foo.o')"""
-       if not hasattr(self, 'obj_files'): self.obj_files = []
-       if not 'process_obj_files' in self.meths: self.meths.append('process_obj_files')
-       self.obj_files.append(file)
-
-c_attrs = {
-'cxxflag' : 'CXXFLAGS',
-'cflag' : 'CCFLAGS',
-'ccflag' : 'CCFLAGS',
-'linkflag' : 'LINKFLAGS',
-'ldflag' : 'LINKFLAGS',
-'lib' : 'LIB',
-'libpath' : 'LIBPATH',
-'staticlib': 'STATICLIB',
-'staticlibpath': 'STATICLIBPATH',
-'rpath' : 'RPATH',
-'framework' : 'FRAMEWORK',
-'frameworkpath' : 'FRAMEWORKPATH'
-}
-
-@feature('c', 'cc', 'cxx')
-@before('init_cxx', 'init_cc')
-@before('apply_lib_vars', 'apply_obj_vars', 'apply_incpaths', 'init_cc')
-def add_extra_flags(self):
-       """case and plural insensitive
-       before apply_obj_vars for processing the library attributes
-       """
-       for x in self.__dict__.keys():
-               y = x.lower()
-               if y[-1] == 's':
-                       y = y[:-1]
-               if c_attrs.get(y, None):
-                       self.env.append_unique(c_attrs[y], getattr(self, x))
-
-# ============ the code above must not know anything about import libs ==========
-
-@feature('cshlib')
-@after('apply_link', 'default_cc')
-@before('apply_lib_vars', 'apply_objdeps', 'default_link_install')
-def apply_implib(self):
-       """On mswindows, handle dlls and their import libs
-       the .dll.a is the import lib and it is required for linking so it is installed too
-       """
-       if not self.env.DEST_BINFMT == 'pe':
-               return
-
-       self.meths.remove('default_link_install')
-
-       bindir = self.install_path
-       if not bindir: return
-
-       # install the dll in the bin dir
-       dll = self.link_task.outputs[0]
-       self.bld.install_files(bindir, dll, self.env, self.chmod)
-
-       # add linker flags to generate the import lib
-       implib = self.env['implib_PATTERN'] % os.path.split(self.target)[1]
-
-       implib = dll.parent.find_or_declare(implib)
-       self.link_task.outputs.append(implib)
-       self.bld.install_as('${LIBDIR}/%s' % implib.name, implib, self.env)
-
-       self.env.append_value('LINKFLAGS', (self.env['IMPLIB_ST'] % implib.bldpath(self.env)).split())
-
-# ============ the code above must not know anything about vnum processing on unix platforms =========
-
-@feature('cshlib')
-@after('apply_link')
-@before('apply_lib_vars', 'default_link_install')
-def apply_vnum(self):
-       """
-       libfoo.so is installed as libfoo.so.1.2.3
-       """
-       if not getattr(self, 'vnum', '') or not 'cshlib' in self.features or os.name != 'posix' or self.env.DEST_BINFMT not in ('elf', 'mac-o'):
-               return
-
-       self.meths.remove('default_link_install')
-
-       link = self.link_task
-       nums = self.vnum.split('.')
-       node = link.outputs[0]
-
-       libname = node.name
-       if libname.endswith('.dylib'):
-               name3 = libname.replace('.dylib', '.%s.dylib' % self.vnum)
-               name2 = libname.replace('.dylib', '.%s.dylib' % nums[0])
-       else:
-               name3 = libname + '.' + self.vnum
-               name2 = libname + '.' + nums[0]
-
-       if self.env.SONAME_ST:
-               v = self.env.SONAME_ST % name2
-               self.env.append_value('LINKFLAGS', v.split())
-
-       bld = self.bld
-       nums = self.vnum.split('.')
-
-       path = self.install_path
-       if not path: return
-
-       if self.env.DEST_OS == 'openbsd':
-               libname = self.link_task.outputs[0].name
-               bld.install_as('%s%s%s' % (path, os.sep, libname), node, env=self.env)
-       else:
-               bld.install_as(path + os.sep + name3, node, env=self.env)
-               bld.symlink_as(path + os.sep + name2, name3)
-               bld.symlink_as(path + os.sep + libname, name3)
-
-       # the following task is just to enable execution from the build dir :-/
-       if self.env.DEST_OS != 'openbsd':
-               self.create_task('vnum', node, [node.parent.find_or_declare(name2), node.parent.find_or_declare(name3)])
-
-def exec_vnum_link(self):
-       for x in self.outputs:
-               path = x.abspath(self.env)
-               try:
-                       os.remove(path)
-               except OSError:
-                       pass
-
-               try:
-                       os.symlink(self.inputs[0].name, path)
-               except OSError:
-                       return 1
-
-cls = Task.task_type_from_func('vnum', func=exec_vnum_link, ext_in='.bin', color='CYAN')
-cls.quiet = 1
-
-# ============ the --as-needed flag should added during the configuration, not at runtime =========
-
-@conftest
-def add_as_needed(conf):
-       if conf.env.DEST_BINFMT == 'elf' and 'gcc' in (conf.env.CXX_NAME, conf.env.CC_NAME):
-               conf.env.append_unique('LINKFLAGS', '--as-needed')
diff --git a/third_party/waf/wafadmin/Tools/compiler_cc.py b/third_party/waf/wafadmin/Tools/compiler_cc.py
deleted file mode 100644 (file)
index 642458a..0000000
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Matthias Jahn jahn dôt matthias Ã¢t freenet dôt de, 2007 (pmarat)
-
-import os, sys, imp, types, ccroot
-import optparse
-import Utils, Configure, Options
-from Logs import debug
-
-c_compiler = {
-       'win32':  ['msvc', 'gcc'],
-       'cygwin': ['gcc'],
-       'darwin': ['gcc'],
-       'aix':    ['xlc', 'gcc'],
-       'linux':  ['gcc', 'icc', 'suncc'],
-       'sunos':  ['gcc', 'suncc'],
-       'irix':   ['gcc'],
-       'hpux':   ['gcc'],
-       'gnu':    ['gcc'],
-       'default': ['gcc']
-}
-
-def __list_possible_compiler(platform):
-       try:
-               return c_compiler[platform]
-       except KeyError:
-               return c_compiler["default"]
-
-def detect(conf):
-       """
-       for each compiler for the platform, try to configure the compiler
-       in theory the tools should raise a configuration error if the compiler
-       pretends to be something it is not (setting CC=icc and trying to configure gcc)
-       """
-       try: test_for_compiler = Options.options.check_c_compiler
-       except AttributeError: conf.fatal("Add set_options(opt): opt.tool_options('compiler_cc')")
-       orig = conf.env
-       for compiler in test_for_compiler.split():
-               conf.env = orig.copy()
-               try:
-                       conf.check_tool(compiler)
-               except Configure.ConfigurationError, e:
-                       debug('compiler_cc: %r' % e)
-               else:
-                       if conf.env['CC']:
-                               orig.table = conf.env.get_merged_dict()
-                               conf.env = orig
-                               conf.check_message(compiler, '', True)
-                               conf.env['COMPILER_CC'] = compiler
-                               break
-                       conf.check_message(compiler, '', False)
-                       break
-       else:
-               conf.fatal('could not configure a c compiler!')
-
-def set_options(opt):
-       build_platform = Utils.unversioned_sys_platform()
-       possible_compiler_list = __list_possible_compiler(build_platform)
-       test_for_compiler = ' '.join(possible_compiler_list)
-       cc_compiler_opts = opt.add_option_group("C Compiler Options")
-       cc_compiler_opts.add_option('--check-c-compiler', default="%s" % test_for_compiler,
-               help='On this platform (%s) the following C-Compiler will be checked by default: "%s"' % (build_platform, test_for_compiler),
-               dest="check_c_compiler")
-
-       for c_compiler in test_for_compiler.split():
-               opt.tool_options('%s' % c_compiler, option_group=cc_compiler_opts)
diff --git a/third_party/waf/wafadmin/Tools/compiler_cxx.py b/third_party/waf/wafadmin/Tools/compiler_cxx.py
deleted file mode 100644 (file)
index aa0b0e7..0000000
+++ /dev/null
@@ -1,61 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Matthias Jahn jahn dôt matthias Ã¢t freenet dôt de 2007 (pmarat)
-
-import os, sys, imp, types, ccroot
-import optparse
-import Utils, Configure, Options
-from Logs import debug
-
-cxx_compiler = {
-'win32':  ['msvc', 'g++'],
-'cygwin': ['g++'],
-'darwin': ['g++'],
-'aix':    ['xlc++', 'g++'],
-'linux':  ['g++', 'icpc', 'sunc++'],
-'sunos':  ['g++', 'sunc++'],
-'irix':   ['g++'],
-'hpux':   ['g++'],
-'gnu':    ['g++'],
-'default': ['g++']
-}
-
-def __list_possible_compiler(platform):
-       try:
-               return cxx_compiler[platform]
-       except KeyError:
-               return cxx_compiler["default"]
-
-def detect(conf):
-       try: test_for_compiler = Options.options.check_cxx_compiler
-       except AttributeError: raise Configure.ConfigurationError("Add set_options(opt): opt.tool_options('compiler_cxx')")
-       orig = conf.env
-       for compiler in test_for_compiler.split():
-               try:
-                       conf.env = orig.copy()
-                       conf.check_tool(compiler)
-               except Configure.ConfigurationError, e:
-                       debug('compiler_cxx: %r' % e)
-               else:
-                       if conf.env['CXX']:
-                               orig.table = conf.env.get_merged_dict()
-                               conf.env = orig
-                               conf.check_message(compiler, '', True)
-                               conf.env['COMPILER_CXX'] = compiler
-                               break
-                       conf.check_message(compiler, '', False)
-                       break
-       else:
-               conf.fatal('could not configure a cxx compiler!')
-
-def set_options(opt):
-       build_platform = Utils.unversioned_sys_platform()
-       possible_compiler_list = __list_possible_compiler(build_platform)
-       test_for_compiler = ' '.join(possible_compiler_list)
-       cxx_compiler_opts = opt.add_option_group('C++ Compiler Options')
-       cxx_compiler_opts.add_option('--check-cxx-compiler', default="%s" % test_for_compiler,
-               help='On this platform (%s) the following C++ Compiler will be checked by default: "%s"' % (build_platform, test_for_compiler),
-               dest="check_cxx_compiler")
-
-       for cxx_compiler in test_for_compiler.split():
-               opt.tool_options('%s' % cxx_compiler, option_group=cxx_compiler_opts)
diff --git a/third_party/waf/wafadmin/Tools/compiler_d.py b/third_party/waf/wafadmin/Tools/compiler_d.py
deleted file mode 100644 (file)
index 378277a..0000000
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Carlos Rafael Giani, 2007 (dv)
-
-import os, sys, imp, types
-import Utils, Configure, Options
-
-def detect(conf):
-       if getattr(Options.options, 'check_dmd_first', None):
-               test_for_compiler = ['dmd', 'gdc']
-       else:
-               test_for_compiler = ['gdc', 'dmd']
-
-       for d_compiler in test_for_compiler:
-               try:
-                       conf.check_tool(d_compiler)
-               except:
-                       pass
-               else:
-                       break
-       else:
-               conf.fatal('no suitable d compiler was found')
-
-def set_options(opt):
-       d_compiler_opts = opt.add_option_group('D Compiler Options')
-       d_compiler_opts.add_option('--check-dmd-first', action='store_true',
-                       help='checks for the gdc compiler before dmd (default is the other way round)',
-                       dest='check_dmd_first',
-                       default=False)
-
-       for d_compiler in ['gdc', 'dmd']:
-               opt.tool_options('%s' % d_compiler, option_group=d_compiler_opts)
diff --git a/third_party/waf/wafadmin/Tools/config_c.py b/third_party/waf/wafadmin/Tools/config_c.py
deleted file mode 100644 (file)
index cdf3b3e..0000000
+++ /dev/null
@@ -1,754 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005-2008 (ita)
-
-"""
-c/c++ configuration routines
-"""
-
-import os, imp, sys, shlex, shutil
-from Utils import md5
-import Build, Utils, Configure, Task, Options, Logs, TaskGen
-from Constants import *
-from Configure import conf, conftest
-
-cfg_ver = {
-       'atleast-version': '>=',
-       'exact-version': '==',
-       'max-version': '<=',
-}
-
-SNIP1 = '''
-       int main() {
-       void *p;
-       p=(void*)(%s);
-       return 0;
-}
-'''
-
-SNIP2 = '''
-int main() {
-       if ((%(type_name)s *) 0) return 0;
-       if (sizeof (%(type_name)s)) return 0;
-}
-'''
-
-SNIP3 = '''
-int main() {
-       return 0;
-}
-'''
-
-def parse_flags(line, uselib, env):
-       """pkg-config still has bugs on some platforms, and there are many -config programs, parsing flags is necessary :-/"""
-
-       lst = shlex.split(line)
-       while lst:
-               x = lst.pop(0)
-               st = x[:2]
-               ot = x[2:]
-               app = env.append_value
-               if st == '-I' or st == '/I':
-                       if not ot: ot = lst.pop(0)
-                       app('CPPPATH_' + uselib, ot)
-               elif st == '-D':
-                       if not ot: ot = lst.pop(0)
-                       app('CXXDEFINES_' + uselib, ot)
-                       app('CCDEFINES_' + uselib, ot)
-               elif st == '-l':
-                       if not ot: ot = lst.pop(0)
-                       app('LIB_' + uselib, ot)
-               elif st == '-L':
-                       if not ot: ot = lst.pop(0)
-                       app('LIBPATH_' + uselib, ot)
-               elif x == '-pthread' or x.startswith('+'):
-                       app('CCFLAGS_' + uselib, x)
-                       app('CXXFLAGS_' + uselib, x)
-                       app('LINKFLAGS_' + uselib, x)
-               elif x == '-framework':
-                       app('FRAMEWORK_' + uselib, lst.pop(0))
-               elif x.startswith('-F'):
-                       app('FRAMEWORKPATH_' + uselib, x[2:])
-               elif x.startswith('-std'):
-                       app('CCFLAGS_' + uselib, x)
-                       app('CXXFLAGS_' + uselib, x)
-                       app('LINKFLAGS_' + uselib, x)
-               #
-               # NOTE on special treatment of -Wl,-R and -Wl,-rpath:
-               #
-               # It is important to not put a library provided RPATH
-               # into the LINKFLAGS but in the RPATH instead, since
-               # the provided LINKFLAGS get prepended to our own internal
-               # RPATH later, and hence can potentially lead to linking
-               # in too old versions of our internal libs.
-               #
-               elif x == '-Wl,-rpath' or x == '-Wl,-R':
-                       app('RPATH_' + uselib, lst.pop(0).lstrip('-Wl,'))
-               elif x.startswith('-Wl,-R,'):
-                       app('RPATH_' + uselib, x[7:])
-               elif x.startswith('-Wl,-R'):
-                       app('RPATH_' + uselib, x[6:])
-               elif x.startswith('-Wl,-rpath,'):
-                       app('RPATH_' + uselib, x[11:])
-               elif x.startswith('-Wl'):
-                       app('LINKFLAGS_' + uselib, x)
-               elif x.startswith('-m') or x.startswith('-f'):
-                       app('CCFLAGS_' + uselib, x)
-                       app('CXXFLAGS_' + uselib, x)
-
-@conf
-def ret_msg(self, f, kw):
-       """execute a function, when provided"""
-       if isinstance(f, str):
-               return f
-       return f(kw)
-
-@conf
-def validate_cfg(self, kw):
-       if not 'path' in kw:
-               if not self.env.PKGCONFIG:
-                       self.find_program('pkg-config', var='PKGCONFIG')
-               kw['path'] = self.env.PKGCONFIG
-
-       # pkg-config version
-       if 'atleast_pkgconfig_version' in kw:
-               if not 'msg' in kw:
-                       kw['msg'] = 'Checking for pkg-config version >= %s' % kw['atleast_pkgconfig_version']
-               return
-
-       # pkg-config --modversion
-       if 'modversion' in kw:
-               return
-
-       if 'variables' in kw:
-               if not 'msg' in kw:
-                       kw['msg'] = 'Checking for %s variables' % kw['package']
-               return
-
-       # checking for the version of a module, for the moment, one thing at a time
-       for x in cfg_ver.keys():
-               y = x.replace('-', '_')
-               if y in kw:
-                       if not 'package' in kw:
-                               raise ValueError('%s requires a package' % x)
-
-                       if not 'msg' in kw:
-                               kw['msg'] = 'Checking for %s %s %s' % (kw['package'], cfg_ver[x], kw[y])
-                       return
-
-       if not 'msg' in kw:
-               kw['msg'] = 'Checking for %s' % (kw['package'] or kw['path'])
-       if not 'okmsg' in kw:
-               kw['okmsg'] = 'yes'
-       if not 'errmsg' in kw:
-               kw['errmsg'] = 'not found'
-
-@conf
-def cmd_and_log(self, cmd, kw):
-       Logs.debug('runner: %s\n' % cmd)
-       if self.log:
-               self.log.write('%s\n' % cmd)
-
-       try:
-               p = Utils.pproc.Popen(cmd, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE, shell=True)
-               (out, err) = p.communicate()
-       except OSError, e:
-               self.log.write('error %r' % e)
-               self.fatal(str(e))
-
-       # placeholder, don't touch
-       out = str(out)
-       err = str(err)
-
-       if self.log:
-               self.log.write(out)
-               self.log.write(err)
-
-       if p.returncode:
-               if not kw.get('errmsg', ''):
-                       if kw.get('mandatory', False):
-                               kw['errmsg'] = out.strip()
-                       else:
-                               kw['errmsg'] = 'no'
-               self.fatal('fail')
-       return out
-
-@conf
-def exec_cfg(self, kw):
-
-       # pkg-config version
-       if 'atleast_pkgconfig_version' in kw:
-               cmd = '%s --atleast-pkgconfig-version=%s' % (kw['path'], kw['atleast_pkgconfig_version'])
-               self.cmd_and_log(cmd, kw)
-               if not 'okmsg' in kw:
-                       kw['okmsg'] = 'yes'
-               return
-
-       # checking for the version of a module
-       for x in cfg_ver:
-               y = x.replace('-', '_')
-               if y in kw:
-                       self.cmd_and_log('%s --%s=%s %s' % (kw['path'], x, kw[y], kw['package']), kw)
-                       if not 'okmsg' in kw:
-                               kw['okmsg'] = 'yes'
-                       self.define(self.have_define(kw.get('uselib_store', kw['package'])), 1, 0)
-                       break
-
-       # retrieving the version of a module
-       if 'modversion' in kw:
-               version = self.cmd_and_log('%s --modversion %s' % (kw['path'], kw['modversion']), kw).strip()
-               self.define('%s_VERSION' % Utils.quote_define_name(kw.get('uselib_store', kw['modversion'])), version)
-               return version
-
-       # retrieving variables of a module
-       if 'variables' in kw:
-               env = kw.get('env', self.env)
-               uselib = kw.get('uselib_store', kw['package'].upper())
-               vars = Utils.to_list(kw['variables'])
-               for v in vars:
-                       val = self.cmd_and_log('%s --variable=%s %s' % (kw['path'], v, kw['package']), kw).strip()
-                       var = '%s_%s' % (uselib, v)
-                       env[var] = val
-               if not 'okmsg' in kw:
-                       kw['okmsg'] = 'yes'
-               return
-
-       lst = [kw['path']]
-
-
-       defi = kw.get('define_variable', None)
-       if not defi:
-               defi = self.env.PKG_CONFIG_DEFINES or {}
-       for key, val in defi.iteritems():
-               lst.append('--define-variable=%s=%s' % (key, val))
-
-       lst.append(kw.get('args', ''))
-       lst.append(kw['package'])
-
-       # so we assume the command-line will output flags to be parsed afterwards
-       cmd = ' '.join(lst)
-       ret = self.cmd_and_log(cmd, kw)
-       if not 'okmsg' in kw:
-               kw['okmsg'] = 'yes'
-
-       self.define(self.have_define(kw.get('uselib_store', kw['package'])), 1, 0)
-       parse_flags(ret, kw.get('uselib_store', kw['package'].upper()), kw.get('env', self.env))
-       return ret
-
-@conf
-def check_cfg(self, *k, **kw):
-       """
-       for pkg-config mostly, but also all the -config tools
-       conf.check_cfg(path='mpicc', args='--showme:compile --showme:link', package='', uselib_store='OPEN_MPI')
-       conf.check_cfg(package='dbus-1', variables='system_bus_default_address session_bus_services_dir')
-       """
-
-       self.validate_cfg(kw)
-       if 'msg' in kw:
-               self.check_message_1(kw['msg'])
-       ret = None
-       try:
-               ret = self.exec_cfg(kw)
-       except Configure.ConfigurationError, e:
-               if 'errmsg' in kw:
-                       self.check_message_2(kw['errmsg'], 'YELLOW')
-               if 'mandatory' in kw and kw['mandatory']:
-                       if Logs.verbose > 1:
-                               raise
-                       else:
-                               self.fatal('the configuration failed (see %r)' % self.log.name)
-       else:
-               kw['success'] = ret
-               if 'okmsg' in kw:
-                       self.check_message_2(self.ret_msg(kw['okmsg'], kw))
-
-       return ret
-
-# the idea is the following: now that we are certain
-# that all the code here is only for c or c++, it is
-# easy to put all the logic in one function
-#
-# this should prevent code duplication (ita)
-
-# env: an optional environment (modified -> provide a copy)
-# compiler: cc or cxx - it tries to guess what is best
-# type: cprogram, cshlib, cstaticlib
-# code: a c code to execute
-# uselib_store: where to add the variables
-# uselib: parameters to use for building
-# define: define to set, like FOO in #define FOO, if not set, add /* #undef FOO */
-# execute: True or False - will return the result of the execution
-
-@conf
-def validate_c(self, kw):
-       """validate the parameters for the test method"""
-
-       if not 'env' in kw:
-               kw['env'] = self.env.copy()
-
-       env = kw['env']
-       if not 'compiler' in kw:
-               kw['compiler'] = 'cc'
-               if env['CXX_NAME'] and Task.TaskBase.classes.get('cxx', None):
-                       kw['compiler'] = 'cxx'
-                       if not self.env['CXX']:
-                               self.fatal('a c++ compiler is required')
-               else:
-                       if not self.env['CC']:
-                               self.fatal('a c compiler is required')
-
-       if not 'type' in kw:
-               kw['type'] = 'cprogram'
-
-       assert not(kw['type'] != 'cprogram' and kw.get('execute', 0)), 'can only execute programs'
-
-
-       #if kw['type'] != 'program' and kw.get('execute', 0):
-       #       raise ValueError, 'can only execute programs'
-
-       def to_header(dct):
-               if 'header_name' in dct:
-                       dct = Utils.to_list(dct['header_name'])
-                       return ''.join(['#include <%s>\n' % x for x in dct])
-               return ''
-
-       # set the file name
-       if not 'compile_mode' in kw:
-               kw['compile_mode'] = (kw['compiler'] == 'cxx') and 'cxx' or 'cc'
-
-       if not 'compile_filename' in kw:
-               kw['compile_filename'] = 'test.c' + ((kw['compile_mode'] == 'cxx') and 'pp' or '')
-
-       #OSX
-       if 'framework_name' in kw:
-               try: TaskGen.task_gen.create_task_macapp
-               except AttributeError: self.fatal('frameworks require the osx tool')
-
-               fwkname = kw['framework_name']
-               if not 'uselib_store' in kw:
-                       kw['uselib_store'] = fwkname.upper()
-
-               if not kw.get('no_header', False):
-                       if not 'header_name' in kw:
-                               kw['header_name'] = []
-                       fwk = '%s/%s.h' % (fwkname, fwkname)
-                       if kw.get('remove_dot_h', None):
-                               fwk = fwk[:-2]
-                       kw['header_name'] = Utils.to_list(kw['header_name']) + [fwk]
-
-               kw['msg'] = 'Checking for framework %s' % fwkname
-               kw['framework'] = fwkname
-               #kw['frameworkpath'] = set it yourself
-
-       if 'function_name' in kw:
-               fu = kw['function_name']
-               if not 'msg' in kw:
-                       kw['msg'] = 'Checking for function %s' % fu
-               kw['code'] = to_header(kw) + SNIP1 % fu
-               if not 'uselib_store' in kw:
-                       kw['uselib_store'] = fu.upper()
-               if not 'define_name' in kw:
-                       kw['define_name'] = self.have_define(fu)
-
-       elif 'type_name' in kw:
-               tu = kw['type_name']
-               if not 'msg' in kw:
-                       kw['msg'] = 'Checking for type %s' % tu
-               if not 'header_name' in kw:
-                       kw['header_name'] = 'stdint.h'
-               kw['code'] = to_header(kw) + SNIP2 % {'type_name' : tu}
-               if not 'define_name' in kw:
-                       kw['define_name'] = self.have_define(tu.upper())
-
-       elif 'header_name' in kw:
-               if not 'msg' in kw:
-                       kw['msg'] = 'Checking for header %s' % kw['header_name']
-
-               l = Utils.to_list(kw['header_name'])
-               assert len(l)>0, 'list of headers in header_name is empty'
-
-               kw['code'] = to_header(kw) + SNIP3
-
-               if not 'uselib_store' in kw:
-                       kw['uselib_store'] = l[0].upper()
-
-               if not 'define_name' in kw:
-                       kw['define_name'] = self.have_define(l[0])
-
-       if 'lib' in kw:
-               if not 'msg' in kw:
-                       kw['msg'] = 'Checking for library %s' % kw['lib']
-               if not 'uselib_store' in kw:
-                       kw['uselib_store'] = kw['lib'].upper()
-
-       if 'staticlib' in kw:
-               if not 'msg' in kw:
-                       kw['msg'] = 'Checking for static library %s' % kw['staticlib']
-               if not 'uselib_store' in kw:
-                       kw['uselib_store'] = kw['staticlib'].upper()
-
-       if 'fragment' in kw:
-               # an additional code fragment may be provided to replace the predefined code
-               # in custom headers
-               kw['code'] = kw['fragment']
-               if not 'msg' in kw:
-                       kw['msg'] = 'Checking for custom code'
-               if not 'errmsg' in kw:
-                       kw['errmsg'] = 'no'
-
-       for (flagsname,flagstype) in [('cxxflags','compiler'), ('cflags','compiler'), ('linkflags','linker')]:
-               if flagsname in kw:
-                       if not 'msg' in kw:
-                               kw['msg'] = 'Checking for %s flags %s' % (flagstype, kw[flagsname])
-                       if not 'errmsg' in kw:
-                               kw['errmsg'] = 'no'
-
-       if not 'execute' in kw:
-               kw['execute'] = False
-
-       if not 'errmsg' in kw:
-               kw['errmsg'] = 'not found'
-
-       if not 'okmsg' in kw:
-               kw['okmsg'] = 'yes'
-
-       if not 'code' in kw:
-               kw['code'] = SNIP3
-
-       if not kw.get('success'): kw['success'] = None
-
-       assert 'msg' in kw, 'invalid parameters, read http://freehackers.org/~tnagy/wafbook/single.html#config_helpers_c'
-
-@conf
-def post_check(self, *k, **kw):
-       "set the variables after a test was run successfully"
-
-       is_success = False
-       if kw['execute']:
-               if kw['success'] is not None:
-                       is_success = True
-       else:
-               is_success = (kw['success'] == 0)
-
-       if 'define_name' in kw:
-               if 'header_name' in kw or 'function_name' in kw or 'type_name' in kw or 'fragment' in kw:
-                       if kw['execute']:
-                               key = kw['success']
-                               if isinstance(key, str):
-                                       if key:
-                                               self.define(kw['define_name'], key, quote=kw.get('quote', 1))
-                                       else:
-                                               self.define_cond(kw['define_name'], True)
-                               else:
-                                       self.define_cond(kw['define_name'], False)
-                       else:
-                               self.define_cond(kw['define_name'], is_success)
-
-       if is_success and 'uselib_store' in kw:
-               import cc, cxx
-               for k in set(cc.g_cc_flag_vars).union(cxx.g_cxx_flag_vars):
-                       lk = k.lower()
-                       # inconsistency: includes -> CPPPATH
-                       if k == 'CPPPATH': lk = 'includes'
-                       if k == 'CXXDEFINES': lk = 'defines'
-                       if k == 'CCDEFINES': lk = 'defines'
-                       if lk in kw:
-                               val = kw[lk]
-                               # remove trailing slash
-                               if isinstance(val, str):
-                                       val = val.rstrip(os.path.sep)
-                               self.env.append_unique(k + '_' + kw['uselib_store'], val)
-
-@conf
-def check(self, *k, **kw):
-       # so this will be the generic function
-       # it will be safer to use check_cxx or check_cc
-       self.validate_c(kw)
-       self.check_message_1(kw['msg'])
-       ret = None
-       try:
-               ret = self.run_c_code(*k, **kw)
-       except Configure.ConfigurationError, e:
-               self.check_message_2(kw['errmsg'], 'YELLOW')
-               if 'mandatory' in kw and kw['mandatory']:
-                       if Logs.verbose > 1:
-                               raise
-                       else:
-                               self.fatal('the configuration failed (see %r)' % self.log.name)
-       else:
-               kw['success'] = ret
-               self.check_message_2(self.ret_msg(kw['okmsg'], kw))
-
-       self.post_check(*k, **kw)
-       if not kw.get('execute', False):
-               return ret == 0
-       return ret
-
-@conf
-def run_c_code(self, *k, **kw):
-       test_f_name = kw['compile_filename']
-
-       k = 0
-       while k < 10000:
-               # make certain to use a fresh folder - necessary for win32
-               dir = os.path.join(self.blddir, '.conf_check_%d' % k)
-
-               # if the folder already exists, remove it
-               try:
-                       shutil.rmtree(dir)
-               except OSError:
-                       pass
-
-               try:
-                       os.stat(dir)
-               except OSError:
-                       break
-
-               k += 1
-
-       try:
-               os.makedirs(dir)
-       except:
-               self.fatal('cannot create a configuration test folder %r' % dir)
-
-       try:
-               os.stat(dir)
-       except:
-               self.fatal('cannot use the configuration test folder %r' % dir)
-
-       bdir = os.path.join(dir, 'testbuild')
-
-       if not os.path.exists(bdir):
-               os.makedirs(bdir)
-
-       env = kw['env']
-
-       dest = open(os.path.join(dir, test_f_name), 'w')
-       dest.write(kw['code'])
-       dest.close()
-
-       back = os.path.abspath('.')
-
-       bld = Build.BuildContext()
-       bld.log = self.log
-       bld.all_envs.update(self.all_envs)
-       bld.all_envs['default'] = env
-       bld.lst_variants = bld.all_envs.keys()
-       bld.load_dirs(dir, bdir)
-
-       os.chdir(dir)
-
-       bld.rescan(bld.srcnode)
-
-       if not 'features' in kw:
-               # conf.check(features='cc cprogram pyext', ...)
-               kw['features'] = [kw['compile_mode'], kw['type']] # "cprogram cc"
-
-       o = bld(features=kw['features'], source=test_f_name, target='testprog')
-
-       for k, v in kw.iteritems():
-               setattr(o, k, v)
-
-       self.log.write("==>\n%s\n<==\n" % kw['code'])
-
-       # compile the program
-       try:
-               bld.compile()
-       except Utils.WafError:
-               ret = Utils.ex_stack()
-       else:
-               ret = 0
-
-       # chdir before returning
-       os.chdir(back)
-
-       if ret:
-               self.log.write('command returned %r' % ret)
-               self.fatal(str(ret))
-
-       # if we need to run the program, try to get its result
-       # keep the name of the program to execute
-       if kw['execute']:
-               lastprog = o.link_task.outputs[0].abspath(env)
-
-               args = Utils.to_list(kw.get('exec_args', []))
-               proc = Utils.pproc.Popen([lastprog] + args, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE)
-               (out, err) = proc.communicate()
-               w = self.log.write
-               w(str(out))
-               w('\n')
-               w(str(err))
-               w('\n')
-               w('returncode %r' % proc.returncode)
-               w('\n')
-               if proc.returncode:
-                       self.fatal(Utils.ex_stack())
-               ret = out
-
-       return ret
-
-@conf
-def check_cxx(self, *k, **kw):
-       kw['compiler'] = 'cxx'
-       return self.check(*k, **kw)
-
-@conf
-def check_cc(self, *k, **kw):
-       kw['compiler'] = 'cc'
-       return self.check(*k, **kw)
-
-@conf
-def define(self, define, value, quote=1):
-       """store a single define and its state into an internal list for later
-          writing to a config header file.  Value can only be
-          a string or int; other types not supported.  String
-          values will appear properly quoted in the generated
-          header file."""
-       assert define and isinstance(define, str)
-
-       # ordered_dict is for writing the configuration header in order
-       tbl = self.env[DEFINES] or Utils.ordered_dict()
-
-       # the user forgot to tell if the value is quoted or not
-       if isinstance(value, str):
-               if quote:
-                       tbl[define] = '"%s"' % repr('"'+value)[2:-1].replace('"', '\\"')
-               else:
-                       tbl[define] = value
-       elif isinstance(value, int):
-               tbl[define] = value
-       else:
-               raise TypeError('define %r -> %r must be a string or an int' % (define, value))
-
-       # add later to make reconfiguring faster
-       self.env[DEFINES] = tbl
-       self.env[define] = value # <- not certain this is necessary
-
-@conf
-def undefine(self, define):
-       """store a single define and its state into an internal list
-          for later writing to a config header file"""
-       assert define and isinstance(define, str)
-
-       tbl = self.env[DEFINES] or Utils.ordered_dict()
-
-       value = UNDEFINED
-       tbl[define] = value
-
-       # add later to make reconfiguring faster
-       self.env[DEFINES] = tbl
-       self.env[define] = value
-
-@conf
-def define_cond(self, name, value):
-       """Conditionally define a name.
-       Formally equivalent to: if value: define(name, 1) else: undefine(name)"""
-       if value:
-               self.define(name, 1)
-       else:
-               self.undefine(name)
-
-@conf
-def is_defined(self, key):
-       defines = self.env[DEFINES]
-       if not defines:
-               return False
-       try:
-               value = defines[key]
-       except KeyError:
-               return False
-       else:
-               return value != UNDEFINED
-
-@conf
-def get_define(self, define):
-       "get the value of a previously stored define"
-       try: return self.env[DEFINES][define]
-       except KeyError: return None
-
-@conf
-def have_define(self, name):
-       "prefix the define with 'HAVE_' and make sure it has valid characters."
-       return self.__dict__.get('HAVE_PAT', 'HAVE_%s') % Utils.quote_define_name(name)
-
-@conf
-def write_config_header(self, configfile='', env='', guard='', top=False):
-       "save the defines into a file"
-       if not configfile: configfile = WAF_CONFIG_H
-       waf_guard = guard or '_%s_WAF' % Utils.quote_define_name(configfile)
-
-       # configfile -> absolute path
-       # there is a good reason to concatenate first and to split afterwards
-       if not env: env = self.env
-       if top:
-               diff = ''
-       else:
-               diff = Utils.diff_path(self.srcdir, self.curdir)
-       full = os.sep.join([self.blddir, env.variant(), diff, configfile])
-       full = os.path.normpath(full)
-       (dir, base) = os.path.split(full)
-
-       try: os.makedirs(dir)
-       except: pass
-
-       dest = open(full, 'w')
-       dest.write('/* Configuration header created by Waf - do not edit */\n')
-       dest.write('#ifndef %s\n#define %s\n\n' % (waf_guard, waf_guard))
-
-       dest.write(self.get_config_header())
-
-       # config files are not removed on "waf clean"
-       env.append_unique(CFG_FILES, os.path.join(diff, configfile))
-
-       dest.write('\n#endif /* %s */\n' % waf_guard)
-       dest.close()
-
-@conf
-def get_config_header(self):
-       """Fill-in the contents of the config header. Override when you need to write your own config header."""
-       config_header = []
-
-       tbl = self.env[DEFINES] or Utils.ordered_dict()
-       for key in tbl.allkeys:
-               value = tbl[key]
-               if value is None:
-                       config_header.append('#define %s' % key)
-               elif value is UNDEFINED:
-                       config_header.append('/* #undef %s */' % key)
-               else:
-                       config_header.append('#define %s %s' % (key, value))
-       return "\n".join(config_header)
-
-@conftest
-def find_cpp(conf):
-       v = conf.env
-       cpp = []
-       if v['CPP']: cpp = v['CPP']
-       elif 'CPP' in conf.environ: cpp = conf.environ['CPP']
-       if not cpp: cpp = conf.find_program('cpp', var='CPP')
-       #if not cpp: cpp = v['CC']
-       #if not cpp: cpp = v['CXX']
-       v['CPP'] = cpp
-
-@conftest
-def cc_add_flags(conf):
-       conf.add_os_flags('CFLAGS', 'CCFLAGS')
-       conf.add_os_flags('CPPFLAGS')
-
-@conftest
-def cxx_add_flags(conf):
-       conf.add_os_flags('CXXFLAGS')
-       conf.add_os_flags('CPPFLAGS')
-
-@conftest
-def link_add_flags(conf):
-       conf.add_os_flags('LINKFLAGS')
-       conf.add_os_flags('LDFLAGS', 'LINKFLAGS')
-
-@conftest
-def cc_load_tools(conf):
-       conf.check_tool('cc')
-
-@conftest
-def cxx_load_tools(conf):
-       conf.check_tool('cxx')
diff --git a/third_party/waf/wafadmin/Tools/cs.py b/third_party/waf/wafadmin/Tools/cs.py
deleted file mode 100644 (file)
index 4c987d2..0000000
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006 (ita)
-
-"C# support"
-
-import TaskGen, Utils, Task, Options
-from Logs import error
-from TaskGen import before, after, taskgen, feature
-
-flag_vars= ['FLAGS', 'ASSEMBLIES']
-
-@feature('cs')
-def init_cs(self):
-       Utils.def_attrs(self,
-               flags = '',
-               assemblies = '',
-               resources = '',
-               uselib = '')
-
-@feature('cs')
-@after('init_cs')
-def apply_uselib_cs(self):
-       if not self.uselib:
-               return
-       global flag_vars
-       for var in self.to_list(self.uselib):
-               for v in self.flag_vars:
-                       val = self.env[v+'_'+var]
-                       if val: self.env.append_value(v, val)
-
-@feature('cs')
-@after('apply_uselib_cs')
-@before('apply_core')
-def apply_cs(self):
-       try: self.meths.remove('apply_core')
-       except ValueError: pass
-
-       # process the flags for the assemblies
-       for i in self.to_list(self.assemblies) + self.env['ASSEMBLIES']:
-               self.env.append_unique('_ASSEMBLIES', '/r:'+i)
-
-       # process the flags for the resources
-       for i in self.to_list(self.resources):
-               self.env.append_unique('_RESOURCES', '/resource:'+i)
-
-       # what kind of assembly are we generating?
-       self.env['_TYPE'] = getattr(self, 'type', 'exe')
-
-       # additional flags
-       self.env.append_unique('_FLAGS', self.to_list(self.flags))
-       self.env.append_unique('_FLAGS', self.env.FLAGS)
-
-       # process the sources
-       nodes = [self.path.find_resource(i) for i in self.to_list(self.source)]
-       self.create_task('mcs', nodes, self.path.find_or_declare(self.target))
-
-Task.simple_task_type('mcs', '${MCS} ${SRC} /target:${_TYPE} /out:${TGT} ${_FLAGS} ${_ASSEMBLIES} ${_RESOURCES}', color='YELLOW')
-
-def detect(conf):
-       csc = getattr(Options.options, 'cscbinary', None)
-       if csc:
-               conf.env.MCS = csc
-       conf.find_program(['gmcs', 'mcs'], var='MCS')
-
-def set_options(opt):
-       opt.add_option('--with-csc-binary', type='string', dest='cscbinary')
diff --git a/third_party/waf/wafadmin/Tools/cxx.py b/third_party/waf/wafadmin/Tools/cxx.py
deleted file mode 100644 (file)
index 184fee3..0000000
+++ /dev/null
@@ -1,103 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005 (ita)
-
-"Base for c++ programs and libraries"
-
-import TaskGen, Task, Utils
-from Logs import debug
-import ccroot # <- do not remove
-from TaskGen import feature, before, extension, after
-
-g_cxx_flag_vars = [
-'CXXDEPS', 'FRAMEWORK', 'FRAMEWORKPATH',
-'STATICLIB', 'LIB', 'LIBPATH', 'LINKFLAGS', 'RPATH',
-'CXXFLAGS', 'CCFLAGS', 'CPPPATH', 'CPPFLAGS', 'CXXDEFINES']
-"main cpp variables"
-
-EXT_CXX = ['.cpp', '.cc', '.cxx', '.C', '.c++']
-
-g_cxx_type_vars=['CXXFLAGS', 'LINKFLAGS']
-
-# TODO remove in waf 1.6
-class cxx_taskgen(ccroot.ccroot_abstract):
-       pass
-
-@feature('cxx')
-@before('apply_type_vars')
-@after('default_cc')
-def init_cxx(self):
-       if not 'cc' in self.features:
-               self.mappings['.c'] = TaskGen.task_gen.mappings['.cxx']
-
-       self.p_flag_vars = set(self.p_flag_vars).union(g_cxx_flag_vars)
-       self.p_type_vars = set(self.p_type_vars).union(g_cxx_type_vars)
-
-       if not self.env['CXX_NAME']:
-               raise Utils.WafError("At least one compiler (g++, ..) must be selected")
-
-@feature('cxx')
-@after('apply_incpaths')
-def apply_obj_vars_cxx(self):
-       """after apply_incpaths for INC_PATHS"""
-       env = self.env
-       app = env.append_unique
-       cxxpath_st = env['CPPPATH_ST']
-
-       # local flags come first
-       # set the user-defined includes paths
-       for i in env['INC_PATHS']:
-               app('_CXXINCFLAGS', cxxpath_st % i.bldpath(env))
-               app('_CXXINCFLAGS', cxxpath_st % i.srcpath(env))
-
-       # set the library include paths
-       for i in env['CPPPATH']:
-               app('_CXXINCFLAGS', cxxpath_st % i)
-
-@feature('cxx')
-@after('apply_lib_vars')
-def apply_defines_cxx(self):
-       """after uselib is set for CXXDEFINES"""
-       self.defines = getattr(self, 'defines', [])
-       lst = self.to_list(self.defines) + self.to_list(self.env['CXXDEFINES'])
-       milst = []
-
-       # now process the local defines
-       for defi in lst:
-               if not defi in milst:
-                       milst.append(defi)
-
-       # CXXDEFINES_USELIB
-       libs = self.to_list(self.uselib)
-       for l in libs:
-               val = self.env['CXXDEFINES_'+l]
-               if val: milst += self.to_list(val)
-
-       self.env['DEFLINES'] = ["%s %s" % (x[0], Utils.trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in milst]]
-       y = self.env['CXXDEFINES_ST']
-       self.env.append_unique('_CXXDEFFLAGS', [y%x for x in milst])
-
-@extension(EXT_CXX)
-def cxx_hook(self, node):
-       # create the compilation task: cpp or cc
-       if getattr(self, 'obj_ext', None):
-               obj_ext = self.obj_ext
-       else:
-               obj_ext = '_%d.o' % self.idx
-
-       task = self.create_task('cxx', node, node.change_ext(obj_ext))
-       try:
-               self.compiled_tasks.append(task)
-       except AttributeError:
-               raise Utils.WafError('Have you forgotten to set the feature "cxx" on %s?' % str(self))
-       return task
-
-cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${_CXXINCFLAGS} ${_CXXDEFFLAGS} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT}'
-cls = Task.simple_task_type('cxx', cxx_str, color='GREEN', ext_out='.o', ext_in='.cxx', shell=False)
-cls.scan = ccroot.scan
-cls.vars.append('CXXDEPS')
-
-link_str = '${LINK_CXX} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath(env)} ${LINKFLAGS}'
-cls = Task.simple_task_type('cxx_link', link_str, color='YELLOW', ext_in='.o', ext_out='.bin', shell=False)
-cls.maxjobs = 1
-cls.install = Utils.nada
diff --git a/third_party/waf/wafadmin/Tools/d.py b/third_party/waf/wafadmin/Tools/d.py
deleted file mode 100644 (file)
index 2c2e948..0000000
+++ /dev/null
@@ -1,534 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Carlos Rafael Giani, 2007 (dv)
-# Thomas Nagy, 2007-2008 (ita)
-
-import os, sys, re, optparse
-import ccroot # <- leave this
-import TaskGen, Utils, Task, Configure, Logs, Build
-from Logs import debug, error
-from TaskGen import taskgen, feature, after, before, extension
-from Configure import conftest
-
-EXT_D = ['.d', '.di', '.D']
-D_METHS = ['apply_core', 'apply_vnum', 'apply_objdeps'] # additional d methods
-
-DLIB = """
-version(D_Version2) {
-       import std.stdio;
-       int main() {
-               writefln("phobos2");
-               return 0;
-       }
-} else {
-       version(Tango) {
-               import tango.stdc.stdio;
-               int main() {
-                       printf("tango");
-                       return 0;
-               }
-       } else {
-               import std.stdio;
-               int main() {
-                       writefln("phobos1");
-                       return 0;
-               }
-       }
-}
-"""
-
-def filter_comments(filename):
-       txt = Utils.readf(filename)
-       i = 0
-       buf = []
-       max = len(txt)
-       begin = 0
-       while i < max:
-               c = txt[i]
-               if c == '"' or c == "'":  # skip a string or character literal
-                       buf.append(txt[begin:i])
-                       delim = c
-                       i += 1
-                       while i < max:
-                               c = txt[i]
-                               if c == delim: break
-                               elif c == '\\':  # skip the character following backslash
-                                       i += 1
-                               i += 1
-                       i += 1
-                       begin = i
-               elif c == '/':  # try to replace a comment with whitespace
-                       buf.append(txt[begin:i])
-                       i += 1
-                       if i == max: break
-                       c = txt[i]
-                       if c == '+':  # eat nesting /+ +/ comment
-                               i += 1
-                               nesting = 1
-                               c = None
-                               while i < max:
-                                       prev = c
-                                       c = txt[i]
-                                       if prev == '/' and c == '+':
-                                               nesting += 1
-                                               c = None
-                                       elif prev == '+' and c == '/':
-                                               nesting -= 1
-                                               if nesting == 0: break
-                                               c = None
-                                       i += 1
-                       elif c == '*':  # eat /* */ comment
-                               i += 1
-                               c = None
-                               while i < max:
-                                       prev = c
-                                       c = txt[i]
-                                       if prev == '*' and c == '/': break
-                                       i += 1
-                       elif c == '/':  # eat // comment
-                               i += 1
-                               while i < max and txt[i] != '\n':
-                                       i += 1
-                       else:  # no comment
-                               begin = i - 1
-                               continue
-                       i += 1
-                       begin = i
-                       buf.append(' ')
-               else:
-                       i += 1
-       buf.append(txt[begin:])
-       return buf
-
-class d_parser(object):
-       def __init__(self, env, incpaths):
-               #self.code = ''
-               #self.module = ''
-               #self.imports = []
-
-               self.allnames = []
-
-               self.re_module = re.compile("module\s+([^;]+)")
-               self.re_import = re.compile("import\s+([^;]+)")
-               self.re_import_bindings = re.compile("([^:]+):(.*)")
-               self.re_import_alias = re.compile("[^=]+=(.+)")
-
-               self.env = env
-
-               self.nodes = []
-               self.names = []
-
-               self.incpaths = incpaths
-
-       def tryfind(self, filename):
-               found = 0
-               for n in self.incpaths:
-                       found = n.find_resource(filename.replace('.', '/') + '.d')
-                       if found:
-                               self.nodes.append(found)
-                               self.waiting.append(found)
-                               break
-               if not found:
-                       if not filename in self.names:
-                               self.names.append(filename)
-
-       def get_strings(self, code):
-               #self.imports = []
-               self.module = ''
-               lst = []
-
-               # get the module name (if present)
-
-               mod_name = self.re_module.search(code)
-               if mod_name:
-                       self.module = re.sub('\s+', '', mod_name.group(1)) # strip all whitespaces
-
-               # go through the code, have a look at all import occurrences
-
-               # first, lets look at anything beginning with "import" and ending with ";"
-               import_iterator = self.re_import.finditer(code)
-               if import_iterator:
-                       for import_match in import_iterator:
-                               import_match_str = re.sub('\s+', '', import_match.group(1)) # strip all whitespaces
-
-                               # does this end with an import bindings declaration?
-                               # (import bindings always terminate the list of imports)
-                               bindings_match = self.re_import_bindings.match(import_match_str)
-                               if bindings_match:
-                                       import_match_str = bindings_match.group(1)
-                                       # if so, extract the part before the ":" (since the module declaration(s) is/are located there)
-
-                               # split the matching string into a bunch of strings, separated by a comma
-                               matches = import_match_str.split(',')
-
-                               for match in matches:
-                                       alias_match = self.re_import_alias.match(match)
-                                       if alias_match:
-                                               # is this an alias declaration? (alias = module name) if so, extract the module name
-                                               match = alias_match.group(1)
-
-                                       lst.append(match)
-               return lst
-
-       def start(self, node):
-               self.waiting = [node]
-               # while the stack is not empty, add the dependencies
-               while self.waiting:
-                       nd = self.waiting.pop(0)
-                       self.iter(nd)
-
-       def iter(self, node):
-               path = node.abspath(self.env) # obtain the absolute path
-               code = "".join(filter_comments(path)) # read the file and filter the comments
-               names = self.get_strings(code) # obtain the import strings
-               for x in names:
-                       # optimization
-                       if x in self.allnames: continue
-                       self.allnames.append(x)
-
-                       # for each name, see if it is like a node or not
-                       self.tryfind(x)
-
-def scan(self):
-       "look for .d/.di the .d source need"
-       env = self.env
-       gruik = d_parser(env, env['INC_PATHS'])
-       gruik.start(self.inputs[0])
-
-       if Logs.verbose:
-               debug('deps: nodes found for %s: %s %s' % (str(self.inputs[0]), str(gruik.nodes), str(gruik.names)))
-               #debug("deps found for %s: %s" % (str(node), str(gruik.deps)), 'deps')
-       return (gruik.nodes, gruik.names)
-
-def get_target_name(self):
-       "for d programs and libs"
-       v = self.env
-       tp = 'program'
-       for x in self.features:
-               if x in ['dshlib', 'dstaticlib']:
-                       tp = x.lstrip('d')
-       return v['D_%s_PATTERN' % tp] % self.target
-
-d_params = {
-'dflags': '',
-'importpaths':'',
-'libs':'',
-'libpaths':'',
-'generate_headers':False,
-}
-
-@feature('d')
-@before('apply_type_vars')
-def init_d(self):
-       for x in d_params:
-               setattr(self, x, getattr(self, x, d_params[x]))
-
-class d_taskgen(TaskGen.task_gen):
-       def __init__(self, *k, **kw):
-               TaskGen.task_gen.__init__(self, *k, **kw)
-
-               # COMPAT
-               if len(k) > 1:
-                       self.features.append('d' + k[1])
-
-# okay, we borrow a few methods from ccroot
-TaskGen.bind_feature('d', D_METHS)
-
-@feature('d')
-@before('apply_d_libs')
-def init_d(self):
-       Utils.def_attrs(self,
-               dflags='',
-               importpaths='',
-               libs='',
-               libpaths='',
-               uselib='',
-               uselib_local='',
-               generate_headers=False, # set to true if you want .di files as well as .o
-               compiled_tasks=[],
-               add_objects=[],
-               link_task=None)
-
-@feature('d')
-@after('apply_d_link', 'init_d')
-@before('apply_vnum', 'apply_d_vars')
-def apply_d_libs(self):
-       """after apply_link because of 'link_task'
-       after default_cc because of the attribute 'uselib'"""
-       env = self.env
-
-       # 1. the case of the libs defined in the project (visit ancestors first)
-       # the ancestors external libraries (uselib) will be prepended
-       self.uselib = self.to_list(self.uselib)
-       names = self.to_list(self.uselib_local)
-
-       seen = set([])
-       tmp = Utils.deque(names) # consume a copy of the list of names
-       while tmp:
-               lib_name = tmp.popleft()
-               # visit dependencies only once
-               if lib_name in seen:
-                       continue
-
-               y = self.name_to_obj(lib_name)
-               if not y:
-                       raise Utils.WafError('object %r was not found in uselib_local (required by %r)' % (lib_name, self.name))
-               y.post()
-               seen.add(lib_name)
-
-               # object has ancestors to process (shared libraries): add them to the end of the list
-               if getattr(y, 'uselib_local', None):
-                       lst = y.to_list(y.uselib_local)
-                       if 'dshlib' in y.features or 'dprogram' in y.features:
-                               lst = [x for x in lst if not 'dstaticlib' in self.name_to_obj(x).features]
-                       tmp.extend(lst)
-
-               # link task and flags
-               if getattr(y, 'link_task', None):
-
-                       link_name = y.target[y.target.rfind(os.sep) + 1:]
-                       if 'dstaticlib' in y.features or 'dshlib' in y.features:
-                               env.append_unique('DLINKFLAGS', env.DLIB_ST % link_name)
-                               env.append_unique('DLINKFLAGS', env.DLIBPATH_ST % y.link_task.outputs[0].parent.bldpath(env))
-
-                       # the order
-                       self.link_task.set_run_after(y.link_task)
-
-                       # for the recompilation
-                       dep_nodes = getattr(self.link_task, 'dep_nodes', [])
-                       self.link_task.dep_nodes = dep_nodes + y.link_task.outputs
-
-               # add ancestors uselib too - but only propagate those that have no staticlib
-               for v in self.to_list(y.uselib):
-                       if not v in self.uselib:
-                               self.uselib.insert(0, v)
-
-               # if the library task generator provides 'export_incdirs', add to the include path
-               # the export_incdirs must be a list of paths relative to the other library
-               if getattr(y, 'export_incdirs', None):
-                       for x in self.to_list(y.export_incdirs):
-                               node = y.path.find_dir(x)
-                               if not node:
-                                       raise Utils.WafError('object %r: invalid folder %r in export_incdirs' % (y.target, x))
-                               self.env.append_unique('INC_PATHS', node)
-
-@feature('dprogram', 'dshlib', 'dstaticlib')
-@after('apply_core')
-def apply_d_link(self):
-       link = getattr(self, 'link', None)
-       if not link:
-               if 'dstaticlib' in self.features: link = 'static_link'
-               else: link = 'd_link'
-
-       outputs = [t.outputs[0] for t in self.compiled_tasks]
-       self.link_task = self.create_task(link, outputs, self.path.find_or_declare(get_target_name(self)))
-
-@feature('d')
-@after('apply_core')
-def apply_d_vars(self):
-       env = self.env
-       dpath_st   = env['DPATH_ST']
-       lib_st   = env['DLIB_ST']
-       libpath_st = env['DLIBPATH_ST']
-
-       importpaths = self.to_list(self.importpaths)
-       libpaths = []
-       libs = []
-       uselib = self.to_list(self.uselib)
-
-       for i in uselib:
-               if env['DFLAGS_' + i]:
-                       env.append_unique('DFLAGS', env['DFLAGS_' + i])
-
-       for x in self.features:
-               if not x in ['dprogram', 'dstaticlib', 'dshlib']:
-                       continue
-               x.lstrip('d')
-               d_shlib_dflags = env['D_' + x + '_DFLAGS']
-               if d_shlib_dflags:
-                       env.append_unique('DFLAGS', d_shlib_dflags)
-
-       # add import paths
-       for i in uselib:
-               if env['DPATH_' + i]:
-                       for entry in self.to_list(env['DPATH_' + i]):
-                               if not entry in importpaths:
-                                       importpaths.append(entry)
-
-       # now process the import paths
-       for path in importpaths:
-               if os.path.isabs(path):
-                       env.append_unique('_DIMPORTFLAGS', dpath_st % path)
-               else:
-                       node = self.path.find_dir(path)
-                       self.env.append_unique('INC_PATHS', node)
-                       env.append_unique('_DIMPORTFLAGS', dpath_st % node.srcpath(env))
-                       env.append_unique('_DIMPORTFLAGS', dpath_st % node.bldpath(env))
-
-       # add library paths
-       for i in uselib:
-               if env['LIBPATH_' + i]:
-                       for entry in self.to_list(env['LIBPATH_' + i]):
-                               if not entry in libpaths:
-                                       libpaths.append(entry)
-       libpaths = self.to_list(self.libpaths) + libpaths
-
-       # now process the library paths
-       # apply same path manipulation as used with import paths
-       for path in libpaths:
-               if not os.path.isabs(path):
-                       node = self.path.find_resource(path)
-                       if not node:
-                               raise Utils.WafError('could not find libpath %r from %r' % (path, self))
-                       path = node.abspath(self.env)
-
-               env.append_unique('DLINKFLAGS', libpath_st % path)
-
-       # add libraries
-       for i in uselib:
-               if env['LIB_' + i]:
-                       for entry in self.to_list(env['LIB_' + i]):
-                               if not entry in libs:
-                                       libs.append(entry)
-       libs.extend(self.to_list(self.libs))
-
-       # process user flags
-       for flag in self.to_list(self.dflags):
-               env.append_unique('DFLAGS', flag)
-
-       # now process the libraries
-       for lib in libs:
-               env.append_unique('DLINKFLAGS', lib_st % lib)
-
-       # add linker flags
-       for i in uselib:
-               dlinkflags = env['DLINKFLAGS_' + i]
-               if dlinkflags:
-                       for linkflag in dlinkflags:
-                               env.append_unique('DLINKFLAGS', linkflag)
-
-@feature('dshlib')
-@after('apply_d_vars')
-def add_shlib_d_flags(self):
-       for linkflag in self.env['D_shlib_LINKFLAGS']:
-               self.env.append_unique('DLINKFLAGS', linkflag)
-
-@extension(EXT_D)
-def d_hook(self, node):
-       # create the compilation task: cpp or cc
-       task = self.create_task(self.generate_headers and 'd_with_header' or 'd')
-       try: obj_ext = self.obj_ext
-       except AttributeError: obj_ext = '_%d.o' % self.idx
-
-       task.inputs = [node]
-       task.outputs = [node.change_ext(obj_ext)]
-       self.compiled_tasks.append(task)
-
-       if self.generate_headers:
-               header_node = node.change_ext(self.env['DHEADER_ext'])
-               task.outputs += [header_node]
-
-d_str = '${D_COMPILER} ${DFLAGS} ${_DIMPORTFLAGS} ${D_SRC_F}${SRC} ${D_TGT_F}${TGT}'
-d_with_header_str = '${D_COMPILER} ${DFLAGS} ${_DIMPORTFLAGS} \
-${D_HDR_F}${TGT[1].bldpath(env)} \
-${D_SRC_F}${SRC} \
-${D_TGT_F}${TGT[0].bldpath(env)}'
-link_str = '${D_LINKER} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F}${TGT} ${DLINKFLAGS}'
-
-def override_exec(cls):
-       """stupid dmd wants -of stuck to the file name"""
-       old_exec = cls.exec_command
-       def exec_command(self, *k, **kw):
-               if isinstance(k[0], list):
-                       lst = k[0]
-                       for i in xrange(len(lst)):
-                               if lst[i] == '-of':
-                                       del lst[i]
-                                       lst[i] = '-of' + lst[i]
-                                       break
-               return old_exec(self, *k, **kw)
-       cls.exec_command = exec_command
-
-cls = Task.simple_task_type('d', d_str, 'GREEN', before='static_link d_link', shell=False)
-cls.scan = scan
-override_exec(cls)
-
-cls = Task.simple_task_type('d_with_header', d_with_header_str, 'GREEN', before='static_link d_link', shell=False)
-override_exec(cls)
-
-cls = Task.simple_task_type('d_link', link_str, color='YELLOW', shell=False)
-override_exec(cls)
-
-# for feature request #104
-@taskgen
-def generate_header(self, filename, install_path):
-       if not hasattr(self, 'header_lst'): self.header_lst = []
-       self.meths.append('process_header')
-       self.header_lst.append([filename, install_path])
-
-@before('apply_core')
-def process_header(self):
-       env = self.env
-       for i in getattr(self, 'header_lst', []):
-               node = self.path.find_resource(i[0])
-
-               if not node:
-                       raise Utils.WafError('file not found on d obj '+i[0])
-
-               task = self.create_task('d_header')
-               task.set_inputs(node)
-               task.set_outputs(node.change_ext('.di'))
-
-d_header_str = '${D_COMPILER} ${D_HEADER} ${SRC}'
-Task.simple_task_type('d_header', d_header_str, color='BLUE', shell=False)
-
-@conftest
-def d_platform_flags(conf):
-       v = conf.env
-       binfmt = v.DEST_BINFMT or Utils.unversioned_sys_platform_to_binary_format(
-               v.DEST_OS or Utils.unversioned_sys_platform())
-       if binfmt == 'pe':
-               v['D_program_PATTERN']   = '%s.exe'
-               v['D_shlib_PATTERN']     = 'lib%s.dll'
-               v['D_staticlib_PATTERN'] = 'lib%s.a'
-       else:
-               v['D_program_PATTERN']   = '%s'
-               v['D_shlib_PATTERN']     = 'lib%s.so'
-               v['D_staticlib_PATTERN'] = 'lib%s.a'
-
-@conftest
-def check_dlibrary(conf):
-       ret = conf.check_cc(features='d dprogram', fragment=DLIB, mandatory=True, compile_filename='test.d', execute=True)
-       conf.env.DLIBRARY = ret.strip()
-
-# quick test #
-if __name__ == "__main__":
-       #Logs.verbose = 2
-
-       try: arg = sys.argv[1]
-       except IndexError: arg = "file.d"
-
-       print("".join(filter_comments(arg)))
-       # TODO
-       paths = ['.']
-
-       #gruik = filter()
-       #gruik.start(arg)
-
-       #code = "".join(gruik.buf)
-
-       #print "we have found the following code"
-       #print code
-
-       #print "now parsing"
-       #print "-------------------------------------------"
-       """
-       parser_ = d_parser()
-       parser_.start(arg)
-
-       print "module: %s" % parser_.module
-       print "imports: ",
-       for imp in parser_.imports:
-               print imp + " ",
-       print
-"""
diff --git a/third_party/waf/wafadmin/Tools/dbus.py b/third_party/waf/wafadmin/Tools/dbus.py
deleted file mode 100644 (file)
index 42c4ca2..0000000
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Ali Sabil, 2007
-
-import Task, Utils
-from TaskGen import taskgen, before, after, feature
-
-@taskgen
-def add_dbus_file(self, filename, prefix, mode):
-       if not hasattr(self, 'dbus_lst'):
-               self.dbus_lst = []
-       self.meths.append('process_dbus')
-       self.dbus_lst.append([filename, prefix, mode])
-
-@before('apply_core')
-def process_dbus(self):
-       for filename, prefix, mode in getattr(self, 'dbus_lst', []):
-               node = self.path.find_resource(filename)
-
-               if not node:
-                       raise Utils.WafError('file not found ' + filename)
-
-               tsk = self.create_task('dbus_binding_tool', node, node.change_ext('.h'))
-
-               tsk.env.DBUS_BINDING_TOOL_PREFIX = prefix
-               tsk.env.DBUS_BINDING_TOOL_MODE   = mode
-
-Task.simple_task_type('dbus_binding_tool',
-       '${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}',
-       color='BLUE', before='cc')
-
-def detect(conf):
-       dbus_binding_tool = conf.find_program('dbus-binding-tool', var='DBUS_BINDING_TOOL')
diff --git a/third_party/waf/wafadmin/Tools/dmd.py b/third_party/waf/wafadmin/Tools/dmd.py
deleted file mode 100644 (file)
index b86ffd6..0000000
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Carlos Rafael Giani, 2007 (dv)
-# Thomas Nagy, 2008 (ita)
-
-import sys
-import Utils, ar
-from Configure import conftest
-
-@conftest
-def find_dmd(conf):
-       conf.find_program(['dmd', 'ldc'], var='D_COMPILER', mandatory=True)
-
-@conftest
-def common_flags_ldc(conf):
-       v = conf.env
-       v['DFLAGS']         = ['-d-version=Posix']
-       v['DLINKFLAGS']     = []
-       v['D_shlib_DFLAGS'] = ['-relocation-model=pic']
-
-@conftest
-def common_flags_dmd(conf):
-       v = conf.env
-
-       # _DFLAGS _DIMPORTFLAGS
-
-       # Compiler is dmd so 'gdc' part will be ignored, just
-       # ensure key is there, so wscript can append flags to it
-       v['DFLAGS']            = ['-version=Posix']
-
-       v['D_SRC_F']           = ''
-       v['D_TGT_F']           = ['-c', '-of']
-       v['DPATH_ST']          = '-I%s' # template for adding import paths
-
-       # linker
-       v['D_LINKER']          = v['D_COMPILER']
-       v['DLNK_SRC_F']        = ''
-       v['DLNK_TGT_F']        = '-of'
-
-       v['DLIB_ST']           = '-L-l%s' # template for adding libs
-       v['DLIBPATH_ST']       = '-L-L%s' # template for adding libpaths
-
-       # linker debug levels
-       v['DFLAGS_OPTIMIZED']  = ['-O']
-       v['DFLAGS_DEBUG']      = ['-g', '-debug']
-       v['DFLAGS_ULTRADEBUG'] = ['-g', '-debug']
-       v['DLINKFLAGS']        = ['-quiet']
-
-       v['D_shlib_DFLAGS']    = ['-fPIC']
-       v['D_shlib_LINKFLAGS'] = ['-L-shared']
-
-       v['DHEADER_ext']       = '.di'
-       v['D_HDR_F']           = ['-H', '-Hf']
-
-def detect(conf):
-       conf.find_dmd()
-       conf.check_tool('ar')
-       conf.check_tool('d')
-       conf.common_flags_dmd()
-       conf.d_platform_flags()
-
-       if conf.env.D_COMPILER.find('ldc') > -1:
-               conf.common_flags_ldc()
diff --git a/third_party/waf/wafadmin/Tools/flex.py b/third_party/waf/wafadmin/Tools/flex.py
deleted file mode 100644 (file)
index cbea42d..0000000
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# John O'Meara, 2006
-# Thomas Nagy, 2006-2008
-
-"Flex processing"
-
-import TaskGen
-
-def decide_ext(self, node):
-       if 'cxx' in self.features: return '.lex.cc'
-       else: return '.lex.c'
-
-TaskGen.declare_chain(
-       name = 'flex',
-       rule = '${FLEX} -o${TGT} ${FLEXFLAGS} ${SRC}',
-       ext_in = '.l',
-       ext_out = '.c .cxx',
-       decider = decide_ext
-)
-
-def detect(conf):
-       conf.find_program('flex', var='FLEX', mandatory=True)
-       conf.env['FLEXFLAGS'] = ''
diff --git a/third_party/waf/wafadmin/Tools/gas.py b/third_party/waf/wafadmin/Tools/gas.py
deleted file mode 100644 (file)
index 5dd0b5d..0000000
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2008 (ita)
-
-"as and gas"
-
-import os, sys
-import Task
-from TaskGen import extension, taskgen, after, before
-
-EXT_ASM = ['.s', '.S', '.asm', '.ASM', '.spp', '.SPP']
-
-as_str = '${AS} ${ASFLAGS} ${_ASINCFLAGS} ${SRC} -o ${TGT}'
-Task.simple_task_type('asm', as_str, 'PINK', ext_out='.o', shell=False)
-
-@extension(EXT_ASM)
-def asm_hook(self, node):
-       # create the compilation task: cpp or cc
-       try: obj_ext = self.obj_ext
-       except AttributeError: obj_ext = '_%d.o' % self.idx
-
-       task = self.create_task('asm', node, node.change_ext(obj_ext))
-       self.compiled_tasks.append(task)
-       self.meths.append('asm_incflags')
-
-@after('apply_obj_vars_cc')
-@after('apply_obj_vars_cxx')
-@before('apply_link')
-def asm_incflags(self):
-       self.env.append_value('_ASINCFLAGS', self.env.ASINCFLAGS)
-       var = ('cxx' in self.features) and 'CXX' or 'CC'
-       self.env.append_value('_ASINCFLAGS', self.env['_%sINCFLAGS' % var])
-
-def detect(conf):
-       conf.find_program(['gas', 'as'], var='AS')
-       if not conf.env.AS: conf.env.AS = conf.env.CC
-       #conf.env.ASFLAGS = ['-c'] <- may be necesary for .S files
diff --git a/third_party/waf/wafadmin/Tools/gcc.py b/third_party/waf/wafadmin/Tools/gcc.py
deleted file mode 100644 (file)
index 83d5b24..0000000
+++ /dev/null
@@ -1,138 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2008 (ita)
-# Ralf Habacker, 2006 (rh)
-# Yinon Ehrlich, 2009
-
-import os, sys
-import Configure, Options, Utils
-import ccroot, ar
-from Configure import conftest
-
-@conftest
-def find_gcc(conf):
-       cc = conf.find_program(['gcc', 'cc'], var='CC', mandatory=True)
-       cc = conf.cmd_to_list(cc)
-       ccroot.get_cc_version(conf, cc, gcc=True)
-       conf.env.CC_NAME = 'gcc'
-       conf.env.CC      = cc
-
-@conftest
-def gcc_common_flags(conf):
-       v = conf.env
-
-       # CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS
-
-       v['CCFLAGS_DEBUG'] = ['-g']
-
-       v['CCFLAGS_RELEASE'] = ['-O2']
-
-       v['CC_SRC_F']            = ''
-       v['CC_TGT_F']            = ['-c', '-o', ''] # shell hack for -MD
-       v['CPPPATH_ST']          = '-I%s' # template for adding include paths
-
-       # linker
-       if not v['LINK_CC']: v['LINK_CC'] = v['CC']
-       v['CCLNK_SRC_F']         = ''
-       v['CCLNK_TGT_F']         = ['-o', ''] # shell hack for -MD
-
-       v['LIB_ST']              = '-l%s' # template for adding libs
-       v['LIBPATH_ST']          = '-L%s' # template for adding libpaths
-       v['STATICLIB_ST']        = '-l%s'
-       v['STATICLIBPATH_ST']    = '-L%s'
-       v['RPATH_ST']            = '-Wl,-rpath,%s'
-       v['CCDEFINES_ST']        = '-D%s'
-
-       v['SONAME_ST']           = '-Wl,-h,%s'
-       v['SHLIB_MARKER']        = '-Wl,-Bdynamic'
-       v['STATICLIB_MARKER']    = '-Wl,-Bstatic'
-       v['FULLSTATIC_MARKER']   = '-static'
-
-       # program
-       v['program_PATTERN']     = '%s'
-
-       # shared library
-       v['shlib_CCFLAGS']       = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC aleady defines the __PIC__ macro
-       v['shlib_LINKFLAGS']     = ['-shared']
-       v['shlib_PATTERN']       = 'lib%s.so'
-
-       # static lib
-       v['staticlib_LINKFLAGS'] = ['-Wl,-Bstatic']
-       v['staticlib_PATTERN']   = 'lib%s.a'
-
-       # osx stuff
-       v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup']
-       v['CCFLAGS_MACBUNDLE']   = ['-fPIC']
-       v['macbundle_PATTERN']   = '%s.bundle'
-
-@conftest
-def gcc_modifier_win32(conf):
-       v = conf.env
-       v['program_PATTERN']     = '%s.exe'
-
-       v['shlib_PATTERN']       = '%s.dll'
-       v['implib_PATTERN']      = 'lib%s.dll.a'
-       v['IMPLIB_ST']           = '-Wl,--out-implib,%s'
-
-       dest_arch = v['DEST_CPU']
-       v['shlib_CCFLAGS'] = ['-DPIC']
-
-       v.append_value('shlib_CCFLAGS', '-DDLL_EXPORT') # TODO adding nonstandard defines like this DLL_EXPORT is not a good idea
-
-       # Auto-import is enabled by default even without this option,
-       # but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
-       # that the linker emits otherwise.
-       v.append_value('LINKFLAGS', '-Wl,--enable-auto-import')
-
-@conftest
-def gcc_modifier_cygwin(conf):
-       gcc_modifier_win32(conf)
-       v = conf.env
-       v['shlib_PATTERN']       = 'cyg%s.dll'
-       v.append_value('shlib_LINKFLAGS', '-Wl,--enable-auto-image-base')
-
-@conftest
-def gcc_modifier_darwin(conf):
-       v = conf.env
-       v['shlib_CCFLAGS']       = ['-fPIC', '-compatibility_version', '1', '-current_version', '1']
-       v['shlib_LINKFLAGS']     = ['-dynamiclib']
-       v['shlib_PATTERN']       = 'lib%s.dylib'
-
-       v['staticlib_LINKFLAGS'] = []
-
-       v['SHLIB_MARKER']        = ''
-       v['STATICLIB_MARKER']    = ''
-       v['SONAME_ST']           = ''
-
-@conftest
-def gcc_modifier_aix(conf):
-       v = conf.env
-       v['program_LINKFLAGS']   = ['-Wl,-brtl']
-
-       v['shlib_LINKFLAGS']     = ['-shared','-Wl,-brtl,-bexpfull']
-
-       v['SHLIB_MARKER']        = ''
-
-@conftest
-def gcc_modifier_openbsd(conf):
-       conf.env['SONAME_ST'] = []
-
-@conftest
-def gcc_modifier_platform(conf):
-       # * set configurations specific for a platform.
-       # * the destination platform is detected automatically by looking at the macros the compiler predefines,
-       #   and if it's not recognised, it fallbacks to sys.platform.
-       dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
-       gcc_modifier_func = globals().get('gcc_modifier_' + dest_os)
-       if gcc_modifier_func:
-                       gcc_modifier_func(conf)
-
-def detect(conf):
-       conf.find_gcc()
-       conf.find_cpp()
-       conf.find_ar()
-       conf.gcc_common_flags()
-       conf.gcc_modifier_platform()
-       conf.cc_load_tools()
-       conf.cc_add_flags()
-       conf.link_add_flags()
diff --git a/third_party/waf/wafadmin/Tools/gdc.py b/third_party/waf/wafadmin/Tools/gdc.py
deleted file mode 100644 (file)
index d1e5e7b..0000000
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Carlos Rafael Giani, 2007 (dv)
-
-import sys
-import Utils, ar
-from Configure import conftest
-
-@conftest
-def find_gdc(conf):
-       conf.find_program('gdc', var='D_COMPILER', mandatory=True)
-
-@conftest
-def common_flags_gdc(conf):
-       v = conf.env
-
-       # _DFLAGS _DIMPORTFLAGS
-
-       # for mory info about the meaning of this dict see dmd.py
-       v['DFLAGS']            = []
-
-       v['D_SRC_F']           = ''
-       v['D_TGT_F']           = ['-c', '-o', '']
-       v['DPATH_ST']          = '-I%s' # template for adding import paths
-
-       # linker
-       v['D_LINKER']          = v['D_COMPILER']
-       v['DLNK_SRC_F']        = ''
-       v['DLNK_TGT_F']        = ['-o', '']
-
-       v['DLIB_ST']           = '-l%s' # template for adding libs
-       v['DLIBPATH_ST']       = '-L%s' # template for adding libpaths
-
-       # debug levels
-       v['DLINKFLAGS']        = []
-       v['DFLAGS_OPTIMIZED']  = ['-O3']
-       v['DFLAGS_DEBUG']      = ['-O0']
-       v['DFLAGS_ULTRADEBUG'] = ['-O0']
-
-       v['D_shlib_DFLAGS']    = []
-       v['D_shlib_LINKFLAGS'] = ['-shared']
-
-       v['DHEADER_ext']       = '.di'
-       v['D_HDR_F']           = '-fintfc -fintfc-file='
-
-def detect(conf):
-       conf.find_gdc()
-       conf.check_tool('ar')
-       conf.check_tool('d')
-       conf.common_flags_gdc()
-       conf.d_platform_flags()
diff --git a/third_party/waf/wafadmin/Tools/glib2.py b/third_party/waf/wafadmin/Tools/glib2.py
deleted file mode 100644 (file)
index d3fc776..0000000
+++ /dev/null
@@ -1,163 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2008 (ita)
-
-"GLib2 support"
-
-import Task, Utils
-from TaskGen import taskgen, before, after, feature
-
-#
-# glib-genmarshal
-#
-
-@taskgen
-def add_marshal_file(self, filename, prefix):
-       if not hasattr(self, 'marshal_list'):
-               self.marshal_list = []
-       self.meths.append('process_marshal')
-       self.marshal_list.append((filename, prefix))
-
-@before('apply_core')
-def process_marshal(self):
-       for f, prefix in getattr(self, 'marshal_list', []):
-               node = self.path.find_resource(f)
-
-               if not node:
-                       raise Utils.WafError('file not found %r' % f)
-
-               h_node = node.change_ext('.h')
-               c_node = node.change_ext('.c')
-
-               task = self.create_task('glib_genmarshal', node, [h_node, c_node])
-               task.env.GLIB_GENMARSHAL_PREFIX = prefix
-       self.allnodes.append(c_node)
-
-def genmarshal_func(self):
-
-       bld = self.inputs[0].__class__.bld
-
-       get = self.env.get_flat
-       cmd1 = "%s %s --prefix=%s --header > %s" % (
-               get('GLIB_GENMARSHAL'),
-               self.inputs[0].srcpath(self.env),
-               get('GLIB_GENMARSHAL_PREFIX'),
-               self.outputs[0].abspath(self.env)
-       )
-
-       ret = bld.exec_command(cmd1)
-       if ret: return ret
-
-       #print self.outputs[1].abspath(self.env)
-       f = open(self.outputs[1].abspath(self.env), 'wb')
-       c = '''#include "%s"\n''' % self.outputs[0].name
-       f.write(c)
-       f.close()
-
-       cmd2 = "%s %s --prefix=%s --body >> %s" % (
-               get('GLIB_GENMARSHAL'),
-               self.inputs[0].srcpath(self.env),
-               get('GLIB_GENMARSHAL_PREFIX'),
-               self.outputs[1].abspath(self.env)
-       )
-       ret = Utils.exec_command(cmd2)
-       if ret: return ret
-
-#
-# glib-mkenums
-#
-
-@taskgen
-def add_enums_from_template(self, source='', target='', template='', comments=''):
-       if not hasattr(self, 'enums_list'):
-               self.enums_list = []
-       self.meths.append('process_enums')
-       self.enums_list.append({'source': source,
-                               'target': target,
-                               'template': template,
-                               'file-head': '',
-                               'file-prod': '',
-                               'file-tail': '',
-                               'enum-prod': '',
-                               'value-head': '',
-                               'value-prod': '',
-                               'value-tail': '',
-                               'comments': comments})
-
-@taskgen
-def add_enums(self, source='', target='',
-              file_head='', file_prod='', file_tail='', enum_prod='',
-              value_head='', value_prod='', value_tail='', comments=''):
-       if not hasattr(self, 'enums_list'):
-               self.enums_list = []
-       self.meths.append('process_enums')
-       self.enums_list.append({'source': source,
-                               'template': '',
-                               'target': target,
-                               'file-head': file_head,
-                               'file-prod': file_prod,
-                               'file-tail': file_tail,
-                               'enum-prod': enum_prod,
-                               'value-head': value_head,
-                               'value-prod': value_prod,
-                               'value-tail': value_tail,
-                               'comments': comments})
-
-@before('apply_core')
-def process_enums(self):
-       for enum in getattr(self, 'enums_list', []):
-               task = self.create_task('glib_mkenums')
-               env = task.env
-
-               inputs = []
-
-               # process the source
-               source_list = self.to_list(enum['source'])
-               if not source_list:
-                       raise Utils.WafError('missing source ' + str(enum))
-               source_list = [self.path.find_resource(k) for k in source_list]
-               inputs += source_list
-               env['GLIB_MKENUMS_SOURCE'] = [k.srcpath(env) for k in source_list]
-
-               # find the target
-               if not enum['target']:
-                       raise Utils.WafError('missing target ' + str(enum))
-               tgt_node = self.path.find_or_declare(enum['target'])
-               if tgt_node.name.endswith('.c'):
-                       self.allnodes.append(tgt_node)
-               env['GLIB_MKENUMS_TARGET'] = tgt_node.abspath(env)
-
-
-               options = []
-
-               if enum['template']: # template, if provided
-                       template_node = self.path.find_resource(enum['template'])
-                       options.append('--template %s' % (template_node.abspath(env)))
-                       inputs.append(template_node)
-               params = {'file-head' : '--fhead',
-                          'file-prod' : '--fprod',
-                          'file-tail' : '--ftail',
-                          'enum-prod' : '--eprod',
-                          'value-head' : '--vhead',
-                          'value-prod' : '--vprod',
-                          'value-tail' : '--vtail',
-                          'comments': '--comments'}
-               for param, option in params.iteritems():
-                       if enum[param]:
-                               options.append('%s %r' % (option, enum[param]))
-
-               env['GLIB_MKENUMS_OPTIONS'] = ' '.join(options)
-
-               # update the task instance
-               task.set_inputs(inputs)
-               task.set_outputs(tgt_node)
-
-Task.task_type_from_func('glib_genmarshal', func=genmarshal_func, vars=['GLIB_GENMARSHAL_PREFIX', 'GLIB_GENMARSHAL'],
-       color='BLUE', before='cc cxx')
-Task.simple_task_type('glib_mkenums',
-       '${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}',
-       color='PINK', before='cc cxx')
-
-def detect(conf):
-       glib_genmarshal = conf.find_program('glib-genmarshal', var='GLIB_GENMARSHAL')
-       mk_enums_tool = conf.find_program('glib-mkenums', var='GLIB_MKENUMS')
diff --git a/third_party/waf/wafadmin/Tools/gnome.py b/third_party/waf/wafadmin/Tools/gnome.py
deleted file mode 100644 (file)
index da11e91..0000000
+++ /dev/null
@@ -1,222 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2008 (ita)
-
-"Gnome support"
-
-import os, re
-import TaskGen, Utils, Runner, Task, Build, Options, Logs
-import cc
-from Logs import error
-from TaskGen import taskgen, before, after, feature
-
-n1_regexp = re.compile('<refentrytitle>(.*)</refentrytitle>', re.M)
-n2_regexp = re.compile('<manvolnum>(.*)</manvolnum>', re.M)
-
-def postinstall_schemas(prog_name):
-       if Build.bld.is_install:
-               dir = Build.bld.get_install_path('${PREFIX}/etc/gconf/schemas/%s.schemas' % prog_name)
-               if not Options.options.destdir:
-                       # add the gconf schema
-                       Utils.pprint('YELLOW', 'Installing GConf schema')
-                       command = 'gconftool-2 --install-schema-file=%s 1> /dev/null' % dir
-                       ret = Utils.exec_command(command)
-               else:
-                       Utils.pprint('YELLOW', 'GConf schema not installed. After install, run this:')
-                       Utils.pprint('YELLOW', 'gconftool-2 --install-schema-file=%s' % dir)
-
-def postinstall_icons():
-       dir = Build.bld.get_install_path('${DATADIR}/icons/hicolor')
-       if Build.bld.is_install:
-               if not Options.options.destdir:
-                       # update the pixmap cache directory
-                       Utils.pprint('YELLOW', "Updating Gtk icon cache.")
-                       command = 'gtk-update-icon-cache -q -f -t %s' % dir
-                       ret = Utils.exec_command(command)
-               else:
-                       Utils.pprint('YELLOW', 'Icon cache not updated. After install, run this:')
-                       Utils.pprint('YELLOW', 'gtk-update-icon-cache -q -f -t %s' % dir)
-
-def postinstall_scrollkeeper(prog_name):
-       if Build.bld.is_install:
-               # now the scrollkeeper update if we can write to the log file
-               if os.access('/var/log/scrollkeeper.log', os.W_OK):
-                       dir1 = Build.bld.get_install_path('${PREFIX}/var/scrollkeeper')
-                       dir2 = Build.bld.get_install_path('${DATADIR}/omf/%s' % prog_name)
-                       command = 'scrollkeeper-update -q -p %s -o %s' % (dir1, dir2)
-                       ret = Utils.exec_command(command)
-
-def postinstall(prog_name='myapp', schemas=1, icons=1, scrollkeeper=1):
-       if schemas: postinstall_schemas(prog_name)
-       if icons: postinstall_icons()
-       if scrollkeeper: postinstall_scrollkeeper(prog_name)
-
-# OBSOLETE
-class gnome_doc_taskgen(TaskGen.task_gen):
-       def __init__(self, *k, **kw):
-               TaskGen.task_gen.__init__(self, *k, **kw)
-
-@feature('gnome_doc')
-def init_gnome_doc(self):
-       self.default_install_path = '${PREFIX}/share'
-
-@feature('gnome_doc')
-@after('init_gnome_doc')
-def apply_gnome_doc(self):
-       self.env['APPNAME'] = self.doc_module
-       lst = self.to_list(self.doc_linguas)
-       bld = self.bld
-       lst.append('C')
-
-       for x in lst:
-               if not x == 'C':
-                       tsk = self.create_task('xml2po')
-                       node = self.path.find_resource(x+'/'+x+'.po')
-                       src = self.path.find_resource('C/%s.xml' % self.doc_module)
-                       out = self.path.find_or_declare('%s/%s.xml' % (x, self.doc_module))
-                       tsk.set_inputs([node, src])
-                       tsk.set_outputs(out)
-               else:
-                       out = self.path.find_resource('%s/%s.xml' % (x, self.doc_module))
-
-               tsk2 = self.create_task('xsltproc2po')
-               out2 = self.path.find_or_declare('%s/%s-%s.omf' % (x, self.doc_module, x))
-               tsk2.set_outputs(out2)
-               node = self.path.find_resource(self.doc_module+".omf.in")
-               tsk2.inputs = [node, out]
-
-               tsk2.run_after.append(tsk)
-
-               if bld.is_install:
-                       path = self.install_path + '/gnome/help/%s/%s' % (self.doc_module, x)
-                       bld.install_files(self.install_path + '/omf', out2, env=self.env)
-                       for y in self.to_list(self.doc_figures):
-                               try:
-                                       os.stat(self.path.abspath() + '/' + x + '/' + y)
-                                       bld.install_as(path + '/' + y, self.path.abspath() + '/' + x + '/' + y)
-                               except:
-                                       bld.install_as(path + '/' + y, self.path.abspath() + '/C/' + y)
-                       bld.install_as(path + '/%s.xml' % self.doc_module, out.abspath(self.env))
-                       if x == 'C':
-                               xmls = self.to_list(self.doc_includes)
-                               xmls.append(self.doc_entities)
-                               for z in xmls:
-                                       out = self.path.find_resource('%s/%s' % (x, z))
-                                       bld.install_as(path + '/%s' % z, out.abspath(self.env))
-
-# OBSOLETE
-class xml_to_taskgen(TaskGen.task_gen):
-       def __init__(self, *k, **kw):
-               TaskGen.task_gen.__init__(self, *k, **kw)
-
-@feature('xml_to')
-def init_xml_to(self):
-       Utils.def_attrs(self,
-               source = 'xmlfile',
-               xslt = 'xlsltfile',
-               target = 'hey',
-               default_install_path = '${PREFIX}',
-               task_created = None)
-
-@feature('xml_to')
-@after('init_xml_to')
-def apply_xml_to(self):
-       xmlfile = self.path.find_resource(self.source)
-       xsltfile = self.path.find_resource(self.xslt)
-       tsk = self.create_task('xmlto', [xmlfile, xsltfile], xmlfile.change_ext('html'))
-       tsk.install_path = self.install_path
-
-def sgml_scan(self):
-       node = self.inputs[0]
-
-       env = self.env
-       variant = node.variant(env)
-
-       fi = open(node.abspath(env), 'r')
-       content = fi.read()
-       fi.close()
-
-       # we should use a sgml parser :-/
-       name = n1_regexp.findall(content)[0]
-       num = n2_regexp.findall(content)[0]
-
-       doc_name = name+'.'+num
-
-       if not self.outputs:
-               self.outputs = [self.generator.path.find_or_declare(doc_name)]
-
-       return ([], [doc_name])
-
-class gnome_sgml2man_taskgen(TaskGen.task_gen):
-       def __init__(self, *k, **kw):
-               TaskGen.task_gen.__init__(self, *k, **kw)
-
-@feature('gnome_sgml2man')
-def apply_gnome_sgml2man(self):
-       """
-       we could make it more complicated, but for now we just scan the document each time
-       """
-       assert(getattr(self, 'appname', None))
-
-       def install_result(task):
-               out = task.outputs[0]
-               name = out.name
-               ext = name[-1]
-               env = task.env
-               self.bld.install_files('${DATADIR}/man/man%s/' % ext, out, env)
-
-       self.bld.rescan(self.path)
-       for name in self.bld.cache_dir_contents[self.path.id]:
-               base, ext = os.path.splitext(name)
-               if ext != '.sgml': continue
-
-               task = self.create_task('sgml2man')
-               task.set_inputs(self.path.find_resource(name))
-               task.task_generator = self
-               if self.bld.is_install: task.install = install_result
-               # no outputs, the scanner does it
-               # no caching for now, this is not a time-critical feature
-               # in the future the scanner can be used to do more things (find dependencies, etc)
-               task.scan()
-
-cls = Task.simple_task_type('sgml2man', '${SGML2MAN} -o ${TGT[0].bld_dir(env)} ${SRC}  > /dev/null', color='BLUE')
-cls.scan = sgml_scan
-cls.quiet = 1
-
-Task.simple_task_type('xmlto', '${XMLTO} html -m ${SRC[1].abspath(env)} ${SRC[0].abspath(env)}')
-
-Task.simple_task_type('xml2po', '${XML2PO} ${XML2POFLAGS} ${SRC} > ${TGT}', color='BLUE')
-
-# how do you expect someone to understand this?!
-xslt_magic = """${XSLTPROC2PO} -o ${TGT[0].abspath(env)} \
---stringparam db2omf.basename ${APPNAME} \
---stringparam db2omf.format docbook \
---stringparam db2omf.lang ${TGT[0].abspath(env)[:-4].split('-')[-1]} \
---stringparam db2omf.dtd '-//OASIS//DTD DocBook XML V4.3//EN' \
---stringparam db2omf.omf_dir ${PREFIX}/share/omf \
---stringparam db2omf.help_dir ${PREFIX}/share/gnome/help \
---stringparam db2omf.omf_in ${SRC[0].abspath(env)} \
---stringparam db2omf.scrollkeeper_cl ${SCROLLKEEPER_DATADIR}/Templates/C/scrollkeeper_cl.xml \
-${DB2OMF} ${SRC[1].abspath(env)}"""
-
-#--stringparam db2omf.dtd '-//OASIS//DTD DocBook XML V4.3//EN' \
-Task.simple_task_type('xsltproc2po', xslt_magic, color='BLUE')
-
-def detect(conf):
-       conf.check_tool('gnu_dirs glib2 dbus')
-       sgml2man = conf.find_program('docbook2man', var='SGML2MAN')
-
-       def getstr(varname):
-               return getattr(Options.options, varname, '')
-
-       # addefine also sets the variable to the env
-       conf.define('GNOMELOCALEDIR', os.path.join(conf.env['DATADIR'], 'locale'))
-
-       xml2po = conf.find_program('xml2po', var='XML2PO')
-       xsltproc2po = conf.find_program('xsltproc', var='XSLTPROC2PO')
-       conf.env['XML2POFLAGS'] = '-e -p'
-       conf.env['SCROLLKEEPER_DATADIR'] = Utils.cmd_output("scrollkeeper-config --pkgdatadir", silent=1).strip()
-       conf.env['DB2OMF'] = Utils.cmd_output("/usr/bin/pkg-config --variable db2omf gnome-doc-utils", silent=1).strip()
-
-def set_options(opt):
-       opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]')
diff --git a/third_party/waf/wafadmin/Tools/gnu_dirs.py b/third_party/waf/wafadmin/Tools/gnu_dirs.py
deleted file mode 100644 (file)
index ac149df..0000000
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Ali Sabil, 2007
-
-"""
-To use this module do not forget to call
-opt.tool_options('gnu_dirs')
-AND
-conf.check_tool('gnu_dirs')
-
-Add options for the standard GNU directories, this tool will add the options
-found in autotools, and will update the environment with the following
-installation variables:
-
- * PREFIX : architecture-independent files [/usr/local]
- * EXEC_PREFIX : architecture-dependent files [PREFIX]
- * BINDIR : user executables [EXEC_PREFIX/bin]
- * SBINDIR : user executables [EXEC_PREFIX/sbin]
- * LIBEXECDIR : program executables [EXEC_PREFIX/libexec]
- * SYSCONFDIR : read-only single-machine data [PREFIX/etc]
- * SHAREDSTATEDIR : modifiable architecture-independent data [PREFIX/com]
- * LOCALSTATEDIR : modifiable single-machine data [PREFIX/var]
- * LIBDIR : object code libraries [EXEC_PREFIX/lib]
- * INCLUDEDIR : C header files [PREFIX/include]
- * OLDINCLUDEDIR : C header files for non-gcc [/usr/include]
- * DATAROOTDIR : read-only arch.-independent data root [PREFIX/share]
- * DATADIR : read-only architecture-independent data [DATAROOTDIR]
- * INFODIR : info documentation [DATAROOTDIR/info]
- * LOCALEDIR : locale-dependent data [DATAROOTDIR/locale]
- * MANDIR : man documentation [DATAROOTDIR/man]
- * DOCDIR : documentation root [DATAROOTDIR/doc/telepathy-glib]
- * HTMLDIR : html documentation [DOCDIR]
- * DVIDIR : dvi documentation [DOCDIR]
- * PDFDIR : pdf documentation [DOCDIR]
- * PSDIR : ps documentation [DOCDIR]
-"""
-
-import Utils, Options
-
-_options = [x.split(', ') for x in '''
-bindir, user executables, ${EXEC_PREFIX}/bin
-sbindir, system admin executables, ${EXEC_PREFIX}/sbin
-libexecdir, program executables, ${EXEC_PREFIX}/libexec
-sysconfdir, read-only single-machine data, ${PREFIX}/etc
-sharedstatedir, modifiable architecture-independent data, ${PREFIX}/com
-localstatedir, modifiable single-machine data, ${PREFIX}/var
-libdir, object code libraries, ${EXEC_PREFIX}/lib
-includedir, C header files, ${PREFIX}/include
-oldincludedir, C header files for non-gcc, /usr/include
-datarootdir, read-only arch.-independent data root, ${PREFIX}/share
-datadir, read-only architecture-independent data, ${DATAROOTDIR}
-infodir, info documentation, ${DATAROOTDIR}/info
-localedir, locale-dependent data, ${DATAROOTDIR}/locale
-mandir, man documentation, ${DATAROOTDIR}/man
-docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE}
-htmldir, html documentation, ${DOCDIR}
-dvidir, dvi documentation, ${DOCDIR}
-pdfdir, pdf documentation, ${DOCDIR}
-psdir, ps documentation, ${DOCDIR}
-'''.split('\n') if x]
-
-def detect(conf):
-       def get_param(varname, default):
-               return getattr(Options.options, varname, '') or default
-
-       env = conf.env
-       env['EXEC_PREFIX'] = get_param('EXEC_PREFIX', env['PREFIX'])
-       env['PACKAGE'] = Utils.g_module.APPNAME
-
-       complete = False
-       iter = 0
-       while not complete and iter < len(_options) + 1:
-               iter += 1
-               complete = True
-               for name, help, default in _options:
-                       name = name.upper()
-                       if not env[name]:
-                               try:
-                                       env[name] = Utils.subst_vars(get_param(name, default), env)
-                               except TypeError:
-                                       complete = False
-       if not complete:
-               lst = [name for name, _, _ in _options if not env[name.upper()]]
-               raise Utils.WafError('Variable substitution failure %r' % lst)
-
-def set_options(opt):
-
-       inst_dir = opt.add_option_group('Installation directories',
-'By default, "waf install" will put the files in\
- "/usr/local/bin", "/usr/local/lib" etc. An installation prefix other\
- than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"')
-
-       for k in ('--prefix', '--destdir'):
-               option = opt.parser.get_option(k)
-               if option:
-                       opt.parser.remove_option(k)
-                       inst_dir.add_option(option)
-
-       inst_dir.add_option('--exec-prefix',
-               help = 'installation prefix [Default: ${PREFIX}]',
-               default = '',
-               dest = 'EXEC_PREFIX')
-
-       dirs_options = opt.add_option_group('Pre-defined installation directories', '')
-
-       for name, help, default in _options:
-               option_name = '--' + name
-               str_default = default
-               str_help = '%s [Default: %s]' % (help, str_default)
-               dirs_options.add_option(option_name, help=str_help, default='', dest=name.upper())
diff --git a/third_party/waf/wafadmin/Tools/gob2.py b/third_party/waf/wafadmin/Tools/gob2.py
deleted file mode 100644 (file)
index 96d8e20..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Ali Sabil, 2007
-
-import TaskGen
-
-TaskGen.declare_chain(
-       name = 'gob2',
-       rule = '${GOB2} -o ${TGT[0].bld_dir(env)} ${GOB2FLAGS} ${SRC}',
-       ext_in = '.gob',
-       ext_out = '.c'
-)
-
-def detect(conf):
-       gob2 = conf.find_program('gob2', var='GOB2', mandatory=True)
-       conf.env['GOB2'] = gob2
-       conf.env['GOB2FLAGS'] = ''
diff --git a/third_party/waf/wafadmin/Tools/gxx.py b/third_party/waf/wafadmin/Tools/gxx.py
deleted file mode 100644 (file)
index 38e8d00..0000000
+++ /dev/null
@@ -1,136 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006 (ita)
-# Ralf Habacker, 2006 (rh)
-# Yinon Ehrlich, 2009
-
-import os, sys
-import Configure, Options, Utils
-import ccroot, ar
-from Configure import conftest
-
-@conftest
-def find_gxx(conf):
-       cxx = conf.find_program(['g++', 'c++'], var='CXX', mandatory=True)
-       cxx = conf.cmd_to_list(cxx)
-       ccroot.get_cc_version(conf, cxx, gcc=True)
-       conf.env.CXX_NAME = 'gcc'
-       conf.env.CXX      = cxx
-
-@conftest
-def gxx_common_flags(conf):
-       v = conf.env
-
-       # CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS
-       v['CXXFLAGS_DEBUG'] = ['-g']
-       v['CXXFLAGS_RELEASE'] = ['-O2']
-
-       v['CXX_SRC_F']           = ''
-       v['CXX_TGT_F']           = ['-c', '-o', ''] # shell hack for -MD
-       v['CPPPATH_ST']          = '-I%s' # template for adding include paths
-
-       # linker
-       if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
-       v['CXXLNK_SRC_F']        = ''
-       v['CXXLNK_TGT_F']        = ['-o', ''] # shell hack for -MD
-
-       v['LIB_ST']              = '-l%s' # template for adding libs
-       v['LIBPATH_ST']          = '-L%s' # template for adding libpaths
-       v['STATICLIB_ST']        = '-l%s'
-       v['STATICLIBPATH_ST']    = '-L%s'
-       v['RPATH_ST']            = '-Wl,-rpath,%s'
-       v['CXXDEFINES_ST']       = '-D%s'
-
-       v['SONAME_ST']           = '-Wl,-h,%s'
-       v['SHLIB_MARKER']        = '-Wl,-Bdynamic'
-       v['STATICLIB_MARKER']    = '-Wl,-Bstatic'
-       v['FULLSTATIC_MARKER']   = '-static'
-
-       # program
-       v['program_PATTERN']     = '%s'
-
-       # shared library
-       v['shlib_CXXFLAGS']      = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC aleady defines the __PIC__ macro
-       v['shlib_LINKFLAGS']     = ['-shared']
-       v['shlib_PATTERN']       = 'lib%s.so'
-
-       # static lib
-       v['staticlib_LINKFLAGS'] = ['-Wl,-Bstatic']
-       v['staticlib_PATTERN']   = 'lib%s.a'
-
-       # osx stuff
-       v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup']
-       v['CCFLAGS_MACBUNDLE']   = ['-fPIC']
-       v['macbundle_PATTERN']   = '%s.bundle'
-
-@conftest
-def gxx_modifier_win32(conf):
-       v = conf.env
-       v['program_PATTERN']     = '%s.exe'
-
-       v['shlib_PATTERN']       = '%s.dll'
-       v['implib_PATTERN']      = 'lib%s.dll.a'
-       v['IMPLIB_ST']           = '-Wl,--out-implib,%s'
-
-       dest_arch = v['DEST_CPU']
-       v['shlib_CXXFLAGS'] = []
-
-       v.append_value('shlib_CXXFLAGS', '-DDLL_EXPORT') # TODO adding nonstandard defines like this DLL_EXPORT is not a good idea
-
-       # Auto-import is enabled by default even without this option,
-       # but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
-       # that the linker emits otherwise.
-       v.append_value('LINKFLAGS', '-Wl,--enable-auto-import')
-
-@conftest
-def gxx_modifier_cygwin(conf):
-       gxx_modifier_win32(conf)
-       v = conf.env
-       v['shlib_PATTERN']       = 'cyg%s.dll'
-       v.append_value('shlib_LINKFLAGS', '-Wl,--enable-auto-image-base')
-
-@conftest
-def gxx_modifier_darwin(conf):
-       v = conf.env
-       v['shlib_CXXFLAGS']      = ['-fPIC', '-compatibility_version', '1', '-current_version', '1']
-       v['shlib_LINKFLAGS']     = ['-dynamiclib']
-       v['shlib_PATTERN']       = 'lib%s.dylib'
-
-       v['staticlib_LINKFLAGS'] = []
-
-       v['SHLIB_MARKER']        = ''
-       v['STATICLIB_MARKER']    = ''
-       v['SONAME_ST']           = ''
-
-@conftest
-def gxx_modifier_aix(conf):
-       v = conf.env
-       v['program_LINKFLAGS']   = ['-Wl,-brtl']
-
-       v['shlib_LINKFLAGS']     = ['-shared', '-Wl,-brtl,-bexpfull']
-
-       v['SHLIB_MARKER']        = ''
-
-@conftest
-def gxx_modifier_openbsd(conf):
-       conf.env['SONAME_ST'] = []
-
-@conftest
-def gxx_modifier_platform(conf):
-       # * set configurations specific for a platform.
-       # * the destination platform is detected automatically by looking at the macros the compiler predefines,
-       #   and if it's not recognised, it fallbacks to sys.platform.
-       dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
-       gxx_modifier_func = globals().get('gxx_modifier_' + dest_os)
-       if gxx_modifier_func:
-                       gxx_modifier_func(conf)
-
-def detect(conf):
-       conf.find_gxx()
-       conf.find_cpp()
-       conf.find_ar()
-       conf.gxx_common_flags()
-       conf.gxx_modifier_platform()
-       conf.cxx_load_tools()
-       conf.cxx_add_flags()
-       conf.link_add_flags()
diff --git a/third_party/waf/wafadmin/Tools/icc.py b/third_party/waf/wafadmin/Tools/icc.py
deleted file mode 100644 (file)
index 9c9a926..0000000
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Stian Selnes, 2008
-# Thomas Nagy 2009
-
-import os, sys
-import Configure, Options, Utils
-import ccroot, ar, gcc
-from Configure import conftest
-
-@conftest
-def find_icc(conf):
-       if sys.platform == 'cygwin':
-               conf.fatal('The Intel compiler does not work on Cygwin')
-
-       v = conf.env
-       cc = None
-       if v['CC']: cc = v['CC']
-       elif 'CC' in conf.environ: cc = conf.environ['CC']
-       if not cc: cc = conf.find_program('icc', var='CC')
-       if not cc: cc = conf.find_program('ICL', var='CC')
-       if not cc: conf.fatal('Intel C Compiler (icc) was not found')
-       cc = conf.cmd_to_list(cc)
-
-       ccroot.get_cc_version(conf, cc, icc=True)
-       v['CC'] = cc
-       v['CC_NAME'] = 'icc'
-
-detect = '''
-find_icc
-find_ar
-gcc_common_flags
-gcc_modifier_platform
-cc_load_tools
-cc_add_flags
-link_add_flags
-'''
diff --git a/third_party/waf/wafadmin/Tools/icpc.py b/third_party/waf/wafadmin/Tools/icpc.py
deleted file mode 100644 (file)
index 7d79c57..0000000
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy 2009
-
-import os, sys
-import Configure, Options, Utils
-import ccroot, ar, gxx
-from Configure import conftest
-
-@conftest
-def find_icpc(conf):
-       if sys.platform == 'cygwin':
-               conf.fatal('The Intel compiler does not work on Cygwin')
-
-       v = conf.env
-       cxx = None
-       if v['CXX']: cxx = v['CXX']
-       elif 'CXX' in conf.environ: cxx = conf.environ['CXX']
-       if not cxx: cxx = conf.find_program('icpc', var='CXX')
-       if not cxx: conf.fatal('Intel C++ Compiler (icpc) was not found')
-       cxx = conf.cmd_to_list(cxx)
-
-       ccroot.get_cc_version(conf, cxx, icc=True)
-       v['CXX'] = cxx
-       v['CXX_NAME'] = 'icc'
-
-detect = '''
-find_icpc
-find_ar
-gxx_common_flags
-gxx_modifier_platform
-cxx_load_tools
-cxx_add_flags
-link_add_flags
-'''
diff --git a/third_party/waf/wafadmin/Tools/intltool.py b/third_party/waf/wafadmin/Tools/intltool.py
deleted file mode 100644 (file)
index 5fb3df2..0000000
+++ /dev/null
@@ -1,138 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006 (ita)
-
-"intltool support"
-
-import os, re
-import Configure, TaskGen, Task, Utils, Runner, Options, Build, config_c
-from TaskGen import feature, before, taskgen
-from Logs import error
-
-"""
-Usage:
-
-bld(features='intltool_in', source='a.po b.po', podir='po', cache='.intlcache', flags='')
-
-"""
-
-class intltool_in_taskgen(TaskGen.task_gen):
-       """deprecated"""
-       def __init__(self, *k, **kw):
-               TaskGen.task_gen.__init__(self, *k, **kw)
-
-@before('apply_core')
-@feature('intltool_in')
-def iapply_intltool_in_f(self):
-       try: self.meths.remove('apply_core')
-       except ValueError: pass
-
-       for i in self.to_list(self.source):
-               node = self.path.find_resource(i)
-
-               podir = getattr(self, 'podir', 'po')
-               podirnode = self.path.find_dir(podir)
-               if not podirnode:
-                       error("could not find the podir %r" % podir)
-                       continue
-
-               cache = getattr(self, 'intlcache', '.intlcache')
-               self.env['INTLCACHE'] = os.path.join(self.path.bldpath(self.env), podir, cache)
-               self.env['INTLPODIR'] = podirnode.srcpath(self.env)
-               self.env['INTLFLAGS'] = getattr(self, 'flags', ['-q', '-u', '-c'])
-
-               task = self.create_task('intltool', node, node.change_ext(''))
-               task.install_path = self.install_path
-
-class intltool_po_taskgen(TaskGen.task_gen):
-       """deprecated"""
-       def __init__(self, *k, **kw):
-               TaskGen.task_gen.__init__(self, *k, **kw)
-
-
-@feature('intltool_po')
-def apply_intltool_po(self):
-       try: self.meths.remove('apply_core')
-       except ValueError: pass
-
-       self.default_install_path = '${LOCALEDIR}'
-       appname = getattr(self, 'appname', 'set_your_app_name')
-       podir = getattr(self, 'podir', '')
-
-       def install_translation(task):
-               out = task.outputs[0]
-               filename = out.name
-               (langname, ext) = os.path.splitext(filename)
-               inst_file = langname + os.sep + 'LC_MESSAGES' + os.sep + appname + '.mo'
-               self.bld.install_as(os.path.join(self.install_path, inst_file), out, self.env, self.chmod)
-
-       linguas = self.path.find_resource(os.path.join(podir, 'LINGUAS'))
-       if linguas:
-               # scan LINGUAS file for locales to process
-               file = open(linguas.abspath())
-               langs = []
-               for line in file.readlines():
-                       # ignore lines containing comments
-                       if not line.startswith('#'):
-                               langs += line.split()
-               file.close()
-               re_linguas = re.compile('[-a-zA-Z_@.]+')
-               for lang in langs:
-                       # Make sure that we only process lines which contain locales
-                       if re_linguas.match(lang):
-                               node = self.path.find_resource(os.path.join(podir, re_linguas.match(lang).group() + '.po'))
-                               task = self.create_task('po')
-                               task.set_inputs(node)
-                               task.set_outputs(node.change_ext('.mo'))
-                               if self.bld.is_install: task.install = install_translation
-       else:
-               Utils.pprint('RED', "Error no LINGUAS file found in po directory")
-
-Task.simple_task_type('po', '${POCOM} -o ${TGT} ${SRC}', color='BLUE', shell=False)
-Task.simple_task_type('intltool',
-       '${INTLTOOL} ${INTLFLAGS} ${INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}',
-       color='BLUE', after="cc_link cxx_link", shell=False)
-
-def detect(conf):
-       pocom = conf.find_program('msgfmt')
-       if not pocom:
-               # if msgfmt should not be mandatory, catch the thrown exception in your wscript
-               conf.fatal('The program msgfmt (gettext) is mandatory!')
-       conf.env['POCOM'] = pocom
-
-       # NOTE: it is possible to set INTLTOOL in the environment, but it must not have spaces in it
-
-       intltool = conf.find_program('intltool-merge', var='INTLTOOL')
-       if not intltool:
-               # if intltool-merge should not be mandatory, catch the thrown exception in your wscript
-               if Options.platform == 'win32':
-                       perl = conf.find_program('perl', var='PERL')
-                       if not perl:
-                               conf.fatal('The program perl (required by intltool) could not be found')
-
-                       intltooldir = Configure.find_file('intltool-merge', os.environ['PATH'].split(os.pathsep))
-                       if not intltooldir:
-                               conf.fatal('The program intltool-merge (intltool, gettext-devel) is mandatory!')
-
-                       conf.env['INTLTOOL'] = Utils.to_list(conf.env['PERL']) + [intltooldir + os.sep + 'intltool-merge']
-                       conf.check_message('intltool', '', True, ' '.join(conf.env['INTLTOOL']))
-               else:
-                       conf.fatal('The program intltool-merge (intltool, gettext-devel) is mandatory!')
-
-       def getstr(varname):
-               return getattr(Options.options, varname, '')
-
-       prefix  = conf.env['PREFIX']
-       datadir = getstr('datadir')
-       if not datadir: datadir = os.path.join(prefix,'share')
-
-       conf.define('LOCALEDIR', os.path.join(datadir, 'locale'))
-       conf.define('DATADIR', datadir)
-
-       if conf.env['CC'] or conf.env['CXX']:
-               # Define to 1 if <locale.h> is present
-               conf.check(header_name='locale.h')
-
-def set_options(opt):
-       opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]')
-       opt.add_option('--datadir', type='string', default='', dest='datadir', help='read-only application data')
diff --git a/third_party/waf/wafadmin/Tools/javaw.py b/third_party/waf/wafadmin/Tools/javaw.py
deleted file mode 100644 (file)
index 4d9f4c7..0000000
+++ /dev/null
@@ -1,254 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2008 (ita)
-
-"""
-Java support
-
-Javac is one of the few compilers that behaves very badly:
-* it outputs files where it wants to (-d is only for the package root)
-* it recompiles files silently behind your back
-* it outputs an undefined amount of files (inner classes)
-
-Fortunately, the convention makes it possible to use the build dir without
-too many problems for the moment
-
-Inner classes must be located and cleaned when a problem arise,
-for the moment waf does not track the production of inner classes.
-
-Adding all the files to a task and executing it if any of the input files
-change is only annoying for the compilation times
-
-Compilation can be run using Jython[1] rather than regular Python. Instead of
-running one of the following commands:
-    ./waf configure
-    python waf configure
-You would have to run:
-    java -jar /path/to/jython.jar waf configure
-
-[1] http://www.jython.org/
-"""
-
-import os, re
-from Configure import conf
-import TaskGen, Task, Utils, Options, Build
-from TaskGen import feature, before, taskgen
-
-class_check_source = '''
-public class Test {
-       public static void main(String[] argv) {
-               Class lib;
-               if (argv.length < 1) {
-                       System.err.println("Missing argument");
-                       System.exit(77);
-               }
-               try {
-                       lib = Class.forName(argv[0]);
-               } catch (ClassNotFoundException e) {
-                       System.err.println("ClassNotFoundException");
-                       System.exit(1);
-               }
-               lib = null;
-               System.exit(0);
-       }
-}
-'''
-
-@feature('jar')
-@before('apply_core')
-def jar_files(self):
-       basedir = getattr(self, 'basedir', '.')
-       destfile = getattr(self, 'destfile', 'test.jar')
-       jaropts = getattr(self, 'jaropts', [])
-       jarcreate = getattr(self, 'jarcreate', 'cf')
-
-       dir = self.path.find_dir(basedir)
-       if not dir: raise
-
-       jaropts.append('-C')
-       jaropts.append(dir.abspath(self.env))
-       jaropts.append('.')
-
-       out = self.path.find_or_declare(destfile)
-
-       tsk = self.create_task('jar_create')
-       tsk.set_outputs(out)
-       tsk.inputs = [x for x in dir.find_iter(src=0, bld=1) if x.id != out.id]
-       tsk.env['JAROPTS'] = jaropts
-       tsk.env['JARCREATE'] = jarcreate
-
-@feature('javac')
-@before('apply_core')
-def apply_java(self):
-       Utils.def_attrs(self, jarname='', jaropts='', classpath='',
-               sourcepath='.', srcdir='.', source_re='**/*.java',
-               jar_mf_attributes={}, jar_mf_classpath=[])
-
-       if getattr(self, 'source_root', None):
-               # old stuff
-               self.srcdir = self.source_root
-
-
-       nodes_lst = []
-
-       if not self.classpath:
-               if not self.env['CLASSPATH']:
-                       self.env['CLASSPATH'] = '..' + os.pathsep + '.'
-       else:
-               self.env['CLASSPATH'] = self.classpath
-
-       srcdir_node = self.path.find_dir(self.srcdir)
-       if not srcdir_node:
-               raise Utils.WafError('could not find srcdir %r' % self.srcdir)
-
-       src_nodes = [x for x in srcdir_node.ant_glob(self.source_re, flat=False)]
-       bld_nodes = [x.change_ext('.class') for x in src_nodes]
-
-       self.env['OUTDIR'] = [srcdir_node.bldpath(self.env)]
-
-       tsk = self.create_task('javac')
-       tsk.set_inputs(src_nodes)
-       tsk.set_outputs(bld_nodes)
-
-       if getattr(self, 'compat', None):
-               tsk.env.append_value('JAVACFLAGS', ['-source', self.compat])
-
-       if hasattr(self, 'sourcepath'):
-               fold = [self.path.find_dir(x) for x in self.to_list(self.sourcepath)]
-               names = os.pathsep.join([x.srcpath() for x in fold])
-       else:
-               names = srcdir_node.srcpath()
-
-       if names:
-               tsk.env.append_value('JAVACFLAGS', ['-sourcepath', names])
-
-       if self.jarname:
-               jtsk = self.create_task('jar_create', bld_nodes, self.path.find_or_declare(self.jarname))
-               jtsk.set_run_after(tsk)
-
-               if not self.env.JAROPTS:
-                       if self.jaropts:
-                               self.env.JAROPTS = self.jaropts
-                       else:
-                               dirs = '.'
-                               self.env.JAROPTS = ['-C', ''.join(self.env['OUTDIR']), dirs]
-
-Task.simple_task_type('jar_create', '${JAR} ${JARCREATE} ${TGT} ${JAROPTS}', color='GREEN', shell=False)
-cls = Task.simple_task_type('javac', '${JAVAC} -classpath ${CLASSPATH} -d ${OUTDIR} ${JAVACFLAGS} ${SRC}', shell=False)
-cls.color = 'BLUE'
-def post_run_javac(self):
-       """this is for cleaning the folder
-       javac creates single files for inner classes
-       but it is not possible to know which inner classes in advance"""
-
-       par = {}
-       for x in self.inputs:
-               par[x.parent.id] = x.parent
-
-       inner = {}
-       for k in par.values():
-               path = k.abspath(self.env)
-               lst = os.listdir(path)
-
-               for u in lst:
-                       if u.find('$') >= 0:
-                               inner_class_node = k.find_or_declare(u)
-                               inner[inner_class_node.id] = inner_class_node
-
-       to_add = set(inner.keys()) - set([x.id for x in self.outputs])
-       for x in to_add:
-               self.outputs.append(inner[x])
-
-       self.cached = True # disable the cache here - inner classes are a problem
-       return Task.Task.post_run(self)
-cls.post_run = post_run_javac
-
-def detect(conf):
-       # If JAVA_PATH is set, we prepend it to the path list
-       java_path = conf.environ['PATH'].split(os.pathsep)
-       v = conf.env
-
-       if 'JAVA_HOME' in conf.environ:
-               java_path = [os.path.join(conf.environ['JAVA_HOME'], 'bin')] + java_path
-               conf.env['JAVA_HOME'] = [conf.environ['JAVA_HOME']]
-
-       for x in 'javac java jar'.split():
-               conf.find_program(x, var=x.upper(), path_list=java_path)
-               conf.env[x.upper()] = conf.cmd_to_list(conf.env[x.upper()])
-       v['JAVA_EXT'] = ['.java']
-
-       if 'CLASSPATH' in conf.environ:
-               v['CLASSPATH'] = conf.environ['CLASSPATH']
-
-       if not v['JAR']: conf.fatal('jar is required for making java packages')
-       if not v['JAVAC']: conf.fatal('javac is required for compiling java classes')
-       v['JARCREATE'] = 'cf' # can use cvf
-
-@conf
-def check_java_class(self, classname, with_classpath=None):
-       """Check if the specified java class is installed"""
-
-       import shutil
-
-       javatestdir = '.waf-javatest'
-
-       classpath = javatestdir
-       if self.env['CLASSPATH']:
-               classpath += os.pathsep + self.env['CLASSPATH']
-       if isinstance(with_classpath, str):
-               classpath += os.pathsep + with_classpath
-
-       shutil.rmtree(javatestdir, True)
-       os.mkdir(javatestdir)
-
-       java_file = open(os.path.join(javatestdir, 'Test.java'), 'w')
-       java_file.write(class_check_source)
-       java_file.close()
-
-       # Compile the source
-       Utils.exec_command(self.env['JAVAC'] + [os.path.join(javatestdir, 'Test.java')], shell=False)
-
-       # Try to run the app
-       cmd = self.env['JAVA'] + ['-cp', classpath, 'Test', classname]
-       self.log.write("%s\n" % str(cmd))
-       found = Utils.exec_command(cmd, shell=False, log=self.log)
-
-       self.check_message('Java class %s' % classname, "", not found)
-
-       shutil.rmtree(javatestdir, True)
-
-       return found
-
-@conf
-def check_jni_headers(conf):
-       """
-       Check for jni headers and libraries
-
-       On success the environment variable xxx_JAVA is added for uselib
-       """
-
-       if not conf.env.CC_NAME and not conf.env.CXX_NAME:
-               conf.fatal('load a compiler first (gcc, g++, ..)')
-
-       if not conf.env.JAVA_HOME:
-               conf.fatal('set JAVA_HOME in the system environment')
-
-       # jni requires the jvm
-       javaHome = conf.env['JAVA_HOME'][0]
-
-       b = Build.BuildContext()
-       b.load_dirs(conf.srcdir, conf.blddir)
-       dir = b.root.find_dir(conf.env.JAVA_HOME[0] + '/include')
-       f = dir.ant_glob('**/(jni|jni_md).h', flat=False)
-       incDirs = [x.parent.abspath() for x in f]
-
-       dir = b.root.find_dir(conf.env.JAVA_HOME[0])
-       f = dir.ant_glob('**/*jvm.(so|dll)', flat=False)
-       libDirs = [x.parent.abspath() for x in f] or [javaHome]
-
-       for i, d in enumerate(libDirs):
-               if conf.check(header_name='jni.h', define_name='HAVE_JNI_H', lib='jvm',
-                               libpath=d, includes=incDirs, uselib_store='JAVA', uselib='JAVA'):
-                       break
-       else:
-               conf.fatal('could not find lib jvm in %r (see config.log)' % libDirs)
diff --git a/third_party/waf/wafadmin/Tools/kde4.py b/third_party/waf/wafadmin/Tools/kde4.py
deleted file mode 100644 (file)
index 1f3bae7..0000000
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006 (ita)
-
-import os, sys, re
-import Options, TaskGen, Task, Utils
-from TaskGen import taskgen, feature, after
-
-class msgfmt_taskgen(TaskGen.task_gen):
-       def __init__(self, *k, **kw):
-               TaskGen.task_gen.__init__(self, *k, **kw)
-
-@feature('msgfmt')
-def init_msgfmt(self):
-       #langs = '' # for example "foo/fr foo/br"
-       self.default_install_path = '${KDE4_LOCALE_INSTALL_DIR}'
-
-@feature('msgfmt')
-@after('init_msgfmt')
-def apply_msgfmt(self):
-       for lang in self.to_list(self.langs):
-               node = self.path.find_resource(lang+'.po')
-               task = self.create_task('msgfmt', node, node.change_ext('.mo'))
-
-               if not self.bld.is_install: continue
-               langname = lang.split('/')
-               langname = langname[-1]
-               task.install_path = self.install_path + os.sep + langname + os.sep + 'LC_MESSAGES'
-               task.filename = getattr(self, 'appname', 'set_your_appname') + '.mo'
-               task.chmod = self.chmod
-
-def detect(conf):
-       kdeconfig = conf.find_program('kde4-config')
-       if not kdeconfig:
-               conf.fatal('we need kde4-config')
-       prefix = Utils.cmd_output('%s --prefix' % kdeconfig, silent=True).strip()
-       file = '%s/share/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
-       try: os.stat(file)
-       except OSError:
-               file = '%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
-               try: os.stat(file)
-               except OSError: conf.fatal('could not open %s' % file)
-
-       try:
-               txt = Utils.readf(file)
-       except (OSError, IOError):
-               conf.fatal('could not read %s' % file)
-
-       txt = txt.replace('\\\n', '\n')
-       fu = re.compile('#(.*)\n')
-       txt = fu.sub('', txt)
-
-       setregexp = re.compile('([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)')
-       found = setregexp.findall(txt)
-
-       for (_, key, val) in found:
-               #print key, val
-               conf.env[key] = val
-
-       # well well, i could just write an interpreter for cmake files
-       conf.env['LIB_KDECORE']='kdecore'
-       conf.env['LIB_KDEUI']  ='kdeui'
-       conf.env['LIB_KIO']    ='kio'
-       conf.env['LIB_KHTML']  ='khtml'
-       conf.env['LIB_KPARTS'] ='kparts'
-
-       conf.env['LIBPATH_KDECORE'] = conf.env['KDE4_LIB_INSTALL_DIR']
-       conf.env['CPPPATH_KDECORE'] = conf.env['KDE4_INCLUDE_INSTALL_DIR']
-       conf.env.append_value('CPPPATH_KDECORE', conf.env['KDE4_INCLUDE_INSTALL_DIR']+"/KDE")
-
-       conf.env['MSGFMT'] = conf.find_program('msgfmt')
-
-Task.simple_task_type('msgfmt', '${MSGFMT} ${SRC} -o ${TGT}', color='BLUE', shell=False)
diff --git a/third_party/waf/wafadmin/Tools/libtool.py b/third_party/waf/wafadmin/Tools/libtool.py
deleted file mode 100644 (file)
index bcc0e2f..0000000
+++ /dev/null
@@ -1,329 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Matthias Jahn, 2008, jahn matthias ath freenet punto de
-# Thomas Nagy, 2008 (ita)
-
-import sys, re, os, optparse
-
-import TaskGen, Task, Utils, preproc
-from Logs import error, debug, warn
-from TaskGen import taskgen, after, before, feature
-
-REVISION="0.1.3"
-
-"""
-if you want to use the code here, you must use something like this:
-obj = obj.create(...)
-obj.features.append("libtool")
-obj.vnum = "1.2.3" # optional, but versioned libraries are common
-"""
-
-# fake libtool files
-fakelibtool_vardeps = ['CXX', 'PREFIX']
-def fakelibtool_build(task):
-       # Writes a .la file, used by libtool
-       env = task.env
-       dest  = open(task.outputs[0].abspath(env), 'w')
-       sname = task.inputs[0].name
-       fu = dest.write
-       fu("# Generated by ltmain.sh - GNU libtool 1.5.18 - (pwn3d by BKsys II code name WAF)\n")
-       if env['vnum']:
-               nums = env['vnum'].split('.')
-               libname = task.inputs[0].name
-               name3 = libname+'.'+env['vnum']
-               name2 = libname+'.'+nums[0]
-               name1 = libname
-               fu("dlname='%s'\n" % name2)
-               strn = " ".join([name3, name2, name1])
-               fu("library_names='%s'\n" % (strn) )
-       else:
-               fu("dlname='%s'\n" % sname)
-               fu("library_names='%s %s %s'\n" % (sname, sname, sname) )
-       fu("old_library=''\n")
-       vars = ' '.join(env['libtoolvars']+env['LINKFLAGS'])
-       fu("dependency_libs='%s'\n" % vars)
-       fu("current=0\n")
-       fu("age=0\nrevision=0\ninstalled=yes\nshouldnotlink=no\n")
-       fu("dlopen=''\ndlpreopen=''\n")
-       fu("libdir='%s/lib'\n" % env['PREFIX'])
-       dest.close()
-       return 0
-
-def read_la_file(path):
-       sp = re.compile(r'^([^=]+)=\'(.*)\'$')
-       dc={}
-       file = open(path, "r")
-       for line in file.readlines():
-               try:
-                       #print sp.split(line.strip())
-                       _, left, right, _ = sp.split(line.strip())
-                       dc[left]=right
-               except ValueError:
-                       pass
-       file.close()
-       return dc
-
-@feature("libtool")
-@after('apply_link')
-def apply_link_libtool(self):
-       if self.type != 'program':
-               linktask = self.link_task
-               self.latask = self.create_task('fakelibtool', linktask.outputs, linktask.outputs[0].change_ext('.la'))
-
-       if self.bld.is_install:
-               self.bld.install_files('${PREFIX}/lib', linktask.outputs[0], self.env)
-
-@feature("libtool")
-@before('apply_core')
-def apply_libtool(self):
-       self.env['vnum']=self.vnum
-
-       paths=[]
-       libs=[]
-       libtool_files=[]
-       libtool_vars=[]
-
-       for l in self.env['LINKFLAGS']:
-               if l[:2]=='-L':
-                       paths.append(l[2:])
-               elif l[:2]=='-l':
-                       libs.append(l[2:])
-
-       for l in libs:
-               for p in paths:
-                       dict = read_la_file(p+'/lib'+l+'.la')
-                       linkflags2 = dict.get('dependency_libs', '')
-                       for v in linkflags2.split():
-                               if v.endswith('.la'):
-                                       libtool_files.append(v)
-                                       libtool_vars.append(v)
-                                       continue
-                               self.env.append_unique('LINKFLAGS', v)
-                               break
-
-       self.env['libtoolvars']=libtool_vars
-
-       while libtool_files:
-               file = libtool_files.pop()
-               dict = read_la_file(file)
-               for v in dict['dependency_libs'].split():
-                       if v[-3:] == '.la':
-                               libtool_files.append(v)
-                               continue
-                       self.env.append_unique('LINKFLAGS', v)
-
-Task.task_type_from_func('fakelibtool', vars=fakelibtool_vardeps, func=fakelibtool_build, color='BLUE', after="cc_link cxx_link static_link")
-
-class libtool_la_file:
-       def __init__ (self, la_filename):
-               self.__la_filename = la_filename
-               #remove path and .la suffix
-               self.linkname = str(os.path.split(la_filename)[-1])[:-3]
-               if self.linkname.startswith("lib"):
-                       self.linkname = self.linkname[3:]
-               # The name that we can dlopen(3).
-               self.dlname = None
-               # Names of this library
-               self.library_names = None
-               # The name of the static archive.
-               self.old_library = None
-               # Libraries that this one depends upon.
-               self.dependency_libs = None
-               # Version information for libIlmImf.
-               self.current = None
-               self.age = None
-               self.revision = None
-               # Is this an already installed library?
-               self.installed = None
-               # Should we warn about portability when linking against -modules?
-               self.shouldnotlink = None
-               # Files to dlopen/dlpreopen
-               self.dlopen = None
-               self.dlpreopen = None
-               # Directory that this library needs to be installed in:
-               self.libdir = '/usr/lib'
-               if not self.__parse():
-                       raise ValueError("file %s not found!!" %(la_filename))
-
-       def __parse(self):
-               "Retrieve the variables from a file"
-               if not os.path.isfile(self.__la_filename): return 0
-               la_file=open(self.__la_filename, 'r')
-               for line in la_file:
-                       ln = line.strip()
-                       if not ln: continue
-                       if ln[0]=='#': continue
-                       (key, value) = str(ln).split('=', 1)
-                       key = key.strip()
-                       value = value.strip()
-                       if value == "no": value = False
-                       elif value == "yes": value = True
-                       else:
-                               try: value = int(value)
-                               except ValueError: value = value.strip("'")
-                       setattr(self, key, value)
-               la_file.close()
-               return 1
-
-       def get_libs(self):
-               """return linkflags for this lib"""
-               libs = []
-               if self.dependency_libs:
-                       libs = str(self.dependency_libs).strip().split()
-               if libs == None:
-                       libs = []
-               # add la lib and libdir
-               libs.insert(0, "-l%s" % self.linkname.strip())
-               libs.insert(0, "-L%s" % self.libdir.strip())
-               return libs
-
-       def __str__(self):
-               return '''\
-dlname = "%(dlname)s"
-library_names = "%(library_names)s"
-old_library = "%(old_library)s"
-dependency_libs = "%(dependency_libs)s"
-version = %(current)s.%(age)s.%(revision)s
-installed = "%(installed)s"
-shouldnotlink = "%(shouldnotlink)s"
-dlopen = "%(dlopen)s"
-dlpreopen = "%(dlpreopen)s"
-libdir = "%(libdir)s"''' % self.__dict__
-
-class libtool_config:
-       def __init__ (self, la_filename):
-               self.__libtool_la_file = libtool_la_file(la_filename)
-               tmp = self.__libtool_la_file
-               self.__version = [int(tmp.current), int(tmp.age), int(tmp.revision)]
-               self.__sub_la_files = []
-               self.__sub_la_files.append(la_filename)
-               self.__libs = None
-
-       def __cmp__(self, other):
-               """make it compareable with X.Y.Z versions (Y and Z are optional)"""
-               if not other:
-                       return 1
-               othervers = [int(s) for s in str(other).split(".")]
-               selfvers = self.__version
-               return cmp(selfvers, othervers)
-
-       def __str__(self):
-               return "\n".join([
-                       str(self.__libtool_la_file),
-                       ' '.join(self.__libtool_la_file.get_libs()),
-                       '* New getlibs:',
-                       ' '.join(self.get_libs())
-               ])
-
-       def __get_la_libs(self, la_filename):
-               return libtool_la_file(la_filename).get_libs()
-
-       def get_libs(self):
-               """return the complete uniqe linkflags that do not
-               contain .la files anymore"""
-               libs_list = list(self.__libtool_la_file.get_libs())
-               libs_map = {}
-               while len(libs_list) > 0:
-                       entry = libs_list.pop(0)
-                       if entry:
-                               if str(entry).endswith(".la"):
-                                       ## prevents duplicate .la checks
-                                       if entry not in self.__sub_la_files:
-                                               self.__sub_la_files.append(entry)
-                                               libs_list.extend(self.__get_la_libs(entry))
-                               else:
-                                       libs_map[entry]=1
-               self.__libs = libs_map.keys()
-               return self.__libs
-
-       def get_libs_only_L(self):
-               if not self.__libs: self.get_libs()
-               libs = self.__libs
-               libs = [s for s in libs if str(s).startswith('-L')]
-               return libs
-
-       def get_libs_only_l(self):
-               if not self.__libs: self.get_libs()
-               libs = self.__libs
-               libs = [s for s in libs if str(s).startswith('-l')]
-               return libs
-
-       def get_libs_only_other(self):
-               if not self.__libs: self.get_libs()
-               libs = self.__libs
-               libs = [s for s in libs if not(str(s).startswith('-L')or str(s).startswith('-l'))]
-               return libs
-
-def useCmdLine():
-       """parse cmdline args and control build"""
-       usage = '''Usage: %prog [options] PathToFile.la
-example: %prog --atleast-version=2.0.0 /usr/lib/libIlmImf.la
-nor: %prog --libs /usr/lib/libamarok.la'''
-       parser = optparse.OptionParser(usage)
-       a = parser.add_option
-       a("--version", dest = "versionNumber",
-               action = "store_true", default = False,
-               help = "output version of libtool-config"
-               )
-       a("--debug", dest = "debug",
-               action = "store_true", default = False,
-               help = "enable debug"
-               )
-       a("--libs", dest = "libs",
-               action = "store_true", default = False,
-               help = "output all linker flags"
-               )
-       a("--libs-only-l", dest = "libs_only_l",
-               action = "store_true", default = False,
-               help = "output -l flags"
-               )
-       a("--libs-only-L", dest = "libs_only_L",
-               action = "store_true", default = False,
-               help = "output -L flags"
-               )
-       a("--libs-only-other", dest = "libs_only_other",
-               action = "store_true", default = False,
-               help = "output other libs (e.g. -pthread)"
-               )
-       a("--atleast-version", dest = "atleast_version",
-               default=None,
-               help = "return 0 if the module is at least version ATLEAST_VERSION"
-               )
-       a("--exact-version", dest = "exact_version",
-               default=None,
-               help = "return 0 if the module is exactly version EXACT_VERSION"
-               )
-       a("--max-version", dest = "max_version",
-               default=None,
-               help = "return 0 if the module is at no newer than version MAX_VERSION"
-               )
-
-       (options, args) = parser.parse_args()
-       if len(args) != 1 and not options.versionNumber:
-               parser.error("incorrect number of arguments")
-       if options.versionNumber:
-               print("libtool-config version %s" % REVISION)
-               return 0
-       ltf = libtool_config(args[0])
-       if options.debug:
-               print(ltf)
-       if options.atleast_version:
-               if ltf >= options.atleast_version: return 0
-               sys.exit(1)
-       if options.exact_version:
-               if ltf == options.exact_version: return 0
-               sys.exit(1)
-       if options.max_version:
-               if ltf <= options.max_version: return 0
-               sys.exit(1)
-
-       def p(x):
-               print(" ".join(x))
-       if options.libs: p(ltf.get_libs())
-       elif options.libs_only_l: p(ltf.get_libs_only_l())
-       elif options.libs_only_L: p(ltf.get_libs_only_L())
-       elif options.libs_only_other: p(ltf.get_libs_only_other())
-       return 0
-
-if __name__ == '__main__':
-       useCmdLine()
diff --git a/third_party/waf/wafadmin/Tools/lua.py b/third_party/waf/wafadmin/Tools/lua.py
deleted file mode 100644 (file)
index 8a6c1f4..0000000
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Sebastian Schlingmann, 2008
-# Thomas Nagy, 2008 (ita)
-
-import TaskGen
-from TaskGen import taskgen, feature
-from Constants import *
-
-TaskGen.declare_chain(
-       name = 'luac',
-       rule = '${LUAC} -s -o ${TGT} ${SRC}',
-       ext_in = '.lua',
-       ext_out = '.luac',
-       reentrant = False,
-       install = 'LUADIR', # env variable
-)
-
-@feature('lua')
-def init_lua(self):
-       self.default_chmod = O755
-
-def detect(conf):
-       conf.find_program('luac', var='LUAC', mandatory = True)
diff --git a/third_party/waf/wafadmin/Tools/msvc.py b/third_party/waf/wafadmin/Tools/msvc.py
deleted file mode 100644 (file)
index 3b82d43..0000000
+++ /dev/null
@@ -1,796 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Carlos Rafael Giani, 2006 (dv)
-# Tamas Pal, 2007 (folti)
-# Nicolas Mercier, 2009
-# Microsoft Visual C++/Intel C++ compiler support - beta, needs more testing
-
-# usage:
-#
-# conf.env['MSVC_VERSIONS'] = ['msvc 9.0', 'msvc 8.0', 'wsdk 7.0', 'intel 11', 'PocketPC 9.0', 'Smartphone 8.0']
-# conf.env['MSVC_TARGETS'] = ['x64']
-# conf.check_tool('msvc')
-# OR conf.check_tool('msvc', funs='no_autodetect')
-# conf.check_lib_msvc('gdi32')
-# conf.check_libs_msvc('kernel32 user32', mandatory=true)
-# ...
-# obj.uselib = 'KERNEL32 USER32 GDI32'
-#
-# platforms and targets will be tested in the order they appear;
-# the first good configuration will be used
-# supported platforms :
-# ia64, x64, x86, x86_amd64, x86_ia64
-
-# compilers supported :
-#  msvc       => Visual Studio, versions 7.1 (2003), 8,0 (2005), 9.0 (2008)
-#  wsdk       => Windows SDK, versions 6.0, 6.1, 7.0
-#  icl        => Intel compiler, versions 9,10,11
-#  Smartphone => Compiler/SDK for Smartphone devices (armv4/v4i)
-#  PocketPC   => Compiler/SDK for PocketPC devices (armv4/v4i)
-
-
-import os, sys, re, string, optparse
-import Utils, TaskGen, Runner, Configure, Task, Options
-from Logs import debug, info, warn, error
-from TaskGen import after, before, feature
-
-from Configure import conftest, conf
-import ccroot, cc, cxx, ar, winres
-from libtool import read_la_file
-
-try:
-       import _winreg
-except:
-       import winreg as _winreg
-
-pproc = Utils.pproc
-
-# importlibs provided by MSVC/Platform SDK. Do NOT search them....
-g_msvc_systemlibs = """
-aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet
-cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs
-credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d
-ddao35u ddao35ud delayimp dhcpcsvc dhcpsapi dlcapi dnsapi dsprop dsuiext dtchelp
-faultrep fcachdll fci fdi framedyd framedyn gdi32 gdiplus glauxglu32 gpedit gpmuuid
-gtrts32w gtrtst32hlink htmlhelp httpapi icm32 icmui imagehlp imm32 iphlpapi iprop
-kernel32 ksguid ksproxy ksuser libcmt libcmtd libcpmt libcpmtd loadperf lz32 mapi
-mapi32 mgmtapi minidump mmc mobsync mpr mprapi mqoa mqrt msacm32 mscms mscoree
-msdasc msimg32 msrating mstask msvcmrt msvcurt msvcurtd mswsock msxml2 mtx mtxdm
-netapi32 nmapinmsupp npptools ntdsapi ntdsbcli ntmsapi ntquery odbc32 odbcbcp
-odbccp32 oldnames ole32 oleacc oleaut32 oledb oledlgolepro32 opends60 opengl32
-osptk parser pdh penter pgobootrun pgort powrprof psapi ptrustm ptrustmd ptrustu
-ptrustud qosname rasapi32 rasdlg rassapi resutils riched20 rpcndr rpcns4 rpcrt4 rtm
-rtutils runtmchk scarddlg scrnsave scrnsavw secur32 sensapi setupapi sfc shell32
-shfolder shlwapi sisbkup snmpapi sporder srclient sti strsafe svcguid tapi32 thunk32
-traffic unicows url urlmon user32 userenv usp10 uuid uxtheme vcomp vcompd vdmdbg
-version vfw32 wbemuuid  webpost wiaguid wininet winmm winscard winspool winstrm
-wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp
-""".split()
-
-
-all_msvc_platforms = [ ('x64', 'amd64'), ('x86', 'x86'), ('ia64', 'ia64'), ('x86_amd64', 'amd64'), ('x86_ia64', 'ia64') ]
-all_wince_platforms = [ ('armv4', 'arm'), ('armv4i', 'arm'), ('mipsii', 'mips'), ('mipsii_fp', 'mips'), ('mipsiv', 'mips'), ('mipsiv_fp', 'mips'), ('sh4', 'sh'), ('x86', 'cex86') ]
-all_icl_platforms = [ ('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86'), ('Itanium', 'ia64')]
-
-def setup_msvc(conf, versions):
-       platforms = Utils.to_list(conf.env['MSVC_TARGETS']) or [i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms]
-       desired_versions = conf.env['MSVC_VERSIONS'] or [v for v,_ in versions][::-1]
-       versiondict = dict(versions)
-
-       for version in desired_versions:
-               try:
-                       targets = dict(versiondict [version])
-                       for target in platforms:
-                               try:
-                                       arch,(p1,p2,p3) = targets[target]
-                                       compiler,revision = version.split()
-                                       return compiler,revision,p1,p2,p3
-                               except KeyError: continue
-               except KeyError: continue
-       conf.fatal('msvc: Impossible to find a valid architecture for building (in setup_msvc)')
-
-@conf
-def get_msvc_version(conf, compiler, version, target, vcvars):
-       debug('msvc: get_msvc_version: %r %r %r', compiler, version, target)
-       batfile = os.path.join(conf.blddir, 'waf-print-msvc.bat')
-       f = open(batfile, 'w')
-       f.write("""@echo off
-set INCLUDE=
-set LIB=
-call "%s" %s
-echo PATH=%%PATH%%
-echo INCLUDE=%%INCLUDE%%
-echo LIB=%%LIB%%
-""" % (vcvars,target))
-       f.close()
-       sout = Utils.cmd_output(['cmd', '/E:on', '/V:on', '/C', batfile])
-       lines = sout.splitlines()
-
-       for x in ('Setting environment', 'Setting SDK environment', 'Intel(R) C++ Compiler'):
-               if lines[0].find(x) != -1:
-                       break
-       else:
-               debug('msvc: get_msvc_version: %r %r %r -> not found', compiler, version, target)
-               conf.fatal('msvc: Impossible to find a valid architecture for building (in get_msvc_version)')
-
-       for line in lines[1:]:
-               if line.startswith('PATH='):
-                       path = line[5:]
-                       MSVC_PATH = path.split(';')
-               elif line.startswith('INCLUDE='):
-                       MSVC_INCDIR = [i for i in line[8:].split(';') if i]
-               elif line.startswith('LIB='):
-                       MSVC_LIBDIR = [i for i in line[4:].split(';') if i]
-
-       # Check if the compiler is usable at all.
-       # The detection may return 64-bit versions even on 32-bit systems, and these would fail to run.
-       env = {}
-       env.update(os.environ)
-       env.update(PATH = path)
-       compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
-       cxx = conf.find_program(compiler_name, path_list=MSVC_PATH)
-       # delete CL if exists. because it could contain parameters wich can change cl's behaviour rather catastrophically.
-       if env.has_key('CL'):
-               del(env['CL'])
-
-       try:
-               p = pproc.Popen([cxx, '/help'], env=env, stdout=pproc.PIPE, stderr=pproc.PIPE)
-               out, err = p.communicate()
-               if p.returncode != 0:
-                       raise Exception('return code: %r: %r' % (p.returncode, err))
-       except Exception, e:
-               debug('msvc: get_msvc_version: %r %r %r -> failure', compiler, version, target)
-               debug(str(e))
-               conf.fatal('msvc: cannot run the compiler (in get_msvc_version)')
-       else:
-               debug('msvc: get_msvc_version: %r %r %r -> OK', compiler, version, target)
-
-       return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
-
-@conf
-def gather_wsdk_versions(conf, versions):
-       version_pattern = re.compile('^v..?.?\...?.?')
-       try:
-               all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows')
-       except WindowsError:
-               try:
-                       all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows')
-               except WindowsError:
-                       return
-       index = 0
-       while 1:
-               try:
-                       version = _winreg.EnumKey(all_versions, index)
-               except WindowsError:
-                       break
-               index = index + 1
-               if not version_pattern.match(version):
-                       continue
-               try:
-                       msvc_version = _winreg.OpenKey(all_versions, version)
-                       path,type = _winreg.QueryValueEx(msvc_version,'InstallationFolder')
-               except WindowsError:
-                       continue
-               if os.path.isfile(os.path.join(path, 'bin', 'SetEnv.cmd')):
-                       targets = []
-                       for target,arch in all_msvc_platforms:
-                               try:
-                                       targets.append((target, (arch, conf.get_msvc_version('wsdk', version, '/'+target, os.path.join(path, 'bin', 'SetEnv.cmd')))))
-                               except Configure.ConfigurationError:
-                                       pass
-                       versions.append(('wsdk ' + version[1:], targets))
-
-@conf
-def gather_msvc_versions(conf, versions):
-       # checks SmartPhones SDKs
-       try:
-               ce_sdk = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs')
-       except WindowsError:
-               try:
-                       ce_sdk = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs')
-               except WindowsError:
-                       ce_sdk = ''
-       if ce_sdk:
-               supported_wince_platforms = []
-               ce_index = 0
-               while 1:
-                       try:
-                               sdk_device = _winreg.EnumKey(ce_sdk, ce_index)
-                       except WindowsError:
-                               break
-                       ce_index = ce_index + 1
-                       sdk = _winreg.OpenKey(ce_sdk, sdk_device)
-                       path,type = _winreg.QueryValueEx(sdk, 'SDKRootDir')
-                       path=str(path)
-                       path,device = os.path.split(path)
-                       if not device:
-                               path,device = os.path.split(path)
-                       for arch,compiler in all_wince_platforms:
-                               platforms = []
-                               if os.path.isdir(os.path.join(path, device, 'Lib', arch)):
-                                       platforms.append((arch, compiler, os.path.join(path, device, 'Include', arch), os.path.join(path, device, 'Lib', arch)))
-                               if platforms:
-                                       supported_wince_platforms.append((device, platforms))
-       # checks MSVC
-       version_pattern = re.compile('^..?\...?')
-       for vcver,vcvar in [('VCExpress','exp'), ('VisualStudio','')]:
-               try:
-                       all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\'+vcver)
-               except WindowsError:
-                       try:
-                               all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\'+vcver)
-                       except WindowsError:
-                               continue
-               index = 0
-               while 1:
-                       try:
-                               version = _winreg.EnumKey(all_versions, index)
-                       except WindowsError:
-                               break
-                       index = index + 1
-                       if not version_pattern.match(version):
-                               continue
-                       try:
-                               msvc_version = _winreg.OpenKey(all_versions, version + "\\Setup\\VS")
-                               path,type = _winreg.QueryValueEx(msvc_version, 'ProductDir')
-                               path=str(path)
-                               targets = []
-                               if ce_sdk:
-                                       for device,platforms in supported_wince_platforms:
-                                               cetargets = []
-                                               for platform,compiler,include,lib in platforms:
-                                                       winCEpath = os.path.join(path, 'VC', 'ce')
-                                                       if os.path.isdir(winCEpath):
-                                                               common_bindirs,_1,_2 = conf.get_msvc_version('msvc', version, 'x86', os.path.join(path, 'Common7', 'Tools', 'vsvars32.bat'))
-                                                               if os.path.isdir(os.path.join(winCEpath, 'lib', platform)):
-                                                                       bindirs = [os.path.join(winCEpath, 'bin', compiler), os.path.join(winCEpath, 'bin', 'x86_'+compiler)] + common_bindirs
-                                                                       incdirs = [include, os.path.join(winCEpath, 'include'), os.path.join(winCEpath, 'atlmfc', 'include')]
-                                                                       libdirs = [lib, os.path.join(winCEpath, 'lib', platform), os.path.join(winCEpath, 'atlmfc', 'lib', platform)]
-                                                                       cetargets.append((platform, (platform, (bindirs,incdirs,libdirs))))
-                                               versions.append((device+' '+version, cetargets))
-                               if os.path.isfile(os.path.join(path, 'VC', 'vcvarsall.bat')):
-                                       for target,realtarget in all_msvc_platforms[::-1]:
-                                               try:
-                                                       targets.append((target, (realtarget, conf.get_msvc_version('msvc', version, target, os.path.join(path, 'VC', 'vcvarsall.bat')))))
-                                               except:
-                                                       pass
-                               elif os.path.isfile(os.path.join(path, 'Common7', 'Tools', 'vsvars32.bat')):
-                                       try:
-                                               targets.append(('x86', ('x86', conf.get_msvc_version('msvc', version, 'x86', os.path.join(path, 'Common7', 'Tools', 'vsvars32.bat')))))
-                                       except Configure.ConfigurationError:
-                                               pass
-                               versions.append(('msvc '+version, targets))
-
-                       except WindowsError:
-                               continue
-
-@conf
-def gather_icl_versions(conf, versions):
-       version_pattern = re.compile('^...?.?\....?.?')
-       try:
-               all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++')
-       except WindowsError:
-               try:
-                       all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\C++')
-               except WindowsError:
-                       return
-       index = 0
-       while 1:
-               try:
-                       version = _winreg.EnumKey(all_versions, index)
-               except WindowsError:
-                       break
-               index = index + 1
-               if not version_pattern.match(version):
-                       continue
-               targets = []
-               for target,arch in all_icl_platforms:
-                       try:
-                               icl_version = _winreg.OpenKey(all_versions, version+'\\'+target)
-                               path,type = _winreg.QueryValueEx(icl_version,'ProductDir')
-                               if os.path.isfile(os.path.join(path, 'bin', 'iclvars.bat')):
-                                       try:
-                                               targets.append((target, (arch, conf.get_msvc_version('intel', version, target, os.path.join(path, 'bin', 'iclvars.bat')))))
-                                       except Configure.ConfigurationError:
-                                               pass
-                       except WindowsError:
-                               continue
-               major = version[0:2]
-               versions.append(('intel ' + major, targets))
-
-@conf
-def get_msvc_versions(conf):
-       if not conf.env.MSVC_INSTALLED_VERSIONS:
-               lst = []
-               conf.gather_msvc_versions(lst)
-               conf.gather_wsdk_versions(lst)
-               conf.gather_icl_versions(lst)
-               conf.env.MSVC_INSTALLED_VERSIONS = lst
-       return conf.env.MSVC_INSTALLED_VERSIONS
-
-@conf
-def print_all_msvc_detected(conf):
-       for version,targets in conf.env['MSVC_INSTALLED_VERSIONS']:
-               info(version)
-               for target,l in targets:
-                       info("\t"+target)
-
-def detect_msvc(conf):
-       versions = get_msvc_versions(conf)
-       return setup_msvc(conf, versions)
-
-@conf
-def find_lt_names_msvc(self, libname, is_static=False):
-       """
-       Win32/MSVC specific code to glean out information from libtool la files.
-       this function is not attached to the task_gen class
-       """
-       lt_names=[
-               'lib%s.la' % libname,
-               '%s.la' % libname,
-       ]
-
-       for path in self.env['LIBPATH']:
-               for la in lt_names:
-                       laf=os.path.join(path,la)
-                       dll=None
-                       if os.path.exists(laf):
-                               ltdict=read_la_file(laf)
-                               lt_libdir=None
-                               if ltdict.get('libdir', ''):
-                                       lt_libdir = ltdict['libdir']
-                               if not is_static and ltdict.get('library_names', ''):
-                                       dllnames=ltdict['library_names'].split()
-                                       dll=dllnames[0].lower()
-                                       dll=re.sub('\.dll$', '', dll)
-                                       return (lt_libdir, dll, False)
-                               elif ltdict.get('old_library', ''):
-                                       olib=ltdict['old_library']
-                                       if os.path.exists(os.path.join(path,olib)):
-                                               return (path, olib, True)
-                                       elif lt_libdir != '' and os.path.exists(os.path.join(lt_libdir,olib)):
-                                               return (lt_libdir, olib, True)
-                                       else:
-                                               return (None, olib, True)
-                               else:
-                                       raise Utils.WafError('invalid libtool object file: %s' % laf)
-       return (None, None, None)
-
-@conf
-def libname_msvc(self, libname, is_static=False, mandatory=False):
-       lib = libname.lower()
-       lib = re.sub('\.lib$','',lib)
-
-       if lib in g_msvc_systemlibs:
-               return lib
-
-       lib=re.sub('^lib','',lib)
-
-       if lib == 'm':
-               return None
-
-       (lt_path, lt_libname, lt_static) = self.find_lt_names_msvc(lib, is_static)
-
-       if lt_path != None and lt_libname != None:
-               if lt_static == True:
-                       # file existence check has been made by find_lt_names
-                       return os.path.join(lt_path,lt_libname)
-
-       if lt_path != None:
-               _libpaths=[lt_path] + self.env['LIBPATH']
-       else:
-               _libpaths=self.env['LIBPATH']
-
-       static_libs=[
-               'lib%ss.lib' % lib,
-               'lib%s.lib' % lib,
-               '%ss.lib' % lib,
-               '%s.lib' %lib,
-               ]
-
-       dynamic_libs=[
-               'lib%s.dll.lib' % lib,
-               'lib%s.dll.a' % lib,
-               '%s.dll.lib' % lib,
-               '%s.dll.a' % lib,
-               'lib%s_d.lib' % lib,
-               '%s_d.lib' % lib,
-               '%s.lib' %lib,
-               ]
-
-       libnames=static_libs
-       if not is_static:
-               libnames=dynamic_libs + static_libs
-
-       for path in _libpaths:
-               for libn in libnames:
-                       if os.path.exists(os.path.join(path, libn)):
-                               debug('msvc: lib found: %s', os.path.join(path,libn))
-                               return re.sub('\.lib$', '',libn)
-
-       #if no lib can be found, just return the libname as msvc expects it
-       if mandatory:
-               self.fatal("The library %r could not be found" % libname)
-       return re.sub('\.lib$', '', libname)
-
-@conf
-def check_lib_msvc(self, libname, is_static=False, uselib_store=None, mandatory=False):
-       "This is the api to use"
-       libn = self.libname_msvc(libname, is_static, mandatory)
-
-       if not uselib_store:
-               uselib_store = libname.upper()
-
-       # Note: ideally we should be able to place the lib in the right env var, either STATICLIB or LIB,
-       # but we don't distinguish static libs from shared libs.
-       # This is ok since msvc doesn't have any special linker flag to select static libs (no env['STATICLIB_MARKER'])
-       if False and is_static: # disabled
-               self.env['STATICLIB_' + uselib_store] = [libn]
-       else:
-               self.env['LIB_' + uselib_store] = [libn]
-
-@conf
-def check_libs_msvc(self, libnames, is_static=False, mandatory=False):
-       for libname in Utils.to_list(libnames):
-               self.check_lib_msvc(libname, is_static, mandatory=mandatory)
-
-@conftest
-def no_autodetect(conf):
-       conf.eval_rules(detect.replace('autodetect', ''))
-
-
-detect = '''
-autodetect
-find_msvc
-msvc_common_flags
-cc_load_tools
-cxx_load_tools
-cc_add_flags
-cxx_add_flags
-link_add_flags
-'''
-
-@conftest
-def autodetect(conf):
-       v = conf.env
-       compiler, version, path, includes, libdirs = detect_msvc(conf)
-       v['PATH'] = path
-       v['CPPPATH'] = includes
-       v['LIBPATH'] = libdirs
-       v['MSVC_COMPILER'] = compiler
-
-def _get_prog_names(conf, compiler):
-       if compiler=='intel':
-               compiler_name = 'ICL'
-               linker_name = 'XILINK'
-               lib_name = 'XILIB'
-       else:
-               # assumes CL.exe
-               compiler_name = 'CL'
-               linker_name = 'LINK'
-               lib_name = 'LIB'
-       return compiler_name, linker_name, lib_name
-
-@conftest
-def find_msvc(conf):
-       # due to path format limitations, limit operation only to native Win32. Yeah it sucks.
-       if sys.platform != 'win32':
-               conf.fatal('MSVC module only works under native Win32 Python! cygwin is not supported yet')
-
-       v = conf.env
-
-       compiler, version, path, includes, libdirs = detect_msvc(conf)
-
-       compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
-       has_msvc_manifest = (compiler == 'msvc' and float(version) >= 8) or (compiler == 'wsdk' and float(version) >= 6)        or (compiler == 'intel' and float(version) >= 11)
-
-       # compiler
-       cxx = None
-       if v.CXX: cxx = v.CXX
-       elif 'CXX' in conf.environ: cxx = conf.environ['CXX']
-       if not cxx: cxx = conf.find_program(compiler_name, var='CXX', path_list=path, mandatory=True)
-       cxx = conf.cmd_to_list(cxx)
-
-       # before setting anything, check if the compiler is really msvc
-       env = dict(conf.environ)
-       env.update(PATH = ';'.join(path))
-       if not Utils.cmd_output([cxx, '/nologo', '/?'], silent=True, env=env):
-               conf.fatal('the msvc compiler could not be identified')
-
-       link = v.LINK_CXX
-       if not link:
-               link = conf.find_program(linker_name, path_list=path, mandatory=True)
-       ar = v.AR
-       if not ar:
-               ar = conf.find_program(lib_name, path_list=path, mandatory=True)
-
-       # manifest tool. Not required for VS 2003 and below. Must have for VS 2005 and later
-       mt = v.MT
-       if has_msvc_manifest:
-               mt = conf.find_program('MT', path_list=path, mandatory=True)
-
-       # no more possibility of failure means the data state will be consistent
-       # we may store the data safely now
-
-       v.MSVC_MANIFEST = has_msvc_manifest
-       v.PATH = path
-       v.CPPPATH = includes
-       v.LIBPATH = libdirs
-
-       # c/c++ compiler
-       v.CC = v.CXX = cxx
-       v.CC_NAME = v.CXX_NAME = 'msvc'
-
-       v.LINK = v.LINK_CXX = link
-       if not v.LINK_CC:
-               v.LINK_CC = v.LINK_CXX
-
-       v.AR = ar
-       v.MT = mt
-       v.MTFLAGS = v.ARFLAGS = ['/NOLOGO']
-
-
-       conf.check_tool('winres')
-
-       if not conf.env.WINRC:
-               warn('Resource compiler not found. Compiling resource file is disabled')
-
-       # environment flags
-       try: v.prepend_value('CPPPATH', conf.environ['INCLUDE'])
-       except KeyError: pass
-       try: v.prepend_value('LIBPATH', conf.environ['LIB'])
-       except KeyError: pass
-
-@conftest
-def msvc_common_flags(conf):
-       v = conf.env
-
-       v['CPPFLAGS']     = ['/W3', '/nologo']
-
-       v['CCDEFINES_ST']     = '/D%s'
-       v['CXXDEFINES_ST']    = '/D%s'
-
-       # TODO just use _WIN32, which defined by the compiler itself!
-       v['CCDEFINES']    = ['WIN32'] # avoid using this, any compiler predefines the _WIN32 marcro anyway
-       v['CXXDEFINES']   = ['WIN32'] # avoid using this, any compiler predefines the _WIN32 marcro anyway
-
-       v['_CCINCFLAGS']  = []
-       v['_CCDEFFLAGS']  = []
-       v['_CXXINCFLAGS'] = []
-       v['_CXXDEFFLAGS'] = []
-
-       v['CC_SRC_F']     = ''
-       v['CC_TGT_F']     = ['/c', '/Fo']
-       v['CXX_SRC_F']    = ''
-       v['CXX_TGT_F']    = ['/c', '/Fo']
-
-       v['CPPPATH_ST']   = '/I%s' # template for adding include paths
-
-       v['AR_TGT_F'] = v['CCLNK_TGT_F'] = v['CXXLNK_TGT_F'] = '/OUT:'
-
-       # Subsystem specific flags
-       v['CPPFLAGS_CONSOLE']   = ['/SUBSYSTEM:CONSOLE']
-       v['CPPFLAGS_NATIVE']    = ['/SUBSYSTEM:NATIVE']
-       v['CPPFLAGS_POSIX']     = ['/SUBSYSTEM:POSIX']
-       v['CPPFLAGS_WINDOWS']   = ['/SUBSYSTEM:WINDOWS']
-       v['CPPFLAGS_WINDOWSCE'] = ['/SUBSYSTEM:WINDOWSCE']
-
-       # CRT specific flags
-       v['CPPFLAGS_CRT_MULTITHREADED'] = ['/MT']
-       v['CPPFLAGS_CRT_MULTITHREADED_DLL'] = ['/MD']
-
-       # TODO these are defined by the compiler itself!
-       v['CPPDEFINES_CRT_MULTITHREADED'] = ['_MT'] # this is defined by the compiler itself!
-       v['CPPDEFINES_CRT_MULTITHREADED_DLL'] = ['_MT', '_DLL'] # these are defined by the compiler itself!
-
-       v['CPPFLAGS_CRT_MULTITHREADED_DBG'] = ['/MTd']
-       v['CPPFLAGS_CRT_MULTITHREADED_DLL_DBG'] = ['/MDd']
-
-       # TODO these are defined by the compiler itself!
-       v['CPPDEFINES_CRT_MULTITHREADED_DBG'] = ['_DEBUG', '_MT'] # these are defined by the compiler itself!
-       v['CPPDEFINES_CRT_MULTITHREADED_DLL_DBG'] = ['_DEBUG', '_MT', '_DLL'] # these are defined by the compiler itself!
-
-       # compiler debug levels
-       v['CCFLAGS']            = ['/TC']
-       v['CCFLAGS_OPTIMIZED']  = ['/O2', '/DNDEBUG']
-       v['CCFLAGS_RELEASE']    = ['/O2', '/DNDEBUG']
-       v['CCFLAGS_DEBUG']      = ['/Od', '/RTC1', '/ZI']
-       v['CCFLAGS_ULTRADEBUG'] = ['/Od', '/RTC1', '/ZI']
-
-       v['CXXFLAGS']            = ['/TP', '/EHsc']
-       v['CXXFLAGS_OPTIMIZED']  = ['/O2', '/DNDEBUG']
-       v['CXXFLAGS_RELEASE']    = ['/O2', '/DNDEBUG']
-
-       v['CXXFLAGS_DEBUG']      = ['/Od', '/RTC1', '/ZI']
-       v['CXXFLAGS_ULTRADEBUG'] = ['/Od', '/RTC1', '/ZI']
-
-       # linker
-       v['LIB']              = []
-
-       v['LIB_ST']           = '%s.lib' # template for adding libs
-       v['LIBPATH_ST']       = '/LIBPATH:%s' # template for adding libpaths
-       v['STATICLIB_ST']     = 'lib%s.lib' # Note: to be able to distinguish between a static lib and a dll import lib, it's a good pratice to name the static lib 'lib%s.lib' and the dll import lib '%s.lib'
-       v['STATICLIBPATH_ST'] = '/LIBPATH:%s'
-
-       v['LINKFLAGS'] = ['/NOLOGO']
-       if v['MSVC_MANIFEST']:
-               v.append_value('LINKFLAGS', '/MANIFEST')
-       v['LINKFLAGS_DEBUG']      = ['/DEBUG']
-       v['LINKFLAGS_ULTRADEBUG'] = ['/DEBUG']
-
-       # shared library
-       v['shlib_CCFLAGS']  = ['']
-       v['shlib_CXXFLAGS'] = ['']
-       v['shlib_LINKFLAGS']= ['/DLL']
-       v['shlib_PATTERN']  = '%s.dll'
-       v['implib_PATTERN'] = '%s.lib'
-       v['IMPLIB_ST']      = '/IMPLIB:%s'
-
-       # static library
-       v['staticlib_LINKFLAGS'] = ['']
-       v['staticlib_PATTERN']   = 'lib%s.lib' # Note: to be able to distinguish between a static lib and a dll import lib, it's a good pratice to name the static lib 'lib%s.lib' and the dll import lib '%s.lib'
-
-       # program
-       v['program_PATTERN']     = '%s.exe'
-
-
-#######################################################################################################
-##### conf above, build below
-
-@after('apply_link')
-@feature('c', 'cc', 'cxx')
-def apply_flags_msvc(self):
-       if self.env.CC_NAME != 'msvc' or not self.link_task:
-               return
-
-       subsystem = getattr(self, 'subsystem', '')
-       if subsystem:
-               subsystem = '/subsystem:%s' % subsystem
-               flags = 'cstaticlib' in self.features and 'ARFLAGS' or 'LINKFLAGS'
-               self.env.append_value(flags, subsystem)
-
-       if getattr(self, 'link_task', None) and not 'cstaticlib' in self.features:
-               for f in self.env.LINKFLAGS:
-                       d = f.lower()
-                       if d[1:] == 'debug':
-                               pdbnode = self.link_task.outputs[0].change_ext('.pdb')
-                               pdbfile = pdbnode.bldpath(self.env)
-                               self.link_task.outputs.append(pdbnode)
-                               self.bld.install_files(self.install_path, [pdbnode], env=self.env)
-                               break
-
-@feature('cprogram', 'cshlib', 'cstaticlib')
-@after('apply_lib_vars')
-@before('apply_obj_vars')
-def apply_obj_vars_msvc(self):
-       if self.env['CC_NAME'] != 'msvc':
-               return
-
-       try:
-               self.meths.remove('apply_obj_vars')
-       except ValueError:
-               pass
-
-       libpaths = getattr(self, 'libpaths', [])
-       if not libpaths: self.libpaths = libpaths
-
-       env = self.env
-       app = env.append_unique
-
-       cpppath_st       = env['CPPPATH_ST']
-       lib_st           = env['LIB_ST']
-       staticlib_st     = env['STATICLIB_ST']
-       libpath_st       = env['LIBPATH_ST']
-       staticlibpath_st = env['STATICLIBPATH_ST']
-
-       for i in env['LIBPATH']:
-               app('LINKFLAGS', libpath_st % i)
-               if not libpaths.count(i):
-                       libpaths.append(i)
-
-       for i in env['LIBPATH']:
-               app('LINKFLAGS', staticlibpath_st % i)
-               if not libpaths.count(i):
-                       libpaths.append(i)
-
-       # i doubt that anyone will make a fully static binary anyway
-       if not env['FULLSTATIC']:
-               if env['STATICLIB'] or env['LIB']:
-                       app('LINKFLAGS', env['SHLIB_MARKER']) # TODO does SHLIB_MARKER work?
-
-       for i in env['STATICLIB']:
-               app('LINKFLAGS', staticlib_st % i)
-
-       for i in env['LIB']:
-               app('LINKFLAGS', lib_st % i)
-
-# split the manifest file processing from the link task, like for the rc processing
-
-@feature('cprogram', 'cshlib')
-@after('apply_link')
-def apply_manifest(self):
-       """Special linker for MSVC with support for embedding manifests into DLL's
-       and executables compiled by Visual Studio 2005 or probably later. Without
-       the manifest file, the binaries are unusable.
-       See: http://msdn2.microsoft.com/en-us/library/ms235542(VS.80).aspx"""
-
-       if self.env.CC_NAME == 'msvc' and self.env.MSVC_MANIFEST:
-               out_node = self.link_task.outputs[0]
-               man_node = out_node.parent.find_or_declare(out_node.name + '.manifest')
-               self.link_task.outputs.append(man_node)
-               self.link_task.do_manifest = True
-
-def exec_mf(self):
-       env = self.env
-       mtool = env['MT']
-       if not mtool:
-               return 0
-
-       self.do_manifest = False
-
-       outfile = self.outputs[0].bldpath(env)
-
-       manifest = None
-       for out_node in self.outputs:
-               if out_node.name.endswith('.manifest'):
-                       manifest = out_node.bldpath(env)
-                       break
-       if manifest is None:
-               # Should never get here.  If we do, it means the manifest file was
-               # never added to the outputs list, thus we don't have a manifest file
-               # to embed, so we just return.
-               return 0
-
-       # embedding mode. Different for EXE's and DLL's.
-       # see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx
-       mode = ''
-       if 'cprogram' in self.generator.features:
-               mode = '1'
-       elif 'cshlib' in self.generator.features:
-               mode = '2'
-
-       debug('msvc: embedding manifest')
-       #flags = ' '.join(env['MTFLAGS'] or [])
-
-       lst = []
-       lst.extend([env['MT']])
-       lst.extend(Utils.to_list(env['MTFLAGS']))
-       lst.extend(Utils.to_list("-manifest"))
-       lst.extend(Utils.to_list(manifest))
-       lst.extend(Utils.to_list("-outputresource:%s;%s" % (outfile, mode)))
-
-       #cmd='%s %s -manifest "%s" -outputresource:"%s";#%s' % (mtool, flags,
-       #       manifest, outfile, mode)
-       lst = [lst]
-       return self.exec_command(*lst)
-
-########## stupid evil command modification: concatenate the tokens /Fx, /doc, and /x: with the next token
-
-def exec_command_msvc(self, *k, **kw):
-       "instead of quoting all the paths and keep using the shell, we can just join the options msvc is interested in"
-       if self.env['CC_NAME'] == 'msvc':
-               if isinstance(k[0], list):
-                       lst = []
-                       carry = ''
-                       for a in k[0]:
-                               if len(a) == 3 and a.startswith('/F') or a == '/doc' or a[-1] == ':':
-                                       carry = a
-                               else:
-                                       lst.append(carry + a)
-                                       carry = ''
-                       k = [lst]
-
-               env = dict(os.environ)
-               env.update(PATH = ';'.join(self.env['PATH']))
-               kw['env'] = env
-
-       ret = self.generator.bld.exec_command(*k, **kw)
-       if ret: return ret
-       if getattr(self, 'do_manifest', None):
-               ret = exec_mf(self)
-       return ret
-
-for k in 'cc cxx winrc cc_link cxx_link static_link qxx'.split():
-       cls = Task.TaskBase.classes.get(k, None)
-       if cls:
-               cls.exec_command = exec_command_msvc
diff --git a/third_party/waf/wafadmin/Tools/nasm.py b/third_party/waf/wafadmin/Tools/nasm.py
deleted file mode 100644 (file)
index 43b73a7..0000000
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2008
-
-"""
-Nasm processing
-"""
-
-import os
-import TaskGen, Task, Utils
-from TaskGen import taskgen, before, extension
-
-nasm_str = '${NASM} ${NASM_FLAGS} ${NASM_INCLUDES} ${SRC} -o ${TGT}'
-
-EXT_NASM = ['.s', '.S', '.asm', '.ASM', '.spp', '.SPP']
-
-@before('apply_link')
-def apply_nasm_vars(self):
-
-       # flags
-       if hasattr(self, 'nasm_flags'):
-               for flag in self.to_list(self.nasm_flags):
-                       self.env.append_value('NASM_FLAGS', flag)
-
-       # includes - well, if we suppose it works with c processing
-       if hasattr(self, 'includes'):
-               for inc in self.to_list(self.includes):
-                       node = self.path.find_dir(inc)
-                       if not node:
-                               raise Utils.WafError('cannot find the dir' + inc)
-                       self.env.append_value('NASM_INCLUDES', '-I%s' % node.srcpath(self.env))
-                       self.env.append_value('NASM_INCLUDES', '-I%s' % node.bldpath(self.env))
-
-@extension(EXT_NASM)
-def nasm_file(self, node):
-       try: obj_ext = self.obj_ext
-       except AttributeError: obj_ext = '_%d.o' % self.idx
-
-       task = self.create_task('nasm', node, node.change_ext(obj_ext))
-       self.compiled_tasks.append(task)
-
-       self.meths.append('apply_nasm_vars')
-
-# create our action here
-Task.simple_task_type('nasm', nasm_str, color='BLUE', ext_out='.o', shell=False)
-
-def detect(conf):
-       nasm = conf.find_program(['nasm', 'yasm'], var='NASM', mandatory=True)
diff --git a/third_party/waf/wafadmin/Tools/ocaml.py b/third_party/waf/wafadmin/Tools/ocaml.py
deleted file mode 100644 (file)
index a0667a4..0000000
+++ /dev/null
@@ -1,297 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006 (ita)
-
-"ocaml support"
-
-import os, re
-import TaskGen, Utils, Task, Build
-from Logs import error
-from TaskGen import taskgen, feature, before, after, extension
-
-EXT_MLL = ['.mll']
-EXT_MLY = ['.mly']
-EXT_MLI = ['.mli']
-EXT_MLC = ['.c']
-EXT_ML  = ['.ml']
-
-open_re = re.compile('^\s*open\s+([a-zA-Z]+)(;;){0,1}$', re.M)
-foo = re.compile(r"""(\(\*)|(\*\))|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^()*"'\\]*)""", re.M)
-def filter_comments(txt):
-       meh = [0]
-       def repl(m):
-               if m.group(1): meh[0] += 1
-               elif m.group(2): meh[0] -= 1
-               elif not meh[0]: return m.group(0)
-               return ''
-       return foo.sub(repl, txt)
-
-def scan(self):
-       node = self.inputs[0]
-       code = filter_comments(node.read(self.env))
-
-       global open_re
-       names = []
-       import_iterator = open_re.finditer(code)
-       if import_iterator:
-               for import_match in import_iterator:
-                       names.append(import_match.group(1))
-       found_lst = []
-       raw_lst = []
-       for name in names:
-               nd = None
-               for x in self.incpaths:
-                       nd = x.find_resource(name.lower()+'.ml')
-                       if not nd: nd = x.find_resource(name+'.ml')
-                       if nd:
-                               found_lst.append(nd)
-                               break
-               else:
-                       raw_lst.append(name)
-
-       return (found_lst, raw_lst)
-
-native_lst=['native', 'all', 'c_object']
-bytecode_lst=['bytecode', 'all']
-class ocaml_taskgen(TaskGen.task_gen):
-       def __init__(self, *k, **kw):
-               TaskGen.task_gen.__init__(self, *k, **kw)
-
-@feature('ocaml')
-def init_ml(self):
-       Utils.def_attrs(self,
-               type = 'all',
-               incpaths_lst = [],
-               bld_incpaths_lst = [],
-               mlltasks = [],
-               mlytasks = [],
-               mlitasks = [],
-               native_tasks = [],
-               bytecode_tasks = [],
-               linktasks = [],
-               bytecode_env = None,
-               native_env = None,
-               compiled_tasks = [],
-               includes = '',
-               uselib = '',
-               are_deps_set = 0)
-
-@feature('ocaml')
-@after('init_ml')
-def init_envs_ml(self):
-
-       self.islibrary = getattr(self, 'islibrary', False)
-
-       global native_lst, bytecode_lst
-       self.native_env = None
-       if self.type in native_lst:
-               self.native_env = self.env.copy()
-               if self.islibrary: self.native_env['OCALINKFLAGS']   = '-a'
-
-       self.bytecode_env = None
-       if self.type in bytecode_lst:
-               self.bytecode_env = self.env.copy()
-               if self.islibrary: self.bytecode_env['OCALINKFLAGS'] = '-a'
-
-       if self.type == 'c_object':
-               self.native_env.append_unique('OCALINKFLAGS_OPT', '-output-obj')
-
-@feature('ocaml')
-@before('apply_vars_ml')
-@after('init_envs_ml')
-def apply_incpaths_ml(self):
-       inc_lst = self.includes.split()
-       lst = self.incpaths_lst
-       for dir in inc_lst:
-               node = self.path.find_dir(dir)
-               if not node:
-                       error("node not found: " + str(dir))
-                       continue
-               self.bld.rescan(node)
-               if not node in lst: lst.append(node)
-               self.bld_incpaths_lst.append(node)
-       # now the nodes are added to self.incpaths_lst
-
-@feature('ocaml')
-@before('apply_core')
-def apply_vars_ml(self):
-       for i in self.incpaths_lst:
-               if self.bytecode_env:
-                       app = self.bytecode_env.append_value
-                       app('OCAMLPATH', '-I')
-                       app('OCAMLPATH', i.srcpath(self.env))
-                       app('OCAMLPATH', '-I')
-                       app('OCAMLPATH', i.bldpath(self.env))
-
-               if self.native_env:
-                       app = self.native_env.append_value
-                       app('OCAMLPATH', '-I')
-                       app('OCAMLPATH', i.bldpath(self.env))
-                       app('OCAMLPATH', '-I')
-                       app('OCAMLPATH', i.srcpath(self.env))
-
-       varnames = ['INCLUDES', 'OCAMLFLAGS', 'OCALINKFLAGS', 'OCALINKFLAGS_OPT']
-       for name in self.uselib.split():
-               for vname in varnames:
-                       cnt = self.env[vname+'_'+name]
-                       if cnt:
-                               if self.bytecode_env: self.bytecode_env.append_value(vname, cnt)
-                               if self.native_env: self.native_env.append_value(vname, cnt)
-
-@feature('ocaml')
-@after('apply_core')
-def apply_link_ml(self):
-
-       if self.bytecode_env:
-               ext = self.islibrary and '.cma' or '.run'
-
-               linktask = self.create_task('ocalink')
-               linktask.bytecode = 1
-               linktask.set_outputs(self.path.find_or_declare(self.target + ext))
-               linktask.obj = self
-               linktask.env = self.bytecode_env
-               self.linktasks.append(linktask)
-
-       if self.native_env:
-               if self.type == 'c_object': ext = '.o'
-               elif self.islibrary: ext = '.cmxa'
-               else: ext = ''
-
-               linktask = self.create_task('ocalinkx')
-               linktask.set_outputs(self.path.find_or_declare(self.target + ext))
-               linktask.obj = self
-               linktask.env = self.native_env
-               self.linktasks.append(linktask)
-
-               # we produce a .o file to be used by gcc
-               self.compiled_tasks.append(linktask)
-
-@extension(EXT_MLL)
-def mll_hook(self, node):
-       mll_task = self.create_task('ocamllex', node, node.change_ext('.ml'), env=self.native_env)
-       self.mlltasks.append(mll_task)
-
-       self.allnodes.append(mll_task.outputs[0])
-
-@extension(EXT_MLY)
-def mly_hook(self, node):
-       mly_task = self.create_task('ocamlyacc', node, [node.change_ext('.ml'), node.change_ext('.mli')], env=self.native_env)
-       self.mlytasks.append(mly_task)
-       self.allnodes.append(mly_task.outputs[0])
-
-       task = self.create_task('ocamlcmi', mly_task.outputs[1], mly_task.outputs[1].change_ext('.cmi'), env=self.native_env)
-
-@extension(EXT_MLI)
-def mli_hook(self, node):
-       task = self.create_task('ocamlcmi', node, node.change_ext('.cmi'), env=self.native_env)
-       self.mlitasks.append(task)
-
-@extension(EXT_MLC)
-def mlc_hook(self, node):
-       task = self.create_task('ocamlcc', node, node.change_ext('.o'), env=self.native_env)
-       self.compiled_tasks.append(task)
-
-@extension(EXT_ML)
-def ml_hook(self, node):
-       if self.native_env:
-               task = self.create_task('ocamlx', node, node.change_ext('.cmx'), env=self.native_env)
-               task.obj = self
-               task.incpaths = self.bld_incpaths_lst
-               self.native_tasks.append(task)
-
-       if self.bytecode_env:
-               task = self.create_task('ocaml', node, node.change_ext('.cmo'), env=self.bytecode_env)
-               task.obj = self
-               task.bytecode = 1
-               task.incpaths = self.bld_incpaths_lst
-               self.bytecode_tasks.append(task)
-
-def compile_may_start(self):
-       if not getattr(self, 'flag_deps', ''):
-               self.flag_deps = 1
-
-               # the evil part is that we can only compute the dependencies after the
-               # source files can be read (this means actually producing the source files)
-               if getattr(self, 'bytecode', ''): alltasks = self.obj.bytecode_tasks
-               else: alltasks = self.obj.native_tasks
-
-               self.signature() # ensure that files are scanned - unfortunately
-               tree = self.generator.bld
-               env = self.env
-               for node in self.inputs:
-                       lst = tree.node_deps[self.unique_id()]
-                       for depnode in lst:
-                               for t in alltasks:
-                                       if t == self: continue
-                                       if depnode in t.inputs:
-                                               self.set_run_after(t)
-
-               # TODO necessary to get the signature right - for now
-               delattr(self, 'cache_sig')
-               self.signature()
-
-       return Task.Task.runnable_status(self)
-
-b = Task.simple_task_type
-cls = b('ocamlx', '${OCAMLOPT} ${OCAMLPATH} ${OCAMLFLAGS} ${INCLUDES} -c -o ${TGT} ${SRC}', color='GREEN', shell=False)
-cls.runnable_status = compile_may_start
-cls.scan = scan
-
-b = Task.simple_task_type
-cls = b('ocaml', '${OCAMLC} ${OCAMLPATH} ${OCAMLFLAGS} ${INCLUDES} -c -o ${TGT} ${SRC}', color='GREEN', shell=False)
-cls.runnable_status = compile_may_start
-cls.scan = scan
-
-
-b('ocamlcmi', '${OCAMLC} ${OCAMLPATH} ${INCLUDES} -o ${TGT} -c ${SRC}', color='BLUE', before="ocaml ocamlcc ocamlx")
-b('ocamlcc', 'cd ${TGT[0].bld_dir(env)} && ${OCAMLOPT} ${OCAMLFLAGS} ${OCAMLPATH} ${INCLUDES} -c ${SRC[0].abspath(env)}', color='GREEN')
-
-b('ocamllex', '${OCAMLLEX} ${SRC} -o ${TGT}', color='BLUE', before="ocamlcmi ocaml ocamlcc")
-b('ocamlyacc', '${OCAMLYACC} -b ${TGT[0].bld_base(env)} ${SRC}', color='BLUE', before="ocamlcmi ocaml ocamlcc")
-
-
-def link_may_start(self):
-       if not getattr(self, 'order', ''):
-
-               # now reorder the inputs given the task dependencies
-               if getattr(self, 'bytecode', 0): alltasks = self.obj.bytecode_tasks
-               else: alltasks = self.obj.native_tasks
-
-               # this part is difficult, we do not have a total order on the tasks
-               # if the dependencies are wrong, this may not stop
-               seen = []
-               pendant = []+alltasks
-               while pendant:
-                       task = pendant.pop(0)
-                       if task in seen: continue
-                       for x in task.run_after:
-                               if not x in seen:
-                                       pendant.append(task)
-                                       break
-                       else:
-                               seen.append(task)
-               self.inputs = [x.outputs[0] for x in seen]
-               self.order = 1
-       return Task.Task.runnable_status(self)
-
-act = b('ocalink', '${OCAMLC} -o ${TGT} ${INCLUDES} ${OCALINKFLAGS} ${SRC}', color='YELLOW', after="ocaml ocamlcc")
-act.runnable_status = link_may_start
-act = b('ocalinkx', '${OCAMLOPT} -o ${TGT} ${INCLUDES} ${OCALINKFLAGS_OPT} ${SRC}', color='YELLOW', after="ocamlx ocamlcc")
-act.runnable_status = link_may_start
-
-def detect(conf):
-       opt = conf.find_program('ocamlopt', var='OCAMLOPT')
-       occ = conf.find_program('ocamlc', var='OCAMLC')
-       if (not opt) or (not occ):
-               conf.fatal('The objective caml compiler was not found:\ninstall it or make it available in your PATH')
-
-       v = conf.env
-       v['OCAMLC']       = occ
-       v['OCAMLOPT']     = opt
-       v['OCAMLLEX']     = conf.find_program('ocamllex', var='OCAMLLEX')
-       v['OCAMLYACC']    = conf.find_program('ocamlyacc', var='OCAMLYACC')
-       v['OCAMLFLAGS']   = ''
-       v['OCAMLLIB']     = Utils.cmd_output(conf.env['OCAMLC']+' -where').strip()+os.sep
-       v['LIBPATH_OCAML'] = Utils.cmd_output(conf.env['OCAMLC']+' -where').strip()+os.sep
-       v['CPPPATH_OCAML'] = Utils.cmd_output(conf.env['OCAMLC']+' -where').strip()+os.sep
-       v['LIB_OCAML'] = 'camlrun'
diff --git a/third_party/waf/wafadmin/Tools/osx.py b/third_party/waf/wafadmin/Tools/osx.py
deleted file mode 100644 (file)
index 95184ee..0000000
+++ /dev/null
@@ -1,187 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy 2008
-
-"""MacOSX related tools
-
-To compile an executable into a Mac application bundle (a .app), set its 'mac_app' attribute
-  obj.mac_app = True
-
-To make a bundled shared library (a .bundle), set the 'mac_bundle' attribute:
-  obj.mac_bundle = True
-"""
-
-import os, shutil, sys, platform
-import TaskGen, Task, Build, Options, Utils
-from TaskGen import taskgen, feature, after, before
-from Logs import error, debug
-
-# plist template
-app_info = '''
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist SYSTEM "file://localhost/System/Library/DTDs/PropertyList.dtd">
-<plist version="0.9">
-<dict>
-       <key>CFBundlePackageType</key>
-       <string>APPL</string>
-       <key>CFBundleGetInfoString</key>
-       <string>Created by Waf</string>
-       <key>CFBundleSignature</key>
-       <string>????</string>
-       <key>NOTE</key>
-       <string>THIS IS A GENERATED FILE, DO NOT MODIFY</string>
-       <key>CFBundleExecutable</key>
-       <string>%s</string>
-</dict>
-</plist>
-'''
-
-# see WAF issue 285
-# and also http://trac.macports.org/ticket/17059
-@feature('c', 'cc', 'cxx')
-@before('apply_lib_vars')
-def set_macosx_deployment_target(self):
-       if self.env['MACOSX_DEPLOYMENT_TARGET']:
-               os.environ['MACOSX_DEPLOYMENT_TARGET'] = self.env['MACOSX_DEPLOYMENT_TARGET']
-       elif 'MACOSX_DEPLOYMENT_TARGET' not in os.environ:
-               if sys.platform == 'darwin':
-                       os.environ['MACOSX_DEPLOYMENT_TARGET'] = '.'.join(platform.mac_ver()[0].split('.')[:2])
-
-@feature('c', 'cc', 'cxx')
-@after('apply_lib_vars')
-def apply_framework(self):
-       for x in self.to_list(self.env['FRAMEWORKPATH']):
-               frameworkpath_st = '-F%s'
-               self.env.append_unique('CXXFLAGS', frameworkpath_st % x)
-               self.env.append_unique('CCFLAGS', frameworkpath_st % x)
-               self.env.append_unique('LINKFLAGS', frameworkpath_st % x)
-
-       for x in self.to_list(self.env['FRAMEWORK']):
-               self.env.append_value('LINKFLAGS', ['-framework', x])
-
-@taskgen
-def create_bundle_dirs(self, name, out):
-       bld = self.bld
-       dir = out.parent.get_dir(name)
-
-       if not dir:
-               dir = out.__class__(name, out.parent, 1)
-               bld.rescan(dir)
-               contents = out.__class__('Contents', dir, 1)
-               bld.rescan(contents)
-               macos = out.__class__('MacOS', contents, 1)
-               bld.rescan(macos)
-       return dir
-
-def bundle_name_for_output(out):
-       name = out.name
-       k = name.rfind('.')
-       if k >= 0:
-               name = name[:k] + '.app'
-       else:
-               name = name + '.app'
-       return name
-
-@taskgen
-@after('apply_link')
-@feature('cprogram')
-def create_task_macapp(self):
-       """Use env['MACAPP'] to force *all* executables to be transformed into Mac applications
-       or use obj.mac_app = True to build specific targets as Mac apps"""
-       if self.env['MACAPP'] or getattr(self, 'mac_app', False):
-               apptask = self.create_task('macapp')
-               apptask.set_inputs(self.link_task.outputs)
-
-               out = self.link_task.outputs[0]
-
-               name = bundle_name_for_output(out)
-               dir = self.create_bundle_dirs(name, out)
-
-               n1 = dir.find_or_declare(['Contents', 'MacOS', out.name])
-
-               apptask.set_outputs([n1])
-               apptask.chmod = 0755
-               apptask.install_path = os.path.join(self.install_path, name, 'Contents', 'MacOS')
-               self.apptask = apptask
-
-@after('apply_link')
-@feature('cprogram')
-def create_task_macplist(self):
-       """Use env['MACAPP'] to force *all* executables to be transformed into Mac applications
-       or use obj.mac_app = True to build specific targets as Mac apps"""
-       if  self.env['MACAPP'] or getattr(self, 'mac_app', False):
-               # check if the user specified a plist before using our template
-               if not getattr(self, 'mac_plist', False):
-                       self.mac_plist = app_info
-
-               plisttask = self.create_task('macplist')
-               plisttask.set_inputs(self.link_task.outputs)
-
-               out = self.link_task.outputs[0]
-               self.mac_plist = self.mac_plist % (out.name)
-
-               name = bundle_name_for_output(out)
-               dir = self.create_bundle_dirs(name, out)
-
-               n1 = dir.find_or_declare(['Contents', 'Info.plist'])
-
-               plisttask.set_outputs([n1])
-               plisttask.mac_plist = self.mac_plist
-               plisttask.install_path = os.path.join(self.install_path, name, 'Contents')
-               self.plisttask = plisttask
-
-@after('apply_link')
-@feature('cshlib')
-def apply_link_osx(self):
-       name = self.link_task.outputs[0].name
-       if not self.install_path:
-               return
-       if getattr(self, 'vnum', None):
-               name = name.replace('.dylib', '.%s.dylib' % self.vnum)
-
-       path = os.path.join(Utils.subst_vars(self.install_path, self.env), name)
-       if '-dynamiclib' in self.env['LINKFLAGS']:
-               self.env.append_value('LINKFLAGS', '-install_name')
-               self.env.append_value('LINKFLAGS', path)
-
-@before('apply_link', 'apply_lib_vars')
-@feature('c', 'cc', 'cxx')
-def apply_bundle(self):
-       """use env['MACBUNDLE'] to force all shlibs into mac bundles
-       or use obj.mac_bundle = True for specific targets only"""
-       if not ('cshlib' in self.features or 'shlib' in self.features): return
-       if self.env['MACBUNDLE'] or getattr(self, 'mac_bundle', False):
-               self.env['shlib_PATTERN'] = self.env['macbundle_PATTERN']
-               uselib = self.uselib = self.to_list(self.uselib)
-               if not 'MACBUNDLE' in uselib: uselib.append('MACBUNDLE')
-
-@after('apply_link')
-@feature('cshlib')
-def apply_bundle_remove_dynamiclib(self):
-       if self.env['MACBUNDLE'] or getattr(self, 'mac_bundle', False):
-               if not getattr(self, 'vnum', None):
-                       try:
-                               self.env['LINKFLAGS'].remove('-dynamiclib')
-                               self.env['LINKFLAGS'].remove('-single_module')
-                       except ValueError:
-                               pass
-
-# TODO REMOVE IN 1.6 (global variable)
-app_dirs = ['Contents', 'Contents/MacOS', 'Contents/Resources']
-
-def app_build(task):
-       env = task.env
-       shutil.copy2(task.inputs[0].srcpath(env), task.outputs[0].abspath(env))
-
-       return 0
-
-def plist_build(task):
-       env = task.env
-       f = open(task.outputs[0].abspath(env), "w")
-       f.write(task.mac_plist)
-       f.close()
-
-       return 0
-
-Task.task_type_from_func('macapp', vars=[], func=app_build, after="cxx_link cc_link static_link")
-Task.task_type_from_func('macplist', vars=[], func=plist_build, after="cxx_link cc_link static_link")
diff --git a/third_party/waf/wafadmin/Tools/perl.py b/third_party/waf/wafadmin/Tools/perl.py
deleted file mode 100644 (file)
index 85105ea..0000000
+++ /dev/null
@@ -1,108 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# andersg at 0x63.nu 2007
-
-import os
-import Task, Options, Utils
-from Configure import conf
-from TaskGen import extension, taskgen, feature, before
-
-xsubpp_str = '${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}'
-EXT_XS = ['.xs']
-
-@before('apply_incpaths', 'apply_type_vars', 'apply_lib_vars')
-@feature('perlext')
-def init_perlext(self):
-       self.uselib = self.to_list(getattr(self, 'uselib', ''))
-       if not 'PERL' in self.uselib: self.uselib.append('PERL')
-       if not 'PERLEXT' in self.uselib: self.uselib.append('PERLEXT')
-       self.env['shlib_PATTERN'] = self.env['perlext_PATTERN']
-
-@extension(EXT_XS)
-def xsubpp_file(self, node):
-       outnode = node.change_ext('.c')
-       self.create_task('xsubpp', node, outnode)
-       self.allnodes.append(outnode)
-
-Task.simple_task_type('xsubpp', xsubpp_str, color='BLUE', before='cc cxx', shell=False)
-
-@conf
-def check_perl_version(conf, minver=None):
-       """
-       Checks if perl is installed.
-
-       If installed the variable PERL will be set in environment.
-
-       Perl binary can be overridden by --with-perl-binary config variable
-
-       """
-
-       if getattr(Options.options, 'perlbinary', None):
-               conf.env.PERL = Options.options.perlbinary
-       else:
-               conf.find_program('perl', var='PERL', mandatory=True)
-
-       try:
-               version = Utils.cmd_output([conf.env.PERL, '-e', 'printf "%vd",$^V'])
-       except:
-               conf.fatal('could not determine the perl version')
-
-       conf.env.PERL_VERSION = version
-       cver = ''
-       if minver:
-               try:
-                       ver = tuple(map(int, version.split('.')))
-               except:
-                       conf.fatal('unsupported perl version %r' % version)
-               if ver < minver:
-                       conf.fatal('perl is too old')
-
-               cver = '.'.join(map(str,minver))
-       conf.check_message('perl', cver, True, version)
-
-@conf
-def check_perl_module(conf, module):
-       """
-       Check if specified perlmodule is installed.
-
-       Minimum version can be specified by specifying it after modulename
-       like this:
-
-       conf.check_perl_module("Some::Module 2.92")
-       """
-       cmd = [conf.env['PERL'], '-e', 'use %s' % module]
-       r = Utils.pproc.call(cmd, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE) == 0
-       conf.check_message("perl module %s" % module, "", r)
-       return r
-
-@conf
-def check_perl_ext_devel(conf):
-       """
-       Check for configuration needed to build perl extensions.
-
-       Sets different xxx_PERLEXT variables in the environment.
-
-       Also sets the ARCHDIR_PERL variable useful as installation path,
-       which can be overridden by --with-perl-archdir
-       """
-       if not conf.env.PERL:
-               conf.fatal('perl detection is required first')
-
-       def read_out(cmd):
-               return Utils.to_list(Utils.cmd_output([conf.env.PERL, '-MConfig', '-e', cmd]))
-
-       conf.env.LINKFLAGS_PERLEXT = read_out('print $Config{lddlflags}')
-       conf.env.CPPPATH_PERLEXT   = read_out('print "$Config{archlib}/CORE"')
-       conf.env.CCFLAGS_PERLEXT   = read_out('print "$Config{ccflags} $Config{cccdlflags}"')
-       conf.env.XSUBPP            = read_out('print "$Config{privlib}/ExtUtils/xsubpp$Config{exe_ext}"')
-       conf.env.EXTUTILS_TYPEMAP  = read_out('print "$Config{privlib}/ExtUtils/typemap"')
-       conf.env.perlext_PATTERN   = '%s.' + read_out('print $Config{dlext}')[0]
-
-       if getattr(Options.options, 'perlarchdir', None):
-               conf.env.ARCHDIR_PERL = Options.options.perlarchdir
-       else:
-               conf.env.ARCHDIR_PERL = read_out('print $Config{sitearch}')[0]
-
-def set_options(opt):
-       opt.add_option("--with-perl-binary", type="string", dest="perlbinary", help = 'Specify alternate perl binary', default=None)
-       opt.add_option("--with-perl-archdir", type="string", dest="perlarchdir", help = 'Specify directory where to install arch specific files', default=None)
diff --git a/third_party/waf/wafadmin/Tools/preproc.py b/third_party/waf/wafadmin/Tools/preproc.py
deleted file mode 100644 (file)
index 71eb05a..0000000
+++ /dev/null
@@ -1,837 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2009 (ita)
-
-"""
-C/C++ preprocessor for finding dependencies
-
-Reasons for using the Waf preprocessor by default
-1. Some c/c++ extensions (Qt) require a custom preprocessor for obtaining the dependencies (.moc files)
-2. Not all compilers provide .d files for obtaining the dependencies (portability)
-3. A naive file scanner will not catch the constructs such as "#include foo()"
-4. A naive file scanner will catch unnecessary dependencies (change an unused header -> recompile everything)
-
-Regarding the speed concerns:
-a. the preprocessing is performed only when files must be compiled
-b. the macros are evaluated only for #if/#elif/#include
-c. the time penalty is about 10%
-d. system headers are not scanned
-
-Now if you do not want the Waf preprocessor, the tool "gccdeps" uses the .d files produced
-during the compilation to track the dependencies (useful when used with the boost libraries).
-It only works with gcc though, and it cannot be used with Qt builds. A dumb
-file scanner will be added in the future, so we will have most bahaviours.
-"""
-# TODO: more varargs, pragma once
-# TODO: dumb file scanner tracking all includes
-
-import re, sys, os, string
-import Logs, Build, Utils
-from Logs import debug, error
-import traceback
-
-class PreprocError(Utils.WafError):
-       pass
-
-POPFILE = '-'
-
-
-recursion_limit = 5000
-"do not loop too much on header inclusion"
-
-go_absolute = 0
-"set to 1 to track headers on files in /usr/include - else absolute paths are ignored"
-
-standard_includes = ['/usr/include']
-if sys.platform == "win32":
-       standard_includes = []
-
-use_trigraphs = 0
-'apply the trigraph rules first'
-
-strict_quotes = 0
-"Keep <> for system includes (do not search for those includes)"
-
-g_optrans = {
-'not':'!',
-'and':'&&',
-'bitand':'&',
-'and_eq':'&=',
-'or':'||',
-'bitor':'|',
-'or_eq':'|=',
-'xor':'^',
-'xor_eq':'^=',
-'compl':'~',
-}
-"these ops are for c++, to reset, set an empty dict"
-
-# ignore #warning and #error
-re_lines = re.compile(\
-       '^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',
-       re.IGNORECASE | re.MULTILINE)
-
-re_mac = re.compile("^[a-zA-Z_]\w*")
-re_fun = re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]')
-re_pragma_once = re.compile('^\s*once\s*', re.IGNORECASE)
-re_nl = re.compile('\\\\\r*\n', re.MULTILINE)
-re_cpp = re.compile(
-       r"""(/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)|//[^\n]*|("(?:\\.|[^"\\])*"|'(?:\\.|[^'\\])*'|.[^/"'\\]*)""",
-       re.MULTILINE)
-trig_def = [('??'+a, b) for a, b in zip("=-/!'()<>", r'#~\|^[]{}')]
-chr_esc = {'0':0, 'a':7, 'b':8, 't':9, 'n':10, 'f':11, 'v':12, 'r':13, '\\':92, "'":39}
-
-NUM   = 'i'
-OP    = 'O'
-IDENT = 'T'
-STR   = 's'
-CHAR  = 'c'
-
-tok_types = [NUM, STR, IDENT, OP]
-exp_types = [
-       r"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""",
-       r'L?"([^"\\]|\\.)*"',
-       r'[a-zA-Z_]\w*',
-       r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',
-]
-re_clexer = re.compile('|'.join(["(?P<%s>%s)" % (name, part) for name, part in zip(tok_types, exp_types)]), re.M)
-
-accepted  = 'a'
-ignored   = 'i'
-undefined = 'u'
-skipped   = 's'
-
-def repl(m):
-       if m.group(1):
-               return ' '
-       s = m.group(2)
-       if s is None:
-               return ''
-       return s
-
-def filter_comments(filename):
-       # return a list of tuples : keyword, line
-       code = Utils.readf(filename)
-       if use_trigraphs:
-               for (a, b) in trig_def: code = code.split(a).join(b)
-       code = re_nl.sub('', code)
-       code = re_cpp.sub(repl, code)
-       return [(m.group(2), m.group(3)) for m in re.finditer(re_lines, code)]
-
-prec = {}
-# op -> number, needed for such expressions:   #if 1 && 2 != 0
-ops = ['* / %', '+ -', '<< >>', '< <= >= >', '== !=', '& | ^', '&& ||', ',']
-for x in range(len(ops)):
-       syms = ops[x]
-       for u in syms.split():
-               prec[u] = x
-
-def reduce_nums(val_1, val_2, val_op):
-       """apply arithmetic rules and try to return an integer result"""
-       #print val_1, val_2, val_op
-
-       # now perform the operation, make certain a and b are numeric
-       try:    a = 0 + val_1
-       except TypeError: a = int(val_1)
-       try:    b = 0 + val_2
-       except TypeError: b = int(val_2)
-
-       d = val_op
-       if d == '%':  c = a%b
-       elif d=='+':  c = a+b
-       elif d=='-':  c = a-b
-       elif d=='*':  c = a*b
-       elif d=='/':  c = a/b
-       elif d=='^':  c = a^b
-       elif d=='|':  c = a|b
-       elif d=='||': c = int(a or b)
-       elif d=='&':  c = a&b
-       elif d=='&&': c = int(a and b)
-       elif d=='==': c = int(a == b)
-       elif d=='!=': c = int(a != b)
-       elif d=='<=': c = int(a <= b)
-       elif d=='<':  c = int(a < b)
-       elif d=='>':  c = int(a > b)
-       elif d=='>=': c = int(a >= b)
-       elif d=='^':  c = int(a^b)
-       elif d=='<<': c = a<<b
-       elif d=='>>': c = a>>b
-       else: c = 0
-       return c
-
-def get_num(lst):
-       if not lst: raise PreprocError("empty list for get_num")
-       (p, v) = lst[0]
-       if p == OP:
-               if v == '(':
-                       count_par = 1
-                       i = 1
-                       while i < len(lst):
-                               (p, v) = lst[i]
-
-                               if p == OP:
-                                       if v == ')':
-                                               count_par -= 1
-                                               if count_par == 0:
-                                                       break
-                                       elif v == '(':
-                                               count_par += 1
-                               i += 1
-                       else:
-                               raise PreprocError("rparen expected %r" % lst)
-
-                       (num, _) = get_term(lst[1:i])
-                       return (num, lst[i+1:])
-
-               elif v == '+':
-                       return get_num(lst[1:])
-               elif v == '-':
-                       num, lst = get_num(lst[1:])
-                       return (reduce_nums('-1', num, '*'), lst)
-               elif v == '!':
-                       num, lst = get_num(lst[1:])
-                       return (int(not int(num)), lst)
-               elif v == '~':
-                       return (~ int(num), lst)
-               else:
-                       raise PreprocError("invalid op token %r for get_num" % lst)
-       elif p == NUM:
-               return v, lst[1:]
-       elif p == IDENT:
-               # all macros should have been replaced, remaining identifiers eval to 0
-               return 0, lst[1:]
-       else:
-               raise PreprocError("invalid token %r for get_num" % lst)
-
-def get_term(lst):
-       if not lst: raise PreprocError("empty list for get_term")
-       num, lst = get_num(lst)
-       if not lst:
-               return (num, [])
-       (p, v) = lst[0]
-       if p == OP:
-               if v == '&&' and not num:
-                       return (num, [])
-               elif v == '||' and num:
-                       return (num, [])
-               elif v == ',':
-                       # skip
-                       return get_term(lst[1:])
-               elif v == '?':
-                       count_par = 0
-                       i = 1
-                       while i < len(lst):
-                               (p, v) = lst[i]
-
-                               if p == OP:
-                                       if v == ')':
-                                               count_par -= 1
-                                       elif v == '(':
-                                               count_par += 1
-                                       elif v == ':':
-                                               if count_par == 0:
-                                                       break
-                               i += 1
-                       else:
-                               raise PreprocError("rparen expected %r" % lst)
-
-                       if int(num):
-                               return get_term(lst[1:i])
-                       else:
-                               return get_term(lst[i+1:])
-
-               else:
-                       num2, lst = get_num(lst[1:])
-
-                       if not lst:
-                               # no more tokens to process
-                               num2 = reduce_nums(num, num2, v)
-                               return get_term([(NUM, num2)] + lst)
-
-                       # operator precedence
-                       p2, v2 = lst[0]
-                       if p2 != OP:
-                               raise PreprocError("op expected %r" % lst)
-
-                       if prec[v2] >= prec[v]:
-                               num2 = reduce_nums(num, num2, v)
-                               return get_term([(NUM, num2)] + lst)
-                       else:
-                               num3, lst = get_num(lst[1:])
-                               num3 = reduce_nums(num2, num3, v2)
-                               return get_term([(NUM, num), (p, v), (NUM, num3)] + lst)
-
-
-       raise PreprocError("cannot reduce %r" % lst)
-
-def reduce_eval(lst):
-       """take a list of tokens and output true or false (#if/#elif conditions)"""
-       num, lst = get_term(lst)
-       return (NUM, num)
-
-def stringize(lst):
-       """use for converting a list of tokens to a string"""
-       lst = [str(v2) for (p2, v2) in lst]
-       return "".join(lst)
-
-def paste_tokens(t1, t2):
-       """
-       here is what we can paste:
-        a ## b  ->  ab
-        > ## =  ->  >=
-        a ## 2  ->  a2
-       """
-       p1 = None
-       if t1[0] == OP and t2[0] == OP:
-               p1 = OP
-       elif t1[0] == IDENT and (t2[0] == IDENT or t2[0] == NUM):
-               p1 = IDENT
-       elif t1[0] == NUM and t2[0] == NUM:
-               p1 = NUM
-       if not p1:
-               raise PreprocError('tokens do not make a valid paste %r and %r' % (t1, t2))
-       return (p1, t1[1] + t2[1])
-
-def reduce_tokens(lst, defs, ban=[]):
-       """replace the tokens in lst, using the macros provided in defs, and a list of macros that cannot be re-applied"""
-       i = 0
-
-       while i < len(lst):
-               (p, v) = lst[i]
-
-               if p == IDENT and v == "defined":
-                       del lst[i]
-                       if i < len(lst):
-                               (p2, v2) = lst[i]
-                               if p2 == IDENT:
-                                       if v2 in defs:
-                                               lst[i] = (NUM, 1)
-                                       else:
-                                               lst[i] = (NUM, 0)
-                               elif p2 == OP and v2 == '(':
-                                       del lst[i]
-                                       (p2, v2) = lst[i]
-                                       del lst[i] # remove the ident, and change the ) for the value
-                                       if v2 in defs:
-                                               lst[i] = (NUM, 1)
-                                       else:
-                                               lst[i] = (NUM, 0)
-                               else:
-                                       raise PreprocError("invalid define expression %r" % lst)
-
-               elif p == IDENT and v in defs:
-
-                       if isinstance(defs[v], str):
-                               a, b = extract_macro(defs[v])
-                               defs[v] = b
-                       macro_def = defs[v]
-                       to_add = macro_def[1]
-
-                       if isinstance(macro_def[0], list):
-                               # macro without arguments
-                               del lst[i]
-                               for x in xrange(len(to_add)):
-                                       lst.insert(i, to_add[x])
-                                       i += 1
-                       else:
-                               # collect the arguments for the funcall
-
-                               args = []
-                               del lst[i]
-
-                               if i >= len(lst):
-                                       raise PreprocError("expected '(' after %r (got nothing)" % v)
-
-                               (p2, v2) = lst[i]
-                               if p2 != OP or v2 != '(':
-                                       raise PreprocError("expected '(' after %r" % v)
-
-                               del lst[i]
-
-                               one_param = []
-                               count_paren = 0
-                               while i < len(lst):
-                                       p2, v2 = lst[i]
-
-                                       del lst[i]
-                                       if p2 == OP and count_paren == 0:
-                                               if v2 == '(':
-                                                       one_param.append((p2, v2))
-                                                       count_paren += 1
-                                               elif v2 == ')':
-                                                       if one_param: args.append(one_param)
-                                                       break
-                                               elif v2 == ',':
-                                                       if not one_param: raise PreprocError("empty param in funcall %s" % p)
-                                                       args.append(one_param)
-                                                       one_param = []
-                                               else:
-                                                       one_param.append((p2, v2))
-                                       else:
-                                               one_param.append((p2, v2))
-                                               if   v2 == '(': count_paren += 1
-                                               elif v2 == ')': count_paren -= 1
-                               else:
-                                       raise PreprocError('malformed macro')
-
-                               # substitute the arguments within the define expression
-                               accu = []
-                               arg_table = macro_def[0]
-                               j = 0
-                               while j < len(to_add):
-                                       (p2, v2) = to_add[j]
-
-                                       if p2 == OP and v2 == '#':
-                                               # stringize is for arguments only
-                                               if j+1 < len(to_add) and to_add[j+1][0] == IDENT and to_add[j+1][1] in arg_table:
-                                                       toks = args[arg_table[to_add[j+1][1]]]
-                                                       accu.append((STR, stringize(toks)))
-                                                       j += 1
-                                               else:
-                                                       accu.append((p2, v2))
-                                       elif p2 == OP and v2 == '##':
-                                               # token pasting, how can man invent such a complicated system?
-                                               if accu and j+1 < len(to_add):
-                                                       # we have at least two tokens
-
-                                                       t1 = accu[-1]
-
-                                                       if to_add[j+1][0] == IDENT and to_add[j+1][1] in arg_table:
-                                                               toks = args[arg_table[to_add[j+1][1]]]
-
-                                                               if toks:
-                                                                       accu[-1] = paste_tokens(t1, toks[0]) #(IDENT, accu[-1][1] + toks[0][1])
-                                                                       accu.extend(toks[1:])
-                                                               else:
-                                                                       # error, case "a##"
-                                                                       accu.append((p2, v2))
-                                                                       accu.extend(toks)
-                                                       elif to_add[j+1][0] == IDENT and to_add[j+1][1] == '__VA_ARGS__':
-                                                               # TODO not sure
-                                                               # first collect the tokens
-                                                               va_toks = []
-                                                               st = len(macro_def[0])
-                                                               pt = len(args)
-                                                               for x in args[pt-st+1:]:
-                                                                       va_toks.extend(x)
-                                                                       va_toks.append((OP, ','))
-                                                               if va_toks: va_toks.pop() # extra comma
-                                                               if len(accu)>1:
-                                                                       (p3, v3) = accu[-1]
-                                                                       (p4, v4) = accu[-2]
-                                                                       if v3 == '##':
-                                                                               # remove the token paste
-                                                                               accu.pop()
-                                                                               if v4 == ',' and pt < st:
-                                                                                       # remove the comma
-                                                                                       accu.pop()
-                                                               accu += va_toks
-                                                       else:
-                                                               accu[-1] = paste_tokens(t1, to_add[j+1])
-
-                                                       j += 1
-                                               else:
-                                                       # invalid paste, case    "##a" or "b##"
-                                                       accu.append((p2, v2))
-
-                                       elif p2 == IDENT and v2 in arg_table:
-                                               toks = args[arg_table[v2]]
-                                               reduce_tokens(toks, defs, ban+[v])
-                                               accu.extend(toks)
-                                       else:
-                                               accu.append((p2, v2))
-
-                                       j += 1
-
-
-                               reduce_tokens(accu, defs, ban+[v])
-
-                               for x in xrange(len(accu)-1, -1, -1):
-                                       lst.insert(i, accu[x])
-
-               i += 1
-
-
-def eval_macro(lst, adefs):
-       """reduce the tokens from the list lst, and try to return a 0/1 result"""
-       reduce_tokens(lst, adefs, [])
-       if not lst: raise PreprocError("missing tokens to evaluate")
-       (p, v) = reduce_eval(lst)
-       return int(v) != 0
-
-def extract_macro(txt):
-       """process a macro definition from "#define f(x, y) x * y" into a function or a simple macro without arguments"""
-       t = tokenize(txt)
-       if re_fun.search(txt):
-               p, name = t[0]
-
-               p, v = t[1]
-               if p != OP: raise PreprocError("expected open parenthesis")
-
-               i = 1
-               pindex = 0
-               params = {}
-               prev = '('
-
-               while 1:
-                       i += 1
-                       p, v = t[i]
-
-                       if prev == '(':
-                               if p == IDENT:
-                                       params[v] = pindex
-                                       pindex += 1
-                                       prev = p
-                               elif p == OP and v == ')':
-                                       break
-                               else:
-                                       raise PreprocError("unexpected token (3)")
-                       elif prev == IDENT:
-                               if p == OP and v == ',':
-                                       prev = v
-                               elif p == OP and v == ')':
-                                       break
-                               else:
-                                       raise PreprocError("comma or ... expected")
-                       elif prev == ',':
-                               if p == IDENT:
-                                       params[v] = pindex
-                                       pindex += 1
-                                       prev = p
-                               elif p == OP and v == '...':
-                                       raise PreprocError("not implemented (1)")
-                               else:
-                                       raise PreprocError("comma or ... expected (2)")
-                       elif prev == '...':
-                               raise PreprocError("not implemented (2)")
-                       else:
-                               raise PreprocError("unexpected else")
-
-               #~ print (name, [params, t[i+1:]])
-               return (name, [params, t[i+1:]])
-       else:
-               (p, v) = t[0]
-               return (v, [[], t[1:]])
-
-re_include = re.compile('^\s*(<(?P<a>.*)>|"(?P<b>.*)")')
-def extract_include(txt, defs):
-       """process a line in the form "#include foo" to return a string representing the file"""
-       m = re_include.search(txt)
-       if m:
-               if m.group('a'): return '<', m.group('a')
-               if m.group('b'): return '"', m.group('b')
-
-       # perform preprocessing and look at the result, it must match an include
-       toks = tokenize(txt)
-       reduce_tokens(toks, defs, ['waf_include'])
-
-       if not toks:
-               raise PreprocError("could not parse include %s" % txt)
-
-       if len(toks) == 1:
-               if toks[0][0] == STR:
-                       return '"', toks[0][1]
-       else:
-               if toks[0][1] == '<' and toks[-1][1] == '>':
-                       return stringize(toks).lstrip('<').rstrip('>')
-
-       raise PreprocError("could not parse include %s." % txt)
-
-def parse_char(txt):
-       if not txt: raise PreprocError("attempted to parse a null char")
-       if txt[0] != '\\':
-               return ord(txt)
-       c = txt[1]
-       if c == 'x':
-               if len(txt) == 4 and txt[3] in string.hexdigits: return int(txt[2:], 16)
-               return int(txt[2:], 16)
-       elif c.isdigit():
-               if c == '0' and len(txt)==2: return 0
-               for i in 3, 2, 1:
-                       if len(txt) > i and txt[1:1+i].isdigit():
-                               return (1+i, int(txt[1:1+i], 8))
-       else:
-               try: return chr_esc[c]
-               except KeyError: raise PreprocError("could not parse char literal '%s'" % txt)
-
-@Utils.run_once
-def tokenize_private(s):
-       ret = []
-       for match in re_clexer.finditer(s):
-               m = match.group
-               for name in tok_types:
-                       v = m(name)
-                       if v:
-                               if name == IDENT:
-                                       try: v = g_optrans[v]; name = OP
-                                       except KeyError:
-                                               # c++ specific
-                                               if v.lower() == "true":
-                                                       v = 1
-                                                       name = NUM
-                                               elif v.lower() == "false":
-                                                       v = 0
-                                                       name = NUM
-                               elif name == NUM:
-                                       if m('oct'): v = int(v, 8)
-                                       elif m('hex'): v = int(m('hex'), 16)
-                                       elif m('n0'): v = m('n0')
-                                       else:
-                                               v = m('char')
-                                               if v: v = parse_char(v)
-                                               else: v = m('n2') or m('n4')
-                               elif name == OP:
-                                       if v == '%:': v = '#'
-                                       elif v == '%:%:': v = '##'
-                               elif name == STR:
-                                       # remove the quotes around the string
-                                       v = v[1:-1]
-                               ret.append((name, v))
-                               break
-       return ret
-
-def tokenize(s):
-       """convert a string into a list of tokens (shlex.split does not apply to c/c++/d)"""
-       return tokenize_private(s)[:]
-
-@Utils.run_once
-def define_name(line):
-       return re_mac.match(line).group(0)
-
-class c_parser(object):
-       def __init__(self, nodepaths=None, defines=None):
-               #self.lines = txt.split('\n')
-               self.lines = []
-
-               if defines is None:
-                       self.defs  = {}
-               else:
-                       self.defs  = dict(defines) # make a copy
-               self.state = []
-
-               self.env   = None # needed for the variant when searching for files
-
-               self.count_files = 0
-               self.currentnode_stack = []
-
-               self.nodepaths = nodepaths or []
-
-               self.nodes = []
-               self.names = []
-
-               # file added
-               self.curfile = ''
-               self.ban_includes = set([])
-
-       def cached_find_resource(self, node, filename):
-               try:
-                       nd = node.bld.cache_nd
-               except:
-                       nd = node.bld.cache_nd = {}
-
-               tup = (node.id, filename)
-               try:
-                       return nd[tup]
-               except KeyError:
-                       ret = node.find_resource(filename)
-                       nd[tup] = ret
-                       return ret
-
-       def tryfind(self, filename):
-               self.curfile = filename
-
-               # for msvc it should be a for loop on the whole stack
-               found = self.cached_find_resource(self.currentnode_stack[-1], filename)
-
-               for n in self.nodepaths:
-                       if found:
-                               break
-                       found = self.cached_find_resource(n, filename)
-
-               if found:
-                       self.nodes.append(found)
-                       if filename[-4:] != '.moc':
-                               self.addlines(found)
-               else:
-                       if not filename in self.names:
-                               self.names.append(filename)
-               return found
-
-       def addlines(self, node):
-
-               self.currentnode_stack.append(node.parent)
-               filepath = node.abspath(self.env)
-
-               self.count_files += 1
-               if self.count_files > recursion_limit: raise PreprocError("recursion limit exceeded")
-               pc = self.parse_cache
-               debug('preproc: reading file %r', filepath)
-               try:
-                       lns = pc[filepath]
-               except KeyError:
-                       pass
-               else:
-                       self.lines.extend(lns)
-                       return
-
-               try:
-                       lines = filter_comments(filepath)
-                       lines.append((POPFILE, ''))
-                       lines.reverse()
-                       pc[filepath] = lines # cache the lines filtered
-                       self.lines.extend(lines)
-               except IOError:
-                       raise PreprocError("could not read the file %s" % filepath)
-               except Exception:
-                       if Logs.verbose > 0:
-                               error("parsing %s failed" % filepath)
-                               traceback.print_exc()
-
-       def start(self, node, env):
-               debug('preproc: scanning %s (in %s)', node.name, node.parent.name)
-
-               self.env = env
-               variant = node.variant(env)
-               bld = node.__class__.bld
-               try:
-                       self.parse_cache = bld.parse_cache
-               except AttributeError:
-                       bld.parse_cache = {}
-                       self.parse_cache = bld.parse_cache
-
-               self.addlines(node)
-               if env['DEFLINES']:
-                       lst = [('define', x) for x in env['DEFLINES']]
-                       lst.reverse()
-                       self.lines.extend(lst)
-
-               while self.lines:
-                       (kind, line) = self.lines.pop()
-                       if kind == POPFILE:
-                               self.currentnode_stack.pop()
-                               continue
-                       try:
-                               self.process_line(kind, line)
-                       except Exception, e:
-                               if Logs.verbose:
-                                       debug('preproc: line parsing failed (%s): %s %s', e, line, Utils.ex_stack())
-
-       def process_line(self, token, line):
-               """
-               WARNING: a new state must be added for if* because the endif
-               """
-               ve = Logs.verbose
-               if ve: debug('preproc: line is %s - %s state is %s', token, line, self.state)
-               state = self.state
-
-               # make certain we define the state if we are about to enter in an if block
-               if token in ['ifdef', 'ifndef', 'if']:
-                       state.append(undefined)
-               elif token == 'endif':
-                       state.pop()
-
-               # skip lines when in a dead 'if' branch, wait for the endif
-               if not token in ['else', 'elif', 'endif']:
-                       if skipped in self.state or ignored in self.state:
-                               return
-
-               if token == 'if':
-                       ret = eval_macro(tokenize(line), self.defs)
-                       if ret: state[-1] = accepted
-                       else: state[-1] = ignored
-               elif token == 'ifdef':
-                       m = re_mac.match(line)
-                       if m and m.group(0) in self.defs: state[-1] = accepted
-                       else: state[-1] = ignored
-               elif token == 'ifndef':
-                       m = re_mac.match(line)
-                       if m and m.group(0) in self.defs: state[-1] = ignored
-                       else: state[-1] = accepted
-               elif token == 'include' or token == 'import':
-                       (kind, inc) = extract_include(line, self.defs)
-                       if inc in self.ban_includes: return
-                       if token == 'import': self.ban_includes.add(inc)
-                       if ve: debug('preproc: include found %s    (%s) ', inc, kind)
-                       if kind == '"' or not strict_quotes:
-                               self.tryfind(inc)
-               elif token == 'elif':
-                       if state[-1] == accepted:
-                               state[-1] = skipped
-                       elif state[-1] == ignored:
-                               if eval_macro(tokenize(line), self.defs):
-                                       state[-1] = accepted
-               elif token == 'else':
-                       if state[-1] == accepted: state[-1] = skipped
-                       elif state[-1] == ignored: state[-1] = accepted
-               elif token == 'define':
-                       try:
-                               self.defs[define_name(line)] = line
-                       except:
-                               raise PreprocError("invalid define line %s" % line)
-               elif token == 'undef':
-                       m = re_mac.match(line)
-                       if m and m.group(0) in self.defs:
-                               self.defs.__delitem__(m.group(0))
-                               #print "undef %s" % name
-               elif token == 'pragma':
-                       if re_pragma_once.match(line.lower()):
-                               self.ban_includes.add(self.curfile)
-
-def get_deps(node, env, nodepaths=[]):
-       """
-       Get the dependencies using a c/c++ preprocessor, this is required for finding dependencies of the kind
-       #include some_macro()
-       """
-
-       gruik = c_parser(nodepaths)
-       gruik.start(node, env)
-       return (gruik.nodes, gruik.names)
-
-#################### dumb dependency scanner
-
-re_inc = re.compile(\
-       '^[ \t]*(#|%:)[ \t]*(include)[ \t]*(.*)\r*$',
-       re.IGNORECASE | re.MULTILINE)
-
-def lines_includes(filename):
-       code = Utils.readf(filename)
-       if use_trigraphs:
-               for (a, b) in trig_def: code = code.split(a).join(b)
-       code = re_nl.sub('', code)
-       code = re_cpp.sub(repl, code)
-       return [(m.group(2), m.group(3)) for m in re.finditer(re_inc, code)]
-
-def get_deps_simple(node, env, nodepaths=[], defines={}):
-       """
-       Get the dependencies by just looking recursively at the #include statements
-       """
-
-       nodes = []
-       names = []
-
-       def find_deps(node):
-               lst = lines_includes(node.abspath(env))
-
-               for (_, line) in lst:
-                       (t, filename) = extract_include(line, defines)
-                       if filename in names:
-                               continue
-
-                       if filename.endswith('.moc'):
-                               names.append(filename)
-
-                       found = None
-                       for n in nodepaths:
-                               if found:
-                                       break
-                               found = n.find_resource(filename)
-
-                       if not found:
-                               if not filename in names:
-                                       names.append(filename)
-                       elif not found in nodes:
-                               nodes.append(found)
-                               find_deps(node)
-
-       find_deps(node)
-       return (nodes, names)
diff --git a/third_party/waf/wafadmin/Tools/python.py b/third_party/waf/wafadmin/Tools/python.py
deleted file mode 100644 (file)
index cd96b65..0000000
+++ /dev/null
@@ -1,432 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2007 (ita)
-# Gustavo Carneiro (gjc), 2007
-
-"Python support"
-
-import os, sys
-import TaskGen, Utils, Options
-from Logs import debug, warn, info
-from TaskGen import extension, before, after, feature
-from Configure import conf
-from config_c import parse_flags
-
-EXT_PY = ['.py']
-FRAG_2 = '''
-#include "Python.h"
-#ifdef __cplusplus
-extern "C" {
-#endif
-       void Py_Initialize(void);
-       void Py_Finalize(void);
-#ifdef __cplusplus
-}
-#endif
-int main()
-{
-   Py_Initialize();
-   Py_Finalize();
-   return 0;
-}
-'''
-
-@feature('pyext')
-@before('apply_incpaths', 'apply_lib_vars', 'apply_type_vars', 'apply_bundle')
-@after('vars_target_cshlib')
-def init_pyext(self):
-       self.default_install_path = '${PYTHONARCHDIR}'
-       self.uselib = self.to_list(getattr(self, 'uselib', ''))
-       if not 'PYEXT' in self.uselib:
-               self.uselib.append('PYEXT')
-       self.env['MACBUNDLE'] = True
-
-@before('apply_link', 'apply_lib_vars', 'apply_type_vars')
-@after('apply_bundle')
-@feature('pyext')
-def pyext_shlib_ext(self):
-       # override shlib_PATTERN set by the osx module
-       self.env['shlib_PATTERN'] = self.env['pyext_PATTERN']
-
-@before('apply_incpaths', 'apply_lib_vars', 'apply_type_vars')
-@feature('pyembed')
-def init_pyembed(self):
-       self.uselib = self.to_list(getattr(self, 'uselib', ''))
-       if not 'PYEMBED' in self.uselib:
-               self.uselib.append('PYEMBED')
-
-@extension(EXT_PY)
-def process_py(self, node):
-       if not (self.bld.is_install and self.install_path):
-               return
-       def inst_py(ctx):
-               install_pyfile(self, node)
-       self.bld.add_post_fun(inst_py)
-
-def install_pyfile(self, node):
-       path = self.bld.get_install_path(self.install_path + os.sep + node.name, self.env)
-
-       self.bld.install_files(self.install_path, [node], self.env, self.chmod, postpone=False)
-       if self.bld.is_install < 0:
-               info("* removing byte compiled python files")
-               for x in 'co':
-                       try:
-                               os.remove(path + x)
-                       except OSError:
-                               pass
-
-       if self.bld.is_install > 0:
-               if self.env['PYC'] or self.env['PYO']:
-                       info("* byte compiling %r" % path)
-
-               if self.env['PYC']:
-                       program = ("""
-import sys, py_compile
-for pyfile in sys.argv[1:]:
-       py_compile.compile(pyfile, pyfile + 'c')
-""")
-                       argv = [self.env['PYTHON'], '-c', program, path]
-                       ret = Utils.pproc.Popen(argv).wait()
-                       if ret:
-                               raise Utils.WafError('bytecode compilation failed %r' % path)
-
-               if self.env['PYO']:
-                       program = ("""
-import sys, py_compile
-for pyfile in sys.argv[1:]:
-       py_compile.compile(pyfile, pyfile + 'o')
-""")
-                       argv = [self.env['PYTHON'], self.env['PYFLAGS_OPT'], '-c', program, path]
-                       ret = Utils.pproc.Popen(argv).wait()
-                       if ret:
-                               raise Utils.WafError('bytecode compilation failed %r' % path)
-
-# COMPAT
-class py_taskgen(TaskGen.task_gen):
-       def __init__(self, *k, **kw):
-               TaskGen.task_gen.__init__(self, *k, **kw)
-
-@before('apply_core')
-@after('vars_target_cprogram', 'vars_target_cshlib')
-@feature('py')
-def init_py(self):
-       self.default_install_path = '${PYTHONDIR}'
-
-def _get_python_variables(python_exe, variables, imports=['import sys']):
-       """Run a python interpreter and print some variables"""
-       program = list(imports)
-       program.append('')
-       for v in variables:
-               program.append("print(repr(%s))" % v)
-       os_env = dict(os.environ)
-       try:
-               del os_env['MACOSX_DEPLOYMENT_TARGET'] # see comments in the OSX tool
-       except KeyError:
-               pass
-       proc = Utils.pproc.Popen([python_exe, "-c", '\n'.join(program)], stdout=Utils.pproc.PIPE, env=os_env)
-       output = proc.communicate()[0].split("\n") # do not touch, python3
-       if proc.returncode:
-               if Options.options.verbose:
-                       warn("Python program to extract python configuration variables failed:\n%s"
-                                      % '\n'.join(["line %03i: %s" % (lineno+1, line) for lineno, line in enumerate(program)]))
-               raise RuntimeError
-       return_values = []
-       for s in output:
-               s = s.strip()
-               if not s:
-                       continue
-               if s == 'None':
-                       return_values.append(None)
-               elif (s[0] == "'" and s[-1] == "'") or (s[0] == '"' and s[-1] == '"'):
-                       return_values.append(eval(s))
-               elif s[0].isdigit():
-                       return_values.append(int(s))
-               else: break
-       return return_values
-
-@conf
-def check_python_headers(conf, mandatory=True):
-       """Check for headers and libraries necessary to extend or embed python.
-
-       On success the environment variables xxx_PYEXT and xxx_PYEMBED are added for uselib
-
-       PYEXT: for compiling python extensions
-       PYEMBED: for embedding a python interpreter"""
-
-       if not conf.env['CC_NAME'] and not conf.env['CXX_NAME']:
-               conf.fatal('load a compiler first (gcc, g++, ..)')
-
-       if not conf.env['PYTHON_VERSION']:
-               conf.check_python_version()
-
-       env = conf.env
-       python = env['PYTHON']
-       if not python:
-               conf.fatal('could not find the python executable')
-
-       ## On Mac OSX we need to use mac bundles for python plugins
-       if Options.platform == 'darwin':
-               conf.check_tool('osx')
-
-       try:
-               # Get some python configuration variables using distutils
-               v = 'prefix SO SYSLIBS LDFLAGS SHLIBS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDVERSION'.split()
-               (python_prefix, python_SO, python_SYSLIBS, python_LDFLAGS, python_SHLIBS,
-                python_LIBDIR, python_LIBPL, INCLUDEPY, Py_ENABLE_SHARED,
-                python_MACOSX_DEPLOYMENT_TARGET, python_LDVERSION) = \
-                       _get_python_variables(python, ["get_config_var('%s') or ''" % x for x in v],
-                                             ['from distutils.sysconfig import get_config_var'])
-       except RuntimeError:
-               conf.fatal("Python development headers not found (-v for details).")
-
-       conf.log.write("""Configuration returned from %r:
-python_prefix = %r
-python_SO = %r
-python_SYSLIBS = %r
-python_LDFLAGS = %r
-python_SHLIBS = %r
-python_LIBDIR = %r
-python_LIBPL = %r
-INCLUDEPY = %r
-Py_ENABLE_SHARED = %r
-MACOSX_DEPLOYMENT_TARGET = %r
-LDVERSION = %r
-""" % (python, python_prefix, python_SO, python_SYSLIBS, python_LDFLAGS, python_SHLIBS,
-       python_LIBDIR, python_LIBPL, INCLUDEPY, Py_ENABLE_SHARED, python_MACOSX_DEPLOYMENT_TARGET,
-       python_LDVERSION))
-
-       # Allow some python overrides from env vars for cross-compiling
-       os_env = dict(os.environ)
-
-       override_python_LDFLAGS = os_env.get('python_LDFLAGS', None)
-       if override_python_LDFLAGS is not None:
-               conf.log.write("python_LDFLAGS override from environment = %r\n" % (override_python_LDFLAGS))
-               python_LDFLAGS = override_python_LDFLAGS
-
-       override_python_LIBDIR = os_env.get('python_LIBDIR', None)
-       if override_python_LIBDIR is not None:
-               conf.log.write("python_LIBDIR override from environment = %r\n" % (override_python_LIBDIR))
-               python_LIBDIR = override_python_LIBDIR
-
-       if python_MACOSX_DEPLOYMENT_TARGET:
-               conf.env['MACOSX_DEPLOYMENT_TARGET'] = python_MACOSX_DEPLOYMENT_TARGET
-               conf.environ['MACOSX_DEPLOYMENT_TARGET'] = python_MACOSX_DEPLOYMENT_TARGET
-
-       env['pyext_PATTERN'] = '%s'+python_SO
-
-       # Check for python libraries for embedding
-       if python_SYSLIBS is not None:
-               for lib in python_SYSLIBS.split():
-                       if lib.startswith('-l'):
-                               lib = lib[2:] # strip '-l'
-                       env.append_value('LIB_PYEMBED', lib)
-
-       if python_SHLIBS is not None:
-               for lib in python_SHLIBS.split():
-                       if lib.startswith('-l'):
-                               env.append_value('LIB_PYEMBED', lib[2:]) # strip '-l'
-                       else:
-                               env.append_value('LINKFLAGS_PYEMBED', lib)
-
-       if Options.platform != 'darwin' and python_LDFLAGS:
-               parse_flags(python_LDFLAGS, 'PYEMBED', env)
-
-       result = False
-       if not python_LDVERSION:
-               python_LDVERSION = env['PYTHON_VERSION']
-       name = 'python' + python_LDVERSION
-
-       if python_LIBDIR is not None:
-               path = [python_LIBDIR]
-               conf.log.write("\n\n# Trying LIBDIR: %r\n" % path)
-               result = conf.check(lib=name, uselib='PYEMBED', libpath=path)
-
-       if not result and python_LIBPL is not None:
-               conf.log.write("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n")
-               path = [python_LIBPL]
-               result = conf.check(lib=name, uselib='PYEMBED', libpath=path)
-
-       if not result:
-               conf.log.write("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n")
-               path = [os.path.join(python_prefix, "libs")]
-               name = 'python' + python_LDVERSION.replace('.', '')
-               result = conf.check(lib=name, uselib='PYEMBED', libpath=path)
-
-       if result:
-               env['LIBPATH_PYEMBED'] = path
-               env.append_value('LIB_PYEMBED', name)
-       else:
-               conf.log.write("\n\n### LIB NOT FOUND\n")
-
-       # under certain conditions, python extensions must link to
-       # python libraries, not just python embedding programs.
-       if (sys.platform == 'win32' or sys.platform.startswith('os2')
-               or sys.platform == 'darwin' or Py_ENABLE_SHARED):
-               env['LIBPATH_PYEXT'] = env['LIBPATH_PYEMBED']
-               env['LIB_PYEXT'] = env['LIB_PYEMBED']
-
-       # We check that pythonX.Y-config exists, and if it exists we
-       # use it to get only the includes, else fall back to distutils.
-       python_config = conf.find_program(
-               'python%s-config' % ('.'.join(env['PYTHON_VERSION'].split('.')[:2])),
-               var='PYTHON_CONFIG')
-       if not python_config:
-               python_config = conf.find_program(
-                       'python-config-%s' % ('.'.join(env['PYTHON_VERSION'].split('.')[:2])),
-                       var='PYTHON_CONFIG')
-
-       includes = []
-       if python_config:
-               for incstr in Utils.cmd_output("%s --includes" % (python_config,)).strip().split():
-                       # strip the -I or /I
-                       if (incstr.startswith('-I')
-                           or incstr.startswith('/I')):
-                               incstr = incstr[2:]
-                       # append include path, unless already given
-                       if incstr not in includes:
-                               includes.append(incstr)
-               conf.log.write("Include path for Python extensions "
-                              "(found via python-config --includes): %r\n" % (includes,))
-               env['CPPPATH_PYEXT'] = includes
-               env['CPPPATH_PYEMBED'] = includes
-       else:
-               conf.log.write("Include path for Python extensions "
-                              "(found via distutils module): %r\n" % (INCLUDEPY,))
-               env['CPPPATH_PYEXT'] = [INCLUDEPY]
-               env['CPPPATH_PYEMBED'] = [INCLUDEPY]
-
-       # Code using the Python API needs to be compiled with -fno-strict-aliasing
-       if env['CC_NAME'] == 'gcc':
-               env.append_value('CCFLAGS_PYEMBED', '-fno-strict-aliasing')
-               env.append_value('CCFLAGS_PYEXT', '-fno-strict-aliasing')
-       if env['CXX_NAME'] == 'gcc':
-               env.append_value('CXXFLAGS_PYEMBED', '-fno-strict-aliasing')
-               env.append_value('CXXFLAGS_PYEXT', '-fno-strict-aliasing')
-
-       # See if it compiles
-       conf.check(define_name='HAVE_PYTHON_H',
-                  uselib='PYEMBED', fragment=FRAG_2,
-                  errmsg='Could not find the python development headers', mandatory=mandatory)
-
-@conf
-def check_python_version(conf, minver=None):
-       """
-       Check if the python interpreter is found matching a given minimum version.
-       minver should be a tuple, eg. to check for python >= 2.4.2 pass (2,4,2) as minver.
-
-       If successful, PYTHON_VERSION is defined as 'MAJOR.MINOR'
-       (eg. '2.4') of the actual python version found, and PYTHONDIR is
-       defined, pointing to the site-packages directory appropriate for
-       this python version, where modules/packages/extensions should be
-       installed.
-       """
-       assert minver is None or isinstance(minver, tuple)
-       python = conf.env['PYTHON']
-       if not python:
-               conf.fatal('could not find the python executable')
-
-       # Get python version string
-       cmd = [python, "-c", "import sys\nfor x in sys.version_info: print(str(x))"]
-       debug('python: Running python command %r' % cmd)
-       proc = Utils.pproc.Popen(cmd, stdout=Utils.pproc.PIPE, shell=False)
-       lines = proc.communicate()[0].split()
-       assert len(lines) == 5, "found %i lines, expected 5: %r" % (len(lines), lines)
-       pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4]))
-
-       # compare python version with the minimum required
-       result = (minver is None) or (pyver_tuple >= minver)
-
-       if result:
-               # define useful environment variables
-               pyver = '.'.join([str(x) for x in pyver_tuple[:2]])
-               conf.env['PYTHON_VERSION'] = pyver
-
-               if 'PYTHONDIR' in conf.environ:
-                       pydir = conf.environ['PYTHONDIR']
-               else:
-                       if sys.platform == 'win32':
-                               (python_LIBDEST, pydir) = \
-                                               _get_python_variables(python,
-                                                                                         ["get_config_var('LIBDEST') or ''",
-                                                                                          "get_python_lib(standard_lib=0, prefix=%r) or ''" % conf.env['PREFIX']],
-                                                                                         ['from distutils.sysconfig import get_config_var, get_python_lib'])
-                       else:
-                               python_LIBDEST = None
-                               (pydir,) = \
-                                               _get_python_variables(python,
-                                                                                         ["get_python_lib(standard_lib=0, prefix=%r) or ''" % conf.env['PREFIX']],
-                                                                                         ['from distutils.sysconfig import get_config_var, get_python_lib'])
-                       if python_LIBDEST is None:
-                               if conf.env['LIBDIR']:
-                                       python_LIBDEST = os.path.join(conf.env['LIBDIR'], "python" + pyver)
-                               else:
-                                       python_LIBDEST = os.path.join(conf.env['PREFIX'], "lib", "python" + pyver)
-
-               if 'PYTHONARCHDIR' in conf.environ:
-                       pyarchdir = conf.environ['PYTHONARCHDIR']
-               else:
-                       (pyarchdir,) = _get_python_variables(python,
-                                                                                       ["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''" % conf.env['PREFIX']],
-                                                                                       ['from distutils.sysconfig import get_config_var, get_python_lib'])
-                       if not pyarchdir:
-                               pyarchdir = pydir
-
-               if hasattr(conf, 'define'): # conf.define is added by the C tool, so may not exist
-                       conf.define('PYTHONDIR', pydir)
-                       conf.define('PYTHONARCHDIR', pyarchdir)
-
-               conf.env['PYTHONDIR'] = pydir
-
-       # Feedback
-       pyver_full = '.'.join(map(str, pyver_tuple[:3]))
-       if minver is None:
-               conf.check_message_custom('Python version', '', pyver_full)
-       else:
-               minver_str = '.'.join(map(str, minver))
-               conf.check_message('Python version', ">= %s" % minver_str, result, option=pyver_full)
-
-       if not result:
-               conf.fatal('The python version is too old (%r)' % pyver_full)
-
-@conf
-def check_python_module(conf, module_name):
-       """
-       Check if the selected python interpreter can import the given python module.
-       """
-       result = not Utils.pproc.Popen([conf.env['PYTHON'], "-c", "import %s" % module_name],
-                          stderr=Utils.pproc.PIPE, stdout=Utils.pproc.PIPE).wait()
-       conf.check_message('Python module', module_name, result)
-       if not result:
-               conf.fatal('Could not find the python module %r' % module_name)
-
-def detect(conf):
-
-       if not conf.env.PYTHON:
-               conf.env.PYTHON = sys.executable
-
-       python = conf.find_program('python', var='PYTHON')
-       if not python:
-               conf.fatal('Could not find the path of the python executable')
-
-       if conf.env.PYTHON != sys.executable:
-               warn("python executable '%s' different from sys.executable '%s'" % (conf.env.PYTHON, sys.executable))
-
-       v = conf.env
-       v['PYCMD'] = '"import sys, py_compile;py_compile.compile(sys.argv[1], sys.argv[2])"'
-       v['PYFLAGS'] = ''
-       v['PYFLAGS_OPT'] = '-O'
-
-       v['PYC'] = getattr(Options.options, 'pyc', 1)
-       v['PYO'] = getattr(Options.options, 'pyo', 1)
-
-def set_options(opt):
-       opt.add_option('--nopyc',
-                       action='store_false',
-                       default=1,
-                       help = 'Do not install bytecode compiled .pyc files (configuration) [Default:install]',
-                       dest = 'pyc')
-       opt.add_option('--nopyo',
-                       action='store_false',
-                       default=1,
-                       help='Do not install optimised compiled .pyo files (configuration) [Default:install]',
-                       dest='pyo')
diff --git a/third_party/waf/wafadmin/Tools/qt4.py b/third_party/waf/wafadmin/Tools/qt4.py
deleted file mode 100644 (file)
index 7d2cad7..0000000
+++ /dev/null
@@ -1,504 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006 (ita)
-
-"""
-Qt4 support
-
-If QT4_ROOT is given (absolute path), the configuration will look in it first
-
-This module also demonstrates how to add tasks dynamically (when the build has started)
-"""
-
-try:
-       from xml.sax import make_parser
-       from xml.sax.handler import ContentHandler
-except ImportError:
-       has_xml = False
-       ContentHandler = object
-else:
-       has_xml = True
-
-import os, sys
-import ccroot, cxx
-import TaskGen, Task, Utils, Runner, Options, Node, Configure
-from TaskGen import taskgen, feature, after, extension
-from Logs import error
-from Constants import *
-
-MOC_H = ['.h', '.hpp', '.hxx', '.hh']
-EXT_RCC = ['.qrc']
-EXT_UI  = ['.ui']
-EXT_QT4 = ['.cpp', '.cc', '.cxx', '.C']
-
-class qxx_task(Task.Task):
-       "A cpp task that may create a moc task dynamically"
-
-       before = ['cxx_link', 'static_link']
-
-       def __init__(self, *k, **kw):
-               Task.Task.__init__(self, *k, **kw)
-               self.moc_done = 0
-
-       def scan(self):
-               (nodes, names) = ccroot.scan(self)
-               # for some reasons (variants) the moc node may end in the list of node deps
-               for x in nodes:
-                       if x.name.endswith('.moc'):
-                               nodes.remove(x)
-                               names.append(x.relpath_gen(self.inputs[0].parent))
-               return (nodes, names)
-
-       def runnable_status(self):
-               if self.moc_done:
-                       # if there is a moc task, delay the computation of the file signature
-                       for t in self.run_after:
-                               if not t.hasrun:
-                                       return ASK_LATER
-                       # the moc file enters in the dependency calculation
-                       # so we need to recompute the signature when the moc file is present
-                       self.signature()
-                       return Task.Task.runnable_status(self)
-               else:
-                       # yes, really, there are people who generate cxx files
-                       for t in self.run_after:
-                               if not t.hasrun:
-                                       return ASK_LATER
-                       self.add_moc_tasks()
-                       return ASK_LATER
-
-       def add_moc_tasks(self):
-
-               node = self.inputs[0]
-               tree = node.__class__.bld
-
-               try:
-                       # compute the signature once to know if there is a moc file to create
-                       self.signature()
-               except KeyError:
-                       # the moc file may be referenced somewhere else
-                       pass
-               else:
-                       # remove the signature, it must be recomputed with the moc task
-                       delattr(self, 'cache_sig')
-
-               moctasks=[]
-               mocfiles=[]
-               variant = node.variant(self.env)
-               try:
-                       tmp_lst = tree.raw_deps[self.unique_id()]
-                       tree.raw_deps[self.unique_id()] = []
-               except KeyError:
-                       tmp_lst = []
-               for d in tmp_lst:
-                       if not d.endswith('.moc'): continue
-                       # paranoid check
-                       if d in mocfiles:
-                               error("paranoia owns")
-                               continue
-
-                       # process that base.moc only once
-                       mocfiles.append(d)
-
-                       # find the extension (performed only when the .cpp has changes)
-                       base2 = d[:-4]
-                       for path in [node.parent] + self.generator.env['INC_PATHS']:
-                               tree.rescan(path)
-                               vals = getattr(Options.options, 'qt_header_ext', '') or MOC_H
-                               for ex in vals:
-                                       h_node = path.find_resource(base2 + ex)
-                                       if h_node:
-                                               break
-                               else:
-                                       continue
-                               break
-                       else:
-                               raise Utils.WafError("no header found for %s which is a moc file" % str(d))
-
-                       m_node = h_node.change_ext('.moc')
-                       tree.node_deps[(self.inputs[0].parent.id, self.env.variant(), m_node.name)] = h_node
-
-                       # create the task
-                       task = Task.TaskBase.classes['moc'](self.env, normal=0)
-                       task.set_inputs(h_node)
-                       task.set_outputs(m_node)
-
-                       generator = tree.generator
-                       generator.outstanding.insert(0, task)
-                       generator.total += 1
-
-                       moctasks.append(task)
-
-               # remove raw deps except the moc files to save space (optimization)
-               tmp_lst = tree.raw_deps[self.unique_id()] = mocfiles
-
-               # look at the file inputs, it is set right above
-               lst = tree.node_deps.get(self.unique_id(), ())
-               for d in lst:
-                       name = d.name
-                       if name.endswith('.moc'):
-                               task = Task.TaskBase.classes['moc'](self.env, normal=0)
-                               task.set_inputs(tree.node_deps[(self.inputs[0].parent.id, self.env.variant(), name)]) # 1st element in a tuple
-                               task.set_outputs(d)
-
-                               generator = tree.generator
-                               generator.outstanding.insert(0, task)
-                               generator.total += 1
-
-                               moctasks.append(task)
-
-               # simple scheduler dependency: run the moc task before others
-               self.run_after = moctasks
-               self.moc_done = 1
-
-       run = Task.TaskBase.classes['cxx'].__dict__['run']
-
-def translation_update(task):
-       outs = [a.abspath(task.env) for a in task.outputs]
-       outs = " ".join(outs)
-       lupdate = task.env['QT_LUPDATE']
-
-       for x in task.inputs:
-               file = x.abspath(task.env)
-               cmd = "%s %s -ts %s" % (lupdate, file, outs)
-               Utils.pprint('BLUE', cmd)
-               task.generator.bld.exec_command(cmd)
-
-class XMLHandler(ContentHandler):
-       def __init__(self):
-               self.buf = []
-               self.files = []
-       def startElement(self, name, attrs):
-               if name == 'file':
-                       self.buf = []
-       def endElement(self, name):
-               if name == 'file':
-                       self.files.append(''.join(self.buf))
-       def characters(self, cars):
-               self.buf.append(cars)
-
-def scan(self):
-       "add the dependency on the files referenced in the qrc"
-       node = self.inputs[0]
-       parser = make_parser()
-       curHandler = XMLHandler()
-       parser.setContentHandler(curHandler)
-       fi = open(self.inputs[0].abspath(self.env))
-       parser.parse(fi)
-       fi.close()
-
-       nodes = []
-       names = []
-       root = self.inputs[0].parent
-       for x in curHandler.files:
-               nd = root.find_resource(x)
-               if nd: nodes.append(nd)
-               else: names.append(x)
-
-       return (nodes, names)
-
-@extension(EXT_RCC)
-def create_rcc_task(self, node):
-       "hook for rcc files"
-       rcnode = node.change_ext('_rc.cpp')
-       rcctask = self.create_task('rcc', node, rcnode)
-       cpptask = self.create_task('cxx', rcnode, rcnode.change_ext('.o'))
-       self.compiled_tasks.append(cpptask)
-       return cpptask
-
-@extension(EXT_UI)
-def create_uic_task(self, node):
-       "hook for uic tasks"
-       uictask = self.create_task('ui4', node)
-       uictask.outputs = [self.path.find_or_declare(self.env['ui_PATTERN'] % node.name[:-3])]
-       return uictask
-
-class qt4_taskgen(cxx.cxx_taskgen):
-       def __init__(self, *k, **kw):
-               cxx.cxx_taskgen.__init__(self, *k, **kw)
-               self.features.append('qt4')
-
-@extension('.ts')
-def add_lang(self, node):
-       """add all the .ts file into self.lang"""
-       self.lang = self.to_list(getattr(self, 'lang', [])) + [node]
-
-@feature('qt4')
-@after('apply_link')
-def apply_qt4(self):
-       if getattr(self, 'lang', None):
-               update = getattr(self, 'update', None)
-               lst=[]
-               trans=[]
-               for l in self.to_list(self.lang):
-
-                       if not isinstance(l, Node.Node):
-                               l = self.path.find_resource(l+'.ts')
-
-                       t = self.create_task('ts2qm', l, l.change_ext('.qm'))
-                       lst.append(t.outputs[0])
-
-                       if update:
-                               trans.append(t.inputs[0])
-
-               trans_qt4 = getattr(Options.options, 'trans_qt4', False)
-               if update and trans_qt4:
-                       # we need the cpp files given, except the rcc task we create after
-                       # FIXME may be broken
-                       u = Task.TaskCmd(translation_update, self.env, 2)
-                       u.inputs = [a.inputs[0] for a in self.compiled_tasks]
-                       u.outputs = trans
-
-               if getattr(self, 'langname', None):
-                       t = Task.TaskBase.classes['qm2rcc'](self.env)
-                       t.set_inputs(lst)
-                       t.set_outputs(self.path.find_or_declare(self.langname+'.qrc'))
-                       t.path = self.path
-                       k = create_rcc_task(self, t.outputs[0])
-                       self.link_task.inputs.append(k.outputs[0])
-
-       self.env.append_value('MOC_FLAGS', self.env._CXXDEFFLAGS)
-       self.env.append_value('MOC_FLAGS', self.env._CXXINCFLAGS)
-
-@extension(EXT_QT4)
-def cxx_hook(self, node):
-       # create the compilation task: cpp or cc
-       try: obj_ext = self.obj_ext
-       except AttributeError: obj_ext = '_%d.o' % self.idx
-
-       task = self.create_task('qxx', node, node.change_ext(obj_ext))
-       self.compiled_tasks.append(task)
-       return task
-
-def process_qm2rcc(task):
-       outfile = task.outputs[0].abspath(task.env)
-       f = open(outfile, 'w')
-       f.write('<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n')
-       for k in task.inputs:
-               f.write(' <file>')
-               #f.write(k.name)
-               f.write(k.path_to_parent(task.path))
-               f.write('</file>\n')
-       f.write('</qresource>\n</RCC>')
-       f.close()
-
-b = Task.simple_task_type
-b('moc', '${QT_MOC} ${MOC_FLAGS} ${SRC} ${MOC_ST} ${TGT}', color='BLUE', vars=['QT_MOC', 'MOC_FLAGS'], shell=False)
-cls = b('rcc', '${QT_RCC} -name ${SRC[0].name} ${SRC[0].abspath(env)} ${RCC_ST} -o ${TGT}', color='BLUE', before='cxx moc qxx_task', after="qm2rcc", shell=False)
-cls.scan = scan
-b('ui4', '${QT_UIC} ${SRC} -o ${TGT}', color='BLUE', before='cxx moc qxx_task', shell=False)
-b('ts2qm', '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}', color='BLUE', before='qm2rcc', shell=False)
-
-Task.task_type_from_func('qm2rcc', vars=[], func=process_qm2rcc, color='BLUE', before='rcc', after='ts2qm')
-
-def detect_qt4(conf):
-       env = conf.env
-       opt = Options.options
-
-       qtdir = getattr(opt, 'qtdir', '')
-       qtbin = getattr(opt, 'qtbin', '')
-       qtlibs = getattr(opt, 'qtlibs', '')
-       useframework = getattr(opt, 'use_qt4_osxframework', True)
-
-       paths = []
-
-       # the path to qmake has been given explicitely
-       if qtbin:
-               paths = [qtbin]
-
-       # the qt directory has been given - we deduce the qt binary path
-       if not qtdir:
-               qtdir = conf.environ.get('QT4_ROOT', '')
-               qtbin = os.path.join(qtdir, 'bin')
-               paths = [qtbin]
-
-       # no qtdir, look in the path and in /usr/local/Trolltech
-       if not qtdir:
-               paths = os.environ.get('PATH', '').split(os.pathsep)
-               paths.append('/usr/share/qt4/bin/')
-               try:
-                       lst = os.listdir('/usr/local/Trolltech/')
-               except OSError:
-                       pass
-               else:
-                       if lst:
-                               lst.sort()
-                               lst.reverse()
-
-                               # keep the highest version
-                               qtdir = '/usr/local/Trolltech/%s/' % lst[0]
-                               qtbin = os.path.join(qtdir, 'bin')
-                               paths.append(qtbin)
-
-       # at the end, try to find qmake in the paths given
-       # keep the one with the highest version
-       cand = None
-       prev_ver = ['4', '0', '0']
-       for qmk in ['qmake-qt4', 'qmake4', 'qmake']:
-               qmake = conf.find_program(qmk, path_list=paths)
-               if qmake:
-                       try:
-                               version = Utils.cmd_output([qmake, '-query', 'QT_VERSION']).strip()
-                       except ValueError:
-                               pass
-                       else:
-                               if version:
-                                       new_ver = version.split('.')
-                                       if new_ver > prev_ver:
-                                               cand = qmake
-                                               prev_ver = new_ver
-       if cand:
-               qmake = cand
-       else:
-               conf.fatal('could not find qmake for qt4')
-
-       conf.env.QMAKE = qmake
-       qtincludes = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_HEADERS']).strip()
-       qtdir = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_PREFIX']).strip() + os.sep
-       qtbin = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_BINS']).strip() + os.sep
-
-       if not qtlibs:
-               try:
-                       qtlibs = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_LIBS']).strip() + os.sep
-               except ValueError:
-                       qtlibs = os.path.join(qtdir, 'lib')
-
-       def find_bin(lst, var):
-               for f in lst:
-                       ret = conf.find_program(f, path_list=paths)
-                       if ret:
-                               env[var]=ret
-                               break
-
-       vars = "QtCore QtGui QtUiTools QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtWebKit Qt3Support".split()
-
-       find_bin(['uic-qt3', 'uic3'], 'QT_UIC3')
-       find_bin(['uic-qt4', 'uic'], 'QT_UIC')
-       if not env['QT_UIC']:
-               conf.fatal('cannot find the uic compiler for qt4')
-
-       try:
-               version = Utils.cmd_output(env['QT_UIC'] + " -version 2>&1").strip()
-       except ValueError:
-               conf.fatal('your uic compiler is for qt3, add uic for qt4 to your path')
-
-       version = version.replace('Qt User Interface Compiler ','')
-       version = version.replace('User Interface Compiler for Qt', '')
-       if version.find(" 3.") != -1:
-               conf.check_message('uic version', '(too old)', 0, option='(%s)'%version)
-               sys.exit(1)
-       conf.check_message('uic version', '', 1, option='(%s)'%version)
-
-       find_bin(['moc-qt4', 'moc'], 'QT_MOC')
-       find_bin(['rcc'], 'QT_RCC')
-       find_bin(['lrelease-qt4', 'lrelease'], 'QT_LRELEASE')
-       find_bin(['lupdate-qt4', 'lupdate'], 'QT_LUPDATE')
-
-       env['UIC3_ST']= '%s -o %s'
-       env['UIC_ST'] = '%s -o %s'
-       env['MOC_ST'] = '-o'
-       env['ui_PATTERN'] = 'ui_%s.h'
-       env['QT_LRELEASE_FLAGS'] = ['-silent']
-
-       vars_debug = [a+'_debug' for a in vars]
-
-       try:
-               conf.find_program('pkg-config', var='pkgconfig', path_list=paths, mandatory=True)
-
-       except Configure.ConfigurationError:
-
-               for lib in vars_debug+vars:
-                       uselib = lib.upper()
-
-                       d = (lib.find('_debug') > 0) and 'd' or ''
-
-                       # original author seems to prefer static to shared libraries
-                       for (pat, kind) in ((conf.env.staticlib_PATTERN, 'STATIC'), (conf.env.shlib_PATTERN, '')):
-
-                               conf.check_message_1('Checking for %s %s' % (lib, kind))
-
-                               for ext in ['', '4']:
-                                       path = os.path.join(qtlibs, pat % (lib + d + ext))
-                                       if os.path.exists(path):
-                                               env.append_unique(kind + 'LIB_' + uselib, lib + d + ext)
-                                               conf.check_message_2('ok ' + path, 'GREEN')
-                                               break
-                                       path = os.path.join(qtbin, pat % (lib + d + ext))
-                                       if os.path.exists(path):
-                                               env.append_unique(kind + 'LIB_' + uselib, lib + d + ext)
-                                               conf.check_message_2('ok ' + path, 'GREEN')
-                                               break
-                               else:
-                                       conf.check_message_2('not found', 'YELLOW')
-                                       continue
-                               break
-
-                       env.append_unique('LIBPATH_' + uselib, qtlibs)
-                       env.append_unique('CPPPATH_' + uselib, qtincludes)
-                       env.append_unique('CPPPATH_' + uselib, qtincludes + os.sep + lib)
-       else:
-               for i in vars_debug+vars:
-                       try:
-                               conf.check_cfg(package=i, args='--cflags --libs --silence-errors', path=conf.env.pkgconfig)
-                       except ValueError:
-                               pass
-
-       # the libpaths are set nicely, unfortunately they make really long command-lines
-       # remove the qtcore ones from qtgui, etc
-       def process_lib(vars_, coreval):
-               for d in vars_:
-                       var = d.upper()
-                       if var == 'QTCORE': continue
-
-                       value = env['LIBPATH_'+var]
-                       if value:
-                               core = env[coreval]
-                               accu = []
-                               for lib in value:
-                                       if lib in core: continue
-                                       accu.append(lib)
-                               env['LIBPATH_'+var] = accu
-
-       process_lib(vars, 'LIBPATH_QTCORE')
-       process_lib(vars_debug, 'LIBPATH_QTCORE_DEBUG')
-
-       # rpath if wanted
-       want_rpath = getattr(Options.options, 'want_rpath', 1)
-       if want_rpath:
-               def process_rpath(vars_, coreval):
-                       for d in vars_:
-                               var = d.upper()
-                               value = env['LIBPATH_'+var]
-                               if value:
-                                       core = env[coreval]
-                                       accu = []
-                                       for lib in value:
-                                               if var != 'QTCORE':
-                                                       if lib in core:
-                                                               continue
-                                               accu.append('-Wl,--rpath='+lib)
-                                       env['RPATH_'+var] = accu
-               process_rpath(vars, 'LIBPATH_QTCORE')
-               process_rpath(vars_debug, 'LIBPATH_QTCORE_DEBUG')
-
-       env['QTLOCALE'] = str(env['PREFIX'])+'/share/locale'
-
-def detect(conf):
-       detect_qt4(conf)
-
-def set_options(opt):
-       opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]')
-
-       opt.add_option('--header-ext',
-               type='string',
-               default='',
-               help='header extension for moc files',
-               dest='qt_header_ext')
-
-       for i in 'qtdir qtbin qtlibs'.split():
-               opt.add_option('--'+i, type='string', default='', dest=i)
-
-       if sys.platform == "darwin":
-               opt.add_option('--no-qt4-framework', action="store_false", help='do not use the framework version of Qt4 in OS X', dest='use_qt4_osxframework',default=True)
-
-       opt.add_option('--translate', action="store_true", help="collect translation strings", dest="trans_qt4", default=False)
diff --git a/third_party/waf/wafadmin/Tools/ruby.py b/third_party/waf/wafadmin/Tools/ruby.py
deleted file mode 100644 (file)
index afa8a59..0000000
+++ /dev/null
@@ -1,119 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# daniel.svensson at purplescout.se 2008
-
-import os
-import Task, Options, Utils
-from TaskGen import before, feature, after
-from Configure import conf
-
-@feature('rubyext')
-@before('apply_incpaths', 'apply_type_vars', 'apply_lib_vars', 'apply_bundle')
-@after('default_cc', 'vars_target_cshlib')
-def init_rubyext(self):
-       self.default_install_path = '${ARCHDIR_RUBY}'
-       self.uselib = self.to_list(getattr(self, 'uselib', ''))
-       if not 'RUBY' in self.uselib:
-               self.uselib.append('RUBY')
-       if not 'RUBYEXT' in self.uselib:
-               self.uselib.append('RUBYEXT')
-
-@feature('rubyext')
-@before('apply_link')
-def apply_ruby_so_name(self):
-       self.env['shlib_PATTERN'] = self.env['rubyext_PATTERN']
-
-@conf
-def check_ruby_version(conf, minver=()):
-       """
-       Checks if ruby is installed.
-       If installed the variable RUBY will be set in environment.
-       Ruby binary can be overridden by --with-ruby-binary config variable
-       """
-
-       if Options.options.rubybinary:
-               conf.env.RUBY = Options.options.rubybinary
-       else:
-               conf.find_program("ruby", var="RUBY", mandatory=True)
-
-       ruby = conf.env.RUBY
-
-       try:
-               version = Utils.cmd_output([ruby, '-e', 'puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
-       except:
-               conf.fatal('could not determine ruby version')
-       conf.env.RUBY_VERSION = version
-
-       try:
-               ver = tuple(map(int, version.split(".")))
-       except:
-               conf.fatal('unsupported ruby version %r' % version)
-
-       cver = ''
-       if minver:
-               if ver < minver:
-                       conf.fatal('ruby is too old')
-               cver = ".".join([str(x) for x in minver])
-
-       conf.check_message('ruby', cver, True, version)
-
-@conf
-def check_ruby_ext_devel(conf):
-       if not conf.env.RUBY:
-               conf.fatal('ruby detection is required first')
-
-       if not conf.env.CC_NAME and not conf.env.CXX_NAME:
-               conf.fatal('load a c/c++ compiler first')
-
-       version = tuple(map(int, conf.env.RUBY_VERSION.split(".")))
-
-       def read_out(cmd):
-               return Utils.to_list(Utils.cmd_output([conf.env.RUBY, '-rrbconfig', '-e', cmd]))
-
-       def read_config(key):
-               return read_out('puts Config::CONFIG[%r]' % key)
-
-       ruby = conf.env['RUBY']
-       archdir = read_config('archdir')
-       cpppath = archdir
-       if version >= (1, 9, 0):
-               ruby_hdrdir = read_config('rubyhdrdir')
-               cpppath += ruby_hdrdir
-               cpppath += [os.path.join(ruby_hdrdir[0], read_config('arch')[0])]
-
-       conf.check(header_name='ruby.h', includes=cpppath, mandatory=True, errmsg='could not find ruby header file')
-
-       conf.env.LIBPATH_RUBYEXT = read_config('libdir')
-       conf.env.LIBPATH_RUBYEXT += archdir
-       conf.env.CPPPATH_RUBYEXT = cpppath
-       conf.env.CCFLAGS_RUBYEXT = read_config("CCDLFLAGS")
-       conf.env.rubyext_PATTERN = '%s.' + read_config('DLEXT')[0]
-
-       # ok this is really stupid, but the command and flags are combined.
-       # so we try to find the first argument...
-       flags = read_config('LDSHARED')
-       while flags and flags[0][0] != '-':
-               flags = flags[1:]
-
-       # we also want to strip out the deprecated ppc flags
-       if len(flags) > 1 and flags[1] == "ppc":
-               flags = flags[2:]
-
-       conf.env.LINKFLAGS_RUBYEXT = flags
-       conf.env.LINKFLAGS_RUBYEXT += read_config("LIBS")
-       conf.env.LINKFLAGS_RUBYEXT += read_config("LIBRUBYARG_SHARED")
-
-       if Options.options.rubyarchdir:
-               conf.env.ARCHDIR_RUBY = Options.options.rubyarchdir
-       else:
-               conf.env.ARCHDIR_RUBY = read_config('sitearchdir')[0]
-
-       if Options.options.rubylibdir:
-               conf.env.LIBDIR_RUBY = Options.options.rubylibdir
-       else:
-               conf.env.LIBDIR_RUBY = read_config('sitelibdir')[0]
-
-def set_options(opt):
-       opt.add_option('--with-ruby-archdir', type='string', dest='rubyarchdir', help='Specify directory where to install arch specific files')
-       opt.add_option('--with-ruby-libdir', type='string', dest='rubylibdir', help='Specify alternate ruby library path')
-       opt.add_option('--with-ruby-binary', type='string', dest='rubybinary', help='Specify alternate ruby binary')
diff --git a/third_party/waf/wafadmin/Tools/suncc.py b/third_party/waf/wafadmin/Tools/suncc.py
deleted file mode 100644 (file)
index b1a2aad..0000000
+++ /dev/null
@@ -1,76 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006 (ita)
-# Ralf Habacker, 2006 (rh)
-
-import os, optparse
-import Utils, Options, Configure
-import ccroot, ar
-from Configure import conftest
-
-@conftest
-def find_scc(conf):
-       v = conf.env
-       cc = None
-       if v['CC']: cc = v['CC']
-       elif 'CC' in conf.environ: cc = conf.environ['CC']
-       #if not cc: cc = conf.find_program('gcc', var='CC')
-       if not cc: cc = conf.find_program('cc', var='CC')
-       if not cc: conf.fatal('suncc was not found')
-       cc = conf.cmd_to_list(cc)
-
-       try:
-               if not Utils.cmd_output(cc + ['-flags']):
-                       conf.fatal('suncc %r was not found' % cc)
-       except ValueError:
-               conf.fatal('suncc -flags could not be executed')
-
-       v['CC']  = cc
-       v['CC_NAME'] = 'sun'
-
-@conftest
-def scc_common_flags(conf):
-       v = conf.env
-
-       # CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS
-
-       v['CC_SRC_F']            = ''
-       v['CC_TGT_F']            = ['-c', '-o', '']
-       v['CPPPATH_ST']          = '-I%s' # template for adding include paths
-
-       # linker
-       if not v['LINK_CC']: v['LINK_CC'] = v['CC']
-       v['CCLNK_SRC_F']         = ''
-       v['CCLNK_TGT_F']         = ['-o', ''] # solaris hack, separate the -o from the target
-
-       v['LIB_ST']              = '-l%s' # template for adding libs
-       v['LIBPATH_ST']          = '-L%s' # template for adding libpaths
-       v['STATICLIB_ST']        = '-l%s'
-       v['STATICLIBPATH_ST']    = '-L%s'
-       v['CCDEFINES_ST']        = '-D%s'
-
-       v['SONAME_ST']           = '-Wl,-h -Wl,%s'
-       v['SHLIB_MARKER']        = '-Bdynamic'
-       v['STATICLIB_MARKER']    = '-Bstatic'
-
-       # program
-       v['program_PATTERN']     = '%s'
-
-       # shared library
-       v['shlib_CCFLAGS']       = ['-Kpic', '-DPIC']
-       v['shlib_LINKFLAGS']     = ['-G']
-       v['shlib_PATTERN']       = 'lib%s.so'
-
-       # static lib
-       v['staticlib_LINKFLAGS'] = ['-Bstatic']
-       v['staticlib_PATTERN']   = 'lib%s.a'
-
-detect = '''
-find_scc
-find_cpp
-find_ar
-scc_common_flags
-cc_load_tools
-cc_add_flags
-link_add_flags
-'''
diff --git a/third_party/waf/wafadmin/Tools/suncxx.py b/third_party/waf/wafadmin/Tools/suncxx.py
deleted file mode 100644 (file)
index 8754b6c..0000000
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006 (ita)
-# Ralf Habacker, 2006 (rh)
-
-import os, optparse
-import Utils, Options, Configure
-import ccroot, ar
-from Configure import conftest
-
-@conftest
-def find_sxx(conf):
-       v = conf.env
-       cc = None
-       if v['CXX']: cc = v['CXX']
-       elif 'CXX' in conf.environ: cc = conf.environ['CXX']
-       if not cc: cc = conf.find_program('c++', var='CXX')
-       if not cc: conf.fatal('sunc++ was not found')
-       cc = conf.cmd_to_list(cc)
-
-       try:
-               if not Utils.cmd_output(cc + ['-flags']):
-                       conf.fatal('sunc++ %r was not found' % cc)
-       except ValueError:
-               conf.fatal('sunc++ -flags could not be executed')
-
-       v['CXX']  = cc
-       v['CXX_NAME'] = 'sun'
-
-@conftest
-def sxx_common_flags(conf):
-       v = conf.env
-
-       # CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS
-
-       v['CXX_SRC_F']           = ''
-       v['CXX_TGT_F']           = ['-c', '-o', '']
-       v['CPPPATH_ST']          = '-I%s' # template for adding include paths
-
-       # linker
-       if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
-       v['CXXLNK_SRC_F']        = ''
-       v['CXXLNK_TGT_F']        = ['-o', ''] # solaris hack, separate the -o from the target
-
-       v['LIB_ST']              = '-l%s' # template for adding libs
-       v['LIBPATH_ST']          = '-L%s' # template for adding libpaths
-       v['STATICLIB_ST']        = '-l%s'
-       v['STATICLIBPATH_ST']    = '-L%s'
-       v['CXXDEFINES_ST']       = '-D%s'
-
-       v['SONAME_ST']           = '-Wl,-h -Wl,%s'
-       v['SHLIB_MARKER']        = '-Bdynamic'
-       v['STATICLIB_MARKER']    = '-Bstatic'
-
-       # program
-       v['program_PATTERN']     = '%s'
-
-       # shared library
-       v['shlib_CXXFLAGS']      = ['-Kpic', '-DPIC']
-       v['shlib_LINKFLAGS']     = ['-G']
-       v['shlib_PATTERN']       = 'lib%s.so'
-
-       # static lib
-       v['staticlib_LINKFLAGS'] = ['-Bstatic']
-       v['staticlib_PATTERN']   = 'lib%s.a'
-
-detect = '''
-find_sxx
-find_cpp
-find_ar
-sxx_common_flags
-cxx_load_tools
-cxx_add_flags
-link_add_flags
-'''
diff --git a/third_party/waf/wafadmin/Tools/tex.py b/third_party/waf/wafadmin/Tools/tex.py
deleted file mode 100644 (file)
index 43aee1f..0000000
+++ /dev/null
@@ -1,250 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006 (ita)
-
-"TeX/LaTeX/PDFLaTeX support"
-
-import os, re
-import Utils, TaskGen, Task, Runner, Build
-from TaskGen import feature, before
-from Logs import error, warn, debug
-
-re_tex = re.compile(r'\\(?P<type>include|input|import|bringin|lstinputlisting){(?P<file>[^{}]*)}', re.M)
-def scan(self):
-       node = self.inputs[0]
-       env = self.env
-
-       nodes = []
-       names = []
-       if not node: return (nodes, names)
-
-       code = Utils.readf(node.abspath(env))
-
-       curdirnode = self.curdirnode
-       abs = curdirnode.abspath()
-       for match in re_tex.finditer(code):
-               path = match.group('file')
-               if path:
-                       for k in ['', '.tex', '.ltx']:
-                               # add another loop for the tex include paths?
-                               debug('tex: trying %s%s' % (path, k))
-                               try:
-                                       os.stat(abs+os.sep+path+k)
-                               except OSError:
-                                       continue
-                               found = path+k
-                               node = curdirnode.find_resource(found)
-                               if node:
-                                       nodes.append(node)
-                       else:
-                               debug('tex: could not find %s' % path)
-                               names.append(path)
-
-       debug("tex: found the following : %s and names %s" % (nodes, names))
-       return (nodes, names)
-
-latex_fun, _ = Task.compile_fun('latex', '${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False)
-pdflatex_fun, _ = Task.compile_fun('pdflatex', '${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False)
-bibtex_fun, _ = Task.compile_fun('bibtex', '${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}', shell=False)
-makeindex_fun, _ = Task.compile_fun('bibtex', '${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}', shell=False)
-
-g_bibtex_re = re.compile('bibdata', re.M)
-def tex_build(task, command='LATEX'):
-       env = task.env
-       bld = task.generator.bld
-
-       if not env['PROMPT_LATEX']:
-               env.append_value('LATEXFLAGS', '-interaction=batchmode')
-               env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
-
-       fun = latex_fun
-       if command == 'PDFLATEX':
-               fun = pdflatex_fun
-
-       node = task.inputs[0]
-       reldir  = node.bld_dir(env)
-
-       #lst = []
-       #for c in Utils.split_path(reldir):
-       #       if c: lst.append('..')
-       #srcfile = os.path.join(*(lst + [node.srcpath(env)]))
-       #sr2 = os.path.join(*(lst + [node.parent.srcpath(env)]))
-       srcfile = node.abspath(env)
-       sr2 = node.parent.abspath() + os.pathsep + node.parent.abspath(env) + os.pathsep
-
-       aux_node = node.change_ext('.aux')
-       idx_node = node.change_ext('.idx')
-
-       nm = aux_node.name
-       docuname = nm[ : len(nm) - 4 ] # 4 is the size of ".aux"
-
-       # important, set the cwd for everybody
-       task.cwd = task.inputs[0].parent.abspath(task.env)
-
-
-       warn('first pass on %s' % command)
-
-       task.env.env = {'TEXINPUTS': sr2}
-       task.env.SRCFILE = srcfile
-       ret = fun(task)
-       if ret:
-               return ret
-
-       # look in the .aux file if there is a bibfile to process
-       try:
-               ct = Utils.readf(aux_node.abspath(env))
-       except (OSError, IOError):
-               error('error bibtex scan')
-       else:
-               fo = g_bibtex_re.findall(ct)
-
-               # there is a .aux file to process
-               if fo:
-                       warn('calling bibtex')
-
-                       task.env.env = {'BIBINPUTS': sr2, 'BSTINPUTS': sr2}
-                       task.env.SRCFILE = docuname
-                       ret = bibtex_fun(task)
-                       if ret:
-                               error('error when calling bibtex %s' % docuname)
-                               return ret
-
-       # look on the filesystem if there is a .idx file to process
-       try:
-               idx_path = idx_node.abspath(env)
-               os.stat(idx_path)
-       except OSError:
-               error('error file.idx scan')
-       else:
-               warn('calling makeindex')
-
-               task.env.SRCFILE = idx_node.name
-               task.env.env = {}
-               ret = makeindex_fun(task)
-               if ret:
-                       error('error when calling makeindex %s' % idx_path)
-                       return ret
-
-
-       hash = ''
-       i = 0
-       while i < 10:
-               # prevent against infinite loops - one never knows
-               i += 1
-
-               # watch the contents of file.aux
-               prev_hash = hash
-               try:
-                       hash = Utils.h_file(aux_node.abspath(env))
-               except KeyError:
-                       error('could not read aux.h -> %s' % aux_node.abspath(env))
-                       pass
-
-               # debug
-               #print "hash is, ", hash, " ", old_hash
-
-               # stop if file.aux does not change anymore
-               if hash and hash == prev_hash:
-                       break
-
-               # run the command
-               warn('calling %s' % command)
-
-               task.env.env = {'TEXINPUTS': sr2 + os.pathsep}
-               task.env.SRCFILE = srcfile
-               ret = fun(task)
-               if ret:
-                       error('error when calling %s %s' % (command, latex_fun))
-                       return ret
-
-       return None # ok
-
-latex_vardeps  = ['LATEX', 'LATEXFLAGS']
-def latex_build(task):
-       return tex_build(task, 'LATEX')
-
-pdflatex_vardeps  = ['PDFLATEX', 'PDFLATEXFLAGS']
-def pdflatex_build(task):
-       return tex_build(task, 'PDFLATEX')
-
-class tex_taskgen(TaskGen.task_gen):
-       def __init__(self, *k, **kw):
-               TaskGen.task_gen.__init__(self, *k, **kw)
-
-@feature('tex')
-@before('apply_core')
-def apply_tex(self):
-       if not getattr(self, 'type', None) in ['latex', 'pdflatex']:
-               self.type = 'pdflatex'
-
-       tree = self.bld
-       outs = Utils.to_list(getattr(self, 'outs', []))
-
-       # prompt for incomplete files (else the batchmode is used)
-       self.env['PROMPT_LATEX'] = getattr(self, 'prompt', 1)
-
-       deps_lst = []
-
-       if getattr(self, 'deps', None):
-               deps = self.to_list(self.deps)
-               for filename in deps:
-                       n = self.path.find_resource(filename)
-                       if not n in deps_lst: deps_lst.append(n)
-
-       self.source = self.to_list(self.source)
-       for filename in self.source:
-               base, ext = os.path.splitext(filename)
-
-               node = self.path.find_resource(filename)
-               if not node: raise Utils.WafError('cannot find %s' % filename)
-
-               if self.type == 'latex':
-                       task = self.create_task('latex', node, node.change_ext('.dvi'))
-               elif self.type == 'pdflatex':
-                       task = self.create_task('pdflatex', node, node.change_ext('.pdf'))
-
-               task.env = self.env
-               task.curdirnode = self.path
-
-               # add the manual dependencies
-               if deps_lst:
-                       variant = node.variant(self.env)
-                       try:
-                               lst = tree.node_deps[task.unique_id()]
-                               for n in deps_lst:
-                                       if not n in lst:
-                                               lst.append(n)
-                       except KeyError:
-                               tree.node_deps[task.unique_id()] = deps_lst
-
-               if self.type == 'latex':
-                       if 'ps' in outs:
-                               tsk = self.create_task('dvips', task.outputs, node.change_ext('.ps'))
-                               tsk.env.env = {'TEXINPUTS' : node.parent.abspath() + os.pathsep + self.path.abspath() + os.pathsep + self.path.abspath(self.env)}
-                       if 'pdf' in outs:
-                               tsk = self.create_task('dvipdf', task.outputs, node.change_ext('.pdf'))
-                               tsk.env.env = {'TEXINPUTS' : node.parent.abspath() + os.pathsep + self.path.abspath() + os.pathsep + self.path.abspath(self.env)}
-               elif self.type == 'pdflatex':
-                       if 'ps' in outs:
-                               self.create_task('pdf2ps', task.outputs, node.change_ext('.ps'))
-       self.source = []
-
-def detect(conf):
-       v = conf.env
-       for p in 'tex latex pdflatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'.split():
-               conf.find_program(p, var=p.upper())
-               v[p.upper()+'FLAGS'] = ''
-       v['DVIPSFLAGS'] = '-Ppdf'
-
-b = Task.simple_task_type
-b('tex', '${TEX} ${TEXFLAGS} ${SRC}', color='BLUE', shell=False) # not used anywhere
-b('bibtex', '${BIBTEX} ${BIBTEXFLAGS} ${SRC}', color='BLUE', shell=False) # not used anywhere
-b('dvips', '${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}', color='BLUE', after="latex pdflatex tex bibtex", shell=False)
-b('dvipdf', '${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}', color='BLUE', after="latex pdflatex tex bibtex", shell=False)
-b('pdf2ps', '${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}', color='BLUE', after="dvipdf pdflatex", shell=False)
-
-b = Task.task_type_from_func
-cls = b('latex', latex_build, vars=latex_vardeps)
-cls.scan = scan
-cls = b('pdflatex', pdflatex_build, vars=pdflatex_vardeps)
-cls.scan = scan
diff --git a/third_party/waf/wafadmin/Tools/unittestw.py b/third_party/waf/wafadmin/Tools/unittestw.py
deleted file mode 100644 (file)
index 7cf2ded..0000000
+++ /dev/null
@@ -1,308 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Carlos Rafael Giani, 2006
-
-"""
-Unit tests run in the shutdown() method, and for c/c++ programs
-
-One should NOT have to give parameters to programs to execute
-
-In the shutdown method, add the following code:
-
-       >>> def shutdown():
-       ...     ut = UnitTest.unit_test()
-       ...     ut.run()
-       ...     ut.print_results()
-
-
-Each object to use as a unit test must be a program and must have X{obj.unit_test=1}
-"""
-import os, sys
-import Build, TaskGen, Utils, Options, Logs, Task
-from TaskGen import before, after, feature
-from Constants import *
-
-class unit_test(object):
-       "Unit test representation"
-       def __init__(self):
-               self.returncode_ok = 0          # Unit test returncode considered OK. All returncodes differing from this one
-                                               # will cause the unit test to be marked as "FAILED".
-
-               # The following variables are filled with data by run().
-
-               # print_results() uses these for printing the unit test summary,
-               # but if there is need for direct access to the results,
-               # they can be retrieved here, after calling run().
-
-               self.num_tests_ok = 0           # Number of successful unit tests
-               self.num_tests_failed = 0       # Number of failed unit tests
-               self.num_tests_err = 0          # Tests that have not even run
-               self.total_num_tests = 0        # Total amount of unit tests
-               self.max_label_length = 0       # Maximum label length (pretty-print the output)
-
-               self.unit_tests = Utils.ordered_dict()          # Unit test dictionary. Key: the label (unit test filename relative
-                                               # to the build dir), value: unit test filename with absolute path
-               self.unit_test_results = {}     # Dictionary containing the unit test results.
-                                               # Key: the label, value: result (true = success false = failure)
-               self.unit_test_erroneous = {}   # Dictionary indicating erroneous unit tests.
-                                               # Key: the label, value: true = unit test has an error  false = unit test is ok
-               self.change_to_testfile_dir = False #True if the test file needs to be executed from the same dir
-               self.want_to_see_test_output = False #True to see the stdout from the testfile (for example check suites)
-               self.want_to_see_test_error = False #True to see the stderr from the testfile (for example check suites)
-               self.run_if_waf_does = 'check' #build was the old default
-
-       def run(self):
-               "Run the unit tests and gather results (note: no output here)"
-
-               self.num_tests_ok = 0
-               self.num_tests_failed = 0
-               self.num_tests_err = 0
-               self.total_num_tests = 0
-               self.max_label_length = 0
-
-               self.unit_tests = Utils.ordered_dict()
-               self.unit_test_results = {}
-               self.unit_test_erroneous = {}
-
-               ld_library_path = []
-
-               # If waf is not building, don't run anything
-               if not Options.commands[self.run_if_waf_does]: return
-
-               # Get the paths for the shared libraries, and obtain the unit tests to execute
-               for obj in Build.bld.all_task_gen:
-                       try:
-                               link_task = obj.link_task
-                       except AttributeError:
-                               pass
-                       else:
-                               lib_path = link_task.outputs[0].parent.abspath(obj.env)
-                               if lib_path not in ld_library_path:
-                                       ld_library_path.append(lib_path)
-
-                       unit_test = getattr(obj, 'unit_test', '')
-                       if unit_test and 'cprogram' in obj.features:
-                               try:
-                                       output = obj.path
-                                       filename = os.path.join(output.abspath(obj.env), obj.target)
-                                       srcdir = output.abspath()
-                                       label = os.path.join(output.bldpath(obj.env), obj.target)
-                                       self.max_label_length = max(self.max_label_length, len(label))
-                                       self.unit_tests[label] = (filename, srcdir)
-                               except KeyError:
-                                       pass
-               self.total_num_tests = len(self.unit_tests)
-               # Now run the unit tests
-               Utils.pprint('GREEN', 'Running the unit tests')
-               count = 0
-               result = 1
-
-               for label in self.unit_tests.allkeys:
-                       file_and_src = self.unit_tests[label]
-                       filename = file_and_src[0]
-                       srcdir = file_and_src[1]
-                       count += 1
-                       line = Build.bld.progress_line(count, self.total_num_tests, Logs.colors.GREEN, Logs.colors.NORMAL)
-                       if Options.options.progress_bar and line:
-                               sys.stderr.write(line)
-                               sys.stderr.flush()
-                       try:
-                               kwargs = {}
-                               kwargs['env'] = os.environ.copy()
-                               if self.change_to_testfile_dir:
-                                       kwargs['cwd'] = srcdir
-                               if not self.want_to_see_test_output:
-                                       kwargs['stdout'] = Utils.pproc.PIPE  # PIPE for ignoring output
-                               if not self.want_to_see_test_error:
-                                       kwargs['stderr'] = Utils.pproc.PIPE  # PIPE for ignoring output
-                               if ld_library_path:
-                                       v = kwargs['env']
-                                       def add_path(dct, path, var):
-                                               dct[var] = os.pathsep.join(Utils.to_list(path) + [os.environ.get(var, '')])
-                                       if sys.platform == 'win32':
-                                               add_path(v, ld_library_path, 'PATH')
-                                       elif sys.platform == 'darwin':
-                                               add_path(v, ld_library_path, 'DYLD_LIBRARY_PATH')
-                                               add_path(v, ld_library_path, 'LD_LIBRARY_PATH')
-                                       else:
-                                               add_path(v, ld_library_path, 'LD_LIBRARY_PATH')
-
-                               pp = Utils.pproc.Popen(filename, **kwargs)
-                               (out, err) = pp.communicate() # uh, and the output is ignored?? - fortunately this is going to disappear
-
-                               result = int(pp.returncode == self.returncode_ok)
-
-                               if result:
-                                       self.num_tests_ok += 1
-                               else:
-                                       self.num_tests_failed += 1
-
-                               self.unit_test_results[label] = result
-                               self.unit_test_erroneous[label] = 0
-                       except OSError:
-                               self.unit_test_erroneous[label] = 1
-                               self.num_tests_err += 1
-                       except KeyboardInterrupt:
-                               pass
-               if Options.options.progress_bar: sys.stdout.write(Logs.colors.cursor_on)
-
-       def print_results(self):
-               "Pretty-prints a summary of all unit tests, along with some statistics"
-
-               # If waf is not building, don't output anything
-               if not Options.commands[self.run_if_waf_does]: return
-
-               p = Utils.pprint
-               # Early quit if no tests were performed
-               if self.total_num_tests == 0:
-                       p('YELLOW', 'No unit tests present')
-                       return
-
-               for label in self.unit_tests.allkeys:
-                       filename = self.unit_tests[label]
-                       err = 0
-                       result = 0
-
-                       try: err = self.unit_test_erroneous[label]
-                       except KeyError: pass
-
-                       try: result = self.unit_test_results[label]
-                       except KeyError: pass
-
-                       n = self.max_label_length - len(label)
-                       if err: n += 4
-                       elif result: n += 7
-                       else: n += 3
-
-                       line = '%s %s' % (label, '.' * n)
-
-                       if err: p('RED', '%sERROR' % line)
-                       elif result: p('GREEN', '%sOK' % line)
-                       else: p('YELLOW', '%sFAILED' % line)
-
-               percentage_ok = float(self.num_tests_ok) / float(self.total_num_tests) * 100.0
-               percentage_failed = float(self.num_tests_failed) / float(self.total_num_tests) * 100.0
-               percentage_erroneous = float(self.num_tests_err) / float(self.total_num_tests) * 100.0
-
-               p('NORMAL', '''
-Successful tests:      %i (%.1f%%)
-Failed tests:          %i (%.1f%%)
-Erroneous tests:       %i (%.1f%%)
-
-Total number of tests: %i
-''' % (self.num_tests_ok, percentage_ok, self.num_tests_failed, percentage_failed,
-               self.num_tests_err, percentage_erroneous, self.total_num_tests))
-               p('GREEN', 'Unit tests finished')
-
-
-############################################################################################
-
-"""
-New unit test system
-
-The targets with feature 'test' are executed after they are built
-bld(features='cprogram cc test', ...)
-
-To display the results:
-import UnitTest
-bld.add_post_fun(UnitTest.summary)
-"""
-
-import threading
-testlock = threading.Lock()
-
-def set_options(opt):
-       opt.add_option('--alltests', action='store_true', default=True, help='Exec all unit tests', dest='all_tests')
-
-@feature('test')
-@after('apply_link', 'vars_target_cprogram')
-def make_test(self):
-       if not 'cprogram' in self.features:
-               Logs.error('test cannot be executed %s' % self)
-               return
-
-       self.default_install_path = None
-       self.create_task('utest', self.link_task.outputs)
-
-def exec_test(self):
-
-       status = 0
-
-       variant = self.env.variant()
-
-       filename = self.inputs[0].abspath(self.env)
-       self.ut_exec = getattr(self, 'ut_exec', [filename])
-       if getattr(self.generator, 'ut_fun', None):
-               self.generator.ut_fun(self)
-
-       try:
-               fu = getattr(self.generator.bld, 'all_test_paths')
-       except AttributeError:
-               fu = os.environ.copy()
-               self.generator.bld.all_test_paths = fu
-
-               lst = []
-               for obj in self.generator.bld.all_task_gen:
-                       link_task = getattr(obj, 'link_task', None)
-                       if link_task and link_task.env.variant() == variant:
-                               lst.append(link_task.outputs[0].parent.abspath(obj.env))
-
-               def add_path(dct, path, var):
-                       dct[var] = os.pathsep.join(Utils.to_list(path) + [os.environ.get(var, '')])
-
-               if sys.platform == 'win32':
-                       add_path(fu, lst, 'PATH')
-               elif sys.platform == 'darwin':
-                       add_path(fu, lst, 'DYLD_LIBRARY_PATH')
-                       add_path(fu, lst, 'LD_LIBRARY_PATH')
-               else:
-                       add_path(fu, lst, 'LD_LIBRARY_PATH')
-
-
-       cwd = getattr(self.generator, 'ut_cwd', '') or self.inputs[0].parent.abspath(self.env)
-       proc = Utils.pproc.Popen(self.ut_exec, cwd=cwd, env=fu, stderr=Utils.pproc.PIPE, stdout=Utils.pproc.PIPE)
-       (stdout, stderr) = proc.communicate()
-
-       tup = (filename, proc.returncode, stdout, stderr)
-       self.generator.utest_result = tup
-
-       testlock.acquire()
-       try:
-               bld = self.generator.bld
-               Logs.debug("ut: %r", tup)
-               try:
-                       bld.utest_results.append(tup)
-               except AttributeError:
-                       bld.utest_results = [tup]
-       finally:
-               testlock.release()
-
-cls = Task.task_type_from_func('utest', func=exec_test, color='PINK', ext_in='.bin')
-
-old = cls.runnable_status
-def test_status(self):
-       ret = old(self)
-       if ret == SKIP_ME and getattr(Options.options, 'all_tests', False):
-               return RUN_ME
-       return ret
-
-cls.runnable_status = test_status
-cls.quiet = 1
-
-def summary(bld):
-       lst = getattr(bld, 'utest_results', [])
-       if lst:
-               Utils.pprint('CYAN', 'execution summary')
-
-               total = len(lst)
-               tfail = len([x for x in lst if x[1]])
-
-               Utils.pprint('CYAN', '  tests that pass %d/%d' % (total-tfail, total))
-               for (f, code, out, err) in lst:
-                       if not code:
-                               Utils.pprint('CYAN', '    %s' % f)
-
-               Utils.pprint('CYAN', '  tests that fail %d/%d' % (tfail, total))
-               for (f, code, out, err) in lst:
-                       if code:
-                               Utils.pprint('CYAN', '    %s' % f)
diff --git a/third_party/waf/wafadmin/Tools/vala.py b/third_party/waf/wafadmin/Tools/vala.py
deleted file mode 100644 (file)
index df1d11b..0000000
+++ /dev/null
@@ -1,307 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Ali Sabil, 2007
-
-import os.path, shutil
-import Task, Runner, Utils, Logs, Build, Node, Options
-from TaskGen import extension, after, before
-
-EXT_VALA = ['.vala', '.gs']
-
-class valac_task(Task.Task):
-
-       vars = ("VALAC", "VALAC_VERSION", "VALAFLAGS")
-       before = ("cc", "cxx")
-
-       def run(self):
-               env = self.env
-               inputs = [a.srcpath(env) for a in self.inputs]
-               valac = env['VALAC']
-               vala_flags = env.get_flat('VALAFLAGS')
-               top_src = self.generator.bld.srcnode.abspath()
-               top_bld = self.generator.bld.srcnode.abspath(env)
-
-               if env['VALAC_VERSION'] > (0, 1, 6):
-                       cmd = [valac, '-C', '--quiet', vala_flags]
-               else:
-                       cmd = [valac, '-C', vala_flags]
-
-               if self.threading:
-                       cmd.append('--thread')
-
-               if self.profile:
-                       cmd.append('--profile=%s' % self.profile)
-
-               if self.target_glib:
-                       cmd.append('--target-glib=%s' % self.target_glib)
-
-               features = self.generator.features
-
-               if 'cshlib' in features or 'cstaticlib' in features:
-                       output_dir = self.outputs[0].bld_dir(env)
-                       cmd.append('--library ' + self.target)
-                       if env['VALAC_VERSION'] >= (0, 7, 0):
-                               for x in self.outputs:
-                                       if x.name.endswith('.h'):
-                                               cmd.append('--header ' + x.bldpath(self.env))
-                       cmd.append('--basedir ' + top_src)
-                       cmd.append('-d ' + top_bld)
-                       if env['VALAC_VERSION'] > (0, 7, 2) and hasattr(self, 'gir'):
-                               cmd.append('--gir=%s.gir' % self.gir)
-
-               else:
-                       output_dir = self.outputs[0].bld_dir(env)
-                       cmd.append('-d %s' % output_dir)
-
-               for vapi_dir in self.vapi_dirs:
-                       cmd.append('--vapidir=%s' % vapi_dir)
-
-               for package in self.packages:
-                       cmd.append('--pkg %s' % package)
-
-               for package in self.packages_private:
-                       cmd.append('--pkg %s' % package)
-
-               cmd.append(" ".join(inputs))
-               result = self.generator.bld.exec_command(" ".join(cmd))
-
-               if not 'cprogram' in features:
-                       # generate the .deps file
-                       if self.packages:
-                               filename = os.path.join(self.generator.path.abspath(env), "%s.deps" % self.target)
-                               deps = open(filename, 'w')
-                               for package in self.packages:
-                                       deps.write(package + '\n')
-                               deps.close()
-
-                       # handle vala 0.1.6 who doesn't honor --directory for the generated .vapi
-                       self._fix_output("../%s.vapi" % self.target)
-                       # handle vala >= 0.1.7 who has a weid definition for --directory
-                       self._fix_output("%s.vapi" % self.target)
-                       # handle vala >= 0.2.0 who doesn't honor --directory for the generated .gidl
-                       self._fix_output("%s.gidl" % self.target)
-                       # handle vala >= 0.3.6 who doesn't honor --directory for the generated .gir
-                       self._fix_output("%s.gir" % self.target)
-                       if hasattr(self, 'gir'):
-                               self._fix_output("%s.gir" % self.gir)
-
-               first = None
-               for node in self.outputs:
-                       if not first:
-                               first = node
-                       else:
-                               if first.parent.id != node.parent.id:
-                                       # issue #483
-                                       if env['VALAC_VERSION'] < (0, 7, 0):
-                                               shutil.move(first.parent.abspath(self.env) + os.sep + node.name, node.abspath(self.env))
-               return result
-
-       def install(self):
-               bld = self.generator.bld
-               features = self.generator.features
-
-               if self.attr("install_path") and ("cshlib" in features or "cstaticlib" in features):
-                       headers_list = [o for o in self.outputs if o.suffix() == ".h"]
-                       vapi_list = [o for o in self.outputs if (o.suffix() in (".vapi", ".deps"))]
-                       gir_list = [o for o in self.outputs if o.suffix() == ".gir"]
-
-                       for header in headers_list:
-                               top_src = self.generator.bld.srcnode
-                               package = self.env['PACKAGE']
-                               try:
-                                       api_version = Utils.g_module.API_VERSION
-                               except AttributeError:
-                                       version = Utils.g_module.VERSION.split(".")
-                                       if version[0] == "0":
-                                               api_version = "0." + version[1]
-                                       else:
-                                               api_version = version[0] + ".0"
-                               install_path = '${INCLUDEDIR}/%s-%s/%s' % (package, api_version, header.relpath_gen(top_src))
-                               bld.install_as(install_path, header, self.env)
-                       bld.install_files('${DATAROOTDIR}/vala/vapi', vapi_list, self.env)
-                       bld.install_files('${DATAROOTDIR}/gir-1.0', gir_list, self.env)
-
-       def _fix_output(self, output):
-               top_bld = self.generator.bld.srcnode.abspath(self.env)
-               try:
-                       src = os.path.join(top_bld, output)
-                       dst = self.generator.path.abspath (self.env)
-                       shutil.move(src, dst)
-               except:
-                       pass
-
-@extension(EXT_VALA)
-def vala_file(self, node):
-       valatask = getattr(self, "valatask", None)
-       # there is only one vala task and it compiles all vala files .. :-/
-       if not valatask:
-               valatask = self.create_task('valac')
-               self.valatask = valatask
-               self.includes = Utils.to_list(getattr(self, 'includes', []))
-               self.uselib = self.to_list(self.uselib)
-               valatask.packages = []
-               valatask.packages_private = Utils.to_list(getattr(self, 'packages_private', []))
-               valatask.vapi_dirs = []
-               valatask.target = self.target
-               valatask.threading = False
-               valatask.install_path = self.install_path
-               valatask.profile = getattr (self, 'profile', 'gobject')
-               valatask.target_glib = None #Deprecated
-
-               packages = Utils.to_list(getattr(self, 'packages', []))
-               vapi_dirs = Utils.to_list(getattr(self, 'vapi_dirs', []))
-               includes =  []
-
-               if hasattr(self, 'uselib_local'):
-                       local_packages = Utils.to_list(self.uselib_local)
-                       seen = []
-                       while len(local_packages) > 0:
-                               package = local_packages.pop()
-                               if package in seen:
-                                       continue
-                               seen.append(package)
-
-                               # check if the package exists
-                               package_obj = self.name_to_obj(package)
-                               if not package_obj:
-                                       raise Utils.WafError("object '%s' was not found in uselib_local (required by '%s')" % (package, self.name))
-
-                               package_name = package_obj.target
-                               package_node = package_obj.path
-                               package_dir = package_node.relpath_gen(self.path)
-
-                               for task in package_obj.tasks:
-                                       for output in task.outputs:
-                                               if output.name == package_name + ".vapi":
-                                                       valatask.set_run_after(task)
-                                                       if package_name not in packages:
-                                                               packages.append(package_name)
-                                                       if package_dir not in vapi_dirs:
-                                                               vapi_dirs.append(package_dir)
-                                                       if package_dir not in includes:
-                                                               includes.append(package_dir)
-
-                               if hasattr(package_obj, 'uselib_local'):
-                                       lst = self.to_list(package_obj.uselib_local)
-                                       lst.reverse()
-                                       local_packages = [pkg for pkg in lst if pkg not in seen] + local_packages
-
-               valatask.packages = packages
-               for vapi_dir in vapi_dirs:
-                       try:
-                               valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath())
-                               valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath(self.env))
-                       except AttributeError:
-                               Logs.warn("Unable to locate Vala API directory: '%s'" % vapi_dir)
-
-               self.includes.append(node.bld.srcnode.abspath())
-               self.includes.append(node.bld.srcnode.abspath(self.env))
-               for include in includes:
-                       try:
-                               self.includes.append(self.path.find_dir(include).abspath())
-                               self.includes.append(self.path.find_dir(include).abspath(self.env))
-                       except AttributeError:
-                               Logs.warn("Unable to locate include directory: '%s'" % include)
-
-               if valatask.profile == 'gobject':
-                       if hasattr(self, 'target_glib'):
-                               Logs.warn ('target_glib on vala tasks is deprecated --vala-target-glib=MAJOR.MINOR from the vala tool options')
-
-                       if getattr(Options.options, 'vala_target_glib', None):
-                               valatask.target_glib = Options.options.vala_target_glib
-
-                       if not 'GOBJECT' in self.uselib:
-                               self.uselib.append('GOBJECT')
-
-               if hasattr(self, 'threading'):
-                       if valatask.profile == 'gobject':
-                               valatask.threading = self.threading
-                               if not 'GTHREAD' in self.uselib:
-                                       self.uselib.append('GTHREAD')
-                       else:
-                               #Vala doesn't have threading support for dova nor posix
-                               Logs.warn("Profile %s does not have threading support" % valatask.profile)
-
-               if hasattr(self, 'gir'):
-                       valatask.gir = self.gir
-
-       env = valatask.env
-
-       output_nodes = []
-
-       c_node = node.change_ext('.c')
-       output_nodes.append(c_node)
-       self.allnodes.append(c_node)
-
-       if env['VALAC_VERSION'] < (0, 7, 0):
-               output_nodes.append(node.change_ext('.h'))
-       else:
-               if not 'cprogram' in self.features:
-                       output_nodes.append(self.path.find_or_declare('%s.h' % self.target))
-
-       if not 'cprogram' in self.features:
-               output_nodes.append(self.path.find_or_declare('%s.vapi' % self.target))
-               if env['VALAC_VERSION'] > (0, 7, 2):
-                       if hasattr(self, 'gir'):
-                               output_nodes.append(self.path.find_or_declare('%s.gir' % self.gir))
-               elif env['VALAC_VERSION'] > (0, 3, 5):
-                       output_nodes.append(self.path.find_or_declare('%s.gir' % self.target))
-               elif env['VALAC_VERSION'] > (0, 1, 7):
-                       output_nodes.append(self.path.find_or_declare('%s.gidl' % self.target))
-               if valatask.packages:
-                       output_nodes.append(self.path.find_or_declare('%s.deps' % self.target))
-
-       valatask.inputs.append(node)
-       valatask.outputs.extend(output_nodes)
-
-def detect(conf):
-       min_version = (0, 1, 6)
-       min_version_str = "%d.%d.%d" % min_version
-
-       valac = conf.find_program('valac', var='VALAC', mandatory=True)
-
-       if not conf.env["HAVE_GOBJECT"]:
-               pkg_args = {'package':      'gobject-2.0',
-                           'uselib_store': 'GOBJECT',
-                           'args':         '--cflags --libs'}
-               if getattr(Options.options, 'vala_target_glib', None):
-                       pkg_args['atleast_version'] = Options.options.vala_target_glib
-
-               conf.check_cfg(**pkg_args)
-
-       if not conf.env["HAVE_GTHREAD"]:
-               pkg_args = {'package':      'gthread-2.0',
-                           'uselib_store': 'GTHREAD',
-                           'args':         '--cflags --libs'}
-               if getattr(Options.options, 'vala_target_glib', None):
-                       pkg_args['atleast_version'] = Options.options.vala_target_glib
-
-               conf.check_cfg(**pkg_args)
-
-       try:
-               output = Utils.cmd_output(valac + " --version", silent=True)
-               version = output.split(' ', 1)[-1].strip().split(".")[0:3]
-               version = [int(x) for x in version]
-               valac_version = tuple(version)
-       except Exception:
-               valac_version = (0, 0, 0)
-
-       conf.check_message('program version',
-                       'valac >= ' + min_version_str,
-                       valac_version >= min_version,
-                       "%d.%d.%d" % valac_version)
-
-       conf.check_tool('gnu_dirs')
-
-       if valac_version < min_version:
-               conf.fatal("valac version too old to be used with this tool")
-               return
-
-       conf.env['VALAC_VERSION'] = valac_version
-       conf.env['VALAFLAGS'] = ''
-
-def set_options (opt):
-       valaopts = opt.add_option_group('Vala Compiler Options')
-       valaopts.add_option ('--vala-target-glib', default=None,
-                            dest='vala_target_glib', metavar='MAJOR.MINOR',
-                            help='Target version of glib for Vala GObject code generation')
diff --git a/third_party/waf/wafadmin/Tools/winres.py b/third_party/waf/wafadmin/Tools/winres.py
deleted file mode 100644 (file)
index 6b5aad0..0000000
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Brant Young, 2007
-
-"This hook is called when the class cpp/cc task generator encounters a '.rc' file: X{.rc -> [.res|.rc.o]}"
-
-import os, sys, re
-import TaskGen, Task
-from Utils import quote_whitespace
-from TaskGen import extension
-
-EXT_WINRC = ['.rc']
-
-winrc_str = '${WINRC} ${_CPPDEFFLAGS} ${_CCDEFFLAGS} ${WINRCFLAGS} ${_CPPINCFLAGS} ${_CCINCFLAGS} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}'
-
-@extension(EXT_WINRC)
-def rc_file(self, node):
-       obj_ext = '.rc.o'
-       if self.env['WINRC_TGT_F'] == '/fo': obj_ext = '.res'
-
-       rctask = self.create_task('winrc', node, node.change_ext(obj_ext))
-       self.compiled_tasks.append(rctask)
-
-# create our action, for use with rc file
-Task.simple_task_type('winrc', winrc_str, color='BLUE', before='cc cxx', shell=False)
-
-def detect(conf):
-       v = conf.env
-
-       winrc = v['WINRC']
-       v['WINRC_TGT_F'] = '-o'
-       v['WINRC_SRC_F'] = '-i'
-       # find rc.exe
-       if not winrc:
-               if v['CC_NAME'] in ['gcc', 'cc', 'g++', 'c++']:
-                       winrc = conf.find_program('windres', var='WINRC', path_list = v['PATH'])
-               elif v['CC_NAME'] == 'msvc':
-                       winrc = conf.find_program('RC', var='WINRC', path_list = v['PATH'])
-                       v['WINRC_TGT_F'] = '/fo'
-                       v['WINRC_SRC_F'] = ''
-       if not winrc:
-               conf.fatal('winrc was not found!')
-
-       v['WINRCFLAGS'] = ''
diff --git a/third_party/waf/wafadmin/Tools/xlc.py b/third_party/waf/wafadmin/Tools/xlc.py
deleted file mode 100644 (file)
index e33b7a1..0000000
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2008 (ita)
-# Ralf Habacker, 2006 (rh)
-# Yinon Ehrlich, 2009
-# Michael Kuhn, 2009
-
-import os, sys
-import Configure, Options, Utils
-import ccroot, ar
-from Configure import conftest
-
-@conftest
-def find_xlc(conf):
-       cc = conf.find_program(['xlc_r', 'xlc'], var='CC', mandatory=True)
-       cc = conf.cmd_to_list(cc)
-       conf.env.CC_NAME = 'xlc'
-       conf.env.CC      = cc
-
-@conftest
-def find_cpp(conf):
-       v = conf.env
-       cpp = None
-       if v['CPP']: cpp = v['CPP']
-       elif 'CPP' in conf.environ: cpp = conf.environ['CPP']
-       #if not cpp: cpp = v['CC']
-       v['CPP'] = cpp
-
-@conftest
-def xlc_common_flags(conf):
-       v = conf.env
-
-       # CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS
-       v['CCFLAGS_DEBUG'] = ['-g']
-       v['CCFLAGS_RELEASE'] = ['-O2']
-
-       v['CC_SRC_F']            = ''
-       v['CC_TGT_F']            = ['-c', '-o', ''] # shell hack for -MD
-       v['CPPPATH_ST']          = '-I%s' # template for adding include paths
-
-       # linker
-       if not v['LINK_CC']: v['LINK_CC'] = v['CC']
-       v['CCLNK_SRC_F']         = ''
-       v['CCLNK_TGT_F']         = ['-o', ''] # shell hack for -MD
-
-       v['LIB_ST']              = '-l%s' # template for adding libs
-       v['LIBPATH_ST']          = '-L%s' # template for adding libpaths
-       v['STATICLIB_ST']        = '-l%s'
-       v['STATICLIBPATH_ST']    = '-L%s'
-       v['RPATH_ST']            = '-Wl,-rpath,%s'
-       v['CCDEFINES_ST']        = '-D%s'
-
-       v['SONAME_ST']           = ''
-       v['SHLIB_MARKER']        = ''
-       v['STATICLIB_MARKER']    = ''
-       v['FULLSTATIC_MARKER']   = '-static'
-
-       # program
-       v['program_LINKFLAGS']   = ['-Wl,-brtl']
-       v['program_PATTERN']     = '%s'
-
-       # shared library
-       v['shlib_CCFLAGS']       = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC aleady defines the __PIC__ macro
-       v['shlib_LINKFLAGS']     = ['-G', '-Wl,-brtl,-bexpfull']
-       v['shlib_PATTERN']       = 'lib%s.so'
-
-       # static lib
-       v['staticlib_LINKFLAGS'] = ''
-       v['staticlib_PATTERN']   = 'lib%s.a'
-
-def detect(conf):
-       conf.find_xlc()
-       conf.find_cpp()
-       conf.find_ar()
-       conf.xlc_common_flags()
-       conf.cc_load_tools()
-       conf.cc_add_flags()
-       conf.link_add_flags()
diff --git a/third_party/waf/wafadmin/Tools/xlcxx.py b/third_party/waf/wafadmin/Tools/xlcxx.py
deleted file mode 100644 (file)
index 6e84662..0000000
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006 (ita)
-# Ralf Habacker, 2006 (rh)
-# Yinon Ehrlich, 2009
-# Michael Kuhn, 2009
-
-import os, sys
-import Configure, Options, Utils
-import ccroot, ar
-from Configure import conftest
-
-@conftest
-def find_xlcxx(conf):
-       cxx = conf.find_program(['xlc++_r', 'xlc++'], var='CXX', mandatory=True)
-       cxx = conf.cmd_to_list(cxx)
-       conf.env.CXX_NAME = 'xlc++'
-       conf.env.CXX      = cxx
-
-@conftest
-def find_cpp(conf):
-       v = conf.env
-       cpp = None
-       if v['CPP']: cpp = v['CPP']
-       elif 'CPP' in conf.environ: cpp = conf.environ['CPP']
-       #if not cpp: cpp = v['CXX']
-       v['CPP'] = cpp
-
-@conftest
-def xlcxx_common_flags(conf):
-       v = conf.env
-
-       # CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS
-       v['CXXFLAGS_DEBUG'] = ['-g']
-       v['CXXFLAGS_RELEASE'] = ['-O2']
-
-       v['CXX_SRC_F']           = ''
-       v['CXX_TGT_F']           = ['-c', '-o', ''] # shell hack for -MD
-       v['CPPPATH_ST']          = '-I%s' # template for adding include paths
-
-       # linker
-       if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
-       v['CXXLNK_SRC_F']        = ''
-       v['CXXLNK_TGT_F']        = ['-o', ''] # shell hack for -MD
-
-       v['LIB_ST']              = '-l%s' # template for adding libs
-       v['LIBPATH_ST']          = '-L%s' # template for adding libpaths
-       v['STATICLIB_ST']        = '-l%s'
-       v['STATICLIBPATH_ST']    = '-L%s'
-       v['RPATH_ST']            = '-Wl,-rpath,%s'
-       v['CXXDEFINES_ST']       = '-D%s'
-
-       v['SONAME_ST']           = ''
-       v['SHLIB_MARKER']        = ''
-       v['STATICLIB_MARKER']    = ''
-       v['FULLSTATIC_MARKER']   = '-static'
-
-       # program
-       v['program_LINKFLAGS']   = ['-Wl,-brtl']
-       v['program_PATTERN']     = '%s'
-
-       # shared library
-       v['shlib_CXXFLAGS']      = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC aleady defines the __PIC__ macro
-       v['shlib_LINKFLAGS']     = ['-G', '-Wl,-brtl,-bexpfull']
-       v['shlib_PATTERN']       = 'lib%s.so'
-
-       # static lib
-       v['staticlib_LINKFLAGS'] = ''
-       v['staticlib_PATTERN']   = 'lib%s.a'
-
-def detect(conf):
-       conf.find_xlcxx()
-       conf.find_cpp()
-       conf.find_ar()
-       conf.xlcxx_common_flags()
-       conf.cxx_load_tools()
-       conf.cxx_add_flags()
-       conf.link_add_flags()
diff --git a/third_party/waf/wafadmin/Utils.py b/third_party/waf/wafadmin/Utils.py
deleted file mode 100644 (file)
index abb46a7..0000000
+++ /dev/null
@@ -1,747 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005 (ita)
-
-"""
-Utilities, the stable ones are the following:
-
-* h_file: compute a unique value for a file (hash), it uses
-  the module fnv if it is installed (see waf/utils/fnv & http://code.google.com/p/waf/wiki/FAQ)
-  else, md5 (see the python docs)
-
-  For large projects (projects with more than 15000 files) or slow hard disks and filesystems (HFS)
-  it is possible to use a hashing based on the path and the size (may give broken cache results)
-  The method h_file MUST raise an OSError if the file is a folder
-
-       import stat
-       def h_file(filename):
-               st = os.lstat(filename)
-               if stat.S_ISDIR(st[stat.ST_MODE]): raise IOError('not a file')
-               m = Utils.md5()
-               m.update(str(st.st_mtime))
-               m.update(str(st.st_size))
-               m.update(filename)
-               return m.digest()
-
-       To replace the function in your project, use something like this:
-       import Utils
-       Utils.h_file = h_file
-
-* h_list
-* h_fun
-* get_term_cols
-* ordered_dict
-
-"""
-
-import os, sys, imp, string, errno, traceback, inspect, re, shutil, datetime, gc
-
-# In python 3.0 we can get rid of all this
-try: from UserDict import UserDict
-except ImportError: from collections import UserDict
-if sys.hexversion >= 0x2060000 or os.name == 'java':
-       import subprocess as pproc
-else:
-       import pproc
-import Logs
-from Constants import *
-
-try:
-       from collections import deque
-except ImportError:
-       class deque(list):
-               def popleft(self):
-                       return self.pop(0)
-
-is_win32 = sys.platform == 'win32'
-
-try:
-       # defaultdict in python 2.5
-       from collections import defaultdict as DefaultDict
-except ImportError:
-       class DefaultDict(dict):
-               def __init__(self, default_factory):
-                       super(DefaultDict, self).__init__()
-                       self.default_factory = default_factory
-               def __getitem__(self, key):
-                       try:
-                               return super(DefaultDict, self).__getitem__(key)
-                       except KeyError:
-                               value = self.default_factory()
-                               self[key] = value
-                               return value
-
-class WafError(Exception):
-       def __init__(self, *args):
-               self.args = args
-               try:
-                       self.stack = traceback.extract_stack()
-               except:
-                       pass
-               Exception.__init__(self, *args)
-       def __str__(self):
-               return str(len(self.args) == 1 and self.args[0] or self.args)
-
-class WscriptError(WafError):
-       def __init__(self, message, wscript_file=None):
-               if wscript_file:
-                       self.wscript_file = wscript_file
-                       self.wscript_line = None
-               else:
-                       try:
-                               (self.wscript_file, self.wscript_line) = self.locate_error()
-                       except:
-                               (self.wscript_file, self.wscript_line) = (None, None)
-
-               msg_file_line = ''
-               if self.wscript_file:
-                       msg_file_line = "%s:" % self.wscript_file
-                       if self.wscript_line:
-                               msg_file_line += "%s:" % self.wscript_line
-               err_message = "%s error: %s" % (msg_file_line, message)
-               WafError.__init__(self, err_message)
-
-       def locate_error(self):
-               stack = traceback.extract_stack()
-               stack.reverse()
-               for frame in stack:
-                       file_name = os.path.basename(frame[0])
-                       is_wscript = (file_name == WSCRIPT_FILE or file_name == WSCRIPT_BUILD_FILE)
-                       if is_wscript:
-                               return (frame[0], frame[1])
-               return (None, None)
-
-indicator = is_win32 and '\x1b[A\x1b[K%s%s%s\r' or '\x1b[K%s%s%s\r'
-
-try:
-       from fnv import new as md5
-       import Constants
-       Constants.SIG_NIL = 'signofnv'
-
-       def h_file(filename):
-               m = md5()
-               try:
-                       m.hfile(filename)
-                       x = m.digest()
-                       if x is None: raise OSError("not a file")
-                       return x
-               except SystemError:
-                       raise OSError("not a file" + filename)
-
-except ImportError:
-       try:
-               try:
-                       from hashlib import md5
-               except ImportError:
-                       from md5 import md5
-
-               def h_file(filename):
-                       f = open(filename, 'rb')
-                       m = md5()
-                       while (filename):
-                               filename = f.read(100000)
-                               m.update(filename)
-                       f.close()
-                       return m.digest()
-       except ImportError:
-               # portability fixes may be added elsewhere (although, md5 should be everywhere by now)
-               md5 = None
-
-def readf(fname, m='r', encoding='ISO8859-1'):
-       """backported from waf 1.8"""
-       if sys.hexversion > 0x3000000 and not 'b' in m:
-               m += 'b'
-               f = open(fname, m)
-               try:
-                       txt = f.read()
-               finally:
-                       f.close()
-               if encoding:
-                       txt = txt.decode(encoding)
-               else:
-                       txt = txt.decode()
-       else:
-               f = open(fname, m)
-               try:
-                       txt = f.read()
-               finally:
-                       f.close()
-       return txt
-
-def writef(fname, data, m='w', encoding='ISO8859-1'):
-       """backported from waf 1.8"""
-       if sys.hexversion > 0x3000000 and not 'b' in m:
-               data = data.encode(encoding)
-               m += 'b'
-       f = open(fname, m)
-       try:
-               f.write(data)
-       finally:
-               f.close()
-
-class ordered_dict(UserDict):
-       def __init__(self, dict = None):
-               self.allkeys = []
-               UserDict.__init__(self, dict)
-
-       def __delitem__(self, key):
-               self.allkeys.remove(key)
-               UserDict.__delitem__(self, key)
-
-       def __setitem__(self, key, item):
-               if key not in self.allkeys: self.allkeys.append(key)
-               UserDict.__setitem__(self, key, item)
-
-def exec_command(s, **kw):
-       if 'log' in kw:
-               kw['stdout'] = kw['stderr'] = kw['log']
-               del(kw['log'])
-       kw['shell'] = isinstance(s, str)
-
-       try:
-               proc = pproc.Popen(s, **kw)
-               return proc.wait()
-       except OSError:
-               return -1
-
-if is_win32:
-       def exec_command(s, **kw):
-               if 'log' in kw:
-                       kw['stdout'] = kw['stderr'] = kw['log']
-                       del(kw['log'])
-               kw['shell'] = isinstance(s, str)
-
-               if len(s) > 2000:
-                       startupinfo = pproc.STARTUPINFO()
-                       startupinfo.dwFlags |= pproc.STARTF_USESHOWWINDOW
-                       kw['startupinfo'] = startupinfo
-
-               try:
-                       if 'stdout' not in kw:
-                               kw['stdout'] = pproc.PIPE
-                               kw['stderr'] = pproc.PIPE
-                               kw['universal_newlines'] = True
-                               proc = pproc.Popen(s,**kw)
-                               (stdout, stderr) = proc.communicate()
-                               Logs.info(stdout)
-                               if stderr:
-                                       Logs.error(stderr)
-                               return proc.returncode
-                       else:
-                               proc = pproc.Popen(s,**kw)
-                               return proc.wait()
-               except OSError:
-                       return -1
-
-listdir = os.listdir
-if is_win32:
-       def listdir_win32(s):
-               if re.match('^[A-Za-z]:$', s):
-                       # os.path.isdir fails if s contains only the drive name... (x:)
-                       s += os.sep
-               if not os.path.isdir(s):
-                       e = OSError()
-                       e.errno = errno.ENOENT
-                       raise e
-               return os.listdir(s)
-       listdir = listdir_win32
-
-def waf_version(mini = 0x010000, maxi = 0x100000):
-       "Halts if the waf version is wrong"
-       ver = HEXVERSION
-       try: min_val = mini + 0
-       except TypeError: min_val = int(mini.replace('.', '0'), 16)
-
-       if min_val > ver:
-               Logs.error("waf version should be at least %s (%s found)" % (mini, ver))
-               sys.exit(1)
-
-       try: max_val = maxi + 0
-       except TypeError: max_val = int(maxi.replace('.', '0'), 16)
-
-       if max_val < ver:
-               Logs.error("waf version should be at most %s (%s found)" % (maxi, ver))
-               sys.exit(1)
-
-def python_24_guard():
-       if sys.hexversion < 0x20400f0 or sys.hexversion >= 0x3000000:
-               raise ImportError("Waf requires Python >= 2.3 but the raw source requires Python 2.4, 2.5 or 2.6")
-
-def ex_stack():
-       exc_type, exc_value, tb = sys.exc_info()
-       if Logs.verbose > 1:
-               exc_lines = traceback.format_exception(exc_type, exc_value, tb)
-               return ''.join(exc_lines)
-       return str(exc_value)
-
-def to_list(sth):
-       if isinstance(sth, str):
-               return sth.split()
-       else:
-               return sth
-
-g_loaded_modules = {}
-"index modules by absolute path"
-
-g_module=None
-"the main module is special"
-
-def load_module(file_path, name=WSCRIPT_FILE):
-       "this function requires an absolute path"
-       try:
-               return g_loaded_modules[file_path]
-       except KeyError:
-               pass
-
-       module = imp.new_module(name)
-
-       try:
-               code = readf(file_path, m='rU')
-       except (IOError, OSError):
-               raise WscriptError('Could not read the file %r' % file_path)
-
-       module.waf_hash_val = code
-
-       dt = os.path.dirname(file_path)
-       sys.path.insert(0, dt)
-       try:
-               exec(compile(code, file_path, 'exec'), module.__dict__)
-       except Exception:
-               exc_type, exc_value, tb = sys.exc_info()
-               raise WscriptError("".join(traceback.format_exception(exc_type, exc_value, tb)), file_path)
-       sys.path.remove(dt)
-
-       g_loaded_modules[file_path] = module
-
-       return module
-
-def set_main_module(file_path):
-       "Load custom options, if defined"
-       global g_module
-       g_module = load_module(file_path, 'wscript_main')
-       g_module.root_path = file_path
-
-       try:
-               g_module.APPNAME
-       except:
-               g_module.APPNAME = 'noname'
-       try:
-               g_module.VERSION
-       except:
-               g_module.VERSION = '1.0'
-
-       # note: to register the module globally, use the following:
-       # sys.modules['wscript_main'] = g_module
-
-def to_hashtable(s):
-       "used for importing env files"
-       tbl = {}
-       lst = s.split('\n')
-       for line in lst:
-               if not line: continue
-               mems = line.split('=')
-               tbl[mems[0]] = mems[1]
-       return tbl
-
-def get_term_cols():
-       "console width"
-       return 80
-try:
-       import struct, fcntl, termios
-except ImportError:
-       pass
-else:
-       if Logs.got_tty:
-               def myfun():
-                       dummy_lines, cols = struct.unpack("HHHH", \
-                       fcntl.ioctl(sys.stderr.fileno(),termios.TIOCGWINSZ , \
-                       struct.pack("HHHH", 0, 0, 0, 0)))[:2]
-                       return cols
-               # we actually try the function once to see if it is suitable
-               try:
-                       myfun()
-               except:
-                       pass
-               else:
-                       get_term_cols = myfun
-
-rot_idx = 0
-rot_chr = ['\\', '|', '/', '-']
-"the rotation character in the progress bar"
-
-
-def split_path(path):
-       return path.split('/')
-
-def split_path_cygwin(path):
-       if path.startswith('//'):
-               ret = path.split('/')[2:]
-               ret[0] = '/' + ret[0]
-               return ret
-       return path.split('/')
-
-re_sp = re.compile('[/\\\\]')
-def split_path_win32(path):
-       if path.startswith('\\\\'):
-               ret = re.split(re_sp, path)[2:]
-               ret[0] = '\\' + ret[0]
-               return ret
-       return re.split(re_sp, path)
-
-if sys.platform == 'cygwin':
-       split_path = split_path_cygwin
-elif is_win32:
-       split_path = split_path_win32
-
-def copy_attrs(orig, dest, names, only_if_set=False):
-       for a in to_list(names):
-               u = getattr(orig, a, ())
-               if u or not only_if_set:
-                       setattr(dest, a, u)
-
-def def_attrs(cls, **kw):
-       '''
-       set attributes for class.
-       @param cls [any class]: the class to update the given attributes in.
-       @param kw [dictionary]: dictionary of attributes names and values.
-
-       if the given class hasn't one (or more) of these attributes, add the attribute with its value to the class.
-       '''
-       for k, v in kw.iteritems():
-               if not hasattr(cls, k):
-                       setattr(cls, k, v)
-
-def quote_define_name(path):
-       fu = re.compile("[^a-zA-Z0-9]").sub("_", path)
-       fu = fu.upper()
-       return fu
-
-def quote_whitespace(path):
-       return (path.strip().find(' ') > 0 and '"%s"' % path or path).replace('""', '"')
-
-def trimquotes(s):
-       if not s: return ''
-       s = s.rstrip()
-       if s[0] == "'" and s[-1] == "'": return s[1:-1]
-       return s
-
-def h_list(lst):
-       m = md5()
-       m.update(str(lst))
-       return m.digest()
-
-def h_fun(fun):
-       try:
-               return fun.code
-       except AttributeError:
-               try:
-                       h = inspect.getsource(fun)
-               except IOError:
-                       h = "nocode"
-               try:
-                       fun.code = h
-               except AttributeError:
-                       pass
-               return h
-
-def pprint(col, str, label='', sep='\n'):
-       "print messages in color"
-       sys.stderr.write("%s%s%s %s%s" % (Logs.colors(col), str, Logs.colors.NORMAL, label, sep))
-
-def check_dir(path):
-       """If a folder doesn't exists, create it."""
-       if not os.path.isdir(path):
-               try:
-                       os.makedirs(path)
-               except OSError, e:
-                       if not os.path.isdir(path):
-                               raise WafError("Cannot create the folder '%s' (error: %s)" % (path, e))
-
-def cmd_output(cmd, **kw):
-
-       silent = False
-       if 'silent' in kw:
-               silent = kw['silent']
-               del(kw['silent'])
-
-       if 'e' in kw:
-               tmp = kw['e']
-               del(kw['e'])
-               kw['env'] = tmp
-
-       kw['shell'] = isinstance(cmd, str)
-       kw['stdout'] = pproc.PIPE
-       if silent:
-               kw['stderr'] = pproc.PIPE
-
-       try:
-               p = pproc.Popen(cmd, **kw)
-               output = p.communicate()[0]
-       except OSError, e:
-               raise ValueError(str(e))
-
-       if p.returncode:
-               if not silent:
-                       msg = "command execution failed: %s -> %r" % (cmd, str(output))
-                       raise ValueError(msg)
-               output = ''
-       return output
-
-reg_subst = re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}")
-def subst_vars(expr, params):
-       "substitute ${PREFIX}/bin in /usr/local/bin"
-       def repl_var(m):
-               if m.group(1):
-                       return '\\'
-               if m.group(2):
-                       return '$'
-               try:
-                       # environments may contain lists
-                       return params.get_flat(m.group(3))
-               except AttributeError:
-                       return params[m.group(3)]
-       return reg_subst.sub(repl_var, expr)
-
-def unversioned_sys_platform_to_binary_format(unversioned_sys_platform):
-       "infers the binary format from the unversioned_sys_platform name."
-
-       if unversioned_sys_platform in ('linux', 'freebsd', 'netbsd', 'openbsd', 'sunos', 'gnu'):
-               return 'elf'
-       elif unversioned_sys_platform == 'darwin':
-               return 'mac-o'
-       elif unversioned_sys_platform in ('win32', 'cygwin', 'uwin', 'msys'):
-               return 'pe'
-       # TODO we assume all other operating systems are elf, which is not true.
-       # we may set this to 'unknown' and have ccroot and other tools handle the case "gracefully" (whatever that means).
-       return 'elf'
-
-def unversioned_sys_platform():
-       """returns an unversioned name from sys.platform.
-       sys.plaform is not very well defined and depends directly on the python source tree.
-       The version appended to the names is unreliable as it's taken from the build environment at the time python was built,
-       i.e., it's possible to get freebsd7 on a freebsd8 system.
-       So we remove the version from the name, except for special cases where the os has a stupid name like os2 or win32.
-       Some possible values of sys.platform are, amongst others:
-               aix3 aix4 atheos beos5 darwin freebsd2 freebsd3 freebsd4 freebsd5 freebsd6 freebsd7
-               generic gnu0 irix5 irix6 linux2 mac netbsd1 next3 os2emx riscos sunos5 unixware7
-       Investigating the python source tree may reveal more values.
-       """
-       s = sys.platform
-       if s == 'java':
-               # The real OS is hidden under the JVM.
-               from java.lang import System
-               s = System.getProperty('os.name')
-               # see http://lopica.sourceforge.net/os.html for a list of possible values
-               if s == 'Mac OS X':
-                       return 'darwin'
-               elif s.startswith('Windows '):
-                       return 'win32'
-               elif s == 'OS/2':
-                       return 'os2'
-               elif s == 'HP-UX':
-                       return 'hpux'
-               elif s in ('SunOS', 'Solaris'):
-                       return 'sunos'
-               else: s = s.lower()
-       if s == 'win32' or s.endswith('os2') and s != 'sunos2': return s
-       return re.split('\d+$', s)[0]
-
-#@deprecated('use unversioned_sys_platform instead')
-def detect_platform():
-       """this function has been in the Utils module for some time.
-       It's hard to guess what people have used it for.
-       It seems its goal is to return an unversionned sys.platform, but it's not handling all platforms.
-       For example, the version is not removed on freebsd and netbsd, amongst others.
-       """
-       s = sys.platform
-
-       # known POSIX
-       for x in 'cygwin linux irix sunos hpux aix darwin gnu'.split():
-               # sys.platform may be linux2
-               if s.find(x) >= 0:
-                       return x
-
-       # unknown POSIX
-       if os.name in 'posix java os2'.split():
-               return os.name
-
-       return s
-
-def load_tool(tool, tooldir=None):
-       '''
-       load_tool: import a Python module, optionally using several directories.
-       @param tool [string]: name of tool to import.
-       @param tooldir [list]: directories to look for the tool.
-       @return: the loaded module.
-
-       Warning: this function is not thread-safe: plays with sys.path,
-                                        so must run in sequence.
-       '''
-       if tooldir:
-               assert isinstance(tooldir, list)
-               sys.path = tooldir + sys.path
-       else:
-               tooldir = []
-       try:
-               return __import__(tool)
-       finally:
-               for dt in tooldir:
-                       sys.path.remove(dt)
-
-def nada(*k, **kw):
-       """A function that does nothing"""
-       pass
-
-def diff_path(top, subdir):
-       """difference between two absolute paths"""
-       top = os.path.normpath(top).replace('\\', '/').split('/')
-       subdir = os.path.normpath(subdir).replace('\\', '/').split('/')
-       if len(top) == len(subdir): return ''
-       diff = subdir[len(top) - len(subdir):]
-       return os.path.join(*diff)
-
-class Context(object):
-       """A base class for commands to be executed from Waf scripts"""
-
-       def set_curdir(self, dir):
-               self.curdir_ = dir
-
-       def get_curdir(self):
-               try:
-                       return self.curdir_
-               except AttributeError:
-                       self.curdir_ = os.getcwd()
-                       return self.get_curdir()
-
-       curdir = property(get_curdir, set_curdir)
-
-       def recurse(self, dirs, name=''):
-               """The function for calling scripts from folders, it tries to call wscript + function_name
-               and if that file does not exist, it will call the method 'function_name' from a file named wscript
-               the dirs can be a list of folders or a string containing space-separated folder paths
-               """
-               if not name:
-                       name = inspect.stack()[1][3]
-
-               if isinstance(dirs, str):
-                       dirs = to_list(dirs)
-
-               for x in dirs:
-                       if os.path.isabs(x):
-                               nexdir = x
-                       else:
-                               nexdir = os.path.join(self.curdir, x)
-
-                       base = os.path.join(nexdir, WSCRIPT_FILE)
-                       file_path = base + '_' + name
-
-                       try:
-                               txt = readf(file_path, m='rU')
-                       except (OSError, IOError):
-                               try:
-                                       module = load_module(base)
-                               except OSError:
-                                       raise WscriptError('No such script %s' % base)
-
-                               try:
-                                       f = module.__dict__[name]
-                               except KeyError:
-                                       raise WscriptError('No function %s defined in %s' % (name, base))
-
-                               if getattr(self.__class__, 'pre_recurse', None):
-                                       self.pre_recurse(f, base, nexdir)
-                               old = self.curdir
-                               self.curdir = nexdir
-                               try:
-                                       f(self)
-                               finally:
-                                       self.curdir = old
-                               if getattr(self.__class__, 'post_recurse', None):
-                                       self.post_recurse(module, base, nexdir)
-                       else:
-                               dc = {'ctx': self}
-                               if getattr(self.__class__, 'pre_recurse', None):
-                                       dc = self.pre_recurse(txt, file_path, nexdir)
-                               old = self.curdir
-                               self.curdir = nexdir
-                               try:
-                                       try:
-                                               exec(compile(txt, file_path, 'exec'), dc)
-                                       except Exception:
-                                               exc_type, exc_value, tb = sys.exc_info()
-                                               raise WscriptError("".join(traceback.format_exception(exc_type, exc_value, tb)), base)
-                               finally:
-                                       self.curdir = old
-                               if getattr(self.__class__, 'post_recurse', None):
-                                       self.post_recurse(txt, file_path, nexdir)
-
-if is_win32:
-       old = shutil.copy2
-       def copy2(src, dst):
-               old(src, dst)
-               shutil.copystat(src, src)
-       setattr(shutil, 'copy2', copy2)
-
-def zip_folder(dir, zip_file_name, prefix):
-       """
-       prefix represents the app to add in the archive
-       """
-       import zipfile
-       zip = zipfile.ZipFile(zip_file_name, 'w', compression=zipfile.ZIP_DEFLATED)
-       base = os.path.abspath(dir)
-
-       if prefix:
-               if prefix[-1] != os.sep:
-                       prefix += os.sep
-
-       n = len(base)
-       for root, dirs, files in os.walk(base):
-               for f in files:
-                       archive_name = prefix + root[n:] + os.sep + f
-                       zip.write(root + os.sep + f, archive_name, zipfile.ZIP_DEFLATED)
-       zip.close()
-
-def get_elapsed_time(start):
-       "Format a time delta (datetime.timedelta) using the format DdHhMmS.MSs"
-       delta = datetime.datetime.now() - start
-       # cast to int necessary for python 3.0
-       days = int(delta.days)
-       hours = int(delta.seconds / 3600)
-       minutes = int((delta.seconds - hours * 3600) / 60)
-       seconds = delta.seconds - hours * 3600 - minutes * 60 \
-               + float(delta.microseconds) / 1000 / 1000
-       result = ''
-       if days:
-               result += '%dd' % days
-       if days or hours:
-               result += '%dh' % hours
-       if days or hours or minutes:
-               result += '%dm' % minutes
-       return '%s%.3fs' % (result, seconds)
-
-if os.name == 'java':
-       # For Jython (they should really fix the inconsistency)
-       try:
-               gc.disable()
-               gc.enable()
-       except NotImplementedError:
-               gc.disable = gc.enable
-
-def run_once(fun):
-       """
-       decorator, make a function cache its results, use like this:
-
-       @run_once
-       def foo(k):
-               return 345*2343
-       """
-       cache = {}
-       def wrap(k):
-               try:
-                       return cache[k]
-               except KeyError:
-                       ret = fun(k)
-                       cache[k] = ret
-                       return ret
-       wrap.__cache__ = cache
-       return wrap
diff --git a/third_party/waf/wafadmin/__init__.py b/third_party/waf/wafadmin/__init__.py
deleted file mode 100644 (file)
index 01273cf..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005 (ita)
diff --git a/third_party/waf/wafadmin/ansiterm.py b/third_party/waf/wafadmin/ansiterm.py
deleted file mode 100644 (file)
index 2ec0b4c..0000000
+++ /dev/null
@@ -1,235 +0,0 @@
-import sys, os
-try:
-       if (not sys.stderr.isatty()) or (not sys.stdout.isatty()):
-               raise ValueError('not a tty')
-
-       from ctypes import *
-
-       class COORD(Structure):
-               _fields_ = [("X", c_short), ("Y", c_short)]
-
-       class SMALL_RECT(Structure):
-               _fields_ = [("Left", c_short), ("Top", c_short), ("Right", c_short), ("Bottom", c_short)]
-
-       class CONSOLE_SCREEN_BUFFER_INFO(Structure):
-               _fields_ = [("Size", COORD), ("CursorPosition", COORD), ("Attributes", c_short), ("Window", SMALL_RECT), ("MaximumWindowSize", COORD)]
-
-       class CONSOLE_CURSOR_INFO(Structure):
-               _fields_ = [('dwSize',c_ulong), ('bVisible', c_int)]
-
-       sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
-       csinfo = CONSOLE_CURSOR_INFO()
-       hconsole = windll.kernel32.GetStdHandle(-11)
-       windll.kernel32.GetConsoleScreenBufferInfo(hconsole, byref(sbinfo))
-       if sbinfo.Size.X < 10 or sbinfo.Size.Y < 10: raise Exception('small console')
-       windll.kernel32.GetConsoleCursorInfo(hconsole, byref(csinfo))
-except Exception:
-       pass
-else:
-       import re, threading
-
-       to_int = lambda number, default: number and int(number) or default
-       wlock = threading.Lock()
-
-       STD_OUTPUT_HANDLE = -11
-       STD_ERROR_HANDLE = -12
-
-       class AnsiTerm(object):
-               def __init__(self):
-                       self.hconsole = windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
-                       self.cursor_history = []
-                       self.orig_sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
-                       self.orig_csinfo = CONSOLE_CURSOR_INFO()
-                       windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(self.orig_sbinfo))
-                       windll.kernel32.GetConsoleCursorInfo(hconsole, byref(self.orig_csinfo))
-
-
-               def screen_buffer_info(self):
-                       sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
-                       windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(sbinfo))
-                       return sbinfo
-
-               def clear_line(self, param):
-                       mode = param and int(param) or 0
-                       sbinfo = self.screen_buffer_info()
-                       if mode == 1: # Clear from begining of line to cursor position
-                               line_start = COORD(0, sbinfo.CursorPosition.Y)
-                               line_length = sbinfo.Size.X
-                       elif mode == 2: # Clear entire line
-                               line_start = COORD(sbinfo.CursorPosition.X, sbinfo.CursorPosition.Y)
-                               line_length = sbinfo.Size.X - sbinfo.CursorPosition.X
-                       else: # Clear from cursor position to end of line
-                               line_start = sbinfo.CursorPosition
-                               line_length = sbinfo.Size.X - sbinfo.CursorPosition.X
-                       chars_written = c_int()
-                       windll.kernel32.FillConsoleOutputCharacterA(self.hconsole, c_char(' '), line_length, line_start, byref(chars_written))
-                       windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, line_length, line_start, byref(chars_written))
-
-               def clear_screen(self, param):
-                       mode = to_int(param, 0)
-                       sbinfo = self.screen_buffer_info()
-                       if mode == 1: # Clear from begining of screen to cursor position
-                               clear_start = COORD(0, 0)
-                               clear_length = sbinfo.CursorPosition.X * sbinfo.CursorPosition.Y
-                       elif mode == 2: # Clear entire screen and return cursor to home
-                               clear_start = COORD(0, 0)
-                               clear_length = sbinfo.Size.X * sbinfo.Size.Y
-                               windll.kernel32.SetConsoleCursorPosition(self.hconsole, clear_start)
-                       else: # Clear from cursor position to end of screen
-                               clear_start = sbinfo.CursorPosition
-                               clear_length = ((sbinfo.Size.X - sbinfo.CursorPosition.X) + sbinfo.Size.X * (sbinfo.Size.Y - sbinfo.CursorPosition.Y))
-                       chars_written = c_int()
-                       windll.kernel32.FillConsoleOutputCharacterA(self.hconsole, c_char(' '), clear_length, clear_start, byref(chars_written))
-                       windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, clear_length, clear_start, byref(chars_written))
-
-               def push_cursor(self, param):
-                       sbinfo = self.screen_buffer_info()
-                       self.cursor_history.push(sbinfo.CursorPosition)
-
-               def pop_cursor(self, param):
-                       if self.cursor_history:
-                               old_pos = self.cursor_history.pop()
-                               windll.kernel32.SetConsoleCursorPosition(self.hconsole, old_pos)
-
-               def set_cursor(self, param):
-                       x, sep, y = param.partition(';')
-                       x = to_int(x, 1) - 1
-                       y = to_int(y, 1) - 1
-                       sbinfo = self.screen_buffer_info()
-                       new_pos = COORD(
-                               min(max(0, x), sbinfo.Size.X),
-                               min(max(0, y), sbinfo.Size.Y)
-                       )
-                       windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
-
-               def set_column(self, param):
-                       x = to_int(param, 1) - 1
-                       sbinfo = self.screen_buffer_info()
-                       new_pos = COORD(
-                               min(max(0, x), sbinfo.Size.X),
-                               sbinfo.CursorPosition.Y
-                       )
-                       windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
-
-               def move_cursor(self, x_offset=0, y_offset=0):
-                       sbinfo = self.screen_buffer_info()
-                       new_pos = COORD(
-                               min(max(0, sbinfo.CursorPosition.X + x_offset), sbinfo.Size.X),
-                               min(max(0, sbinfo.CursorPosition.Y + y_offset), sbinfo.Size.Y)
-                       )
-                       windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
-
-               def move_up(self, param):
-                       self.move_cursor(y_offset = -to_int(param, 1))
-
-               def move_down(self, param):
-                       self.move_cursor(y_offset = to_int(param, 1))
-
-               def move_left(self, param):
-                       self.move_cursor(x_offset = -to_int(param, 1))
-
-               def move_right(self, param):
-                       self.move_cursor(x_offset = to_int(param, 1))
-
-               def next_line(self, param):
-                       sbinfo = self.screen_buffer_info()
-                       self.move_cursor(
-                               x_offset = -sbinfo.CursorPosition.X,
-                               y_offset = to_int(param, 1)
-                       )
-
-               def prev_line(self, param):
-                       sbinfo = self.screen_buffer_info()
-                       self.move_cursor(
-                               x_offset = -sbinfo.CursorPosition.X,
-                               y_offset = -to_int(param, 1)
-                       )
-
-               escape_to_color = { (0, 30): 0x0,                        #black
-                                                       (0, 31): 0x4,                    #red
-                                                       (0, 32): 0x2,                    #green
-                                                       (0, 33): 0x4+0x2,                #dark yellow
-                                                       (0, 34): 0x1,                    #blue
-                                                       (0, 35): 0x1+0x4,                #purple
-                                                       (0, 36): 0x2+0x4,                #cyan
-                                                       (0, 37): 0x1+0x2+0x4,    #grey
-                                                       (1, 30): 0x1+0x2+0x4,    #dark gray
-                                                       (1, 31): 0x4+0x8,                #red
-                                                       (1, 32): 0x2+0x8,                #light green
-                                                       (1, 33): 0x4+0x2+0x8,    #yellow
-                                                       (1, 34): 0x1+0x8,                #light blue
-                                                       (1, 35): 0x1+0x4+0x8,    #light purple
-                                                       (1, 36): 0x1+0x2+0x8,    #light cyan
-                                                       (1, 37): 0x1+0x2+0x4+0x8, #white
-                                                  }
-
-               def set_color(self, param):
-                       cols = param.split(';')
-                       attr = self.orig_sbinfo.Attributes
-                       for c in cols:
-                               c = to_int(c, 0)
-                               if c in range(30,38):
-                                       attr = (attr & 0xf0) | (self.escape_to_color.get((0,c), 0x7))
-                               elif c in range(40,48):
-                                       attr = (attr & 0x0f) | (self.escape_to_color.get((0,c), 0x7) << 8)
-                               elif c in range(90,98):
-                                       attr = (attr & 0xf0) | (self.escape_to_color.get((1,c-60), 0x7))
-                               elif c in range(100,108):
-                                       attr = (attr & 0x0f) | (self.escape_to_color.get((1,c-60), 0x7) << 8)
-                               elif c == 1:
-                                       attr |= 0x08
-                       windll.kernel32.SetConsoleTextAttribute(self.hconsole, attr)
-
-               def show_cursor(self,param):
-                       csinfo.bVisible = 1
-                       windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(csinfo))
-
-               def hide_cursor(self,param):
-                       csinfo.bVisible = 0
-                       windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(csinfo))
-
-               ansi_command_table = {
-                       'A': move_up,
-                       'B': move_down,
-                       'C': move_right,
-                       'D': move_left,
-                       'E': next_line,
-                       'F': prev_line,
-                       'G': set_column,
-                       'H': set_cursor,
-                       'f': set_cursor,
-                       'J': clear_screen,
-                       'K': clear_line,
-                       'h': show_cursor,
-                       'l': hide_cursor,
-                       'm': set_color,
-                       's': push_cursor,
-                       'u': pop_cursor,
-               }
-               # Match either the escape sequence or text not containing escape sequence
-               ansi_tokans = re.compile('(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))')
-               def write(self, text):
-                       try:
-                               wlock.acquire()
-                               for param, cmd, txt in self.ansi_tokans.findall(text):
-                                       if cmd:
-                                               cmd_func = self.ansi_command_table.get(cmd)
-                                               if cmd_func:
-                                                       cmd_func(self, param)
-                                       else:
-                                               chars_written = c_int()
-                                               if isinstance(txt, unicode):
-                                                       windll.kernel32.WriteConsoleW(self.hconsole, txt, len(txt), byref(chars_written), None)
-                                               else:
-                                                       windll.kernel32.WriteConsoleA(self.hconsole, txt, len(txt), byref(chars_written), None)
-                       finally:
-                               wlock.release()
-
-               def flush(self):
-                       pass
-
-               def isatty(self):
-                       return True
-
-       sys.stderr = sys.stdout = AnsiTerm()
-       os.environ['TERM'] = 'vt100'
diff --git a/third_party/waf/wafadmin/pproc.py b/third_party/waf/wafadmin/pproc.py
deleted file mode 100644 (file)
index 44b9dd2..0000000
+++ /dev/null
@@ -1,619 +0,0 @@
-# borrowed from python 2.5.2c1
-# Copyright (c) 2003-2005 by Peter Astrand <astrand@lysator.liu.se>
-# Licensed to PSF under a Contributor Agreement.
-
-import sys
-mswindows = (sys.platform == "win32")
-
-import os
-import types
-import traceback
-import gc
-
-class CalledProcessError(Exception):
-    def __init__(self, returncode, cmd):
-        self.returncode = returncode
-        self.cmd = cmd
-    def __str__(self):
-        return "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode)
-
-if mswindows:
-    import threading
-    import msvcrt
-    if 0:
-        import pywintypes
-        from win32api import GetStdHandle, STD_INPUT_HANDLE, \
-                             STD_OUTPUT_HANDLE, STD_ERROR_HANDLE
-        from win32api import GetCurrentProcess, DuplicateHandle, \
-                             GetModuleFileName, GetVersion
-        from win32con import DUPLICATE_SAME_ACCESS, SW_HIDE
-        from win32pipe import CreatePipe
-        from win32process import CreateProcess, STARTUPINFO, \
-                                 GetExitCodeProcess, STARTF_USESTDHANDLES, \
-                                 STARTF_USESHOWWINDOW, CREATE_NEW_CONSOLE
-        from win32event import WaitForSingleObject, INFINITE, WAIT_OBJECT_0
-    else:
-        from _subprocess import *
-        class STARTUPINFO:
-            dwFlags = 0
-            hStdInput = None
-            hStdOutput = None
-            hStdError = None
-            wShowWindow = 0
-        class pywintypes:
-            error = IOError
-else:
-    import select
-    import errno
-    import fcntl
-    import pickle
-
-__all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "CalledProcessError"]
-
-try:
-    MAXFD = os.sysconf("SC_OPEN_MAX")
-except:
-    MAXFD = 256
-
-try:
-    False
-except NameError:
-    False = 0
-    True = 1
-
-_active = []
-
-def _cleanup():
-    for inst in _active[:]:
-        if inst.poll(_deadstate=sys.maxint) >= 0:
-            try:
-                _active.remove(inst)
-            except ValueError:
-                pass
-
-PIPE = -1
-STDOUT = -2
-
-
-def call(*popenargs, **kwargs):
-    return Popen(*popenargs, **kwargs).wait()
-
-def check_call(*popenargs, **kwargs):
-    retcode = call(*popenargs, **kwargs)
-    cmd = kwargs.get("args")
-    if cmd is None:
-        cmd = popenargs[0]
-    if retcode:
-        raise CalledProcessError(retcode, cmd)
-    return retcode
-
-
-def list2cmdline(seq):
-    result = []
-    needquote = False
-    for arg in seq:
-        bs_buf = []
-
-        if result:
-            result.append(' ')
-
-        needquote = (" " in arg) or ("\t" in arg) or arg == ""
-        if needquote:
-            result.append('"')
-
-        for c in arg:
-            if c == '\\':
-                bs_buf.append(c)
-            elif c == '"':
-                result.append('\\' * len(bs_buf)*2)
-                bs_buf = []
-                result.append('\\"')
-            else:
-                if bs_buf:
-                    result.extend(bs_buf)
-                    bs_buf = []
-                result.append(c)
-
-        if bs_buf:
-            result.extend(bs_buf)
-
-        if needquote:
-            result.extend(bs_buf)
-            result.append('"')
-
-    return ''.join(result)
-
-class Popen(object):
-    def __init__(self, args, bufsize=0, executable=None,
-                 stdin=None, stdout=None, stderr=None,
-                 preexec_fn=None, close_fds=False, shell=False,
-                 cwd=None, env=None, universal_newlines=False,
-                 startupinfo=None, creationflags=0):
-        _cleanup()
-
-        self._child_created = False
-        if not isinstance(bufsize, (int, long)):
-            raise TypeError("bufsize must be an integer")
-
-        if mswindows:
-            if preexec_fn is not None:
-                raise ValueError("preexec_fn is not supported on Windows platforms")
-            if close_fds:
-                raise ValueError("close_fds is not supported on Windows platforms")
-        else:
-            if startupinfo is not None:
-                raise ValueError("startupinfo is only supported on Windows platforms")
-            if creationflags != 0:
-                raise ValueError("creationflags is only supported on Windows platforms")
-
-        self.stdin = None
-        self.stdout = None
-        self.stderr = None
-        self.pid = None
-        self.returncode = None
-        self.universal_newlines = universal_newlines
-
-        (p2cread, p2cwrite,
-         c2pread, c2pwrite,
-         errread, errwrite) = self._get_handles(stdin, stdout, stderr)
-
-        self._execute_child(args, executable, preexec_fn, close_fds,
-                            cwd, env, universal_newlines,
-                            startupinfo, creationflags, shell,
-                            p2cread, p2cwrite,
-                            c2pread, c2pwrite,
-                            errread, errwrite)
-
-        if mswindows:
-            if stdin is None and p2cwrite is not None:
-                os.close(p2cwrite)
-                p2cwrite = None
-            if stdout is None and c2pread is not None:
-                os.close(c2pread)
-                c2pread = None
-            if stderr is None and errread is not None:
-                os.close(errread)
-                errread = None
-
-        if p2cwrite:
-            self.stdin = os.fdopen(p2cwrite, 'wb', bufsize)
-        if c2pread:
-            if universal_newlines:
-                self.stdout = os.fdopen(c2pread, 'rU', bufsize)
-            else:
-                self.stdout = os.fdopen(c2pread, 'rb', bufsize)
-        if errread:
-            if universal_newlines:
-                self.stderr = os.fdopen(errread, 'rU', bufsize)
-            else:
-                self.stderr = os.fdopen(errread, 'rb', bufsize)
-
-
-    def _translate_newlines(self, data):
-        data = data.replace("\r\n", "\n")
-        data = data.replace("\r", "\n")
-        return data
-
-
-    def __del__(self, sys=sys):
-        if not self._child_created:
-            return
-        self.poll(_deadstate=sys.maxint)
-        if self.returncode is None and _active is not None:
-            _active.append(self)
-
-
-    def communicate(self, input=None):
-        if [self.stdin, self.stdout, self.stderr].count(None) >= 2:
-            stdout = None
-            stderr = None
-            if self.stdin:
-                if input:
-                    self.stdin.write(input)
-                self.stdin.close()
-            elif self.stdout:
-                stdout = self.stdout.read()
-            elif self.stderr:
-                stderr = self.stderr.read()
-            self.wait()
-            return (stdout, stderr)
-
-        return self._communicate(input)
-
-
-    if mswindows:
-        def _get_handles(self, stdin, stdout, stderr):
-            if stdin is None and stdout is None and stderr is None:
-                return (None, None, None, None, None, None)
-
-            p2cread, p2cwrite = None, None
-            c2pread, c2pwrite = None, None
-            errread, errwrite = None, None
-
-            if stdin is None:
-                p2cread = GetStdHandle(STD_INPUT_HANDLE)
-            if p2cread is not None:
-                pass
-            elif stdin is None or stdin == PIPE:
-                p2cread, p2cwrite = CreatePipe(None, 0)
-                p2cwrite = p2cwrite.Detach()
-                p2cwrite = msvcrt.open_osfhandle(p2cwrite, 0)
-            elif isinstance(stdin, int):
-                p2cread = msvcrt.get_osfhandle(stdin)
-            else:
-                p2cread = msvcrt.get_osfhandle(stdin.fileno())
-            p2cread = self._make_inheritable(p2cread)
-
-            if stdout is None:
-                c2pwrite = GetStdHandle(STD_OUTPUT_HANDLE)
-            if c2pwrite is not None:
-                pass
-            elif stdout is None or stdout == PIPE:
-                c2pread, c2pwrite = CreatePipe(None, 0)
-                c2pread = c2pread.Detach()
-                c2pread = msvcrt.open_osfhandle(c2pread, 0)
-            elif isinstance(stdout, int):
-                c2pwrite = msvcrt.get_osfhandle(stdout)
-            else:
-                c2pwrite = msvcrt.get_osfhandle(stdout.fileno())
-            c2pwrite = self._make_inheritable(c2pwrite)
-
-            if stderr is None:
-                errwrite = GetStdHandle(STD_ERROR_HANDLE)
-            if errwrite is not None:
-                pass
-            elif stderr is None or stderr == PIPE:
-                errread, errwrite = CreatePipe(None, 0)
-                errread = errread.Detach()
-                errread = msvcrt.open_osfhandle(errread, 0)
-            elif stderr == STDOUT:
-                errwrite = c2pwrite
-            elif isinstance(stderr, int):
-                errwrite = msvcrt.get_osfhandle(stderr)
-            else:
-                errwrite = msvcrt.get_osfhandle(stderr.fileno())
-            errwrite = self._make_inheritable(errwrite)
-
-            return (p2cread, p2cwrite,
-                    c2pread, c2pwrite,
-                    errread, errwrite)
-        def _make_inheritable(self, handle):
-            return DuplicateHandle(GetCurrentProcess(), handle, GetCurrentProcess(), 0, 1, DUPLICATE_SAME_ACCESS)
-
-        def _find_w9xpopen(self):
-            w9xpopen = os.path.join(os.path.dirname(GetModuleFileName(0)), "w9xpopen.exe")
-            if not os.path.exists(w9xpopen):
-                w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix), "w9xpopen.exe")
-                if not os.path.exists(w9xpopen):
-                    raise RuntimeError("Cannot locate w9xpopen.exe, which is needed for Popen to work with your shell or platform.")
-            return w9xpopen
-
-        def _execute_child(self, args, executable, preexec_fn, close_fds,
-                           cwd, env, universal_newlines,
-                           startupinfo, creationflags, shell,
-                           p2cread, p2cwrite,
-                           c2pread, c2pwrite,
-                           errread, errwrite):
-
-            if not isinstance(args, types.StringTypes):
-                args = list2cmdline(args)
-
-            if startupinfo is None:
-                startupinfo = STARTUPINFO()
-            if None not in (p2cread, c2pwrite, errwrite):
-                startupinfo.dwFlags |= STARTF_USESTDHANDLES
-                startupinfo.hStdInput = p2cread
-                startupinfo.hStdOutput = c2pwrite
-                startupinfo.hStdError = errwrite
-
-            if shell:
-                startupinfo.dwFlags |= STARTF_USESHOWWINDOW
-                startupinfo.wShowWindow = SW_HIDE
-                comspec = os.environ.get("COMSPEC", "cmd.exe")
-                args = comspec + " /c " + args
-                if (GetVersion() >= 0x80000000L or
-                        os.path.basename(comspec).lower() == "command.com"):
-                    w9xpopen = self._find_w9xpopen()
-                    args = '"%s" %s' % (w9xpopen, args)
-                    creationflags |= CREATE_NEW_CONSOLE
-
-            try:
-                hp, ht, pid, tid = CreateProcess(executable, args, None, None, 1, creationflags, env, cwd, startupinfo)
-            except pywintypes.error, e:
-                raise WindowsError(*e.args)
-
-            self._child_created = True
-            self._handle = hp
-            self.pid = pid
-            ht.Close()
-
-            if p2cread is not None:
-                p2cread.Close()
-            if c2pwrite is not None:
-                c2pwrite.Close()
-            if errwrite is not None:
-                errwrite.Close()
-
-
-        def poll(self, _deadstate=None):
-            if self.returncode is None:
-                if WaitForSingleObject(self._handle, 0) == WAIT_OBJECT_0:
-                    self.returncode = GetExitCodeProcess(self._handle)
-            return self.returncode
-
-
-        def wait(self):
-            if self.returncode is None:
-                obj = WaitForSingleObject(self._handle, INFINITE)
-                self.returncode = GetExitCodeProcess(self._handle)
-            return self.returncode
-
-        def _readerthread(self, fh, buffer):
-            buffer.append(fh.read())
-
-        def _communicate(self, input):
-            stdout = None
-            stderr = None
-
-            if self.stdout:
-                stdout = []
-                stdout_thread = threading.Thread(target=self._readerthread, args=(self.stdout, stdout))
-                stdout_thread.setDaemon(True)
-                stdout_thread.start()
-            if self.stderr:
-                stderr = []
-                stderr_thread = threading.Thread(target=self._readerthread, args=(self.stderr, stderr))
-                stderr_thread.setDaemon(True)
-                stderr_thread.start()
-
-            if self.stdin:
-                if input is not None:
-                    self.stdin.write(input)
-                self.stdin.close()
-
-            if self.stdout:
-                stdout_thread.join()
-            if self.stderr:
-                stderr_thread.join()
-
-            if stdout is not None:
-                stdout = stdout[0]
-            if stderr is not None:
-                stderr = stderr[0]
-
-            if self.universal_newlines and hasattr(file, 'newlines'):
-                if stdout:
-                    stdout = self._translate_newlines(stdout)
-                if stderr:
-                    stderr = self._translate_newlines(stderr)
-
-            self.wait()
-            return (stdout, stderr)
-
-    else:
-        def _get_handles(self, stdin, stdout, stderr):
-            p2cread, p2cwrite = None, None
-            c2pread, c2pwrite = None, None
-            errread, errwrite = None, None
-
-            if stdin is None:
-                pass
-            elif stdin == PIPE:
-                p2cread, p2cwrite = os.pipe()
-            elif isinstance(stdin, int):
-                p2cread = stdin
-            else:
-                p2cread = stdin.fileno()
-
-            if stdout is None:
-                pass
-            elif stdout == PIPE:
-                c2pread, c2pwrite = os.pipe()
-            elif isinstance(stdout, int):
-                c2pwrite = stdout
-            else:
-                c2pwrite = stdout.fileno()
-
-            if stderr is None:
-                pass
-            elif stderr == PIPE:
-                errread, errwrite = os.pipe()
-            elif stderr == STDOUT:
-                errwrite = c2pwrite
-            elif isinstance(stderr, int):
-                errwrite = stderr
-            else:
-                errwrite = stderr.fileno()
-
-            return (p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite)
-
-        def _set_cloexec_flag(self, fd):
-            try:
-                cloexec_flag = fcntl.FD_CLOEXEC
-            except AttributeError:
-                cloexec_flag = 1
-
-            old = fcntl.fcntl(fd, fcntl.F_GETFD)
-            fcntl.fcntl(fd, fcntl.F_SETFD, old | cloexec_flag)
-
-        def _close_fds(self, but):
-            for i in xrange(3, MAXFD):
-                if i == but:
-                    continue
-                try:
-                    os.close(i)
-                except:
-                    pass
-
-        def _execute_child(self, args, executable, preexec_fn, close_fds,
-                           cwd, env, universal_newlines, startupinfo, creationflags, shell,
-                           p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite):
-
-            if isinstance(args, types.StringTypes):
-                args = [args]
-            else:
-                args = list(args)
-
-            if shell:
-                args = ["/bin/sh", "-c"] + args
-
-            if executable is None:
-                executable = args[0]
-
-            errpipe_read, errpipe_write = os.pipe()
-            self._set_cloexec_flag(errpipe_write)
-
-            gc_was_enabled = gc.isenabled()
-            gc.disable()
-            try:
-                self.pid = os.fork()
-            except:
-                if gc_was_enabled:
-                    gc.enable()
-                raise
-            self._child_created = True
-            if self.pid == 0:
-                try:
-                    if p2cwrite:
-                        os.close(p2cwrite)
-                    if c2pread:
-                        os.close(c2pread)
-                    if errread:
-                        os.close(errread)
-                    os.close(errpipe_read)
-
-                    if p2cread:
-                        os.dup2(p2cread, 0)
-                    if c2pwrite:
-                        os.dup2(c2pwrite, 1)
-                    if errwrite:
-                        os.dup2(errwrite, 2)
-
-                    if p2cread and p2cread not in (0,):
-                        os.close(p2cread)
-                    if c2pwrite and c2pwrite not in (p2cread, 1):
-                        os.close(c2pwrite)
-                    if errwrite and errwrite not in (p2cread, c2pwrite, 2):
-                        os.close(errwrite)
-
-                    if close_fds:
-                        self._close_fds(but=errpipe_write)
-
-                    if cwd is not None:
-                        os.chdir(cwd)
-
-                    if preexec_fn:
-                        apply(preexec_fn)
-
-                    if env is None:
-                        os.execvp(executable, args)
-                    else:
-                        os.execvpe(executable, args, env)
-
-                except:
-                    exc_type, exc_value, tb = sys.exc_info()
-                    exc_lines = traceback.format_exception(exc_type, exc_value, tb)
-                    exc_value.child_traceback = ''.join(exc_lines)
-                    os.write(errpipe_write, pickle.dumps(exc_value))
-
-                os._exit(255)
-
-            if gc_was_enabled:
-                gc.enable()
-            os.close(errpipe_write)
-            if p2cread and p2cwrite:
-                os.close(p2cread)
-            if c2pwrite and c2pread:
-                os.close(c2pwrite)
-            if errwrite and errread:
-                os.close(errwrite)
-
-            data = os.read(errpipe_read, 1048576)
-            os.close(errpipe_read)
-            if data != "":
-                os.waitpid(self.pid, 0)
-                child_exception = pickle.loads(data)
-                raise child_exception
-
-        def _handle_exitstatus(self, sts):
-            if os.WIFSIGNALED(sts):
-                self.returncode = -os.WTERMSIG(sts)
-            elif os.WIFEXITED(sts):
-                self.returncode = os.WEXITSTATUS(sts)
-            else:
-                raise RuntimeError("Unknown child exit status!")
-
-        def poll(self, _deadstate=None):
-            if self.returncode is None:
-                try:
-                    pid, sts = os.waitpid(self.pid, os.WNOHANG)
-                    if pid == self.pid:
-                        self._handle_exitstatus(sts)
-                except os.error:
-                    if _deadstate is not None:
-                        self.returncode = _deadstate
-            return self.returncode
-
-        def wait(self):
-            if self.returncode is None:
-                pid, sts = os.waitpid(self.pid, 0)
-                self._handle_exitstatus(sts)
-            return self.returncode
-
-        def _communicate(self, input):
-            read_set = []
-            write_set = []
-            stdout = None
-            stderr = None
-
-            if self.stdin:
-                self.stdin.flush()
-                if input:
-                    write_set.append(self.stdin)
-                else:
-                    self.stdin.close()
-            if self.stdout:
-                read_set.append(self.stdout)
-                stdout = []
-            if self.stderr:
-                read_set.append(self.stderr)
-                stderr = []
-
-            input_offset = 0
-            while read_set or write_set:
-                rlist, wlist, xlist = select.select(read_set, write_set, [])
-
-                if self.stdin in wlist:
-                    bytes_written = os.write(self.stdin.fileno(), buffer(input, input_offset, 512))
-                    input_offset += bytes_written
-                    if input_offset >= len(input):
-                        self.stdin.close()
-                        write_set.remove(self.stdin)
-
-                if self.stdout in rlist:
-                    data = os.read(self.stdout.fileno(), 1024)
-                    if data == "":
-                        self.stdout.close()
-                        read_set.remove(self.stdout)
-                    stdout.append(data)
-
-                if self.stderr in rlist:
-                    data = os.read(self.stderr.fileno(), 1024)
-                    if data == "":
-                        self.stderr.close()
-                        read_set.remove(self.stderr)
-                    stderr.append(data)
-
-            if stdout is not None:
-                stdout = ''.join(stdout)
-            if stderr is not None:
-                stderr = ''.join(stderr)
-
-            if self.universal_newlines and hasattr(file, 'newlines'):
-                if stdout:
-                    stdout = self._translate_newlines(stdout)
-                if stderr:
-                    stderr = self._translate_newlines(stderr)
-
-            self.wait()
-            return (stdout, stderr)
diff --git a/third_party/waf/wafadmin/py3kfixes.py b/third_party/waf/wafadmin/py3kfixes.py
deleted file mode 100644 (file)
index 1a64706..0000000
+++ /dev/null
@@ -1,129 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2009 (ita)
-
-"""
-Fixes for py3k go here
-"""
-
-import os
-
-all_modifs = {}
-
-def modif(dir, name, fun):
-       if name == '*':
-               lst = []
-               for y in '. Tools 3rdparty'.split():
-                       for x in os.listdir(os.path.join(dir, y)):
-                               if x.endswith('.py'):
-                                       lst.append(y + os.sep + x)
-               #lst = [y + os.sep + x for x in os.listdir(os.path.join(dir, y)) for y in '. Tools 3rdparty'.split() if x.endswith('.py')]
-               for x in lst:
-                       modif(dir, x, fun)
-               return
-
-       filename = os.path.join(dir, name)
-       f = open(filename, 'r')
-       txt = f.read()
-       f.close()
-
-       txt = fun(txt)
-
-       f = open(filename, 'w')
-       f.write(txt)
-       f.close()
-
-def subst(filename):
-       def do_subst(fun):
-               global all_modifs
-               try:
-                       all_modifs[filename] += fun
-               except KeyError:
-                       all_modifs[filename] = [fun]
-               return fun
-       return do_subst
-
-@subst('Constants.py')
-def r1(code):
-       code = code.replace("'iluvcuteoverload'", "b'iluvcuteoverload'")
-       code = code.replace("ABI=7", "ABI=37")
-       return code
-
-@subst('Tools/ccroot.py')
-def r2(code):
-       code = code.replace("p.stdin.write('\\n')", "p.stdin.write(b'\\n')")
-       code = code.replace('p.communicate()[0]', 'p.communicate()[0].decode("utf-8")')
-       return code
-
-@subst('Utils.py')
-def r3(code):
-       code = code.replace("m.update(str(lst))", "m.update(str(lst).encode())")
-       code = code.replace('p.communicate()[0]', 'p.communicate()[0].decode("utf-8")')
-       return code
-
-@subst('ansiterm.py')
-def r33(code):
-       code = code.replace('unicode', 'str')
-       return code
-
-@subst('Task.py')
-def r4(code):
-       code = code.replace("up(self.__class__.__name__)", "up(self.__class__.__name__.encode())")
-       code = code.replace("up(self.env.variant())", "up(self.env.variant().encode())")
-       code = code.replace("up(x.parent.abspath())", "up(x.parent.abspath().encode())")
-       code = code.replace("up(x.name)", "up(x.name.encode())")
-       code = code.replace('class TaskBase(object):\n\t__metaclass__=store_task_type', 'import binascii\n\nclass TaskBase(object, metaclass=store_task_type):')
-       code = code.replace('keys=self.cstr_groups.keys()', 'keys=list(self.cstr_groups.keys())')
-       code = code.replace("sig.encode('hex')", 'binascii.hexlify(sig)')
-       code = code.replace("os.path.join(Options.cache_global,ssig)", "os.path.join(Options.cache_global,ssig.decode())")
-       return code
-
-@subst('Build.py')
-def r5(code):
-       code = code.replace("cPickle.dump(data,file,-1)", "cPickle.dump(data,file)")
-       code = code.replace('for node in src_dir_node.childs.values():', 'for node in list(src_dir_node.childs.values()):')
-       return code
-
-@subst('*')
-def r6(code):
-       code = code.replace('xrange', 'range')
-       code = code.replace('iteritems', 'items')
-       code = code.replace('maxint', 'maxsize')
-       code = code.replace('iterkeys', 'keys')
-       code = code.replace('Error,e:', 'Error as e:')
-       code = code.replace('Exception,e:', 'Exception as e:')
-       return code
-
-@subst('TaskGen.py')
-def r7(code):
-       code = code.replace('class task_gen(object):\n\t__metaclass__=register_obj', 'class task_gen(object, metaclass=register_obj):')
-       return code
-
-@subst('Tools/python.py')
-def r8(code):
-       code = code.replace('proc.communicate()[0]', 'proc.communicate()[0].decode("utf-8")')
-       return code
-
-@subst('Tools/glib2.py')
-def r9(code):
-       code = code.replace('f.write(c)', 'f.write(c.encode("utf-8"))')
-       return code
-
-@subst('Tools/config_c.py')
-def r10(code):
-       code = code.replace("key=kw['success']", "key=kw['success']\n\t\t\t\ttry:\n\t\t\t\t\tkey=key.decode('utf-8')\n\t\t\t\texcept:\n\t\t\t\t\tpass")
-       code = code.replace('out=str(out)','out=out.decode("utf-8")')
-       code = code.replace('err=str(err)','err=err.decode("utf-8")')
-       return code
-
-@subst('Tools/d.py')
-def r11(code):
-       code = code.replace('ret.strip()', 'ret.strip().decode("utf-8")')
-       return code
-
-def fixdir(dir):
-       global all_modifs
-       for k in all_modifs:
-               for v in all_modifs[k]:
-                       modif(os.path.join(dir, 'wafadmin'), k, v)
-       #print('substitutions finished')
diff --git a/third_party/waf/waflib/Build.py b/third_party/waf/waflib/Build.py
new file mode 100644 (file)
index 0000000..5858348
--- /dev/null
@@ -0,0 +1,1504 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2016 (ita)
+
+"""
+Classes related to the build phase (build, clean, install, step, etc)
+
+The inheritance tree is the following:
+
+"""
+
+import os, sys, errno, re, shutil, stat
+try:
+       import cPickle
+except ImportError:
+       import pickle as cPickle
+from waflib import Node, Runner, TaskGen, Utils, ConfigSet, Task, Logs, Options, Context, Errors
+
+CACHE_DIR = 'c4che'
+"""Name of the cache directory"""
+
+CACHE_SUFFIX = '_cache.py'
+"""ConfigSet cache files for variants are written under :py:attr:`waflib.Build.CACHE_DIR` in the form ``variant_name``_cache.py"""
+
+INSTALL = 1337
+"""Positive value '->' install, see :py:attr:`waflib.Build.BuildContext.is_install`"""
+
+UNINSTALL = -1337
+"""Negative value '<-' uninstall, see :py:attr:`waflib.Build.BuildContext.is_install`"""
+
+SAVED_ATTRS = 'root node_sigs task_sigs imp_sigs raw_deps node_deps'.split()
+"""Build class members to save between the runs; these should be all dicts
+except for `root` which represents a :py:class:`waflib.Node.Node` instance
+"""
+
+CFG_FILES = 'cfg_files'
+"""Files from the build directory to hash before starting the build (``config.h`` written during the configuration)"""
+
+POST_AT_ONCE = 0
+"""Post mode: all task generators are posted before any task executed"""
+
+POST_LAZY = 1
+"""Post mode: post the task generators group after group, the tasks in the next group are created when the tasks in the previous groups are done"""
+
+PROTOCOL = -1
+if sys.platform == 'cli':
+       PROTOCOL = 0
+
+class BuildContext(Context.Context):
+       '''executes the build'''
+
+       cmd = 'build'
+       variant = ''
+
+       def __init__(self, **kw):
+               super(BuildContext, self).__init__(**kw)
+
+               self.is_install = 0
+               """Non-zero value when installing or uninstalling file"""
+
+               self.top_dir = kw.get('top_dir', Context.top_dir)
+               """See :py:attr:`waflib.Context.top_dir`; prefer :py:attr:`waflib.Build.BuildContext.srcnode`"""
+
+               self.out_dir = kw.get('out_dir', Context.out_dir)
+               """See :py:attr:`waflib.Context.out_dir`; prefer :py:attr:`waflib.Build.BuildContext.bldnode`"""
+
+               self.run_dir = kw.get('run_dir', Context.run_dir)
+               """See :py:attr:`waflib.Context.run_dir`"""
+
+               self.launch_dir = Context.launch_dir
+               """See :py:attr:`waflib.Context.out_dir`; prefer :py:meth:`waflib.Build.BuildContext.launch_node`"""
+
+               self.post_mode = POST_LAZY
+               """Whether to post the task generators at once or group-by-group (default is group-by-group)"""
+
+               self.cache_dir = kw.get('cache_dir')
+               if not self.cache_dir:
+                       self.cache_dir = os.path.join(self.out_dir, CACHE_DIR)
+
+               self.all_envs = {}
+               """Map names to :py:class:`waflib.ConfigSet.ConfigSet`, the empty string must map to the default environment"""
+
+               # ======================================= #
+               # cache variables
+
+               self.node_sigs = {}
+               """Dict mapping build nodes to task identifier (uid), it indicates whether a task created a particular file (persists across builds)"""
+
+               self.task_sigs = {}
+               """Dict mapping task identifiers (uid) to task signatures (persists across builds)"""
+
+               self.imp_sigs = {}
+               """Dict mapping task identifiers (uid) to implicit task dependencies used for scanning targets (persists across builds)"""
+
+               self.node_deps = {}
+               """Dict mapping task identifiers (uid) to node dependencies found by :py:meth:`waflib.Task.Task.scan` (persists across builds)"""
+
+               self.raw_deps = {}
+               """Dict mapping task identifiers (uid) to custom data returned by :py:meth:`waflib.Task.Task.scan` (persists across builds)"""
+
+               self.task_gen_cache_names = {}
+
+               self.jobs = Options.options.jobs
+               """Amount of jobs to run in parallel"""
+
+               self.targets = Options.options.targets
+               """List of targets to build (default: \*)"""
+
+               self.keep = Options.options.keep
+               """Whether the build should continue past errors"""
+
+               self.progress_bar = Options.options.progress_bar
+               """
+               Level of progress status:
+
+               0. normal output
+               1. progress bar
+               2. IDE output
+               3. No output at all
+               """
+
+               # Manual dependencies.
+               self.deps_man = Utils.defaultdict(list)
+               """Manual dependencies set by :py:meth:`waflib.Build.BuildContext.add_manual_dependency`"""
+
+               # just the structure here
+               self.current_group = 0
+               """
+               Current build group
+               """
+
+               self.groups = []
+               """
+               List containing lists of task generators
+               """
+
+               self.group_names = {}
+               """
+               Map group names to the group lists. See :py:meth:`waflib.Build.BuildContext.add_group`
+               """
+
+               for v in SAVED_ATTRS:
+                       if not hasattr(self, v):
+                               setattr(self, v, {})
+
+       def set_cur(self, cur):
+               self.current_group = cur
+       def get_cur(self):
+               return self.current_group
+       cur = property(get_cur, set_cur)
+
+       def get_variant_dir(self):
+               """Getter for the variant_dir attribute"""
+               if not self.variant:
+                       return self.out_dir
+               return os.path.join(self.out_dir, self.variant)
+       variant_dir = property(get_variant_dir, None)
+
+       def __call__(self, *k, **kw):
+               """
+               Create a task generator and add it to the current build group. The following forms are equivalent::
+
+                       def build(bld):
+                               tg = bld(a=1, b=2)
+
+                       def build(bld):
+                               tg = bld()
+                               tg.a = 1
+                               tg.b = 2
+
+                       def build(bld):
+                               tg = TaskGen.task_gen(a=1, b=2)
+                               bld.add_to_group(tg, None)
+
+               :param group: group name to add the task generator to
+               :type group: string
+               """
+               kw['bld'] = self
+               ret = TaskGen.task_gen(*k, **kw)
+               self.task_gen_cache_names = {} # reset the cache, each time
+               self.add_to_group(ret, group=kw.get('group'))
+               return ret
+
+       def rule(self, *k, **kw):
+               """
+               Wrapper for creating a task generator using the decorator notation. The following code::
+
+                       @bld.rule(target="foo")
+                       def _(tsk):
+                               print("bar")
+
+               is equivalent to::
+
+                       def bar(tsk):
+                               print("bar")
+
+                       bld(
+                               target = "foo",
+                               rule = bar,
+                       )
+               """
+               def f(rule):
+                       ret = self(*k, **kw)
+                       ret.rule = rule
+                       return ret
+               return f
+
+       def __copy__(self):
+               """
+               Build contexts cannot be copied
+
+               :raises: :py:class:`waflib.Errors.WafError`
+               """
+               raise Errors.WafError('build contexts cannot be copied')
+
+       def load_envs(self):
+               """
+               The configuration command creates files of the form ``build/c4che/NAMEcache.py``. This method
+               creates a :py:class:`waflib.ConfigSet.ConfigSet` instance for each ``NAME`` by reading those
+               files and stores them in :py:attr:`waflib.Build.BuildContext.allenvs`.
+               """
+               node = self.root.find_node(self.cache_dir)
+               if not node:
+                       raise Errors.WafError('The project was not configured: run "waf configure" first!')
+               lst = node.ant_glob('**/*%s' % CACHE_SUFFIX, quiet=True)
+
+               if not lst:
+                       raise Errors.WafError('The cache directory is empty: reconfigure the project')
+
+               for x in lst:
+                       name = x.path_from(node).replace(CACHE_SUFFIX, '').replace('\\', '/')
+                       env = ConfigSet.ConfigSet(x.abspath())
+                       self.all_envs[name] = env
+                       for f in env[CFG_FILES]:
+                               newnode = self.root.find_resource(f)
+                               if not newnode or not newnode.exists():
+                                       raise Errors.WafError('Missing configuration file %r, reconfigure the project!' % f)
+
+       def init_dirs(self):
+               """
+               Initialize the project directory and the build directory by creating the nodes
+               :py:attr:`waflib.Build.BuildContext.srcnode` and :py:attr:`waflib.Build.BuildContext.bldnode`
+               corresponding to ``top_dir`` and ``variant_dir`` respectively. The ``bldnode`` directory is
+               created if necessary.
+               """
+               if not (os.path.isabs(self.top_dir) and os.path.isabs(self.out_dir)):
+                       raise Errors.WafError('The project was not configured: run "waf configure" first!')
+
+               self.path = self.srcnode = self.root.find_dir(self.top_dir)
+               self.bldnode = self.root.make_node(self.variant_dir)
+               self.bldnode.mkdir()
+
+       def execute(self):
+               """
+               Restore data from previous builds and call :py:meth:`waflib.Build.BuildContext.execute_build`.
+               Overrides from :py:func:`waflib.Context.Context.execute`
+               """
+               self.restore()
+               if not self.all_envs:
+                       self.load_envs()
+               self.execute_build()
+
+       def execute_build(self):
+               """
+               Execute the build by:
+
+               * reading the scripts (see :py:meth:`waflib.Context.Context.recurse`)
+               * calling :py:meth:`waflib.Build.BuildContext.pre_build` to call user build functions
+               * calling :py:meth:`waflib.Build.BuildContext.compile` to process the tasks
+               * calling :py:meth:`waflib.Build.BuildContext.post_build` to call user build functions
+               """
+
+               Logs.info("Waf: Entering directory `%s'", self.variant_dir)
+               self.recurse([self.run_dir])
+               self.pre_build()
+
+               # display the time elapsed in the progress bar
+               self.timer = Utils.Timer()
+
+               try:
+                       self.compile()
+               finally:
+                       if self.progress_bar == 1 and sys.stderr.isatty():
+                               c = self.producer.processed or 1
+                               m = self.progress_line(c, c, Logs.colors.BLUE, Logs.colors.NORMAL)
+                               Logs.info(m, extra={'stream': sys.stderr, 'c1': Logs.colors.cursor_off, 'c2' : Logs.colors.cursor_on})
+                       Logs.info("Waf: Leaving directory `%s'", self.variant_dir)
+               try:
+                       self.producer.bld = None
+                       del self.producer
+               except AttributeError:
+                       pass
+               self.post_build()
+
+       def restore(self):
+               """
+               Load data from a previous run, sets the attributes listed in :py:const:`waflib.Build.SAVED_ATTRS`
+               """
+               try:
+                       env = ConfigSet.ConfigSet(os.path.join(self.cache_dir, 'build.config.py'))
+               except EnvironmentError:
+                       pass
+               else:
+                       if env.version < Context.HEXVERSION:
+                               raise Errors.WafError('Project was configured with a different version of Waf, please reconfigure it')
+
+                       for t in env.tools:
+                               self.setup(**t)
+
+               dbfn = os.path.join(self.variant_dir, Context.DBFILE)
+               try:
+                       data = Utils.readf(dbfn, 'rb')
+               except (EnvironmentError, EOFError):
+                       # handle missing file/empty file
+                       Logs.debug('build: Could not load the build cache %s (missing)', dbfn)
+               else:
+                       try:
+                               Node.pickle_lock.acquire()
+                               Node.Nod3 = self.node_class
+                               try:
+                                       data = cPickle.loads(data)
+                               except Exception as e:
+                                       Logs.debug('build: Could not pickle the build cache %s: %r', dbfn, e)
+                               else:
+                                       for x in SAVED_ATTRS:
+                                               setattr(self, x, data.get(x, {}))
+                       finally:
+                               Node.pickle_lock.release()
+
+               self.init_dirs()
+
+       def store(self):
+               """
+               Store data for next runs, set the attributes listed in :py:const:`waflib.Build.SAVED_ATTRS`. Uses a temporary
+               file to avoid problems on ctrl+c.
+               """
+               data = {}
+               for x in SAVED_ATTRS:
+                       data[x] = getattr(self, x)
+               db = os.path.join(self.variant_dir, Context.DBFILE)
+
+               try:
+                       Node.pickle_lock.acquire()
+                       Node.Nod3 = self.node_class
+                       x = cPickle.dumps(data, PROTOCOL)
+               finally:
+                       Node.pickle_lock.release()
+
+               Utils.writef(db + '.tmp', x, m='wb')
+
+               try:
+                       st = os.stat(db)
+                       os.remove(db)
+                       if not Utils.is_win32: # win32 has no chown but we're paranoid
+                               os.chown(db + '.tmp', st.st_uid, st.st_gid)
+               except (AttributeError, OSError):
+                       pass
+
+               # do not use shutil.move (copy is not thread-safe)
+               os.rename(db + '.tmp', db)
+
+       def compile(self):
+               """
+               Run the build by creating an instance of :py:class:`waflib.Runner.Parallel`
+               The cache file is written when at least a task was executed.
+
+               :raises: :py:class:`waflib.Errors.BuildError` in case the build fails
+               """
+               Logs.debug('build: compile()')
+
+               # delegate the producer-consumer logic to another object to reduce the complexity
+               self.producer = Runner.Parallel(self, self.jobs)
+               self.producer.biter = self.get_build_iterator()
+               try:
+                       self.producer.start()
+               except KeyboardInterrupt:
+                       self.store()
+                       raise
+               else:
+                       if self.producer.dirty:
+                               self.store()
+
+               if self.producer.error:
+                       raise Errors.BuildError(self.producer.error)
+
+       def setup(self, tool, tooldir=None, funs=None):
+               """
+               Import waf tools defined during the configuration::
+
+                       def configure(conf):
+                               conf.load('glib2')
+
+                       def build(bld):
+                               pass # glib2 is imported implicitly
+
+               :param tool: tool list
+               :type tool: list
+               :param tooldir: optional tool directory (sys.path)
+               :type tooldir: list of string
+               :param funs: unused variable
+               """
+               if isinstance(tool, list):
+                       for i in tool: self.setup(i, tooldir)
+                       return
+
+               module = Context.load_tool(tool, tooldir)
+               if hasattr(module, "setup"): module.setup(self)
+
+       def get_env(self):
+               """Getter for the env property"""
+               try:
+                       return self.all_envs[self.variant]
+               except KeyError:
+                       return self.all_envs['']
+       def set_env(self, val):
+               """Setter for the env property"""
+               self.all_envs[self.variant] = val
+
+       env = property(get_env, set_env)
+
+       def add_manual_dependency(self, path, value):
+               """
+               Adds a dependency from a node object to a value::
+
+                       def build(bld):
+                               bld.add_manual_dependency(
+                                       bld.path.find_resource('wscript'),
+                                       bld.root.find_resource('/etc/fstab'))
+
+               :param path: file path
+               :type path: string or :py:class:`waflib.Node.Node`
+               :param value: value to depend
+               :type value: :py:class:`waflib.Node.Node`, byte object, or function returning a byte object
+               """
+               if not path:
+                       raise ValueError('Invalid input path %r' % path)
+
+               if isinstance(path, Node.Node):
+                       node = path
+               elif os.path.isabs(path):
+                       node = self.root.find_resource(path)
+               else:
+                       node = self.path.find_resource(path)
+               if not node:
+                       raise ValueError('Could not find the path %r' % path)
+
+               if isinstance(value, list):
+                       self.deps_man[node].extend(value)
+               else:
+                       self.deps_man[node].append(value)
+
+       def launch_node(self):
+               """Returns the launch directory as a :py:class:`waflib.Node.Node` object (cached)"""
+               try:
+                       # private cache
+                       return self.p_ln
+               except AttributeError:
+                       self.p_ln = self.root.find_dir(self.launch_dir)
+                       return self.p_ln
+
+       def hash_env_vars(self, env, vars_lst):
+               """
+               Hashes configuration set variables::
+
+                       def build(bld):
+                               bld.hash_env_vars(bld.env, ['CXX', 'CC'])
+
+               This method uses an internal cache.
+
+               :param env: Configuration Set
+               :type env: :py:class:`waflib.ConfigSet.ConfigSet`
+               :param vars_lst: list of variables
+               :type vars_list: list of string
+               """
+
+               if not env.table:
+                       env = env.parent
+                       if not env:
+                               return Utils.SIG_NIL
+
+               idx = str(id(env)) + str(vars_lst)
+               try:
+                       cache = self.cache_env
+               except AttributeError:
+                       cache = self.cache_env = {}
+               else:
+                       try:
+                               return self.cache_env[idx]
+                       except KeyError:
+                               pass
+
+               lst = [env[a] for a in vars_lst]
+               cache[idx] = ret = Utils.h_list(lst)
+               Logs.debug('envhash: %s %r', Utils.to_hex(ret), lst)
+               return ret
+
+       def get_tgen_by_name(self, name):
+               """
+               Fetches a task generator by its name or its target attribute;
+               the name must be unique in a build::
+
+                       def build(bld):
+                               tg = bld(name='foo')
+                               tg == bld.get_tgen_by_name('foo')
+
+               This method use a private internal cache.
+
+               :param name: Task generator name
+               :raises: :py:class:`waflib.Errors.WafError` in case there is no task genenerator by that name
+               """
+               cache = self.task_gen_cache_names
+               if not cache:
+                       # create the index lazily
+                       for g in self.groups:
+                               for tg in g:
+                                       try:
+                                               cache[tg.name] = tg
+                                       except AttributeError:
+                                               # raised if not a task generator, which should be uncommon
+                                               pass
+               try:
+                       return cache[name]
+               except KeyError:
+                       raise Errors.WafError('Could not find a task generator for the name %r' % name)
+
+       def progress_line(self, idx, total, col1, col2):
+               """
+               Computes a progress bar line displayed when running ``waf -p``
+
+               :returns: progress bar line
+               :rtype: string
+               """
+               if not sys.stderr.isatty():
+                       return ''
+
+               n = len(str(total))
+
+               Utils.rot_idx += 1
+               ind = Utils.rot_chr[Utils.rot_idx % 4]
+
+               pc = (100. * idx)/total
+               fs = "[%%%dd/%%d][%%s%%2d%%%%%%s][%s][" % (n, ind)
+               left = fs % (idx, total, col1, pc, col2)
+               right = '][%s%s%s]' % (col1, self.timer, col2)
+
+               cols = Logs.get_term_cols() - len(left) - len(right) + 2*len(col1) + 2*len(col2)
+               if cols < 7: cols = 7
+
+               ratio = ((cols * idx)//total) - 1
+
+               bar = ('='*ratio+'>').ljust(cols)
+               msg = Logs.indicator % (left, bar, right)
+
+               return msg
+
+       def declare_chain(self, *k, **kw):
+               """
+               Wraps :py:func:`waflib.TaskGen.declare_chain` for convenience
+               """
+               return TaskGen.declare_chain(*k, **kw)
+
+       def pre_build(self):
+               """Executes user-defined methods before the build starts, see :py:meth:`waflib.Build.BuildContext.add_pre_fun`"""
+               for m in getattr(self, 'pre_funs', []):
+                       m(self)
+
+       def post_build(self):
+               """Executes user-defined methods after the build is successful, see :py:meth:`waflib.Build.BuildContext.add_post_fun`"""
+               for m in getattr(self, 'post_funs', []):
+                       m(self)
+
+       def add_pre_fun(self, meth):
+               """
+               Binds a callback method to execute after the scripts are read and before the build starts::
+
+                       def mycallback(bld):
+                               print("Hello, world!")
+
+                       def build(bld):
+                               bld.add_pre_fun(mycallback)
+               """
+               try:
+                       self.pre_funs.append(meth)
+               except AttributeError:
+                       self.pre_funs = [meth]
+
+       def add_post_fun(self, meth):
+               """
+               Binds a callback method to execute immediately after the build is successful::
+
+                       def call_ldconfig(bld):
+                               bld.exec_command('/sbin/ldconfig')
+
+                       def build(bld):
+                               if bld.cmd == 'install':
+                                       bld.add_pre_fun(call_ldconfig)
+               """
+               try:
+                       self.post_funs.append(meth)
+               except AttributeError:
+                       self.post_funs = [meth]
+
+       def get_group(self, x):
+               """
+               Returns the build group named `x`, or the current group if `x` is None
+
+               :param x: name or number or None
+               :type x: string, int or None
+               """
+               if not self.groups:
+                       self.add_group()
+               if x is None:
+                       return self.groups[self.current_group]
+               if x in self.group_names:
+                       return self.group_names[x]
+               return self.groups[x]
+
+       def add_to_group(self, tgen, group=None):
+               """Adds a task or a task generator to the build; there is no attempt to remove it if it was already added."""
+               assert(isinstance(tgen, TaskGen.task_gen) or isinstance(tgen, Task.TaskBase))
+               tgen.bld = self
+               self.get_group(group).append(tgen)
+
+       def get_group_name(self, g):
+               """
+               Returns the name of the input build group
+
+               :param g: build group object or build group index
+               :type g: integer or list
+               :return: name
+               :rtype: string
+               """
+               if not isinstance(g, list):
+                       g = self.groups[g]
+               for x in self.group_names:
+                       if id(self.group_names[x]) == id(g):
+                               return x
+               return ''
+
+       def get_group_idx(self, tg):
+               """
+               Returns the index of the group containing the task generator given as argument::
+
+                       def build(bld):
+                               tg = bld(name='nada')
+                               0 == bld.get_group_idx(tg)
+
+               :param tg: Task generator object
+               :type tg: :py:class:`waflib.TaskGen.task_gen`
+               :rtype: int
+               """
+               se = id(tg)
+               for i, tmp in enumerate(self.groups):
+                       for t in tmp:
+                               if id(t) == se:
+                                       return i
+               return None
+
+       def add_group(self, name=None, move=True):
+               """
+               Adds a new group of tasks/task generators. By default the new group becomes
+               the default group for new task generators (make sure to create build groups in order).
+
+               :param name: name for this group
+               :type name: string
+               :param move: set this new group as default group (True by default)
+               :type move: bool
+               :raises: :py:class:`waflib.Errors.WafError` if a group by the name given already exists
+               """
+               if name and name in self.group_names:
+                       raise Errors.WafError('add_group: name %s already present', name)
+               g = []
+               self.group_names[name] = g
+               self.groups.append(g)
+               if move:
+                       self.current_group = len(self.groups) - 1
+
+       def set_group(self, idx):
+               """
+               Sets the build group at position idx as current so that newly added
+               task generators are added to this one by default::
+
+                       def build(bld):
+                               bld(rule='touch ${TGT}', target='foo.txt')
+                               bld.add_group() # now the current group is 1
+                               bld(rule='touch ${TGT}', target='bar.txt')
+                               bld.set_group(0) # now the current group is 0
+                               bld(rule='touch ${TGT}', target='truc.txt') # build truc.txt before bar.txt
+
+               :param idx: group name or group index
+               :type idx: string or int
+               """
+               if isinstance(idx, str):
+                       g = self.group_names[idx]
+                       for i, tmp in enumerate(self.groups):
+                               if id(g) == id(tmp):
+                                       self.current_group = i
+                                       break
+               else:
+                       self.current_group = idx
+
+       def total(self):
+               """
+               Approximate task count: this value may be inaccurate if task generators
+               are posted lazily (see :py:attr:`waflib.Build.BuildContext.post_mode`).
+               The value :py:attr:`waflib.Runner.Parallel.total` is updated during the task execution.
+
+               :rtype: int
+               """
+               total = 0
+               for group in self.groups:
+                       for tg in group:
+                               try:
+                                       total += len(tg.tasks)
+                               except AttributeError:
+                                       total += 1
+               return total
+
+       def get_targets(self):
+               """
+               Returns the task generator corresponding to the 'targets' list; used internally
+               by :py:meth:`waflib.Build.BuildContext.get_build_iterator` to perform partial builds::
+
+                       $ waf --targets=myprogram,myshlib
+
+               :return: tuple of (index of the last group containing a requested target,
+                       requested task generators belonging to that group)
+               """
+               to_post = []
+               min_grp = 0
+               # NOTE: despite its name, min_grp tracks the HIGHEST group index seen so far;
+               # to_post only keeps the requested task generators living in that last group
+               for name in self.targets.split(','):
+                       tg = self.get_tgen_by_name(name)
+                       m = self.get_group_idx(tg)
+                       if m > min_grp:
+                               min_grp = m
+                               to_post = [tg]
+                       elif m == min_grp:
+                               to_post.append(tg)
+               return (min_grp, to_post)
+
+       def get_all_task_gen(self):
+               """
+               Returns a list of all task generators for troubleshooting purposes.
+               """
+               lst = []
+               for g in self.groups:
+                       lst.extend(g)
+               return lst
+
+       def post_group(self):
+               """
+               Post task generators from the group indexed by self.current_group; used internally
+               by :py:meth:`waflib.Build.BuildContext.get_build_iterator`
+               """
+               if self.targets == '*':
+                       # build everything: post all task generators of the current group
+                       for tg in self.groups[self.current_group]:
+                               try:
+                                       f = tg.post
+                               except AttributeError:
+                                       # plain tasks added to a group have no 'post' method
+                                       pass
+                               else:
+                                       f()
+               elif self.targets:
+                       # partial build (--targets=...): groups before the one holding the requested
+                       # targets (self._min_grp) are posted entirely; the last one only posts the
+                       # exact task generators computed by get_targets() (self._exact_tg)
+                       if self.current_group < self._min_grp:
+                               for tg in self.groups[self.current_group]:
+                                       try:
+                                               f = tg.post
+                                       except AttributeError:
+                                               pass
+                                       else:
+                                               f()
+                       else:
+                               for tg in self._exact_tg:
+                                       tg.post()
+               else:
+                       # no explicit targets: only post the task generators located under the
+                       # launch directory, falling back to the whole source tree when the
+                       # launch directory is not a valid starting point
+                       ln = self.launch_node()
+                       if ln.is_child_of(self.bldnode):
+                               Logs.warn('Building from the build directory, forcing --targets=*')
+                               ln = self.srcnode
+                       elif not ln.is_child_of(self.srcnode):
+                               Logs.warn('CWD %s is not under %s, forcing --targets=* (run distclean?)', ln.abspath(), self.srcnode.abspath())
+                               ln = self.srcnode
+                       for tg in self.groups[self.current_group]:
+                               try:
+                                       f = tg.post
+                               except AttributeError:
+                                       pass
+                               else:
+                                       if tg.path.is_child_of(ln):
+                                               f()
+
+       def get_tasks_group(self, idx):
+               """
+               Returns all task instances for the build group at position idx,
+               used internally by :py:meth:`waflib.Build.BuildContext.get_build_iterator`
+
+               :rtype: list of :py:class:`waflib.Task.TaskBase`
+               """
+               tasks = []
+               for tg in self.groups[idx]:
+                       try:
+                               tasks.extend(tg.tasks)
+                       except AttributeError: # not a task generator
+                               tasks.append(tg)
+               return tasks
+
+       def get_build_iterator(self):
+               """
+               Creates a Python generator object that returns lists of tasks that may be processed in parallel.
+
+               :return: tasks which can be executed immediately
+               :rtype: generator returning lists of :py:class:`waflib.Task.TaskBase`
+               """
+               self.current_group = 0
+
+               if self.targets and self.targets != '*':
+                       (self._min_grp, self._exact_tg) = self.get_targets()
+
+               global lazy_post
+               if self.post_mode != POST_LAZY:
+                       while self.current_group < len(self.groups):
+                               self.post_group()
+                               self.current_group += 1
+                       self.current_group = 0
+
+               while self.current_group < len(self.groups):
+                       # first post the task generators for the group
+                       if self.post_mode != POST_AT_ONCE:
+                               self.post_group()
+
+                       # then extract the tasks
+                       tasks = self.get_tasks_group(self.current_group)
+                       # if the constraints are set properly (ext_in/ext_out, before/after)
+                       # the call to set_file_constraints may be removed (can be a 15% penalty on no-op rebuilds)
+                       # (but leave set_file_constraints for the installation step)
+                       #
+                       # if the tasks have only files, set_file_constraints is required but set_precedence_constraints is not necessary
+                       #
+                       Task.set_file_constraints(tasks)
+                       Task.set_precedence_constraints(tasks)
+
+                       self.cur_tasks = tasks
+                       self.current_group += 1
+                       if not tasks: # return something else the build will stop
+                               continue
+                       yield tasks
+
+               while 1:
+                       yield []
+
+       def install_files(self, dest, files, **kw):
+               """
+               Creates a task generator to install files on the system::
+
+                       def build(bld):
+                               bld.install_files('${DATADIR}', self.path.find_resource('wscript'))
+
+               :param dest: path representing the destination directory
+               :type dest: :py:class:`waflib.Node.Node` or string (absolute path)
+               :param files: input files
+               :type files: list of strings or list of :py:class:`waflib.Node.Node`
+               :param env: configuration set to expand *dest*
+               :type env: :py:class:`waflib.ConfigSet.ConfigSet`
+               :param relative_trick: preserve the folder hierarchy when installing whole folders
+               :type relative_trick: bool
+               :param cwd: parent node for searching srcfile, when srcfile is not an instance of :py:class:`waflib.Node.Node`
+               :type cwd: :py:class:`waflib.Node.Node`
+               :param postpone: execute the task immediately to perform the installation (False by default)
+               :type postpone: bool
+               """
+               assert(dest)
+               tg = self(features='install_task', install_to=dest, install_from=files, **kw)
+               tg.dest = tg.install_to
+               tg.type = 'install_files'
+               if not kw.get('postpone', True):
+                       tg.post()
+               return tg
+
+       def install_as(self, dest, srcfile, **kw):
+               """
+               Creates a task generator to install a file on the system with a different name::
+
+                       def build(bld):
+                               bld.install_as('${PREFIX}/bin', 'myapp', chmod=Utils.O755)
+
+               :param dest: destination file
+               :type dest: :py:class:`waflib.Node.Node` or string (absolute path)
+               :param srcfile: input file
+               :type srcfile: string or :py:class:`waflib.Node.Node`
+               :param cwd: parent node for searching srcfile, when srcfile is not an instance of :py:class:`waflib.Node.Node`
+               :type cwd: :py:class:`waflib.Node.Node`
+               :param env: configuration set for performing substitutions in dest
+               :type env: :py:class:`waflib.ConfigSet.ConfigSet`
+               :param postpone: execute the task immediately to perform the installation (False by default)
+               :type postpone: bool
+               """
+               assert(dest)
+               tg = self(features='install_task', install_to=dest, install_from=srcfile, **kw)
+               tg.dest = tg.install_to
+               tg.type = 'install_as'
+               if not kw.get('postpone', True):
+                       tg.post()
+               return tg
+
+       def symlink_as(self, dest, src, **kw):
+               """
+               Creates a task generator to install a symlink::
+
+                       def build(bld):
+                               bld.symlink_as('${PREFIX}/lib/libfoo.so', 'libfoo.so.1.2.3')
+
+               :param dest: absolute path of the symlink
+               :type dest: :py:class:`waflib.Node.Node` or string (absolute path)
+               :param src: link contents, which is a relative or abolute path which may exist or not
+               :type src: string
+               :param env: configuration set for performing substitutions in dest
+               :type env: :py:class:`waflib.ConfigSet.ConfigSet`
+               :param add: add the task created to a build group - set ``False`` only if the installation task is created after the build has started
+               :type add: bool
+               :param postpone: execute the task immediately to perform the installation
+               :type postpone: bool
+               :param relative_trick: make the symlink relative (default: ``False``)
+               :type relative_trick: bool
+               """
+               assert(dest)
+               tg = self(features='install_task', install_to=dest, install_from=src, **kw)
+               tg.dest = tg.install_to
+               tg.type = 'symlink_as'
+               tg.link = src
+               # TODO if add: self.add_to_group(tsk)
+               if not kw.get('postpone', True):
+                       tg.post()
+               return tg
+
+@TaskGen.feature('install_task')
+@TaskGen.before_method('process_rule', 'process_source')
+def process_install_task(self):
+       """Creates the installation task for the current task generator; uses :py:func:`waflib.Build.add_install_task` internally."""
+       # forward every task generator attribute (install_to, install_from, type, ...)
+       # as keyword arguments to add_install_task
+       self.add_install_task(**self.__dict__)
+
+@TaskGen.taskgen_method
+def add_install_task(self, **kw):
+       """
+       Creates the installation task for the current task generator, and executes it immediately if necessary
+
+       :returns: An installation task
+       :rtype: :py:class:`waflib.Build.inst`
+       """
+       if not self.bld.is_install:
+               # neither 'install' nor 'uninstall' is running: nothing to do
+               return
+       if not kw['install_to']:
+               return
+
+       if kw['type'] == 'symlink_as' and Utils.is_win32:
+               if kw.get('win32_install'):
+                       # the caller asked for a plain copy on platforms without symlinks
+                       kw['type'] = 'install_as'
+               else:
+                       # just exit
+                       return
+
+       tsk = self.install_task = self.create_task('inst')
+       tsk.chmod = kw.get('chmod', Utils.O644)
+       tsk.link = kw.get('link', '') or kw.get('install_from', '')
+       tsk.relative_trick = kw.get('relative_trick', False)
+       tsk.type = kw['type']
+       tsk.install_to = tsk.dest = kw['install_to']
+       tsk.install_from = kw['install_from']
+       tsk.relative_base = kw.get('cwd') or kw.get('relative_base', self.path)
+       tsk.install_user = kw.get('install_user')
+       tsk.install_group = kw.get('install_group')
+       tsk.init_files()
+       if not kw.get('postpone', True):
+               # perform the installation right away rather than during the build
+               tsk.run_now()
+       return tsk
+
+@TaskGen.taskgen_method
+def add_install_files(self, **kw):
+       """
+       Creates an installation task for files
+
+       :returns: An installation task
+       :rtype: :py:class:`waflib.Build.inst`
+       """
+       kw['type'] = 'install_files'
+       return self.add_install_task(**kw)
+
+@TaskGen.taskgen_method
+def add_install_as(self, **kw):
+       """
+       Creates an installation task for a single file
+
+       :returns: An installation task
+       :rtype: :py:class:`waflib.Build.inst`
+       """
+       kw['type'] = 'install_as'
+       return self.add_install_task(**kw)
+
+@TaskGen.taskgen_method
+def add_symlink_as(self, **kw):
+       """
+       Creates an installation task for a symbolic link
+
+       :returns: An installation task
+       :rtype: :py:class:`waflib.Build.inst`
+       """
+       kw['type'] = 'symlink_as'
+       return self.add_install_task(**kw)
+
+class inst(Task.Task):
+       """Task that installs files or symlinks; it is typically executed by :py:class:`waflib.Build.InstallContext` and :py:class:`waflib.Build.UnInstallContext`"""
+       def __str__(self):
+               """Returns an empty string to disable the standard task display"""
+               return ''
+
+       def uid(self):
+               """Returns a unique identifier for the task"""
+               lst = self.inputs + self.outputs + [self.link, self.generator.path.abspath()]
+               return Utils.h_list(lst)
+
+       def init_files(self):
+               """
+               Initializes the task input and output nodes
+               """
+               if self.type == 'symlink_as':
+                       inputs = []
+               else:
+                       inputs = self.generator.to_nodes(self.install_from)
+                       if self.type == 'install_as':
+                               assert len(inputs) == 1
+               self.set_inputs(inputs)
+
+               dest = self.get_install_path()
+               outputs = []
+               if self.type == 'symlink_as':
+                       if self.relative_trick:
+                               self.link = os.path.relpath(self.link, os.path.dirname(dest))
+                       outputs.append(self.generator.bld.root.make_node(dest))
+               elif self.type == 'install_as':
+                       outputs.append(self.generator.bld.root.make_node(dest))
+               else:
+                       for y in inputs:
+                               if self.relative_trick:
+                                       destfile = os.path.join(dest, y.path_from(self.relative_base))
+                               else:
+                                       destfile = os.path.join(dest, y.name)
+                               outputs.append(self.generator.bld.root.make_node(destfile))
+               self.set_outputs(outputs)
+
+       def runnable_status(self):
+               """
+               Installation tasks are always executed, so this method returns either :py:const:`waflib.Task.ASK_LATER` or :py:const:`waflib.Task.RUN_ME`.
+               """
+               ret = super(inst, self).runnable_status()
+               if ret == Task.SKIP_ME and self.generator.bld.is_install:
+                       return Task.RUN_ME
+               return ret
+
+       def post_run(self):
+               """
+               Disables any post-run operations
+               """
+               pass
+
+       def get_install_path(self, destdir=True):
+               """
+               Returns the destination path where files will be installed, pre-pending `destdir`.
+
+               :rtype: string
+               """
+               if isinstance(self.install_to, Node.Node):
+                       dest = self.install_to.abspath()
+               else:
+                       dest = Utils.subst_vars(self.install_to, self.env)
+               if destdir and Options.options.destdir:
+                       dest = os.path.join(Options.options.destdir, os.path.splitdrive(dest)[1].lstrip(os.sep))
+               return dest
+
+       def copy_fun(self, src, tgt):
+               """
+               Copies a file from src to tgt, preserving permissions and trying to work
+               around path limitations on Windows platforms. On Unix-like platforms,
+               the owner/group of the target file may be set through install_user/install_group
+
+               :param src: absolute path
+               :type src: string
+               :param tgt: absolute path
+               :type tgt: string
+               """
+               # override this if you want to strip executables
+               # kw['tsk'].source is the task that created the files in the build
+               if Utils.is_win32 and len(tgt) > 259 and not tgt.startswith('\\\\?\\'):
+                       tgt = '\\\\?\\' + tgt
+               shutil.copy2(src, tgt)
+               self.fix_perms(tgt)
+
+       def rm_empty_dirs(self, tgt):
+               """
+               Removes empty folders recursively when uninstalling.
+
+               :param tgt: absolute path
+               :type tgt: string
+               """
+               while tgt:
+                       tgt = os.path.dirname(tgt)
+                       try:
+                               os.rmdir(tgt)
+                       except OSError:
+                               break
+
+       def run(self):
+               """
+               Performs file or symlink installation
+               """
+               is_install = self.generator.bld.is_install
+               if not is_install: # unnecessary?
+                       return
+
+               for x in self.outputs:
+                       if is_install == INSTALL:
+                               x.parent.mkdir()
+               if self.type == 'symlink_as':
+                       fun = is_install == INSTALL and self.do_link or self.do_unlink
+                       fun(self.link, self.outputs[0].abspath())
+               else:
+                       fun = is_install == INSTALL and self.do_install or self.do_uninstall
+                       launch_node = self.generator.bld.launch_node()
+                       for x, y in zip(self.inputs, self.outputs):
+                               fun(x.abspath(), y.abspath(), x.path_from(launch_node))
+
+       def run_now(self):
+               """
+               Try executing the installation task right now
+
+               :raises: :py:class:`waflib.Errors.TaskNotReady`
+               """
+               status = self.runnable_status()
+               if status not in (Task.RUN_ME, Task.SKIP_ME):
+                       raise Errors.TaskNotReady('Could not process %r: status %r' % (self, status))
+               self.run()
+               self.hasrun = Task.SUCCESS
+
+       def do_install(self, src, tgt, lbl, **kw):
+               """
+               Copies a file from src to tgt with given file permissions. The actual copy is only performed
+               if the source and target file sizes or timestamps differ. When the copy occurs,
+               the file is always first removed and then copied so as to prevent stale inodes.
+
+               :param src: file name as absolute path
+               :type src: string
+               :param tgt: file destination, as absolute path
+               :type tgt: string
+               :param lbl: file source description
+               :type lbl: string
+               :param chmod: installation mode
+               :type chmod: int
+               :raises: :py:class:`waflib.Errors.WafError` if the file cannot be written
+               """
+               if not Options.options.force:
+                       # check if the file is already there to avoid a copy
+                       try:
+                               st1 = os.stat(tgt)
+                               st2 = os.stat(src)
+                       except OSError:
+                               pass
+                       else:
+                               # same size and identical timestamps -> make no copy
+                               if st1.st_mtime + 2 >= st2.st_mtime and st1.st_size == st2.st_size:
+                                       if not self.generator.bld.progress_bar:
+                                               Logs.info('- install %s (from %s)', tgt, lbl)
+                                       return False
+
+               if not self.generator.bld.progress_bar:
+                       Logs.info('+ install %s (from %s)', tgt, lbl)
+
+               # Give best attempt at making destination overwritable,
+               # like the 'install' utility used by 'make install' does.
+               try:
+                       os.chmod(tgt, Utils.O644 | stat.S_IMODE(os.stat(tgt).st_mode))
+               except EnvironmentError:
+                       pass
+
+               # following is for shared libs and stale inodes (-_-)
+               try:
+                       os.remove(tgt)
+               except OSError:
+                       pass
+
+               try:
+                       self.copy_fun(src, tgt)
+               except EnvironmentError ,e:
+                       if not os.path.exists(src):
+                               Logs.error('File %r does not exist', src)
+                       elif not os.path.isfile(src):
+                               Logs.error('Input %r is not a file', src)
+                       raise Errors.WafError('Could not install the file %r' % tgt, e)
+
+       def fix_perms(self, tgt):
+               """
+               Change the ownership of the file/folder/link pointed by the given path
+               This looks up for `install_user` or `install_group` attributes
+               on the task or on the task generator::
+
+                       def build(bld):
+                               bld.install_as('${PREFIX}/wscript',
+                                       'wscript',
+                                       install_user='nobody', install_group='nogroup')
+                               bld.symlink_as('${PREFIX}/wscript_link',
+                                       Utils.subst_vars('${PREFIX}/wscript', bld.env),
+                                       install_user='nobody', install_group='nogroup')
+               """
+               if not Utils.is_win32:
+                       user = getattr(self, 'install_user', None) or getattr(self.generator, 'install_user', None)
+                       group = getattr(self, 'install_group', None) or getattr(self.generator, 'install_group', None)
+                       if user or group:
+                               Utils.lchown(tgt, user or -1, group or -1)
+               if not os.path.islink(tgt):
+                       os.chmod(tgt, self.chmod)
+
+       def do_link(self, src, tgt, **kw):
+               """
+               Creates a symlink from tgt to src.
+
+               :param src: file name as absolute path
+               :type src: string
+               :param tgt: file destination, as absolute path
+               :type tgt: string
+               """
+               if os.path.islink(tgt) and os.readlink(tgt) == src:
+                       if not self.generator.bld.progress_bar:
+                               Logs.info('- symlink %s (to %s)', tgt, src)
+               else:
+                       try:
+                               os.remove(tgt)
+                       except OSError:
+                               pass
+                       if not self.generator.bld.progress_bar:
+                               Logs.info('+ symlink %s (to %s)', tgt, src)
+                       os.symlink(src, tgt)
+                       self.fix_perms(tgt)
+
+       def do_uninstall(self, src, tgt, lbl, **kw):
+               """
+               See :py:meth:`waflib.Build.inst.do_install`
+               """
+               if not self.generator.bld.progress_bar:
+                       Logs.info('- remove %s', tgt)
+
+               #self.uninstall.append(tgt)
+               try:
+                       os.remove(tgt)
+               except OSError ,e:
+                       if e.errno != errno.ENOENT:
+                               if not getattr(self, 'uninstall_error', None):
+                                       self.uninstall_error = True
+                                       Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)')
+                               if Logs.verbose > 1:
+                                       Logs.warn('Could not remove %s (error code %r)', e.filename, e.errno)
+               self.rm_empty_dirs(tgt)
+
+       def do_unlink(self, src, tgt, **kw):
+               """
+               See :py:meth:`waflib.Build.inst.do_link`
+               """
+               try:
+                       if not self.generator.bld.progress_bar:
+                               Logs.info('- remove %s', tgt)
+                       os.remove(tgt)
+               except OSError:
+                       pass
+               self.rm_empty_dirs(tgt)
+
+class InstallContext(BuildContext):
+       '''installs the targets on the system'''
+       cmd = 'install'
+
+       def __init__(self, **kw):
+               super(InstallContext, self).__init__(**kw)
+               # installation tasks read this flag (INSTALL/UNINSTALL) to decide what to perform
+               self.is_install = INSTALL
+
+class UninstallContext(InstallContext):
+       '''removes the targets installed'''
+       cmd = 'uninstall'
+
+       def __init__(self, **kw):
+               super(UninstallContext, self).__init__(**kw)
+               self.is_install = UNINSTALL
+
+       def execute(self):
+               """
+               See :py:func:`waflib.Build.BuildContext.execute`.
+               """
+               # TODO just mark the tasks are already run with hasrun=Task.SKIPPED?
+               try:
+                       # do not execute any tasks
+                       def runnable_status(self):
+                               return Task.SKIP_ME
+                       # temporarily monkey-patch Task.Task.runnable_status so every task is skipped;
+                       # inst.runnable_status converts SKIP_ME back to RUN_ME when is_install is set,
+                       # so only the installation tasks actually execute during an uninstall
+                       setattr(Task.Task, 'runnable_status_back', Task.Task.runnable_status)
+                       setattr(Task.Task, 'runnable_status', runnable_status)
+
+                       super(UninstallContext, self).execute()
+               finally:
+                       # always restore the original method, even if the build failed
+                       setattr(Task.Task, 'runnable_status', Task.Task.runnable_status_back)
+
+class CleanContext(BuildContext):
+       '''cleans the project'''
+       cmd = 'clean'
+       def execute(self):
+               """
+               See :py:func:`waflib.Build.BuildContext.execute`.
+               """
+               self.restore()
+               if not self.all_envs:
+                       self.load_envs()
+
+               self.recurse([self.run_dir])
+               try:
+                       self.clean()
+               finally:
+                       # persist the (reset) caches even if the cleanup failed halfway
+                       self.store()
+
+       def clean(self):
+               """Remove files from the build directory if possible, and reset the caches"""
+               Logs.debug('build: clean called')
+
+               if self.bldnode != self.srcnode:
+                       # would lead to a disaster if top == out
+                       lst = []
+                       for env in self.all_envs.values():
+                               # nodes listed as configuration files (CFG_FILES) are preserved
+                               lst.extend(self.root.find_or_declare(f) for f in env[CFG_FILES])
+                       for n in self.bldnode.ant_glob('**/*', excl='.lock* *conf_check_*/** config.log c4che/*', quiet=True):
+                               if n in lst:
+                                       continue
+                               n.delete()
+               self.root.children = {}
+
+               for v in SAVED_ATTRS:
+                       if v == 'root':
+                               continue
+                       # reset the persisted attributes (except the filesystem root node)
+                       setattr(self, v, {})
+
+class ListContext(BuildContext):
+       '''lists the targets to execute'''
+       cmd = 'list'
+
+       def execute(self):
+               """
+               In addition to printing the name of each build target,
+               a description column will include text for each task
+               generator which has a "description" field set.
+
+               See :py:func:`waflib.Build.BuildContext.execute`.
+               """
+               self.restore()
+               if not self.all_envs:
+                       self.load_envs()
+
+               self.recurse([self.run_dir])
+               self.pre_build()
+
+               # display the time elapsed in the progress bar
+               self.timer = Utils.Timer()
+
+               # post all task generators so that the name cache below is complete
+               for g in self.groups:
+                       for tg in g:
+                               try:
+                                       f = tg.post
+                               except AttributeError:
+                                       pass
+                               else:
+                                       f()
+
+               try:
+                       # force the cache initialization
+                       self.get_tgen_by_name('')
+               except Errors.WafError:
+                       pass
+
+               targets = sorted(self.task_gen_cache_names)
+
+               # figure out how much to left-justify, for largest target name
+               line_just = max(len(t) for t in targets) if targets else 0
+
+               for target in targets:
+                       tgen = self.task_gen_cache_names[target]
+
+                       # Support displaying the description for the target
+                       # if it was set on the tgen
+                       descript = getattr(tgen, 'description', '')
+                       if descript:
+                               target = target.ljust(line_just)
+                               descript = ': %s' % descript
+
+                       Logs.pprint('GREEN', target, label=descript)
+
+class StepContext(BuildContext):
+       '''executes tasks in a step-by-step fashion, for debugging'''
+       cmd = 'step'
+
+       def __init__(self, **kw):
+               super(StepContext, self).__init__(**kw)
+               # comma-separated file patterns from "waf step --files=..."
+               self.files = Options.options.files
+
+       def compile(self):
+               """
+               Overrides :py:meth:`waflib.Build.BuildContext.compile` to perform a partial build
+               on tasks matching the input/output pattern given (regular expression matching)::
+
+                       $ waf step --files=foo.c,bar.c,in:truc.c,out:bar.o
+                       $ waf step --files=in:foo.cpp.1.o # link task only
+
+               Matching tasks are executed directly through :py:meth:`run`,
+               bypassing the normal scheduler and dependency checks.
+               """
+               if not self.files:
+                       # no pattern given: fall back to a full regular build
+                       Logs.warn('Add a pattern for the debug build, for example "waf step --files=main.c,app"')
+                       BuildContext.compile(self)
+                       return
+
+               targets = []
+               if self.targets and self.targets != '*':
+                       targets = self.targets.split(',')
+
+               for g in self.groups:
+                       # post the (optionally filtered) task generators so their tasks exist
+                       for tg in g:
+                               if targets and tg.name not in targets:
+                                       continue
+
+                               try:
+                                       f = tg.post
+                               except AttributeError:
+                                       pass
+                               else:
+                                       f()
+
+                       # then run every task whose inputs or outputs match a pattern
+                       for pat in self.files.split(','):
+                               matcher = self.get_matcher(pat)
+                               for tg in g:
+                                       if isinstance(tg, Task.TaskBase):
+                                               lst = [tg]
+                                       else:
+                                               lst = tg.tasks
+                                       for tsk in lst:
+                                               do_exec = False
+                                               for node in getattr(tsk, 'inputs', []):
+                                                       if matcher(node, output=False):
+                                                               do_exec = True
+                                                               break
+                                               for node in getattr(tsk, 'outputs', []):
+                                                       if matcher(node, output=True):
+                                                               do_exec = True
+                                                               break
+                                               if do_exec:
+                                                       ret = tsk.run()
+                                                       Logs.info('%s -> exit %r', tsk, ret)
+
+       def get_matcher(self, pat):
+               """
+               Converts a step pattern into a function
+
+               :param: pat: pattern of the form in:truc.c,out:bar.o
+               :returns: Python function that uses Node objects as inputs and returns matches
+               :rtype: function
+               """
+               # this returns a function
+               inn = True
+               out = True
+               # a 'in:'/'out:' prefix restricts matching to inputs or outputs only
+               # (note: str.replace removes every occurrence of the prefix text)
+               if pat.startswith('in:'):
+                       out = False
+                       pat = pat.replace('in:', '')
+               elif pat.startswith('out:'):
+                       inn = False
+                       pat = pat.replace('out:', '')
+
+               # if the pattern names an existing node, match by identity;
+               # otherwise anchor it and compile a regular expression
+               anode = self.root.find_node(pat)
+               pattern = None
+               if not anode:
+                       if not pat.startswith('^'):
+                               pat = '^.+?%s' % pat
+                       if not pat.endswith('$'):
+                               pat = '%s$' % pat
+                       pattern = re.compile(pat)
+
+               def match(node, output):
+                       # 'output' tells whether the node is a task output (True) or input (False)
+                       if output == True and not out:
+                               return False
+                       if output == False and not inn:
+                               return False
+
+                       if anode:
+                               return anode == node
+                       else:
+                               return pattern.match(node.abspath())
+               return match
+
+class EnvContext(BuildContext):
+       """Subclass EnvContext to create commands that require configuration data in 'env'"""
+       fun = cmd = None
+       def execute(self):
+               """
+               Restores the configuration data from the cache and runs the user
+               script functions, without performing an actual build.
+
+               See :py:func:`waflib.Build.BuildContext.execute`.
+               """
+               self.restore()
+               if not self.all_envs:
+                       self.load_envs()
+               self.recurse([self.run_dir])
diff --git a/third_party/waf/waflib/ConfigSet.py b/third_party/waf/waflib/ConfigSet.py
new file mode 100644 (file)
index 0000000..899c804
--- /dev/null
@@ -0,0 +1,358 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2016 (ita)
+
+"""
+
+ConfigSet: a special dict
+
+The values put in :py:class:`ConfigSet` must be serializable (dicts, lists, strings)
+"""
+
+import copy, re, os
+from waflib import Logs, Utils
+re_imp = re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$', re.M)
+
+class ConfigSet(object):
+       """
+       A copy-on-write dict with human-readable serialized format. The serialization format
+       is human-readable (python-like) and performed by using eval() and repr().
+       For high performance prefer pickle. Do not store functions as they are not serializable.
+
+       The values can be accessed by attributes or by keys::
+
+               from waflib.ConfigSet import ConfigSet
+               env = ConfigSet()
+               env.FOO = 'test'
+               env['FOO'] = 'test'
+       """
+       # no per-instance __dict__: attribute access for any other name is routed
+       # through __getattr__/__setattr__ below into self.table
+       __slots__ = ('table', 'parent')
+       def __init__(self, filename=None):
+               self.table = {}
+               """
+               Internal dict holding the object values
+               """
+               # 'parent' is deliberately left unset; it is only created by derive(),
+               # and the lookup methods treat a missing parent as "no parent"
+               #self.parent = None
+
+               if filename:
+                       self.load(filename)
+
+       def __contains__(self, key):
+               """
+               Enables the *in* syntax::
+
+                       if 'foo' in env:
+                               print(env['foo'])
+               """
+               if key in self.table: return True
+               try: return self.parent.__contains__(key)
+               except AttributeError: return False # parent may not exist
+
+       def keys(self):
+               """Dict interface: collect the keys of self and of all parents"""
+               keys = set()
+               cur = self
+               while cur:
+                       keys.update(cur.table.keys())
+                       cur = getattr(cur, 'parent', None)
+               keys = list(keys)
+               keys.sort()
+               return keys
+
+       def __iter__(self):
+               return iter(self.keys())
+
+       def __str__(self):
+               """Text representation of the ConfigSet (for debugging purposes)"""
+               return "\n".join(["%r %r" % (x, self.__getitem__(x)) for x in self.keys()])
+
+       def __getitem__(self, key):
+               """
+               Dictionary interface: get value from key::
+
+                       def configure(conf):
+                               conf.env['foo'] = {}
+                               print(env['foo'])
+
+               Missing keys return an empty list rather than raising.
+               """
+               try:
+                       # walk up the parent chain by rebinding the local 'self';
+                       # the loop ends when a parentless set raises AttributeError
+                       while 1:
+                               x = self.table.get(key)
+                               if not x is None:
+                                       return x
+                               self = self.parent
+               except AttributeError:
+                       return []
+
+       def __setitem__(self, key, value):
+               """
+               Dictionary interface: set value for key
+               """
+               self.table[key] = value
+
+       def __delitem__(self, key):
+               """
+               Dictionary interface: mark the key as missing
+               """
+               # stores [] locally instead of deleting, so that a value
+               # inherited from a parent ConfigSet stays shadowed
+               self[key] = []
+
+       def __getattr__(self, name):
+               """
+               Attribute access provided for convenience. The following forms are equivalent::
+
+                       def configure(conf):
+                               conf.env.value
+                               conf.env['value']
+               """
+               if name in self.__slots__:
+                       # NOTE(review): object has no __getattr__, so this effectively
+                       # raises AttributeError for an unset slot - confirm intended
+                       return object.__getattr__(self, name)
+               else:
+                       return self[name]
+
+       def __setattr__(self, name, value):
+               """
+               Attribute access provided for convenience. The following forms are equivalent::
+
+                       def configure(conf):
+                               conf.env.value = x
+                               env['value'] = x
+               """
+               if name in self.__slots__:
+                       object.__setattr__(self, name, value)
+               else:
+                       # any non-slot attribute is stored as a table entry
+                       self[name] = value
+
+       def __delattr__(self, name):
+               """
+               Attribute access provided for convenience. The following forms are equivalent::
+
+                       def configure(conf):
+                               del env.value
+                               del env['value']
+               """
+               if name in self.__slots__:
+                       object.__delattr__(self, name)
+               else:
+                       del self[name]
+
+       def derive(self):
+               """
+               Returns a new ConfigSet deriving from self. The copy returned
+               will be a shallow copy::
+
+                       from waflib.ConfigSet import ConfigSet
+                       env = ConfigSet()
+                       env.append_value('CFLAGS', ['-O2'])
+                       child = env.derive()
+                       child.CFLAGS.append('test') # warning! this will modify 'env'
+                       child.CFLAGS = ['-O3'] # new list, ok
+                       child.append_value('CFLAGS', ['-O3']) # ok
+
+               Use :py:func:`ConfigSet.detach` to detach the child from the parent.
+               """
+               newenv = ConfigSet()
+               newenv.parent = self
+               return newenv
+
+       def detach(self):
+               """
+               Detaches this instance from its parent (if present)
+
+               Modifying the parent :py:class:`ConfigSet` will not change the current object
+               Modifying this :py:class:`ConfigSet` will not modify the parent one.
+               """
+               tbl = self.get_merged_dict()
+               try:
+                       delattr(self, 'parent')
+               except AttributeError:
+                       # no parent: nothing to detach from, keep the table as-is
+                       pass
+               else:
+                       # deep-copy the merged values so later mutations cannot
+                       # leak back into the former parent
+                       keys = tbl.keys()
+                       for x in keys:
+                               tbl[x] = copy.deepcopy(tbl[x])
+                       self.table = tbl
+               return self
+
+       def get_flat(self, key):
+               """
+               Returns a value as a string. If the input is a list, the value returned is space-separated.
+
+               :param key: key to use
+               :type key: string
+               """
+               s = self[key]
+               if isinstance(s, str): return s
+               return ' '.join(s)
+
+       def _get_list_value_for_modification(self, key):
+               """
+               Returns a list value for further modification.
+
+               The list may be modified inplace and there is no need to do this afterwards::
+
+                       self.table[var] = value
+               """
+               try:
+                       value = self.table[key]
+               except KeyError:
+                       # not set locally: copy the parent value (if any) into
+                       # self.table so modifications stay local to this object
+                       try:
+                               value = self.parent[key]
+                       except AttributeError:
+                               value = []
+                       else:
+                               if isinstance(value, list):
+                                       # force a copy
+                                       value = value[:]
+                               else:
+                                       value = [value]
+                       self.table[key] = value
+               else:
+                       if not isinstance(value, list):
+                               self.table[key] = value = [value]
+               return value
+
+       def append_value(self, var, val):
+               """
+               Appends a value to the specified config key::
+
+                       def build(bld):
+                               bld.env.append_value('CFLAGS', ['-O2'])
+
+               The value must be a list or a tuple
+               """
+               if isinstance(val, str): # if there were string everywhere we could optimize this
+                       val = [val]
+               current_value = self._get_list_value_for_modification(var)
+               current_value.extend(val)
+
+       def prepend_value(self, var, val):
+               """
+               Prepends a value to the specified item::
+
+                       def configure(conf):
+                               conf.env.prepend_value('CFLAGS', ['-O2'])
+
+               The value must be a list or a tuple
+               """
+               if isinstance(val, str):
+                       val = [val]
+               self.table[var] =  val + self._get_list_value_for_modification(var)
+
+       def append_unique(self, var, val):
+               """
+               Appends a value to the specified item only if it's not already present::
+
+                       def build(bld):
+                               bld.env.append_unique('CFLAGS', ['-O2', '-g'])
+
+               The value must be a list or a tuple
+               """
+               if isinstance(val, str):
+                       val = [val]
+               current_value = self._get_list_value_for_modification(var)
+
+               for x in val:
+                       if x not in current_value:
+                               current_value.append(x)
+
+       def get_merged_dict(self):
+               """
+               Computes the merged dictionary from the fusion of self and all its parent
+
+               :rtype: a ConfigSet object
+               """
+               # collect tables from the root parent down to self, so that
+               # closer (child) values override parent values in update()
+               table_list = []
+               env = self
+               while 1:
+                       table_list.insert(0, env.table)
+                       try: env = env.parent
+                       except AttributeError: break
+               merged_table = {}
+               for table in table_list:
+                       merged_table.update(table)
+               return merged_table
+
+       def store(self, filename):
+               """
+               Serializes the :py:class:`ConfigSet` data to a file. See :py:meth:`ConfigSet.load` for reading such files.
+
+               :param filename: file to use
+               :type filename: string
+               """
+               try:
+                       os.makedirs(os.path.split(filename)[0])
+               except OSError:
+                       # the destination folder usually exists already
+                       pass
+
+               buf = []
+               merged_table = self.get_merged_dict()
+               keys = list(merged_table.keys())
+               keys.sort()
+
+               try:
+                       # ascii() exists on Python 3 only; repr() on Python 2
+                       # behaves the same for the values stored here
+                       fun = ascii
+               except NameError:
+                       fun = repr
+
+               for k in keys:
+                       # 'undo_stack' is transient state from stash() and is
+                       # deliberately excluded from serialization
+                       if k != 'undo_stack':
+                               buf.append('%s = %s\n' % (k, fun(merged_table[k])))
+               Utils.writef(filename, ''.join(buf))
+
+       def load(self, filename):
+               """
+               Restores contents from a file (current values are not cleared). Files are written using :py:meth:`ConfigSet.store`.
+
+               The values are rebuilt with eval(), so the cache files must be trusted input.
+
+               :param filename: file to use
+               :type filename: string
+               """
+               tbl = self.table
+               code = Utils.readf(filename, m='rU')
+               for m in re_imp.finditer(code):
+                       g = m.group
+                       tbl[g(2)] = eval(g(3))
+               Logs.debug('env: %s', self.table)
+
+       def update(self, d):
+               """
+               Dictionary interface: replace values with the ones from another dict
+
+               :param d: object to use the value from
+               :type d: dict-like object
+               """
+               self.table.update(d)
+
+       def stash(self):
+               """
+               Stores the object state to provide transactionality semantics::
+
+                       env = ConfigSet()
+                       env.stash()
+                       try:
+                               env.append_value('CFLAGS', '-O3')
+                               call_some_method(env)
+                       finally:
+                               env.revert()
+
+               The history is kept in a stack, and is lost during the serialization by :py:meth:`ConfigSet.store`
+               """
+               # 'undo_stack' is not in __slots__, so __setattr__/__getattr__ route
+               # it into self.table like a regular value (store() filters it out)
+               orig = self.table
+               tbl = self.table = self.table.copy()
+               for x in tbl.keys():
+                       tbl[x] = copy.deepcopy(tbl[x])
+               self.undo_stack = self.undo_stack + [orig]
+
+       def commit(self):
+               """
+               Commits transactional changes. See :py:meth:`ConfigSet.stash`
+               """
+               self.undo_stack.pop(-1)
+
+       def revert(self):
+               """
+               Reverts the object to a previous state. See :py:meth:`ConfigSet.stash`
+               """
+               self.table = self.undo_stack.pop(-1)
diff --git a/third_party/waf/waflib/Configure.py b/third_party/waf/waflib/Configure.py
new file mode 100644 (file)
index 0000000..aa42f2f
--- /dev/null
@@ -0,0 +1,641 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2016 (ita)
+
+"""
+Configuration system
+
+A :py:class:`waflib.Configure.ConfigurationContext` instance is created when ``waf configure`` is called, it is used to:
+
+* create data dictionaries (ConfigSet instances)
+* store the list of modules to import
+* hold configuration routines such as ``find_program``, etc
+"""
+
+import os, shlex, sys, time, re, shutil
+from waflib import ConfigSet, Utils, Options, Logs, Context, Build, Errors
+
+WAF_CONFIG_LOG = 'config.log'
+"""Name of the configuration log file"""
+
+autoconfig = False
+"""Execute the configuration automatically"""
+
+# header template written at the top of config.log by ConfigurationContext.execute()
+conf_template = '''# project %(app)s configured on %(now)s by
+# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s)
+# using %(args)s
+#'''
+
+class ConfigurationContext(Context.Context):
+       '''configures the project'''
+
+       cmd = 'configure'
+
+       error_handlers = []
+       """
+       Additional functions to handle configuration errors
+       """
+
+       def __init__(self, **kw):
+               super(ConfigurationContext, self).__init__(**kw)
+               self.environ = dict(os.environ)
+               self.all_envs = {}
+
+               self.top_dir = None
+               self.out_dir = None
+
+               self.tools = [] # tools loaded in the configuration, and that will be loaded when building
+
+               self.hash = 0
+               self.files = []
+
+               self.tool_cache = []
+
+               self.setenv('')
+
+       def setenv(self, name, env=None):
+               """
+               Set a new config set for conf.env. If a config set of that name already exists,
+               recall it without modification.
+
+               The name is the filename prefix to save to ``c4che/NAME_cache.py``, and it
+               is also used as *variants* by the build commands.
+               Though related to variants, whatever kind of data may be stored in the config set::
+
+                       def configure(cfg):
+                               cfg.env.ONE = 1
+                               cfg.setenv('foo')
+                               cfg.env.ONE = 2
+
+                       def build(bld):
+                               2 == bld.env_of_name('foo').ONE
+
+               :param name: name of the configuration set
+               :type name: string
+               :param env: ConfigSet to copy, or an empty ConfigSet is created
+               :type env: :py:class:`waflib.ConfigSet.ConfigSet`
+               """
+               if name not in self.all_envs or env:
+                       if not env:
+                               env = ConfigSet.ConfigSet()
+                               self.prepare_env(env)
+                       else:
+                               env = env.derive()
+                       self.all_envs[name] = env
+               self.variant = name
+
+       def get_env(self):
+               """Getter for the env property"""
+               return self.all_envs[self.variant]
+       def set_env(self, val):
+               """Setter for the env property"""
+               self.all_envs[self.variant] = val
+
+       env = property(get_env, set_env)
+
+       def init_dirs(self):
+               """
+               Initialize the project directory and the build directory
+               """
+
+               top = self.top_dir
+               if not top:
+                       top = Options.options.top
+               if not top:
+                       top = getattr(Context.g_module, Context.TOP, None)
+               if not top:
+                       top = self.path.abspath()
+               top = os.path.abspath(top)
+
+               self.srcnode = (os.path.isabs(top) and self.root or self.path).find_dir(top)
+               assert(self.srcnode)
+
+               out = self.out_dir
+               if not out:
+                       out = Options.options.out
+               if not out:
+                       out = getattr(Context.g_module, Context.OUT, None)
+               if not out:
+                       out = Options.lockfile.replace('.lock-waf_%s_' % sys.platform, '').replace('.lock-waf', '')
+
+               # someone can be messing with symlinks
+               out = os.path.realpath(out)
+
+               self.bldnode = (os.path.isabs(out) and self.root or self.path).make_node(out)
+               self.bldnode.mkdir()
+
+               if not os.path.isdir(self.bldnode.abspath()):
+                       conf.fatal('Could not create the build directory %s' % self.bldnode.abspath())
+
+       def execute(self):
+               """
+               See :py:func:`waflib.Context.Context.execute`
+               """
+               self.init_dirs()
+
+               self.cachedir = self.bldnode.make_node(Build.CACHE_DIR)
+               self.cachedir.mkdir()
+
+               path = os.path.join(self.bldnode.abspath(), WAF_CONFIG_LOG)
+               self.logger = Logs.make_logger(path, 'cfg')
+
+               app = getattr(Context.g_module, 'APPNAME', '')
+               if app:
+                       ver = getattr(Context.g_module, 'VERSION', '')
+                       if ver:
+                               app = "%s (%s)" % (app, ver)
+
+               params = {'now': time.ctime(), 'pyver': sys.hexversion, 'systype': sys.platform, 'args': " ".join(sys.argv), 'wafver': Context.WAFVERSION, 'abi': Context.ABI, 'app': app}
+               self.to_log(conf_template % params)
+               self.msg('Setting top to', self.srcnode.abspath())
+               self.msg('Setting out to', self.bldnode.abspath())
+
+               if id(self.srcnode) == id(self.bldnode):
+                       Logs.warn('Setting top == out')
+               elif id(self.path) != id(self.srcnode):
+                       if self.srcnode.is_child_of(self.path):
+                               Logs.warn('Are you certain that you do not want to set top="." ?')
+
+               super(ConfigurationContext, self).execute()
+
+               self.store()
+
+               Context.top_dir = self.srcnode.abspath()
+               Context.out_dir = self.bldnode.abspath()
+
+               # this will write a configure lock so that subsequent builds will
+               # consider the current path as the root directory (see prepare_impl).
+               # to remove: use 'waf distclean'
+               env = ConfigSet.ConfigSet()
+               env.argv = sys.argv
+               env.options = Options.options.__dict__
+               env.config_cmd = self.cmd
+
+               env.run_dir = Context.run_dir
+               env.top_dir = Context.top_dir
+               env.out_dir = Context.out_dir
+
+               # conf.hash & conf.files hold wscript files paths and hash
+               # (used only by Configure.autoconfig)
+               env.hash = self.hash
+               env.files = self.files
+               env.environ = dict(self.environ)
+
+               if not (self.env.NO_LOCK_IN_RUN or env.environ.get('NO_LOCK_IN_RUN') or getattr(Options.options, 'no_lock_in_run')):
+                       env.store(os.path.join(Context.run_dir, Options.lockfile))
+               if not (self.env.NO_LOCK_IN_TOP or env.environ.get('NO_LOCK_IN_TOP') or getattr(Options.options, 'no_lock_in_top')):
+                       env.store(os.path.join(Context.top_dir, Options.lockfile))
+               if not (self.env.NO_LOCK_IN_OUT or env.environ.get('NO_LOCK_IN_OUT') or getattr(Options.options, 'no_lock_in_out')):
+                       env.store(os.path.join(Context.out_dir, Options.lockfile))
+
+       def prepare_env(self, env):
+               """
+               Insert *PREFIX*, *BINDIR* and *LIBDIR* values into ``env``
+
+               :type env: :py:class:`waflib.ConfigSet.ConfigSet`
+               :param env: a ConfigSet, usually ``conf.env``
+               """
+               if not env.PREFIX:
+                       if Options.options.prefix or Utils.is_win32:
+                               env.PREFIX = Utils.sane_path(Options.options.prefix)
+                       else:
+                               env.PREFIX = ''
+               if not env.BINDIR:
+                       if Options.options.bindir:
+                               env.BINDIR = Utils.sane_path(Options.options.bindir)
+                       else:
+                               env.BINDIR = Utils.subst_vars('${PREFIX}/bin', env)
+               if not env.LIBDIR:
+                       if Options.options.libdir:
+                               env.LIBDIR = Utils.sane_path(Options.options.libdir)
+                       else:
+                               env.LIBDIR = Utils.subst_vars('${PREFIX}/lib%s' % Utils.lib64(), env)
+
+       def store(self):
+               """Save the config results into the cache file"""
+               n = self.cachedir.make_node('build.config.py')
+               n.write('version = 0x%x\ntools = %r\n' % (Context.HEXVERSION, self.tools))
+
+               if not self.all_envs:
+                       self.fatal('nothing to store in the configuration context!')
+
+               for key in self.all_envs:
+                       tmpenv = self.all_envs[key]
+                       tmpenv.store(os.path.join(self.cachedir.abspath(), key + Build.CACHE_SUFFIX))
+
+       def load(self, input, tooldir=None, funs=None, with_sys_path=True, cache=False):
+               """
+               Load Waf tools, which will be imported whenever a build is started.
+
+               :param input: waf tools to import
+               :type input: list of string
+               :param tooldir: paths for the imports
+               :type tooldir: list of string
+               :param funs: functions to execute from the waf tools
+               :type funs: list of string
+               :param cache: whether to prevent the tool from running twice
+               :type cache: bool
+               """
+
+               tools = Utils.to_list(input)
+               if tooldir: tooldir = Utils.to_list(tooldir)
+               for tool in tools:
+                       # avoid loading the same tool more than once with the same functions
+                       # used by composite projects
+
+                       if cache:
+                               mag = (tool, id(self.env), tooldir, funs)
+                               if mag in self.tool_cache:
+                                       self.to_log('(tool %s is already loaded, skipping)' % tool)
+                                       continue
+                               self.tool_cache.append(mag)
+
+                       module = None
+                       try:
+                               module = Context.load_tool(tool, tooldir, ctx=self, with_sys_path=with_sys_path)
+                       except ImportError ,e:
+                               self.fatal('Could not load the Waf tool %r from %r\n%s' % (tool, sys.path, e))
+                       except Exception ,e:
+                               self.to_log('imp %r (%r & %r)' % (tool, tooldir, funs))
+                               self.to_log(Utils.ex_stack())
+                               raise
+
+                       if funs is not None:
+                               self.eval_rules(funs)
+                       else:
+                               func = getattr(module, 'configure', None)
+                               if func:
+                                       if type(func) is type(Utils.readf): func(self)
+                                       else: self.eval_rules(func)
+
+                       self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs})
+
+       def post_recurse(self, node):
+               """
+               Records the path and a hash of the scripts visited, see :py:meth:`waflib.Context.Context.post_recurse`
+
+               :param node: script
+               :type node: :py:class:`waflib.Node.Node`
+               """
+               super(ConfigurationContext, self).post_recurse(node)
+               self.hash = Utils.h_list((self.hash, node.read('rb')))
+               self.files.append(node.abspath())
+
+       def eval_rules(self, rules):
+               """
+               Execute configuration tests provided as list of funcitons to run
+
+               :param rules: list of configuration method names
+               :type rules: list of string
+               """
+               self.rules = Utils.to_list(rules)
+               for x in self.rules:
+                       f = getattr(self, x)
+                       if not f:
+                               self.fatal('No such configuration function %r' % x)
+                       f()
+
+def conf(f):
+       """
+       Decorator: attach new configuration functions to :py:class:`waflib.Build.BuildContext` and
+       :py:class:`waflib.Configure.ConfigurationContext`. The methods bound will accept a parameter
+       named 'mandatory' to disable the configuration errors::
+
+               def configure(conf):
+                       conf.find_program('abc', mandatory=False)
+
+       :param f: method to bind
+       :type f: function
+       """
+       def fun(*k, **kw):
+               mandatory = True
+               if 'mandatory' in kw:
+                       mandatory = kw['mandatory']
+                       del kw['mandatory']
+
+               try:
+                       return f(*k, **kw)
+               except Errors.ConfigurationError:
+                       if mandatory:
+                               raise
+
+       fun.__name__ = f.__name__
+       setattr(ConfigurationContext, f.__name__, fun)
+       setattr(Build.BuildContext, f.__name__, fun)
+       return f
+
+@conf
+def add_os_flags(self, var, dest=None, dup=False):
+       """
+       Import operating system environment values into ``conf.env`` dict::
+
+               def configure(conf):
+                       conf.add_os_flags('CFLAGS')
+
+       :param var: variable to use
+       :type var: string
+       :param dest: destination variable, by default the same as var
+       :type dest: string
+       :param dup: add the same set of flags again
+       :type dup: bool
+       """
+       try:
+               flags = shlex.split(self.environ[var])
+       except KeyError:
+               return
+       if dup or ''.join(flags) not in ''.join(Utils.to_list(self.env[dest or var])):
+               self.env.append_value(dest or var, flags)
+
+@conf
+def cmd_to_list(self, cmd):
+       """
+       Detect if a command is written in pseudo shell like ``ccache g++`` and return a list.
+
+       :param cmd: command
+       :type cmd: a string or a list of string
+       """
+       if isinstance(cmd, str):
+               if os.path.isfile(cmd):
+                       # do not take any risk
+                       return [cmd]
+               if os.sep == '/':
+                       return shlex.split(cmd)
+               else:
+                       try:
+                               return shlex.split(cmd, posix=False)
+                       except TypeError:
+                               # Python 2.5 on windows?
+                               return shlex.split(cmd)
+       return cmd
+
+@conf
+def check_waf_version(self, mini='1.8.99', maxi='2.0.0', **kw):
+       """
+       Raise a Configuration error if the Waf version does not strictly match the given bounds::
+
+               conf.check_waf_version(mini='1.8.99', maxi='2.0.0')
+
+       :type  mini: number, tuple or string
+       :param mini: Minimum required version
+       :type  maxi: number, tuple or string
+       :param maxi: Maximum allowed version
+       """
+       self.start_msg('Checking for waf version in %s-%s' % (str(mini), str(maxi)), **kw)
+       ver = Context.HEXVERSION
+       if Utils.num2ver(mini) > ver:
+               self.fatal('waf version should be at least %r (%r found)' % (Utils.num2ver(mini), ver))
+       if Utils.num2ver(maxi) < ver:
+               self.fatal('waf version should be at most %r (%r found)' % (Utils.num2ver(maxi), ver))
+       self.end_msg('ok', **kw)
+
+@conf
+def find_file(self, filename, path_list=[]):
+       """
+       Find a file in a list of paths
+
+       :param filename: name of the file to search for
+       :param path_list: list of directories to search
+       :return: the first occurrence filename or '' if filename could not be found
+       """
+       for n in Utils.to_list(filename):
+               for d in Utils.to_list(path_list):
+                       p = os.path.expanduser(os.path.join(d, n))
+                       if os.path.exists(p):
+                               return p
+       self.fatal('Could not find %r' % filename)
+
+@conf
+def find_program(self, filename, **kw):
+       """
+       Search for a program on the operating system
+
+       When var is used, you may set os.environ[var] to help find a specific program version, for example::
+
+               $ CC='ccache gcc' waf configure
+
+       :param path_list: paths to use for searching
+       :type param_list: list of string
+       :param var: store the result to conf.env[var] where var defaults to filename.upper() if not provided; the result is stored as a list of strings
+       :type var: string
+       :param value: obtain the program from the value passed exclusively
+       :type value: list or string (list is preferred)
+       :param ext: list of extensions for the binary (do not add an extension for portability)
+       :type ext: list of string
+       :param msg: name to display in the log, by default filename is used
+       :type msg: string
+       :param interpreter: interpreter for the program
+       :type interpreter: ConfigSet variable key
+       """
+
+       exts = kw.get('exts', Utils.is_win32 and '.exe,.com,.bat,.cmd' or ',.sh,.pl,.py')
+
+       environ = kw.get('environ', getattr(self, 'environ', os.environ))
+
+       ret = ''
+
+       filename = Utils.to_list(filename)
+       msg = kw.get('msg', ', '.join(filename))
+
+       var = kw.get('var', '')
+       if not var:
+               var = re.sub(r'[-.]', '_', filename[0].upper())
+
+       path_list = kw.get('path_list', '')
+       if path_list:
+               path_list = Utils.to_list(path_list)
+       else:
+               path_list = environ.get('PATH', '').split(os.pathsep)
+
+       if kw.get('value'):
+               # user-provided in command-line options and passed to find_program
+               ret = self.cmd_to_list(kw['value'])
+       elif environ.get(var):
+               # user-provided in the os environment
+               ret = self.cmd_to_list(environ[var])
+       elif self.env[var]:
+               # a default option in the wscript file
+               ret = self.cmd_to_list(self.env[var])
+       else:
+               if not ret:
+                       ret = self.find_binary(filename, exts.split(','), path_list)
+               if not ret and Utils.winreg:
+                       ret = Utils.get_registry_app_path(Utils.winreg.HKEY_CURRENT_USER, filename)
+               if not ret and Utils.winreg:
+                       ret = Utils.get_registry_app_path(Utils.winreg.HKEY_LOCAL_MACHINE, filename)
+               ret = self.cmd_to_list(ret)
+
+       if ret:
+               if len(ret) == 1:
+                       retmsg = ret[0]
+               else:
+                       retmsg = ret
+       else:
+               retmsg = False
+
+       self.msg('Checking for program %r' % msg, retmsg, **kw)
+       if not kw.get('quiet'):
+               self.to_log('find program=%r paths=%r var=%r -> %r' % (filename, path_list, var, ret))
+
+       if not ret:
+               self.fatal(kw.get('errmsg', '') or 'Could not find the program %r' % filename)
+
+       interpreter = kw.get('interpreter')
+       if interpreter is None:
+               if not Utils.check_exe(ret[0], env=environ):
+                       self.fatal('Program %r is not executable' % ret)
+               self.env[var] = ret
+       else:
+               self.env[var] = self.env[interpreter] + ret
+
+       return ret
+
+@conf
+def find_binary(self, filenames, exts, paths):
+       for f in filenames:
+               for ext in exts:
+                       exe_name = f + ext
+                       if os.path.isabs(exe_name):
+                               if os.path.isfile(exe_name):
+                                       return exe_name
+                       else:
+                               for path in paths:
+                                       x = os.path.expanduser(os.path.join(path, exe_name))
+                                       if os.path.isfile(x):
+                                               return x
+       return None
+
+@conf
+def run_build(self, *k, **kw):
+       """
+       Create a temporary build context to execute a build. A reference to that build
+       context is kept on self.test_bld for debugging purposes, and you should not rely
+       on it too much (read the note on the cache below).
+       The parameters given in the arguments to this function are passed as arguments for
+       a single task generator created in the build. Only three parameters are obligatory:
+
+       :param features: features to pass to a task generator created in the build
+       :type features: list of string
+       :param compile_filename: file to create for the compilation (default: *test.c*)
+       :type compile_filename: string
+       :param code: code to write in the filename to compile
+       :type code: string
+
+       Though this function returns *0* by default, the build may set an attribute named *retval* on the
+       build context object to return a particular value. See :py:func:`waflib.Tools.c_config.test_exec_fun` for example.
+
+       This function also provides a limited cache. To use it, provide the following option::
+
+               def options(opt):
+                       opt.add_option('--confcache', dest='confcache', default=0,
+                               action='count', help='Use a configuration cache')
+
+       And execute the configuration with the following command-line::
+
+               $ waf configure --confcache
+
+       """
+       lst = [str(v) for (p, v) in kw.items() if p != 'env']
+       h = Utils.h_list(lst)
+       dir = self.bldnode.abspath() + os.sep + (not Utils.is_win32 and '.' or '') + 'conf_check_' + Utils.to_hex(h)
+
+       try:
+               os.makedirs(dir)
+       except OSError:
+               pass
+
+       try:
+               os.stat(dir)
+       except OSError:
+               self.fatal('cannot use the configuration test folder %r' % dir)
+
+       cachemode = getattr(Options.options, 'confcache', None)
+       if cachemode == 1:
+               try:
+                       proj = ConfigSet.ConfigSet(os.path.join(dir, 'cache_run_build'))
+               except EnvironmentError:
+                       pass
+               else:
+                       ret = proj['cache_run_build']
+                       if isinstance(ret, str) and ret.startswith('Test does not build'):
+                               self.fatal(ret)
+                       return ret
+
+       bdir = os.path.join(dir, 'testbuild')
+
+       if not os.path.exists(bdir):
+               os.makedirs(bdir)
+
+       cls_name = kw.get('run_build_cls') or getattr(self, 'run_build_cls', 'build')
+       self.test_bld = bld = Context.create_context(cls_name, top_dir=dir, out_dir=bdir)
+       bld.init_dirs()
+       bld.progress_bar = 0
+       bld.targets = '*'
+
+       bld.logger = self.logger
+       bld.all_envs.update(self.all_envs) # not really necessary
+       bld.env = kw['env']
+
+       bld.kw = kw
+       bld.conf = self
+       kw['build_fun'](bld)
+       ret = -1
+       try:
+               try:
+                       bld.compile()
+               except Errors.WafError:
+                       ret = 'Test does not build: %s' % Utils.ex_stack()
+                       self.fatal(ret)
+               else:
+                       ret = getattr(bld, 'retval', 0)
+       finally:
+               if cachemode == 1:
+                       # cache the results each time
+                       proj = ConfigSet.ConfigSet()
+                       proj['cache_run_build'] = ret
+                       proj.store(os.path.join(dir, 'cache_run_build'))
+               else:
+                       shutil.rmtree(dir)
+       return ret
+
+@conf
+def ret_msg(self, msg, args):
+       if isinstance(msg, str):
+               return msg
+       return msg(args)
+
+@conf
+def test(self, *k, **kw):
+
+       if not 'env' in kw:
+               kw['env'] = self.env.derive()
+
+       # validate_c for example
+       if kw.get('validate'):
+               kw['validate'](kw)
+
+       self.start_msg(kw['msg'], **kw)
+       ret = None
+       try:
+               ret = self.run_build(*k, **kw)
+       except self.errors.ConfigurationError:
+               self.end_msg(kw['errmsg'], 'YELLOW', **kw)
+               if Logs.verbose > 1:
+                       raise
+               else:
+                       self.fatal('The configuration failed')
+       else:
+               kw['success'] = ret
+
+       if kw.get('post_check'):
+               ret = kw['post_check'](kw)
+
+       if ret:
+               self.end_msg(kw['errmsg'], 'YELLOW', **kw)
+               self.fatal('The configuration failed %r' % ret)
+       else:
+               self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw)
+       return ret
diff --git a/third_party/waf/waflib/Context.py b/third_party/waf/waflib/Context.py
new file mode 100644 (file)
index 0000000..4a3b892
--- /dev/null
@@ -0,0 +1,723 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2010-2016 (ita)
+
+"""
+Classes and functions enabling the command system
+"""
+
+import os, re, imp, sys
+from waflib import Utils, Errors, Logs
+import waflib.Node
+
+# the following 3 constants are updated on each new release (do not touch)
+HEXVERSION=0x1090a00
+"""Constant updated on new releases"""
+
+WAFVERSION="1.9.10"
+"""Constant updated on new releases"""
+
+WAFREVISION="ae3f254315e0dcea4059703987148882ba414894"
+"""Git revision when the waf version is updated"""
+
+ABI = 99
+"""Version of the build data cache file format (used in :py:const:`waflib.Context.DBFILE`)"""
+
+DBFILE = '.wafpickle-%s-%d-%d' % (sys.platform, sys.hexversion, ABI)
+"""Name of the pickle file for storing the build data"""
+
+APPNAME = 'APPNAME'
+"""Default application name (used by ``waf dist``)"""
+
+VERSION = 'VERSION'
+"""Default application version (used by ``waf dist``)"""
+
+TOP  = 'top'
+"""The variable name for the top-level directory in wscript files"""
+
+OUT  = 'out'
+"""The variable name for the output directory in wscript files"""
+
+WSCRIPT_FILE = 'wscript'
+"""Name of the waf script files"""
+
+launch_dir = ''
+"""Directory from which waf has been called"""
+run_dir = ''
+"""Location of the wscript file to use as the entry point"""
+top_dir = ''
+"""Location of the project directory (top), if the project was configured"""
+out_dir = ''
+"""Location of the build directory (out), if the project was configured"""
+waf_dir = ''
+"""Directory containing the waf modules"""
+
+g_module = None
+"""
+Module representing the top-level wscript file (see :py:const:`waflib.Context.run_dir`)
+"""
+
+STDOUT = 1
+STDERR = -1
+BOTH   = 0
+
+classes = []
+"""
+List of :py:class:`waflib.Context.Context` subclasses that can be used as waf commands. The classes
+are added automatically by a metaclass.
+"""
+
+def create_context(cmd_name, *k, **kw):
+       """
+       Returns a new :py:class:`waflib.Context.Context` instance corresponding to the given command.
+       Used in particular by :py:func:`waflib.Scripting.run_command`
+
+       :param cmd_name: command name
+       :type cmd_name: string
+       :param k: arguments to give to the context class initializer
+       :type k: list
+       :param kw: keyword arguments to give to the context class initializer
+       :type kw: dict
+       :return: Context object
+       :rtype: :py:class:`waflib.Context.Context`
+       """
+       global classes
+       for x in classes:
+               if x.cmd == cmd_name:
+                       return x(*k, **kw)
+       ctx = Context(*k, **kw)
+       ctx.fun = cmd_name
+       return ctx
+
+class store_context(type):
+       """
+       Metaclass that registers command classes into the list :py:const:`waflib.Context.classes`
+       Context classes must provide an attribute 'cmd' representing the command name, and a function
+       attribute 'fun' representing the function name that the command uses.
+       """
+       def __init__(cls, name, bases, dict):
+               super(store_context, cls).__init__(name, bases, dict)
+               name = cls.__name__
+
+               if name in ('ctx', 'Context'):
+                       return
+
+               try:
+                       cls.cmd
+               except AttributeError:
+                       raise Errors.WafError('Missing command for the context class %r (cmd)' % name)
+
+               if not getattr(cls, 'fun', None):
+                       cls.fun = cls.cmd
+
+               global classes
+               classes.insert(0, cls)
+
+ctx = store_context('ctx', (object,), {})
+"""Base class for all :py:class:`waflib.Context.Context` classes"""
+
+class Context(ctx):
+       """
+       Default context for waf commands, and base class for new command contexts.
+
+       Context objects are passed to top-level functions::
+
+               def foo(ctx):
+                       print(ctx.__class__.__name__) # waflib.Context.Context
+
+       Subclasses must define the class attributes 'cmd' and 'fun':
+
+       :param cmd: command to execute as in ``waf cmd``
+       :type cmd: string
+       :param fun: function name to execute when the command is called
+       :type fun: string
+
+       .. inheritance-diagram:: waflib.Context.Context waflib.Build.BuildContext waflib.Build.InstallContext waflib.Build.UninstallContext waflib.Build.StepContext waflib.Build.ListContext waflib.Configure.ConfigurationContext waflib.Scripting.Dist waflib.Scripting.DistCheck waflib.Build.CleanContext
+
+       """
+
+       errors = Errors
+       """
+       Shortcut to :py:mod:`waflib.Errors` provided for convenience
+       """
+
+       tools = {}
+       """
+       A module cache for wscript files; see :py:meth:`Context.Context.load`
+       """
+
+       def __init__(self, **kw):
+               try:
+                       rd = kw['run_dir']
+               except KeyError:
+                       global run_dir
+                       rd = run_dir
+
+               # binds the context to the nodes in use to avoid a context singleton
+               self.node_class = type('Nod3', (waflib.Node.Node,), {})
+               self.node_class.__module__ = 'waflib.Node'
+               self.node_class.ctx = self
+
+               self.root = self.node_class('', None)
+               self.cur_script = None
+               self.path = self.root.find_dir(rd)
+
+               self.stack_path = []
+               self.exec_dict = {'ctx':self, 'conf':self, 'bld':self, 'opt':self}
+               self.logger = None
+
+       def finalize(self):
+               """
+               Called to free resources such as logger files
+               """
+               try:
+                       logger = self.logger
+               except AttributeError:
+                       pass
+               else:
+                       Logs.free_logger(logger)
+                       delattr(self, 'logger')
+
+       def load(self, tool_list, *k, **kw):
+               """
+               Loads a Waf tool as a module, and try calling the function named :py:const:`waflib.Context.Context.fun`
+               from it.  A ``tooldir`` argument may be provided as a list of module paths.
+
+               :param tool_list: list of Waf tool names to load
+               :type tool_list: list of string or space-separated string
+               """
+               tools = Utils.to_list(tool_list)
+               path = Utils.to_list(kw.get('tooldir', ''))
+               with_sys_path = kw.get('with_sys_path', True)
+
+               for t in tools:
+                       module = load_tool(t, path, with_sys_path=with_sys_path)
+                       fun = getattr(module, kw.get('name', self.fun), None)
+                       if fun:
+                               fun(self)
+
+       def execute(self):
+               """
+               Here, it calls the function name in the top-level wscript file. Most subclasses
+               redefine this method to provide additional functionality.
+               """
+               global g_module
+               self.recurse([os.path.dirname(g_module.root_path)])
+
+       def pre_recurse(self, node):
+               """
+               Method executed immediately before a folder is read by :py:meth:`waflib.Context.Context.recurse`.
+               The current script is bound as a Node object on ``self.cur_script``, and the current path
+               is bound to ``self.path``
+
+               :param node: script
+               :type node: :py:class:`waflib.Node.Node`
+               """
+               self.stack_path.append(self.cur_script)
+
+               self.cur_script = node
+               self.path = node.parent
+
+       def post_recurse(self, node):
+               """
+               Restores ``self.cur_script`` and ``self.path`` right after :py:meth:`waflib.Context.Context.recurse` terminates.
+
+               :param node: script
+               :type node: :py:class:`waflib.Node.Node`
+               """
+               self.cur_script = self.stack_path.pop()
+               if self.cur_script:
+                       self.path = self.cur_script.parent
+
+       def recurse(self, dirs, name=None, mandatory=True, once=True, encoding=None):
+               """
+               Runs user-provided functions from the supplied list of directories.
+               The directories can be either absolute, or relative to the directory
+               of the wscript file
+
+               The methods :py:meth:`waflib.Context.Context.pre_recurse` and
+               :py:meth:`waflib.Context.Context.post_recurse` are called immediately before
+               and after a script has been executed.
+
+               :param dirs: List of directories to visit
+               :type dirs: list of string or space-separated string
+               :param name: Name of function to invoke from the wscript
+               :type  name: string
+               :param mandatory: whether sub wscript files are required to exist
+               :type  mandatory: bool
+               :param once: read the script file once for a particular context
+               :type once: bool
+               """
+               try:
+                       cache = self.recurse_cache
+               except AttributeError:
+                       cache = self.recurse_cache = {}
+
+               for d in Utils.to_list(dirs):
+
+                       if not os.path.isabs(d):
+                               # absolute paths only
+                               d = os.path.join(self.path.abspath(), d)
+
+                       WSCRIPT     = os.path.join(d, WSCRIPT_FILE)
+                       WSCRIPT_FUN = WSCRIPT + '_' + (name or self.fun)
+
+                       node = self.root.find_node(WSCRIPT_FUN)
+                       if node and (not once or node not in cache):
+                               cache[node] = True
+                               self.pre_recurse(node)
+                               try:
+                                       function_code = node.read('rU', encoding)
+                                       exec(compile(function_code, node.abspath(), 'exec'), self.exec_dict)
+                               finally:
+                                       self.post_recurse(node)
+                       elif not node:
+                               node = self.root.find_node(WSCRIPT)
+                               tup = (node, name or self.fun)
+                               if node and (not once or tup not in cache):
+                                       cache[tup] = True
+                                       self.pre_recurse(node)
+                                       try:
+                                               wscript_module = load_module(node.abspath(), encoding=encoding)
+                                               user_function = getattr(wscript_module, (name or self.fun), None)
+                                               if not user_function:
+                                                       if not mandatory:
+                                                               continue
+                                                       raise Errors.WafError('No function %r defined in %s' % (name or self.fun, node.abspath()))
+                                               user_function(self)
+                                       finally:
+                                               self.post_recurse(node)
+                               elif not node:
+                                       if not mandatory:
+                                               continue
+                                       try:
+                                               os.listdir(d)
+                                       except OSError:
+                                               raise Errors.WafError('Cannot read the folder %r' % d)
+                                       raise Errors.WafError('No wscript file in directory %s' % d)
+
+       def exec_command(self, cmd, **kw):
+               """
+               Runs an external process and returns the exit status::
+
+                       def run(tsk):
+                               ret = tsk.generator.bld.exec_command('touch foo.txt')
+                               return ret
+
+               If the context has the attribute 'log', then captures and logs the process stderr/stdout.
+               Unlike :py:meth:`waflib.Context.Context.cmd_and_log`, this method does not return the
+               stdout/stderr values captured.
+
+               :param cmd: command argument for subprocess.Popen
+               :type cmd: string or list
+               :param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
+               :type kw: dict
+               :returns: process exit status
+               :rtype: integer
+               """
+               subprocess = Utils.subprocess
+               kw['shell'] = isinstance(cmd, str)
+               Logs.debug('runner: %r', cmd)
+               Logs.debug('runner_env: kw=%s', kw)
+
+               if self.logger:
+                       self.logger.info(cmd)
+
+               if 'stdout' not in kw:
+                       kw['stdout'] = subprocess.PIPE
+               if 'stderr' not in kw:
+                       kw['stderr'] = subprocess.PIPE
+
+               if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
+                       raise Errors.WafError('Program %s not found!' % cmd[0])
+
+               cargs = {}
+               if 'timeout' in kw:
+                       if sys.hexversion >= 0x3030000:
+                               cargs['timeout'] = kw['timeout']
+                               if not 'start_new_session' in kw:
+                                       kw['start_new_session'] = True
+                       del kw['timeout']
+               if 'input' in kw:
+                       if kw['input']:
+                               cargs['input'] = kw['input']
+                               kw['stdin'] = subprocess.PIPE
+                       del kw['input']
+
+               if 'cwd' in kw:
+                       if not isinstance(kw['cwd'], str):
+                               kw['cwd'] = kw['cwd'].abspath()
+
+               try:
+                       ret, out, err = Utils.run_process(cmd, kw, cargs)
+               except Exception as e:
+                       raise Errors.WafError('Execution failure: %s' % str(e), ex=e)
+
+               if out:
+                       if not isinstance(out, str):
+                               out = out.decode(sys.stdout.encoding or 'iso8859-1', errors='replace')
+                       if self.logger:
+                               self.logger.debug('out: %s', out)
+                       else:
+                               Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
+               if err:
+                       if not isinstance(err, str):
+                               err = err.decode(sys.stdout.encoding or 'iso8859-1', errors='replace')
+                       if self.logger:
+                               self.logger.error('err: %s' % err)
+                       else:
+                               Logs.info(err, extra={'stream':sys.stderr, 'c1': ''})
+
+               return ret
+
+       def cmd_and_log(self, cmd, **kw):
+               """
+               Executes a process and returns stdout/stderr if the execution is successful.
+               An exception is thrown when the exit status is non-0. In that case, both stderr and stdout
+               will be bound to the WafError object::
+
+                       def configure(conf):
+                               out = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.STDOUT, quiet=waflib.Context.BOTH)
+                               (out, err) = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.BOTH)
+                               (out, err) = conf.cmd_and_log(cmd, input='\\n', output=waflib.Context.STDOUT)
+                               try:
+                                       conf.cmd_and_log(['which', 'someapp'], output=waflib.Context.BOTH)
+                               except Exception as e:
+                                       print(e.stdout, e.stderr)
+
+               :param cmd: args for subprocess.Popen
+               :type cmd: list or string
+               :param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
+               :type kw: dict
+               :returns: process exit status
+               :rtype: integer
+               :raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process
+               :raises: :py:class:`waflib.Errors.WafError` in case of execution failure; stdout/stderr/returncode are bound to the exception object
+               """
+               subprocess = Utils.subprocess
+               kw['shell'] = isinstance(cmd, str)
+               Logs.debug('runner: %r', cmd)
+
+               if 'quiet' in kw:
+                       quiet = kw['quiet']
+                       del kw['quiet']
+               else:
+                       quiet = None
+
+               if 'output' in kw:
+                       to_ret = kw['output']
+                       del kw['output']
+               else:
+                       to_ret = STDOUT
+
+               if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
+                       raise Errors.WafError('Program %r not found!' % cmd[0])
+
+               kw['stdout'] = kw['stderr'] = subprocess.PIPE
+               if quiet is None:
+                       self.to_log(cmd)
+
+               cargs = {}
+               if 'timeout' in kw:
+                       if sys.hexversion >= 0x3030000:
+                               cargs['timeout'] = kw['timeout']
+                               if not 'start_new_session' in kw:
+                                       kw['start_new_session'] = True
+                       del kw['timeout']
+               if 'input' in kw:
+                       if kw['input']:
+                               cargs['input'] = kw['input']
+                               kw['stdin'] = subprocess.PIPE
+                       del kw['input']
+
+               if 'cwd' in kw:
+                       if not isinstance(kw['cwd'], str):
+                               kw['cwd'] = kw['cwd'].abspath()
+
+               try:
+                       ret, out, err = Utils.run_process(cmd, kw, cargs)
+               except Exception as e:
+                       raise Errors.WafError('Execution failure: %s' % str(e), ex=e)
+
+               if not isinstance(out, str):
+                       out = out.decode(sys.stdout.encoding or 'iso8859-1', errors='replace')
+               if not isinstance(err, str):
+                       err = err.decode(sys.stdout.encoding or 'iso8859-1', errors='replace')
+
+               if out and quiet != STDOUT and quiet != BOTH:
+                       self.to_log('out: %s' % out)
+               if err and quiet != STDERR and quiet != BOTH:
+                       self.to_log('err: %s' % err)
+
+               if ret:
+                       e = Errors.WafError('Command %r returned %r' % (cmd, ret))
+                       e.returncode = ret
+                       e.stderr = err
+                       e.stdout = out
+                       raise e
+
+               if to_ret == BOTH:
+                       return (out, err)
+               elif to_ret == STDERR:
+                       return err
+               return out
+
+	def fatal(self, msg, ex=None):
+		"""
+		Prints an error message in red and stops command execution; this is
+		usually used in the configuration section::
+
+			def configure(conf):
+				conf.fatal('a requirement is missing')
+
+		:param msg: message to display
+		:type msg: string
+		:param ex: optional exception object
+		:type ex: exception
+		:raises: :py:class:`waflib.Errors.ConfigurationError`
+		"""
+		if self.logger:
+			self.logger.info('from %s: %s' % (self.path.abspath(), msg))
+		try:
+			msg = '%s\n(complete log in %s)' % (msg, self.logger.handlers[0].baseFilename)
+		except AttributeError:
+			pass # no logger or no file handler attached; show the message as-is
+		raise self.errors.ConfigurationError(msg, ex=ex)
+
+	def to_log(self, msg):
+		"""
+		Logs information to the logger (if present), or to stderr.
+		Empty messages are not printed::
+
+			def build(bld):
+				bld.to_log('starting the build')
+
+		Provide a logger on the context class or override this method if necessary.
+
+		:param msg: message
+		:type msg: string
+		"""
+		if not msg:
+			return
+		if self.logger:
+			self.logger.info(msg)
+		else:
+			sys.stderr.write(str(msg))
+			sys.stderr.flush()
+
+
+	def msg(self, *k, **kw):
+		"""
+		Prints a configuration message of the form ``msg: result``.
+		The second part of the message will be in colors. The output
+		can be disabled easily by setting ``in_msg`` to a positive value::
+
+			def configure(conf):
+				conf.in_msg = 1
+				conf.msg('Checking for library foo', 'ok')
+				# no output
+
+		:param msg: message to display to the user
+		:type msg: string
+		:param result: result to display
+		:type result: string or boolean
+		:param color: color to use, see :py:const:`waflib.Logs.colors_lst`
+		:type color: string
+		"""
+		try:
+			msg = kw['msg']
+		except KeyError:
+			msg = k[0]
+
+		self.start_msg(msg, **kw)
+
+		try:
+			result = kw['result']
+		except KeyError:
+			result = k[1]
+
+		color = kw.get('color')
+		if not isinstance(color, str):
+			color = result and 'GREEN' or 'YELLOW'
+
+		self.end_msg(result, color, **kw)
+
+	def start_msg(self, *k, **kw):
+		"""
+		Prints the beginning of a 'Checking for xxx' message. See :py:meth:`waflib.Context.Context.msg`
+		"""
+		if kw.get('quiet'):
+			return
+
+		msg = kw.get('msg') or k[0]
+		try:
+			if self.in_msg:
+				self.in_msg += 1 # nested message: count it, but print nothing
+				return
+		except AttributeError:
+			self.in_msg = 0 # first call: initialize the nesting counter
+		self.in_msg += 1
+
+		try:
+			self.line_just = max(self.line_just, len(msg))
+		except AttributeError:
+			self.line_just = max(40, len(msg)) # minimum justification width of 40 columns
+		for x in (self.line_just * '-', msg):
+			self.to_log(x)
+		Logs.pprint('NORMAL', "%s :" % msg.ljust(self.line_just), sep='')
+
+	def end_msg(self, *k, **kw):
+		"""Prints the end of a 'Checking for' message. See :py:meth:`waflib.Context.Context.msg`"""
+		if kw.get('quiet'):
+			return
+		self.in_msg -= 1
+		if self.in_msg:
+			return # still inside a nested message, keep quiet
+
+		result = kw.get('result') or k[0]
+
+		defcolor = 'GREEN'
+		if result == True: # bool results map to ok/not found; anything else is printed as-is
+			msg = 'ok'
+		elif result == False:
+			msg = 'not found'
+			defcolor = 'YELLOW'
+		else:
+			msg = str(result)
+
+		self.to_log(msg)
+		try:
+			color = kw['color']
+		except KeyError:
+			if len(k) > 1 and k[1] in Logs.colors_lst:
+				# compatibility waf 1.7
+				color = k[1]
+			else:
+				color = defcolor
+		Logs.pprint(color, msg)
+
+       def load_special_tools(self, var, ban=[]):
+               """
+               Loads third-party extensions modules for certain programming languages
+               by trying to list certain files in the extras/ directory. This method
+               is typically called once for a programming language group, see for
+               example :py:mod:`waflib.Tools.compiler_c`
+
+               :param var: glob expression, for example 'cxx\_\*.py'
+               :type var: string
+               :param ban: list of exact file names to exclude
+               :type ban: list of string
+               """
+               global waf_dir
+               if os.path.isdir(waf_dir):
+                       lst = self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var)
+                       for x in lst:
+                               if not x.name in ban:
+                                       load_tool(x.name.replace('.py', ''))
+               else:
+                       from zipfile import PyZipFile
+                       waflibs = PyZipFile(waf_dir)
+                       lst = waflibs.namelist()
+                       for x in lst:
+                               if not re.match('waflib/extras/%s' % var.replace('*', '.*'), var):
+                                       continue
+                               f = os.path.basename(x)
+                               doban = False
+                               for b in ban:
+                                       r = b.replace('*', '.*')
+                                       if re.match(r, f):
+                                               doban = True
+                               if not doban:
+                                       f = f.replace('.py', '')
+                                       load_tool(f)
+
+cache_modules = {} # absolute wscript path -> module object, populated by load_module below
+"""
+Dictionary holding already loaded modules (wscript), indexed by their absolute path.
+The modules are added automatically by :py:func:`waflib.Context.load_module`
+"""
+
+def load_module(path, encoding=None):
+	"""
+	Loads a wscript file as a python module. This method caches results in :py:attr:`waflib.Context.cache_modules`
+
+	:param path: file path
+	:type path: string
+	:return: Loaded Python module
+	:rtype: module
+	"""
+	try:
+		return cache_modules[path]
+	except KeyError:
+		pass # not loaded yet, fall through
+
+	module = imp.new_module(WSCRIPT_FILE) # NOTE(review): 'imp' is deprecated on python3; presumably kept for python2 compatibility
+	try:
+		code = Utils.readf(path, m='rU', encoding=encoding)
+	except EnvironmentError:
+		raise Errors.WafError('Could not read the file %r' % path)
+
+	module_dir = os.path.dirname(path)
+	sys.path.insert(0, module_dir) # let the wscript import helpers living next to it
+	try:
+		exec(compile(code, path, 'exec'), module.__dict__)
+	finally:
+		sys.path.remove(module_dir)
+
+	cache_modules[path] = module
+	return module
+
+def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True):
+	"""
+	Imports a Waf tool as a python module, and stores it in the dict :py:const:`waflib.Context.Context.tools`
+
+	:type  tool: string
+	:param tool: Name of the tool
+	:type  tooldir: list
+	:param tooldir: List of directories to search for the tool module
+	:type  with_sys_path: boolean
+	:param with_sys_path: whether or not to search the regular sys.path, besides waf_dir and potentially given tooldirs
+	"""
+	if tool == 'java':
+		tool = 'javaw' # jython
+	else:
+		tool = tool.replace('++', 'xx')
+
+	if not with_sys_path:
+		back_path = sys.path
+		sys.path = [] # isolate the import from the regular python path
+	try:
+		if tooldir:
+			assert isinstance(tooldir, list)
+			sys.path = tooldir + sys.path
+			try:
+				__import__(tool)
+			finally:
+				for d in tooldir:
+					sys.path.remove(d)
+			ret = sys.modules[tool]
+			Context.tools[tool] = ret
+			return ret
+		else:
+			if not with_sys_path: sys.path.insert(0, waf_dir)
+			try:
+				for x in ('waflib.Tools.%s', 'waflib.extras.%s', 'waflib.%s', '%s'):
+					try:
+						__import__(x % tool)
+						break
+					except ImportError:
+						x = None
+				else: # raise an exception
+					__import__(tool)
+			finally:
+				if not with_sys_path: sys.path.remove(waf_dir)
+			ret = sys.modules[x % tool]
+			Context.tools[tool] = ret
+			return ret
+	finally:
+		if not with_sys_path:
+			sys.path += back_path # restore the original python path
diff --git a/third_party/waf/waflib/Errors.py b/third_party/waf/waflib/Errors.py
new file mode 100644 (file)
index 0000000..c9f4262
--- /dev/null
@@ -0,0 +1,69 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2010-2016 (ita)
+
+"""
+Exceptions used in the Waf code
+"""
+
+import traceback, sys
+
+class WafError(Exception):
+	"""Base class for all Waf errors"""
+	def __init__(self, msg='', ex=None):
+		"""
+		:param msg: error message
+		:type msg: string
+		:param ex: exception causing this error (optional)
+		:type ex: exception
+		"""
+		self.msg = msg
+		assert not isinstance(msg, Exception) # pass the original exception through ex=, not msg
+
+		self.stack = []
+		if ex:
+			if not msg:
+				self.msg = str(ex)
+			if isinstance(ex, WafError):
+				self.stack = ex.stack
+			else:
+				self.stack = traceback.extract_tb(sys.exc_info()[2])
+		self.stack += traceback.extract_stack()[:-1] # [:-1] drops this constructor's own frame
+		self.verbose_msg = ''.join(traceback.format_list(self.stack))
+
+	def __str__(self):
+		return str(self.msg)
+
+class BuildError(WafError):
+	"""Error raised during the build and install phases"""
+	def __init__(self, error_tasks=[]): # NOTE(review): shared mutable default; safe only while callers never mutate .tasks
+		"""
+		:param error_tasks: tasks that could not complete normally
+		:type error_tasks: list of task objects
+		"""
+		self.tasks = error_tasks
+		WafError.__init__(self, self.format_error())
+
+	def format_error(self):
+		"""Formats the error messages from the tasks that failed"""
+		lst = ['Build failed']
+		for tsk in self.tasks:
+			txt = tsk.format_error()
+			if txt: lst.append(txt)
+		return '\n'.join(lst)
+
+class ConfigurationError(WafError):
+	"""Configuration exception raised in particular by :py:meth:`waflib.Context.Context.fatal`"""
+	pass # distinct type so callers can catch configuration failures specifically
+
+class TaskRescan(WafError):
+	"""Task-specific exception type signalling required signature recalculations"""
+	pass # marker type only, no extra behavior
+
+class TaskNotReady(WafError):
+	"""Task-specific exception type signalling that task signatures cannot be computed"""
+	pass # marker type only, no extra behavior
diff --git a/third_party/waf/waflib/Logs.py b/third_party/waf/waflib/Logs.py
new file mode 100644 (file)
index 0000000..1216bad
--- /dev/null
@@ -0,0 +1,384 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2016 (ita)
+
+"""
+logging, colors, terminal width and pretty-print
+"""
+
+import os, re, traceback, sys
+from waflib import Utils, ansiterm
+
+if not os.environ.get('NOSYNC', False):
+	# synchronized output is nearly mandatory to prevent garbled output
+	if sys.stdout.isatty() and id(sys.stdout) == id(sys.__stdout__): # only wrap the original, unredirected stream
+		sys.stdout = ansiterm.AnsiTerm(sys.stdout)
+	if sys.stderr.isatty() and id(sys.stderr) == id(sys.__stderr__):
+		sys.stderr = ansiterm.AnsiTerm(sys.stderr)
+
+# import the logging module after since it holds a reference on sys.stderr
+# in case someone uses the root logger
+import logging
+
+LOG_FORMAT = os.environ.get('WAF_LOG_FORMAT', '%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s')
+HOUR_FORMAT = os.environ.get('WAF_HOUR_FORMAT', '%H:%M:%S')
+
+zones = []
+"""
+See :py:class:`waflib.Logs.log_filter`
+"""
+
+verbose = 0
+"""
+Global verbosity level, see :py:func:`waflib.Logs.debug` and :py:func:`waflib.Logs.error`
+"""
+
+colors_lst = {
+'USE' : True, # color switch, reassigned by enable_colors below
+'BOLD'  :'\x1b[01;1m',
+'RED'   :'\x1b[01;31m',
+'GREEN' :'\x1b[32m',
+'YELLOW':'\x1b[33m',
+'PINK'  :'\x1b[35m',
+'BLUE'  :'\x1b[01;34m',
+'CYAN'  :'\x1b[36m',
+'GREY'  :'\x1b[37m',
+'NORMAL':'\x1b[0m',
+'cursor_on'  :'\x1b[?25h',
+'cursor_off' :'\x1b[?25l',
+}
+
+indicator = '\r\x1b[K%s%s%s' # carriage return + ANSI erase-to-end-of-line, for single-line progress output
+
+try:
+	unicode
+except NameError:
+	unicode = None # python3: used as a version flag in log_handler.emit_override
+
+def enable_colors(use):
+	"""
+	If *1* is given, then the system will perform a few verifications
+	before enabling colors, such as checking whether the interpreter
+	is running in a terminal. A value of zero will disable colors,
+	and a value above *1* will force colors.
+
+	:param use: whether to enable colors or not
+	:type use: integer
+	"""
+	if use == 1:
+		if not (sys.stderr.isatty() or sys.stdout.isatty()):
+			use = 0 # no terminal attached at all, disable
+		if Utils.is_win32 and os.name != 'java':
+			term = os.environ.get('TERM', '') # has ansiterm
+		else:
+			term = os.environ.get('TERM', 'dumb')
+
+		if term in ('dumb', 'emacs'):
+			use = 0
+
+	if use >= 1:
+		os.environ['TERM'] = 'vt100'
+
+	colors_lst['USE'] = use
+
+# If console packages are available, replace the dummy function with a real
+# implementation
+try:
+	get_term_cols = ansiterm.get_term_cols
+except AttributeError:
+	def get_term_cols():
+		return 80 # conservative default when the terminal width cannot be queried
+
+get_term_cols.__doc__ = """
+	Returns the console width in characters.
+
+	:return: the number of characters per line
+	:rtype: int
+	"""
+
+def get_color(cl):
+	"""
+	Returns the ansi sequence corresponding to the given color name.
+	An empty string is returned when coloring is globally disabled.
+
+	:param cl: color name in capital letters
+	:type cl: string
+	"""
+	if colors_lst['USE']:
+		return colors_lst.get(cl, '') # unknown names degrade to no color
+	return ''
+
+class color_dict(object):
+	"""attribute-based color access, eg: colors.PINK"""
+	def __getattr__(self, a):
+		return get_color(a)
+	def __call__(self, a):
+		return get_color(a) # also usable as colors('PINK')
+
+colors = color_dict()
+
+re_log = re.compile(r'(\w+): (.*)', re.M)
+class log_filter(logging.Filter):
+	"""
+	Waf logs are of the form 'name: message', and can be filtered by 'waf --zones=name'.
+	For example, the following::
+
+		from waflib import Logs
+		Logs.debug('test: here is a message')
+
+	Will be displayed only when executing::
+
+		$ waf --zones=test
+	"""
+	def __init__(self, name=''):
+		logging.Filter.__init__(self, name)
+
+	def filter(self, rec):
+		"""
+		Filters log records by zone and by logging level
+
+		:param rec: log entry
+		"""
+		global verbose
+		rec.zone = rec.module # default zone: the module that emitted the record
+		if rec.levelno >= logging.INFO:
+			return True # info and above are never filtered
+
+		m = re_log.match(rec.msg)
+		if m:
+			rec.zone = m.group(1)
+			rec.msg = m.group(2)
+
+		if zones:
+			return getattr(rec, 'zone', '') in zones or '*' in zones
+		elif not verbose > 2:
+			return False
+		return True
+
+class log_handler(logging.StreamHandler):
+	"""Dispatches messages to stderr/stdout depending on the severity level"""
+	def emit(self, record):
+		"""
+		Delegates the functionality to :py:meth:`waflib.Logs.log_handler.emit_override`
+		"""
+		# default implementation
+		try:
+			try:
+				self.stream = record.stream
+			except AttributeError:
+				if record.levelno >= logging.WARNING:
+					record.stream = self.stream = sys.stderr # warnings and errors go to stderr
+				else:
+					record.stream = self.stream = sys.stdout
+			self.emit_override(record)
+			self.flush()
+		except (KeyboardInterrupt, SystemExit):
+			raise
+		except: # from the python library -_-
+			self.handleError(record)
+
+	def emit_override(self, record, **kw):
+		"""
+		Writes the log record to the desired stream (stderr/stdout)
+		"""
+		self.terminator = getattr(record, 'terminator', '\n')
+		stream = self.stream
+		if unicode:
+			# python2
+			msg = self.formatter.format(record)
+			fs = '%s' + self.terminator
+			try:
+				if (isinstance(msg, unicode) and getattr(stream, 'encoding', None)):
+					fs = fs.decode(stream.encoding)
+					try:
+						stream.write(fs % msg)
+					except UnicodeEncodeError:
+						stream.write((fs % msg).encode(stream.encoding))
+				else:
+					stream.write(fs % msg)
+			except UnicodeError:
+				stream.write((fs % msg).encode('utf-8'))
+		else:
+			logging.StreamHandler.emit(self, record)
+
+class formatter(logging.Formatter):
+	"""Simple log formatter which handles colors"""
+	def __init__(self):
+		logging.Formatter.__init__(self, LOG_FORMAT, HOUR_FORMAT)
+
+	def format(self, rec):
+		"""
+		Formats records and adds colors as needed. The records do not get
+		a leading hour format if the logging level is above *INFO*.
+		"""
+		try:
+			msg = rec.msg.decode('utf-8')
+		except Exception:
+			msg = rec.msg
+
+		use = colors_lst['USE']
+		if (use == 1 and rec.stream.isatty()) or use == 2: # 2 forces colors even without a tty
+
+			c1 = getattr(rec, 'c1', None)
+			if c1 is None:
+				c1 = ''
+				if rec.levelno >= logging.ERROR:
+					c1 = colors.RED
+				elif rec.levelno >= logging.WARNING:
+					c1 = colors.YELLOW
+				elif rec.levelno >= logging.INFO:
+					c1 = colors.GREEN
+			c2 = getattr(rec, 'c2', colors.NORMAL)
+			msg = '%s%s%s' % (c1, msg, c2)
+		else:
+			# remove single \r that make long lines in text files
+			# and other terminal commands
+			msg = re.sub(r'\r(?!\n)|\x1B\[(K|.*?(m|h|l))', '', msg)
+
+		if rec.levelno >= logging.INFO:
+			# the goal of this is to format without the leading "Logs, hour" prefix
+			if rec.args:
+				return msg % rec.args
+			return msg
+
+		rec.msg = msg
+		rec.c1 = colors.PINK
+		rec.c2 = colors.NORMAL
+		return logging.Formatter.format(self, rec)
+
+log = None # module-level logger, created by init_log()
+"""global logger for Logs.debug, Logs.error, etc"""
+
+def debug(*k, **kw):
+	"""
+	Wraps logging.debug and discards messages if the verbosity level :py:attr:`waflib.Logs.verbose` ≤ 0
+	"""
+	global verbose
+	if verbose:
+		k = list(k)
+		k[0] = k[0].replace('\n', ' ') # keep each debug record on a single line
+		global log
+		log.debug(*k, **kw)
+
+def error(*k, **kw):
+	"""
+	Wraps logging.error, and adds the stack trace when the verbosity level :py:attr:`waflib.Logs.verbose` is above 2
+	"""
+	global log, verbose
+	log.error(*k, **kw)
+	if verbose > 2:
+		st = traceback.extract_stack()
+		if st:
+			st = st[:-1] # drop this wrapper's own frame
+			buf = []
+			for filename, lineno, name, line in st:
+				buf.append('  File %r, line %d, in %s' % (filename, lineno, name))
+				if line:
+					buf.append('    %s' % line.strip())
+			if buf: log.error('\n'.join(buf))
+
+def warn(*k, **kw):
+	"""
+	Wraps logging.warn
+	"""
+	global log
+	log.warn(*k, **kw) # NOTE(review): Logger.warn is a deprecated alias of Logger.warning
+
+def info(*k, **kw):
+	"""
+	Wraps logging.info on the global waflib logger
+	"""
+	global log
+	log.info(*k, **kw)
+
+def init_log():
+	"""
+	Initializes the logger :py:attr:`waflib.Logs.log`
+	"""
+	global log
+	log = logging.getLogger('waflib')
+	log.handlers = [] # drop any handlers/filters left by a previous initialization
+	log.filters = []
+	hdlr = log_handler()
+	hdlr.setFormatter(formatter())
+	log.addHandler(hdlr)
+	log.addFilter(log_filter())
+	log.setLevel(logging.DEBUG)
+
+def make_logger(path, name):
+       """
+       Creates a simple logger, which is often used to redirect the context command output::
+
+               from waflib import Logs
+               bld.logger = Logs.make_logger('test.log', 'build')
+               bld.check(header_name='sadlib.h', features='cxx cprogram', mandatory=False)
+
+               # have the file closed immediately
+               Logs.free_logger(bld.logger)
+
+               # stop logging
+               bld.logger = None
+
+       The method finalize() of the command will try to free the logger, if any
+
+       :param path: file name to write the log output to
+       :type path: string
+       :param name: logger name (loggers are reused)
+       :type name: string
+       """
+       logger = logging.getLogger(name)
+       hdlr = logging.FileHandler(path, 'w')
+       formatter = logging.Formatter('%(message)s')
+       hdlr.setFormatter(formatter)
+       logger.addHandler(hdlr)
+       logger.setLevel(logging.DEBUG)
+       return logger
+
+def make_mem_logger(name, to_log, size=8192):
+       """
+       Creates a memory logger to avoid writing concurrently to the main logger
+       """
+       from logging.handlers import MemoryHandler
+       logger = logging.getLogger(name)
+       hdlr = MemoryHandler(size, target=to_log)
+       formatter = logging.Formatter('%(message)s')
+       hdlr.setFormatter(formatter)
+       logger.addHandler(hdlr)
+       logger.memhandler = hdlr
+       logger.setLevel(logging.DEBUG)
+       return logger
+
+def free_logger(logger):
+       """
+       Frees the resources held by the loggers created through make_logger or make_mem_logger.
+       This is used for file cleanup and for handler removal (logger objects are re-used).
+       """
+       try:
+               for x in logger.handlers:
+                       x.close()
+                       logger.removeHandler(x)
+       except Exception:
+               pass
+
+def pprint(col, msg, label='', sep='\n'):
+	"""
+	Prints messages in color immediately on stderr::
+
+		from waflib import Logs
+		Logs.pprint('RED', 'Something bad just happened')
+
+	:param col: color name to use in :py:const:`Logs.colors_lst`
+	:type col: string
+	:param msg: message to display
+	:type msg: string or a value that can be printed by %s
+	:param label: a message to add after the colored output
+	:type label: string
+	:param sep: a string to append at the end (line separator)
+	:type sep: string
+	"""
+	global info # the module-level wrapper defined above
+	info('%s%s%s %s', colors(col), msg, colors.NORMAL, label, extra={'terminator':sep})
diff --git a/third_party/waf/waflib/Node.py b/third_party/waf/waflib/Node.py
new file mode 100644 (file)
index 0000000..3b0f578
--- /dev/null
@@ -0,0 +1,944 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2016 (ita)
+
+"""
+Node: filesystem structure
+
+#. Each file/folder is represented by exactly one node.
+
+#. Some potential class properties are stored on :py:class:`waflib.Build.BuildContext` : nodes to depend on, etc.
+   Unused class members can increase the `.wafpickle` file size sensibly.
+
+#. Node objects should never be created directly, use
+   the methods :py:func:`Node.make_node` or :py:func:`Node.find_node` for the low-level operations
+
+#. The methods :py:func:`Node.find_resource`, :py:func:`Node.find_dir` :py:func:`Node.find_or_declare` must be
+   used when a build context is present
+
+#. Each instance of :py:class:`waflib.Context.Context` has a unique :py:class:`Node` subclass required for serialization.
+   (:py:class:`waflib.Node.Nod3`, see the :py:class:`waflib.Context.Context` initializer). A reference to the context
+   owning a node is held as *self.ctx*
+"""
+
+import os, re, sys, shutil
+from waflib import Utils, Errors
+
+exclude_regs = '''
+**/*~
+**/#*#
+**/.#*
+**/%*%
+**/._*
+**/CVS
+**/CVS/**
+**/.cvsignore
+**/SCCS
+**/SCCS/**
+**/vssver.scc
+**/.svn
+**/.svn/**
+**/BitKeeper
+**/.git
+**/.git/**
+**/.gitignore
+**/.bzr
+**/.bzrignore
+**/.bzr/**
+**/.hg
+**/.hg/**
+**/_MTN
+**/_MTN/**
+**/.arch-ids
+**/{arch}
+**/_darcs
+**/_darcs/**
+**/.intlcache
+**/.DS_Store'''
+"""
+Ant patterns for files and folders to exclude while doing the
+recursive traversal in :py:meth:`waflib.Node.Node.ant_glob`
+"""
+
+class Node(object):
+       """
+       This class is organized in two parts:
+
+       * The basic methods meant for filesystem access (compute paths, create folders, etc)
+       * The methods bound to a :py:class:`waflib.Build.BuildContext` (require ``bld.srcnode`` and ``bld.bldnode``)
+       """
+
+       dict_class = dict
+       """
+       Subclasses can provide a dict class to enable case insensitivity for example.
+       """
+
+       __slots__ = ('name', 'parent', 'children', 'cache_abspath', 'cache_isdir')
+	def __init__(self, name, parent):
+		"""
+		.. note:: Use :py:func:`Node.make_node` or :py:func:`Node.find_node` instead of calling this constructor
+
+		:param name: file/folder name of this node
+		:param parent: parent Node, or a false value for a root node
+		:raises: :py:class:`waflib.Errors.WafError` if the parent already has a child with this name
+		"""
+		self.name = name
+		self.parent = parent
+		if parent:
+			if name in parent.children:
+				raise Errors.WafError('node %s exists in the parent files %r already' % (name, parent))
+			# register this node in the parent's children mapping
+			parent.children[name] = self
+
+	def __setstate__(self, data):
+		"Deserializes node information, used for persistence"
+		# data is the (name, parent, children-or-None) tuple from __getstate__
+		self.name = data[0]
+		self.parent = data[1]
+		if data[2] is not None:
+			# Issue 1480: only folder nodes carry a children mapping;
+			# rebuild it with dict_class so subclass dict behavior is kept
+			self.children = self.dict_class(data[2])
+
+	def __getstate__(self):
+		"Serializes node information, used for persistence"
+		# children is None for file nodes (attribute absent), see __setstate__
+		return (self.name, self.parent, getattr(self, 'children', None))
+
+	def __str__(self):
+		"""
+		String representation (abspath), for debugging purposes
+
+		:rtype: string
+		"""
+		return self.abspath()
+
+	def __repr__(self):
+		"""
+		String representation (abspath), for debugging purposes
+
+		:rtype: string
+		"""
+		# same as __str__: the absolute path identifies a node uniquely
+		return self.abspath()
+
+	def __copy__(self):
+		"""
+		Provided to prevent nodes from being copied
+
+		:raises: :py:class:`waflib.Errors.WafError`
+		"""
+		# each file/folder must map to exactly one Node object
+		raise Errors.WafError('nodes are not supposed to be copied')
+
+	def read(self, flags='r', encoding='ISO8859-1'):
+		"""
+		Reads and returns the contents of the file represented by this node, see :py:func:`waflib.Utils.readf`::
+
+			def build(bld):
+				bld.path.find_node('wscript').read()
+
+		:param flags: Open mode
+		:type  flags: string
+		:param encoding: encoding value for Python3
+		:type encoding: string
+		:rtype: string or bytes
+		:return: File contents
+		"""
+		# delegates to Utils.readf, which handles the py2/py3 differences
+		return Utils.readf(self.abspath(), flags, encoding)
+
+	def write(self, data, flags='w', encoding='ISO8859-1'):
+		"""
+		Writes data to the file represented by this node, see :py:func:`waflib.Utils.writef`::
+
+			def build(bld):
+				bld.path.make_node('foo.txt').write('Hello, world!')
+
+		:param data: data to write
+		:type  data: string
+		:param flags: Write mode
+		:type  flags: string
+		:param encoding: encoding value for Python3
+		:type encoding: string
+		"""
+		# delegates to Utils.writef, which handles the py2/py3 differences
+		Utils.writef(self.abspath(), data, flags, encoding)
+
+	def read_json(self, convert=True, encoding='utf-8'):
+		"""
+		Reads and parses the contents of this node as JSON (Python â‰¥ 2.6)::
+
+			def build(bld):
+				bld.path.find_node('abc.json').read_json()
+
+		Note that this by default automatically decodes unicode strings on Python2, unlike what the Python JSON module does.
+
+		:type  convert: boolean
+		:param convert: Prevents decoding of unicode strings on Python2
+		:type  encoding: string
+		:param encoding: The encoding of the file to read. This default to UTF8 as per the JSON standard
+		:rtype: object
+		:return: Parsed file contents
+		"""
+		import json # Python 2.6 and up
+		object_pairs_hook = None
+		# only Python 2 (hexversion < 0x3000000) needs the unicode -> str pass
+		if convert and sys.hexversion < 0x3000000:
+			try:
+				_type = unicode
+			except NameError:
+				_type = str
+
+			def convert(value):
+				# recursively turn unicode values into byte strings
+				if isinstance(value, list):
+					return [convert(element) for element in value]
+				elif isinstance(value, _type):
+					return str(value)
+				else:
+					return value
+
+			def object_pairs(pairs):
+				# applied to every JSON object so keys are converted too
+				return dict((str(pair[0]), convert(pair[1])) for pair in pairs)
+
+			object_pairs_hook = object_pairs
+
+		return json.loads(self.read(encoding=encoding), object_pairs_hook=object_pairs_hook)
+
+	def write_json(self, data, pretty=True):
+		"""
+		Writes a python object as JSON to disk (Python â‰¥ 2.6) as UTF-8 data (JSON standard)::
+
+			def build(bld):
+				bld.path.find_node('xyz.json').write_json(199)
+
+		:type  data: object
+		:param data: The data to write to disk
+		:type  pretty: boolean
+		:param pretty: Determines if the JSON will be nicely space separated
+		"""
+		import json # Python 2.6 and up
+		indent = 2
+		separators = (',', ': ')
+		sort_keys = pretty
+		# pretty output ends with a platform newline, compact output does not
+		newline = os.linesep
+		if not pretty:
+			indent = None
+			separators = (',', ':')
+			newline = ''
+		output = json.dumps(data, indent=indent, separators=separators, sort_keys=sort_keys) + newline
+		self.write(output, encoding='utf-8')
+
+	def exists(self):
+		"""
+		Returns whether the Node is present on the filesystem
+
+		:rtype: bool
+		"""
+		return os.path.exists(self.abspath())
+
+	def isdir(self):
+		"""
+		Returns whether the Node represents a folder
+
+		:rtype: bool
+		"""
+		return os.path.isdir(self.abspath())
+
+	def chmod(self, val):
+		"""
+		Changes the file/dir permissions::
+
+			def build(bld):
+				bld.path.chmod(493) # 0755
+
+		:param val: numeric permission mode passed to :py:func:`os.chmod`
+		"""
+		os.chmod(self.abspath(), val)
+
+	def delete(self, evict=True):
+		"""
+		Removes the file/folder from the filesystem (equivalent to `rm -rf`), and remove this object from the Node tree.
+		Do not use this object after calling this method.
+
+		:param evict: also remove this node from the Node tree (True by default)
+		:type evict: bool
+		"""
+		try:
+			try:
+				if os.path.isdir(self.abspath()):
+					shutil.rmtree(self.abspath())
+				else:
+					os.remove(self.abspath())
+			except OSError:
+				# ignore the error only when the target is already gone
+				if os.path.exists(self.abspath()):
+					raise
+		finally:
+			if evict:
+				self.evict()
+
+	def evict(self):
+		"""
+		Removes this node from the Node tree
+		"""
+		# raises if the node was already detached from its parent
+		del self.parent.children[self.name]
+
+	def suffix(self):
+		"""
+		Returns the file rightmost extension, for example `a.b.c.d â†’ .d`
+
+		:rtype: string
+		"""
+		# rfind returns -1 when there is no dot; max(0, -1) == 0, so a
+		# dotless name is returned unchanged
+		k = max(0, self.name.rfind('.'))
+		return self.name[k:]
+
+	def height(self):
+		"""
+		Returns the depth in the folder hierarchy from the filesystem root or from all the file drives
+
+		:returns: filesystem depth
+		:rtype: integer
+		"""
+		# count parent links up to the root; the root itself has height 0
+		d = self
+		val = -1
+		while d:
+			d = d.parent
+			val += 1
+		return val
+
+	def listdir(self):
+		"""
+		Lists the folder contents
+
+		:returns: list of file/folder names ordered alphabetically
+		:rtype: list of string
+		"""
+		lst = Utils.listdir(self.abspath())
+		lst.sort()
+		return lst
+
+	def mkdir(self):
+		"""
+		Creates a folder represented by this node. Intermediate folders are created as needed.
+
+		:raises: :py:class:`waflib.Errors.WafError` when the folder is missing
+		"""
+		if self.isdir():
+			return
+
+		try:
+			# create ancestors first; OSError is ignored because a
+			# concurrent process may have created them already
+			self.parent.mkdir()
+		except OSError:
+			pass
+
+		if self.name:
+			try:
+				os.makedirs(self.abspath())
+			except OSError:
+				# same rationale: the isdir() check below decides success
+				pass
+
+			if not self.isdir():
+				raise Errors.WafError('Could not create the directory %r' % self)
+
+			try:
+				self.children
+			except AttributeError:
+				# folder nodes must carry a children mapping
+				self.children = self.dict_class()
+
+	def find_node(self, lst):
+		"""
+		Finds a node on the file system (files or folders), and creates the corresponding Node objects if it exists
+
+		:param lst: relative path
+		:type lst: string or list of string
+		:returns: The corresponding Node object or None if no entry was found on the filesystem
+		:rtype: :py:class:´waflib.Node.Node´
+		"""
+
+		if isinstance(lst, str):
+			# normalize: drop empty components and '.' entries
+			lst = [x for x in Utils.split_path(lst) if x and x != '.']
+
+		cur = self
+		for x in lst:
+			if x == '..':
+				# '..' at the root stays at the root
+				cur = cur.parent or cur
+				continue
+
+			try:
+				ch = cur.children
+			except AttributeError:
+				cur.children = self.dict_class()
+			else:
+				try:
+					# fast path: the node is already known
+					cur = ch[x]
+					continue
+				except KeyError:
+					pass
+
+			# optimistic: create the node first then look if it was correct to do so
+			cur = self.__class__(x, cur)
+			if not cur.exists():
+				cur.evict()
+				return None
+
+		# final check covers the case where lst was empty or ended on '..'
+		if not cur.exists():
+			cur.evict()
+			return None
+
+		return cur
+
+	def make_node(self, lst):
+		"""
+		Returns or creates a Node object corresponding to the input path without considering the filesystem.
+
+		:param lst: relative path
+		:type lst: string or list of string
+		:rtype: :py:class:´waflib.Node.Node´
+		"""
+		if isinstance(lst, str):
+			# normalize: drop empty components and '.' entries
+			lst = [x for x in Utils.split_path(lst) if x and x != '.']
+
+		cur = self
+		for x in lst:
+			if x == '..':
+				# '..' at the root stays at the root
+				cur = cur.parent or cur
+				continue
+
+			try:
+				cur = cur.children[x]
+			except AttributeError:
+				# cur had no children mapping yet; create it, then fall
+				# through to create the child node below
+				cur.children = self.dict_class()
+			except KeyError:
+				pass
+			else:
+				continue
+			cur = self.__class__(x, cur)
+		return cur
+
+	def search_node(self, lst):
+		"""
+		Returns a Node previously defined in the data structure. The filesystem is not considered.
+
+		:param lst: relative path
+		:type lst: string or list of string
+		:rtype: :py:class:´waflib.Node.Node´ or None if there is no entry in the Node datastructure
+		"""
+		if isinstance(lst, str):
+			lst = [x for x in Utils.split_path(lst) if x and x != '.']
+
+		cur = self
+		for x in lst:
+			if x == '..':
+				cur = cur.parent or cur
+			else:
+				try:
+					cur = cur.children[x]
+				except (AttributeError, KeyError):
+					# unknown component: never create nodes here
+					return None
+		return cur
+
+	def path_from(self, node):
+		"""
+		Path of this node seen from the other::
+
+			def build(bld):
+				n1 = bld.path.find_node('foo/bar/xyz.txt')
+				n2 = bld.path.find_node('foo/stuff/')
+				n1.path_from(n2) # '../bar/xyz.txt'
+
+		:param node: path to use as a reference
+		:type node: :py:class:`waflib.Node.Node`
+		:returns: a relative path or an absolute one if that is better
+		:rtype: string
+		"""
+		c1 = self
+		c2 = node
+
+		c1h = c1.height()
+		c2h = c2.height()
+
+		lst = []
+		up = 0
+
+		# walk the deeper side up until both cursors are at the same depth
+		while c1h > c2h:
+			lst.append(c1.name)
+			c1 = c1.parent
+			c1h -= 1
+
+		while c2h > c1h:
+			up += 1
+			c2 = c2.parent
+			c2h -= 1
+
+		# then climb both sides together until the common ancestor is found
+		while not c1 is c2:
+			lst.append(c1.name)
+			up += 1
+
+			c1 = c1.parent
+			c2 = c2.parent
+
+		if c1.parent:
+			lst.extend(['..'] * up)
+			lst.reverse()
+			return os.sep.join(lst) or '.'
+		else:
+			# common ancestor is the filesystem root: an absolute path is shorter
+			return self.abspath()
+
+	def abspath(self):
+		"""
+		Returns the absolute path. A cache is kept in the context as ``cache_node_abspath``
+
+		:rtype: string
+		"""
+		try:
+			return self.cache_abspath
+		except AttributeError:
+			pass
+		# think twice before touching this (performance + complexity + correctness)
+
+		if not self.parent:
+			val = os.sep
+		elif not self.parent.name:
+			# child of the root: avoid a double separator
+			val = os.sep + self.name
+		else:
+			val = self.parent.abspath() + os.sep + self.name
+		self.cache_abspath = val
+		return val
+
+	# on Windows the definition below shadows the one above (drive letters,
+	# no single filesystem root)
+	if Utils.is_win32:
+		def abspath(self):
+			try:
+				return self.cache_abspath
+			except AttributeError:
+				pass
+			if not self.parent:
+				val = ''
+			elif not self.parent.name:
+				# drive node, e.g. 'C:' -> 'C:\\'
+				val = self.name + os.sep
+			else:
+				val = self.parent.abspath().rstrip(os.sep) + os.sep + self.name
+			self.cache_abspath = val
+			return val
+
+	def is_child_of(self, node):
+		"""
+		Returns whether the object belongs to a subtree of the input node::
+
+			def build(bld):
+				node = bld.path.find_node('wscript')
+				node.is_child_of(bld.path) # True
+
+		:param node: path to use as a reference
+		:type node: :py:class:`waflib.Node.Node`
+		:rtype: bool
+		"""
+		# climb exactly height-difference levels, then compare identities
+		p = self
+		diff = self.height() - node.height()
+		while diff > 0:
+			diff -= 1
+			p = p.parent
+		return p is node
+
+       def ant_iter(self, accept=None, maxdepth=25, pats=[], dir=False, src=True, remove=True):
+               """
+               Recursive method used by :py:meth:`waflib.Node.ant_glob`.
+
+               :param accept: function used for accepting/rejecting a node, returns the patterns that can be still accepted in recursion
+               :type accept: function
+               :param maxdepth: maximum depth in the filesystem (25)
+               :type maxdepth: int
+               :param pats: list of patterns to accept and list of patterns to exclude
+               :type pats: tuple
+               :param dir: return folders too (False by default)
+               :type dir: bool
+               :param src: return files (True by default)
+               :type src: bool
+               :param remove: remove files/folders that do not exist (True by default)
+               :type remove: bool
+               :returns: A generator object to iterate from
+               :rtype: iterator
+               """
+               dircont = self.listdir()
+               dircont.sort()
+
+               try:
+                       lst = set(self.children.keys())
+               except AttributeError:
+                       self.children = self.dict_class()
+               else:
+                       if remove:
+                               for x in lst - set(dircont):
+                                       self.children[x].evict()
+
+               for name in dircont:
+                       npats = accept(name, pats)
+                       if npats and npats[0]:
+                               accepted = [] in npats[0]
+
+                               node = self.make_node([name])
+
+                               isdir = node.isdir()
+                               if accepted:
+                                       if isdir:
+                                               if dir:
+                                                       yield node
+                                       else:
+                                               if src:
+                                                       yield node
+
+                               if isdir:
+                                       node.cache_isdir = True
+                                       if maxdepth:
+                                               for k in node.ant_iter(accept=accept, maxdepth=maxdepth - 1, pats=npats, dir=dir, src=src, remove=remove):
+                                                       yield k
+               raise StopIteration
+
+       def ant_glob(self, *k, **kw):
+               """
+               Finds files across folders:
+
+               * ``**/*`` find all files recursively
+               * ``**/*.class`` find all files ending by .class
+               * ``..`` find files having two dot characters
+
+               For example::
+
+                       def configure(cfg):
+                               cfg.path.ant_glob('**/*.cpp') # finds all .cpp files
+                               cfg.root.ant_glob('etc/*.txt') # matching from the filesystem root can be slow
+                               cfg.path.ant_glob('*.cpp', excl=['*.c'], src=True, dir=False)
+
+               For more information see http://ant.apache.org/manual/dirtasks.html
+
+               The nodes that correspond to files and folders that do not exist are garbage-collected.
+               To prevent this behaviour in particular when running over the build directory, pass ``remove=False``
+
+               :param incl: ant patterns or list of patterns to include
+               :type incl: string or list of strings
+               :param excl: ant patterns or list of patterns to exclude
+               :type excl: string or list of strings
+               :param dir: return folders too (False by default)
+               :type dir: bool
+               :param src: return files (True by default)
+               :type src: bool
+               :param remove: remove files/folders that do not exist (True by default)
+               :type remove: bool
+               :param maxdepth: maximum depth of recursion
+               :type maxdepth: int
+               :param ignorecase: ignore case while matching (False by default)
+               :type ignorecase: bool
+               :returns: The corresponding Nodes
+               :rtype: list of :py:class:`waflib.Node.Node` instances
+               """
+
+               src = kw.get('src', True)
+               dir = kw.get('dir', False)
+
+               excl = kw.get('excl', exclude_regs)
+               incl = k and k[0] or kw.get('incl', '**')
+               reflags = kw.get('ignorecase', 0) and re.I
+
+               def to_pat(s):
+                       lst = Utils.to_list(s)
+                       ret = []
+                       for x in lst:
+                               x = x.replace('\\', '/').replace('//', '/')
+                               if x.endswith('/'):
+                                       x += '**'
+                               lst2 = x.split('/')
+                               accu = []
+                               for k in lst2:
+                                       if k == '**':
+                                               accu.append(k)
+                                       else:
+                                               k = k.replace('.', '[.]').replace('*','.*').replace('?', '.').replace('+', '\\+')
+                                               k = '^%s$' % k
+                                               try:
+                                                       #print "pattern", k
+                                                       accu.append(re.compile(k, flags=reflags))
+                                               except Exception ,e:
+                                                       raise Errors.WafError('Invalid pattern: %s' % k, e)
+                               ret.append(accu)
+                       return ret
+
+               def filtre(name, nn):
+                       ret = []
+                       for lst in nn:
+                               if not lst:
+                                       pass
+                               elif lst[0] == '**':
+                                       ret.append(lst)
+                                       if len(lst) > 1:
+                                               if lst[1].match(name):
+                                                       ret.append(lst[2:])
+                                       else:
+                                               ret.append([])
+                               elif lst[0].match(name):
+                                       ret.append(lst[1:])
+                       return ret
+
+               def accept(name, pats):
+                       nacc = filtre(name, pats[0])
+                       nrej = filtre(name, pats[1])
+                       if [] in nrej:
+                               nacc = []
+                       return [nacc, nrej]
+
+               ret = [x for x in self.ant_iter(accept=accept, pats=[to_pat(incl), to_pat(excl)], maxdepth=kw.get('maxdepth', 25), dir=dir, src=src, remove=kw.get('remove', True))]
+               if kw.get('flat', False):
+                       return ' '.join([x.path_from(self) for x in ret])
+
+               return ret
+
+       # --------------------------------------------------------------------------------
+       # the following methods require the source/build folders (bld.srcnode/bld.bldnode)
+       # using a subclass is a possibility, but is that really necessary?
+       # --------------------------------------------------------------------------------
+
+	def is_src(self):
+		"""
+		Returns True if the node is below the source directory. Note that ``!is_src() â‰  is_bld()``
+
+		:rtype: bool
+		"""
+		cur = self
+		x = self.ctx.srcnode
+		y = self.ctx.bldnode
+		while cur.parent:
+			# the build directory check comes first: bldnode may live
+			# below srcnode, and build files are not source files
+			if cur is y:
+				return False
+			if cur is x:
+				return True
+			cur = cur.parent
+		return False
+
+	def is_bld(self):
+		"""
+		Returns True if the node is below the build directory. Note that ``!is_bld() â‰  is_src()``
+
+		:rtype: bool
+		"""
+		cur = self
+		y = self.ctx.bldnode
+		while cur.parent:
+			if cur is y:
+				return True
+			cur = cur.parent
+		return False
+
+	def get_src(self):
+		"""
+		Returns the corresponding Node object in the source directory (or self if already
+		under the source directory). Use this method only if the purpose is to create
+		a Node object (this is common with folders but not with files, see ticket 1937)
+
+		:rtype: :py:class:`waflib.Node.Node`
+		"""
+		cur = self
+		x = self.ctx.srcnode
+		y = self.ctx.bldnode
+		lst = []
+		while cur.parent:
+			if cur is y:
+				# path was collected bottom-up, so reverse before rebuilding
+				lst.reverse()
+				return x.make_node(lst)
+			if cur is x:
+				return self
+			lst.append(cur.name)
+			cur = cur.parent
+		# neither under srcnode nor bldnode: return the node unchanged
+		return self
+
+	def get_bld(self):
+		"""
+		Return the corresponding Node object in the build directory (or self if already
+		under the build directory). Use this method only if the purpose is to create
+		a Node object (this is common with folders but not with files, see ticket 1937)
+
+		:rtype: :py:class:`waflib.Node.Node`
+		"""
+		cur = self
+		x = self.ctx.srcnode
+		y = self.ctx.bldnode
+		lst = []
+		while cur.parent:
+			if cur is y:
+				return self
+			if cur is x:
+				# path was collected bottom-up, so reverse before rebuilding
+				lst.reverse()
+				return self.ctx.bldnode.make_node(lst)
+			lst.append(cur.name)
+			cur = cur.parent
+		# the file is external to the current project, make a fake root in the current build directory
+		lst.reverse()
+		if lst and Utils.is_win32 and len(lst[0]) == 2 and lst[0].endswith(':'):
+			# strip the ':' from a drive component such as 'C:'
+			lst[0] = lst[0][0]
+		return self.ctx.bldnode.make_node(['__root__'] + lst)
+
+	def find_resource(self, lst):
+		"""
+		Use this method in the build phase to find source files corresponding to the relative path given.
+
+		First it looks up the Node data structure to find any declared Node object in the build directory.
+		If None is found, it then considers the filesystem in the source directory.
+
+		:param lst: relative path
+		:type lst: string or list of string
+		:returns: the corresponding Node object or None
+		:rtype: :py:class:`waflib.Node.Node`
+		"""
+		if isinstance(lst, str):
+			lst = [x for x in Utils.split_path(lst) if x and x != '.']
+
+		# declared build nodes take precedence over source files on disk
+		node = self.get_bld().search_node(lst)
+		if not node:
+			node = self.get_src().find_node(lst)
+		if node and node.isdir():
+			# folders are not resources
+			return None
+		return node
+
+	def find_or_declare(self, lst):
+		"""
+		Use this method in the build phase to declare output files.
+
+		If 'self' is in build directory, it first tries to return an existing node object.
+		If no Node is found, it tries to find one in the source directory.
+		If no Node is found, a new Node object is created in the build directory, and the
+		intermediate folders are added.
+
+		:param lst: relative path
+		:type lst: string or list of string
+		:returns: the existing or newly declared Node
+		:rtype: :py:class:`waflib.Node.Node`
+		"""
+		if isinstance(lst, str):
+			lst = [x for x in Utils.split_path(lst) if x and x != '.']
+
+		node = self.get_bld().search_node(lst)
+		if node:
+			if not os.path.isfile(node.abspath()):
+				# declared but not yet created: ensure its folder exists
+				node.parent.mkdir()
+			return node
+		self = self.get_src()
+		node = self.find_node(lst)
+		if node:
+			return node
+		# not found anywhere: declare a new build node
+		node = self.get_bld().make_node(lst)
+		node.parent.mkdir()
+		return node
+
+       def find_dir(self, lst):
+               """
+               Searches for a folder on the filesystem (see :py:meth:`waflib.Node.Node.find_node`)
+
+               :param lst: relative path
+               :type lst: string or list of string
+               :returns: The corresponding Node object or None if there is no such folder
+               :rtype: :py:class:`waflib.Node.Node`
+               """
+               if isinstance(lst, str):
+                       lst = [x for x in Utils.split_path(lst) if x and x != '.']
+
+               node = self.find_node(lst)
+               if node and not node.isdir():
+                       return None
+               return node
+
+       # helpers for building things
+       def change_ext(self, ext, ext_in=None):
+               """
+               Declares a build node with a distinct extension; this uses :py:meth:`waflib.Node.Node.find_or_declare`
+
+               :return: A build node of the same path, but with a different extension
+               :rtype: :py:class:`waflib.Node.Node`
+               """
+               name = self.name
+               if ext_in is None:
+                       k = name.rfind('.')
+                       if k >= 0:
+                               name = name[:k] + ext
+                       else:
+                               name = name + ext
+               else:
+                       name = name[:- len(ext_in)] + ext
+
+               return self.parent.find_or_declare([name])
+
+       def bldpath(self):
+               """
+               Returns the relative path seen from the build directory ``src/foo.cpp``
+
+               :rtype: string
+               """
+               return self.path_from(self.ctx.bldnode)
+
+       def srcpath(self):
+               """
+               Returns the relative path seen from the source directory ``../src/foo.cpp``
+
+               :rtype: string
+               """
+               return self.path_from(self.ctx.srcnode)
+
+       def relpath(self):
+               """
+               If a file in the build directory, returns :py:meth:`waflib.Node.Node.bldpath`,
+               else returns :py:meth:`waflib.Node.Node.srcpath`
+
+               :rtype: string
+               """
+               cur = self
+               x = self.ctx.bldnode
+               while cur.parent:
+                       if cur is x:
+                               return self.bldpath()
+                       cur = cur.parent
+               return self.srcpath()
+
+       def bld_dir(self):
+               """
+               Equivalent to self.parent.bldpath()
+
+               :rtype: string
+               """
+               return self.parent.bldpath()
+
+       def h_file(self):
+               """
+               See :py:func:`waflib.Utils.h_file`
+
+               :return: a hash representing the file contents
+               :rtype: string or bytes
+               """
+               return Utils.h_file(self.abspath())
+
+       def get_bld_sig(self):
+               """
+               Returns a signature (see :py:meth:`waflib.Node.Node.h_file`) for the purpose
+               of build dependency calculation. This method uses a per-context cache.
+
+               :return: a hash representing the object contents
+               :rtype: string or bytes
+               """
+               # previous behaviour can be set by returning self.ctx.node_sigs[self] when a build node
+               try:
+                       cache = self.ctx.cache_sig
+               except AttributeError:
+                       cache = self.ctx.cache_sig = {}
+               try:
+                       ret = cache[self]
+               except KeyError:
+                       p = self.abspath()
+                       try:
+                               ret = cache[self] = self.h_file()
+                       except EnvironmentError:
+                               if self.isdir():
+                                       # allow folders as build nodes, do not use the creation time
+                                       st = os.stat(p)
+                                       ret = cache[self] = Utils.h_list([p, st.st_ino, st.st_mode])
+                                       return ret
+                               raise
+               return ret
+
+       # --------------------------------------------
+       # TODO waf 2.0, remove the sig and cache_sig attributes
+       def get_sig(self):
+               return self.h_file()
+       def set_sig(self, val):
+               # clear the cache, so that past implementation should still work
+               try:
+                       del self.get_bld_sig.__cache__[(self,)]
+               except (AttributeError, KeyError):
+                       pass
+       sig = property(get_sig, set_sig)
+       cache_sig = property(get_sig, set_sig)
+
+pickle_lock = Utils.threading.Lock()
+"""Lock mandatory for thread-safe node serialization"""
+
+class Nod3(Node):
+       """Mandatory subclass for thread-safe node serialization"""
+       pass # do not remove
diff --git a/third_party/waf/waflib/Options.py b/third_party/waf/waflib/Options.py
new file mode 100644 (file)
index 0000000..4f4f7c6
--- /dev/null
@@ -0,0 +1,281 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Scott Newton, 2005 (scottn)
+# Thomas Nagy, 2006-2016 (ita)
+
+"""
+Support for waf command-line options
+
+Provides default and command-line options, as well as the command
+that reads the ``options`` wscript function.
+"""
+
+import os, tempfile, optparse, sys, re
+from waflib import Logs, Utils, Context, Errors
+
+options = {}
+"""
+A global dictionary representing user-provided command-line options::
+
+       $ waf --foo=bar
+"""
+
+commands = []
+"""
+List of commands to execute extracted from the command-line. This list
+is consumed during the execution by :py:func:`waflib.Scripting.run_commands`.
+"""
+
+envvars = []
+"""
+List of environment variable declarations placed after the Waf executable name.
+These are detected by searching for "=" in the remaining arguments.
+You probably do not want to use this.
+"""
+
+lockfile = os.environ.get('WAFLOCK', '.lock-waf_%s_build' % sys.platform)
+"""
+Name of the lock file that marks a project as configured
+"""
+
+class opt_parser(optparse.OptionParser):
+       """
+       Command-line options parser.
+       """
+       def __init__(self, ctx):
+               optparse.OptionParser.__init__(self, conflict_handler="resolve",
+                       version='waf %s (%s)' % (Context.WAFVERSION, Context.WAFREVISION))
+               self.formatter.width = Logs.get_term_cols()
+               self.ctx = ctx
+
+       def print_usage(self, file=None):
+               return self.print_help(file)
+
+       def get_usage(self):
+               """
+               Builds the message to print on ``waf --help``
+
+               :rtype: string
+               """
+               cmds_str = {}
+               for cls in Context.classes:
+                       if not cls.cmd or cls.cmd == 'options' or cls.cmd.startswith( '_' ):
+                               continue
+
+                       s = cls.__doc__ or ''
+                       cmds_str[cls.cmd] = s
+
+               if Context.g_module:
+                       for (k, v) in Context.g_module.__dict__.items():
+                               if k in ('options', 'init', 'shutdown'):
+                                       continue
+
+                               if type(v) is type(Context.create_context):
+                                       if v.__doc__ and not k.startswith('_'):
+                                               cmds_str[k] = v.__doc__
+
+               just = 0
+               for k in cmds_str:
+                       just = max(just, len(k))
+
+               lst = ['  %s: %s' % (k.ljust(just), v) for (k, v) in cmds_str.items()]
+               lst.sort()
+               ret = '\n'.join(lst)
+
+               return '''waf [commands] [options]
+
+Main commands (example: ./waf build -j4)
+%s
+''' % ret
+
+
+class OptionsContext(Context.Context):
+       """
+       Collects custom options from wscript files and parses the command line.
+       Sets the global :py:const:`waflib.Options.commands` and :py:const:`waflib.Options.options` values.
+       """
+       cmd = 'options'
+       fun = 'options'
+
+       def __init__(self, **kw):
+               super(OptionsContext, self).__init__(**kw)
+
+               self.parser = opt_parser(self)
+               """Instance of :py:class:`waflib.Options.opt_parser`"""
+
+               self.option_groups = {}
+
+               jobs = self.jobs()
+               p = self.add_option
+               color = os.environ.get('NOCOLOR', '') and 'no' or 'auto'
+               if os.environ.get('CLICOLOR', '') == '0':
+                       color = 'no'
+               elif os.environ.get('CLICOLOR_FORCE', '') == '1':
+                       color = 'yes'
+               p('-c', '--color',    dest='colors',  default=color, action='store', help='whether to use colors (yes/no/auto) [default: auto]', choices=('yes', 'no', 'auto'))
+               p('-j', '--jobs',     dest='jobs',    default=jobs,  type='int', help='amount of parallel jobs (%r)' % jobs)
+               p('-k', '--keep',     dest='keep',    default=0,     action='count', help='continue despite errors (-kk to try harder)')
+               p('-v', '--verbose',  dest='verbose', default=0,     action='count', help='verbosity level -v -vv or -vvv [default: 0]')
+               p('--zones',          dest='zones',   default='',    action='store', help='debugging zones (task_gen, deps, tasks, etc)')
+               p('--profile',        dest='profile', default='',    action='store_true', help=optparse.SUPPRESS_HELP)
+
+               gr = self.add_option_group('Configuration options')
+               self.option_groups['configure options'] = gr
+
+               gr.add_option('-o', '--out', action='store', default='', help='build dir for the project', dest='out')
+               gr.add_option('-t', '--top', action='store', default='', help='src dir for the project', dest='top')
+
+               gr.add_option('--no-lock-in-run', action='store_true', default='', help=optparse.SUPPRESS_HELP, dest='no_lock_in_run')
+               gr.add_option('--no-lock-in-out', action='store_true', default='', help=optparse.SUPPRESS_HELP, dest='no_lock_in_out')
+               gr.add_option('--no-lock-in-top', action='store_true', default='', help=optparse.SUPPRESS_HELP, dest='no_lock_in_top')
+
+               default_prefix = getattr(Context.g_module, 'default_prefix', os.environ.get('PREFIX'))
+               if not default_prefix:
+                       if Utils.unversioned_sys_platform() == 'win32':
+                               d = tempfile.gettempdir()
+                               default_prefix = d[0].upper() + d[1:]
+                               # win32 preserves the case, but gettempdir does not
+                       else:
+                               default_prefix = '/usr/local/'
+               gr.add_option('--prefix', dest='prefix', default=default_prefix, help='installation prefix [default: %r]' % default_prefix)
+               gr.add_option('--bindir', dest='bindir', help='bindir')
+               gr.add_option('--libdir', dest='libdir', help='libdir')
+
+               gr = self.add_option_group('Build and installation options')
+               self.option_groups['build and install options'] = gr
+               gr.add_option('-p', '--progress', dest='progress_bar', default=0, action='count', help= '-p: progress bar; -pp: ide output')
+               gr.add_option('--targets',        dest='targets', default='', action='store', help='task generators, e.g. "target1,target2"')
+
+               gr = self.add_option_group('Step options')
+               self.option_groups['step options'] = gr
+               gr.add_option('--files',          dest='files', default='', action='store', help='files to process, by regexp, e.g. "*/main.c,*/test/main.o"')
+
+               default_destdir = os.environ.get('DESTDIR', '')
+
+               gr = self.add_option_group('Installation and uninstallation options')
+               self.option_groups['install/uninstall options'] = gr
+               gr.add_option('--destdir', help='installation root [default: %r]' % default_destdir, default=default_destdir, dest='destdir')
+               gr.add_option('-f', '--force', dest='force', default=False, action='store_true', help='force file installation')
+               gr.add_option('--distcheck-args', metavar='ARGS', help='arguments to pass to distcheck', default=None, action='store')
+
+       def jobs(self):
+               """
+               Finds the optimal number of CPU cores to use for parallel jobs.
+               At runtime the options can be obtained from :py:const:`waflib.Options.options` ::
+
+                       from waflib.Options import options
+                       njobs = options.jobs
+
+               :return: the number of CPU cores
+               :rtype: int
+               """
+               count = int(os.environ.get('JOBS', 0))
+               if count < 1:
+                       if 'NUMBER_OF_PROCESSORS' in os.environ:
+                               # on Windows, use the NUMBER_OF_PROCESSORS environment variable
+                               count = int(os.environ.get('NUMBER_OF_PROCESSORS', 1))
+                       else:
+                               # on everything else, first try the POSIX sysconf values
+                               if hasattr(os, 'sysconf_names'):
+                                       if 'SC_NPROCESSORS_ONLN' in os.sysconf_names:
+                                               count = int(os.sysconf('SC_NPROCESSORS_ONLN'))
+                                       elif 'SC_NPROCESSORS_CONF' in os.sysconf_names:
+                                               count = int(os.sysconf('SC_NPROCESSORS_CONF'))
+                               if not count and os.name not in ('nt', 'java'):
+                                       try:
+                                               tmp = self.cmd_and_log(['sysctl', '-n', 'hw.ncpu'], quiet=0)
+                                       except Errors.WafError:
+                                               pass
+                                       else:
+                                               if re.match('^[0-9]+$', tmp):
+                                                       count = int(tmp)
+               if count < 1:
+                       count = 1
+               elif count > 1024:
+                       count = 1024
+               return count
+
+       def add_option(self, *k, **kw):
+               """
+               Wraps ``optparse.add_option``::
+
+                       def options(ctx):
+                               ctx.add_option('-u', '--use', dest='use', default=False,
+                                       action='store_true', help='a boolean option')
+
+               :rtype: optparse option object
+               """
+               return self.parser.add_option(*k, **kw)
+
+       def add_option_group(self, *k, **kw):
+               """
+               Wraps ``optparse.add_option_group``::
+
+                       def options(ctx):
+                               gr = ctx.add_option_group('some options')
+                               gr.add_option('-u', '--use', dest='use', default=False, action='store_true')
+
+               :rtype: optparse option group object
+               """
+               try:
+                       gr = self.option_groups[k[0]]
+               except KeyError:
+                       gr = self.parser.add_option_group(*k, **kw)
+               self.option_groups[k[0]] = gr
+               return gr
+
+       def get_option_group(self, opt_str):
+               """
+               Wraps ``optparse.get_option_group``::
+
+                       def options(ctx):
+                               gr = ctx.get_option_group('configure options')
+                               gr.add_option('-o', '--out', action='store', default='',
+                                       help='build dir for the project', dest='out')
+
+               :rtype: optparse option group object
+               """
+               try:
+                       return self.option_groups[opt_str]
+               except KeyError:
+                       for group in self.parser.option_groups:
+                               if group.title == opt_str:
+                                       return group
+                       return None
+
+       def parse_args(self, _args=None):
+               """
+               Parses arguments from a list which is not necessarily the command-line.
+
+               :param _args: arguments
+               :type _args: list of strings
+               """
+               global options, commands, envvars
+               (options, leftover_args) = self.parser.parse_args(args=_args)
+
+               for arg in leftover_args:
+                       if '=' in arg:
+                               envvars.append(arg)
+                       else:
+                               commands.append(arg)
+
+               if options.destdir:
+                       options.destdir = Utils.sane_path(options.destdir)
+
+               if options.verbose >= 1:
+                       self.load('errcheck')
+
+               colors = {'yes' : 2, 'auto' : 1, 'no' : 0}[options.colors]
+               Logs.enable_colors(colors)
+
+       def execute(self):
+               """
+               See :py:func:`waflib.Context.Context.execute`
+               """
+               super(OptionsContext, self).execute()
+               self.parse_args()
+               Utils.alloc_process_pool(options.jobs)
diff --git a/third_party/waf/waflib/Runner.py b/third_party/waf/waflib/Runner.py
new file mode 100644 (file)
index 0000000..1e37401
--- /dev/null
@@ -0,0 +1,353 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2016 (ita)
+
+"""
+Runner.py: Task scheduling and execution
+"""
+
+import random
+try:
+       from queue import Queue
+except ImportError:
+       from Queue import Queue
+from waflib import Utils, Task, Errors, Logs
+
+GAP = 20
+"""
+Wait for at least ``GAP * njobs`` before trying to enqueue more tasks to run
+"""
+
+class Consumer(Utils.threading.Thread):
+       """
+       Daemon thread object that executes a task. It shares a semaphore with
+       the coordinator :py:class:`waflib.Runner.Spawner`. There is one
+       instance per task to consume.
+       """
+       def __init__(self, spawner, task):
+               Utils.threading.Thread.__init__(self)
+               self.task = task
+               """Task to execute"""
+               self.spawner = spawner
+               """Coordinator object"""
+               self.setDaemon(1)
+               self.start()
+       def run(self):
+               """
+               Processes a single task
+               """
+               try:
+                       if not self.spawner.master.stop:
+                               self.task.process()
+               finally:
+                       self.spawner.sem.release()
+                       self.spawner.master.out.put(self.task)
+                       self.task = None
+                       self.spawner = None
+
+class Spawner(Utils.threading.Thread):
+       """
+       Daemon thread that consumes tasks from :py:class:`waflib.Runner.Parallel` producer and
+       spawns a consuming thread :py:class:`waflib.Runner.Consumer` for each
+       :py:class:`waflib.Task.TaskBase` instance.
+       """
+       def __init__(self, master):
+               Utils.threading.Thread.__init__(self)
+               self.master = master
+               """:py:class:`waflib.Runner.Parallel` producer instance"""
+               self.sem = Utils.threading.Semaphore(master.numjobs)
+               """Bounded semaphore that prevents spawning more than *n* concurrent consumers"""
+               self.setDaemon(1)
+               self.start()
+       def run(self):
+               """
+               Spawns new consumers to execute tasks by delegating to :py:meth:`waflib.Runner.Spawner.loop`
+               """
+               try:
+                       self.loop()
+               except Exception:
+                       # Python 2 prints unnecessary messages when shutting down
+                       # we also want to stop the thread properly
+                       pass
+       def loop(self):
+               """
+               Consumes task objects from the producer; ends when the producer has no more
+               tasks to provide.
+               """
+               master = self.master
+               while 1:
+                       task = master.ready.get()
+                       self.sem.acquire()
+                       if not master.stop:
+                               task.log_display(task.generator.bld)
+                       Consumer(self, task)
+
+class Parallel(object):
+       """
+       Schedule the tasks obtained from the build context for execution.
+       """
+       def __init__(self, bld, j=2):
+               """
+               The initialization requires a build context reference
+               for computing the total number of jobs.
+               """
+
+               self.numjobs = j
+               """
+               Amount of parallel consumers to use
+               """
+
+               self.bld = bld
+               """
+               Instance of :py:class:`waflib.Build.BuildContext`
+               """
+
+               self.outstanding = Utils.deque()
+               """List of :py:class:`waflib.Task.TaskBase` that may be ready to be executed"""
+
+               self.frozen = Utils.deque()
+               """List of :py:class:`waflib.Task.TaskBase` that are not ready yet"""
+
+               self.ready = Queue(0)
+               """List of :py:class:`waflib.Task.TaskBase` ready to be executed by consumers"""
+
+               self.out = Queue(0)
+               """List of :py:class:`waflib.Task.TaskBase` returned by the task consumers"""
+
+               self.count = 0
+               """Amount of tasks that may be processed by :py:class:`waflib.Runner.TaskConsumer`"""
+
+               self.processed = 1
+               """Amount of tasks processed"""
+
+               self.stop = False
+               """Error flag to stop the build"""
+
+               self.error = []
+               """Tasks that could not be executed"""
+
+               self.biter = None
+               """Task iterator which must give groups of parallelizable tasks when calling ``next()``"""
+
+               self.dirty = False
+               """
+               Flag that indicates that the build cache must be saved when a task was executed
+               (calls :py:meth:`waflib.Build.BuildContext.store`)"""
+
+               self.spawner = Spawner(self)
+               """
+               Coordinating daemon thread that spawns thread consumers
+               """
+
+       def get_next_task(self):
+               """
+               Obtains the next Task instance to run
+
+               :rtype: :py:class:`waflib.Task.TaskBase`
+               """
+               if not self.outstanding:
+                       return None
+               return self.outstanding.popleft()
+
+       def postpone(self, tsk):
+               """
+               Adds the task to the list :py:attr:`waflib.Runner.Parallel.frozen`.
+               The order is scrambled so as to consume as many tasks in parallel as possible.
+
+               :param tsk: task instance
+               :type tsk: :py:class:`waflib.Task.TaskBase`
+               """
+               if random.randint(0, 1):
+                       self.frozen.appendleft(tsk)
+               else:
+                       self.frozen.append(tsk)
+
+       def refill_task_list(self):
+               """
+               Adds the next group of tasks to execute in :py:attr:`waflib.Runner.Parallel.outstanding`.
+               """
+               while self.count > self.numjobs * GAP:
+                       self.get_out()
+
+               while not self.outstanding:
+                       if self.count:
+                               self.get_out()
+                       elif self.frozen:
+                               try:
+                                       cond = self.deadlock == self.processed
+                               except AttributeError:
+                                       pass
+                               else:
+                                       if cond:
+                                               msg = 'check the build order for the tasks'
+                                               for tsk in self.frozen:
+                                                       if not tsk.run_after:
+                                                               msg = 'check the methods runnable_status'
+                                                               break
+                                               lst = []
+                                               for tsk in self.frozen:
+                                                       lst.append('%s\t-> %r' % (repr(tsk), [id(x) for x in tsk.run_after]))
+                                               raise Errors.WafError('Deadlock detected: %s%s' % (msg, ''.join(lst)))
+                               self.deadlock = self.processed
+
+                       if self.frozen:
+                               self.outstanding.extend(self.frozen)
+                               self.frozen.clear()
+                       elif not self.count:
+                               self.outstanding.extend(self.biter.next())
+                               self.total = self.bld.total()
+                               break
+
+       def add_more_tasks(self, tsk):
+               """
+               If a task provides :py:attr:`waflib.Task.TaskBase.more_tasks`, then the tasks contained
+               in that list are added to the current build and will be processed before the next build group.
+
+               :param tsk: task instance
+               :type tsk: :py:attr:`waflib.Task.TaskBase`
+               """
+               if getattr(tsk, 'more_tasks', None):
+                       self.outstanding.extend(tsk.more_tasks)
+                       self.total += len(tsk.more_tasks)
+
+       def get_out(self):
+               """
+               Waits for a Task that task consumers add to :py:attr:`waflib.Runner.Parallel.out` after execution.
+               Adds more Tasks if necessary through :py:attr:`waflib.Runner.Parallel.add_more_tasks`.
+
+               :rtype: :py:attr:`waflib.Task.TaskBase`
+               """
+               tsk = self.out.get()
+               if not self.stop:
+                       self.add_more_tasks(tsk)
+               self.count -= 1
+               self.dirty = True
+               return tsk
+
+       def add_task(self, tsk):
+               """
+               Enqueue a Task to :py:attr:`waflib.Runner.Parallel.ready` so that consumers can run them.
+
+               :param tsk: task instance
+               :type tsk: :py:attr:`waflib.Task.TaskBase`
+               """
+               self.ready.put(tsk)
+
+       def skip(self, tsk):
+               """
+               Mark a task as skipped/up-to-date
+               """
+               tsk.hasrun = Task.SKIPPED
+
+       def error_handler(self, tsk):
+               """
+               Called when a task cannot be executed. The flag :py:attr:`waflib.Runner.Parallel.stop` is set, unless
+               the build is executed with::
+
+                       $ waf build -k
+
+               :param tsk: task instance
+               :type tsk: :py:attr:`waflib.Task.TaskBase`
+               """
+               if hasattr(tsk, 'scan') and hasattr(tsk, 'uid'):
+                       # TODO waf 2.0 - this breaks encapsulation
+                       try:
+                               del self.bld.imp_sigs[tsk.uid()]
+                       except KeyError:
+                               pass
+               if not self.bld.keep:
+                       self.stop = True
+               self.error.append(tsk)
+
+       def task_status(self, tsk):
+               """
+               Obtains the task status to decide whether to run it immediately or not.
+
+               :return: the exit status, for example :py:attr:`waflib.Task.ASK_LATER`
+               :rtype: integer
+               """
+               try:
+                       return tsk.runnable_status()
+               except Exception:
+                       self.processed += 1
+                       tsk.err_msg = Utils.ex_stack()
+                       if not self.stop and self.bld.keep:
+                               self.skip(tsk)
+                               if self.bld.keep == 1:
+                                       # if -k stop at the first exception, if -kk try to go as far as possible
+                                       if Logs.verbose > 1 or not self.error:
+                                               self.error.append(tsk)
+                                       self.stop = True
+                               else:
+                                       if Logs.verbose > 1:
+                                               self.error.append(tsk)
+                               return Task.EXCEPTION
+                       tsk.hasrun = Task.EXCEPTION
+
+                       self.error_handler(tsk)
+                       return Task.EXCEPTION
+
+       def start(self):
+               """
+               Obtains Task instances from the BuildContext instance and adds the ones that need to be executed to
+               :py:class:`waflib.Runner.Parallel.ready` so that the :py:class:`waflib.Runner.Spawner` consumer thread
+               has them executed. Obtains the executed Tasks back from :py:class:`waflib.Runner.Parallel.out`
+               and marks the build as failed by setting the ``stop`` flag.
+               If only one job is used, then executes the tasks one by one, without consumers.
+               """
+               self.total = self.bld.total()
+
+               while not self.stop:
+
+                       self.refill_task_list()
+
+                       # consider the next task
+                       tsk = self.get_next_task()
+                       if not tsk:
+                               if self.count:
+                                       # tasks may add new ones after they are run
+                                       continue
+                               else:
+                                       # no tasks to run, no tasks running, time to exit
+                                       break
+
+                       if tsk.hasrun:
+                               # if the task is marked as "run", just skip it
+                               self.processed += 1
+                               continue
+
+                       if self.stop: # stop immediately after a failure was detected
+                               break
+
+
+                       st = self.task_status(tsk)
+                       if st == Task.RUN_ME:
+                               self.count += 1
+                               self.processed += 1
+
+                               if self.numjobs == 1:
+                                       tsk.log_display(tsk.generator.bld)
+                                       try:
+                                               tsk.process()
+                                       finally:
+                                               self.out.put(tsk)
+                               else:
+                                       self.add_task(tsk)
+                       if st == Task.ASK_LATER:
+                               self.postpone(tsk)
+                       elif st == Task.SKIP_ME:
+                               self.processed += 1
+                               self.skip(tsk)
+                               self.add_more_tasks(tsk)
+
+               # self.count represents the tasks that have been made available to the consumer threads
+               # collect all the tasks after an error else the message may be incomplete
+               while self.error and self.count:
+                       self.get_out()
+
+               self.ready.put(None)
+               assert (self.count == 0 or self.stop)
diff --git a/third_party/waf/waflib/Scripting.py b/third_party/waf/waflib/Scripting.py
new file mode 100644 (file)
index 0000000..9b27840
--- /dev/null
@@ -0,0 +1,627 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2016 (ita)
+
+"Module called for configuring, compiling and installing targets"
+
+import os, shlex, shutil, traceback, errno, sys, stat
+from waflib import Utils, Configure, Logs, Options, ConfigSet, Context, Errors, Build, Node
+
+build_dir_override = None
+
+no_climb_commands = ['configure']
+
+default_cmd = "build"
+
+def waf_entry_point(current_directory, version, wafdir):
+       """
+       This is the main entry point, all Waf execution starts here.
+
+       :param current_directory: absolute path representing the current directory
+       :type current_directory: string
+       :param version: version number
+       :type version: string
+       :param wafdir: absolute path representing the directory of the waf library
+       :type wafdir: string
+       """
+
+       Logs.init_log()
+
+       if Context.WAFVERSION != version:
+               Logs.error('Waf script %r and library %r do not match (directory %r)', version, Context.WAFVERSION, wafdir)
+               sys.exit(1)
+
+       if '--version' in sys.argv:
+               Context.run_dir = current_directory
+               ctx = Context.create_context('options')
+               ctx.curdir = current_directory
+               ctx.parse_args()
+               sys.exit(0)
+
+       if len(sys.argv) > 1:
+               # os.path.join handles absolute paths in sys.argv[1] accordingly (it discards the previous ones)
+               # if sys.argv[1] is not an absolute path, then it is relative to the current working directory
+               potential_wscript = os.path.join(current_directory, sys.argv[1])
+               # maybe check if the file is executable
+               # perhaps extract 'wscript' as a constant
+               if os.path.basename(potential_wscript) == 'wscript' and os.path.isfile(potential_wscript):
+                       # need to explicitly normalize the path, as it may contain extra '/.'
+                       # TODO abspath?
+                       current_directory = os.path.normpath(os.path.dirname(potential_wscript))
+                       sys.argv.pop(1)
+
+       Context.waf_dir = wafdir
+       Context.launch_dir = current_directory
+
+       # if 'configure' is in the commands, do not search any further
+       no_climb = os.environ.get('NOCLIMB')
+       if not no_climb:
+               for k in no_climb_commands:
+                       for y in sys.argv:
+                               if y.startswith(k):
+                                       no_climb = True
+                                       break
+
+       # if --top is provided assume the build started in the top directory
+       for i, x in enumerate(sys.argv):
+               # WARNING: this modifies sys.argv
+               if x.startswith('--top='):
+                       Context.run_dir = Context.top_dir = Utils.sane_path(x[6:])
+                       sys.argv[i] = '--top=' + Context.run_dir
+               if x.startswith('--out='):
+                       Context.out_dir = Utils.sane_path(x[6:])
+                       sys.argv[i] = '--out=' + Context.out_dir
+
+       # try to find a lock file (if the project was configured)
+       # at the same time, store the first wscript file seen
+       cur = current_directory
+       while cur and not Context.top_dir:
+               try:
+                       lst = os.listdir(cur)
+               except OSError:
+                       lst = []
+                       Logs.error('Directory %r is unreadable!', cur)
+               if Options.lockfile in lst:
+                       env = ConfigSet.ConfigSet()
+                       try:
+                               env.load(os.path.join(cur, Options.lockfile))
+                               ino = os.stat(cur)[stat.ST_INO]
+                       except EnvironmentError:
+                               pass
+                       else:
+                               # check if the folder was not moved
+                               for x in (env.run_dir, env.top_dir, env.out_dir):
+                                       if not x:
+                                               continue
+                                       if Utils.is_win32:
+                                               if cur == x:
+                                                       load = True
+                                                       break
+                                       else:
+                                               # if the filesystem features symlinks, compare the inode numbers
+                                               try:
+                                                       ino2 = os.stat(x)[stat.ST_INO]
+                                               except OSError:
+                                                       pass
+                                               else:
+                                                       if ino == ino2:
+                                                               load = True
+                                                               break
+                               else:
+                                       Logs.warn('invalid lock file in %s', cur)
+                                       load = False
+
+                               if load:
+                                       Context.run_dir = env.run_dir
+                                       Context.top_dir = env.top_dir
+                                       Context.out_dir = env.out_dir
+                                       break
+
+               if not Context.run_dir:
+                       if Context.WSCRIPT_FILE in lst:
+                               Context.run_dir = cur
+
+               next = os.path.dirname(cur)
+               if next == cur:
+                       break
+               cur = next
+
+               if no_climb:
+                       break
+
+       if not Context.run_dir:
+               if '-h' in sys.argv or '--help' in sys.argv:
+                       Logs.warn('No wscript file found: the help message may be incomplete')
+                       Context.run_dir = current_directory
+                       ctx = Context.create_context('options')
+                       ctx.curdir = current_directory
+                       ctx.parse_args()
+                       sys.exit(0)
+               Logs.error('Waf: Run from a directory containing a file named %r', Context.WSCRIPT_FILE)
+               sys.exit(1)
+
+       try:
+               os.chdir(Context.run_dir)
+       except OSError:
+               Logs.error('Waf: The folder %r is unreadable', Context.run_dir)
+               sys.exit(1)
+
+       try:
+               set_main_module(os.path.normpath(os.path.join(Context.run_dir, Context.WSCRIPT_FILE)))
+       except Errors.WafError ,e:
+               Logs.pprint('RED', e.verbose_msg)
+               Logs.error(str(e))
+               sys.exit(1)
+       except Exception ,e:
+               Logs.error('Waf: The wscript in %r is unreadable', Context.run_dir)
+               traceback.print_exc(file=sys.stdout)
+               sys.exit(2)
+
+       if '--profile' in sys.argv:
+               import cProfile, pstats
+               cProfile.runctx('from waflib import Scripting; Scripting.run_commands()', {}, {}, 'profi.txt')
+               p = pstats.Stats('profi.txt')
+               p.sort_stats('time').print_stats(75) # or 'cumulative'
+       else:
+               try:
+                       run_commands()
+               except Errors.WafError ,e:
+                       if Logs.verbose > 1:
+                               Logs.pprint('RED', e.verbose_msg)
+                       Logs.error(e.msg)
+                       sys.exit(1)
+               except SystemExit:
+                       raise
+               except Exception ,e:
+                       traceback.print_exc(file=sys.stdout)
+                       sys.exit(2)
+               except KeyboardInterrupt:
+                       Logs.pprint('RED', 'Interrupted')
+                       sys.exit(68)
+
+def set_main_module(file_path):
+       """
+       Read the main wscript file into :py:const:`waflib.Context.Context.g_module` and
+       bind default functions such as ``init``, ``dist``, ``distclean`` if not defined.
+       Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization.
+
+       :param file_path: absolute path representing the top-level wscript file
+       :type file_path: string
+       """
+       Context.g_module = Context.load_module(file_path)
+       Context.g_module.root_path = file_path
+
+       # note: to register the module globally, use the following:
+       # sys.modules['wscript_main'] = g_module
+
+       def set_def(obj):
+               name = obj.__name__
+               if not name in Context.g_module.__dict__:
+                       setattr(Context.g_module, name, obj)
+       for k in (dist, distclean, distcheck):
+               set_def(k)
+       # add dummy init and shutdown functions if they're not defined
+       if not 'init' in Context.g_module.__dict__:
+               Context.g_module.init = Utils.nada
+       if not 'shutdown' in Context.g_module.__dict__:
+               Context.g_module.shutdown = Utils.nada
+       if not 'options' in Context.g_module.__dict__:
+               Context.g_module.options = Utils.nada
+
+def parse_options():
+	"""
+	Parses the command-line options and initializes the logging system.
+	Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization.
+	"""
+	Context.create_context('options').execute()
+
+	# apply NAME=VALUE assignments given on the command line to the environment
+	for var in Options.envvars:
+		(name, value) = var.split('=', 1)
+		os.environ[name.strip()] = value
+
+	if not Options.commands:
+		Options.commands = [default_cmd]
+	Options.commands = [x for x in Options.commands if x != 'options'] # issue 1076
+
+	# process some internal Waf options
+	Logs.verbose = Options.options.verbose
+	#Logs.init_log()
+
+	if Options.options.zones:
+		# --zones=a,b restricts the debug output to the given zones and implies -v
+		Logs.zones = Options.options.zones.split(',')
+		if not Logs.verbose:
+			Logs.verbose = 1
+	elif Logs.verbose > 0:
+		# plain -v enables the 'runner' (command execution) zone only
+		Logs.zones = ['runner']
+
+	if Logs.verbose > 2:
+		# -vvv enables every debugging zone
+		Logs.zones = ['*']
+
+def run_command(cmd_name):
+       """
+       Executes a single Waf command. Called by :py:func:`waflib.Scripting.run_commands`.
+
+       :param cmd_name: command to execute, like ``build``
+       :type cmd_name: string
+       """
+       ctx = Context.create_context(cmd_name)
+       ctx.log_timer = Utils.Timer()
+       ctx.options = Options.options # provided for convenience
+       ctx.cmd = cmd_name
+       try:
+               ctx.execute()
+       finally:
+               # Issue 1374
+               ctx.finalize()
+       return ctx
+
+def run_commands():
+       """
+       Execute the Waf commands that were given on the command-line, and the other options
+       Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization, and executed
+       after :py:func:`waflib.Scripting.parse_options`.
+       """
+       parse_options()
+       run_command('init')
+       while Options.commands:
+               cmd_name = Options.commands.pop(0)
+               ctx = run_command(cmd_name)
+               Logs.info('%r finished successfully (%s)', cmd_name, ctx.log_timer)
+       run_command('shutdown')
+
+###########################################################################################
+
+def distclean_dir(dirname):
+       """
+       Distclean function called in the particular case when::
+
+               top == out
+
+       :param dirname: absolute path of the folder to clean
+       :type dirname: string
+       """
+       for (root, dirs, files) in os.walk(dirname):
+               for f in files:
+                       if f.endswith(('.o', '.moc', '.exe')):
+                               fname = os.path.join(root, f)
+                               try:
+                                       os.remove(fname)
+                               except OSError:
+                                       Logs.warn('Could not remove %r', fname)
+
+       for x in (Context.DBFILE, 'config.log'):
+               try:
+                       os.remove(x)
+               except OSError:
+                       pass
+
+       try:
+               shutil.rmtree('c4che')
+       except OSError:
+               pass
+
+def distclean(ctx):
+       '''removes the build directory'''
+       lst = os.listdir('.')
+       for f in lst:
+               if f == Options.lockfile:
+                       try:
+                               proj = ConfigSet.ConfigSet(f)
+                       except IOError:
+                               Logs.warn('Could not read %r', f)
+                               continue
+
+                       if proj['out_dir'] != proj['top_dir']:
+                               try:
+                                       shutil.rmtree(proj['out_dir'])
+                               except EnvironmentError ,e:
+                                       if e.errno != errno.ENOENT:
+                                               Logs.warn('Could not remove %r', proj['out_dir'])
+                       else:
+                               distclean_dir(proj['out_dir'])
+
+                       for k in (proj['out_dir'], proj['top_dir'], proj['run_dir']):
+                               p = os.path.join(k, Options.lockfile)
+                               try:
+                                       os.remove(p)
+                               except OSError ,e:
+                                       if e.errno != errno.ENOENT:
+                                               Logs.warn('Could not remove %r', p)
+
+               # remove local waf cache folders
+               if not Options.commands:
+                       for x in '.waf-1. waf-1. .waf3-1. waf3-1.'.split():
+                               if f.startswith(x):
+                                       shutil.rmtree(f, ignore_errors=True)
+
+class Dist(Context.Context):
+	'''creates an archive containing the project source code'''
+	cmd = 'dist'
+	fun = 'dist'
+	# archive type: 'tar.bz2' (default), 'tar.gz', 'tar.xz' or 'zip'
+	algo = 'tar.bz2'
+	# optional mapping from algo name to a custom file extension
+	ext_algo = {}
+
+	def execute(self):
+		"""
+		See :py:func:`waflib.Context.Context.execute`
+		"""
+		self.recurse([os.path.dirname(Context.g_module.root_path)])
+		self.archive()
+
+	def archive(self):
+		"""
+		Creates the source archive.
+		"""
+		import tarfile
+
+		arch_name = self.get_arch_name()
+
+		try:
+			self.base_path
+		except AttributeError:
+			self.base_path = self.path
+
+		node = self.base_path.make_node(arch_name)
+		try:
+			# remove a pre-existing archive of the same name
+			node.delete()
+		except OSError:
+			pass
+
+		files = self.get_files()
+
+		if self.algo.startswith('tar.'):
+			tar = tarfile.open(node.abspath(), 'w:' + self.algo.replace('tar.', ''))
+
+			for x in files:
+				self.add_tar_file(x, tar)
+			tar.close()
+		elif self.algo == 'zip':
+			import zipfile
+			# note: 'zip' shadows the builtin within this branch
+			zip = zipfile.ZipFile(node.abspath(), 'w', compression=zipfile.ZIP_DEFLATED)
+
+			for x in files:
+				archive_name = self.get_base_name() + '/' + x.path_from(self.base_path)
+				zip.write(x.abspath(), archive_name, zipfile.ZIP_DEFLATED)
+			zip.close()
+		else:
+			self.fatal('Valid algo types are tar.bz2, tar.gz, tar.xz or zip')
+
+		try:
+			from hashlib import sha1
+		except ImportError:
+			digest = ''
+		else:
+			# append a checksum of the archive to the log message
+			digest = ' (sha=%r)' % sha1(node.read(flags='rb')).hexdigest()
+
+		Logs.info('New archive created: %s%s', self.arch_name, digest)
+
+	def get_tar_path(self, node):
+		"""
+		Return the path to use for a node in the tar archive, the purpose of this
+		is to let subclasses resolve symbolic links or to change file names
+
+		:return: absolute path
+		:rtype: string
+		"""
+		return node.abspath()
+
+	def add_tar_file(self, x, tar):
+		"""
+		Adds a file to the tar archive. Symlinks are not verified.
+
+		:param x: file path
+		:param tar: tar file object
+		"""
+		p = self.get_tar_path(x)
+		tinfo = tar.gettarinfo(name=p, arcname=self.get_tar_prefix() + '/' + x.path_from(self.base_path))
+		# normalize ownership so archives are reproducible across machines
+		tinfo.uid   = 0
+		tinfo.gid   = 0
+		tinfo.uname = 'root'
+		tinfo.gname = 'root'
+
+		if os.path.isfile(p):
+			fu = open(p, 'rb')
+			try:
+				tar.addfile(tinfo, fileobj=fu)
+			finally:
+				fu.close()
+		else:
+			# non-regular files (directories, links, ...) carry no payload
+			tar.addfile(tinfo)
+
+	def get_tar_prefix(self):
+		"""
+		Returns the base path for files added into the archive tar file
+
+		:rtype: string
+		"""
+		try:
+			return self.tar_prefix
+		except AttributeError:
+			return self.get_base_name()
+
+	def get_arch_name(self):
+		"""
+		Returns the archive file name.
+		Set the attribute *arch_name* to change the default value::
+
+			def dist(ctx):
+				ctx.arch_name = 'ctx.tar.bz2'
+
+		:rtype: string
+		"""
+		# computed once, then cached on the instance
+		try:
+			self.arch_name
+		except AttributeError:
+			self.arch_name = self.get_base_name() + '.' + self.ext_algo.get(self.algo, self.algo)
+		return self.arch_name
+
+	def get_base_name(self):
+		"""
+		Returns the default name of the main directory in the archive, which is set to *appname-version*.
+		Set the attribute *base_name* to change the default value::
+
+			def dist(ctx):
+				ctx.base_name = 'files'
+
+		:rtype: string
+		"""
+		try:
+			self.base_name
+		except AttributeError:
+			appname = getattr(Context.g_module, Context.APPNAME, 'noname')
+			version = getattr(Context.g_module, Context.VERSION, '1.0')
+			self.base_name = appname + '-' + version
+		return self.base_name
+
+	def get_excl(self):
+		"""
+		Returns the patterns to exclude for finding the files in the top-level directory.
+		Set the attribute *excl* to change the default value::
+
+			def dist(ctx):
+				ctx.excl = 'build **/*.o **/*.class'
+
+		:rtype: string
+		"""
+		try:
+			return self.excl
+		except AttributeError:
+			self.excl = Node.exclude_regs + ' **/waf-1.8.* **/.waf-1.8* **/waf3-1.8.* **/.waf3-1.8* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*'
+			if Context.out_dir:
+				# also exclude the build directory when it lies below base_path
+				nd = self.root.find_node(Context.out_dir)
+				if nd:
+					self.excl += ' ' + nd.path_from(self.base_path)
+			return self.excl
+
+	def get_files(self):
+		"""
+		Files to package are searched automatically by :py:func:`waflib.Node.Node.ant_glob`.
+		Set *files* to prevent this behaviour::
+
+			def dist(ctx):
+				ctx.files = ctx.path.find_node('wscript')
+
+		Files are also searched from the directory 'base_path', to change it, set::
+
+			def dist(ctx):
+				ctx.base_path = path
+
+		:rtype: list of :py:class:`waflib.Node.Node`
+		"""
+		try:
+			files = self.files
+		except AttributeError:
+			files = self.base_path.ant_glob('**/*', excl=self.get_excl())
+		return files
+
+def dist(ctx):
+	'''makes a tarball for redistributing the sources'''
+	# placeholder hook: the work is done by the Dist context class; wscript
+	# files may override this function to customize ctx (base_name, excl, ...)
+	pass
+
+class DistCheck(Dist):
+       """
+       Creates an archive of the project, then attempts to build the project in a temporary directory::
+
+               $ waf distcheck
+       """
+       fun = 'distcheck'
+       cmd = 'distcheck'
+
+       def execute(self):
+               """
+               See :py:func:`waflib.Context.Context.execute`
+               """
+               self.recurse([os.path.dirname(Context.g_module.root_path)])
+               self.archive()
+               self.check()
+
+       def make_distcheck_cmd(self, tmpdir):
+               cfg = []
+               if Options.options.distcheck_args:
+                       cfg = shlex.split(Options.options.distcheck_args)
+               else:
+                       cfg = [x for x in sys.argv if x.startswith('-')]
+               cmd = [sys.executable, sys.argv[0], 'configure', 'build', 'install', 'uninstall', '--destdir=' + tmpdir] + cfg
+               return cmd
+
+       def check(self):
+               """
+               Creates the archive, uncompresses it and tries to build the project
+               """
+               import tempfile, tarfile
+
+               try:
+                       t = tarfile.open(self.get_arch_name())
+                       for x in t:
+                               t.extract(x)
+               finally:
+                       t.close()
+
+               instdir = tempfile.mkdtemp('.inst', self.get_base_name())
+               cmd = self.make_distcheck_cmd(instdir)
+               ret = Utils.subprocess.Popen(cmd, cwd=self.get_base_name()).wait()
+               if ret:
+                       raise Errors.WafError('distcheck failed with code %r' % ret)
+
+               if os.path.exists(instdir):
+                       raise Errors.WafError('distcheck succeeded, but files were left in %s' % instdir)
+
+               shutil.rmtree(self.get_base_name())
+
+
+def distcheck(ctx):
+	'''checks if the project compiles (tarball from 'dist')'''
+	# placeholder hook: the work is done by the DistCheck context class
+	pass
+
+def autoconfigure(execute_method):
+	"""
+	Decorator that enables context commands to run *configure* as needed.
+	"""
+	def execute(self):
+		"""
+		Wraps :py:func:`waflib.Context.Context.execute` on the context class
+		"""
+		if not Configure.autoconfig:
+			return execute_method(self)
+
+		# decide whether the configuration is missing or out of date
+		env = ConfigSet.ConfigSet()
+		do_config = False
+		try:
+			env.load(os.path.join(Context.top_dir, Options.lockfile))
+		except EnvironmentError:
+			# no readable lock file: the project was never configured
+			Logs.warn('Configuring the project')
+			do_config = True
+		else:
+			if env.run_dir != Context.run_dir:
+				# the project tree was moved since the last configuration
+				do_config = True
+			else:
+				# reconfigure when any file recorded at configure time changed
+				h = 0
+				for f in env.files:
+					try:
+						h = Utils.h_list((h, Utils.readf(f, 'rb')))
+					except EnvironmentError:
+						do_config = True
+						break
+				else:
+					do_config = h != env.hash
+
+		if do_config:
+			cmd = env.config_cmd or 'configure'
+			if Configure.autoconfig == 'clobber':
+				# re-run configure with the options saved during the previous run
+				tmp = Options.options.__dict__
+				Options.options.__dict__ = env.options
+				try:
+					run_command(cmd)
+				finally:
+					Options.options.__dict__ = tmp
+			else:
+				run_command(cmd)
+			# then run the originally requested command from scratch
+			run_command(self.cmd)
+		else:
+			return execute_method(self)
+	return execute
+# make build-like commands configure the project automatically when needed
+Build.BuildContext.execute = autoconfigure(Build.BuildContext.execute)
diff --git a/third_party/waf/waflib/Task.py b/third_party/waf/waflib/Task.py
new file mode 100644 (file)
index 0000000..44db70c
--- /dev/null
@@ -0,0 +1,1242 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2016 (ita)
+
+"""
+Tasks represent atomic operations such as processes.
+"""
+
+import os, re, sys, tempfile
+from waflib import Utils, Logs, Errors
+
+# task states
+NOT_RUN = 0
+"""The task was not executed yet"""
+
+MISSING = 1
+"""The task has been executed but the files have not been created"""
+
+CRASHED = 2
+"""The task execution returned a non-zero exit status"""
+
+EXCEPTION = 3
+"""An exception occurred in the task execution"""
+
+SKIPPED = 8
+"""The task did not have to be executed"""
+
+SUCCESS = 9
+"""The task was successfully executed"""
+
+# return codes for runnable_status-style queries
+ASK_LATER = -1
+"""The task is not ready to be executed"""
+
+SKIP_ME = -2
+"""The task does not need to be executed"""
+
+RUN_ME = -3
+"""The task must be executed"""
+
+# NOTE(review): code template consumed through %-formatting elsewhere
+# (presumably compile_fun_shell, not visible here); keep the escaped quotes
+# and the '%s' slot exactly as they are
+COMPILE_TEMPLATE_SHELL = '''
+def f(tsk):
+	env = tsk.env
+	gen = tsk.generator
+	bld = gen.bld
+	cwdx = tsk.get_cwd()
+	p = env.get_flat
+	tsk.last_cmd = cmd = \'\'\' %s \'\'\' % s
+	return tsk.exec_command(cmd, cwd=cwdx, env=env.env or None)
+'''
+
+# NOTE(review): companion template for the no-shell execution path; the '%s'
+# slot is filled with statements extending 'lst' (presumably by
+# compile_fun_noshell, not visible here)
+COMPILE_TEMPLATE_NOSHELL = '''
+def f(tsk):
+	env = tsk.env
+	gen = tsk.generator
+	bld = gen.bld
+	cwdx = tsk.get_cwd()
+	def to_list(xx):
+		if isinstance(xx, str): return [xx]
+		return xx
+	def merge(lst1, lst2):
+		if lst1 and lst2:
+			return lst1[:-1] + [lst1[-1] + lst2[0]] + lst2[1:]
+		return lst1 + lst2
+	lst = []
+	%s
+	if '' in lst:
+		lst = [x for x in lst if x]
+	tsk.last_cmd = lst
+	return tsk.exec_command(lst, cwd=cwdx, env=env.env or None)
+'''
+
+classes = {}
+"""
+The metaclass :py:class:`waflib.Task.store_task_type` stores all class tasks
+created by user scripts or Waf tools to this dict. It maps class names to class objects.
+"""
+
+class store_task_type(type):
+	"""
+	Metaclass: store the task classes into the dict pointed by the
+	class attribute 'register' which defaults to :py:const:`waflib.Task.classes`,
+
+	The attribute 'run_str' is compiled into a method 'run' bound to the task class.
+	"""
+	def __init__(cls, name, bases, dict):
+		super(store_task_type, cls).__init__(name, bases, dict)
+		name = cls.__name__
+
+		# 'evil' and 'TaskBase' are infrastructure classes, not real task classes
+		if name != 'evil' and name != 'TaskBase':
+			global classes
+			if getattr(cls, 'run_str', None):
+				# if a string is provided, convert it to a method
+				(f, dvars) = compile_fun(cls.run_str, cls.shell)
+				# keep a hash of the original string (presumably used for task
+				# signatures elsewhere — TODO confirm)
+				cls.hcode = Utils.h_cmd(cls.run_str)
+				cls.orig_run_str = cls.run_str
+				# change the name of run_str or it is impossible to subclass with a function
+				cls.run_str = None
+				cls.run = f
+				cls.vars = list(set(cls.vars + dvars))
+				cls.vars.sort()
+			elif getattr(cls, 'run', None) and not 'hcode' in cls.__dict__:
+				# getattr(cls, 'hcode') would look in the upper classes
+				cls.hcode = Utils.h_cmd(cls.run)
+
+			# be creative
+			getattr(cls, 'register', classes)[name] = cls
+
+# calling the metaclass directly yields a base class usable under both the
+# Python 2 and Python 3 metaclass syntaxes
+evil = store_task_type('evil', (object,), {})
+"Base class provided to avoid writing a metaclass, so the code can run in python 2.6 and 3.x unmodified"
+
+class TaskBase(evil):
+       """
+       Base class for all Waf tasks, which should be seen as an interface.
+       For illustration purposes, instances of this class will execute the attribute
+       'fun' in :py:meth:`waflib.Task.TaskBase.run`. When in doubt, create
+       subclasses of :py:class:`waflib.Task.Task` instead.
+
+       Subclasses must override these methods:
+
+       #. __str__: string to display to the user
+       #. runnable_status: ask the task if it should be run, skipped, or if we have to ask later
+       #. run: what to do to execute the task
+       #. post_run: what to do after the task has been executed
+       """
+
+       color = 'GREEN'
+       """Color for the console display, see :py:const:`waflib.Logs.colors_lst`"""
+
+       ext_in = []
+       """File extensions that objects of this task class may use"""
+
+       ext_out = []
+       """File extensions that objects of this task class may create"""
+
+       before = []
+       """List of task class names to execute before instances of this class"""
+
+       after = []
+       """List of task class names to execute after instances of this class"""
+
+       hcode = ''
+       """String representing an additional hash for the class representation"""
+
+       keep_last_cmd = False
+       """Whether to keep the last command executed on the instance after execution.
+       This may be useful for certain extensions but it can a lot of memory.
+       """
+
+       __slots__ = ('hasrun', 'generator')
+
+       def __init__(self, *k, **kw):
+               """
+               The base task class requires a task generator (set to *self* if missing)
+               """
+               self.hasrun = NOT_RUN
+               try:
+                       self.generator = kw['generator']
+               except KeyError:
+                       self.generator = self
+
+	def __repr__(self):
+		# debugging helper: class name, instance id and the wrapped function (if
+		# any), on a fresh indented line for readable task list dumps
+		return '\n\t{task %r: %s %s}' % (self.__class__.__name__, id(self), str(getattr(self, 'fun', '')))
+
+       def __str__(self):
+               "String to display to the user"
+               if hasattr(self, 'fun'):
+                       return self.fun.__name__
+               return self.__class__.__name__
+
+       def keyword(self):
+               "Display keyword used to prettify the console outputs"
+               if hasattr(self, 'fun'):
+                       return 'Function'
+               return 'Processing'
+
+       def get_cwd(self):
+               """
+               :return: current working directory
+               :rtype: :py:class:`waflib.Node.Node`
+               """
+               bld = self.generator.bld
+               ret = getattr(self, 'cwd', None) or getattr(bld, 'cwd', bld.bldnode)
+               if isinstance(ret, str):
+                       if os.path.isabs(ret):
+                               ret = bld.root.make_node(ret)
+                       else:
+                               ret = self.generator.path.make_node(ret)
+               return ret
+
+       def quote_flag(self, x):
+               """
+               Surround a process argument by quotes so that a list of arguments can be written to a file
+
+               :param x: flag
+               :type x: string
+               :return: quoted flag
+               :rtype: string
+               """
+               old = x
+               if '\\' in x:
+                       x = x.replace('\\', '\\\\')
+               if '"' in x:
+                       x = x.replace('"', '\\"')
+               if old != x or ' ' in x or '\t' in x or "'" in x:
+                       x = '"%s"' % x
+               return x
+
+       def split_argfile(self, cmd):
+               """
+               Splits a list of process commands into the executable part and its list of arguments
+
+               :return: a tuple containing the executable first and then the rest of arguments
+               :rtype: tuple
+               """
+               return ([cmd[0]], [self.quote_flag(x) for x in cmd[1:]])
+
+       def exec_command(self, cmd, **kw):
+               """
+               Wrapper for :py:meth:`waflib.Context.Context.exec_command`.
+               This version set the current working directory (``build.variant_dir``),
+               applies PATH settings (if self.env.PATH is provided), and can run long
+               commands through a temporary ``@argfile``.
+
+               :param cmd: process command to execute
+               :type cmd: list of string (best) or string (process will use a shell)
+               :return: the return code
+               :rtype: int
+               """
+               if not 'cwd' in kw:
+                       kw['cwd'] = self.get_cwd()
+
+               if hasattr(self, 'timeout'):
+                       kw['timeout'] = self.timeout
+
+               if self.env.PATH:
+                       env = kw['env'] = dict(kw.get('env') or self.env.env or os.environ)
+                       env['PATH'] = self.env.PATH if isinstance(self.env.PATH, str) else os.pathsep.join(self.env.PATH)
+
+               # workaround for command line length limit:
+               # http://support.microsoft.com/kb/830473
+               if not isinstance(cmd, str) and (len(repr(cmd)) >= 8192 if Utils.is_win32 else len(cmd) > 200000):
+                       cmd, args = self.split_argfile(cmd)
+                       try:
+                               (fd, tmp) = tempfile.mkstemp()
+                               os.write(fd, '\r\n'.join(args))
+                               os.close(fd)
+                               if Logs.verbose:
+                                       Logs.debug('argfile: @%r -> %r', tmp, args)
+                               return self.generator.bld.exec_command(cmd + ['@' + tmp], **kw)
+                       finally:
+                               try:
+                                       os.remove(tmp)
+                               except OSError:
+                                       # anti-virus and indexers can keep files open -_-
+                                       pass
+               else:
+                       return self.generator.bld.exec_command(cmd, **kw)
+
	def runnable_status(self):
		"""
		Returns the Task status

		:return: a task state in :py:const:`waflib.Task.RUN_ME`, :py:const:`waflib.Task.SKIP_ME` or :py:const:`waflib.Task.ASK_LATER`.
		:rtype: int
		"""
		# base tasks keep no signatures to compare, so they always run
		return RUN_ME
+
	def uid(self):
		"""
		Computes a unique identifier for the task

		:rtype: string or bytes
		"""
		# base tasks have no inputs/outputs to hash; a constant nil value is used
		return Utils.SIG_NIL
+
	def process(self):
		"""
		Assume that the task has had a ``master`` which is an instance of :py:class:`waflib.Runner.Parallel`.
		Execute the task and then put it back in the queue :py:attr:`waflib.Runner.Parallel.out` (may be replaced by subclassing).

		:return: 0 or None if everything is fine
		:rtype: integer
		"""
		# remove the task signature immediately before it is executed
		# in case of failure the task will be executed again
		m = self.generator.bld.producer
		try:
			# TODO another place for this?
			del self.generator.bld.task_sigs[self.uid()]
		except KeyError:
			pass

		try:
			ret = self.run()
		except Exception:
			self.err_msg = Utils.ex_stack()
			self.hasrun = EXCEPTION

			# TODO cleanup
			m.error_handler(self)
			return

		# a truthy return value from run() is an error code (e.g. exit status)
		if ret:
			self.err_code = ret
			self.hasrun = CRASHED
		else:
			try:
				self.post_run()
			except Errors.WafError:
				pass
			except Exception:
				self.err_msg = Utils.ex_stack()
				self.hasrun = EXCEPTION
			else:
				self.hasrun = SUCCESS
		# notify the scheduler on any non-success outcome so it can stop or report
		if self.hasrun != SUCCESS:
			m.error_handler(self)
+
+       def run(self):
+               """
+               Called by threads to execute the tasks. The default is empty and meant to be overridden in subclasses.
+
+               .. warning:: It is a bad idea to create nodes in this method, so avoid :py:meth:`waflib.Node.Node.ant_glob`
+
+               :rtype: int
+               """
+               if hasattr(self, 'fun'):
+                       return self.fun(self)
+               return 0
+
	def post_run(self):
		"Update build data after successful Task execution. Override in subclasses."
		# nothing to record at this level
		pass
+
+       def log_display(self, bld):
+               "Writes the execution status on the context logger"
+               if self.generator.bld.progress_bar == 3:
+                       return
+
+               s = self.display()
+               if s:
+                       if bld.logger:
+                               logger = bld.logger
+                       else:
+                               logger = Logs
+
+                       if self.generator.bld.progress_bar == 1:
+                               c1 = Logs.colors.cursor_off
+                               c2 = Logs.colors.cursor_on
+                               logger.info(s, extra={'stream': sys.stderr, 'terminator':'', 'c1': c1, 'c2' : c2})
+                       else:
+                               logger.info(s, extra={'terminator':'', 'c1': '', 'c2' : ''})
+
	def display(self):
		"""
		Returns an execution status for the console, the progress bar, or the IDE output.

		:rtype: string
		"""
		col1 = Logs.colors(self.color)
		col2 = Logs.colors.NORMAL
		master = self.generator.bld.producer

		def cur():
			# the current task position, computed as late as possible
			tmp = -1
			if hasattr(master, 'ready'):
				tmp -= master.ready.qsize()
			return master.processed + tmp

		# progress_bar == 1: single updating progress line
		if self.generator.bld.progress_bar == 1:
			return self.generator.bld.progress_line(cur(), master.total, col1, col2)

		# progress_bar == 2: machine-parseable pipe-delimited records
		if self.generator.bld.progress_bar == 2:
			ela = str(self.generator.bld.timer)
			try:
				ins  = ','.join([n.name for n in self.inputs])
			except AttributeError:
				ins = ''
			try:
				outs = ','.join([n.name for n in self.outputs])
			except AttributeError:
				outs = ''
			return '|Total %s|Current %s|Inputs %s|Outputs %s|Time %s|\n' % (master.total, cur(), ins, outs, ela)

		s = str(self)
		if not s:
			return None

		# default: '[  3/120] Keyword description' with counters padded
		# to the width of the total task count
		total = master.total
		n = len(str(total))
		fs = '[%%%dd/%%%dd] %%s%%s%%s%%s\n' % (n, n)
		kw = self.keyword()
		if kw:
			kw += ' '
		return fs % (cur(), total, kw, col1, s, col2)
+
+       def hash_constraints(self):
+               """
+               Identifies a task type for all the constraints relevant for the scheduler: precedence, file production
+
+               :return: a hash value
+               :rtype: string
+               """
+               cls = self.__class__
+               tup = (str(cls.before), str(cls.after), str(cls.ext_in), str(cls.ext_out), cls.__name__, cls.hcode)
+               return hash(tup)
+
+       def format_error(self):
+               """
+               Returns an error message to display the build failure reasons
+
+               :rtype: string
+               """
+               if Logs.verbose:
+                       msg = ': %r\n%r' % (self, getattr(self, 'last_cmd', ''))
+               else:
+                       msg = ' (run with -v to display more information)'
+               name = getattr(self.generator, 'name', '')
+               if getattr(self, "err_msg", None):
+                       return self.err_msg
+               elif not self.hasrun:
+                       return 'task in %r was not executed for some reason: %r' % (name, self)
+               elif self.hasrun == CRASHED:
+                       try:
+                               return ' -> task in %r failed with exit status %r%s' % (name, self.err_code, msg)
+                       except AttributeError:
+                               return ' -> task in %r failed%s' % (name, msg)
+               elif self.hasrun == MISSING:
+                       return ' -> missing files in %r%s' % (name, msg)
+               else:
+                       return 'invalid status for task in %r: %r' % (name, self.hasrun)
+
+       def colon(self, var1, var2):
+               """
+               Enable scriptlet expressions of the form ${FOO_ST:FOO}
+               If the first variable (FOO_ST) is empty, then an empty list is returned
+
+               The results will be slightly different if FOO_ST is a list, for example::
+
+                       env.FOO_ST = ['-a', '-b']
+                       env.FOO_ST = '-I%s'
+                       # ${FOO_ST:FOO} returns
+                       ['-Ip1', '-Ip2']
+
+                       env.FOO    = ['p1', 'p2']
+                       # ${FOO_ST:FOO} returns
+                       ['-a', '-b', 'p1', '-a', '-b', 'p2']
+               """
+               tmp = self.env[var1]
+               if not tmp:
+                       return []
+
+               if isinstance(var2, str):
+                       it = self.env[var2]
+               else:
+                       it = var2
+               if isinstance(tmp, str):
+                       return [tmp % x for x in it]
+               else:
+                       lst = []
+                       for y in it:
+                               lst.extend(tmp)
+                               lst.append(y)
+                       return lst
+
class Task(TaskBase):
	"""
	This class deals with the filesystem (:py:class:`waflib.Node.Node`). The method :py:class:`waflib.Task.Task.runnable_status`
	uses a hash value (from :py:class:`waflib.Task.Task.signature`) which is persistent from build to build. When the value changes,
	the task has to be executed. The method :py:class:`waflib.Task.Task.post_run` will assign the task signature to the output
	nodes (if present).
	"""
	vars = []
	"""ConfigSet variables that should trigger a rebuild (class attribute used for :py:meth:`waflib.Task.Task.sig_vars`)"""

	always_run = False
	"""Specify whether task instances must always be executed or not (class attribute)"""

	shell = False
	"""Execute the command with the shell (class attribute)"""

	def __init__(self, *k, **kw):
		"""
		Creates a filesystem-aware task; a :py:class:`waflib.ConfigSet.ConfigSet`
		must be provided through ``kw['env']``.
		"""
		TaskBase.__init__(self, *k, **kw)

		self.env = kw['env']
		""":py:class:`waflib.ConfigSet.ConfigSet` object (make sure to provide one)"""

		self.inputs  = []
		"""List of input nodes, which represent the files used by the task instance"""

		self.outputs = []
		"""List of output nodes, which represent the files created by the task instance"""

		self.dep_nodes = []
		"""List of additional nodes to depend on"""

		self.run_after = set()
		"""Set of tasks that must be executed before this one"""

	def __str__(self):
		"string to display to the user"
		name = self.__class__.__name__
		# link-like tasks and source-less tasks show only their first output
		if self.outputs:
			if name.endswith(('lib', 'program')) or not self.inputs:
				node = self.outputs[0]
				return node.path_from(node.ctx.launch_node())
		if not (self.inputs or self.outputs):
			return self.__class__.__name__
		if len(self.inputs) == 1:
			node = self.inputs[0]
			return node.path_from(node.ctx.launch_node())

		# general case: 'classname: in1 in2 -> out1 out2'
		src_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.inputs])
		tgt_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.outputs])
		if self.outputs: sep = ' -> '
		else: sep = ''
		return '%s: %s%s%s' % (self.__class__.__name__, src_str, sep, tgt_str)

	def keyword(self):
		"""
		See :py:meth:`waflib.Task.TaskBase`
		"""
		name = self.__class__.__name__
		if name.endswith(('lib', 'program')):
			return 'Linking'
		if len(self.inputs) == 1 and len(self.outputs) == 1:
			return 'Compiling'
		if not self.inputs:
			if self.outputs:
				return 'Creating'
			else:
				return 'Running'
		return 'Processing'

	def __repr__(self):
		"for debugging purposes"
		try:
			ins = ",".join([x.name for x in self.inputs])
			outs = ",".join([x.name for x in self.outputs])
		except AttributeError:
			# inputs/outputs may contain plain values in unusual subclasses
			ins = ",".join([str(x) for x in self.inputs])
			outs = ",".join([str(x) for x in self.outputs])
		return "".join(['\n\t{task %r: ' % id(self), self.__class__.__name__, " ", ins, " -> ", outs, '}'])

	def uid(self):
		"""
		Returns an identifier used to determine if tasks are up-to-date. Since the
		identifier will be stored between executions, it must be:

			- unique for a task: no two tasks return the same value (for a given build context)
			- the same for a given task instance

		By default, the node paths, the class name, and the function are used
		as inputs to compute a hash.

		The pointer to the object (python built-in 'id') will change between build executions,
		and must be avoided in such hashes.

		:return: hash value
		:rtype: string
		"""
		try:
			return self.uid_
		except AttributeError:
			# computed once and cached on the instance
			m = Utils.md5(self.__class__.__name__)
			up = m.update
			for x in self.inputs + self.outputs:
				up(x.abspath())
			self.uid_ = m.digest()
			return self.uid_

	def set_inputs(self, inp):
		"""
		Appends the nodes to the *inputs* list

		:param inp: input nodes
		:type inp: node or list of nodes
		"""
		if isinstance(inp, list): self.inputs += inp
		else: self.inputs.append(inp)

	def set_outputs(self, out):
		"""
		Appends the nodes to the *outputs* list

		:param out: output nodes
		:type out: node or list of nodes
		"""
		if isinstance(out, list): self.outputs += out
		else: self.outputs.append(out)

	def set_run_after(self, task):
		"""
		Run this task only after the given *task*.

		:param task: task
		:type task: :py:class:`waflib.Task.Task`
		"""
		assert isinstance(task, TaskBase)
		self.run_after.add(task)

	def signature(self):
		"""
		Task signatures are stored between build executions, they are use to track the changes
		made to the input nodes (not to the outputs!). The signature hashes data from various sources:

		* explicit dependencies: files listed in the inputs (list of node objects) :py:meth:`waflib.Task.Task.sig_explicit_deps`
		* implicit dependencies: list of nodes returned by scanner methods (when present) :py:meth:`waflib.Task.Task.sig_implicit_deps`
		* hashed data: variables/values read from task.vars/task.env :py:meth:`waflib.Task.Task.sig_vars`

		If the signature is expected to give a different result, clear the cache kept in ``self.cache_sig``::

			from waflib import Task
			class cls(Task.Task):
				def signature(self):
					sig = super(Task.Task, self).signature()
					delattr(self, 'cache_sig')
					return super(Task.Task, self).signature()

		:return: the signature value
		:rtype: string or bytes
		"""
		try:
			return self.cache_sig
		except AttributeError:
			pass

		self.m = Utils.md5(self.hcode)

		# explicit deps
		self.sig_explicit_deps()

		# env vars
		self.sig_vars()

		# implicit deps / scanner results
		if self.scan:
			try:
				self.sig_implicit_deps()
			except Errors.TaskRescan:
				# the scan results were stale; restart with a fresh scan
				return self.signature()

		ret = self.cache_sig = self.m.digest()
		return ret

	def runnable_status(self):
		"""
		See :py:meth:`waflib.Task.TaskBase.runnable_status`
		"""
		#return 0 # benchmarking

		for t in self.run_after:
			if not t.hasrun:
				return ASK_LATER

		# first compute the signature
		try:
			new_sig = self.signature()
		except Errors.TaskNotReady:
			return ASK_LATER

		# compare the signature to a signature computed previously
		bld = self.generator.bld
		key = self.uid()
		try:
			prev_sig = bld.task_sigs[key]
		except KeyError:
			Logs.debug('task: task %r must run: it was never run before or the task code changed', self)
			return RUN_ME

		if new_sig != prev_sig:
			Logs.debug('task: task %r must run: the task signature changed', self)
			return RUN_ME

		# compare the signatures of the outputs
		# bld.node_sigs maps each output node to the uid of the task that created it
		for node in self.outputs:
			sig = bld.node_sigs.get(node)
			if not sig:
				Logs.debug('task: task %r must run: an output node has no signature', self)
				return RUN_ME
			if sig != key:
				Logs.debug('task: task %r must run: an output node was produced by another task', self)
				return RUN_ME
			if not node.exists():
				Logs.debug('task: task %r must run: an output node does not exist', self)
				return RUN_ME

		# always_run tasks are re-executed even when all signatures match
		return (self.always_run and RUN_ME) or SKIP_ME

	def post_run(self):
		"""
		Called after successful execution to record that the task has run by
		updating the entry in :py:attr:`waflib.Build.BuildContext.task_sigs`.
		"""
		bld = self.generator.bld
		for node in self.outputs:
			if not node.exists():
				self.hasrun = MISSING
				self.err_msg = '-> missing file: %r' % node.abspath()
				raise Errors.WafError(self.err_msg)
			bld.node_sigs[node] = self.uid() # make sure this task produced the files in question
		bld.task_sigs[self.uid()] = self.signature()
		if not self.keep_last_cmd:
			# free the memory held by the recorded command line
			try:
				del self.last_cmd
			except AttributeError:
				pass

	def sig_explicit_deps(self):
		"""
		Used by :py:meth:`waflib.Task.Task.signature`; it hashes :py:attr:`waflib.Task.Task.inputs`
		and :py:attr:`waflib.Task.Task.dep_nodes` signatures.
		"""
		bld = self.generator.bld
		upd = self.m.update

		# the inputs
		for x in self.inputs + self.dep_nodes:
			upd(x.get_bld_sig())

		# manual dependencies, they can slow down the builds
		if bld.deps_man:
			additional_deps = bld.deps_man
			for x in self.inputs + self.outputs:
				try:
					d = additional_deps[x]
				except KeyError:
					continue

				for v in d:
					if isinstance(v, bld.root.__class__):
						v = v.get_bld_sig()
					elif hasattr(v, '__call__'):
						v = v() # dependency is a function, call it
					upd(v)

	def sig_vars(self):
		"""
		Used by :py:meth:`waflib.Task.Task.signature`; it hashes :py:attr:`waflib.Task.Task.env` variables/values
		"""
		sig = self.generator.bld.hash_env_vars(self.env, self.vars)
		self.m.update(sig)

	scan = None
	"""
	This method, when provided, returns a tuple containing:

	* a list of nodes corresponding to real files
	* a list of names for files not found in path_lst

	For example::

		from waflib.Task import Task
		class mytask(Task):
			def scan(self, node):
				return ([], [])

	The first and second lists in the tuple are stored in :py:attr:`waflib.Build.BuildContext.node_deps` and
	:py:attr:`waflib.Build.BuildContext.raw_deps` respectively.
	"""

	def sig_implicit_deps(self):
		"""
		Used by :py:meth:`waflib.Task.Task.signature`; it hashes node signatures
		obtained by scanning for dependencies (:py:meth:`waflib.Task.Task.scan`).

		The exception :py:class:`waflib.Errors.TaskRescan` is thrown
		when a file has changed. In this case, the method :py:meth:`waflib.Task.Task.signature` is called
		once again, and return here to call :py:meth:`waflib.Task.Task.scan` and searching for dependencies.
		"""
		bld = self.generator.bld

		# get the task signatures from previous runs
		key = self.uid()
		prev = bld.imp_sigs.get(key, [])

		# for issue #379
		if prev:
			try:
				if prev == self.compute_sig_implicit_deps():
					return prev
			except Errors.TaskNotReady:
				raise
			except EnvironmentError:
				# when a file was renamed, remove the stale nodes (headers in folders without source files)
				# this will break the order calculation for headers created during the build in the source directory (should be uncommon)
				# the behaviour will differ when top != out
				for x in bld.node_deps.get(self.uid(), []):
					if not x.is_bld() and not x.exists():
						try:
							del x.parent.children[x.name]
						except KeyError:
							pass
			del bld.imp_sigs[key]
			raise Errors.TaskRescan('rescan')

		# no previous run or the signature of the dependencies has changed, rescan the dependencies
		(bld.node_deps[key], bld.raw_deps[key]) = self.scan()
		if Logs.verbose:
			Logs.debug('deps: scanner for %s: %r; unresolved: %r', self, bld.node_deps[key], bld.raw_deps[key])

		# recompute the signature and return it
		try:
			bld.imp_sigs[key] = self.compute_sig_implicit_deps()
		except EnvironmentError:
			for k in bld.node_deps.get(self.uid(), []):
				if not k.exists():
					Logs.warn('Dependency %r for %r is missing: check the task declaration and the build order!', k, self)
			raise

	def compute_sig_implicit_deps(self):
		"""
		Used by :py:meth:`waflib.Task.Task.sig_implicit_deps` for computing the actual hash of the
		:py:class:`waflib.Node.Node` returned by the scanner.

		:return: a hash value for the implicit dependencies
		:rtype: string or bytes
		"""
		upd = self.m.update
		self.are_implicit_nodes_ready()

		# scanner returns a node that does not have a signature
		# just *ignore* the error and let them figure out from the compiler output
		# waf -k behaviour
		for k in self.generator.bld.node_deps.get(self.uid(), []):
			upd(k.get_bld_sig())
		return self.m.digest()

	def are_implicit_nodes_ready(self):
		"""
		For each node returned by the scanner, see if there is a task that creates it,
		and infer the build order

		This has a low performance impact on null builds (1.86s->1.66s) thanks to caching (28s->1.86s)
		"""
		bld = self.generator.bld
		try:
			cache = bld.dct_implicit_nodes
		except AttributeError:
			bld.dct_implicit_nodes = cache = {}

		# one cache per build group
		try:
			dct = cache[bld.current_group]
		except KeyError:
			dct = cache[bld.current_group] = {}
			for tsk in bld.cur_tasks:
				for x in tsk.outputs:
					dct[x] = tsk

		modified = False
		for x in bld.node_deps.get(self.uid(), []):
			if x in dct:
				self.run_after.add(dct[x])
				modified = True

		if modified:
			for tsk in self.run_after:
				if not tsk.hasrun:
					#print "task is not ready..."
					raise Errors.TaskNotReady('not ready')
# On Python 3 the md5 object only accepts bytes, so Task.uid is replaced by a
# version that encodes the class name and the node paths before hashing
# (iso8859-1 + xmlcharrefreplace keeps every code point representable)
if sys.hexversion > 0x3000000:
	def uid(self):
		try:
			return self.uid_
		except AttributeError:
			m = Utils.md5(self.__class__.__name__.encode('iso8859-1', 'xmlcharrefreplace'))
			up = m.update
			for x in self.inputs + self.outputs:
				up(x.abspath().encode('iso8859-1', 'xmlcharrefreplace'))
			self.uid_ = m.digest()
			return self.uid_
	uid.__doc__ = Task.uid.__doc__
	Task.uid = uid
+
def is_before(t1, t2):
	"""
	Returns a non-zero value if task t1 is to be executed before task t2::

		t1.ext_out = '.h'
		t2.ext_in = '.h'
		t2.after = ['t1']
		t1.before = ['t2']
		waflib.Task.is_before(t1, t2) # True

	:param t1: Task object
	:type t1: :py:class:`waflib.Task.TaskBase`
	:param t2: Task object
	:type t2: :py:class:`waflib.Task.TaskBase`
	"""
	to_list = Utils.to_list

	# file extension constraint: t1 produces something t2 consumes
	if any(ext in to_list(t1.ext_out) for ext in to_list(t2.ext_in)):
		return 1

	# explicit class name constraints
	if t1.__class__.__name__ in to_list(t2.after):
		return 1
	if t2.__class__.__name__ in to_list(t1.before):
		return 1

	return 0
+
def set_file_constraints(tasks):
	"""
	Updates the ``run_after`` attribute of all tasks based on the task inputs and outputs

	:param tasks: tasks
	:type tasks: list of :py:class:`waflib.Task.TaskBase`
	"""
	# map node ids to the tasks reading and writing them
	readers = Utils.defaultdict(set)
	writers = Utils.defaultdict(set)
	for tsk in tasks:
		for node in getattr(tsk, 'inputs', []) + getattr(tsk, 'dep_nodes', []):
			readers[id(node)].add(tsk)
		for node in getattr(tsk, 'outputs', []):
			writers[id(node)].add(tsk)

	# a node both produced and consumed creates an ordering constraint
	for key in set(readers.keys()).intersection(writers.keys()):
		for tsk in readers[key]:
			tsk.run_after.update(writers[key])
+
def set_precedence_constraints(tasks):
	"""
	Updates the ``run_after`` attribute of all tasks based on the after/before/ext_out/ext_in attributes

	:param tasks: tasks
	:type tasks: list of :py:class:`waflib.Task.TaskBase`
	"""
	# group tasks sharing the same ordering constraints; one representative
	# per group is enough for the pairwise comparisons below
	groups = Utils.defaultdict(list)
	for tsk in tasks:
		groups[tsk.hash_constraints()].append(tsk)

	keys = list(groups.keys())

	# this list should be short
	for i, key_i in enumerate(keys):
		rep_i = groups[key_i][0]
		for key_j in keys[i + 1:]:
			rep_j = groups[key_j][0]

			# orient the constraint according to the comparison
			if is_before(rep_i, rep_j):
				earlier, later = key_i, key_j
			elif is_before(rep_j, rep_i):
				earlier, later = key_j, key_i
			else:
				continue

			deps = set(groups[earlier])
			for tsk in groups[later]:
				tsk.run_after.update(deps)
+
def funex(c):
	"""
	Compiles a scriptlet expression into a Python function

	:param c: function to compile
	:type c: string
	:return: the function 'f' declared in the input string
	:rtype: function
	"""
	namespace = {}
	exec(c, namespace)
	return namespace['f']
+
# raw string: '\w' in a plain string is an invalid escape (DeprecationWarning)
re_cond = re.compile(r'(?P<var>\w+)|(?P<or>\|)|(?P<and>&)')
re_novar = re.compile(r'^(SRC|TGT)\W+.*?$')
reg_act = re.compile(r'(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})', re.M)
def compile_fun_shell(line):
	"""
	Creates a compiled function to execute a process through a sub-shell

	:param line: rule expression such as '${CC} ${SRC} -o ${TGT}'
	:type line: string
	:return: tuple of (compiled function, list of env variable names used)
	"""
	extr = []
	def repl(match):
		# substitute ${...} expressions by '%s' and record them in extr
		g = match.group
		if g('dollar'):
			return "$"
		elif g('backslash'):
			return '\\\\'
		elif g('subst'):
			extr.append((g('var'), g('code')))
			return "%s"
		return None
	line = reg_act.sub(repl, line) or line
	dvars = []

	def replc(m):
		# performs substitutions and populates dvars
		if m.group('and'):
			return ' and '
		elif m.group('or'):
			return ' or '
		else:
			x = m.group('var')
			if x not in dvars:
				dvars.append(x)
			return 'env[%r]' % x

	parm = []
	app = parm.append
	for (var, meth) in extr:
		if var == 'SRC':
			if meth: app('tsk.inputs%s' % meth)
			else: app('" ".join([a.path_from(cwdx) for a in tsk.inputs])')
		elif var == 'TGT':
			if meth: app('tsk.outputs%s' % meth)
			else: app('" ".join([a.path_from(cwdx) for a in tsk.outputs])')
		elif meth:
			if meth.startswith(':'):
				# a composed variable such as ${FOO:SRC}
				if var not in dvars:
					dvars.append(var)
				m = meth[1:]
				if m == 'SRC':
					m = '[a.path_from(cwdx) for a in tsk.inputs]'
				elif m == 'TGT':
					m = '[a.path_from(cwdx) for a in tsk.outputs]'
				elif re_novar.match(m):
					# ${VAR:SRC...} / ${VAR:TGT...}: apply the trailing python
					# expression to the node list. The original had two identical
					# 'elif re_novar.match(m)' tests, making the TGT branch
					# unreachable; dispatch on the prefix instead.
					if m.startswith('SRC'):
						m = '[tsk.inputs%s]' % m[3:]
					else:
						m = '[tsk.outputs%s]' % m[3:]
				elif m[:3] not in ('tsk', 'gen', 'bld'):
					dvars.append(meth[1:])
					m = '%r' % m
				app('" ".join(tsk.colon(%r, %s))' % (var, m))
			elif meth.startswith('?'):
				# In A?B|C output env.A if one of env.B or env.C is non-empty
				expr = re_cond.sub(replc, meth[1:])
				app('p(%r) if (%s) else ""' % (var, expr))
			else:
				# plain code such as ${tsk.inputs[0].abspath()}
				app('%s%s' % (var, meth))
		else:
			# a plain variable such as ${AR}
			if var not in dvars:
				dvars.append(var)
			app("p('%s')" % var)
	if parm: parm = "%% (%s) " % (',\n\t\t'.join(parm))
	else: parm = ''

	c = COMPILE_TEMPLATE_SHELL % (line, parm)
	Logs.debug('action: %s', c.strip().splitlines())
	return (funex(c), dvars)
+
+reg_act_noshell = re.compile(r"(?P<space>\s+)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})|(?P<text>([^$ \t\n\r\f\v]|\$\$)+)", re.M)
+def compile_fun_noshell(line):
+       """
+       Creates a compiled function to execute a process without a sub-shell
+       """
+       buf = []
+       dvars = []
+       merge = False
+       app = buf.append
+
+       def replc(m):
+               # performs substitutions and populates dvars
+               if m.group('and'):
+                       return ' and '
+               elif m.group('or'):
+                       return ' or '
+               else:
+                       x = m.group('var')
+                       if x not in dvars:
+                               dvars.append(x)
+                       return 'env[%r]' % x
+
+       for m in reg_act_noshell.finditer(line):
+               if m.group('space'):
+                       merge = False
+                       continue
+               elif m.group('text'):
+                       app('[%r]' % m.group('text').replace('$$', '$'))
+               elif m.group('subst'):
+                       var = m.group('var')
+                       code = m.group('code')
+                       if var == 'SRC':
+                               if code:
+                                       app('[tsk.inputs%s]' % code)
+                               else:
+                                       app('[a.path_from(cwdx) for a in tsk.inputs]')
+                       elif var == 'TGT':
+                               if code:
+                                       app('[tsk.outputs%s]' % code)
+                               else:
+                                       app('[a.path_from(cwdx) for a in tsk.outputs]')
+                       elif code:
+                               if code.startswith(':'):
+                                       # a composed variable ${FOO:OUT}
+                                       if not var in dvars:
+                                               dvars.append(var)
+                                       m = code[1:]
+                                       if m == 'SRC':
+                                               m = '[a.path_from(cwdx) for a in tsk.inputs]'
+                                       elif m == 'TGT':
+                                               m = '[a.path_from(cwdx) for a in tsk.outputs]'
+                                       elif re_novar.match(m):
+                                               m = '[tsk.inputs%s]' % m[3:]
+                                       elif re_novar.match(m):
+                                               m = '[tsk.outputs%s]' % m[3:]
+                                       elif m[:3] not in ('tsk', 'gen', 'bld'):
+                                               dvars.append(m)
+                                               m = '%r' % m
+                                       app('tsk.colon(%r, %s)' % (var, m))
+                               elif code.startswith('?'):
+                                       # In A?B|C output env.A if one of env.B or env.C is non-empty
+                                       expr = re_cond.sub(replc, code[1:])
+                                       app('to_list(env[%r] if (%s) else [])' % (var, expr))
+                               else:
+                                       # plain code such as ${tsk.inputs[0].abspath()}
+                                       app('gen.to_list(%s%s)' % (var, code))
+                       else:
+                               # a plain variable such as # a plain variable like ${AR}
+                               app('to_list(env[%r])' % var)
+                               if not var in dvars:
+                                       dvars.append(var)
+               if merge:
+                       tmp = 'merge(%s, %s)' % (buf[-2], buf[-1])
+                       del buf[-1]
+                       buf[-1] = tmp
+               merge = True # next turn
+
+       buf = ['lst.extend(%s)' % x for x in buf]
+       fun = COMPILE_TEMPLATE_NOSHELL % "\n\t".join(buf)
+       Logs.debug('action: %s', fun.strip().splitlines())
+       return (funex(fun), dvars)
+
def compile_fun(line, shell=False):
	"""
	Parses a string expression such as '${CC} ${SRC} -o ${TGT}' and returns a pair containing:

	* The function created (compiled) for use as :py:meth:`waflib.Task.TaskBase.run`
	* The list of variables that must cause rebuilds when *env* data is modified

	for example::

		from waflib.Task import compile_fun
		compile_fun('cxx', '${CXX} -o ${TGT[0]} ${SRC} -I ${SRC[0].parent.bldpath()}')

		def build(bld):
			bld(source='wscript', rule='echo "foo\\${SRC[0].name}\\bar"')

	The env variables (CXX, ..) on the task must not hold dicts so as to preserve a consistent order.
	The reserved keywords ``TGT`` and ``SRC`` represent the task input and output nodes

	"""
	if not isinstance(line, str):
		# a list of rules: compile the strings, pass callables through,
		# and chain everything into a single run function
		funs_lst = []
		dvars_lst = []
		for item in line:
			if isinstance(item, str):
				fun, dvars = compile_fun(item, shell)
				funs_lst.append(fun)
				dvars_lst += dvars
			else:
				# assume a function to let through
				funs_lst.append(item)
		def composed_fun(task):
			# stop at the first non-zero return value (error code)
			for fun in funs_lst:
				ret = fun(task)
				if ret:
					return ret
			return None
		return composed_fun, dvars_lst

	# shell metacharacters force execution through a sub-shell
	if line.find('<') > 0 or line.find('>') > 0 or line.find('&&') > 0:
		shell = True
	if shell:
		return compile_fun_shell(line)
	return compile_fun_noshell(line)
+
def task_factory(name, func=None, vars=None, color='GREEN', ext_in=None, ext_out=None, before=None, after=None, shell=False, scan=None):
	"""
	Returns a new task subclass with the function ``run`` compiled from the line given.

	:param name: name of the task class to create and register
	:type name: string
	:param func: method run
	:type func: string or function
	:param vars: list of variables to hash
	:type vars: list of string
	:param color: color to use
	:type color: string
	:param ext_in: execute the task only after files of such extensions are created
	:type ext_in: list of string
	:param ext_out: execute the task only before files of such extensions are processed
	:type ext_out: list of string
	:param before: execute instances of this task before classes of the given names
	:type before: list of string
	:param after: execute instances of this task after classes of the given names
	:type after: list of string
	:param shell: when *func* is a string, enable/disable the use of the shell
	:type shell: bool
	:param scan: method scan
	:type scan: function
	:rtype: :py:class:`waflib.Task.Task`
	"""

	params = {
		# build the list inside the function: the class may modify it, and a
		# mutable default argument would be shared between all created classes
		'vars': vars or [],
		'color': color,
		'name': name,
		'shell': shell,
		'scan': scan,
	}

	# strings and tuples are compiled lazily through run_str
	if isinstance(func, (str, tuple)):
		params['run_str'] = func
	else:
		params['run'] = func

	cls = type(Task)(name, (Task,), params)
	global classes
	classes[name] = cls

	# None defaults replace the original mutable [] defaults; both are falsy
	# so the behavior below is unchanged
	if ext_in:
		cls.ext_in = Utils.to_list(ext_in)
	if ext_out:
		cls.ext_out = Utils.to_list(ext_out)
	if before:
		cls.before = Utils.to_list(before)
	if after:
		cls.after = Utils.to_list(after)

	return cls
+
+
def always_run(cls):
	"""
	Deprecated Task class decorator (to be removed in waf 2.0)

	Marks all instances of the decorated task class for execution at every
	build: the signature is still computed, but the result of the signature
	comparison is ignored.
	"""
	Logs.warn('This decorator is deprecated, set always_run on the task class instead!')
	cls.always_run = True
	return cls
+
def update_outputs(cls):
	"""
	Obsolete decorator kept for backward compatibility only, to be removed
	in waf 2.0; returns the class unchanged.
	"""
	return cls
diff --git a/third_party/waf/waflib/TaskGen.py b/third_party/waf/waflib/TaskGen.py
new file mode 100644 (file)
index 0000000..07e854b
--- /dev/null
@@ -0,0 +1,891 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2016 (ita)
+
+"""
+Task generators
+
+The class :py:class:`waflib.TaskGen.task_gen` encapsulates the creation of task objects (low-level code)
+The instances can have various parameters, but the creation of task nodes (Task.py)
+is deferred. To achieve this, various methods are called from the method "apply"
+"""
+
+import copy, re, os, functools
+from waflib import Task, Utils, Logs, Errors, ConfigSet, Node
+
# maps a feature name to the set of task generator method names bound to it
feats = Utils.defaultdict(set)
"""remember the methods declaring features"""

# file extensions conventionally treated as c/c++ headers
# NOTE(review): consumers of this list are not visible here — confirm usage
HEADER_EXTS = ['.h', '.hpp', '.hxx', '.hh']
+
class task_gen(object):
	"""
	Instances of this class create :py:class:`waflib.Task.TaskBase` when
	calling the method :py:meth:`waflib.TaskGen.task_gen.post` from the main thread.
	A few notes:

	* The methods to call (*self.meths*) can be specified dynamically (removing, adding, ..)
	* The 'features' are used to add methods to self.meths and then execute them
	* The attribute 'path' is a node representing the location of the task generator
	* The tasks created are added to the attribute *tasks*
	* The attribute 'idx' is a counter of task generators in the same path
	"""

	mappings = Utils.ordered_iter_dict()
	"""Mappings are global file extension mappings that are retrieved in the order of definition"""

	prec = Utils.defaultdict(list)
	"""Dict that holds the precedence execution rules for task generator methods"""

	def __init__(self, *k, **kw):
		"""
		Task generator objects predefine various attributes (source, target) for possible
		processing by process_rule (make-like rules) or process_source (extensions, misc methods)

		Tasks are stored on the attribute 'tasks'. They are created by calling methods
		listed in ``self.meths`` or referenced in the attribute ``features``
		A topological sort is performed to execute the methods in correct order.

		The extra key/value elements passed in ``kw`` are set as attributes
		"""
		self.source = ''
		self.target = ''

		self.meths = []
		"""
		List of method names to execute (internal)
		"""

		self.features = []
		"""
		List of feature names for bringing new methods in
		"""

		self.tasks = []
		"""
		Tasks created are added to this list
		"""

		if not 'bld' in kw:
			# task generators without a build context :-/
			self.env = ConfigSet.ConfigSet()
			self.idx = 0
			self.path = None
		else:
			self.bld = kw['bld']
			self.env = self.bld.env.derive()
			self.path = self.bld.path # emulate chdir when reading scripts

			# provide a unique id
			try:
				self.idx = self.bld.idx[self.path] = self.bld.idx.get(self.path, 0) + 1
			except AttributeError:
				# very first task generator: the counter cache does not exist yet
				self.bld.idx = {}
				self.idx = self.bld.idx[self.path] = 1

		# extra keyword arguments become plain attributes
		for key, val in kw.items():
			setattr(self, key, val)

	def __str__(self):
		"""Debugging helper"""
		# NOTE(review): assumes a build context; self.path is None for
		# bld-less generators, so abspath() would fail there — confirm
		return "<task_gen %r declared in %s>" % (self.name, self.path.abspath())

	def __repr__(self):
		"""Debugging helper"""
		lst = []
		for x in self.__dict__:
			# the largest/noisiest attributes are omitted on purpose
			if x not in ('env', 'bld', 'compiled_tasks', 'tasks'):
				lst.append("%s=%s" % (x, repr(getattr(self, x))))
		return "bld(%s) in %s" % (", ".join(lst), self.path.abspath())

	def get_cwd(self):
		"""
		Current working directory for the task generator, defaults to the build directory.
		This is still used in a few places but it should disappear at some point as the classes
		define their own working directory.

		:rtype: :py:class:`waflib.Node.Node`
		"""
		return self.bld.bldnode

	def get_name(self):
		"""
		If the attribute ``name`` is not set on the instance,
		the name is computed from the target name::

			def build(bld):
				x = bld(name='foo')
				x.get_name() # foo
				y = bld(target='bar')
				y.get_name() # bar

		:rtype: string
		:return: name of this task generator
		"""
		try:
			return self._name
		except AttributeError:
			# derive (and cache) the name from the target attribute
			if isinstance(self.target, list):
				lst = [str(x) for x in self.target]
				name = self._name = ','.join(lst)
			else:
				name = self._name = str(self.target)
			return name
	def set_name(self, name):
		self._name = name

	name = property(get_name, set_name)

	def to_list(self, val):
		"""
		Ensures that a parameter is a list, see :py:func:`waflib.Utils.to_list`

		:type val: string or list of string
		:param val: input to return as a list
		:rtype: list
		"""
		if isinstance(val, str):
			return val.split()
		else:
			return val

	def post(self):
		"""
		Creates tasks for this task generators. The following operations are performed:

		#. The body of this method is called only once and sets the attribute ``posted``
		#. The attribute ``features`` is used to add more methods in ``self.meths``
		#. The methods are sorted by the precedence table ``self.prec`` or `:waflib:attr:waflib.TaskGen.task_gen.prec`
		#. The methods are then executed in order
		#. The tasks created are added to :py:attr:`waflib.TaskGen.task_gen.tasks`
		"""
		if getattr(self, 'posted', None):
			return False
		self.posted = True

		keys = set(self.meths)
		# methods bound to the wildcard feature always run
		keys.update(feats['*'])

		# add the methods listed in the features
		self.features = Utils.to_list(self.features)
		for x in self.features:
			st = feats[x]
			if st:
				keys.update(st)
			elif not x in Task.classes:
				Logs.warn('feature %r does not exist - bind at least one method to it?', x)

		# copy the precedence table
		prec = {}
		prec_tbl = self.prec
		for x in prec_tbl:
			if x in keys:
				prec[x] = prec_tbl[x]

		# elements disconnected (no method must run after them)
		tmp = []
		for a in keys:
			for x in prec.values():
				if a in x: break
			else:
				tmp.append(a)

		# sort for a reproducible execution order
		tmp.sort()

		# topological sort
		out = []
		while tmp:
			e = tmp.pop()
			if e in keys:
				out.append(e)
			try:
				nlst = prec[e]
			except KeyError:
				pass
			else:
				del prec[e]
				for x in nlst:
					# a successor becomes ready once no remaining entry orders it
					for y in prec:
						if x in prec[y]:
							break
					else:
						tmp.append(x)

		# anything left in prec is part of a dependency cycle
		if prec:
			buf = ['Cycle detected in the method execution:']
			for k, v in prec.items():
				buf.append('- %s after %s' % (k, [x for x in v if x in prec]))
			raise Errors.WafError('\n'.join(buf))
		out.reverse()
		self.meths = out

		# then we run the methods in order
		Logs.debug('task_gen: posting %s %d', self, id(self))
		for x in out:
			try:
				v = getattr(self, x)
			except AttributeError:
				raise Errors.WafError('%r is not a valid task generator method' % x)
			Logs.debug('task_gen: -> %s (%d)', x, id(self))
			v()

		Logs.debug('task_gen: posted %s', self.name)
		return True

	def get_hook(self, node):
		"""
		Returns the ``@extension`` method to call for a Node of a particular extension.

		:param node: Input file to process
		:type node: :py:class:`waflib.Tools.Node.Node`
		:return: A method able to process the input node by looking at the extension
		:rtype: function
		"""
		name = node.name
		# mapping keys are either extension strings or compiled regexps
		for k in self.mappings:
			try:
				if name.endswith(k):
					return self.mappings[k]
			except TypeError:
				# regexps objects
				if k.match(name):
					return self.mappings[k]
		keys = list(self.mappings.keys())
		raise Errors.WafError("File %r has no mapping in %r (load a waf tool?)" % (node, keys))

	def create_task(self, name, src=None, tgt=None, **kw):
		"""
		Creates task instances.

		:param name: task class name
		:type name: string
		:param src: input nodes
		:type src: list of :py:class:`waflib.Tools.Node.Node`
		:param tgt: output nodes
		:type tgt: list of :py:class:`waflib.Tools.Node.Node`
		:return: A task object
		:rtype: :py:class:`waflib.Task.TaskBase`
		"""
		# each task gets its own derived env so later changes do not leak
		task = Task.classes[name](env=self.env.derive(), generator=self)
		if src:
			task.set_inputs(src)
		if tgt:
			task.set_outputs(tgt)
		# extra keyword arguments become task attributes
		task.__dict__.update(kw)
		self.tasks.append(task)
		return task

	def clone(self, env):
		"""
		Makes a copy of a task generator. Once the copy is made, it is necessary to ensure that the
		it does not create the same output files as the original, or the same files may
		be compiled several times.

		:param env: A configuration set
		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
		:return: A copy
		:rtype: :py:class:`waflib.TaskGen.task_gen`
		"""
		newobj = self.bld()
		for x in self.__dict__:
			if x in ('env', 'bld'):
				continue
			elif x in ('path', 'features'):
				# shared by reference on purpose
				setattr(newobj, x, getattr(self, x))
			else:
				setattr(newobj, x, copy.copy(getattr(self, x)))

		# the clone must be able to create its own tasks
		newobj.posted = False
		if isinstance(env, str):
			newobj.env = self.bld.all_envs[env].derive()
		else:
			newobj.env = env.derive()

		return newobj
+
def declare_chain(name='', rule=None, reentrant=None, color='BLUE',
	ext_in=[], ext_out=[], before=[], after=[], decider=None, scan=None, install_path=None, shell=False):
	"""
	Creates a new mapping and a task class for processing files by extension.
	See Tools/flex.py for an example.

	:param name: name for the task class
	:type name: string
	:param rule: function to execute or string to be compiled in a function
	:type rule: string or function
	:param reentrant: re-inject the output file in the process (done automatically, set to 0 to disable)
	:type reentrant: int
	:param color: color for the task output
	:type color: string
	:param ext_in: execute the task only after the files of such extensions are created
	:type ext_in: list of string
	:param ext_out: execute the task only before files of such extensions are processed
	:type ext_out: list of string
	:param before: execute instances of this task before classes of the given names
	:type before: list of string
	:param after: execute instances of this task after classes of the given names
	:type after: list of string
	:param decider: if present, function that returns a list of output file extensions (overrides ext_out for output files, but not for the build order)
	:type decider: function
	:param scan: scanner function for the task
	:type scan: function
	:param install_path: installation path for the output nodes
	:type install_path: string
	:param shell: execute the rule through a sub-shell when it is a string
	:type shell: bool
	"""
	ext_in = Utils.to_list(ext_in)
	ext_out = Utils.to_list(ext_out)
	if not name:
		name = rule
	cls = Task.task_factory(name, rule, color=color, ext_in=ext_in, ext_out=ext_out, before=before, after=after, scan=scan, shell=shell)

	def x_file(self, node):
		# the first input extension drives Node.change_ext; passing None keeps
		# its default behavior. (The original left _ext_in unbound when ext_in
		# was empty, raising NameError in the loop below.)
		_ext_in = ext_in[0] if ext_in else None

		tsk = self.create_task(name, node)
		cnt = 0

		ext = decider(self, node) if decider else cls.ext_out
		for x in ext:
			k = node.change_ext(x, ext_in=_ext_in)
			tsk.outputs.append(k)

			if reentrant != None:
				if cnt < int(reentrant):
					self.source.append(k)
			else:
				# reinject downstream files into the build
				for y in self.mappings: # ~ nfile * nextensions :-/
					if k.name.endswith(y):
						self.source.append(k)
						break
			cnt += 1

		if install_path:
			self.install_task = self.add_install_files(install_to=install_path, install_from=tsk.outputs)
		return tsk

	for x in cls.ext_in:
		task_gen.mappings[x] = x_file
	return x_file
+
def taskgen_method(func):
	"""
	Decorator that binds a function to :py:class:`waflib.TaskGen.task_gen`
	as a method. The function must accept a task generator as first parameter::

		from waflib.TaskGen import taskgen_method
		@taskgen_method
		def mymethod(self):
			pass

	:param func: task generator method to add
	:type func: function
	:rtype: function
	"""
	setattr(task_gen, func.__name__, func)
	return func
+
def feature(*k):
	"""
	Decorator that registers a task generator method, to be executed when the
	object attribute ``feature`` contains the corresponding key(s)::

		from waflib.Task import feature
		@feature('myfeature')
		def myfunction(self):
			print('that is my feature!')
		def build(bld):
			bld(features='myfeature')

	:param k: feature names
	:type k: list of string
	"""
	def deco(func):
		# bind as a task_gen method and remember it under each feature name
		setattr(task_gen, func.__name__, func)
		for feat_name in k:
			feats[feat_name].add(func.__name__)
		return func
	return deco
+
def before_method(*k):
	"""
	Decorator that registers a task generator method which will be executed
	before the functions of given name(s)::

		from waflib.TaskGen import feature, before
		@feature('myfeature')
		@before_method('fun2')
		def fun1(self):
			print('feature 1!')
		@feature('myfeature')
		def fun2(self):
			print('feature 2!')
		def build(bld):
			bld(features='myfeature')

	:param k: method names
	:type k: list of string
	"""
	def deco(func):
		setattr(task_gen, func.__name__, func)
		# record that each named method must run after this one
		for fun_name in k:
			if not func.__name__ in task_gen.prec[fun_name]:
				task_gen.prec[fun_name].append(func.__name__)
				#task_gen.prec[fun_name].sort()
		return func
	return deco
before = before_method
+
def after_method(*k):
	"""
	Decorator that registers a task generator method which will be executed
	after the functions of given name(s)::

		from waflib.TaskGen import feature, after
		@feature('myfeature')
		@after_method('fun2')
		def fun1(self):
			print('feature 1!')
		@feature('myfeature')
		def fun2(self):
			print('feature 2!')
		def build(bld):
			bld(features='myfeature')

	:param k: method names
	:type k: list of string
	"""
	def deco(func):
		setattr(task_gen, func.__name__, func)
		# this method must run after each of the named ones
		predecessors = task_gen.prec[func.__name__]
		for fun_name in k:
			if fun_name not in predecessors:
				predecessors.append(fun_name)
		return func
	return deco
after = after_method
+
+def extension(*k):
+	"""
+	Decorator that registers a task generator method which will be invoked during
+	the processing of source files for the extension given::
+
+		from waflib import Task
+		class mytask(Task.Task):
+			run_str = 'cp ${SRC} ${TGT}'
+		@extension('.moo')
+		def create_maa_file(self, node):
+			self.create_task('mytask', node, node.change_ext('.maa'))
+		def build(bld):
+			bld(source='foo.moo')
+	"""
+	def deco(func):
+		setattr(task_gen, func.__name__, func)
+		for x in k:
+			task_gen.mappings[x] = func
+		return func
+	return deco
+
+# ---------------------------------------------------------------
+# The following methods are task generator methods commonly used
+# they are almost examples, the rest of waf core does not depend on them
+
+@taskgen_method
+def to_nodes(self, lst, path=None):
+	"""
+	Converts the input list into a list of nodes.
+	It is used by :py:func:`waflib.TaskGen.process_source` and :py:func:`waflib.TaskGen.process_rule`.
+	It is designed for source files, for folders, see :py:func:`waflib.Tools.ccroot.to_incnodes`:
+
+	:param lst: input list
+	:type lst: list of string and nodes
+	:param path: path from which to search the nodes (by default, :py:attr:`waflib.TaskGen.task_gen.path`)
+	:type path: :py:class:`waflib.Node.Node`
+	:rtype: list of :py:class:`waflib.Node.Node`
+	"""
+	tmp = []
+	path = path or self.path
+	find = path.find_resource
+
+	if isinstance(lst, Node.Node):
+		lst = [lst]
+
+	# either a list or a string, convert to a list of nodes
+	for x in Utils.to_list(lst):
+		if isinstance(x, str):
+			node = find(x)
+		else:
+			node = x
+		if not node:
+			raise Errors.WafError("source not found: %r in %r" % (x, self))
+		tmp.append(node)
+	return tmp
+
+@feature('*')
+def process_source(self):
+       """
+       Processes each element in the attribute ``source`` by extension.
+
+       #. The *source* list is converted through :py:meth:`waflib.TaskGen.to_nodes` to a list of :py:class:`waflib.Node.Node` first.
+       #. File extensions are mapped to methods having the signature: ``def meth(self, node)`` by :py:meth:`waflib.TaskGen.extension`
+       #. The method is retrieved through :py:meth:`waflib.TaskGen.task_gen.get_hook`
+       #. When called, the methods may modify self.source to append more source to process
+       #. The mappings can map an extension or a filename (see the code below)
+       """
+       self.source = self.to_nodes(getattr(self, 'source', []))
+       for node in self.source:
+               self.get_hook(node)(self, node)
+
+@feature('*')
+@before_method('process_source')
+def process_rule(self):
+	"""
+	Processes the attribute ``rule``. When present, :py:meth:`waflib.TaskGen.process_source` is disabled::
+
+		def build(bld):
+			bld(rule='cp ${SRC} ${TGT}', source='wscript', target='bar.txt')
+	"""
+	if not getattr(self, 'rule', None):
+		return
+
+	# create the task class
+	name = str(getattr(self, 'name', None) or self.target or getattr(self.rule, '__name__', self.rule))
+
+	# or we can put the class in a cache for performance reasons
+	try:
+		cache = self.bld.cache_rule_attr
+	except AttributeError:
+		cache = self.bld.cache_rule_attr = {}
+
+	chmod = getattr(self, 'chmod', None)
+	shell = getattr(self, 'shell', True)
+	color = getattr(self, 'color', 'BLUE')
+	scan = getattr(self, 'scan', None)
+	_vars = getattr(self, 'vars', [])
+	cls_str = getattr(self, 'cls_str', None)
+	cls_keyword = getattr(self, 'cls_keyword', None)
+	use_cache = getattr(self, 'cache_rule', 'True')  # NOTE(review): default is the truthy *string* 'True', not the boolean
+
+	scan_val = has_deps = hasattr(self, 'deps')
+	if scan:
+		scan_val = id(scan)
+
+	key = Utils.h_list((name, self.rule, chmod, shell, color, cls_str, cls_keyword, scan_val, _vars))
+
+	cls = None
+	if use_cache:
+		try:
+			cls = cache[key]
+		except KeyError:
+			pass
+	if not cls:
+		rule = self.rule
+		if chmod is not None:
+			def chmod_fun(tsk):
+				for x in tsk.outputs:
+					os.chmod(x.abspath(), tsk.generator.chmod)
+			if isinstance(rule, tuple):
+				rule = list(rule)
+				rule.append(chmod_fun)
+				rule = tuple(rule)
+			else:
+				rule = (rule, chmod_fun)
+
+		cls = Task.task_factory(name, rule, _vars, shell=shell, color=color)
+
+		if cls_str:
+			setattr(cls, '__str__', self.cls_str)
+
+		if cls_keyword:
+			setattr(cls, 'keyword', self.cls_keyword)
+
+		if scan:
+			cls.scan = self.scan
+		elif has_deps:
+			def scan(self):
+				nodes = []
+				for x in self.generator.to_list(getattr(self.generator, 'deps', None)):
+					node = self.generator.path.find_resource(x)
+					if not node:
+						self.generator.bld.fatal('Could not find %r (was it declared?)' % x)
+					nodes.append(node)
+				return [nodes, []]
+			cls.scan = scan
+
+		# TODO use these values in the cache key if provided
+		# (may cause excessive caching)
+		for x in ('after', 'before', 'ext_in', 'ext_out'):
+			setattr(cls, x, getattr(self, x, []))
+
+		if use_cache:
+			cache[key] = cls
+
+	# now create one instance
+	tsk = self.create_task(name)
+
+	if getattr(self, 'timeout', None):
+		tsk.timeout = self.timeout
+
+	if getattr(self, 'always', None):
+		tsk.always_run = True
+
+	if getattr(self, 'target', None):
+		if isinstance(self.target, str):
+			self.target = self.target.split()
+		if not isinstance(self.target, list):
+			self.target = [self.target]
+		for x in self.target:
+			if isinstance(x, str):
+				tsk.outputs.append(self.path.find_or_declare(x))
+			else:
+				x.parent.mkdir() # if a node was given, create the required folders
+				tsk.outputs.append(x)
+		if getattr(self, 'install_path', None):
+			self.install_task = self.add_install_files(install_to=self.install_path,
+				install_from=tsk.outputs, chmod=getattr(self, 'chmod', Utils.O644))
+
+	if getattr(self, 'source', None):
+		tsk.inputs = self.to_nodes(self.source)
+		# bypass the execution of process_source by setting the source to an empty list
+		self.source = []
+
+	if getattr(self, 'cwd', None):
+		tsk.cwd = self.cwd
+
+	if isinstance(tsk.run, functools.partial):
+		# Python documentation says: "partial objects defined in classes
+		# behave like static methods and do not transform into bound
+		# methods during instance attribute look-up."
+		tsk.run = functools.partial(tsk.run, tsk)
+
+@feature('seq')
+def sequence_order(self):
+       """
+       Adds a strict sequential constraint between the tasks generated by task generators.
+       It works because task generators are posted in order.
+       It will not post objects which belong to other folders.
+
+       Example::
+
+               bld(features='javac seq')
+               bld(features='jar seq')
+
+       To start a new sequence, set the attribute seq_start, for example::
+
+               obj = bld(features='seq')
+               obj.seq_start = True
+
+       Note that the method is executed in last position. This is more an
+       example than a widely-used solution.
+       """
+       if self.meths and self.meths[-1] != 'sequence_order':
+               self.meths.append('sequence_order')
+               return
+
+       if getattr(self, 'seq_start', None):
+               return
+
+       # all the tasks previously declared must be run before these
+       if getattr(self.bld, 'prev', None):
+               self.bld.prev.post()
+               for x in self.bld.prev.tasks:
+                       for y in self.tasks:
+                               y.set_run_after(x)
+
+       self.bld.prev = self
+
+
+re_m4 = re.compile('@(\w+)@', re.M)  # NOTE(review): '\w' in a non-raw string triggers a DeprecationWarning on Python 3.6+; r'@(\w+)@' would be safer (kept as-is to match upstream waf)
+
+class subst_pc(Task.Task):
+	"""
+	Creates *.pc* files from *.pc.in*. The task is executed whenever an input variable used
+	in the substitution changes.
+	"""
+
+	def force_permissions(self):
+		"Private for the time being, we will probably refactor this into run_str=[run1,chmod]"
+		if getattr(self.generator, 'chmod', None):
+			for x in self.outputs:
+				os.chmod(x.abspath(), self.generator.chmod)
+
+	def run(self):
+		"Substitutes variables in a .in file"
+
+		if getattr(self.generator, 'is_copy', None):
+			for i, x in enumerate(self.outputs):
+				x.write(self.inputs[i].read('rb'), 'wb')
+			self.force_permissions()
+			return None
+
+		if getattr(self.generator, 'fun', None):
+			ret = self.generator.fun(self)
+			if not ret:
+				self.force_permissions()
+			return ret
+
+		code = self.inputs[0].read(encoding=getattr(self.generator, 'encoding', 'ISO8859-1'))
+		if getattr(self.generator, 'subst_fun', None):
+			code = self.generator.subst_fun(self, code)
+			if code is not None:
+				self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'ISO8859-1'))
+			self.force_permissions()
+			return None
+
+		# replace all % by %% to prevent errors by % signs
+		code = code.replace('%', '%%')
+
+		# extract the vars foo into lst and replace @foo@ by %(foo)s
+		lst = []
+		def repl(match):
+			g = match.group
+			if g(1):
+				lst.append(g(1))
+				return "%%(%s)s" % g(1)
+			return ''
+		global re_m4  # redundant: re_m4 is only read here, never rebound
+		code = getattr(self.generator, 're_m4', re_m4).sub(repl, code)
+
+		try:
+			d = self.generator.dct
+		except AttributeError:
+			d = {}
+			for x in lst:
+				tmp = getattr(self.generator, x, '') or self.env[x] or self.env[x.upper()]
+				try:
+					tmp = ''.join(tmp)
+				except TypeError:
+					tmp = str(tmp)
+				d[x] = tmp
+
+		code = code % d
+		self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'ISO8859-1'))
+		self.generator.bld.raw_deps[self.uid()] = lst
+
+		# make sure the signature is updated
+		try: delattr(self, 'cache_sig')
+		except AttributeError: pass
+
+		self.force_permissions()
+
+	def sig_vars(self):
+		"""
+		Compute a hash (signature) of the variables used in the substitution
+		"""
+		bld = self.generator.bld
+		env = self.env
+		upd = self.m.update
+
+		if getattr(self.generator, 'fun', None):
+			upd(Utils.h_fun(self.generator.fun))
+		if getattr(self.generator, 'subst_fun', None):
+			upd(Utils.h_fun(self.generator.subst_fun))
+
+		# raw_deps: persistent custom values returned by the scanner
+		vars = self.generator.bld.raw_deps.get(self.uid(), [])
+
+		# hash both env vars and task generator attributes
+		act_sig = bld.hash_env_vars(env, vars)
+		upd(act_sig)
+
+		lst = [getattr(self.generator, x, '') for x in vars]
+		upd(Utils.h_list(lst))
+
+		return self.m.digest()
+
+@extension('.pc.in')
+def add_pcfile(self, node):
+	"""
+	Processes *.pc.in* files to *.pc*. Installs the results to ``${PREFIX}/lib/pkgconfig/`` by default::
+
+		def build(bld):
+			bld(source='foo.pc.in', install_path='${LIBDIR}/pkgconfig/')
+	"""
+	tsk = self.create_task('subst_pc', node, node.change_ext('.pc', '.pc.in'))
+	self.install_task = self.add_install_files(
+		install_to=getattr(self, 'install_path', '${LIBDIR}/pkgconfig/'), install_from=tsk.outputs)
+
+class subst(subst_pc):
+       pass
+
+@feature('subst')
+@before_method('process_source', 'process_rule')
+def process_subst(self):
+	"""
+	Defines a transformation that substitutes the contents of *source* files to *target* files::
+
+		def build(bld):
+			bld(
+				features='subst',
+				source='foo.c.in',
+				target='foo.c',
+				install_path='${LIBDIR}/pkgconfig',
+				VAR = 'val'
+			)
+
+	The input files are supposed to contain macros of the form *@VAR@*, where *VAR* is an argument
+	of the task generator object.
+
+	This method overrides the processing by :py:meth:`waflib.TaskGen.process_source`.
+	"""
+
+	src = Utils.to_list(getattr(self, 'source', []))
+	if isinstance(src, Node.Node):
+		src = [src]
+	tgt = Utils.to_list(getattr(self, 'target', []))
+	if isinstance(tgt, Node.Node):
+		tgt = [tgt]
+	if len(src) != len(tgt):
+		raise Errors.WafError('invalid number of source/target for %r' % self)
+
+	for x, y in zip(src, tgt):
+		if not x or not y:
+			raise Errors.WafError('null source or target for %r' % self)
+		a, b = None, None
+
+		if isinstance(x, str) and isinstance(y, str) and x == y:
+			a = self.path.find_node(x)
+			b = self.path.get_bld().make_node(y)
+			if not os.path.isfile(b.abspath()):
+				b.parent.mkdir()
+		else:
+			if isinstance(x, str):
+				a = self.path.find_resource(x)
+			elif isinstance(x, Node.Node):
+				a = x
+			if isinstance(y, str):
+				b = self.path.find_or_declare(y)
+			elif isinstance(y, Node.Node):
+				b = y
+
+		if not a:
+			raise Errors.WafError('could not find %r for %r' % (x, self))
+
+		has_constraints = False
+		tsk = self.create_task('subst', a, b)
+		for k in ('after', 'before', 'ext_in', 'ext_out'):
+			val = getattr(self, k, None)
+			if val:
+				has_constraints = True
+				setattr(tsk, k, val)
+
+		# paranoid safety measure for the general case foo.in->foo.h with ambiguous dependencies
+		if not has_constraints:
+			global HEADER_EXTS  # redundant: HEADER_EXTS is only read here, never rebound
+			for xt in HEADER_EXTS:
+				if b.name.endswith(xt):
+					tsk.before = [k for k in ('c', 'cxx') if k in Task.classes]
+					break
+
+		inst_to = getattr(self, 'install_path', None)
+		if inst_to:
+			self.install_task = self.add_install_files(install_to=inst_to,
+				install_from=b, chmod=getattr(self, 'chmod', Utils.O644))
+
+	self.source = []
diff --git a/third_party/waf/waflib/Tools/__init__.py b/third_party/waf/waflib/Tools/__init__.py
new file mode 100644 (file)
index 0000000..8766ecb
--- /dev/null
@@ -0,0 +1,7 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2016 (ita)
diff --git a/third_party/waf/waflib/Tools/ar.py b/third_party/waf/waflib/Tools/ar.py
new file mode 100644 (file)
index 0000000..2ee1a08
--- /dev/null
@@ -0,0 +1,27 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2016 (ita)
+# Ralf Habacker, 2006 (rh)
+
+"""
+The **ar** program creates static libraries. This tool is almost always loaded
+from others (C, C++, D, etc) for static library support.
+"""
+
+from waflib.Configure import conf
+
+@conf
+def find_ar(conf):
+       """Configuration helper used by C/C++ tools to enable the support for static libraries"""
+       conf.load('ar')
+
+def configure(conf):
+       """Finds the ar program and sets the default flags in ``conf.env.ARFLAGS``"""
+       conf.find_program('ar', var='AR')
+       conf.add_os_flags('ARFLAGS')
+       if not conf.env.ARFLAGS:
+               conf.env.ARFLAGS = ['rcs']
diff --git a/third_party/waf/waflib/Tools/asm.py b/third_party/waf/waflib/Tools/asm.py
new file mode 100644 (file)
index 0000000..f14a725
--- /dev/null
@@ -0,0 +1,77 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2008-2016 (ita)
+
+"""
+Assembly support, used by tools such as gas and nasm
+
+To declare targets using assembly::
+
+       def configure(conf):
+               conf.load('gcc gas')
+
+       def build(bld):
+               bld(
+                       features='c cstlib asm',
+                       source = 'test.S',
+                       target = 'asmtest')
+
+               bld(
+                       features='asm asmprogram',
+                       source = 'test.S',
+                       target = 'asmtest')
+
+Support for pure asm programs and libraries should also work::
+
+       def configure(conf):
+               conf.load('nasm')
+               conf.find_program('ld', 'ASLINK')
+
+       def build(bld):
+               bld(
+                       features='asm asmprogram',
+                       source = 'test.S',
+                       target = 'asmtest')
+"""
+
+from waflib import Task
+from waflib.Tools.ccroot import link_task, stlink_task
+from waflib.TaskGen import extension
+
+class asm(Task.Task):
+       """
+       Compiles asm files by gas/nasm/yasm/...
+       """
+       color = 'BLUE'
+       run_str = '${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}'
+
+@extension('.s', '.S', '.asm', '.ASM', '.spp', '.SPP')
+def asm_hook(self, node):
+       """
+       Binds the asm extension to the asm task
+
+       :param node: input file
+       :type node: :py:class:`waflib.Node.Node`
+       """
+       return self.create_compiled_task('asm', node)
+
+class asmprogram(link_task):
+       "Links object files into a c program"
+       run_str = '${ASLINK} ${ASLINKFLAGS} ${ASLNK_TGT_F}${TGT} ${ASLNK_SRC_F}${SRC}'
+       ext_out = ['.bin']
+       inst_to = '${BINDIR}'
+
+class asmshlib(asmprogram):
+       "Links object files into a c shared library"
+       inst_to = '${LIBDIR}'
+
+class asmstlib(stlink_task):
+       "Links object files into a c static library"
+       pass # do not remove
+
+def configure(conf):
+       conf.env.ASMPATH_ST = '-I%s'
diff --git a/third_party/waf/waflib/Tools/bison.py b/third_party/waf/waflib/Tools/bison.py
new file mode 100644 (file)
index 0000000..5b45583
--- /dev/null
@@ -0,0 +1,52 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# John O'Meara, 2006
+# Thomas Nagy 2009-2016 (ita)
+
+"""
+The **bison** program is a code generator which creates C or C++ files.
+The generated files are compiled into object files.
+"""
+
+from waflib import Task
+from waflib.TaskGen import extension
+
+class bison(Task.Task):
+       """Compiles bison files"""
+       color   = 'BLUE'
+       run_str = '${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}'
+       ext_out = ['.h'] # just to make sure
+
+@extension('.y', '.yc', '.yy')
+def big_bison(self, node):
+       """
+       Creates a bison task, which must be executed from the directory of the output file.
+       """
+       has_h = '-d' in self.env.BISONFLAGS
+
+       outs = []
+       if node.name.endswith('.yc'):
+               outs.append(node.change_ext('.tab.cc'))
+               if has_h:
+                       outs.append(node.change_ext('.tab.hh'))
+       else:
+               outs.append(node.change_ext('.tab.c'))
+               if has_h:
+                       outs.append(node.change_ext('.tab.h'))
+
+       tsk = self.create_task('bison', node, outs)
+       tsk.cwd = node.parent.get_bld()
+
+       # and the c/cxx file must be compiled too
+       self.source.append(outs[0])
+
+def configure(conf):
+       """
+       Detects the *bison* program
+       """
+       conf.find_program('bison', var='BISON')
+       conf.env.BISONFLAGS = ['-d']
diff --git a/third_party/waf/waflib/Tools/c.py b/third_party/waf/waflib/Tools/c.py
new file mode 100644 (file)
index 0000000..f54fd10
--- /dev/null
@@ -0,0 +1,42 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2016 (ita)
+
+"Base for c programs/libraries"
+
+from waflib import TaskGen, Task
+from waflib.Tools import c_preproc
+from waflib.Tools.ccroot import link_task, stlink_task
+
+@TaskGen.extension('.c')
+def c_hook(self, node):
+	"Binds the .c file extension to create :py:class:`waflib.Tools.c.c` task instances (falls back to cxx when only a C++ compiler is configured)"
+	if not self.env.CC and self.env.CXX:
+		return self.create_compiled_task('cxx', node)
+	return self.create_compiled_task('c', node)
+
+class c(Task.Task):
+       "Compiles C files into object files"
+       run_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}'
+       vars    = ['CCDEPS'] # unused variable to depend on, just in case
+       ext_in  = ['.h'] # set the build order easily by using ext_out=['.h']
+       scan    = c_preproc.scan
+
+class cprogram(link_task):
+       "Links object files into c programs"
+       run_str = '${LINK_CC} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}'
+       ext_out = ['.bin']
+       vars    = ['LINKDEPS']
+       inst_to = '${BINDIR}'
+
+class cshlib(cprogram):
+       "Links object files into c shared libraries"
+       inst_to = '${LIBDIR}'
+
+class cstlib(stlink_task):
+	"Links object files into c static libraries"
+	pass # do not remove
diff --git a/third_party/waf/waflib/Tools/c_aliases.py b/third_party/waf/waflib/Tools/c_aliases.py
new file mode 100644 (file)
index 0000000..6fa8c68
--- /dev/null
@@ -0,0 +1,147 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2015 (ita)
+
+"base for all c/c++ programs and libraries"
+
+from waflib import Utils, Errors
+from waflib.Configure import conf
+
+def get_extensions(lst):
+	"""
+	Returns the file extensions for the list of files given as input
+
+	:param lst: files to process
+	:type lst: list of string or :py:class:`waflib.Node.Node`
+	:return: list of file extensions
+	:rtype: list of string
+	"""
+	ret = []
+	for x in Utils.to_list(lst):
+		if not isinstance(x, str):
+			x = x.name
+		ret.append(x[x.rfind('.') + 1:])
+	return ret
+
+def sniff_features(**kw):
+	"""
+	Computes and returns the features required for a task generator by
+	looking at the file extensions. This aimed for C/C++ mainly::
+
+		sniff_features(source=['foo.c', 'foo.cxx'], typ='shlib')
+		# returns  ['cxx', 'c', 'cxxshlib', 'cshlib']
+
+	:param source: source files to process
+	:type source: list of string or :py:class:`waflib.Node.Node`
+	:param typ: object type in *program*, *shlib* or *stlib*
+	:type typ: string
+	:return: the list of features for a task generator processing the source files
+	:rtype: list of string
+	"""
+	exts = get_extensions(kw['source'])
+	typ = kw['typ']
+	feats = []
+
+	# watch the order, cxx will have the precedence
+	for x in 'cxx cpp c++ cc C'.split():
+		if x in exts:
+			feats.append('cxx')
+			break
+
+	if 'c' in exts or 'vala' in exts or 'gs' in exts:
+		feats.append('c')
+
+	for x in 'f f90 F F90 for FOR'.split():
+		if x in exts:
+			feats.append('fc')
+			break
+
+	if 'd' in exts:
+		feats.append('d')
+
+	if 'java' in exts:
+		feats.append('java')
+		return 'java'  # NOTE(review): a plain string, not a list; set_features() normalizes it via Utils.to_list
+
+	if typ in ('program', 'shlib', 'stlib'):
+		will_link = False
+		for x in feats:
+			if x in ('cxx', 'd', 'fc', 'c'):
+				feats.append(x + typ)
+				will_link = True
+		if not will_link and not kw.get('features', []):
+			raise Errors.WafError('Cannot link from %r, try passing eg: features="c cprogram"?' % kw)
+	return feats
+
+def set_features(kw, typ):
+       """
+       Inserts data in the input dict *kw* based on existing data and on the type of target
+       required (typ).
+
+       :param kw: task generator parameters
+       :type kw: dict
+       :param typ: type of target
+       :type typ: string
+       """
+       kw['typ'] = typ
+       kw['features'] = Utils.to_list(kw.get('features', [])) + Utils.to_list(sniff_features(**kw))
+
+@conf
+def program(bld, *k, **kw):
+       """
+       Alias for creating programs by looking at the file extensions::
+
+               def build(bld):
+                       bld.program(source='foo.c', target='app')
+                       # equivalent to:
+                       # bld(features='c cprogram', source='foo.c', target='app')
+
+       """
+       set_features(kw, 'program')
+       return bld(*k, **kw)
+
+@conf
+def shlib(bld, *k, **kw):
+       """
+       Alias for creating shared libraries by looking at the file extensions::
+
+               def build(bld):
+                       bld.shlib(source='foo.c', target='app')
+                       # equivalent to:
+                       # bld(features='c cshlib', source='foo.c', target='app')
+
+       """
+       set_features(kw, 'shlib')
+       return bld(*k, **kw)
+
+@conf
+def stlib(bld, *k, **kw):
+       """
+       Alias for creating static libraries by looking at the file extensions::
+
+               def build(bld):
+                       bld.stlib(source='foo.cpp', target='app')
+                       # equivalent to:
+                       # bld(features='cxx cxxstlib', source='foo.cpp', target='app')
+
+       """
+       set_features(kw, 'stlib')
+       return bld(*k, **kw)
+
+@conf
+def objects(bld, *k, **kw):
+       """
+       Alias for creating object files by looking at the file extensions::
+
+               def build(bld):
+                       bld.objects(source='foo.c', target='app')
+                       # equivalent to:
+                       # bld(features='c', source='foo.c', target='app')
+
+       """
+       set_features(kw, 'objects')
+       return bld(*k, **kw)
diff --git a/third_party/waf/waflib/Tools/c_config.py b/third_party/waf/waflib/Tools/c_config.py
new file mode 100644 (file)
index 0000000..b615600
--- /dev/null
@@ -0,0 +1,1426 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2016 (ita)
+
+"""
+C/C++/D configuration helpers
+"""
+
+from __future__ import with_statement
+
+import os, re, shlex
+from waflib import Build, Utils, Task, Options, Logs, Errors, Runner
+from waflib.TaskGen import after_method, feature
+from waflib.Configure import conf
+
WAF_CONFIG_H   = 'config.h'
"""default name for the config.h file"""

# conf.env key used for bookkeeping of configuration defines
# (consumer not visible in this file chunk — see get_config_header usage elsewhere)
DEFKEYS = 'define_key'
# conf.env key holding headers that are automatically #include'd in
# subsequent configuration tests (see validate_c/post_check below)
INCKEYS = 'include_key'

# Maps the obsolete check_cfg() version predicates to comparison operators;
# used by validate_cfg/exec_cfg below (scheduled for removal in waf 2.0)
cfg_ver = {
	'atleast-version': '>=',
	'exact-version': '==',
	'max-version': '<=',
}
+
SNIP_FUNCTION = '''
int main(int argc, char **argv) {
	void (*p)();
	(void)argc; (void)argv;
	p=(void(*)())(%s);
	return !p;
}
'''
"""Code template for checking for functions"""

SNIP_TYPE = '''
int main(int argc, char **argv) {
	(void)argc; (void)argv;
	if ((%(type_name)s *) 0) return 0;
	if (sizeof (%(type_name)s)) return 0;
	return 1;
}
'''
"""Code template for checking for types"""

# Minimal translation unit; used as the default test fragment and for
# plain header checks (see validate_c below)
SNIP_EMPTY_PROGRAM = '''
int main(int argc, char **argv) {
	(void)argc; (void)argv;
	return 0;
}
'''

# Code template for checking that a struct/type has a given field;
# substituted with %(type_name)s and %(field_name)s (see validate_c below)
SNIP_FIELD = '''
int main(int argc, char **argv) {
	char *off;
	(void)argc; (void)argv;
	off = (char*) &((%(type_name)s*)0)->%(field_name)s;
	return (size_t) off < sizeof(%(type_name)s);
}
'''
+
# Maps compiler-predefined platform macros to waf operating system names;
# presumably consumed when auto-detecting the target platform from the
# compiler's predefined macros (the consumer is outside this file chunk)
MACRO_TO_DESTOS = {
'__linux__'                                      : 'linux',
'__GNU__'                                        : 'gnu', # hurd
'__FreeBSD__'                                    : 'freebsd',
'__NetBSD__'                                     : 'netbsd',
'__OpenBSD__'                                    : 'openbsd',
'__sun'                                          : 'sunos',
'__hpux'                                         : 'hpux',
'__sgi'                                          : 'irix',
'_AIX'                                           : 'aix',
'__CYGWIN__'                                     : 'cygwin',
'__MSYS__'                                       : 'cygwin',
'_UWIN'                                          : 'uwin',
'_WIN64'                                         : 'win32',
'_WIN32'                                         : 'win32',
# Note about darwin: this is also tested with 'defined __APPLE__ && defined __MACH__' somewhere below in this file.
'__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__'  : 'darwin',
'__ENVIRONMENT_IPHONE_OS_VERSION_MIN_REQUIRED__' : 'darwin', # iphone
'__QNX__'                                        : 'qnx',
'__native_client__'                              : 'nacl' # google native client platform
}

# Maps compiler-predefined architecture macros to waf CPU names
# (companion table to MACRO_TO_DESTOS above)
MACRO_TO_DEST_CPU = {
'__x86_64__'  : 'x86_64',
'__amd64__'   : 'x86_64',
'__i386__'    : 'x86',
'__ia64__'    : 'ia',
'__mips__'    : 'mips',
'__sparc__'   : 'sparc',
'__alpha__'   : 'alpha',
'__aarch64__' : 'aarch64',
'__thumb__'   : 'thumb',
'__arm__'     : 'arm',
'__hppa__'    : 'hppa',
'__powerpc__' : 'powerpc',
'__ppc__'     : 'powerpc',
'__convex__'  : 'convex',
'__m68k__'    : 'm68k',
'__s390x__'   : 's390x',
'__s390__'    : 's390',
'__sh__'      : 'sh',
'__xtensa__'  : 'xtensa',
}
+
@conf
def parse_flags(self, line, uselib_store, env=None, force_static=False, posix=None):
	"""
	Parses flags from the input lines, and adds them to the relevant use variables::

		def configure(conf):
			conf.parse_flags('-O3', 'FOO')
			# conf.env.CXXFLAGS_FOO = ['-O3']
			# conf.env.CFLAGS_FOO = ['-O3']

	:param line: flags
	:type line: string
	:param uselib_store: where to add the flags
	:type uselib_store: string
	:param env: config set or conf.env by default
	:type env: :py:class:`waflib.ConfigSet.ConfigSet`
	:param force_static: route -l/-L flags to STLIB/STLIBPATH instead of LIB/LIBPATH
	:type force_static: bool
	:param posix: lex the line in posix mode (auto-detected when None)
	:type posix: bool
	"""

	assert(isinstance(line, str))

	env = env or self.env

	# Issue 811 and 1371: a lone backslash usually denotes a windows path,
	# while '\ ' or '\\' indicate posix-style escaping
	if posix is None:
		posix = True
		if '\\' in line:
			posix = ('\\ ' in line) or ('\\\\' in line)

	lex = shlex.shlex(line, posix=posix)
	lex.whitespace_split = True
	lex.commenters = ''
	lst = list(lex)

	# append_unique is not always possible
	# for example, apple flags may require both -arch i386 and -arch ppc
	uselib = uselib_store
	def app(var, val):
		# append, allowing duplicates (order and repetition can matter)
		env.append_value('%s_%s' % (var, uselib), val)
	def appu(var, val):
		# append only if not already present
		env.append_unique('%s_%s' % (var, uselib), val)
	static = False
	while lst:
		x = lst.pop(0)
		st = x[:2]
		ot = x[2:]

		if st == '-I' or st == '/I':
			if not ot:
				ot = lst.pop(0)
			appu('INCLUDES', ot)
		elif st == '-i':
			tmp = [x, lst.pop(0)]
			app('CFLAGS', tmp)
			app('CXXFLAGS', tmp)
		elif st == '-D' or (env.CXX_NAME == 'msvc' and st == '/D'): # not perfect but..
			if not ot:
				ot = lst.pop(0)
			app('DEFINES', ot)
		elif st == '-l':
			if not ot:
				ot = lst.pop(0)
			prefix = 'STLIB' if (force_static or static) else 'LIB'
			app(prefix, ot)
		elif st == '-L':
			if not ot:
				ot = lst.pop(0)
			prefix = 'STLIBPATH' if (force_static or static) else 'LIBPATH'
			appu(prefix, ot)
		elif x.startswith('/LIBPATH:'):
			prefix = 'STLIBPATH' if (force_static or static) else 'LIBPATH'
			appu(prefix, x.replace('/LIBPATH:', ''))
		elif x.startswith('-std='):
			prefix = 'CXXFLAGS' if '++' in x else 'CFLAGS'
			app(prefix, x)
		elif x.startswith('+') or x in ('-pthread', '-fPIC', '-fpic', '-fPIE', '-fpie'):
			app('CFLAGS', x)
			app('CXXFLAGS', x)
			app('LINKFLAGS', x)
		elif x == '-framework':
			appu('FRAMEWORK', lst.pop(0))
		elif x.startswith('-F'):
			appu('FRAMEWORKPATH', x[2:])
		elif x == '-Wl,-rpath' or x == '-Wl,-R':
			# the rpath value is the next token, possibly still carrying a '-Wl,'
			# prefix. Bug fix: the previous code used lstrip('-Wl,'), which strips
			# the *character set* {'-', 'W', 'l', ','} and would mangle any path
			# beginning with one of those characters; strip the exact prefix instead.
			rpath = lst.pop(0)
			if rpath.startswith('-Wl,'):
				rpath = rpath[4:]
			app('RPATH', rpath)
		elif x.startswith('-Wl,-R,'):
			app('RPATH', x[7:])
		elif x.startswith('-Wl,-R'):
			app('RPATH', x[6:])
		elif x.startswith('-Wl,-rpath,'):
			app('RPATH', x[11:])
		elif x == '-Wl,-Bstatic' or x == '-Bstatic':
			# toggle: subsequent -l flags bind statically until -Bdynamic
			static = True
		elif x == '-Wl,-Bdynamic' or x == '-Bdynamic':
			static = False
		elif x.startswith('-Wl') or x in ('-rdynamic', '-pie'):
			app('LINKFLAGS', x)
		elif x.startswith(('-m', '-f', '-dynamic', '-O')):
			app('CFLAGS', x)
			app('CXXFLAGS', x)
		elif x.startswith('-bundle'):
			app('LINKFLAGS', x)
		elif x.startswith(('-undefined', '-Xlinker')):
			arg = lst.pop(0)
			app('LINKFLAGS', [x, arg])
		elif x.startswith(('-arch', '-isysroot')):
			tmp = [x, lst.pop(0)]
			app('CFLAGS', tmp)
			app('CXXFLAGS', tmp)
			app('LINKFLAGS', tmp)
		elif x.endswith(('.a', '.so', '.dylib', '.lib')):
			appu('LINKFLAGS', x) # not cool, #762
		else:
			self.to_log('Unhandled flag %r' % x)
+
@conf
def validate_cfg(self, kw):
	"""
	Locates the *pkg-config* program when no explicit path is given, and
	fills in the default parameters expected by
	:py:func:`waflib.Tools.c_config.exec_cfg`.

	:param path: the **-config program to use** (default is *pkg-config*)
	:type path: list of string
	:param msg: message to display to describe the test executed
	:type msg: string
	:param okmsg: message to display when the test is successful
	:type okmsg: string
	:param errmsg: message to display in case of error
	:type errmsg: string
	"""
	if 'path' not in kw:
		# locate pkg-config once and cache the result on the environment
		if not self.env.PKGCONFIG:
			self.find_program('pkg-config', var='PKGCONFIG')
		kw['path'] = self.env.PKGCONFIG

	# this request only checks the version of pkg-config itself
	if 'atleast_pkgconfig_version' in kw:
		kw.setdefault('msg', 'Checking for pkg-config version >= %r' % kw['atleast_pkgconfig_version'])
		return

	kw.setdefault('okmsg', 'yes')
	kw.setdefault('errmsg', 'not found')

	# this request only retrieves the version of a module
	if 'modversion' in kw:
		kw.setdefault('msg', 'Checking for %r version' % kw['modversion'])
		kw.setdefault('uselib_store', kw['modversion'])
		if 'define_name' not in kw:
			kw['define_name'] = '%s_VERSION' % Utils.quote_define_name(kw['uselib_store'])
		return

	if 'package' not in kw:
		raise ValueError('a package name is required')

	kw.setdefault('uselib_store', kw['package'].upper())

	if 'define_name' not in kw:
		kw['define_name'] = self.have_define(kw['uselib_store'])

	kw.setdefault('msg', 'Checking for %r' % (kw['package'] or kw['path']))

	for predicate in cfg_ver:
		# Gotcha: only one predicate is allowed at a time
		# TODO remove in waf 2.0
		key = predicate.replace('-', '_')
		if key in kw:
			package = kw['package']
			if Logs.verbose:
				Logs.warn('Passing %r to conf.check_cfg() is obsolete, pass parameters directly, eg:', key)
				Logs.warn(" conf.check_cfg(package='%s', args=['--libs', '--cflags', '%s >= 1.6'])", package, package)
			kw.setdefault('msg', 'Checking for %r %s %s' % (package, cfg_ver[predicate], kw[key]))
			break
+
@conf
def exec_cfg(self, kw):
	"""
	Executes ``pkg-config`` or other ``-config`` applications to collect configuration flags:

	* if atleast_pkgconfig_version is given, check that pkg-config has the version n and return
	* if modversion is given, then return the module version
	* else, execute the *-config* program with the *args* and *variables* given, and set the flags on the *conf.env.FLAGS_name* variable

	:param atleast_pkgconfig_version: minimum pkg-config version to use (disable other tests)
	:type atleast_pkgconfig_version: string
	:param package: package name, for example *gtk+-2.0*
	:type package: string
	:param uselib_store: if the test is successful, define HAVE\_*name*. It is also used to define *conf.env.FLAGS_name* variables.
	:type uselib_store: string
	:param modversion: if provided, return the version of the given module and define *name*\_VERSION
	:type modversion: string
	:param args: arguments to give to *package* when retrieving flags
	:type args: list of string
	:param variables: return the values of particular variables
	:type variables: list of string
	:param define_variable: additional variables to define (also in conf.env.PKG_CONFIG_DEFINES)
	:type define_variable: dict(string: string)
	"""

	path = Utils.to_list(kw['path'])
	# environment for the child process; PKG_CONFIG_PATH may be injected below
	env = self.env.env or None
	if kw.get('pkg_config_path'):
		if not env:
			env = dict(self.environ)
		env['PKG_CONFIG_PATH'] = kw['pkg_config_path']

	def define_it():
		define_name = kw['define_name']
		# by default, add HAVE_X to the config.h, else provide DEFINES_X for use=X
		if kw.get('global_define', 1):
			self.define(define_name, 1, False)
		else:
			self.env.append_unique('DEFINES_%s' % kw['uselib_store'], "%s=1" % define_name)

		if kw.get('add_have_to_env', 1):
			self.env[define_name] = 1

	# pkg-config version
	if 'atleast_pkgconfig_version' in kw:
		# cmd_and_log raises on a non-zero exit status, which aborts the test
		cmd = path + ['--atleast-pkgconfig-version=%s' % kw['atleast_pkgconfig_version']]
		self.cmd_and_log(cmd, env=env)
		if not 'okmsg' in kw:
			kw['okmsg'] = 'yes'
		return

	# obsolete per-predicate version checks (see cfg_ver above)
	for x in cfg_ver:
		# TODO remove in waf 2.0
		y = x.replace('-', '_')
		if y in kw:
			self.cmd_and_log(path + ['--%s=%s' % (x, kw[y]), kw['package']], env=env)
			if not 'okmsg' in kw:
				kw['okmsg'] = 'yes'
			define_it()
			break

	# single version for a module
	if 'modversion' in kw:
		version = self.cmd_and_log(path + ['--modversion', kw['modversion']], env=env).strip()
		self.define(kw['define_name'], version)
		return version

	# copy so the shared 'path' list is not mutated below
	lst = [] + path

	defi = kw.get('define_variable')
	if not defi:
		defi = self.env.PKG_CONFIG_DEFINES or {}
	for key, val in defi.items():
		lst.append('--define-variable=%s=%s' % (key, val))

	static = kw.get('force_static', False)
	if 'args' in kw:
		args = Utils.to_list(kw['args'])
		if '--static' in args or '--static-libs' in args:
			static = True
		lst += args

	# tools like pkgconf expect the package argument after the -- ones -_-
	lst.extend(Utils.to_list(kw['package']))

	# retrieving variables of a module
	if 'variables' in kw:
		# results are stored as <uselib_store>_<variable> on the config set
		v_env = kw.get('env', self.env)
		vars = Utils.to_list(kw['variables'])
		for v in vars:
			val = self.cmd_and_log(lst + ['--variable=' + v], env=env).strip()
			var = '%s_%s' % (kw['uselib_store'], v)
			v_env[var] = val
		if not 'okmsg' in kw:
			kw['okmsg'] = 'yes'
		return

	# so we assume the command-line will output flags to be parsed afterwards
	ret = self.cmd_and_log(lst, env=env)
	if not 'okmsg' in kw:
		kw['okmsg'] = 'yes'

	define_it()
	self.parse_flags(ret, kw['uselib_store'], kw.get('env', self.env), force_static=static, posix=kw.get('posix'))
	return ret
+
@conf
def check_cfg(self, *k, **kw):
	"""
	Checks for configuration flags using a **-config**-like program
	(pkg-config, sdl-config, etc). This wraps internal calls to
	:py:func:`waflib.Tools.c_config.validate_cfg` and
	:py:func:`waflib.Tools.c_config.exec_cfg`.

	A few examples::

		def configure(conf):
			conf.load('compiler_c')
			conf.check_cfg(package='glib-2.0', args='--libs --cflags')
			conf.check_cfg(package='pango', uselib_store='MYPANGO', args=['--cflags', '--libs'])
			conf.check_cfg(path='sdl-config', args='--cflags --libs', package='', uselib_store='SDL')
			# retrieving pkg-config variables:
			conf.check_cfg(package='gtk+-2.0', variables=['includedir', 'prefix'], uselib_store='FOO')
			print(conf.env.FOO_includedir)
	"""
	if k:
		# positional form: a single string 'package --arg1 --arg2 ...'
		tokens = k[0].split()
		kw['package'] = tokens[0]
		kw['args'] = ' '.join(tokens[1:])

	self.validate_cfg(kw)
	if 'msg' in kw:
		self.start_msg(kw['msg'], **kw)
	ret = None
	try:
		ret = self.exec_cfg(kw)
	except self.errors.WafError:
		if 'errmsg' in kw:
			self.end_msg(kw['errmsg'], 'YELLOW', **kw)
		if Logs.verbose > 1:
			raise
		# fatal() raises, so no 'else' is needed here
		self.fatal('The configuration failed')
	else:
		# normalize falsy results so callers can test the return value
		ret = ret or True
		kw['success'] = ret
		if 'okmsg' in kw:
			self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw)

	return ret
+
def build_fun(bld):
	"""
	Build function used by ``conf.check()`` to set up a one-shot
	compilation test inside a temporary build context
	"""
	kw = bld.kw
	if kw['compile_filename']:
		# materialize the test fragment under the temporary source directory
		bld.srcnode.make_node(kw['compile_filename']).write(kw['code'])

	tg = bld(features=kw['features'], source=kw['compile_filename'], target='testprog')

	# forward every keyword to the task generator (use, defines, flags, ...)
	for key, val in kw.items():
		setattr(tg, key, val)

	if not kw.get('quiet'):
		bld.conf.to_log("==>\n%s\n<==" % kw['code'])
+
@conf
def validate_c(self, kw):
	"""
	Pre-checks the parameters that will be given to :py:func:`waflib.Configure.run_build`

	:param compiler: c or cxx (tries to guess what is best)
	:type compiler: string
	:param type: cprogram, cshlib, cstlib - not required if *features are given directly*
	:type type: binary to create
	:param feature: desired features for the task generator that will execute the test, for example ``cxx cxxstlib``
	:type feature: list of string
	:param fragment: provide a piece of code for the test (default is to let the system create one)
	:type fragment: string
	:param uselib_store: define variables after the test is executed (IMPORTANT!)
	:type uselib_store: string
	:param use: parameters to use for building (just like the normal *use* keyword)
	:type use: list of string
	:param define_name: define to set when the check is over
	:type define_name: string
	:param execute: execute the resulting binary
	:type execute: bool
	:param define_ret: if execute is set to True, use the execution output in both the define and the return value
	:type define_ret: bool
	:param header_name: check for a particular header
	:type header_name: string
	:param auto_add_header_name: if header_name was set, add the headers in env.INCKEYS so the next tests will include these headers
	:type auto_add_header_name: bool
	"""

	# the build function that run_build will call (see build_fun above)
	if not 'build_fun' in kw:
		kw['build_fun'] = build_fun

	# work on a derived config set so the test cannot corrupt conf.env
	if not 'env' in kw:
		kw['env'] = self.env.derive()
	env = kw['env']

	# pick a compiler: prefer c++ when a c++ compiler is configured
	if not 'compiler' in kw and not 'features' in kw:
		kw['compiler'] = 'c'
		if env.CXX_NAME and Task.classes.get('cxx'):
			kw['compiler'] = 'cxx'
			if not self.env.CXX:
				self.fatal('a c++ compiler is required')
		else:
			if not self.env.CC:
				self.fatal('a c compiler is required')

	if not 'compile_mode' in kw:
		kw['compile_mode'] = 'c'
		if 'cxx' in Utils.to_list(kw.get('features',[])) or kw.get('compiler', '') == 'cxx':
			kw['compile_mode'] = 'cxx'

	if not 'type' in kw:
		kw['type'] = 'cprogram'

	# derive the task generator features from compile_mode/type unless given
	if not 'features' in kw:
		if not 'header_name' in kw or kw.get('link_header_test', True):
			kw['features'] = [kw['compile_mode'], kw['type']] # "c ccprogram"
		else:
			kw['features'] = [kw['compile_mode']]
	else:
		kw['features'] = Utils.to_list(kw['features'])

	# 'test.c' or 'test.cpp' depending on the compile mode
	if not 'compile_filename' in kw:
		kw['compile_filename'] = 'test.c' + ((kw['compile_mode'] == 'cxx') and 'pp' or '')

	def to_header(dct):
		# turn header_name into '#include <...>' lines to prepend to the fragment
		if 'header_name' in dct:
			dct = Utils.to_list(dct['header_name'])
			return ''.join(['#include <%s>\n' % x for x in dct])
		return ''

	#OSX
	if 'framework_name' in kw:
		fwkname = kw['framework_name']
		if not 'uselib_store' in kw:
			kw['uselib_store'] = fwkname.upper()
		if not kw.get('no_header', False):
			# by convention, the framework main header is <Name/Name.h>
			if not 'header_name' in kw:
				kw['header_name'] = []
			fwk = '%s/%s.h' % (fwkname, fwkname)
			if kw.get('remove_dot_h'):
				fwk = fwk[:-2]
			kw['header_name'] = Utils.to_list(kw['header_name']) + [fwk]

		kw['msg'] = 'Checking for framework %s' % fwkname
		kw['framework'] = fwkname
		#kw['frameworkpath'] = set it yourself

	# build the test fragment from one of: function_name, type_name (+field_name),
	# header_name - later overridden by an explicit 'fragment' if given
	if 'function_name' in kw:
		fu = kw['function_name']
		if not 'msg' in kw:
			kw['msg'] = 'Checking for function %s' % fu
		kw['code'] = to_header(kw) + SNIP_FUNCTION % fu
		if not 'uselib_store' in kw:
			kw['uselib_store'] = fu.upper()
		if not 'define_name' in kw:
			kw['define_name'] = self.have_define(fu)

	elif 'type_name' in kw:
		tu = kw['type_name']
		if not 'header_name' in kw:
			kw['header_name'] = 'stdint.h'
		if 'field_name' in kw:
			field = kw['field_name']
			kw['code'] = to_header(kw) + SNIP_FIELD % {'type_name' : tu, 'field_name' : field}
			if not 'msg' in kw:
				kw['msg'] = 'Checking for field %s in %s' % (field, tu)
			if not 'define_name' in kw:
				kw['define_name'] = self.have_define((tu + '_' + field).upper())
		else:
			kw['code'] = to_header(kw) + SNIP_TYPE % {'type_name' : tu}
			if not 'msg' in kw:
				kw['msg'] = 'Checking for type %s' % tu
			if not 'define_name' in kw:
				kw['define_name'] = self.have_define(tu.upper())

	elif 'header_name' in kw:
		if not 'msg' in kw:
			kw['msg'] = 'Checking for header %s' % kw['header_name']

		l = Utils.to_list(kw['header_name'])
		assert len(l), 'list of headers in header_name is empty'

		kw['code'] = to_header(kw) + SNIP_EMPTY_PROGRAM
		if not 'uselib_store' in kw:
			kw['uselib_store'] = l[0].upper()
		if not 'define_name' in kw:
			kw['define_name'] = self.have_define(l[0])

	if 'lib' in kw:
		if not 'msg' in kw:
			kw['msg'] = 'Checking for library %s' % kw['lib']
		if not 'uselib_store' in kw:
			kw['uselib_store'] = kw['lib'].upper()

	if 'stlib' in kw:
		if not 'msg' in kw:
			kw['msg'] = 'Checking for static library %s' % kw['stlib']
		if not 'uselib_store' in kw:
			kw['uselib_store'] = kw['stlib'].upper()

	if 'fragment' in kw:
		# an additional code fragment may be provided to replace the predefined code
		# in custom headers
		kw['code'] = kw['fragment']
		if not 'msg' in kw:
			kw['msg'] = 'Checking for code snippet'
		if not 'errmsg' in kw:
			kw['errmsg'] = 'no'

	for (flagsname,flagstype) in (('cxxflags','compiler'), ('cflags','compiler'), ('linkflags','linker')):
		if flagsname in kw:
			if not 'msg' in kw:
				kw['msg'] = 'Checking for %s flags %s' % (flagstype, kw[flagsname])
			if not 'errmsg' in kw:
				kw['errmsg'] = 'no'

	# running the binary requires the test_exec feature (see test_exec_fun below)
	if not 'execute' in kw:
		kw['execute'] = False
	if kw['execute']:
		kw['features'].append('test_exec')
		kw['chmod'] = Utils.O755

	if not 'errmsg' in kw:
		kw['errmsg'] = 'not found'

	if not 'okmsg' in kw:
		kw['okmsg'] = 'yes'

	if not 'code' in kw:
		kw['code'] = SNIP_EMPTY_PROGRAM

	# if there are headers to append automatically to the next tests
	if self.env[INCKEYS]:
		kw['code'] = '\n'.join(['#include <%s>' % x for x in self.env[INCKEYS]]) + '\n' + kw['code']

	# in case defines lead to very long command-lines
	if kw.get('merge_config_header', False) or env.merge_config_header:
		kw['code'] = '%s\n\n%s' % (self.get_config_header(), kw['code'])
		env.DEFINES = [] # modify the copy

	if not kw.get('success'): kw['success'] = None

	# start from a clean slate; post_check re-defines it on success
	if 'define_name' in kw:
		self.undefine(kw['define_name'])
	if not 'msg' in kw:
		self.fatal('missing "msg" in conf.check(...)')
+
@conf
def post_check(self, *k, **kw):
	"""
	Sets the variables after a test executed in
	:py:func:`waflib.Tools.c_config.check` was run successfully

	:return: broadly truthy on success: for executed tests with define_ret,
		the program output; otherwise a boolean derived from the exit/build status
	"""
	# 'success' holds the build/execution result; note both build and
	# execution report 0 for success, hence the (== 0) comparisons
	is_success = 0
	if kw['execute']:
		if kw['success'] is not None:
			if kw.get('define_ret', False):
				# keep the program output itself (may be a string)
				is_success = kw['success']
			else:
				is_success = (kw['success'] == 0)
	else:
		is_success = (kw['success'] == 0)

	if kw.get('define_name'):
		# TODO this is still way too complicated
		comment = kw.get('comment', '')
		define_name = kw['define_name']
		if kw['execute'] and kw.get('define_ret') and isinstance(is_success, str):
			# string output: define verbatim (quoted by default)
			if kw.get('global_define', 1):
				self.define(define_name, is_success, quote=kw.get('quote', 1), comment=comment)
			else:
				# per-use define: goes to DEFINES_<uselib_store> instead of config.h
				if kw.get('quote', 1):
					succ = '"%s"' % is_success
				else:
					succ = int(is_success)
				val = '%s=%s' % (define_name, succ)
				var = 'DEFINES_%s' % kw['uselib_store']
				self.env.append_value(var, val)
		else:
			# boolean outcome: define to 1 or comment it out
			if kw.get('global_define', 1):
				self.define_cond(define_name, is_success, comment=comment)
			else:
				var = 'DEFINES_%s' % kw['uselib_store']
				self.env.append_value(var, '%s=%s' % (define_name, int(is_success)))

		# define conf.env.HAVE_X to 1
		if kw.get('add_have_to_env', 1):
			if kw.get('uselib_store'):
				self.env[self.have_define(kw['uselib_store'])] = 1
			elif kw['execute'] and kw.get('define_ret'):
				self.env[define_name] = is_success
			else:
				self.env[define_name] = int(is_success)

	if 'header_name' in kw:
		if kw.get('auto_add_header_name', False):
			# these headers will be prepended to all subsequent tests (see validate_c)
			self.env.append_value(INCKEYS, Utils.to_list(kw['header_name']))

	if is_success and 'uselib_store' in kw:
		from waflib.Tools import ccroot
		# See get_uselib_vars in ccroot.py
		# propagate cflags/lib/includes/... keywords to <VAR>_<uselib_store>
		_vars = set()
		for x in kw['features']:
			if x in ccroot.USELIB_VARS:
				_vars |= ccroot.USELIB_VARS[x]

		for k in _vars:
			x = k.lower()
			if x in kw:
				self.env.append_value(k + '_' + kw['uselib_store'], kw[x])
	return is_success
+
@conf
def check(self, *k, **kw):
	"""
	Performs a configuration test by calling :py:func:`waflib.Configure.run_build`.
	The full parameter list is documented in :py:func:`waflib.Tools.c_config.validate_c`.
	Pass ``compiler='c'`` or ``compiler='cxx'`` to force a particular compiler.

	Instead of simple targets, a whole build may be described through a build
	function; all files are then written to a temporary directory::

		def build(bld):
			lib_node = bld.srcnode.make_node('libdir/liblc1.c')
			lib_node.parent.mkdir()
			lib_node.write('#include <stdio.h>\\nint lib_func(void) { FILE *f = fopen("foo", "r");}\\n', 'w')
			bld(features='c cshlib', source=[lib_node], linkflags=conf.env.EXTRA_LDFLAGS, target='liblc')
		conf.check(build_fun=build, msg=msg)
	"""
	self.validate_c(kw)
	self.start_msg(kw['msg'], **kw)
	ret = None
	try:
		ret = self.run_build(*k, **kw)
	except self.errors.ConfigurationError:
		self.end_msg(kw['errmsg'], 'YELLOW', **kw)
		if Logs.verbose > 1:
			raise
		# fatal() raises, so no 'else' is needed here
		self.fatal('The configuration failed')
	else:
		kw['success'] = ret

	# let post_check interpret the result and set the defines/env variables
	ret = self.post_check(*k, **kw)
	if not ret:
		self.end_msg(kw['errmsg'], 'YELLOW', **kw)
		self.fatal('The configuration failed %r' % ret)
	self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw)
	return ret
+
class test_exec(Task.Task):
	"""
	A task that runs programs right after they are built. See
	:py:func:`waflib.Tools.c_config.test_exec_fun`.
	"""
	color = 'PINK'
	def run(self):
		# run the freshly built binary; store either its output (define_ret)
		# or its exit status on the build context
		cmd = [self.inputs[0].abspath()]
		bld = self.generator.bld
		if getattr(self.generator, 'rpath', None):
			# an rpath was baked in, no need to tweak the library search path
			kwargs = {}
		else:
			# augment the library search paths so the binary finds the
			# libraries that were just built next to it
			env = self.env.env or {}
			env.update(dict(os.environ))
			for var in ('LD_LIBRARY_PATH', 'DYLD_LIBRARY_PATH', 'PATH'):
				env[var] = self.inputs[0].parent.abspath() + os.path.pathsep + env.get(var, '')
			kwargs = {'env': env}
		if getattr(self.generator, 'define_ret', False):
			bld.retval = bld.cmd_and_log(cmd, **kwargs)
		else:
			bld.retval = bld.exec_command(cmd, **kwargs)
+
+@feature('test_exec')
+@after_method('apply_link')
+def test_exec_fun(self):
+       """
+       The feature **test_exec** is used to create a task that will to execute the binary
+       created (link task output) during the build. The exit status will be set
+       on the build context, so only one program may have the feature *test_exec*.
+       This is used by configuration tests::
+
+               def configure(conf):
+                       conf.check(execute=True)
+       """
+       self.create_task('test_exec', self.link_task.outputs[0])
+
+@conf
+def check_cxx(self, *k, **kw):
+	"""
+	Runs a test with a task generator of the form::
+
+		conf.check(features='cxx cxxprogram', ...)
+	"""
+	# force the C++ compiler, see the 'compiler' parameter in check()
+	kw['compiler'] = 'cxx'
+	return self.check(*k, **kw)
+
+@conf
+def check_cc(self, *k, **kw):
+	"""
+	Runs a test with a task generator of the form::
+
+		conf.check(features='c cprogram', ...)
+	"""
+	# force the C compiler, see the 'compiler' parameter in check()
+	kw['compiler'] = 'c'
+	return self.check(*k, **kw)
+
+@conf
+def set_define_comment(self, key, comment):
+	"""
+	Sets a comment that will appear in the configuration header
+
+	:type key: string
+	:type comment: string
+	"""
+	# comments are kept in a dict on the environment, created lazily
+	coms = self.env.DEFINE_COMMENTS
+	if not coms:
+		coms = self.env.DEFINE_COMMENTS = {}
+	coms[key] = comment or ''
+
+@conf
+def get_define_comment(self, key):
+	"""
+	Returns the comment associated to a define (empty string if none was set)
+
+	:type key: string
+	"""
+	coms = self.env.DEFINE_COMMENTS or {}
+	return coms.get(key, '')
+
+@conf
+def define(self, key, val, quote=True, comment=''):
+	"""
+	Stores a single define and its state into ``conf.env.DEFINES``.
+	Booleans and None values are converted to integers (1/0).
+
+	:param key: define name
+	:type key: string
+	:param val: value
+	:type val: int or string
+	:param quote: enclose strings in quotes (yes by default)
+	:type quote: bool
+	:param comment: comment to associate to the define in the configuration header
+	:type comment: string
+	"""
+	assert isinstance(key, str)
+	if not key:
+		return
+	if val is True:
+		val = 1
+	elif val in (False, None):
+		val = 0
+
+	if isinstance(val, int) or isinstance(val, float):
+		s = '%s=%s'
+	else:
+		# quote string values unless quote=False was requested
+		s = quote and '%s="%s"' or '%s=%s'
+	app = s % (key, str(val))
+
+	# replace an existing define for the same key in place, else append a new one
+	ban = key + '='
+	lst = self.env.DEFINES
+	for x in lst:
+		if x.startswith(ban):
+			lst[lst.index(x)] = app
+			break
+	else:
+		self.env.append_value('DEFINES', app)
+
+	self.env.append_unique(DEFKEYS, key)
+	self.set_define_comment(key, comment)
+
+@conf
+def undefine(self, key, comment=''):
+	"""
+	Removes a global define from ``conf.env.DEFINES``
+
+	:param key: define name
+	:type key: string
+	:param comment: comment to associate to the key in the configuration header
+	:type comment: string
+	"""
+	assert isinstance(key, str)
+	if not key:
+		return
+	ban = key + '='
+	lst = [x for x in self.env.DEFINES if not x.startswith(ban)]
+	self.env.DEFINES = lst
+	# the key stays in DEFKEYS so that '#undef' appears in the configuration header
+	self.env.append_unique(DEFKEYS, key)
+	self.set_define_comment(key, comment)
+
+@conf
+def define_cond(self, key, val, comment=''):
+	"""
+	Conditionally defines a name::
+
+		def configure(conf):
+			conf.define_cond('A', True)
+			# equivalent to:
+			# if val: conf.define('A', 1)
+			# else: conf.undefine('A')
+
+	:param key: define name
+	:type key: string
+	:param val: value
+	:type val: int or string
+	"""
+	assert isinstance(key, str)
+	if not key:
+		return
+	if val:
+		self.define(key, 1, comment=comment)
+	else:
+		self.undefine(key, comment=comment)
+
+@conf
+def is_defined(self, key):
+	"""
+	Indicates whether a particular define is globally set in ``conf.env.DEFINES``.
+
+	:param key: define name
+	:type key: string
+	:return: True if the define is set
+	:rtype: bool
+	"""
+	assert key and isinstance(key, str)
+
+	# DEFINES entries have the form 'KEY=value'
+	ban = key + '='
+	for x in self.env.DEFINES:
+		if x.startswith(ban):
+			return True
+	return False
+
+@conf
+def get_define(self, key):
+	"""
+	Returns the value of an existing define, or None if not found
+
+	:param key: define name
+	:type key: string
+	:rtype: string
+	"""
+	assert key and isinstance(key, str)
+
+	ban = key + '='
+	for x in self.env.DEFINES:
+		if x.startswith(ban):
+			# strip the 'KEY=' prefix to return the raw value
+			return x[len(ban):]
+	return None
+
+@conf
+def have_define(self, key):
+	"""
+	Returns a variable suitable for command-line or header use by removing invalid characters
+	and prefixing it with ``HAVE_``
+
+	:param key: define name
+	:type key: string
+	:return: the input key prefixed by *HAVE_* and substitute any invalid characters.
+	:rtype: string
+	"""
+	# HAVE_PAT lets projects override the default 'HAVE_%s' pattern
+	return (self.env.HAVE_PAT or 'HAVE_%s') % Utils.quote_define_name(key)
+
+@conf
+def write_config_header(self, configfile='', guard='', top=False, defines=True, headers=False, remove=True, define_prefix=''):
+	"""
+	Writes a configuration header containing defines and includes::
+
+		def configure(cnf):
+			cnf.define('A', 1)
+			cnf.write_config_header('config.h')
+
+	This function only adds include guards (if necessary), consult
+	:py:func:`waflib.Tools.c_config.get_config_header` for details on the body.
+
+	:param configfile: path to the file to create (relative or absolute)
+	:type configfile: string
+	:param guard: include guard name to add, by default it is computed from the file name
+	:type guard: string
+	:param top: write the configuration header from the build directory (default is from the current path)
+	:type top: bool
+	:param defines: add the defines (yes by default)
+	:type defines: bool
+	:param headers: add #include in the file
+	:type headers: bool
+	:param remove: remove the defines after they are added (yes by default, works like in autoconf)
+	:type remove: bool
+	:type define_prefix: string
+	:param define_prefix: prefix all the defines in the file with a particular prefix
+	"""
+	if not configfile: configfile = WAF_CONFIG_H
+	waf_guard = guard or 'W_%s_WAF' % Utils.quote_define_name(configfile)
+
+	# write either from the build root or from the current path's build node
+	node = top and self.bldnode or self.path.get_bld()
+	node = node.make_node(configfile)
+	node.parent.mkdir()
+
+	lst = ['/* WARNING! All changes made to this file will be lost! */\n']
+	lst.append('#ifndef %s\n#define %s\n' % (waf_guard, waf_guard))
+	lst.append(self.get_config_header(defines, headers, define_prefix=define_prefix))
+	lst.append('\n#endif /* %s */\n' % waf_guard)
+
+	node.write('\n'.join(lst))
+
+	# config files must not be removed on "waf clean"
+	self.env.append_unique(Build.CFG_FILES, [node.abspath()])
+
+	if remove:
+		# autoconf-like behaviour: consume the defines once written
+		for key in self.env[DEFKEYS]:
+			self.undefine(key)
+		self.env[DEFKEYS] = []
+
+@conf
+def get_config_header(self, defines=True, headers=False, define_prefix=''):
+	"""
+	Creates the contents of a ``config.h`` file from the defines and includes
+	set in conf.env.define_key / conf.env.include_key. No include guards are added.
+
+	A prelude will be added from the variable env.WAF_CONFIG_H_PRELUDE if provided. This
+	can be used to insert complex macros or include guards::
+
+		def configure(conf):
+			conf.env.WAF_CONFIG_H_PRELUDE = '#include <unistd.h>\\n'
+			conf.write_config_header('config.h')
+
+	:param defines: write the defines values
+	:type defines: bool
+	:param headers: write include entries for each element in self.env.INCKEYS
+	:type headers: bool
+	:type define_prefix: string
+	:param define_prefix: prefix all the defines with a particular prefix
+	:return: the contents of a ``config.h`` file
+	:rtype: string
+	"""
+	lst = []
+
+	if self.env.WAF_CONFIG_H_PRELUDE:
+		lst.append(self.env.WAF_CONFIG_H_PRELUDE)
+
+	if headers:
+		for x in self.env[INCKEYS]:
+			lst.append('#include <%s>' % x)
+
+	if defines:
+		# map define names to their values; DEFINES entries have the form 'KEY=value'
+		tbl = {}
+		for k in self.env.DEFINES:
+			a, _, b = k.partition('=')
+			tbl[a] = b
+
+		for k in self.env[DEFKEYS]:
+			caption = self.get_define_comment(k)
+			if caption:
+				caption = ' /* %s */' % caption
+			try:
+				txt = '#define %s%s %s%s' % (define_prefix, k, tbl[k], caption)
+			except KeyError:
+				# the key was undefined: emit a commented-out #undef like autoconf
+				txt = '/* #undef %s%s */%s' % (define_prefix, k, caption)
+			lst.append(txt)
+	return "\n".join(lst)
+
+@conf
+def cc_add_flags(conf):
+	"""
+	Adds CFLAGS / CPPFLAGS from os.environ to conf.env
+	"""
+	# NOTE(review): dup=False presumably avoids duplicating flag values -- see add_os_flags
+	conf.add_os_flags('CPPFLAGS', dup=False)
+	conf.add_os_flags('CFLAGS', dup=False)
+
+@conf
+def cxx_add_flags(conf):
+	"""
+	Adds CXXFLAGS / CPPFLAGS from os.environ to conf.env
+	"""
+	conf.add_os_flags('CPPFLAGS', dup=False)
+	conf.add_os_flags('CXXFLAGS', dup=False)
+
+@conf
+def link_add_flags(conf):
+	"""
+	Adds LINKFLAGS / LDFLAGS from os.environ to conf.env
+	"""
+	conf.add_os_flags('LINKFLAGS', dup=False)
+	conf.add_os_flags('LDFLAGS', dup=False)
+
+@conf
+def cc_load_tools(conf):
+	"""
+	Loads the Waf c extensions
+	"""
+	if not conf.env.DEST_OS:
+		conf.env.DEST_OS = Utils.unversioned_sys_platform()
+	conf.load('c')
+
+@conf
+def cxx_load_tools(conf):
+	"""
+	Loads the Waf c++ extensions
+	"""
+	if not conf.env.DEST_OS:
+		conf.env.DEST_OS = Utils.unversioned_sys_platform()
+	conf.load('cxx')
+
+@conf
+def get_cc_version(conf, cc, gcc=False, icc=False, clang=False):
+	"""
+	Runs the preprocessor to determine the gcc/icc/clang version
+
+	The variables CC_VERSION, DEST_OS, DEST_BINFMT and DEST_CPU will be set in *conf.env*
+
+	:raise: :py:class:`waflib.Errors.ConfigurationError`
+	"""
+	# -dM -E on empty input dumps the compiler's predefined macros
+	cmd = cc + ['-dM', '-E', '-']
+	env = conf.env.env or None
+	try:
+		out, err = conf.cmd_and_log(cmd, output=0, input='\n', env=env)
+	except Exception:
+		conf.fatal('Could not determine the compiler version %r' % cmd)
+
+	# sanity checks: make sure the detected compiler matches the expected family
+	if gcc:
+		if out.find('__INTEL_COMPILER') >= 0:
+			conf.fatal('The intel compiler pretends to be gcc')
+		if out.find('__GNUC__') < 0 and out.find('__clang__') < 0:
+			conf.fatal('Could not determine the compiler type')
+
+	if icc and out.find('__INTEL_COMPILER') < 0:
+		conf.fatal('Not icc/icpc')
+
+	if clang and out.find('__clang__') < 0:
+		conf.fatal('Not clang/clang++')
+	if not clang and out.find('__clang__') >= 0:
+		conf.fatal('Could not find gcc/g++ (only Clang), if renamed try eg: CC=gcc48 CXX=g++48 waf configure')
+
+	k = {}
+	if icc or gcc or clang:
+		# parse '#define NAME value' lines into a dict
+		out = out.splitlines()
+		for line in out:
+			lst = shlex.split(line)
+			if len(lst)>2:
+				key = lst[1]
+				val = lst[2]
+				k[key] = val
+
+		def isD(var):
+			return var in k
+
+		# Some documentation is available at http://predef.sourceforge.net
+		# The names given to DEST_OS must match what Utils.unversioned_sys_platform() returns.
+		if not conf.env.DEST_OS:
+			conf.env.DEST_OS = ''
+		for i in MACRO_TO_DESTOS:
+			if isD(i):
+				conf.env.DEST_OS = MACRO_TO_DESTOS[i]
+				break
+		else:
+			if isD('__APPLE__') and isD('__MACH__'):
+				conf.env.DEST_OS = 'darwin'
+			elif isD('__unix__'): # unix must be tested last as it's a generic fallback
+				conf.env.DEST_OS = 'generic'
+
+		if isD('__ELF__'):
+			conf.env.DEST_BINFMT = 'elf'
+		elif isD('__WINNT__') or isD('__CYGWIN__') or isD('_WIN32'):
+			conf.env.DEST_BINFMT = 'pe'
+			conf.env.LIBDIR = conf.env.BINDIR
+		elif isD('__APPLE__'):
+			conf.env.DEST_BINFMT = 'mac-o'
+
+		if not conf.env.DEST_BINFMT:
+			# Infer the binary format from the os name.
+			conf.env.DEST_BINFMT = Utils.destos_to_binfmt(conf.env.DEST_OS)
+
+		for i in MACRO_TO_DEST_CPU:
+			if isD(i):
+				conf.env.DEST_CPU = MACRO_TO_DEST_CPU[i]
+				break
+
+		Logs.debug('ccroot: dest platform: ' + ' '.join([conf.env[x] or '?' for x in ('DEST_OS', 'DEST_BINFMT', 'DEST_CPU')]))
+		if icc:
+			# icc encodes the version as e.g. 1210 -> (12, 1, 0)
+			ver = k['__INTEL_COMPILER']
+			conf.env.CC_VERSION = (ver[:-2], ver[-2], ver[-1])
+		else:
+			if isD('__clang__') and isD('__clang_major__'):
+				conf.env.CC_VERSION = (k['__clang_major__'], k['__clang_minor__'], k['__clang_patchlevel__'])
+			else:
+				# older clang versions and gcc
+				conf.env.CC_VERSION = (k['__GNUC__'], k['__GNUC_MINOR__'], k.get('__GNUC_PATCHLEVEL__', '0'))
+	return k
+
+@conf
+def get_xlc_version(conf, cc):
+	"""
+	Returns the Aix compiler version (sets conf.env.CC_VERSION to a (major, minor) tuple)
+
+	:raise: :py:class:`waflib.Errors.ConfigurationError`
+	"""
+	cmd = cc + ['-qversion']
+	try:
+		out, err = conf.cmd_and_log(cmd, output=0)
+	except Errors.WafError:
+		conf.fatal('Could not find xlc %r' % cmd)
+
+	# the intention is to catch the 8.0 in "IBM XL C/C++ Enterprise Edition V8.0 for AIX..."
+	for v in (r"IBM XL C/C\+\+.* V(?P<major>\d*)\.(?P<minor>\d*)",):
+		version_re = re.compile(v, re.I).search
+		match = version_re(out or err)
+		if match:
+			k = match.groupdict()
+			conf.env.CC_VERSION = (k['major'], k['minor'])
+			break
+	else:
+		conf.fatal('Could not determine the XLC version.')
+
+@conf
+def get_suncc_version(conf, cc):
+       """
+       Returns the Sun compiler version
+
+       :raise: :py:class:`waflib.Errors.ConfigurationError`
+       """
+       cmd = cc + ['-V']
+       try:
+               out, err = conf.cmd_and_log(cmd, output=0)
+       except Errors.WafError ,e:
+               # Older versions of the compiler exit with non-zero status when reporting their version
+               if not (hasattr(e, 'returncode') and hasattr(e, 'stdout') and hasattr(e, 'stderr')):
+                       conf.fatal('Could not find suncc %r' % cmd)
+               out = e.stdout
+               err = e.stderr
+
+       version = (out or err)
+       version = version.splitlines()[0]
+
+       # cc: Sun C 5.10 SunOS_i386 2009/06/03
+       # cc: Studio 12.5 Sun C++ 5.14 SunOS_sparc Beta 2015/11/17
+       # cc: WorkShop Compilers 5.0 98/12/15 C 5.0
+       version_re = re.compile(r'cc: (studio.*?|\s+)?(sun\s+(c\+\+|c)|(WorkShop\s+Compilers))?\s+(?P<major>\d*)\.(?P<minor>\d*)', re.I).search
+       match = version_re(version)
+       if match:
+               k = match.groupdict()
+               conf.env.CC_VERSION = (k['major'], k['minor'])
+       else:
+               conf.fatal('Could not determine the suncc version.')
+
+# ============ the --as-needed flag should added during the configuration, not at runtime =========
+
+@conf
+def add_as_needed(self):
+	"""
+	Adds ``--as-needed`` to the *LINKFLAGS*
+	On some platforms, it is a default flag.  In some cases (e.g., in NS-3) it is necessary to explicitly disable this feature with `-Wl,--no-as-needed` flag.
+	"""
+	# only meaningful for ELF outputs linked with gcc/g++
+	if self.env.DEST_BINFMT == 'elf' and 'gcc' in (self.env.CXX_NAME, self.env.CC_NAME):
+		self.env.append_unique('LINKFLAGS', '-Wl,--as-needed')
+
+# ============ parallel configuration
+
+class cfgtask(Task.TaskBase):
+	"""
+	A task that executes build configuration tests (calls conf.check)
+
+	Make sure to use locks if concurrent access to the same conf.env data is necessary.
+	"""
+	def __init__(self, *k, **kw):
+		Task.TaskBase.__init__(self, *k, **kw)
+		# tasks that must complete before this one (set by multicheck)
+		self.run_after = set()
+
+	def display(self):
+		# no per-task console output; results are printed by process()
+		return ''
+
+	def runnable_status(self):
+		for x in self.run_after:
+			if not x.hasrun:
+				return Task.ASK_LATER
+		return Task.RUN_ME
+
+	def uid(self):
+		return Utils.SIG_NIL
+
+	def run(self):
+		# run the configuration test in a private build context
+		conf = self.conf
+		bld = Build.BuildContext(top_dir=conf.srcnode.abspath(), out_dir=conf.bldnode.abspath())
+		bld.env = conf.env
+		bld.init_dirs()
+		bld.in_msg = 1 # suppress top-level start_msg
+		bld.logger = self.logger
+		bld.multicheck_task = self
+		args = self.args
+		try:
+			if 'func' in args:
+				bld.test(build_fun=args['func'],
+					msg=args.get('msg', ''),
+					okmsg=args.get('okmsg', ''),
+					errmsg=args.get('errmsg', ''),
+					)
+			else:
+				# force mandatory=True so that failures raise and are reported
+				# through the task status; restore the caller's value afterwards
+				args['multicheck_mandatory'] = args.get('mandatory', True)
+				args['mandatory'] = True
+				try:
+					bld.check(**args)
+				finally:
+					args['mandatory'] = args['multicheck_mandatory']
+		except Exception:
+			# a non-zero return marks the task as failed
+			return 1
+
+	def process(self):
+		Task.TaskBase.process(self)
+		if 'msg' in self.args:
+			# serialize console output between concurrent tests
+			with self.generator.bld.multicheck_lock:
+				self.conf.start_msg(self.args['msg'])
+				if self.hasrun == Task.NOT_RUN:
+					self.conf.end_msg('test cancelled', 'YELLOW')
+				elif self.hasrun != Task.SUCCESS:
+					self.conf.end_msg(self.args.get('errmsg', 'no'), 'YELLOW')
+				else:
+					self.conf.end_msg(self.args.get('okmsg', 'yes'), 'GREEN')
+
+@conf
+def multicheck(self, *k, **kw):
+       """
+       Runs configuration tests in parallel; results are printed sequentially at the end of the build
+       but each test must provide its own msg value to display a line::
+
+               def test_build(ctx):
+                       ctx.in_msg = True # suppress console outputs
+                       ctx.check_large_file(mandatory=False)
+
+               conf.multicheck(
+                       {'header_name':'stdio.h', 'msg':'... stdio', 'uselib_store':'STDIO', 'global_define':False},
+                       {'header_name':'xyztabcd.h', 'msg':'... optional xyztabcd.h', 'mandatory': False},
+                       {'header_name':'stdlib.h', 'msg':'... stdlib', 'okmsg': 'aye', 'errmsg': 'nope'},
+                       {'func': test_build, 'msg':'... testing an arbitrary build function', 'okmsg':'ok'},
+                       msg       = 'Checking for headers in parallel',
+                       mandatory = True, # mandatory tests raise an error at the end
+                       run_all_tests = True, # try running all tests
+               )
+
+       The configuration tests may modify the values in conf.env in any order, and the define
+       values can affect configuration tests being executed. It is hence recommended
+       to provide `uselib_store` values with `global_define=False` to prevent such issues.
+       """
+       self.start_msg(kw.get('msg', 'Executing %d configuration tests' % len(k)), **kw)
+
+       # Force a copy so that threads append to the same list at least
+       # no order is guaranteed, but the values should not disappear at least
+       for var in ('DEFINES', DEFKEYS):
+               self.env.append_value(var, [])
+       self.env.DEFINE_COMMENTS = self.env.DEFINE_COMMENTS or {}
+
+       # define a task object that will execute our tests
+       class par(object):
+               def __init__(self):
+                       self.keep = False
+                       self.task_sigs = {}
+                       self.progress_bar = 0
+               def total(self):
+                       return len(tasks)
+               def to_log(self, *k, **kw):
+                       return
+
+       bld = par()
+       bld.keep = kw.get('run_all_tests', True)
+       tasks = []
+
+       id_to_task = {}
+       for dct in k:
+               x = Task.classes['cfgtask'](bld=bld)
+               tasks.append(x)
+               x.args = dct
+               x.bld = bld
+               x.conf = self
+               x.args = dct
+
+               # bind a logger that will keep the info in memory
+               x.logger = Logs.make_mem_logger(str(id(x)), self.logger)
+
+               if 'id' in dct:
+                       id_to_task[dct['id']] = x
+
+       # second pass to set dependencies with after_test/before_test
+       for x in tasks:
+               for key in Utils.to_list(x.args.get('before_tests', [])):
+                       tsk = id_to_task[key]
+                       if not tsk:
+                               raise ValueError('No test named %r' % key)
+                       tsk.run_after.add(x)
+               for key in Utils.to_list(x.args.get('after_tests', [])):
+                       tsk = id_to_task[key]
+                       if not tsk:
+                               raise ValueError('No test named %r' % key)
+                       x.run_after.add(tsk)
+
+       def it():
+               yield tasks
+               while 1:
+                       yield []
+       bld.producer = p = Runner.Parallel(bld, Options.options.jobs)
+       bld.multicheck_lock = Utils.threading.Lock()
+       p.biter = it()
+
+       self.end_msg('started')
+       p.start()
+
+       # flush the logs in order into the config.log
+       for x in tasks:
+               x.logger.memhandler.flush()
+
+       self.start_msg('-> processing test results')
+       if p.error:
+               for x in p.error:
+                       if getattr(x, 'err_msg', None):
+                               self.to_log(x.err_msg)
+                               self.end_msg('fail', color='RED')
+                               raise Errors.WafError('There is an error in the library, read config.log for more information')
+
+       failure_count = 0
+       for x in tasks:
+               if x.hasrun not in (Task.SUCCESS, Task.NOT_RUN):
+                       failure_count += 1
+
+       if failure_count:
+               self.end_msg(kw.get('errmsg', '%s test failed' % failure_count), color='YELLOW', **kw)
+       else:
+               self.end_msg('all ok', **kw)
+
+       for x in tasks:
+               if x.hasrun != Task.SUCCESS:
+                       if x.args.get('mandatory', True):
+                               self.fatal(kw.get('fatalmsg') or 'One of the tests has failed, read config.log for more information')
diff --git a/third_party/waf/waflib/Tools/c_osx.py b/third_party/waf/waflib/Tools/c_osx.py
new file mode 100644 (file)
index 0000000..c575de2
--- /dev/null
@@ -0,0 +1,196 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy 2008-2016 (ita)
+
+"""
+MacOSX related tools
+"""
+
+import os, shutil, platform
+from waflib import Task, Utils
+from waflib.TaskGen import taskgen_method, feature, after_method, before_method
+
+app_info = '''
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist SYSTEM "file://localhost/System/Library/DTDs/PropertyList.dtd">
+<plist version="0.9">
+<dict>
+       <key>CFBundlePackageType</key>
+       <string>APPL</string>
+       <key>CFBundleGetInfoString</key>
+       <string>Created by Waf</string>
+       <key>CFBundleSignature</key>
+       <string>????</string>
+       <key>NOTE</key>
+       <string>THIS IS A GENERATED FILE, DO NOT MODIFY</string>
+       <key>CFBundleExecutable</key>
+       <string>{app_name}</string>
+</dict>
+</plist>
+'''
+"""
+plist template
+"""
+
+@feature('c', 'cxx')
+def set_macosx_deployment_target(self):
+	"""
+	see WAF issue 285 and also and also http://trac.macports.org/ticket/17059
+	"""
+	if self.env.MACOSX_DEPLOYMENT_TARGET:
+		os.environ['MACOSX_DEPLOYMENT_TARGET'] = self.env.MACOSX_DEPLOYMENT_TARGET
+	elif 'MACOSX_DEPLOYMENT_TARGET' not in os.environ:
+		if Utils.unversioned_sys_platform() == 'darwin':
+			# default to the current system version, e.g. '10.12'
+			os.environ['MACOSX_DEPLOYMENT_TARGET'] = '.'.join(platform.mac_ver()[0].split('.')[:2])
+
+@taskgen_method
+def create_bundle_dirs(self, name, out):
+	"""
+	Creates bundle folders, used by :py:func:`create_task_macplist` and :py:func:`create_task_macapp`
+	"""
+	dir = out.parent.find_or_declare(name)
+	dir.mkdir()
+	macos = dir.find_or_declare(['Contents', 'MacOS'])
+	macos.mkdir()
+	return dir
+
+def bundle_name_for_output(out):
+	"""
+	Returns the ``.app`` bundle name for a link task output node,
+	replacing the file extension (if any) by ``.app``
+	"""
+	name = out.name
+	k = name.rfind('.')
+	if k >= 0:
+		name = name[:k] + '.app'
+	else:
+		name = name + '.app'
+	return name
+
+@feature('cprogram', 'cxxprogram')
+@after_method('apply_link')
+def create_task_macapp(self):
+	"""
+	To compile an executable into a Mac application (a .app), set its *mac_app* attribute::
+
+		def build(bld):
+			bld.shlib(source='a.c', target='foo', mac_app=True)
+
+	To force *all* executables to be transformed into Mac applications::
+
+		def build(bld):
+			bld.env.MACAPP = True
+			bld.shlib(source='a.c', target='foo')
+	"""
+	if self.env.MACAPP or getattr(self, 'mac_app', False):
+		out = self.link_task.outputs[0]
+
+		name = bundle_name_for_output(out)
+		dir = self.create_bundle_dirs(name, out)
+
+		# copy the linked binary into <name>.app/Contents/MacOS/
+		n1 = dir.find_or_declare(['Contents', 'MacOS', out.name])
+
+		self.apptask = self.create_task('macapp', self.link_task.outputs, n1)
+		inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/MacOS/' % name
+		self.add_install_files(install_to=inst_to, install_from=n1, chmod=Utils.O755)
+
+		if getattr(self, 'mac_files', None):
+			# this only accepts files; they will be installed as seen from mac_files_root
+			mac_files_root = getattr(self, 'mac_files_root', None)
+			if isinstance(mac_files_root, str):
+				mac_files_root = self.path.find_node(mac_files_root)
+				if not mac_files_root:
+					self.bld.fatal('Invalid mac_files_root %r' % self.mac_files_root)
+			res_dir = n1.parent.parent.make_node('Resources')
+			inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Resources' % name
+			for node in self.to_nodes(self.mac_files):
+				relpath = node.path_from(mac_files_root or node.parent)
+				self.create_task('macapp', node, res_dir.make_node(relpath))
+				self.add_install_as(install_to=os.path.join(inst_to, relpath), install_from=node)
+
+		if getattr(self.bld, 'is_install', None):
+			# disable regular binary installation
+			self.install_task.hasrun = Task.SKIP_ME
+
+@feature('cprogram', 'cxxprogram')
+@after_method('apply_link')
+def create_task_macplist(self):
+	"""
+	Creates a :py:class:`waflib.Tools.c_osx.macplist` instance.
+	"""
+	if  self.env.MACAPP or getattr(self, 'mac_app', False):
+		out = self.link_task.outputs[0]
+
+		name = bundle_name_for_output(out)
+
+		dir = self.create_bundle_dirs(name, out)
+		n1 = dir.find_or_declare(['Contents', 'Info.plist'])
+		self.plisttask = plisttask = self.create_task('macplist', [], n1)
+		# variables substituted into the plist template (str.format)
+		plisttask.context = {
+			'app_name': self.link_task.outputs[0].name,
+			'env': self.env
+		}
+
+		plist_ctx = getattr(self, 'plist_context', None)
+		if (plist_ctx):
+			plisttask.context.update(plist_ctx)
+
+		if getattr(self, 'mac_plist', False):
+			# mac_plist may be a file path or the literal plist contents
+			node = self.path.find_resource(self.mac_plist)
+			if node:
+				plisttask.inputs.append(node)
+			else:
+				plisttask.code = self.mac_plist
+		else:
+			plisttask.code = app_info
+
+		inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/' % name
+		self.add_install_files(install_to=inst_to, install_from=n1)
+
+@feature('cshlib', 'cxxshlib')
+@before_method('apply_link', 'propagate_uselib_vars')
+def apply_bundle(self):
+	"""
+	To make a bundled shared library (a ``.bundle``), set the *mac_bundle* attribute::
+
+		def build(bld):
+			bld.shlib(source='a.c', target='foo', mac_bundle = True)
+
+	To force *all* executables to be transformed into bundles::
+
+		def build(bld):
+			bld.env.MACBUNDLE = True
+			bld.shlib(source='a.c', target='foo')
+	"""
+	if self.env.MACBUNDLE or getattr(self, 'mac_bundle', False):
+		self.env.LINKFLAGS_cshlib = self.env.LINKFLAGS_cxxshlib = [] # disable the '-dynamiclib' flag
+		# use the bundle naming pattern instead of the shared library one
+		self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.macbundle_PATTERN
+		use = self.use = self.to_list(getattr(self, 'use', []))
+		if not 'MACBUNDLE' in use:
+			use.append('MACBUNDLE')
+
+# folder structure of a .app bundle
+app_dirs = ['Contents', 'Contents/MacOS', 'Contents/Resources']
+
+class macapp(Task.Task):
+	"""
+	Creates mac applications
+	"""
+	color = 'PINK'
+	def run(self):
+		self.outputs[0].parent.mkdir()
+		# copy2 preserves permissions and timestamps
+		shutil.copy2(self.inputs[0].srcpath(), self.outputs[0].abspath())
+
+class macplist(Task.Task):
+	"""
+	Creates plist files
+	"""
+	color = 'PINK'
+	ext_in = ['.bin']
+	def run(self):
+		# use the inline template if one was set, else read the input plist file
+		if getattr(self, 'code', None):
+			txt = self.code
+		else:
+			txt = self.inputs[0].read()
+		context = getattr(self, 'context', {})
+		txt = txt.format(**context)
+		self.outputs[0].write(txt)
diff --git a/third_party/waf/waflib/Tools/c_preproc.py b/third_party/waf/waflib/Tools/c_preproc.py
new file mode 100644 (file)
index 0000000..3d1208d
--- /dev/null
@@ -0,0 +1,1058 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2016 (ita)
+
+"""
+C/C++ preprocessor for finding dependencies
+
+Reasons for using the Waf preprocessor by default
+
+#. Some c/c++ extensions (Qt) require a custom preprocessor for obtaining the dependencies (.moc files)
+#. Not all compilers provide .d files for obtaining the dependencies (portability)
+#. A naive file scanner will not catch the constructs such as "#include foo()"
+#. A naive file scanner will catch unnecessary dependencies (change an unused header -> recompile everything)
+
+Regarding the speed concerns:
+
+* the preprocessing is performed only when files must be compiled
+* the macros are evaluated only for #if/#elif/#include
+* system headers are not scanned by default
+
Now if you do not want the Waf preprocessor, the tool *gccdeps* uses the .d files produced
+during the compilation to track the dependencies (useful when used with the boost libraries).
+It only works with gcc >= 4.4 though.
+
+A dumb preprocessor is also available in the tool *c_dumbpreproc*
+"""
+# TODO: more varargs, pragma once
+
+import re, string, traceback
+from waflib import Logs, Utils, Errors
+
class PreprocError(Errors.WafError):
	"""Raised when a preprocessor expression or directive cannot be parsed"""
	pass
+
# Maximum number of entries in the LRU caches used by the dependency scanner
FILE_CACHE_SIZE = 100000
LINE_CACHE_SIZE = 100000

POPFILE = '-'
"Constant representing a special token used in :py:meth:`waflib.Tools.c_preproc.c_parser.start` iteration to switch to a header read previously"

recursion_limit = 150
"Limit on the amount of files to read in the dependency scanner"

go_absolute = False
"Set to True to track headers on files in /usr/include, else absolute paths are ignored (but it becomes very slow)"

standard_includes = ['/usr/include']
if Utils.is_win32:
	standard_includes = []

use_trigraphs = 0
"""Apply trigraph rules (False by default)"""

strict_quotes = 0
"""Reserve the "#include <>" quotes for system includes (do not search for those includes). False by default."""

g_optrans = {
'not':'!',
'not_eq':'!',
'and':'&&',
'and_eq':'&=',
'or':'||',
'or_eq':'|=',
'xor':'^',
'xor_eq':'^=',
'bitand':'&',
'bitor':'|',
'compl':'~',
}
"""Operators such as and/or/xor for c++. Set an empty dict to disable."""

# ignore #warning and #error
re_lines = re.compile(
	'^[ \t]*(?:#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',
	re.IGNORECASE | re.MULTILINE)
"""Match preprocessor directive lines (#if/#include/#define/... but not #warning/#error)"""

re_mac = re.compile("^[a-zA-Z_]\w*")
"""Match macro definitions"""

re_fun = re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]')
"""Match macro functions"""

re_pragma_once = re.compile('^\s*once\s*', re.IGNORECASE)
"""Match #pragma once statements"""

re_nl = re.compile('\\\\\r*\n', re.MULTILINE)
"""Match escaped newlines (backslash line continuations)"""

re_cpp = re.compile(r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"', re.DOTALL | re.MULTILINE )
"""Filter C/C++ comments"""

trig_def = [('??'+a, b) for a, b in zip("=-/!'()<>", r'#~\|^[]{}')]
"""Trigraph definitions"""

chr_esc = {'0':0, 'a':7, 'b':8, 't':9, 'n':10, 'f':11, 'v':12, 'r':13, '\\':92, "'":39}
"""Escape characters"""

NUM   = 'i'
"""Number token"""

OP    = 'O'
"""Operator token"""

IDENT = 'T'
"""Identifier token"""

STR   = 's'
"""String token"""

CHAR  = 'c'
"""Character token"""

# the order of tok_types must match the order of the patterns in exp_types below
tok_types = [NUM, STR, IDENT, OP]
"""Token types"""

exp_types = [
	r"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""",
	r'L?"([^"\\]|\\.)*"',
	r'[a-zA-Z_]\w*',
	r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',
]
"""Expression types"""

re_clexer = re.compile('|'.join(["(?P<%s>%s)" % (name, part) for name, part in zip(tok_types, exp_types)]), re.M)
"""Match expressions into tokens"""

accepted  = 'a'
"""Parser state is *accepted*"""

ignored   = 'i'
"""Parser state is *ignored*, for example preprocessor lines in an #if 0 block"""

undefined = 'u'
"""Parser state is *undefined* at the moment"""

skipped   = 's'
"""Parser state is *skipped*, for example preprocessor lines in a #elif 0 block"""
+
def repl(m):
	"""Replacement callback for :py:attr:`waflib.Tools.c_preproc.re_cpp`: comments become a space, string/char literals are kept"""
	txt = m.group()
	return ' ' if txt[0] == '/' else txt
+
prec = {}
"""
Operator precedence rules required for parsing expressions of the form::

	#if 1 && 2 != 0
"""
ops = ['* / %', '+ -', '<< >>', '< <= >= >', '== !=', '& | ^', '&& ||', ',']
# lower value = tighter binding (the index of the operator group in ops)
for level, group in enumerate(ops):
	for sym in group.split():
		prec[sym] = level
+
def trimquotes(s):
	"""
	Remove the single quotes around an expression::

		trimquotes("'test'") == "test"

	:param s: expression to transform
	:type s: string
	:rtype: string
	"""
	# TODO remove in waf 2.0
	if not s:
		return ''
	s = s.rstrip()
	# NOTE(review): whitespace-only input raises IndexError here, kept as-is
	if s[0] == "'" and s[-1] == "'":
		return s[1:-1]
	return s
+
def reduce_nums(val_1, val_2, val_op):
	"""
	Apply a C arithmetic/logical operator to two operands and return the result.

	:param val_1: left operand
	:type val_1: int or string convertible to int
	:param val_2: right operand
	:type val_2: int or string convertible to int
	:param val_op: C operator in *+*, */*, *-*, etc
	:type val_op: string
	:rtype: int
	"""
	# coerce both operands to numbers; strings go through int()
	try:
		a = 0 + val_1
	except TypeError:
		a = int(val_1)
	try:
		b = 0 + val_2
	except TypeError:
		b = int(val_2)

	op = val_op
	if op == '%':
		return a % b
	if op == '+':
		return a + b
	if op == '-':
		return a - b
	if op == '*':
		return a * b
	if op == '/':
		# NOTE(review): true division under Python 3 may yield a float -- confirm intended
		return a / b
	if op == '^':
		return a ^ b
	if op == '==':
		return int(a == b)
	if op in ('|', 'bitor'):
		return a | b
	if op in ('||', 'or'):
		return int(a or b)
	if op in ('&', 'bitand'):
		return a & b
	if op in ('&&', 'and'):
		return int(a and b)
	if op in ('!=', 'not_eq'):
		return int(a != b)
	if op == 'xor':
		# the '^' spelling was handled above; only the keyword form reaches here
		return int(a ^ b)
	if op == '<=':
		return int(a <= b)
	if op == '<':
		return int(a < b)
	if op == '>':
		return int(a > b)
	if op == '>=':
		return int(a >= b)
	if op == '<<':
		return a << b
	if op == '>>':
		return a >> b
	# unknown operators evaluate to 0
	return 0
+
def get_num(lst):
	"""
	Try to obtain a number from a list of tokens. The token types are defined in :py:attr:`waflib.Tools.c_preproc.tok_types`.

	:param lst: list of preprocessor tokens
	:type lst: list of tuple (tokentype, value)
	:return: a pair containing the number and the rest of the list
	:rtype: tuple(value, list)
	"""
	if not lst: raise PreprocError('empty list for get_num')
	(p, v) = lst[0]
	if p == OP:
		if v == '(':
			# find the index of the matching closing parenthesis
			count_par = 1
			i = 1
			while i < len(lst):
				(p, v) = lst[i]

				if p == OP:
					if v == ')':
						count_par -= 1
						if count_par == 0:
							break
					elif v == '(':
						count_par += 1
				i += 1
			else:
				# the loop ran out without a break: unbalanced parentheses
				raise PreprocError('rparen expected %r' % lst)

			# evaluate the sub-expression between the parentheses
			(num, _) = get_term(lst[1:i])
			return (num, lst[i+1:])

		elif v == '+':
			# unary plus: no effect on the value
			return get_num(lst[1:])
		elif v == '-':
			# unary minus, implemented as a multiplication by -1
			num, lst = get_num(lst[1:])
			return (reduce_nums('-1', num, '*'), lst)
		elif v == '!':
			# logical negation
			num, lst = get_num(lst[1:])
			return (int(not int(num)), lst)
		elif v == '~':
			# bitwise complement
			num, lst = get_num(lst[1:])
			return (~ int(num), lst)
		else:
			raise PreprocError('Invalid op token %r for get_num' % lst)
	elif p == NUM:
		return v, lst[1:]
	elif p == IDENT:
		# all macros should have been replaced, remaining identifiers eval to 0
		return 0, lst[1:]
	else:
		raise PreprocError('Invalid token %r for get_num' % lst)
+
def get_term(lst):
	"""
	Evaluate an expression recursively, for example::

		1+1+1 -> 2+1 -> 3

	:param lst: list of tokens
	:type lst: list of tuple(token, value)
	:return: the value and the remaining tokens
	:rtype: value, list
	"""

	if not lst: raise PreprocError('empty list for get_term')
	num, lst = get_num(lst)
	if not lst:
		return (num, [])
	(p, v) = lst[0]
	if p == OP:
		if v == ',':
			# comma operator: discard the value computed so far
			return get_term(lst[1:])
		elif v == '?':
			# conditional expression: find the ':' at parenthesis depth 0
			count_par = 0
			i = 1
			while i < len(lst):
				(p, v) = lst[i]

				if p == OP:
					if v == ')':
						count_par -= 1
					elif v == '(':
						count_par += 1
					elif v == ':':
						if count_par == 0:
							break
				i += 1
			else:
				# no top-level ':' found
				raise PreprocError('rparen expected %r' % lst)

			# evaluate only the branch selected by the condition
			if int(num):
				return get_term(lst[1:i])
			else:
				return get_term(lst[i+1:])

		else:
			num2, lst = get_num(lst[1:])

			if not lst:
				# no more tokens to process
				num2 = reduce_nums(num, num2, v)
				return get_term([(NUM, num2)] + lst)

			# operator precedence
			p2, v2 = lst[0]
			if p2 != OP:
				raise PreprocError('op expected %r' % lst)

			if prec[v2] >= prec[v]:
				# the next operator binds no tighter: fold the current pair now
				num2 = reduce_nums(num, num2, v)
				return get_term([(NUM, num2)] + lst)
			else:
				# the next operator binds tighter: fold the right-hand pair first
				num3, lst = get_num(lst[1:])
				num3 = reduce_nums(num2, num3, v2)
				return get_term([(NUM, num), (p, v), (NUM, num3)] + lst)


	raise PreprocError('cannot reduce %r' % lst)
+
def reduce_eval(lst):
	"""
	Take a list of tokens and output true or false for #if/#elif conditions.

	:param lst: a list of tokens
	:type lst: list of tuple(token, value)
	:return: a token
	:rtype: tuple(NUM, int)
	"""
	value, _ = get_term(lst)
	return (NUM, value)
+
def stringize(lst):
	"""
	Merge a list of tokens into a string

	:param lst: a list of tokens
	:type lst: list of tuple(token, value)
	:rtype: string
	"""
	return "".join(str(val) for _, val in lst)
+
def paste_tokens(t1, t2):
	"""
	Token pasting works between identifiers, particular operators, and identifiers and numbers::

		a ## b  ->  ab
		> ## =  ->  >=
		a ## 2  ->  a2

	:param t1: token
	:type t1: tuple(type, value)
	:param t2: token
	:type t2: tuple(type, value)
	"""
	kind1, kind2 = t1[0], t2[0]
	kind = None
	if kind1 == OP and kind2 == OP:
		kind = OP
	elif kind1 == IDENT and kind2 in (IDENT, NUM):
		kind = IDENT
	elif kind1 == NUM and kind2 == NUM:
		kind = NUM
	if not kind:
		raise PreprocError('tokens do not make a valid paste %r and %r' % (t1, t2))
	return (kind, t1[1] + t2[1])
+
def reduce_tokens(lst, defs, ban=[]):
	"""
	Replace the tokens in lst, using the macros provided in defs, and a list of macros that cannot be re-applied

	The list *lst* is rewritten in place (del/insert) while scanning.
	NOTE(review): *ban* is only extended and forwarded to the recursive calls in
	this function; no membership test is visible here -- confirm the recursion guard.

	:param lst: list of tokens
	:type lst: list of tuple(token, value)
	:param defs: macro definitions
	:type defs: dict
	:param ban: macros that cannot be substituted (recursion is not allowed)
	:type ban: list of string
	:return: the new list of tokens
	:rtype: value, list
	"""

	i = 0
	while i < len(lst):
		(p, v) = lst[i]

		if p == IDENT and v == "defined":
			# handle both "defined FOO" and "defined(FOO)": replace by 1/0
			del lst[i]
			if i < len(lst):
				(p2, v2) = lst[i]
				if p2 == IDENT:
					if v2 in defs:
						lst[i] = (NUM, 1)
					else:
						lst[i] = (NUM, 0)
				elif p2 == OP and v2 == '(':
					del lst[i]
					(p2, v2) = lst[i]
					del lst[i] # remove the ident, and change the ) for the value
					if v2 in defs:
						lst[i] = (NUM, 1)
					else:
						lst[i] = (NUM, 0)
				else:
					raise PreprocError('Invalid define expression %r' % lst)

		elif p == IDENT and v in defs:

			# lazily tokenize string macro bodies on first use
			if isinstance(defs[v], str):
				a, b = extract_macro(defs[v])
				defs[v] = b
			macro_def = defs[v]
			to_add = macro_def[1]

			if isinstance(macro_def[0], list):
				# macro without arguments
				del lst[i]
				accu = to_add[:]
				reduce_tokens(accu, defs, ban+[v])
				for tmp in accu:
					lst.insert(i, tmp)
					i += 1
			else:
				# collect the arguments for the funcall

				args = []
				del lst[i]

				if i >= len(lst):
					raise PreprocError('expected ( after %r (got nothing)' % v)

				(p2, v2) = lst[i]
				if p2 != OP or v2 != '(':
					raise PreprocError('expected ( after %r' % v)

				del lst[i]

				# split the argument tokens on top-level commas
				one_param = []
				count_paren = 0
				while i < len(lst):
					p2, v2 = lst[i]

					del lst[i]
					if p2 == OP and count_paren == 0:
						if v2 == '(':
							one_param.append((p2, v2))
							count_paren += 1
						elif v2 == ')':
							if one_param: args.append(one_param)
							break
						elif v2 == ',':
							if not one_param: raise PreprocError('empty param in funcall %r' % v)
							args.append(one_param)
							one_param = []
						else:
							one_param.append((p2, v2))
					else:
						one_param.append((p2, v2))
						if   v2 == '(': count_paren += 1
						elif v2 == ')': count_paren -= 1
				else:
					# ran out of tokens before the closing ')'
					raise PreprocError('malformed macro')

				# substitute the arguments within the define expression
				accu = []
				arg_table = macro_def[0]
				j = 0
				while j < len(to_add):
					(p2, v2) = to_add[j]

					if p2 == OP and v2 == '#':
						# stringize is for arguments only
						if j+1 < len(to_add) and to_add[j+1][0] == IDENT and to_add[j+1][1] in arg_table:
							toks = args[arg_table[to_add[j+1][1]]]
							accu.append((STR, stringize(toks)))
							j += 1
						else:
							accu.append((p2, v2))
					elif p2 == OP and v2 == '##':
						# token pasting, how can man invent such a complicated system?
						if accu and j+1 < len(to_add):
							# we have at least two tokens

							t1 = accu[-1]

							if to_add[j+1][0] == IDENT and to_add[j+1][1] in arg_table:
								# paste the previous token with the first token of the argument
								toks = args[arg_table[to_add[j+1][1]]]

								if toks:
									accu[-1] = paste_tokens(t1, toks[0]) #(IDENT, accu[-1][1] + toks[0][1])
									accu.extend(toks[1:])
								else:
									# error, case "a##"
									accu.append((p2, v2))
									accu.extend(toks)
							elif to_add[j+1][0] == IDENT and to_add[j+1][1] == '__VA_ARGS__':
								# TODO not sure
								# first collect the tokens
								va_toks = []
								st = len(macro_def[0])
								pt = len(args)
								for x in args[pt-st+1:]:
									va_toks.extend(x)
									va_toks.append((OP, ','))
								if va_toks: va_toks.pop() # extra comma
								if len(accu)>1:
									(p3, v3) = accu[-1]
									(p4, v4) = accu[-2]
									if v3 == '##':
										# remove the token paste
										accu.pop()
										if v4 == ',' and pt < st:
											# remove the comma
											accu.pop()
								accu += va_toks
							else:
								accu[-1] = paste_tokens(t1, to_add[j+1])

							j += 1
						else:
							# Invalid paste, case    "##a" or "b##"
							accu.append((p2, v2))

					elif p2 == IDENT and v2 in arg_table:
						# plain argument reference: expand it, then splice it in
						toks = args[arg_table[v2]]
						reduce_tokens(toks, defs, ban+[v])
						accu.extend(toks)
					else:
						accu.append((p2, v2))

					j += 1


				# rescan the fully substituted body, then splice it back into lst
				reduce_tokens(accu, defs, ban+[v])

				for x in range(len(accu)-1, -1, -1):
					lst.insert(i, accu[x])

		i += 1
+
+
def eval_macro(lst, defs):
	"""
	Reduce the tokens by :py:func:`waflib.Tools.c_preproc.reduce_tokens` and try to return a 0/1 result by :py:func:`waflib.Tools.c_preproc.reduce_eval`.

	:param lst: list of tokens
	:type lst: list of tuple(token, value)
	:param defs: macro definitions
	:type defs: dict
	:rtype: int
	"""
	reduce_tokens(lst, defs, [])
	if not lst:
		raise PreprocError('missing tokens to evaluate')

	# an identifier surviving substitution means an unknown macro
	p, v = lst[0]
	if p == IDENT and v not in defs:
		raise PreprocError('missing macro %r' % lst)

	p, v = reduce_eval(lst)
	return int(v) != 0
+
def extract_macro(txt):
	"""
	Process a macro definition of the form::
		 #define f(x, y) x * y

	into a function or a simple macro without arguments

	:param txt: expression to extract a macro definition from
	:type txt: string
	:return: a tuple containing the name, the list of arguments and the replacement
	:rtype: tuple(string, [list, list])
	"""
	t = tokenize(txt)
	if re_fun.search(txt):
		# function-like macro: parse the parameter list
		p, name = t[0]

		p, v = t[1]
		if p != OP: raise PreprocError('expected (')

		i = 1
		pindex = 0
		params = {}              # parameter name -> positional index
		prev = '('               # last significant token: '(' , IDENT or ','

		while 1:
			i += 1
			p, v = t[i]

			if prev == '(':
				# expecting the first parameter or an empty list ')'
				if p == IDENT:
					params[v] = pindex
					pindex += 1
					prev = p
				elif p == OP and v == ')':
					break
				else:
					raise PreprocError('unexpected token (3)')
			elif prev == IDENT:
				# after a parameter: expecting ',' or ')'
				if p == OP and v == ',':
					prev = v
				elif p == OP and v == ')':
					break
				else:
					raise PreprocError('comma or ... expected')
			elif prev == ',':
				# after a comma: expecting another parameter
				if p == IDENT:
					params[v] = pindex
					pindex += 1
					prev = p
				elif p == OP and v == '...':
					raise PreprocError('not implemented (1)')
				else:
					raise PreprocError('comma or ... expected (2)')
			elif prev == '...':
				raise PreprocError('not implemented (2)')
			else:
				raise PreprocError('unexpected else')

		#~ print (name, [params, t[i+1:]])
		return (name, [params, t[i+1:]])
	else:
		# object-like macro: no parameter list
		(p, v) = t[0]
		if len(t) > 1:
			return (v, [[], t[1:]])
		else:
			# empty define, assign an empty token
			return (v, [[], [('T','')]])
+
re_include = re.compile('^\s*(<(?:.*)>|"(?:.*)")')
def extract_include(txt, defs):
	"""
	Process a line in the form::

		#include foo

	:param txt: include line to process
	:type txt: string
	:param defs: macro definitions
	:type defs: dict
	:return: the file name
	:rtype: string
	"""
	# fast path: a literal <...> or "..." include
	match = re_include.search(txt)
	if match:
		inc = match.group(1)
		return (inc[0], inc[1:-1])

	# otherwise preprocess the text, the result must form an include
	toks = tokenize(txt)
	reduce_tokens(toks, defs, ['waf_include'])

	if not toks:
		raise PreprocError('could not parse include %r' % txt)

	if len(toks) == 1:
		if toks[0][0] == STR:
			return ('"', toks[0][1])
	elif toks[0][1] == '<' and toks[-1][1] == '>':
		return ('<', stringize(toks).lstrip('<').rstrip('>'))

	raise PreprocError('could not parse include %r' % txt)
+
def parse_char(txt):
	"""
	Parse a c character

	:param txt: character to parse
	:type txt: string
	:return: a character literal
	:rtype: string
	"""
	if not txt:
		raise PreprocError('attempted to parse a null char')
	if txt[0] != '\\':
		# plain character
		return ord(txt)
	esc = txt[1]
	if esc == 'x':
		# hexadecimal escape
		return int(txt[2:], 16)
	if esc.isdigit():
		if esc == '0' and len(txt) == 2:
			return 0
		# octal escape: try the longest run of digits first
		# NOTE(review): returns a (length, value) tuple here unlike the other
		# branches which return a plain int -- confirm callers expect this
		for width in (3, 2, 1):
			if len(txt) > width and txt[1:1+width].isdigit():
				return (1+width, int(txt[1:1+width], 8))
	else:
		# named escape such as \n, \t ...
		try:
			return chr_esc[esc]
		except KeyError:
			raise PreprocError('could not parse char literal %r' % txt)
+
def tokenize(s):
	"""
	Convert a string into a list of tokens (shlex.split does not apply to c/c++/d)

	:param s: input to tokenize
	:type s: string
	:return: a list of tokens
	:rtype: list of tuple(token, value)
	"""
	# hand out a fresh copy so that callers may mutate the result freely
	return list(tokenize_private(s))
+
def tokenize_private(s):
	"""
	Split *s* into (token type, value) pairs using :py:attr:`waflib.Tools.c_preproc.re_clexer`.

	:param s: input to tokenize
	:type s: string
	:rtype: list of tuple(token, value)
	"""
	ret = []
	for match in re_clexer.finditer(s):
		m = match.group
		# find which alternative of the lexer regex matched
		for name in tok_types:
			v = m(name)
			if v:
				if name == IDENT:
					# keywords such as 'and'/'or' are operators in disguise
					try:
						g_optrans[v]
						name = OP
					except KeyError:
						# c++ specific
						if v.lower() == "true":
							v = 1
							name = NUM
						elif v.lower() == "false":
							v = 0
							name = NUM
				elif name == NUM:
					# normalize numeric literals using the named sub-groups
					if m('oct'): v = int(v, 8)
					elif m('hex'): v = int(m('hex'), 16)
					elif m('n0'): v = m('n0')
					else:
						v = m('char')
						if v: v = parse_char(v)
						else: v = m('n2') or m('n4')
				elif name == OP:
					# map digraphs to their standard spelling
					if v == '%:': v = '#'
					elif v == '%:%:': v = '##'
				elif name == STR:
					# remove the quotes around the string
					v = v[1:-1]
				ret.append((name, v))
				break
	return ret
+
def format_defines(lst):
	"""
	Convert -D style macro definitions ("FOO" or "FOO=bar") into strings
	suitable for "#define" lines; empty entries are dropped.

	:param lst: macro definitions
	:type lst: list of string
	:rtype: list of string
	"""
	out = []
	for item in lst:
		if not item:
			continue
		name, sep, value = item.partition('=')
		if not sep:
			# "-DFOO" should give "#define FOO 1"
			out.append(item)
		elif name:
			# all others are assumed to be -DX=Y
			out.append('%s %s' % (name, value))
		else:
			# a leading '=' means there is no macro name
			raise ValueError('Invalid define expression %r' % item)
	return out
+
+class c_parser(object):
+	"""
+	Used by :py:func:`waflib.Tools.c_preproc.scan` to parse c/h files. Note that by default,
+	only project headers are parsed.
+	"""
+	def __init__(self, nodepaths=None, defines=None):
+		self.lines = []
+		"""list of lines read"""
+
+		if defines is None:
+			self.defs  = {}
+		else:
+			self.defs  = dict(defines) # make a copy
+		self.state = []
+
+		self.count_files = 0
+		self.currentnode_stack = []
+
+		self.nodepaths = nodepaths or []
+		"""Include paths"""
+
+		self.nodes = []
+		"""List of :py:class:`waflib.Node.Node` found so far"""
+
+		self.names = []
+		"""List of file names that could not be matched by any file"""
+
+		self.curfile = ''
+		"""Current file"""
+
+		self.ban_includes = set()
+		"""Includes that must not be read (#pragma once)"""
+
+	def cached_find_resource(self, node, filename):
+		"""
+		Find a file from the input directory
+
+		:param node: directory
+		:type node: :py:class:`waflib.Node.Node`
+		:param filename: header to find
+		:type filename: string
+		:return: the node if found, or None
+		:rtype: :py:class:`waflib.Node.Node`
+		"""
+		try:
+			cache = node.ctx.preproc_cache_node
+		except AttributeError:
+			global FILE_CACHE_SIZE
+			cache = node.ctx.preproc_cache_node = Utils.lru_cache(FILE_CACHE_SIZE)
+
+		key = (node, filename)
+		try:
+			return cache[key]
+		except KeyError:
+			ret = node.find_resource(filename)
+			if ret:
+				if getattr(ret, 'children', None):
+					ret = None
+				elif ret.is_child_of(node.ctx.bldnode):
+					tmp = node.ctx.srcnode.search_node(ret.path_from(node.ctx.bldnode))
+					if tmp and getattr(tmp, 'children', None):
+						ret = None
+			cache[key] = ret
+			return ret
+
+	def tryfind(self, filename):
+		"""
+		Try to obtain a node from the filename based from the include paths. Will add
+		the node found to :py:attr:`waflib.Tools.c_preproc.c_parser.nodes` or the file name to
+		:py:attr:`waflib.Tools.c_preproc.c_parser.names` if no corresponding file is found. Called by
+		:py:attr:`waflib.Tools.c_preproc.c_parser.start`.
+
+		:param filename: header to find
+		:type filename: string
+		:return: the node if found
+		:rtype: :py:class:`waflib.Node.Node`
+		"""
+		if filename.endswith('.moc'):
+			# we could let the qt4 module use a subclass, but then the function "scan" below must be duplicated
+			# in the qt4 and in the qt5 classes. So we have two lines here and it is sufficient. TODO waf 1.9
+			self.names.append(filename)
+			return None
+
+		self.curfile = filename
+
+		# for msvc it should be a for loop over the whole stack
+		found = self.cached_find_resource(self.currentnode_stack[-1], filename)
+
+		for n in self.nodepaths:
+			if found:
+				break
+			found = self.cached_find_resource(n, filename)
+
+		if found and not found in self.ban_includes:
+			# TODO duplicates do not increase the no-op build times too much, but they may be worth removing
+			self.nodes.append(found)
+			self.addlines(found)
+		else:
+			if not filename in self.names:
+				self.names.append(filename)
+		return found
+
+	def filter_comments(self, node):
+		"""
+		Filter the comments from a c/h file, and return the preprocessor lines.
+		The regexps :py:attr:`waflib.Tools.c_preproc.re_cpp`, :py:attr:`waflib.Tools.c_preproc.re_nl` and :py:attr:`waflib.Tools.c_preproc.re_lines` are used internally.
+
+		:return: the preprocessor directives as a list of (keyword, line)
+		:rtype: a list of string pairs
+		"""
+		# return a list of tuples : keyword, line
+		code = node.read()
+		if use_trigraphs:
+			for (a, b) in trig_def: code = code.split(a).join(b)
+		code = re_nl.sub('', code)
+		code = re_cpp.sub(repl, code)
+		return re_lines.findall(code)
+
+	def parse_lines(self, node):
+		"""
+		Obtain the preprocessor directives of a file, using a cache on the build context.
+
+		:param node: file to process
+		:type node: :py:class:`waflib.Node.Node`
+		:return: the directives, reversed and terminated by a POPFILE marker
+			so that :py:meth:`start` can consume them with ``pop()``
+		:rtype: list of (keyword, line) tuples
+		"""
+		try:
+			cache = node.ctx.preproc_cache_lines
+		except AttributeError:
+			global LINE_CACHE_SIZE
+			cache = node.ctx.preproc_cache_lines = Utils.lru_cache(LINE_CACHE_SIZE)
+		try:
+			return cache[node]
+		except KeyError:
+			cache[node] = lines = self.filter_comments(node)
+			lines.append((POPFILE, ''))
+			lines.reverse()
+			return lines
+
+	def addlines(self, node):
+		"""
+		Add the lines from a header in the list of preprocessor lines to parse
+
+		:param node: header
+		:type node: :py:class:`waflib.Node.Node`
+		"""
+
+		self.currentnode_stack.append(node.parent)
+
+		self.count_files += 1
+		if self.count_files > recursion_limit:
+			# issue #812
+			raise PreprocError('recursion limit exceeded')
+
+		if Logs.verbose:
+			Logs.debug('preproc: reading file %r', node)
+		try:
+			lines = self.parse_lines(node)
+		except EnvironmentError:
+			raise PreprocError('could not read the file %r' % node)
+		except Exception:
+			if Logs.verbose > 0:
+				Logs.error('parsing %r failed', node)
+				traceback.print_exc()
+		else:
+			self.lines.extend(lines)
+
+	def start(self, node, env):
+		"""
+		Preprocess a source file to obtain the dependencies, which are accumulated to :py:attr:`waflib.Tools.c_preproc.c_parser.nodes`
+		and :py:attr:`waflib.Tools.c_preproc.c_parser.names`.
+
+		:param node: source file
+		:type node: :py:class:`waflib.Node.Node`
+		:param env: config set containing additional defines to take into account
+		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
+		"""
+		Logs.debug('preproc: scanning %s (in %s)', node.name, node.parent.name)
+
+		self.current_file = node
+		self.addlines(node)
+
+		# macros may be defined on the command-line, so they must be parsed as if they were part of the file
+		if env.DEFINES:
+			lst = format_defines(env.DEFINES)
+			lst.reverse()
+			self.lines.extend([('define', x) for x in lst])
+
+		while self.lines:
+			(token, line) = self.lines.pop()
+			if token == POPFILE:
+				self.count_files -= 1
+				self.currentnode_stack.pop()
+				continue
+
+			try:
+				ve = Logs.verbose
+				if ve: Logs.debug('preproc: line is %s - %s state is %s', token, line, self.state)
+				state = self.state
+
+				# make certain we define the state if we are about to enter in an if block
+				if token[:2] == 'if':
+					state.append(undefined)
+				elif token == 'endif':
+					state.pop()
+
+				# skip lines when in a dead 'if' branch, wait for the endif
+				if token[0] != 'e':
+					if skipped in self.state or ignored in self.state:
+						continue
+
+				if token == 'if':
+					ret = eval_macro(tokenize(line), self.defs)
+					if ret: state[-1] = accepted
+					else: state[-1] = ignored
+				elif token == 'ifdef':
+					m = re_mac.match(line)
+					if m and m.group() in self.defs: state[-1] = accepted
+					else: state[-1] = ignored
+				elif token == 'ifndef':
+					m = re_mac.match(line)
+					if m and m.group() in self.defs: state[-1] = ignored
+					else: state[-1] = accepted
+				elif token == 'include' or token == 'import':
+					(kind, inc) = extract_include(line, self.defs)
+					if ve: Logs.debug('preproc: include found %s    (%s) ', inc, kind)
+					if kind == '"' or not strict_quotes:
+						self.current_file = self.tryfind(inc)
+						if token == 'import':
+							self.ban_includes.add(self.current_file)
+				elif token == 'elif':
+					if state[-1] == accepted:
+						state[-1] = skipped
+					elif state[-1] == ignored:
+						if eval_macro(tokenize(line), self.defs):
+							state[-1] = accepted
+				elif token == 'else':
+					if state[-1] == accepted: state[-1] = skipped
+					elif state[-1] == ignored: state[-1] = accepted
+				elif token == 'define':
+					try:
+						self.defs[self.define_name(line)] = line
+					except AttributeError:
+						raise PreprocError('Invalid define line %r' % line)
+				elif token == 'undef':
+					m = re_mac.match(line)
+					if m and m.group() in self.defs:
+						self.defs.__delitem__(m.group())
+						#print "undef %s" % name
+				elif token == 'pragma':
+					if re_pragma_once.match(line.lower()):
+						self.ban_includes.add(self.current_file)
+			except Exception as e:
+				# 'as' binding: the Python-2-only 'except Exception, e' comma
+				# form is a SyntaxError on Python 3, which waflib must support
+				if Logs.verbose:
+					Logs.debug('preproc: line parsing failed (%s): %s %s', e, line, Utils.ex_stack())
+
+	def define_name(self, line):
+		"""
+		:param line: define line
+		:type line: string
+		:rtype: string
+		:return: the define name
+		"""
+		return re_mac.match(line).group()
+
+def scan(task):
+	"""
+	Get the dependencies using a c/c++ preprocessor, this is required for finding dependencies of the kind::
+
+		#include some_macro()
+
+	This function is bound as a task method on :py:class:`waflib.Tools.c.c` and :py:class:`waflib.Tools.cxx.cxx` for example
+
+	:param task: compilation task; its first input file is preprocessed
+	:type task: :py:class:`waflib.Task.Task`
+	:return: the dependency nodes found and the names that could not be resolved
+	:rtype: tuple of (list of :py:class:`waflib.Node.Node`, list of string)
+	"""
+
+	global go_absolute
+
+	try:
+		incn = task.generator.includes_nodes
+	except AttributeError:
+		raise Errors.WafError('%r is missing a feature such as "c", "cxx" or "includes": ' % task.generator)
+
+	if go_absolute:
+		# also search the standard system include directories
+		nodepaths = incn + [task.generator.bld.root.find_dir(x) for x in standard_includes]
+	else:
+		# restrict the search to the project source and build directories
+		nodepaths = [x for x in incn if x.is_child_of(x.ctx.srcnode) or x.is_child_of(x.ctx.bldnode)]
+
+	tmp = c_parser(nodepaths)
+	tmp.start(task.inputs[0], task.env)
+	return (tmp.nodes, tmp.names)
diff --git a/third_party/waf/waflib/Tools/c_tests.py b/third_party/waf/waflib/Tools/c_tests.py
new file mode 100644 (file)
index 0000000..d4b8469
--- /dev/null
@@ -0,0 +1,232 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2016 (ita)
+
+"""
+Various configuration tests.
+"""
+
+from waflib import Task
+from waflib.Configure import conf
+from waflib.TaskGen import feature, before_method, after_method
+
+# C code for a small shared library exporting one function
+# (dllexport/dllimport markers are needed for msvc)
+LIB_CODE = '''
+#ifdef _MSC_VER
+#define testEXPORT __declspec(dllexport)
+#else
+#define testEXPORT
+#endif
+testEXPORT int lib_func(void) { return 9; }
+'''
+
+# C code for a program linking against the library above;
+# exits with 0 only when lib_func() returns the expected value
+MAIN_CODE = '''
+#ifdef _MSC_VER
+#define testEXPORT __declspec(dllimport)
+#else
+#define testEXPORT
+#endif
+testEXPORT int lib_func(void);
+int main(int argc, char **argv) {
+	(void)argc; (void)argv;
+	return !(lib_func() == 9);
+}
+'''
+
+@feature('link_lib_test')
+@before_method('process_source')
+def link_lib_test_fun(self):
+	"""
+	The configuration test :py:func:`waflib.Configure.run_build` declares a unique task generator,
+	so we need to create other task generators from here to check if the linker is able to link libraries.
+	"""
+	def write_test_file(task):
+		# dump the 'code' attribute of the task generator into the output file
+		task.outputs[0].write(task.generator.code)
+
+	rpath = []
+	if getattr(self, 'add_rpath', False):
+		rpath = [self.bld.path.get_bld().abspath()]
+
+	mode = self.mode
+	m = '%s %s' % (mode, mode)
+	ex = self.test_exec and 'test_exec' or ''
+	bld = self.bld
+	# create the source files, then a shared library and a program using it
+	bld(rule=write_test_file, target='test.' + mode, code=LIB_CODE)
+	bld(rule=write_test_file, target='main.' + mode, code=MAIN_CODE)
+	bld(features='%sshlib' % m, source='test.' + mode, target='test')
+	bld(features='%sprogram %s' % (m, ex), source='main.' + mode, target='app', use='test', rpath=rpath)
+
+@conf
+def check_library(self, mode=None, test_exec=True):
+	"""
+	Checks if libraries can be linked with the current linker. Uses :py:func:`waflib.Tools.c_tests.link_lib_test_fun`.
+
+	:param mode: c or cxx or d
+	:type mode: string
+	:param test_exec: whether to also execute the resulting test program
+	:type test_exec: bool
+	"""
+	if not mode:
+		# default to c++ when a c++ compiler was configured
+		mode = 'c'
+		if self.env.CXX:
+			mode = 'cxx'
+	self.check(
+		compile_filename = [],
+		features = 'link_lib_test',
+		msg = 'Checking for libraries',
+		mode = mode,
+		test_exec = test_exec)
+
+########################################################################################
+
+# fragment used to probe candidate inline keywords;
+# the keyword is substituted twice (static and non-static use)
+INLINE_CODE = '''
+typedef int foo_t;
+static %s foo_t static_foo () {return 0; }
+%s foo_t foo () {
+	return 0;
+}
+'''
+# candidate keywords, tried in this order
+INLINE_VALUES = ['inline', '__inline__', '__inline']
+
+@conf
+def check_inline(self, **kw):
+	"""
+	Checks for the right value for inline macro.
+	Define INLINE_MACRO to 1 if the define is found.
+	If the inline macro is not 'inline', add a define to the ``config.h`` (#define inline __inline__)
+
+	:param define_name: define INLINE_MACRO by default to 1 if the macro is defined
+	:type define_name: string
+	:param features: by default *c* or *cxx* depending on the compiler present
+	:type features: list of string
+	:return: the first inline keyword accepted by the compiler
+	:rtype: string
+	"""
+	self.start_msg('Checking for inline')
+
+	if not 'define_name' in kw:
+		kw['define_name'] = 'INLINE_MACRO'
+	if not 'features' in kw:
+		if self.env.CXX:
+			kw['features'] = ['cxx']
+		else:
+			kw['features'] = ['c']
+
+	for x in INLINE_VALUES:
+		kw['fragment'] = INLINE_CODE % (x, x)
+
+		try:
+			self.check(**kw)
+		except self.errors.ConfigurationError:
+			# this keyword is not supported, try the next candidate
+			continue
+		else:
+			self.end_msg(x)
+			if x != 'inline':
+				self.define('inline', x, quote=False)
+			return x
+	self.fatal('could not use inline functions')
+
+########################################################################################
+
+# program exiting with 0 only when off_t is at least 64 bits wide
+LARGE_FRAGMENT = '''#include <unistd.h>
+int main(int argc, char **argv) {
+	(void)argc; (void)argv;
+	return !(sizeof(off_t) >= 8);
+}
+'''
+
+@conf
+def check_large_file(self, **kw):
+	"""
+	Checks for large file support and define the macro HAVE_LARGEFILE
+	The test is skipped on win32 systems (DEST_BINFMT == pe).
+
+	:param define_name: define to set, by default *HAVE_LARGEFILE*
+	:type define_name: string
+	:param execute: execute the test (yes by default)
+	:type execute: bool
+	"""
+	if not 'define_name' in kw:
+		kw['define_name'] = 'HAVE_LARGEFILE'
+	if not 'execute' in kw:
+		kw['execute'] = True
+
+	if not 'features' in kw:
+		if self.env.CXX:
+			kw['features'] = ['cxx', 'cxxprogram']
+		else:
+			kw['features'] = ['c', 'cprogram']
+
+	kw['fragment'] = LARGE_FRAGMENT
+
+	kw['msg'] = 'Checking for large file support'
+	ret = True
+	try:
+		# on 'pe' targets the first check is skipped and support is assumed
+		if self.env.DEST_BINFMT != 'pe':
+			ret = self.check(**kw)
+	except self.errors.ConfigurationError:
+		pass
+	else:
+		if ret:
+			return True
+
+	# retry with the _FILE_OFFSET_BITS=64 transitional interface
+	kw['msg'] = 'Checking for -D_FILE_OFFSET_BITS=64'
+	kw['defines'] = ['_FILE_OFFSET_BITS=64']
+	try:
+		ret = self.check(**kw)
+	except self.errors.ConfigurationError:
+		pass
+	else:
+		self.define('_FILE_OFFSET_BITS', 64)
+		return ret
+
+	self.fatal('There is no support for large files')
+
+########################################################################################
+
+# The short arrays below embed the markers "BIGenDianSyS" / "LiTTleEnDian"
+# in the compiled object, with byte order depending on the target endianness;
+# grep_for_endianness then searches the binary for them
+ENDIAN_FRAGMENT = '''
+short int ascii_mm[] = { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 };
+short int ascii_ii[] = { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 };
+int use_ascii (int i) {
+	return ascii_mm[i] + ascii_ii[i];
+}
+short int ebcdic_ii[] = { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 };
+short int ebcdic_mm[] = { 0xC2C9, 0xC785, 0x95C4, 0x8981, 0x95E2, 0xA8E2, 0 };
+int use_ebcdic (int i) {
+	return ebcdic_mm[i] + ebcdic_ii[i];
+}
+extern int foo;
+'''
+
+class grep_for_endianness(Task.Task):
+	"""
+	Task that reads a binary and tries to determine the endianness
+	"""
+	color = 'PINK'
+	def run(self):
+		# decode with a byte-transparent codec so raw object data survives
+		txt = self.inputs[0].read(flags='rb').decode('iso8859-1')
+		if txt.find('LiTTleEnDian') > -1:
+			self.generator.tmp.append('little')
+		elif txt.find('BIGenDianSyS') > -1:
+			self.generator.tmp.append('big')
+		else:
+			# neither marker found: fail the task
+			return -1
+
+@feature('grep_for_endianness')
+@after_method('process_source')
+def grep_for_endianness_fun(self):
+	"""
+	Used by the endianness configuration test
+	"""
+	# inspect the object file produced by the compilation task
+	self.create_task('grep_for_endianness', self.compiled_tasks[0].outputs[0])
+
+@conf
+def check_endianness(self):
+	"""
+	Executes a configuration test to determine the endianness
+	"""
+	tmp = []
+	def check_msg(self):
+		# used as 'okmsg' below: display the detected value
+		return tmp[0]
+	self.check(fragment=ENDIAN_FRAGMENT, features='c grep_for_endianness',
+		msg='Checking for endianness', define='ENDIANNESS', tmp=tmp, okmsg=check_msg)
+	return tmp[0]
diff --git a/third_party/waf/waflib/Tools/ccroot.py b/third_party/waf/waflib/Tools/ccroot.py
new file mode 100644 (file)
index 0000000..506bd12
--- /dev/null
@@ -0,0 +1,772 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2016 (ita)
+
+"""
+Classes and methods shared by tools providing support for C-like language such
+as C/C++/D/Assembly/Go (this support module is almost never used alone).
+"""
+
+import os, re
+from waflib import Task, Utils, Node, Errors, Logs
+from waflib.TaskGen import after_method, before_method, feature, taskgen_method, extension
+from waflib.Tools import c_aliases, c_preproc, c_config, c_osx, c_tests
+from waflib.Configure import conf
+
+# Typical library directories on unix-like systems
+SYSTEM_LIB_PATHS = ['/usr/lib64', '/usr/lib', '/usr/local/lib64', '/usr/local/lib']
+
+USELIB_VARS = Utils.defaultdict(set)
+"""
+Mapping for features to :py:class:`waflib.ConfigSet.ConfigSet` variables. See :py:func:`waflib.Tools.ccroot.propagate_uselib_vars`.
+"""
+
+# compilation flags per language
+USELIB_VARS['c']        = set(['INCLUDES', 'FRAMEWORKPATH', 'DEFINES', 'CPPFLAGS', 'CCDEPS', 'CFLAGS', 'ARCH'])
+USELIB_VARS['cxx']      = set(['INCLUDES', 'FRAMEWORKPATH', 'DEFINES', 'CPPFLAGS', 'CXXDEPS', 'CXXFLAGS', 'ARCH'])
+USELIB_VARS['d']        = set(['INCLUDES', 'DFLAGS'])
+USELIB_VARS['includes'] = set(['INCLUDES', 'FRAMEWORKPATH', 'ARCH'])
+
+# link flags per target kind (program / shared library / static library)
+USELIB_VARS['cprogram'] = USELIB_VARS['cxxprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH', 'LDFLAGS'])
+USELIB_VARS['cshlib']   = USELIB_VARS['cxxshlib']   = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH', 'LDFLAGS'])
+USELIB_VARS['cstlib']   = USELIB_VARS['cxxstlib']   = set(['ARFLAGS', 'LINKDEPS'])
+
+USELIB_VARS['dprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
+USELIB_VARS['dshlib']   = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
+USELIB_VARS['dstlib']   = set(['ARFLAGS', 'LINKDEPS'])
+
+USELIB_VARS['asm'] = set(['ASFLAGS'])
+
+# =================================================================================================
+
+@taskgen_method
+def create_compiled_task(self, name, node):
+	"""
+	Create the compilation task: c, cxx, asm, etc. The output node is created automatically (object file with a typical **.o** extension).
+	The task is appended to the list *compiled_tasks* which is then used by :py:func:`waflib.Tools.ccroot.apply_link`
+
+	:param name: name of the task class
+	:type name: string
+	:param node: the file to compile
+	:type node: :py:class:`waflib.Node.Node`
+	:return: The task created
+	:rtype: :py:class:`waflib.Task.Task`
+	"""
+	# include the task generator index in the object name to disambiguate
+	# identically-named sources compiled by different task generators
+	out = '%s.%d.o' % (node.name, self.idx)
+	task = self.create_task(name, node, node.parent.find_or_declare(out))
+	try:
+		self.compiled_tasks.append(task)
+	except AttributeError:
+		# first compiled task for this task generator
+		self.compiled_tasks = [task]
+	return task
+
+@taskgen_method
+def to_incnodes(self, inlst):
+	"""
+	Task generator method provided to convert a list of string/nodes into a list of includes folders.
+
+	The paths are assumed to be relative to the task generator path, except if they begin by **#**
+	in which case they are searched from the top-level directory (``bld.srcnode``).
+	The folders are simply assumed to be existing.
+
+	The node objects in the list are returned in the output list. The strings are converted
+	into node objects if possible. The node is searched from the source directory, and if a match is found,
+	the equivalent build directory is created and added to the returned list too. When a folder cannot be found, it is ignored.
+
+	:param inlst: list of folders
+	:type inlst: space-delimited string or a list of string/nodes
+	:rtype: list of :py:class:`waflib.Node.Node`
+	:return: list of include folders as nodes
+	"""
+	lst = []
+	seen = set()
+	for x in self.to_list(inlst):
+		if x in seen or not x:
+			# skip duplicates and empty entries
+			continue
+		seen.add(x)
+
+		# with a real lot of targets, it is sometimes interesting to cache the results below
+		if isinstance(x, Node.Node):
+			lst.append(x)
+		else:
+			if os.path.isabs(x):
+				lst.append(self.bld.root.make_node(x) or x)
+			else:
+				if x[0] == '#':
+					# '#'-prefixed paths are relative to the top-level directory
+					p = self.bld.bldnode.make_node(x[1:])
+					v = self.bld.srcnode.make_node(x[1:])
+				else:
+					p = self.path.get_bld().make_node(x)
+					v = self.path.make_node(x)
+				if p.is_child_of(self.bld.bldnode):
+					# make certain the equivalent build directory exists
+					p.mkdir()
+				lst.append(p)
+				lst.append(v)
+	return lst
+
+@feature('c', 'cxx', 'd', 'asm', 'fc', 'includes')
+@after_method('propagate_uselib_vars', 'process_source')
+def apply_incpaths(self):
+	"""
+	Task generator method that processes the attribute *includes*::
+
+		tg = bld(features='includes', includes='.')
+
+	The folders only need to be relative to the current directory, the equivalent build directory is
+	added automatically (for headers created in the build directory). This enable using a build directory
+	or not (``top == out``).
+
+	This method will add a list of nodes read by :py:func:`waflib.Tools.ccroot.to_incnodes` in ``tg.env.INCPATHS``,
+	and the list of include paths in ``tg.env.INCLUDES``.
+	"""
+
+	# merge the task generator includes with the configuration-set ones
+	lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env.INCLUDES)
+	self.includes_nodes = lst
+	cwd = self.get_cwd()
+	# INCPATHS holds the include paths relative to the compilation directory
+	self.env.INCPATHS = [x.path_from(cwd) for x in lst]
+
+class link_task(Task.Task):
+	"""
+	Base class for all link tasks. A task generator is supposed to have at most one link task bound in the attribute *link_task*. See :py:func:`waflib.Tools.ccroot.apply_link`.
+
+	.. inheritance-diagram:: waflib.Tools.ccroot.stlink_task waflib.Tools.c.cprogram waflib.Tools.c.cshlib waflib.Tools.cxx.cxxstlib  waflib.Tools.cxx.cxxprogram waflib.Tools.cxx.cxxshlib waflib.Tools.d.dprogram waflib.Tools.d.dshlib waflib.Tools.d.dstlib waflib.Tools.ccroot.fake_shlib waflib.Tools.ccroot.fake_stlib waflib.Tools.asm.asmprogram waflib.Tools.asm.asmshlib waflib.Tools.asm.asmstlib
+	"""
+	color   = 'YELLOW'
+
+	inst_to = None
+	"""Default installation path for the link task outputs, or None to disable"""
+
+	chmod   = Utils.O755
+	"""Default installation mode for the link task outputs"""
+
+	def add_target(self, target):
+		"""
+		Process the *target* attribute to add the platform-specific prefix/suffix such as *.so* or *.exe*.
+		The settings are retrieved from ``env.clsname_PATTERN``
+		"""
+		if isinstance(target, str):
+			base = self.generator.path
+			if target.startswith('#'):
+				# for those who like flat structures
+				target = target[1:]
+				base = self.generator.bld.bldnode
+
+			pattern = self.env[self.__class__.__name__ + '_PATTERN']
+			if not pattern:
+				pattern = '%s'
+			folder, name = os.path.split(target)
+
+			# shared libraries with a version number ('vnum') get it appended
+			# in a platform-specific way
+			if self.__class__.__name__.find('shlib') > 0 and getattr(self.generator, 'vnum', None):
+				nums = self.generator.vnum.split('.')
+				if self.env.DEST_BINFMT == 'pe':
+					# include the version in the dll file name,
+					# the import lib file name stays unversionned.
+					name = name + '-' + nums[0]
+				elif self.env.DEST_OS == 'openbsd':
+					pattern = '%s.%s' % (pattern, nums[0])
+					if len(nums) >= 2:
+						pattern += '.%s' % nums[1]
+
+			if folder:
+				tmp = folder + os.sep + pattern % name
+			else:
+				tmp = pattern % name
+			target = base.find_or_declare(tmp)
+		self.set_outputs(target)
+
+	def exec_command(self, *k, **kw):
+		# run the link command, then embed a manifest when the environment
+		# requires one (msvc-style toolchains set DO_MANIFEST)
+		ret = super(link_task, self).exec_command(*k, **kw)
+		if not ret and self.env.DO_MANIFEST:
+			ret = self.exec_mf()
+		return ret
+
+	def exec_mf(self):
+		"""
+		Create manifest files for VS-like compilers (msvc, ifort, ...)
+		"""
+		if not self.env.MT:
+			return 0
+
+		manifest = None
+		for out_node in self.outputs:
+			if out_node.name.endswith('.manifest'):
+				manifest = out_node.abspath()
+				break
+		else:
+			# Should never get here.  If we do, it means the manifest file was
+			# never added to the outputs list, thus we don't have a manifest file
+			# to embed, so we just return.
+			return 0
+
+		# embedding mode. Different for EXE's and DLL's.
+		# see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx
+		mode = ''
+		for x in Utils.to_list(self.generator.features):
+			if x in ('cprogram', 'cxxprogram', 'fcprogram', 'fcprogram_test'):
+				mode = 1
+			elif x in ('cshlib', 'cxxshlib', 'fcshlib'):
+				mode = 2
+
+		Logs.debug('msvc: embedding manifest in mode %r', mode)
+
+		lst = [] + self.env.MT
+		lst.extend(Utils.to_list(self.env.MTFLAGS))
+		lst.extend(['-manifest', manifest])
+		lst.append('-outputresource:%s;%s' % (self.outputs[0].abspath(), mode))
+
+		return super(link_task, self).exec_command(lst)
+
+class stlink_task(link_task):
+	"""
+	Base for static link tasks, which use *ar* most of the time.
+	The target is always removed before being written (see :py:func:`waflib.Tools.ccroot.rm_tgt`).
+	"""
+	run_str = '${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'
+
+	chmod   = Utils.O644
+	"""Default installation mode for the static libraries"""
+
+def rm_tgt(cls):
+	"""
+	Wrap the ``run`` method of a task class so that the first output file
+	is deleted before the command executes; applied to
+	:py:class:`waflib.Tools.ccroot.stlink_task` below so static libraries
+	are always written from scratch.
+	"""
+	old = cls.run
+	def wrap(self):
+		# best-effort removal: the target may not exist yet
+		try: os.remove(self.outputs[0].abspath())
+		except OSError: pass
+		return old(self)
+	setattr(cls, 'run', wrap)
+rm_tgt(stlink_task)
+
+@feature('c', 'cxx', 'd', 'fc', 'asm')
+@after_method('process_source')
+def apply_link(self):
+	"""
+	Collect the tasks stored in ``compiled_tasks`` (created by :py:func:`waflib.Tools.ccroot.create_compiled_task`), and
+	use the outputs for a new instance of :py:class:`waflib.Tools.ccroot.link_task`. The class to use is the first link task
+	matching a name from the attribute *features*, for example::
+
+			def build(bld):
+				tg = bld(features='cxx cxxprogram cprogram', source='main.c', target='app')
+
+	will create the task ``tg.link_task`` as a new instance of :py:class:`waflib.Tools.cxx.cxxprogram`
+	"""
+
+	# pick the first feature name that maps to a registered link task class
+	for x in self.features:
+		if x == 'cprogram' and 'cxx' in self.features: # limited compat
+			x = 'cxxprogram'
+		elif x == 'cshlib' and 'cxx' in self.features:
+			x = 'cxxshlib'
+
+		if x in Task.classes:
+			if issubclass(Task.classes[x], link_task):
+				link = x
+				break
+	else:
+		# no link-task feature present: nothing to link (object-only generator)
+		return
+
+	objs = [t.outputs[0] for t in getattr(self, 'compiled_tasks', [])]
+	self.link_task = self.create_task(link, objs)
+	self.link_task.add_target(self.target)
+
+	# remember that the install paths are given by the task generators
+	try:
+		inst_to = self.install_path
+	except AttributeError:
+		inst_to = self.link_task.__class__.inst_to
+	if inst_to:
+		# install a copy of the node list we have at this moment (implib not added)
+		self.install_task = self.add_install_files(
+			install_to=inst_to, install_from=self.link_task.outputs[:],
+			chmod=self.link_task.chmod, task=self.link_task)
+
+@taskgen_method
+def use_rec(self, name, **kw):
+	"""
+	Processes the ``use`` keyword recursively. This method is kind of private and only meant to be used from ``process_use``
+	"""
+
+	# already rejected or already visited
+	if name in self.tmp_use_not or name in self.tmp_use_seen:
+		return
+
+	try:
+		y = self.bld.get_tgen_by_name(name)
+	except Errors.WafError:
+		# not a task generator name: treat it as a uselib entry (system library)
+		self.uselib.append(name)
+		self.tmp_use_not.add(name)
+		return
+
+	self.tmp_use_seen.append(name)
+	y.post()
+
+	# bind temporary attributes on the task generator
+	y.tmp_use_objects = objects = kw.get('objects', True)
+	y.tmp_use_stlib   = stlib   = kw.get('stlib', True)
+	try:
+		link_task = y.link_task
+	except AttributeError:
+		# no link task: object-only task generator
+		y.tmp_use_var = ''
+	else:
+		objects = False
+		if not isinstance(link_task, stlink_task):
+			stlib = False
+			y.tmp_use_var = 'LIB'
+		else:
+			y.tmp_use_var = 'STLIB'
+
+	# record dependency edges (x -> name) for the topological sort in process_use
+	p = self.tmp_use_prec
+	for x in self.to_list(getattr(y, 'use', [])):
+		if self.env["STLIB_" + x]:
+			# x is provided through a configured STLIB_ variable, no recursion needed
+			continue
+		try:
+			p[x].append(name)
+		except KeyError:
+			p[x] = [name]
+		self.use_rec(x, objects=objects, stlib=stlib)
+
+@feature('c', 'cxx', 'd', 'use', 'fc')
+@before_method('apply_incpaths', 'propagate_uselib_vars')
+@after_method('apply_link', 'process_source')
+def process_use(self):
+	"""
+	Process the ``use`` attribute which contains a list of task generator names::
+
+		def build(bld):
+			bld.shlib(source='a.c', target='lib1')
+			bld.program(source='main.c', target='app', use='lib1')
+
+	See :py:func:`waflib.Tools.ccroot.use_rec`.
+	"""
+
+	use_not = self.tmp_use_not = set()
+	self.tmp_use_seen = [] # we would like an ordered set
+	use_prec = self.tmp_use_prec = {}
+	self.uselib = self.to_list(getattr(self, 'uselib', []))
+	self.includes = self.to_list(getattr(self, 'includes', []))
+	names = self.to_list(getattr(self, 'use', []))
+
+	# build the dependency graph (edges recorded in use_prec) over the used names
+	for x in names:
+		self.use_rec(x)
+
+	# names that turned out not to be task generators carry no ordering constraints
+	for x in use_not:
+		if x in use_prec:
+			del use_prec[x]
+
+	# topological sort
+	out = self.tmp_use_sorted = []
+	tmp = []
+	for x in self.tmp_use_seen:
+		# seed with the nodes that nothing depends on
+		for k in use_prec.values():
+			if x in k:
+				break
+		else:
+			tmp.append(x)
+
+	while tmp:
+		e = tmp.pop()
+		out.append(e)
+		try:
+			nlst = use_prec[e]
+		except KeyError:
+			pass
+		else:
+			del use_prec[e]
+			for x in nlst:
+				# enqueue x once its last remaining incoming edge is gone
+				for y in use_prec:
+					if x in use_prec[y]:
+						break
+				else:
+					tmp.append(x)
+	if use_prec:
+		raise Errors.WafError('Cycle detected in the use processing %r' % use_prec)
+	out.reverse()
+
+	link_task = getattr(self, 'link_task', None)
+	for x in out:
+		y = self.bld.get_tgen_by_name(x)
+		var = y.tmp_use_var
+		if var and link_task:
+			if var == 'LIB' or y.tmp_use_stlib or x in names:
+				# link against the library and record its directory in LIBPATH/STLIBPATH
+				self.env.append_value(var, [y.target[y.target.rfind(os.sep) + 1:]])
+				self.link_task.dep_nodes.extend(y.link_task.outputs)
+				tmp_path = y.link_task.outputs[0].parent.path_from(self.get_cwd())
+				self.env.append_unique(var + 'PATH', [tmp_path])
+		else:
+			if y.tmp_use_objects:
+				# no library to link against: pull in the object files instead
+				self.add_objects_from_tgen(y)
+
+		if getattr(y, 'export_includes', None):
+			self.includes.extend(y.to_incnodes(y.export_includes))
+
+		if getattr(y, 'export_defines', None):
+			self.env.append_value('DEFINES', self.to_list(y.export_defines))
+
+
+	# and finally, add the use variables (no recursion needed)
+	for x in names:
+		try:
+			y = self.bld.get_tgen_by_name(x)
+		except Errors.WafError:
+			if not self.env['STLIB_' + x] and not x in self.uselib:
+				self.uselib.append(x)
+		else:
+			for k in self.to_list(getattr(y, 'use', [])):
+				if not self.env['STLIB_' + k] and not k in self.uselib:
+					self.uselib.append(k)
+
+@taskgen_method
+def accept_node_to_link(self, node):
+       """
+       PRIVATE INTERNAL USE ONLY
+       """
+       return not node.name.endswith('.pdb')
+
+@taskgen_method
+def add_objects_from_tgen(self, tg):
+	"""
+	Add the objects from the depending compiled tasks as link task inputs.
+
+	Some objects are filtered: for instance, .pdb files are added
+	to the compiled tasks but not to the link tasks (to avoid errors)
+	PRIVATE INTERNAL USE ONLY
+	"""
+	try:
+		link_task = self.link_task
+	except AttributeError:
+		# this task generator has no link task: nothing to do
+		pass
+	else:
+		for tsk in getattr(tg, 'compiled_tasks', []):
+			for x in tsk.outputs:
+				if self.accept_node_to_link(x):
+					link_task.inputs.append(x)
+
+@taskgen_method
+def get_uselib_vars(self):
+       """
+       :return: the *uselib* variables associated to the *features* attribute (see :py:attr:`waflib.Tools.ccroot.USELIB_VARS`)
+       :rtype: list of string
+       """
+       _vars = set()
+       for x in self.features:
+               if x in USELIB_VARS:
+                       _vars |= USELIB_VARS[x]
+       return _vars
+
+@feature('c', 'cxx', 'd', 'fc', 'javac', 'cs', 'uselib', 'asm')
+@after_method('process_use')
+def propagate_uselib_vars(self):
+	"""
+	Process uselib variables for adding flags. For example, the following target::
+
+		def build(bld):
+			bld.env.AFLAGS_aaa = ['bar']
+			from waflib.Tools.ccroot import USELIB_VARS
+			USELIB_VARS['aaa'] = ['AFLAGS']
+
+			tg = bld(features='aaa', aflags='test')
+
+	The *aflags* attribute will be processed and this method will set::
+
+			tg.env.AFLAGS = ['bar', 'test']
+	"""
+	_vars = self.get_uselib_vars()
+	env = self.env
+	app = env.append_value
+	feature_uselib = self.features + self.to_list(getattr(self, 'uselib', []))
+	for var in _vars:
+		y = var.lower()
+		# values set directly on the task generator (tg.cflags, ...) are added first
+		val = getattr(self, y, [])
+		if val:
+			app(var, self.to_list(val))
+
+		# then the values registered per feature/uselib name (env.CFLAGS_foo, ...)
+		for x in feature_uselib:
+			val = env['%s_%s' % (var, x)]
+			if val:
+				app(var, val)
+
+# ============ the code above must not know anything about import libs ==========
+
+@feature('cshlib', 'cxxshlib', 'fcshlib')
+@after_method('apply_link')
+def apply_implib(self):
+	"""
+	Handle dlls and their import libs on Windows-like systems.
+
+	A ``.dll.a`` file called *import library* is generated.
+	It must be installed as it is required for linking the library.
+	"""
+	if not self.env.DEST_BINFMT == 'pe':
+		return
+
+	dll = self.link_task.outputs[0]
+	if isinstance(self.target, Node.Node):
+		name = self.target.name
+	else:
+		name = os.path.split(self.target)[1]
+	implib = self.env.implib_PATTERN % name
+	implib = dll.parent.find_or_declare(implib)
+	self.env.append_value('LINKFLAGS', self.env.IMPLIB_ST % implib.bldpath())
+	self.link_task.outputs.append(implib)
+
+	if getattr(self, 'defs', None) and self.env.DEST_BINFMT == 'pe':
+		node = self.path.find_resource(self.defs)
+		if not node:
+			raise Errors.WafError('invalid def file %r' % self.defs)
+		if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
+			self.env.append_value('LINKFLAGS', '/def:%s' % node.path_from(self.get_cwd()))
+			self.link_task.dep_nodes.append(node)
+		else:
+			# gcc for Windows takes the *.def file as an input without any special flag
+			self.link_task.inputs.append(node)
+
+	# where to put the import library
+	if getattr(self, 'install_task', None):
+		try:
+			# user has given a specific installation path for the import library
+			inst_to = self.install_path_implib
+		except AttributeError:
+			try:
+				# user has given an installation path for the main library, put the import library in it
+				inst_to = self.install_path
+			except AttributeError:
+				# else, put the library in BINDIR and the import library in LIBDIR
+				inst_to = '${IMPLIBDIR}'
+				self.install_task.install_to = '${BINDIR}'
+				if not self.env.IMPLIBDIR:
+					self.env.IMPLIBDIR = self.env.LIBDIR
+		self.implib_install_task = self.add_install_files(install_to=inst_to, install_from=implib,
+			chmod=self.link_task.chmod, task=self.link_task)
+
+# ============ the code above must not know anything about vnum processing on unix platforms =========
+
+# version strings: 1 to 3 dot-separated non-negative integers without leading zeros
+re_vnum = re.compile('^([1-9]\\d*|0)([.]([1-9]\\d*|0)){0,2}?$')
+@feature('cshlib', 'cxxshlib', 'dshlib', 'fcshlib', 'vnum')
+@after_method('apply_link', 'propagate_uselib_vars')
+def apply_vnum(self):
+	"""
+	Enforce version numbering on shared libraries. The valid version numbers must have either zero or two dots::
+
+		def build(bld):
+			bld.shlib(source='a.c', target='foo', vnum='14.15.16')
+
+	In this example on Linux platform, ``libfoo.so`` is installed as ``libfoo.so.14.15.16``, and the following symbolic links are created:
+
+	* ``libfoo.so    -> libfoo.so.14.15.16``
+	* ``libfoo.so.14 -> libfoo.so.14.15.16``
+
+	By default, the library will be assigned SONAME ``libfoo.so.14``, effectively declaring ABI compatibility between all minor and patch releases for the major version of the library.  When necessary, the compatibility can be explicitly defined using the ``cnum`` parameter::
+
+		def build(bld):
+			bld.shlib(source='a.c', target='foo', vnum='14.15.16', cnum='14.15')
+
+	In this case, the assigned SONAME will be ``libfoo.so.14.15`` with ABI compatibility only between patch releases for a specific major and minor version of the library.
+
+	On OS X platform, install-name parameter will follow the above logic for SONAME with exception that it also specifies an absolute path (based on install_path) of the library.
+	"""
+	if not getattr(self, 'vnum', '') or os.name != 'posix' or self.env.DEST_BINFMT not in ('elf', 'mac-o'):
+		return
+
+	link = self.link_task
+	if not re_vnum.match(self.vnum):
+		raise Errors.WafError('Invalid vnum %r for target %r' % (self.vnum, getattr(self, 'name', self)))
+	nums = self.vnum.split('.')
+	node = link.outputs[0]
+
+	# the compatibility version defaults to the major version number
+	cnum = getattr(self, 'cnum', str(nums[0]))
+	cnums = cnum.split('.')
+	if len(cnums)>len(nums) or nums[0:len(cnums)] != cnums:
+		raise Errors.WafError('invalid compatibility version %s' % cnum)
+
+	libname = node.name
+	if libname.endswith('.dylib'):
+		name3 = libname.replace('.dylib', '.%s.dylib' % self.vnum)
+		name2 = libname.replace('.dylib', '.%s.dylib' % cnum)
+	else:
+		name3 = libname + '.' + self.vnum
+		name2 = libname + '.' + cnum
+
+	# add the so name for the ld linker - to disable, just unset env.SONAME_ST
+	if self.env.SONAME_ST:
+		v = self.env.SONAME_ST % name2
+		self.env.append_value('LINKFLAGS', v.split())
+
+	# the following task is just to enable execution from the build dir :-/
+	if self.env.DEST_OS != 'openbsd':
+		outs = [node.parent.make_node(name3)]
+		if name2 != name3:
+			outs.append(node.parent.make_node(name2))
+		self.create_task('vnum', node, outs)
+
+	if getattr(self, 'install_task', None):
+		# replace the default installation by the versioned file plus symlinks
+		self.install_task.hasrun = Task.SKIP_ME
+		path = self.install_task.install_to
+		if self.env.DEST_OS == 'openbsd':
+			libname = self.link_task.outputs[0].name
+			t1 = self.add_install_as(install_to='%s/%s' % (path, libname), install_from=node, chmod=self.link_task.chmod)
+			self.vnum_install_task = (t1,)
+		else:
+			t1 = self.add_install_as(install_to=path + os.sep + name3, install_from=node, chmod=self.link_task.chmod)
+			t3 = self.add_symlink_as(install_to=path + os.sep + libname, install_from=name3)
+			if name2 != name3:
+				t2 = self.add_symlink_as(install_to=path + os.sep + name2, install_from=name3)
+				self.vnum_install_task = (t1, t2, t3)
+			else:
+				self.vnum_install_task = (t1, t3)
+
+	if '-dynamiclib' in self.env.LINKFLAGS:
+		# this requires after(propagate_uselib_vars)
+		try:
+			inst_to = self.install_path
+		except AttributeError:
+			inst_to = self.link_task.__class__.inst_to
+		if inst_to:
+			p = Utils.subst_vars(inst_to, self.env)
+			path = os.path.join(p, name2)
+			self.env.append_value('LINKFLAGS', ['-install_name', path])
+			self.env.append_value('LINKFLAGS', '-Wl,-compatibility_version,%s' % cnum)
+			self.env.append_value('LINKFLAGS', '-Wl,-current_version,%s' % self.vnum)
+
+class vnum(Task.Task):
+	"""
+	Create the symbolic links for a versioned shared library. Instances are created by :py:func:`waflib.Tools.ccroot.apply_vnum`
+	"""
+	color = 'CYAN'
+	ext_in = ['.bin']
+	def keyword(self):
+		# label shown while the task runs
+		return 'Symlinking'
+	def run(self):
+		for x in self.outputs:
+			path = x.abspath()
+			try:
+				# remove a stale file/link so the symlink below cannot fail with EEXIST
+				os.remove(path)
+			except OSError:
+				pass
+
+			try:
+				# relative symlink pointing at the real library file
+				os.symlink(self.inputs[0].name, path)
+			except OSError:
+				# a non-zero return code marks the task as failed
+				return 1
+
+class fake_shlib(link_task):
+       """
+       Task used for reading a system library and adding the dependency on it
+       """
+       def runnable_status(self):
+               for t in self.run_after:
+                       if not t.hasrun:
+                               return Task.ASK_LATER
+               return Task.SKIP_ME
+
+class fake_stlib(stlink_task):
+       """
+       Task used for reading a system library and adding the dependency on it
+       """
+       def runnable_status(self):
+               for t in self.run_after:
+                       if not t.hasrun:
+                               return Task.ASK_LATER
+               return Task.SKIP_ME
+
+@conf
+def read_shlib(self, name, paths=[], export_includes=[], export_defines=[]):
+       """
+       Read a system shared library, enabling its use as a local library. Will trigger a rebuild if the file changes::
+
+               def build(bld):
+                       bld.read_shlib('m')
+                       bld.program(source='main.c', use='m')
+       """
+       return self(name=name, features='fake_lib', lib_paths=paths, lib_type='shlib', export_includes=export_includes, export_defines=export_defines)
+
+@conf
+def read_stlib(self, name, paths=[], export_includes=[], export_defines=[]):
+       """
+       Read a system static library, enabling a use as a local library. Will trigger a rebuild if the file changes.
+       """
+       return self(name=name, features='fake_lib', lib_paths=paths, lib_type='stlib', export_includes=export_includes, export_defines=export_defines)
+
+# File name patterns used by process_lib to locate a foreign library on disk,
+# indexed by library type ('shlib'/'stlib'); '%s' is replaced by the library name
+lib_patterns = {
+	'shlib' : ['lib%s.so', '%s.so', 'lib%s.dylib', 'lib%s.dll', '%s.dll'],
+	'stlib' : ['lib%s.a', '%s.a', 'lib%s.dll', '%s.dll', 'lib%s.lib', '%s.lib'],
+}
+
+@feature('fake_lib')
+def process_lib(self):
+	"""
+	Find the location of a foreign library. Used by :py:class:`waflib.Tools.ccroot.read_shlib` and :py:class:`waflib.Tools.ccroot.read_stlib`.
+	"""
+	node = None
+
+	# candidate file names for the library, e.g. libfoo.so / foo.dll
+	names = [x % self.name for x in lib_patterns[self.lib_type]]
+	for x in self.lib_paths + [self.path] + SYSTEM_LIB_PATHS:
+		if not isinstance(x, Node.Node):
+			x = self.bld.root.find_node(x) or self.path.find_node(x)
+			if not x:
+				continue
+
+		for y in names:
+			node = x.find_node(y)
+			if node:
+				try:
+					# hash the file now so that a change triggers a rebuild
+					Utils.h_file(node.abspath())
+				except EnvironmentError:
+					raise ValueError('Could not read %r' % y)
+				break
+		else:
+			# no pattern matched in this directory, try the next search path
+			continue
+		break
+	else:
+		raise Errors.WafError('could not find library %r' % self.name)
+	self.link_task = self.create_task('fake_%s' % self.lib_type, [], [node])
+	self.target = self.name
+
+
+class fake_o(Task.Task):
+	"""Placeholder task for a pre-built object file; it is never executed"""
+	def runnable_status(self):
+		return Task.SKIP_ME
+
+@extension('.o', '.obj')
+def add_those_o_files(self, node):
+       tsk = self.create_task('fake_o', [], node)
+       try:
+               self.compiled_tasks.append(tsk)
+       except AttributeError:
+               self.compiled_tasks = [tsk]
+
+@feature('fake_obj')
+@before_method('process_source')
+def process_objs(self):
+	"""
+	Puts object files in the task generator outputs
+	"""
+	for node in self.to_nodes(self.source):
+		self.add_those_o_files(node)
+	# prevent the regular source processing from handling these files again
+	self.source = []
+
+@conf
+def read_object(self, obj):
+	"""
+	Read an object file, enabling injection in libs/programs. Will trigger a rebuild if the file changes.
+
+	:param obj: object file path, as string or Node
+	"""
+	if not isinstance(obj, self.path.__class__):
+		# resolve a path string relative to the current directory
+		obj = self.path.find_resource(obj)
+	return self(features='fake_obj', source=obj, name=obj.name)
+
+@feature('cxxprogram', 'cprogram')
+@after_method('apply_link', 'process_use')
+def set_full_paths_hpux(self):
+       """
+       On hp-ux, extend the libpaths and static library paths to absolute paths
+       """
+       if self.env.DEST_OS != 'hp-ux':
+               return
+       base = self.bld.bldnode.abspath()
+       for var in ['LIBPATH', 'STLIBPATH']:
+               lst = []
+               for x in self.env[var]:
+                       if x.startswith('/'):
+                               lst.append(x)
+                       else:
+                               lst.append(os.path.normpath(os.path.join(base, x)))
+               self.env[var] = lst
diff --git a/third_party/waf/waflib/Tools/clang.py b/third_party/waf/waflib/Tools/clang.py
new file mode 100644 (file)
index 0000000..882b68a
--- /dev/null
@@ -0,0 +1,33 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Krzysztof Kosiński 2014
+
+"""
+Detect the Clang C compiler
+"""
+
+from waflib.Tools import ccroot, ar, gcc
+from waflib.Configure import conf
+
+@conf
+def find_clang(conf):
+	"""
+	Finds the program clang and executes it to ensure it really is clang
+	"""
+	cc = conf.find_program('clang', var='CC')
+	# aborts the configuration if the compiler cannot be executed
+	conf.get_cc_version(cc, clang=True)
+	conf.env.CC_NAME = 'clang'
+
+def configure(conf):
+	"""
+	Configuration for the clang C compiler: detection, archiver, common flags and platform tweaks
+	"""
+	conf.find_clang()
+	# prefer llvm-ar, fall back to the system ar
+	conf.find_program(['llvm-ar', 'ar'], var='AR')
+	conf.find_ar()
+	conf.gcc_common_flags()
+	conf.gcc_modifier_platform()
+	conf.cc_load_tools()
+	conf.cc_add_flags()
+	conf.link_add_flags()
diff --git a/third_party/waf/waflib/Tools/clangxx.py b/third_party/waf/waflib/Tools/clangxx.py
new file mode 100644 (file)
index 0000000..628d3dc
--- /dev/null
@@ -0,0 +1,33 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy 2009-2016 (ita)
+
+"""
+Detect the Clang++ C++ compiler
+"""
+
+from waflib.Tools import ccroot, ar, gxx
+from waflib.Configure import conf
+
+@conf
+def find_clangxx(conf):
+	"""
+	Finds the program clang++, and executes it to ensure it really is clang++
+	"""
+	cxx = conf.find_program('clang++', var='CXX')
+	# aborts the configuration if the compiler cannot be executed
+	conf.get_cc_version(cxx, clang=True)
+	conf.env.CXX_NAME = 'clang'
+
+def configure(conf):
+	"""
+	Configuration for the clang++ C++ compiler: detection, archiver, common flags and platform tweaks
+	"""
+	conf.find_clangxx()
+	# prefer llvm-ar, fall back to the system ar
+	conf.find_program(['llvm-ar', 'ar'], var='AR')
+	conf.find_ar()
+	conf.gxx_common_flags()
+	conf.gxx_modifier_platform()
+	conf.cxx_load_tools()
+	conf.cxx_add_flags()
+	conf.link_add_flags()
diff --git a/third_party/waf/waflib/Tools/compiler_c.py b/third_party/waf/waflib/Tools/compiler_c.py
new file mode 100644 (file)
index 0000000..92e9c05
--- /dev/null
@@ -0,0 +1,113 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Matthias Jahn jahn dôt matthias ât freenet dôt de, 2007 (pmarat)
+
+"""
+Try to detect a C compiler from the list of supported compilers (gcc, msvc, etc)::
+
+       def options(opt):
+               opt.load('compiler_c')
+       def configure(cnf):
+               cnf.load('compiler_c')
+       def build(bld):
+               bld.program(source='main.c', target='app')
+
+The compilers are associated to platforms in :py:attr:`waflib.Tools.compiler_c.c_compiler`. To register
+a new C compiler named *cfoo* (assuming the tool ``waflib/extras/cfoo.py`` exists), use::
+
+       from waflib.Tools.compiler_c import c_compiler
+       c_compiler['win32'] = ['cfoo', 'msvc', 'gcc']
+
+       def options(opt):
+               opt.load('compiler_c')
+       def configure(cnf):
+               cnf.load('compiler_c')
+       def build(bld):
+               bld.program(source='main.c', target='app')
+
+Not all compilers need to have a specific tool. For example, the clang compilers can be detected by the gcc tools when using::
+
+       $ CC=clang waf configure
+"""
+
+import re
+from waflib.Tools import ccroot
+from waflib import Utils
+from waflib.Logs import debug
+
+# platform name (from Utils.unversioned_sys_platform) -> ordered list of
+# compiler tools to try; 'default' is the fallback for unknown platforms
+c_compiler = {
+'win32':  ['msvc', 'gcc', 'clang'],
+'cygwin': ['gcc'],
+'darwin': ['clang', 'gcc'],
+'aix':    ['xlc', 'gcc', 'clang'],
+'linux':  ['gcc', 'clang', 'icc'],
+'sunos':  ['suncc', 'gcc'],
+'irix':   ['gcc', 'irixcc'],
+'hpux':   ['gcc'],
+'osf1V':  ['gcc'],
+'gnu':    ['gcc', 'clang'],
+'java':   ['gcc', 'msvc', 'clang', 'icc'],
+'default':['clang', 'gcc'],
+}
+"""
+Dict mapping platform names to Waf tools finding specific C compilers::
+
+	from waflib.Tools.compiler_c import c_compiler
+	c_compiler['linux'] = ['gcc', 'icc', 'suncc']
+"""
+
+def default_compilers():
+       build_platform = Utils.unversioned_sys_platform()
+       possible_compiler_list = c_compiler.get(build_platform, c_compiler['default'])
+       return ' '.join(possible_compiler_list)
+
+def configure(conf):
+       """
+       Detects a suitable C compiler
+
+       :raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found
+       """
+       try:
+               test_for_compiler = conf.options.check_c_compiler or default_compilers()
+       except AttributeError:
+               conf.fatal("Add options(opt): opt.load('compiler_c')")
+
+       for compiler in re.split('[ ,]+', test_for_compiler):
+               conf.env.stash()
+               conf.start_msg('Checking for %r (C compiler)' % compiler)
+               try:
+                       conf.load(compiler)
+               except conf.errors.ConfigurationError ,e:
+                       conf.env.revert()
+                       conf.end_msg(False)
+                       debug('compiler_c: %r', e)
+               else:
+                       if conf.env.CC:
+                               conf.end_msg(conf.env.get_flat('CC'))
+                               conf.env.COMPILER_CC = compiler
+                               conf.env.commit()
+                               break
+                       conf.env.revert()
+                       conf.end_msg(False)
+       else:
+               conf.fatal('could not configure a C compiler!')
+
+def options(opt):
+	"""
+	This is how to provide compiler preferences on the command-line::
+
+		$ waf configure --check-c-compiler=gcc
+	"""
+	test_for_compiler = default_compilers()
+	# c_dumbpreproc.py is explicitly excluded from the auto-loaded tools
+	opt.load_special_tools('c_*.py', ban=['c_dumbpreproc.py'])
+	cc_compiler_opts = opt.add_option_group('Configuration options')
+	cc_compiler_opts.add_option('--check-c-compiler', default=None,
+		help='list of C compilers to try [%s]' % test_for_compiler,
+		dest="check_c_compiler")
+
+	# also load the command-line options of each candidate compiler tool
+	for x in test_for_compiler.split():
+		opt.load('%s' % x)
diff --git a/third_party/waf/waflib/Tools/compiler_cxx.py b/third_party/waf/waflib/Tools/compiler_cxx.py
new file mode 100644 (file)
index 0000000..14e9fc0
--- /dev/null
@@ -0,0 +1,114 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Matthias Jahn jahn dôt matthias ât freenet dôt de 2007 (pmarat)
+
+"""
+Try to detect a C++ compiler from the list of supported compilers (g++, msvc, etc)::
+
+       def options(opt):
+               opt.load('compiler_cxx')
+       def configure(cnf):
+               cnf.load('compiler_cxx')
+       def build(bld):
+               bld.program(source='main.cpp', target='app')
+
+The compilers are associated to platforms in :py:attr:`waflib.Tools.compiler_cxx.cxx_compiler`. To register
+a new C++ compiler named *cfoo* (assuming the tool ``waflib/extras/cfoo.py`` exists), use::
+
+       from waflib.Tools.compiler_cxx import cxx_compiler
+       cxx_compiler['win32'] = ['cfoo', 'msvc', 'gcc']
+
+       def options(opt):
+               opt.load('compiler_cxx')
+       def configure(cnf):
+               cnf.load('compiler_cxx')
+       def build(bld):
+               bld.program(source='main.c', target='app')
+
+Not all compilers need to have a specific tool. For example, the clang compilers can be detected by the gcc tools when using::
+
+       $ CXX=clang waf configure
+"""
+
+
+import re
+from waflib.Tools import ccroot
+from waflib import Utils
+from waflib.Logs import debug
+
+# platform name (from Utils.unversioned_sys_platform) -> ordered list of
+# compiler tools to try; 'default' is the fallback for unknown platforms
+cxx_compiler = {
+'win32':  ['msvc', 'g++', 'clang++'],
+'cygwin': ['g++'],
+'darwin': ['clang++', 'g++'],
+'aix':    ['xlc++', 'g++', 'clang++'],
+'linux':  ['g++', 'clang++', 'icpc'],
+'sunos':  ['sunc++', 'g++'],
+'irix':   ['g++'],
+'hpux':   ['g++'],
+'osf1V':  ['g++'],
+'gnu':    ['g++', 'clang++'],
+'java':   ['g++', 'msvc', 'clang++', 'icpc'],
+'default': ['clang++', 'g++']
+}
+"""
+Dict mapping the platform names to Waf tools finding specific C++ compilers::
+
+	from waflib.Tools.compiler_cxx import cxx_compiler
+	cxx_compiler['linux'] = ['gxx', 'icpc', 'suncxx']
+"""
+
+def default_compilers():
+       build_platform = Utils.unversioned_sys_platform()
+       possible_compiler_list = cxx_compiler.get(build_platform, cxx_compiler['default'])
+       return ' '.join(possible_compiler_list)
+
+def configure(conf):
+       """
+       Detects a suitable C++ compiler
+
+       :raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found
+       """
+       try:
+               test_for_compiler = conf.options.check_cxx_compiler or default_compilers()
+       except AttributeError:
+               conf.fatal("Add options(opt): opt.load('compiler_cxx')")
+
+       for compiler in re.split('[ ,]+', test_for_compiler):
+               conf.env.stash()
+               conf.start_msg('Checking for %r (C++ compiler)' % compiler)
+               try:
+                       conf.load(compiler)
+               except conf.errors.ConfigurationError ,e:
+                       conf.env.revert()
+                       conf.end_msg(False)
+                       debug('compiler_cxx: %r', e)
+               else:
+                       if conf.env.CXX:
+                               conf.end_msg(conf.env.get_flat('CXX'))
+                               conf.env.COMPILER_CXX = compiler
+                               conf.env.commit()
+                               break
+                       conf.env.revert()
+                       conf.end_msg(False)
+       else:
+               conf.fatal('could not configure a C++ compiler!')
+
+def options(opt):
+	"""
+	This is how to provide compiler preferences on the command-line::
+
+		$ waf configure --check-cxx-compiler=gxx
+	"""
+	test_for_compiler = default_compilers()
+	opt.load_special_tools('cxx_*.py')
+	cxx_compiler_opts = opt.add_option_group('Configuration options')
+	cxx_compiler_opts.add_option('--check-cxx-compiler', default=None,
+		help='list of C++ compilers to try [%s]' % test_for_compiler,
+		dest="check_cxx_compiler")
+
+	# also load the command-line options of each candidate compiler tool
+	for x in test_for_compiler.split():
+		opt.load('%s' % x)
diff --git a/third_party/waf/waflib/Tools/compiler_d.py b/third_party/waf/waflib/Tools/compiler_d.py
new file mode 100644 (file)
index 0000000..690d146
--- /dev/null
@@ -0,0 +1,88 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Carlos Rafael Giani, 2007 (dv)
+# Thomas Nagy, 2016 (ita)
+
+"""
+Try to detect a D compiler from the list of supported compilers::
+
+       def options(opt):
+               opt.load('compiler_d')
+       def configure(cnf):
+               cnf.load('compiler_d')
+       def build(bld):
+               bld.program(source='main.d', target='app')
+
+Only three D compilers are really present at the moment:
+
+* gdc
+* dmd, the ldc compiler having a very similar command-line interface
+* ldc2
+"""
+
+import re
+from waflib import Utils, Logs
+
# Priority-ordered candidate lists, keyed by platform name with a 'default' fallback
d_compiler = {
'default' : ['gdc', 'dmd', 'ldc2']
}
"""
Dict mapping the platform names to lists of names of D compilers to try, in order of preference::

	from waflib.Tools.compiler_d import d_compiler
	d_compiler['default'] = ['gdc', 'dmd', 'ldc2']
"""
+
def default_compilers():
	"""Returns a space-separated string of D compiler names to try for the build platform"""
	platform = Utils.unversioned_sys_platform()
	return ' '.join(d_compiler.get(platform, d_compiler['default']))
+
def configure(conf):
	"""
	Detects a suitable D compiler

	:raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found
	"""
	try:
		test_for_compiler = conf.options.check_d_compiler or default_compilers()
	except AttributeError:
		conf.fatal("Add options(opt): opt.load('compiler_d')")

	for compiler in re.split('[ ,]+', test_for_compiler):
		conf.env.stash()
		conf.start_msg('Checking for %r (D compiler)' % compiler)
		try:
			conf.load(compiler)
		# 'except X as e' - the 'except X, e' comma form is a syntax error on Python 3
		except conf.errors.ConfigurationError as e:
			conf.env.revert()
			conf.end_msg(False)
			Logs.debug('compiler_d: %r', e)
		else:
			if conf.env.D:
				# the tool loaded and found a compiler: keep this environment
				conf.end_msg(conf.env.get_flat('D'))
				conf.env.COMPILER_D = compiler
				conf.env.commit()
				break
			conf.env.revert()
			conf.end_msg(False)
	else:
		conf.fatal('could not configure a D compiler!')
+
def options(opt):
	"""
	Command-line support for selecting the D compiler to check::

		$ waf configure --check-d-compiler=dmd
	"""
	candidates = default_compilers()
	group = opt.add_option_group('Configuration options')
	group.add_option('--check-d-compiler', default=None,
		help='list of D compilers to try [%s]' % candidates, dest='check_d_compiler')
	# pre-load each candidate tool so that its own options are registered too
	for name in candidates.split():
		opt.load(name)
diff --git a/third_party/waf/waflib/Tools/compiler_fc.py b/third_party/waf/waflib/Tools/compiler_fc.py
new file mode 100644 (file)
index 0000000..8625e04
--- /dev/null
@@ -0,0 +1,76 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+
+import re
+from waflib import Utils, Logs
+from waflib.Tools import fc
+
fc_compiler = {
	'win32'  : ['gfortran','ifort'],
	'darwin' : ['gfortran', 'g95', 'ifort'],
	'linux'  : ['gfortran', 'g95', 'ifort'],
	'java'   : ['gfortran', 'g95', 'ifort'],
	'default': ['gfortran'],
	'aix'    : ['gfortran']
}
"""
Dict mapping the platform names to lists of names of Fortran compilers to try, in order of preference::

	from waflib.Tools.compiler_fc import fc_compiler
	fc_compiler['linux'] = ['gfortran', 'g95', 'ifort']
"""
+
def default_compilers():
	"""Returns a space-separated string of Fortran compiler names to try for the build platform"""
	platform = Utils.unversioned_sys_platform()
	return ' '.join(fc_compiler.get(platform, fc_compiler['default']))
+
def configure(conf):
	"""
	Detects a suitable Fortran compiler

	:raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found
	"""
	try:
		test_for_compiler = conf.options.check_fortran_compiler or default_compilers()
	except AttributeError:
		conf.fatal("Add options(opt): opt.load('compiler_fc')")
	for compiler in re.split('[ ,]+', test_for_compiler):
		conf.env.stash()
		conf.start_msg('Checking for %r (Fortran compiler)' % compiler)
		try:
			conf.load(compiler)
		# 'except X as e' - the 'except X, e' comma form is a syntax error on Python 3
		except conf.errors.ConfigurationError as e:
			conf.env.revert()
			conf.end_msg(False)
			Logs.debug('compiler_fortran: %r', e)
		else:
			if conf.env.FC:
				# the tool loaded and found a compiler: keep this environment
				conf.end_msg(conf.env.get_flat('FC'))
				conf.env.COMPILER_FORTRAN = compiler
				conf.env.commit()
				break
			conf.env.revert()
			conf.end_msg(False)
	else:
		conf.fatal('could not configure a Fortran compiler!')
+
def options(opt):
	"""
	Command-line support for selecting the Fortran compiler to check::

		$ waf configure --check-fortran-compiler=ifort
	"""
	candidates = default_compilers()
	opt.load_special_tools('fc_*.py')
	group = opt.add_option_group('Configuration options')
	group.add_option('--check-fortran-compiler', default=None,
		help='list of Fortran compiler to try [%s]' % candidates,
		dest="check_fortran_compiler")
	# pre-load each candidate tool so that its own options are registered too
	for name in candidates.split():
		opt.load(name)
diff --git a/third_party/waf/waflib/Tools/cs.py b/third_party/waf/waflib/Tools/cs.py
new file mode 100644 (file)
index 0000000..0ac0ac3
--- /dev/null
@@ -0,0 +1,185 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2016 (ita)
+
+"""
+C# support. A simple example::
+
+       def configure(conf):
+               conf.load('cs')
+       def build(bld):
+               bld(features='cs', source='main.cs', gen='foo')
+
+Note that the configuration may compile C# snippets::
+
+       FRAG = '''
+       namespace Moo {
+               public class Test { public static int Main(string[] args) { return 0; } }
+       }'''
+       def configure(conf):
+               conf.check(features='cs', fragment=FRAG, compile_filename='test.cs', gen='test.exe',
+                       bintype='exe', csflags=['-pkg:gtk-sharp-2.0'], msg='Checking for Gtksharp support')
+"""
+
+from waflib import Utils, Task, Options, Errors
+from waflib.TaskGen import before_method, after_method, feature
+from waflib.Tools import ccroot
+from waflib.Configure import conf
+
# variables that the 'cs' feature reads through the uselib system
ccroot.USELIB_VARS['cs'] = set(['CSFLAGS', 'ASSEMBLIES', 'RESOURCES'])
# .net shared libraries keep their file name verbatim (no 'lib' prefix or extension)
ccroot.lib_patterns['csshlib'] = ['%s']
+
@feature('cs')
@before_method('process_source')
def apply_cs(self):
	"""
	Creates the single C# task of this task generator and binds it to the attribute *cs_task*.
	"""
	cs_sources = []
	remainder = []
	for node in self.to_nodes(self.source):
		if node.name.endswith('.cs'):
			cs_sources.append(node)
		else:
			remainder.append(node)
	# leave the non-C# files for the other extension handlers
	self.source = remainder

	bintype = getattr(self, 'bintype', 'library' if self.gen.endswith('.dll') else 'exe')
	tsk = self.create_task('mcs', cs_sources, self.path.find_or_declare(self.gen))
	self.cs_task = tsk
	tsk.env.CSTYPE = '/target:%s' % bintype
	tsk.env.OUT = '/out:%s' % tsk.outputs[0].abspath()
	self.env.append_value('CSFLAGS', '/platform:%s' % getattr(self, 'platform', 'anycpu'))

	inst_to = getattr(self, 'install_path', '${BINDIR}' if bintype == 'exe' else '${LIBDIR}')
	if inst_to:
		# note: we are making a copy, so the files added to cs_task.outputs won't be installed automatically
		mod = getattr(self, 'chmod', Utils.O755 if bintype == 'exe' else Utils.O644)
		self.install_task = self.add_install_files(install_to=inst_to, install_from=self.cs_task.outputs[:], chmod=mod)
+
@feature('cs')
@after_method('apply_cs')
def use_cs(self):
	"""
	C# applications honor the **use** keyword::

		def build(bld):
			bld(features='cs', source='My.cs', bintype='library', gen='my.dll', name='mylib')
			bld(features='cs', source='Hi.cs', includes='.', bintype='exe', gen='hi.exe', use='mylib', name='hi')
	"""
	get = self.bld.get_tgen_by_name
	for use_name in self.to_list(getattr(self, 'use', [])):
		try:
			tg = get(use_name)
		except Errors.WafError:
			# not a task generator in this build: assume a plain assembly name
			self.env.append_value('CSFLAGS', '/reference:%s' % use_name)
			continue
		tg.post()

		tsk = getattr(tg, 'cs_task', None) or getattr(tg, 'link_task', None)
		if not tsk:
			self.bld.fatal('cs task has no link task for use %r' % self)
		self.cs_task.dep_nodes.extend(tsk.outputs) # dependency
		self.cs_task.set_run_after(tsk) # build order (redundant: also inferred from the input/output nodes)
		self.env.append_value('CSFLAGS', '/reference:%s' % tsk.outputs[0].abspath())
+
@feature('cs')
@after_method('apply_cs', 'use_cs')
def debug_cs(self):
	"""
	The C# targets may create .mdb or .pdb files::

		def build(bld):
			bld(features='cs', source='My.cs', bintype='library', gen='my.dll', csdebug='full')
			# csdebug is a value in (True, 'full', 'pdbonly')
	"""
	csdebug = getattr(self, 'csdebug', self.env.CSDEBUG)
	if not csdebug:
		return

	assembly = self.cs_task.outputs[0]
	# mono writes name.dll.mdb, csc writes name.pdb
	if self.env.CS_NAME == 'mono':
		debug_node = assembly.parent.find_or_declare(assembly.name + '.mdb')
	else:
		debug_node = assembly.change_ext('.pdb')
	self.cs_task.outputs.append(debug_node)

	# install the debug file along with the assembly, when installation is enabled
	try:
		self.install_task.source.append(debug_node)
	except AttributeError:
		pass

	if csdebug == 'pdbonly':
		flags = ['/debug+', '/debug:pdbonly']
	elif csdebug == 'full':
		flags = ['/debug+', '/debug:full']
	else:
		flags = ['/debug-']
	self.env.append_value('CSFLAGS', flags)
+
+
class mcs(Task.Task):
	"""
	Compile C# files
	"""
	color   = 'YELLOW'
	run_str = '${MCS} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}'

	def exec_command(self, cmd, **kw):
		"""
		Refuses the /noconfig flag before running the command.

		:raises ValueError: when /noconfig is present in *cmd*
		"""
		if '/noconfig' in cmd:
			raise ValueError('/noconfig is not allowed when using response files, check your flags!')
		# name the class explicitly: super(self.__class__, self) would recurse
		# infinitely if this method were inherited by a subclass
		return super(mcs, self).exec_command(cmd, **kw)
+
def configure(conf):
	"""
	Finds a C# compiler; sets the variable MCS for the compiler and CS_NAME ('mono' or 'csc')
	"""
	csc = getattr(Options.options, 'cscbinary', None)
	if csc:
		# a user-provided path takes precedence over the search below
		conf.env.MCS = csc
	conf.find_program(['csc', 'mcs', 'gmcs'], var='MCS')
	conf.env.ASS_ST = '/r:%s'
	conf.env.RES_ST = '/resource:%s'

	if 'mcs' in str(conf.env.MCS).lower():
		conf.env.CS_NAME = 'mono'
	else:
		conf.env.CS_NAME = 'csc'
+
def options(opt):
	"""
	Adds a command-line option for the configuration::

		$ waf configure --with-csc-binary=/foo/bar/mcs
	"""
	opt.add_option('--with-csc-binary', type='string', dest='cscbinary')
+
class fake_csshlib(Task.Task):
	"""
	Task used for reading a foreign .net assembly and adding the dependency on it
	"""
	color   = 'YELLOW'
	inst_to = None # never installed

	def runnable_status(self):
		# nothing to build: the assembly already exists on disk
		return Task.SKIP_ME
+
@conf
def read_csshlib(self, name, paths=None):
	"""
	Read a foreign .net assembly for the *use* system::

		def build(bld):
			bld.read_csshlib('ManagedLibrary.dll', paths=[bld.env.mylibrarypath])
			bld(features='cs', source='Hi.cs', bintype='exe', gen='hi.exe', use='ManagedLibrary.dll')

	:param name: Name of the library
	:type name: string
	:param paths: Folders in which the library may be found (default: no folders)
	:type paths: list of string
	:return: A task generator having the feature *fake_lib* which will call :py:func:`waflib.Tools.ccroot.process_lib`
	:rtype: :py:class:`waflib.TaskGen.task_gen`
	"""
	# use None instead of a mutable default argument ([] would be shared between calls)
	return self(name=name, features='fake_lib', lib_paths=paths or [], lib_type='csshlib')
diff --git a/third_party/waf/waflib/Tools/cxx.py b/third_party/waf/waflib/Tools/cxx.py
new file mode 100644 (file)
index 0000000..2ebcdfc
--- /dev/null
@@ -0,0 +1,43 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2016 (ita)
+
+"Base for c++ programs and libraries"
+
+from waflib import TaskGen, Task
+from waflib.Tools import c_preproc
+from waflib.Tools.ccroot import link_task, stlink_task
+
@TaskGen.extension('.cpp','.cc','.cxx','.C','.c++')
def cxx_hook(self, node):
	"Binds c++ file extensions to create :py:class:`waflib.Tools.cxx.cxx` instances"
	# NOTE(review): create_compiled_task presumably also records the task for the link step - confirm in TaskGen
	return self.create_compiled_task('cxx', node)
+
# if no tool has bound '.c' files yet, compile them with the c++ hook
if not '.c' in TaskGen.task_gen.mappings:
	TaskGen.task_gen.mappings['.c'] = TaskGen.task_gen.mappings['.cpp']
+
class cxx(Task.Task):
	"Compiles C++ files into object files"
	run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}'
	vars    = ['CXXDEPS'] # unused variable to depend on, just in case
	ext_in  = ['.h'] # set the build order easily by using ext_out=['.h']
	scan    = c_preproc.scan # preprocessor-based scanner to discover header dependencies
+
class cxxprogram(link_task):
	"Links object files into c++ programs"
	run_str = '${LINK_CXX} ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}'
	vars    = ['LINKDEPS'] # unused variable to depend on, just in case
	ext_out = ['.bin']
	inst_to = '${BINDIR}' # default installation directory
+
class cxxshlib(cxxprogram):
	"Links object files into c++ shared libraries"
	inst_to = '${LIBDIR}' # shared libraries are installed to the library directory
+
class cxxstlib(stlink_task):
	"Links object files into c++ static libraries"
	# everything is inherited from stlink_task
	pass # do not remove
diff --git a/third_party/waf/waflib/Tools/d.py b/third_party/waf/waflib/Tools/d.py
new file mode 100644 (file)
index 0000000..c493b82
--- /dev/null
@@ -0,0 +1,100 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Carlos Rafael Giani, 2007 (dv)
+# Thomas Nagy, 2007-2016 (ita)
+
+from waflib import Utils, Task, Errors
+from waflib.TaskGen import taskgen_method, feature, extension
+from waflib.Tools import d_scan, d_config
+from waflib.Tools.ccroot import link_task, stlink_task
+
class d(Task.Task):
	"Compile a d file into an object file"
	color   = 'GREEN'
	run_str = '${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_SRC_F:SRC} ${D_TGT_F:TGT}'
	scan    = d_scan.scan # scanner following 'import' statements (see d_scan.py)
+
class d_with_header(d):
	"Compile a d file and generate a header (outputs[0] is the object file, outputs[1] the header)"
	run_str = '${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_HDR_F:tgt.outputs[1].bldpath()} ${D_SRC_F:SRC} ${D_TGT_F:tgt.outputs[0].bldpath()}'
+
class d_header(Task.Task):
	"Compile d headers (tasks created by :py:func:`waflib.Tools.d.process_header`)"
	color   = 'BLUE'
	run_str = '${D} ${D_HEADER} ${SRC}'
+
class dprogram(link_task):
	"Link object files into a d program"
	run_str = '${D_LINKER} ${LINKFLAGS} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F:TGT} ${RPATH_ST:RPATH} ${DSTLIB_MARKER} ${DSTLIBPATH_ST:STLIBPATH} ${DSTLIB_ST:STLIB} ${DSHLIB_MARKER} ${DLIBPATH_ST:LIBPATH} ${DSHLIB_ST:LIB}'
	inst_to = '${BINDIR}' # default installation directory
+
class dshlib(dprogram):
	"Link object files into a d shared library"
	inst_to = '${LIBDIR}' # shared libraries are installed to the library directory
+
class dstlib(stlink_task):
	"Link object files into a d static library"
	# everything is inherited from stlink_task
	pass # do not remove
+
@extension('.d', '.di', '.D')
def d_hook(self, node):
	"""
	Compile *D* files. To get .di files as well as .o files, set the following::

		def build(bld):
			bld.program(source='foo.d', target='app', generate_headers=True)

	"""
	obj_ext = 'obj' if Utils.destos_to_binfmt(self.env.DEST_OS) == 'pe' else 'o'
	out = '%s.%d.%s' % (node.name, self.idx, obj_ext)

	def _make_task(kind):
		# create the compilation task and record it for the link step
		task = self.create_task(kind, node, node.parent.find_or_declare(out))
		try:
			self.compiled_tasks.append(task)
		except AttributeError:
			self.compiled_tasks = [task]
		return task

	if getattr(self, 'generate_headers', None):
		tsk = _make_task('d_with_header')
		tsk.outputs.append(node.change_ext(self.env.DHEADER_ext))
	else:
		tsk = _make_task('d')
	return tsk
+
@taskgen_method
def generate_header(self, filename):
	"""
	See feature request #104::

		def build(bld):
			tg = bld.program(source='foo.d', target='app')
			tg.generate_header('blah.d')
			# is equivalent to:
			#tg = bld.program(source='foo.d', target='app', header_lst='blah.d')

	:param filename: header to create
	:type filename: string
	"""
	entries = getattr(self, 'header_lst', None)
	if entries is None:
		entries = self.header_lst = []
	entries.append([filename, self.install_path])
+
@feature('d')
def process_header(self):
	"""
	Process the attribute 'header_lst' to create the d header compilation tasks::

		def build(bld):
			bld.program(source='foo.d', target='app', header_lst='blah.d')
	"""
	for entry in getattr(self, 'header_lst', []):
		source = self.path.find_resource(entry[0])
		if not source:
			raise Errors.WafError('file %r not found on d obj' % entry[0])
		self.create_task('d_header', source, source.change_ext('.di'))
diff --git a/third_party/waf/waflib/Tools/d_config.py b/third_party/waf/waflib/Tools/d_config.py
new file mode 100644 (file)
index 0000000..2586733
--- /dev/null
@@ -0,0 +1,67 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2016 (ita)
+
+from waflib import Utils
+from waflib.Configure import conf
+
@conf
def d_platform_flags(self):
	"""
	Sets the extensions dll/so for d programs and libraries
	"""
	v = self.env
	if not v.DEST_OS:
		v.DEST_OS = Utils.unversioned_sys_platform()
	binfmt = Utils.destos_to_binfmt(self.env.DEST_OS)
	if binfmt == 'pe':
		program_pattern, shlib_pattern = '%s.exe', 'lib%s.dll'
	elif binfmt == 'mac-o':
		program_pattern, shlib_pattern = '%s', 'lib%s.dylib'
	else:
		program_pattern, shlib_pattern = '%s', 'lib%s.so'
	v.dprogram_PATTERN = program_pattern
	v.dshlib_PATTERN   = shlib_pattern
	# static libraries use the same naming convention everywhere
	v.dstlib_PATTERN   = 'lib%s.a'
+
# D snippet compiled (and optionally executed) by check_dlibrary below;
# when run, it prints the name of the standard library it was built against
DLIB = '''
version(D_Version2) {
	import std.stdio;
	int main() {
		writefln("phobos2");
		return 0;
	}
} else {
	version(Tango) {
		import tango.stdc.stdio;
		int main() {
			printf("tango");
			return 0;
		}
	} else {
		import std.stdio;
		int main() {
			writefln("phobos1");
			return 0;
		}
	}
}
'''
"""Detection string for the D standard library"""
+
@conf
def check_dlibrary(self, execute=True):
	"""
	Detects the kind of standard library that comes with the compiler,
	and sets conf.env.DLIBRARY to tango, phobos1 or phobos2

	:param execute: whether to run the compiled snippet and read DLIBRARY from its output
	:type execute: bool
	"""
	ret = self.check_cc(features='d dprogram', fragment=DLIB, compile_filename='test.d', execute=execute, define_ret=True)
	if execute:
		# the DLIB snippet prints the library name on stdout
		self.env.DLIBRARY = ret.strip()
diff --git a/third_party/waf/waflib/Tools/d_scan.py b/third_party/waf/waflib/Tools/d_scan.py
new file mode 100644 (file)
index 0000000..f5cec7e
--- /dev/null
@@ -0,0 +1,209 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2016 (ita)
+
+"""
+Provide a scanner for finding dependencies on d files
+"""
+
+import re
+from waflib import Utils
+
def filter_comments(filename):
	"""
	Reads a d file and returns its text with every comment replaced by a
	single space; string and character literals are preserved verbatim.

	:param filename: d file name
	:type filename: string
	:rtype: list
	:return: a list of text fragments (join them to obtain the filtered code)
	"""
	txt = Utils.readf(filename)
	i = 0
	buf = []
	end = len(txt) # renamed from 'max' to avoid shadowing the builtin
	begin = 0
	while i < end:
		c = txt[i]
		if c == '"' or c == "'":  # skip a string or character literal
			buf.append(txt[begin:i])
			delim = c
			i += 1
			while i < end:
				c = txt[i]
				if c == delim: break
				elif c == '\\':  # skip the character following backslash
					i += 1
				i += 1
			i += 1
			begin = i
		elif c == '/':  # try to replace a comment with whitespace
			buf.append(txt[begin:i])
			i += 1
			if i == end: break
			c = txt[i]
			if c == '+':  # eat nesting /+ +/ comment
				i += 1
				nesting = 1
				c = None
				while i < end:
					prev = c
					c = txt[i]
					if prev == '/' and c == '+':
						nesting += 1
						c = None
					elif prev == '+' and c == '/':
						nesting -= 1
						if nesting == 0: break
						c = None
					i += 1
			elif c == '*':  # eat /* */ comment
				i += 1
				c = None
				while i < end:
					prev = c
					c = txt[i]
					if prev == '*' and c == '/': break
					i += 1
			elif c == '/':  # eat // comment
				i += 1
				while i < end and txt[i] != '\n':
					i += 1
			else:  # no comment: the '/' was a plain division operator
				begin = i - 1
				continue
			i += 1
			begin = i
			buf.append(' ')
		else:
			i += 1
	buf.append(txt[begin:])
	return buf
+
class d_parser(object):
	"""
	Parser for d files: extracts the modules imported by a source file and
	resolves them either to nodes (files found below *incpaths*) or to
	plain names (modules not found)
	"""
	def __init__(self, env, incpaths):
		"""
		:param env: configuration set to use
		:param incpaths: folders to search for imported modules
		:type incpaths: list of :py:class:`waflib.Node.Node`
		"""
		self.allnames = []

		# raw strings so that \s is a regex escape, not an (invalid) string escape
		self.re_module = re.compile(r"module\s+([^;]+)")
		self.re_import = re.compile(r"import\s+([^;]+)")
		self.re_import_bindings = re.compile(r"([^:]+):(.*)")
		self.re_import_alias = re.compile(r"[^=]+=(.+)")

		self.env = env

		self.nodes = []
		self.names = []

		self.incpaths = incpaths

	def tryfind(self, filename):
		"""
		Search for a file matching a module/import directive

		:param filename: module name to resolve (dots are folder separators)
		:type filename: string
		"""
		found = 0
		for n in self.incpaths:
			found = n.find_resource(filename.replace('.', '/') + '.d')
			if found:
				self.nodes.append(found)
				self.waiting.append(found)
				break
		if not found:
			if not filename in self.names:
				self.names.append(filename)

	def get_strings(self, code):
		"""
		:param code: d code to parse
		:type code: string
		:return: the modules that the code imports
		:rtype: list of strings
		"""
		self.module = ''
		lst = []

		# get the module name (if present)

		mod_name = self.re_module.search(code)
		if mod_name:
			self.module = re.sub(r'\s+', '', mod_name.group(1)) # strip all whitespaces

		# go through the code, have a look at all import occurrences

		# first, lets look at anything beginning with "import" and ending with ";"
		import_iterator = self.re_import.finditer(code)
		if import_iterator:
			for import_match in import_iterator:
				import_match_str = re.sub(r'\s+', '', import_match.group(1)) # strip all whitespaces

				# does this end with an import bindings declaration?
				# (import bindings always terminate the list of imports)
				bindings_match = self.re_import_bindings.match(import_match_str)
				if bindings_match:
					import_match_str = bindings_match.group(1)
					# if so, extract the part before the ":" (since the module declaration(s) is/are located there)

				# split the matching string into a bunch of strings, separated by a comma
				matches = import_match_str.split(',')

				for match in matches:
					alias_match = self.re_import_alias.match(match)
					if alias_match:
						# is this an alias declaration? (alias = module name) if so, extract the module name
						match = alias_match.group(1)

					lst.append(match)
		return lst

	def start(self, node):
		"""
		The parsing starts here: process *node* and every file it transitively imports

		:param node: input file
		:type node: :py:class:`waflib.Node.Node`
		"""
		self.waiting = [node]
		# while the stack is not empty, add the dependencies
		while self.waiting:
			nd = self.waiting.pop(0)
			self.iter(nd)

	def iter(self, node):
		"""
		Find all the modules that a file depends on, uses :py:meth:`waflib.Tools.d_scan.d_parser.tryfind` to process dependent files

		:param node: input file
		:type node: :py:class:`waflib.Node.Node`
		"""
		path = node.abspath() # obtain the absolute path
		code = "".join(filter_comments(path)) # read the file and filter the comments
		names = self.get_strings(code) # obtain the import strings
		for x in names:
			# optimization: process each module name only once
			if x in self.allnames: continue
			self.allnames.append(x)

			# for each name, see if it is like a node or not
			self.tryfind(x)
+
def scan(self):
	"look for .d/.di used by a d file"
	parser = d_parser(self.env, self.generator.includes_nodes)
	parser.start(self.inputs[0])
	# nodes: dependencies resolved to files; names: unresolved module names
	return (parser.nodes, parser.names)
diff --git a/third_party/waf/waflib/Tools/dbus.py b/third_party/waf/waflib/Tools/dbus.py
new file mode 100644 (file)
index 0000000..b6951b4
--- /dev/null
@@ -0,0 +1,73 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Ali Sabil, 2007
+
+"""
+Compiles dbus files with **dbus-binding-tool**
+
+Typical usage::
+
+       def options(opt):
+               opt.load('compiler_c dbus')
+       def configure(conf):
+               conf.load('compiler_c dbus')
+       def build(bld):
+               tg = bld.program(
+                       includes = '.',
+                       source = bld.path.ant_glob('*.c'),
+                       target = 'gnome-hello')
+               tg.add_dbus_file('test.xml', 'test_prefix', 'glib-server')
+"""
+
+from waflib import Task, Errors
+from waflib.TaskGen import taskgen_method, before_method
+
@taskgen_method
def add_dbus_file(self, filename, prefix, mode):
	"""
	Adds a dbus file to the list of dbus files to process. Store them in the attribute *dbus_lst*.

	:param filename: xml file to compile
	:type filename: string
	:param prefix: dbus binding tool prefix (--prefix=prefix)
	:type prefix: string
	:param mode: dbus binding tool mode (--mode=mode)
	:type mode: string
	"""
	try:
		self.dbus_lst.append([filename, prefix, mode])
	except AttributeError:
		self.dbus_lst = [[filename, prefix, mode]]
	# make sure the processing method runs for this task generator
	if 'process_dbus' not in self.meths:
		self.meths.append('process_dbus')
+
@before_method('apply_core')
def process_dbus(self):
	"""
	Processes the dbus files stored in the attribute *dbus_lst* to create :py:class:`waflib.Tools.dbus.dbus_binding_tool` instances.
	"""
	for filename, prefix, mode in getattr(self, 'dbus_lst', []):
		source = self.path.find_resource(filename)
		if not source:
			raise Errors.WafError('file not found ' + filename)
		task = self.create_task('dbus_binding_tool', source, source.change_ext('.h'))
		task.env.DBUS_BINDING_TOOL_PREFIX = prefix
		task.env.DBUS_BINDING_TOOL_MODE   = mode
+
class dbus_binding_tool(Task.Task):
	"""
	Compiles a dbus file into a header
	"""
	color   = 'BLUE'
	ext_out = ['.h'] # build headers before the tasks that include them
	run_str = '${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}'
	shell   = True # temporary workaround for #795
+
def configure(conf):
	"""
	Detects the program dbus-binding-tool and sets ``conf.env.DBUS_BINDING_TOOL``

	:raises: a configuration error when the program cannot be found
	"""
	conf.find_program('dbus-binding-tool', var='DBUS_BINDING_TOOL')
diff --git a/third_party/waf/waflib/Tools/dmd.py b/third_party/waf/waflib/Tools/dmd.py
new file mode 100644 (file)
index 0000000..ef62015
--- /dev/null
@@ -0,0 +1,83 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Carlos Rafael Giani, 2007 (dv)
+# Thomas Nagy, 2008-2016 (ita)
+
+import sys
+from waflib.Tools import ar, d
+from waflib.Configure import conf
+
+@conf
+def find_dmd(conf):
+       """
+       Finds the program *dmd*, *dmd2*, or *ldc* and set the variable *D*
+       """
+       conf.find_program(['dmd', 'dmd2', 'ldc'], var='D')
+
+       # make sure that we're dealing with dmd1, dmd2, or ldc(1)
+       # dmd prints "D Compiler v..." on --help; ldc reports "based on DMD v1." on -version
+       out = conf.cmd_and_log(conf.env.D + ['--help'])
+       if out.find("D Compiler v") == -1:
+               out = conf.cmd_and_log(conf.env.D + ['-version'])
+               if out.find("based on DMD v1.") == -1:
+                       conf.fatal("detected compiler is not dmd/ldc")
+
+@conf
+def common_flags_ldc(conf):
+       """
+       Sets the D flags required by *ldc* (overrides some values set by :py:func:`common_flags_dmd`)
+       """
+       v = conf.env
+       v.DFLAGS        = ['-d-version=Posix']
+       v.LINKFLAGS     = []
+       v.DFLAGS_dshlib = ['-relocation-model=pic']
+
+@conf
+def common_flags_dmd(conf):
+       """
+       Set the flags required by *dmd* or *dmd2*
+       """
+       v = conf.env
+
+       v.D_SRC_F           = ['-c']
+       v.D_TGT_F           = '-of%s'
+
+       v.D_LINKER          = v.D
+       v.DLNK_SRC_F        = ''
+       v.DLNK_TGT_F        = '-of%s'
+       v.DINC_ST           = '-I%s'
+
+       # the '-L' prefix makes dmd forward the option verbatim to the system linker
+       v.DSHLIB_MARKER = v.DSTLIB_MARKER = ''
+       v.DSTLIB_ST = v.DSHLIB_ST         = '-L-l%s'
+       v.DSTLIBPATH_ST = v.DLIBPATH_ST   = '-L-L%s'
+
+       v.LINKFLAGS_dprogram= ['-quiet']
+
+       v.DFLAGS_dshlib     = ['-fPIC']
+       v.LINKFLAGS_dshlib  = ['-L-shared']
+
+       # D interface ("header") generation flags
+       v.DHEADER_ext       = '.di'
+       v.DFLAGS_d_with_header = ['-H', '-Hf']
+       v.D_HDR_F           = '%s'
+
+def configure(conf):
+       """
+       Configuration for *dmd*, *dmd2*, and *ldc*
+       """
+       conf.find_dmd()
+
+       # dmd2 is rejected on win32; only dmd1-era compilers are accepted there
+       if sys.platform == 'win32':
+               out = conf.cmd_and_log(conf.env.D + ['--help'])
+               if out.find('D Compiler v2.') > -1:
+                       conf.fatal('dmd2 on Windows is not supported, use gdc or ldc2 instead')
+
+       conf.load('ar')
+       conf.load('d')
+       conf.common_flags_dmd()
+       conf.d_platform_flags()
+
+       # ldc reuses the dmd defaults, then overrides a few values
+       if str(conf.env.D).find('ldc') > -1:
+               conf.common_flags_ldc()
diff --git a/third_party/waf/waflib/Tools/errcheck.py b/third_party/waf/waflib/Tools/errcheck.py
new file mode 100644 (file)
index 0000000..83a3a5b
--- /dev/null
@@ -0,0 +1,229 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011 (ita)
+
+"""
+Common mistakes highlighting.
+
+There is a performance impact, so this tool is only loaded when running ``waf -v``
+"""
+
+# mapping of commonly mistyped task generator keyword -> correct keyword name
+typos = {
+'feature':'features',
+'sources':'source',
+'targets':'target',
+'include':'includes',
+'export_include':'export_includes',
+'define':'defines',
+'importpath':'includes',
+'installpath':'install_path',
+'iscopy':'is_copy',
+}
+
+# BuildContext methods wrapped by replace() to detect the typos above
+meths_typos = ['__call__', 'program', 'shlib', 'stlib', 'objects']
+
+import sys
+from waflib import Logs, Build, Node, Task, TaskGen, ConfigSet, Errors, Utils
+from waflib.Tools import ccroot
+
+def check_same_targets(self):
+       """
+       Reports nodes created by more than one task and tasks sharing the same uid.
+       Called on the build context after compilation (see enhance_lib).
+       """
+       # output node -> list of tasks creating it
+       mp = Utils.defaultdict(list)
+       # task uid -> list of tasks sharing it
+       uids = {}
+
+       def check_task(tsk):
+               if not isinstance(tsk, Task.Task):
+                       return
+               # opt-out flag for tasks that legitimately share outputs
+               if hasattr(tsk, 'no_errcheck_out'):
+                       return
+
+               for node in tsk.outputs:
+                       mp[node].append(tsk)
+               try:
+                       uids[tsk.uid()].append(tsk)
+               except KeyError:
+                       uids[tsk.uid()] = [tsk]
+
+       for g in self.groups:
+               for tg in g:
+                       try:
+                               for tsk in tg.tasks:
+                                       check_task(tsk)
+                       except AttributeError:
+                               # raised if not a task generator, which should be uncommon
+                               check_task(tg)
+
+       dupe = False
+       for (k, v) in mp.items():
+               if len(v) > 1:
+                       dupe = True
+                       msg = '* Node %r is created more than once%s. The task generators are:' % (k, Logs.verbose == 1 and " (full message on 'waf -v -v')" or "")
+                       Logs.error(msg)
+                       for x in v:
+                               if Logs.verbose > 1:
+                                       Logs.error('  %d. %r', 1 + v.index(x), x.generator)
+                               else:
+                                       Logs.error('  %d. %r in %r', 1 + v.index(x), x.generator.name, getattr(x.generator, 'path', None))
+                       Logs.error('If you think that this is an error, set no_errcheck_out on the task instance')
+
+       # only report uid clashes when there is no duplicate-output error already
+       if not dupe:
+               for (k, v) in uids.items():
+                       if len(v) > 1:
+                               Logs.error('* Several tasks use the same identifier. Please check the information on\n   https://waf.io/apidocs/Task.html?highlight=uid#waflib.Task.Task.uid')
+                               for tsk in v:
+                                       Logs.error('  - object %r (%r) defined in %r', tsk.__class__.__name__, tsk, tsk.generator)
+
+def check_invalid_constraints(self):
+       feat = set()
+       for x in list(TaskGen.feats.values()):
+               feat.union(set(x))
+       for (x, y) in TaskGen.task_gen.prec.items():
+               feat.add(x)
+               feat.union(set(y))
+       ext = set()
+       for x in TaskGen.task_gen.mappings.values():
+               ext.add(x.__name__)
+       invalid = ext & feat
+       if invalid:
+               Logs.error('The methods %r have invalid annotations:  @extension <-> @feature/@before_method/@after_method', list(invalid))
+
+       # the build scripts have been read, so we can check for invalid after/before attributes on task classes
+       for cls in list(Task.classes.values()):
+               if sys.hexversion > 0x3000000 and issubclass(cls, Task.Task) and isinstance(cls.hcode, str):
+                       raise Errors.WafError('Class %r has hcode value %r of type <str>, expecting <bytes> (use Utils.h_cmd() ?)' % (cls, cls.hcode))
+
+               for x in ('before', 'after'):
+                       for y in Utils.to_list(getattr(cls, x, [])):
+                               if not Task.classes.get(y):
+                                       Logs.error('Erroneous order constraint %r=%r on task class %r', x, y, cls.__name__)
+               if getattr(cls, 'rule', None):
+                       Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")', cls.__name__)
+
+def replace(m):
+       """
+       Replaces existing BuildContext methods to verify parameter names,
+       for example ``bld(source=)`` has no ending *s*
+
+       :param m: name of the BuildContext method to wrap (see *meths_typos*)
+       """
+       oldcall = getattr(Build.BuildContext, m)
+       def call(self, *k, **kw):
+               # run the original method first, then complain about known typos
+               ret = oldcall(self, *k, **kw)
+               for x in typos:
+                       if x in kw:
+                               # 'iscopy' is a valid keyword for subst-based generators
+                               if x == 'iscopy' and 'subst' in getattr(self, 'features', ''):
+                                       continue
+                               Logs.error('Fix the typo %r -> %r on %r', x, typos[x], ret)
+               return ret
+       setattr(Build.BuildContext, m, call)
+
+def enhance_lib():
+       """
+       Modifies existing classes and methods to enable error verification
+       (monkey-patches Node, Task, TaskGen, Build and ConfigSet in place)
+       """
+       for m in meths_typos:
+               replace(m)
+
+       # catch '..' in ant_glob patterns
+       def ant_glob(self, *k, **kw):
+               if k:
+                       lst = Utils.to_list(k[0])
+                       for pat in lst:
+                               sp = pat.split('/')
+                               if '..' in sp:
+                                       Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'", k[0])
+                               if '.' in sp:
+                                       Logs.error("In ant_glob pattern %r: '.' means 'one dot', not 'current directory'", k[0])
+               if kw.get('remove', True):
+                       try:
+                               if self.is_child_of(self.ctx.bldnode) and not kw.get('quiet', False):
+                                       Logs.error('Using ant_glob on the build folder (%r) is dangerous (quiet=True to disable this warning)', self)
+                       except AttributeError:
+                               pass
+               return self.old_ant_glob(*k, **kw)
+       # keep a reference to the original method so the wrapper can delegate to it
+       Node.Node.old_ant_glob = Node.Node.ant_glob
+       Node.Node.ant_glob = ant_glob
+
+       # catch conflicting ext_in/ext_out/before/after declarations
+       old = Task.is_before
+       def is_before(t1, t2):
+               ret = old(t1, t2)
+               # a pair ordered both ways indicates contradictory constraints
+               if ret and old(t2, t1):
+                       Logs.error('Contradictory order constraints in classes %r %r', t1, t2)
+               return ret
+       Task.is_before = is_before
+
+       # check for bld(feature='cshlib') where no 'c' is given - this can be either a mistake or on purpose
+       # so we only issue a warning
+       def check_err_features(self):
+               lst = self.to_list(self.features)
+               if 'shlib' in lst:
+                       Logs.error('feature shlib -> cshlib, dshlib or cxxshlib')
+               for x in ('c', 'cxx', 'd', 'fc'):
+                       if not x in lst and lst and lst[0] in [x+y for y in ('program', 'shlib', 'stlib')]:
+                               Logs.error('%r features is probably missing %r', self, x)
+       TaskGen.feature('*')(check_err_features)
+
+       # check for erroneous order constraints
+       def check_err_order(self):
+               if not hasattr(self, 'rule') and not 'subst' in Utils.to_list(self.features):
+                       for x in ('before', 'after', 'ext_in', 'ext_out'):
+                               if hasattr(self, x):
+                                       Logs.warn('Erroneous order constraint %r on non-rule based task generator %r', x, self)
+               else:
+                       for x in ('before', 'after'):
+                               for y in self.to_list(getattr(self, x, [])):
+                                       if not Task.classes.get(y, None):
+                                               Logs.error('Erroneous order constraint %s=%r on %r (no such class)', x, y, self)
+       TaskGen.feature('*')(check_err_order)
+
+       # check for @extension used with @feature/@before_method/@after_method
+       def check_compile(self):
+               check_invalid_constraints(self)
+               try:
+                       ret = self.orig_compile()
+               finally:
+                       # always run the duplicate-target check, even when the build failed
+                       check_same_targets(self)
+               return ret
+       Build.BuildContext.orig_compile = Build.BuildContext.compile
+       Build.BuildContext.compile = check_compile
+
+       # check for invalid build groups #914
+       def use_rec(self, name, **kw):
+               try:
+                       y = self.bld.get_tgen_by_name(name)
+               except Errors.WafError:
+                       # unresolved name: let the original implementation report it
+                       pass
+               else:
+                       idx = self.bld.get_group_idx(self)
+                       odx = self.bld.get_group_idx(y)
+                       # 'use' may only reference targets in the same or an earlier group
+                       if odx > idx:
+                               msg = "Invalid 'use' across build groups:"
+                               if Logs.verbose > 1:
+                                       msg += '\n  target %r\n  uses:\n  %r' % (self, y)
+                               else:
+                                       msg += " %r uses %r (try 'waf -v -v' for the full error)" % (self.name, name)
+                               raise Errors.WafError(msg)
+               self.orig_use_rec(name, **kw)
+       TaskGen.task_gen.orig_use_rec = TaskGen.task_gen.use_rec
+       TaskGen.task_gen.use_rec = use_rec
+
+       # check for env.append
+       def _getattr(self, name, default=None):
+               if name == 'append' or name == 'add':
+                       raise Errors.WafError('env.append and env.add do not exist: use env.append_value/env.append_unique')
+               elif name == 'prepend':
+                       raise Errors.WafError('env.prepend does not exist: use env.prepend_value')
+               if name in self.__slots__:
+                       # NOTE(review): object has no __getattr__ attribute; this branch looks like it
+                       # intends object.__getattribute__ and would raise AttributeError if hit - confirm upstream
+                       return object.__getattr__(self, name, default)
+               else:
+                       return self[name]
+       ConfigSet.ConfigSet.__getattr__ = _getattr
+
+
+def options(opt):
+       """
+       Error verification can be enabled by default (not just on ``waf -v``) by adding to the user script options
+       (loading this tool triggers enhance_lib() as a side effect of option parsing)
+       """
+       enhance_lib()
diff --git a/third_party/waf/waflib/Tools/fc.py b/third_party/waf/waflib/Tools/fc.py
new file mode 100644 (file)
index 0000000..5a98ce5
--- /dev/null
@@ -0,0 +1,191 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# DC 2008
+# Thomas Nagy 2016 (ita)
+
+"""
+Fortran support
+"""
+
+from waflib import Utils, Task
+from waflib.Tools import ccroot, fc_config, fc_scan
+from waflib.TaskGen import extension
+from waflib.Configure import conf
+
+# environment variables propagated to fortran compile/link tasks through 'uselib'
+ccroot.USELIB_VARS['fc'] = set(['FCFLAGS', 'DEFINES', 'INCLUDES', 'FCPPFLAGS'])
+ccroot.USELIB_VARS['fcprogram_test'] = ccroot.USELIB_VARS['fcprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
+ccroot.USELIB_VARS['fcshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
+ccroot.USELIB_VARS['fcstlib'] = set(['ARFLAGS', 'LINKDEPS'])
+
+@extension('.f','.F','.f90','.F90','.for','.FOR','.f95','.F95','.f03','.F03','.f08','.F08')
+def fc_hook(self, node):
+       "Binds the Fortran file extensions create :py:class:`waflib.Tools.fc.fc` instances"
+       return self.create_compiled_task('fc', node)
+
+@conf
+def modfile(conf, name):
+       """
+       Turns a module name into the right module file name.
+       Defaults to all lower case (controlled by ``conf.env.FC_MOD_CAPITALIZATION``).
+       """
+       # the dict key encodes name case + extension case, e.g. 'UPPER.mod'
+       return {'lower'     :name.lower() + '.mod',
+               'lower.MOD' :name.lower() + '.MOD',
+               'UPPER.mod' :name.upper() + '.mod',
+               'UPPER'     :name.upper() + '.MOD'}[conf.env.FC_MOD_CAPITALIZATION or 'lower']
+
+def get_fortran_tasks(tsk):
+       """
+       Obtains all fortran tasks from the same build group. Those tasks must not have
+       the attribute 'nomod' or 'mod_fortran_done'
+
+       :return: a list of :py:class:`waflib.Tools.fc.fc` instances
+       """
+       bld = tsk.generator.bld
+       # restrict the search to the build group of the task's generator
+       tasks = bld.get_tasks_group(bld.get_group_idx(tsk.generator))
+       return [x for x in tasks if isinstance(x, fc) and not getattr(x, 'nomod', None) and not getattr(x, 'mod_fortran_done', None)]
+
+class fc(Task.Task):
+       """
+       Fortran tasks can only run when all fortran tasks in the current group are ready to be executed
+       This may cause a deadlock if some fortran task is waiting for something that cannot happen (circular dependency)
+       Should this ever happen, set the 'nomod=True' on those tasks instances to break the loop
+       """
+       color = 'GREEN'
+       run_str = '${FC} ${FCFLAGS} ${FCINCPATH_ST:INCPATHS} ${FCDEFINES_ST:DEFINES} ${_FCMODOUTFLAGS} ${FC_TGT_F}${TGT[0].abspath()} ${FC_SRC_F}${SRC[0].abspath()} ${FCPPFLAGS}'
+       vars = ["FORTRANMODPATHFLAG"]
+
+       def scan(self):
+               """Fortran dependency scanner"""
+               tmp = fc_scan.fortran_parser(self.generator.includes_nodes)
+               tmp.task = self
+               tmp.start(self.inputs[0])
+               # returns (resolved nodes, unresolved names such as MOD@/USE@ markers)
+               return (tmp.nodes, tmp.names)
+
+       def runnable_status(self):
+               """
+               Sets the mod file outputs and the dependencies on the mod files over all Fortran tasks
+               executed by the main thread so there are no concurrency issues
+               """
+               if getattr(self, 'mod_fortran_done', None):
+                       return super(fc, self).runnable_status()
+
+               # now, if we reach this part it is because this fortran task is the first in the list
+               bld = self.generator.bld
+
+               # obtain the fortran tasks
+               lst = get_fortran_tasks(self)
+
+               # disable this method for other tasks
+               for tsk in lst:
+                       tsk.mod_fortran_done = True
+
+               # wait for all the .f tasks to be ready for execution
+               # and ensure that the scanners are called at least once
+               for tsk in lst:
+                       ret = tsk.runnable_status()
+                       if ret == Task.ASK_LATER:
+                               # we have to wait for one of the other fortran tasks to be ready
+                               # this may deadlock if there are dependencies between the fortran tasks
+                               # but this should not happen (we are setting them here!)
+                               for x in lst:
+                                       x.mod_fortran_done = None
+
+                               # TODO sort the list of tasks in bld.producer.outstanding to put all fortran tasks at the end
+                               return Task.ASK_LATER
+
+               ins = Utils.defaultdict(set)
+               outs = Utils.defaultdict(set)
+
+               # the .mod files to create
+               for tsk in lst:
+                       key = tsk.uid()
+                       for x in bld.raw_deps[key]:
+                               if x.startswith('MOD@'):
+                                       name = bld.modfile(x.replace('MOD@', ''))
+                                       node = bld.srcnode.find_or_declare(name)
+                                       tsk.set_outputs(node)
+                                       outs[id(node)].add(tsk)
+
+               # the .mod files to use
+               for tsk in lst:
+                       key = tsk.uid()
+                       for x in bld.raw_deps[key]:
+                               if x.startswith('USE@'):
+                                       name = bld.modfile(x.replace('USE@', ''))
+                                       node = bld.srcnode.find_resource(name)
+                                       # a module produced and used by the same task is not a dependency
+                                       if node and node not in tsk.outputs:
+                                               if not node in bld.node_deps[key]:
+                                                       bld.node_deps[key].append(node)
+                                               ins[id(node)].add(tsk)
+
+               # if the intersection matches, set the order
+               for k in ins.keys():
+                       for a in ins[k]:
+                               a.run_after.update(outs[k])
+
+                               # the scanner cannot output nodes, so we have to set them
+                               # ourselves as task.dep_nodes (additional input nodes)
+                               tmp = []
+                               for t in outs[k]:
+                                       tmp.extend(t.outputs)
+                               a.dep_nodes.extend(tmp)
+                               # deterministic order keeps task signatures stable across runs
+                               a.dep_nodes.sort(key=lambda x: x.abspath())
+
+               # the task objects have changed: clear the signature cache
+               for tsk in lst:
+                       try:
+                               delattr(tsk, 'cache_sig')
+                       except AttributeError:
+                               pass
+
+               return super(fc, self).runnable_status()
+
+class fcprogram(ccroot.link_task):
+       """Links Fortran programs"""
+       color = 'YELLOW'
+       run_str = '${FC} ${LINKFLAGS} ${FCLNK_SRC_F}${SRC} ${FCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FCSTLIB_MARKER} ${FCSTLIBPATH_ST:STLIBPATH} ${FCSTLIB_ST:STLIB} ${FCSHLIB_MARKER} ${FCLIBPATH_ST:LIBPATH} ${FCLIB_ST:LIB} ${LDFLAGS}'
+       # default installation directory
+       inst_to = '${BINDIR}'
+
+class fcshlib(fcprogram):
+       """Links Fortran libraries (same command line as fcprogram, different install dir)"""
+       inst_to = '${LIBDIR}'
+
+class fcstlib(ccroot.stlink_task):
+       """Links Fortran static libraries (uses ar by default)"""
+       pass # do not remove the pass statement
+
+class fcprogram_test(fcprogram):
+       """Custom link task to obtain compiler outputs for Fortran configuration tests"""
+
+       def runnable_status(self):
+               """This task is always executed"""
+               ret = super(fcprogram_test, self).runnable_status()
+               if ret == Task.SKIP_ME:
+                       ret = Task.RUN_ME
+               return ret
+
+       def exec_command(self, cmd, **kw):
+               """Stores the compiler stdout/stderr onto the build context, to bld.out + bld.err"""
+               bld = self.generator.bld
+
+               kw['shell'] = isinstance(cmd, str)
+               kw['stdout'] = kw['stderr'] = Utils.subprocess.PIPE
+               kw['cwd'] = self.get_cwd()
+               bld.out = bld.err = ''
+
+               bld.to_log('command: %s\n' % cmd)
+
+               # output=0: we capture the streams ourselves below
+               kw['output'] = 0
+               try:
+                       (bld.out, bld.err) = bld.cmd_and_log(cmd, **kw)
+               except Exception:
+                       # a failing configuration test is not fatal; report non-zero status
+                       return -1
+
+               if bld.out:
+                       bld.to_log('out: %s\n' % bld.out)
+               if bld.err:
+                       bld.to_log('err: %s\n' % bld.err)
diff --git a/third_party/waf/waflib/Tools/fc_config.py b/third_party/waf/waflib/Tools/fc_config.py
new file mode 100644 (file)
index 0000000..76a17a1
--- /dev/null
@@ -0,0 +1,477 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# DC 2008
+# Thomas Nagy 2016 (ita)
+
+"""
+Fortran configuration helpers
+"""
+
+import re, os, sys, shlex
+from waflib.Configure import conf
+from waflib.TaskGen import feature, before_method
+
+# minimal fortran programs used by the configuration tests below
+FC_FRAGMENT = '        program main\n        end     program main\n'
+FC_FRAGMENT2 = '        PROGRAM MAIN\n        END\n' # upper-case variant with a bare END; presumably friendlier to stricter/older compilers - confirm
+
+@conf
+def fc_flags(conf):
+       """
+       Defines common fortran configuration flags and file extensions
+       """
+       v = conf.env
+
+       v.FC_SRC_F    = []
+       v.FC_TGT_F    = ['-c', '-o']
+       v.FCINCPATH_ST  = '-I%s'
+       v.FCDEFINES_ST  = '-D%s'
+
+       # link with the fortran compiler unless the user set a linker explicitly
+       if not v.LINK_FC:
+               v.LINK_FC = v.FC
+
+       v.FCLNK_SRC_F = []
+       v.FCLNK_TGT_F = ['-o']
+
+       v.FCFLAGS_fcshlib   = ['-fpic']
+       v.LINKFLAGS_fcshlib = ['-shared']
+       v.fcshlib_PATTERN   = 'lib%s.so'
+
+       v.fcstlib_PATTERN   = 'lib%s.a'
+
+       v.FCLIB_ST       = '-l%s'
+       v.FCLIBPATH_ST   = '-L%s'
+       v.FCSTLIB_ST     = '-l%s'
+       v.FCSTLIBPATH_ST = '-L%s'
+       v.FCSTLIB_MARKER = '-Wl,-Bstatic'
+       v.FCSHLIB_MARKER = '-Wl,-Bdynamic'
+
+       v.SONAME_ST      = '-Wl,-h,%s'
+
+@conf
+def fc_add_flags(conf):
+       """
+       Adds FCPPFLAGS / FCFLAGS / LINKFLAGS / LDFLAGS from os.environ to conf.env
+       (dup=False avoids duplicating flags already present)
+       """
+       conf.add_os_flags('FCPPFLAGS', dup=False)
+       conf.add_os_flags('FCFLAGS', dup=False)
+       conf.add_os_flags('LINKFLAGS', dup=False)
+       conf.add_os_flags('LDFLAGS', dup=False)
+
+@conf
+def check_fortran(self, *k, **kw):
+       """
+       Compiles a Fortran program to ensure that the settings are correct
+       (raises a configuration error on failure)
+       """
+       self.check_cc(
+               fragment         = FC_FRAGMENT,
+               compile_filename = 'test.f',
+               features         = 'fc fcprogram',
+               msg              = 'Compiling a simple fortran app')
+
+@conf
+def check_fc(self, *k, **kw):
+       """
+       Same as :py:func:`waflib.Tools.c_config.check` but defaults to the *Fortran* programming language
+       (this overrides the C defaults in :py:func:`waflib.Tools.c_config.validate_c`)
+       """
+       kw['compiler'] = 'fc'
+       # fill in fortran-specific defaults without clobbering caller-provided values
+       if not 'compile_mode' in kw:
+               kw['compile_mode'] = 'fc'
+       if not 'type' in kw:
+               kw['type'] = 'fcprogram'
+       if not 'compile_filename' in kw:
+               kw['compile_filename'] = 'test.f90'
+       if not 'code' in kw:
+               kw['code'] = FC_FRAGMENT
+       return self.check(*k, **kw)
+
+# ------------------------------------------------------------------------
+# --- These are the default platform modifiers, refactored here for
+#     convenience.  gfortran and g95 have much overlap.
+# ------------------------------------------------------------------------
+
+@conf
+def fortran_modifier_darwin(conf):
+       """
+       Defines Fortran flags and extensions for OSX systems
+       """
+       v = conf.env
+       v.FCFLAGS_fcshlib   = ['-fPIC']
+       v.LINKFLAGS_fcshlib = ['-dynamiclib']
+       v.fcshlib_PATTERN   = 'lib%s.dylib'
+       v.FRAMEWORKPATH_ST  = '-F%s'
+       v.FRAMEWORK_ST      = ['-framework']
+
+       v.LINKFLAGS_fcstlib = []
+
+       # the darwin linker has no -Bstatic/-Bdynamic equivalents; clear the markers
+       v.FCSHLIB_MARKER    = ''
+       v.FCSTLIB_MARKER    = ''
+       v.SONAME_ST         = ''
+
+@conf
+def fortran_modifier_win32(conf):
+       """
+       Defines Fortran flags for Windows platforms
+       """
+       v = conf.env
+       v.fcprogram_PATTERN = v.fcprogram_test_PATTERN  = '%s.exe'
+
+       v.fcshlib_PATTERN   = '%s.dll'
+       v.implib_PATTERN    = 'lib%s.dll.a'
+       v.IMPLIB_ST         = '-Wl,--out-implib,%s'
+
+       v.FCFLAGS_fcshlib   = []
+
+       # Auto-import is enabled by default even without this option,
+       # but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
+       # that the linker emits otherwise.
+       v.append_value('LINKFLAGS', ['-Wl,--enable-auto-import'])
+
+@conf
+def fortran_modifier_cygwin(conf):
+       """
+       Defines Fortran flags for use on cygwin
+       (starts from the win32 settings, then overrides the dll naming)
+       """
+       fortran_modifier_win32(conf)
+       v = conf.env
+       v.fcshlib_PATTERN = 'cyg%s.dll'
+       v.append_value('LINKFLAGS_fcshlib', ['-Wl,--enable-auto-image-base'])
+       v.FCFLAGS_fcshlib = []
+
+# ------------------------------------------------------------------------
+
+@conf
+def check_fortran_dummy_main(self, *k, **kw):
+       """
+       Determines if a main function is needed by compiling a code snippet with
+       the C compiler and linking it with the Fortran compiler (useful on unix-like systems)
+
+       Sets ``conf.env.FC_MAIN`` to the detected symbol name, or -1 if none is needed.
+       """
+       if not self.env.CC:
+               self.fatal('A c compiler is required for check_fortran_dummy_main')
+
+       # candidate symbol names, upper then lower case, ending with '' (no dummy main)
+       lst = ['MAIN__', '__MAIN', '_MAIN', 'MAIN_', 'MAIN']
+       lst.extend([m.lower() for m in lst])
+       lst.append('')
+
+       self.start_msg('Detecting whether we need a dummy main')
+       for main in lst:
+               kw['fortran_main'] = main
+               try:
+                       self.check_cc(
+                               fragment = 'int %s() { return 0; }\n' % (main or 'test'),
+                               features = 'c fcprogram',
+                               mandatory = True
+                       )
+                       if not main:
+                               self.env.FC_MAIN = -1
+                               self.end_msg('no')
+                       else:
+                               self.env.FC_MAIN = main
+                               self.end_msg('yes %s' % main)
+                       break
+               except self.errors.ConfigurationError:
+                       # try the next candidate name
+                       pass
+       else:
+               self.end_msg('not found')
+               self.fatal('could not detect whether fortran requires a dummy main, see the config.log')
+
+# ------------------------------------------------------------------------
+
+GCC_DRIVER_LINE = re.compile('^Driving:')
+POSIX_STATIC_EXT = re.compile('\S+\.a')
+POSIX_LIB_FLAGS = re.compile('-l\S+')
+
+@conf
+def is_link_verbose(self, txt):
+       """Returns True if 'useful' link options can be found in txt"""
+       assert isinstance(txt, str)
+       for line in txt.splitlines():
+               # skip gcc driver chatter; look for .a archives or -l flags
+               if not GCC_DRIVER_LINE.search(line):
+                       if POSIX_STATIC_EXT.search(line) or POSIX_LIB_FLAGS.search(line):
+                               return True
+       return False
+
+@conf
+def check_fortran_verbose_flag(self, *k, **kw):
+       """
+       Checks what kind of verbose (-v) flag works, then sets it to env.FC_VERBOSE_FLAG
+
+       :return: the working flag (also stored in ``self.env.FC_VERBOSE_FLAG``)
+       """
+       self.start_msg('fortran link verbose flag')
+       for x in ('-v', '--verbose', '-verbose', '-V'):
+               try:
+                       self.check_cc(
+                               features = 'fc fcprogram_test',
+                               fragment = FC_FRAGMENT2,
+                               compile_filename = 'test.f',
+                               linkflags = [x],
+                               mandatory=True)
+               except self.errors.ConfigurationError:
+                       pass
+               else:
+                       # output is on stderr or stdout (for xlf)
+                       if self.is_link_verbose(self.test_bld.err) or self.is_link_verbose(self.test_bld.out):
+                               self.end_msg(x)
+                               # 'x' deliberately leaks out of the loop; it is read below
+                               break
+       else:
+               self.end_msg('failure')
+               self.fatal('Could not obtain the fortran link verbose flag (see config.log)')
+
+       self.env.FC_VERBOSE_FLAG = x
+       return x
+
+# ------------------------------------------------------------------------
+
+# linkflags which match those are ignored
+LINKFLAGS_IGNORED = [r'-lang*', r'-lcrt[a-zA-Z0-9\.]*\.o', r'-lc$', r'-lSystem', r'-libmil', r'-LIST:*', r'-LNO:*']
+if os.name == 'nt':
+       # mingw/msvc runtime libraries that must not be forwarded
+       LINKFLAGS_IGNORED.extend([r'-lfrt*', r'-luser32', r'-lkernel32', r'-ladvapi32', r'-lmsvcrt', r'-lshell32', r'-lmingw', r'-lmoldname'])
+else:
+       LINKFLAGS_IGNORED.append(r'-lgcc*')
+# compiled once at import time; used by _match_ignore below
+RLINKFLAGS_IGNORED = [re.compile(f) for f in LINKFLAGS_IGNORED]
+
+def _match_ignore(line):
+       """Returns True if the line should be ignored (Fortran verbose flag test)"""
+       for i in RLINKFLAGS_IGNORED:
+               if i.match(line):
+                       return True
+       return False
+
+def parse_fortran_link(lines):
+       """Given the output of verbose link of Fortran compiler, this returns a
+       list of flags necessary for linking using the standard linker."""
+       final_flags = []
+       for line in lines:
+               # gcc driver lines carry no usable flags
+               if not GCC_DRIVER_LINE.match(line):
+                       _parse_flink_line(line, final_flags)
+       return final_flags
+
+# options taking their argument in the NEXT token vs options with an inline argument
+SPACE_OPTS = re.compile('^-[LRuYz]$')
+NOSPACE_OPTS = re.compile('^-[RL]')
+
+def _parse_flink_token(lexer, token, tmp_flags):
+       """
+       Classifies one token of verbose link output, appending kept flags to
+       *tmp_flags*, and returns the next token from *lexer* (falsy at end of line).
+       """
+       # Here we go (convention for wildcard is shell, not regex !)
+       #   1 TODO: we first get some root .a libraries
+       #   2 TODO: take everything starting by -bI:*
+       #   3 Ignore the following flags: -lang* | -lcrt*.o | -lc |
+       #   -lgcc* | -lSystem | -libmil | -LANG:=* | -LIST:* | -LNO:*)
+       #   4 take into account -lkernel32
+       #   5 For options of the kind -[[LRuYz]], as they take one argument
+       #   after, the actual option is the next token
+       #   6 For -YP,*: take and replace by -Larg where arg is the old
+       #   argument
+       #   7 For -[lLR]*: take
+
+       # step 3
+       if _match_ignore(token):
+               pass
+       # step 4
+       elif token.startswith('-lkernel32') and sys.platform == 'cygwin':
+               tmp_flags.append(token)
+       # step 5
+       elif SPACE_OPTS.match(token):
+               t = lexer.get_token()
+               if t.startswith('P,'):
+                       t = t[2:]
+               # a -YP argument is a path list; emit one -L per entry
+               for opt in t.split(os.pathsep):
+                       tmp_flags.append('-L%s' % opt)
+       # step 6
+       elif NOSPACE_OPTS.match(token):
+               tmp_flags.append(token)
+       # step 7
+       elif POSIX_LIB_FLAGS.match(token):
+               tmp_flags.append(token)
+       else:
+               # ignore anything not explicitely taken into account
+               pass
+
+       t = lexer.get_token()
+       return t
+
def _parse_flink_line(line, final_flags):
	"""Tokenizes one verbose-link line and extends *final_flags* in place (private)"""
	lexer = shlex.shlex(line, posix=True)
	lexer.whitespace_split = True

	tmp_flags = []
	token = lexer.get_token()
	while token:
		token = _parse_flink_token(lexer, token, tmp_flags)
	final_flags.extend(tmp_flags)
	return final_flags
+
@conf
def check_fortran_clib(self, autoadd=True, *k, **kw):
	"""
	Obtains the flags for linking with the C library
	if this check works, add uselib='CLIB' to your task generators

	:return: the link flags found (also stored in env.LINKFLAGS_CLIB),
		or an empty list when the check fails non-mandatorily
	"""
	if not self.env.FC_VERBOSE_FLAG:
		self.fatal('env.FC_VERBOSE_FLAG is not set: execute check_fortran_verbose_flag?')

	self.start_msg('Getting fortran runtime link flags')
	try:
		self.check_cc(
			fragment = FC_FRAGMENT2,
			compile_filename = 'test.f',
			features = 'fc fcprogram_test',
			linkflags = [self.env.FC_VERBOSE_FLAG]
		)
	except Exception:
		self.end_msg(False)
		if kw.get('mandatory', True):
			# bugfix: this used to call conf.fatal, but 'conf' here is the
			# imported decorator, not the configuration context
			self.fatal('Could not find the c library flags')
	else:
		# the verbose flags are printed on stderr by the compiler driver
		out = self.test_bld.err
		flags = parse_fortran_link(out.splitlines())
		self.end_msg('ok (%s)' % ' '.join(flags))
		self.env.LINKFLAGS_CLIB = flags
		return flags
	return []
+
def getoutput(conf, cmd, stdin=False):
	"""
	Obtains Fortran command outputs

	:param conf: configuration context providing ``cmd_and_log``
	:param cmd: command to execute (list of strings)
	:param stdin: when True, feed a single newline on standard input
	:return: tuple (stdout, stderr)
	"""
	from waflib import Errors
	if conf.env.env:
		env = conf.env.env
	else:
		# force a stable locale so version banners are parseable
		env = dict(os.environ)
		env['LANG'] = 'C'
	stdin_data = '\n' if stdin else None
	try:
		out, err = conf.cmd_and_log(cmd, env=env, output=0, input=stdin_data)
	except Errors.WafError as e:
		# bugfix: "except Errors.WafError ,e:" was Python-2-only syntax.
		# A WafError might indicate an error code during the command
		# execution, in this case we still obtain the stderr and stdout,
		# which we can use to find the version string.
		if not (hasattr(e, 'stderr') and hasattr(e, 'stdout')):
			raise
		else:
			# Ignore the return code and return the original
			# stdout and stderr.
			out = e.stdout
			err = e.stderr
	except Exception:
		conf.fatal('could not determine the compiler version %r' % cmd)
	return (out, err)
+
+# ------------------------------------------------------------------------
+
# Fortran fixed-form source defining two dummy subroutines; the leading
# spaces are significant (statements must start in column 7)
ROUTINES_CODE = """\
      subroutine foobar()
      return
      end
      subroutine foo_bar()
      return
      end
"""

# C driver template; the %(...)s placeholders are filled with the mangled
# subroutine names and the detected main symbol (see check_fortran_mangling)
MAIN_CODE = """
void %(dummy_func_nounder)s(void);
void %(dummy_func_under)s(void);
int %(main_func_name)s() {
  %(dummy_func_nounder)s();
  %(dummy_func_under)s();
  return 0;
}
"""
+
@feature('link_main_routines_func')
@before_method('process_source')
def link_main_routines_tg_method(self):
	"""
	The configuration test declares a unique task generator,
	so we create other task generators from there for fortran link tests
	"""
	def _emit_code(task):
		# write the code attached to the generator into the output node
		task.outputs[0].write(task.generator.code)

	bld = self.bld
	bld(rule=_emit_code, target='main.c', code=MAIN_CODE % self.__dict__)
	bld(rule=_emit_code, target='test.f', code=ROUTINES_CODE)
	bld(features='fc fcstlib', source='test.f', target='test')
	bld(features='c fcprogram', source='main.c', target='app', use='test')
+
def mangling_schemes():
	"""
	Generate triplets for use with mangle_name
	(used in check_fortran_mangling)
	the order is tuned for gfortran
	"""
	for under in ('_', ''):
		for double_under in ('', '_'):
			for case in ('lower', 'upper'):
				yield (under, double_under, case)
+
def mangle_name(u, du, c, name):
	"""Mangle a name from a triplet (used in check_fortran_mangling)"""
	mangled = getattr(name, c)() + u
	# names containing an underscore get the extra suffix
	if '_' in name:
		mangled += du
	return mangled
+
@conf
def check_fortran_mangling(self, *k, **kw):
	"""
	Detect the mangling scheme, sets FORTRAN_MANGLING to the triplet found

	This test will compile a fortran static library, then link a c app against it
	"""
	if not self.env.CC:
		self.fatal('A c compiler is required for link_main_routines')
	if not self.env.FC:
		self.fatal('A fortran compiler is required for link_main_routines')
	if not self.env.FC_MAIN:
		self.fatal('Checking for mangling requires self.env.FC_MAIN (execute "check_fortran_dummy_main" first?)')

	self.start_msg('Getting fortran mangling scheme')
	for (u, du, c) in mangling_schemes():
		try:
			self.check_cc(
				compile_filename   = [],
				features           = 'link_main_routines_func',
				msg                = 'nomsg',
				errmsg             = 'nomsg',
				dummy_func_nounder = mangle_name(u, du, c, 'foobar'),
				dummy_func_under   = mangle_name(u, du, c, 'foo_bar'),
				main_func_name     = self.env.FC_MAIN
			)
		except self.errors.ConfigurationError:
			continue
		# the first scheme that compiles and links wins
		self.end_msg("ok ('%s', '%s', '%s-case')" % (u, du, c))
		self.env.FORTRAN_MANGLING = (u, du, c)
		break
	else:
		self.end_msg(False)
		self.fatal('mangler not found')
	return (u, du, c)
+
@feature('pyext')
@before_method('propagate_uselib_vars', 'apply_link')
def set_lib_pat(self):
	"""Sets the Fortran flags for linking with Python"""
	env = self.env
	env.fcshlib_PATTERN = env.pyext_PATTERN
+
@conf
def detect_openmp(self):
	"""
	Detects openmp flags and sets the OPENMP ``FCFLAGS``/``LINKFLAGS``
	"""
	candidates = ('-qopenmp', '-fopenmp', '-openmp', '-mp', '-xopenmp', '-omp', '-qsmp=omp')
	fragment = 'program main\n  call omp_get_num_threads()\nend program main'
	for flag in candidates:
		try:
			self.check_fc(
				msg          = 'Checking for OpenMP flag %s' % flag,
				fragment     = fragment,
				fcflags      = flag,
				linkflags    = flag,
				uselib_store = 'OPENMP'
			)
		except self.errors.ConfigurationError:
			continue
		break
	else:
		self.fatal('Could not find OpenMP')
diff --git a/third_party/waf/waflib/Tools/fc_scan.py b/third_party/waf/waflib/Tools/fc_scan.py
new file mode 100644 (file)
index 0000000..5e44126
--- /dev/null
@@ -0,0 +1,117 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# DC 2008
+# Thomas Nagy 2016 (ita)
+
+import re
+
# Raw strings: the patterns use \s, \w etc. which would otherwise be
# deprecated invalid escape sequences on recent Python versions
INC_REGEX = r"""(?:^|['">]\s*;)\s*(?:|#\s*)INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])"""
USE_REGEX = r"""(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
MOD_REGEX = r"""(?:^|;)\s*MODULE(?!\s*PROCEDURE)(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""

re_inc = re.compile(INC_REGEX, re.I)
re_use = re.compile(USE_REGEX, re.I)
re_mod = re.compile(MOD_REGEX, re.I)

class fortran_parser(object):
	"""
	This parser returns:

	* the nodes corresponding to the module names to produce
	* the nodes corresponding to the include files used
	* the module names used by the fortran files
	"""
	def __init__(self, incpaths):
		# names of include files already parsed (avoids re-scanning)
		self.seen = []
		# waflib.Node.Node objects representing the dependencies to return
		self.nodes = []
		# module names (and unresolved includes) to return
		self.names = []
		# waflib.Node.Node objects representing the include paths
		self.incpaths = incpaths

	def find_deps(self, node):
		"""
		Parses a Fortran file to obtain the dependencies used/provided

		:param node: fortran file to read
		:type node: :py:class:`waflib.Node.Node`
		:return: lists representing the includes, the modules used, and the modules created by a fortran file
		:rtype: tuple of list of strings
		"""
		txt = node.read()
		incs = []
		uses = []
		mods = []
		# line-by-line regexp search; simple but adequate for config scans
		for line in txt.splitlines():
			m = re_inc.search(line)
			if m:
				incs.append(m.group(1))
			m = re_use.search(line)
			if m:
				uses.append(m.group(1))
			m = re_mod.search(line)
			if m:
				mods.append(m.group(1))
		return (incs, uses, mods)

	def start(self, node):
		"""
		Start parsing. Use the stack ``self.waiting`` to hold nodes to iterate on

		:param node: fortran file
		:type node: :py:class:`waflib.Node.Node`
		"""
		self.waiting = [node]
		while self.waiting:
			nd = self.waiting.pop(0)
			self.iter(nd)

	def iter(self, node):
		"""
		Processes a single file during dependency parsing. Extracts files used
		modules used and modules provided.
		"""
		incs, uses, mods = self.find_deps(node)
		for x in incs:
			if x in self.seen:
				continue
			self.seen.append(x)
			self.tryfind_header(x)

		# prefix use/provide so the two namespaces cannot collide
		for x in uses:
			name = "USE@%s" % x
			if name not in self.names:
				self.names.append(name)

		for x in mods:
			name = "MOD@%s" % x
			if name not in self.names:
				self.names.append(name)

	def tryfind_header(self, filename):
		"""
		Adds an include file to the list of nodes to process

		:param filename: file name
		:type filename: string
		"""
		found = None
		for n in self.incpaths:
			found = n.find_resource(filename)
			if found:
				self.nodes.append(found)
				self.waiting.append(found)
				break
		if not found:
			# unresolved include: report the bare name instead of a node
			if filename not in self.names:
				self.names.append(filename)
diff --git a/third_party/waf/waflib/Tools/flex.py b/third_party/waf/waflib/Tools/flex.py
new file mode 100644 (file)
index 0000000..1a115d3
--- /dev/null
@@ -0,0 +1,64 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# John O'Meara, 2006
+# Thomas Nagy, 2006-2016 (ita)
+
+"""
+The **flex** program is a code generator which creates C or C++ files.
+The generated files are compiled into object files.
+"""
+
+import os, re
+from waflib import Task, TaskGen
+from waflib.Tools import ccroot
+
def decide_ext(self, node):
	"""Chooses the generated file extension: C++ when the task generator
	has the 'cxx' feature, plain C otherwise."""
	return ['.lex.cc'] if 'cxx' in self.features else ['.lex.c']
+
def flexfun(tsk):
	"""Runs *flex* and captures its stdout into the first output node (issue #854)."""
	env = tsk.env
	bld = tsk.generator.bld
	wd = bld.variant_dir

	def _listify(val):
		return [val] if isinstance(val, str) else val

	# keep a reference on the task for error reporting
	lst = []
	tsk.last_cmd = lst
	lst.extend(_listify(env.FLEX))
	lst.extend(_listify(env.FLEXFLAGS))
	inputs = [a.path_from(tsk.get_cwd()) for a in tsk.inputs]
	if env.FLEX_MSYS:
		# the MSYS flex wants POSIX-style paths
		inputs = [p.replace(os.sep, '/') for p in inputs]
	lst.extend(inputs)
	cmd = [arg for arg in lst if arg]
	txt = bld.cmd_and_log(cmd, cwd=wd, env=env.env or None, quiet=0)
	# normalize line endings before writing (issue #1207)
	tsk.outputs[0].write(txt.replace('\r\n', '\n').replace('\r', '\n'))
+
# Map *.l files to the flex task; the output extension depends on whether
# the task generator also compiles C++ (see decide_ext)
TaskGen.declare_chain(
	name = 'flex',
	rule = flexfun, # issue #854
	ext_in = '.l',
	decider = decide_ext,
)

# To support the following:
# bld(features='c', flexflags='-P/foo')
Task.classes['flex'].vars = ['FLEXFLAGS', 'FLEX']
ccroot.USELIB_VARS['c'].add('FLEXFLAGS')
ccroot.USELIB_VARS['cxx'].add('FLEXFLAGS')
+
def configure(conf):
	"""
	Detect the *flex* program
	"""
	conf.find_program('flex', var='FLEX')
	conf.env.FLEXFLAGS = ['-t']

	# the flex shipped with MSYS needs POSIX-style input paths (see flexfun)
	msys_location = r"\\msys\\[0-9.]+\\bin\\flex.exe$"
	if re.search(msys_location, conf.env.FLEX[0]):
		conf.env.FLEX_MSYS = True
diff --git a/third_party/waf/waflib/Tools/g95.py b/third_party/waf/waflib/Tools/g95.py
new file mode 100644 (file)
index 0000000..33e7dfc
--- /dev/null
@@ -0,0 +1,69 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# KWS 2010
+# Thomas Nagy 2016 (ita)
+
+import re
+from waflib import Utils
+from waflib.Tools import fc, fc_config, fc_scan, ar
+from waflib.Configure import conf
+
@conf
def find_g95(conf):
	"""Finds the g95 compiler, stores it as FC and records its version."""
	compiler = conf.find_program('g95', var='FC')
	conf.get_g95_version(compiler)
	conf.env.FC_NAME = 'G95'
+
@conf
def g95_flags(conf):
	"""Defines the g95-specific compilation and shared-library flags."""
	v = conf.env
	v.FCFLAGS_fcshlib   = ['-fPIC']
	v.FORTRANMODFLAG  = ['-fmod=', ''] # template for module path
	v.FCFLAGS_DEBUG = ['-Werror'] # why not
+
@conf
def g95_modifier_win32(conf):
	"""Configuration flags for g95 on Windows (delegates to the generic Fortran modifier)"""
	fc_config.fortran_modifier_win32(conf)
+
@conf
def g95_modifier_cygwin(conf):
	"""Configuration flags for g95 on Cygwin (delegates to the generic Fortran modifier)"""
	fc_config.fortran_modifier_cygwin(conf)
+
@conf
def g95_modifier_darwin(conf):
	"""Configuration flags for g95 on MacOS (delegates to the generic Fortran modifier)"""
	fc_config.fortran_modifier_darwin(conf)
+
@conf
def g95_modifier_platform(conf):
	"""Applies the g95_modifier_* method matching the destination OS, if any."""
	dest_os = conf.env.DEST_OS or Utils.unversioned_sys_platform()
	modifier = getattr(conf, 'g95_modifier_' + dest_os, None)
	if modifier:
		modifier()
+
@conf
def get_g95_version(conf, fc):
	"""get the compiler version"""
	version_re = re.compile(r"g95\s*(?P<major>\d*)\.(?P<minor>\d*)").search
	# the banner may land on stdout or stderr depending on the platform
	out, err = fc_config.getoutput(conf, fc + ['--version'], stdin=False)
	match = version_re(out) if out else version_re(err)
	if not match:
		conf.fatal('cannot determine g95 version')
	version = match.groupdict()
	conf.env.FC_VERSION = (version['major'], version['minor'])
+
def configure(conf):
	"""Finds g95 and sets up the Fortran build flags for it."""
	conf.find_g95()
	conf.find_ar()
	conf.fc_flags()
	conf.fc_add_flags()
	conf.g95_flags()
	conf.g95_modifier_platform()
diff --git a/third_party/waf/waflib/Tools/gas.py b/third_party/waf/waflib/Tools/gas.py
new file mode 100644 (file)
index 0000000..f1b648e
--- /dev/null
@@ -0,0 +1,22 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2008-2016 (ita)
+
+"Detect as/gas/gcc for compiling assembly files"
+
+import waflib.Tools.asm # - leave this
+from waflib.Tools import ar
+
def configure(conf):
	"""
	Find the programs gas/as/gcc and set the variable *AS*
	"""
	# gcc works as an assembler driver, so it is an acceptable fallback
	conf.find_program(['gas', 'gcc'], var='AS')
	conf.env.AS_TGT_F = ['-c', '-o']
	conf.env.ASLNK_TGT_F = ['-o']
	conf.find_ar()
	conf.load('asm')
diff --git a/third_party/waf/waflib/Tools/gcc.py b/third_party/waf/waflib/Tools/gcc.py
new file mode 100644 (file)
index 0000000..78f2867
--- /dev/null
@@ -0,0 +1,158 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2016 (ita)
+# Ralf Habacker, 2006 (rh)
+# Yinon Ehrlich, 2009
+
+"""
+gcc/llvm detection.
+"""
+
+from waflib.Tools import ccroot, ar
+from waflib.Configure import conf
+
@conf
def find_gcc(conf):
	"""
	Find the program gcc, and if present, try to detect its version number
	"""
	compiler = conf.find_program(['gcc', 'cc'], var='CC')
	conf.get_cc_version(compiler, gcc=True)
	conf.env.CC_NAME = 'gcc'
+
@conf
def gcc_common_flags(conf):
	"""
	Common flags for gcc on nearly all platforms
	"""
	v = conf.env

	v.CC_SRC_F            = []
	v.CC_TGT_F            = ['-c', '-o']

	# default the linker to the compiler driver unless already set
	if not v.LINK_CC:
		v.LINK_CC = v.CC

	v.CCLNK_SRC_F         = []
	v.CCLNK_TGT_F         = ['-o']
	v.CPPPATH_ST          = '-I%s'
	v.DEFINES_ST          = '-D%s'

	v.LIB_ST              = '-l%s' # template for adding libs
	v.LIBPATH_ST          = '-L%s' # template for adding libpaths
	v.STLIB_ST            = '-l%s'
	v.STLIBPATH_ST        = '-L%s'
	v.RPATH_ST            = '-Wl,-rpath,%s'

	v.SONAME_ST           = '-Wl,-h,%s'
	v.SHLIB_MARKER        = '-Wl,-Bdynamic'
	v.STLIB_MARKER        = '-Wl,-Bstatic'

	# output file name templates (overridden per-platform by the modifiers)
	v.cprogram_PATTERN    = '%s'

	v.CFLAGS_cshlib       = ['-fPIC']
	v.LINKFLAGS_cshlib    = ['-shared']
	v.cshlib_PATTERN      = 'lib%s.so'

	v.LINKFLAGS_cstlib    = ['-Wl,-Bstatic']
	v.cstlib_PATTERN      = 'lib%s.a'

	v.LINKFLAGS_MACBUNDLE = ['-bundle', '-undefined', 'dynamic_lookup']
	v.CFLAGS_MACBUNDLE    = ['-fPIC']
	v.macbundle_PATTERN   = '%s.bundle'
+
@conf
def gcc_modifier_win32(conf):
	"""Configuration flags for executing gcc on Windows"""
	v = conf.env
	v.cprogram_PATTERN    = '%s.exe'

	v.cshlib_PATTERN      = '%s.dll'
	v.implib_PATTERN      = 'lib%s.dll.a'
	v.IMPLIB_ST           = '-Wl,--out-implib,%s'

	# -fPIC is meaningless on Windows PE binaries
	v.CFLAGS_cshlib       = []

	# Auto-import is enabled by default even without this option,
	# but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
	# that the linker emits otherwise.
	v.append_value('LINKFLAGS', ['-Wl,--enable-auto-import'])
+
@conf
def gcc_modifier_cygwin(conf):
	"""Configuration flags for executing gcc on Cygwin"""
	# start from the win32 settings, then apply cygwin-specific naming
	gcc_modifier_win32(conf)
	v = conf.env
	v.cshlib_PATTERN = 'cyg%s.dll'
	v.append_value('LINKFLAGS_cshlib', ['-Wl,--enable-auto-image-base'])
	v.CFLAGS_cshlib = []
+
@conf
def gcc_modifier_darwin(conf):
	"""Configuration flags for executing gcc on MacOS"""
	v = conf.env
	v.CFLAGS_cshlib       = ['-fPIC']
	v.LINKFLAGS_cshlib    = ['-dynamiclib']
	v.cshlib_PATTERN      = 'lib%s.dylib'
	v.FRAMEWORKPATH_ST    = '-F%s'
	v.FRAMEWORK_ST        = ['-framework']
	v.ARCH_ST             = ['-arch']

	v.LINKFLAGS_cstlib    = []

	# the Darwin linker does not use -Bstatic/-Bdynamic or -h
	v.SHLIB_MARKER        = []
	v.STLIB_MARKER        = []
	v.SONAME_ST           = []
+
@conf
def gcc_modifier_aix(conf):
	"""Configuration flags for executing gcc on AIX"""
	v = conf.env
	v.LINKFLAGS_cprogram  = ['-Wl,-brtl']
	v.LINKFLAGS_cshlib    = ['-shared','-Wl,-brtl,-bexpfull']
	v.SHLIB_MARKER        = []
+
@conf
def gcc_modifier_hpux(conf):
	"""Configuration flags for executing gcc on HP-UX"""
	v = conf.env
	v.SHLIB_MARKER        = []
	v.STLIB_MARKER        = []
	v.CFLAGS_cshlib       = ['-fPIC','-DPIC']
	v.cshlib_PATTERN      = 'lib%s.sl'
+
@conf
def gcc_modifier_openbsd(conf):
	"""Configuration flags for executing gcc on OpenBSD"""
	conf.env.SONAME_ST = []
+
@conf
def gcc_modifier_osf1V(conf):
	"""Configuration flags for executing gcc on OSF1"""
	v = conf.env
	v.SHLIB_MARKER        = []
	v.STLIB_MARKER        = []
	v.SONAME_ST           = []
+
@conf
def gcc_modifier_platform(conf):
	"""Execute platform-specific functions based on *gcc_modifier_+NAME*"""
	# the destination platform is detected automatically by looking at the
	# macros the compiler predefines, with sys.platform as the fallback
	modifier = getattr(conf, 'gcc_modifier_' + conf.env.DEST_OS, None)
	if modifier:
		modifier()
+
def configure(conf):
	"""
	Configuration for gcc
	"""
	conf.find_gcc()
	conf.find_ar()
	conf.gcc_common_flags()
	conf.gcc_modifier_platform()
	conf.cc_load_tools()
	conf.cc_add_flags()
	conf.link_add_flags()
diff --git a/third_party/waf/waflib/Tools/gdc.py b/third_party/waf/waflib/Tools/gdc.py
new file mode 100644 (file)
index 0000000..a9d1d20
--- /dev/null
@@ -0,0 +1,58 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Carlos Rafael Giani, 2007 (dv)
+
+from waflib.Tools import ar, d
+from waflib.Configure import conf
+
@conf
def find_gdc(conf):
	"""
	Finds the program gdc and set the variable *D*
	"""
	conf.find_program('gdc', var='D')

	# guard against another D compiler masquerading as gdc
	banner = conf.cmd_and_log(conf.env.D + ['--version'])
	if 'gdc' not in banner:
		conf.fatal("detected compiler is not gdc")
+
@conf
def common_flags_gdc(conf):
	"""
	Sets the flags required by *gdc*
	"""
	v = conf.env

	v.DFLAGS            = []

	v.D_SRC_F           = ['-c']
	v.D_TGT_F           = '-o%s'

	# gdc drives the link step itself
	v.D_LINKER          = v.D
	v.DLNK_SRC_F        = ''
	v.DLNK_TGT_F        = '-o%s'
	v.DINC_ST           = '-I%s'

	v.DSHLIB_MARKER = v.DSTLIB_MARKER = ''
	v.DSTLIB_ST = v.DSHLIB_ST         = '-l%s'
	v.DSTLIBPATH_ST = v.DLIBPATH_ST   = '-L%s'

	v.LINKFLAGS_dshlib  = ['-shared']

	# D interface-file generation
	v.DHEADER_ext       = '.di'
	v.DFLAGS_d_with_header = '-fintfc'
	v.D_HDR_F           = '-fintfc-file=%s'
+
def configure(conf):
	"""
	Configuration for gdc
	"""
	conf.find_gdc()
	conf.load('ar')
	conf.load('d')
	conf.common_flags_gdc()
	conf.d_platform_flags()
diff --git a/third_party/waf/waflib/Tools/gfortran.py b/third_party/waf/waflib/Tools/gfortran.py
new file mode 100644 (file)
index 0000000..09eeee8
--- /dev/null
@@ -0,0 +1,94 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# DC 2008
+# Thomas Nagy 2016 (ita)
+
+import re
+from waflib import Utils
+from waflib.Tools import fc, fc_config, fc_scan, ar
+from waflib.Configure import conf
+
@conf
def find_gfortran(conf):
	"""Find the gfortran program (will look in the environment variable 'FC')"""
	# g77 is the fallback for systems where no gfortran is available
	compiler = conf.find_program(['gfortran', 'g77'], var='FC')
	conf.get_gfortran_version(compiler)
	conf.env.FC_NAME = 'GFORTRAN'
+
@conf
def gfortran_flags(conf):
	"""Defines the gfortran-specific compilation and shared-library flags."""
	v = conf.env
	v.FCFLAGS_fcshlib = ['-fPIC']
	v.FORTRANMODFLAG = ['-J', ''] # template for module path
	v.FCFLAGS_DEBUG = ['-Werror'] # why not
+
@conf
def gfortran_modifier_win32(conf):
	"""Configuration flags for gfortran on Windows (delegates to the generic Fortran modifier)"""
	fc_config.fortran_modifier_win32(conf)
+
@conf
def gfortran_modifier_cygwin(conf):
	"""Configuration flags for gfortran on Cygwin (delegates to the generic Fortran modifier)"""
	fc_config.fortran_modifier_cygwin(conf)
+
@conf
def gfortran_modifier_darwin(conf):
	"""Configuration flags for gfortran on MacOS (delegates to the generic Fortran modifier)"""
	fc_config.fortran_modifier_darwin(conf)
+
@conf
def gfortran_modifier_platform(conf):
	"""Applies the gfortran_modifier_* method matching the destination OS, if any."""
	dest_os = conf.env.DEST_OS or Utils.unversioned_sys_platform()
	modifier = getattr(conf, 'gfortran_modifier_' + dest_os, None)
	if modifier:
		modifier()
+
@conf
def get_gfortran_version(conf, fc):
	"""
	Get the compiler version

	:param fc: compiler command (list of strings)
	:raises: a configuration error when the compiler is not gfortran/gcc-based
	"""
	# ensure this is actually gfortran, not an imposter.
	version_re = re.compile(r"GNU\s*Fortran", re.I).search
	cmd = fc + ['--version']
	out, err = fc_config.getoutput(conf, cmd, stdin=False)
	if out:
		match = version_re(out)
	else:
		match = version_re(err)
	if not match:
		conf.fatal('Could not determine the compiler type')

	# now get more detailed info -- see c_config.get_cc_version:
	# dump the predefined macros and read the gcc version triplet
	import shlex
	cmd = fc + ['-dM', '-E', '-']
	out, err = fc_config.getoutput(conf, cmd, stdin=True)

	if out.find('__GNUC__') < 0:
		conf.fatal('Could not determine the compiler type')

	# each line is of the form "#define NAME VALUE"
	macros = {}
	for line in out.splitlines():
		lst = shlex.split(line)
		if len(lst) > 2:
			macros[lst[1]] = lst[2]

	# NOTE: the unused helper functions isD/isT were removed (dead code)
	conf.env.FC_VERSION = (macros['__GNUC__'], macros['__GNUC_MINOR__'], macros['__GNUC_PATCHLEVEL__'])
+
def configure(conf):
	"""Finds gfortran and sets up the Fortran build flags for it."""
	conf.find_gfortran()
	conf.find_ar()
	conf.fc_flags()
	conf.fc_add_flags()
	conf.gfortran_flags()
	conf.gfortran_modifier_platform()
diff --git a/third_party/waf/waflib/Tools/glib2.py b/third_party/waf/waflib/Tools/glib2.py
new file mode 100644 (file)
index 0000000..18d6f04
--- /dev/null
@@ -0,0 +1,492 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2016 (ita)
+
+"""
+Support for GLib2 tools:
+
+* marshal
+* enums
+* gsettings
+* gresource
+"""
+
+import os
+import functools
+from waflib import Context, Task, Utils, Options, Errors, Logs
+from waflib.TaskGen import taskgen_method, before_method, feature, extension
+from waflib.Configure import conf
+
+################## marshal files
+
@taskgen_method
def add_marshal_file(self, filename, prefix):
	"""
	Registers a marshal file for processing; entries accumulate in *marshal_list*.

	:param filename: xml file to compile
	:type filename: string
	:param prefix: marshal prefix (--prefix=prefix)
	:type prefix: string
	"""
	try:
		self.marshal_list.append((filename, prefix))
	except AttributeError:
		self.marshal_list = [(filename, prefix)]
	self.meths.append('process_marshal')
+
@before_method('process_source')
def process_marshal(self):
	"""
	Processes the marshal files stored in the attribute *marshal_list* to create
	:py:class:`waflib.Tools.glib2.glib_genmarshal` instances.
	Adds every generated C file to the list of sources to process.
	"""
	c_nodes = []
	for f, prefix in getattr(self, 'marshal_list', []):
		node = self.path.find_resource(f)

		if not node:
			raise Errors.WafError('file not found %r' % f)

		h_node = node.change_ext('.h')
		c_node = node.change_ext('.c')

		task = self.create_task('glib_genmarshal', node, [h_node, c_node])
		task.env.GLIB_GENMARSHAL_PREFIX = prefix
		c_nodes.append(c_node)

	self.source = self.to_nodes(getattr(self, 'source', []))
	# bugfix: the original appended only the last c_node (losing sources when
	# several marshal files were registered) and raised NameError on an empty list
	self.source.extend(c_nodes)
+
class glib_genmarshal(Task.Task):
	"""Generates the marshal header and body files with glib-genmarshal."""
	vars    = ['GLIB_GENMARSHAL_PREFIX', 'GLIB_GENMARSHAL']
	color   = 'BLUE'
	ext_out = ['.h']

	def run(self):
		bld = self.generator.bld
		get = self.env.get_flat
		header_node, body_node = self.outputs

		# write the header first
		ret = bld.exec_command("%s %s --prefix=%s --header > %s" % (
			get('GLIB_GENMARSHAL'),
			self.inputs[0].srcpath(),
			get('GLIB_GENMARSHAL_PREFIX'),
			header_node.abspath()
		))
		if ret:
			return ret

		# the body must include the generated header
		body_node.write('''#include "%s"\n''' % header_node.name)

		return bld.exec_command("%s %s --prefix=%s --body >> %s" % (
			get('GLIB_GENMARSHAL'),
			self.inputs[0].srcpath(),
			get('GLIB_GENMARSHAL_PREFIX'),
			body_node.abspath()
		))
+
+########################## glib-mkenums
+
@taskgen_method
def add_enums_from_template(self, source='', target='', template='', comments=''):
	"""
	Registers an enum file to process through a mkenums template.
	Entries accumulate in the attribute *enums_list*.

	:param source: enum file to process
	:type source: string
	:param target: target file
	:type target: string
	:param template: template file
	:type template: string
	:param comments: comments
	:type comments: string
	"""
	if not hasattr(self, 'enums_list'):
		self.enums_list = []
	self.meths.append('process_enums')
	# the insertion order of the keys below matches add_enums()
	entry = {'source': source, 'target': target, 'template': template}
	for key in ('file-head', 'file-prod', 'file-tail', 'enum-prod',
	            'value-head', 'value-prod', 'value-tail'):
		entry[key] = ''
	entry['comments'] = comments
	self.enums_list.append(entry)
+
@taskgen_method
def add_enums(self, source='', target='',
              file_head='', file_prod='', file_tail='', enum_prod='',
              value_head='', value_prod='', value_tail='', comments=''):
	"""
	Registers an enum file to process without a template.
	Entries accumulate in the attribute *enums_list*.

	:param source: enum file to process
	:type source: string
	:param target: target file
	:type target: string
	:param file_head: unused
	:param file_prod: unused
	:param file_tail: unused
	:param enum_prod: unused
	:param value_head: unused
	:param value_prod: unused
	:param value_tail: unused
	:param comments: comments
	:type comments: string
	"""
	if not hasattr(self, 'enums_list'):
		self.enums_list = []
	self.meths.append('process_enums')
	self.enums_list.append({
		'source'     : source,
		'template'   : '',
		'target'     : target,
		'file-head'  : file_head,
		'file-prod'  : file_prod,
		'file-tail'  : file_tail,
		'enum-prod'  : enum_prod,
		'value-head' : value_head,
		'value-prod' : value_prod,
		'value-tail' : value_tail,
		'comments'   : comments,
	})
+
@before_method('process_source')
def process_enums(self):
	"""
	Turns each entry of *enums_list* into a :py:class:`waflib.Tools.glib2.glib_mkenums` task.
	Generated ``.c`` targets are also added to the sources of this task generator.
	"""
	for spec in getattr(self, 'enums_list', []):
		task = self.create_task('glib_mkenums')
		env = task.env

		# resolve the source file(s)
		names = self.to_list(spec['source'])
		if not names:
			raise Errors.WafError('missing source ' + str(spec))
		src_nodes = [self.path.find_resource(k) for k in names]
		dep_nodes = list(src_nodes)
		env.GLIB_MKENUMS_SOURCE = [k.abspath() for k in src_nodes]

		# resolve the target file
		if not spec['target']:
			raise Errors.WafError('missing target ' + str(spec))
		tgt_node = self.path.find_or_declare(spec['target'])
		if tgt_node.name.endswith('.c'):
			self.source.append(tgt_node)
		env.GLIB_MKENUMS_TARGET = tgt_node.abspath()

		options = []
		if spec['template']: # template, if provided
			template_node = self.path.find_resource(spec['template'])
			options.append('--template %s' % (template_node.abspath()))
			dep_nodes.append(template_node)
		params = {'file-head' : '--fhead',
			   'file-prod' : '--fprod',
			   'file-tail' : '--ftail',
			   'enum-prod' : '--eprod',
			   'value-head' : '--vhead',
			   'value-prod' : '--vprod',
			   'value-tail' : '--vtail',
			   'comments': '--comments'}
		for param, option in params.items():
			if spec[param]:
				options.append('%s %r' % (option, spec[param]))

		env.GLIB_MKENUMS_OPTIONS = ' '.join(options)

		# update the task instance
		task.set_inputs(dep_nodes)
		task.set_outputs(tgt_node)
+
class glib_mkenums(Task.Task):
	"""
	Runs glib-mkenums on the registered enum description files
	"""
	color   = 'PINK'
	ext_out = ['.h']
	run_str = '${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}'
+
+######################################### gsettings
+
@taskgen_method
def add_settings_schemas(self, filename_list):
	"""
	Appends gsettings schema files to *settings_schema_files*

	:param filename_list: files
	:type filename_list: list of string
	"""
	if not isinstance(filename_list, list):
		filename_list = [filename_list]
	try:
		self.settings_schema_files.extend(filename_list)
	except AttributeError:
		self.settings_schema_files = list(filename_list)
+
@taskgen_method
def add_settings_enums(self, namespace, filename_list):
	"""
	Called only once by task generator to set the enums namespace.

	:param namespace: namespace
	:type namespace: string
	:param filename_list: enum files to process
	:type filename_list: file list
	"""
	if hasattr(self, 'settings_enum_namespace'):
		raise Errors.WafError("Tried to add gsettings enums to %r more than once" % self.name)
	self.settings_enum_namespace = namespace

	# bugfix: the original compared ``type(filename_list) != 'list'`` (a type
	# object against the string 'list'), which is always true and therefore
	# wrapped an already-list argument in another list
	if not isinstance(filename_list, list):
		filename_list = [filename_list]
	self.settings_enum_files = filename_list
+
@feature('glib2')
def process_settings(self):
	"""
	Processes the schema files in *settings_schema_files* to create :py:class:`waflib.Tools.glib2.glib_mkenums` instances. The
	same files are validated through :py:class:`waflib.Tools.glib2.glib_validate_schema` tasks.

	"""
	enums_tgt_node = []   # outputs of the enums step, fed into schema validation below
	install_files = []    # all nodes to be installed into GSETTINGSSCHEMADIR

	settings_schema_files = getattr(self, 'settings_schema_files', [])
	if settings_schema_files and not self.env.GLIB_COMPILE_SCHEMAS:
		raise Errors.WafError ("Unable to process GSettings schemas - glib-compile-schemas was not found during configure")

	# 1. process gsettings_enum_files (generate .enums.xml)
	#
	if hasattr(self, 'settings_enum_files'):
		enums_task = self.create_task('glib_mkenums')

		source_list = self.settings_enum_files
		source_list = [self.path.find_resource(k) for k in source_list]
		enums_task.set_inputs(source_list)
		enums_task.env.GLIB_MKENUMS_SOURCE = [k.abspath() for k in source_list]

		# the target name is derived from the namespace set by add_settings_enums
		target = self.settings_enum_namespace + '.enums.xml'
		tgt_node = self.path.find_or_declare(target)
		enums_task.set_outputs(tgt_node)
		enums_task.env.GLIB_MKENUMS_TARGET = tgt_node.abspath()
		enums_tgt_node = [tgt_node]

		install_files.append(tgt_node)

		# mkenums options producing a <schemalist> XML document for the enums
		options = '--comments "<!-- @comment@ -->" --fhead "<schemalist>" --vhead "  <@type@ id=\\"%s.@EnumName@\\">" --vprod "    <value nick=\\"@valuenick@\\" value=\\"@valuenum@\\"/>" --vtail "  </@type@>" --ftail "</schemalist>" ' % (self.settings_enum_namespace)
		enums_task.env.GLIB_MKENUMS_OPTIONS = options

	# 2. process gsettings_schema_files (validate .gschema.xml files)
	#
	for schema in settings_schema_files:
		schema_task = self.create_task ('glib_validate_schema')

		schema_node = self.path.find_resource(schema)
		if not schema_node:
			raise Errors.WafError("Cannot find the schema file %r" % schema)
		install_files.append(schema_node)
		# the generated enums xml (if any) is needed to validate the schema
		source_list = enums_tgt_node + [schema_node]

		schema_task.set_inputs (source_list)
		schema_task.env.GLIB_COMPILE_SCHEMAS_OPTIONS = [("--schema-file=" + k.abspath()) for k in source_list]

		# a .xml.valid stamp file records a successful dry-run validation
		target_node = schema_node.change_ext('.xml.valid')
		schema_task.set_outputs (target_node)
		schema_task.env.GLIB_VALIDATE_SCHEMA_OUTPUT = target_node.abspath()

	# 3. schemas install task
	def compile_schemas_callback(bld):
		# post-build hook: refresh the compiled schema cache in each registered dir
		if not bld.is_install:
			return
		compile_schemas = Utils.to_list(bld.env.GLIB_COMPILE_SCHEMAS)
		destdir = Options.options.destdir
		paths = bld._compile_schemas_registered
		if destdir:
			# NOTE(review): with --destdir the cache is rebuilt inside the staging area
			paths = (os.path.join(destdir, path.lstrip(os.sep)) for path in paths)
		for path in paths:
			Logs.pprint('YELLOW', 'Updating GSettings schema cache %r' % path)
			if self.bld.exec_command(compile_schemas + [path]):
				Logs.warn('Could not update GSettings schema cache %r' % path)

	if self.bld.is_install:
		schemadir = self.env.GSETTINGSSCHEMADIR
		if not schemadir:
			raise Errors.WafError ('GSETTINGSSCHEMADIR not defined (should have been set up automatically during configure)')

		if install_files:
			self.add_install_files(install_to=schemadir, install_from=install_files)
			# register the post-build callback only once per build context
			registered_schemas = getattr(self.bld, '_compile_schemas_registered', None)
			if not registered_schemas:
				registered_schemas = self.bld._compile_schemas_registered = set()
				self.bld.add_post_fun(compile_schemas_callback)
			registered_schemas.add(schemadir)
+
class glib_validate_schema(Task.Task):
	"""
	Runs a dry-run schema compilation to validate ``.gschema.xml`` files
	"""
	color   = 'PINK'
	run_str = 'rm -f ${GLIB_VALIDATE_SCHEMA_OUTPUT} && ${GLIB_COMPILE_SCHEMAS} --dry-run ${GLIB_COMPILE_SCHEMAS_OPTIONS} && touch ${GLIB_VALIDATE_SCHEMA_OUTPUT}'
+
+################## gresource
+
@extension('.gresource.xml')
def process_gresource_source(self, node):
	"""
	Creates tasks that turn ``.gresource.xml`` files to C code
	"""
	if not self.env.GLIB_COMPILE_RESOURCES:
		raise Errors.WafError ("Unable to process GResource file - glib-compile-resources was not found during configure")

	if 'gresource' in self.features:
		# bundles are handled by process_gresource_bundle instead
		return

	outputs = [node.change_ext('_xml.h'), node.change_ext('_xml.c')]
	self.create_task('glib_gresource_source', node, outputs)
	self.source.append(outputs[1])
+
@feature('gresource')
def process_gresource_bundle(self):
	"""
	Creates tasks to turn ``.gresource`` files from ``.gresource.xml`` files::

		def build(bld):
			bld(
				features='gresource',
				source=['resources1.gresource.xml', 'resources2.gresource.xml'],
				install_path='${LIBDIR}/${PACKAGE}'
			)

	:param source: XML files to process
	:type source: list of string
	:param install_path: installation path
	:type install_path: string
	"""
	install_to = getattr(self, 'install_path', None)
	for name in self.to_list(self.source):
		xml_node = self.path.find_resource(name)

		bundle_task = self.create_task('glib_gresource_bundle', xml_node, xml_node.change_ext(''))
		if install_to:
			self.add_install_files(install_to=install_to, install_from=bundle_task.outputs)
+
class glib_gresource_base(Task.Task):
	"""
	Base class for gresource based tasks
	"""
	color    = 'BLUE'
	base_cmd = '${GLIB_COMPILE_RESOURCES} --sourcedir=${SRC[0].parent.srcpath()} --sourcedir=${SRC[0].bld_dir()}'

	def scan(self):
		"""
		Discovers gresource dependencies by running
		``glib-compile-resources --generate-dependencies``
		"""
		bld = self.generator.bld
		kw = {'cwd': self.get_cwd(), 'quiet': Context.BOTH}

		cmd = Utils.subst_vars('${GLIB_COMPILE_RESOURCES} --sourcedir=%s --sourcedir=%s --generate-dependencies %s' % (
			self.inputs[0].parent.srcpath(),
			self.inputs[0].bld_dir(),
			self.inputs[0].bldpath()
		), self.env)

		output = bld.cmd_and_log(cmd, **kw)

		# dependencies found under the build node become Node objects,
		# everything else is reported by name only
		nodes = []
		names = []
		for dep in output.splitlines():
			if not dep:
				continue
			node = bld.bldnode.find_node(dep)
			if node:
				nodes.append(node)
			else:
				names.append(dep)
		return (nodes, names)
+
class glib_gresource_source(glib_gresource_base):
	"""
	Task to generate C source code (.h and .c files) from a gresource.xml file
	"""
	vars    = ['GLIB_COMPILE_RESOURCES']
	fun_h   = Task.compile_fun_shell(glib_gresource_base.base_cmd + ' --target=${TGT[0].abspath()} --generate-header ${SRC}')
	fun_c   = Task.compile_fun_shell(glib_gresource_base.base_cmd + ' --target=${TGT[1].abspath()} --generate-source ${SRC}')
	ext_out = ['.h']

	def run(self):
		# generate the header first and stop at the first failing command
		ret = self.fun_h[0](self)
		if ret:
			return ret
		return self.fun_c[0](self)
+
class glib_gresource_bundle(glib_gresource_base):
	"""
	Compiles a gresource.xml file into a binary ``.gresource`` bundle
	"""
	run_str = glib_gresource_base.base_cmd + ' --target=${TGT} ${SRC}'
	shell   = True # temporary workaround for #795
+
@conf
def find_glib_genmarshal(conf):
	"""Locates glib-genmarshal and stores it in GLIB_GENMARSHAL"""
	conf.find_program('glib-genmarshal', var='GLIB_GENMARSHAL')
+
@conf
def find_glib_mkenums(conf):
	"""Locates glib-mkenums, detecting a perl interpreter first when needed"""
	if not conf.env.PERL:
		conf.find_program('perl', var='PERL')
	conf.find_program('glib-mkenums', interpreter='PERL', var='GLIB_MKENUMS')
+
@conf
def find_glib_compile_schemas(conf):
	"""Locates glib-compile-schemas and computes GSETTINGSSCHEMADIR"""
	# when cross-compiling, gsettings.m4 locates the program with the following:
	#   pkg-config --variable glib_compile_schemas gio-2.0
	conf.find_program('glib-compile-schemas', var='GLIB_COMPILE_SCHEMAS')

	def lookup(varname):
		# a command-line value wins over one already present in the environment
		return getattr(Options.options, varname, getattr(conf.env,varname, ''))

	# TODO make this dependent on the gnu_dirs tool?
	schemadir = lookup('GSETTINGSSCHEMADIR')
	if not schemadir:
		datadir = lookup('DATADIR') or os.path.join(conf.env.PREFIX, 'share')
		schemadir = os.path.join(datadir, 'glib-2.0', 'schemas')

	conf.env.GSETTINGSSCHEMADIR = schemadir
+
@conf
def find_glib_compile_resources(conf):
	"""Locates glib-compile-resources and stores it in GLIB_COMPILE_RESOURCES"""
	conf.find_program('glib-compile-resources', var='GLIB_COMPILE_RESOURCES')
+
def configure(conf):
	"""
	Finds the following programs:

	* *glib-genmarshal* and set *GLIB_GENMARSHAL*
	* *glib-mkenums* and set *GLIB_MKENUMS*
	* *glib-compile-schemas* and set *GLIB_COMPILE_SCHEMAS* (not mandatory)
	* *glib-compile-resources* and set *GLIB_COMPILE_RESOURCES* (not mandatory)
	"""
	# the schema/resource compilers are optional: the features using them
	# raise an error at build time only if they are actually needed
	for finder, kw in (
		('find_glib_genmarshal', {}),
		('find_glib_mkenums', {}),
		('find_glib_compile_schemas', {'mandatory': False}),
		('find_glib_compile_resources', {'mandatory': False}),
	):
		getattr(conf, finder)(**kw)
+
def options(opt):
	"""
	Adds the ``--gsettingsschemadir`` command-line option
	"""
	group = opt.add_option_group('Installation directories')
	group.add_option('--gsettingsschemadir', help='GSettings schema location [DATADIR/glib-2.0/schemas]', default='', dest='GSETTINGSSCHEMADIR')
diff --git a/third_party/waf/waflib/Tools/gnu_dirs.py b/third_party/waf/waflib/Tools/gnu_dirs.py
new file mode 100644 (file)
index 0000000..d5b26f7
--- /dev/null
@@ -0,0 +1,134 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Ali Sabil, 2007
+
+"""
+Sets various standard variables such as INCLUDEDIR, SBINDIR and others. To use this module just call::
+
+       opt.load('gnu_dirs')
+
+and::
+
+       conf.load('gnu_dirs')
+
+Add options for the standard GNU directories, this tool will add the options
+found in autotools, and will update the environment with the following
+installation variables:
+
+============== ========================================= =======================
+Variable       Description                               Default Value
+============== ========================================= =======================
+PREFIX         installation prefix                       /usr/local
+EXEC_PREFIX    installation prefix for binaries          PREFIX
+BINDIR         user commands                             EXEC_PREFIX/bin
+SBINDIR        system binaries                           EXEC_PREFIX/sbin
+LIBEXECDIR     program-specific binaries                 EXEC_PREFIX/libexec
+SYSCONFDIR     host-specific configuration               PREFIX/etc
+SHAREDSTATEDIR architecture-independent variable data    PREFIX/com
+LOCALSTATEDIR  variable data                             PREFIX/var
+LIBDIR         object code libraries                     EXEC_PREFIX/lib
+INCLUDEDIR     header files                              PREFIX/include
+OLDINCLUDEDIR  header files for non-GCC compilers        /usr/include
+DATAROOTDIR    architecture-independent data root        PREFIX/share
+DATADIR        architecture-independent data             DATAROOTDIR
+INFODIR        GNU "info" documentation                  DATAROOTDIR/info
+LOCALEDIR      locale-dependent data                     DATAROOTDIR/locale
+MANDIR         manual pages                              DATAROOTDIR/man
+DOCDIR         documentation root                        DATAROOTDIR/doc/APPNAME
+HTMLDIR        HTML documentation                        DOCDIR
+DVIDIR         DVI documentation                         DOCDIR
+PDFDIR         PDF documentation                         DOCDIR
+PSDIR          PostScript documentation                  DOCDIR
+============== ========================================= =======================
+"""
+
+import os, re
+from waflib import Utils, Options, Context
+
# Table of GNU installation directories: "name, description, default value".
# LIBDIR receives a 'lib64' suffix where Utils.lib64() reports one.
gnuopts = '''
bindir, user commands, ${EXEC_PREFIX}/bin
sbindir, system binaries, ${EXEC_PREFIX}/sbin
libexecdir, program-specific binaries, ${EXEC_PREFIX}/libexec
sysconfdir, host-specific configuration, ${PREFIX}/etc
sharedstatedir, architecture-independent variable data, ${PREFIX}/com
localstatedir, variable data, ${PREFIX}/var
libdir, object code libraries, ${EXEC_PREFIX}/lib%s
includedir, header files, ${PREFIX}/include
oldincludedir, header files for non-GCC compilers, /usr/include
datarootdir, architecture-independent data root, ${PREFIX}/share
datadir, architecture-independent data, ${DATAROOTDIR}
infodir, GNU "info" documentation, ${DATAROOTDIR}/info
localedir, locale-dependent data, ${DATAROOTDIR}/locale
mandir, manual pages, ${DATAROOTDIR}/man
docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE}
htmldir, HTML documentation, ${DOCDIR}
dvidir, DVI documentation, ${DOCDIR}
pdfdir, PDF documentation, ${DOCDIR}
psdir, PostScript documentation, ${DOCDIR}
''' % Utils.lib64()

# [(name, description, default), ...] parsed from the table above
_options = [x.split(', ') for x in gnuopts.splitlines() if x]
+
def configure(conf):
	"""
	Reads the command-line options to set lots of variables in *conf.env*. The variables
	BINDIR and LIBDIR will be overwritten.
	"""
	def get_param(varname, default):
		# a command-line value, when given, wins over the built-in default
		return getattr(Options.options, varname, '') or default

	env = conf.env
	env.LIBDIR = env.BINDIR = []
	env.EXEC_PREFIX = get_param('EXEC_PREFIX', env.PREFIX)
	env.PACKAGE = getattr(Context.g_module, 'APPNAME', None) or env.PACKAGE

	# iterate until the substitutions converge: directory defaults may
	# reference one another (e.g. ${DATAROOTDIR}/info)
	complete = False
	attempt = 0  # renamed from 'iter' to avoid shadowing the builtin
	while not complete and attempt < len(_options) + 1:
		attempt += 1
		complete = True
		for name, _descr, default in _options:
			name = name.upper()
			if not env[name]:
				try:
					env[name] = Utils.subst_vars(get_param(name, default).replace('/', os.sep), env)
				except TypeError:
					# an unresolved ${VAR} remains; retry on the next pass
					complete = False

	if not complete:
		lst = [x for x, _, _ in _options if not env[x.upper()]]
		raise conf.errors.WafError('Variable substitution failure %r' % lst)
+
def options(opt):
	"""
	Adds lots of command-line options, for example::

		--exec-prefix: EXEC_PREFIX
	"""
	inst_dir = opt.add_option_group('Installation prefix',
'By default, "waf install" will put the files in\
 "/usr/local/bin", "/usr/local/lib" etc. An installation prefix other\
 than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"')

	# move --prefix/--destdir into this group for nicer --help output
	for k in ('--prefix', '--destdir'):
		option = opt.parser.get_option(k)
		if option:
			opt.parser.remove_option(k)
			inst_dir.add_option(option)

	inst_dir.add_option('--exec-prefix',
		help = 'installation prefix for binaries [PREFIX]',
		default = '',
		dest = 'EXEC_PREFIX')

	dirs_options = opt.add_option_group('Installation directories')

	# 'descr' was 'help' in the original, which shadowed the builtin;
	# the unused 'str_default' alias was dropped
	for name, descr, default in _options:
		option_name = '--' + name
		# show the default with the ${VAR} markers stripped, e.g. "[EXEC_PREFIX/bin]"
		str_help = '%s [%s]' % (descr, re.sub(r'\$\{([^}]+)\}', r'\1', default))
		dirs_options.add_option(option_name, help=str_help, default='', dest=name.upper())
diff --git a/third_party/waf/waflib/Tools/gxx.py b/third_party/waf/waflib/Tools/gxx.py
new file mode 100644 (file)
index 0000000..9a068c6
--- /dev/null
@@ -0,0 +1,159 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2016 (ita)
+# Ralf Habacker, 2006 (rh)
+# Yinon Ehrlich, 2009
+
+"""
+g++/llvm detection.
+"""
+
+from waflib.Tools import ccroot, ar
+from waflib.Configure import conf
+
@conf
def find_gxx(conf):
	"""
	Locates g++ (or c++) and, when found, detects the compiler version
	"""
	detected = conf.find_program(['g++', 'c++'], var='CXX')
	conf.get_cc_version(detected, gcc=True)
	conf.env.CXX_NAME = 'gcc'
+
@conf
def gxx_common_flags(conf):
	"""
	Common flags for g++ on nearly all platforms
	"""
	v = conf.env

	v.CXX_SRC_F           = []
	v.CXX_TGT_F           = ['-c', '-o']

	# link with the compiler driver unless a dedicated linker was configured
	if not v.LINK_CXX:
		v.LINK_CXX = v.CXX

	v.CXXLNK_SRC_F        = []
	v.CXXLNK_TGT_F        = ['-o']
	v.CPPPATH_ST          = '-I%s'
	v.DEFINES_ST          = '-D%s'

	v.LIB_ST              = '-l%s' # template for adding libs
	v.LIBPATH_ST          = '-L%s' # template for adding libpaths
	v.STLIB_ST            = '-l%s'
	v.STLIBPATH_ST        = '-L%s'
	v.RPATH_ST            = '-Wl,-rpath,%s'

	v.SONAME_ST           = '-Wl,-h,%s'
	v.SHLIB_MARKER        = '-Wl,-Bdynamic'
	v.STLIB_MARKER        = '-Wl,-Bstatic'

	v.cxxprogram_PATTERN  = '%s'

	# position-independent code for shared libraries
	v.CXXFLAGS_cxxshlib   = ['-fPIC']
	v.LINKFLAGS_cxxshlib  = ['-shared']
	v.cxxshlib_PATTERN    = 'lib%s.so'

	v.LINKFLAGS_cxxstlib  = ['-Wl,-Bstatic']
	v.cxxstlib_PATTERN    = 'lib%s.a'

	# macOS loadable bundles (plugins)
	v.LINKFLAGS_MACBUNDLE = ['-bundle', '-undefined', 'dynamic_lookup']
	v.CXXFLAGS_MACBUNDLE  = ['-fPIC']
	v.macbundle_PATTERN   = '%s.bundle'
+
@conf
def gxx_modifier_win32(conf):
	"""Configuration flags for executing g++ on Windows"""
	v = conf.env
	v.cxxprogram_PATTERN  = '%s.exe'

	v.cxxshlib_PATTERN    = '%s.dll'
	v.implib_PATTERN      = 'lib%s.dll.a'
	v.IMPLIB_ST           = '-Wl,--out-implib,%s'

	# code is position-independent by default on Windows; -fPIC only warns
	v.CXXFLAGS_cxxshlib   = []

	# Auto-import is enabled by default even without this option,
	# but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
	# that the linker emits otherwise.
	v.append_value('LINKFLAGS', ['-Wl,--enable-auto-import'])
+
@conf
def gxx_modifier_cygwin(conf):
	"""Configuration flags for executing g++ on Cygwin"""
	# start from the win32 settings, then adjust the dll naming convention
	gxx_modifier_win32(conf)
	v = conf.env
	v.cxxshlib_PATTERN  = 'cyg%s.dll'
	v.append_value('LINKFLAGS_cxxshlib', ['-Wl,--enable-auto-image-base'])
	v.CXXFLAGS_cxxshlib = []
+
@conf
def gxx_modifier_darwin(conf):
	"""Configuration flags for executing g++ on MacOS"""
	v = conf.env
	# darwin shared libraries: -dynamiclib and the .dylib naming scheme
	v.CXXFLAGS_cxxshlib  = ['-fPIC']
	v.LINKFLAGS_cxxshlib = ['-dynamiclib']
	v.cxxshlib_PATTERN   = 'lib%s.dylib'
	v.FRAMEWORKPATH_ST   = '-F%s'
	v.FRAMEWORK_ST       = ['-framework']
	v.ARCH_ST            = ['-arch']

	v.LINKFLAGS_cxxstlib = []

	# the darwin linker does not support -Bstatic/-Bdynamic or -h
	v.SHLIB_MARKER       = []
	v.STLIB_MARKER       = []
	v.SONAME_ST          = []
+
@conf
def gxx_modifier_aix(conf):
	"""Configuration flags for executing g++ on AIX"""
	v = conf.env
	# run-time linking is required for AIX shared objects
	v.LINKFLAGS_cxxprogram = ['-Wl,-brtl']
	v.LINKFLAGS_cxxshlib   = ['-shared', '-Wl,-brtl,-bexpfull']
	v.SHLIB_MARKER         = []
+
@conf
def gxx_modifier_hpux(conf):
	"""Configuration flags for executing g++ on HP-UX"""
	v = conf.env
	v.SHLIB_MARKER        = []
	v.STLIB_MARKER        = []
	# bugfix: was CFLAGS_cxxshlib, a C-compiler variable that never reaches
	# C++ compilations, so -fPIC/-DPIC were silently dropped for cxxshlib
	v.CXXFLAGS_cxxshlib   = ['-fPIC','-DPIC']
	v.cxxshlib_PATTERN    = 'lib%s.sl'
+
@conf
def gxx_modifier_openbsd(conf):
	"""Configuration flags for executing g++ on OpenBSD"""
	conf.env.SONAME_ST = []
+
@conf
def gxx_modifier_osf1V(conf):
	"""Configuration flags for executing g++ on Tru64 (OSF1)"""
	v = conf.env
	v.SHLIB_MARKER        = []
	v.STLIB_MARKER        = []
	v.SONAME_ST           = []

@conf
def gcc_modifier_osf1V(conf):
	# backward-compatible alias: the function originally carried a copy-pasted
	# 'gcc_' prefix here, which gxx_modifier_platform could never look up
	gxx_modifier_osf1V(conf)
+
@conf
def gxx_modifier_platform(conf):
	"""Execute platform-specific functions based on *gxx_modifier_+NAME*"""
	# the destination platform is detected from the macros the compiler
	# predefines; unknown platforms simply get no extra tweaks
	modifier = getattr(conf, 'gxx_modifier_' + conf.env.DEST_OS, None)
	if modifier:
		modifier()
+
def configure(conf):
	"""
	Configuration for g++
	"""
	for step in ('find_gxx', 'find_ar', 'gxx_common_flags', 'gxx_modifier_platform',
	             'cxx_load_tools', 'cxx_add_flags', 'link_add_flags'):
		getattr(conf, step)()
diff --git a/third_party/waf/waflib/Tools/icc.py b/third_party/waf/waflib/Tools/icc.py
new file mode 100644 (file)
index 0000000..744af9c
--- /dev/null
@@ -0,0 +1,34 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Stian Selnes 2008
+# Thomas Nagy 2009-2016 (ita)
+
+"""
+Detects the Intel C compiler
+"""
+
+import sys
+from waflib.Tools import ccroot, ar, gcc
+from waflib.Configure import conf
+
@conf
def find_icc(conf):
	"""
	Locates icc (or ICL) and runs it to verify it really is the Intel C compiler
	"""
	detected = conf.find_program(['icc', 'ICL'], var='CC')
	conf.get_cc_version(detected, icc=True)
	conf.env.CC_NAME = 'icc'
+
def configure(conf):
	"""Detects icc and configures the C environment, reusing the gcc flag setup."""
	for step in ('find_icc', 'find_ar', 'gcc_common_flags', 'gcc_modifier_platform',
	             'cc_load_tools', 'cc_add_flags', 'link_add_flags'):
		getattr(conf, step)()
diff --git a/third_party/waf/waflib/Tools/icpc.py b/third_party/waf/waflib/Tools/icpc.py
new file mode 100644 (file)
index 0000000..c8a6278
--- /dev/null
@@ -0,0 +1,33 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy 2009-2016 (ita)
+
+"""
+Detects the Intel C++ compiler
+"""
+
+import sys
+from waflib.Tools import ccroot, ar, gxx
+from waflib.Configure import conf
+
+@conf
+def find_icpc(conf):
+       """
+       Finds the program icpc, and execute it to ensure it really is icpc
+       """
+       cxx = conf.find_program('icpc', var='CXX')
+       conf.get_cc_version(cxx, icc=True)
+       conf.env.CXX_NAME = 'icc'
+
+def configure(conf):
+       conf.find_icpc()
+       conf.find_ar()
+       conf.gxx_common_flags()
+       conf.gxx_modifier_platform()
+       conf.cxx_load_tools()
+       conf.cxx_add_flags()
+       conf.link_add_flags()
diff --git a/third_party/waf/waflib/Tools/ifort.py b/third_party/waf/waflib/Tools/ifort.py
new file mode 100644 (file)
index 0000000..636d863
--- /dev/null
@@ -0,0 +1,414 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# DC 2008
+# Thomas Nagy 2016 (ita)
+
+import os, re
+from waflib import Utils, Logs, Errors
+from waflib.Tools import fc, fc_config, fc_scan, ar, ccroot
+from waflib.Configure import conf
+from waflib.TaskGen import after_method, feature
+
+@conf
+def find_ifort(conf):
+       fc = conf.find_program('ifort', var='FC')
+       conf.get_ifort_version(fc)
+       conf.env.FC_NAME = 'IFORT'
+
+@conf
+def ifort_modifier_win32(self):
+       v = self.env
+       v.IFORT_WIN32 = True
+       v.FCSTLIB_MARKER = ''
+       v.FCSHLIB_MARKER = ''
+
+       v.FCLIB_ST = v.FCSTLIB_ST = '%s.lib'
+       v.FCLIBPATH_ST = v.STLIBPATH_ST = '/LIBPATH:%s'
+       v.FCINCPATH_ST = '/I%s'
+       v.FCDEFINES_ST = '/D%s'
+
+       v.fcprogram_PATTERN = v.fcprogram_test_PATTERN = '%s.exe'
+       v.fcshlib_PATTERN = '%s.dll'
+       v.fcstlib_PATTERN = v.implib_PATTERN = '%s.lib'
+
+       v.FCLNK_TGT_F = '/out:'
+       v.FC_TGT_F = ['/c', '/o', '']
+       v.FCFLAGS_fcshlib = ''
+       v.LINKFLAGS_fcshlib = '/DLL'
+       v.AR_TGT_F = '/out:'
+       v.IMPLIB_ST = '/IMPLIB:%s'
+
+       v.append_value('LINKFLAGS', '/subsystem:console')
+       if v.IFORT_MANIFEST:
+               v.append_value('LINKFLAGS', ['/MANIFEST'])
+
+@conf
+def ifort_modifier_darwin(conf):
+       fc_config.fortran_modifier_darwin(conf)
+
+@conf
+def ifort_modifier_platform(conf):
+       dest_os = conf.env.DEST_OS or Utils.unversioned_sys_platform()
+       ifort_modifier_func = getattr(conf, 'ifort_modifier_' + dest_os, None)
+       if ifort_modifier_func:
+               ifort_modifier_func()
+
+@conf
+def get_ifort_version(conf, fc):
+       """
+       Detects the compiler version and sets ``conf.env.FC_VERSION``
+       """
+       version_re = re.compile(r"\bIntel\b.*\bVersion\s*(?P<major>\d*)\.(?P<minor>\d*)",re.I).search
+       if Utils.is_win32:
+               cmd = fc
+       else:
+               cmd = fc + ['-logo']
+
+       out, err = fc_config.getoutput(conf, cmd, stdin=False)
+       match = version_re(out) or version_re(err)
+       if not match:
+               conf.fatal('cannot determine ifort version.')
+       k = match.groupdict()
+       conf.env.FC_VERSION = (k['major'], k['minor'])
+
+def configure(conf):
+       """
+       Detects the Intel Fortran compilers
+       """
+       if Utils.is_win32:
+               compiler, version, path, includes, libdirs, arch = conf.detect_ifort(True)
+               v = conf.env
+               v.DEST_CPU = arch
+               v.PATH = path
+               v.INCLUDES = includes
+               v.LIBPATH = libdirs
+               v.MSVC_COMPILER = compiler
+               try:
+                       v.MSVC_VERSION = float(version)
+               except Exception:
+                       # version may carry a textual suffix (e.g. '12.0.abc'); strip it and retry
+                       v.MSVC_VERSION = float(version[:-3])
+
+               conf.find_ifort_win32()
+               conf.ifort_modifier_win32()
+       else:
+               conf.find_ifort()
+               conf.find_program('xiar', var='AR')
+               conf.find_ar()
+               conf.fc_flags()
+               conf.fc_add_flags()
+               conf.ifort_modifier_platform()
+
+
+all_ifort_platforms = [ ('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86'), ('Itanium', 'ia64')]
+"""List of icl platforms"""
+
+@conf
+def gather_ifort_versions(conf, versions):
+       """
+       List compiler versions by looking up registry keys
+       """
+       version_pattern = re.compile(r'^...?.?\....?.?')
+       try:
+               all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\Fortran')
+       except WindowsError:
+               try:
+                       all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\Fortran')
+               except WindowsError:
+                       return
+       index = 0
+       while 1:
+               try:
+                       version = Utils.winreg.EnumKey(all_versions, index)
+               except WindowsError:
+                       break
+               index += 1
+               if not version_pattern.match(version):
+                       continue
+               targets = {}
+               for target,arch in all_ifort_platforms:
+                       if target=='intel64': targetDir='EM64T_NATIVE'
+                       else: targetDir=target
+                       try:
+                               Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir)
+                               icl_version=Utils.winreg.OpenKey(all_versions,version)
+                               path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
+                       except WindowsError:
+                               pass
+                       else:
+                               batch_file=os.path.join(path,'bin','iclvars.bat')
+                               if os.path.isfile(batch_file):
+                                       targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
+
+               for target,arch in all_ifort_platforms:
+                       try:
+                               icl_version = Utils.winreg.OpenKey(all_versions, version+'\\'+target)
+                               path,type = Utils.winreg.QueryValueEx(icl_version,'ProductDir')
+                       except WindowsError:
+                               continue
+                       else:
+                               batch_file=os.path.join(path,'bin','iclvars.bat')
+                               if os.path.isfile(batch_file):
+                                       targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
+               major = version[0:2]
+               versions['intel ' + major] = targets
+
+@conf
+def setup_ifort(conf, versiondict):
+       """
+       Checks installed compilers and targets and returns the first combination from the user's
+       options, env, or the global supported lists that checks.
+
+       :param versiondict: dict(platform -> dict(architecture -> configuration))
+       :type versiondict: dict(string -> dict(string -> target_compiler)
+       :return: the compiler, revision, path, include dirs, library paths and target architecture
+       :rtype: tuple of strings
+       """
+       platforms = Utils.to_list(conf.env.MSVC_TARGETS) or [i for i,j in all_ifort_platforms]
+       desired_versions = conf.env.MSVC_VERSIONS or list(reversed(list(versiondict.keys())))
+       for version in desired_versions:
+               try:
+                       targets = versiondict[version]
+               except KeyError:
+                       continue
+               for arch in platforms:
+                       try:
+                               cfg = targets[arch]
+                       except KeyError:
+                               continue
+                       cfg.evaluate()
+                       if cfg.is_valid:
+                               compiler,revision = version.rsplit(' ', 1)
+                               return compiler,revision,cfg.bindirs,cfg.incdirs,cfg.libdirs,cfg.cpu
+       conf.fatal('ifort: Impossible to find a valid architecture for building %r - %r' % (desired_versions, list(versiondict.keys())))
+
+@conf
+def get_ifort_version_win32(conf, compiler, version, target, vcvars):
+       # FIXME hack
+       try:
+               conf.msvc_cnt += 1
+       except AttributeError:
+               conf.msvc_cnt = 1
+       batfile = conf.bldnode.make_node('waf-print-msvc-%d.bat' % conf.msvc_cnt)
+       batfile.write("""@echo off
+set INCLUDE=
+set LIB=
+call "%s" %s
+echo PATH=%%PATH%%
+echo INCLUDE=%%INCLUDE%%
+echo LIB=%%LIB%%;%%LIBPATH%%
+""" % (vcvars,target))
+       sout = conf.cmd_and_log(['cmd.exe', '/E:on', '/V:on', '/C', batfile.abspath()])
+       batfile.delete()
+       lines = sout.splitlines()
+
+       if not lines[0]:
+               lines.pop(0)
+
+       MSVC_PATH = MSVC_INCDIR = MSVC_LIBDIR = None
+       for line in lines:
+               if line.startswith('PATH='):
+                       path = line[5:]
+                       MSVC_PATH = path.split(';')
+               elif line.startswith('INCLUDE='):
+                       MSVC_INCDIR = [i for i in line[8:].split(';') if i]
+               elif line.startswith('LIB='):
+                       MSVC_LIBDIR = [i for i in line[4:].split(';') if i]
+       if None in (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR):
+               conf.fatal('ifort: Could not find a valid architecture for building (get_ifort_version_win32)')
+
+       # Check if the compiler is usable at all.
+       # The detection may return 64-bit versions even on 32-bit systems, and these would fail to run.
+       env = dict(os.environ)
+       env.update(PATH = path)
+       compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
+       fc = conf.find_program(compiler_name, path_list=MSVC_PATH)
+
+       # delete CL if exists. because it could contain parameters which can change cl's behaviour rather catastrophically.
+       if 'CL' in env:
+               del(env['CL'])
+
+       try:
+               conf.cmd_and_log(fc + ['/help'], env=env)
+       except UnicodeError:
+               st = Utils.ex_stack()
+               if conf.logger:
+                       conf.logger.error(st)
+               conf.fatal('ifort: Unicode error - check the code page?')
+       except Exception as e:
+               Logs.debug('ifort: get_ifort_version: %r %r %r -> failure %s', compiler, version, target, str(e))
+               conf.fatal('ifort: cannot run the compiler in get_ifort_version (run with -v to display errors)')
+       else:
+               Logs.debug('ifort: get_ifort_version: %r %r %r -> OK', compiler, version, target)
+       finally:
+               conf.env[compiler_name] = ''
+
+       return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
+
+class target_compiler(object):
+       """
+       Wraps a compiler configuration; call evaluate() to determine
+       whether the configuration is usable.
+       """
+       def __init__(self, ctx, compiler, cpu, version, bat_target, bat, callback=None):
+               """
+               :param ctx: configuration context to use to eventually get the version environment
+               :param compiler: compiler name
+               :param cpu: target cpu
+               :param version: compiler version number
+               :param bat_target: ?
+               :param bat: path to the batch file to run
+               :param callback: optional function to take the realized environment variables tup and map it (e.g. to combine other constant paths)
+               """
+               self.conf = ctx
+               self.name = None
+               self.is_valid = False
+               self.is_done = False
+
+               self.compiler = compiler
+               self.cpu = cpu
+               self.version = version
+               self.bat_target = bat_target
+               self.bat = bat
+               self.callback = callback
+
+       def evaluate(self):
+               if self.is_done:
+                       return
+               self.is_done = True
+               try:
+                       vs = self.conf.get_msvc_version(self.compiler, self.version, self.bat_target, self.bat)
+               except Errors.ConfigurationError:
+                       self.is_valid = False
+                       return
+               if self.callback:
+                       vs = self.callback(self, vs)
+               self.is_valid = True
+               (self.bindirs, self.incdirs, self.libdirs) = vs
+
+       def __str__(self):
+               return str((self.bindirs, self.incdirs, self.libdirs))
+
+       def __repr__(self):
+               return repr((self.bindirs, self.incdirs, self.libdirs))
+
+@conf
+def detect_ifort(self):
+       return self.setup_ifort(self.get_ifort_versions(False))
+
+@conf
+def get_ifort_versions(self, eval_and_save=True):
+       """
+       :return: platforms to compiler configurations
+       :rtype: dict
+       """
+       dct = {}
+       self.gather_ifort_versions(dct)
+       return dct
+
+def _get_prog_names(self, compiler):
+       if compiler=='intel':
+               compiler_name = 'ifort'
+               linker_name = 'XILINK'
+               lib_name = 'XILIB'
+       else:
+               # assumes CL.exe
+               compiler_name = 'CL'
+               linker_name = 'LINK'
+               lib_name = 'LIB'
+       return compiler_name, linker_name, lib_name
+
+@conf
+def find_ifort_win32(conf):
+       # the autodetection is supposed to be performed before entering in this method
+       v = conf.env
+       path = v.PATH
+       compiler = v.MSVC_COMPILER
+       version = v.MSVC_VERSION
+
+       compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
+       v.IFORT_MANIFEST = (compiler == 'intel' and version >= 11)
+
+       # compiler
+       fc = conf.find_program(compiler_name, var='FC', path_list=path)
+
+       # before setting anything, check if the compiler is really intel fortran
+       env = dict(conf.environ)
+       if path: env.update(PATH = ';'.join(path))
+       if not conf.cmd_and_log(fc + ['/nologo', '/help'], env=env):
+               conf.fatal('the intel fortran compiler could not be identified')
+
+       v.FC_NAME = 'IFORT'
+
+       if not v.LINK_FC:
+               conf.find_program(linker_name, var='LINK_FC', path_list=path, mandatory=True)
+
+       if not v.AR:
+               conf.find_program(lib_name, path_list=path, var='AR', mandatory=True)
+               v.ARFLAGS = ['/nologo']
+
+       # manifest tool. Not required for VS 2003 and below. Must have for VS 2005 and later
+       if v.IFORT_MANIFEST:
+               conf.find_program('MT', path_list=path, var='MT')
+               v.MTFLAGS = ['/nologo']
+
+       try:
+               conf.load('winres')
+       except Errors.WafError:
+               Logs.warn('Resource compiler not found. Compiling resource file is disabled')
+
+#######################################################################################################
+##### conf above, build below
+
+@after_method('apply_link')
+@feature('fc')
+def apply_flags_ifort(self):
+       """
+       Adds additional flags implied by msvc, such as subsystems and pdb files::
+
+               def build(bld):
+                       bld.stlib(source='main.c', target='bar', subsystem='gruik')
+       """
+       if not self.env.IFORT_WIN32 or not getattr(self, 'link_task', None):
+               return
+
+       is_static = isinstance(self.link_task, ccroot.stlink_task)
+
+       subsystem = getattr(self, 'subsystem', '')
+       if subsystem:
+               subsystem = '/subsystem:%s' % subsystem
+               flags = is_static and 'ARFLAGS' or 'LINKFLAGS'
+               self.env.append_value(flags, subsystem)
+
+       if not is_static:
+               for f in self.env.LINKFLAGS:
+                       d = f.lower()
+                       if d[1:] == 'debug':
+                               pdbnode = self.link_task.outputs[0].change_ext('.pdb')
+                               self.link_task.outputs.append(pdbnode)
+
+                               if getattr(self, 'install_task', None):
+                                       self.pdb_install_task = self.add_install_files(install_to=self.install_task.install_to, install_from=pdbnode)
+
+                               break
+
+@feature('fcprogram', 'fcshlib', 'fcprogram_test')
+@after_method('apply_link')
+def apply_manifest_ifort(self):
+       """
+       Enables manifest embedding in Fortran DLLs when using ifort on Windows
+       See: http://msdn2.microsoft.com/en-us/library/ms235542(VS.80).aspx
+       """
+       if self.env.IFORT_WIN32 and getattr(self, 'link_task', None):
+               # it seems ifort.exe cannot be called for linking
+               self.link_task.env.FC = self.env.LINK_FC
+
+       if self.env.IFORT_WIN32 and self.env.IFORT_MANIFEST and getattr(self, 'link_task', None):
+               out_node = self.link_task.outputs[0]
+               man_node = out_node.parent.find_or_declare(out_node.name + '.manifest')
+               self.link_task.outputs.append(man_node)
+               self.env.DO_MANIFEST = True
diff --git a/third_party/waf/waflib/Tools/intltool.py b/third_party/waf/waflib/Tools/intltool.py
new file mode 100644 (file)
index 0000000..22ea3ff
--- /dev/null
@@ -0,0 +1,229 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2016 (ita)
+
+"""
+Support for translation tools such as msgfmt and intltool
+
+Usage::
+
+       def configure(conf):
+               conf.load('gnu_dirs intltool')
+
+       def build(bld):
+               # process the .po files into .gmo files, and install them in LOCALEDIR
+               bld(features='intltool_po', appname='myapp', podir='po', install_path="${LOCALEDIR}")
+
+               # process an input file, substituting the translations from the po dir
+               bld(
+                       features  = "intltool_in",
+                       podir     = "../po",
+                       style     = "desktop",
+                       flags     = ["-u"],
+                       source    = 'kupfer.desktop.in',
+                       install_path = "${DATADIR}/applications",
+               )
+
+Usage of the :py:mod:`waflib.Tools.gnu_dirs` is recommended, but not obligatory.
+"""
+
+import os, re
+from waflib import Context, Task, Utils, Logs
+import waflib.Tools.ccroot
+from waflib.TaskGen import feature, before_method, taskgen_method
+from waflib.Logs import error
+from waflib.Configure import conf
+
+_style_flags = {
+       'ba': '-b',
+       'desktop': '-d',
+       'keys': '-k',
+       'quoted': '--quoted-style',
+       'quotedxml': '--quotedxml-style',
+       'rfc822deb': '-r',
+       'schemas': '-s',
+       'xml': '-x',
+}
+
+@taskgen_method
+def ensure_localedir(self):
+       """
+       Expands LOCALEDIR from DATAROOTDIR/locale if possible, or falls back to PREFIX/share/locale
+       """
+       # use the tool gnu_dirs to provide options to define this
+       if not self.env.LOCALEDIR:
+               if self.env.DATAROOTDIR:
+                       self.env.LOCALEDIR = os.path.join(self.env.DATAROOTDIR, 'locale')
+               else:
+                       self.env.LOCALEDIR = os.path.join(self.env.PREFIX, 'share', 'locale')
+
+@before_method('process_source')
+@feature('intltool_in')
+def apply_intltool_in_f(self):
+       """
+       Creates tasks to translate files by intltool-merge::
+
+               def build(bld):
+                       bld(
+                               features  = "intltool_in",
+                               podir     = "../po",
+                               style     = "desktop",
+                               flags     = ["-u"],
+                               source    = 'kupfer.desktop.in',
+                               install_path = "${DATADIR}/applications",
+                       )
+
+       :param podir: location of the .po files
+       :type podir: string
+       :param source: source files to process
+       :type source: list of string
+       :param style: the intltool-merge mode of operation, can be one of the following values:
+         ``ba``, ``desktop``, ``keys``, ``quoted``, ``quotedxml``, ``rfc822deb``, ``schemas`` and ``xml``.
+         See the ``intltool-merge`` man page for more information about supported modes of operation.
+       :type style: string
+       :param flags: compilation flags ("-quc" by default)
+       :type flags: list of string
+       :param install_path: installation path
+       :type install_path: string
+       """
+       try: self.meths.remove('process_source')
+       except ValueError: pass
+
+       self.ensure_localedir()
+
+       podir = getattr(self, 'podir', '.')
+       podirnode = self.path.find_dir(podir)
+       if not podirnode:
+               error("could not find the podir %r" % podir)
+               return
+
+       cache = getattr(self, 'intlcache', '.intlcache')
+       self.env.INTLCACHE = [os.path.join(str(self.path.get_bld()), podir, cache)]
+       self.env.INTLPODIR = podirnode.bldpath()
+       self.env.append_value('INTLFLAGS', getattr(self, 'flags', self.env.INTLFLAGS_DEFAULT))
+
+       if '-c' in self.env.INTLFLAGS:
+               self.bld.fatal('Redundant -c flag in intltool task %r' % self)
+
+       style = getattr(self, 'style', None)
+       if style:
+               try:
+                       style_flag = _style_flags[style]
+               except KeyError:
+                       self.bld.fatal('intltool_in style "%s" is not valid' % style)
+
+               self.env.append_unique('INTLFLAGS', [style_flag])
+
+       for i in self.to_list(self.source):
+               node = self.path.find_resource(i)
+
+               task = self.create_task('intltool', node, node.change_ext(''))
+               inst = getattr(self, 'install_path', None)
+               if inst:
+                       self.add_install_files(install_to=inst, install_from=task.outputs)
+
+@feature('intltool_po')
+def apply_intltool_po(self):
+       """
+       Creates tasks to process po files::
+
+               def build(bld):
+                       bld(features='intltool_po', appname='myapp', podir='po', install_path="${LOCALEDIR}")
+
+       The relevant task generator arguments are:
+
+       :param podir: directory of the .po files
+       :type podir: string
+       :param appname: name of the application
+       :type appname: string
+       :param install_path: installation directory
+       :type install_path: string
+
+       The file LINGUAS must be present in the directory pointed by *podir* and list the translation files to process.
+       """
+       try: self.meths.remove('process_source')
+       except ValueError: pass
+
+       self.ensure_localedir()
+
+       appname = getattr(self, 'appname', getattr(Context.g_module, Context.APPNAME, 'set_your_app_name'))
+       podir = getattr(self, 'podir', '.')
+       inst = getattr(self, 'install_path', '${LOCALEDIR}')
+
+       linguas = self.path.find_node(os.path.join(podir, 'LINGUAS'))
+       if linguas:
+               # scan LINGUAS file for locales to process
+               file = open(linguas.abspath())
+               langs = []
+               for line in file.readlines():
+                       # ignore lines containing comments
+                       if not line.startswith('#'):
+                               langs += line.split()
+               file.close()
+               re_linguas = re.compile('[-a-zA-Z_@.]+')
+               for lang in langs:
+                       # Make sure that we only process lines which contain locales
+                       if re_linguas.match(lang):
+                               node = self.path.find_resource(os.path.join(podir, re_linguas.match(lang).group() + '.po'))
+                               task = self.create_task('po', node, node.change_ext('.mo'))
+
+                               if inst:
+                                       filename = task.outputs[0].name
+                                       (langname, ext) = os.path.splitext(filename)
+                                       inst_file = inst + os.sep + langname + os.sep + 'LC_MESSAGES' + os.sep + appname + '.mo'
+                                       self.add_install_as(install_to=inst_file, install_from=task.outputs[0],
+                                               chmod=getattr(self, 'chmod', Utils.O644))
+
+       else:
+               Logs.pprint('RED', "Error no LINGUAS file found in po directory")
+
+class po(Task.Task):
+       """
+       Compiles .po files into .gmo files
+       """
+       run_str = '${MSGFMT} -o ${TGT} ${SRC}'
+       color   = 'BLUE'
+
+class intltool(Task.Task):
+       """
+       Calls intltool-merge to update translation files
+       """
+       run_str = '${INTLTOOL} ${INTLFLAGS} ${INTLCACHE_ST:INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}'
+       color   = 'BLUE'
+
+@conf
+def find_msgfmt(conf):
+       """
+       Detects msgfmt and sets the ``MSGFMT`` variable
+       """
+       conf.find_program('msgfmt', var='MSGFMT')
+
+@conf
+def find_intltool_merge(conf):
+       """
+       Detects intltool-merge
+       """
+       if not conf.env.PERL:
+               conf.find_program('perl', var='PERL')
+       conf.env.INTLCACHE_ST = '--cache=%s'
+       conf.env.INTLFLAGS_DEFAULT = ['-q', '-u']
+       conf.find_program('intltool-merge', interpreter='PERL', var='INTLTOOL')
+
+def configure(conf):
+       """
+       Detects the program *msgfmt* and set *conf.env.MSGFMT*.
+       Detects the program *intltool-merge* and set *conf.env.INTLTOOL*.
+       It is possible to set INTLTOOL in the environment, but it must not have spaces in it::
+
+               $ INTLTOOL="/path/to/the program/intltool" waf configure
+
+       If a C/C++ compiler is present, execute a compilation test to find the header *locale.h*.
+       """
+       conf.find_msgfmt()
+       conf.find_intltool_merge()
+       if conf.env.CC or conf.env.CXX:
+               conf.check(header_name='locale.h')
diff --git a/third_party/waf/waflib/Tools/irixcc.py b/third_party/waf/waflib/Tools/irixcc.py
new file mode 100644 (file)
index 0000000..413261e
--- /dev/null
@@ -0,0 +1,68 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# imported from samba
+
+"""
+Compiler definition for irix/MIPSpro cc compiler
+"""
+
+from waflib.Tools import ccroot, ar
+from waflib.Configure import conf
+
+@conf
+def find_irixcc(conf):
+       v = conf.env
+       cc = None
+       if v.CC:
+               cc = v.CC
+       elif 'CC' in conf.environ:
+               cc = conf.environ['CC']
+       if not cc:
+               cc = conf.find_program('cc', var='CC')
+       if not cc:
+               conf.fatal('irixcc was not found')
+
+       try:
+               conf.cmd_and_log(cc + ['-version'])
+       except Exception:
+               conf.fatal('%r -version could not be executed' % cc)
+
+       v.CC = cc
+       v.CC_NAME = 'irix'
+
+@conf
+def irixcc_common_flags(conf):
+       v = conf.env
+
+       v.CC_SRC_F            = ''
+       v.CC_TGT_F            = ['-c', '-o']
+       v.CPPPATH_ST          = '-I%s'
+       v.DEFINES_ST          = '-D%s'
+
+       if not v.LINK_CC:
+               v.LINK_CC = v.CC
+
+       v.CCLNK_SRC_F         = ''
+       v.CCLNK_TGT_F         = ['-o']
+
+       v.LIB_ST              = '-l%s' # template for adding libs
+       v.LIBPATH_ST          = '-L%s' # template for adding libpaths
+       v.STLIB_ST            = '-l%s'
+       v.STLIBPATH_ST        = '-L%s'
+
+       v.cprogram_PATTERN    = '%s'
+       v.cshlib_PATTERN      = 'lib%s.so'
+       v.cstlib_PATTERN      = 'lib%s.a'
+
+def configure(conf):
+       conf.find_irixcc()
+       conf.find_cpp()
+       conf.find_ar()
+       conf.irixcc_common_flags()
+       conf.cc_load_tools()
+       conf.cc_add_flags()
+       conf.link_add_flags()
diff --git a/third_party/waf/waflib/Tools/javaw.py b/third_party/waf/waflib/Tools/javaw.py
new file mode 100644 (file)
index 0000000..1e56d48
--- /dev/null
@@ -0,0 +1,463 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2016 (ita)
+
+"""
+Java support
+
+Javac is one of the few compilers that behaves very badly:
+
+#. it outputs files where it wants to (-d is only for the package root)
+
+#. it recompiles files silently behind your back
+
+#. it outputs an undefined amount of files (inner classes)
+
+Remember that the compilation can be performed using Jython[1] rather than regular Python. Instead of
+running one of the following commands::
+
+   ./waf configure
+   python waf configure
+
+You would have to run::
+
+   java -jar /path/to/jython.jar waf configure
+
+[1] http://www.jython.org/
+"""
+
+import os, shutil
+from waflib import Task, Utils, Errors, Node
+from waflib.Configure import conf
+from waflib.TaskGen import feature, before_method, after_method
+
+from waflib.Tools import ccroot
# environment variables propagated to 'javac' tasks through the uselib system
ccroot.USELIB_VARS['javac'] = set(['CLASSPATH', 'JAVACFLAGS'])

# ant_glob patterns: java sources to compile, and files to pack into a jar
SOURCE_RE = '**/*.java'
JAR_RE = '**/*'

# minimal Java program used by check_java_class to probe whether a class
# can be loaded from the classpath (exit 0 = found, 1 = not found)
class_check_source = '''
public class Test {
	public static void main(String[] argv) {
		Class lib;
		if (argv.length < 1) {
			System.err.println("Missing argument");
			System.exit(77);
		}
		try {
			lib = Class.forName(argv[0]);
		} catch (ClassNotFoundException e) {
			System.err.println("ClassNotFoundException");
			System.exit(1);
		}
		lib = null;
		System.exit(0);
	}
}
'''
+
@feature('javac')
@before_method('process_source')
def apply_java(self):
	"""
	Creates a javac task for compiling *.java files*. There can be
	only one javac task by task generator.

	Recognized task generator attributes: *outdir* (class output
	folder), *srcdir* (source folders), *sourcepath*, *compat*
	(``-source`` compatibility level).
	"""
	Utils.def_attrs(self, jarname='', classpath='',
		sourcepath='.', srcdir='.',
		jar_mf_attributes={}, jar_mf_classpath=[])

	# resolve the class output folder (created inside the build directory)
	outdir = getattr(self, 'outdir', None)
	if outdir:
		if not isinstance(outdir, Node.Node):
			outdir = self.path.get_bld().make_node(self.outdir)
	else:
		outdir = self.path.get_bld()
	outdir.mkdir()
	self.outdir = outdir
	self.env.OUTDIR = outdir.abspath()

	self.javac_task = tsk = self.create_task('javac')
	tmp = []

	# normalize srcdir to a list of directory nodes
	srcdir = getattr(self, 'srcdir', '')
	if isinstance(srcdir, Node.Node):
		srcdir = [srcdir]
	for x in Utils.to_list(srcdir):
		if isinstance(x, Node.Node):
			y = x
		else:
			y = self.path.find_dir(x)
			if not y:
				self.bld.fatal('Could not find the folder %s from %s' % (x, self.path))
		tmp.append(y)
	tsk.srcdir = tmp

	if getattr(self, 'compat', None):
		tsk.env.append_value('JAVACFLAGS', ['-source', str(self.compat)])

	if hasattr(self, 'sourcepath'):
		fold = [isinstance(x, Node.Node) and x or self.path.find_dir(x) for x in self.to_list(self.sourcepath)]
		names = os.pathsep.join([x.srcpath() for x in fold])
	else:
		# NOTE(review): here names is a list, unlike the os.pathsep-joined
		# string above, so append_value stores a nested list — confirm
		# this is intended by the command-line expansion
		names = [x.srcpath() for x in tsk.srcdir]

	if names:
		tsk.env.append_value('JAVACFLAGS', ['-sourcepath', names])
+
@feature('javac')
@after_method('apply_java')
def use_javac_files(self):
	"""
	Processes the *use* attribute: jar outputs of other task generators
	are appended to the classpath and the build order is enforced.
	"""
	classpath_nodes = []
	self.uselib = self.to_list(getattr(self, 'uselib', []))
	for name in self.to_list(getattr(self, 'use', [])):
		try:
			tg = self.bld.get_tgen_by_name(name)
		except Errors.WafError:
			# not a task generator: assume a plain uselib variable name
			self.uselib.append(name)
			continue
		tg.post()
		if hasattr(tg, 'jar_task'):
			classpath_nodes.append(tg.jar_task.outputs[0].abspath())
			self.javac_task.set_run_after(tg.jar_task)
		else:
			for dep in tg.tasks:
				self.javac_task.set_run_after(dep)
	self.env.append_value('CLASSPATH', classpath_nodes)
+
@feature('javac')
@after_method('apply_java', 'propagate_uselib_vars', 'use_javac_files')
def set_classpath(self):
	"""
	Sets the CLASSPATH value on the *javac* task previously created.
	"""
	self.env.append_value('CLASSPATH', getattr(self, 'classpath', []))
	# the trailing separator ensures a valid value even when empty
	joined = os.pathsep.join(self.env.CLASSPATH) + os.pathsep
	for tsk in self.tasks:
		tsk.env.CLASSPATH = joined
+
@feature('jar')
@after_method('apply_java', 'use_javac_files')
@before_method('process_source')
def jar_files(self):
	"""
	Creates a jar task (one maximum per task generator)

	Recognized attributes: *destfile* (jar file name), *basedir*
	(folder whose contents are packed), *manifest*, *jaropts*,
	*jarcreate* (jar flag string).
	"""
	destfile = getattr(self, 'destfile', 'test.jar')
	jaropts = getattr(self, 'jaropts', [])
	manifest = getattr(self, 'manifest', None)

	# resolve the folder whose contents will be packed
	basedir = getattr(self, 'basedir', None)
	if basedir:
		if not isinstance(self.basedir, Node.Node):
			basedir = self.path.get_bld().make_node(basedir)
	else:
		basedir = self.path.get_bld()
	if not basedir:
		self.bld.fatal('Could not find the basedir %r for %r' % (self.basedir, self))

	self.jar_task = tsk = self.create_task('jar_create')
	if manifest:
		# the 'm' flag makes jar read a manifest; its path must be the
		# first option so it pairs with 'm' in the flag string
		jarcreate = getattr(self, 'jarcreate', 'cfm')
		if not isinstance(manifest,Node.Node):
			node = self.path.find_or_declare(manifest)
		else:
			node = manifest
		tsk.dep_nodes.append(node)
		jaropts.insert(0, node.abspath())
	else:
		jarcreate = getattr(self, 'jarcreate', 'cf')
	if not isinstance(destfile, Node.Node):
		destfile = self.path.find_or_declare(destfile)
	if not destfile:
		self.bld.fatal('invalid destfile %r for %r' % (destfile, self))
	tsk.set_outputs(destfile)
	tsk.basedir = basedir

	# pack everything below basedir: 'jar ... -C <dir> .'
	jaropts.append('-C')
	jaropts.append(basedir.bldpath())
	jaropts.append('.')

	tsk.env.JAROPTS = jaropts
	tsk.env.JARCREATE = jarcreate

	if getattr(self, 'javac_task', None):
		tsk.set_run_after(self.javac_task)
+
@feature('jar')
@after_method('jar_files')
def use_jar_files(self):
	"""
	Processes the *use* attribute to set the build order on the
	tasks created by other task generators.
	"""
	self.uselib = self.to_list(getattr(self, 'uselib', []))
	for name in self.to_list(getattr(self, 'use', [])):
		try:
			tg = self.bld.get_tgen_by_name(name)
		except Errors.WafError:
			# not a task generator: treat as a plain uselib name
			self.uselib.append(name)
		else:
			tg.post()
			self.jar_task.run_after.update(tg.tasks)
+
class JTask(Task.Task):
	"""
	Base class for java and jar tasks; provides the logic for passing
	long command lines through argument files (@file).
	"""
	def split_argfile(self, cmd):
		# jar and javac do not accept -J flags inside @file, so those
		# remain on the command line proper
		args = cmd[1:]
		inline = [x for x in args if x.startswith('-J')]
		infile = [self.quote_flag(x) for x in args if not x.startswith('-J')]
		return ([cmd[0]] + inline, infile)
+
class jar_create(JTask):
	"""
	Creates a jar file
	"""
	color   = 'GREEN'
	run_str = '${JAR} ${JARCREATE} ${TGT} ${JAROPTS}'

	def runnable_status(self):
		"""
		Waits for the dependent tasks to be executed, then reads the
		file system to update the list of inputs (the class files are
		only known after the javac pass has run).
		"""
		for t in self.run_after:
			if not t.hasrun:
				return Task.ASK_LATER
		if not self.inputs:
			global JAR_RE
			try:
				# everything below basedir, except the jar being created itself
				self.inputs = [x for x in self.basedir.ant_glob(JAR_RE, remove=False) if id(x) != id(self.outputs[0])]
			except Exception:
				raise Errors.WafError('Could not find the basedir %r for %r' % (self.basedir, self))
		return super(jar_create, self).runnable_status()
+
class javac(JTask):
	"""
	Compiles java files
	"""
	color   = 'BLUE'
	run_str = '${JAVAC} -classpath ${CLASSPATH} -d ${OUTDIR} ${JAVACFLAGS} ${SRC}'
	vars = ['CLASSPATH', 'JAVACFLAGS', 'JAVAC', 'OUTDIR']
	"""
	The javac task will be executed again if the variables CLASSPATH, JAVACFLAGS, JAVAC or OUTDIR change.
	"""
	def uid(self):
		"""Identify java tasks by input&output folder"""
		# the input file list is dynamic, so the uid cannot depend on it
		lst = [self.__class__.__name__, self.generator.outdir.abspath()]
		for x in self.srcdir:
			lst.append(x.abspath())
		return Utils.h_list(lst)

	def runnable_status(self):
		"""
		Waits for dependent tasks to be complete, then read the file system to find the input nodes.
		"""
		for t in self.run_after:
			if not t.hasrun:
				return Task.ASK_LATER

		if not self.inputs:
			global SOURCE_RE
			self.inputs  = []
			# the *.java files may include generated sources, so they are
			# globbed at build time rather than declared up front
			for x in self.srcdir:
				self.inputs.extend(x.ant_glob(SOURCE_RE, remove=False))
		return super(javac, self).runnable_status()

	def post_run(self):
		"""
		List class files created (javac emits an unpredictable number of
		inner-class files) and record their signatures
		"""
		for node in self.generator.outdir.ant_glob('**/*.class'):
			self.generator.bld.node_sigs[node] = self.uid()
		self.generator.bld.task_sigs[self.uid()] = self.cache_sig
+
@feature('javadoc')
@after_method('process_rule')
def create_javadoc(self):
	"""
	Creates a javadoc task (feature 'javadoc')

	The task generator is expected to provide the attributes
	*javadoc_package* and *javadoc_output* (and *srcdir*, read later by
	the task itself) — no defaults are supplied here.
	"""
	tsk = self.create_task('javadoc')
	tsk.classpath = getattr(self, 'classpath', [])
	self.javadoc_package = Utils.to_list(self.javadoc_package)
	if not isinstance(self.javadoc_output, Node.Node):
		self.javadoc_output = self.bld.path.find_or_declare(self.javadoc_output)
+
class javadoc(Task.Task):
	"""
	Builds java documentation by invoking the javadoc program
	"""
	color = 'BLUE'

	def __str__(self):
		return '%s: %s -> %s\n' % (self.__class__.__name__, self.generator.srcdir, self.generator.javadoc_output)

	def run(self):
		"""Assembles the javadoc command line and executes it"""
		env = self.env
		bld = self.generator.bld
		wd = bld.bldnode

		# add src node + bld node (for generated java code)
		srcpath = self.generator.path.abspath() + os.sep + self.generator.srcdir
		srcpath += os.pathsep
		srcpath += self.generator.path.get_bld().abspath() + os.sep + self.generator.srcdir

		classpath = env.CLASSPATH
		classpath += os.pathsep
		classpath += os.pathsep.join(self.classpath)
		classpath = "".join(classpath)

		self.last_cmd = lst = []
		lst.extend(Utils.to_list(env.JAVADOC))
		lst.extend(['-d', self.generator.javadoc_output.abspath()])
		lst.extend(['-sourcepath', srcpath])
		lst.extend(['-classpath', classpath])
		lst.extend(['-subpackages'])
		lst.extend(self.generator.javadoc_package)
		# drop empty arguments
		lst = [x for x in lst if x]

		self.generator.bld.cmd_and_log(lst, cwd=wd, env=env.env or None, quiet=0)

	def post_run(self):
		"""Records the signatures of the generated documentation files"""
		nodes = self.generator.javadoc_output.ant_glob('**')
		for node in nodes:
			self.generator.bld.node_sigs[node] = self.uid()
		self.generator.bld.task_sigs[self.uid()] = self.cache_sig
+
def configure(self):
	"""
	Detects the javac, java, jar and javadoc programs
	"""
	v = self.env

	# if JAVA_HOME is set, search its bin folder before the regular PATH
	path_list = self.environ['PATH'].split(os.pathsep)
	if 'JAVA_HOME' in self.environ:
		path_list.insert(0, os.path.join(self.environ['JAVA_HOME'], 'bin'))
		v.JAVA_HOME = [self.environ['JAVA_HOME']]

	for tool in ('javac', 'java', 'jar', 'javadoc'):
		self.find_program(tool, var=tool.upper(), path_list=path_list)

	if 'CLASSPATH' in self.environ:
		v.CLASSPATH = self.environ['CLASSPATH']

	if not v.JAR:
		self.fatal('jar is required for making java packages')
	if not v.JAVAC:
		self.fatal('javac is required for compiling java classes')

	v.JARCREATE = 'cf' # can use cvf
	v.JAVACFLAGS = []
+
@conf
def check_java_class(self, classname, with_classpath=None):
	"""
	Checks if the specified java class exists

	:param classname: class to check, like java.util.HashMap
	:type classname: string
	:param with_classpath: additional classpath to give
	:type with_classpath: string
	:return: exit status of the java probe (0 when the class was found)
	"""
	javatestdir = '.waf-javatest'

	# assemble the classpath: test folder + configured value + extra
	parts = [javatestdir]
	if self.env.CLASSPATH:
		parts.append(self.env.CLASSPATH)
	if isinstance(with_classpath, str):
		parts.append(with_classpath)
	classpath = os.pathsep.join(parts)

	# start from a clean scratch folder
	shutil.rmtree(javatestdir, True)
	os.mkdir(javatestdir)

	Utils.writef(os.path.join(javatestdir, 'Test.java'), class_check_source)

	# compile the probe source
	self.exec_command(self.env.JAVAC + [os.path.join(javatestdir, 'Test.java')], shell=False)

	# run the probe; exit status 0 means the class was loadable
	cmd = self.env.JAVA + ['-cp', classpath, 'Test', classname]
	self.to_log("%s\n" % str(cmd))
	found = self.exec_command(cmd, shell=False)

	self.msg('Checking for java class %s' % classname, not found)

	shutil.rmtree(javatestdir, True)

	return found
+
@conf
def check_jni_headers(conf):
	"""
	Checks for jni headers and libraries. On success the conf.env variables xxx_JAVA are added for use in C/C++ targets::

		def options(opt):
			opt.load('compiler_c')

		def configure(conf):
			conf.load('compiler_c java')
			conf.check_jni_headers()

		def build(bld):
			bld.shlib(source='a.c', target='app', use='JAVA')
	"""
	if not conf.env.CC_NAME and not conf.env.CXX_NAME:
		conf.fatal('load a compiler first (gcc, g++, ..)')

	if not conf.env.JAVA_HOME:
		conf.fatal('set JAVA_HOME in the system environment')

	# jni requires the jvm
	javaHome = conf.env.JAVA_HOME[0]

	dir = conf.root.find_dir(conf.env.JAVA_HOME[0] + '/include')
	if dir is None:
		# macOS framework layout keeps the headers beside JAVA_HOME
		dir = conf.root.find_dir(conf.env.JAVA_HOME[0] + '/../Headers') # think different?!
	if dir is None:
		conf.fatal('JAVA_HOME does not seem to be set properly')

	# folders containing jni.h and the platform-specific jni_md.h
	f = dir.ant_glob('**/(jni|jni_md).h')
	incDirs = [x.parent.abspath() for x in f]

	# candidate folders containing the jvm runtime library
	dir = conf.root.find_dir(conf.env.JAVA_HOME[0])
	f = dir.ant_glob('**/*jvm.(so|dll|dylib)')
	libDirs = [x.parent.abspath() for x in f] or [javaHome]

	# On windows, we need both the .dll and .lib to link.  On my JDK, they are
	# in different directories...
	f = dir.ant_glob('**/*jvm.(lib)')
	if f:
		libDirs = [[x, y.parent.abspath()] for x in libDirs for y in f]

	if conf.env.DEST_OS == 'freebsd':
		conf.env.append_unique('LINKFLAGS_JAVA', '-pthread')
	# try each candidate library folder until one links successfully
	for d in libDirs:
		try:
			conf.check(header_name='jni.h', define_name='HAVE_JNI_H', lib='jvm',
				libpath=d, includes=incDirs, uselib_store='JAVA', uselib='JAVA')
		except Exception:
			pass
		else:
			break
	else:
		# for/else: no candidate worked
		conf.fatal('could not find lib jvm in %r (see config.log)' % libDirs)
diff --git a/third_party/waf/waflib/Tools/kde4.py b/third_party/waf/waflib/Tools/kde4.py
new file mode 100644 (file)
index 0000000..f39f24a
--- /dev/null
@@ -0,0 +1,93 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2010 (ita)
+
+"""
+Support for the KDE4 libraries and msgfmt
+"""
+
+import os, re
+from waflib import Task, Utils
+from waflib.TaskGen import feature
+
@feature('msgfmt')
def apply_msgfmt(self):
	"""
	Process all languages to create .mo files and to install them::

		def build(bld):
			bld(features='msgfmt', langs='es de fr', appname='myapp', install_path='${KDE4_LOCALE_INSTALL_DIR}')
	"""
	for lang in self.to_list(self.langs):
		node = self.path.find_resource(lang+'.po')
		task = self.create_task('msgfmt', node, node.change_ext('.mo'))

		# only the last path component names the language
		langname = lang.split('/')
		langname = langname[-1]

		inst = getattr(self, 'install_path', '${KDE4_LOCALE_INSTALL_DIR}')

		# install as <inst>/<lang>/LC_MESSAGES/<appname>.mo
		self.bld.install_as(
			inst + os.sep + langname + os.sep + 'LC_MESSAGES' + os.sep + getattr(self, 'appname', 'set_your_appname') + '.mo',
			task.outputs[0],
			chmod = getattr(self, 'chmod', Utils.O644))
+
class msgfmt(Task.Task):
	"""
	Compiles .po translation catalogs into binary .mo files
	"""
	run_str = '${MSGFMT} ${SRC} -o ${TGT}'
	color   = 'BLUE'
+
def configure(self):
	"""
	Detect kde4-config and set various variables for the *use* system::

		def options(opt):
			opt.load('compiler_cxx kde4')
		def configure(conf):
			conf.load('compiler_cxx kde4')
		def build(bld):
			bld.program(source='main.c', target='app', use='KDECORE KIO KHTML')
	"""
	kdeconfig = self.find_program('kde4-config')
	prefix = self.cmd_and_log(kdeconfig + ['--prefix']).strip()
	# the cmake modules file moved between KDE versions; try both locations
	fname = '%s/share/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
	try: os.stat(fname)
	except OSError:
		fname = '%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
		try: os.stat(fname)
		except OSError: self.fatal('could not open %s' % fname)

	try:
		txt = Utils.readf(fname)
	except EnvironmentError:
		self.fatal('could not read %s' % fname)

	# fold cmake line continuations and strip comments
	txt = txt.replace('\\\n', '\n')
	fu = re.compile(r'#(.*)\n')
	txt = fu.sub('', txt)

	# raw strings: '\s' in a plain literal is an invalid escape sequence
	# (SyntaxWarning on modern Python, slated to become an error)
	setregexp = re.compile(r'([sS][eE][tT]\s*\()\s*([^\s]+)\s+"([^"]+)"\)')
	found = setregexp.findall(txt)

	# import every set(KEY "value") pair into the environment
	for (_, key, val) in found:
		self.env[key] = val

	# well well, i could just write an interpreter for cmake files
	self.env['LIB_KDECORE']= ['kdecore']
	self.env['LIB_KDEUI']  = ['kdeui']
	self.env['LIB_KIO']    = ['kio']
	self.env['LIB_KHTML']  = ['khtml']
	self.env['LIB_KPARTS'] = ['kparts']

	self.env['LIBPATH_KDECORE']  = [os.path.join(self.env.KDE4_LIB_INSTALL_DIR, 'kde4', 'devel'), self.env.KDE4_LIB_INSTALL_DIR]
	self.env['INCLUDES_KDECORE'] = [self.env['KDE4_INCLUDE_INSTALL_DIR']]
	self.env.append_value('INCLUDES_KDECORE', [self.env['KDE4_INCLUDE_INSTALL_DIR']+ os.sep + 'KDE'])

	self.find_program('msgfmt', var='MSGFMT')
diff --git a/third_party/waf/waflib/Tools/ldc2.py b/third_party/waf/waflib/Tools/ldc2.py
new file mode 100644 (file)
index 0000000..80a09fd
--- /dev/null
@@ -0,0 +1,59 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Alex Rønne Petersen, 2012 (alexrp/Zor)
+
+from waflib.Tools import ar, d
+from waflib.Configure import conf
+
@conf
def find_ldc2(conf):
	"""
	Finds the program *ldc2* and sets the variable *D*
	"""
	conf.find_program(['ldc2'], var='D')

	# sanity check: the binary must really be ldc2, not dmd/gdc
	version_text = conf.cmd_and_log(conf.env.D + ['-version'])
	if 'based on DMD v2.' not in version_text:
		conf.fatal('detected compiler is not ldc2')
+
@conf
def common_flags_ldc2(conf):
	"""
	Sets the D flags required by *ldc2*
	"""
	env = conf.env

	# compilation
	env.D_SRC_F = ['-c']
	env.D_TGT_F = '-of%s'

	# linking: ldc2 forwards '-L'-prefixed options to the real linker
	env.D_LINKER = env.D
	env.DLNK_SRC_F = ''
	env.DLNK_TGT_F = '-of%s'
	env.DINC_ST = '-I%s'

	env.DSHLIB_MARKER = ''
	env.DSTLIB_MARKER = ''
	env.DSTLIB_ST = '-L-l%s'
	env.DSHLIB_ST = '-L-l%s'
	env.DSTLIBPATH_ST = '-L-L%s'
	env.DLIBPATH_ST = '-L-L%s'

	env.LINKFLAGS_dshlib = ['-L-shared']

	# interface (.di header) generation
	env.DHEADER_ext = '.di'
	env.DFLAGS_d_with_header = ['-H', '-Hf']
	env.D_HDR_F = '%s'

	env.LINKFLAGS = []
	env.DFLAGS_dshlib = ['-relocation-model=pic']
+
def configure(conf):
	"""
	Configuration for *ldc2*: find the compiler, load the archiver and
	the generic D support, then apply the ldc2-specific flags.
	"""
	# order matters: common_flags_ldc2 reads conf.env.D set by find_ldc2
	conf.find_ldc2()
	conf.load('ar')
	conf.load('d')
	conf.common_flags_ldc2()
	conf.d_platform_flags()
diff --git a/third_party/waf/waflib/Tools/lua.py b/third_party/waf/waflib/Tools/lua.py
new file mode 100644 (file)
index 0000000..f864970
--- /dev/null
@@ -0,0 +1,41 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Sebastian Schlingmann, 2008
+# Thomas Nagy, 2008-2016 (ita)
+
+"""
+Lua support.
+
+Compile *.lua* files into *.luac*::
+
+       def configure(conf):
+               conf.load('lua')
+               conf.env.LUADIR = '/usr/local/share/myapp/scripts/'
+       def build(bld):
+               bld(source='foo.lua')
+"""
+
+from waflib.TaskGen import extension
+from waflib import Task
+
@extension('.lua')
def add_lua(self, node):
	"""
	Creates a luac task per .lua source file and schedules the compiled
	output for installation when an install folder is known.
	"""
	tsk = self.create_task('luac', node, node.change_ext('.luac'))
	default_dest = '${LUADIR}' if self.env.LUADIR else None
	inst_to = getattr(self, 'install_path', default_dest)
	if inst_to:
		self.add_install_files(install_to=inst_to, install_from=tsk.outputs)
	return tsk
+
class luac(Task.Task):
	# -s strips debug information from the compiled chunk
	run_str = '${LUAC} -s -o ${TGT} ${SRC}'
	color   = 'PINK'
+
def configure(conf):
	"""
	Detect the luac compiler and set *conf.env.LUAC*

	Raises a configuration error when luac is not found on the PATH.
	"""
	conf.find_program('luac', var='LUAC')
diff --git a/third_party/waf/waflib/Tools/md5_tstamp.py b/third_party/waf/waflib/Tools/md5_tstamp.py
new file mode 100644 (file)
index 0000000..4843427
--- /dev/null
@@ -0,0 +1,43 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+
+"""
+Re-calculate md5 hashes of files only when the file times or the file
+size have changed.
+
+The hashes can also reflect either the file contents (STRONGEST=True) or the
+file time and file size.
+
+The performance benefits of this module are usually insignificant.
+"""
+
+import os, stat
+from waflib import Utils, Build, Node
+
# True: hash the file contents whenever the timestamp changed;
# False: hash only (mtime, size), which is faster but weaker
STRONGEST = True

# persist the per-file hash cache in the build context across runs
Build.SAVED_ATTRS.append('hashes_md5_tstamp')
def h_file(self):
	# returns a cached hash for the node's file, recomputing only when
	# the modification time differs from the cached one
	filename = self.abspath()
	st = os.stat(filename)

	cache = self.ctx.hashes_md5_tstamp
	if filename in cache and cache[filename][0] == st.st_mtime:
		# timestamp unchanged: reuse the stored hash
		return cache[filename][1]

	global STRONGEST
	if STRONGEST:
		ret = Utils.h_file(filename)
	else:
		if stat.S_ISDIR(st[stat.ST_MODE]):
			raise IOError('Not a file')
		# weak hash: digest of the (mtime, size) pair only
		ret = Utils.md5(str((st.st_mtime, st.st_size))).digest()

	cache[filename] = (st.st_mtime, ret)
	return ret
h_file.__doc__ = Node.Node.h_file.__doc__
# monkey-patch the default Node hashing with the cached variant
Node.Node.h_file = h_file
diff --git a/third_party/waf/waflib/Tools/msvc.py b/third_party/waf/waflib/Tools/msvc.py
new file mode 100644 (file)
index 0000000..7cc2fef
--- /dev/null
@@ -0,0 +1,980 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Carlos Rafael Giani, 2006 (dv)
+# Tamas Pal, 2007 (folti)
+# Nicolas Mercier, 2009
+# Matt Clarkson, 2012
+
+"""
+Microsoft Visual C++/Intel C++ compiler support
+
+Usage::
+
+       $ waf configure --msvc_version="msvc 10.0,msvc 9.0" --msvc_target="x64"
+
+or::
+
+       def configure(conf):
+               conf.env.MSVC_VERSIONS = ['msvc 10.0', 'msvc 9.0', 'msvc 8.0', 'msvc 7.1', 'msvc 7.0', 'msvc 6.0', 'wsdk 7.0', 'intel 11', 'PocketPC 9.0', 'Smartphone 8.0']
+               conf.env.MSVC_TARGETS = ['x64']
+               conf.load('msvc')
+
+or::
+
+       def configure(conf):
+               conf.load('msvc', funs='no_autodetect')
+               conf.check_lib_msvc('gdi32')
+               conf.check_libs_msvc('kernel32 user32')
+       def build(bld):
+               tg = bld.program(source='main.c', target='app', use='KERNEL32 USER32 GDI32')
+
+Platforms and targets will be tested in the order they appear;
+the first good configuration will be used.
+
+To force testing all the configurations that are not used, use the ``--no-msvc-lazy`` option
+or set ``conf.env.MSVC_LAZY_AUTODETECT=False``.
+
+Supported platforms: ia64, x64, x86, x86_amd64, x86_ia64, x86_arm, amd64_x86, amd64_arm
+
+Compilers supported:
+
+* msvc       => Visual Studio, versions 6.0 (VC 98, VC .NET 2002) to 12.0 (Visual Studio 2013)
+* wsdk       => Windows SDK, versions 6.0, 6.1, 7.0, 7.1, 8.0
+* icl        => Intel compiler, versions 9, 10, 11, 13
+* winphone   => Visual Studio to target Windows Phone 8 native (version 8.0 for now)
+* Smartphone => Compiler/SDK for Smartphone devices (armv4/v4i)
+* PocketPC   => Compiler/SDK for PocketPC devices (armv4/v4i)
+
+To use WAF in a VS2008 Make file project (see http://code.google.com/p/waf/issues/detail?id=894)
+You may consider to set the environment variable "VS_UNICODE_OUTPUT" to nothing before calling waf.
+So in your project settings use something like 'cmd.exe /C "set VS_UNICODE_OUTPUT=& set PYTHONUNBUFFERED=true & waf build"'.
+cmd.exe  /C  "chcp 1252 & set PYTHONUNBUFFERED=true && set && waf  configure"
+Setting PYTHONUNBUFFERED gives the unbuffered output.
+"""
+
+import os, sys, re
+from waflib import Utils, Logs, Options, Errors
+from waflib.TaskGen import after_method, feature
+
+from waflib.Configure import conf
+from waflib.Tools import ccroot, c, cxx, ar
+
+g_msvc_systemlibs = '''
+aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet
+cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs
+credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d
+ddao35u ddao35ud delayimp dhcpcsvc dhcpsapi dlcapi dnsapi dsprop dsuiext dtchelp
+faultrep fcachdll fci fdi framedyd framedyn gdi32 gdiplus glauxglu32 gpedit gpmuuid
+gtrts32w gtrtst32hlink htmlhelp httpapi icm32 icmui imagehlp imm32 iphlpapi iprop
+kernel32 ksguid ksproxy ksuser libcmt libcmtd libcpmt libcpmtd loadperf lz32 mapi
+mapi32 mgmtapi minidump mmc mobsync mpr mprapi mqoa mqrt msacm32 mscms mscoree
+msdasc msimg32 msrating mstask msvcmrt msvcurt msvcurtd mswsock msxml2 mtx mtxdm
+netapi32 nmapinmsupp npptools ntdsapi ntdsbcli ntmsapi ntquery odbc32 odbcbcp
+odbccp32 oldnames ole32 oleacc oleaut32 oledb oledlgolepro32 opends60 opengl32
+osptk parser pdh penter pgobootrun pgort powrprof psapi ptrustm ptrustmd ptrustu
+ptrustud qosname rasapi32 rasdlg rassapi resutils riched20 rpcndr rpcns4 rpcrt4 rtm
+rtutils runtmchk scarddlg scrnsave scrnsavw secur32 sensapi setupapi sfc shell32
+shfolder shlwapi sisbkup snmpapi sporder srclient sti strsafe svcguid tapi32 thunk32
+traffic unicows url urlmon user32 userenv usp10 uuid uxtheme vcomp vcompd vdmdbg
+version vfw32 wbemuuid  webpost wiaguid wininet winmm winscard winspool winstrm
+wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp
+'''.split()
+"""importlibs provided by MSVC/Platform SDK. Do NOT search them"""
+
+all_msvc_platforms = [ ('x64', 'amd64'), ('x86', 'x86'), ('ia64', 'ia64'),
+                                               ('x86_amd64', 'amd64'), ('x86_ia64', 'ia64'), ('x86_arm', 'arm'), ('x86_arm64', 'arm64'),
+                                               ('amd64_x86', 'x86'), ('amd64_arm', 'arm'), ('amd64_arm64', 'arm64') ]
+"""List of msvc platforms"""
+
+all_wince_platforms = [ ('armv4', 'arm'), ('armv4i', 'arm'), ('mipsii', 'mips'), ('mipsii_fp', 'mips'), ('mipsiv', 'mips'), ('mipsiv_fp', 'mips'), ('sh4', 'sh'), ('x86', 'cex86') ]
+"""List of wince platforms"""
+
+all_icl_platforms = [ ('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86'), ('Itanium', 'ia64')]
+"""List of icl platforms"""
+
+def options(opt):
+       opt.add_option('--msvc_version', type='string', help = 'msvc version, eg: "msvc 10.0,msvc 9.0"', default='')
+       opt.add_option('--msvc_targets', type='string', help = 'msvc targets, eg: "x64,arm"', default='')
+       opt.add_option('--no-msvc-lazy', action='store_false', help = 'lazily check msvc target environments', default=True, dest='msvc_lazy')
+
+@conf
+def setup_msvc(conf, versiondict):
+       """
+       Checks installed compilers and targets and returns the first combination from the user's
+       options, env, or the global supported lists that checks.
+
+       :param versiondict: dict(platform -> dict(architecture -> configuration))
+       :type versiondict: dict(string -> dict(string -> target_compiler)
+       :return: the compiler, revision, path, include dirs, library paths and target architecture
+       :rtype: tuple of strings
+       """
+       platforms = getattr(Options.options, 'msvc_targets', '').split(',')
+       if platforms == ['']:
+               platforms=Utils.to_list(conf.env.MSVC_TARGETS) or [i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms]
+       desired_versions = getattr(Options.options, 'msvc_version', '').split(',')
+       if desired_versions == ['']:
+               desired_versions = conf.env.MSVC_VERSIONS or list(versiondict.keys())
+
+       # Override lazy detection by evaluating after the fact.
+       lazy_detect = getattr(Options.options, 'msvc_lazy', True)
+       if conf.env.MSVC_LAZY_AUTODETECT is False:
+               lazy_detect = False
+
+       if not lazy_detect:
+               for val in versiondict.values():
+                       for arch in list(val.keys()):
+                               cfg = val[arch]
+                               cfg.evaluate()
+                               if not cfg.is_valid:
+                                       del val[arch]
+               conf.env.MSVC_INSTALLED_VERSIONS = versiondict
+
+       for version in desired_versions:
+               try:
+                       targets = versiondict[version]
+               except KeyError:
+                       continue
+               for arch in platforms:
+                       try:
+                               cfg = targets[arch]
+                       except KeyError:
+                               continue
+                       cfg.evaluate()
+                       if cfg.is_valid:
+                               compiler,revision = version.rsplit(' ', 1)
+                               return compiler,revision,cfg.bindirs,cfg.incdirs,cfg.libdirs,cfg.cpu
+       conf.fatal('msvc: Impossible to find a valid architecture for building %r - %r' % (desired_versions, list(versiondict.keys())))
+
+@conf
+def get_msvc_version(conf, compiler, version, target, vcvars):
+       """
+       Checks that an installed compiler actually runs and uses vcvars to obtain the
+       environment needed by the compiler.
+
+       :param compiler: compiler type, for looking up the executable name
+       :param version: compiler version, for debugging only
+       :param target: target architecture
+       :param vcvars: batch file to run to check the environment
+       :return: the location of the compiler executable, the location of include dirs, and the library paths
+       :rtype: tuple of strings
+       """
+       Logs.debug('msvc: get_msvc_version: %r %r %r', compiler, version, target)
+
+       try:
+               conf.msvc_cnt += 1
+       except AttributeError:
+               conf.msvc_cnt = 1
+       batfile = conf.bldnode.make_node('waf-print-msvc-%d.bat' % conf.msvc_cnt)
+       batfile.write("""@echo off
+set INCLUDE=
+set LIB=
+call "%s" %s
+echo PATH=%%PATH%%
+echo INCLUDE=%%INCLUDE%%
+echo LIB=%%LIB%%;%%LIBPATH%%
+""" % (vcvars,target))
+       sout = conf.cmd_and_log(['cmd.exe', '/E:on', '/V:on', '/C', batfile.abspath()])
+       lines = sout.splitlines()
+
+       if not lines[0]:
+               lines.pop(0)
+
+       MSVC_PATH = MSVC_INCDIR = MSVC_LIBDIR = None
+       for line in lines:
+               if line.startswith('PATH='):
+                       path = line[5:]
+                       MSVC_PATH = path.split(';')
+               elif line.startswith('INCLUDE='):
+                       MSVC_INCDIR = [i for i in line[8:].split(';') if i]
+               elif line.startswith('LIB='):
+                       MSVC_LIBDIR = [i for i in line[4:].split(';') if i]
+       if None in (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR):
+               conf.fatal('msvc: Could not find a valid architecture for building (get_msvc_version_3)')
+
+       # Check if the compiler is usable at all.
+       # The detection may return 64-bit versions even on 32-bit systems, and these would fail to run.
+       env = dict(os.environ)
+       env.update(PATH = path)
+       compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
+       cxx = conf.find_program(compiler_name, path_list=MSVC_PATH)
+
+       # delete CL if exists. because it could contain parameters which can change cl's behaviour rather catastrophically.
+       if 'CL' in env:
+               del(env['CL'])
+
+       try:
+               conf.cmd_and_log(cxx + ['/help'], env=env)
+       except UnicodeError:
+               st = Utils.ex_stack()
+               if conf.logger:
+                       conf.logger.error(st)
+               conf.fatal('msvc: Unicode error - check the code page?')
+	except Exception as e:
+               Logs.debug('msvc: get_msvc_version: %r %r %r -> failure %s', compiler, version, target, str(e))
+               conf.fatal('msvc: cannot run the compiler in get_msvc_version (run with -v to display errors)')
+       else:
+               Logs.debug('msvc: get_msvc_version: %r %r %r -> OK', compiler, version, target)
+       finally:
+               conf.env[compiler_name] = ''
+
+       return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
+
+@conf
+def gather_wsdk_versions(conf, versions):
+       """
+       Use winreg to add the msvc versions to the input list
+
+       :param versions: list to modify
+       :type versions: list
+       """
+       version_pattern = re.compile('^v..?.?\...?.?')
+       try:
+               all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows')
+       except WindowsError:
+               try:
+                       all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows')
+               except WindowsError:
+                       return
+       index = 0
+       while 1:
+               try:
+                       version = Utils.winreg.EnumKey(all_versions, index)
+               except WindowsError:
+                       break
+               index += 1
+               if not version_pattern.match(version):
+                       continue
+               try:
+                       msvc_version = Utils.winreg.OpenKey(all_versions, version)
+                       path,type = Utils.winreg.QueryValueEx(msvc_version,'InstallationFolder')
+               except WindowsError:
+                       continue
+               if path and os.path.isfile(os.path.join(path, 'bin', 'SetEnv.cmd')):
+                       targets = {}
+                       for target,arch in all_msvc_platforms:
+                               targets[target] = target_compiler(conf, 'wsdk', arch, version, '/'+target, os.path.join(path, 'bin', 'SetEnv.cmd'))
+                       versions['wsdk ' + version[1:]] = targets
+
+def gather_wince_supported_platforms():
+       """
+       Checks SmartPhones SDKs
+
+       :param versions: list to modify
+       :type versions: list
+       """
+       supported_wince_platforms = []
+       try:
+               ce_sdk = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs')
+       except WindowsError:
+               try:
+                       ce_sdk = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs')
+               except WindowsError:
+                       ce_sdk = ''
+       if not ce_sdk:
+               return supported_wince_platforms
+
+       index = 0
+       while 1:
+               try:
+                       sdk_device = Utils.winreg.EnumKey(ce_sdk, index)
+                       sdk = Utils.winreg.OpenKey(ce_sdk, sdk_device)
+               except WindowsError:
+                       break
+               index += 1
+               try:
+                       path,type = Utils.winreg.QueryValueEx(sdk, 'SDKRootDir')
+               except WindowsError:
+                       try:
+                               path,type = Utils.winreg.QueryValueEx(sdk,'SDKInformation')
+                       except WindowsError:
+                               continue
+                       path,xml = os.path.split(path)
+               path = str(path)
+               path,device = os.path.split(path)
+               if not device:
+                       path,device = os.path.split(path)
+               platforms = []
+               for arch,compiler in all_wince_platforms:
+                       if os.path.isdir(os.path.join(path, device, 'Lib', arch)):
+                               platforms.append((arch, compiler, os.path.join(path, device, 'Include', arch), os.path.join(path, device, 'Lib', arch)))
+               if platforms:
+                       supported_wince_platforms.append((device, platforms))
+       return supported_wince_platforms
+
+def gather_msvc_detected_versions():
+       #Detected MSVC versions!
+       version_pattern = re.compile('^(\d\d?\.\d\d?)(Exp)?$')
+       detected_versions = []
+       for vcver,vcvar in (('VCExpress','Exp'), ('VisualStudio','')):
+               prefix = 'SOFTWARE\\Wow6432node\\Microsoft\\' + vcver
+               try:
+                       all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, prefix)
+               except WindowsError:
+                       prefix = 'SOFTWARE\\Microsoft\\' + vcver
+                       try:
+                               all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, prefix)
+                       except WindowsError:
+                               continue
+
+               index = 0
+               while 1:
+                       try:
+                               version = Utils.winreg.EnumKey(all_versions, index)
+                       except WindowsError:
+                               break
+                       index += 1
+                       match = version_pattern.match(version)
+                       if match:
+                               versionnumber = float(match.group(1))
+                       else:
+                               continue
+                       detected_versions.append((versionnumber, version+vcvar, prefix+'\\'+version))
+       def fun(tup):
+               return tup[0]
+
+       detected_versions.sort(key = fun)
+       return detected_versions
+
+class target_compiler(object):
+       """
+       Wrap a compiler configuration; call evaluate() to determine
+       whether the configuration is usable.
+       """
+       def __init__(self, ctx, compiler, cpu, version, bat_target, bat, callback=None):
+               """
+               :param ctx: configuration context to use to eventually get the version environment
+               :param compiler: compiler name
+               :param cpu: target cpu
+               :param version: compiler version number
+               :param bat_target: ?
+               :param bat: path to the batch file to run
+               :param callback: optional function to take the realized environment variables tup and map it (e.g. to combine other constant paths)
+               """
+               self.conf = ctx
+               self.name = None
+               self.is_valid = False
+               self.is_done = False
+
+               self.compiler = compiler
+               self.cpu = cpu
+               self.version = version
+               self.bat_target = bat_target
+               self.bat = bat
+               self.callback = callback
+
+       def evaluate(self):
+               if self.is_done:
+                       return
+               self.is_done = True
+               try:
+                       vs = self.conf.get_msvc_version(self.compiler, self.version, self.bat_target, self.bat)
+               except Errors.ConfigurationError:
+                       self.is_valid = False
+                       return
+               if self.callback:
+                       vs = self.callback(self, vs)
+               self.is_valid = True
+               (self.bindirs, self.incdirs, self.libdirs) = vs
+
+       def __str__(self):
+               return str((self.bindirs, self.incdirs, self.libdirs))
+
+       def __repr__(self):
+               return repr((self.bindirs, self.incdirs, self.libdirs))
+
+@conf
+def gather_msvc_targets(conf, versions, version, vc_path):
+       #Looking for normal MSVC compilers!
+       targets = {}
+
+       if os.path.isfile(os.path.join(vc_path, 'VC', 'Auxiliary', 'Build', 'vcvarsall.bat')):
+               for target,realtarget in all_msvc_platforms[::-1]:
+                       targets[target] = target_compiler(conf, 'msvc', realtarget, version, target, os.path.join(vc_path, 'VC', 'Auxiliary', 'Build', 'vcvarsall.bat'))
+       elif os.path.isfile(os.path.join(vc_path, 'vcvarsall.bat')):
+               for target,realtarget in all_msvc_platforms[::-1]:
+                       targets[target] = target_compiler(conf, 'msvc', realtarget, version, target, os.path.join(vc_path, 'vcvarsall.bat'))
+       elif os.path.isfile(os.path.join(vc_path, 'Common7', 'Tools', 'vsvars32.bat')):
+               targets['x86'] = target_compiler(conf, 'msvc', 'x86', version, 'x86', os.path.join(vc_path, 'Common7', 'Tools', 'vsvars32.bat'))
+       elif os.path.isfile(os.path.join(vc_path, 'Bin', 'vcvars32.bat')):
+               targets['x86'] = target_compiler(conf, 'msvc', 'x86', version, '', os.path.join(vc_path, 'Bin', 'vcvars32.bat'))
+       if targets:
+               versions['msvc ' + version] = targets
+
+@conf
+def gather_wince_targets(conf, versions, version, vc_path, vsvars, supported_platforms):
+       #Looking for Win CE compilers!
+       for device,platforms in supported_platforms:
+               targets = {}
+               for platform,compiler,include,lib in platforms:
+                       winCEpath = os.path.join(vc_path, 'ce')
+                       if not os.path.isdir(winCEpath):
+                               continue
+
+                       if os.path.isdir(os.path.join(winCEpath, 'lib', platform)):
+                               bindirs = [os.path.join(winCEpath, 'bin', compiler), os.path.join(winCEpath, 'bin', 'x86_'+compiler)]
+                               incdirs = [os.path.join(winCEpath, 'include'), os.path.join(winCEpath, 'atlmfc', 'include'), include]
+                               libdirs = [os.path.join(winCEpath, 'lib', platform), os.path.join(winCEpath, 'atlmfc', 'lib', platform), lib]
+                               def combine_common(obj, compiler_env):
+                                       (common_bindirs,_1,_2) = compiler_env
+                                       return (bindirs + common_bindirs, incdirs, libdirs)
+                               targets[platform] = target_compiler(conf, 'msvc', platform, version, 'x86', vsvars, combine_common)
+               if targets:
+                       versions[device + ' ' + version] = targets
+
+@conf
+def gather_winphone_targets(conf, versions, version, vc_path, vsvars):
+       #Looking for WinPhone compilers
+       targets = {}
+       for target,realtarget in all_msvc_platforms[::-1]:
+               targets[target] = target_compiler(conf, 'winphone', realtarget, version, target, vsvars)
+       if targets:
+               versions['winphone ' + version] = targets
+
+@conf
+def gather_msvc_versions(conf, versions):
+       vc_paths = []
+       for (v,version,reg) in gather_msvc_detected_versions():
+               try:
+                       try:
+                               msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, reg + "\\Setup\\VC")
+                       except WindowsError:
+                               msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, reg + "\\Setup\\Microsoft Visual C++")
+                       path,type = Utils.winreg.QueryValueEx(msvc_version, 'ProductDir')
+               except WindowsError:
+                       try:
+                               msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, "SOFTWARE\\Wow6432node\\Microsoft\\VisualStudio\\SxS\\VS7")
+                               path,type = Utils.winreg.QueryValueEx(msvc_version, version)
+                       except WindowsError:
+                               continue
+                       else:
+                               vc_paths.append((version, os.path.abspath(str(path))))
+                       continue
+               else:
+                       vc_paths.append((version, os.path.abspath(str(path))))
+
+       wince_supported_platforms = gather_wince_supported_platforms()
+
+       for version,vc_path in vc_paths:
+               vs_path = os.path.dirname(vc_path)
+               vsvars = os.path.join(vs_path, 'Common7', 'Tools', 'vsvars32.bat')
+               if wince_supported_platforms and os.path.isfile(vsvars):
+                       conf.gather_wince_targets(versions, version, vc_path, vsvars, wince_supported_platforms)
+
+       # WP80 works with 11.0Exp and 11.0, both of which resolve to the same vc_path.
+       # Stop after one is found.
+       for version,vc_path in vc_paths:
+               vs_path = os.path.dirname(vc_path)
+               vsvars = os.path.join(vs_path, 'VC', 'WPSDK', 'WP80', 'vcvarsphoneall.bat')
+               if os.path.isfile(vsvars):
+                       conf.gather_winphone_targets(versions, '8.0', vc_path, vsvars)
+                       break
+
+       for version,vc_path in vc_paths:
+               vs_path = os.path.dirname(vc_path)
+               conf.gather_msvc_targets(versions, version, vc_path)
+
+@conf
+def gather_icl_versions(conf, versions):
+       """
+       Checks ICL compilers
+
+       :param versions: list to modify
+       :type versions: list
+       """
+       version_pattern = re.compile('^...?.?\....?.?')
+       try:
+               all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++')
+       except WindowsError:
+               try:
+                       all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\C++')
+               except WindowsError:
+                       return
+       index = 0
+       while 1:
+               try:
+                       version = Utils.winreg.EnumKey(all_versions, index)
+               except WindowsError:
+                       break
+               index += 1
+               if not version_pattern.match(version):
+                       continue
+               targets = {}
+               for target,arch in all_icl_platforms:
+                       if target=='intel64': targetDir='EM64T_NATIVE'
+                       else: targetDir=target
+                       try:
+                               Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir)
+                               icl_version=Utils.winreg.OpenKey(all_versions,version)
+                               path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
+                       except WindowsError:
+                               pass
+                       else:
+                               batch_file=os.path.join(path,'bin','iclvars.bat')
+                               if os.path.isfile(batch_file):
+                                       targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
+               for target,arch in all_icl_platforms:
+                       try:
+                               icl_version = Utils.winreg.OpenKey(all_versions, version+'\\'+target)
+                               path,type = Utils.winreg.QueryValueEx(icl_version,'ProductDir')
+                       except WindowsError:
+                               continue
+                       else:
+                               batch_file=os.path.join(path,'bin','iclvars.bat')
+                               if os.path.isfile(batch_file):
+                                       targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
+               major = version[0:2]
+               versions['intel ' + major] = targets
+
+@conf
+def gather_intel_composer_versions(conf, versions):
+       """
+       Checks ICL compilers that are part of Intel Composer Suites
+
+       :param versions: list to modify
+       :type versions: list
+       """
+       version_pattern = re.compile('^...?.?\...?.?.?')
+       try:
+               all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Suites')
+       except WindowsError:
+               try:
+                       all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Suites')
+               except WindowsError:
+                       return
+       index = 0
+       while 1:
+               try:
+                       version = Utils.winreg.EnumKey(all_versions, index)
+               except WindowsError:
+                       break
+               index += 1
+               if not version_pattern.match(version):
+                       continue
+               targets = {}
+               for target,arch in all_icl_platforms:
+                       if target=='intel64': targetDir='EM64T_NATIVE'
+                       else: targetDir=target
+                       try:
+                               try:
+                                       defaults = Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\'+targetDir)
+                               except WindowsError:
+                                       if targetDir == 'EM64T_NATIVE':
+                                               defaults = Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\EM64T')
+                                       else:
+                                               raise
+                               uid,type = Utils.winreg.QueryValueEx(defaults, 'SubKey')
+                               Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++\\'+targetDir)
+                               icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++')
+                               path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
+                       except WindowsError:
+                               pass
+                       else:
+                               batch_file=os.path.join(path,'bin','iclvars.bat')
+                               if os.path.isfile(batch_file):
+                                       targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
+                               # The intel compilervar_arch.bat is broken when used with Visual Studio Express 2012
+                               # http://software.intel.com/en-us/forums/topic/328487
+                               compilervars_warning_attr = '_compilervars_warning_key'
+                               if version[0:2] == '13' and getattr(conf, compilervars_warning_attr, True):
+                                       setattr(conf, compilervars_warning_attr, False)
+                                       patch_url = 'http://software.intel.com/en-us/forums/topic/328487'
+                                       compilervars_arch = os.path.join(path, 'bin', 'compilervars_arch.bat')
+                                       for vscomntool in ('VS110COMNTOOLS', 'VS100COMNTOOLS'):
+                                               if vscomntool in os.environ:
+                                                       vs_express_path = os.environ[vscomntool] + r'..\IDE\VSWinExpress.exe'
+                                                       dev_env_path = os.environ[vscomntool] + r'..\IDE\devenv.exe'
+                                                       if (r'if exist "%VS110COMNTOOLS%..\IDE\VSWinExpress.exe"' in Utils.readf(compilervars_arch) and
+                                                               not os.path.exists(vs_express_path) and not os.path.exists(dev_env_path)):
+                                                               Logs.warn(('The Intel compilervar_arch.bat only checks for one Visual Studio SKU '
+                                                               '(VSWinExpress.exe) but it does not seem to be installed at %r. '
+                                                               'The intel command line set up will fail to configure unless the file %r'
+                                                               'is patched. See: %s') % (vs_express_path, compilervars_arch, patch_url))
+               major = version[0:2]
+               versions['intel ' + major] = targets
+
+@conf
+def detect_msvc(self):
+       return self.setup_msvc(self.get_msvc_versions())
+
+@conf
+def get_msvc_versions(self):
+       """
+       :return: platform to compiler configurations
+       :rtype: dict
+       """
+       dct = Utils.ordered_iter_dict()
+       self.gather_icl_versions(dct)
+       self.gather_intel_composer_versions(dct)
+       self.gather_wsdk_versions(dct)
+       self.gather_msvc_versions(dct)
+       Logs.debug('msvc: detected versions %r', list(dct.keys()))
+       return dct
+
+@conf
+def find_lt_names_msvc(self, libname, is_static=False):
+       """
+       Win32/MSVC specific code to glean out information from libtool la files.
+       this function is not attached to the task_gen class. Returns a triplet:
+       (library absolute path, library name without extension, whether the library is static)
+       """
+       lt_names=[
+               'lib%s.la' % libname,
+               '%s.la' % libname,
+       ]
+
+       for path in self.env.LIBPATH:
+               for la in lt_names:
+                       laf=os.path.join(path,la)
+                       dll=None
+                       if os.path.exists(laf):
+                               ltdict = Utils.read_la_file(laf)
+                               lt_libdir=None
+                               if ltdict.get('libdir', ''):
+                                       lt_libdir = ltdict['libdir']
+                               if not is_static and ltdict.get('library_names', ''):
+                                       dllnames=ltdict['library_names'].split()
+                                       dll=dllnames[0].lower()
+					dll=re.sub(r'\.dll$', '', dll)
+                                       return (lt_libdir, dll, False)
+                               elif ltdict.get('old_library', ''):
+                                       olib=ltdict['old_library']
+                                       if os.path.exists(os.path.join(path,olib)):
+                                               return (path, olib, True)
+                                       elif lt_libdir != '' and os.path.exists(os.path.join(lt_libdir,olib)):
+                                               return (lt_libdir, olib, True)
+                                       else:
+                                               return (None, olib, True)
+                               else:
+                                       raise self.errors.WafError('invalid libtool object file: %s' % laf)
+       return (None, None, None)
+
+@conf
+def libname_msvc(self, libname, is_static=False):
+       lib = libname.lower()
+	lib = re.sub(r'\.lib$','',lib)
+
+       if lib in g_msvc_systemlibs:
+               return lib
+
+       lib=re.sub('^lib','',lib)
+
+       if lib == 'm':
+               return None
+
+       (lt_path, lt_libname, lt_static) = self.find_lt_names_msvc(lib, is_static)
+
+       if lt_path != None and lt_libname != None:
+               if lt_static == True:
+                       # file existence check has been made by find_lt_names
+                       return os.path.join(lt_path,lt_libname)
+
+       if lt_path != None:
+               _libpaths = [lt_path] + self.env.LIBPATH
+       else:
+               _libpaths = self.env.LIBPATH
+
+       static_libs=[
+               'lib%ss.lib' % lib,
+               'lib%s.lib' % lib,
+               '%ss.lib' % lib,
+               '%s.lib' %lib,
+               ]
+
+       dynamic_libs=[
+               'lib%s.dll.lib' % lib,
+               'lib%s.dll.a' % lib,
+               '%s.dll.lib' % lib,
+               '%s.dll.a' % lib,
+               'lib%s_d.lib' % lib,
+               '%s_d.lib' % lib,
+               '%s.lib' %lib,
+               ]
+
+       libnames=static_libs
+       if not is_static:
+               libnames=dynamic_libs + static_libs
+
+       for path in _libpaths:
+               for libn in libnames:
+                       if os.path.exists(os.path.join(path, libn)):
+                               Logs.debug('msvc: lib found: %s', os.path.join(path,libn))
+				return re.sub(r'\.lib$', '',libn)
+
+       #if no lib can be found, just return the libname as msvc expects it
+       self.fatal('The library %r could not be found' % libname)
+	return re.sub(r'\.lib$', '', libname)
+
+@conf
+def check_lib_msvc(self, libname, is_static=False, uselib_store=None):
+       """
+       Ideally we should be able to place the lib in the right env var, either STLIB or LIB,
+       but we don't distinguish static libs from shared libs.
+       This is ok since msvc doesn't have any special linker flag to select static libs (no env.STLIB_MARKER)
+       """
+       libn = self.libname_msvc(libname, is_static)
+
+       if not uselib_store:
+               uselib_store = libname.upper()
+
+       if False and is_static: # disabled
+               self.env['STLIB_' + uselib_store] = [libn]
+       else:
+               self.env['LIB_' + uselib_store] = [libn]
+
+@conf
+def check_libs_msvc(self, libnames, is_static=False):
+       for libname in Utils.to_list(libnames):
+               self.check_lib_msvc(libname, is_static)
+
+def configure(conf):
+       """
+       Configuration methods to call for detecting msvc
+       """
+       conf.autodetect(True)
+       conf.find_msvc()
+       conf.msvc_common_flags()
+       conf.cc_load_tools()
+       conf.cxx_load_tools()
+       conf.cc_add_flags()
+       conf.cxx_add_flags()
+       conf.link_add_flags()
+       conf.visual_studio_add_flags()
+
+@conf
+def no_autodetect(conf):
+       conf.env.NO_MSVC_DETECT = 1
+       configure(conf)
+
+@conf
+def autodetect(conf, arch=False):
+       v = conf.env
+       if v.NO_MSVC_DETECT:
+               return
+
+       compiler, version, path, includes, libdirs, cpu = conf.detect_msvc()
+       if arch:
+               v.DEST_CPU = cpu
+
+       v.PATH = path
+       v.INCLUDES = includes
+       v.LIBPATH = libdirs
+       v.MSVC_COMPILER = compiler
+       try:
+               v.MSVC_VERSION = float(version)
+       except TypeError:
+               v.MSVC_VERSION = float(version[:-3])
+
+def _get_prog_names(conf, compiler):
+       if compiler == 'intel':
+               compiler_name = 'ICL'
+               linker_name = 'XILINK'
+               lib_name = 'XILIB'
+       else:
+               # assumes CL.exe
+               compiler_name = 'CL'
+               linker_name = 'LINK'
+               lib_name = 'LIB'
+       return compiler_name, linker_name, lib_name
+
+@conf
+def find_msvc(conf):
+       """Due to path format limitations, limit operation only to native Win32. Yeah it sucks."""
+       if sys.platform == 'cygwin':
+               conf.fatal('MSVC module does not work under cygwin Python!')
+
+       # the autodetection is supposed to be performed before entering in this method
+       v = conf.env
+       path = v.PATH
+       compiler = v.MSVC_COMPILER
+       version = v.MSVC_VERSION
+
+       compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
+       v.MSVC_MANIFEST = (compiler == 'msvc' and version >= 8) or (compiler == 'wsdk' and version >= 6) or (compiler == 'intel' and version >= 11)
+
+       # compiler
+       cxx = conf.find_program(compiler_name, var='CXX', path_list=path)
+
+       # before setting anything, check if the compiler is really msvc
+       env = dict(conf.environ)
+       if path: env.update(PATH = ';'.join(path))
+       if not conf.cmd_and_log(cxx + ['/nologo', '/help'], env=env):
+               conf.fatal('the msvc compiler could not be identified')
+
+       # c/c++ compiler
+       v.CC = v.CXX = cxx
+       v.CC_NAME = v.CXX_NAME = 'msvc'
+
+       # linker
+       if not v.LINK_CXX:
+               # TODO: var=LINK_CXX to let so that LINK_CXX can be overridden?
+               v.LINK_CXX = conf.find_program(linker_name, path_list=path, errmsg='%s was not found (linker)' % linker_name)
+
+       if not v.LINK_CC:
+               v.LINK_CC = v.LINK_CXX
+
+       # staticlib linker
+       if not v.AR:
+               stliblink = conf.find_program(lib_name, path_list=path, var='AR')
+               if not stliblink:
+                       return
+               v.ARFLAGS = ['/nologo']
+
+       # manifest tool. Not required for VS 2003 and below. Must have for VS 2005 and later
+       if v.MSVC_MANIFEST:
+               conf.find_program('MT', path_list=path, var='MT')
+               v.MTFLAGS = ['/nologo']
+
+       try:
+               conf.load('winres')
+       except Errors.ConfigurationError:
+               Logs.warn('Resource compiler not found. Compiling resource file is disabled')
+
+@conf
+def visual_studio_add_flags(self):
+       """visual studio flags found in the system environment"""
+       v = self.env
+       if self.environ.get('INCLUDE'):
+               v.prepend_value('INCLUDES', [x for x in self.environ['INCLUDE'].split(';') if x]) # notice the 'S'
+       if self.environ.get('LIB'):
+               v.prepend_value('LIBPATH', [x for x in self.environ['LIB'].split(';') if x])
+
+@conf
+def msvc_common_flags(conf):
+       """
+       Setup the flags required for executing the msvc compiler
+       """
+       v = conf.env
+
+       v.DEST_BINFMT = 'pe'
+       v.append_value('CFLAGS', ['/nologo'])
+       v.append_value('CXXFLAGS', ['/nologo'])
+       v.append_value('LINKFLAGS', ['/nologo'])
+       v.DEFINES_ST   = '/D%s'
+
+       v.CC_SRC_F     = ''
+       v.CC_TGT_F     = ['/c', '/Fo']
+       v.CXX_SRC_F    = ''
+       v.CXX_TGT_F    = ['/c', '/Fo']
+
+       if (v.MSVC_COMPILER == 'msvc' and v.MSVC_VERSION >= 8) or (v.MSVC_COMPILER == 'wsdk' and v.MSVC_VERSION >= 6):
+               v.CC_TGT_F = ['/FC'] + v.CC_TGT_F
+               v.CXX_TGT_F = ['/FC'] + v.CXX_TGT_F
+
+       v.CPPPATH_ST = '/I%s' # template for adding include paths
+
+       v.AR_TGT_F = v.CCLNK_TGT_F = v.CXXLNK_TGT_F = '/OUT:'
+
+       # Subsystem specific flags
+       v.CFLAGS_CONSOLE   = v.CXXFLAGS_CONSOLE   = ['/SUBSYSTEM:CONSOLE']
+       v.CFLAGS_NATIVE    = v.CXXFLAGS_NATIVE    = ['/SUBSYSTEM:NATIVE']
+       v.CFLAGS_POSIX     = v.CXXFLAGS_POSIX     = ['/SUBSYSTEM:POSIX']
+       v.CFLAGS_WINDOWS   = v.CXXFLAGS_WINDOWS   = ['/SUBSYSTEM:WINDOWS']
+       v.CFLAGS_WINDOWSCE = v.CXXFLAGS_WINDOWSCE = ['/SUBSYSTEM:WINDOWSCE']
+
+       # CRT specific flags
+       v.CFLAGS_CRT_MULTITHREADED     = v.CXXFLAGS_CRT_MULTITHREADED     = ['/MT']
+       v.CFLAGS_CRT_MULTITHREADED_DLL = v.CXXFLAGS_CRT_MULTITHREADED_DLL = ['/MD']
+
+       v.CFLAGS_CRT_MULTITHREADED_DBG     = v.CXXFLAGS_CRT_MULTITHREADED_DBG     = ['/MTd']
+       v.CFLAGS_CRT_MULTITHREADED_DLL_DBG = v.CXXFLAGS_CRT_MULTITHREADED_DLL_DBG = ['/MDd']
+
+       v.LIB_ST            = '%s.lib'
+       v.LIBPATH_ST        = '/LIBPATH:%s'
+       v.STLIB_ST          = '%s.lib'
+       v.STLIBPATH_ST      = '/LIBPATH:%s'
+
+       if v.MSVC_MANIFEST:
+               v.append_value('LINKFLAGS', ['/MANIFEST'])
+
+       v.CFLAGS_cshlib     = []
+       v.CXXFLAGS_cxxshlib = []
+       v.LINKFLAGS_cshlib  = v.LINKFLAGS_cxxshlib = ['/DLL']
+       v.cshlib_PATTERN    = v.cxxshlib_PATTERN = '%s.dll'
+       v.implib_PATTERN    = '%s.lib'
+       v.IMPLIB_ST         = '/IMPLIB:%s'
+
+       v.LINKFLAGS_cstlib  = []
+       v.cstlib_PATTERN    = v.cxxstlib_PATTERN = '%s.lib'
+
+       v.cprogram_PATTERN  = v.cxxprogram_PATTERN = '%s.exe'
+
+
+#######################################################################################################
+##### conf above, build below
+
+@after_method('apply_link')
+@feature('c', 'cxx')
+def apply_flags_msvc(self):
+       """
+       Add additional flags implied by msvc, such as subsystems and pdb files::
+
+               def build(bld):
+                       bld.stlib(source='main.c', target='bar', subsystem='gruik')
+       """
+       if self.env.CC_NAME != 'msvc' or not getattr(self, 'link_task', None):
+               return
+
+       is_static = isinstance(self.link_task, ccroot.stlink_task)
+
+       subsystem = getattr(self, 'subsystem', '')
+       if subsystem:
+               subsystem = '/subsystem:%s' % subsystem
+               flags = is_static and 'ARFLAGS' or 'LINKFLAGS'
+               self.env.append_value(flags, subsystem)
+
+       if not is_static:
+               for f in self.env.LINKFLAGS:
+                       d = f.lower()
+                       if d[1:] == 'debug':
+                               pdbnode = self.link_task.outputs[0].change_ext('.pdb')
+                               self.link_task.outputs.append(pdbnode)
+
+                               if getattr(self, 'install_task', None):
+                                       self.pdb_install_task = self.add_install_files(
+                                               install_to=self.install_task.install_to, install_from=pdbnode)
+                               break
+
+@feature('cprogram', 'cshlib', 'cxxprogram', 'cxxshlib')
+@after_method('apply_link')
+def apply_manifest(self):
+       """
+       Special linker for MSVC with support for embedding manifests into DLL's
+       and executables compiled by Visual Studio 2005 or probably later. Without
+       the manifest file, the binaries are unusable.
+       See: http://msdn2.microsoft.com/en-us/library/ms235542(VS.80).aspx
+       """
+       if self.env.CC_NAME == 'msvc' and self.env.MSVC_MANIFEST and getattr(self, 'link_task', None):
+               out_node = self.link_task.outputs[0]
+               man_node = out_node.parent.find_or_declare(out_node.name + '.manifest')
+               self.link_task.outputs.append(man_node)
+               self.env.DO_MANIFEST = True
+
+def make_winapp(self, family):
+       append = self.env.append_unique
+       append('DEFINES', 'WINAPI_FAMILY=%s' % family)
+       append('CXXFLAGS', ['/ZW', '/TP'])
+       for lib_path in self.env.LIBPATH:
+               append('CXXFLAGS','/AI%s'%lib_path)
+
+@feature('winphoneapp')
+@after_method('process_use')
+@after_method('propagate_uselib_vars')
+def make_winphone_app(self):
+       """
+       Insert configuration flags for windows phone applications (adds /ZW, /TP...)
+       """
+       make_winapp(self, 'WINAPI_FAMILY_PHONE_APP')
+	self.env.append_unique('LINKFLAGS', ['/NODEFAULTLIB:ole32.lib', 'PhoneAppModelHost.lib'])
+
+@feature('winapp')
+@after_method('process_use')
+@after_method('propagate_uselib_vars')
+def make_windows_app(self):
+       """
+       Insert configuration flags for windows applications (adds /ZW, /TP...)
+       """
+       make_winapp(self, 'WINAPI_FAMILY_DESKTOP_APP')
diff --git a/third_party/waf/waflib/Tools/nasm.py b/third_party/waf/waflib/Tools/nasm.py
new file mode 100644 (file)
index 0000000..ab0e21f
--- /dev/null
@@ -0,0 +1,30 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2008-2016 (ita)
+
+"""
+Nasm tool (asm processing)
+"""
+
+import os
+import waflib.Tools.asm # leave this
+from waflib.TaskGen import feature
+
+@feature('asm')
+def apply_nasm_vars(self):
+       """provided for compatibility"""
+       self.env.append_value('ASFLAGS', self.to_list(getattr(self, 'nasm_flags', [])))
+
+def configure(conf):
+       """
+       Detect nasm/yasm and set the variable *AS*
+       """
+       conf.find_program(['nasm', 'yasm'], var='AS')
+       conf.env.AS_TGT_F = ['-o']
+       conf.env.ASLNK_TGT_F = ['-o']
+       conf.load('asm')
+       conf.env.ASMPATH_ST = '-I%s' + os.sep
diff --git a/third_party/waf/waflib/Tools/nobuild.py b/third_party/waf/waflib/Tools/nobuild.py
new file mode 100644 (file)
index 0000000..4086a8b
--- /dev/null
@@ -0,0 +1,27 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2015 (ita)
+
+"""
+Override the build commands to write empty files.
+This is useful for profiling and evaluating the Python overhead.
+
+To use::
+
+    def build(bld):
+        ...
+        bld.load('nobuild')
+
+"""
+
+from waflib import Task
+def build(bld):
+       def run(self):
+               for x in self.outputs:
+                       x.write('')
+       for (name, cls) in Task.classes.items():
+               cls.run = run
diff --git a/third_party/waf/waflib/Tools/perl.py b/third_party/waf/waflib/Tools/perl.py
new file mode 100644 (file)
index 0000000..cc8fe47
--- /dev/null
@@ -0,0 +1,158 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# andersg at 0x63.nu 2007
+# Thomas Nagy 2016 (ita)
+
+"""
+Support for Perl extensions. A C/C++ compiler is required::
+
+       def options(opt):
+               opt.load('compiler_c perl')
+       def configure(conf):
+               conf.load('compiler_c perl')
+               conf.check_perl_version((5,6,0))
+               conf.check_perl_ext_devel()
+               conf.check_perl_module('Cairo')
+               conf.check_perl_module('Devel::PPPort 4.89')
+       def build(bld):
+               bld(
+                       features     = 'c cshlib perlext',
+                       source       = 'Mytest.xs',
+                       target       = 'Mytest',
+                       install_path = '${ARCHDIR_PERL}/auto')
+               bld.install_files('${ARCHDIR_PERL}', 'Mytest.pm')
+"""
+
+import os
+from waflib import Task, Options, Utils
+from waflib.Configure import conf
+from waflib.TaskGen import extension, feature, before_method
+
+@before_method('apply_incpaths', 'apply_link', 'propagate_uselib_vars')
+@feature('perlext')
+def init_perlext(self):
+       """
+       Change the values of *cshlib_PATTERN* and *cxxshlib_PATTERN* to remove the
+       *lib* prefix from library names.
+       """
+       self.uselib = self.to_list(getattr(self, 'uselib', []))
+       if not 'PERLEXT' in self.uselib: self.uselib.append('PERLEXT')
+       self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.perlext_PATTERN
+
+@extension('.xs')
+def xsubpp_file(self, node):
+       """
+       Create :py:class:`waflib.Tools.perl.xsubpp` tasks to process *.xs* files
+       """
+       outnode = node.change_ext('.c')
+       self.create_task('xsubpp', node, outnode)
+       self.source.append(outnode)
+
+class xsubpp(Task.Task):
+       """
+       Process *.xs* files
+       """
+       run_str = '${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}'
+       color   = 'BLUE'
+       ext_out = ['.h']
+
+@conf
+def check_perl_version(self, minver=None):
+       """
+       Check if Perl is installed, and set the variable PERL.
+       minver is supposed to be a tuple
+       """
+       res = True
+       if minver:
+               cver = '.'.join(map(str,minver))
+       else:
+               cver = ''
+
+       self.start_msg('Checking for minimum perl version %s' % cver)
+
+       perl = self.find_program('perl', var='PERL', value=getattr(Options.options, 'perlbinary', None))
+       version = self.cmd_and_log(perl + ["-e", 'printf \"%vd\", $^V'])
+       if not version:
+               res = False
+               version = "Unknown"
+       elif not minver is None:
+               ver = tuple(map(int, version.split(".")))
+               if ver < minver:
+                       res = False
+
+       self.end_msg(version, color=res and 'GREEN' or 'YELLOW')
+       return res
+
+@conf
+def check_perl_module(self, module):
+       """
+       Check if specified perlmodule is installed.
+
+       The minimum version can be specified by specifying it after modulename
+       like this::
+
+               def configure(conf):
+                       conf.check_perl_module("Some::Module 2.92")
+       """
+       cmd = self.env.PERL + ['-e', 'use %s' % module]
+       self.start_msg('perl module %s' % module)
+       try:
+               r = self.cmd_and_log(cmd)
+       except Exception:
+               self.end_msg(False)
+               return None
+       self.end_msg(r or True)
+       return r
+
+@conf
+def check_perl_ext_devel(self):
+       """
+       Check for configuration needed to build perl extensions.
+
+       Sets different xxx_PERLEXT variables in the environment.
+
+       Also sets the ARCHDIR_PERL variable useful as installation path,
+       which can be overridden by ``--with-perl-archdir`` option.
+       """
+
+       env = self.env
+       perl = env.PERL
+       if not perl:
+               self.fatal('find perl first')
+
+       def cmd_perl_config(s):
+               return perl + ['-MConfig', '-e', 'print \"%s\"' % s]
+       def cfg_str(cfg):
+               return self.cmd_and_log(cmd_perl_config(cfg))
+       def cfg_lst(cfg):
+               return Utils.to_list(cfg_str(cfg))
+       def find_xsubpp():
+               for var in ('privlib', 'vendorlib'):
+                       xsubpp = cfg_lst('$Config{%s}/ExtUtils/xsubpp$Config{exe_ext}' % var)
+                       if xsubpp and os.path.isfile(xsubpp[0]):
+                               return xsubpp
+               return self.find_program('xsubpp')
+
+       env.LINKFLAGS_PERLEXT = cfg_lst('$Config{lddlflags}')
+       env.INCLUDES_PERLEXT = cfg_lst('$Config{archlib}/CORE')
+       env.CFLAGS_PERLEXT = cfg_lst('$Config{ccflags} $Config{cccdlflags}')
+       env.EXTUTILS_TYPEMAP = cfg_lst('$Config{privlib}/ExtUtils/typemap')
+       env.XSUBPP = find_xsubpp()
+
+       if not getattr(Options.options, 'perlarchdir', None):
+               env.ARCHDIR_PERL = cfg_str('$Config{sitearch}')
+       else:
+               env.ARCHDIR_PERL = getattr(Options.options, 'perlarchdir')
+
+       env.perlext_PATTERN = '%s.' + cfg_str('$Config{dlext}')
+
+def options(opt):
+       """
+       Add the ``--with-perl-archdir`` and ``--with-perl-binary`` command-line options.
+       """
+       opt.add_option('--with-perl-binary', type='string', dest='perlbinary', help = 'Specify alternate perl binary', default=None)
+       opt.add_option('--with-perl-archdir', type='string', dest='perlarchdir', help = 'Specify directory where to install arch specific files', default=None)
diff --git a/third_party/waf/waflib/Tools/python.py b/third_party/waf/waflib/Tools/python.py
new file mode 100644 (file)
index 0000000..587fc9c
--- /dev/null
@@ -0,0 +1,630 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2007-2015 (ita)
+# Gustavo Carneiro (gjc), 2007
+
+"""
+Support for Python, detect the headers and libraries and provide
+*use* variables to link C/C++ programs against them::
+
+       def options(opt):
+               opt.load('compiler_c python')
+       def configure(conf):
+               conf.load('compiler_c python')
+               conf.check_python_version((2,4,2))
+               conf.check_python_headers()
+       def build(bld):
+               bld.program(features='pyembed', source='a.c', target='myprog')
+               bld.shlib(features='pyext', source='b.c', target='mylib')
+"""
+
+import os, sys
+from waflib import Utils, Options, Errors, Logs, Task, Node
+from waflib.TaskGen import extension, before_method, after_method, feature
+from waflib.Configure import conf
+
+FRAG = '''
+#include <Python.h>
+#ifdef __cplusplus
+extern "C" {
+#endif
+       void Py_Initialize(void);
+       void Py_Finalize(void);
+#ifdef __cplusplus
+}
+#endif
+int main(int argc, char **argv)
+{
+   (void)argc; (void)argv;
+   Py_Initialize();
+   Py_Finalize();
+   return 0;
+}
+'''
+"""
+Piece of C/C++ code used in :py:func:`waflib.Tools.python.check_python_headers`
+"""
+
+INST = '''
+import sys, py_compile
+py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3], True)
+'''
+"""
+Piece of Python code used in :py:func:`waflib.Tools.python.pytask` for byte-compiling python files
+"""
+
+DISTUTILS_IMP = ['from distutils.sysconfig import get_config_var, get_python_lib']
+
+@before_method('process_source')
+@feature('py')
+def feature_py(self):
+       """
+       Create tasks to byte-compile .py files and install them, if requested
+       """
+       self.install_path = getattr(self, 'install_path', '${PYTHONDIR}')
+       install_from = getattr(self, 'install_from', None)
+       if install_from and not isinstance(install_from, Node.Node):
+               install_from = self.path.find_dir(install_from)
+       self.install_from = install_from
+
+       ver = self.env.PYTHON_VERSION
+       if not ver:
+               self.bld.fatal('Installing python files requires PYTHON_VERSION, try conf.check_python_version')
+
+       if int(ver.replace('.', '')) > 31:
+               self.install_32 = True
+
+@extension('.py')
+def process_py(self, node):
+       """
+       Add signature of .py file, so it will be byte-compiled when necessary
+       """
+       assert(getattr(self, 'install_path')), 'add features="py"'
+
+       # where to install the python file
+       if self.install_path:
+               if self.install_from:
+                       self.add_install_files(install_to=self.install_path, install_from=node, cwd=self.install_from, relative_trick=True)
+               else:
+                       self.add_install_files(install_to=self.install_path, install_from=node, relative_trick=True)
+
+       lst = []
+       if self.env.PYC:
+               lst.append('pyc')
+       if self.env.PYO:
+               lst.append('pyo')
+
+       if self.install_path:
+               if self.install_from:
+                       pyd = Utils.subst_vars("%s/%s" % (self.install_path, node.path_from(self.install_from)), self.env)
+               else:
+                       pyd = Utils.subst_vars("%s/%s" % (self.install_path, node.path_from(self.path)), self.env)
+       else:
+               pyd = node.abspath()
+
+       for ext in lst:
+               if self.env.PYTAG and not self.env.NOPYCACHE:
+                       # __pycache__ installation for python 3.2 - PEP 3147
+                       name = node.name[:-3]
+                       pyobj = node.parent.get_bld().make_node('__pycache__').make_node("%s.%s.%s" % (name, self.env.PYTAG, ext))
+                       pyobj.parent.mkdir()
+               else:
+                       pyobj = node.change_ext(".%s" % ext)
+
+               tsk = self.create_task(ext, node, pyobj)
+               tsk.pyd = pyd
+
+               if self.install_path:
+                       self.add_install_files(install_to=os.path.dirname(pyd), install_from=pyobj, cwd=node.parent.get_bld(), relative_trick=True)
+
+class pyc(Task.Task):
+       """
+       Byte-compiling python files
+       """
+       color = 'PINK'
+       def __str__(self):
+               node = self.outputs[0]
+               return node.path_from(node.ctx.launch_node())
+       def run(self):
+               cmd = [Utils.subst_vars('${PYTHON}', self.env), '-c', INST, self.inputs[0].abspath(), self.outputs[0].abspath(), self.pyd]
+               ret = self.generator.bld.exec_command(cmd)
+               return ret
+
+class pyo(Task.Task):
+       """
+       Byte-compiling python files
+       """
+       color = 'PINK'
+       def __str__(self):
+               node = self.outputs[0]
+               return node.path_from(node.ctx.launch_node())
+       def run(self):
+               cmd = [Utils.subst_vars('${PYTHON}', self.env), Utils.subst_vars('${PYFLAGS_OPT}', self.env), '-c', INST, self.inputs[0].abspath(), self.outputs[0].abspath(), self.pyd]
+               ret = self.generator.bld.exec_command(cmd)
+               return ret
+
+@feature('pyext')
+@before_method('propagate_uselib_vars', 'apply_link')
+@after_method('apply_bundle')
+def init_pyext(self):
+       """
+       Change the values of *cshlib_PATTERN* and *cxxshlib_PATTERN* to remove the
+       *lib* prefix from library names.
+       """
+       self.uselib = self.to_list(getattr(self, 'uselib', []))
+       if not 'PYEXT' in self.uselib:
+               self.uselib.append('PYEXT')
+       # override shlib_PATTERN set by the osx module
+       self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.macbundle_PATTERN = self.env.pyext_PATTERN
+       self.env.fcshlib_PATTERN = self.env.dshlib_PATTERN = self.env.pyext_PATTERN
+
+       try:
+               if not self.install_path:
+                       return
+       except AttributeError:
+               self.install_path = '${PYTHONARCHDIR}'
+
+@feature('pyext')
+@before_method('apply_link', 'apply_bundle')
+def set_bundle(self):
+       """Mac-specific pyext extension that enables bundles from c_osx.py"""
+       if Utils.unversioned_sys_platform() == 'darwin':
+               self.mac_bundle = True
+
+@before_method('propagate_uselib_vars')
+@feature('pyembed')
+def init_pyembed(self):
+       """
+       Add the PYEMBED variable.
+       """
+       self.uselib = self.to_list(getattr(self, 'uselib', []))
+       if not 'PYEMBED' in self.uselib:
+               self.uselib.append('PYEMBED')
+
+@conf
+def get_python_variables(self, variables, imports=None):
+       """
+       Spawn a new python process to dump configuration variables
+
+       :param variables: variables to print
+       :type variables: list of string
+       :param imports: one import by element
+       :type imports: list of string
+       :return: the variable values
+       :rtype: list of string
+       """
+       if not imports:
+               try:
+                       imports = self.python_imports
+               except AttributeError:
+                       imports = DISTUTILS_IMP
+
+       program = list(imports) # copy
+       program.append('')
+       for v in variables:
+               program.append("print(repr(%s))" % v)
+       os_env = dict(os.environ)
+       try:
+               del os_env['MACOSX_DEPLOYMENT_TARGET'] # see comments in the OSX tool
+       except KeyError:
+               pass
+
+       try:
+               out = self.cmd_and_log(self.env.PYTHON + ['-c', '\n'.join(program)], env=os_env)
+       except Errors.WafError:
+               self.fatal('The distutils module is unusable: install "python-devel"?')
+       self.to_log(out)
+       return_values = []
+       for s in out.splitlines():
+               s = s.strip()
+               if not s:
+                       continue
+               if s == 'None':
+                       return_values.append(None)
+               elif (s[0] == "'" and s[-1] == "'") or (s[0] == '"' and s[-1] == '"'):
+                       return_values.append(eval(s))
+               elif s[0].isdigit():
+                       return_values.append(int(s))
+               else: break
+       return return_values
+
+@conf
+def test_pyembed(self, mode, msg='Testing pyembed configuration'):
+       self.check(header_name='Python.h', define_name='HAVE_PYEMBED', msg=msg,
+               fragment=FRAG, errmsg='Could not build a python embedded interpreter',
+               features='%s %sprogram pyembed' % (mode, mode))
+
+@conf
+def test_pyext(self, mode, msg='Testing pyext configuration'):
+       self.check(header_name='Python.h', define_name='HAVE_PYEXT', msg=msg,
+               fragment=FRAG, errmsg='Could not build python extensions',
+               features='%s %sshlib pyext' % (mode, mode))
+
+@conf
+def python_cross_compile(self, features='pyembed pyext'):
+       """
+       For cross-compilation purposes, it is possible to bypass the normal detection and set the flags that you want:
+       PYTHON_VERSION='3.4' PYTAG='cpython34' pyext_PATTERN="%s.so" PYTHON_LDFLAGS='-lpthread -ldl' waf configure
+
+       The following variables are used:
+       PYTHON_VERSION    required
+       PYTAG             required
+       PYTHON_LDFLAGS    required
+       pyext_PATTERN     required
+       PYTHON_PYEXT_LDFLAGS
+       PYTHON_PYEMBED_LDFLAGS
+       """
+       features = Utils.to_list(features)
+       if not ('PYTHON_LDFLAGS' in self.environ or 'PYTHON_PYEXT_LDFLAGS' in self.environ or 'PYTHON_PYEMBED_LDFLAGS' in self.environ):
+               return False
+
+       for x in 'PYTHON_VERSION PYTAG pyext_PATTERN'.split():
+               if not x in self.environ:
+                       self.fatal('Please set %s in the os environment' % x)
+               else:
+                       self.env[x] = self.environ[x]
+
+       xx = self.env.CXX_NAME and 'cxx' or 'c'
+       if 'pyext' in features:
+               flags = self.environ.get('PYTHON_PYEXT_LDFLAGS', self.environ.get('PYTHON_LDFLAGS'))
+               if flags is None:
+                       self.fatal('No flags provided through PYTHON_PYEXT_LDFLAGS as required')
+               else:
+                       self.parse_flags(flags, 'PYEXT')
+               self.test_pyext(xx)
+       if 'pyembed' in features:
+               flags = self.environ.get('PYTHON_PYEMBED_LDFLAGS', self.environ.get('PYTHON_LDFLAGS'))
+               if flags is None:
+                       self.fatal('No flags provided through PYTHON_PYEMBED_LDFLAGS as required')
+               else:
+                       self.parse_flags(flags, 'PYEMBED')
+               self.test_pyembed(xx)
+       return True
+
@conf
def check_python_headers(conf, features='pyembed pyext'):
	"""
	Check for headers and libraries necessary to extend or embed python by using the module *distutils*.
	On success the environment variables xxx_PYEXT and xxx_PYEMBED are added:

	* PYEXT: for compiling python extensions
	* PYEMBED: for embedding a python interpreter

	:param features: 'pyembed', 'pyext' or both (space-separated string or list)
	:raises: a configuration error when no compiler is loaded or the headers cannot be found
	"""
	features = Utils.to_list(features)
	assert ('pyembed' in features) or ('pyext' in features), "check_python_headers features must include 'pyembed' and/or 'pyext'"
	env = conf.env
	if not env.CC_NAME and not env.CXX_NAME:
		conf.fatal('load a compiler first (gcc, g++, ..)')

	# bypass all the code below for cross-compilation
	if conf.python_cross_compile(features):
		return

	if not env.PYTHON_VERSION:
		conf.check_python_version()

	pybin = env.PYTHON
	if not pybin:
		conf.fatal('Could not find the python executable')

	# so we actually do all this for compatibility reasons and for obtaining pyext_PATTERN below
	# (the values are read from the target interpreter's distutils.sysconfig)
	v = 'prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS LDVERSION'.split()
	try:
		lst = conf.get_python_variables(["get_config_var('%s') or ''" % x for x in v])
	except RuntimeError:
		conf.fatal("Python development headers not found (-v for details).")

	# Record what the interpreter reported, for debugging with -v
	vals = ['%s = %r' % (x, y) for (x, y) in zip(v, lst)]
	conf.to_log("Configuration returned from %r:\n%s\n" % (pybin, '\n'.join(vals)))

	dct = dict(zip(v, lst))
	x = 'MACOSX_DEPLOYMENT_TARGET'
	if dct[x]:
		# propagate the deployment target to child processes (compiler invocations)
		env[x] = conf.environ[x] = dct[x]
	env.pyext_PATTERN = '%s' + dct['SO'] # not a mistake


	# Try to get pythonX.Y-config
	num = '.'.join(env.PYTHON_VERSION.split('.')[:2])
	conf.find_program([''.join(pybin) + '-config', 'python%s-config' % num, 'python-config-%s' % num, 'python%sm-config' % num], var='PYTHON_CONFIG', msg="python-config", mandatory=False)

	if env.PYTHON_CONFIG:
		# python2.6-config requires 3 runs
		# NOTE(review): this tests sys.hexversion of the interpreter running waf,
		# not the target python — confirm this is the intended condition
		all_flags = [['--cflags', '--libs', '--ldflags']]
		if sys.hexversion < 0x2070000:
			all_flags = [[k] for k in all_flags[0]]

		xx = env.CXX_NAME and 'cxx' or 'c'

		if 'pyembed' in features:
			for flags in all_flags:
				conf.check_cfg(msg='Asking python-config for pyembed %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEMBED', args=flags)

			try:
				conf.test_pyembed(xx)
			except conf.errors.ConfigurationError:
				# python bug 7352
				if dct['Py_ENABLE_SHARED'] and dct['LIBDIR']:
					env.append_unique('LIBPATH_PYEMBED', [dct['LIBDIR']])
					conf.test_pyembed(xx)
				else:
					raise

		if 'pyext' in features:
			for flags in all_flags:
				conf.check_cfg(msg='Asking python-config for pyext %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEXT', args=flags)

			try:
				conf.test_pyext(xx)
			except conf.errors.ConfigurationError:
				# python bug 7352
				if dct['Py_ENABLE_SHARED'] and dct['LIBDIR']:
					env.append_unique('LIBPATH_PYEXT', [dct['LIBDIR']])
					conf.test_pyext(xx)
				else:
					raise

		conf.define('HAVE_PYTHON_H', 1)
		return

	# No python-config, do something else on windows systems
	all_flags = dct['LDFLAGS'] + ' ' + dct['CFLAGS']
	conf.parse_flags(all_flags, 'PYEMBED')

	all_flags = dct['LDFLAGS'] + ' ' + dct['LDSHARED'] + ' ' + dct['CFLAGS']
	conf.parse_flags(all_flags, 'PYEXT')

	result = None
	if not dct["LDVERSION"]:
		# older pythons do not define LDVERSION; fall back to the plain version string
		dct["LDVERSION"] = env.PYTHON_VERSION

	# further simplification will be complicated
	# try the candidate library names in order: pythonX.Y[m], then pythonXY (win32 style)
	for name in ('python' + dct['LDVERSION'], 'python' + env.PYTHON_VERSION + 'm', 'python' + env.PYTHON_VERSION.replace('.', '')):

		# LIBPATH_PYEMBED is already set; see if it works.
		if not result and env.LIBPATH_PYEMBED:
			path = env.LIBPATH_PYEMBED
			conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n" % path)
			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBPATH_PYEMBED' % name)

		if not result and dct['LIBDIR']:
			path = [dct['LIBDIR']]
			conf.to_log("\n\n# try again with -L$python_LIBDIR: %r\n" % path)
			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBDIR' % name)

		if not result and dct['LIBPL']:
			path = [dct['LIBPL']]
			conf.to_log("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n")
			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in python_LIBPL' % name)

		if not result:
			path = [os.path.join(dct['prefix'], "libs")]
			conf.to_log("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n")
			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in $prefix/libs' % name)

		if result:
			break # do not forget to set LIBPATH_PYEMBED

	if result:
		env.LIBPATH_PYEMBED = path
		env.append_value('LIB_PYEMBED', [name])
	else:
		conf.to_log("\n\n### LIB NOT FOUND\n")

	# under certain conditions, python extensions must link to
	# python libraries, not just python embedding programs.
	if Utils.is_win32 or dct['Py_ENABLE_SHARED']:
		env.LIBPATH_PYEXT = env.LIBPATH_PYEMBED
		env.LIB_PYEXT = env.LIB_PYEMBED

	conf.to_log("Include path for Python extensions (found via distutils module): %r\n" % (dct['INCLUDEPY'],))
	env.INCLUDES_PYEXT = [dct['INCLUDEPY']]
	env.INCLUDES_PYEMBED = [dct['INCLUDEPY']]

	# Code using the Python API needs to be compiled with -fno-strict-aliasing
	if env.CC_NAME == 'gcc':
		env.append_value('CFLAGS_PYEMBED', ['-fno-strict-aliasing'])
		env.append_value('CFLAGS_PYEXT', ['-fno-strict-aliasing'])
	if env.CXX_NAME == 'gcc':
		env.append_value('CXXFLAGS_PYEMBED', ['-fno-strict-aliasing'])
		env.append_value('CXXFLAGS_PYEXT', ['-fno-strict-aliasing'])

	if env.CC_NAME == "msvc":
		# reuse the flags distutils would have used, so extensions are ABI-compatible
		from distutils.msvccompiler import MSVCCompiler
		dist_compiler = MSVCCompiler()
		dist_compiler.initialize()
		env.append_value('CFLAGS_PYEXT', dist_compiler.compile_options)
		env.append_value('CXXFLAGS_PYEXT', dist_compiler.compile_options)
		env.append_value('LINKFLAGS_PYEXT', dist_compiler.ldflags_shared)

	# See if it compiles
	conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H', uselib='PYEMBED', fragment=FRAG, errmsg='Distutils not installed? Broken python installation? Get python-config now!')
+
@conf
def check_python_version(conf, minver=None):
	"""
	Check if the python interpreter is found matching a given minimum version.
	minver should be a tuple, eg. to check for python >= 2.4.2 pass (2,4,2) as minver.

	If successful, PYTHON_VERSION is defined as 'MAJOR.MINOR'
	(eg. '2.4') of the actual python version found, and PYTHONDIR is
	defined, pointing to the site-packages directory appropriate for
	this python version, where modules/packages/extensions should be
	installed.

	:param minver: minimum version
	:type minver: tuple of int
	:raises: a configuration error when no interpreter is set or the version is too old
	"""
	assert minver is None or isinstance(minver, tuple)
	pybin = conf.env.PYTHON
	if not pybin:
		conf.fatal('could not find the python executable')

	# Get python version string
	cmd = pybin + ['-c', 'import sys\nfor x in sys.version_info: print(str(x))']
	Logs.debug('python: Running python command %r', cmd)
	lines = conf.cmd_and_log(cmd).split()
	assert len(lines) == 5, "found %r lines, expected 5: %r" % (len(lines), lines)
	# (major, minor, micro, releaselevel, serial) — releaselevel stays a string
	pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4]))

	# Compare python version with the minimum required
	result = (minver is None) or (pyver_tuple >= minver)

	if result:
		# define useful environment variables
		pyver = '.'.join([str(x) for x in pyver_tuple[:2]])
		conf.env.PYTHON_VERSION = pyver

		if 'PYTHONDIR' in conf.env:
			# Check if --pythondir was specified
			pydir = conf.env.PYTHONDIR
		elif 'PYTHONDIR' in conf.environ:
			# Check environment for PYTHONDIR
			pydir = conf.environ['PYTHONDIR']
		else:
			# Finally, try to guess
			if Utils.is_win32:
				(python_LIBDEST, pydir) = conf.get_python_variables(
					  ["get_config_var('LIBDEST') or ''",
					   "get_python_lib(standard_lib=0) or ''"])
			else:
				python_LIBDEST = None
				(pydir,) = conf.get_python_variables( ["get_python_lib(standard_lib=0, prefix=%r) or ''" % conf.env.PREFIX])
			if python_LIBDEST is None:
				if conf.env.LIBDIR:
					python_LIBDEST = os.path.join(conf.env.LIBDIR, 'python' + pyver)
				else:
					python_LIBDEST = os.path.join(conf.env.PREFIX, 'lib', 'python' + pyver)
			# NOTE(review): python_LIBDEST is computed here but not read again in this
			# function — confirm against the rest of the tool before removing it

		if 'PYTHONARCHDIR' in conf.env:
			# Check if --pythonarchdir was specified
			pyarchdir = conf.env.PYTHONARCHDIR
		elif 'PYTHONARCHDIR' in conf.environ:
			# Check environment for PYTHONDIR
			pyarchdir = conf.environ['PYTHONARCHDIR']
		else:
			# Finally, try to guess
			(pyarchdir, ) = conf.get_python_variables( ["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''" % conf.env.PREFIX])
			if not pyarchdir:
				pyarchdir = pydir

		if hasattr(conf, 'define'): # conf.define is added by the C tool, so may not exist
			conf.define('PYTHONDIR', pydir)
			conf.define('PYTHONARCHDIR', pyarchdir)

		conf.env.PYTHONDIR = pydir
		conf.env.PYTHONARCHDIR = pyarchdir

	# Feedback
	pyver_full = '.'.join(map(str, pyver_tuple[:3]))
	if minver is None:
		conf.msg('Checking for python version', pyver_full)
	else:
		minver_str = '.'.join(map(str, minver))
		conf.msg('Checking for python version >= %s' % (minver_str,), pyver_full, color=result and 'GREEN' or 'YELLOW')

	if not result:
		conf.fatal('The python version is too old, expecting %r' % (minver,))
+
# Snippet executed by the target python interpreter in check_python_module():
# prints the module's __version__ attribute, or the literal 'unknown version'.
# The %s placeholder receives the module name.
PYTHON_MODULE_TEMPLATE = '''
import %s as current_module
version = getattr(current_module, '__version__', None)
if version is not None:
	print(str(version))
else:
	print('unknown version')
'''
+
@conf
def check_python_module(conf, module_name, condition=''):
	"""
	Check if the selected python interpreter can import the given python module::

		def configure(conf):
			conf.check_python_module('pygccxml')
			conf.check_python_module('re', condition="ver > num(2, 0, 4) and ver <= num(3, 0, 0)")

	:param module_name: module
	:type module_name: string
	:param condition: optional expression over ``ver`` (found version) and ``num`` (version builder)
	"""
	msg = "Checking for python module %r" % module_name
	if condition:
		msg = '%s (%s)' % (msg, condition)
	conf.start_msg(msg)

	# Run the probe snippet inside the *target* interpreter
	try:
		output = conf.cmd_and_log(conf.env.PYTHON + ['-c', PYTHON_MODULE_TEMPLATE % module_name])
	except Exception:
		conf.end_msg(False)
		conf.fatal('Could not find the python module %r' % module_name)

	output = output.strip()
	if not condition:
		# No version requirement: just report what was found
		if output == 'unknown version':
			conf.end_msg(True)
		else:
			conf.end_msg(output)
		return

	conf.end_msg(output)
	if output == 'unknown version':
		conf.fatal('Could not check the %s version' % module_name)

	from distutils.version import LooseVersion
	def num(*k):
		# build a comparable version either from ints (num(2, 0, 4)) or from a string (num('2.0.4'))
		if isinstance(k[0], int):
			return LooseVersion('.'.join([str(x) for x in k]))
		return LooseVersion(k[0])
	# the condition string comes from the project wscript (trusted input)
	satisfied = eval(condition, {}, {'num': num, 'ver': LooseVersion(output)})
	if not satisfied:
		conf.fatal('The %s version does not satisfy the requirements' % module_name)
+
def configure(conf):
	"""
	Detect the python interpreter
	"""
	env = conf.env
	opts = Options.options

	# Honour the command-line overrides when present
	if getattr(opts, 'pythondir', None):
		env.PYTHONDIR = opts.pythondir
	if getattr(opts, 'pythonarchdir', None):
		env.PYTHONARCHDIR = opts.pythonarchdir
	if getattr(opts, 'nopycache', None):
		env.NOPYCACHE = opts.nopycache

	# Default to the interpreter running waf when nothing was specified
	if not env.PYTHON:
		env.PYTHON = getattr(opts, 'python', None) or sys.executable
	env.PYTHON = Utils.to_list(env.PYTHON)
	conf.find_program('python', var='PYTHON')

	env.PYFLAGS = ''
	env.PYFLAGS_OPT = '-O'

	# install .pyc/.pyo files unless disabled on the command line
	env.PYC = getattr(opts, 'pyc', 1)
	env.PYO = getattr(opts, 'pyo', 1)

	try:
		env.PYTAG = conf.cmd_and_log(conf.env.PYTHON + ['-c', "import imp;print(imp.get_tag())"]).strip()
	except Errors.WafError:
		# best-effort: some interpreters may not provide imp.get_tag()
		pass
+
def options(opt):
	"""
	Add python-specific options
	"""
	grp = opt.add_option_group("Python Options")
	grp.add_option('--nopyc', dest='pyc', action='store_false', default=1,
		help='Do not install bytecode compiled .pyc files (configuration) [Default:install]')
	grp.add_option('--nopyo', dest='pyo', action='store_false', default=1,
		help='Do not install optimised compiled .pyo files (configuration) [Default:install]')
	grp.add_option('--nopycache', dest='nopycache', action='store_true',
		help='Do not use __pycache__ directory to install objects [Default:auto]')
	grp.add_option('--python', dest='python',
		help='python binary to be used [Default: %s]' % sys.executable)
	grp.add_option('--pythondir', dest='pythondir',
		help='Installation path for python modules (py, platform-independent .py and .pyc files)')
	grp.add_option('--pythonarchdir', dest='pythonarchdir',
		help='Installation path for python extension (pyext, platform-dependent .so or .dylib files)')
diff --git a/third_party/waf/waflib/Tools/qt4.py b/third_party/waf/waflib/Tools/qt4.py
new file mode 100644 (file)
index 0000000..f3a43c2
--- /dev/null
@@ -0,0 +1,696 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2010 (ita)
+
+"""
+
+Tool Description
+================
+
+This tool helps with finding Qt4 tools and libraries,
+and also provides syntactic sugar for using Qt4 tools.
+
+The following snippet illustrates the tool usage::
+
+       def options(opt):
+               opt.load('compiler_cxx qt4')
+
+       def configure(conf):
+               conf.load('compiler_cxx qt4')
+
+       def build(bld):
+               bld(
+                       features = 'qt4 cxx cxxprogram',
+                       uselib   = 'QTCORE QTGUI QTOPENGL QTSVG',
+                       source   = 'main.cpp textures.qrc aboutDialog.ui',
+                       target   = 'window',
+               )
+
+Here, the UI description and resource files will be processed
+to generate code.
+
+Usage
+=====
+
+Load the "qt4" tool.
+
+You also need to edit your sources accordingly:
+
+- the normal way of doing things is to have your C++ files
+  include the .moc file.
+  This is regarded as the best practice (and provides much faster
+  compilations).
+  It also implies that the include paths have been set properly.
+
+- to have the include paths added automatically, use the following::
+
+     from waflib.TaskGen import feature, before_method, after_method
+     @feature('cxx')
+     @after_method('process_source')
+     @before_method('apply_incpaths')
+     def add_includes_paths(self):
+        incs = set(self.to_list(getattr(self, 'includes', '')))
+        for x in self.compiled_tasks:
+            incs.add(x.inputs[0].parent.path_from(self.path))
+        self.includes = list(incs)
+
+Note: another tool provides Qt processing that does not require
+.moc includes, see 'playground/slow_qt/'.
+
+A few options (--qt{dir,bin,...}) and environment variables
+(QT4_{ROOT,DIR,MOC,UIC,XCOMPILE}) allow finer tuning of the tool,
+tool path selection, etc; please read the source for more info.
+
+"""
+
+try:
+       from xml.sax import make_parser
+       from xml.sax.handler import ContentHandler
+except ImportError:
+       has_xml = False
+       ContentHandler = object
+else:
+       has_xml = True
+
+import os, sys
+from waflib.Tools import cxx
+from waflib import Task, Utils, Options, Errors, Context
+from waflib.TaskGen import feature, after_method, extension
+from waflib.Configure import conf
+from waflib import Logs
+
# Constants driving the Qt4 tool's extension hooks and library detection below.
MOC_H = ['.h', '.hpp', '.hxx', '.hh']
"""
File extensions associated to the .moc files
"""

EXT_RCC = ['.qrc']
"""
File extension for the resource (.qrc) files
"""

EXT_UI  = ['.ui']
"""
File extension for the user interface (.ui) files
"""

EXT_QT4 = ['.cpp', '.cc', '.cxx', '.C']
"""
File extensions of C++ files that may require a .moc processing
"""

# Qt4 modules whose flags are looked up during configuration (see find_qt4_libraries)
QT4_LIBS = "QtCore QtGui QtUiTools QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtXmlPatterns QtWebKit Qt3Support QtHelp QtScript QtDeclarative QtDesigner"
+
class qxx(Task.classes['cxx']):
	"""
	Each C++ file can have zero or several .moc files to create.
	They are known only when the files are scanned (preprocessor)
	To avoid scanning the c++ files each time (parsing C/C++), the results
	are retrieved from the task cache (bld.node_deps/bld.raw_deps).
	The moc tasks are also created *dynamically* during the build.
	"""

	def __init__(self, *k, **kw):
		Task.Task.__init__(self, *k, **kw)
		# flag: the moc tasks for this file have been created
		self.moc_done = 0

	def runnable_status(self):
		"""
		Compute the task signature to make sure the scanner was executed. Create the
		moc tasks by using :py:meth:`waflib.Tools.qt4.qxx.add_moc_tasks` (if necessary),
		then postpone the task execution (there is no need to recompute the task signature).
		"""
		if self.moc_done:
			return Task.Task.runnable_status(self)
		else:
			# wait for the predecessors so that the scanner results are available
			for t in self.run_after:
				if not t.hasrun:
					return Task.ASK_LATER
			self.add_moc_tasks()
			return Task.Task.runnable_status(self)

	def create_moc_task(self, h_node, m_node):
		"""
		If several libraries use the same classes, it is possible that moc will run several times (Issue 1318)
		It is not possible to change the file names, but we can assume that the moc transformation will be identical,
		and the moc tasks can be shared in a global cache.

		The defines passed to moc will then depend on task generator order. If this is not acceptable, then
		use the tool slow_qt4 instead (and enjoy the slow builds... :-( )

		:param h_node: source node (header or C++ file) to run moc on
		:param m_node: output .moc node
		:return: the (possibly shared) moc task
		"""
		try:
			moc_cache = self.generator.bld.moc_cache
		except AttributeError:
			# first moc task of the build: create the cache lazily
			moc_cache = self.generator.bld.moc_cache = {}

		try:
			return moc_cache[h_node]
		except KeyError:
			tsk = moc_cache[h_node] = Task.classes['moc'](env=self.env, generator=self.generator)
			tsk.set_inputs(h_node)
			tsk.set_outputs(m_node)

			if self.generator:
				self.generator.tasks.append(tsk)

			# direct injection in the build phase (safe because called from the main thread)
			gen = self.generator.bld.producer
			gen.outstanding.insert(0, tsk)
			gen.total += 1

			return tsk

	def moc_h_ext(self):
		# header extensions to probe when resolving a .moc file; --qt-header-ext overrides MOC_H
		ext = []
		try:
			ext = Options.options.qt_header_ext.split()
		except AttributeError:
			pass
		if not ext:
			ext = MOC_H
		return ext

	def add_moc_tasks(self):
		"""
		Create the moc tasks by looking in ``bld.raw_deps[self.uid()]``
		"""
		node = self.inputs[0]
		bld = self.generator.bld

		try:
			# compute the signature once to know if there is a moc file to create
			self.signature()
		except KeyError:
			# the moc file may be referenced somewhere else
			pass
		else:
			# remove the signature, it must be recomputed with the moc task
			delattr(self, 'cache_sig')

		include_nodes = [node.parent] + self.generator.includes_nodes

		moctasks = []
		mocfiles = set([])
		for d in bld.raw_deps.get(self.uid(), []):
			if not d.endswith('.moc'):
				continue

			# process that base.moc only once
			if d in mocfiles:
				continue
			mocfiles.add(d)

			# find the source associated with the moc file
			h_node = None

			base2 = d[:-4]
			for x in include_nodes:
				for e in self.moc_h_ext():
					h_node = x.find_node(base2 + e)
					if h_node:
						break
				if h_node:
					m_node = h_node.change_ext('.moc')
					break
			else:
				# foo.cpp -> foo.cpp.moc
				for k in EXT_QT4:
					if base2.endswith(k):
						for x in include_nodes:
							h_node = x.find_node(base2)
							if h_node:
								break
						if h_node:
							m_node = h_node.change_ext(k + '.moc')
							break

			if not h_node:
				raise Errors.WafError('No source found for %r which is a moc file' % d)

			# create the moc task
			task = self.create_moc_task(h_node, m_node)
			moctasks.append(task)

		# simple scheduler dependency: run the moc task before others
		self.run_after.update(set(moctasks))
		self.moc_done = 1
+
class trans_update(Task.Task):
	"""Update a .ts files from a list of C++ files"""
	run_str = '${QT_LUPDATE} ${SRC} -ts ${TGT}'
	color   = 'BLUE'
# the .ts targets already exist in the source tree and are updated in place
Task.update_outputs(trans_update)
+
class XMLHandler(ContentHandler):
	"""
	SAX content handler that records, in ``self.files``, the text of every
	``<file>`` element of a *.qrc* resource file.
	"""
	def __init__(self):
		self.buf = []   # character data accumulated for the current element
		self.files = [] # file names collected so far
	def characters(self, cars):
		self.buf.append(cars)
	def startElement(self, name, attrs):
		if name != 'file':
			return
		self.buf = []
	def endElement(self, name):
		if name != 'file':
			return
		self.files.append(str(''.join(self.buf)))
+
@extension(*EXT_RCC)
def create_rcc_task(self, node):
	"""Create rcc and cxx tasks for *.qrc* files; returns the cxx task."""
	cpp_node = node.change_ext('_rc.cpp')
	self.create_task('rcc', node, cpp_node)
	compile_task = self.create_task('cxx', cpp_node, cpp_node.change_ext('.o'))
	# register the object so the link task picks it up
	if not hasattr(self, 'compiled_tasks'):
		self.compiled_tasks = []
	self.compiled_tasks.append(compile_task)
	return compile_task
+
@extension(*EXT_UI)
def create_uic_task(self, node):
	"""hook for uic tasks: generate the header matching the .ui file"""
	tsk = self.create_task('ui4', node)
	header_name = self.env['ui_PATTERN'] % node.name[:-3]
	tsk.outputs = [self.path.find_or_declare(header_name)]
+
@extension('.ts')
def add_lang(self, node):
	"""add all the .ts file into self.lang"""
	current = self.to_list(getattr(self, 'lang', []))
	self.lang = current + [node]
+
@feature('qt4')
@after_method('apply_link')
def apply_qt4(self):
	"""
	Add MOC_FLAGS which may be necessary for moc::

		def build(bld):
			bld.program(features='qt4', source='main.cpp', target='app', use='QTCORE')

	The additional parameters are:

	:param lang: list of translation files (\*.ts) to process
	:type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension
	:param update: whether to process the C++ files to update the \*.ts files (use **waf --translate**)
	:type update: bool
	:param langname: if given, transform the \*.ts files into a .qrc files to include in the binary file
	:type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension
	"""
	if getattr(self, 'lang', None):
		# one ts2qm task per translation file
		qmtasks = []
		for x in self.to_list(self.lang):
			if isinstance(x, str):
				x = self.path.find_resource(x + '.ts')
			qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.qm')))

		if getattr(self, 'update', None) and Options.options.trans_qt4:
			# refresh the .ts files from the C++ sources and the .ui files
			cxxnodes = [a.inputs[0] for a in self.compiled_tasks] + [
				a.inputs[0] for a in self.tasks if getattr(a, 'inputs', None) and a.inputs[0].name.endswith('.ui')]
			for x in qmtasks:
				self.create_task('trans_update', cxxnodes, x.inputs)

		if getattr(self, 'langname', None):
			# bundle the .qm files into a .qrc, compile it, and link the result in
			qmnodes = [x.outputs[0] for x in qmtasks]
			rcnode = self.langname
			if isinstance(rcnode, str):
				rcnode = self.path.find_or_declare(rcnode + '.qrc')
			t = self.create_task('qm2rcc', qmnodes, rcnode)
			k = create_rcc_task(self, t.outputs[0])
			self.link_task.inputs.append(k.outputs[0])

	# forward the -D/-I (or /D, /I normalized to -) compiler flags to moc
	lst = []
	for flag in self.to_list(self.env['CXXFLAGS']):
		if len(flag) < 2: continue
		f = flag[0:2]
		if f in ('-D', '-I', '/D', '/I'):
			if (f[0] == '/'):
				lst.append('-' + flag[1:])
			else:
				lst.append(flag)
	self.env.append_value('MOC_FLAGS', lst)
+
@extension(*EXT_QT4)
def cxx_hook(self, node):
	"""
	Re-map C++ file extensions to the :py:class:`waflib.Tools.qt4.qxx` task.
	"""
	tsk = self.create_compiled_task('qxx', node)
	return tsk
+
class rcc(Task.Task):
	"""
	Process *.qrc* files
	"""
	color   = 'BLUE'
	run_str = '${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
	ext_out = ['.h']

	def rcname(self):
		# resource name passed to rcc: the input file name without its extension
		base, _unused_ext = os.path.splitext(self.inputs[0].name)
		return base

	def scan(self):
		"""Parse the *.qrc* files"""
		if not has_xml:
			Logs.error('no xml support was found, the rcc dependencies will be incomplete!')
			return ([], [])

		handler = XMLHandler()
		parser = make_parser()
		parser.setContentHandler(handler)
		with open(self.inputs[0].abspath(), 'r') as fi:
			parser.parse(fi)

		# split the referenced files into resolved nodes and unresolved names
		nodes = []
		names = []
		root = self.inputs[0].parent
		for fname in handler.files:
			found = root.find_resource(fname)
			if found:
				nodes.append(found)
			else:
				names.append(fname)
		return (nodes, names)
+
class moc(Task.Task):
	"""
	Create *.moc* files
	"""
	color   = 'BLUE'
	run_str = '${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}'
	def keyword(self):
		# displayed as "Creating <output>" in the build progress
		return "Creating"
	def __str__(self):
		return self.outputs[0].path_from(self.generator.bld.launch_node())
+
class ui4(Task.Task):
	"""
	Process *.ui* files
	"""
	color   = 'BLUE'
	run_str = '${QT_UIC} ${SRC} -o ${TGT}'
	# generated headers must exist before C++ compilation starts
	ext_out = ['.h']
+
class ts2qm(Task.Task):
	"""
	Create *.qm* files from *.ts* files
	"""
	color   = 'BLUE'
	run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
+
+class qm2rcc(Task.Task):
+	"""
+	Transform *.qm* files into a *.qrc* file referencing them
+	"""
+	color = 'BLUE'
+	after = 'ts2qm'
+
+	def run(self):
+		"""Create a qrc file including the inputs"""
+		# the <file> entries are written relative to the folder of the generated .qrc
+		txt = '\n'.join(['<file>%s</file>' % k.path_from(self.outputs[0].parent) for k in self.inputs])
+		code = '<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>' % txt
+		self.outputs[0].write(code)
+
+def configure(self):
+	"""
+	Besides the configuration options, the environment variable QT4_ROOT may be used
+	to give the location of the qt4 libraries (absolute path).
+
+	The detection will use the program *pkg-config* through :py:func:`waflib.Tools.config_c.check_cfg`
+	"""
+	# the order matters: the library detection uses the qmake found first
+	self.find_qt4_binaries()
+	self.set_qt4_libs_to_check()
+	self.set_qt4_defines()
+	self.find_qt4_libraries()
+	self.add_qt4_rpath()
+	self.simplify_qt4_libs()
+
+@conf
+def find_qt4_binaries(self):
+	"""
+	Detect the qt4 programs (qmake, uic, moc, rcc, lrelease, lupdate), store
+	them in the environment, and set the moc/uic-related substitution patterns
+	"""
+	env = self.env
+	opt = Options.options
+
+	qtdir = getattr(opt, 'qtdir', '')
+	qtbin = getattr(opt, 'qtbin', '')
+
+	paths = []
+
+	# --qtdir given on the command line: binaries are expected in its bin/ folder
+	if qtdir:
+		qtbin = os.path.join(qtdir, 'bin')
+
+	# the qt directory has been given from QT4_ROOT - deduce the qt binary path
+	if not qtdir:
+		qtdir = os.environ.get('QT4_ROOT', '')
+		qtbin = os.environ.get('QT4_BIN', None) or os.path.join(qtdir, 'bin')
+
+	if qtbin:
+		paths = [qtbin]
+
+	# no qtdir, look in the path and in /usr/local/Trolltech
+	if not qtdir:
+		paths = os.environ.get('PATH', '').split(os.pathsep)
+		paths.append('/usr/share/qt4/bin/')
+		try:
+			lst = Utils.listdir('/usr/local/Trolltech/')
+		except OSError:
+			pass
+		else:
+			if lst:
+				lst.sort()
+				lst.reverse()
+
+				# keep the highest version
+				qtdir = '/usr/local/Trolltech/%s/' % lst[0]
+				qtbin = os.path.join(qtdir, 'bin')
+				paths.append(qtbin)
+
+	# at the end, try to find qmake in the paths given
+	# keep the one with the highest version
+	cand = None
+	prev_ver = ['4', '0', '0']
+	for qmk in ('qmake-qt4', 'qmake4', 'qmake'):
+		try:
+			qmake = self.find_program(qmk, path_list=paths)
+		except self.errors.ConfigurationError:
+			pass
+		else:
+			try:
+				version = self.cmd_and_log(qmake + ['-query', 'QT_VERSION']).strip()
+			except self.errors.WafError:
+				pass
+			else:
+				if version:
+					new_ver = version.split('.')
+					# NOTE(review): this compares lists of version *strings*, so the
+					# ordering is lexicographic per component ('10' < '9') - confirm
+					# this is acceptable for qt4 version numbers
+					if new_ver > prev_ver:
+						cand = qmake
+						prev_ver = new_ver
+	if cand:
+		self.env.QMAKE = cand
+	else:
+		self.fatal('Could not find qmake for qt4')
+
+	# NOTE(review): this value is not used below - find_bin() searches 'paths'
+	qtbin = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_BINS']).strip() + os.sep
+
+	def find_bin(lst, var):
+		# store in env[var] the first program of lst found in paths,
+		# keeping any value already present in the environment
+		if var in env:
+			return
+		for f in lst:
+			try:
+				ret = self.find_program(f, path_list=paths)
+			except self.errors.ConfigurationError:
+				pass
+			else:
+				env[var]=ret
+				break
+
+	find_bin(['uic-qt3', 'uic3'], 'QT_UIC3')
+	find_bin(['uic-qt4', 'uic'], 'QT_UIC')
+	if not env.QT_UIC:
+		self.fatal('cannot find the uic compiler for qt4')
+
+	# make sure the uic found is the qt4 one, not the qt3 version
+	self.start_msg('Checking for uic version')
+	uicver = self.cmd_and_log(env.QT_UIC + ["-version"], output=Context.BOTH)
+	uicver = ''.join(uicver).strip()
+	uicver = uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt', '')
+	self.end_msg(uicver)
+	if uicver.find(' 3.') != -1:
+		self.fatal('this uic compiler is for qt3, add uic for qt4 to your path')
+
+	find_bin(['moc-qt4', 'moc'], 'QT_MOC')
+	find_bin(['rcc-qt4', 'rcc'], 'QT_RCC')
+	find_bin(['lrelease-qt4', 'lrelease'], 'QT_LRELEASE')
+	find_bin(['lupdate-qt4', 'lupdate'], 'QT_LUPDATE')
+
+	# command-line substitution patterns for the task classes above
+	env['UIC3_ST']= '%s -o %s'
+	env['UIC_ST'] = '%s -o %s'
+	env['MOC_ST'] = '-o'
+	env['ui_PATTERN'] = 'ui_%s.h'
+	env['QT_LRELEASE_FLAGS'] = ['-silent']
+	env.MOCCPPPATH_ST = '-I%s'
+	env.MOCDEFINES_ST = '-D%s'
+
+@conf
+def find_qt4_libraries(self):
+	"""
+	Locate the qt4 libraries: through pkg-config when available, otherwise by
+	searching the library folder reported by qmake (frameworks on OSX,
+	lib*.so/lib*.a on other unix systems, *.lib/lib*.a on win32)
+	"""
+	qtlibs = getattr(Options.options, 'qtlibs', None) or os.environ.get("QT4_LIBDIR", None)
+	if not qtlibs:
+		try:
+			qtlibs = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_LIBS']).strip()
+		except Errors.WafError:
+			# fall back to <prefix>/lib when QT_INSTALL_LIBS cannot be queried
+			qtdir = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_PREFIX']).strip() + os.sep
+			qtlibs = os.path.join(qtdir, 'lib')
+	self.msg('Found the Qt4 libraries in', qtlibs)
+
+	qtincludes =  os.environ.get("QT4_INCLUDES", None) or self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_HEADERS']).strip()
+	env = self.env
+	if not 'PKG_CONFIG_PATH' in os.environ:
+		os.environ['PKG_CONFIG_PATH'] = '%s:%s/pkgconfig:/usr/lib/qt4/lib/pkgconfig:/opt/qt4/lib/pkgconfig:/usr/lib/qt4/lib:/opt/qt4/lib' % (qtlibs, qtlibs)
+
+	try:
+		# QT4_XCOMPILE disables pkg-config to force the manual detection below
+		if os.environ.get("QT4_XCOMPILE", None):
+			raise self.errors.ConfigurationError()
+		self.check_cfg(atleast_pkgconfig_version='0.1')
+	except self.errors.ConfigurationError:
+		for i in self.qt4_vars:
+			uselib = i.upper()
+			if Utils.unversioned_sys_platform() == "darwin":
+				# Since at least qt 4.7.3 each library locates in separate directory
+				frameworkName = i + ".framework"
+				qtDynamicLib = os.path.join(qtlibs, frameworkName, i)
+				if os.path.exists(qtDynamicLib):
+					env.append_unique('FRAMEWORK_' + uselib, i)
+					self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
+				else:
+					self.msg('Checking for %s' % i, False, 'YELLOW')
+				env.append_unique('INCLUDES_' + uselib, os.path.join(qtlibs, frameworkName, 'Headers'))
+			elif env.DEST_OS != "win32":
+				# unix: prefer the shared library, fall back to the static one
+				qtDynamicLib = os.path.join(qtlibs, "lib" + i + ".so")
+				qtStaticLib = os.path.join(qtlibs, "lib" + i + ".a")
+				if os.path.exists(qtDynamicLib):
+					env.append_unique('LIB_' + uselib, i)
+					self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
+				elif os.path.exists(qtStaticLib):
+					env.append_unique('LIB_' + uselib, i)
+					self.msg('Checking for %s' % i, qtStaticLib, 'GREEN')
+				else:
+					self.msg('Checking for %s' % i, False, 'YELLOW')
+
+				env.append_unique('LIBPATH_' + uselib, qtlibs)
+				env.append_unique('INCLUDES_' + uselib, qtincludes)
+				env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
+			else:
+				# Release library names are like QtCore4
+				for k in ("lib%s.a", "lib%s4.a", "%s.lib", "%s4.lib"):
+					lib = os.path.join(qtlibs, k % i)
+					if os.path.exists(lib):
+						# keep the version suffix of the pattern, e.g. QtCore + '4'
+						env.append_unique('LIB_' + uselib, i + k[k.find("%s") + 2 : k.find('.')])
+						self.msg('Checking for %s' % i, lib, 'GREEN')
+						break
+				else:
+					self.msg('Checking for %s' % i, False, 'YELLOW')
+
+				env.append_unique('LIBPATH_' + uselib, qtlibs)
+				env.append_unique('INCLUDES_' + uselib, qtincludes)
+				env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
+
+				# Debug library names are like QtCore4d
+				uselib = i.upper() + "_debug"
+				for k in ("lib%sd.a", "lib%sd4.a", "%sd.lib", "%sd4.lib"):
+					lib = os.path.join(qtlibs, k % i)
+					if os.path.exists(lib):
+						env.append_unique('LIB_' + uselib, i + k[k.find("%s") + 2 : k.find('.')])
+						self.msg('Checking for %s' % i, lib, 'GREEN')
+						break
+				else:
+					self.msg('Checking for %s' % i, False, 'YELLOW')
+
+				env.append_unique('LIBPATH_' + uselib, qtlibs)
+				env.append_unique('INCLUDES_' + uselib, qtincludes)
+				env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
+	else:
+		# pkg-config is available: let it provide the flags for each library
+		for i in self.qt4_vars_debug + self.qt4_vars:
+			self.check_cfg(package=i, args='--cflags --libs', mandatory=False)
+
+@conf
+def simplify_qt4_libs(self):
+	"""
+	Remove from the other qt4 uselib variables the library paths already
+	present in LIBPATH_QTCORE
+	"""
+	# the libpaths make really long command-lines
+	# remove the qtcore ones from qtgui, etc
+	env = self.env
+	def process_lib(vars_, coreval):
+		# keep in LIBPATH_<var> only the paths absent from env[coreval]
+		for d in vars_:
+			var = d.upper()
+			if var == 'QTCORE':
+				continue
+
+			value = env['LIBPATH_'+var]
+			if value:
+				core = env[coreval]
+				accu = []
+				for lib in value:
+					if lib in core:
+						continue
+					accu.append(lib)
+				env['LIBPATH_'+var] = accu
+
+	process_lib(self.qt4_vars,       'LIBPATH_QTCORE')
+	process_lib(self.qt4_vars_debug, 'LIBPATH_QTCORE_DEBUG')
+
+@conf
+def add_qt4_rpath(self):
+	"""
+	Define rpath entries (RPATH_<var>) for the qt4 library paths when
+	--want-rpath was given on the command line
+	"""
+	# rpath if wanted
+	env = self.env
+	if getattr(Options.options, 'want_rpath', False):
+		def process_rpath(vars_, coreval):
+			for d in vars_:
+				var = d.upper()
+				value = env['LIBPATH_'+var]
+				if value:
+					core = env[coreval]
+					accu = []
+					for lib in value:
+						# skip the paths already covered by the qtcore rpath
+						if var != 'QTCORE':
+							if lib in core:
+								continue
+						accu.append('-Wl,--rpath='+lib)
+					env['RPATH_'+var] = accu
+		process_rpath(self.qt4_vars,       'LIBPATH_QTCORE')
+		process_rpath(self.qt4_vars_debug, 'LIBPATH_QTCORE_DEBUG')
+
+@conf
+def set_qt4_libs_to_check(self):
+	"""
+	Set the qt4 library names to detect (self.qt4_vars / self.qt4_vars_debug),
+	providing defaults when the wscript did not define them
+	"""
+	if not hasattr(self, 'qt4_vars'):
+		self.qt4_vars = QT4_LIBS
+	self.qt4_vars = Utils.to_list(self.qt4_vars)
+	if not hasattr(self, 'qt4_vars_debug'):
+		self.qt4_vars_debug = [a + '_debug' for a in self.qt4_vars]
+	self.qt4_vars_debug = Utils.to_list(self.qt4_vars_debug)
+
+@conf
+def set_qt4_defines(self):
+	"""
+	On win32 only, add the QT_<NAME>_LIB preprocessor defines for each qt4
+	library checked (release and debug variants)
+	"""
+	if sys.platform != 'win32':
+		return
+	for x in self.qt4_vars:
+		# strip the first two characters, presumably the 'Qt' prefix of
+		# names such as QtCore - TODO confirm all entries follow that scheme
+		y = x[2:].upper()
+		self.env.append_unique('DEFINES_%s' % x.upper(), 'QT_%s_LIB' % y)
+		self.env.append_unique('DEFINES_%s_DEBUG' % x.upper(), 'QT_%s_LIB' % y)
+
+def options(opt):
+	"""
+	Command-line options
+	"""
+	opt.add_option('--want-rpath', action='store_true', default=False, dest='want_rpath', help='enable the rpath for qt libraries')
+
+	opt.add_option('--header-ext',
+		type='string',
+		default='',
+		help='header extension for moc files',
+		dest='qt_header_ext')
+
+	# --qtdir/--qtbin/--qtlibs override the detection paths
+	for i in 'qtdir qtbin qtlibs'.split():
+		opt.add_option('--'+i, type='string', default='', dest=i)
+
+	opt.add_option('--translate', action="store_true", help="collect translation strings", dest="trans_qt4", default=False)
diff --git a/third_party/waf/waflib/Tools/qt5.py b/third_party/waf/waflib/Tools/qt5.py
new file mode 100644 (file)
index 0000000..05522a4
--- /dev/null
@@ -0,0 +1,768 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2016 (ita)
+
+"""
+This tool helps with finding Qt5 tools and libraries,
+and also provides syntactic sugar for using Qt5 tools.
+
+The following snippet illustrates the tool usage::
+
+       def options(opt):
+               opt.load('compiler_cxx qt5')
+
+       def configure(conf):
+               conf.load('compiler_cxx qt5')
+
+       def build(bld):
+               bld(
+                       features = 'qt5 cxx cxxprogram',
+                       uselib   = 'QT5CORE QT5GUI QT5OPENGL QT5SVG',
+                       source   = 'main.cpp textures.qrc aboutDialog.ui',
+                       target   = 'window',
+               )
+
+Here, the UI description and resource files will be processed
+to generate code.
+
+Usage
+=====
+
+Load the "qt5" tool.
+
+You also need to edit your sources accordingly:
+
+- the normal way of doing things is to have your C++ files
+  include the .moc file.
+  This is regarded as the best practice (and provides much faster
+  compilations).
+  It also implies that the include paths have been set properly.
+
+- to have the include paths added automatically, use the following::
+
+     from waflib.TaskGen import feature, before_method, after_method
+     @feature('cxx')
+     @after_method('process_source')
+     @before_method('apply_incpaths')
+     def add_includes_paths(self):
+        incs = set(self.to_list(getattr(self, 'includes', '')))
+        for x in self.compiled_tasks:
+            incs.add(x.inputs[0].parent.path_from(self.path))
+        self.includes = list(incs)
+
+Note: another tool provides Qt processing that does not require
+.moc includes, see 'playground/slow_qt/'.
+
+A few options (--qt{dir,bin,...}) and environment variables
+(QT5_{ROOT,DIR,MOC,UIC,XCOMPILE}) allow finer tuning of the tool,
+tool path selection, etc; please read the source for more info.
+
+The detection uses pkg-config on Linux by default. To force static library detection use:
+QT5_XCOMPILE=1 QT5_FORCE_STATIC=1 waf configure
+"""
+
+try:
+       from xml.sax import make_parser
+       from xml.sax.handler import ContentHandler
+except ImportError:
+       has_xml = False
+       ContentHandler = object
+else:
+       has_xml = True
+
+import os, sys, re
+from waflib.Tools import cxx
+from waflib import Task, Utils, Options, Errors, Context
+from waflib.TaskGen import feature, after_method, extension, before_method
+from waflib.Configure import conf
+from waflib import Logs
+
+MOC_H = ['.h', '.hpp', '.hxx', '.hh']
+"""
+File extensions associated with .moc files
+"""
+
+EXT_RCC = ['.qrc']
+"""
+File extension of the Qt resource (.qrc) files
+"""
+
+EXT_UI  = ['.ui']
+"""
+File extension of the user interface (.ui) files
+"""
+
+EXT_QT5 = ['.cpp', '.cc', '.cxx', '.C']
+"""
+File extensions of C++ files that may require a .moc processing
+"""
+
+class qxx(Task.classes['cxx']):
+	"""
+	Each C++ file can have zero or several .moc files to create.
+	They are known only when the files are scanned (preprocessor)
+	To avoid scanning the c++ files each time (parsing C/C++), the results
+	are retrieved from the task cache (bld.node_deps/bld.raw_deps).
+	The moc tasks are also created *dynamically* during the build.
+	"""
+
+	def __init__(self, *k, **kw):
+		Task.Task.__init__(self, *k, **kw)
+		# set to 1 once the moc tasks for this file have been created
+		self.moc_done = 0
+
+	def runnable_status(self):
+		"""
+		Compute the task signature to make sure the scanner was executed. Create the
+		moc tasks by using :py:meth:`waflib.Tools.qt5.qxx.add_moc_tasks` (if necessary),
+		then postpone the task execution (there is no need to recompute the task signature).
+		"""
+		if self.moc_done:
+			return Task.Task.runnable_status(self)
+		else:
+			# wait for the predecessors before creating the moc tasks
+			for t in self.run_after:
+				if not t.hasrun:
+					return Task.ASK_LATER
+			self.add_moc_tasks()
+			return Task.Task.runnable_status(self)
+
+	def create_moc_task(self, h_node, m_node):
+		"""
+		If several libraries use the same classes, it is possible that moc will run several times (Issue 1318)
+		It is not possible to change the file names, but we can assume that the moc transformation will be identical,
+		and the moc tasks can be shared in a global cache.
+		"""
+		try:
+			moc_cache = self.generator.bld.moc_cache
+		except AttributeError:
+			moc_cache = self.generator.bld.moc_cache = {}
+
+		try:
+			# reuse the moc task already created for this header
+			return moc_cache[h_node]
+		except KeyError:
+			tsk = moc_cache[h_node] = Task.classes['moc'](env=self.env, generator=self.generator)
+			tsk.set_inputs(h_node)
+			tsk.set_outputs(m_node)
+			tsk.env.append_unique('MOC_FLAGS', '-i')
+
+			if self.generator:
+				self.generator.tasks.append(tsk)
+
+			# direct injection in the build phase (safe because called from the main thread)
+			gen = self.generator.bld.producer
+			gen.outstanding.appendleft(tsk)
+			gen.total += 1
+
+			return tsk
+
+		# NOTE(review): this 'else' clause is unreachable - both the 'try' and the
+		# 'except' branches above return; it looks like a leftover copy of the
+		# signature-reset code in add_moc_tasks
+		else:
+			# remove the signature, it must be recomputed with the moc task
+			delattr(self, 'cache_sig')
+
+	def add_moc_tasks(self):
+		"""
+		Creates moc tasks by looking in the list of file dependencies ``bld.raw_deps[self.uid()]``
+		"""
+		node = self.inputs[0]
+		bld = self.generator.bld
+
+		try:
+			# compute the signature once to know if there is a moc file to create
+			self.signature()
+		except KeyError:
+			# the moc file may be referenced somewhere else
+			pass
+		else:
+			# remove the signature, it must be recomputed with the moc task
+			delattr(self, 'cache_sig')
+
+		include_nodes = [node.parent] + self.generator.includes_nodes
+
+		moctasks = []
+		mocfiles = set()
+		for d in bld.raw_deps.get(self.uid(), []):
+			if not d.endswith('.moc'):
+				continue
+
+			# process that base.moc only once
+			if d in mocfiles:
+				continue
+			mocfiles.add(d)
+
+			# find the source associated with the moc file
+			h_node = None
+			base2 = d[:-4]
+
+			# foo.moc from foo.cpp
+			prefix = node.name[:node.name.rfind('.')]
+			if base2 == prefix:
+				h_node = node
+			else:
+				# this deviates from the standard
+				# if bar.cpp includes foo.moc, then assume it is from foo.h
+				for x in include_nodes:
+					for e in MOC_H:
+						h_node = x.find_node(base2 + e)
+						if h_node:
+							break
+					else:
+						continue
+					break
+			if h_node:
+				m_node = h_node.change_ext('.moc')
+			else:
+				raise Errors.WafError('No source found for %r which is a moc file' % d)
+
+			# create the moc task
+			task = self.create_moc_task(h_node, m_node)
+			moctasks.append(task)
+
+		# simple scheduler dependency: run the moc task before others
+		self.run_after.update(set(moctasks))
+		self.moc_done = 1
+
+class trans_update(Task.Task):
+	"""Updates a .ts file from a list of C++ files (lupdate)"""
+	run_str = '${QT_LUPDATE} ${SRC} -ts ${TGT}'
+	color   = 'BLUE'
+
+class XMLHandler(ContentHandler):
+	"""
+	Parses ``.qrc`` files: collects the text of the <file> elements into ``self.files``
+	"""
+	def __init__(self):
+		self.buf = []
+		self.files = []
+	def startElement(self, name, attrs):
+		# reset the character buffer at each <file> element
+		if name == 'file':
+			self.buf = []
+	def endElement(self, name):
+		# a closing </file> completes one resource entry
+		if name == 'file':
+			self.files.append(str(''.join(self.buf)))
+	def characters(self, cars):
+		# character data may arrive in several chunks, accumulate it
+		self.buf.append(cars)
+
+@extension(*EXT_RCC)
+def create_rcc_task(self, node):
+	"Creates rcc and cxx tasks for ``.qrc`` files"
+	rcnode = node.change_ext('_rc.cpp')
+	# first generate the C++ file with rcc, then compile it like a regular source
+	self.create_task('rcc', node, rcnode)
+	cpptask = self.create_task('cxx', rcnode, rcnode.change_ext('.o'))
+	try:
+		self.compiled_tasks.append(cpptask)
+	except AttributeError:
+		self.compiled_tasks = [cpptask]
+	return cpptask
+
+@extension(*EXT_UI)
+def create_uic_task(self, node):
+	"Create uic tasks for user interface ``.ui`` definition files"
+	uictask = self.create_task('ui5', node)
+	# node.name[:-3] strips the '.ui' extension before applying ui_PATTERN (ui_%s.h)
+	uictask.outputs = [node.parent.find_or_declare(self.env.ui_PATTERN % node.name[:-3])]
+
+@extension('.ts')
+def add_lang(self, node):
+	"""Adds each .ts file to ``self.lang``, which is processed by :py:func:`apply_qt5`"""
+	self.lang = self.to_list(getattr(self, 'lang', [])) + [node]
+
+@feature('qt5')
+@before_method('process_source')
+def process_mocs(self):
+	"""
+	Processes MOC files included in headers::
+
+		def build(bld):
+			bld.program(features='qt5', source='main.cpp', target='app', use='QT5CORE', moc='foo.h')
+
+	The build will run moc on foo.h to create moc_foo.<idx>.cpp. The number in the file name
+	is provided to avoid name clashes when the same headers are used by several targets.
+	"""
+	lst = self.to_nodes(getattr(self, 'moc', []))
+	self.source = self.to_list(getattr(self, 'source', []))
+	for x in lst:
+		prefix = x.name[:x.name.rfind('.')] # foo.h -> foo
+		moc_target = 'moc_%s.%d.cpp' % (prefix, self.idx)
+		moc_node = x.parent.find_or_declare(moc_target)
+		# the generated file is compiled along with the other sources
+		self.source.append(moc_node)
+
+		self.create_task('moc', x, moc_node)
+
+@feature('qt5')
+@after_method('apply_link')
+def apply_qt5(self):
+	"""
+	Adds MOC_FLAGS which may be necessary for moc::
+
+		def build(bld):
+			bld.program(features='qt5', source='main.cpp', target='app', use='QT5CORE')
+
+	The additional parameters are:
+
+	:param lang: list of translation files (\*.ts) to process
+	:type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension
+	:param update: whether to process the C++ files to update the \*.ts files (use **waf --translate**)
+	:type update: bool
+	:param langname: if given, transform the \*.ts files into a .qrc file to include in the binary file
+	:type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension
+	"""
+	if getattr(self, 'lang', None):
+		# translation handling: .ts -> .qm, optionally collected into a .qrc
+		qmtasks = []
+		for x in self.to_list(self.lang):
+			if isinstance(x, str):
+				x = self.path.find_resource(x + '.ts')
+			qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.qm')))
+
+		if getattr(self, 'update', None) and Options.options.trans_qt5:
+			# update the .ts files from the C++ sources and the .ui files (lupdate)
+			cxxnodes = [a.inputs[0] for a in self.compiled_tasks] + [
+				a.inputs[0] for a in self.tasks if getattr(a, 'inputs', None) and a.inputs[0].name.endswith('.ui')]
+			for x in qmtasks:
+				self.create_task('trans_update', cxxnodes, x.inputs)
+
+		if getattr(self, 'langname', None):
+			# embed the .qm files in the binary through a generated .qrc file
+			qmnodes = [x.outputs[0] for x in qmtasks]
+			rcnode = self.langname
+			if isinstance(rcnode, str):
+				rcnode = self.path.find_or_declare(rcnode + '.qrc')
+			t = self.create_task('qm2rcc', qmnodes, rcnode)
+			k = create_rcc_task(self, t.outputs[0])
+			self.link_task.inputs.append(k.outputs[0])
+
+	# propagate the -D/-I flags from CXXFLAGS to moc, converting the msvc-style
+	# /D and /I variants to their dash form
+	lst = []
+	for flag in self.to_list(self.env.CXXFLAGS):
+		if len(flag) < 2: continue
+		f = flag[0:2]
+		if f in ('-D', '-I', '/D', '/I'):
+			if (f[0] == '/'):
+				lst.append('-' + flag[1:])
+			else:
+				lst.append(flag)
+	self.env.append_value('MOC_FLAGS', lst)
+
+@extension(*EXT_QT5)
+def cxx_hook(self, node):
+	"""
+	Re-maps C++ file extensions to the :py:class:`waflib.Tools.qt5.qxx` task,
+	which can create the moc tasks dynamically during the build.
+	"""
+	return self.create_compiled_task('qxx', node)
+
+class rcc(Task.Task):
+	"""
+	Processes ``.qrc`` files into C++ source with the Qt resource compiler (rcc)
+	"""
+	color   = 'BLUE'
+	run_str = '${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
+	ext_out = ['.h']
+
+	def rcname(self):
+		"""Return the resource name passed to rcc: the input file name without its extension"""
+		return os.path.splitext(self.inputs[0].name)[0]
+
+	def scan(self):
+		"""Parse the ``.qrc`` file to extract the resource file dependencies"""
+		if not has_xml:
+			Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
+			return ([], [])
+
+		parser = make_parser()
+		curHandler = XMLHandler()
+		parser.setContentHandler(curHandler)
+		fi = open(self.inputs[0].abspath(), 'r')
+		try:
+			parser.parse(fi)
+		finally:
+			fi.close()
+
+		# entries resolved to build nodes become dependency nodes; the rest
+		# are returned as plain names (second element of the tuple)
+		nodes = []
+		names = []
+		root = self.inputs[0].parent
+		for x in curHandler.files:
+			nd = root.find_resource(x)
+			if nd: nodes.append(nd)
+			else: names.append(x)
+		return (nodes, names)
+
+class moc(Task.Task):
+	"""
+	Creates ``.moc`` files with the Qt meta-object compiler
+	"""
+	color   = 'BLUE'
+	run_str = '${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}'
+
+class ui5(Task.Task):
+	"""
+	Processes ``.ui`` user interface files into headers with uic
+	"""
+	color   = 'BLUE'
+	run_str = '${QT_UIC} ${SRC} -o ${TGT}'
+	ext_out = ['.h']
+
+class ts2qm(Task.Task):
+	"""
+	Generates ``.qm`` message files from ``.ts`` translation sources (lrelease)
+	"""
+	color   = 'BLUE'
+	run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
+
+class qm2rcc(Task.Task):
+	"""
+	Generates a ``.qrc`` file referencing the input ``.qm`` files
+	"""
+	color = 'BLUE'
+	after = 'ts2qm'
+	def run(self):
+		"""Create a qrc file including the inputs"""
+		# the <file> entries are written relative to the folder of the generated .qrc
+		txt = '\n'.join(['<file>%s</file>' % k.path_from(self.outputs[0].parent) for k in self.inputs])
+		code = '<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>' % txt
+		self.outputs[0].write(code)
+
+def configure(self):
+	"""
+	Besides the configuration options, the environment variable QT5_ROOT may be used
+	to give the location of the qt5 libraries (absolute path).
+
+	The detection uses the program ``pkg-config`` through :py:func:`waflib.Tools.config_c.check_cfg`
+	"""
+	# the order matters: the library detection uses the qmake found first
+	self.find_qt5_binaries()
+	self.set_qt5_libs_dir()
+	self.set_qt5_libs_to_check()
+	self.set_qt5_defines()
+	self.find_qt5_libraries()
+	self.add_qt5_rpath()
+	self.simplify_qt5_libs()
+
+	# warn about this during the configuration too
+	if not has_xml:
+		Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
+
+	if 'COMPILER_CXX' not in self.env:
+		self.fatal('No CXX compiler defined: did you forget to configure compiler_cxx first?')
+
+	# Qt5 may be compiled with '-reduce-relocations' which requires dependent programs to have -fPIE or -fPIC?
+	frag = '#include <QApplication>\nint main(int argc, char **argv) {return 0;}\n'
+	uses = 'QT5CORE QT5WIDGETS QT5GUI'
+	# try each flag combination until one compiles the test fragment
+	for flag in [[], '-fPIE', '-fPIC', '-std=c++11' , ['-std=c++11', '-fPIE'], ['-std=c++11', '-fPIC']]:
+		msg = 'See if Qt files compile '
+		if flag:
+			msg += 'with %s' % flag
+		try:
+			self.check(features='qt5 cxx', use=uses, uselib_store='qt5', cxxflags=flag, fragment=frag, msg=msg)
+		except self.errors.ConfigurationError:
+			pass
+		else:
+			break
+	else:
+		self.fatal('Could not build a simple Qt application')
+
+	# FreeBSD does not add /usr/local/lib and the pkg-config files do not provide it either :-/
+	from waflib import Utils
+	if Utils.unversioned_sys_platform() == 'freebsd':
+		frag = '#include <QApplication>\nint main(int argc, char **argv) { QApplication app(argc, argv); return NULL != (void*) (&app);}\n'
+		try:
+			self.check(features='qt5 cxx cxxprogram', use=uses, fragment=frag, msg='Can we link Qt programs on FreeBSD directly?')
+		except self.errors.ConfigurationError:
+			self.check(features='qt5 cxx cxxprogram', use=uses, uselib_store='qt5', libpath='/usr/local/lib', fragment=frag, msg='Is /usr/local/lib required?')
+
+@conf
+def find_qt5_binaries(self):
+       """
+       Detects Qt programs such as qmake, moc, uic, lrelease
+       """
+       env = self.env
+       opt = Options.options
+
+       qtdir = getattr(opt, 'qtdir', '')
+       qtbin = getattr(opt, 'qtbin', '')
+
+       paths = []
+
+       if qtdir:
+               qtbin = os.path.join(qtdir, 'bin')
+
+       # the qt directory has been given from QT5_ROOT - deduce the qt binary path
+       if not qtdir:
+               qtdir = self.environ.get('QT5_ROOT', '')
+               qtbin = self.environ.get('QT5_BIN') or os.path.join(qtdir, 'bin')
+
+       if qtbin:
+               paths = [qtbin]
+
+       # no qtdir, look in the path and in /usr/local/Trolltech
+       if not qtdir:
+               paths = self.environ.get('PATH', '').split(os.pathsep)
+               paths.extend(['/usr/share/qt5/bin', '/usr/local/lib/qt5/bin'])
+               try:
+                       lst = Utils.listdir('/usr/local/Trolltech/')
+               except OSError:
+                       pass
+               else:
+                       if lst:
+                               lst.sort()
+                               lst.reverse()
+
+                               # keep the highest version
+                               qtdir = '/usr/local/Trolltech/%s/' % lst[0]
+                               qtbin = os.path.join(qtdir, 'bin')
+                               paths.append(qtbin)
+
+       # at the end, try to find qmake in the paths given
+       # keep the one with the highest version
+       cand = None
+       prev_ver = ['5', '0', '0']
+       for qmk in ('qmake-qt5', 'qmake5', 'qmake'):
+               try:
+                       qmake = self.find_program(qmk, path_list=paths)
+               except self.errors.ConfigurationError:
+                       pass
+               else:
+                       try:
+                               version = self.cmd_and_log(qmake + ['-query', 'QT_VERSION']).strip()
+                       except self.errors.WafError:
+                               pass
+                       else:
+                               if version:
+                                       new_ver = version.split('.')
+                                       if new_ver > prev_ver:
+                                               cand = qmake
+                                               prev_ver = new_ver
+
+       # qmake could not be found easily, rely on qtchooser
+       if not cand:
+               try:
+                       self.find_program('qtchooser')
+               except self.errors.ConfigurationError:
+                       pass
+               else:
+                       cmd = self.env.QTCHOOSER + ['-qt=5', '-run-tool=qmake']
+                       try:
+                               version = self.cmd_and_log(cmd + ['-query', 'QT_VERSION'])
+                       except self.errors.WafError:
+                               pass
+                       else:
+                               cand = cmd
+
+       if cand:
+               self.env.QMAKE = cand
+       else:
+               self.fatal('Could not find qmake for qt5')
+
+       self.env.QT_HOST_BINS = qtbin = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_HOST_BINS']).strip()
+       paths.insert(0, qtbin)
+
+       def find_bin(lst, var):
+               if var in env:
+                       return
+               for f in lst:
+                       try:
+                               ret = self.find_program(f, path_list=paths)
+                       except self.errors.ConfigurationError:
+                               pass
+                       else:
+                               env[var]=ret
+                               break
+
+       find_bin(['uic-qt5', 'uic'], 'QT_UIC')
+       if not env.QT_UIC:
+               self.fatal('cannot find the uic compiler for qt5')
+
+       self.start_msg('Checking for uic version')
+       uicver = self.cmd_and_log(env.QT_UIC + ['-version'], output=Context.BOTH)
+       uicver = ''.join(uicver).strip()
+       uicver = uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt', '')
+       self.end_msg(uicver)
+       if uicver.find(' 3.') != -1 or uicver.find(' 4.') != -1:
+               self.fatal('this uic compiler is for qt3 or qt5, add uic for qt5 to your path')
+
+       find_bin(['moc-qt5', 'moc'], 'QT_MOC')
+       find_bin(['rcc-qt5', 'rcc'], 'QT_RCC')
+       find_bin(['lrelease-qt5', 'lrelease'], 'QT_LRELEASE')
+       find_bin(['lupdate-qt5', 'lupdate'], 'QT_LUPDATE')
+
+       env.UIC_ST = '%s -o %s'
+       env.MOC_ST = '-o'
+       env.ui_PATTERN = 'ui_%s.h'
+       env.QT_LRELEASE_FLAGS = ['-silent']
+       env.MOCCPPPATH_ST = '-I%s'
+       env.MOCDEFINES_ST = '-D%s'
+
+@conf
+def set_qt5_libs_dir(self):
+	"""
+	Determines the directory holding the Qt5 libraries and stores it in
+	``env.QTLIBS``: the ``--qtlibs`` option first, then the ``QT5_LIBDIR``
+	environment variable, then ``qmake -query QT_INSTALL_LIBS``.
+	"""
+	env = self.env
+	qtlibs = getattr(Options.options, 'qtlibs', None) or self.environ.get('QT5_LIBDIR')
+	if not qtlibs:
+		try:
+			qtlibs = self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_LIBS']).strip()
+		except Errors.WafError:
+			# fall back to <prefix>/lib when QT_INSTALL_LIBS cannot be queried
+			qtdir = self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_PREFIX']).strip()
+			qtlibs = os.path.join(qtdir, 'lib')
+	self.msg('Found the Qt5 libraries in', qtlibs)
+	env.QTLIBS = qtlibs
+
+@conf
+def find_single_qt5_lib(self, name, uselib, qtlibs, qtincludes, force_static):
+       env = self.env
+       if force_static:
+               exts = ('.a', '.lib')
+               prefix = 'STLIB'
+       else:
+               exts = ('.so', '.lib')
+               prefix = 'LIB'
+
+       def lib_names():
+               for x in exts:
+                       for k in ('', '5') if Utils.is_win32 else ['']:
+                               for p in ('lib', ''):
+                                       yield (p, name, k, x)
+               raise StopIteration
+
+       for tup in lib_names():
+               k = ''.join(tup)
+               path = os.path.join(qtlibs, k)
+               if os.path.exists(path):
+                       if env.DEST_OS == 'win32':
+                               libval = ''.join(tup[:-1])
+                       else:
+                               libval = name
+                       env.append_unique(prefix + '_' + uselib, libval)
+                       env.append_unique('%sPATH_%s' % (prefix, uselib), qtlibs)
+                       env.append_unique('INCLUDES_' + uselib, qtincludes)
+                       env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, name.replace('Qt5', 'Qt')))
+                       return k
+       return False
+
+@conf
+def find_qt5_libraries(self):
+	"""
+	Detects the Qt5 libraries listed in ``self.qt5_vars``. Uses pkg-config
+	when available; otherwise falls back to a filesystem search (frameworks
+	on OSX, plain libraries elsewhere via :py:func:`find_single_qt5_lib`).
+	"""
+	env = self.env
+
+	qtincludes =  self.environ.get('QT5_INCLUDES') or self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_HEADERS']).strip()
+	force_static = self.environ.get('QT5_FORCE_STATIC')
+	try:
+		# QT5_XCOMPILE forces the fallback path below by faking a failure
+		if self.environ.get('QT5_XCOMPILE'):
+			self.fatal('QT5_XCOMPILE Disables pkg-config detection')
+		self.check_cfg(atleast_pkgconfig_version='0.1')
+	except self.errors.ConfigurationError:
+		for i in self.qt5_vars:
+			uselib = i.upper()
+			if Utils.unversioned_sys_platform() == 'darwin':
+				# Since at least qt 4.7.3 each library locates in separate directory
+				fwk = i.replace('Qt5', 'Qt')
+				frameworkName = fwk + '.framework'
+
+				qtDynamicLib = os.path.join(env.QTLIBS, frameworkName, fwk)
+				if os.path.exists(qtDynamicLib):
+					env.append_unique('FRAMEWORK_' + uselib, fwk)
+					env.append_unique('FRAMEWORKPATH_' + uselib, env.QTLIBS)
+					self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
+				else:
+					self.msg('Checking for %s' % i, False, 'YELLOW')
+				env.append_unique('INCLUDES_' + uselib, os.path.join(env.QTLIBS, frameworkName, 'Headers'))
+			else:
+				for j in ('', 'd'):
+					# 'd' suffix: debug variants map to the _DEBUG uselib
+					k = '_DEBUG' if j == 'd' else ''
+					ret = self.find_single_qt5_lib(i + j, uselib + k, env.QTLIBS, qtincludes, force_static)
+					if not force_static and not ret:
+						# retry as a static library before giving up
+						ret = self.find_single_qt5_lib(i + j, uselib + k, env.QTLIBS, qtincludes, True)
+					self.msg('Checking for %s' % (i + j), ret, 'GREEN' if ret else 'YELLOW')
+	else:
+		# pkg-config is available: let check_cfg perform the detection
+		path = '%s:%s:%s/pkgconfig:/usr/lib/qt5/lib/pkgconfig:/opt/qt5/lib/pkgconfig:/usr/lib/qt5/lib:/opt/qt5/lib' % (
+			self.environ.get('PKG_CONFIG_PATH', ''), env.QTLIBS, env.QTLIBS)
+		for i in self.qt5_vars_debug + self.qt5_vars:
+			self.check_cfg(package=i, args='--cflags --libs', mandatory=False, force_static=force_static, pkg_config_path=path)
+
+@conf
+def simplify_qt5_libs(self):
+	"""
+	Since library paths make really long command-lines,
+	and since everything depends on qtcore, remove the qtcore ones from qtgui, etc
+	"""
+	env = self.env
+	def process_lib(vars_, coreval):
+		# drop from LIBPATH_<var> every path already present in env[coreval]
+		for d in vars_:
+			var = d.upper()
+			if var == 'QTCORE':
+				continue
+
+			value = env['LIBPATH_'+var]
+			if value:
+				core = env[coreval]
+				accu = []
+				for lib in value:
+					if lib in core:
+						continue
+					accu.append(lib)
+				env['LIBPATH_'+var] = accu
+	process_lib(self.qt5_vars,       'LIBPATH_QTCORE')
+	process_lib(self.qt5_vars_debug, 'LIBPATH_QTCORE_DEBUG')
+
+@conf
+def add_qt5_rpath(self):
+	"""
+	Defines rpath entries for Qt libraries (only when the --want-rpath
+	command-line option was given)
+	"""
+	env = self.env
+	if getattr(Options.options, 'want_rpath', False):
+		def process_rpath(vars_, coreval):
+			# build RPATH_<var> from LIBPATH_<var>, skipping paths that
+			# qtcore already provides (except for QTCORE itself)
+			for d in vars_:
+				var = d.upper()
+				value = env['LIBPATH_' + var]
+				if value:
+					core = env[coreval]
+					accu = []
+					for lib in value:
+						if var != 'QTCORE':
+							if lib in core:
+								continue
+						accu.append('-Wl,--rpath='+lib)
+					env['RPATH_' + var] = accu
+		process_rpath(self.qt5_vars,       'LIBPATH_QTCORE')
+		process_rpath(self.qt5_vars_debug, 'LIBPATH_QTCORE_DEBUG')
+
+@conf
+def set_qt5_libs_to_check(self):
+       self.qt5_vars = Utils.to_list(getattr(self, 'qt5_vars', []))
+       if not self.qt5_vars:
+               dirlst = Utils.listdir(self.env.QTLIBS)
+
+               pat = self.env.cxxshlib_PATTERN
+               if Utils.is_win32:
+                       pat = pat.replace('.dll', '.lib')
+               if self.environ.get('QT5_FORCE_STATIC'):
+                       pat = self.env.cxxstlib_PATTERN
+               if Utils.unversioned_sys_platform() == 'darwin':
+                       pat = "%s\.framework"
+               re_qt = re.compile(pat%'Qt5?(?P<name>.*)'+'$')
+               for x in dirlst:
+                       m = re_qt.match(x)
+                       if m:
+                               self.qt5_vars.append("Qt5%s" % m.group('name'))
+               if not self.qt5_vars:
+                       self.fatal('cannot find any Qt5 library (%r)' % self.env.QTLIBS)
+
+       qtextralibs = getattr(Options.options, 'qtextralibs', None)
+       if qtextralibs:
+               self.qt5_vars.extend(qtextralibs.split(','))
+
+       if not hasattr(self, 'qt5_vars_debug'):
+               self.qt5_vars_debug = [a + '_DEBUG' for a in self.qt5_vars]
+       self.qt5_vars_debug = Utils.to_list(self.qt5_vars_debug)
+
+@conf
+def set_qt5_defines(self):
+	"""
+	Defines QT_<NAME>_LIB for each Qt5 library (Windows only).
+	"""
+	if sys.platform != 'win32':
+		return
+	for x in self.qt5_vars:
+		# e.g. 'Qt5Widgets' -> 'WIDGETS'
+		y=x.replace('Qt5', 'Qt')[2:].upper()
+		self.env.append_unique('DEFINES_%s' % x.upper(), 'QT_%s_LIB' % y)
+		self.env.append_unique('DEFINES_%s_DEBUG' % x.upper(), 'QT_%s_LIB' % y)
+
+def options(opt):
+	"""
+	Command-line options: --want-rpath, --qtdir, --qtbin, --qtlibs,
+	--translate and --qtextralibs
+	"""
+	opt.add_option('--want-rpath', action='store_true', default=False, dest='want_rpath', help='enable the rpath for qt libraries')
+	for i in 'qtdir qtbin qtlibs'.split():
+		opt.add_option('--'+i, type='string', default='', dest=i)
+
+	opt.add_option('--translate', action='store_true', help='collect translation strings', dest='trans_qt5', default=False)
+	opt.add_option('--qtextralibs', type='string', default='', dest='qtextralibs', help='additional qt libraries on the system to add to default ones, comma separated')
diff --git a/third_party/waf/waflib/Tools/ruby.py b/third_party/waf/waflib/Tools/ruby.py
new file mode 100644 (file)
index 0000000..b407c36
--- /dev/null
@@ -0,0 +1,189 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# daniel.svensson at purplescout.se 2008
+# Thomas Nagy 2016 (ita)
+
+"""
+Support for Ruby extensions. A C/C++ compiler is required::
+
+       def options(opt):
+               opt.load('compiler_c ruby')
+       def configure(conf):
+               conf.load('compiler_c ruby')
+               conf.check_ruby_version((1,8,0))
+               conf.check_ruby_ext_devel()
+               conf.check_ruby_module('libxml')
+       def build(bld):
+               bld(
+                       features = 'c cshlib rubyext',
+                       source = 'rb_mytest.c',
+                       target = 'mytest_ext',
+                       install_path = '${ARCHDIR_RUBY}')
+               bld.install_files('${LIBDIR_RUBY}', 'Mytest.rb')
+"""
+
+import os
+from waflib import Options, Utils, Task
+from waflib.TaskGen import before_method, feature, extension
+from waflib.Configure import conf
+
+@feature('rubyext')
+@before_method('apply_incpaths', 'process_source', 'apply_bundle', 'apply_link')
+def init_rubyext(self):
+	"""
+	Add required variables for ruby extensions: the install path and the
+	RUBY/RUBYEXT uselib entries
+	"""
+	self.install_path = '${ARCHDIR_RUBY}'
+	self.uselib = self.to_list(getattr(self, 'uselib', ''))
+	if not 'RUBY' in self.uselib:
+		self.uselib.append('RUBY')
+	if not 'RUBYEXT' in self.uselib:
+		self.uselib.append('RUBYEXT')
+
+@feature('rubyext')
+@before_method('apply_link', 'propagate_uselib_vars')
+def apply_ruby_so_name(self):
+	"""
+	Strip the *lib* prefix from ruby extensions by using the pattern
+	computed in :py:func:`check_ruby_ext_devel` (env.rubyext_PATTERN)
+	"""
+	self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.rubyext_PATTERN
+
+@conf
+def check_ruby_version(self, minver=()):
+	"""
+	Checks if ruby is installed.
+	If installed the variable RUBY will be set in environment.
+	The ruby binary can be overridden by ``--with-ruby-binary`` command-line option.
+
+	:param minver: minimum version as a tuple of ints, e.g. ``(1, 8, 0)``
+	"""
+
+	ruby = self.find_program('ruby', var='RUBY', value=Options.options.rubybinary)
+
+	try:
+		version = self.cmd_and_log(ruby + ['-e', 'puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
+	except Exception:
+		self.fatal('could not determine ruby version')
+	self.env.RUBY_VERSION = version
+
+	try:
+		ver = tuple(map(int, version.split(".")))
+	except Exception:
+		self.fatal('unsupported ruby version %r' % version)
+
+	cver = ''
+	if minver:
+		cver = '> ' + '.'.join(str(x) for x in minver)
+		if ver < minver:
+			self.fatal('ruby is too old %r' % ver)
+
+	self.msg('Checking for ruby version %s' % cver, version)
+
+@conf
+def check_ruby_ext_devel(self):
+	"""
+	Check if a ruby extension can be created: requires a prior ruby
+	detection and a C/C++ compiler. Sets the RUBYEXT uselib variables
+	and the ARCHDIR_RUBY/LIBDIR_RUBY install paths.
+	"""
+	if not self.env.RUBY:
+		self.fatal('ruby detection is required first')
+
+	if not self.env.CC_NAME and not self.env.CXX_NAME:
+		self.fatal('load a c/c++ compiler first')
+
+	version = tuple(map(int, self.env.RUBY_VERSION.split(".")))
+
+	def read_out(cmd):
+		# run a ruby one-liner with rbconfig loaded and split the output
+		return Utils.to_list(self.cmd_and_log(self.env.RUBY + ['-rrbconfig', '-e', cmd]))
+
+	def read_config(key):
+		# fetch one RbConfig::CONFIG entry as a list of tokens
+		return read_out('puts RbConfig::CONFIG[%r]' % key)
+
+	cpppath = archdir = read_config('archdir')
+
+	if version >= (1, 9, 0):
+		# ruby >= 1.9 moved the headers out of archdir
+		ruby_hdrdir = read_config('rubyhdrdir')
+		cpppath += ruby_hdrdir
+		if version >= (2, 0, 0):
+			cpppath += read_config('rubyarchhdrdir')
+		cpppath += [os.path.join(ruby_hdrdir[0], read_config('arch')[0])]
+
+	self.check(header_name='ruby.h', includes=cpppath, errmsg='could not find ruby header file', link_header_test=False)
+
+	self.env.LIBPATH_RUBYEXT = read_config('libdir')
+	self.env.LIBPATH_RUBYEXT += archdir
+	self.env.INCLUDES_RUBYEXT = cpppath
+	self.env.CFLAGS_RUBYEXT = read_config('CCDLFLAGS')
+	self.env.rubyext_PATTERN = '%s.' + read_config('DLEXT')[0]
+
+	# ok this is really stupid, but the command and flags are combined.
+	# so we try to find the first argument...
+	flags = read_config('LDSHARED')
+	while flags and flags[0][0] != '-':
+		flags = flags[1:]
+
+	# we also want to strip out the deprecated ppc flags
+	if len(flags) > 1 and flags[1] == "ppc":
+		flags = flags[2:]
+
+	self.env.LINKFLAGS_RUBYEXT = flags
+	self.env.LINKFLAGS_RUBYEXT += read_config('LIBS')
+	self.env.LINKFLAGS_RUBYEXT += read_config('LIBRUBYARG_SHARED')
+
+	# install locations: command-line options win over rbconfig values
+	if Options.options.rubyarchdir:
+		self.env.ARCHDIR_RUBY = Options.options.rubyarchdir
+	else:
+		self.env.ARCHDIR_RUBY = read_config('sitearchdir')[0]
+
+	if Options.options.rubylibdir:
+		self.env.LIBDIR_RUBY = Options.options.rubylibdir
+	else:
+		self.env.LIBDIR_RUBY = read_config('sitelibdir')[0]
+
+@conf
+def check_ruby_module(self, module_name):
+	"""
+	Check if the selected ruby interpreter can require the given ruby module::
+
+		def configure(conf):
+			conf.check_ruby_module('libxml')
+
+	:param module_name: module
+	:type  module_name: string
+	:raises: :py:class:`waflib.Errors.ConfigurationError` via ``self.fatal`` when the module cannot be required
+	"""
+	self.start_msg('Ruby module %s' % module_name)
+	try:
+		self.cmd_and_log(self.env.RUBY + ['-e', 'require \'%s\';puts 1' % module_name])
+	except Exception:
+		self.end_msg(False)
+		self.fatal('Could not find the ruby module %r' % module_name)
+	self.end_msg(True)
+
+@extension('.rb')
+def process(self, node):
+	"""
+	Creates a :py:class:`run_ruby` task for each ``.rb`` source file
+	"""
+	return self.create_task('run_ruby', node)
+
+class run_ruby(Task.Task):
+	"""
+	Task to run ruby files detected by file extension .rb::
+
+		def options(opt):
+			opt.load('ruby')
+
+		def configure(ctx):
+			ctx.check_ruby_version()
+
+		def build(bld):
+			bld.env.RBFLAGS = '-e puts "hello world"'
+			bld(source='a_ruby_file.rb')
+	"""
+	# the source file's directory is added to the ruby load path (-I)
+	run_str = '${RUBY} ${RBFLAGS} -I ${SRC[0].parent.abspath()} ${SRC}'
+
+def options(opt):
+	"""
+	Add the ``--with-ruby-archdir``, ``--with-ruby-libdir`` and ``--with-ruby-binary`` options
+	"""
+	opt.add_option('--with-ruby-archdir', type='string', dest='rubyarchdir', help='Specify directory where to install arch specific files')
+	opt.add_option('--with-ruby-libdir', type='string', dest='rubylibdir', help='Specify alternate ruby library path')
+	opt.add_option('--with-ruby-binary', type='string', dest='rubybinary', help='Specify alternate ruby binary')
diff --git a/third_party/waf/waflib/Tools/suncc.py b/third_party/waf/waflib/Tools/suncc.py
new file mode 100644 (file)
index 0000000..cc59e54
--- /dev/null
@@ -0,0 +1,69 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2016 (ita)
+# Ralf Habacker, 2006 (rh)
+
+from waflib.Tools import ccroot, ar
+from waflib.Configure import conf
+
+@conf
+def find_scc(conf):
+	"""
+	Detects the Sun C compiler: finds ``cc`` and verifies it accepts
+	the Sun-specific ``-flags`` option. Sets ``CC_NAME`` to ``sun``.
+	"""
+	v = conf.env
+	cc = conf.find_program('cc', var='CC')
+	try:
+		# only the Sun compiler accepts '-flags'; gcc et al. would fail here
+		conf.cmd_and_log(cc + ['-flags'])
+	except Exception:
+		conf.fatal('%r is not a Sun compiler' % cc)
+	v.CC_NAME = 'sun'
+	conf.get_suncc_version(cc)
+
+@conf
+def scc_common_flags(conf):
+	"""
+	Flags required for executing the sun C compiler
+	"""
+	v = conf.env
+
+	v.CC_SRC_F            = []
+	v.CC_TGT_F            = ['-c', '-o', '']
+
+	# the compiler doubles as the linker unless one was set already
+	if not v.LINK_CC:
+		v.LINK_CC = v.CC
+
+	v.CCLNK_SRC_F         = ''
+	v.CCLNK_TGT_F         = ['-o', '']
+	v.CPPPATH_ST          = '-I%s'
+	v.DEFINES_ST          = '-D%s'
+
+	v.LIB_ST              = '-l%s' # template for adding libs
+	v.LIBPATH_ST          = '-L%s' # template for adding libpaths
+	v.STLIB_ST            = '-l%s'
+	v.STLIBPATH_ST        = '-L%s'
+
+	v.SONAME_ST           = '-Wl,-h,%s'
+	v.SHLIB_MARKER        = '-Bdynamic'
+	v.STLIB_MARKER        = '-Bstatic'
+
+	v.cprogram_PATTERN    = '%s'
+
+	# position-independent code for shared libraries, Sun-style
+	v.CFLAGS_cshlib       = ['-xcode=pic32', '-DPIC']
+	v.LINKFLAGS_cshlib    = ['-G']
+	v.cshlib_PATTERN      = 'lib%s.so'
+
+	v.LINKFLAGS_cstlib    = ['-Bstatic']
+	v.cstlib_PATTERN      = 'lib%s.a'
+
+def configure(conf):
+	"""
+	Configuration for the Sun C compiler
+	"""
+	conf.find_scc()
+	conf.find_ar()
+	conf.scc_common_flags()
+	conf.cc_load_tools()
+	conf.cc_add_flags()
+	conf.link_add_flags()
diff --git a/third_party/waf/waflib/Tools/suncxx.py b/third_party/waf/waflib/Tools/suncxx.py
new file mode 100644 (file)
index 0000000..8d024e5
--- /dev/null
@@ -0,0 +1,69 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2016 (ita)
+# Ralf Habacker, 2006 (rh)
+
+from waflib.Tools import ccroot, ar
+from waflib.Configure import conf
+
+@conf
+def find_sxx(conf):
+	"""
+	Detects the sun C++ compiler: finds ``CC`` or ``c++`` and verifies it
+	accepts the Sun-specific ``-flags`` option. Sets ``CXX_NAME`` to ``sun``.
+	"""
+	v = conf.env
+	cc = conf.find_program(['CC', 'c++'], var='CXX')
+	try:
+		# only the Sun compiler accepts '-flags'; g++ et al. would fail here
+		conf.cmd_and_log(cc + ['-flags'])
+	except Exception:
+		conf.fatal('%r is not a Sun compiler' % cc)
+	v.CXX_NAME = 'sun'
+	conf.get_suncc_version(cc)
+
+@conf
+def sxx_common_flags(conf):
+	"""
+	Flags required for executing the sun C++ compiler
+	"""
+	v = conf.env
+
+	v.CXX_SRC_F           = []
+	v.CXX_TGT_F           = ['-c', '-o', '']
+
+	# the compiler doubles as the linker unless one was set already
+	if not v.LINK_CXX:
+		v.LINK_CXX = v.CXX
+
+	v.CXXLNK_SRC_F        = []
+	v.CXXLNK_TGT_F        = ['-o', '']
+	v.CPPPATH_ST          = '-I%s'
+	v.DEFINES_ST          = '-D%s'
+
+	v.LIB_ST              = '-l%s' # template for adding libs
+	v.LIBPATH_ST          = '-L%s' # template for adding libpaths
+	v.STLIB_ST            = '-l%s'
+	v.STLIBPATH_ST        = '-L%s'
+
+	v.SONAME_ST           = '-Wl,-h,%s'
+	v.SHLIB_MARKER        = '-Bdynamic'
+	v.STLIB_MARKER        = '-Bstatic'
+
+	v.cxxprogram_PATTERN  = '%s'
+
+	# position-independent code for shared libraries, Sun-style
+	v.CXXFLAGS_cxxshlib   = ['-xcode=pic32', '-DPIC']
+	v.LINKFLAGS_cxxshlib  = ['-G']
+	v.cxxshlib_PATTERN    = 'lib%s.so'
+
+	v.LINKFLAGS_cxxstlib  = ['-Bstatic']
+	v.cxxstlib_PATTERN    = 'lib%s.a'
+
+def configure(conf):
+	"""
+	Configuration for the Sun C++ compiler
+	"""
+	conf.find_sxx()
+	conf.find_ar()
+	conf.sxx_common_flags()
+	conf.cxx_load_tools()
+	conf.cxx_add_flags()
+	conf.link_add_flags()
diff --git a/third_party/waf/waflib/Tools/tex.py b/third_party/waf/waflib/Tools/tex.py
new file mode 100644 (file)
index 0000000..e5888b9
--- /dev/null
@@ -0,0 +1,543 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2016 (ita)
+
+"""
+TeX/LaTeX/PDFLaTeX/XeLaTeX support
+
+Example::
+
+       def configure(conf):
+               conf.load('tex')
+               if not conf.env.LATEX:
+                       conf.fatal('The program LaTex is required')
+
+       def build(bld):
+               bld(
+                       features = 'tex',
+                       type     = 'latex', # pdflatex or xelatex
+                       source   = 'document.ltx', # mandatory, the source
+                       outs     = 'ps', # 'pdf' or 'ps pdf'
+                       deps     = 'crossreferencing.lst', # to give dependencies directly
+                       prompt   = 1, # 0 for the batch mode
+               )
+
+Notes:
+
+- To configure with a special program, use::
+
+     $ PDFLATEX=luatex waf configure
+
+- This tool does not use the target attribute of the task generator
+  (``bld(target=...)``); the target file name is built from the source
+  base name and the output type(s)
+"""
+
+import os, re
+from waflib import Utils, Task, Errors, Logs, Node
+from waflib.TaskGen import feature, before_method
+
+re_bibunit = re.compile(r'\\(?P<type>putbib)\[(?P<file>[^\[\]]*)\]',re.M)
+def bibunitscan(self):
+	"""
+	Parses TeX inputs and try to find the *bibunit* file dependencies
+
+	:return: list of bibunit files
+	:rtype: list of :py:class:`waflib.Node.Node`
+	"""
+	node = self.inputs[0]
+
+	nodes = []
+	if not node: return nodes
+
+	code = node.read()
+	for match in re_bibunit.finditer(code):
+		path = match.group('file')
+		if path:
+			for k in ('', '.bib'):
+				# add another loop for the tex include paths?
+				Logs.debug('tex: trying %s%s', path, k)
+				fi = node.parent.find_resource(path + k)
+				if fi:
+					nodes.append(fi)
+					# no break, people are crazy
+			else:
+				# NOTE(review): this 'else' belongs to the inner for-loop and
+				# runs whenever it finishes without 'break' (i.e. always,
+				# since there is no break) - presumably intentional logging
+				Logs.debug('tex: could not find %s', path)
+
+	Logs.debug('tex: found the following bibunit files: %s', nodes)
+	return nodes
+
+exts_deps_tex = ['', '.ltx', '.tex', '.bib', '.pdf', '.png', '.eps', '.ps', '.sty']
+"""List of typical file extensions included in latex files"""
+
+exts_tex = ['.ltx', '.tex']
+"""List of typical file extensions that contain latex"""
+
+re_tex = re.compile(r'\\(?P<type>usepackage|RequirePackage|include|bibliography([^\[\]{}]*)|putbib|includegraphics|input|import|bringin|lstinputlisting)(\[[^\[\]]*\])?{(?P<file>[^{}]*)}',re.M)
+"""Regexp for expressions that may include latex files"""
+
+g_bibtex_re = re.compile('bibdata', re.M)
+"""Regexp for bibtex files"""
+
+# '\\@' in a plain string is a literal backslash followed by '@'
+g_glossaries_re = re.compile('\\@newglossary', re.M)
+"""Regexp for expressions that create glossaries"""
+
+class tex(Task.Task):
+       """
+       Compiles a tex/latex file.
+
+       .. inheritance-diagram:: waflib.Tools.tex.latex waflib.Tools.tex.xelatex waflib.Tools.tex.pdflatex
+       """
+
+       bibtex_fun, _ = Task.compile_fun('${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}', shell=False)
+       bibtex_fun.__doc__ = """
+       Execute the program **bibtex**
+       """
+
+       makeindex_fun, _ = Task.compile_fun('${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}', shell=False)
+       makeindex_fun.__doc__ = """
+       Execute the program **makeindex**
+       """
+
+       makeglossaries_fun, _ = Task.compile_fun('${MAKEGLOSSARIES} ${SRCFILE}', shell=False)
+       makeglossaries_fun.__doc__ = """
+       Execute the program **makeglossaries**
+       """
+
+	def exec_command(self, cmd, **kw):
+		"""
+		Executes TeX commands without buffering (latex may prompt for inputs)
+
+		:return: the return code
+		:rtype: int
+		"""
+		if self.env.PROMPT_LATEX:
+			# disable output capture so latex can prompt on the console
+			kw['stdout'] = kw['stderr'] = None
+		return super(tex, self).exec_command(cmd, **kw)
+
+	def scan_aux(self, node):
+		"""
+		Recursive regex-based scanner that finds included auxiliary files
+		(``\\@input{...}`` statements), starting from *node*.
+
+		:return: the aux nodes found, including *node* itself
+		"""
+		nodes = [node]
+		re_aux = re.compile(r'\\@input{(?P<file>[^{}]*)}', re.M)
+
+		def parse_node(node):
+			# recurse into every newly discovered aux file exactly once
+			code = node.read()
+			for match in re_aux.finditer(code):
+				path = match.group('file')
+				found = node.parent.find_or_declare(path)
+				if found and found not in nodes:
+					Logs.debug('tex: found aux node %r', found)
+					nodes.append(found)
+					parse_node(found)
+		parse_node(node)
+		return nodes
+
	def scan(self):
		"""
		Recursive regex-based scanner that finds latex dependencies. It uses :py:attr:`waflib.Tools.tex.re_tex`

		Depending on your needs you might want:

		* to change re_tex::

			from waflib.Tools import tex
			tex.re_tex = myregex

		* or to change the method scan from the latex tasks::

			from waflib.Task import classes
			classes['latex'].scan = myscanfunction

		:return: a tuple (nodes, names): dependency nodes that were resolved,
			and raw path strings that could not be resolved to nodes
		"""
		node = self.inputs[0]

		nodes = []  # dependency nodes found in the texinputs folders
		names = []  # dependencies that could not be resolved to nodes
		seen = []   # guard against circular inclusions
		if not node: return (nodes, names)

		def parse_node(node):
			# scan one tex file, recursing into any included tex files found
			if node in seen:
				return
			seen.append(node)
			code = node.read()
			# reference the module-level regex so monkey-patching tex.re_tex works
			global re_tex
			for match in re_tex.finditer(code):

				# 'bibliography<suffix>' commands come from the multibib package;
				# 'bibliographystyle...' is not a file dependency and is skipped
				multibib = match.group('type')
				if multibib and multibib.startswith('bibliography'):
					multibib = multibib[len('bibliography'):]
					if multibib.startswith('style'):
						continue
				else:
					multibib = None

				for path in match.group('file').split(','):
					if path:
						add_name = True
						found = None
						for k in exts_deps_tex:

							# issue 1067, scan in all texinputs folders
							for up in self.texinputs_nodes:
								Logs.debug('tex: trying %s%s', path, k)
								found = up.find_resource(path + k)
								if found:
									break


							# for/else: only record files that are not produced by
							# other tasks of this generator (the loop breaks on a match)
							for tsk in self.generator.tasks:
								if not found or found in tsk.outputs:
									break
							else:
								nodes.append(found)
								add_name = False
								for ext in exts_tex:
									if found.name.endswith(ext):
										parse_node(found)
										break

							# multibib stuff
							if found and multibib and found.name.endswith('.bib'):
								try:
									self.multibibs.append(found)
								except AttributeError:
									self.multibibs = [found]

							# no break, people are crazy
						if add_name:
							names.append(path)
		parse_node(node)

		# make sure the build directory counterparts of the folders exist
		for x in nodes:
			x.parent.get_bld().mkdir()

		Logs.debug("tex: found the following : %s and names %s", nodes, names)
		return (nodes, names)
+
+       def check_status(self, msg, retcode):
+               """
+               Checks an exit status and raise an error with a particular message
+
+               :param msg: message to display if the code is non-zero
+               :type msg: string
+               :param retcode: condition
+               :type retcode: boolean
+               """
+               if retcode != 0:
+                       raise Errors.WafError('%r command exit status %r' % (msg, retcode))
+
+       def info(self, *k, **kw):
+               try:
+                       info = self.generator.bld.conf.logger.info
+               except AttributeError:
+                       info = Logs.info
+               info(*k, **kw)
+
+       def bibfile(self):
+               """
+               Parses *.aux* files to find bibfiles to process.
+               If present, execute :py:meth:`waflib.Tools.tex.tex.bibtex_fun`
+               """
+               for aux_node in self.aux_nodes:
+                       try:
+                               ct = aux_node.read()
+                       except EnvironmentError:
+                               Logs.error('Error reading %s: %r', aux_node.abspath())
+                               continue
+
+                       if g_bibtex_re.findall(ct):
+                               self.info('calling bibtex')
+
+                               self.env.env = {}
+                               self.env.env.update(os.environ)
+                               self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
+                               self.env.SRCFILE = aux_node.name[:-4]
+                               self.check_status('error when calling bibtex', self.bibtex_fun())
+
+               for node in getattr(self, 'multibibs', []):
+                       self.env.env = {}
+                       self.env.env.update(os.environ)
+                       self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
+                       self.env.SRCFILE = node.name[:-4]
+                       self.check_status('error when calling bibtex', self.bibtex_fun())
+
+       def bibunits(self):
+               """
+               Parses *.aux* file to find bibunit files. If there are bibunit files,
+               runs :py:meth:`waflib.Tools.tex.tex.bibtex_fun`.
+               """
+               try:
+                       bibunits = bibunitscan(self)
+               except OSError:
+                       Logs.error('error bibunitscan')
+               else:
+                       if bibunits:
+                               fn  = ['bu' + str(i) for i in range(1, len(bibunits) + 1)]
+                               if fn:
+                                       self.info('calling bibtex on bibunits')
+
+                               for f in fn:
+                                       self.env.env = {'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()}
+                                       self.env.SRCFILE = f
+                                       self.check_status('error when calling bibtex', self.bibtex_fun())
+
+       def makeindex(self):
+               """
+               Searches the filesystem for *.idx* files to process. If present,
+               runs :py:meth:`waflib.Tools.tex.tex.makeindex_fun`
+               """
+               self.idx_node = self.inputs[0].change_ext('.idx')
+               try:
+                       idx_path = self.idx_node.abspath()
+                       os.stat(idx_path)
+               except OSError:
+                       self.info('index file %s absent, not calling makeindex', idx_path)
+               else:
+                       self.info('calling makeindex')
+
+                       self.env.SRCFILE = self.idx_node.name
+                       self.env.env = {}
+                       self.check_status('error when calling makeindex %s' % idx_path, self.makeindex_fun())
+
+       def bibtopic(self):
+               """
+               Lists additional .aux files from the bibtopic package
+               """
+               p = self.inputs[0].parent.get_bld()
+               if os.path.exists(os.path.join(p.abspath(), 'btaux.aux')):
+                       self.aux_nodes += p.ant_glob('*[0-9].aux')
+
+       def makeglossaries(self):
+               """
+               Lists additional glossaries from .aux files. If present, runs the makeglossaries program.
+               """
+               src_file = self.inputs[0].abspath()
+               base_file = os.path.basename(src_file)
+               base, _ = os.path.splitext(base_file)
+               for aux_node in self.aux_nodes:
+                       try:
+                               ct = aux_node.read()
+                       except EnvironmentError:
+                               Logs.error('Error reading %s: %r', aux_node.abspath())
+                               continue
+
+                       if g_glossaries_re.findall(ct):
+                               if not self.env.MAKEGLOSSARIES:
+                                       raise Errors.WafError("The program 'makeglossaries' is missing!")
+                               Logs.warn('calling makeglossaries')
+                               self.env.SRCFILE = base
+                               self.check_status('error when calling makeglossaries %s' % base, self.makeglossaries_fun())
+                               return
+
+       def texinputs(self):
+               """
+               Returns the list of texinput nodes as a string suitable for the TEXINPUTS environment variables
+
+               :rtype: string
+               """
+               return os.pathsep.join([k.abspath() for k in self.texinputs_nodes]) + os.pathsep
+
	def run(self):
		"""
		Runs the whole TeX build process

		Multiple passes are required depending on the usage of cross-references,
		bibliographies, glossaries, indexes and additional contents
		The appropriate TeX compiler is called until the *.aux* files stop changing.
		"""
		env = self.env

		if not env.PROMPT_LATEX:
			# suppress interactive prompts so that a broken document cannot hang the build
			env.append_value('LATEXFLAGS', '-interaction=batchmode')
			env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
			env.append_value('XELATEXFLAGS', '-interaction=batchmode')

		# important, set the cwd for everybody
		self.cwd = self.inputs[0].parent.get_bld()

		self.info('first pass on %s', self.__class__.__name__)

		# Hash .aux files before even calling the LaTeX compiler
		cur_hash = self.hash_aux_nodes()

		self.call_latex()

		# Find the .aux files again since bibtex processing can require it
		self.hash_aux_nodes()

		# the auxiliary processors only act when their input files are present
		self.bibtopic()
		self.bibfile()
		self.bibunits()
		self.makeindex()
		self.makeglossaries()

		# re-run the compiler until the .aux files settle, with at most 10 passes
		for i in range(10):
			# There is no need to call latex again if the .aux hash value has not changed
			prev_hash = cur_hash
			cur_hash = self.hash_aux_nodes()
			if not cur_hash:
				Logs.error('No aux.h to process')
			if cur_hash and cur_hash == prev_hash:
				break

			# run the command
			self.info('calling %s', self.__class__.__name__)
			self.call_latex()
+
+       def hash_aux_nodes(self):
+               """
+               Returns a hash of the .aux file contents
+
+               :rtype: string or bytes
+               """
+               try:
+                       self.aux_nodes
+               except AttributeError:
+                       try:
+                               self.aux_nodes = self.scan_aux(self.inputs[0].change_ext('.aux'))
+                       except IOError:
+                               return None
+               return Utils.h_list([Utils.h_file(x.abspath()) for x in self.aux_nodes])
+
+       def call_latex(self):
+               """
+               Runs the TeX compiler once
+               """
+               self.env.env = {}
+               self.env.env.update(os.environ)
+               self.env.env.update({'TEXINPUTS': self.texinputs()})
+               self.env.SRCFILE = self.inputs[0].abspath()
+               self.check_status('error when calling latex', self.texfun())
+
class latex(tex):
	"Compiles LaTeX files into DVI documents"
	# ${SRCFILE} is set by tex.call_latex() just before each run
	texfun, vars = Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False)
+
class pdflatex(tex):
	"Compiles PdfLaTeX files"
	# ${SRCFILE} is set by tex.call_latex() just before each run
	texfun, vars =  Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False)
+
class xelatex(tex):
	"Compiles XeLaTeX files"
	# ${SRCFILE} is set by tex.call_latex() just before each run
	texfun, vars = Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}', shell=False)
+
class dvips(Task.Task):
	"Converts dvi files to postscript"
	run_str = '${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}'
	color   = 'BLUE'
	# run after the tex compilers so that the input file exists
	after   = ['latex', 'pdflatex', 'xelatex']
+
class dvipdf(Task.Task):
	"Converts dvi files to pdf"
	run_str = '${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}'
	color   = 'BLUE'
	# run after the tex compilers so that the input file exists
	after   = ['latex', 'pdflatex', 'xelatex']
+
class pdf2ps(Task.Task):
	"Converts pdf files to postscript"
	run_str = '${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}'
	color   = 'BLUE'
	# run after the tex compilers so that the input file exists
	after   = ['latex', 'pdflatex', 'xelatex']
+
@feature('tex')
@before_method('process_source')
def apply_tex(self):
	"""
	Creates :py:class:`waflib.Tools.tex.tex` objects, and
	dvips/dvipdf/pdf2ps tasks if necessary (outs='ps', etc).
	"""
	# default to pdflatex when no valid compiler type was requested
	if not getattr(self, 'type', None) in ('latex', 'pdflatex', 'xelatex'):
		self.type = 'pdflatex'

	outs = Utils.to_list(getattr(self, 'outs', []))

	# prompt for incomplete files (else the batchmode is used)
	try:
		self.generator.bld.conf
	except AttributeError:
		default_prompt = False
	else:
		default_prompt = True
	self.env.PROMPT_LATEX = getattr(self, 'prompt', default_prompt)

	# resolve the manual dependencies: 'deps' may hold path strings or nodes
	deps_lst = []

	if getattr(self, 'deps', None):
		deps = self.to_list(self.deps)
		for dep in deps:
			if isinstance(dep, str):
				n = self.path.find_resource(dep)
				if not n:
					self.bld.fatal('Could not find %r for %r' % (dep, self))
				if not n in deps_lst:
					deps_lst.append(n)
			elif isinstance(dep, Node.Node):
				deps_lst.append(dep)

	for node in self.to_nodes(self.source):
		# one compilation task per source document
		if self.type == 'latex':
			task = self.create_task('latex', node, node.change_ext('.dvi'))
		elif self.type == 'pdflatex':
			task = self.create_task('pdflatex', node, node.change_ext('.pdf'))
		elif self.type == 'xelatex':
			task = self.create_task('xelatex', node, node.change_ext('.pdf'))

		task.env = self.env

		# add the manual dependencies
		if deps_lst:
			for n in deps_lst:
				if not n in task.dep_nodes:
					task.dep_nodes.append(n)

		# texinputs is a nasty beast
		if hasattr(self, 'texinputs_nodes'):
			task.texinputs_nodes = self.texinputs_nodes
		else:
			# default search path: source/build folders of the document and of the wscript,
			# extended by any absolute folders found in TEXINPUTS
			task.texinputs_nodes = [node.parent, node.parent.get_bld(), self.path, self.path.get_bld()]
			lst = os.environ.get('TEXINPUTS', '')
			if self.env.TEXINPUTS:
				lst += os.pathsep + self.env.TEXINPUTS
			if lst:
				lst = lst.split(os.pathsep)
			for x in lst:
				if x:
					if os.path.isabs(x):
						p = self.bld.root.find_node(x)
						if p:
							task.texinputs_nodes.append(p)
						else:
							Logs.error('Invalid TEXINPUTS folder %s', x)
					else:
						Logs.error('Cannot resolve relative paths in TEXINPUTS %s', x)

		# conversion tasks for the requested output formats in 'outs'
		if self.type == 'latex':
			if 'ps' in outs:
				tsk = self.create_task('dvips', task.outputs, node.change_ext('.ps'))
				tsk.env.env = dict(os.environ)
			if 'pdf' in outs:
				tsk = self.create_task('dvipdf', task.outputs, node.change_ext('.pdf'))
				tsk.env.env = dict(os.environ)
		elif self.type == 'pdflatex':
			if 'ps' in outs:
				self.create_task('pdf2ps', task.outputs, node.change_ext('.ps'))
	# all source files were consumed by the tex tasks created above
	self.source = []
+
def configure(self):
	"""
	Looks for the TeX-related programs without raising errors when
	some of them are missing.
	"""
	progs = 'tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps makeglossaries'
	for prog in progs.split():
		try:
			self.find_program(prog, var=prog.upper())
		except self.errors.ConfigurationError:
			# optional program, keep going
			pass
	self.env.DVIPSFLAGS = '-Ppdf'
diff --git a/third_party/waf/waflib/Tools/vala.py b/third_party/waf/waflib/Tools/vala.py
new file mode 100644 (file)
index 0000000..f2154cd
--- /dev/null
@@ -0,0 +1,350 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Ali Sabil, 2007
+# RadosÅ‚aw SzkodziÅ„ski, 2010
+
+"""
+At this point, vala is still unstable, so do not expect
+this tool to be too stable either (apis, etc)
+"""
+
+import re
+from waflib import Context, Task, Utils, Logs, Options, Errors, Node
+from waflib.TaskGen import extension, taskgen_method
+from waflib.Configure import conf
+
class valac(Task.Task):
	"""
	Compiles vala files
	"""
	#run_str = "${VALAC} ${VALAFLAGS}" # ideally
	#vars = ['VALAC_VERSION']
	vars = ["VALAC", "VALAC_VERSION", "VALAFLAGS"]
	ext_out = ['.h']

	def run(self):
		"""
		Executes valac on the input files, then records the package list in the
		.deps file when one was declared on the task generator.
		"""
		cmd = self.env.VALAC + self.env.VALAFLAGS
		# nodes listed in vala_exclude (resources) are not passed as sources
		resources = getattr(self, 'vala_exclude', [])
		cmd.extend([a.abspath() for a in self.inputs if a not in resources])
		ret = self.exec_command(cmd, cwd=self.vala_dir_node.abspath())

		if ret:
			return ret

		if self.generator.dump_deps_node:
			self.generator.dump_deps_node.write('\n'.join(self.generator.packages))

		return ret
+
@taskgen_method
def init_vala_task(self):
	"""
	Initializes the vala task with the relevant data (acts as a constructor):
	output directory, compiler flags, installation entries, and the
	run-order/flag propagation for the packages listed in *use*.
	"""
	self.profile = getattr(self, 'profile', 'gobject')

	self.packages = packages = Utils.to_list(getattr(self, 'packages', []))
	self.use = Utils.to_list(getattr(self, 'use', []))
	if packages and not self.use:
		self.use = packages[:] # copy

	if self.profile == 'gobject':
		if not 'GOBJECT' in self.use:
			self.use.append('GOBJECT')

	def addflags(flags):
		# shortcut for appending to VALAFLAGS
		self.env.append_value('VALAFLAGS', flags)

	if self.profile:
		addflags('--profile=%s' % self.profile)

	valatask = self.valatask

	# output directory
	if hasattr(self, 'vala_dir'):
		if isinstance(self.vala_dir, str):
			valatask.vala_dir_node = self.path.get_bld().make_node(self.vala_dir)
			try:
				valatask.vala_dir_node.mkdir()
			except OSError:
				raise self.bld.fatal('Cannot create the vala dir %r' % valatask.vala_dir_node)
		else:
			valatask.vala_dir_node = self.vala_dir
	else:
		valatask.vala_dir_node = self.path.get_bld()
	addflags('--directory=%s' % valatask.vala_dir_node.abspath())

	if hasattr(self, 'thread'):
		if self.profile == 'gobject':
			if not 'GTHREAD' in self.use:
				self.use.append('GTHREAD')
		else:
			#Vala doesn't have threading support for dova nor posix
			Logs.warn('Profile %s means no threading support', self.profile)
			self.thread = False

		if self.thread:
			addflags('--thread')

	# libraries get extra outputs: header, vapi, and possibly gir
	self.is_lib = 'cprogram' not in self.features
	if self.is_lib:
		addflags('--library=%s' % self.target)

		h_node = valatask.vala_dir_node.find_or_declare('%s.h' % self.target)
		valatask.outputs.append(h_node)
		addflags('--header=%s' % h_node.name)

		valatask.outputs.append(valatask.vala_dir_node.find_or_declare('%s.vapi' % self.target))

		if getattr(self, 'gir', None):
			gir_node = valatask.vala_dir_node.find_or_declare('%s.gir' % self.gir)
			addflags('--gir=%s' % gir_node.name)
			valatask.outputs.append(gir_node)

	self.vala_target_glib = getattr(self, 'vala_target_glib', getattr(Options.options, 'vala_target_glib', None))
	if self.vala_target_glib:
		addflags('--target-glib=%s' % self.vala_target_glib)

	addflags(['--define=%s' % x for x in Utils.to_list(getattr(self, 'vala_defines', []))])

	packages_private = Utils.to_list(getattr(self, 'packages_private', []))
	addflags(['--pkg=%s' % x for x in packages_private])

	def _get_api_version():
		# derive the API version from the project's API_VERSION, defaulting to 1.0
		api_version = '1.0'
		if hasattr(Context.g_module, 'API_VERSION'):
			version = Context.g_module.API_VERSION.split(".")
			if version[0] == "0":
				api_version = "0." + version[1]
			else:
				api_version = version[0] + ".0"
		return api_version

	self.includes = Utils.to_list(getattr(self, 'includes', []))
	valatask.install_path = getattr(self, 'install_path', '')

	valatask.vapi_path = getattr(self, 'vapi_path', '${DATAROOTDIR}/vala/vapi')
	valatask.pkg_name = getattr(self, 'pkg_name', self.env.PACKAGE)
	valatask.header_path = getattr(self, 'header_path', '${INCLUDEDIR}/%s-%s' % (valatask.pkg_name, _get_api_version()))
	valatask.install_binding = getattr(self, 'install_binding', True)

	self.vapi_dirs = vapi_dirs = Utils.to_list(getattr(self, 'vapi_dirs', []))
	#includes =  []

	# walk the 'use' graph (breadth through a worklist) to collect vapi
	# outputs of the referenced task generators and set the run order
	if hasattr(self, 'use'):
		local_packages = Utils.to_list(self.use)[:] # make sure to have a copy
		seen = []
		while len(local_packages) > 0:
			package = local_packages.pop()
			if package in seen:
				continue
			seen.append(package)

			# check if the package exists
			try:
				package_obj = self.bld.get_tgen_by_name(package)
			except Errors.WafError:
				continue
			package_name = package_obj.target
			for task in package_obj.tasks:
				for output in task.outputs:
					if output.name == package_name + ".vapi":
						valatask.set_run_after(task)
						if package_name not in packages:
							packages.append(package_name)
						if output.parent not in vapi_dirs:
							vapi_dirs.append(output.parent)
						if output.parent not in self.includes:
							self.includes.append(output.parent)

			if hasattr(package_obj, 'use'):
				lst = self.to_list(package_obj.use)
				lst.reverse()
				local_packages = [pkg for pkg in lst if pkg not in seen] + local_packages

	addflags(['--pkg=%s' % p for p in packages])

	for vapi_dir in vapi_dirs:
		if isinstance(vapi_dir, Node.Node):
			v_node = vapi_dir
		else:
			v_node = self.path.find_dir(vapi_dir)
		if not v_node:
			Logs.warn('Unable to locate Vala API directory: %r', vapi_dir)
		else:
			addflags('--vapidir=%s' % v_node.abspath())

	self.dump_deps_node = None
	if self.is_lib and self.packages:
		self.dump_deps_node = valatask.vala_dir_node.find_or_declare('%s.deps' % self.target)
		valatask.outputs.append(self.dump_deps_node)

	# installation entries for the generated binding files; the try/except
	# pattern reuses the entries when this function runs more than once
	if self.is_lib and valatask.install_binding:
		headers_list = [o for o in valatask.outputs if o.suffix() == ".h"]
		try:
			self.install_vheader.source = headers_list
		except AttributeError:
			self.install_vheader = self.add_install_files(install_to=valatask.header_path, install_from=headers_list)

		vapi_list = [o for o in valatask.outputs if (o.suffix() in (".vapi", ".deps"))]
		try:
			self.install_vapi.source = vapi_list
		except AttributeError:
			self.install_vapi = self.add_install_files(install_to=valatask.vapi_path, install_from=vapi_list)

		gir_list = [o for o in valatask.outputs if o.suffix() == '.gir']
		try:
			self.install_gir.source = gir_list
		except AttributeError:
			self.install_gir = self.add_install_files(
				install_to=getattr(self, 'gir_path', '${DATAROOTDIR}/gir-1.0'), install_from=gir_list)

	if hasattr(self, 'vala_resources'):
		nodes = self.to_nodes(self.vala_resources)
		valatask.vala_exclude = getattr(valatask, 'vala_exclude', []) + nodes
		valatask.inputs.extend(nodes)
		for x in nodes:
			addflags(['--gresources', x.abspath()])
+
@extension('.vala', '.gs')
def vala_file(self, node):
	"""
	Compiles a vala file and binds the task to *self.valatask*. If an existing vala task is already set, add the node
	to its inputs. The typical example is::

		def build(bld):
			bld.program(
				packages      = 'gtk+-2.0',
				target        = 'vala-gtk-example',
				use           = 'GTK GLIB',
				source        = 'vala-gtk-example.vala foo.vala',
				vala_defines  = ['DEBUG'] # adds --define=<xyz> values to the command-line

				# the following arguments are for libraries
				#gir          = 'hello-1.0',
				#gir_path     = '/tmp',
				#vapi_path = '/tmp',
				#pkg_name = 'hello'
				# disable installing of gir, vapi and header
				#install_binding = False

				# profile     = 'xyz' # adds --profile=<xyz> to enable profiling
				# thread      = True, # adds --thread, except if profile is on or not on 'gobject'
				# vala_target_glib = 'xyz' # adds --target-glib=<xyz>, can be given through the command-line option --vala-target-glib=<xyz>
			)


	:param node: vala file
	:type node: :py:class:`waflib.Node.Node`
	"""
	# a single valac invocation handles all the vala files of the task generator
	if not hasattr(self, 'valatask'):
		self.valatask = self.create_task('valac')
		self.init_vala_task()
	valatask = self.valatask

	valatask.inputs.append(node)
	# map foo.vala -> foo.c in the vala output directory
	c_name = node.name[:node.name.rfind('.')] + '.c'
	c_node = valatask.vala_dir_node.find_or_declare(c_name)
	valatask.outputs.append(c_node)
	# the generated C file is then compiled by the C/C++ support
	self.source.append(c_node)
+
+@conf
def find_valac(self, valac_name, min_version):
	"""
	Finds the valac program, and executes it to store the version
	number in *conf.env.VALAC_VERSION*

	:param valac_name: program name
	:type valac_name: string or list of string
	:param min_version: minimum version acceptable
	:type min_version: tuple of int
	:return: the program found
	:raises: ConfigurationError when the version cannot be determined or is too old
	"""
	valac = self.find_program(valac_name, var='VALAC')
	try:
		output = self.cmd_and_log(valac + ['--version'])
	except Exception:
		valac_version = None
	else:
		# escape the dots: a bare '.' matches any character and could
		# accept a bogus version string such as '1a2b3'
		match = re.search(r'\d+\.\d+\.\d+', output)
		if match:
			valac_version = tuple([int(x) for x in match.group().split('.')])
		else:
			valac_version = None

	self.msg('Checking for %s version >= %r' % (valac_name, min_version),
	         valac_version, valac_version and valac_version >= min_version)
	# comparing None with a tuple raises a TypeError on Python 3, so an
	# unknown version is treated as too old instead of crashing
	if valac and (valac_version is None or valac_version < min_version):
		self.fatal("%s version %r is too old, need >= %r" % (valac_name, valac_version, min_version))

	self.env.VALAC_VERSION = valac_version
	return valac
+
@conf
def check_vala(self, min_version=(0,8,0), branch=None):
	"""
	Checks if a vala compiler from a given branch exists with at least
	the given version.

	:param min_version: minimum version acceptable (0.8.0)
	:type min_version: tuple
	:param branch: first part of the version number, in case a snapshot is used (0, 8)
	:type branch: tuple of int
	"""
	# project-level overrides take precedence over the arguments
	min_version = self.env.VALA_MINVER or min_version
	branch = self.env.VALA_MINVER_BRANCH or branch or min_version[:2]
	try:
		# prefer the branch-specific binary, e.g. valac-0.8
		find_valac(self, 'valac-%d.%d' % (branch[0], branch[1]), min_version)
	except self.errors.ConfigurationError:
		find_valac(self, 'valac', min_version)
+
@conf
def check_vala_deps(self):
	"""
	Loads the gobject and gthread packages when they are not yet defined.
	"""
	target_glib = getattr(Options.options, 'vala_target_glib', None)
	for have, package, store in (
			(self.env.HAVE_GOBJECT, 'gobject-2.0', 'GOBJECT'),
			(self.env.HAVE_GTHREAD, 'gthread-2.0', 'GTHREAD')):
		if have:
			continue
		pkg_args = {'package':      package,
		            'uselib_store': store,
		            'args':         '--cflags --libs'}
		if target_glib:
			pkg_args['atleast_version'] = target_glib
		self.check_cfg(**pkg_args)
+
def configure(self):
	"""
	Use the following to enforce minimum vala version::

		def configure(conf):
			conf.env.VALA_MINVER = (0, 10, 0)
			conf.load('vala')
	"""
	# the installation paths (vapi, headers) depend on the gnu_dirs variables
	self.load('gnu_dirs')
	self.check_vala_deps()
	self.check_vala()
	# pick up VALAFLAGS from the environment, then force C code generation
	self.add_os_flags('VALAFLAGS')
	self.env.append_unique('VALAFLAGS', ['-C'])
+
def options(opt):
	"""
	Loads the :py:mod:`waflib.Tools.gnu_dirs` tool and adds the
	``--vala-target-glib`` command-line option.
	"""
	opt.load('gnu_dirs')
	group = opt.add_option_group('Vala Compiler Options')
	group.add_option('--vala-target-glib', default=None,
		dest='vala_target_glib', metavar='MAJOR.MINOR',
		help='Target version of glib for Vala GObject code generation')
diff --git a/third_party/waf/waflib/Tools/waf_unit_test.py b/third_party/waf/waflib/Tools/waf_unit_test.py
new file mode 100644 (file)
index 0000000..3e4fec0
--- /dev/null
@@ -0,0 +1,258 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Carlos Rafael Giani, 2006
+# Thomas Nagy, 2010-2016 (ita)
+
+"""
+Unit testing system for C/C++/D providing test execution:
+
+* in parallel, by using ``waf -j``
+* partial (only the tests that have changed) or full (by using ``waf --alltests``)
+
+The tests are declared by adding the **test** feature to programs::
+
+       def options(opt):
+               opt.load('compiler_cxx waf_unit_test')
+       def configure(conf):
+               conf.load('compiler_cxx waf_unit_test')
+       def build(bld):
+               bld(features='cxx cxxprogram test', source='main.cpp', target='app')
+               # or
+               bld.program(features='test', source='main2.cpp', target='app2')
+
+When the build is executed, the program 'test' will be built and executed without arguments.
+The success/failure is detected by looking at the return code. The status and the standard output/error
+are stored on the build context.
+
+The results can be displayed by registering a callback function. Here is how to call
+the predefined callback::
+
+       def build(bld):
+               bld(features='cxx cxxprogram test', source='main.c', target='app')
+               from waflib.Tools import waf_unit_test
+               bld.add_post_fun(waf_unit_test.summary)
+
+By passing --dump-test-scripts the build outputs corresponding python files
+(with extension _run.py) that are useful for debugging purposes.
+"""
+
+import os, sys
+from waflib.TaskGen import feature, after_method, taskgen_method
+from waflib import Utils, Task, Logs, Options
+from waflib.Tools import ccroot
+testlock = Utils.threading.Lock()
+
+SCRIPT_TEMPLATE = """#! %(python)s
+import subprocess, sys
+cmd = %(cmd)r
+# if you want to debug with gdb:
+#cmd = ['gdb', '-args'] + cmd
+env = %(env)r
+status = subprocess.call(cmd, env=env, cwd=%(cwd)r, shell=isinstance(cmd, str))
+sys.exit(status)
+"""
+
+@feature('test')
+@after_method('apply_link', 'process_use')
+def make_test(self):
+	"""Create the unit test task. There can be only one unit test task by task generator."""
+	# nothing to test if the generator produced no binary
+	if not getattr(self, 'link_task', None):
+		return
+
+	tsk = self.create_task('utest', self.link_task.outputs)
+	if getattr(self, 'ut_str', None):
+		# ut_str is a command template compiled into a callable (see Task.compile_fun)
+		self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False))
+		tsk.vars = lst + tsk.vars
+
+	if getattr(self, 'ut_cwd', None):
+		if isinstance(self.ut_cwd, str):
+			# we want a Node instance
+			if os.path.isabs(self.ut_cwd):
+				self.ut_cwd = self.bld.root.make_node(self.ut_cwd)
+			else:
+				self.ut_cwd = self.path.make_node(self.ut_cwd)
+	else:
+		# default: run the test from the directory containing the binary
+		self.ut_cwd = tsk.inputs[0].parent
+
+	if not hasattr(self, 'ut_paths'):
+		# collect the output folders of all shared libraries this test links
+		# against (tmp_use_sorted is set by process_use), for the loader path
+		paths = []
+		for x in self.tmp_use_sorted:
+			try:
+				y = self.bld.get_tgen_by_name(x).link_task
+			except AttributeError:
+				pass
+			else:
+				# static libs need no runtime search path
+				if not isinstance(y, ccroot.stlink_task):
+					paths.append(y.outputs[0].parent.abspath())
+		self.ut_paths = os.pathsep.join(paths) + os.pathsep
+
+	if not hasattr(self, 'ut_env'):
+		# prepend ut_paths to the platform's dynamic loader search variable
+		self.ut_env = dct = dict(os.environ)
+		def add_path(var):
+			dct[var] = self.ut_paths + dct.get(var,'')
+		if Utils.is_win32:
+			add_path('PATH')
+		elif Utils.unversioned_sys_platform() == 'darwin':
+			add_path('DYLD_LIBRARY_PATH')
+			add_path('LD_LIBRARY_PATH')
+		else:
+			add_path('LD_LIBRARY_PATH')
+
+@taskgen_method
+def add_test_results(self, tup):
+	"""Override and return tup[1] to interrupt the build immediately if a test does not run"""
+	Logs.debug("ut: %r", tup)
+	self.utest_result = tup
+	# utest_results is created lazily on the build context (EAFP)
+	try:
+		self.bld.utest_results.append(tup)
+	except AttributeError:
+		self.bld.utest_results = [tup]
+
+class utest(Task.Task):
+	"""
+	Execute a unit test
+	"""
+	color = 'PINK'
+	after = ['vnum', 'inst']
+	vars = []
+
+	def runnable_status(self):
+		"""
+		Always execute the task if `waf --alltests` was used or no
+		tests if ``waf --notests`` was used
+		"""
+		if getattr(Options.options, 'no_tests', False):
+			return Task.SKIP_ME
+
+		ret = super(utest, self).runnable_status()
+		if ret == Task.SKIP_ME:
+			# --alltests forces up-to-date tests to run again
+			if getattr(Options.options, 'all_tests', False):
+				return Task.RUN_ME
+		return ret
+
+	def get_test_env(self):
+		"""
+		In general, tests may require any library built anywhere in the project.
+		Override this method if fewer paths are needed
+		"""
+		return self.generator.ut_env
+
+	def post_run(self):
+		super(utest, self).post_run()
+		# waf_unit_test_results[1] is the process return code (set in exec_command);
+		# clearing the signature makes a failed test re-run on the next build
+		if getattr(Options.options, 'clear_failed_tests', False) and self.waf_unit_test_results[1]:
+			self.generator.bld.task_sigs[self.uid()] = None
+
+	def run(self):
+		"""
+		Execute the test. The execution is always successful, and the results
+		are stored on ``self.generator.bld.utest_results`` for postprocessing.
+
+		Override ``add_test_results`` to interrupt the build
+		"""
+		if hasattr(self.generator, 'ut_run'):
+			return self.generator.ut_run(self)
+
+		# TODO ut_exec, ut_fun, ut_cmd should be considered obsolete
+		self.ut_exec = getattr(self.generator, 'ut_exec', [self.inputs[0].abspath()])
+		if getattr(self.generator, 'ut_fun', None):
+			self.generator.ut_fun(self)
+		testcmd = getattr(self.generator, 'ut_cmd', False) or getattr(Options.options, 'testcmd', False)
+		if testcmd:
+			# testcmd is a %s template, e.g. 'valgrind --error-exitcode=1 %s'
+			self.ut_exec = (testcmd % ' '.join(self.ut_exec)).split(' ')
+
+		return self.exec_command(self.ut_exec)
+
+	def exec_command(self, cmd, **kw):
+		Logs.debug('runner: %r', cmd)
+		if getattr(Options.options, 'dump_test_scripts', False):
+			# write a standalone <binary>_run.py replaying this exact invocation
+			global SCRIPT_TEMPLATE
+			script_code = SCRIPT_TEMPLATE % {
+				'python': sys.executable,
+				'env': self.get_test_env(),
+				'cwd': self.get_cwd().abspath(), 'cmd': cmd
+			}
+			script_file = self.inputs[0].abspath() + '_run.py'
+			Utils.writef(script_file, script_code)
+			os.chmod(script_file, Utils.O755)
+			if Logs.verbose > 1:
+				Logs.info('Test debug file written as %r' % script_file)
+
+		proc = Utils.subprocess.Popen(cmd, cwd=self.get_cwd().abspath(), env=self.get_test_env(),
+			stderr=Utils.subprocess.PIPE, stdout=Utils.subprocess.PIPE)
+		(stdout, stderr) = proc.communicate()
+		self.waf_unit_test_results = tup = (self.inputs[0].abspath(), proc.returncode, stdout, stderr)
+		# tests run in parallel (waf -j); serialize access to bld.utest_results
+		testlock.acquire()
+		try:
+			return self.generator.add_test_results(tup)
+		finally:
+			testlock.release()
+
+	def get_cwd(self):
+		# Node set by make_test (explicit ut_cwd or the binary's folder)
+		return self.generator.ut_cwd
+
+def summary(bld):
+	"""
+	Display an execution summary::
+
+		def build(bld):
+			bld(features='cxx cxxprogram test', source='main.c', target='app')
+			from waflib.Tools import waf_unit_test
+			bld.add_post_fun(waf_unit_test.summary)
+	"""
+	lst = getattr(bld, 'utest_results', [])
+	if lst:
+		Logs.pprint('CYAN', 'execution summary')
+
+		total = len(lst)
+		# each entry is (path, returncode, stdout, stderr); non-zero code = failure
+		tfail = len([x for x in lst if x[1]])
+
+		Logs.pprint('CYAN', '  tests that pass %d/%d' % (total-tfail, total))
+		for (f, code, out, err) in lst:
+			if not code:
+				Logs.pprint('CYAN', '    %s' % f)
+
+		Logs.pprint('CYAN', '  tests that fail %d/%d' % (tfail, total))
+		for (f, code, out, err) in lst:
+			if code:
+				Logs.pprint('CYAN', '    %s' % f)
+
+def set_exit_code(bld):
+	"""
+	If any of the tests fail waf will exit with that exit code.
+	This is useful if you have an automated build system which need
+	to report on errors from the tests.
+	You may use it like this:
+
+		def build(bld):
+			bld(features='cxx cxxprogram test', source='main.c', target='app')
+			from waflib.Tools import waf_unit_test
+			bld.add_post_fun(waf_unit_test.set_exit_code)
+	"""
+	lst = getattr(bld, 'utest_results', [])
+	for (f, code, out, err) in lst:
+		if code:
+			# report the captured output of the first failing test and abort
+			msg = []
+			if out:
+				msg.append('stdout:%s%s' % (os.linesep, out.decode('utf-8')))
+			if err:
+				msg.append('stderr:%s%s' % (os.linesep, err.decode('utf-8')))
+			bld.fatal(os.linesep.join(msg))
+
+
+def options(opt):
+	"""
+	Provide the ``--notests``, ``--alltests``, ``--clear-failed``, ``--testcmd``
+	and ``--dump-test-scripts`` command-line options.
+	"""
+	opt.add_option('--notests', action='store_true', default=False, help='Exec no unit tests', dest='no_tests')
+	opt.add_option('--alltests', action='store_true', default=False, help='Exec all unit tests', dest='all_tests')
+	opt.add_option('--clear-failed', action='store_true', default=False, help='Force failed unit tests to run again next time', dest='clear_failed_tests')
+	# the help text must refer to the real option name (--testcmd, not --test-cmd)
+	opt.add_option('--testcmd', action='store', default=False,
+	 help = 'Run the unit tests using the test-cmd string'
+	 ' example "--testcmd="valgrind --error-exitcode=1'
+	 ' %s" to run under valgrind', dest='testcmd')
+	opt.add_option('--dump-test-scripts', action='store_true', default=False,
+	 help='Create python scripts to help debug tests', dest='dump_test_scripts')
diff --git a/third_party/waf/waflib/Tools/winres.py b/third_party/waf/waflib/Tools/winres.py
new file mode 100644 (file)
index 0000000..a437637
--- /dev/null
@@ -0,0 +1,80 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Brant Young, 2007
+
+"Process *.rc* files for C/C++: X{.rc -> [.res|.rc.o]}"
+
+import re
+from waflib import Task
+from waflib.TaskGen import extension
+from waflib.Tools import c_preproc
+
+@extension('.rc')
+def rc_file(self, node):
+	"""
+	Binds the .rc extension to a winrc task
+	"""
+	obj_ext = '.rc.o'
+	# msvc's rc.exe produces .res files; windres produces object files
+	if self.env.WINRC_TGT_F == '/fo':
+		obj_ext = '.res'
+	rctask = self.create_task('winrc', node, node.change_ext(obj_ext))
+	# register with the link step; the list is created lazily (EAFP)
+	try:
+		self.compiled_tasks.append(rctask)
+	except AttributeError:
+		self.compiled_tasks = [rctask]
+
+re_lines = re.compile(
+       '(?:^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*?)\s*$)|'\
+       '(?:^\w+[ \t]*(ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)[ \t]*(.*?)\s*$)',
+       re.IGNORECASE | re.MULTILINE)
+
+class rc_parser(c_preproc.c_parser):
+	"""
+	Calculates dependencies in .rc files
+	"""
+	def filter_comments(self, node):
+		"""
+		Overrides :py:meth:`waflib.Tools.c_preproc.c_parser.filter_comments`
+		"""
+		code = node.read()
+		if c_preproc.use_trigraphs:
+			for (a, b) in c_preproc.trig_def: code = code.split(a).join(b)
+		# strip line continuations and comments before matching directives
+		code = c_preproc.re_nl.sub('', code)
+		code = c_preproc.re_cpp.sub(c_preproc.repl, code)
+		ret = []
+		for m in re.finditer(re_lines, code):
+			if m.group(2):
+				# preprocessor directive: (keyword, rest-of-line)
+				ret.append((m.group(2), m.group(3)))
+			else:
+				# resource statement (ICON, BITMAP, ...) treated as an include
+				ret.append(('include', m.group(5)))
+		return ret
+
+class winrc(Task.Task):
+	"""
+	Compiles resource files
+	"""
+	run_str = '${WINRC} ${WINRCFLAGS} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}'
+	color   = 'BLUE'
+	def scan(self):
+		# scan the .rc file for #include/resource dependencies
+		tmp = rc_parser(self.generator.includes_nodes)
+		tmp.start(self.inputs[0], self.env)
+		return (tmp.nodes, tmp.names)
+
+def configure(conf):
+	"""
+	Detects the programs RC or windres, depending on the C/C++ compiler in use
+	"""
+	v = conf.env
+	if not v.WINRC:
+		if v.CC_NAME == 'msvc':
+			conf.find_program('RC', var='WINRC', path_list=v.PATH)
+			v.WINRC_TGT_F = '/fo'
+			v.WINRC_SRC_F = ''
+		else:
+			# gcc/clang toolchains: use binutils windres
+			conf.find_program('windres', var='WINRC', path_list=v.PATH)
+			v.WINRC_TGT_F = '-o'
+			v.WINRC_SRC_F = '-i'
diff --git a/third_party/waf/waflib/Tools/xlc.py b/third_party/waf/waflib/Tools/xlc.py
new file mode 100644 (file)
index 0000000..c2e2ab6
--- /dev/null
@@ -0,0 +1,68 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2016 (ita)
+# Ralf Habacker, 2006 (rh)
+# Yinon Ehrlich, 2009
+# Michael Kuhn, 2009
+
+from waflib.Tools import ccroot, ar
+from waflib.Configure import conf
+
+@conf
+def find_xlc(conf):
+	"""
+	Detects the Aix C compiler
+	"""
+	# prefer the thread-safe xlc_r variant
+	cc = conf.find_program(['xlc_r', 'xlc'], var='CC')
+	conf.get_xlc_version(cc)
+	conf.env.CC_NAME = 'xlc'
+
+@conf
+def xlc_common_flags(conf):
+	"""
+	Flags required for executing the Aix C compiler
+	"""
+	v = conf.env
+
+	v.CC_SRC_F            = []
+	v.CC_TGT_F            = ['-c', '-o']
+
+	if not v.LINK_CC:
+		v.LINK_CC = v.CC
+
+	v.CCLNK_SRC_F         = []
+	v.CCLNK_TGT_F         = ['-o']
+	v.CPPPATH_ST          = '-I%s'
+	v.DEFINES_ST          = '-D%s'
+
+	v.LIB_ST              = '-l%s' # template for adding libs
+	v.LIBPATH_ST          = '-L%s' # template for adding libpaths
+	v.STLIB_ST            = '-l%s'
+	v.STLIBPATH_ST        = '-L%s'
+	v.RPATH_ST            = '-Wl,-rpath,%s'
+
+	v.SONAME_ST           = []
+	v.SHLIB_MARKER        = []
+	v.STLIB_MARKER        = []
+
+	# -brtl enables AIX runtime linking
+	v.LINKFLAGS_cprogram  = ['-Wl,-brtl']
+	v.cprogram_PATTERN    = '%s'
+
+	v.CFLAGS_cshlib       = ['-fPIC']
+	v.LINKFLAGS_cshlib    = ['-G', '-Wl,-brtl,-bexpfull']
+	v.cshlib_PATTERN      = 'lib%s.so'
+
+	v.LINKFLAGS_cstlib    = []
+	v.cstlib_PATTERN      = 'lib%s.a'
+
+def configure(conf):
+	"""
+	Configuration for the AIX xlc compiler: detection, archiver, flags
+	"""
+	conf.find_xlc()
+	conf.find_ar()
+	conf.xlc_common_flags()
+	conf.cc_load_tools()
+	conf.cc_add_flags()
+	conf.link_add_flags()
diff --git a/third_party/waf/waflib/Tools/xlcxx.py b/third_party/waf/waflib/Tools/xlcxx.py
new file mode 100644 (file)
index 0000000..d7cafc0
--- /dev/null
@@ -0,0 +1,68 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2016 (ita)
+# Ralf Habacker, 2006 (rh)
+# Yinon Ehrlich, 2009
+# Michael Kuhn, 2009
+
+from waflib.Tools import ccroot, ar
+from waflib.Configure import conf
+
+@conf
+def find_xlcxx(conf):
+	"""
+	Detects the Aix C++ compiler
+	"""
+	# prefer the thread-safe xlc++_r variant
+	cxx = conf.find_program(['xlc++_r', 'xlc++'], var='CXX')
+	conf.get_xlc_version(cxx)
+	conf.env.CXX_NAME = 'xlc++'
+
+@conf
+def xlcxx_common_flags(conf):
+	"""
+	Flags required for executing the Aix C++ compiler
+	"""
+	v = conf.env
+
+	v.CXX_SRC_F           = []
+	v.CXX_TGT_F           = ['-c', '-o']
+
+	if not v.LINK_CXX:
+		v.LINK_CXX = v.CXX
+
+	v.CXXLNK_SRC_F        = []
+	v.CXXLNK_TGT_F        = ['-o']
+	v.CPPPATH_ST          = '-I%s'
+	v.DEFINES_ST          = '-D%s'
+
+	v.LIB_ST              = '-l%s' # template for adding libs
+	v.LIBPATH_ST          = '-L%s' # template for adding libpaths
+	v.STLIB_ST            = '-l%s'
+	v.STLIBPATH_ST        = '-L%s'
+	v.RPATH_ST            = '-Wl,-rpath,%s'
+
+	v.SONAME_ST           = []
+	v.SHLIB_MARKER        = []
+	v.STLIB_MARKER        = []
+
+	# -brtl enables AIX runtime linking
+	v.LINKFLAGS_cxxprogram= ['-Wl,-brtl']
+	v.cxxprogram_PATTERN  = '%s'
+
+	v.CXXFLAGS_cxxshlib   = ['-fPIC']
+	v.LINKFLAGS_cxxshlib  = ['-G', '-Wl,-brtl,-bexpfull']
+	v.cxxshlib_PATTERN    = 'lib%s.so'
+
+	v.LINKFLAGS_cxxstlib  = []
+	v.cxxstlib_PATTERN    = 'lib%s.a'
+
+def configure(conf):
+	"""
+	Configuration for the AIX xlc++ compiler: detection, archiver, flags
+	"""
+	conf.find_xlcxx()
+	conf.find_ar()
+	conf.xlcxx_common_flags()
+	conf.cxx_load_tools()
+	conf.cxx_add_flags()
+	conf.link_add_flags()
diff --git a/third_party/waf/waflib/Utils.py b/third_party/waf/waflib/Utils.py
new file mode 100644 (file)
index 0000000..81353bf
--- /dev/null
@@ -0,0 +1,993 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2016 (ita)
+
+"""
+Utilities and platform-specific fixes
+
+The portability fixes try to provide a consistent behavior of the Waf API
+through Python versions 2.5 to 3.X and across different platforms (win32, linux, etc)
+"""
+
+import atexit, os, sys, errno, traceback, inspect, re, datetime, platform, base64, signal, functools
+
+try:
+       import cPickle
+except ImportError:
+       import pickle as cPickle
+
+# leave this
+if os.name == 'posix' and sys.version_info[0] < 3:
+       try:
+               import subprocess32 as subprocess
+       except ImportError:
+               import subprocess
+else:
+       import subprocess
+
+try:
+       TimeoutExpired = subprocess.TimeoutExpired
+except AttributeError:
+       class TimeoutExpired(object):
+               pass
+
+from collections import deque, defaultdict
+
+try:
+       import _winreg as winreg
+except ImportError:
+       try:
+               import winreg
+       except ImportError:
+               winreg = None
+
+from waflib import Errors
+
+try:
+       from hashlib import md5
+except ImportError:
+       try:
+               from md5 import md5
+       except ImportError:
+               # never fail to enable fixes from another module
+               pass
+
+try:
+       import threading
+except ImportError:
+       if not 'JOBS' in os.environ:
+               # no threading :-(
+               os.environ['JOBS'] = '1'
+
+       class threading(object):
+               """
+               A fake threading class for platforms lacking the threading module.
+               Use ``waf -j1`` on those platforms
+               """
+               pass
+       class Lock(object):
+               """Fake Lock class"""
+               def acquire(self):
+                       pass
+               def release(self):
+                       pass
+       threading.Lock = threading.Thread = Lock
+
+SIG_NIL = 'SIG_NIL_SIG_NIL_'
+"""Arbitrary null value for hashes. Modify this value according to the hash function in use"""
+
+O644 = 420
+"""Constant representing the permissions for regular files (0644 raises a syntax error on python 3)"""
+
+O755 = 493
+"""Constant representing the permissions for executable files (0755 raises a syntax error on python 3)"""
+
+rot_chr = ['\\', '|', '/', '-']
+"List of characters to use when displaying the throbber (progress bar)"
+
+rot_idx = 0
+"Index of the current throbber character (progress bar)"
+
+class ordered_iter_dict(dict):
+	"""Ordered dictionary that provides iteration from the most recently inserted keys first"""
+	def __init__(self, *k, **kw):
+		# lst records insertion order; iteration reverses it (newest first)
+		self.lst = deque()
+		dict.__init__(self, *k, **kw)
+	def clear(self):
+		dict.clear(self)
+		self.lst = deque()
+	def __setitem__(self, key, value):
+		# re-inserting a key moves it to the "most recent" position
+		if key in dict.keys(self):
+			self.lst.remove(key)
+		dict.__setitem__(self, key, value)
+		self.lst.append(key)
+	def __delitem__(self, key):
+		dict.__delitem__(self, key)
+		try:
+			self.lst.remove(key)
+		except ValueError:
+			pass
+	def __iter__(self):
+		return reversed(self.lst)
+	def keys(self):
+		return reversed(self.lst)
+
+class lru_node(object):
+	"""
+	Doubly-linked list node used by :py:class:`waflib.Utils.lru_cache`
+	"""
+	__slots__ = ('next', 'prev', 'key', 'val')
+	def __init__(self):
+		# a fresh node is its own circular list
+		self.next = self
+		self.prev = self
+		self.key = None
+		self.val = None
+
+class lru_cache(object):
+	"""
+	A simple least-recently used cache with lazy allocation
+	(circular doubly-linked list; ``head`` is the most recently used node)
+	"""
+	__slots__ = ('maxlen', 'table', 'head')
+	def __init__(self, maxlen=100):
+		self.maxlen = maxlen
+		"""
+		Maximum amount of elements in the cache
+		"""
+		self.table = {}
+		"""
+		Mapping key-value
+		"""
+		self.head = lru_node()
+		self.head.next = self.head
+		self.head.prev = self.head
+
+	def __getitem__(self, key):
+		# raises KeyError for a missing key, like a plain dict
+		node = self.table[key]
+		# assert(key==node.key)
+		if node is self.head:
+			return node.val
+
+		# detach the node found
+		node.prev.next = node.next
+		node.next.prev = node.prev
+
+		# replace the head
+		node.next = self.head.next
+		node.prev = self.head
+		self.head = node.next.prev = node.prev.next = node
+
+		return node.val
+
+	def __setitem__(self, key, val):
+		if key in self.table:
+			# update the value for an existing key
+			node = self.table[key]
+			node.val = val
+			# __getitem__ promotes the node to most-recently-used
+			self.__getitem__(key)
+		else:
+			if len(self.table) < self.maxlen:
+				# the very first item is unused until the maximum is reached
+				node = lru_node()
+				node.prev = self.head
+				node.next = self.head.next
+				node.prev.next = node.next.prev = node
+			else:
+				# cache full: recycle the least-recently-used node in place
+				node = self.head = self.head.next
+				try:
+					# that's another key
+					del self.table[node.key]
+				except KeyError:
+					pass
+
+			node.key = key
+			node.val = val
+			self.table[key] = node
+
+is_win32 = os.sep == '\\' or sys.platform == 'win32' # msys2
+"""
+Whether this system is a Windows series
+"""
+
+def readf(fname, m='r', encoding='ISO8859-1'):
+	"""
+	Reads an entire file into a string. See also :py:meth:`waflib.Node.Node.readf`::
+
+		def build(ctx):
+			from waflib import Utils
+			txt = Utils.readf(self.path.find_node('wscript').abspath())
+			txt = ctx.path.find_node('wscript').read()
+
+	:type  fname: string
+	:param fname: Path to file
+	:type  m: string
+	:param m: Open mode
+	:type encoding: string
+	:param encoding: encoding value, only used for python 3
+	:rtype: string
+	:return: Content of the file
+	"""
+
+	# on python 3, text modes are read as bytes and decoded explicitly so the
+	# encoding is under waf's control rather than the locale's
+	if sys.hexversion > 0x3000000 and not 'b' in m:
+		m += 'b'
+		f = open(fname, m)
+		try:
+			txt = f.read()
+		finally:
+			f.close()
+		if encoding:
+			txt = txt.decode(encoding)
+		else:
+			txt = txt.decode()
+	else:
+		f = open(fname, m)
+		try:
+			txt = f.read()
+		finally:
+			f.close()
+	return txt
+
+def writef(fname, data, m='w', encoding='ISO8859-1'):
+	"""
+	Writes an entire file from a string.
+	See also :py:meth:`waflib.Node.Node.writef`::
+
+		def build(ctx):
+			from waflib import Utils
+			txt = Utils.writef(self.path.make_node('i_like_kittens').abspath(), 'some data')
+			self.path.make_node('i_like_kittens').write('some data')
+
+	:type  fname: string
+	:param fname: Path to file
+	:type   data: string
+	:param  data: The contents to write to the file
+	:type  m: string
+	:param m: Open mode
+	:type encoding: string
+	:param encoding: encoding value, only used for python 3
+	"""
+	# python 3: encode explicitly and write in binary mode (mirrors readf)
+	if sys.hexversion > 0x3000000 and not 'b' in m:
+		data = data.encode(encoding)
+		m += 'b'
+	f = open(fname, m)
+	try:
+		f.write(data)
+	finally:
+		f.close()
+
+def h_file(fname):
+	"""
+	Computes a hash value for a file by using md5. Use the md5_tstamp
+	extension to get faster build hashes if necessary.
+
+	:type fname: string
+	:param fname: path to the file to hash
+	:return: hash of the file contents
+	:rtype: string or bytes
+	"""
+	f = open(fname, 'rb')
+	m = md5()
+	try:
+		# NOTE: fname is deliberately reused as the read buffer; the loop
+		# stops when read() returns an empty (falsy) chunk
+		while fname:
+			fname = f.read(200000)
+			m.update(fname)
+	finally:
+		f.close()
+	return m.digest()
+
+def readf_win32(f, m='r', encoding='ISO8859-1'):
+	# win32 variant of readf: opens with O_NOINHERIT so the handle is not
+	# leaked to child processes
+	flags = os.O_NOINHERIT | os.O_RDONLY
+	if 'b' in m:
+		flags |= os.O_BINARY
+	if '+' in m:
+		flags |= os.O_RDWR
+	try:
+		fd = os.open(f, flags)
+	except OSError:
+		raise IOError('Cannot read from %r' % f)
+
+	# python 3: read bytes and decode explicitly (same scheme as readf)
+	if sys.hexversion > 0x3000000 and not 'b' in m:
+		m += 'b'
+		f = os.fdopen(fd, m)
+		try:
+			txt = f.read()
+		finally:
+			f.close()
+		if encoding:
+			txt = txt.decode(encoding)
+		else:
+			txt = txt.decode()
+	else:
+		f = os.fdopen(fd, m)
+		try:
+			txt = f.read()
+		finally:
+			f.close()
+	return txt
+
+def writef_win32(f, data, m='w', encoding='ISO8859-1'):
+	# win32 variant of writef: O_NOINHERIT keeps the handle out of subprocesses
+	if sys.hexversion > 0x3000000 and not 'b' in m:
+		data = data.encode(encoding)
+		m += 'b'
+	flags = os.O_CREAT | os.O_TRUNC | os.O_WRONLY | os.O_NOINHERIT
+	if 'b' in m:
+		flags |= os.O_BINARY
+	if '+' in m:
+		flags |= os.O_RDWR
+	try:
+		fd = os.open(f, flags)
+	except OSError:
+		raise OSError('Cannot write to %r' % f)
+	f = os.fdopen(fd, m)
+	try:
+		f.write(data)
+	finally:
+		f.close()
+
+def h_file_win32(fname):
+	# win32 variant of h_file, using a non-inheritable handle
+	try:
+		fd = os.open(fname, os.O_BINARY | os.O_RDONLY | os.O_NOINHERIT)
+	except OSError:
+		raise OSError('Cannot read from %r' % fname)
+	f = os.fdopen(fd, 'rb')
+	m = md5()
+	try:
+		# fname is reused as the read buffer, as in h_file
+		while fname:
+			fname = f.read(200000)
+			m.update(fname)
+	finally:
+		f.close()
+	return m.digest()
+
+# always save these
+readf_unix = readf
+writef_unix = writef
+h_file_unix = h_file
+if hasattr(os, 'O_NOINHERIT') and sys.hexversion < 0x3040000:
+       # replace the default functions
+       readf = readf_win32
+       writef = writef_win32
+       h_file = h_file_win32
+
+try:
+       x = ''.encode('hex')
+except LookupError:
+       import binascii
+       def to_hex(s):
+               ret = binascii.hexlify(s)
+               if not isinstance(ret, str):
+                       ret = ret.decode('utf-8')
+               return ret
+else:
+       def to_hex(s):
+               return s.encode('hex')
+
+to_hex.__doc__ = """
+Return the hexadecimal representation of a string
+
+:param s: string to convert
+:type s: string
+"""
+
+def listdir_win32(s):
+	"""
+	Lists the contents of a folder in a portable manner.
+	On Win32, returns the list of drive letters: ['C:', 'X:', 'Z:'] when an empty string is given.
+
+	:type s: string
+	:param s: a string, which can be empty on Windows
+	"""
+	if not s:
+		try:
+			import ctypes
+		except ImportError:
+			# there is nothing much we can do
+			return [x + ':\\' for x in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ']
+		else:
+			dlen = 4 # length of "?:\\x00"
+			maxdrives = 26
+			buf = ctypes.create_string_buffer(maxdrives * dlen)
+			ndrives = ctypes.windll.kernel32.GetLogicalDriveStringsA(maxdrives*dlen, ctypes.byref(buf))
+			# each entry is 4 bytes: drive letter, ':', '\\', NUL
+			return [ str(buf.raw[4*i:4*i+2].decode('ascii')) for i in range(int(ndrives/dlen)) ]
+
+	# 'C:' alone refers to the drive's current directory; force the root
+	if len(s) == 2 and s[1] == ":":
+		s += os.sep
+
+	if not os.path.isdir(s):
+		e = OSError('%s is not a directory' % s)
+		e.errno = errno.ENOENT
+		raise e
+	return os.listdir(s)
+
+listdir = os.listdir
+if is_win32:
+       listdir = listdir_win32
+
+def num2ver(ver):
+	"""
+	Converts a string, tuple or version number into an integer. The number is supposed to have at most 4 digits::
+
+		from waflib.Utils import num2ver
+		num2ver('1.3.2') == num2ver((1,3,2)) == num2ver((1,3,2,0))
+
+	:type ver: string or tuple of numbers
+	:param ver: a version number
+	"""
+	if isinstance(ver, str):
+		ver = tuple(ver.split('.'))
+	if isinstance(ver, tuple):
+		# pack up to 4 components into one integer, base 256 per component
+		ret = 0
+		for i in range(4):
+			if i < len(ver):
+				ret += 256**(3 - i) * int(ver[i])
+		return ret
+	# already an integer: pass through unchanged
+	return ver
+
def ex_stack():
	"""
	Extracts the stack to display exceptions. Deprecated: use traceback.format_exc()

	:return: a string representing the last exception
	"""
	# kept only for backward compatibility; scheduled for removal in waf 2.0
	return traceback.format_exc()
+
def to_list(val):
	"""
	Converts a string argument to a list by splitting it by spaces.
	Returns the object if not a string::

		from waflib.Utils import to_list
		lst = to_list('a b c d')

	:param val: list of string or space-separated string
	:rtype: list
	:return: Argument converted to list
	"""
	return val.split() if isinstance(val, str) else val
+
def split_path_unix(path):
	# POSIX variant: '/' is the only separator
	return path.split('/')
+
def split_path_cygwin(path):
	# '//server/share' UNC paths: keep the leading '//' glued to the first component
	if path.startswith('//'):
		parts = path.split('/')[2:]
		parts[0] = '/' + parts[0]
		return parts
	return path.split('/')
+
re_sp = re.compile('[/\\\\]+')
def split_path_win32(path):
	"""
	Splits a win32 path on runs of '/' or '\\'; a leading '\\\\' (UNC prefix)
	is kept attached to the first returned component.
	"""
	if path.startswith('\\\\'):
		ret = re_sp.split(path)[2:]
		ret[0] = '\\' + ret[0]
		return ret
	return re_sp.split(path)

# lazily-resolved native root of the MSYS installation (via cygpath)
msysroot = None
def split_path_msys(path):
	"""
	Splits an MSYS path; unix-style rooted paths such as ``/usr/bin`` are first
	translated to their native Windows location using ``cygpath``.
	"""
	# FIX: exclude '//' and '\\\\' (UNC prefixes), not '\\' - the previous
	# exclusion list ('\\', '\\\\') both mangled UNC '//server/share' paths and
	# skipped translation of rooted backslash paths
	if path.startswith(('/', '\\')) and not path.startswith(('//', '\\\\')):
		# msys paths can be in the form /usr/bin
		global msysroot
		if not msysroot:
			# msys has python 2.7 or 3, so we can use this
			msysroot = subprocess.check_output(['cygpath', '-w', '/']).decode(sys.stdout.encoding or 'iso8859-1')
			msysroot = msysroot.strip()
		path = os.path.normpath(msysroot + os.sep + path)
	return split_path_win32(path)
+
# Select the platform-appropriate split_path implementation once at import time
if sys.platform == 'cygwin':
	split_path = split_path_cygwin
elif is_win32:
	if os.environ.get('MSYSTEM'):
		# MSYSTEM is set by MSYS shells; their unix-style paths need translation
		split_path = split_path_msys
	else:
		split_path = split_path_win32
else:
	split_path = split_path_unix

split_path.__doc__ = """
Splits a path by / or \\; do not confuse this function with with ``os.path.split``

:type  path: string
:param path: path to split
:return:     list of string
"""
+
def check_dir(path):
	"""
	Ensures that a directory exists (similar to ``mkdir -p``).

	:type  path: string
	:param path: Path to directory
	:raises: :py:class:`waflib.Errors.WafError` if the folder cannot be added.
	"""
	if not os.path.isdir(path):
		try:
			os.makedirs(path)
		except OSError as e:  # FIX: 'except OSError ,e' is not valid python 3 syntax
			# tolerate concurrent creation: only fail if the folder is still absent
			if not os.path.isdir(path):
				raise Errors.WafError('Cannot create the folder %r' % path, ex=e)
+
def check_exe(name, env=None):
	"""
	Ensures that a program exists.

	:type name: string
	:param name: path to the program
	:param env: configuration object
	:type env: :py:class:`waflib.ConfigSet.ConfigSet`
	:return: absolute path of the program, or None when not found
	:raises ValueError: when an empty name is given
	"""
	if not name:
		raise ValueError('Cannot execute an empty string!')

	def is_exe(candidate):
		# a usable program is a regular file with the executable bit set
		return os.path.isfile(candidate) and os.access(candidate, os.X_OK)

	directory, _ = os.path.split(name)
	if directory and is_exe(name):
		# an explicit path was given and it is directly runnable
		return os.path.abspath(name)
	# otherwise search the PATH of the given environment (or of this process)
	lookup = env or os.environ
	for entry in lookup['PATH'].split(os.pathsep):
		candidate = os.path.join(entry.strip('"'), name)
		if is_exe(candidate):
			return os.path.abspath(candidate)
	return None
+
def def_attrs(cls, **kw):
	"""
	Sets default attributes on a class instance, skipping any attribute that
	is already present (including inherited ones).

	:type cls: class
	:param cls: the class to update the given attributes in.
	:type kw: dict
	:param kw: dictionary of attributes names and values.
	"""
	for name, value in kw.items():
		if not hasattr(cls, name):
			setattr(cls, name, value)
+
def quote_define_name(s):
	"""
	Converts a string into an identifier suitable for C defines.

	:type  s: string
	:param s: String to convert
	:rtype: string
	:return: Identifier suitable for C defines
	"""
	# replace anything non-alphanumeric with '_', collapse runs, then uppercase
	ident = re.sub('[^a-zA-Z0-9]', '_', s)
	ident = re.sub('_+', '_', ident)
	return ident.upper()
+
def h_list(lst):
	"""
	Hash lists. We would prefer to use hash(tup) for tuples because it is much more efficient,
	but Python now enforces hash randomization by assuming everybody is running a web application.

	:param lst: list to hash
	:type lst: list of strings
	:return: hash of the list
	"""
	# NOTE(review): md5 is bound earlier in this module (outside this view); if
	# it is hashlib.md5 then repr(lst) would need encoding to bytes on python 3
	# - confirm the module-level md5 binding accepts str input
	return md5(repr(lst)).digest()
+
def h_fun(fun):
	"""
	Hash functions. The result is cached on the function object itself
	(attribute ``code``) when possible.

	:param fun: function to hash
	:type  fun: function
	:return: hash of the function
	:rtype: string or bytes
	"""
	try:
		# a previous call already computed and cached the hash
		return fun.code
	except AttributeError:
		pass
	if isinstance(fun, functools.partial):
		sig = list(fun.args)
		# keywords.items() yields (name, value) tuples for the bound optional
		# arguments; sorting them gives a stable order because tuples compare
		# element-wise and keyword argument names are unique
		sig.extend(sorted(fun.keywords.items()))
		sig.append(h_fun(fun.func))
		fun.code = h_list(sig)
		return fun.code
	try:
		src = inspect.getsource(fun)
	except EnvironmentError:
		# no source available (e.g. compiled or interactive definition)
		src = 'nocode'
	try:
		fun.code = src
	except AttributeError:
		# some callables do not accept attribute assignment; skip the cache
		pass
	return src
+
+def h_cmd(ins):
+       """
+       Hashes objects recursively
+
+       :param ins: input object
+       :type ins: string or list or tuple or function
+       :rtype: string or bytes
+       """
+       # this function is not meant to be particularly fast
+       if isinstance(ins, str):
+               # a command is either a string
+               ret = ins
+       elif isinstance(ins, list) or isinstance(ins, tuple):
+               # or a list of functions/strings
+               ret = str([h_cmd(x) for x in ins])
+       else:
+               # or just a python function
+               ret = str(h_fun(ins))
+       if sys.hexversion > 0x3000000:
+               ret = ret.encode('iso8859-1', 'xmlcharrefreplace')
+       return ret
+
reg_subst = re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}")
def subst_vars(expr, params):
	"""
	Replaces ${VAR} with the value of VAR taken from a dict or a config set;
	'\\\\' collapses to a single backslash and '$$' escapes a literal '$'::

		from waflib import Utils
		s = Utils.subst_vars('${PREFIX}/bin', env)

	:type  expr: string
	:param expr: String to perform substitution on
	:param params: Dictionary or config set to look up variable values.
	"""
	def _expand(match):
		if match.group(1):
			return '\\'
		if match.group(2):
			return '$'
		name = match.group(3)
		try:
			# ConfigSet instances may contain lists: get_flat joins them
			return params.get_flat(name)
		except AttributeError:
			# plain dictionary lookup; a TypeError here means 'expr' was not a
			# string (Utils.subst_vars(None, env) will not work)
			return params[name]
	return reg_subst.sub(_expand, expr)
+
def destos_to_binfmt(key):
	"""
	Returns the binary format based on the unversioned platform name,
	and defaults to ``elf`` if nothing is found.

	Darwin maps to the historical waf value ``'mac-o'`` (kept as-is for
	compatibility with code comparing against it).

	:param key: platform name
	:type  key: string
	:return: string representing the binary format
	"""
	if key == 'darwin':
		return 'mac-o'
	if key in ('win32', 'cygwin', 'uwin', 'msys'):
		return 'pe'
	return 'elf'
+
def unversioned_sys_platform():
	"""
	Returns the unversioned platform name.
	Some Python platform names contain versions, that depend on
	the build environment, e.g. linux2, freebsd6, etc.
	This returns the name without the version number. Exceptions are
	os2 and win32, which are returned verbatim.

	:rtype: string
	:return: Unversioned platform name
	"""
	s = sys.platform
	if s.startswith('java'):
		# The real OS is hidden under the JVM.
		from java.lang import System
		s = System.getProperty('os.name')
		# see http://lopica.sourceforge.net/os.html for a list of possible values
		if s == 'Mac OS X':
			return 'darwin'
		elif s.startswith('Windows '):
			return 'win32'
		elif s == 'OS/2':
			return 'os2'
		elif s == 'HP-UX':
			return 'hp-ux'
		elif s in ('SunOS', 'Solaris'):
			return 'sunos'
		else: s = s.lower()

	# powerpc == darwin for our purposes
	if s == 'powerpc':
		return 'darwin'
	if s == 'win32' or s == 'os2':
		return s
	if s == 'cli' and os.name == 'nt':
		# ironpython is only on windows as far as we know
		return 'win32'
	# FIX: raw string - '\d' in a plain literal is a deprecated escape sequence
	return re.split(r'\d+$', s)[0]
+
def nada(*k, **kw):
	"""
	Placeholder that accepts any arguments and does nothing.

	:return: None
	"""
	return None
+
class Timer(object):
	"""
	Simple object for timing the execution of commands.
	Its string representation is the elapsed time since creation::

		from waflib.Utils import Timer
		timer = Timer()
		a_few_operations()
		s = str(timer)
	"""
	def __init__(self):
		# remember when the timer was created; __str__ reports the delta
		self.start_time = datetime.datetime.utcnow()

	def __str__(self):
		delta = datetime.datetime.utcnow() - self.start_time
		days = delta.days
		hours, rem = divmod(delta.seconds, 3600)
		minutes, secs = divmod(rem, 60)
		secs += delta.microseconds * 1e-6
		# larger units are only shown once reached, but then all smaller
		# whole units are printed too (e.g. '1d0h3m12.000s')
		if days:
			prefix = '%dd%dh%dm' % (days, hours, minutes)
		elif hours:
			prefix = '%dh%dm' % (hours, minutes)
		elif minutes:
			prefix = '%dm' % minutes
		else:
			prefix = ''
		return '%s%.3fs' % (prefix, secs)
+
def read_la_file(path):
	"""
	Reads property files (``KEY='value'`` lines), used by msvc.py

	:param path: file to read
	:type path: string
	:return: mapping of property names to values
	:rtype: dict
	"""
	pattern = re.compile(r'^([^=]+)=\'(.*)\'$')
	properties = {}
	for line in readf(path).splitlines():
		try:
			_, key, value, _ = pattern.split(line.strip())
		except ValueError:
			# not a KEY='value' line: ignore it
			continue
		properties[key] = value
	return properties
+
def run_once(fun):
	"""
	Decorator: let a function cache its results, use like this::

		@run_once
		def foo(k):
			return 345*2343

	.. note:: in practice this can cause memory leaks, prefer a :py:class:`waflib.Utils.lru_cache`

	:param fun: function to execute
	:type fun: function
	:return: the return value of the function executed
	"""
	memo = {}
	def wrap(*args):
		try:
			return memo[args]
		except KeyError:
			result = fun(*args)
			memo[args] = result
			return result
	# expose the cache and keep the wrapped function's name for introspection
	wrap.__cache__ = memo
	wrap.__name__ = fun.__name__
	return wrap
+
def get_registry_app_path(key, filename):
	"""
	Returns the value of a registry key for an executable

	:type key: string
	:type filename: list of string
	:return: the registered path when it exists on disk, else None
	"""
	# winreg is bound earlier in this module and is falsy when unavailable
	if not winreg:
		return None
	try:
		result = winreg.QueryValue(key, "Software\\Microsoft\\Windows\\CurrentVersion\\App Paths\\%s.exe" % filename[0])
	except WindowsError:
		# NOTE(review): WindowsError only exists on Windows interpreters; this
		# is safe here because winreg is falsy elsewhere, but OSError would be
		# the portable spelling - confirm
		pass
	else:
		if os.path.isfile(result):
			return result
	# implicit None when the key is missing or the target file does not exist
+
def lib64():
	"""
	Guess the default ``/usr/lib`` extension for 64-bit applications

	:return: '64' or ''
	:rtype: string
	"""
	# only meaningful on unix-like systems ('/' separator)
	if os.sep != '/':
		return ''
	if platform.architecture()[0] != '64bit':
		return ''
	# a pure lib64 layout (no /usr/lib32) indicates a bi-arch distro
	if os.path.exists('/usr/lib64') and not os.path.exists('/usr/lib32'):
		return '64'
	return ''
+
def sane_path(p):
	# private function for the time being!
	# Expands '~' and returns an absolute, normalized version of p.
	return os.path.abspath(os.path.expanduser(p))
+
process_pool = []
"""
List of processes started to execute sub-process commands
"""

def get_process():
	"""
	Returns a process object that can execute commands as sub-processes.
	Reuses an idle process from :py:data:`waflib.Utils.process_pool` when
	possible, else spawns a fresh python interpreter running the bundled
	``processor.py`` helper.

	:rtype: subprocess.Popen
	"""
	try:
		return process_pool.pop()
	except IndexError:
		# no idle process available: the helper script's source is passed on
		# the command line so the child needs no access to this file
		filepath = os.path.dirname(os.path.abspath(__file__)) + os.sep + 'processor.py'
		cmd = [sys.executable, '-c', readf(filepath)]
		return subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, bufsize=0)
+
def run_prefork_process(cmd, kwargs, cargs):
	"""
	Delegates process execution to a pre-forked process instance.

	:param cmd: command line
	:param kwargs: keyword arguments for subprocess.Popen, pickled and sent to the child
	:param cargs: keyword arguments for communicate/wait in the child (e.g. timeout)
	:return: (exit status, stdout contents, stderr contents)
	:raises OSError: if the pre-forked child died, or if the child reports an OSError
	"""
	if 'env' not in kwargs:
		kwargs['env'] = dict(os.environ)
	try:
		obj = base64.b64encode(cPickle.dumps([cmd, kwargs, cargs]))
	except TypeError:
		# unpicklable arguments (file objects, handlers, etc): run locally
		return run_regular_process(cmd, kwargs, cargs)

	proc = get_process()
	if not proc:
		return run_regular_process(cmd, kwargs, cargs)

	proc.stdin.write(obj)
	# FIX: the child pipes are binary streams; the newline terminator must be
	# bytes on python 3 (writing the str '\n' raises TypeError there)
	proc.stdin.write('\n'.encode())
	proc.stdin.flush()
	obj = proc.stdout.readline()
	if not obj:
		raise OSError('Preforked sub-process %r died' % proc.pid)

	process_pool.append(proc)
	ret, out, err, ex, trace = cPickle.loads(base64.b64decode(obj))
	if ex:
		# re-raise the exception type signalled by the child by name
		if ex == 'OSError':
			raise OSError(trace)
		elif ex == 'ValueError':
			raise ValueError(trace)
		elif ex == 'TimeoutExpired':
			exc = TimeoutExpired(cmd, timeout=cargs['timeout'], output=out)
			exc.stderr = err
			raise exc
		else:
			raise Exception(trace)
	return ret, out, err
+
def lchown(path, user=-1, group=-1):
	"""
	Changes the owner/group of a path without following symlinks, raising an
	OSError if the name lookup or the ownership change fails.

	:param path: path to change
	:type path: string
	:param user: user to change, or -1 to leave unchanged
	:type user: int or str
	:param group: group to change, or -1 to leave unchanged
	:type group: int or str
	:raises OSError: when the user/group is unknown or the change is not permitted
	"""
	if isinstance(user, str):
		import pwd
		# FIX: getpwnam raises KeyError for unknown names (it never returns a
		# falsy value), so translate it into the documented OSError
		try:
			entry = pwd.getpwnam(user)
		except KeyError:
			raise OSError('Unknown user %r' % user)
		user = entry[2]
	if isinstance(group, str):
		import grp
		try:
			entry = grp.getgrnam(group)
		except KeyError:
			raise OSError('Unknown group %r' % group)
		group = entry[2]
	return os.lchown(path, user, group)
+
def run_regular_process(cmd, kwargs, cargs={}):
	"""
	Executes a subprocess command by using subprocess.Popen.

	:param cmd: command line
	:param kwargs: keyword arguments for subprocess.Popen
	:param cargs: keyword arguments for communicate/wait (e.g. timeout);
		the default dict is never mutated
	:return: (exit status, stdout contents or None, stderr contents or None)
	"""
	proc = subprocess.Popen(cmd, **kwargs)
	if kwargs.get('stdout') or kwargs.get('stderr'):
		try:
			out, err = proc.communicate(**cargs)
		except TimeoutExpired:
			# kill the whole process group when possible so grandchildren die too
			if kwargs.get('start_new_session') and hasattr(os, 'killpg'):
				os.killpg(proc.pid, signal.SIGKILL)
			else:
				proc.kill()
			out, err = proc.communicate()
			exc = TimeoutExpired(proc.args, timeout=cargs['timeout'], output=out)
			exc.stderr = err
			raise exc
		status = proc.returncode
	else:
		out, err = (None, None)
		try:
			status = proc.wait(**cargs)
		except TimeoutExpired as e:  # FIX: 'except TimeoutExpired ,e' is not valid python 3 syntax
			if kwargs.get('start_new_session') and hasattr(os, 'killpg'):
				os.killpg(proc.pid, signal.SIGKILL)
			else:
				proc.kill()
			proc.wait()
			raise e
	return status, out, err
+
def run_process(cmd, kwargs, cargs={}):
	"""
	Executes a subprocess, delegating to a pre-forked helper process when both
	output streams are captured, else falling back to subprocess.Popen.
	See :py:func:`waflib.Utils.run_prefork_process`
	and :py:func:`waflib.Utils.run_regular_process`
	"""
	if kwargs.get('stdout') and kwargs.get('stderr'):
		return run_prefork_process(cmd, kwargs, cargs)
	return run_regular_process(cmd, kwargs, cargs)
+
def alloc_process_pool(n, force=False):
	"""
	Allocates an amount of processes to the default pool so its size is at least *n*.
	It is useful to call this function early so that the pre-forked
	processes use as little memory as possible.

	:param n: pool size
	:type n: integer
	:param force: if True then *n* more processes are added to the existing pool
	:type force: bool
	"""
	# mandatory on python2, unnecessary on python >= 3.2
	global run_process, get_process, alloc_process_pool
	if not force:
		# only top up the pool so that it reaches n processes in total
		n = max(n - len(process_pool), 0)
	try:
		lst = [get_process() for x in range(n)]
	except OSError:
		# process creation failed: disable the preforking scheme entirely
		run_process = run_regular_process
		get_process = alloc_process_pool = nada
	else:
		for x in lst:
			process_pool.append(x)
+
def atexit_pool():
	# kill the pre-forked helper processes at interpreter exit and reap them
	for k in process_pool:
		try:
			os.kill(k.pid, 9)
		except OSError:
			# the child is already gone
			pass
		else:
			k.wait()
# see #1889
if (sys.hexversion<0x207000f and not is_win32) or sys.hexversion>=0x306000f:
	atexit.register(atexit_pool)

# ironpython, or an embedded interpreter without sys.executable, cannot use
# the pre-forked process scheme at all
if sys.platform == 'cli' or not sys.executable:
	run_process = run_regular_process
	get_process = alloc_process_pool = nada
diff --git a/third_party/waf/waflib/__init__.py b/third_party/waf/waflib/__init__.py
new file mode 100644 (file)
index 0000000..8766ecb
--- /dev/null
@@ -0,0 +1,7 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2016 (ita)
diff --git a/third_party/waf/waflib/ansiterm.py b/third_party/waf/waflib/ansiterm.py
new file mode 100644 (file)
index 0000000..4ef682d
--- /dev/null
@@ -0,0 +1,345 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+
+"""
+Emulate a vt100 terminal in cmd.exe
+
+By wrapping sys.stdout / sys.stderr with Ansiterm,
+the vt100 escape characters will be interpreted and
+the equivalent actions will be performed with Win32
+console commands.
+
+"""
+
+import os, re, sys
+from waflib import Utils
+
+wlock = Utils.threading.Lock()
+
+try:
+       from ctypes import Structure, windll, c_short, c_ushort, c_ulong, c_int, byref, c_wchar, POINTER, c_long
+except ImportError:
+
	class AnsiTerm(object):
		"""
		Fallback terminal wrapper used when ctypes is unavailable:
		text is passed through unchanged (no vt100 emulation).
		"""
		def __init__(self, stream):
			self.stream = stream
			try:
				self.errors = self.stream.errors
			except AttributeError:
				pass # python 2.5
			self.encoding = self.stream.encoding

		def write(self, txt):
			# serialize writes across threads and flush immediately so that
			# output from concurrent tasks interleaves predictably
			try:
				wlock.acquire()
				self.stream.write(txt)
				self.stream.flush()
			finally:
				wlock.release()

		def fileno(self):
			return self.stream.fileno()

		def flush(self):
			self.stream.flush()

		def isatty(self):
			return self.stream.isatty()
+else:
+
	class COORD(Structure):
		# Win32 COORD structure: a character cell position
		_fields_ = [("X", c_short), ("Y", c_short)]

	class SMALL_RECT(Structure):
		# Win32 SMALL_RECT structure: a rectangle of character cells
		_fields_ = [("Left", c_short), ("Top", c_short), ("Right", c_short), ("Bottom", c_short)]

	class CONSOLE_SCREEN_BUFFER_INFO(Structure):
		# Win32 CONSOLE_SCREEN_BUFFER_INFO: buffer size, cursor, attributes, window
		_fields_ = [("Size", COORD), ("CursorPosition", COORD), ("Attributes", c_ushort), ("Window", SMALL_RECT), ("MaximumWindowSize", COORD)]

	class CONSOLE_CURSOR_INFO(Structure):
		# Win32 CONSOLE_CURSOR_INFO: cursor size percentage and visibility flag
		_fields_ = [('dwSize', c_ulong), ('bVisible', c_int)]

	# 'unicode' exists on python 2 only; on python 3 all strings are str
	try:
		_type = unicode
	except NameError:
		_type = str

	# parse a numeric escape-sequence parameter, falling back to a default
	# when the parameter is empty
	to_int = lambda number, default: number and int(number) or default

	STD_OUTPUT_HANDLE = -11
	STD_ERROR_HANDLE = -12

	# declare argument/return types for the kernel32 calls used below so that
	# ctypes performs correct conversions on both 32- and 64-bit interpreters
	windll.kernel32.GetStdHandle.argtypes = [c_ulong]
	windll.kernel32.GetStdHandle.restype = c_ulong
	windll.kernel32.GetConsoleScreenBufferInfo.argtypes = [c_ulong, POINTER(CONSOLE_SCREEN_BUFFER_INFO)]
	windll.kernel32.GetConsoleScreenBufferInfo.restype = c_long
	windll.kernel32.SetConsoleTextAttribute.argtypes = [c_ulong, c_ushort]
	windll.kernel32.SetConsoleTextAttribute.restype = c_long
	windll.kernel32.FillConsoleOutputCharacterW.argtypes = [c_ulong, c_wchar, c_ulong, POINTER(COORD), POINTER(c_ulong)]
	windll.kernel32.FillConsoleOutputCharacterW.restype = c_long
	windll.kernel32.FillConsoleOutputAttribute.argtypes = [c_ulong, c_ushort, c_ulong, POINTER(COORD), POINTER(c_ulong) ]
	windll.kernel32.FillConsoleOutputAttribute.restype = c_long
	windll.kernel32.SetConsoleCursorPosition.argtypes = [c_ulong, POINTER(COORD) ]
	windll.kernel32.SetConsoleCursorPosition.restype = c_long
	windll.kernel32.SetConsoleCursorInfo.argtypes = [c_ulong, POINTER(CONSOLE_CURSOR_INFO)]
	windll.kernel32.SetConsoleCursorInfo.restype = c_long
+
+       class AnsiTerm(object):
+               """
+               emulate a vt100 terminal in cmd.exe
+               """
+               def __init__(self, s):
+                       self.stream = s
+                       try:
+                               self.errors = s.errors
+                       except AttributeError:
+                               pass # python2.5
+                       self.encoding = s.encoding
+                       self.cursor_history = []
+
+                       handle = (s.fileno() == 2) and STD_ERROR_HANDLE or STD_OUTPUT_HANDLE
+                       self.hconsole = windll.kernel32.GetStdHandle(handle)
+
+                       self._sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
+
+                       self._csinfo = CONSOLE_CURSOR_INFO()
+                       windll.kernel32.GetConsoleCursorInfo(self.hconsole, byref(self._csinfo))
+
+                       # just to double check that the console is usable
+                       self._orig_sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
+                       r = windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(self._orig_sbinfo))
+                       self._isatty = r == 1
+
+               def screen_buffer_info(self):
+                       """
+                       Updates self._sbinfo and returns it
+                       """
+                       windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(self._sbinfo))
+                       return self._sbinfo
+
+               def clear_line(self, param):
+                       mode = param and int(param) or 0
+                       sbinfo = self.screen_buffer_info()
+                       if mode == 1: # Clear from beginning of line to cursor position
+                               line_start = COORD(0, sbinfo.CursorPosition.Y)
+                               line_length = sbinfo.Size.X
+                       elif mode == 2: # Clear entire line
+                               line_start = COORD(sbinfo.CursorPosition.X, sbinfo.CursorPosition.Y)
+                               line_length = sbinfo.Size.X - sbinfo.CursorPosition.X
+                       else: # Clear from cursor position to end of line
+                               line_start = sbinfo.CursorPosition
+                               line_length = sbinfo.Size.X - sbinfo.CursorPosition.X
+                       chars_written = c_ulong()
+                       windll.kernel32.FillConsoleOutputCharacterW(self.hconsole, c_wchar(' '), line_length, line_start, byref(chars_written))
+                       windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, line_length, line_start, byref(chars_written))
+
+               def clear_screen(self, param):
+                       mode = to_int(param, 0)
+                       sbinfo = self.screen_buffer_info()
+                       if mode == 1: # Clear from beginning of screen to cursor position
+                               clear_start = COORD(0, 0)
+                               clear_length = sbinfo.CursorPosition.X * sbinfo.CursorPosition.Y
+                       elif mode == 2: # Clear entire screen and return cursor to home
+                               clear_start = COORD(0, 0)
+                               clear_length = sbinfo.Size.X * sbinfo.Size.Y
+                               windll.kernel32.SetConsoleCursorPosition(self.hconsole, clear_start)
+                       else: # Clear from cursor position to end of screen
+                               clear_start = sbinfo.CursorPosition
+                               clear_length = ((sbinfo.Size.X - sbinfo.CursorPosition.X) + sbinfo.Size.X * (sbinfo.Size.Y - sbinfo.CursorPosition.Y))
+                       chars_written = c_ulong()
+                       windll.kernel32.FillConsoleOutputCharacterW(self.hconsole, c_wchar(' '), clear_length, clear_start, byref(chars_written))
+                       windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, clear_length, clear_start, byref(chars_written))
+
+               def push_cursor(self, param):
+                       sbinfo = self.screen_buffer_info()
+                       self.cursor_history.append(sbinfo.CursorPosition)
+
+               def pop_cursor(self, param):
+                       if self.cursor_history:
+                               old_pos = self.cursor_history.pop()
+                               windll.kernel32.SetConsoleCursorPosition(self.hconsole, old_pos)
+
+               def set_cursor(self, param):
+                       y, sep, x = param.partition(';')
+                       x = to_int(x, 1) - 1
+                       y = to_int(y, 1) - 1
+                       sbinfo = self.screen_buffer_info()
+                       new_pos = COORD(
+                               min(max(0, x), sbinfo.Size.X),
+                               min(max(0, y), sbinfo.Size.Y)
+                       )
+                       windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
+
+               def set_column(self, param):
+                       x = to_int(param, 1) - 1
+                       sbinfo = self.screen_buffer_info()
+                       new_pos = COORD(
+                               min(max(0, x), sbinfo.Size.X),
+                               sbinfo.CursorPosition.Y
+                       )
+                       windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
+
+               def move_cursor(self, x_offset=0, y_offset=0):
+                       sbinfo = self.screen_buffer_info()
+                       new_pos = COORD(
+                               min(max(0, sbinfo.CursorPosition.X + x_offset), sbinfo.Size.X),
+                               min(max(0, sbinfo.CursorPosition.Y + y_offset), sbinfo.Size.Y)
+                       )
+                       windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
+
+               def move_up(self, param):
+                       self.move_cursor(y_offset = -to_int(param, 1))
+
+               def move_down(self, param):
+                       self.move_cursor(y_offset = to_int(param, 1))
+
+               def move_left(self, param):
+                       self.move_cursor(x_offset = -to_int(param, 1))
+
+               def move_right(self, param):
+                       self.move_cursor(x_offset = to_int(param, 1))
+
+               def next_line(self, param):
+                       sbinfo = self.screen_buffer_info()
+                       self.move_cursor(
+                               x_offset = -sbinfo.CursorPosition.X,
+                               y_offset = to_int(param, 1)
+                       )
+
+               def prev_line(self, param):
+                       sbinfo = self.screen_buffer_info()
+                       self.move_cursor(
+                               x_offset = -sbinfo.CursorPosition.X,
+                               y_offset = -to_int(param, 1)
+                       )
+
+               def rgb2bgr(self, c):
+                       return ((c&1) << 2) | (c&2) | ((c&4)>>2)
+
+		def set_color(self, param):
+			# Translate an ANSI SGR parameter list (the "CSI ... m" sequence)
+			# into a Win32 console attribute word and apply it.
+			cols = param.split(';')
+			sbinfo = self.screen_buffer_info()
+			attr = sbinfo.Attributes
+			for c in cols:
+				c = to_int(c, 0)
+				if 29 < c < 38: # fgcolor
+					attr = (attr & 0xfff0) | self.rgb2bgr(c - 30)
+				elif 39 < c < 48: # bgcolor
+					attr = (attr & 0xff0f) | (self.rgb2bgr(c - 40) << 4)
+				elif c == 0: # reset
+					attr = self._orig_sbinfo.Attributes
+				elif c == 1: # strong
+					attr |= 0x08
+				elif c == 4: # blink not available -> bg intensity
+					attr |= 0x80
+				elif c == 7: # negative
+					attr = (attr & 0xff88) | ((attr & 0x70) >> 4) | ((attr & 0x07) << 4)
+
+			windll.kernel32.SetConsoleTextAttribute(self.hconsole, attr)
+
+               def show_cursor(self,param):
+                       self._csinfo.bVisible = 1
+                       windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(self._csinfo))
+
+               def hide_cursor(self,param):
+                       self._csinfo.bVisible = 0
+                       windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(self._csinfo))
+
+               ansi_command_table = {
+                       'A': move_up,
+                       'B': move_down,
+                       'C': move_right,
+                       'D': move_left,
+                       'E': next_line,
+                       'F': prev_line,
+                       'G': set_column,
+                       'H': set_cursor,
+                       'f': set_cursor,
+                       'J': clear_screen,
+                       'K': clear_line,
+                       'h': show_cursor,
+                       'l': hide_cursor,
+                       'm': set_color,
+                       's': push_cursor,
+                       'u': pop_cursor,
+               }
+               # Match either the escape sequence or text not containing escape sequence
+               ansi_tokens = re.compile('(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))')
+		def write(self, text):
+			# Serialized through the module-level lock so interleaved writes
+			# from several threads do not mix escape sequences and text.
+			try:
+				wlock.acquire()
+				if self._isatty:
+					# dispatch each escape sequence to its handler, write
+					# the plain-text runs directly to the console
+					for param, cmd, txt in self.ansi_tokens.findall(text):
+						if cmd:
+							cmd_func = self.ansi_command_table.get(cmd)
+							if cmd_func:
+								cmd_func(self, param)
+						else:
+							self.writeconsole(txt)
+				else:
+					# no support for colors in the console, just output the text:
+					# eclipse or msys may be able to interpret the escape sequences
+					self.stream.write(text)
+			finally:
+				wlock.release()
+
+		def writeconsole(self, txt):
+			# Write raw text through the Win32 console API; WriteConsoleW is
+			# used when txt matches _type (presumably the unicode string type
+			# bound earlier in this module — not visible here, confirm).
+			chars_written = c_ulong()
+			writeconsole = windll.kernel32.WriteConsoleA
+			if isinstance(txt, _type):
+				writeconsole = windll.kernel32.WriteConsoleW
+
+			# MSDN says that there is a shared buffer of 64 KB for the console
+			# writes. Attempt to not get ERROR_NOT_ENOUGH_MEMORY, see waf issue #746
+			done = 0
+			todo = len(txt)
+			chunk = 32<<10
+			while todo != 0:
+				doing = min(chunk, todo)
+				buf = txt[done:done+doing]
+				r = writeconsole(self.hconsole, buf, doing, byref(chars_written), None)
+				if r == 0:
+					# write failed: halve the chunk size and retry
+					chunk >>= 1
+					continue
+				done += doing
+				todo -= doing
+
+
+               def fileno(self):
+                       return self.stream.fileno()
+
+               def flush(self):
+                       pass
+
+               def isatty(self):
+                       return self._isatty
+
+       if sys.stdout.isatty() or sys.stderr.isatty():
+               handle = sys.stdout.isatty() and STD_OUTPUT_HANDLE or STD_ERROR_HANDLE
+               console = windll.kernel32.GetStdHandle(handle)
+               sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
+               def get_term_cols():
+                       windll.kernel32.GetConsoleScreenBufferInfo(console, byref(sbinfo))
+                       # Issue 1401 - the progress bar cannot reach the last character
+                       return sbinfo.Size.X - 1
+
+# just try and see
+try:
+       import struct, fcntl, termios
+except ImportError:
+       pass
+else:
+       if (sys.stdout.isatty() or sys.stderr.isatty()) and os.environ.get('TERM', '') not in ('dumb', 'emacs'):
+               FD = sys.stdout.isatty() and sys.stdout.fileno() or sys.stderr.fileno()
+               def fun():
+                       return struct.unpack("HHHH", fcntl.ioctl(FD, termios.TIOCGWINSZ, struct.pack("HHHH", 0, 0, 0, 0)))[1]
+               try:
+                       fun()
+               except Exception ,e:
+                       pass
+               else:
+                       get_term_cols = fun
diff --git a/third_party/waf/waflib/extras/__init__.py b/third_party/waf/waflib/extras/__init__.py
new file mode 100644 (file)
index 0000000..3dfaabd
--- /dev/null
@@ -0,0 +1,7 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2010 (ita)
diff --git a/third_party/waf/waflib/extras/add_objects.py b/third_party/waf/waflib/extras/add_objects.py
new file mode 100644 (file)
index 0000000..5606fd6
--- /dev/null
@@ -0,0 +1,6 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011 (ita)
+
+from waflib import Logs
+Logs.warn('This tool has been merged to the main library, remove the references to "add_objects"')
diff --git a/third_party/waf/waflib/extras/batched_cc.py b/third_party/waf/waflib/extras/batched_cc.py
new file mode 100644 (file)
index 0000000..4e48e78
--- /dev/null
@@ -0,0 +1,168 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2015 (ita)
+
+"""
+Build as batches.
+
+Instead of compiling object files one by one, c/c++ compilers are often able to compile at once:
+cc -c ../file1.c ../file2.c ../file3.c
+
+Files are output on the directory where the compiler is called, and dependencies are more difficult
+to track (do not run the command on all source files if only one file changes)
+
+As such, we do as if the files were compiled one by one, but no command is actually run:
+replace each cc/cpp Task by a TaskSlave. A new task called TaskMaster collects the
+signatures from each slave and finds out the command-line to run.
+
+Just import this module in the configuration (no other change required).
+This is provided as an example, for performance unity builds are recommended (fewer tasks and
+fewer jobs to execute). See waflib/extras/unity.py.
+"""
+
+from waflib import Task, Utils
+from waflib.TaskGen import extension, feature, after_method
+from waflib.Tools import c, cxx
+
+MAX_BATCH = 50
+
+c_str = '${CC} ${CFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED}'
+c_fun, _ = Task.compile_fun_noshell(c_str)
+
+cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED}'
+cxx_fun, _ = Task.compile_fun_noshell(cxx_str)
+
+count = 70000
+class batch_task(Task.Task):
+	# Master task: gathers slave c/cxx tasks and compiles all of their
+	# sources with a single compiler invocation.
+	color = 'PINK'
+
+	after = ['c', 'cxx']
+	before = ['cprogram', 'cshlib', 'cstlib', 'cxxprogram', 'cxxshlib', 'cxxstlib']
+
+	def uid(self):
+		# mix the generator index into the uid so masters created by
+		# different generators do not collide
+		m = Utils.md5()
+		m.update(Task.Task.uid(self))
+		m.update(str(self.generator.idx).encode())
+		return m.digest()
+
+	def __str__(self):
+		return 'Batch compilation for %d slaves' % len(self.slaves)
+
+	def __init__(self, *k, **kw):
+		Task.Task.__init__(self, *k, **kw)
+		self.slaves = []
+		self.inputs = []
+		self.hasrun = 0
+
+		# the module-level counter gives each master a unique index,
+		# used by hook() below to name the per-batch output folder
+		global count
+		count += 1
+		self.idx = count
+
+	def add_slave(self, slave):
+		# register a slave task and make this master run after it
+		self.slaves.append(slave)
+		self.set_run_after(slave)
+
+	def runnable_status(self):
+		for t in self.run_after:
+			if not t.hasrun:
+				return Task.ASK_LATER
+
+		# run if at least one slave actually needs recompiling
+		for t in self.slaves:
+			if t.hasrun != Task.SKIPPED:
+				return Task.RUN_ME
+
+		return Task.SKIP_ME
+
+	def run(self):
+		self.outputs = []
+
+		# collect the sources of the slaves that were not skipped
+		srclst = []
+		slaves = []
+		for t in self.slaves:
+			if t.hasrun != Task.SKIPPED:
+				slaves.append(t)
+				srclst.append(t.inputs[0].abspath())
+
+		self.env.SRCLST = srclst
+		# the compiler drops object files into its working directory,
+		# so run it from the output directory of the first slave
+		self.cwd = slaves[0].outputs[0].parent.abspath()
+
+		if self.slaves[0].__class__.__name__ == 'c':
+			ret = c_fun(self)
+		else:
+			ret = cxx_fun(self)
+
+		if ret:
+			return ret
+
+		# let each slave perform its usual post-run bookkeeping
+		# (post_run was swapped out at the bottom of this module)
+		for t in slaves:
+			t.old_post_run()
+
+def hook(cls_type):
+	# Factory returning an extension hook: it creates a slave compile task
+	# of class *cls_type* for the node and attaches it to a batch master
+	# (at most MAX_BATCH slaves per master, masters keyed by source folder).
+	def n_hook(self, node):
+
+		ext = '.obj' if self.env.CC_NAME == 'msvc' else '.o'
+		name = node.name
+		k = name.rfind('.')
+		if k >= 0:
+			basename = name[:k] + ext
+		else:
+			basename = name + ext
+
+		# object files go into a per-generator subfolder of the build dir
+		outdir = node.parent.get_bld().make_node('%d' % self.idx)
+		outdir.mkdir()
+		out = outdir.find_or_declare(basename)
+
+		task = self.create_task(cls_type, node, out)
+
+		try:
+			self.compiled_tasks.append(task)
+		except AttributeError:
+			self.compiled_tasks = [task]
+
+		if not getattr(self, 'masters', None):
+			self.masters = {}
+			self.allmasters = []
+
+		def fix_path(tsk):
+			# msvc needs an explicit /Fo<dir>\ to place the object files
+			if self.env.CC_NAME == 'msvc':
+				tsk.env.append_unique('CXX_TGT_F_BATCHED', '/Fo%s\\' % outdir.abspath())
+
+		if not node.parent in self.masters:
+			m = self.masters[node.parent] = self.master = self.create_task('batch')
+			fix_path(m)
+			self.allmasters.append(m)
+		else:
+			m = self.masters[node.parent]
+			if len(m.slaves) > MAX_BATCH:
+				# current master is full, start a new one
+				m = self.masters[node.parent] = self.master = self.create_task('batch')
+				fix_path(m)
+				self.allmasters.append(m)
+		m.add_slave(task)
+		return task
+	return n_hook
+
+extension('.c')(hook('c'))
+extension('.cpp','.cc','.cxx','.C','.c++')(hook('cxx'))
+
+@feature('cprogram', 'cshlib', 'cstaticlib', 'cxxprogram', 'cxxshlib', 'cxxstlib')
+@after_method('apply_link')
+def link_after_masters(self):
+       if getattr(self, 'allmasters', None):
+               for m in self.allmasters:
+                       self.link_task.set_run_after(m)
+
+# Modify the c and cxx task classes - in theory it would be best to
+# create subclasses and to re-map the c/c++ extensions
+for x in ('c', 'cxx'):
+	t = Task.classes[x]
+	def run(self):
+		# slaves do not compile anything themselves
+		pass
+
+	def post_run(self):
+		# deferred: the batch master calls old_post_run() after compiling
+		pass
+
+	# keep the originals reachable, then neuter run/post_run
+	setattr(t, 'oldrun', getattr(t, 'run', None))
+	setattr(t, 'run', run)
+	setattr(t, 'old_post_run', t.post_run)
+	setattr(t, 'post_run', post_run)
diff --git a/third_party/waf/waflib/extras/build_file_tracker.py b/third_party/waf/waflib/extras/build_file_tracker.py
new file mode 100644 (file)
index 0000000..a00f7b2
--- /dev/null
@@ -0,0 +1,31 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2015
+
+"""
+Force files to depend on the timestamps of those located in the build directory. You may
+want to use this to force partial rebuilds, see playground/track_output_files/ for a working example.
+
+Note that there is a variety of ways to implement this, one may want use timestamps on source files too for example,
+or one may want to hash the files in the source directory only under certain conditions (md5_tstamp tool)
+or to hash the file in the build directory with its timestamp (similar to 'update_outputs')
+"""
+
+import os
+from waflib import Node, Utils
+
+def get_bld_sig(self):
+	# Node method override: signatures of files in the build directory also
+	# depend on their modification time, forcing consumers to rebuild when
+	# an output is touched.
+	try:
+		return self.cache_sig
+	except AttributeError:
+		pass
+
+	if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode:
+		self.sig = Utils.h_file(self.abspath())
+		self.cache_sig = ret = self.sig
+	else:
+		# append the timestamp to the content signature
+		self.cache_sig = ret = self.sig + str(os.stat(self.abspath()).st_mtime)
+	return ret
+
+Node.Node.get_bld_sig = get_bld_sig
diff --git a/third_party/waf/waflib/extras/build_logs.py b/third_party/waf/waflib/extras/build_logs.py
new file mode 100644 (file)
index 0000000..2fb8d34
--- /dev/null
@@ -0,0 +1,110 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2013 (ita)
+
+"""
+A system for recording all outputs to a log file. Just add the following to your wscript file::
+
+  def init(ctx):
+    ctx.load('build_logs')
+"""
+
+import atexit, sys, time, os, shutil, threading
+from waflib import ansiterm, Logs, Context
+
+# adding the logs under the build/ directory will clash with the clean/ command
+try:
+       up = os.path.dirname(Context.g_module.__file__)
+except AttributeError:
+       up = '.'
+LOGFILE = os.path.join(up, 'logs', '%s.log' % time.strftime('%Y_%m_%d_%H_%M'))
+
+wlock = threading.Lock()
+class log_to_file(object):
+	# Stream proxy: forwards writes to the wrapped stream and mirrors them
+	# (with ANSI color codes stripped) into the log file.
+	def __init__(self, stream, fileobj, filename):
+		self.stream = stream
+		self.encoding = self.stream.encoding
+		self.fileobj = fileobj
+		self.filename = filename
+		# set to False by exit_cleanup() so late writes skip the closed file
+		self.is_valid = True
+	def replace_colors(self, data):
+		# remove the terminal escape sequences before logging
+		for x in Logs.colors_lst.values():
+			if isinstance(x, str):
+				data = data.replace(x, '')
+		return data
+	def write(self, data):
+		# lock shared with other writers so the log stays line-coherent
+		try:
+			wlock.acquire()
+			self.stream.write(data)
+			self.stream.flush()
+			if self.is_valid:
+				self.fileobj.write(self.replace_colors(data))
+		finally:
+			wlock.release()
+	def fileno(self):
+		return self.stream.fileno()
+	def flush(self):
+		self.stream.flush()
+		if self.is_valid:
+			self.fileobj.flush()
+	def isatty(self):
+		return self.stream.isatty()
+
+def init(ctx):
+	# Redirect stdout/stderr through log_to_file proxies so that all build
+	# output is also recorded into LOGFILE.
+	global LOGFILE
+	filename = os.path.abspath(LOGFILE)
+	try:
+		os.makedirs(os.path.dirname(os.path.abspath(filename)))
+	except OSError:
+		# the logs/ directory may already exist
+		pass
+
+	if hasattr(os, 'O_NOINHERIT'):
+		# windows: keep the log fd out of spawned child processes
+		fd = os.open(LOGFILE, os.O_CREAT | os.O_TRUNC | os.O_WRONLY | os.O_NOINHERIT)
+		fileobj = os.fdopen(fd, 'w')
+	else:
+		fileobj = open(LOGFILE, 'w')
+	old_stderr = sys.stderr
+
+	# sys.stdout has already been replaced, so __stdout__ will be faster
+	#sys.stdout = log_to_file(sys.stdout, fileobj, filename)
+	#sys.stderr = log_to_file(sys.stderr, fileobj, filename)
+	def wrap(stream):
+		# wrap real ttys so ANSI escapes keep working on windows consoles
+		if stream.isatty():
+			return ansiterm.AnsiTerm(stream)
+		return stream
+	sys.stdout = log_to_file(wrap(sys.__stdout__), fileobj, filename)
+	sys.stderr = log_to_file(wrap(sys.__stderr__), fileobj, filename)
+
+	# now mess with the logging module...
+	for x in Logs.log.handlers:
+		try:
+			stream = x.stream
+		except AttributeError:
+			pass
+		else:
+			# repoint handlers that logged to the old stderr at the proxy
+			if id(stream) == id(old_stderr):
+				x.stream = sys.stderr
+
+def exit_cleanup():
+	# atexit handler: detach the log file from the stdout/stderr proxies,
+	# close it, and keep a copy under logs/latest.log
+	try:
+		fileobj = sys.stdout.fileobj
+	except AttributeError:
+		# stdout was never wrapped by init(), nothing to clean up
+		pass
+	else:
+		sys.stdout.is_valid = False
+		sys.stderr.is_valid = False
+		fileobj.close()
+		filename = sys.stdout.filename
+
+		Logs.info('Output logged to %r' % filename)
+
+		# then copy the log file to "latest.log" if possible
+		up = os.path.dirname(os.path.abspath(filename))
+		try:
+			shutil.copy(filename, os.path.join(up, 'latest.log'))
+		except OSError:
+			# this may fail on windows due to processes spawned
+			pass
+
+atexit.register(exit_cleanup)
diff --git a/third_party/waf/waflib/extras/c_bgxlc.py b/third_party/waf/waflib/extras/c_bgxlc.py
new file mode 100644 (file)
index 0000000..7633f56
--- /dev/null
@@ -0,0 +1,31 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+"""
+IBM XL Compiler for Blue Gene
+"""
+
+from waflib.Tools import ccroot,ar
+from waflib.Configure import conf
+
+from waflib.Tools import xlc # method xlc_common_flags
+from waflib.Tools.compiler_c import c_compiler
+c_compiler['linux'].append('c_bgxlc')
+
+@conf
+def find_bgxlc(conf):
+       cc = conf.find_program(['bgxlc_r','bgxlc'], var='CC')
+       conf.get_xlc_version(cc)
+       conf.env.CC = cc
+       conf.env.CC_NAME = 'bgxlc'
+
+def configure(conf):
+       conf.find_bgxlc()
+       conf.find_ar()
+       conf.xlc_common_flags()
+       conf.env.LINKFLAGS_cshlib = ['-G','-Wl,-bexpfull']
+       conf.env.LINKFLAGS_cprogram = []
+       conf.cc_load_tools()
+       conf.cc_add_flags()
+       conf.link_add_flags()
diff --git a/third_party/waf/waflib/extras/c_dumbpreproc.py b/third_party/waf/waflib/extras/c_dumbpreproc.py
new file mode 100644 (file)
index 0000000..9407527
--- /dev/null
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2010 (ita)
+
+"""
+Dumb C/C++ preprocessor for finding dependencies
+
+It will look at all include files it can find after removing the comments, so the following
+will always add the dependency on both "a.h" and "b.h"::
+
+       #include "a.h"
+       #ifdef B
+               #include "b.h"
+       #endif
+       int main() {
+               return 0;
+       }
+
+To use::
+
+       def configure(conf):
+               conf.load('compiler_c')
+               conf.load('c_dumbpreproc')
+"""
+
+import re
+from waflib.Tools import c_preproc
+
+re_inc = re.compile(
+       '^[ \t]*(#|%:)[ \t]*(include)[ \t]*[<"](.*)[>"]\r*$',
+       re.IGNORECASE | re.MULTILINE)
+
+def lines_includes(node):
+       code = node.read()
+       if c_preproc.use_trigraphs:
+               for (a, b) in c_preproc.trig_def: code = code.split(a).join(b)
+       code = c_preproc.re_nl.sub('', code)
+       code = c_preproc.re_cpp.sub(c_preproc.repl, code)
+       return [(m.group(2), m.group(3)) for m in re.finditer(re_inc, code)]
+
+parser = c_preproc.c_parser
+class dumb_parser(parser):
+       def addlines(self, node):
+               if node in self.nodes[:-1]:
+                       return
+               self.currentnode_stack.append(node.parent)
+
+               # Avoid reading the same files again
+               try:
+                       lines = self.parse_cache[node]
+               except KeyError:
+                       lines = self.parse_cache[node] = lines_includes(node)
+
+               self.lines = lines + [(c_preproc.POPFILE, '')] +  self.lines
+
+       def start(self, node, env):
+               try:
+                       self.parse_cache = node.ctx.parse_cache
+               except AttributeError:
+                       self.parse_cache = node.ctx.parse_cache = {}
+
+               self.addlines(node)
+               while self.lines:
+                       (x, y) = self.lines.pop(0)
+                       if x == c_preproc.POPFILE:
+                               self.currentnode_stack.pop()
+                               continue
+                       self.tryfind(y)
+
+c_preproc.c_parser = dumb_parser
diff --git a/third_party/waf/waflib/extras/c_emscripten.py b/third_party/waf/waflib/extras/c_emscripten.py
new file mode 100644 (file)
index 0000000..6e7fbbe
--- /dev/null
@@ -0,0 +1,96 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 vi:ts=4:noexpandtab
+
+import subprocess, shlex, sys
+
+from waflib.Tools import ccroot, gcc, gxx
+from waflib.Configure import conf
+from waflib.TaskGen import after_method, feature
+
+from waflib.Tools.compiler_c import c_compiler
+from waflib.Tools.compiler_cxx import cxx_compiler
+
+for supported_os in ('linux', 'darwin', 'gnu', 'aix'):
+       c_compiler[supported_os].append('c_emscripten')
+       cxx_compiler[supported_os].append('c_emscripten')
+
+
+@conf
+def get_emscripten_version(conf, cc):
+       """
+       Emscripten doesn't support processing '-' like clang/gcc
+       """
+
+       dummy = conf.cachedir.parent.make_node("waf-emscripten.c")
+       dummy.write("")
+       cmd = cc + ['-dM', '-E', '-x', 'c', dummy.abspath()]
+       env = conf.env.env or None
+       try:
+               p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
+               out = p.communicate()[0]
+       except Exception as e:
+               conf.fatal('Could not determine emscripten version %r: %s' % (cmd, e))
+
+       if not isinstance(out, str):
+               out = out.decode(sys.stdout.encoding or 'iso8859-1')
+
+       k = {}
+       out = out.splitlines()
+       for line in out:
+               lst = shlex.split(line)
+               if len(lst)>2:
+                       key = lst[1]
+                       val = lst[2]
+                       k[key] = val
+
+       if not ('__clang__' in k and 'EMSCRIPTEN' in k):
+               conf.fatal('Could not determine the emscripten compiler version.')
+
+       conf.env.DEST_OS = 'generic'
+       conf.env.DEST_BINFMT = 'elf'
+       conf.env.DEST_CPU = 'asm-js'
+       conf.env.CC_VERSION = (k['__clang_major__'], k['__clang_minor__'], k['__clang_patchlevel__'])
+       return k
+
+@conf
+def find_emscripten(conf):
+       cc = conf.find_program(['emcc'], var='CC')
+       conf.get_emscripten_version(cc)
+       conf.env.CC = cc
+       conf.env.CC_NAME = 'emscripten'
+       cxx = conf.find_program(['em++'], var='CXX')
+       conf.env.CXX = cxx
+       conf.env.CXX_NAME = 'emscripten'
+       conf.find_program(['emar'], var='AR')
+
+def configure(conf):
+       conf.find_emscripten()
+       conf.find_ar()
+       conf.gcc_common_flags()
+       conf.gxx_common_flags()
+       conf.cc_load_tools()
+       conf.cc_add_flags()
+       conf.cxx_load_tools()
+       conf.cxx_add_flags()
+       conf.link_add_flags()
+       conf.env.ARFLAGS = ['rcs']
+       conf.env.cshlib_PATTERN = '%s.js'
+       conf.env.cxxshlib_PATTERN = '%s.js'
+       conf.env.cstlib_PATTERN = '%s.bc'
+       conf.env.cxxstlib_PATTERN = '%s.bc'
+       conf.env.cprogram_PATTERN = '%s.html'
+       conf.env.cxxprogram_PATTERN = '%s.html'
+       conf.env.append_value('LINKFLAGS',['-Wl,--enable-auto-import'])
+
+@feature('c', 'cxx', 'acm', 'includes')
+@after_method('propagate_uselib_vars', 'process_source', 'apply_incpaths')
+def apply_incpaths_emscripten(self):
+       """
+       Emscripten doesn't like absolute include paths
+       """
+       # TODO: in waf 1.9 we can switch back to bldnode as the default since path_from handles cross-drive paths
+       if self.env.CC_NAME != 'emscripten' or self.env.CC_NAME != 'emscripten':
+               return
+       lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env['INCLUDES'])
+       self.includes_nodes = lst
+       self.env['INCPATHS'] = [x.path_from(self.bld.bldnode) for x in lst]
diff --git a/third_party/waf/waflib/extras/c_nec.py b/third_party/waf/waflib/extras/c_nec.py
new file mode 100644 (file)
index 0000000..87e0c05
--- /dev/null
@@ -0,0 +1,71 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+"""
+NEC SX Compiler for SX vector systems
+"""
+
+import re
+from waflib import Utils
+from waflib.Tools import ccroot,ar
+from waflib.Configure import conf
+
+from waflib.Tools import xlc # method xlc_common_flags
+from waflib.Tools.compiler_c import c_compiler
+c_compiler['linux'].append('c_nec')
+
+@conf
+def find_sxc(conf):
+       cc = conf.find_program(['sxcc'], var='CC')
+       conf.get_sxc_version(cc)
+       conf.env.CC = cc
+       conf.env.CC_NAME = 'sxcc'
+
+@conf
+def get_sxc_version(conf, fc):
+		# Parse the "sxcc -V" banner; depending on the installation the
+		# version text may appear on stdout or on stderr.
+		version_re = re.compile(r"C\+\+/SX\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
+		cmd = fc + ['-V']
+		p = Utils.subprocess.Popen(cmd, stdin=False, stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE, env=None)
+		out, err = p.communicate()
+
+		if out: match = version_re(out)
+		else: match = version_re(err)
+		if not match:
+				conf.fatal('Could not determine the NEC C compiler version.')
+		k = match.groupdict()
+		conf.env['C_VERSION'] = (k['major'], k['minor'])
+
+@conf
+def sxc_common_flags(conf):
+        v=conf.env
+        v['CC_SRC_F']=[]
+        v['CC_TGT_F']=['-c','-o']
+        if not v['LINK_CC']:v['LINK_CC']=v['CC']
+        v['CCLNK_SRC_F']=[]
+        v['CCLNK_TGT_F']=['-o']
+        v['CPPPATH_ST']='-I%s'
+        v['DEFINES_ST']='-D%s'
+        v['LIB_ST']='-l%s'
+        v['LIBPATH_ST']='-L%s'
+        v['STLIB_ST']='-l%s'
+        v['STLIBPATH_ST']='-L%s'
+        v['RPATH_ST']=''
+        v['SONAME_ST']=[]
+        v['SHLIB_MARKER']=[]
+        v['STLIB_MARKER']=[]
+        v['LINKFLAGS_cprogram']=['']
+        v['cprogram_PATTERN']='%s'
+        v['CFLAGS_cshlib']=['-fPIC']
+        v['LINKFLAGS_cshlib']=['']
+        v['cshlib_PATTERN']='lib%s.so'
+        v['LINKFLAGS_cstlib']=[]
+        v['cstlib_PATTERN']='lib%s.a'
+
+def configure(conf):
+       conf.find_sxc()
+       conf.find_program('sxar',VAR='AR')
+       conf.sxc_common_flags()
+       conf.cc_load_tools()
+       conf.cc_add_flags()
+       conf.link_add_flags()
diff --git a/third_party/waf/waflib/extras/cfg_altoptions.py b/third_party/waf/waflib/extras/cfg_altoptions.py
new file mode 100644 (file)
index 0000000..4a82a70
--- /dev/null
@@ -0,0 +1,109 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# Tool to extend c_config.check_cfg()
+
+__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
+__copyright__ = "Jérôme Carretero, 2014"
+
+"""
+
+This tool allows to work around the absence of ``*-config`` programs
+on systems, by keeping the same clean configuration syntax but inferring
+values or permitting their modification via the options interface.
+
+Note that pkg-config can also support setting ``PKG_CONFIG_PATH``,
+so you can put custom files in a folder containing new .pc files.
+This tool could also be implemented by taking advantage of this fact.
+
+Usage::
+
+   def options(opt):
+     opt.load('c_config_alt')
+     opt.add_package_option('package')
+
+   def configure(cfg):
+     conf.load('c_config_alt')
+     conf.check_cfg(...)
+
+Known issues:
+
+- Behavior with different build contexts...
+
+"""
+
+import os
+import functools
+from waflib import Configure, Options, Errors
+
+def name_to_dest(x):
+       return x.lower().replace('-', '_')
+
+
+def options(opt):
+       def x(opt, param):
+               dest = name_to_dest(param)
+               gr = opt.get_option_group("configure options")
+               gr.add_option('--%s-root' % dest,
+                help="path containing include and lib subfolders for %s" \
+                 % param,
+               )
+
+       opt.add_package_option = functools.partial(x, opt)
+
+
+check_cfg_old = getattr(Configure.ConfigurationContext, 'check_cfg')
+
+@Configure.conf
+def check_cfg(conf, *k, **kw):
+       if k:
+               lst = k[0].split()
+               kw['package'] = lst[0]
+               kw['args'] = ' '.join(lst[1:])
+
+       if not 'package' in kw:
+               return check_cfg_old(conf, **kw)
+
+       package = kw['package']
+
+       package_lo = name_to_dest(package)
+       package_hi = package.upper().replace('-', '_') # TODO FIXME
+       package_hi = kw.get('uselib_store', package_hi)
+
+       def check_folder(path, name):
+               try:
+                       assert os.path.isdir(path)
+               except AssertionError:
+                       raise Errors.ConfigurationError(
+                               "%s_%s (%s) is not a folder!" \
+                               % (package_lo, name, path))
+               return path
+
+       root = getattr(Options.options, '%s_root' % package_lo, None)
+
+       if root is None:
+               return check_cfg_old(conf, **kw)
+       else:
+               def add_manual_var(k, v):
+                       conf.start_msg('Adding for %s a manual var' % (package))
+                       conf.env["%s_%s" % (k, package_hi)] = v
+                       conf.end_msg("%s = %s" % (k, v))
+
+
+               check_folder(root, 'root')
+
+               pkg_inc = check_folder(os.path.join(root, "include"), 'inc')
+               add_manual_var('INCLUDES', [pkg_inc])
+               pkg_lib = check_folder(os.path.join(root, "lib"), 'libpath')
+               add_manual_var('LIBPATH', [pkg_lib])
+               add_manual_var('LIB', [package])
+
+               for x in kw.get('manual_deps', []):
+                       for k, v in sorted(conf.env.get_merged_dict().items()):
+                               if k.endswith('_%s' % x):
+                                       k = k.replace('_%s' % x, '')
+                                       conf.start_msg('Adding for %s a manual dep' \
+                                        %(package))
+                                       conf.env["%s_%s" % (k, package_hi)] += v
+                                       conf.end_msg('%s += %s' % (k, v))
+
+               return True
diff --git a/third_party/waf/waflib/extras/cfg_cross_gnu.py b/third_party/waf/waflib/extras/cfg_cross_gnu.py
new file mode 100644 (file)
index 0000000..0fb2efb
--- /dev/null
@@ -0,0 +1,176 @@
+#!/usr/bin/python
+# -*- coding: utf-8 vi:ts=4:noexpandtab
+# Tool to provide dedicated variables for cross-compilation
+
+__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
+__copyright__ = "Jérôme Carretero, 2014"
+
+"""
+
+This tool allows you to use environment variables to define cross-compilation
+settings, mostly useful when you use build variants.
+
+The variables are obtained from the environment in 3 ways:
+
+1. By defining CHOST, they can be derived as ${CHOST}-${TOOL}
+2. By defining HOST_x
+3. By defining ${CHOST//-/_}_x
+
+Usage:
+
+- In your build script::
+
+    def configure(conf):
+      ...
+      conf.load('cfg_cross_gnu')
+      for variant in x_variants:
+        conf.xcheck_host()
+        conf.xcheck_host_var('POUET')
+        ...
+
+      ...
+
+- Then::
+
+    CHOST=arm-hardfloat-linux-gnueabi waf configure
+
+    env arm-hardfloat-linux-gnueabi-CC="clang -..." waf configure
+
+    CFLAGS=... CHOST=arm-hardfloat-linux-gnueabi HOST_CFLAGS=-g waf configure
+
+    HOST_CC="clang -..." waf configure
+
+"""
+
+import os
+from waflib import Utils, Configure
+
+try:
+       from shlex import quote
+except ImportError:
+       from pipes import quote
+
+def get_chost_stuff(conf):
+	"""
+	Get the CHOST environment variable contents
+
+	:return: tuple (chost, chost_envar); ``chost`` is the first word of
+	         ``conf.env.CHOST`` (the target triplet) and ``chost_envar``
+	         is the same string with ``-`` replaced by ``_`` so it can
+	         prefix environment variable names; both None if CHOST unset
+	"""
+	chost = None
+	chost_envar = None
+	if conf.env.CHOST:
+		chost = conf.env.CHOST[0]
+		chost_envar = chost.replace('-', '_')
+	return chost, chost_envar
+
+
+@Configure.conf
+def xcheck_envar(conf, name, wafname=None, cross=False):
+	"""
+	Copy the environment variable ``name`` into ``conf.env[wafname]``.
+
+	:param name: environment variable to look up
+	:param wafname: destination key in conf.env (defaults to ``name``)
+	:param cross: if True, label the message as cross-compilation related
+	"""
+	wafname = wafname or name
+	envar = os.environ.get(name, None)
+
+	# variable absent: keep whatever is already configured
+	if envar is None:
+		return
+
+	# an empty string is preserved as [''] so an explicit "VAR=" still
+	# overrides the default instead of being dropped by to_list()
+	value = Utils.to_list(envar) if envar != '' else [envar]
+
+	conf.env[wafname] = value
+	if cross:
+		pretty = 'cross-compilation %s' % wafname
+	else:
+		pretty = wafname
+	conf.msg('Will use %s' % pretty,
+	 " ".join(quote(x) for x in value))
+
+@Configure.conf
+def xcheck_host_prog(conf, name, tool, wafname=None):
+	"""
+	Configure the program ``wafname`` (e.g. CC) for cross-compilation.
+
+	Lookup order: ${CHOST//-/_}_<name>, then HOST_<name>, then any value
+	already present in conf.env, and finally the derived ${CHOST}-<tool>.
+
+	:param name: suffix of the environment variables to inspect
+	:param tool: GNU tool name used to derive ${CHOST}-<tool>
+	:param wafname: destination key in conf.env (defaults to ``name``)
+	"""
+	wafname = wafname or name
+
+	chost, chost_envar = get_chost_stuff(conf)
+
+	specific = None
+	if chost:
+		specific = os.environ.get('%s_%s' % (chost_envar, name), None)
+
+	if specific:
+		value = Utils.to_list(specific)
+		# NOTE(review): this branch appends (+=) while the HOST_ branch
+		# below overwrites - confirm the asymmetry is intended
+		conf.env[wafname] += value
+		conf.msg('Will use cross-compilation %s from %s_%s' \
+		 % (name, chost_envar, name),
+		 " ".join(quote(x) for x in value))
+		return
+	else:
+		envar = os.environ.get('HOST_%s' % name, None)
+		if envar is not None:
+			value = Utils.to_list(envar)
+			conf.env[wafname] = value
+			conf.msg('Will use cross-compilation %s from HOST_%s' \
+			 % (name, name),
+			 " ".join(quote(x) for x in value))
+			return
+
+	# a value configured earlier (e.g. by another tool) wins over CHOST
+	if conf.env[wafname]:
+		return
+
+	value = None
+	if chost:
+		value = '%s-%s' % (chost, tool)
+
+	if value:
+		# NOTE(review): stored as a plain string here, unlike the list
+		# values in the branches above - confirm callers accept both
+		conf.env[wafname] = value
+		conf.msg('Will use cross-compilation %s from CHOST' \
+		 % wafname, value)
+
+@Configure.conf
+def xcheck_host_envar(conf, name, wafname=None):
+	"""
+	Configure the variable ``wafname`` (e.g. CFLAGS) for cross-compilation.
+
+	Lookup order: ${CHOST//-/_}_<name> first, then HOST_<name>.
+
+	:param name: suffix of the environment variables to inspect
+	:param wafname: destination key in conf.env (defaults to ``name``)
+	"""
+	wafname = wafname or name
+
+	chost, chost_envar = get_chost_stuff(conf)
+
+	specific = None
+	if chost:
+		specific = os.environ.get('%s_%s' % (chost_envar, name), None)
+
+	if specific:
+		value = Utils.to_list(specific)
+		# appended to (not replacing) any previously configured value
+		conf.env[wafname] += value
+		conf.msg('Will use cross-compilation %s from %s_%s' \
+		 % (name, chost_envar, name),
+		 " ".join(quote(x) for x in value))
+		return
+
+
+	envar = os.environ.get('HOST_%s' % name, None)
+	if envar is None:
+		return
+
+	# empty string kept as [''] so "HOST_X=" still overrides the default
+	value = Utils.to_list(envar) if envar != '' else [envar]
+
+	conf.env[wafname] = value
+	conf.msg('Will use cross-compilation %s from HOST_%s' \
+	 % (name, name),
+	 " ".join(quote(x) for x in value))
+
+
+@Configure.conf
+def xcheck_host(conf):
+	"""
+	Configure the usual cross-compilation tools and variables (CC, CXX,
+	AR, CFLAGS, ...) from the CHOST / HOST_* / ${CHOST//-/_}_*
+	environment variables, then propagate the pkg-config search paths
+	into the environment used for child processes.
+	"""
+	conf.xcheck_envar('CHOST', cross=True)
+	conf.xcheck_host_prog('CC', 'gcc')
+	conf.xcheck_host_prog('CXX', 'g++')
+	conf.xcheck_host_prog('LINK_CC', 'gcc')
+	conf.xcheck_host_prog('LINK_CXX', 'g++')
+	conf.xcheck_host_prog('AR', 'ar')
+	conf.xcheck_host_prog('AS', 'as')
+	conf.xcheck_host_prog('LD', 'ld')
+	conf.xcheck_host_envar('CFLAGS')
+	conf.xcheck_host_envar('CXXFLAGS')
+	conf.xcheck_host_envar('LDFLAGS', 'LINKFLAGS')
+	conf.xcheck_host_envar('LIB')
+	conf.xcheck_host_envar('PKG_CONFIG_LIBDIR')
+	conf.xcheck_host_envar('PKG_CONFIG_PATH')
+
+	# conf.env.env is the environment passed to spawned commands;
+	# seed it from os.environ only when nothing set it up before
+	if not conf.env.env:
+		conf.env.env = {}
+		conf.env.env.update(os.environ)
+	if conf.env.PKG_CONFIG_LIBDIR:
+		conf.env.env['PKG_CONFIG_LIBDIR'] = conf.env.PKG_CONFIG_LIBDIR[0]
+	if conf.env.PKG_CONFIG_PATH:
+		conf.env.env['PKG_CONFIG_PATH'] = conf.env.PKG_CONFIG_PATH[0]
diff --git a/third_party/waf/waflib/extras/clang_compilation_database.py b/third_party/waf/waflib/extras/clang_compilation_database.py
new file mode 100644 (file)
index 0000000..e7230d4
--- /dev/null
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Christoph Koke, 2013
+
+"""
+Writes the c and cpp compile commands into build/compile_commands.json
+see http://clang.llvm.org/docs/JSONCompilationDatabase.html
+
+Usage:
+
+    def configure(conf):
+        conf.load('compiler_cxx')
+        ...
+        conf.load('clang_compilation_database')
+"""
+
+import sys, os, json, shlex, pipes
+from waflib import Logs, TaskGen
+from waflib.Tools import c, cxx
+
+if sys.hexversion >= 0x3030000:
+       quote = shlex.quote
+else:
+       quote = pipes.quote
+
+@TaskGen.feature('*')
+@TaskGen.after_method('process_use')
+def collect_compilation_db_tasks(self):
+	"Add a compilation database entry for compiled tasks"
+	try:
+		clang_db = self.bld.clang_compilation_database_tasks
+	except AttributeError:
+		# first task generator processed: create the shared list and
+		# register the post-build function exactly once
+		clang_db = self.bld.clang_compilation_database_tasks = []
+		self.bld.add_post_fun(write_compilation_database)
+
+	# only C/C++ compile tasks are recorded (link tasks are irrelevant)
+	for task in getattr(self, 'compiled_tasks', []):
+		if isinstance(task, (c.c, cxx.cxx)):
+			clang_db.append(task)
+
+def write_compilation_database(ctx):
+	"Write the clang compilation database as JSON"
+	database_file = ctx.bldnode.make_node('compile_commands.json')
+	Logs.info("Build commands will be stored in %s" % database_file.path_from(ctx.path))
+	try:
+		# a waf Node provides read(), so json.load() accepts it; entries
+		# from a previous run are merged, keyed by file name below
+		root = json.load(database_file)
+	except IOError:
+		root = []
+	clang_db = dict((x["file"], x) for x in root)
+	for task in getattr(ctx, 'clang_compilation_database_tasks', []):
+		try:
+			cmd = task.last_cmd
+		except AttributeError:
+			# task did not run in this build, keep any old entry
+			continue
+		directory = getattr(task, 'cwd', ctx.variant_dir)
+		f_node = task.inputs[0]
+		filename = os.path.relpath(f_node.abspath(), directory)
+		cmd = " ".join(map(quote, cmd))
+		entry = {
+			"directory": directory,
+			"command": cmd,
+			"file": filename,
+		}
+		clang_db[filename] = entry
+	root = list(clang_db.values())
+	database_file.write(json.dumps(root, indent=2))
diff --git a/third_party/waf/waflib/extras/codelite.py b/third_party/waf/waflib/extras/codelite.py
new file mode 100644 (file)
index 0000000..c12ae4b
--- /dev/null
@@ -0,0 +1,880 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# CodeLite Project
+# Christian Klein (chrikle@berlios.de)
+# Created: Jan 2012
+# As a template for this file I used msvs.py
+# I hope this template will work properly
+
+"""
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+1. Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+3. The name of the author may not be used to endorse or promote products
+   derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+"""
+
+"""
+
+
+To add this tool to your project:
+def options(opt):
+        opt.load('codelite')
+
+It can be a good idea to add the sync_exec tool too.
+
+To generate solution files:
+$ waf configure codelite
+
+To customize the outputs, provide subclasses in your wscript files:
+
+from waflib.extras import codelite
+class vsnode_target(codelite.vsnode_target):
+        def get_build_command(self, props):
+                # likely to be required
+                return "waf.bat build"
+        def collect_source(self):
+                # likely to be required
+                ...
+class codelite_bar(codelite.codelite_generator):
+        def init(self):
+                codelite.codelite_generator.init(self)
+                self.vsnode_target = vsnode_target
+
+The codelite class re-uses the same build() function for reading the targets (task generators),
+you may therefore specify codelite settings on the context object:
+
+def build(bld):
+        bld.codelite_solution_name = 'foo.workspace'
+        bld.waf_command = 'waf.bat'
+        bld.projects_dir = bld.srcnode.make_node('')
+        bld.projects_dir.mkdir()
+
+
+ASSUMPTIONS:
+* a project can be either a directory or a target, project files are written only for targets that have source files
+* each project is a vcxproj file, therefore the project uuid needs only to be a hash of the absolute path
+"""
+
+import os, re, sys
+import uuid # requires python 2.5
+from waflib.Build import BuildContext
+from waflib import Utils, TaskGen, Logs, Task, Context, Node, Options
+
+HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)'
+
+PROJECT_TEMPLATE = r'''<?xml version="1.0" encoding="utf-8"?>
+<CodeLite_Project Name="${project.name}" InternalType="Library">
+  <Plugins>
+    <Plugin Name="qmake">
+      <![CDATA[00010001N0005Release000000000000]]>
+    </Plugin>
+  </Plugins>
+  <Description/>
+  <Dependencies/>
+  <VirtualDirectory Name="src">
+  ${for x in project.source}
+  ${if (project.get_key(x)=="sourcefile")}
+  <File Name="${x.abspath()}"/>
+  ${endif}
+  ${endfor}
+  </VirtualDirectory>
+  <VirtualDirectory Name="include">
+  ${for x in project.source}
+  ${if (project.get_key(x)=="headerfile")}
+  <File Name="${x.abspath()}"/>
+  ${endif}
+  ${endfor}
+  </VirtualDirectory>
+  <Settings Type="Dynamic Library">
+    <GlobalSettings>
+      <Compiler Options="" C_Options="">
+        <IncludePath Value="."/>
+      </Compiler>
+      <Linker Options="">
+        <LibraryPath Value="."/>
+      </Linker>
+      <ResourceCompiler Options=""/>
+    </GlobalSettings>
+    <Configuration Name="Release" CompilerType="gnu gcc" ReleasegerType="GNU gdb Releaseger" Type="Dynamic Library" BuildCmpWithGlobalSettings="append" BuildLnkWithGlobalSettings="append" BuildResWithGlobalSettings="append">
+      <Compiler Options="" C_Options="" Required="yes" PreCompiledHeader="" PCHInCommandLine="no" UseDifferentPCHFlags="no" PCHFlags="">
+        <IncludePath Value="."/>
+        <IncludePath Value="."/>
+      </Compiler>
+      <Linker Options="" Required="yes">
+        <LibraryPath Value=""/>
+      </Linker>
+      <ResourceCompiler Options="" Required="no"/>
+      <General OutputFile="${xml:project.build_properties[0].output_file}" IntermediateDirectory="" Command="" CommandArguments="" PauseExecWhenProcTerminates="yes"/>
+      <Environment EnvVarSetName="&lt;Use Defaults&gt;" DbgSetName="&lt;Use Defaults&gt;">
+        <![CDATA[]]>
+      </Environment>
+      <Releaseger IsRemote="no" RemoteHostName="" RemoteHostPort="" ReleasegerPath="">
+        <PostConnectCommands/>
+        <StartupCommands/>
+      </Releaseger>
+      <PreBuild/>
+      <PostBuild/>
+      <CustomBuild Enabled="yes">
+        $b = project.build_properties[0]}
+        <RebuildCommand>${xml:project.get_rebuild_command(project.build_properties[0])}</RebuildCommand>
+        <CleanCommand>${xml:project.get_clean_command(project.build_properties[0])}</CleanCommand>
+        <BuildCommand>${xml:project.get_build_command(project.build_properties[0])}</BuildCommand>
+        <Target Name="Install">${xml:project.get_install_command(project.build_properties[0])}</Target>
+        <Target Name="Build and Install">${xml:project.get_build_and_install_command(project.build_properties[0])}</Target>
+        <Target Name="Build All">${xml:project.get_build_all_command(project.build_properties[0])}</Target>
+        <Target Name="Rebuild All">${xml:project.get_rebuild_all_command(project.build_properties[0])}</Target>
+        <Target Name="Clean All">${xml:project.get_clean_all_command(project.build_properties[0])}</Target>
+        <Target Name="Build and Install All">${xml:project.get_build_and_install_all_command(project.build_properties[0])}</Target>
+        <PreprocessFileCommand/>
+        <SingleFileCommand/>
+        <MakefileGenerationCommand/>
+        <ThirdPartyToolName>None</ThirdPartyToolName>
+        <WorkingDirectory/>
+      </CustomBuild>
+      <AdditionalRules>
+        <CustomPostBuild/>
+        <CustomPreBuild/>
+      </AdditionalRules>
+      <Completion>
+        <ClangCmpFlags/>
+        <ClangPP/>
+        <SearchPaths/>
+      </Completion>
+    </Configuration>
+    <Configuration Name="Release" CompilerType="gnu gcc" ReleasegerType="GNU gdb Releaseger" Type="" BuildCmpWithGlobalSettings="append" BuildLnkWithGlobalSettings="append" BuildResWithGlobalSettings="append">
+      <Compiler Options="" C_Options="" Required="yes" PreCompiledHeader="" PCHInCommandLine="no" UseDifferentPCHFlags="no" PCHFlags="">
+        <IncludePath Value="."/>
+      </Compiler>
+      <Linker Options="" Required="yes"/>
+      <ResourceCompiler Options="" Required="no"/>
+      <General OutputFile="" IntermediateDirectory="./Release" Command="" CommandArguments="" UseSeparateReleaseArgs="no" ReleaseArguments="" WorkingDirectory="$(IntermediateDirectory)" PauseExecWhenProcTerminates="yes"/>
+      <Environment EnvVarSetName="&lt;Use Defaults&gt;" DbgSetName="&lt;Use Defaults&gt;">
+        <![CDATA[
+
+
+
+      ]]>
+      </Environment>
+      <Releaseger IsRemote="no" RemoteHostName="" RemoteHostPort="" ReleasegerPath="">
+        <PostConnectCommands/>
+        <StartupCommands/>
+      </Releaseger>
+      <PreBuild/>
+      <PostBuild/>
+      <CustomBuild Enabled="no">
+        <RebuildCommand/>
+        <CleanCommand/>
+        <BuildCommand/>
+        <PreprocessFileCommand/>
+        <SingleFileCommand/>
+        <MakefileGenerationCommand/>
+        <ThirdPartyToolName/>
+        <WorkingDirectory/>
+      </CustomBuild>
+      <AdditionalRules>
+        <CustomPostBuild/>
+        <CustomPreBuild/>
+      </AdditionalRules>
+      <Completion>
+        <ClangCmpFlags/>
+        <ClangPP/>
+        <SearchPaths/>
+      </Completion>
+    </Configuration>
+  </Settings>
+</CodeLite_Project>'''
+
+
+
+
+SOLUTION_TEMPLATE = '''<?xml version="1.0" encoding="utf-8"?>
+<CodeLite_Workspace Name="${getattr(project, 'codelite_solution_name', None)[:-10]}" Database="./${getattr(project, 'codelite_solution_name', None)[:-10]}.tags">
+${for p in project.all_projects}
+  <Project Name = "${p.name}" Path = "${p.title}" Active="No"/>
+${endfor}
+  <BuildMatrix>
+    <WorkspaceConfiguration Name="Release" Selected="yes">
+${for p in project.all_projects}
+      <Project Name="${p.name}" ConfigName="Release"/>
+${endfor}
+    </WorkspaceConfiguration>
+  </BuildMatrix>
+</CodeLite_Workspace>'''
+
+
+
+COMPILE_TEMPLATE = '''def f(project):
+        lst = []
+        def xml_escape(value):
+                return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
+
+        %s
+
+        #f = open('cmd.txt', 'w')
+        #f.write(str(lst))
+        #f.close()
+        return ''.join(lst)
+'''
+# matches backslashes, $$ (escaped dollar sign) and ${...} substitutions
+reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<code>[^}]*?)\})", re.M)
+def compile_template(line):
+        """
+        Compile a template expression into a python function (like jsps, but way shorter)
+
+        Directives inside ${...}: if/for open a block, endif/endfor close
+        it, else/elif continue it, py:<stmt> emits a raw statement,
+        xml:<expr> appends the xml-escaped value of the expression;
+        anything else is evaluated and appended verbatim.
+        """
+        extr = []
+        def repl(match):
+                g = match.group
+                if g('dollar'): return "$"
+                elif g('backslash'):
+                        return "\\"
+                elif g('subst'):
+                        extr.append(g('code'))
+                        return "<<|@|>>"
+                return None
+
+        # literal chunks and code chunks alternate after the split:
+        # params[x] is the text preceding the expression extr[x]
+        line2 = reg_act.sub(repl, line)
+        params = line2.split('<<|@|>>')
+        assert(extr)
+
+
+        indent = 0
+        buf = []
+        app = buf.append
+
+        # NOTE(review): shadows the plain 'app' alias just above with an
+        # indentation-aware writer; the alias is effectively dead code
+        def app(txt):
+                buf.append(indent * '\t' + txt)
+
+        for x in range(len(extr)):
+                if params[x]:
+                        app("lst.append(%r)" % params[x])
+
+                f = extr[x]
+                if f.startswith('if') or f.startswith('for'):
+                        app(f + ':')
+                        indent += 1
+                elif f.startswith('py:'):
+                        app(f[3:])
+                elif f.startswith('endif') or f.startswith('endfor'):
+                        indent -= 1
+                elif f.startswith('else') or f.startswith('elif'):
+                        indent -= 1
+                        app(f + ':')
+                        indent += 1
+                elif f.startswith('xml:'):
+                        app('lst.append(xml_escape(%s))' % f[4:])
+                else:
+                        #app('lst.append((%s) or "cannot find %s")' % (f, f))
+                        app('lst.append(%s)' % f)
+
+        if extr:
+                if params[-1]:
+                        app("lst.append(%r)" % params[-1])
+
+        # assemble the generated body and turn it into a callable
+        fun = COMPILE_TEMPLATE % "\n\t".join(buf)
+        #print(fun)
+        return Task.funex(fun)
+
+
+re_blank = re.compile('(\n|\r|\\s)*\n', re.M) # runs of blank lines
+def rm_blank_lines(txt):
+        """Collapse runs of blank lines, producing CRLF line endings."""
+        txt = re_blank.sub('\r\n', txt)
+        return txt
+
+BOM = '\xef\xbb\xbf'
+try:
+        BOM = bytes(BOM, 'iso8859-1') # python 3
+except NameError:
+        pass
+
+def stealth_write(self, data, flags='wb'):
+        """
+        Write the node only if the content actually changed, so that
+        unmodified project files keep their timestamps.
+        """
+        try:
+                unicode
+        except NameError:
+                data = data.encode('utf-8') # python 3
+        else:
+                data = data.decode(sys.getfilesystemencoding(), 'replace')
+                data = data.encode('utf-8')
+
+        # NOTE(review): both sides of the 'or' test '.project'; the second
+        # condition is redundant and probably meant another extension
+        # (e.g. '.workspace') - confirm against upstream
+        if self.name.endswith('.project') or self.name.endswith('.project'):
+                data = BOM + data
+
+        try:
+                txt = self.read(flags='rb')
+                if txt != data:
+                        raise ValueError('must write')
+        except (IOError, ValueError):
+                self.write(data, flags=flags)
+        else:
+                Logs.debug('codelite: skipping %s' % self.abspath())
+# make the helper available on every Node instance
+Node.Node.stealth_write = stealth_write
+
+re_quote = re.compile("[^a-zA-Z0-9-]")
+def quote(s):
+        """Replace characters unsafe for file/project names with '_'."""
+        return re_quote.sub("_", s)
+
+def xml_escape(value):
+        """Escape a string for use in XML text or attribute values."""
+        return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
+
+def make_uuid(v, prefix = None):
+        """
+        simple utility function
+
+        Derive a stable UUID from the md5 of ``v`` (dicts are serialized
+        with sorted keys so the digest is deterministic); when given,
+        ``prefix`` replaces the first 8 hex digits of the digest.
+        """
+        if isinstance(v, dict):
+                keys = list(v.keys())
+                keys.sort()
+                tmp = str([(k, v[k]) for k in keys])
+        else:
+                tmp = str(v)
+        d = Utils.md5(tmp.encode()).hexdigest().upper()
+        if prefix:
+                d = '%s%s' % (prefix, d[8:])
+        gid = uuid.UUID(d, version = 4)
+        return str(gid).upper()
+
+def diff(node, fromnode):
+        # difference between two nodes, but with "(..)" instead of ".."
+        # i.e. the relative path components from 'fromnode' to 'node'
+        c1 = node
+        c2 = fromnode
+
+        c1h = c1.height()
+        c2h = c2.height()
+
+        lst = []
+        up = 0
+
+        # walk the deeper node upwards until both are at the same height
+        while c1h > c2h:
+                lst.append(c1.name)
+                c1 = c1.parent
+                c1h -= 1
+
+        while c2h > c1h:
+                up += 1
+                c2 = c2.parent
+                c2h -= 1
+
+        # then climb both in lockstep until the common ancestor is found
+        while id(c1) != id(c2):
+                lst.append(c1.name)
+                up += 1
+
+                c1 = c1.parent
+                c2 = c2.parent
+
+        for i in range(up):
+                lst.append('(..)')
+        lst.reverse()
+        return tuple(lst)
+
+class build_property(object):
+        # plain attribute bag for per-configuration build settings
+        # (configuration, platform, output_file, ...) filled in
+        # vsnode_project.collect_properties()
+        pass
+
+class vsnode(object):
+        """
+        Abstract class representing visual studio elements
+        We assume that all visual studio nodes have a uuid and a parent
+        """
+        def __init__(self, ctx):
+                self.ctx = ctx # codelite context
+                self.name = '' # string, mandatory
+                self.vspath = '' # path in visual studio (name for dirs, absolute path for projects)
+                self.uuid = '' # string, mandatory
+                self.parent = None # parent node for visual studio nesting
+
+        def get_waf(self):
+                """
+                Override in subclasses...
+
+                Returns the waf command to embed in generated build commands.
+                """
+                return '%s/%s' % (self.ctx.srcnode.abspath(), getattr(self.ctx, 'waf_command', 'waf'))
+
+        def ptype(self):
+                """
+                Return a special uuid for projects written in the solution file
+                """
+                pass
+
+        def write(self):
+                """
+                Write the project file, by default, do nothing
+                """
+                pass
+
+        def make_uuid(self, val):
+                """
+                Alias for creating uuid values easily (the templates cannot access global variables)
+                """
+                return make_uuid(val)
+
+class vsnode_vsdir(vsnode):
+        """
+        Nodes representing visual studio folders (which do not match the filesystem tree!)
+        """
+        VS_GUID_SOLUTIONFOLDER = "2150E333-8FDC-42A3-9474-1A3956D46DE8"
+        def __init__(self, ctx, uuid, name, vspath=''):
+                vsnode.__init__(self, ctx)
+                self.title = self.name = name
+                self.uuid = uuid
+                # display path defaults to the folder name
+                self.vspath = vspath or name
+
+        def ptype(self):
+                return self.VS_GUID_SOLUTIONFOLDER
+
+class vsnode_project(vsnode):
+        """
+        Abstract class representing visual studio project elements
+        A project is assumed to be writable, and has a node representing the file to write to
+        """
+        VS_GUID_VCPROJ = "8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942"
+        def ptype(self):
+                return self.VS_GUID_VCPROJ
+
+        def __init__(self, ctx, node):
+                vsnode.__init__(self, ctx)
+                self.path = node
+                self.uuid = make_uuid(node.abspath())
+                self.name = node.name
+                self.title = self.path.abspath()
+                self.source = [] # list of node objects
+                self.build_properties = [] # list of properties (nmake commands, output dir, etc)
+
+        def dirs(self):
+                """
+                Get the list of parent folders of the source files (header files included)
+                for writing the filters
+
+                NOTE(review): relies on self.tg, which only subclasses such
+                as vsnode_target define - confirm callers
+                """
+                lst = []
+                def add(x):
+                        # recurse up to (but not including) the tg path
+                        if x.height() > self.tg.path.height() and x not in lst:
+                                lst.append(x)
+                                add(x.parent)
+                for x in self.source:
+                        add(x.parent)
+                return lst
+
+        def write(self):
+                Logs.debug('codelite: creating %r' % self.path)
+                #print "self.name:",self.name
+
+                # first write the project file
+                template1 = compile_template(PROJECT_TEMPLATE)
+                proj_str = template1(self)
+                proj_str = rm_blank_lines(proj_str)
+                self.path.stealth_write(proj_str)
+
+                # then write the filter
+                #template2 = compile_template(FILTER_TEMPLATE)
+                #filter_str = template2(self)
+                #filter_str = rm_blank_lines(filter_str)
+                #tmp = self.path.parent.make_node(self.path.name + '.filters')
+                #tmp.stealth_write(filter_str)
+
+        def get_key(self, node):
+                """
+                required for writing the source files
+
+                Returns 'sourcefile' or 'headerfile' for the template.
+                """
+                name = node.name
+                if name.endswith('.cpp') or name.endswith('.c'):
+                        return 'sourcefile'
+                return 'headerfile'
+
+        def collect_properties(self):
+                """
+                Returns a list of triplet (configuration, platform, output_directory)
+                """
+                ret = []
+                for c in self.ctx.configurations:
+                        for p in self.ctx.platforms:
+                                x = build_property()
+                                x.outdir = ''
+
+                                x.configuration = c
+                                x.platform = p
+
+                                x.preprocessor_definitions = ''
+                                x.includes_search_path = ''
+
+                                # can specify "deploy_dir" too
+                                ret.append(x)
+                self.build_properties = ret
+
+        def get_build_params(self, props):
+                # tuple (waf command, extra options) consumed by the
+                # "%s ... %s" format strings of the command getters below
+                opt = ''
+                return (self.get_waf(), opt)
+
+        def get_build_command(self, props):
+                return "%s build %s" % self.get_build_params(props)
+
+        def get_clean_command(self, props):
+                return "%s clean %s" % self.get_build_params(props)
+
+        def get_rebuild_command(self, props):
+                return "%s clean build %s" % self.get_build_params(props)
+
+        def get_install_command(self, props):
+                return "%s install %s" % self.get_build_params(props)
+        def get_build_and_install_command(self, props):
+                return "%s build install %s" % self.get_build_params(props)
+
+        def get_build_and_install_all_command(self, props):
+                return "%s build install" % self.get_build_params(props)[0]
+
+        def get_clean_all_command(self, props):
+                return "%s clean" % self.get_build_params(props)[0]
+
+        def get_build_all_command(self, props):
+                return "%s build" % self.get_build_params(props)[0]
+
+        def get_rebuild_all_command(self, props):
+                return "%s clean build" % self.get_build_params(props)[0]
+
+        def get_filter_name(self, node):
+                # backslash-joined path of the node relative to the tg path
+                lst = diff(node, self.tg.path)
+                return '\\'.join(lst) or '.'
+
+class vsnode_alias(vsnode_project):
+        # base class for pseudo-projects (aliases) that are not backed by
+        # a task generator; they reuse the project machinery with a name
+        def __init__(self, ctx, node, name):
+                vsnode_project.__init__(self, ctx, node)
+                self.name = name
+                self.output_file = ''
+
+class vsnode_build_all(vsnode_alias):
+        """
+        Fake target used to emulate the behaviour of "make all" (starting one process by target is slow)
+        This is the only alias enabled by default
+        """
+        def __init__(self, ctx, node, name='build_all_projects'):
+                vsnode_alias.__init__(self, ctx, node, name)
+                self.is_active = True
+
+class vsnode_install_all(vsnode_alias):
+        """
+        Fake target used to emulate the behaviour of "make install"
+        """
+        def __init__(self, ctx, node, name='install_all_projects'):
+                vsnode_alias.__init__(self, ctx, node, name)
+
+        # override the plain build/clean/rebuild commands to add 'install'
+        def get_build_command(self, props):
+                return "%s build install %s" % self.get_build_params(props)
+
+        def get_clean_command(self, props):
+                return "%s clean %s" % self.get_build_params(props)
+
+        def get_rebuild_command(self, props):
+                return "%s clean build install %s" % self.get_build_params(props)
+
+class vsnode_project_view(vsnode_alias):
+        """
+        Fake target used to emulate a file system view
+        """
+        def __init__(self, ctx, node, name='project_view'):
+                vsnode_alias.__init__(self, ctx, node, name)
+                self.tg = self.ctx() # fake one, cannot remove
+                # glob exclusion patterns: waf's own files and IDE artifacts
+                self.exclude_files = Node.exclude_regs + '''
+waf-1.8.*
+waf3-1.8.*/**
+.waf-1.8.*
+.waf3-1.8.*/**
+**/*.sdf
+**/*.suo
+**/*.ncb
+**/%s
+                ''' % Options.lockfile
+
+        def collect_source(self):
+                # this is likely to be slow
+                self.source = self.ctx.srcnode.ant_glob('**', excl=self.exclude_files)
+
+        def get_build_command(self, props):
+                # re-run whatever command the user invoked (self.ctx.cmd)
+                params = self.get_build_params(props) + (self.ctx.cmd,)
+                return "%s %s %s" % params
+
+        def get_clean_command(self, props):
+                return ""
+
+        def get_rebuild_command(self, props):
+                return self.get_build_command(props)
+
+class vsnode_target(vsnode_project):
+        """
+        CodeLite project representing a targets (programs, libraries, etc) and bound
+        to a task generator
+        """
+        def __init__(self, ctx, tg):
+                """
+                A project is more or less equivalent to a file/folder
+                """
+                base = getattr(ctx, 'projects_dir', None) or tg.path
+                node = base.make_node(quote(tg.name) + ctx.project_extension) # the project file as a Node
+                vsnode_project.__init__(self, ctx, node)
+                self.name = quote(tg.name)
+                self.tg     = tg  # task generator
+
+        def get_build_params(self, props):
+                """
+                Override the default to add the target name
+                """
+                opt = ''
+                if getattr(self, 'tg', None):
+                        opt += " --targets=%s" % self.tg.name
+                return (self.get_waf(), opt)
+
+        def collect_source(self):
+                # gather the target's source files plus headers found under
+                # the directories listed in tg.codelite_includes
+                tg = self.tg
+                source_files = tg.to_nodes(getattr(tg, 'source', []))
+                include_dirs = Utils.to_list(getattr(tg, 'codelite_includes', []))
+                include_files = []
+                for x in include_dirs:
+                        if isinstance(x, str):
+                                x = tg.path.find_node(x)
+                        if x:
+                                lst = [y for y in x.ant_glob(HEADERS_GLOB, flat=False)]
+                                include_files.extend(lst)
+
+                # remove duplicates
+                self.source.extend(list(set(source_files + include_files)))
+                self.source.sort(key=lambda x: x.abspath())
+
+        def collect_properties(self):
+                """
+                CodeLite projects are associated with platforms and configurations (for building especially)
+                """
+                super(vsnode_target, self).collect_properties()
+                for x in self.build_properties:
+                        x.outdir = self.path.parent.abspath()
+                        x.preprocessor_definitions = ''
+                        x.includes_search_path = ''
+
+                        # targets without a link task keep empty values
+                        try:
+                                tsk = self.tg.link_task
+                        except AttributeError:
+                                pass
+                        else:
+                                x.output_file = tsk.outputs[0].abspath()
+                                x.preprocessor_definitions = ';'.join(tsk.env.DEFINES)
+                                x.includes_search_path = ';'.join(self.tg.env.INCPATHS)
+
+class codelite_generator(BuildContext):
+        '''generates a CodeLite workspace'''
+        cmd = 'codelite'
+        fun = 'build'
+
+        def init(self):
+                """
+                Some data that needs to be present
+                """
+                if not getattr(self, 'configurations', None):
+                        self.configurations = ['Release'] # LocalRelease, RemoteDebug, etc
+                if not getattr(self, 'platforms', None):
+                        self.platforms = ['Win32']
+                if not getattr(self, 'all_projects', None):
+                        self.all_projects = []
+                if not getattr(self, 'project_extension', None):
+                        self.project_extension = '.project'
+                if not getattr(self, 'projects_dir', None):
+                        self.projects_dir = self.srcnode.make_node('')
+                        self.projects_dir.mkdir()
+
+                # bind the classes to the object, so that subclass can provide custom generators
+                if not getattr(self, 'vsnode_vsdir', None):
+                        self.vsnode_vsdir = vsnode_vsdir
+                if not getattr(self, 'vsnode_target', None):
+                        self.vsnode_target = vsnode_target
+                if not getattr(self, 'vsnode_build_all', None):
+                        self.vsnode_build_all = vsnode_build_all
+                if not getattr(self, 'vsnode_install_all', None):
+                        self.vsnode_install_all = vsnode_install_all
+                if not getattr(self, 'vsnode_project_view', None):
+                        self.vsnode_project_view = vsnode_project_view
+
+                self.numver = '11.00'
+                self.vsver  = '2010'
+
+        def execute(self):
+                """
+                Entry point
+                """
+                self.restore()
+                if not self.all_envs:
+                        self.load_envs()
+                self.recurse([self.run_dir])
+
+                # user initialization
+                self.init()
+
+                # two phases for creating the solution
+                self.collect_projects() # add project objects into "self.all_projects"
+                self.write_files() # write the corresponding project and solution files
+
+        def collect_projects(self):
+                """
+                Fill the list self.all_projects with project objects
+                Fill the list of build targets
+                """
+                self.collect_targets()
+                #self.add_aliases()
+                #self.collect_dirs()
+                default_project = getattr(self, 'default_project', None)
+                def sortfun(x):
+                        if x.name == default_project:
+                                return ''
+                        return getattr(x, 'path', None) and x.path.abspath() or x.name
+                self.all_projects.sort(key=sortfun)
+
+
+        def write_files(self):
+
+                """
+                Write the project and solution files from the data collected
+                so far. It is unlikely that you will want to change this
+                """
+                for p in self.all_projects:
+                        p.write()
+
+                # and finally write the solution file
+                node = self.get_solution_node()
+                node.parent.mkdir()
+                Logs.warn('Creating %r' % node)
+                #a = dir(self.root)
+                #for b in a:
+                #        print b
+                #print self.group_names
+                #print "Hallo2:   ",self.root.listdir()
+                #print getattr(self, 'codelite_solution_name', None)
+                template1 = compile_template(SOLUTION_TEMPLATE)
+                sln_str = template1(self)
+                sln_str = rm_blank_lines(sln_str)
+                node.stealth_write(sln_str)
+
+        def get_solution_node(self):
+                """
+                The solution filename is required when writing the .vcproj files
+                return self.solution_node; if it does not exist yet, create it first
+                """
+                try:
+                        return self.solution_node
+                except:
+                        pass
+
+                codelite_solution_name = getattr(self, 'codelite_solution_name', None)
+                if not codelite_solution_name:
+                        codelite_solution_name = getattr(Context.g_module, Context.APPNAME, 'project') + '.workspace'
+                        setattr(self, 'codelite_solution_name', codelite_solution_name)
+                if os.path.isabs(codelite_solution_name):
+                        self.solution_node = self.root.make_node(codelite_solution_name)
+                else:
+                        self.solution_node = self.srcnode.make_node(codelite_solution_name)
+                return self.solution_node
+
+        def project_configurations(self):
+                """
+                Helper that returns all the pairs (config,platform)
+                """
+                ret = []
+                for c in self.configurations:
+                        for p in self.platforms:
+                                ret.append((c, p))
+                return ret
+
+        def collect_targets(self):
+                """
+                Process the list of task generators
+                """
+                for g in self.groups:
+                        for tg in g:
+                                if not isinstance(tg, TaskGen.task_gen):
+                                        continue
+
+                                if not hasattr(tg, 'codelite_includes'):
+                                        tg.codelite_includes = tg.to_list(getattr(tg, 'includes', [])) + tg.to_list(getattr(tg, 'export_includes', []))
+                                tg.post()
+                                if not getattr(tg, 'link_task', None):
+                                        continue
+
+                                p = self.vsnode_target(self, tg)
+                                p.collect_source() # delegate this processing
+                                p.collect_properties()
+                                self.all_projects.append(p)
+
+        def add_aliases(self):
+                """
+                Add a specific target that emulates the "make all" necessary for Visual studio when pressing F7
+                We also add an alias for "make install" (disabled by default)
+                """
+                base = getattr(self, 'projects_dir', None) or self.tg.path
+
+                node_project = base.make_node('build_all_projects' + self.project_extension) # Node
+                p_build = self.vsnode_build_all(self, node_project)
+                p_build.collect_properties()
+                self.all_projects.append(p_build)
+
+                node_project = base.make_node('install_all_projects' + self.project_extension) # Node
+                p_install = self.vsnode_install_all(self, node_project)
+                p_install.collect_properties()
+                self.all_projects.append(p_install)
+
+                node_project = base.make_node('project_view' + self.project_extension) # Node
+                p_view = self.vsnode_project_view(self, node_project)
+                p_view.collect_source()
+                p_view.collect_properties()
+                self.all_projects.append(p_view)
+
+                n = self.vsnode_vsdir(self, make_uuid(self.srcnode.abspath() + 'build_aliases'), "build_aliases")
+                p_build.parent = p_install.parent = p_view.parent = n
+                self.all_projects.append(n)
+
+        def collect_dirs(self):
+                """
+                Create the folder structure in the CodeLite project view
+                """
+                seen = {}
+                def make_parents(proj):
+                        # look at a project, try to make a parent
+                        if getattr(proj, 'parent', None):
+                                # aliases already have parents
+                                return
+                        x = proj.iter_path
+                        if x in seen:
+                                proj.parent = seen[x]
+                                return
+
+                        # There is no vsnode_vsdir for x.
+                        # So create a project representing the folder "x"
+                        n = proj.parent = seen[x] = self.vsnode_vsdir(self, make_uuid(x.abspath()), x.name)
+                        n.iter_path = x.parent
+                        self.all_projects.append(n)
+
+                        # recurse up to the project directory
+                        if x.height() > self.srcnode.height() + 1:
+                                make_parents(n)
+
+                for p in self.all_projects[:]: # iterate over a copy of all projects
+                        if not getattr(p, 'tg', None):
+                                # but only projects that have a task generator
+                                continue
+
+                        # make a folder for each task generator
+                        p.iter_path = p.tg.path
+                        make_parents(p)
+
+
+
+def options(ctx):
+        pass
diff --git a/third_party/waf/waflib/extras/color_gcc.py b/third_party/waf/waflib/extras/color_gcc.py
new file mode 100644 (file)
index 0000000..b3587e8
--- /dev/null
@@ -0,0 +1,38 @@
+#!/usr/bin/env python
+# encoding: utf-8
+
+# Replaces the default formatter by one which understands GCC output and colorizes it.
+
+__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
+__copyright__ = "Jérôme Carretero, 2012"
+
+import sys
+from waflib import Logs
+
+class ColorGCCFormatter(Logs.formatter):
+       def __init__(self, colors):
+               self.colors = colors
+               Logs.formatter.__init__(self)
+       def format(self, rec):
+               frame = sys._getframe()
+               while frame:
+                       func = frame.f_code.co_name
+                       if func == 'exec_command':
+                               cmd = frame.f_locals['cmd']
+                               if isinstance(cmd, list) and ('gcc' in cmd[0] or 'g++' in cmd[0]):
+                                       lines = []
+                                       for line in rec.msg.splitlines():
+                                               if 'warning: ' in line:
+                                                       lines.append(self.colors.YELLOW + line)
+                                               elif 'error: ' in line:
+                                                       lines.append(self.colors.RED + line)
+                                               elif 'note: ' in line:
+                                                       lines.append(self.colors.CYAN + line)
+                                               else:
+                                                       lines.append(line)
+                                       rec.msg = "\n".join(lines)
+                       frame = frame.f_back
+               return Logs.formatter.format(self, rec)
+
+def options(opt):
+       Logs.log.handlers[0].setFormatter(ColorGCCFormatter(Logs.colors))
diff --git a/third_party/waf/waflib/extras/color_rvct.py b/third_party/waf/waflib/extras/color_rvct.py
new file mode 100644 (file)
index 0000000..837fca2
--- /dev/null
@@ -0,0 +1,50 @@
+#!/usr/bin/env python
+# encoding: utf-8
+
+# Replaces the default formatter by one which understands RVCT output and colorizes it.
+
+__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
+__copyright__ = "Jérôme Carretero, 2012"
+
+import sys
+import atexit
+from waflib import Logs
+
+errors = []
+
+def show_errors():
+       for i, e in enumerate(errors):
+               if i > 5:
+                       break
+               print("Error: %s" % e)
+
+atexit.register(show_errors)
+
+class RcvtFormatter(Logs.formatter):
+       def __init__(self, colors):
+               Logs.formatter.__init__(self)
+               self.colors = colors
+       def format(self, rec):
+               frame = sys._getframe()
+               while frame:
+                       func = frame.f_code.co_name
+                       if func == 'exec_command':
+                               cmd = frame.f_locals['cmd']
+                               if isinstance(cmd, list) and ('armcc' in cmd[0] or 'armld' in cmd[0]):
+                                       lines = []
+                                       for line in rec.msg.splitlines():
+                                               if 'Warning: ' in line:
+                                                       lines.append(self.colors.YELLOW + line)
+                                               elif 'Error: ' in line:
+                                                       lines.append(self.colors.RED + line)
+                                                       errors.append(line)
+                                               elif 'note: ' in line:
+                                                       lines.append(self.colors.CYAN + line)
+                                               else:
+                                                       lines.append(line)
+                                       rec.msg = "\n".join(lines)
+                       frame = frame.f_back
+               return Logs.formatter.format(self, rec)
+
+def options(opt):
+       Logs.log.handlers[0].setFormatter(RcvtFormatter(Logs.colors))
diff --git a/third_party/waf/waflib/extras/compat15.py b/third_party/waf/waflib/extras/compat15.py
new file mode 100644 (file)
index 0000000..69722ff
--- /dev/null
@@ -0,0 +1,405 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2010 (ita)
+
+"""
+This file is provided to enable compatibility with waf 1.5
+It was enabled by default in waf 1.6, but it is not used in waf 1.7
+"""
+
+import sys
+from waflib import ConfigSet, Logs, Options, Scripting, Task, Build, Configure, Node, Runner, TaskGen, Utils, Errors, Context
+
+# the following is to bring some compatibility with waf 1.5 "import waflib.Configure → import Configure"
+sys.modules['Environment'] = ConfigSet
+ConfigSet.Environment = ConfigSet.ConfigSet
+
+sys.modules['Logs'] = Logs
+sys.modules['Options'] = Options
+sys.modules['Scripting'] = Scripting
+sys.modules['Task'] = Task
+sys.modules['Build'] = Build
+sys.modules['Configure'] = Configure
+sys.modules['Node'] = Node
+sys.modules['Runner'] = Runner
+sys.modules['TaskGen'] = TaskGen
+sys.modules['Utils'] = Utils
+sys.modules['Constants'] = Context
+Context.SRCDIR = ''
+Context.BLDDIR = ''
+
+from waflib.Tools import c_preproc
+sys.modules['preproc'] = c_preproc
+
+from waflib.Tools import c_config
+sys.modules['config_c'] = c_config
+
+ConfigSet.ConfigSet.copy = ConfigSet.ConfigSet.derive
+ConfigSet.ConfigSet.set_variant = Utils.nada
+
+Utils.pproc = Utils.subprocess
+
+Build.BuildContext.add_subdirs = Build.BuildContext.recurse
+Build.BuildContext.new_task_gen = Build.BuildContext.__call__
+Build.BuildContext.is_install = 0
+Node.Node.relpath_gen = Node.Node.path_from
+
+Utils.pproc = Utils.subprocess
+Utils.get_term_cols = Logs.get_term_cols
+
+def cmd_output(cmd, **kw):
+
+       silent = False
+       if 'silent' in kw:
+               silent = kw['silent']
+               del(kw['silent'])
+
+       if 'e' in kw:
+               tmp = kw['e']
+               del(kw['e'])
+               kw['env'] = tmp
+
+       kw['shell'] = isinstance(cmd, str)
+       kw['stdout'] = Utils.subprocess.PIPE
+       if silent:
+               kw['stderr'] = Utils.subprocess.PIPE
+
+       try:
+               p = Utils.subprocess.Popen(cmd, **kw)
+               output = p.communicate()[0]
+	except OSError as e:
+               raise ValueError(str(e))
+
+       if p.returncode:
+               if not silent:
+                       msg = "command execution failed: %s -> %r" % (cmd, str(output))
+                       raise ValueError(msg)
+               output = ''
+       return output
+Utils.cmd_output = cmd_output
+
+def name_to_obj(self, s, env=None):
+       if Logs.verbose:
+               Logs.warn('compat: change "name_to_obj(name, env)" by "get_tgen_by_name(name)"')
+       return self.get_tgen_by_name(s)
+Build.BuildContext.name_to_obj = name_to_obj
+
+def env_of_name(self, name):
+       try:
+               return self.all_envs[name]
+       except KeyError:
+               Logs.error('no such environment: '+name)
+               return None
+Build.BuildContext.env_of_name = env_of_name
+
+
+def set_env_name(self, name, env):
+       self.all_envs[name] = env
+       return env
+Configure.ConfigurationContext.set_env_name = set_env_name
+
+def retrieve(self, name, fromenv=None):
+       try:
+               env = self.all_envs[name]
+       except KeyError:
+               env = ConfigSet.ConfigSet()
+               self.prepare_env(env)
+               self.all_envs[name] = env
+       else:
+               if fromenv:
+                       Logs.warn('The environment %s may have been configured already', name)
+       return env
+Configure.ConfigurationContext.retrieve = retrieve
+
+Configure.ConfigurationContext.sub_config = Configure.ConfigurationContext.recurse
+Configure.ConfigurationContext.check_tool = Configure.ConfigurationContext.load
+Configure.conftest = Configure.conf
+Configure.ConfigurationError = Errors.ConfigurationError
+Utils.WafError = Errors.WafError
+
+Options.OptionsContext.sub_options = Options.OptionsContext.recurse
+Options.OptionsContext.tool_options = Context.Context.load
+Options.Handler = Options.OptionsContext
+
+Task.simple_task_type = Task.task_type_from_func = Task.task_factory
+Task.TaskBase.classes = Task.classes
+
+def setitem(self, key, value):
+       if key.startswith('CCFLAGS'):
+               key = key[1:]
+       self.table[key] = value
+ConfigSet.ConfigSet.__setitem__ = setitem
+
+@TaskGen.feature('d')
+@TaskGen.before('apply_incpaths')
+def old_importpaths(self):
+       if getattr(self, 'importpaths', []):
+               self.includes = self.importpaths
+
+from waflib import Context
+eld = Context.load_tool
+def load_tool(*k, **kw):
+       ret = eld(*k, **kw)
+       if 'set_options' in ret.__dict__:
+               if Logs.verbose:
+                       Logs.warn('compat: rename "set_options" to options')
+               ret.options = ret.set_options
+       if 'detect' in ret.__dict__:
+               if Logs.verbose:
+                       Logs.warn('compat: rename "detect" to "configure"')
+               ret.configure = ret.detect
+       return ret
+Context.load_tool = load_tool
+
+def get_curdir(self):
+       return self.path.abspath()
+Context.Context.curdir = property(get_curdir, Utils.nada)
+
+def get_srcdir(self):
+       return self.srcnode.abspath()
+Configure.ConfigurationContext.srcdir = property(get_srcdir, Utils.nada)
+
+def get_blddir(self):
+       return self.bldnode.abspath()
+Configure.ConfigurationContext.blddir = property(get_blddir, Utils.nada)
+
+Configure.ConfigurationContext.check_message_1 = Configure.ConfigurationContext.start_msg
+Configure.ConfigurationContext.check_message_2 = Configure.ConfigurationContext.end_msg
+
+rev = Context.load_module
+def load_module(path, encoding=None):
+       ret = rev(path, encoding)
+       if 'set_options' in ret.__dict__:
+               if Logs.verbose:
+                       Logs.warn('compat: rename "set_options" to "options" (%r)', path)
+               ret.options = ret.set_options
+       if 'srcdir' in ret.__dict__:
+               if Logs.verbose:
+                       Logs.warn('compat: rename "srcdir" to "top" (%r)', path)
+               ret.top = ret.srcdir
+       if 'blddir' in ret.__dict__:
+               if Logs.verbose:
+                       Logs.warn('compat: rename "blddir" to "out" (%r)', path)
+               ret.out = ret.blddir
+       Utils.g_module = Context.g_module
+       Options.launch_dir = Context.launch_dir
+       return ret
+Context.load_module = load_module
+
+old_post = TaskGen.task_gen.post
+def post(self):
+       self.features = self.to_list(self.features)
+       if 'cc' in self.features:
+               if Logs.verbose:
+                       Logs.warn('compat: the feature cc does not exist anymore (use "c")')
+               self.features.remove('cc')
+               self.features.append('c')
+       if 'cstaticlib' in self.features:
+               if Logs.verbose:
+                       Logs.warn('compat: the feature cstaticlib does not exist anymore (use "cstlib" or "cxxstlib")')
+               self.features.remove('cstaticlib')
+               self.features.append(('cxx' in self.features) and 'cxxstlib' or 'cstlib')
+       if getattr(self, 'ccflags', None):
+               if Logs.verbose:
+                       Logs.warn('compat: "ccflags" was renamed to "cflags"')
+               self.cflags = self.ccflags
+       return old_post(self)
+TaskGen.task_gen.post = post
+
+def waf_version(*k, **kw):
+       Logs.warn('wrong version (waf_version was removed in waf 1.6)')
+Utils.waf_version = waf_version
+
+
+import os
+@TaskGen.feature('c', 'cxx', 'd')
+@TaskGen.before('apply_incpaths', 'propagate_uselib_vars')
+@TaskGen.after('apply_link', 'process_source')
+def apply_uselib_local(self):
+       """
+       process the uselib_local attribute
+       execute after apply_link because of the execution order set on 'link_task'
+       """
+       env = self.env
+       from waflib.Tools.ccroot import stlink_task
+
+       # 1. the case of the libs defined in the project (visit ancestors first)
+       # the ancestors external libraries (uselib) will be prepended
+       self.uselib = self.to_list(getattr(self, 'uselib', []))
+       self.includes = self.to_list(getattr(self, 'includes', []))
+       names = self.to_list(getattr(self, 'uselib_local', []))
+       get = self.bld.get_tgen_by_name
+       seen = set()
+       seen_uselib = set()
+       tmp = Utils.deque(names) # consume a copy of the list of names
+       if tmp:
+               if Logs.verbose:
+                       Logs.warn('compat: "uselib_local" is deprecated, replace by "use"')
+       while tmp:
+               lib_name = tmp.popleft()
+               # visit dependencies only once
+               if lib_name in seen:
+                       continue
+
+               y = get(lib_name)
+               y.post()
+               seen.add(lib_name)
+
+               # object has ancestors to process (shared libraries): add them to the end of the list
+               if getattr(y, 'uselib_local', None):
+                       for x in self.to_list(getattr(y, 'uselib_local', [])):
+                               obj = get(x)
+                               obj.post()
+                               if getattr(obj, 'link_task', None):
+                                       if not isinstance(obj.link_task, stlink_task):
+                                               tmp.append(x)
+
+               # link task and flags
+               if getattr(y, 'link_task', None):
+
+                       link_name = y.target[y.target.rfind(os.sep) + 1:]
+                       if isinstance(y.link_task, stlink_task):
+                               env.append_value('STLIB', [link_name])
+                       else:
+                               # some linkers can link against programs
+                               env.append_value('LIB', [link_name])
+
+                       # the order
+                       self.link_task.set_run_after(y.link_task)
+
+                       # for the recompilation
+                       self.link_task.dep_nodes += y.link_task.outputs
+
+                       # add the link path too
+                       tmp_path = y.link_task.outputs[0].parent.bldpath()
+                       if not tmp_path in env['LIBPATH']:
+                               env.prepend_value('LIBPATH', [tmp_path])
+
+               # add ancestors uselib too - but only propagate those that have no staticlib defined
+               for v in self.to_list(getattr(y, 'uselib', [])):
+                       if v not in seen_uselib:
+                               seen_uselib.add(v)
+                               if not env['STLIB_' + v]:
+                                       if not v in self.uselib:
+                                               self.uselib.insert(0, v)
+
+               # if the library task generator provides 'export_includes', add to the include path
+               # the export_includes must be a list of paths relative to the other library
+               if getattr(y, 'export_includes', None):
+                       self.includes.extend(y.to_incnodes(y.export_includes))
+
+@TaskGen.feature('cprogram', 'cxxprogram', 'cstlib', 'cxxstlib', 'cshlib', 'cxxshlib', 'dprogram', 'dstlib', 'dshlib')
+@TaskGen.after('apply_link')
+def apply_objdeps(self):
+       "add the .o files produced by some other object files in the same manner as uselib_local"
+       names = getattr(self, 'add_objects', [])
+       if not names:
+               return
+       names = self.to_list(names)
+
+       get = self.bld.get_tgen_by_name
+       seen = []
+       while names:
+               x = names[0]
+
+               # visit dependencies only once
+               if x in seen:
+                       names = names[1:]
+                       continue
+
+               # object does not exist ?
+               y = get(x)
+
+               # object has ancestors to process first ? update the list of names
+               if getattr(y, 'add_objects', None):
+                       added = 0
+                       lst = y.to_list(y.add_objects)
+                       lst.reverse()
+                       for u in lst:
+                               if u in seen: continue
+                               added = 1
+                               names = [u]+names
+                       if added: continue # list of names modified, loop
+
+               # safe to process the current object
+               y.post()
+               seen.append(x)
+
+               for t in getattr(y, 'compiled_tasks', []):
+                       self.link_task.inputs.extend(t.outputs)
+
+@TaskGen.after('apply_link')
+def process_obj_files(self):
+       if not hasattr(self, 'obj_files'):
+               return
+       for x in self.obj_files:
+               node = self.path.find_resource(x)
+               self.link_task.inputs.append(node)
+
+@TaskGen.taskgen_method
+def add_obj_file(self, file):
+       """Small example on how to link object files as if they were source
+       obj = bld.create_obj('cc')
+       obj.add_obj_file('foo.o')"""
+       if not hasattr(self, 'obj_files'): self.obj_files = []
+       if not 'process_obj_files' in self.meths: self.meths.append('process_obj_files')
+       self.obj_files.append(file)
+
+
+old_define = Configure.ConfigurationContext.__dict__['define']
+
+@Configure.conf
+def define(self, key, val, quote=True, comment=''):
+       old_define(self, key, val, quote, comment)
+       if key.startswith('HAVE_'):
+               self.env[key] = 1
+
+old_undefine = Configure.ConfigurationContext.__dict__['undefine']
+
+@Configure.conf
+def undefine(self, key, comment=''):
+       old_undefine(self, key, comment)
+       if key.startswith('HAVE_'):
+               self.env[key] = 0
+
+# some people might want to use export_incdirs, but it was renamed
+def set_incdirs(self, val):
+       Logs.warn('compat: change "export_incdirs" by "export_includes"')
+       self.export_includes = val
+TaskGen.task_gen.export_incdirs = property(None, set_incdirs)
+
+def install_dir(self, path):
+       if not path:
+               return []
+
+       destpath = Utils.subst_vars(path, self.env)
+
+       if self.is_install > 0:
+               Logs.info('* creating %s', destpath)
+               Utils.check_dir(destpath)
+       elif self.is_install < 0:
+               Logs.info('* removing %s', destpath)
+               try:
+                       os.remove(destpath)
+               except OSError:
+                       pass
+Build.BuildContext.install_dir = install_dir
+
+# before/after names
+repl = {'apply_core': 'process_source',
+       'apply_lib_vars': 'process_source',
+       'apply_obj_vars': 'propagate_uselib_vars',
+       'exec_rule': 'process_rule'
+}
+def after(*k):
+       k = [repl.get(key, key) for key in k]
+       return TaskGen.after_method(*k)
+
+def before(*k):
+       k = [repl.get(key, key) for key in k]
+       return TaskGen.before_method(*k)
+TaskGen.before = before
diff --git a/third_party/waf/waflib/extras/cppcheck.py b/third_party/waf/waflib/extras/cppcheck.py
new file mode 100644 (file)
index 0000000..3bbeabf
--- /dev/null
@@ -0,0 +1,546 @@
+#! /usr/bin/env python
+# -*- encoding: utf-8 -*-
+# Michel Mooij, michel.mooij7@gmail.com
+
+"""
+Tool Description
+================
+This module provides a waf wrapper (i.e. waftool) around the C/C++ source code
+checking tool 'cppcheck'.
+
+See http://cppcheck.sourceforge.net/ for more information on the cppcheck tool
+itself.
+Note that many linux distributions already provide a ready to install version
+of cppcheck. On fedora, for instance, it can be installed using yum:
+
+       'sudo yum install cppcheck'
+
+
+Usage
+=====
+In order to use this waftool simply add it to the 'options' and 'configure'
+functions of your main waf script as shown in the example below:
+
+       def options(opt):
+               opt.load('cppcheck', tooldir='./waftools')
+
+       def configure(conf):
+               conf.load('cppcheck')
+
Note that the example shown above assumes that the cppcheck waftool is located
in the subdirectory named 'waftools'.
+
+When configured as shown in the example above, cppcheck will automatically
+perform a source code analysis on all C/C++ build tasks that have been
+defined in your waf build system.
+
+The example shown below for a C program will be used as input for cppcheck when
+building the task.
+
+       def build(bld):
+               bld.program(name='foo', src='foobar.c')
+
+The result of the source code analysis will be stored both as xml and html
+files in the build location for the task. Should any error be detected by
+cppcheck the build will be aborted and a link to the html report will be shown.
+
When needed, source code checking by cppcheck can be disabled per task, or per
detected error or warning for a particular task. It can also be disabled for
all tasks.
+
+In order to exclude a task from source code checking add the skip option to the
+task as shown below:
+
+       def build(bld):
+               bld.program(
+                               name='foo',
+                               src='foobar.c'
+                               cppcheck_skip=True
+               )
+
+When needed problems detected by cppcheck may be suppressed using a file
+containing a list of suppression rules. The relative or absolute path to this
+file can be added to the build task as shown in the example below:
+
+               bld.program(
+                               name='bar',
+                               src='foobar.c',
+                               cppcheck_suppress='bar.suppress'
+               )
+
+A cppcheck suppress file should contain one suppress rule per line. Each of
+these rules will be passed as an '--suppress=<rule>' argument to cppcheck.
+
+Dependencies
+================
+This waftool depends on the python pygments module, it is used for source code
+syntax highlighting when creating the html reports. see http://pygments.org/ for
+more information on this package.
+
+Remarks
+================
+The generation of the html report is originally based on the cppcheck-htmlreport.py
+script that comes shipped with the cppcheck tool.
+"""
+
+import sys
+import xml.etree.ElementTree as ElementTree
+from waflib import Task, TaskGen, Logs, Context
+
+PYGMENTS_EXC_MSG= '''
+The required module 'pygments' could not be found. Please install it using your
+platform package manager (e.g. apt-get or yum), using 'pip' or 'easy_install',
+see 'http://pygments.org/download/' for installation instructions.
+'''
+
+try:
+       import pygments
+       from pygments import formatters, lexers
+except ImportError as e:
+       Logs.warn(PYGMENTS_EXC_MSG)
+       raise e
+
+
def options(opt):
	"""Register the cppcheck command line options on the option parser."""
	add = opt.add_option

	add('--cppcheck-skip', dest='cppcheck_skip', action='store_true',
		default=False,
		help='do not check C/C++ sources (default=False)')

	add('--cppcheck-err-resume', dest='cppcheck_err_resume', action='store_true',
		default=False,
		help='continue in case of errors (default=False)')

	add('--cppcheck-bin-enable', dest='cppcheck_bin_enable', action='store',
		default='warning,performance,portability,style,unusedFunction',
		help="cppcheck option '--enable=' for binaries (default=warning,performance,portability,style,unusedFunction)")

	add('--cppcheck-lib-enable', dest='cppcheck_lib_enable', action='store',
		default='warning,performance,portability,style',
		help="cppcheck option '--enable=' for libraries (default=warning,performance,portability,style)")

	add('--cppcheck-std-c', dest='cppcheck_std_c', action='store',
		default='c99',
		help='cppcheck standard to use when checking C (default=c99)')

	add('--cppcheck-std-cxx', dest='cppcheck_std_cxx', action='store',
		default='c++03',
		help='cppcheck standard to use when checking C++ (default=c++03)')

	add('--cppcheck-check-config', dest='cppcheck_check_config', action='store_true',
		default=False,
		help='forced check for missing buildin include files, e.g. stdio.h (default=False)')

	add('--cppcheck-max-configs', dest='cppcheck_max_configs', action='store',
		default='20',
		help='maximum preprocessor (--max-configs) define iterations (default=20)')
+
+
def configure(conf):
	"""Store the cppcheck options in the configuration environment and
	locate the cppcheck executable."""
	opts = conf.options
	if opts.cppcheck_skip:
		conf.env.CPPCHECK_SKIP = [True]
	conf.env.CPPCHECK_STD_C = opts.cppcheck_std_c
	conf.env.CPPCHECK_STD_CXX = opts.cppcheck_std_cxx
	conf.env.CPPCHECK_MAX_CONFIGS = opts.cppcheck_max_configs
	conf.env.CPPCHECK_BIN_ENABLE = opts.cppcheck_bin_enable
	conf.env.CPPCHECK_LIB_ENABLE = opts.cppcheck_lib_enable
	conf.find_program('cppcheck', var='CPPCHECK')
+
+
@TaskGen.feature('c')
@TaskGen.feature('cxx')
def cppcheck_execute(self):
	"""Create a cppcheck task for each C/C++ task generator, unless
	checking was disabled at configure time, on the command line, or via
	the per-task 'cppcheck_skip' attribute."""
	# disabled globally (configure) or for this run (command line)?
	if len(self.env.CPPCHECK_SKIP) or self.bld.options.cppcheck_skip:
		return
	# disabled for this particular task generator?
	if getattr(self, 'cppcheck_skip', False):
		return
	task = self.create_task('cppcheck')
	task.cmd = _tgen_create_cmd(self)
	task.fatal = []
	# unless '--cppcheck-err-resume' was given, 'error' severities abort the build
	if not self.bld.options.cppcheck_err_resume:
		task.fatal.append('error')
+
+
+def _tgen_create_cmd(self):
+       features = getattr(self, 'features', [])
+       std_c = self.env.CPPCHECK_STD_C
+       std_cxx = self.env.CPPCHECK_STD_CXX
+       max_configs = self.env.CPPCHECK_MAX_CONFIGS
+       bin_enable = self.env.CPPCHECK_BIN_ENABLE
+       lib_enable = self.env.CPPCHECK_LIB_ENABLE
+
+       cmd  = self.env.CPPCHECK
+       args = ['--inconclusive','--report-progress','--verbose','--xml','--xml-version=2']
+       args.append('--max-configs=%s' % max_configs)
+
+       if 'cxx' in features:
+               args.append('--language=c++')
+               args.append('--std=%s' % std_cxx)
+       else:
+               args.append('--language=c')
+               args.append('--std=%s' % std_c)
+
+       if self.bld.options.cppcheck_check_config:
+               args.append('--check-config')
+
+       if set(['cprogram','cxxprogram']) & set(features):
+               args.append('--enable=%s' % bin_enable)
+       else:
+               args.append('--enable=%s' % lib_enable)
+
+       for src in self.to_list(getattr(self, 'source', [])):
+               args.append('%r' % src)
+       for inc in self.to_incnodes(self.to_list(getattr(self, 'includes', []))):
+               args.append('-I%r' % inc)
+       for inc in self.to_incnodes(self.to_list(self.env.INCLUDES)):
+               args.append('-I%r' % inc)
+       return cmd + args
+
+
class cppcheck(Task.Task):
	"""
	Task running the cppcheck executable on the sources of one task
	generator, saving the raw XML result and rendering it as a set of
	html report pages.
	"""
	quiet = True

	def run(self):
		"""Run cppcheck (the xml result arrives on stderr), store the
		report, render the html pages and evaluate the detected defects."""
		stderr = self.generator.bld.cmd_and_log(self.cmd, quiet=Context.STDERR, output=Context.STDERR)
		self._save_xml_report(stderr)
		defects = self._get_defects(stderr)
		index = self._create_html_report(defects)
		self._errors_evaluate(defects, index)
		return 0

	def _save_xml_report(self, s):
		'''use cppcheck xml result string, add the command string used to invoke cppcheck
		and save as xml file.
		'''
		header = '%s\n' % s.splitlines()[0]
		root = ElementTree.fromstring(s)
		cmd = ElementTree.SubElement(root.find('cppcheck'), 'cmd')
		cmd.text = str(self.cmd)
		body = ElementTree.tostring(root)
		if not isinstance(body, str):
			# fix: ElementTree.tostring returns bytes on python3; the
			# original 'header + body' concatenation raised TypeError there
			body = body.decode('utf-8')
		node = self.generator.path.get_bld().find_or_declare('cppcheck.xml')
		node.write(header + body)

	def _get_defects(self, xml_string):
		'''evaluate the xml string returned by cppcheck (on stderr) and use it to create
		a list of defects.
		'''
		defects = []
		for error in ElementTree.fromstring(xml_string).iter('error'):
			defect = {}
			defect['id'] = error.get('id')
			defect['severity'] = error.get('severity')
			defect['msg'] = str(error.get('msg')).replace('<','&lt;')
			defect['verbose'] = error.get('verbose')
			for location in error.findall('location'):
				defect['file'] = location.get('file')
				# NOTE(review): the line number is shifted by one, presumably
				# to line up with the anchors emitted by pygments -- confirm
				defect['line'] = str(int(location.get('line')) - 1)
			defects.append(defect)
		return defects

	def _create_html_report(self, defects):
		"""Render the defects as html files and return the index page node."""
		files, css_style_defs = self._create_html_files(defects)
		index = self._create_html_index(files)
		self._create_css_file(css_style_defs)
		return index

	def _create_html_files(self, defects):
		"""Create one html page per source file containing defects; return a
		mapping of source name to page/defect info and the css definitions."""
		# group the defects per source file; defects without file information
		# cannot be shown in a source page and are left to the index table.
		# fix: dict.has_key and indexing of dict.keys() were python2-only
		sources = {}
		for defect in defects:
			if 'file' not in defect:
				continue
			sources.setdefault(defect['file'], []).append(defect)

		files = {}
		css_style_defs = None
		bpath = self.generator.path.get_bld().abspath()
		for i, name in enumerate(sources):
			htmlfile = 'cppcheck/%i.html' % (i)
			errors = sources[name]
			files[name] = { 'htmlfile': '%s/%s' % (bpath, htmlfile), 'errors': errors }
			css_style_defs = self._create_html_file(name, htmlfile, errors)
		return files, css_style_defs

	def _html_to_string(self, root):
		"""Serialize an element tree as an html string with the doctype prepended."""
		s = ElementTree.tostring(root, method='html')
		if not isinstance(s, str):
			# fix: bytes on python3, see _save_xml_report
			s = s.decode('utf-8')
		return CCPCHECK_HTML_TYPE + s

	def _create_html_file(self, sourcefile, htmlfile, errors):
		"""Render one annotated, syntax highlighted source file as html."""
		name = self.generator.get_name()
		root = ElementTree.fromstring(CPPCHECK_HTML_FILE)
		title = root.find('head/title')
		title.text = 'cppcheck - report - %s' % name

		body = root.find('body')
		for div in body.findall('div'):
			if div.get('id') == 'page':
				page = div
				break
		for div in page.findall('div'):
			if div.get('id') == 'header':
				h1 = div.find('h1')
				h1.text = 'cppcheck report - %s' % name
			if div.get('id') == 'content':
				content = div
				srcnode = self.generator.bld.root.find_node(sourcefile)
				hl_lines = [e['line'] for e in errors if 'line' in e]
				formatter = CppcheckHtmlFormatter(linenos=True, style='colorful', hl_lines=hl_lines, lineanchors='line')
				formatter.errors = [e for e in errors if 'line' in e]
				css_style_defs = formatter.get_style_defs('.highlight')
				lexer = pygments.lexers.guess_lexer_for_filename(sourcefile, "")
				s = pygments.highlight(srcnode.read(), lexer, formatter)
				table = ElementTree.fromstring(s)
				content.append(table)

		node = self.generator.path.get_bld().find_or_declare(htmlfile)
		node.write(self._html_to_string(root))
		return css_style_defs

	def _create_html_index(self, files):
		"""Create the top level report page with a defect table per source file."""
		name = self.generator.get_name()
		root = ElementTree.fromstring(CPPCHECK_HTML_FILE)
		title = root.find('head/title')
		title.text = 'cppcheck - report - %s' % name

		body = root.find('body')
		for div in body.findall('div'):
			if div.get('id') == 'page':
				page = div
				break
		for div in page.findall('div'):
			if div.get('id') == 'header':
				h1 = div.find('h1')
				h1.text = 'cppcheck report - %s' % name
			if div.get('id') == 'content':
				content = div
				self._create_html_table(content, files)

		node = self.generator.path.get_bld().find_or_declare('cppcheck/index.html')
		node.write(self._html_to_string(root))
		return node

	def _create_html_table(self, content, files):
		"""Append a table of defects (one section per source file) to *content*."""
		table = ElementTree.fromstring(CPPCHECK_HTML_TABLE)
		for name, val in files.items():
			f = val['htmlfile']
			s = '<tr><td colspan="4"><a href="%s">%s</a></td></tr>\n' % (f,name)
			row = ElementTree.fromstring(s)
			table.append(row)

			# sort by line number, defects without a line go last
			# fix: sys.maxint and dict.has_key do not exist on python3
			errors = sorted(val['errors'], key=lambda e: int(e['line']) if 'line' in e else sys.maxsize)
			for e in errors:
				if 'line' not in e:
					s = '<tr><td></td><td>%s</td><td>%s</td><td>%s</td></tr>\n' % (e['id'], e['severity'], e['msg'])
				else:
					attr = ''
					if e['severity'] == 'error':
						attr = 'class="error"'
					s = '<tr><td><a href="%s#line-%s">%s</a></td>' % (f, e['line'], e['line'])
					s+= '<td>%s</td><td>%s</td><td %s>%s</td></tr>\n' % (e['id'], e['severity'], attr, e['msg'])
				row = ElementTree.fromstring(s)
				table.append(row)
		content.append(table)

	def _create_css_file(self, css_style_defs):
		"""Write the stylesheet, appending the pygments style definitions."""
		css = str(CPPCHECK_CSS_FILE)
		if css_style_defs:
			css = "%s\n%s\n" % (css, css_style_defs)
		node = self.generator.path.get_bld().find_or_declare('cppcheck/style.css')
		node.write(css)

	def _errors_evaluate(self, errors, http_index):
		"""Abort the build on fatal severities, otherwise log the problems."""
		name = self.generator.get_name()
		fatal = self.fatal
		severity = [err['severity'] for err in errors]
		problems = [err for err in errors if err['severity'] != 'information']

		if set(fatal) & set(severity):
			exc  = "\n"
			# fix: 'ccpcheck' typo in the user visible message
			exc += "\ncppcheck detected fatal error(s) in task '%s', see report for details:" % name
			# NOTE(review): %r wraps the node repr in quotes; presumably
			# intentional for readability -- confirm
			exc += "\n    file://%r" % (http_index)
			exc += "\n"
			self.generator.bld.fatal(exc)

		elif len(problems):
			msg =  "\ncppcheck detected (possible) problem(s) in task '%s', see report for details:" % name
			msg += "\n    file://%r" % http_index
			msg += "\n"
			Logs.error(msg)
+
+
class CppcheckHtmlFormatter(pygments.formatters.HtmlFormatter):
	# defect dictionaries (each with 'line' and 'msg' keys) for the source
	# file being rendered; assigned by the caller before highlighting
	errors = []

	def wrap(self, source, outfile):
		"""Wrap the highlighted source, appending an inline error message
		span to every line that carries a cppcheck defect."""
		line_no = 1
		for i, t in super(CppcheckHtmlFormatter, self).wrap(source, outfile):
			# If this is a source code line we want to add a span tag at the end.
			if i == 1:
				for error in self.errors:
					if int(error['line']) == line_no:
						t = t.replace('\n', CPPCHECK_HTML_ERROR % error['msg'])
				line_no = line_no + 1
			yield i, t
+
+
# doctype prepended to every generated html page
# NOTE(review): name is misspelled ('CCP' vs 'CPP') but kept as-is since it is
# referenced elsewhere in this module
CCPCHECK_HTML_TYPE = \
'<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">\n'

# skeleton for the report pages; the 'XXX' placeholders and the empty
# 'content' div are filled in per task when rendering
CPPCHECK_HTML_FILE = """
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd" [<!ENTITY nbsp "&#160;">]>
<html>
	<head>
		<title>cppcheck - report - XXX</title>
		<link href="style.css" rel="stylesheet" type="text/css" />
		<style type="text/css">
		</style>
	</head>
	<body class="body">
		<div id="page-header">&nbsp;</div>
		<div id="page">
			<div id="header">
				<h1>cppcheck report - XXX</h1>
			</div>
			<div id="menu">
				<a href="index.html">Defect list</a>
			</div>
			<div id="content">
			</div>
			<div id="footer">
				<div>cppcheck - a tool for static C/C++ code analysis</div>
				<div>
				Internet: <a href="http://cppcheck.sourceforge.net">http://cppcheck.sourceforge.net</a><br/>
			Forum: <a href="http://apps.sourceforge.net/phpbb/cppcheck/">http://apps.sourceforge.net/phpbb/cppcheck/</a><br/>
				IRC: #cppcheck at irc.freenode.net
				</div>
				&nbsp;
			</div>
		&nbsp;
		</div>
		<div id="page-footer">&nbsp;</div>
	</body>
</html>
"""

# empty defect table (header row only) filled with one row per defect
CPPCHECK_HTML_TABLE = """
<table>
	<tr>
		<th>Line</th>
		<th>Id</th>
		<th>Severity</th>
		<th>Message</th>
	</tr>
</table>
"""

# inline annotation appended to highlighted source lines containing a defect
CPPCHECK_HTML_ERROR = \
'<span style="background: #ffaaaa;padding: 3px;">&lt;--- %s</span>\n'
+
# base stylesheet for the report pages; the pygments style definitions are
# appended to it when the css file is written (see _create_css_file)
CPPCHECK_CSS_FILE = """
body.body {
	font-family: Arial;
	font-size: 13px;
	background-color: black;
	padding: 0px;
	margin: 0px;
}

.error {
	font-family: Arial;
	font-size: 13px;
	background-color: #ffb7b7;
	padding: 0px;
	margin: 0px;
}

th, td {
	min-width: 100px;
	text-align: left;
}

#page-header {
	clear: both;
	width: 1200px;
	margin: 20px auto 0px auto;
	height: 10px;
	border-bottom-width: 2px;
	border-bottom-style: solid;
	border-bottom-color: #aaaaaa;
}

#page {
	width: 1160px;
	margin: auto;
	border-left-width: 2px;
	border-left-style: solid;
	border-left-color: #aaaaaa;
	border-right-width: 2px;
	border-right-style: solid;
	border-right-color: #aaaaaa;
	background-color: White;
	padding: 20px;
}

#page-footer {
	clear: both;
	width: 1200px;
	margin: auto;
	height: 10px;
	border-top-width: 2px;
	border-top-style: solid;
	border-top-color: #aaaaaa;
}

#header {
	width: 100%;
	height: 70px;
	background-image: url(logo.png);
	background-repeat: no-repeat;
	background-position: left top;
	border-bottom-style: solid;
	border-bottom-width: thin;
	border-bottom-color: #aaaaaa;
}

#menu {
	margin-top: 5px;
	text-align: left;
	float: left;
	width: 100px;
	height: 300px;
}

#menu > a {
	margin-left: 10px;
	display: block;
}

#content {
	float: left;
	width: 1020px;
	margin: 5px;
	padding: 0px 10px 10px 10px;
	border-left-style: solid;
	border-left-width: thin;
	border-left-color: #aaaaaa;
}

#footer {
	padding-bottom: 5px;
	padding-top: 5px;
	border-top-style: solid;
	border-top-width: thin;
	border-top-color: #aaaaaa;
	clear: both;
	font-size: 10px;
}

#footer > div {
	float: left;
	width: 33%;
}

"""
diff --git a/third_party/waf/waflib/extras/cpplint.py b/third_party/waf/waflib/extras/cpplint.py
new file mode 100644 (file)
index 0000000..e574ab1
--- /dev/null
@@ -0,0 +1,217 @@
+#! /usr/bin/env python
+# encoding: utf-8
+#
+# written by Sylvain Rouquette, 2014
+
+'''
+
+This is an extra tool, not bundled with the default waf binary.
+To add the cpplint tool to the waf file:
+$ ./waf-light --tools=compat15,cpplint
+
+this tool also requires cpplint for python.
+If you have PIP, you can install it like this: pip install cpplint
+
+When using this tool, the wscript will look like:
+
+    def options(opt):
+        opt.load('compiler_cxx cpplint')
+
+    def configure(conf):
+        conf.load('compiler_cxx cpplint')
+        # optional, you can also specify them on the command line
+        conf.env.CPPLINT_FILTERS = ','.join((
+            '-whitespace/newline',      # c++11 lambda
+            '-readability/braces',      # c++11 constructor
+            '-whitespace/braces',       # c++11 constructor
+            '-build/storage_class',     # c++11 for-range
+            '-whitespace/blank_line',   # user pref
+            '-whitespace/labels'        # user pref
+            ))
+
+    def build(bld):
+        bld(features='cpplint', source='main.cpp', target='app')
+        # add include files, because they aren't usually built
+        bld(features='cpplint', source=bld.path.ant_glob('**/*.hpp'))
+'''
+
+import sys, re
+import logging
+import threading
+from waflib import Task, TaskGen, Logs, Options, Node
+try:
+    import cpplint.cpplint as cpplint_tool
+except ImportError:
+    try:
+        import cpplint as cpplint_tool
+    except ImportError:
+        pass
+
+
critical_errors = 0  # module-wide count of messages at/above the break threshold
# template used to re-render parsed cpplint messages for the 'waf' output format
CPPLINT_FORMAT = '[CPPLINT] %(filename)s:\nline %(linenum)s, severity %(confidence)s, category: %(category)s\n%(message)s\n'
# regexes parsing cpplint's output back into named fields, per output format
RE_EMACS = re.compile('(?P<filename>.*):(?P<linenum>\d+):  (?P<message>.*)  \[(?P<category>.*)\] \[(?P<confidence>\d+)\]')
CPPLINT_RE = {
    'waf': RE_EMACS,
    'emacs': RE_EMACS,
    'vs7': re.compile('(?P<filename>.*)\((?P<linenum>\d+)\):  (?P<message>.*)  \[(?P<category>.*)\] \[(?P<confidence>\d+)\]'),
    'eclipse': re.compile('(?P<filename>.*):(?P<linenum>\d+): warning: (?P<message>.*)  \[(?P<category>.*)\] \[(?P<confidence>\d+)\]'),
}
+
def options(opt):
    """Register the cpplint command line options on the option parser."""
    add = opt.add_option
    add('--cpplint-filters', type='string', dest='CPPLINT_FILTERS',
        default='',
        help='add filters to cpplint')
    add('--cpplint-length', type='int', dest='CPPLINT_LINE_LENGTH',
        default=80,
        help='specify the line length (default: 80)')
    add('--cpplint-level', type='int', dest='CPPLINT_LEVEL', default=1,
        help='specify the log level (default: 1)')
    add('--cpplint-break', type='int', dest='CPPLINT_BREAK', default=5,
        help='break the build if error >= level (default: 5)')
    add('--cpplint-skip', action='store_true', dest='CPPLINT_SKIP',
        default=False,
        help='skip cpplint during build')
    add('--cpplint-output', type='string', dest='CPPLINT_OUTPUT',
        default='waf',
        help='select output format (waf, emacs, vs7)')
+
+
def configure(conf):
    """Check whether the cpplint python module could be imported; when it is
    missing, mark cpplint as skipped instead of failing the configuration."""
    conf.start_msg('Checking cpplint')
    try:
        # cpplint_tool is only bound when one of the imports above succeeded
        cpplint_tool._cpplint_state
    except NameError:
        conf.env.CPPLINT_SKIP = True
        conf.end_msg('not found, skipping it.')
    else:
        conf.end_msg('ok')
+
+
class cpplint_formatter(Logs.formatter):
    """Log formatter rewriting raw cpplint messages into CPPLINT_FORMAT and
    colorizing info-level records."""
    def __init__(self, fmt):
        logging.Formatter.__init__(self, CPPLINT_FORMAT)
        self.fmt = fmt  # selected output format key into CPPLINT_RE

    def format(self, rec):
        # for the 'waf' format: reparse the emacs-style message and re-render it
        if self.fmt == 'waf':
            result = CPPLINT_RE[self.fmt].match(rec.msg).groupdict()
            rec.msg = CPPLINT_FORMAT % result
        if rec.levelno <= logging.INFO:
            rec.c1 = Logs.colors.CYAN
        return super(cpplint_formatter, self).format(rec)
+
+
class cpplint_handler(Logs.log_handler):
    """Log handler that always emits to the stream captured at construction
    time (the real stderr), even while sys.stderr is redirected by
    cpplint_wrapper."""
    def __init__(self, stream=sys.stderr, **kw):
        super(cpplint_handler, self).__init__(stream, **kw)
        self.stream = stream

    def emit(self, rec):
        # force the captured stream for every record
        rec.stream = self.stream
        self.emit_override(rec)
        self.flush()
+
+
class cpplint_wrapper(object):
    """Context manager that redirects sys.stderr (where the cpplint module
    writes its messages) into itself, parses each message and forwards it to
    a logger by severity. Reference-counted so that concurrently running
    cpplint tasks share a single redirection."""
    stream = None                # saved original sys.stderr while redirected
    tasks_count = 0              # number of tasks currently inside the context
    lock = threading.RLock()     # protects the two class attributes above

    def __init__(self, logger, threshold, fmt):
        self.logger = logger
        self.threshold = threshold  # confidence at/above which a message is critical
        self.error_count = 0
        self.fmt = fmt              # output format key into CPPLINT_RE

    def __enter__(self):
        # the first task entering the context swaps sys.stderr for this object
        with cpplint_wrapper.lock:
            cpplint_wrapper.tasks_count += 1
            if cpplint_wrapper.tasks_count == 1:
                sys.stderr.flush()
                cpplint_wrapper.stream = sys.stderr
                sys.stderr = self
            return self

    def __exit__(self, exc_type, exc_value, traceback):
        # the last task leaving the context restores the original stream
        with cpplint_wrapper.lock:
            cpplint_wrapper.tasks_count -= 1
            if cpplint_wrapper.tasks_count == 0:
                sys.stderr = cpplint_wrapper.stream
                sys.stderr.flush()

    def isatty(self):
        return True

    def write(self, message):
        """Parse one cpplint message; count critical ones and route the rest
        to the logger by confidence level. Writes that do not match the
        expected format are silently dropped."""
        global critical_errors
        result = CPPLINT_RE[self.fmt].match(message)
        if not result:
            return
        level = int(result.groupdict()['confidence'])
        if level >= self.threshold:
            critical_errors += 1
        if level <= 2:
            self.logger.info(message)
        elif level <= 4:
            self.logger.warning(message)
        else:
            self.logger.error(message)
+
+
cpplint_logger = None
def get_cpplint_logger(fmt):
    """Return the shared cpplint logger, creating and configuring it on
    first use (lazy singleton)."""
    global cpplint_logger
    if cpplint_logger is None:
        logger = logging.getLogger('cpplint')
        handler = cpplint_handler()
        handler.setFormatter(cpplint_formatter(fmt))
        logger.addHandler(handler)
        logger.setLevel(logging.DEBUG)
        cpplint_logger = logger
    return cpplint_logger
+
+
class cpplint(Task.Task):
    """Task running the cpplint module on a single source file."""
    color = 'PINK'

    # fix: removed a redundant __init__ that only forwarded to the parent
    # constructor; Task.Task.__init__ is inherited unchanged

    def run(self):
        """Run cpplint on the input node with the configured filters, line
        length and level; return the number of critical errors accumulated
        so far (non-zero fails the task)."""
        # fix: dropped the 'global critical_errors' declaration -- the
        # module-level counter is only read here, never rebound
        with cpplint_wrapper(get_cpplint_logger(self.env.CPPLINT_OUTPUT), self.env.CPPLINT_BREAK, self.env.CPPLINT_OUTPUT):
            if self.env.CPPLINT_OUTPUT != 'waf':
                cpplint_tool._cpplint_state.output_format = self.env.CPPLINT_OUTPUT
            cpplint_tool._cpplint_state.SetFilters(self.env.CPPLINT_FILTERS)
            cpplint_tool._line_length = self.env.CPPLINT_LINE_LENGTH
            cpplint_tool.ProcessFile(self.inputs[0].abspath(), self.env.CPPLINT_LEVEL)
        return critical_errors
+
@TaskGen.extension('.h', '.hh', '.hpp', '.hxx')
def cpplint_includes(self, node):
    # intentional no-op: registering these extensions allows header files to
    # be listed as sources (so they can be linted) without creating any
    # compile task for them
    pass
+
@TaskGen.feature('cpplint')
@TaskGen.before_method('process_source')
def post_cpplint(self):
    """Create one cpplint task per source file of the task generator."""
    if self.env.CPPLINT_SKIP:
        return

    # copy the command line option values into the environment once,
    # without overriding values already set during configuration
    if not self.env.CPPLINT_INITIALIZED:
        for key, value in Options.options.__dict__.items():
            # fix: 'continue' was mis-indented (3 spaces) in the original
            if not key.startswith('CPPLINT_') or self.env[key]:
                continue
            self.env[key] = value
        self.env.CPPLINT_INITIALIZED = True

    # fix: 'not X in Y' -> 'X not in Y'
    if self.env.CPPLINT_OUTPUT not in CPPLINT_RE:
        # unknown output format: the messages could not be parsed, skip
        return

    for src in self.to_list(getattr(self, 'source', [])):
        if isinstance(src, Node.Node):
            node = src
        else:
            # NOTE(review): find_or_declare is used even for existing source
            # files -- confirm against waf node semantics
            node = self.path.find_or_declare(src)
        if not node:
            self.bld.fatal('Could not find %r' % src)
        self.create_task('cpplint', node)
diff --git a/third_party/waf/waflib/extras/cython.py b/third_party/waf/waflib/extras/cython.py
new file mode 100644 (file)
index 0000000..26d1c6f
--- /dev/null
@@ -0,0 +1,145 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2010-2015
+
+import os, re
+from waflib import Task, Logs
+from waflib.TaskGen import extension
+
+cy_api_pat = re.compile(r'\s*?cdef\s*?(public|api)\w*')
+re_cyt = re.compile(r"""
+       (?:from\s+(\w+)\s+)?   # optionally match "from foo" and capture foo
+       c?import\s(\w+|[*])    # require "import bar" and capture bar
+       """, re.M | re.VERBOSE)
+
@extension('.pyx')
def add_cython_file(self, node):
	"""
	Process a *.pyx* file given in the list of source files. No additional
	feature is required::

		def build(bld):
			bld(features='c cshlib pyext', source='main.c foo.pyx', target='app')
	"""
	target_ext = '.cc' if 'cxx' in self.features else '.c'
	if target_ext == '.cc':
		self.env.append_unique('CYTHONFLAGS', '--cplus')

	for inc in getattr(self, 'cython_includes', []):
		# TODO re-use these nodes in "scan" below
		inc_node = self.path.find_dir(inc)
		if inc_node:
			self.env.append_unique('CYTHONFLAGS', '-I%s' % inc_node.abspath())

	task = self.create_task('cython', node, node.change_ext(target_ext))
	self.source += task.outputs
+
class cython(Task.Task):
	run_str = '${CYTHON} ${CYTHONFLAGS} -o ${TGT[0].abspath()} ${SRC}'
	color   = 'GREEN'

	vars    = ['INCLUDES']
	"""
	Rebuild whenever the INCLUDES change. The variables such as CYTHONFLAGS will be appended
	by the metaclass.
	"""

	ext_out = ['.h']
	"""
	The creation of a .h file is known only after the build has begun, so it is not
	possible to compute a build order just by looking at the task inputs/outputs.
	"""

	def runnable_status(self):
		"""
		Perform a double-check to add the headers created by cython
		to the output nodes. The scanner is executed only when the cython task
		must be executed (optimization).
		"""
		ret = super(cython, self).runnable_status()
		if ret == Task.ASK_LATER:
			return ret
		for x in self.generator.bld.raw_deps[self.uid()]:
			if x.startswith('header:'):
				self.outputs.append(self.inputs[0].parent.find_or_declare(x.replace('header:', '')))
		return super(cython, self).runnable_status()

	def post_run(self):
		# Cython may not emit a header after all (no public/api declarations
		# were actually generated); create an empty placeholder so the build
		# graph stays consistent.
		for x in self.outputs:
			if x.name.endswith('.h'):
				if not os.path.exists(x.abspath()):
					if Logs.verbose:
						Logs.warn('Expected %r' % x.abspath())
					x.write('')
		return Task.Task.post_run(self)

	def scan(self):
		"""
		Return the dependent files (.pxd) by looking in the include folders.
		Put the headers to generate in the custom list "bld.raw_deps".
		To inspect the scanner results use::

			$ waf clean build --zones=deps
		"""
		node = self.inputs[0]
		txt = node.read()

		mods = []
		for m in re_cyt.finditer(txt):
			if m.group(1):  # matches "from foo import bar"
				mods.append(m.group(1))
			else:
				mods.append(m.group(2))

		Logs.debug("cython: mods %r" % mods)
		incs = getattr(self.generator, 'cython_includes', [])
		# find_dir returns None for missing folders; filter those out,
		# otherwise find_resource below raises AttributeError on None
		incs = [d for d in (self.generator.path.find_dir(x) for x in incs) if d]
		incs.append(node.parent)

		found = []
		missing = []
		for x in mods:
			for y in incs:
				k = y.find_resource(x + '.pxd')
				if k:
					found.append(k)
					break
			else:
				missing.append(x)

		# the cython file implicitly depends on a pxd file that might be present
		implicit = node.parent.find_resource(node.name[:-3] + 'pxd')
		if implicit:
			found.append(implicit)

		Logs.debug("cython: found %r" % found)

		# Now the .h created - store them in bld.raw_deps for later use
		has_api = False
		has_public = False
		for l in txt.splitlines():
			if cy_api_pat.match(l):
				if ' api ' in l:
					has_api = True
				if ' public ' in l:
					has_public = True
		name = node.name.replace('.pyx', '')
		if has_api:
			missing.append('header:%s_api.h' % name)
		if has_public:
			missing.append('header:%s.h' % name)

		return (found, missing)
+
def options(ctx):
	# Expose --cython-flags so extra flags can be passed at configure time
	ctx.add_option('--cython-flags', action='store', default='', help='space separated list of flags to pass to cython')
+
def configure(ctx):
	"""Find the cython program; a C/C++ compiler and the python tool must be loaded first."""
	if not (ctx.env.CC or ctx.env.CXX):
		ctx.fatal('Load a C/C++ compiler first')
	if not ctx.env.PYTHON:
		ctx.fatal('Load the python tool first!')
	ctx.find_program('cython', var='CYTHON')
	extra_flags = ctx.options.cython_flags
	if extra_flags:
		ctx.env.CYTHONFLAGS = extra_flags
diff --git a/third_party/waf/waflib/extras/dcc.py b/third_party/waf/waflib/extras/dcc.py
new file mode 100644 (file)
index 0000000..8fd2096
--- /dev/null
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Jérôme Carretero, 2011 (zougloub)
+
+from waflib import Options
+from waflib.Tools import ccroot
+from waflib.Configure import conf
+
@conf
def find_dcc(conf):
	"""Locate the Diab C compiler and record it as CC."""
	bindir = getattr(Options.options, 'diabbindir', "")
	conf.find_program(['dcc'], var='CC', path_list=bindir)
	conf.env.CC_NAME = 'dcc'
+
@conf
def find_dld(conf):
	"""Locate the Diab linker and record it as LINK_CC."""
	bindir = getattr(Options.options, 'diabbindir', "")
	conf.find_program(['dld'], var='LINK_CC', path_list=bindir)
	conf.env.LINK_CC_NAME = 'dld'
+
@conf
def find_dar(conf):
	"""Locate the Diab archiver and record it as AR."""
	bindir = getattr(Options.options, 'diabbindir', "")
	conf.find_program(['dar'], var='AR', path_list=bindir)
	conf.env.AR_NAME = 'dar'
	conf.env.ARFLAGS = 'rcs'
+
@conf
def find_ddump(conf):
	"""Locate the Diab ddump utility."""
	bindir = getattr(Options.options, 'diabbindir', "")
	conf.find_program(['ddump'], var='DDUMP', path_list=bindir)
+
@conf
def dcc_common_flags(conf):
	"""Define the flag templates used by the Diab compiler/linker in conf.env."""
	v = conf.env
	# compiler: no source-prefix flag; '-c -o' to compile to an object file
	v['CC_SRC_F']            = []
	v['CC_TGT_F']            = ['-c', '-o']

	# linker
	if not v['LINK_CC']: v['LINK_CC'] = v['CC']
	v['CCLNK_SRC_F']         = []
	v['CCLNK_TGT_F']         = ['-o']
	v['CPPPATH_ST']          = '-I%s'
	v['DEFINES_ST']          = '-D%s'

	v['LIB_ST']              = '-l:%s' # template for adding libs
	v['LIBPATH_ST']          = '-L%s' # template for adding libpaths
	v['STLIB_ST']            = '-l:%s'
	v['STLIBPATH_ST']        = '-L%s'
	v['RPATH_ST']            = '-Wl,-rpath,%s'
	#v['STLIB_MARKER']        = '-Wl,-Bstatic'

	# program
	v['cprogram_PATTERN']    = '%s.elf'

	# static lib
	v['LINKFLAGS_cstlib']    = ['-Wl,-Bstatic']
	v['cstlib_PATTERN']      = 'lib%s.a'
+
def configure(conf):
	"""Configure the Diab toolchain: locate the tools, then set up the flags."""
	conf.find_dcc()
	conf.find_dar()
	conf.find_dld()
	conf.find_ddump()
	conf.dcc_common_flags()
	conf.cc_load_tools()
	conf.cc_add_flags()
	conf.link_add_flags()
+
def options(opt):
	"""
	Add the ``--with-diab-bindir`` command-line options.

	When left empty (the default), the tools are searched on the PATH.
	"""
	opt.add_option('--with-diab-bindir', type='string', dest='diabbindir', help = 'Specify alternate diab bin folder', default="")
diff --git a/third_party/waf/waflib/extras/distnet.py b/third_party/waf/waflib/extras/distnet.py
new file mode 100644 (file)
index 0000000..ac8c344
--- /dev/null
@@ -0,0 +1,431 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+"""
+waf-powered distributed network builds, with a network cache.
+
+Caching files from a server has advantages over a NFS/Samba shared folder:
+
+- builds are much faster because they use local files
+- builds just continue to work in case of a network glitch
+- permissions are much simpler to manage
+"""
+
+import os, urllib, tarfile, re, shutil, tempfile, sys
+from collections import OrderedDict
+from waflib import Context, Utils, Logs
+
+try:
+       from urllib.parse import urlencode
+except ImportError:
+       urlencode = urllib.urlencode
+
def safe_urlencode(data):
	"""URL-encode *data* and return bytes on python 3 as well as python 2."""
	encoded = urlencode(data)
	try:
		return encoded.encode('utf-8')
	except Exception:
		# python 2 byte strings have no/different encode semantics; keep as-is
		return encoded
+
+try:
+       from urllib.error import URLError
+except ImportError:
+       from urllib2 import URLError
+
+try:
+       from urllib.request import Request, urlopen
+except ImportError:
+       from urllib2 import Request, urlopen
+
DISTNETCACHE = os.environ.get('DISTNETCACHE', '/tmp/distnetcache')
DISTNETSERVER = os.environ.get('DISTNETSERVER', 'http://localhost:8000/cgi-bin/')
TARFORMAT = 'w:bz2'
TIMEOUT = 60
REQUIRES = 'requires.txt'

# raw string: '\s' in a plain literal is an invalid escape sequence
# (DeprecationWarning, and a SyntaxWarning from python 3.12 on)
re_com = re.compile(r'\s*#.*', re.M)
+
def total_version_order(num):
	"""Map a dotted version string to a fixed-width key so versions sort numerically."""
	parts = num.split('.')
	return ('%10s' * len(parts)) % tuple(parts)
+
def get_distnet_cache():
	# the wscript may override DISTNETCACHE; else use the environment/default value
	return getattr(Context.g_module, 'DISTNETCACHE', DISTNETCACHE)
+
def get_server_url():
	# the wscript may override DISTNETSERVER; else use the environment/default value
	return getattr(Context.g_module, 'DISTNETSERVER', DISTNETSERVER)
+
def get_download_url():
	# cgi endpoint serving package files
	return '%s/download.py' % get_server_url()
+
def get_upload_url():
	# cgi endpoint receiving published packages
	return '%s/upload.py' % get_server_url()
+
def get_resolve_url():
	# cgi endpoint resolving dependency constraints
	return '%s/resolve.py' % get_server_url()
+
def send_package_name():
	# path of the archive to upload, under the build directory
	# ('out' in the wscript, 'build' by default)
	out = getattr(Context.g_module, 'out', 'build')
	pkgfile = '%s/package_to_upload.tarfile' % out
	return pkgfile
+
class package(Context.Context):
	"""Context for the 'package' command: create the distribution tarball."""
	fun = 'package'
	cmd = 'package'

	def execute(self):
		# collect the file list registered by the wscript 'package' function
		try:
			files = self.files
		except AttributeError:
			files = self.files = []

		Context.Context.execute(self)
		pkgfile = send_package_name()
		if not pkgfile in files:
			if not REQUIRES in files:
				files.append(REQUIRES)
			self.make_tarfile(pkgfile, files, add_to_package=False)

	def make_tarfile(self, filename, files, **kw):
		# kw: add_to_package (default True) registers the archive itself,
		#     bare (default True) stores files without a directory prefix
		if kw.get('add_to_package', True):
			self.files.append(filename)

		with tarfile.open(filename, TARFORMAT) as tar:
			endname = os.path.split(filename)[-1]
			endname = endname.split('.')[0] + '/'
			for x in files:
				tarinfo = tar.gettarinfo(x, x)
				# normalize ownership so archives are reproducible across users
				tarinfo.uid   = tarinfo.gid   = 0
				tarinfo.uname = tarinfo.gname = 'root'
				tarinfo.size = os.stat(x).st_size

				# TODO - more archive creation options?
				if kw.get('bare', True):
					tarinfo.name = os.path.split(x)[1]
				else:
					tarinfo.name = endname + x # todo, if tuple, then..
				Logs.debug("adding %r to %s" % (tarinfo.name, filename))
				with open(x, 'rb') as f:
					tar.addfile(tarinfo, f)
		Logs.info('Created %s' % filename)
+
class publish(Context.Context):
	"""Context for the 'publish' command: upload the release tarball to the server."""
	fun = 'publish'
	cmd = 'publish'
	def execute(self):
		# run the wscript-provided 'publish' function first, if any
		if hasattr(Context.g_module, 'publish'):
			Context.Context.execute(self)
		mod = Context.g_module

		rfile = getattr(self, 'rfile', send_package_name())
		if not os.path.isfile(rfile):
			self.fatal('Create the release file with "waf release" first! %r' % rfile)

		fdata = Utils.readf(rfile, m='rb')
		data = safe_urlencode([('pkgdata', fdata), ('pkgname', mod.APPNAME), ('pkgver', mod.VERSION)])

		req = Request(get_upload_url(), data)
		response = urlopen(req, timeout=TIMEOUT)
		data = response.read().strip()

		# urlopen returns bytes on python 3
		if sys.hexversion>0x300000f:
			data = data.decode('utf-8')

		if data != 'ok':
			self.fatal('Could not publish the package %r' % data)
+
class constraint(object):
	"""One dependency line of the form ``pkgname,version[,key=value...]``."""
	def __init__(self, line=''):
		self.required_line = line
		self.info = []

		line = line.strip()
		if not line:
			return

		fields = line.split(',')
		if fields:
			self.pkgname = fields[0]
			self.required_version = fields[1]
			for item in fields:
				key, _, val = item.partition('=')
				if key and val:
					self.info.append((key, val))
	def __str__(self):
		out = [self.pkgname, self.required_version]
		for pair in self.info:
			out.append('%s=%s' % pair)
		return ','.join(out)

	def __repr__(self):
		return "requires %s-%s" % (self.pkgname, self.required_version)

	def human_display(self, pkgname, pkgver):
		return '%s-%s requires %s-%s' % (pkgname, pkgver, self.pkgname, self.required_version)

	def why(self):
		# collect the human-readable explanations attached to this constraint
		return [val for key, val in self.info if key == 'reason']

	def add_reason(self, reason):
		self.info.append(('reason', reason))
+
def parse_constraints(text):
	"""Parse requires.txt contents into a list of constraint objects."""
	assert(text is not None)
	# strip comments first, then keep only non-empty lines
	uncommented = re.sub(re_com, '', text)
	return [constraint(line.strip()) for line in uncommented.splitlines() if line.strip()]
+
def list_package_versions(cachedir, pkgname):
	"""Return the cached versions of *pkgname*, newest first ([] when absent)."""
	pkgdir = os.path.join(cachedir, pkgname)
	try:
		versions = os.listdir(pkgdir)
	except OSError:
		return []
	return sorted(versions, key=total_version_order, reverse=True)
+
class package_reader(Context.Context):
	"""
	Solve package dependency constraints (remotely, or locally with
	``--offline``) and fetch the packages into the local cache folder.
	"""
	cmd = 'solver'
	fun = 'solver'

	def __init__(self, **kw):
		Context.Context.__init__(self, **kw)

		self.myproject = getattr(Context.g_module, 'APPNAME', 'project')
		self.myversion = getattr(Context.g_module, 'VERSION', '1.0')
		self.cache_constraints = {}
		self.constraints = []

	def compute_dependencies(self, filename=REQUIRES):
		"""Read *filename*, resolve the constraints and abort on conflicts."""
		text = Utils.readf(filename)
		data = safe_urlencode([('text', text)])

		if '--offline' in sys.argv:
			self.constraints = self.local_resolve(text)
		else:
			req = Request(get_resolve_url(), data)
			try:
				response = urlopen(req, timeout=TIMEOUT)
			except URLError as e:
				# fall back to a local resolution when the server is unreachable
				Logs.warn('The package server is down! %r' % e)
				self.constraints = self.local_resolve(text)
			else:
				ret = response.read()
				try:
					ret = ret.decode('utf-8')
				except Exception:
					pass
				self.trace(ret)
				self.constraints = parse_constraints(ret)
		self.check_errors()

	def check_errors(self):
		"""Abort the process when a constraint could not be satisfied."""
		errors = False
		for c in self.constraints:
			if not c.required_version:
				errors = True

				reasons = c.why()
				if len(reasons) == 1:
					Logs.error('%s but no matching package could be found in this repository' % reasons[0])
				else:
					Logs.error('Conflicts on package %r:' % c.pkgname)
					for r in reasons:
						Logs.error('  %s' % r)
		if errors:
			self.fatal('The package requirements cannot be satisfied!')

	def load_constraints(self, pkgname, pkgver, requires=REQUIRES):
		"""Return the parsed constraints of pkgname-pkgver (memoized)."""
		try:
			return self.cache_constraints[(pkgname, pkgver)]
		except KeyError:
			text = Utils.readf(os.path.join(get_distnet_cache(), pkgname, pkgver, requires))
			ret = parse_constraints(text)
			self.cache_constraints[(pkgname, pkgver)] = ret
			return ret

	def apply_constraint(self, domain, constraint):
		"""Keep only the versions of *domain* matching the constraint pattern."""
		vname = constraint.required_version.replace('*', '.*')
		rev = re.compile(vname, re.M)
		ret = [x for x in domain if rev.match(x)]
		return ret

	def trace(self, *k):
		# verbose output, enabled by setting self.debug
		if getattr(self, 'debug', None):
			Logs.error(*k)

	def solve(self, packages_to_versions={}, packages_to_constraints={}, pkgname='', pkgver='', todo=[], done=[]):
		# breadth first search
		# NOTE: the mutable default arguments are never modified in place
		# (fresh lists/dicts are created below), so they are safe here
		n_packages_to_versions = dict(packages_to_versions)
		n_packages_to_constraints = dict(packages_to_constraints)

		self.trace("calling solve with %r    %r %r" % (packages_to_versions, todo, done))
		done = done + [pkgname]

		constraints = self.load_constraints(pkgname, pkgver)
		self.trace("constraints %r" % constraints)

		for k in constraints:
			try:
				domain = n_packages_to_versions[k.pkgname]
			except KeyError:
				domain = list_package_versions(get_distnet_cache(), k.pkgname)


			self.trace("constraints?")
			if not k.pkgname in done:
				todo = todo + [k.pkgname]

			self.trace("domain before %s -> %s, %r" % (pkgname, k.pkgname, domain))

			# apply the constraint
			domain = self.apply_constraint(domain, k)

			self.trace("domain after %s -> %s, %r" % (pkgname, k.pkgname, domain))

			n_packages_to_versions[k.pkgname] = domain

			# then store the constraint applied
			constraints = list(packages_to_constraints.get(k.pkgname, []))
			constraints.append((pkgname, pkgver, k))
			n_packages_to_constraints[k.pkgname] = constraints

			if not domain:
				self.trace("no domain while processing constraint %r from %r %r" % (domain, pkgname, pkgver))
				return (n_packages_to_versions, n_packages_to_constraints)

		# next package on the todo list
		if not todo:
			return (n_packages_to_versions, n_packages_to_constraints)

		n_pkgname = todo[0]
		n_pkgver = n_packages_to_versions[n_pkgname][0]
		tmp = dict(n_packages_to_versions)
		tmp[n_pkgname] = [n_pkgver]

		self.trace("fixed point %s" % n_pkgname)

		return self.solve(tmp, n_packages_to_constraints, n_pkgname, n_pkgver, todo[1:], done)

	def get_results(self):
		return '\n'.join([str(c) for c in self.constraints])

	def solution_to_constraints(self, versions, constraints):
		"""Convert a solver result into a flat list of constraint objects."""
		solution = []
		for p in versions.keys():
			c = constraint()
			solution.append(c)

			c.pkgname = p
			if versions[p]:
				c.required_version = versions[p][0]
			else:
				# empty version marks an unsatisfiable package (see check_errors)
				c.required_version = ''
			for (from_pkgname, from_pkgver, c2) in constraints.get(p, ''):
				c.add_reason(c2.human_display(from_pkgname, from_pkgver))
		return solution

	def local_resolve(self, text):
		"""Resolve the constraints against the local cache only."""
		self.cache_constraints[(self.myproject, self.myversion)] = parse_constraints(text)
		p2v = OrderedDict({self.myproject: [self.myversion]})
		(versions, constraints) = self.solve(p2v, {}, self.myproject, self.myversion, [])
		return self.solution_to_constraints(versions, constraints)

	def download_to_file(self, pkgname, pkgver, subdir, tmp):
		"""Stream one package file from the server into *tmp*."""
		data = safe_urlencode([('pkgname', pkgname), ('pkgver', pkgver), ('pkgfile', subdir)])
		req = urlopen(get_download_url(), data, timeout=TIMEOUT)
		with open(tmp, 'wb') as f:
			while True:
				buf = req.read(8192)
				if not buf:
					break
				f.write(buf)

	def extract_tar(self, subdir, pkgdir, tmpfile):
		# extract into a fresh temp dir, then rename atomically into place;
		# the rmtree only matters when the rename did not happen
		with tarfile.open(tmpfile) as f:
			temp = tempfile.mkdtemp(dir=pkgdir)
			try:
				f.extractall(temp)
				os.rename(temp, os.path.join(pkgdir, subdir))
			finally:
				try:
					shutil.rmtree(temp)
				except Exception:
					pass

	def get_pkg_dir(self, pkgname, pkgver, subdir):
		"""Return the cache path of a package file, downloading it when missing."""
		pkgdir = os.path.join(get_distnet_cache(), pkgname, pkgver)
		if not os.path.isdir(pkgdir):
			os.makedirs(pkgdir)

		target = os.path.join(pkgdir, subdir)

		if os.path.exists(target):
			return target

		(fd, tmp) = tempfile.mkstemp(dir=pkgdir)
		try:
			os.close(fd)
			self.download_to_file(pkgname, pkgver, subdir, tmp)
			if subdir == REQUIRES:
				os.rename(tmp, target)
			else:
				self.extract_tar(subdir, pkgdir, tmp)
		finally:
			try:
				os.remove(tmp)
			except OSError:
				pass

		return target

	def __iter__(self):
		if not self.constraints:
			self.compute_dependencies()
		for x in self.constraints:
			if x.pkgname == self.myproject:
				continue
			yield x
		# do not 'raise StopIteration' here: inside a generator that is a
		# RuntimeError since PEP 479 (python 3.7+); falling off the end of
		# the generator already terminates the iteration

	def execute(self):
		self.compute_dependencies()
+
+packages = package_reader()
+
def load_tools(ctx, extra):
	"""Fetch the dependency packages and load the waf tools they provide."""
	global packages
	for c in packages:
		packages.get_pkg_dir(c.pkgname, c.required_version, extra)
		noarchdir = packages.get_pkg_dir(c.pkgname, c.required_version, 'noarch')
		for x in os.listdir(noarchdir):
			if x.startswith('waf_') and x.endswith('.py'):
				# x[:-3] drops the '.py' suffix; the previous rstrip('.py')
				# strips a *character set* and would also eat trailing
				# 'p'/'y'/'.' characters of the tool name (e.g. 'waf_copy')
				ctx.load([x[:-3]], tooldir=[noarchdir])
+
def options(opt):
	# dependencies are resolved at option-parsing time already so that the
	# downloaded waf tools can contribute their own options
	opt.add_option('--offline', action='store_true')
	packages.execute()
	load_tools(opt, REQUIRES)
+
def configure(conf):
	# fetch the per-variant package files and load their waf tools
	load_tools(conf, conf.variant)
+
def build(bld):
	# make sure the packages for this build variant are present
	load_tools(bld, bld.variant)
diff --git a/third_party/waf/waflib/extras/doxygen.py b/third_party/waf/waflib/extras/doxygen.py
new file mode 100644 (file)
index 0000000..9e17595
--- /dev/null
@@ -0,0 +1,226 @@
+#! /usr/bin/env python
+# encoding: UTF-8
+# Thomas Nagy 2008-2010 (ita)
+
+"""
+
+Doxygen support
+
+Variables passed to bld():
+* doxyfile -- the Doxyfile to use
+* doxy_tar -- destination archive for generated documentation (if desired)
+* install_path -- where to install the documentation
+* pars -- dictionary overriding doxygen configuration settings
+
+When using this tool, the wscript will look like:
+
+       def options(opt):
+               opt.load('doxygen')
+
+       def configure(conf):
+               conf.load('doxygen')
+               # check conf.env.DOXYGEN, if it is mandatory
+
+       def build(bld):
+               if bld.env.DOXYGEN:
+                       bld(features="doxygen", doxyfile='Doxyfile', ...)
+"""
+
+import os, os.path, re
+from waflib import Task, Utils, Node
+from waflib.TaskGen import feature
+
+DOXY_STR = '"${DOXYGEN}" - '
+DOXY_FMTS = 'html latex man rft xml'.split()
+DOXY_FILE_PATTERNS = '*.' + ' *.'.join('''
+c cc cxx cpp c++ java ii ixx ipp i++ inl h hh hxx hpp h++ idl odl cs php php3
+inc m mm py f90c cc cxx cpp c++ java ii ixx ipp i++ inl h hh hxx
+'''.split())
+
re_rl = re.compile('\\\\\r*\n', re.MULTILINE)
re_nl = re.compile('\r*\n', re.M)
def parse_doxy(txt):
	"""Parse Doxyfile text into a dict (handles line continuations and '+=')."""
	tbl = {}
	# join continuation lines first, then process line by line
	txt = re_rl.sub('', txt)
	for raw in re_nl.split(txt):
		line = raw.strip()
		if not line or line.startswith('#') or '=' not in line:
			continue
		if '+=' in line:
			key, _, rest = line.partition('+=')
			key = key.strip()
			val = rest.strip()
			if key in tbl:
				tbl[key] += ' ' + val
			else:
				tbl[key] = val
		else:
			key, _, rest = line.partition('=')
			tbl[key.strip()] = rest.strip()
	return tbl
+
class doxygen(Task.Task):
	"""Task running doxygen on a Doxyfile; the outputs are discovered after the run."""
	vars  = ['DOXYGEN', 'DOXYFLAGS']
	color = 'BLUE'

	def runnable_status(self):
		'''
		self.pars are populated in runnable_status - because this function is being
		run *before* both self.pars "consumers" - scan() and run()

		set output_dir (node) for the output
		'''

		# wait for the tasks this one depends on
		for x in self.run_after:
			if not x.hasrun:
				return Task.ASK_LATER

		if not getattr(self, 'pars', None):
			txt = self.inputs[0].read()
			self.pars = parse_doxy(txt)
			if self.pars.get('OUTPUT_DIRECTORY'):
				# Use the path parsed from the Doxyfile as an absolute path
				output_node = self.inputs[0].parent.get_bld().make_node(self.pars['OUTPUT_DIRECTORY'])
			else:
				# If no OUTPUT_PATH was specified in the Doxyfile, build path from the Doxyfile name + '.doxy'
				output_node = self.inputs[0].parent.get_bld().make_node(self.inputs[0].name + '.doxy')
			output_node.mkdir()
			self.pars['OUTPUT_DIRECTORY'] = output_node.abspath()

			# Override with any parameters passed to the task generator
			if getattr(self.generator, 'pars', None):
				for k, v in self.generator.pars.items():
					self.pars[k] = v

			self.doxy_inputs = getattr(self, 'doxy_inputs', [])
			if not self.pars.get('INPUT'):
				self.doxy_inputs.append(self.inputs[0].parent)
			else:
				for i in self.pars.get('INPUT').split():
					if os.path.isabs(i):
						node = self.generator.bld.root.find_node(i)
					else:
						node = self.inputs[0].parent.find_node(i)
					if not node:
						self.generator.bld.fatal('Could not find the doxygen input %r' % i)
					self.doxy_inputs.append(node)

		if not getattr(self, 'output_dir', None):
			bld = self.generator.bld
			# Output path is always an absolute path as it was transformed above.
			self.output_dir = bld.root.find_dir(self.pars['OUTPUT_DIRECTORY'])

		self.signature()
		ret = Task.Task.runnable_status(self)
		if ret == Task.SKIP_ME:
			# in case the files were removed
			self.add_install()
		return ret

	def scan(self):
		# dependencies: the files doxygen would read, per FILE_PATTERNS/EXCLUDE_PATTERNS
		exclude_patterns = self.pars.get('EXCLUDE_PATTERNS','').split()
		file_patterns = self.pars.get('FILE_PATTERNS','').split()
		if not file_patterns:
			file_patterns = DOXY_FILE_PATTERNS
		if self.pars.get('RECURSIVE') == 'YES':
			file_patterns = ["**/%s" % pattern for pattern in file_patterns]
		nodes = []
		names = []
		for node in self.doxy_inputs:
			if os.path.isdir(node.abspath()):
				for m in node.ant_glob(incl=file_patterns, excl=exclude_patterns):
					nodes.append(m)
			else:
				nodes.append(node)
		return (nodes, names)

	def run(self):
		# feed the (possibly overridden) configuration to doxygen on stdin ('doxygen -')
		dct = self.pars.copy()
		code = '\n'.join(['%s = %s' % (x, dct[x]) for x in self.pars])
		code = code.encode() # for python 3
		#fmt = DOXY_STR % (self.inputs[0].parent.abspath())
		cmd = Utils.subst_vars(DOXY_STR, self.env)
		env = self.env.env or None
		proc = Utils.subprocess.Popen(cmd, shell=True, stdin=Utils.subprocess.PIPE, env=env, cwd=self.inputs[0].parent.abspath())
		proc.communicate(code)
		return proc.returncode

	def post_run(self):
		# the outputs are only known after doxygen has run
		nodes = self.output_dir.ant_glob('**/*', quiet=True)
		for x in nodes:
			x.sig = Utils.h_file(x.abspath())
		self.add_install()
		return Task.Task.post_run(self)

	def add_install(self):
		nodes = self.output_dir.ant_glob('**/*', quiet=True)
		self.outputs += nodes
		# when doxy_tar is set, the tar task handles the installation instead
		if getattr(self.generator, 'install_path', None):
			if not getattr(self.generator, 'doxy_tar', None):
				self.generator.bld.install_files(self.generator.install_path,
					self.outputs,
					postpone=False,
					cwd=self.output_dir,
					relative_trick=True)
+
class tar(Task.Task):
	"quick tar creation"
	run_str = '${TAR} ${TAROPTS} ${TGT} ${SRC}'
	color   = 'RED'
	after   = ['doxygen']
	def runnable_status(self):
		# wait for the doxygen tasks, then collect their outputs as our inputs
		for x in getattr(self, 'input_tasks', []):
			if not x.hasrun:
				return Task.ASK_LATER

		if not getattr(self, 'tar_done_adding', None):
			# execute this only once
			self.tar_done_adding = True
			for x in getattr(self, 'input_tasks', []):
				self.set_inputs(x.outputs)
			if not self.inputs:
				return Task.SKIP_ME
		return Task.Task.runnable_status(self)

	def __str__(self):
		tgt_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.outputs])
		return '%s: %s\n' % (self.__class__.__name__, tgt_str)
+
@feature('doxygen')
def process_doxy(self):
	"""Create the doxygen task, plus a tar task when 'doxy_tar' is given."""
	if not getattr(self, 'doxyfile', None):
		# 'self' is the task generator here; it has no 'generator' attribute,
		# so the previous 'self.generator.bld.fatal' raised an AttributeError
		# instead of printing the intended error message
		self.bld.fatal('no doxyfile??')

	node = self.doxyfile
	if not isinstance(node, Node.Node):
		node = self.path.find_resource(node)
	if not node:
		raise ValueError('doxygen file not found')

	# the task instance
	dsk = self.create_task('doxygen', node)

	if getattr(self, 'doxy_tar', None):
		tsk = self.create_task('tar')
		tsk.input_tasks = [dsk]
		tsk.set_outputs(self.path.find_or_declare(self.doxy_tar))
		# choose the tar compression flags from the archive suffix
		if self.doxy_tar.endswith('bz2'):
			tsk.env['TAROPTS'] = ['cjf']
		elif self.doxy_tar.endswith('gz'):
			tsk.env['TAROPTS'] = ['czf']
		else:
			tsk.env['TAROPTS'] = ['cf']
		if getattr(self, 'install_path', None):
			self.bld.install_files(self.install_path, tsk.outputs)
+
def configure(conf):
	'''
	Check if doxygen and tar commands are present in the system

	If the commands are present, then conf.env.DOXYGEN and conf.env.TAR
	variables will be set. Detection can be controlled by setting DOXYGEN and
	TAR environmental variables.
	'''

	# not mandatory: wscripts are expected to test bld.env.DOXYGEN themselves
	conf.find_program('doxygen', var='DOXYGEN', mandatory=False)
	conf.find_program('tar', var='TAR', mandatory=False)
diff --git a/third_party/waf/waflib/extras/dpapi.py b/third_party/waf/waflib/extras/dpapi.py
new file mode 100644 (file)
index 0000000..4df64bf
--- /dev/null
@@ -0,0 +1,86 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Matt Clarkson, 2012
+
+'''
+DPAPI access library (http://msdn.microsoft.com/en-us/library/ms995355.aspx)
+This file uses code originally created by Crusher Joe:
+http://article.gmane.org/gmane.comp.python.ctypes/420
+And modified by Wayne Koorts:
+http://stackoverflow.com/questions/463832/using-dpapi-with-python
+'''
+
+from ctypes import windll, byref, cdll, Structure, POINTER, c_char, c_buffer
+from ctypes.wintypes import DWORD
+from waflib.Configure import conf
+
+LocalFree = windll.kernel32.LocalFree
+memcpy = cdll.msvcrt.memcpy
+CryptProtectData = windll.crypt32.CryptProtectData
+CryptUnprotectData = windll.crypt32.CryptUnprotectData
+CRYPTPROTECT_UI_FORBIDDEN = 0x01
+try:
+       extra_entropy = 'cl;ad13 \0al;323kjd #(adl;k$#ajsd'.encode('ascii')
+except AttributeError:
+       extra_entropy = 'cl;ad13 \0al;323kjd #(adl;k$#ajsd'
+
class DATA_BLOB(Structure):
	# ctypes mirror of the win32 DATA_BLOB structure: a byte count plus a
	# pointer to the data, used for both input and output of the DPAPI calls
	_fields_ = [
		('cbData', DWORD),
		('pbData', POINTER(c_char))
	]
+
def get_data(blob_out):
	"""Copy the bytes out of a DATA_BLOB filled by the win32 API and free the blob."""
	cbData = int(blob_out.cbData)
	pbData = blob_out.pbData
	buffer = c_buffer(cbData)
	memcpy(buffer, pbData, cbData)
	# the blob memory was allocated by the system, release it with LocalFree
	LocalFree(pbData);
	return buffer.raw
+
@conf
def dpapi_encrypt_data(self, input_bytes, entropy = extra_entropy):
	'''
	Encrypts data and returns byte string

	:param input_bytes: The data to be encrypted
	:type input_bytes: String or Bytes
	:param entropy: Extra entropy to add to the encryption process (optional)
	:type entropy: String or Bytes
	'''
	if not isinstance(input_bytes, bytes) or not isinstance(entropy, bytes):
		self.fatal('The inputs to dpapi must be bytes')
	buffer_in      = c_buffer(input_bytes, len(input_bytes))
	buffer_entropy = c_buffer(entropy, len(entropy))
	blob_in        = DATA_BLOB(len(input_bytes), buffer_in)
	blob_entropy   = DATA_BLOB(len(entropy), buffer_entropy)
	blob_out       = DATA_BLOB()

	if CryptProtectData(byref(blob_in), 'python_data', byref(blob_entropy),
		None, None, CRYPTPROTECT_UI_FORBIDDEN, byref(blob_out)):
		return get_data(blob_out)
	else:
		# bug fix: this is the encryption path -- the message previously
		# said 'Failed to decrypt data', copied from dpapi_decrypt_data
		self.fatal('Failed to encrypt data')
+
@conf
def dpapi_decrypt_data(self, encrypted_bytes, entropy = extra_entropy):
	'''
	Decrypts data and returns byte string

	:param encrypted_bytes: The encrypted data
	:type encrypted_bytes: Bytes
	:param entropy: Extra entropy to add to the encryption process (optional)
	:type entropy: String or Bytes
	'''
	if not isinstance(encrypted_bytes, bytes) or not isinstance(entropy, bytes):
		self.fatal('The inputs to dpapi must be bytes')
	buffer_in      = c_buffer(encrypted_bytes, len(encrypted_bytes))
	buffer_entropy = c_buffer(entropy, len(entropy))
	blob_in        = DATA_BLOB(len(encrypted_bytes), buffer_in)
	blob_entropy   = DATA_BLOB(len(entropy), buffer_entropy)
	blob_out       = DATA_BLOB()
	# the entropy must match the value used when the data was encrypted
	if CryptUnprotectData(byref(blob_in), None, byref(blob_entropy), None,
		None, CRYPTPROTECT_UI_FORBIDDEN, byref(blob_out)):
		return get_data(blob_out)
	else:
		self.fatal('Failed to decrypt data')
diff --git a/third_party/waf/waflib/extras/file_to_object.py b/third_party/waf/waflib/extras/file_to_object.py
new file mode 100644 (file)
index 0000000..c2e8809
--- /dev/null
@@ -0,0 +1,136 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# Tool to embed file into objects
+
+__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
+__copyright__ = "Jérôme Carretero, 2014"
+
+"""
+
+This tool allows to embed file contents in object files (.o).
+It is not exactly portable, and the file contents are reachable
+using various non-portable fashions.
+The goal here is to provide a functional interface to the embedding
+of file data in objects.
+See the ``playground/embedded_resources`` example for an example.
+
+Usage::
+
+   bld(
+    name='pipeline',
+     # ^ Reference this in use="..." for things using the generated code
+    features='file_to_object',
+    source='some.file',
+     # ^ Name of the file to embed in binary section.
+   )
+
+Known issues:
+
+- Destination is named like source, with extension renamed to .o
+  eg. some.file -> some.o
+
+"""
+
+import os
+from waflib import Task, TaskGen, Errors
+
def filename_c_escape(x):
	"""Escape backslashes in *x* so it can sit inside a C/assembler string literal."""
	return "\\\\".join(x.split("\\"))
+
class file_to_object_s(Task.Task):
	# generate a GNU-assembler file that embeds the input file via .incbin,
	# exposing <name>_start/_end/_size symbols
	color = 'CYAN'
	dep_vars = ('DEST_CPU', 'DEST_BINFMT')

	def run(self):
		name = []
		# build a C-identifier-safe symbol name from the input file name
		for i, x in enumerate(self.inputs[0].name):
			if x.isalnum():
				name.append(x)
			else:
				name.append('_')
		file = self.inputs[0].abspath()
		size = os.path.getsize(file)
		# choose the directive/alignment for the size symbol per word size
		if self.env.DEST_CPU in ('x86_64', 'ia', 'aarch64'):
			unit = 'quad'
			align = 8
		elif self.env.DEST_CPU in ('x86','arm', 'thumb', 'm68k'):
			unit = 'long'
			align = 4
		else:
			raise Errors.WafError("Unsupported DEST_CPU, please report bug!")

		file = filename_c_escape(file)
		name = "_binary_" + "".join(name)
		rodata = ".section .rodata"
		if self.env.DEST_BINFMT == "mac-o":
			# Mach-O symbols carry a leading underscore and use other section names
			name = "_" + name
			rodata = ".section __TEXT,__const"

		with open(self.outputs[0].abspath(), 'w') as f:
			f.write(\
"""
	.global %(name)s_start
	.global %(name)s_end
	.global %(name)s_size
	%(rodata)s
%(name)s_start:
	.incbin "%(file)s"
%(name)s_end:
	.align %(align)d
%(name)s_size:
	.%(unit)s 0x%(size)x
""" % locals())
+
class file_to_object_c(Task.Task):
	# generate a C file embedding the input file as a char array,
	# exposing <name>_start/_end/_size symbols
	color = 'CYAN'
	def run(self):
		name = []
		# build a C-identifier-safe symbol name from the input file name
		for i, x in enumerate(self.inputs[0].name):
			if x.isalnum():
				name.append(x)
			else:
				name.append('_')
		file = self.inputs[0].abspath()
		size = os.path.getsize(file)

		name = "_binary_" + "".join(name)

		# bug fix: on Python 3 iterating a bytes object yields ints, so the
		# previous ord(x) raised TypeError; bytearray yields ints on 2 and 3
		data = bytearray(self.inputs[0].read('rb'))
		lines, line = [], []
		for idx_byte, byte in enumerate(data):
			line.append(byte)
			# flush every 16 bytes, and on the very last byte
			if len(line) > 15 or idx_byte == size-1:
				lines.append(", ".join(("0x%02x" % x) for x in line))
				line = []
		data = ",\n ".join(lines)

		self.outputs[0].write(\
"""
unsigned long %(name)s_size = %(size)dL;
char const %(name)s_start[] = {
 %(data)s
};
char const %(name)s_end[] = {};
""" % locals())
+
@TaskGen.feature('file_to_object')
@TaskGen.before_method('process_source')
def tg_file_to_object(self):
	"""
	Replace each source file with a generated .s or .c wrapper that embeds
	it; the wrappers are then compiled by the normal process_source pass.
	"""
	bld = self.bld
	sources = self.to_nodes(self.source)
	targets = []
	for src in sources:
		# F2O_METHOD selects the assembler (.incbin) or portable C embedding
		if bld.env.F2O_METHOD == ["asm"]:
			tgt = src.parent.find_or_declare(src.name + '.f2o.s')
			tsk = self.create_task('file_to_object_s', src, tgt)
			tsk.cwd = src.parent.abspath() # verify
		else:
			tgt = src.parent.find_or_declare(src.name + '.f2o.c')
			tsk = self.create_task('file_to_object_c', src, tgt)
			tsk.cwd = src.parent.abspath() # verify
		targets.append(tgt)
	self.source = targets
+
def configure(conf):
	"""Load the assembler tool and default to the portable C embedding method."""
	conf.load('gas')
	conf.env.F2O_METHOD = ["c"]
diff --git a/third_party/waf/waflib/extras/freeimage.py b/third_party/waf/waflib/extras/freeimage.py
new file mode 100644 (file)
index 0000000..8933abe
--- /dev/null
@@ -0,0 +1,73 @@
+#!/usr/bin/env python
+# encoding: utf-8
+#
+# written by Sylvain Rouquette, 2011
+
+'''
+To add the freeimage tool to the waf file:
+$ ./waf-light --tools=compat15,freeimage
+       or, if you have waf >= 1.6.2
+$ ./waf update --files=freeimage
+
+The wscript will look like:
+
+def options(opt):
+       opt.load('compiler_cxx freeimage')
+
+def configure(conf):
+       conf.load('compiler_cxx freeimage')
+
+       # you can call check_freeimage with some parameters.
+       # It's optional on Linux, it's 'mandatory' on Windows if
+       # you didn't use --fi-path on the command-line
+
+       # conf.check_freeimage(path='FreeImage/Dist', fip=True)
+
+def build(bld):
+       bld(source='main.cpp', target='app', use='FREEIMAGE')
+'''
+
+from waflib import Utils
+from waflib.Configure import conf
+
+
def options(opt):
	"""Add the FreeImage command-line options."""
	opt.add_option('--fi-path', type='string', default='', dest='fi_path',
				   help='''path to the FreeImage directory \
						where the files are e.g. /FreeImage/Dist''')
	opt.add_option('--fip', action='store_true', default=False, dest='fip',
				   help='link with FreeImagePlus')
	opt.add_option('--fi-static', action='store_true',
				   default=False, dest='fi_static',
				   # bug fix: the help text previously said "link as shared
				   # libraries", the opposite of what the flag does
				   help="link statically against FreeImage")
+
+
@conf
def check_freeimage(self, path=None, fip=False):
	"""
	Locate FreeImage and populate the FREEIMAGE use variables.

	:param path: directory holding the FreeImage files (required on win32)
	:param fip: also link against FreeImagePlus
	"""
	self.start_msg('Checking FreeImage')
	if not self.env['CXX']:
		self.fatal('you must load compiler_cxx before loading freeimage')
	# the 'ST' prefix selects the static-link use variables
	prefix = self.options.fi_static and 'ST' or ''
	platform = Utils.unversioned_sys_platform()
	if platform == 'win32':
		if not path:
			self.fatal('you must specify the path to FreeImage. \
					   use --fi-path=/FreeImage/Dist')
		else:
			self.env['INCLUDES_FREEIMAGE'] = path
			self.env['%sLIBPATH_FREEIMAGE' % prefix] = path
	libs = ['FreeImage']
	# NOTE(review): the 'fip' parameter is ignored in favour of
	# self.options.fip -- confirm whether that is intended
	if self.options.fip:
		libs.append('FreeImagePlus')
	if platform == 'win32':
		self.env['%sLIB_FREEIMAGE' % prefix] = libs
	else:
		# unix library names are lowercase (libfreeimage.so)
		self.env['%sLIB_FREEIMAGE' % prefix] = [i.lower() for i in libs]
	self.end_msg('ok')
+
+
def configure(conf):
	"""Run the FreeImage check; on win32, silently skip when --fi-path was not given."""
	platform = Utils.unversioned_sys_platform()
	if platform == 'win32' and not conf.options.fi_path:
		return
	conf.check_freeimage(conf.options.fi_path, conf.options.fip)
diff --git a/third_party/waf/waflib/extras/fsb.py b/third_party/waf/waflib/extras/fsb.py
new file mode 100644 (file)
index 0000000..ba475d8
--- /dev/null
@@ -0,0 +1,30 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011 (ita)
+
+"""
+Fully sequential builds
+
+The previous tasks from task generators are re-processed, and this may lead to speed issues
+Yet, if you are using this, speed is probably a minor concern
+"""
+
+from waflib import Build
+
def options(opt):
	# no command-line options for this tool
	pass
+
def configure(conf):
	# nothing to detect; the tool only changes the build scheduling
	pass
+
class FSBContext(Build.BuildContext):
	"""Build context that compiles each task generator as soon as it is declared."""
	def __call__(self, *k, **kw):
		ret = Build.BuildContext.__call__(self, *k, **kw)

		# evaluate the results immediately
		Build.BuildContext.compile(self)

		return ret

	def compile(self):
		# the final compile pass is a no-op: everything was built incrementally
		pass
diff --git a/third_party/waf/waflib/extras/gccdeps.py b/third_party/waf/waflib/extras/gccdeps.py
new file mode 100644 (file)
index 0000000..26b8bdb
--- /dev/null
@@ -0,0 +1,211 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2008-2010 (ita)
+
+"""
+Execute the tasks with gcc -MD, read the dependencies from the .d file
+and prepare the dependency calculation for the next run.
+This affects the cxx class, so make sure to load Qt5 after this tool.
+
+Usage:
+       def configure(conf):
+               conf.load('gccdeps')
+"""
+
+import os, re, threading
+from waflib import Task, Logs, Utils, Errors
+from waflib.Tools import c_preproc
+from waflib.TaskGen import before_method, feature
+
+lock = threading.Lock()
+
+gccdeps_flags = ['-MD']
+if not c_preproc.go_absolute:
+       gccdeps_flags = ['-MMD']
+
+# Third-party tools are allowed to add extra names in here with append()
+supported_compilers = ['gcc', 'icc', 'clang']
+
def scan(self):
	"""Replacement scanner: reuse the dependencies stored during the previous build."""
	if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
		# gccdeps disabled for this task class: fall back to the default scanner
		return super(self.derived_gccdeps, self).scan()
	nodes = self.generator.bld.node_deps.get(self.uid(), [])
	names = []
	return (nodes, names)
+
# compiled once at module level; raw strings avoid the invalid-escape
# warning for "\." on modern Python (the match is unchanged)
re_o = re.compile(r"\.o$")
re_splitter = re.compile(r'(?<!\\)\s+') # split by space, except when spaces are escaped
+
def remove_makefile_rule_lhs(line):
	"""
	Strip the target part ("foo.o: ") from a Makefile dependency line.

	A plain colon could appear inside a Windows drive path, so the divider
	is a colon followed by whitespace; lines without one are returned as-is.
	"""
	pos = line.find(': ')
	return line[pos + 2:] if pos >= 0 else line
+
def path_to_node(base_node, path, cached_nodes):
	# Take the base node and the path and return a node
	# Results are cached because searching the node tree is expensive
	# The following code is executed by threads, it is not safe, so a lock is needed...
	if getattr(path, '__hash__'):
		node_lookup_key = (base_node, path)
	else:
		# Not hashable, assume it is a list and join into a string
		node_lookup_key = (base_node, os.path.sep.join(path))
	try:
		lock.acquire()
		node = cached_nodes[node_lookup_key]
	except KeyError:
		node = base_node.find_resource(path)
		# negative results (None) are cached as well
		cached_nodes[node_lookup_key] = node
	finally:
		lock.release()
	return node
+
def post_run(self):
	"""
	Parse the .d file produced by the compiler (-MD/-MMD), resolve each
	dependency to a node, and store the result for the next build run.
	"""
	if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
		return super(self.derived_gccdeps, self).post_run()

	# the .d file sits next to the object file, with the extension swapped
	name = self.outputs[0].abspath()
	name = re_o.sub('.d', name)
	try:
		txt = Utils.readf(name)
	except EnvironmentError:
		Logs.error('Could not find a .d dependency file, are cflags/cxxflags overwritten?')
		raise
	#os.remove(name)

	# Compilers have the choice to either output the file's dependencies
	# as one large Makefile rule:
	#
	#   /path/to/file.o: /path/to/dep1.h \
	#                    /path/to/dep2.h \
	#                    /path/to/dep3.h \
	#                    ...
	#
	# or as many individual rules:
	#
	#   /path/to/file.o: /path/to/dep1.h
	#   /path/to/file.o: /path/to/dep2.h
	#   /path/to/file.o: /path/to/dep3.h
	#   ...
	#
	# So the first step is to sanitize the input by stripping out the left-
	# hand side of all these lines. After that, whatever remains are the
	# implicit dependencies of task.outputs[0]
	txt = '\n'.join([remove_makefile_rule_lhs(line) for line in txt.splitlines()])

	# Now join all the lines together
	txt = txt.replace('\\\n', '')

	val = txt.strip()
	# split on unescaped whitespace, then unescape spaces inside filenames
	val = [x.replace('\\ ', ' ') for x in re_splitter.split(val) if x]

	nodes = []
	bld = self.generator.bld

	# Dynamically bind to the cache
	try:
		cached_nodes = bld.cached_nodes
	except AttributeError:
		cached_nodes = bld.cached_nodes = {}

	for x in val:

		node = None
		if os.path.isabs(x):
			node = path_to_node(bld.root, x, cached_nodes)
		else:
			# TODO waf 1.9 - single cwd value
			path = getattr(bld, 'cwdx', bld.bldnode)
			# when calling find_resource, make sure the path does not contain '..'
			x = [k for k in Utils.split_path(x) if k and k != '.']
			while '..' in x:
				idx = x.index('..')
				if idx == 0:
					x = x[1:]
					path = path.parent
				else:
					del x[idx]
					del x[idx-1]

			node = path_to_node(path, x, cached_nodes)

		if not node:
			raise ValueError('could not find %r for %r' % (x, self))
		if id(node) == id(self.inputs[0]):
			# ignore the source file, it is already in the dependencies
			# this way, successful config tests may be retrieved from the cache
			continue
		nodes.append(node)

	Logs.debug('deps: gccdeps for %s returned %s', self, nodes)

	bld.node_deps[self.uid()] = nodes
	bld.raw_deps[self.uid()] = []

	# drop the cached signature so it is recomputed with the new dependencies
	try:
		del self.cache_sig
	except AttributeError:
		pass

	Task.Task.post_run(self)
+
def sig_implicit_deps(self):
	"""Replacement signature computation: tolerate dependency nodes that vanished."""
	if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
		return super(self.derived_gccdeps, self).sig_implicit_deps()
	try:
		return Task.Task.sig_implicit_deps(self)
	except Errors.WafError:
		# a stored dependency disappeared; force a rebuild instead of failing
		return Utils.SIG_NIL
+
def wrap_compiled_task(classname):
	"""Derive a subclass of the given task class with the gccdeps methods grafted on."""
	# NOTE(review): presumably the Task metaclass registers the subclass under
	# the same name, replacing the original in Task.classes -- confirm
	derived_class = type(classname, (Task.classes[classname],), {})
	derived_class.derived_gccdeps = derived_class
	derived_class.post_run = post_run
	derived_class.scan = scan
	derived_class.sig_implicit_deps = sig_implicit_deps

# wrap only the compiler task classes that are actually loaded
for k in ('c', 'cxx'):
	if k in Task.classes:
		wrap_compiled_task(k)
+
@before_method('process_source')
@feature('force_gccdeps')
def force_gccdeps(self):
	# used by the configuration tests in configure() below to enable
	# the gccdeps wrappers unconditionally for the test build
	self.env.ENABLE_GCCDEPS = ['c', 'cxx']
+
def configure(conf):
	"""
	Check whether the C and C++ compilers accept the -MD/-MMD flags and
	enable the gccdeps wrappers for the task classes that pass the check.
	"""
	# in case someone provides a --enable-gccdeps command-line option
	if not getattr(conf.options, 'enable_gccdeps', True):
		return

	global gccdeps_flags
	flags = conf.env.GCCDEPS_FLAGS or gccdeps_flags
	# NOTE(review): the check uses 'flags' (possibly from GCCDEPS_FLAGS) but
	# the default gccdeps_flags are appended on success -- confirm intent
	if conf.env.CC_NAME in supported_compilers:
		try:
			conf.check(fragment='int main() { return 0; }', features='c force_gccdeps', cflags=flags, msg='Checking for c flags %r' % ''.join(flags))
		except Errors.ConfigurationError:
			pass
		else:
			conf.env.append_value('CFLAGS', gccdeps_flags)
			conf.env.append_unique('ENABLE_GCCDEPS', 'c')

	if conf.env.CXX_NAME in supported_compilers:
		try:
			conf.check(fragment='int main() { return 0; }', features='cxx force_gccdeps', cxxflags=flags, msg='Checking for cxx flags %r' % ''.join(flags))
		except Errors.ConfigurationError:
			pass
		else:
			conf.env.append_value('CXXFLAGS', gccdeps_flags)
			conf.env.append_unique('ENABLE_GCCDEPS', 'cxx')
diff --git a/third_party/waf/waflib/extras/go.py b/third_party/waf/waflib/extras/go.py
new file mode 100644 (file)
index 0000000..2ba54b8
--- /dev/null
@@ -0,0 +1,255 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Tom Wambold tom5760 gmail.com 2009
+# Thomas Nagy 2010
+
+"""
+Go as a language may look nice, but its toolchain is one of the worst a developer
+has ever seen. It keeps changing though, and I would like to believe that it will get
+better eventually, but the crude reality is that this tool and the examples are
+getting broken every few months.
+
+If you have been lured into trying to use Go, you should stick to their Makefiles.
+"""
+
+import os, platform
+
+from waflib import Utils, Task, TaskGen
+from waflib.TaskGen import feature, extension, after_method, before_method
+from waflib.Tools.ccroot import link_task, stlink_task, propagate_uselib_vars, process_use
+
class go(Task.Task):
	# compile a .go source file with the gc compiler
	run_str = '${GOC} ${GOCFLAGS} ${CPPPATH_ST:INCPATHS} -o ${TGT} ${SRC}'
+
class gopackage(stlink_task):
	# archive compiled go objects into a package with gopack
	run_str = '${GOP} grc ${TGT} ${SRC}'
+
class goprogram(link_task):
	# link a go program and install it as an executable
	run_str = '${GOL} ${GOLFLAGS} -o ${TGT} ${SRC}'
	inst_to = '${BINDIR}'
	chmod   = Utils.O755
+
class cgopackage(stlink_task):
	# build a cgo package: mirror the sources into a build directory,
	# generate a Makefile, and delegate the build to gomake
	color   = 'YELLOW'
	inst_to = '${LIBDIR}'
	ext_in  = ['.go']
	ext_out = ['.a']

	def run(self):
		src_dir = self.generator.bld.path
		source  = self.inputs
		target  = self.outputs[0].change_ext('')

		#print ("--> %s" % self.outputs)
		#print ('++> %s' % self.outputs[1])
		bld_dir = self.outputs[1]
		bld_dir.mkdir()
		obj_dir = bld_dir.make_node('_obj')
		obj_dir.mkdir()

		# symlink (or copy) every source file into the build directory
		bld_srcs = []
		for s in source:
			# FIXME: it seems gomake/cgo stumbles on filenames like a/b/c.go
			# -> for the time being replace '/' with '_'...
			#b = bld_dir.make_node(s.path_from(src_dir))
			b = bld_dir.make_node(s.path_from(src_dir).replace(os.sep,'_'))
			b.parent.mkdir()
			#print ('++> %s' % (s.path_from(src_dir),))
			try:
				try:os.remove(b.abspath())
				except Exception:pass
				os.symlink(s.abspath(), b.abspath())
			except Exception:
				# if no support for symlinks, copy the file from src
				b.write(s.read())
			bld_srcs.append(b)
			#print("--|> [%s]" % b.abspath())
			b.sig = Utils.h_file(b.abspath())
			pass
		#self.set_inputs(bld_srcs)
		#self.generator.bld.raw_deps[self.uid()] = [self.signature()] + bld_srcs
		makefile_node = bld_dir.make_node("Makefile")
		makefile_tmpl = '''\
# Copyright 2009 The Go Authors.  All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file. ---

include $(GOROOT)/src/Make.inc

TARG=%(target)s

GCIMPORTS= %(gcimports)s

CGOFILES=\\
\t%(source)s

CGO_CFLAGS= %(cgo_cflags)s

CGO_LDFLAGS= %(cgo_ldflags)s

include $(GOROOT)/src/Make.pkg

%%: install %%.go
	$(GC) $*.go
	$(LD) -o $@ $*.$O

''' % {
'gcimports': ' '.join(l for l in self.env['GOCFLAGS']),
'cgo_cflags' : ' '.join(l for l in self.env['GOCFLAGS']),
'cgo_ldflags': ' '.join(l for l in self.env['GOLFLAGS']),
'target': target.path_from(obj_dir),
'source': ' '.join([b.path_from(bld_dir) for b in bld_srcs])
}
		makefile_node.write(makefile_tmpl)
		#print ("::makefile: %s"%makefile_node.abspath())
		# run gomake, logging its output into a .gomake.log file
		cmd = Utils.subst_vars('gomake ${GOMAKE_FLAGS}', self.env).strip()
		o = self.outputs[0].change_ext('.gomake.log')
		fout_node = bld_dir.find_or_declare(o.name)
		fout = open(fout_node.abspath(), 'w')
		rc = self.generator.bld.exec_command(
		 cmd,
		 stdout=fout,
		 stderr=fout,
		 cwd=bld_dir.abspath(),
		)
		if rc != 0:
			import waflib.Logs as msg
			msg.error('** error running [%s] (cgo-%s)' % (cmd, target))
			msg.error(fout_node.read())
			return rc
		self.generator.bld.read_stlib(
		 target,
		 paths=[obj_dir.abspath(),],
		)
		# install the resulting archive, preserving any package hierarchy
		tgt = self.outputs[0]
		if tgt.parent != obj_dir:
			install_dir = os.path.join('${LIBDIR}',
				tgt.parent.path_from(obj_dir))
		else:
			install_dir = '${LIBDIR}'
		#print('===> %s (%s)' % (tgt.abspath(), install_dir))
		self.generator.bld.install_files(
		 install_dir,
		 tgt.abspath(),
		 relative_trick=False,
		 postpone=False,
		)
		return rc
+
@extension('.go')
def compile_go(self, node):
	"""Map a .go file to a 'go' compile task, or to a cgopackage task when requested."""
	#print('*'*80, self.name)
	if not ('cgopackage' in self.features):
		return self.create_compiled_task('go', node)
	#print ('compile_go-cgo...')
	#bld_dir = node.parent.get_bld()
	#obj_dir = bld_dir.make_node('_obj')
	return self.create_task('cgopackage', node, node.change_ext('.a'))
+
@feature('gopackage', 'goprogram', 'cgopackage')
@before_method('process_source')
def go_compiler_is_foobar(self):
	"""
	Separate the go sources from the rest and create the compile task by
	hand: the gc toolchain compiles all go files of a unit in one command.
	"""
	if self.env.GONAME == 'gcc':
		return
	self.source = self.to_nodes(self.source)
	src = []
	go = []
	for node in self.source:
		if node.name.endswith('.go'):
			go.append(node)
		else:
			src.append(node)
	self.source = src
	if not ('cgopackage' in self.features):
		#print('--> [%s]... (%s)' % (go[0], getattr(self, 'target', 'N/A')))
		# one task for all go files; the extra files ride along as inputs
		tsk = self.create_compiled_task('go', go[0])
		tsk.inputs.extend(go[1:])
	else:
		#print ('+++ [%s] +++' % self.target)
		bld_dir = self.path.get_bld().make_node('cgopackage--%s' % self.target.replace(os.sep,'_'))
		obj_dir = bld_dir.make_node('_obj')
		target  = obj_dir.make_node(self.target+'.a')
		tsk = self.create_task('cgopackage', go, [target, bld_dir])
		self.link_task = tsk
+
@feature('gopackage', 'goprogram', 'cgopackage')
@after_method('process_source', 'apply_incpaths',)
def go_local_libs(self):
	"""Propagate include/link paths and ordering constraints from 'use' dependencies."""
	names = self.to_list(getattr(self, 'use', []))
	#print ('== go-local-libs == [%s] == use: %s' % (self.name, names))
	for name in names:
		tg = self.bld.get_tgen_by_name(name)
		if not tg:
			# NOTE(review): waflib keeps WafError in Errors, not Utils; this
			# line would raise AttributeError if ever reached -- confirm
			raise Utils.WafError('no target of name %r necessary for %r in go uselib local' % (name, self))
		tg.post()
		#print ("-- tg[%s]: %s" % (self.name,name))
		lnk_task = getattr(tg, 'link_task', None)
		if lnk_task:
			# run after the dependency's link task and rebuild on its outputs
			for tsk in self.tasks:
				if isinstance(tsk, (go, gopackage, cgopackage)):
					tsk.set_run_after(lnk_task)
					tsk.dep_nodes.extend(lnk_task.outputs)
			path = lnk_task.outputs[0].parent.abspath()
			if isinstance(lnk_task, (go, gopackage)):
				# handle hierarchical packages
				path = lnk_task.generator.path.get_bld().abspath()
			elif isinstance(lnk_task, (cgopackage,)):
				# handle hierarchical cgopackages
				cgo_obj_dir = lnk_task.outputs[1].find_or_declare('_obj')
				path = cgo_obj_dir.abspath()
			# recursively add parent GOCFLAGS...
			self.env.append_unique('GOCFLAGS',
			 getattr(lnk_task.env, 'GOCFLAGS',[]))
			# ditto for GOLFLAGS...
			self.env.append_unique('GOLFLAGS',
			 getattr(lnk_task.env, 'GOLFLAGS',[]))
			self.env.append_unique('GOCFLAGS', ['-I%s' % path])
			self.env.append_unique('GOLFLAGS', ['-L%s' % path])
		for n in getattr(tg, 'includes_nodes', []):
			self.env.append_unique('GOCFLAGS', ['-I%s' % n.abspath()])
		pass
	pass
+
def configure(conf):
	"""
	Detect the gc toolchain (6g/8g/5g and matching linkers) for the GOARCH
	environment variable or the host machine, and find the helper programs.
	"""
	def set_def(var, val):
		# assign a default only when the user has not preset the variable
		if not conf.env[var]:
			conf.env[var] = val

	goarch = os.getenv('GOARCH')
	if goarch == '386':
		set_def('GO_PLATFORM', 'i386')
	elif goarch == 'amd64':
		set_def('GO_PLATFORM', 'x86_64')
	elif goarch == 'arm':
		set_def('GO_PLATFORM', 'arm')
	else:
		set_def('GO_PLATFORM', platform.machine())

	if conf.env.GO_PLATFORM == 'x86_64':
		set_def('GO_COMPILER', '6g')
		set_def('GO_LINKER', '6l')
	elif conf.env.GO_PLATFORM in ('i386', 'i486', 'i586', 'i686'):
		set_def('GO_COMPILER', '8g')
		set_def('GO_LINKER', '8l')
	elif conf.env.GO_PLATFORM == 'arm':
		set_def('GO_COMPILER', '5g')
		set_def('GO_LINKER', '5l')
		set_def('GO_EXTENSION', '.5')

	if not (conf.env.GO_COMPILER or conf.env.GO_LINKER):
		# NOTE(review): conf.fatal raises by itself; the 'raise' on its
		# return value is never actually reached -- confirm
		raise conf.fatal('Unsupported platform ' + platform.machine())

	set_def('GO_PACK', 'gopack')
	set_def('gopackage_PATTERN', '%s.a')
	set_def('CPPPATH_ST', '-I%s')

	set_def('GOMAKE_FLAGS', ['--quiet'])
	conf.find_program(conf.env.GO_COMPILER, var='GOC')
	conf.find_program(conf.env.GO_LINKER,   var='GOL')
	conf.find_program(conf.env.GO_PACK,     var='GOP')

	conf.find_program('cgo',                var='CGO')
+
+TaskGen.feature('go')(process_use)
+TaskGen.feature('go')(propagate_uselib_vars)
diff --git a/third_party/waf/waflib/extras/gob2.py b/third_party/waf/waflib/extras/gob2.py
new file mode 100644 (file)
index 0000000..637f293
--- /dev/null
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Ali Sabil, 2007
+
+from waflib import TaskGen
+
# compile .gob files to C with gob2; the generated .c is then built normally
TaskGen.declare_chain(
	name = 'gob2',
	rule = '${GOB2} -o ${TGT[0].bld_dir()} ${GOB2FLAGS} ${SRC}',
	ext_in = '.gob',
	ext_out = '.c'
)
+
def configure(conf):
	"""Find the gob2 code generator and initialize its flags."""
	conf.find_program('gob2', var='GOB2')
	conf.env['GOB2FLAGS'] = ''
diff --git a/third_party/waf/waflib/extras/halide.py b/third_party/waf/waflib/extras/halide.py
new file mode 100644 (file)
index 0000000..acec8ec
--- /dev/null
@@ -0,0 +1,149 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# Halide code generation tool
+
+__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
+__copyright__ = "Jérôme Carretero, 2014"
+
+"""
+
+Tool to run `Halide <http://halide-lang.org>`_ code generators.
+
+Usage::
+
+   bld(
+    name='pipeline',
+     # ^ Reference this in use="..." for things using the generated code
+    #target=['pipeline.o', 'pipeline.h']
+     # ^ by default, name.{o,h} is added, but you can set the outputs here
+    features='halide',
+    halide_env="HL_TRACE=1 HL_TARGET=host-opencl-gpu_debug",
+     # ^ Environment passed to the generator,
+     # can be a dict, k/v list, or string.
+    args=[],
+     # ^ Command-line arguments to the generator (optional),
+     # eg. to give parameters to the scheduling
+    source='pipeline_gen',
+     # ^ Name of the source executable
+   )
+
+
+Known issues:
+
+
+- Currently only supports Linux (no ".exe")
+
+- Doesn't rerun on input modification when input is part of a build
+  chain, and has been modified externally.
+
+"""
+
+import os
+from waflib import Task, Utils, Options, TaskGen, Errors
+
+class run_halide_gen(Task.Task):
+	# Runs a compiled Halide generator executable.  HALIDE_ENV and HALIDE_ARGS
+	# are listed in 'vars' so a change to either invalidates the task signature
+	# and triggers regeneration.
+	color = 'CYAN'
+	vars = ['HALIDE_ENV', 'HALIDE_ARGS']
+	run_str = "${SRC[0].abspath()} ${HALIDE_ARGS}"
+	def __str__(self):
+		# Show "halide[K=V,...]" in build logs instead of the class name so
+		# the generator environment used for this run is visible.
+		stuff = "halide"
+		stuff += ("[%s]" % (",".join(
+		 ('%s=%s' % (k,v)) for k, v in sorted(self.env.env.items()))))
+		return Task.Task.__str__(self).replace(self.__class__.__name__,
+		 stuff)
+
+@TaskGen.feature('halide')
+@TaskGen.before_method('process_source')
+def halide(self):
+	"""
+	Create a run_halide_gen task for a 'halide' task generator: normalize the
+	halide_env attribute (string / list / dict) into a dict, resolve the single
+	generator source, pick or derive the output nodes, and register the task in
+	compiled_tasks so its outputs feed the link step.
+	"""
+	Utils.def_attrs(self,
+	 args=[],
+	 halide_env={},
+	)
+
+	bld = self.bld
+
+	env = self.halide_env
+	try:
+		# accept 'K=V K=V', ['K=V', ...] or a dict; everything ends up a dict
+		if isinstance(env, str):
+			env = dict(x.split('=') for x in env.split())
+		elif isinstance(env, list):
+			env = dict(x.split('=') for x in env)
+		assert isinstance(env, dict)
+	except Exception as e:
+		# only conversion failures are reported as usage errors;
+		# anything else propagates unchanged
+		if not isinstance(e, ValueError) \
+		 and not isinstance(e, AssertionError):
+			raise
+		raise Errors.WafError(
+		 "halide_env must be under the form" \
+		 " {'HL_x':'a', 'HL_y':'b'}" \
+		 " or ['HL_x=y', 'HL_y=b']" \
+		 " or 'HL_x=y HL_y=b'")
+
+	src = self.to_nodes(self.source)
+	assert len(src) == 1, "Only one source expected"
+	src = src[0]
+
+	args = Utils.to_list(self.args)
+
+	def change_ext(src, ext):
+		# Return a node with a new extension, in an appropriate folder
+		name = src.name
+		xpos = src.name.rfind('.')
+		if xpos == -1: xpos = len(src.name)
+		newname = name[:xpos] + ext
+		if src.is_child_of(bld.bldnode):
+			node = src.get_src().parent.find_or_declare(newname)
+		else:
+			node = bld.bldnode.find_or_declare(newname)
+		return node
+
+	def to_nodes(self, lst, path=None):
+		# like TaskGen.to_nodes, but using find_or_declare so targets may be
+		# files that do not exist yet
+		tmp = []
+		path = path or self.path
+		find = path.find_or_declare
+
+		if isinstance(lst, self.path.__class__):
+			lst = [lst]
+
+		for x in Utils.to_list(lst):
+			if isinstance(x, str):
+				node = find(x)
+			else:
+				node = x
+			tmp.append(node)
+		return tmp
+
+	tgt = to_nodes(self, self.target)
+	if not tgt:
+		# default outputs: <name>.o and <name>.h next to the first target dir
+		tgt = [change_ext(src, '.o'), change_ext(src, '.h')]
+	cwd = tgt[0].parent.abspath()
+	task = self.create_task('run_halide_gen', src, tgt, cwd=cwd)
+	task.env.append_unique('HALIDE_ARGS', args)
+	if task.env.env == []:
+		task.env.env = {}
+	task.env.env.update(env)
+	# flat string form participates in the task signature (see vars above)
+	task.env.HALIDE_ENV = " ".join(("%s=%s" % (k,v)) for (k,v) in sorted(env.items()))
+	task.env.HALIDE_ARGS = args
+
+	try:
+		self.compiled_tasks.append(task)
+	except AttributeError:
+		self.compiled_tasks = [task]
+	# the source was consumed by this feature; prevent process_source from
+	# compiling it again
+	self.source = []
+
+def configure(conf):
+	"""
+	Locate Halide: via pkg-config (package 'Halide') by default, or, when
+	--halide-root was given, by pointing INCLUDES/LIBPATH/LIB at that tree.
+	"""
+	if Options.options.halide_root is None:
+		conf.check_cfg(package='Halide', args='--cflags --libs')
+	else:
+		halide_root = Options.options.halide_root
+		conf.env.INCLUDES_HALIDE = [ os.path.join(halide_root, "include") ]
+		conf.env.LIBPATH_HALIDE = [ os.path.join(halide_root, "lib") ]
+		conf.env.LIB_HALIDE = ["Halide"]
+
+		# You might want to add this, while upstream doesn't fix it
+		#conf.env.LIB_HALIDE += ['ncurses', 'dl', 'pthread']
+
+def options(opt):
+	"""Register the --halide-root command-line option (read in configure)."""
+	opt.add_option('--halide-root',
+	 help="path to Halide include and lib files",
+	)
diff --git a/third_party/waf/waflib/extras/local_rpath.py b/third_party/waf/waflib/extras/local_rpath.py
new file mode 100644 (file)
index 0000000..8942e97
--- /dev/null
@@ -0,0 +1,18 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011 (ita)
+
+from waflib.TaskGen import after_method, feature
+
+@after_method('propagate_uselib_vars')
+@feature('cprogram', 'cshlib', 'cxxprogram', 'cxxshlib', 'fcprogram', 'fcshlib')
+def add_rpath_stuff(self):
+	"""
+	Walk the transitive 'use' dependencies of this task generator and append
+	the directory of each dependency's link output to RPATH, so binaries find
+	locally-built shared libraries at run time.
+	"""
+	all = self.to_list(getattr(self, 'use', []))
+	while all:
+		name = all.pop()
+		try:
+			tg = self.bld.get_tgen_by_name(name)
+		except:
+			# 'use' may also list plain uselib names with no task
+			# generator behind them; those are skipped silently
+			continue
+		self.env.append_value('RPATH', tg.link_task.outputs[0].parent.abspath())
+		all.extend(self.to_list(getattr(tg, 'use', [])))
diff --git a/third_party/waf/waflib/extras/make.py b/third_party/waf/waflib/extras/make.py
new file mode 100644 (file)
index 0000000..8b99c4d
--- /dev/null
@@ -0,0 +1,141 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011 (ita)
+
+"""
+A make-like way of executing the build, following the relationships between inputs/outputs
+
+This algorithm will lead to slower builds, will not be as flexible as "waf build", but
+it might be useful for building data files (?)
+
+It is likely to break in the following cases:
+- files are created dynamically (no inputs or outputs)
+- headers
+- building two files from different groups
+"""
+
+import re
+from waflib import Options, Task
+from waflib.Build import BuildContext
+
+class MakeContext(BuildContext):
+	'''executes tasks in a step-by-step manner, following dependencies between inputs/outputs'''
+	cmd = 'make'
+	fun = 'build'
+
+	def __init__(self, **kw):
+		super(MakeContext, self).__init__(**kw)
+		# comma-separated file patterns taken from the --files option
+		self.files = Options.options.files
+
+	def get_build_iterator(self):
+		# Yield batches of tasks per build group, restricted to the tasks
+		# whose inputs/outputs match the --files patterns (plus the tasks
+		# transitively providing their inputs).
+		if not self.files:
+			# no patterns given: defer to the default behaviour
+			# NOTE(review): this yields the parent iterator object itself
+			# rather than its items — looks suspect; kept as upstream.
+			while 1:
+				yield super(MakeContext, self).get_build_iterator()
+
+		for g in self.groups:
+			# force the creation of the tasks for this group
+			for tg in g:
+				try:
+					f = tg.post
+				except AttributeError:
+					pass
+				else:
+					f()
+
+			provides = {}
+			uses = {}
+			all_tasks = []
+			tasks = []
+			for pat in self.files.split(','):
+				matcher = self.get_matcher(pat)
+				for tg in g:
+					# groups may contain bare tasks as well as task generators
+					if isinstance(tg, Task.TaskBase):
+						lst = [tg]
+					else:
+						lst = tg.tasks
+					for tsk in lst:
+						all_tasks.append(tsk)
+
+						do_exec = False
+						for node in getattr(tsk, 'inputs', []):
+							# manual defaultdict(list)
+							try:
+								uses[node].append(tsk)
+							except:
+								uses[node] = [tsk]
+
+							if matcher(node, output=False):
+								do_exec = True
+								break
+
+						for node in getattr(tsk, 'outputs', []):
+							# manual defaultdict(list)
+							try:
+								provides[node].append(tsk)
+							except:
+								provides[node] = [tsk]
+
+							if matcher(node, output=True):
+								do_exec = True
+								break
+						if do_exec:
+							tasks.append(tsk)
+
+			# so we have the tasks that we need to process, the list of all tasks,
+			# the map of the tasks providing nodes, and the map of tasks using nodes
+
+			if not tasks:
+				# if there are no tasks matching, return everything in the current group
+				result = all_tasks
+			else:
+				# this is like a big filter...
+				# pull in, transitively, every task that provides an input
+				# of an already-selected task
+				result = set([])
+				seen = set([])
+				cur = set(tasks)
+				while cur:
+					result |= cur
+					tosee = set([])
+					for tsk in cur:
+						for node in getattr(tsk, 'inputs', []):
+							if node in seen:
+								continue
+							seen.add(node)
+							tosee |= set(provides.get(node, []))
+					cur = tosee
+				result = list(result)
+
+			Task.set_file_constraints(result)
+			Task.set_precedence_constraints(result)
+			yield result
+
+		while 1:
+			yield []
+
+	def get_matcher(self, pat):
+		# this returns a function
+		# Patterns may be prefixed 'in:' / 'out:' to restrict which side is
+		# matched; a pattern that resolves to an existing node is compared by
+		# identity, otherwise it is treated as an (anchored) regexp.
+		inn = True
+		out = True
+		if pat.startswith('in:'):
+			out = False
+			pat = pat.replace('in:', '')
+		elif pat.startswith('out:'):
+			inn = False
+			pat = pat.replace('out:', '')
+
+		anode = self.root.find_node(pat)
+		pattern = None
+		if not anode:
+			if not pat.startswith('^'):
+				pat = '^.+?%s' % pat
+			if not pat.endswith('$'):
+				pat = '%s$' % pat
+			pattern = re.compile(pat)
+
+		def match(node, output):
+			if output == True and not out:
+				return False
+			if output == False and not inn:
+				return False
+
+			if anode:
+				return anode == node
+			else:
+				return pattern.match(node.abspath())
+		return match
diff --git a/third_party/waf/waflib/extras/md5_tstamp.py b/third_party/waf/waflib/extras/md5_tstamp.py
new file mode 100644 (file)
index 0000000..63b71d8
--- /dev/null
@@ -0,0 +1,67 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+"""
+This module assumes that only one build context is running at a given time, which
+is not the case if you want to execute configuration tests in parallel.
+
+Store some values on the buildcontext mapping file paths to
+stat values and md5 values (timestamp + md5)
+this way the md5 hashes are computed only when timestamp change (can be faster)
+There is usually little or no gain from enabling this, but it can be used to enable
+the second level cache with timestamps (WAFCACHE)
+
+You may have to run distclean or to remove the build directory before enabling/disabling
+this hashing scheme
+"""
+
+import os, stat
+from waflib import Utils, Build, Context
+
+# When True, fall back to the original (strongest) file hash and merely cache
+# it per mtime; when False, hash only mtime/size/name (weaker but cheaper).
+STRONGEST = True
+
+try:
+	# patch only once: a second load of this tool must not re-wrap
+	# store/restore below
+	Build.BuildContext.store_real
+except AttributeError:
+
+	# separate cache db so switching hashing schemes does not reuse stale data
+	Context.DBFILE += '_md5tstamp'
+
+	Build.hashes_md5_tstamp = {}
+	Build.SAVED_ATTRS.append('hashes_md5_tstamp')
+	def store(self):
+		# save the hash cache as part of the default pickle file
+		self.hashes_md5_tstamp = Build.hashes_md5_tstamp
+		self.store_real()
+	Build.BuildContext.store_real = Build.BuildContext.store
+	Build.BuildContext.store      = store
+
+	def restore(self):
+		# we need a module variable for h_file below
+		self.restore_real()
+		try:
+			Build.hashes_md5_tstamp = self.hashes_md5_tstamp or {}
+		except AttributeError:
+			Build.hashes_md5_tstamp = {}
+	Build.BuildContext.restore_real = Build.BuildContext.restore
+	Build.BuildContext.restore      = restore
+
+	def h_file(filename):
+		# replacement for Utils.h_file: recompute the hash only when the
+		# cached mtime no longer matches
+		st = os.stat(filename)
+		if stat.S_ISDIR(st[stat.ST_MODE]): raise IOError('not a file')
+
+		if filename in Build.hashes_md5_tstamp:
+			if Build.hashes_md5_tstamp[filename][0] == str(st.st_mtime):
+				return Build.hashes_md5_tstamp[filename][1]
+		if STRONGEST:
+			ret = Utils.h_file_no_md5(filename)
+			Build.hashes_md5_tstamp[filename] = (str(st.st_mtime), ret)
+			return ret
+		else:
+			# NOTE(review): md5.update() with str arguments is Python-2
+			# only; on Python 3 this branch would raise TypeError — confirm
+			m = Utils.md5()
+			m.update(str(st.st_mtime))
+			m.update(str(st.st_size))
+			m.update(filename)
+			Build.hashes_md5_tstamp[filename] = (str(st.st_mtime), m.digest())
+			return m.digest()
+	Utils.h_file_no_md5 = Utils.h_file
+	Utils.h_file = h_file
diff --git a/third_party/waf/waflib/extras/mem_reducer.py b/third_party/waf/waflib/extras/mem_reducer.py
new file mode 100644 (file)
index 0000000..e97c8d7
--- /dev/null
@@ -0,0 +1,110 @@
+#! /usr/bin/env python
+# encoding: UTF-8
+
+"""
+This tool can help to reduce the memory usage in very large builds featuring many tasks with after/before attributes.
+It may also improve the overall build time by decreasing the amount of iterations over tasks.
+
+Usage:
+def options(opt):
+       opt.load('mem_reducer')
+"""
+
+import itertools
+from waflib import Utils, Task, Runner
+
+class SetOfTasks(object):
+	"""Wraps a set and a task which has a list of other sets.
+	The interface is meant to mimic the interface of set. Add missing functions as needed.
+	"""
+	def __init__(self, owner):
+		# share the owner's run_after set; group sets are kept on the owner
+		self._set = owner.run_after
+		self._owner = owner
+
+	def __iter__(self):
+		# yield tasks from the shared group sets first, then the local set
+		for g in self._owner.run_after_groups:
+			#print len(g)
+			for task in g:
+				yield task
+		for task in self._set:
+			yield task
+
+	def add(self, obj):
+		self._set.add(obj)
+
+	def update(self, obj):
+		self._set.update(obj)
+
+def set_precedence_constraints(tasks):
+	"""
+	Replacement for Task.set_precedence_constraints: group tasks by their
+	constraint hash and share one set per group between all dependents,
+	instead of adding individual run_after links (saves memory on big builds).
+	"""
+	cstr_groups = Utils.defaultdict(list)
+	for x in tasks:
+		x.run_after = SetOfTasks(x)
+		x.run_after_groups = []
+		x.waiting_sets = []
+
+		h = x.hash_constraints()
+		cstr_groups[h].append(x)
+
+	# create sets which can be reused for all tasks
+	for k in cstr_groups.keys():
+		cstr_groups[k] = set(cstr_groups[k])
+
+	# this list should be short
+	for key1, key2 in itertools.combinations(cstr_groups.keys(), 2):
+		group1 = cstr_groups[key1]
+		group2 = cstr_groups[key2]
+		# get the first entry of the set
+		t1 = next(iter(group1))
+		t2 = next(iter(group2))
+
+		# add the constraints based on the comparisons
+		if Task.is_before(t1, t2):
+			for x in group2:
+				x.run_after_groups.append(group1)
+			for k in group1:
+				k.waiting_sets.append(group1)
+		elif Task.is_before(t2, t1):
+			for x in group1:
+				x.run_after_groups.append(group2)
+			for k in group2:
+				k.waiting_sets.append(group2)
+
+Task.set_precedence_constraints = set_precedence_constraints
+
+def get_out(self):
+	"""
+	Replacement for Runner.Parallel.get_out: same bookkeeping as upstream,
+	plus removal of the finished task from the shared waiting sets so those
+	sets shrink as the build proceeds.
+	"""
+	tsk = self.out.get()
+	if not self.stop:
+		self.add_more_tasks(tsk)
+	self.count -= 1
+	self.dirty = True
+
+	# shrinking sets
+	try:
+		ws = tsk.waiting_sets
+	except AttributeError:
+		pass
+	else:
+		for k in ws:
+			try:
+				k.remove(tsk)
+			except KeyError:
+				pass
+
+	return tsk
+Runner.Parallel.get_out = get_out
+
+def skip(self, tsk):
+	"""
+	Replacement for Runner.Parallel.skip: mark the task SKIPPED and, like
+	get_out above, drop it from the shared waiting sets.
+	"""
+	tsk.hasrun = Task.SKIPPED
+
+	# shrinking sets
+	try:
+		ws = tsk.waiting_sets
+	except AttributeError:
+		pass
+	else:
+		for k in ws:
+			try:
+				k.remove(tsk)
+			except KeyError:
+				pass
+Runner.Parallel.skip = skip
similarity index 70%
rename from third_party/waf/wafadmin/Tools/misc.py
rename to third_party/waf/waflib/extras/misc.py
index 6ef45aef444b74096442a8f086ea45fb247a2a0f..802323ddcc7c7f98ce9ebc5f0377f648a47bc032 100644 (file)
@@ -1,26 +1,37 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2006 (ita)
+# Thomas Nagy, 2006-2010 (ita)
 
 """
-Custom objects:
- - execute a function everytime
- - copy a file somewhere else
+This tool is totally deprecated
+
+Try using:
+       .pc.in files for .pc files
+       the feature intltool_in - see demos/intltool
+       make-like rules
 """
 
 import shutil, re, os
-import TaskGen, Node, Task, Utils, Build, Constants
-from TaskGen import feature, taskgen, after, before
-from Logs import debug
+from waflib import Node, Task, Utils, Errors
+from waflib.TaskGen import feature, after_method, before_method
+from waflib.Logs import debug
+
+def copy_attrs(orig, dest, names, only_if_set=False):
+	"""
+	Copy the attributes listed in *names* (string or list, see Utils.to_list)
+	from *orig* to *dest*; missing attributes default to an empty tuple.
+	When *only_if_set* is True, false/empty values are not copied.
+	"""
+	for a in Utils.to_list(names):
+		u = getattr(orig, a, ())
+		if u or not only_if_set:
+			setattr(dest, a, u)
 
 def copy_func(tsk):
        "Make a file copy. This might be used to make other kinds of file processing (even calling a compiler is possible)"
-       env = tsk.env
-       infile = tsk.inputs[0].abspath(env)
-       outfile = tsk.outputs[0].abspath(env)
+       infile = tsk.inputs[0].abspath()
+       outfile = tsk.outputs[0].abspath()
        try:
                shutil.copy2(infile, outfile)
-       except (OSError, IOError):
+       except EnvironmentError:
                return 1
        else:
                if tsk.chmod: os.chmod(outfile, tsk.chmod)
@@ -28,40 +39,31 @@ def copy_func(tsk):
 
 def action_process_file_func(tsk):
        "Ask the function attached to the task to process it"
-       if not tsk.fun: raise Utils.WafError('task must have a function attached to it for copy_func to work!')
+       if not tsk.fun: raise Errors.WafError('task must have a function attached to it for copy_func to work!')
        return tsk.fun(tsk)
 
-class cmd_taskgen(TaskGen.task_gen):
-       def __init__(self, *k, **kw):
-               TaskGen.task_gen.__init__(self, *k, **kw)
-
 @feature('cmd')
 def apply_cmd(self):
        "call a command everytime"
-       if not self.fun: raise Utils.WafError('cmdobj needs a function!')
+       if not self.fun: raise Errors.WafError('cmdobj needs a function!')
        tsk = Task.TaskBase()
        tsk.fun = self.fun
        tsk.env = self.env
        self.tasks.append(tsk)
        tsk.install_path = self.install_path
 
-class copy_taskgen(TaskGen.task_gen):
-       "By default, make a file copy, if fun is provided, fun will make the copy (or call a compiler, etc)"
-       def __init__(self, *k, **kw):
-               TaskGen.task_gen.__init__(self, *k, **kw)
-
 @feature('copy')
-@before('apply_core')
+@before_method('process_source')
 def apply_copy(self):
        Utils.def_attrs(self, fun=copy_func)
        self.default_install_path = 0
 
        lst = self.to_list(self.source)
-       self.meths.remove('apply_core')
+       self.meths.remove('process_source')
 
        for filename in lst:
                node = self.path.find_resource(filename)
-               if not node: raise Utils.WafError('cannot find input file %s for processing' % filename)
+               if not node: raise Errors.WafError('cannot find input file %s for processing' % filename)
 
                target = self.target
                if not target or len(lst)>1: target = node.name
@@ -71,57 +73,45 @@ def apply_copy(self):
 
                tsk = self.create_task('copy', node, newnode)
                tsk.fun = self.fun
-               tsk.chmod = self.chmod
-               tsk.install_path = self.install_path
+               tsk.chmod = getattr(self, 'chmod', Utils.O644)
 
                if not tsk.env:
                        tsk.debug()
-                       raise Utils.WafError('task without an environment')
+                       raise Errors.WafError('task without an environment')
 
 def subst_func(tsk):
        "Substitutes variables in a .in file"
 
        m4_re = re.compile('@(\w+)@', re.M)
 
-       env = tsk.env
-       infile = tsk.inputs[0].abspath(env)
-       outfile = tsk.outputs[0].abspath(env)
-
-       code = Utils.readf(infile)
+       code = tsk.inputs[0].read() #Utils.readf(infile)
 
        # replace all % by %% to prevent errors by % signs in the input file while string formatting
        code = code.replace('%', '%%')
 
        s = m4_re.sub(r'%(\1)s', code)
 
-       di = tsk.dict or {}
+       env = tsk.env
+       di = getattr(tsk, 'dict', {}) or getattr(tsk.generator, 'dict', {})
        if not di:
                names = m4_re.findall(code)
                for i in names:
                        di[i] = env.get_flat(i) or env.get_flat(i.upper())
 
-       file = open(outfile, 'w')
-       file.write(s % di)
-       file.close()
-       if tsk.chmod: os.chmod(outfile, tsk.chmod)
-
-class subst_taskgen(TaskGen.task_gen):
-       def __init__(self, *k, **kw):
-               TaskGen.task_gen.__init__(self, *k, **kw)
+       tsk.outputs[0].write(s % di)
 
 @feature('subst')
-@before('apply_core')
+@before_method('process_source')
 def apply_subst(self):
        Utils.def_attrs(self, fun=subst_func)
-       self.default_install_path = 0
        lst = self.to_list(self.source)
-       self.meths.remove('apply_core')
+       self.meths.remove('process_source')
 
        self.dict = getattr(self, 'dict', {})
 
        for filename in lst:
                node = self.path.find_resource(filename)
-               if not node: raise Utils.WafError('cannot find input file %s for processing' % filename)
+               if not node: raise Errors.WafError('cannot find input file %s for processing' % filename)
 
                if self.target:
                        newnode = self.path.find_or_declare(self.target)
@@ -134,7 +124,7 @@ def apply_subst(self):
                        pass
 
                if self.dict and not self.env['DICT_HASH']:
-                       self.env = self.env.copy()
+                       self.env = self.env.derive()
                        keys = list(self.dict.keys())
                        keys.sort()
                        lst = [self.dict[x] for x in keys]
@@ -144,12 +134,11 @@ def apply_subst(self):
                tsk.fun = self.fun
                tsk.dict = self.dict
                tsk.dep_vars = ['DICT_HASH']
-               tsk.install_path = self.install_path
-               tsk.chmod = self.chmod
+               tsk.chmod = getattr(self, 'chmod', Utils.O644)
 
                if not tsk.env:
                        tsk.debug()
-                       raise Utils.WafError('task without an environment')
+                       raise Errors.WafError('task without an environment')
 
 ####################
 ## command-output ####
@@ -167,33 +156,33 @@ class input_file(cmd_arg):
                assert isinstance(base_path, Node.Node)
                self.node = base_path.find_resource(self.name)
                if self.node is None:
-                       raise Utils.WafError("Input file %s not found in " % (self.name, base_path))
+                       raise Errors.WafError("Input file %s not found in " % (self.name, base_path))
 
        def get_path(self, env, absolute):
                if absolute:
-                       return self.template % self.node.abspath(env)
+                       return self.template % self.node.abspath()
                else:
-                       return self.template % self.node.srcpath(env)
+                       return self.template % self.node.srcpath()
 
 class output_file(cmd_arg):
        def find_node(self, base_path):
                assert isinstance(base_path, Node.Node)
                self.node = base_path.find_or_declare(self.name)
                if self.node is None:
-                       raise Utils.WafError("Output file %s not found in " % (self.name, base_path))
+                       raise Errors.WafError("Output file %s not found in " % (self.name, base_path))
 
        def get_path(self, env, absolute):
                if absolute:
-                       return self.template % self.node.abspath(env)
+                       return self.template % self.node.abspath()
                else:
-                       return self.template % self.node.bldpath(env)
+                       return self.template % self.node.bldpath()
 
 class cmd_dir_arg(cmd_arg):
        def find_node(self, base_path):
                assert isinstance(base_path, Node.Node)
                self.node = base_path.find_dir(self.name)
                if self.node is None:
-                       raise Utils.WafError("Directory %s not found in " % (self.name, base_path))
+                       raise Errors.WafError("Directory %s not found in " % (self.name, base_path))
 
 class input_dir(cmd_dir_arg):
        def get_path(self, dummy_env, dummy_absolute):
@@ -201,13 +190,13 @@ class input_dir(cmd_dir_arg):
 
 class output_dir(cmd_dir_arg):
        def get_path(self, env, dummy_absolute):
-               return self.template % self.node.abspath(env)
+               return self.template % self.node.abspath()
 
 
 class command_output(Task.Task):
        color = "BLUE"
        def __init__(self, env, command, command_node, command_args, stdin, stdout, cwd, os_env, stderr):
-               Task.Task.__init__(self, env, normal=1)
+               Task.Task.__init__(self, env=env)
                assert isinstance(command, (str, Node.Node))
                self.command = command
                self.command_args = command_args
@@ -226,13 +215,13 @@ class command_output(Task.Task):
 
                def input_path(node, template):
                        if task.cwd is None:
-                               return template % node.bldpath(task.env)
+                               return template % node.bldpath()
                        else:
                                return template % node.abspath()
                def output_path(node, template):
                        fun = node.abspath
                        if task.cwd is None: fun = node.bldpath
-                       return template % fun(task.env)
+                       return template % fun()
 
                if isinstance(task.command, Node.Node):
                        argv = [input_path(task.command, '%s')]
@@ -272,13 +261,9 @@ class command_output(Task.Task):
                        os_env = os.environ
                else:
                        os_env = task.os_env
-               command = Utils.pproc.Popen(argv, stdin=stdin, stdout=stdout, stderr=stderr, cwd=task.cwd, env=os_env)
+               command = Utils.subprocess.Popen(argv, stdin=stdin, stdout=stdout, stderr=stderr, cwd=task.cwd, env=os_env)
                return command.wait()
 
-class cmd_output_taskgen(TaskGen.task_gen):
-       def __init__(self, *k, **kw):
-               TaskGen.task_gen.__init__(self, *k, **kw)
-
 @feature('command-output')
 def init_cmd_output(self):
        Utils.def_attrs(self,
@@ -320,10 +305,10 @@ def init_cmd_output(self):
                os_env = None)
 
 @feature('command-output')
-@after('init_cmd_output')
+@after_method('init_cmd_output')
 def apply_cmd_output(self):
        if self.command is None:
-               raise Utils.WafError("command-output missing command")
+               raise Errors.WafError("command-output missing command")
        if self.command_is_external:
                cmd = self.command
                cmd_node = None
@@ -336,11 +321,7 @@ use command_is_external=True''') % (self.command,)
 
        if self.cwd is None:
                cwd = None
-       else:
-               assert isinstance(cwd, CmdDirArg)
-               self.cwd.find_node(self.path)
 
-       args = []
        inputs = []
        outputs = []
 
@@ -358,7 +339,7 @@ use command_is_external=True''') % (self.command,)
                assert isinstance(self.stdout, str)
                stdout = self.path.find_or_declare(self.stdout)
                if stdout is None:
-                       raise Utils.WafError("File %s not found" % (self.stdout,))
+                       raise Errors.WafError("File %s not found" % (self.stdout,))
                outputs.append(stdout)
 
        if self.stderr is None:
@@ -367,7 +348,7 @@ use command_is_external=True''') % (self.command,)
                assert isinstance(self.stderr, str)
                stderr = self.path.find_or_declare(self.stderr)
                if stderr is None:
-                       raise Utils.WafError("File %s not found" % (self.stderr,))
+                       raise Errors.WafError("File %s not found" % (self.stderr,))
                outputs.append(stderr)
 
        if self.stdin is None:
@@ -376,28 +357,30 @@ use command_is_external=True''') % (self.command,)
                assert isinstance(self.stdin, str)
                stdin = self.path.find_resource(self.stdin)
                if stdin is None:
-                       raise Utils.WafError("File %s not found" % (self.stdin,))
+                       raise Errors.WafError("File %s not found" % (self.stdin,))
                inputs.append(stdin)
 
        for hidden_input in self.to_list(self.hidden_inputs):
                node = self.path.find_resource(hidden_input)
                if node is None:
-                       raise Utils.WafError("File %s not found in dir %s" % (hidden_input, self.path))
+                       raise Errors.WafError("File %s not found in dir %s" % (hidden_input, self.path))
                inputs.append(node)
 
        for hidden_output in self.to_list(self.hidden_outputs):
                node = self.path.find_or_declare(hidden_output)
                if node is None:
-                       raise Utils.WafError("File %s not found in dir %s" % (hidden_output, self.path))
+                       raise Errors.WafError("File %s not found in dir %s" % (hidden_output, self.path))
                outputs.append(node)
 
        if not (inputs or getattr(self, 'no_inputs', None)):
-               raise Utils.WafError('command-output objects must have at least one input file or give self.no_inputs')
+               raise Errors.WafError('command-output objects must have at least one input file or give self.no_inputs')
        if not (outputs or getattr(self, 'no_outputs', None)):
-               raise Utils.WafError('command-output objects must have at least one output file or give self.no_outputs')
+               raise Errors.WafError('command-output objects must have at least one output file or give self.no_outputs')
 
+       cwd = self.bld.variant_dir
        task = command_output(self.env, cmd, cmd_node, self.argv, stdin, stdout, cwd, self.os_env, stderr)
-       Utils.copy_attrs(self, task, 'before after ext_in ext_out', only_if_set=True)
+       task.generator = self
+       copy_attrs(self, task, 'before after ext_in ext_out', only_if_set=True)
        self.tasks.append(task)
 
        task.inputs = inputs
@@ -419,11 +402,9 @@ use command_is_external=True''') % (self.command,)
 
 def post_run(self):
        for x in self.outputs:
-               h = Utils.h_file(x.abspath(self.env))
-               self.generator.bld.node_sigs[self.env.variant()][x.id] = h
+               x.sig = Utils.h_file(x.abspath())
 
 def runnable_status(self):
-       return Constants.RUN_ME
+       return self.RUN_ME
 
-Task.task_type_from_func('copy', vars=[], func=action_process_file_func)
-TaskGen.task_gen.classes['command-output'] = cmd_output_taskgen
+Task.task_factory('copy', vars=[], func=action_process_file_func)
diff --git a/third_party/waf/waflib/extras/msvcdeps.py b/third_party/waf/waflib/extras/msvcdeps.py
new file mode 100644 (file)
index 0000000..98b0677
--- /dev/null
@@ -0,0 +1,262 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Copyright Garmin International or its subsidiaries, 2012-2013
+
+'''
+Off-load dependency scanning from Python code to MSVC compiler
+
+This tool is safe to load in any environment; it will only activate the
+MSVC exploits when it finds that a particular taskgen uses MSVC to
+compile.
+
+Empirical testing shows about a 10% execution time savings from using
+this tool as compared to c_preproc.
+
+The technique of gutting scan() and pushing the dependency calculation
+down to post_run() is cribbed from gccdeps.py.
+'''
+
+import os
+import sys
+import tempfile
+import threading
+
+from waflib import Context, Errors, Logs, Task, Utils
+from waflib.Tools import c_preproc, c, cxx, msvc
+from waflib.TaskGen import feature, before_method
+
+lock = threading.Lock()
+nodes = {} # Cache the path -> Node lookup
+
+PREPROCESSOR_FLAG = '/showIncludes'
+INCLUDE_PATTERN = 'Note: including file:'
+
+# Extensible by outside tools
+supported_compilers = ['msvc']
+
+@feature('c', 'cxx')
+@before_method('process_source')
+def apply_msvcdeps_flags(taskgen):
+    if taskgen.env.CC_NAME not in supported_compilers:
+        return
+
+    for flag in ('CFLAGS', 'CXXFLAGS'):
+        if taskgen.env.get_flat(flag).find(PREPROCESSOR_FLAG) < 0:
+            taskgen.env.append_value(flag, PREPROCESSOR_FLAG)
+
+    # Figure out what casing conventions the user's shell used when
+    # launching Waf
+    (drive, _) = os.path.splitdrive(taskgen.bld.srcnode.abspath())
+    taskgen.msvcdeps_drive_lowercase = drive == drive.lower()
+
+def path_to_node(base_node, path, cached_nodes):
+    # Take the base node and the path and return a node
+    # Results are cached because searching the node tree is expensive
+    # The following code is executed by threads, it is not safe, so a lock is needed...
+    if getattr(path, '__hash__'):
+        node_lookup_key = (base_node, path)
+    else:
+        # Not hashable, assume it is a list and join into a string
+        node_lookup_key = (base_node, os.path.sep.join(path))
+    try:
+        lock.acquire()
+        node = cached_nodes[node_lookup_key]
+    except KeyError:
+        node = base_node.find_resource(path)
+        cached_nodes[node_lookup_key] = node
+    finally:
+        lock.release()
+    return node
+
+'''
+Register a task subclass that has hooks for running our custom
+dependency calculations rather than the C/C++ stock c_preproc
+method.
+'''
+def wrap_compiled_task(classname):
+    derived_class = type(classname, (Task.classes[classname],), {})
+
+    def post_run(self):
+        if self.env.CC_NAME not in supported_compilers:
+            return super(derived_class, self).post_run()
+
+        if getattr(self, 'cached', None):
+            return Task.Task.post_run(self)
+
+        bld = self.generator.bld
+        unresolved_names = []
+        resolved_nodes = []
+
+        lowercase = self.generator.msvcdeps_drive_lowercase
+        correct_case_path = bld.path.abspath()
+        correct_case_path_len = len(correct_case_path)
+        correct_case_path_norm = os.path.normcase(correct_case_path)
+
+        # Dynamically bind to the cache
+        try:
+            cached_nodes = bld.cached_nodes
+        except AttributeError:
+            cached_nodes = bld.cached_nodes = {}
+
+        for path in self.msvcdeps_paths:
+            node = None
+            if os.path.isabs(path):
+                # Force drive letter to match conventions of main source tree
+                drive, tail = os.path.splitdrive(path)
+
+                if os.path.normcase(path[:correct_case_path_len]) == correct_case_path_norm:
+                    # Path is in the sandbox, force it to be correct.  MSVC sometimes returns a lowercase path.
+                    path = correct_case_path + path[correct_case_path_len:]
+                else:
+                    # Check the drive letter
+                    if lowercase and (drive != drive.lower()):
+                        path = drive.lower() + tail
+                    elif (not lowercase) and (drive != drive.upper()):
+                        path = drive.upper() + tail
+                node = path_to_node(bld.root, path, cached_nodes)
+            else:
+                base_node = bld.bldnode
+                # when calling find_resource, make sure the path does not begin by '..'
+                path = [k for k in Utils.split_path(path) if k and k != '.']
+                while path[0] == '..':
+                    path = path[1:]
+                    base_node = base_node.parent
+
+                node = path_to_node(base_node, path, cached_nodes)
+
+            if not node:
+                raise ValueError('could not find %r for %r' % (path, self))
+            else:
+                if not c_preproc.go_absolute:
+                    if not (node.is_child_of(bld.srcnode) or node.is_child_of(bld.bldnode)):
+                        # System library
+                        Logs.debug('msvcdeps: Ignoring system include %r' % node)
+                        continue
+
+                if id(node) == id(self.inputs[0]):
+                    # Self-dependency
+                    continue
+
+                resolved_nodes.append(node)
+
+        bld.node_deps[self.uid()] = resolved_nodes
+        bld.raw_deps[self.uid()] = unresolved_names
+
+        try:
+            del self.cache_sig
+        except:
+            pass
+
+        Task.Task.post_run(self)
+
+    def scan(self):
+        if self.env.CC_NAME not in supported_compilers:
+            return super(derived_class, self).scan()
+
+        resolved_nodes = self.generator.bld.node_deps.get(self.uid(), [])
+        unresolved_names = []
+        return (resolved_nodes, unresolved_names)
+
+    def sig_implicit_deps(self):
+        if self.env.CC_NAME not in supported_compilers:
+            return super(derived_class, self).sig_implicit_deps()
+
+        try:
+            return Task.Task.sig_implicit_deps(self)
+        except Errors.WafError:
+            return Utils.SIG_NIL
+
+    def exec_response_command(self, cmd, **kw):
+        # exec_response_command() is only called from inside msvc.py anyway
+        assert self.env.CC_NAME in supported_compilers
+
+        # Only bother adding '/showIncludes' to compile tasks
+        if isinstance(self, (c.c, cxx.cxx)):
+            try:
+                # The Visual Studio IDE adds an environment variable that causes
+                # the MS compiler to send its textual output directly to the
+                # debugging window rather than normal stdout/stderr.
+                #
+                # This is unrecoverably bad for this tool because it will cause
+                # all the dependency scanning to see an empty stdout stream and
+                # assume that the file being compiled uses no headers.
+                #
+                # See http://blogs.msdn.com/b/freik/archive/2006/04/05/569025.aspx
+                #
+                # Attempting to repair the situation by deleting the offending
+                # envvar at this point in tool execution will not be good enough--
+                # its presence poisons the 'waf configure' step earlier. We just
+                # want to put a sanity check here in order to help developers
+                # quickly diagnose the issue if an otherwise-good Waf tree
+                # is then executed inside the MSVS IDE.
+                assert 'VS_UNICODE_OUTPUT' not in kw['env']
+
+                tmp = None
+
+                # This block duplicated from Waflib's msvc.py
+                if sys.platform.startswith('win') and isinstance(cmd, list) and len(' '.join(cmd)) >= 8192:
+                    program = cmd[0]
+                    cmd = [self.quote_response_command(x) for x in cmd]
+                    (fd, tmp) = tempfile.mkstemp()
+                    os.write(fd, '\r\n'.join(i.replace('\\', '\\\\') for i in cmd[1:]).encode())
+                    os.close(fd)
+                    cmd = [program, '@' + tmp]
+                # ... end duplication
+
+                self.msvcdeps_paths = []
+
+                kw['env'] = kw.get('env', os.environ.copy())
+                kw['cwd'] = kw.get('cwd', os.getcwd())
+                kw['quiet'] = Context.STDOUT
+                kw['output'] = Context.STDOUT
+
+                out = []
+
+                try:
+                    raw_out = self.generator.bld.cmd_and_log(cmd, **kw)
+                    ret = 0
+                except Errors.WafError as e:
+                    raw_out = e.stdout
+                    ret = e.returncode
+
+                for line in raw_out.splitlines():
+                    if line.startswith(INCLUDE_PATTERN):
+                        inc_path = line[len(INCLUDE_PATTERN):].strip()
+                        Logs.debug('msvcdeps: Regex matched %s' % inc_path)
+                        self.msvcdeps_paths.append(inc_path)
+                    else:
+                        out.append(line)
+
+                # Pipe through the remaining stdout content (not related to /showIncludes)
+                if self.generator.bld.logger:
+                    self.generator.bld.logger.debug('out: %s' % os.linesep.join(out))
+                else:
+                    sys.stdout.write(os.linesep.join(out) + os.linesep)
+
+            finally:
+                if tmp:
+                    try:
+                        os.remove(tmp)
+                    except OSError:
+                        pass
+
+            return ret
+        else:
+            # Use base class's version of this method for linker tasks
+            return super(derived_class, self).exec_response_command(cmd, **kw)
+
+    def can_retrieve_cache(self):
+        # msvcdeps and netcaching are incompatible, so disable the cache
+        if self.env.CC_NAME not in supported_compilers:
+            return super(derived_class, self).can_retrieve_cache()
+        self.nocache = True # Disable sending the file to the cache
+        return False
+
+    derived_class.post_run = post_run
+    derived_class.scan = scan
+    derived_class.sig_implicit_deps = sig_implicit_deps
+    derived_class.exec_response_command = exec_response_command
+    derived_class.can_retrieve_cache = can_retrieve_cache
+
+for k in ('c', 'cxx'):
+    wrap_compiled_task(k)
diff --git a/third_party/waf/waflib/extras/msvs.py b/third_party/waf/waflib/extras/msvs.py
new file mode 100644 (file)
index 0000000..5f76c26
--- /dev/null
@@ -0,0 +1,1033 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Avalanche Studios 2009-2011
+# Thomas Nagy 2011
+
+"""
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+1. Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+3. The name of the author may not be used to endorse or promote products
+   derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+"""
+
+"""
+To add this tool to your project:
+def options(opt):
+       opt.load('msvs')
+
+It can be a good idea to add the sync_exec tool too.
+
+To generate solution files:
+$ waf configure msvs
+
+To customize the outputs, provide subclasses in your wscript files:
+
+from waflib.extras import msvs
+class vsnode_target(msvs.vsnode_target):
+       def get_build_command(self, props):
+               # likely to be required
+               return "waf.bat build"
+       def collect_source(self):
+               # likely to be required
+               ...
+class msvs_bar(msvs.msvs_generator):
+       def init(self):
+               msvs.msvs_generator.init(self)
+               self.vsnode_target = vsnode_target
+
+The msvs class re-uses the same build() function for reading the targets (task generators),
+you may therefore specify msvs settings on the context object:
+
+def build(bld):
+       bld.solution_name = 'foo.sln'
+       bld.waf_command = 'waf.bat'
+       bld.projects_dir = bld.srcnode.make_node('.depproj')
+       bld.projects_dir.mkdir()
+
+For visual studio 2008, the command is called 'msvs2008', and the classes
+such as vsnode_target are wrapped by a decorator class 'wrap_2008' to
+provide special functionality.
+
+ASSUMPTIONS:
+* a project can be either a directory or a target, vcxproj files are written only for targets that have source files
+* each project is a vcxproj file, therefore the project uuid needs only to be a hash of the absolute path
+"""
+
+import os, re, sys
+import uuid # requires python 2.5
+from waflib.Build import BuildContext
+from waflib import Utils, TaskGen, Logs, Task, Context, Node, Options
+
+HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)'
+
+PROJECT_TEMPLATE = r'''<?xml version="1.0" encoding="UTF-8"?>
+<Project DefaultTargets="Build" ToolsVersion="4.0"
+       xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+
+       <ItemGroup Label="ProjectConfigurations">
+               ${for b in project.build_properties}
+               <ProjectConfiguration Include="${b.configuration}|${b.platform}">
+                       <Configuration>${b.configuration}</Configuration>
+                       <Platform>${b.platform}</Platform>
+               </ProjectConfiguration>
+               ${endfor}
+       </ItemGroup>
+
+       <PropertyGroup Label="Globals">
+               <ProjectGuid>{${project.uuid}}</ProjectGuid>
+               <Keyword>MakeFileProj</Keyword>
+               <ProjectName>${project.name}</ProjectName>
+       </PropertyGroup>
+       <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
+
+       ${for b in project.build_properties}
+       <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='${b.configuration}|${b.platform}'" Label="Configuration">
+               <ConfigurationType>Makefile</ConfigurationType>
+               <OutDir>${b.outdir}</OutDir>
+               <PlatformToolset>v110</PlatformToolset>
+       </PropertyGroup>
+       ${endfor}
+
+       <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
+       <ImportGroup Label="ExtensionSettings">
+       </ImportGroup>
+
+       ${for b in project.build_properties}
+       <ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='${b.configuration}|${b.platform}'">
+               <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+       </ImportGroup>
+       ${endfor}
+
+       ${for b in project.build_properties}
+       <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='${b.configuration}|${b.platform}'">
+               <NMakeBuildCommandLine>${xml:project.get_build_command(b)}</NMakeBuildCommandLine>
+               <NMakeReBuildCommandLine>${xml:project.get_rebuild_command(b)}</NMakeReBuildCommandLine>
+               <NMakeCleanCommandLine>${xml:project.get_clean_command(b)}</NMakeCleanCommandLine>
+               <NMakeIncludeSearchPath>${xml:b.includes_search_path}</NMakeIncludeSearchPath>
+               <NMakePreprocessorDefinitions>${xml:b.preprocessor_definitions};$(NMakePreprocessorDefinitions)</NMakePreprocessorDefinitions>
+               <IncludePath>${xml:b.includes_search_path}</IncludePath>
+               <ExecutablePath>$(ExecutablePath)</ExecutablePath>
+
+               ${if getattr(b, 'output_file', None)}
+               <NMakeOutput>${xml:b.output_file}</NMakeOutput>
+               ${endif}
+               ${if getattr(b, 'deploy_dir', None)}
+               <RemoteRoot>${xml:b.deploy_dir}</RemoteRoot>
+               ${endif}
+       </PropertyGroup>
+       ${endfor}
+
+       ${for b in project.build_properties}
+               ${if getattr(b, 'deploy_dir', None)}
+       <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='${b.configuration}|${b.platform}'">
+               <Deploy>
+                       <DeploymentType>CopyToHardDrive</DeploymentType>
+               </Deploy>
+       </ItemDefinitionGroup>
+               ${endif}
+       ${endfor}
+
+       <ItemGroup>
+               ${for x in project.source}
+               <${project.get_key(x)} Include='${x.win32path()}' />
+               ${endfor}
+       </ItemGroup>
+       <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
+       <ImportGroup Label="ExtensionTargets">
+       </ImportGroup>
+</Project>
+'''
+
+FILTER_TEMPLATE = '''<?xml version="1.0" encoding="UTF-8"?>
+<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+       <ItemGroup>
+               ${for x in project.source}
+                       <${project.get_key(x)} Include="${x.win32path()}">
+                               <Filter>${project.get_filter_name(x.parent)}</Filter>
+                       </${project.get_key(x)}>
+               ${endfor}
+       </ItemGroup>
+       <ItemGroup>
+               ${for x in project.dirs()}
+                       <Filter Include="${project.get_filter_name(x)}">
+                               <UniqueIdentifier>{${project.make_uuid(x.win32path())}}</UniqueIdentifier>
+                       </Filter>
+               ${endfor}
+       </ItemGroup>
+</Project>
+'''
+
+PROJECT_2008_TEMPLATE = r'''<?xml version="1.0" encoding="UTF-8"?>
+<VisualStudioProject ProjectType="Visual C++" Version="9,00"
+       Name="${xml: project.name}" ProjectGUID="{${project.uuid}}"
+       Keyword="MakeFileProj"
+       TargetFrameworkVersion="196613">
+       <Platforms>
+               ${if project.build_properties}
+               ${for b in project.build_properties}
+                  <Platform Name="${xml: b.platform}" />
+               ${endfor}
+               ${else}
+                  <Platform Name="Win32" />
+               ${endif}
+       </Platforms>
+       <ToolFiles>
+       </ToolFiles>
+       <Configurations>
+               ${if project.build_properties}
+               ${for b in project.build_properties}
+               <Configuration
+                       Name="${xml: b.configuration}|${xml: b.platform}"
+                       IntermediateDirectory="$ConfigurationName"
+                       OutputDirectory="${xml: b.outdir}"
+                       ConfigurationType="0">
+                       <Tool
+                               Name="VCNMakeTool"
+                               BuildCommandLine="${xml: project.get_build_command(b)}"
+                               ReBuildCommandLine="${xml: project.get_rebuild_command(b)}"
+                               CleanCommandLine="${xml: project.get_clean_command(b)}"
+                               ${if getattr(b, 'output_file', None)}
+                               Output="${xml: b.output_file}"
+                               ${endif}
+                               PreprocessorDefinitions="${xml: b.preprocessor_definitions}"
+                               IncludeSearchPath="${xml: b.includes_search_path}"
+                               ForcedIncludes=""
+                               ForcedUsingAssemblies=""
+                               AssemblySearchPath=""
+                               CompileAsManaged=""
+                       />
+               </Configuration>
+               ${endfor}
+               ${else}
+                       <Configuration Name="Release|Win32" >
+               </Configuration>
+               ${endif}
+       </Configurations>
+       <References>
+       </References>
+       <Files>
+${project.display_filter()}
+       </Files>
+</VisualStudioProject>
+'''
+
+SOLUTION_TEMPLATE = '''Microsoft Visual Studio Solution File, Format Version ${project.numver}
+# Visual Studio ${project.vsver}
+${for p in project.all_projects}
+Project("{${p.ptype()}}") = "${p.name}", "${p.title}", "{${p.uuid}}"
+EndProject${endfor}
+Global
+       GlobalSection(SolutionConfigurationPlatforms) = preSolution
+               ${if project.all_projects}
+               ${for (configuration, platform) in project.all_projects[0].ctx.project_configurations()}
+               ${configuration}|${platform} = ${configuration}|${platform}
+               ${endfor}
+               ${endif}
+       EndGlobalSection
+       GlobalSection(ProjectConfigurationPlatforms) = postSolution
+               ${for p in project.all_projects}
+                       ${if hasattr(p, 'source')}
+                       ${for b in p.build_properties}
+               {${p.uuid}}.${b.configuration}|${b.platform}.ActiveCfg = ${b.configuration}|${b.platform}
+                       ${if getattr(p, 'is_active', None)}
+               {${p.uuid}}.${b.configuration}|${b.platform}.Build.0 = ${b.configuration}|${b.platform}
+                       ${endif}
+                       ${if getattr(p, 'is_deploy', None)}
+               {${p.uuid}}.${b.configuration}|${b.platform}.Deploy.0 = ${b.configuration}|${b.platform}
+                       ${endif}
+                       ${endfor}
+                       ${endif}
+               ${endfor}
+       EndGlobalSection
+       GlobalSection(SolutionProperties) = preSolution
+               HideSolutionNode = FALSE
+       EndGlobalSection
+       GlobalSection(NestedProjects) = preSolution
+       ${for p in project.all_projects}
+               ${if p.parent}
+               {${p.uuid}} = {${p.parent.uuid}}
+               ${endif}
+       ${endfor}
+       EndGlobalSection
+EndGlobal
+'''
+
+COMPILE_TEMPLATE = '''def f(project):
+       lst = []
+       def xml_escape(value):
+               return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
+
+       %s
+
+       #f = open('cmd.txt', 'w')
+       #f.write(str(lst))
+       #f.close()
+       return ''.join(lst)
+'''
+reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<code>[^}]*?)\})", re.M)
+def compile_template(line):
+       """
+       Compile a template expression into a python function (like jsps, but way shorter)
+       """
+       extr = []
+       def repl(match):
+               g = match.group
+               if g('dollar'): return "$"
+               elif g('backslash'):
+                       return "\\"
+               elif g('subst'):
+                       extr.append(g('code'))
+                       return "<<|@|>>"
+               return None
+
+       line2 = reg_act.sub(repl, line)
+       params = line2.split('<<|@|>>')
+       assert(extr)
+
+
+       indent = 0
+       buf = []
+       app = buf.append
+
+       def app(txt):
+               buf.append(indent * '\t' + txt)
+
+       for x in range(len(extr)):
+               if params[x]:
+                       app("lst.append(%r)" % params[x])
+
+               f = extr[x]
+               if f.startswith('if') or f.startswith('for'):
+                       app(f + ':')
+                       indent += 1
+               elif f.startswith('py:'):
+                       app(f[3:])
+               elif f.startswith('endif') or f.startswith('endfor'):
+                       indent -= 1
+               elif f.startswith('else') or f.startswith('elif'):
+                       indent -= 1
+                       app(f + ':')
+                       indent += 1
+               elif f.startswith('xml:'):
+                       app('lst.append(xml_escape(%s))' % f[4:])
+               else:
+                       #app('lst.append((%s) or "cannot find %s")' % (f, f))
+                       app('lst.append(%s)' % f)
+
+       if extr:
+               if params[-1]:
+                       app("lst.append(%r)" % params[-1])
+
+       fun = COMPILE_TEMPLATE % "\n\t".join(buf)
+       #print(fun)
+       return Task.funex(fun)
+
+
+re_blank = re.compile('(\n|\r|\\s)*\n', re.M)
+def rm_blank_lines(txt):
+       txt = re_blank.sub('\r\n', txt)
+       return txt
+
+BOM = '\xef\xbb\xbf'
+try:
+       BOM = bytes(BOM, 'iso8859-1') # python 3
+except TypeError:
+       pass
+
+def stealth_write(self, data, flags='wb'):
+       try:
+               unicode
+       except NameError:
+               data = data.encode('utf-8') # python 3
+       else:
+               data = data.decode(sys.getfilesystemencoding(), 'replace')
+               data = data.encode('utf-8')
+
+       if self.name.endswith('.vcproj') or self.name.endswith('.vcxproj'):
+               data = BOM + data
+
+       try:
+               txt = self.read(flags='rb')
+               if txt != data:
+                       raise ValueError('must write')
+       except (IOError, ValueError):
+               self.write(data, flags=flags)
+       else:
+               Logs.debug('msvs: skipping %s' % self.win32path())
+Node.Node.stealth_write = stealth_write
+
+re_win32 = re.compile(r'^([/\\]cygdrive)?[/\\]([a-z])([^a-z0-9_-].*)', re.I)
+def win32path(self):
+       p = self.abspath()
+       m = re_win32.match(p)
+       if m:
+               return "%s:%s" % (m.group(2).upper(), m.group(3))
+       return p
+Node.Node.win32path = win32path
+
+re_quote = re.compile("[^a-zA-Z0-9-]")
+def quote(s):
+       return re_quote.sub("_", s)
+
+def xml_escape(value):
+       return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
+
+def make_uuid(v, prefix = None):
+       """
+       simple utility function
+       """
+       if isinstance(v, dict):
+               keys = list(v.keys())
+               keys.sort()
+               tmp = str([(k, v[k]) for k in keys])
+       else:
+               tmp = str(v)
+       d = Utils.md5(tmp.encode()).hexdigest().upper()
+       if prefix:
+               d = '%s%s' % (prefix, d[8:])
+       gid = uuid.UUID(d, version = 4)
+       return str(gid).upper()
+
+def diff(node, fromnode):
+       # difference between two nodes, but with "(..)" instead of ".."
+       c1 = node
+       c2 = fromnode
+
+       c1h = c1.height()
+       c2h = c2.height()
+
+       lst = []
+       up = 0
+
+       while c1h > c2h:
+               lst.append(c1.name)
+               c1 = c1.parent
+               c1h -= 1
+
+       while c2h > c1h:
+               up += 1
+               c2 = c2.parent
+               c2h -= 1
+
+       while id(c1) != id(c2):
+               lst.append(c1.name)
+               up += 1
+
+               c1 = c1.parent
+               c2 = c2.parent
+
+       for i in range(up):
+               lst.append('(..)')
+       lst.reverse()
+       return tuple(lst)
+
+class build_property(object):
+       pass
+
+class vsnode(object):
+       """
+       Abstract class representing visual studio elements
+       We assume that all visual studio nodes have a uuid and a parent
+       """
+       def __init__(self, ctx):
+               self.ctx = ctx # msvs context
+               self.name = '' # string, mandatory
+               self.vspath = '' # path in visual studio (name for dirs, absolute path for projects)
+               self.uuid = '' # string, mandatory
+               self.parent = None # parent node for visual studio nesting
+
+       def get_waf(self):
+               """
+               Override in subclasses...
+               """
+               return 'cd /d "%s" & %s' % (self.ctx.srcnode.win32path(), getattr(self.ctx, 'waf_command', 'waf.bat'))
+
+       def ptype(self):
+               """
+               Return a special uuid for projects written in the solution file
+               """
+               pass
+
+       def write(self):
+               """
+               Write the project file, by default, do nothing
+               """
+               pass
+
+       def make_uuid(self, val):
+               """
+               Alias for creating uuid values easily (the templates cannot access global variables)
+               """
+               return make_uuid(val)
+
class vsnode_vsdir(vsnode):
	"""
	Solution folder; these exist only inside Visual Studio and
	deliberately do not mirror the filesystem layout.
	"""
	# well-known guid marking solution folders in .sln files
	VS_GUID_SOLUTIONFOLDER = "2150E333-8FDC-42A3-9474-1A3956D46DE8"

	def __init__(self, ctx, uuid, name, vspath=''):
		vsnode.__init__(self, ctx)
		self.name = name
		self.title = name
		self.uuid = uuid
		if vspath:
			self.vspath = vspath
		else:
			self.vspath = name

	def ptype(self):
		return self.VS_GUID_SOLUTIONFOLDER
+
class vsnode_project(vsnode):
	"""
	Abstract class representing visual studio project elements
	A project is assumed to be writable, and has a node representing the file to write to
	"""
	# well-known guid marking C++ projects in the solution file
	VS_GUID_VCPROJ = "8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942"
	def ptype(self):
		"""
		Project-kind uuid written in the solution file
		"""
		return self.VS_GUID_VCPROJ

	def __init__(self, ctx, node):
		vsnode.__init__(self, ctx)
		self.path = node # node for the project file to write
		self.uuid = make_uuid(node.win32path())
		self.name = node.name
		self.title = self.path.win32path()
		self.source = [] # list of node objects
		self.build_properties = [] # list of properties (nmake commands, output dir, etc)

	def dirs(self):
		"""
		Get the list of parent folders of the source files (header files included)
		for writing the filters
		"""
		# NOTE(review): relies on self.tg, which is only set by subclasses
		# such as vsnode_target - confirm before calling on other projects
		lst = []
		def add(x):
			# collect folders up to (but excluding) the task generator directory
			if x.height() > self.tg.path.height() and x not in lst:
				lst.append(x)
				add(x.parent)
		for x in self.source:
			add(x.parent)
		return lst

	def write(self):
		"""
		Write the .vcxproj project file and its companion .filters file
		"""
		Logs.debug('msvs: creating %r' % self.path)

		# first write the project file
		template1 = compile_template(PROJECT_TEMPLATE)
		proj_str = template1(self)
		proj_str = rm_blank_lines(proj_str)
		# stealth_write avoids touching the file when the contents are unchanged
		self.path.stealth_write(proj_str)

		# then write the filter
		template2 = compile_template(FILTER_TEMPLATE)
		filter_str = template2(self)
		filter_str = rm_blank_lines(filter_str)
		tmp = self.path.parent.make_node(self.path.name + '.filters')
		tmp.stealth_write(filter_str)

	def get_key(self, node):
		"""
		required for writing the source files
		Return the msbuild item kind for a given source node
		"""
		name = node.name
		if name.endswith('.cpp') or name.endswith('.c'):
			return 'ClCompile'
		return 'ClInclude'

	def collect_properties(self):
		"""
		Returns a list of triplet (configuration, platform, output_directory)
		"""
		# in fact the list is stored on self.build_properties, not returned
		ret = []
		for c in self.ctx.configurations:
			for p in self.ctx.platforms:
				x = build_property()
				x.outdir = ''

				x.configuration = c
				x.platform = p

				x.preprocessor_definitions = ''
				x.includes_search_path = ''

				# can specify "deploy_dir" too
				ret.append(x)
		self.build_properties = ret

	def get_build_params(self, props):
		"""
		Return the (waf command, options) pair used by the nmake build lines
		"""
		opt = '--execsolution=%s' % self.ctx.get_solution_node().win32path()
		return (self.get_waf(), opt)

	def get_build_command(self, props):
		# nmake command executed by visual studio for "build"
		return "%s build %s" % self.get_build_params(props)

	def get_clean_command(self, props):
		# nmake command executed by visual studio for "clean"
		return "%s clean %s" % self.get_build_params(props)

	def get_rebuild_command(self, props):
		# nmake command executed by visual studio for "rebuild"
		return "%s clean build %s" % self.get_build_params(props)

	def get_filter_name(self, node):
		# folder name displayed in the visual studio filters for a source directory
		lst = diff(node, self.tg.path)
		return '\\'.join(lst) or '.'
+
class vsnode_alias(vsnode_project):
	"""
	Project bound to a build phase instead of a real target (see the subclasses)
	"""
	def __init__(self, ctx, node, name):
		vsnode_project.__init__(self, ctx, node)
		self.name = name
		self.output_file = '' # aliases produce no binary of their own
+
class vsnode_build_all(vsnode_alias):
	"""
	Fake target used to emulate the behaviour of "make all" (starting one process by target is slow)
	This is the only alias enabled by default
	"""
	def __init__(self, ctx, node, name='build_all_projects'):
		vsnode_alias.__init__(self, ctx, node, name)
		self.is_active = True # built by default, e.g. when pressing F7
+
class vsnode_install_all(vsnode_alias):
	"""
	Fake target used to emulate the behaviour of "make install"
	"""
	def __init__(self, ctx, node, name='install_all_projects'):
		vsnode_alias.__init__(self, ctx, node, name)

	def get_build_command(self, props):
		# run the install step right after the build
		return "%s build install %s" % self.get_build_params(props)

	def get_clean_command(self, props):
		return "%s clean %s" % self.get_build_params(props)

	def get_rebuild_command(self, props):
		return "%s clean build install %s" % self.get_build_params(props)
+
class vsnode_project_view(vsnode_alias):
	"""
	Fake target used to emulate a file system view
	"""
	def __init__(self, ctx, node, name='project_view'):
		vsnode_alias.__init__(self, ctx, node, name)
		self.tg = self.ctx() # fake one, cannot remove
		# glob patterns excluded from the file system view (waf files,
		# visual studio caches, and the waf lock file)
		self.exclude_files = Node.exclude_regs + '''
waf-1.8.*
waf3-1.8.*/**
.waf-1.8.*
.waf3-1.8.*/**
**/*.sdf
**/*.suo
**/*.ncb
**/%s
		''' % Options.lockfile

	def collect_source(self):
		# this is likely to be slow
		self.source = self.ctx.srcnode.ant_glob('**', excl=self.exclude_files)

	def get_build_command(self, props):
		# re-run whatever command created this view (e.g. 'msvs')
		params = self.get_build_params(props) + (self.ctx.cmd,)
		return "%s %s %s" % params

	def get_clean_command(self, props):
		# nothing to clean for a pure file-system view
		return ""

	def get_rebuild_command(self, props):
		return self.get_build_command(props)
+
class vsnode_target(vsnode_project):
	"""
	Visual studio project representing a targets (programs, libraries, etc) and bound
	to a task generator
	"""
	def __init__(self, ctx, tg):
		"""
		A project is more or less equivalent to a file/folder
		"""
		base = getattr(ctx, 'projects_dir', None) or tg.path
		node = base.make_node(quote(tg.name) + ctx.project_extension) # the project file as a Node
		vsnode_project.__init__(self, ctx, node)
		self.name = quote(tg.name)
		self.tg     = tg  # task generator

	def get_build_params(self, props):
		"""
		Override the default to add the target name
		"""
		opt = '--execsolution=%s' % self.ctx.get_solution_node().win32path()
		if getattr(self, 'tg', None):
			# restrict the build to this particular target
			opt += " --targets=%s" % self.tg.name
		return (self.get_waf(), opt)

	def collect_source(self):
		"""
		Gather the source files and the headers below msvs_includes for display
		"""
		tg = self.tg
		source_files = tg.to_nodes(getattr(tg, 'source', []))
		include_dirs = Utils.to_list(getattr(tg, 'msvs_includes', []))
		include_files = []
		for x in include_dirs:
			if isinstance(x, str):
				# resolve strings relative to the task generator path
				x = tg.path.find_node(x)
			if x:
				lst = [y for y in x.ant_glob(HEADERS_GLOB, flat=False)]
				include_files.extend(lst)

		# remove duplicates
		self.source.extend(list(set(source_files + include_files)))
		self.source.sort(key=lambda x: x.win32path())

	def collect_properties(self):
		"""
		Visual studio projects are associated with platforms and configurations (for building especially)
		"""
		super(vsnode_target, self).collect_properties()
		for x in self.build_properties:
			x.outdir = self.path.parent.win32path()
			x.preprocessor_definitions = ''
			x.includes_search_path = ''

			try:
				tsk = self.tg.link_task
			except AttributeError:
				# task generator without a link task: keep the defaults
				pass
			else:
				x.output_file = tsk.outputs[0].win32path()
				x.preprocessor_definitions = ';'.join(tsk.env.DEFINES)
				x.includes_search_path = ';'.join(self.tg.env.INCPATHS)
+
class msvs_generator(BuildContext):
	'''generates a visual studio 2010 solution'''
	cmd = 'msvs'
	fun = 'build'

	def init(self):
		"""
		Some data that needs to be present
		"""
		if not getattr(self, 'configurations', None):
			self.configurations = ['Release'] # LocalRelease, RemoteDebug, etc
		if not getattr(self, 'platforms', None):
			self.platforms = ['Win32']
		if not getattr(self, 'all_projects', None):
			self.all_projects = []
		if not getattr(self, 'project_extension', None):
			self.project_extension = '.vcxproj'
		if not getattr(self, 'projects_dir', None):
			# project files are written to a hidden folder by default
			self.projects_dir = self.srcnode.make_node('.depproj')
			self.projects_dir.mkdir()

		# bind the classes to the object, so that subclass can provide custom generators
		if not getattr(self, 'vsnode_vsdir', None):
			self.vsnode_vsdir = vsnode_vsdir
		if not getattr(self, 'vsnode_target', None):
			self.vsnode_target = vsnode_target
		if not getattr(self, 'vsnode_build_all', None):
			self.vsnode_build_all = vsnode_build_all
		if not getattr(self, 'vsnode_install_all', None):
			self.vsnode_install_all = vsnode_install_all
		if not getattr(self, 'vsnode_project_view', None):
			self.vsnode_project_view = vsnode_project_view

		# solution format version / visual studio release
		self.numver = '11.00'
		self.vsver  = '2010'

	def execute(self):
		"""
		Entry point
		"""
		self.restore()
		if not self.all_envs:
			self.load_envs()
		self.recurse([self.run_dir])

		# user initialization
		self.init()

		# two phases for creating the solution
		self.collect_projects() # add project objects into "self.all_projects"
		self.write_files() # write the corresponding project and solution files

	def collect_projects(self):
		"""
		Fill the list self.all_projects with project objects
		Fill the list of build targets
		"""
		self.collect_targets()
		self.add_aliases()
		self.collect_dirs()
		default_project = getattr(self, 'default_project', None)
		def sortfun(x):
			# the default project sorts first (empty key), everything else by path/name
			if x.name == default_project:
				return ''
			return getattr(x, 'path', None) and x.path.win32path() or x.name
		self.all_projects.sort(key=sortfun)

	def write_files(self):
		"""
		Write the project and solution files from the data collected
		so far. It is unlikely that you will want to change this
		"""
		for p in self.all_projects:
			p.write()

		# and finally write the solution file
		node = self.get_solution_node()
		node.parent.mkdir()
		Logs.warn('Creating %r' % node)
		template1 = compile_template(SOLUTION_TEMPLATE)
		sln_str = template1(self)
		sln_str = rm_blank_lines(sln_str)
		node.stealth_write(sln_str)

	def get_solution_node(self):
		"""
		The solution filename is required when writing the .vcproj files
		return self.solution_node and if it does not exist, make one
		"""
		try:
			return self.solution_node
		except AttributeError:
			pass

		solution_name = getattr(self, 'solution_name', None)
		if not solution_name:
			# derive a default name from APPNAME in the wscript
			solution_name = getattr(Context.g_module, Context.APPNAME, 'project') + '.sln'
		if os.path.isabs(solution_name):
			self.solution_node = self.root.make_node(solution_name)
		else:
			self.solution_node = self.srcnode.make_node(solution_name)
		return self.solution_node

	def project_configurations(self):
		"""
		Helper that returns all the pairs (config,platform)
		"""
		ret = []
		for c in self.configurations:
			for p in self.platforms:
				ret.append((c, p))
		return ret

	def collect_targets(self):
		"""
		Process the list of task generators
		"""
		for g in self.groups:
			for tg in g:
				if not isinstance(tg, TaskGen.task_gen):
					continue

				if not hasattr(tg, 'msvs_includes'):
					tg.msvs_includes = tg.to_list(getattr(tg, 'includes', [])) + tg.to_list(getattr(tg, 'export_includes', []))
				tg.post()
				if not getattr(tg, 'link_task', None):
					# only link targets (programs/libraries) get a project
					continue

				p = self.vsnode_target(self, tg)
				p.collect_source() # delegate this processing
				p.collect_properties()
				self.all_projects.append(p)

	def add_aliases(self):
		"""
		Add a specific target that emulates the "make all" necessary for Visual studio when pressing F7
		We also add an alias for "make install" (disabled by default)
		"""
		base = getattr(self, 'projects_dir', None) or self.tg.path

		node_project = base.make_node('build_all_projects' + self.project_extension) # Node
		p_build = self.vsnode_build_all(self, node_project)
		p_build.collect_properties()
		self.all_projects.append(p_build)

		node_project = base.make_node('install_all_projects' + self.project_extension) # Node
		p_install = self.vsnode_install_all(self, node_project)
		p_install.collect_properties()
		self.all_projects.append(p_install)

		node_project = base.make_node('project_view' + self.project_extension) # Node
		p_view = self.vsnode_project_view(self, node_project)
		p_view.collect_source()
		p_view.collect_properties()
		self.all_projects.append(p_view)

		# group the three aliases below one solution folder
		n = self.vsnode_vsdir(self, make_uuid(self.srcnode.win32path() + 'build_aliases'), "build_aliases")
		p_build.parent = p_install.parent = p_view.parent = n
		self.all_projects.append(n)

	def collect_dirs(self):
		"""
		Create the folder structure in the Visual studio project view
		"""
		seen = {}
		def make_parents(proj):
			# look at a project, try to make a parent
			if getattr(proj, 'parent', None):
				# aliases already have parents
				return
			x = proj.iter_path
			if x in seen:
				proj.parent = seen[x]
				return

			# There is no vsnode_vsdir for x yet.
			# So create a project representing the folder "x"
			n = proj.parent = seen[x] = self.vsnode_vsdir(self, make_uuid(x.win32path()), x.name)
			n.iter_path = x.parent
			self.all_projects.append(n)

			# recurse up to the project directory
			if x.height() > self.srcnode.height() + 1:
				make_parents(n)

		for p in self.all_projects[:]: # iterate over a copy of all projects
			if not getattr(p, 'tg', None):
				# but only projects that have a task generator
				continue

			# make a folder for each task generator
			p.iter_path = p.tg.path
			make_parents(p)
+
def wrap_2008(cls):
	"""
	Return a subclass of *cls* that writes visual studio 2008 (.vcproj)
	project files instead of the 2010 format; used by msvs_2008_generator
	"""
	class dec(cls):
		def __init__(self, *k, **kw):
			cls.__init__(self, *k, **kw)
			self.project_template = PROJECT_2008_TEMPLATE

		def display_filter(self):
			"""
			Build the nested <Filter>/<File> xml fragment for the 2008 project
			"""
			# root of the filter tree, a plain property bag
			root = build_property()
			root.subfilters = []
			root.sourcefiles = []
			root.source = []
			root.name = ''

			# memoized on its (hashable) path-tuple argument, so each
			# intermediate folder node is created exactly once
			@Utils.run_once
			def add_path(lst):
				if not lst:
					return root
				child = build_property()
				child.subfilters = []
				child.sourcefiles = []
				child.source = []
				child.name = lst[-1]

				par = add_path(lst[:-1])
				par.subfilters.append(child)
				return child

			for x in self.source:
				# this crap is for enabling subclasses to override get_filter_name
				tmp = self.get_filter_name(x.parent)
				tmp = tmp != '.' and tuple(tmp.split('\\')) or ()
				par = add_path(tmp)
				par.source.append(x)

			def display(n):
				# recursively render one filter node and its children
				buf = []
				for x in n.source:
					buf.append('<File RelativePath="%s" FileType="%s"/>\n' % (xml_escape(x.win32path()), self.get_key(x)))
				for x in n.subfilters:
					buf.append('<Filter Name="%s">' % xml_escape(x.name))
					buf.append(display(x))
					buf.append('</Filter>')
				return '\n'.join(buf)

			return display(root)

		def get_key(self, node):
			"""
			If you do not want to let visual studio use the default file extensions,
			override this method to return a value:
				0: C/C++ Code, 1: C++ Class, 2: C++ Header File, 3: C++ Form,
				4: C++ Control, 5: Text File, 6: DEF File, 7: IDL File,
				8: Makefile, 9: RGS File, 10: RC File, 11: RES File, 12: XSD File,
				13: XML File, 14: HTML File, 15: CSS File, 16: Bitmap, 17: Icon,
				18: Resx File, 19: BSC File, 20: XSX File, 21: C++ Web Service,
				22: ASAX File, 23: Asp Page, 24: Document, 25: Discovery File,
				26: C# File, 27: eFileTypeClassDiagram, 28: MHTML Document,
				29: Property Sheet, 30: Cursor, 31: Manifest, 32: eFileTypeRDLC
			"""
			return ''

		def write(self):
			# the 2008 format has a single project file (no .filters companion)
			Logs.debug('msvs: creating %r' % self.path)
			template1 = compile_template(self.project_template)
			proj_str = template1(self)
			proj_str = rm_blank_lines(proj_str)
			self.path.stealth_write(proj_str)

	return dec
+
class msvs_2008_generator(msvs_generator):
	'''generates a visual studio 2008 solution'''
	cmd = 'msvs2008'
	fun = msvs_generator.fun

	def init(self):
		"""
		Set the 2008-specific defaults, then delegate to msvs_generator.init
		"""
		if not getattr(self, 'project_extension', None):
			self.project_extension = '_2008.vcproj'
		if not getattr(self, 'solution_name', None):
			self.solution_name = getattr(Context.g_module, Context.APPNAME, 'project') + '_2008.sln'

		# wrap the node classes so that they emit the 2008 project format
		if not getattr(self, 'vsnode_target', None):
			self.vsnode_target = wrap_2008(vsnode_target)
		if not getattr(self, 'vsnode_build_all', None):
			self.vsnode_build_all = wrap_2008(vsnode_build_all)
		if not getattr(self, 'vsnode_install_all', None):
			self.vsnode_install_all = wrap_2008(vsnode_install_all)
		if not getattr(self, 'vsnode_project_view', None):
			self.vsnode_project_view = wrap_2008(vsnode_project_view)

		msvs_generator.init(self)
		# solution format version for visual studio 2008
		self.numver = '10.00'
		self.vsver  = '2008'
+
def options(ctx):
	"""
	If the msvs option is used, try to detect if the build is made from visual studio
	"""
	ctx.add_option('--execsolution', action='store', help='when building with visual studio, use a build state file')

	old = BuildContext.execute
	def override_build_state(ctx):
		# replacement for BuildContext.execute that maintains the
		# .lastbuildstate/.unsuccessfulbuild marker files next to the solution
		def lock(rm, add):
			# delete the marker with suffix 'rm', create the one with suffix 'add';
			# both operations are best-effort
			uns = ctx.options.execsolution.replace('.sln', rm)
			uns = ctx.root.make_node(uns)
			try:
				uns.delete()
			except OSError:
				pass

			uns = ctx.options.execsolution.replace('.sln', add)
			uns = ctx.root.make_node(uns)
			try:
				uns.write('')
			except EnvironmentError:
				pass

		if ctx.options.execsolution:
			ctx.launch_dir = Context.top_dir # force a build for the whole project (invalid cwd when called by visual studio)
			# mark the build as unsuccessful until it finishes cleanly
			lock('.lastbuildstate', '.unsuccessfulbuild')
			old(ctx)
			lock('.unsuccessfulbuild', '.lastbuildstate')
		else:
			old(ctx)
	BuildContext.execute = override_build_state
diff --git a/third_party/waf/waflib/extras/netcache_client.py b/third_party/waf/waflib/extras/netcache_client.py
new file mode 100644 (file)
index 0000000..63859b6
--- /dev/null
@@ -0,0 +1,389 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011-2015 (ita)
+
+"""
+A client for the network cache (playground/netcache/). Launch the server with:
+./netcache_server, then use it for the builds by adding the following:
+
+       def build(bld):
+               bld.load('netcache_client')
+
+The parameters should be present in the environment in the form:
+       NETCACHE=host:port waf configure build
+
+Or in a more detailed way:
+       NETCACHE_PUSH=host:port NETCACHE_PULL=host:port waf configure build
+
+where:
+       host: host where the server resides, by default localhost
+       port: by default push on 11001 and pull on 12001
+
+Use the server provided in playground/netcache/Netcache.java
+"""
+
+import os, socket, time, atexit, sys
+from waflib import Task, Logs, Utils, Build, Runner
+from waflib.Configure import conf
+
# maximum chunk size for socket reads and writes
BUF = 8192 * 16
# fixed size of a protocol header, padded with spaces via str.ljust
HEADER_SIZE = 128
# valid operating modes for the cache client
MODES = ['PUSH', 'PULL', 'PUSH_PULL']
# lifetime of the cached server listing; 0 disables the listing optimization
STALE_TIME = 30 # seconds

# protocol commands sent in the header
GET = 'GET'
PUT = 'PUT'
LST = 'LST'
BYE = 'BYE'

# (timestamp, [signatures]) from the last LST reply, refreshed by check_cache
all_sigs_in_cache = (0.0, [])
+
def put_data(conn, data):
	"""
	Send *data* over the socket *conn*, retrying partial sends until the
	whole payload is out; raises RuntimeError when the peer disconnects.
	"""
	if sys.hexversion > 0x3000000:
		# python 3: the wire protocol works on bytes
		data = data.encode('iso8859-1')
	total = len(data)
	pos = 0
	while pos < total:
		k = conn.send(data[pos:])
		if not k:
			raise RuntimeError('connection ended')
		pos += k
+
# pools of idle sockets, reused across tasks to avoid reconnecting per file
push_connections = Runner.Queue(0)
pull_connections = Runner.Queue(0)
def get_connection(push=False):
	# Return a connection to the push or pull server: a pooled one when
	# available, else a freshly-connected socket... do not forget to release it!
	try:
		if push:
			ret = push_connections.get(block=False)
		else:
			ret = pull_connections.get(block=False)
	except Exception:
		# empty pool (Queue.Empty): open a new socket
		ret = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
		if push:
			ret.connect(Task.push_addr)
		else:
			ret.connect(Task.pull_addr)
	return ret
+
def release_connection(conn, msg='', push=False):
	# Return a healthy connection to the matching pool for reuse;
	# msg is accepted for symmetry with close_connection but unused here
	if conn:
		if push:
			push_connections.put(conn)
		else:
			pull_connections.put(conn)
+
def close_connection(conn, msg=''):
	"""
	Send a BYE message to the server and close the socket.

	Failures are deliberately ignored: this is best-effort cleanup that may
	run at interpreter exit or on already-broken connections. The handlers
	catch Exception rather than using a bare except so that
	KeyboardInterrupt/SystemExit still propagate.
	"""
	if conn:
		data = '%s,%s' % (BYE, msg)
		try:
			put_data(conn, data.ljust(HEADER_SIZE))
		except Exception:
			pass
		try:
			conn.close()
		except Exception:
			pass
+
def close_all():
	"""
	Drain both connection pools and close every pooled socket; registered
	with atexit so the connections are shut down when the process exits.
	"""
	for q in (push_connections, pull_connections):
		while q.qsize():
			conn = q.get()
			try:
				close_connection(conn)
			except Exception:
				# ignore errors when cleaning up, but do not swallow
				# KeyboardInterrupt/SystemExit (the original bare except did)
				pass
atexit.register(close_all)
+
def read_header(conn):
	"""
	Read exactly HEADER_SIZE bytes from the socket and return them as a
	text string; raises ValueError if the connection ends early.
	"""
	buf = []
	cnt = 0
	while cnt < HEADER_SIZE:
		chunk = conn.recv(HEADER_SIZE - cnt)
		if not chunk:
			raise ValueError('connection ended when reading a header %r' % buf)
		buf.append(chunk)
		cnt += len(chunk)
	if sys.hexversion > 0x3000000:
		# python 3: join the byte chunks, then decode to text
		joined = ''.encode('iso8859-1').join(buf)
		return joined.decode('iso8859-1')
	return ''.join(buf)
+
def check_cache(conn, ssig):
	"""
	List the files on the server, this is an optimization because it assumes that
	concurrent builds are rare
	Raises ValueError when *ssig* is not among the entries known to the server.
	"""
	global all_sigs_in_cache
	if not STALE_TIME:
		# listing disabled, always ask the server for each file directly
		return
	if time.time() - all_sigs_in_cache[0] > STALE_TIME:

		# cached listing too old: request a fresh one (LST command)
		params = (LST,'')
		put_data(conn, ','.join(params).ljust(HEADER_SIZE))

		# read what is coming back
		ret = read_header(conn)
		size = int(ret.split(',')[0])

		buf = []
		cnt = 0
		while cnt < size:
			data = conn.recv(min(BUF, size-cnt))
			if not data:
				raise ValueError('connection ended %r %r' % (cnt, size))
			buf.append(data)
			cnt += len(data)

		if sys.hexversion > 0x3000000:
			# python 3: join byte chunks and decode to text
			ret = ''.encode('iso8859-1').join(buf)
			ret = ret.decode('iso8859-1')
		else:
			ret = ''.join(buf)

		# one signature per line in the reply
		all_sigs_in_cache = (time.time(), ret.splitlines())
		Logs.debug('netcache: server cache has %r entries' % len(all_sigs_in_cache[1]))

	if not ssig in all_sigs_in_cache[1]:
		raise ValueError('no file %s in cache' % ssig)
+
class MissingFile(Exception):
	"""Raised by recv_file when the requested file is not in the remote cache."""
	pass
+
def recv_file(conn, ssig, count, p):
	"""
	Fetch one cached output file from the server and write it to the path *p*.

	:param conn: connected socket to the pull server
	:param ssig: hex task signature identifying the cache entry
	:param count: index of the output file within the task
	:param p: destination file path
	:raises MissingFile: when the server does not hold the file
	:raises ValueError: when the connection ends mid-transfer
	"""
	check_cache(conn, ssig)

	params = (GET, ssig, str(count))
	put_data(conn, ','.join(params).ljust(HEADER_SIZE))
	data = read_header(conn)

	size = int(data.split(',')[0])

	if size == -1:
		raise MissingFile('no file %s - %s in cache' % (ssig, count))

	# get the file, writing immediately
	# TODO writing to a tmp file and renaming would avoid leaving partial files
	f = open(p, 'wb')
	try:
		cnt = 0
		while cnt < size:
			data = conn.recv(min(BUF, size-cnt))
			if not data:
				raise ValueError('connection ended %r %r' % (cnt, size))
			f.write(data)
			cnt += len(data)
	finally:
		# fix: the original leaked the file handle when the transfer aborted
		f.close()
+
def sock_send(conn, ssig, cnt, p):
	"""
	Push one file to the cache server: send a PUT header with the signature,
	file index and size, then stream the file contents.

	:param conn: connected socket to the push server
	:param ssig: hex task signature identifying the cache entry
	:param cnt: index of the output file within the task
	:param p: path of the file to upload
	:raises ValueError: when the connection ends mid-transfer
	"""
	#print "pushing %r %r %r" % (ssig, cnt, p)
	size = os.stat(p).st_size
	params = (PUT, ssig, str(cnt), str(size))
	put_data(conn, ','.join(params).ljust(HEADER_SIZE))
	f = open(p, 'rb')
	try:
		cnt = 0
		while cnt < size:
			r = f.read(min(BUF, size-cnt))
			while r:
				k = conn.send(r)
				if not k:
					raise ValueError('connection ended')
				cnt += k
				r = r[k:]
	finally:
		# fix: the original never closed the file handle at all
		f.close()
+
def can_retrieve_cache(self):
	"""
	Task method: try to restore all the output files of the task from the
	remote cache. Returns True and sets self.cached when every output was
	retrieved, False otherwise.
	"""
	if not Task.pull_addr:
		# pulling is disabled
		return False
	if not self.outputs:
		return False
	self.cached = False

	cnt = 0
	sig = self.signature()
	ssig = Utils.to_hex(self.uid() + sig)

	conn = None
	err = False
	try:
		try:
			conn = get_connection()
			for node in self.outputs:
				p = node.abspath()
				recv_file(conn, ssig, cnt, p)
				cnt += 1
		except MissingFile as e:
			# expected cache miss, the connection stays usable
			Logs.debug('netcache: file is not in the cache %r' % e)
			err = True

		except Exception as e:
			Logs.debug('netcache: could not get the files %r' % e)
			err = True

			# broken connection? remove this one
			close_connection(conn)
			conn = None
	finally:
		# healthy connections go back to the pool
		release_connection(conn)
	if err:
		return False

	for node in self.outputs:
		# stamp the outputs with the task signature
		node.sig = sig
		#if self.generator.bld.progress_bar < 1:
		#	self.generator.bld.to_log('restoring from cache %r\n' % node.abspath())

	self.cached = True
	return True
+
@Utils.run_once
def put_files_cache(self):
	"""
	Task method: push the output files of the task to the remote cache.
	Wrapped in Utils.run_once, which memoizes on its argument - presumably
	keying on the task object so the upload happens at most once per task;
	TODO confirm against Utils.run_once.
	"""
	if not Task.push_addr:
		# pushing is disabled
		return
	if not self.outputs:
		return
	if getattr(self, 'cached', None):
		# the outputs came from the cache, no point in pushing them back
		return

	#print "called put_files_cache", id(self)
	bld = self.generator.bld
	sig = self.signature()
	ssig = Utils.to_hex(self.uid() + sig)

	conn = None
	cnt = 0
	try:
		for node in self.outputs:
			# We could re-create the signature of the task with the signature of the outputs
			# in practice, this means hashing the output files
			# this is unnecessary
			try:
				if not conn:
					conn = get_connection(push=True)
				sock_send(conn, ssig, cnt, node.abspath())
			except Exception as e:
				Logs.debug("netcache: could not push the files %r" % e)

				# broken connection? remove this one
				close_connection(conn)
				conn = None
			cnt += 1
	finally:
		release_connection(conn, push=True)

	bld.task_sigs[self.uid()] = self.cache_sig
+
def hash_env_vars(self, env, vars_lst):
	"""Hash the configuration variables *vars_lst* from *env*.

	Reimplemented so that the resulting hash does not depend on local
	paths: occurrences of the source directory path are stripped before
	hashing. Results are memoized on the build context.
	"""
	if not env.table:
		env = env.parent
		if not env:
			return Utils.SIG_NIL

	key = str(id(env)) + str(vars_lst)
	try:
		cache = self.cache_env
	except AttributeError:
		cache = self.cache_env = {}
	else:
		try:
			return cache[key]
		except KeyError:
			pass

	data = str([env[name] for name in vars_lst])
	# erase the source directory path so the hash is location-independent
	data = data.replace(repr(self.srcnode.abspath())[:-1], '')
	h = Utils.md5()
	h.update(data.encode())
	digest = h.digest()

	Logs.debug('envhash: %r %r', digest, data)

	cache[key] = digest

	return digest
+
def uid(self):
	"""Return a stable identifier for the task.

	Reimplemented so that the identifier does not depend on local paths:
	input/output nodes are hashed relative to the source directory.
	The value is computed once and memoized on the task.
	"""
	try:
		return self.uid_
	except AttributeError:
		pass
	h = Utils.md5()
	root = self.generator.bld.srcnode
	h.update(self.__class__.__name__.encode())
	for node in self.inputs + self.outputs:
		h.update(node.path_from(root).encode())
	self.uid_ = h.digest()
	return self.uid_
+
+
def make_cached(cls):
	"""Wrap the run/post_run methods of a task class with cache handling.

	``run`` first tries to retrieve the outputs from the cache; ``post_run``
	pushes the outputs to the cache and applies an optional chmod. Classes
	or instances with a truthy ``nocache`` attribute are left untouched.
	"""
	if getattr(cls, 'nocache', None):
		return

	original_run = cls.run
	def run(self):
		# a cache hit short-circuits the real execution
		if not getattr(self, 'nocache', False) and self.can_retrieve_cache():
			return 0
		return original_run(self)
	cls.run = run

	original_post_run = cls.post_run
	def post_run(self):
		if getattr(self, 'nocache', False):
			return original_post_run(self)
		bld = self.generator.bld
		result = original_post_run(self)
		if bld.cache_global:
			self.put_files_cache()
		if hasattr(self, 'chmod'):
			for node in self.outputs:
				os.chmod(node.abspath(), self.chmod)
		return result
	cls.post_run = post_run
+
@conf
def setup_netcache(ctx, push_addr, pull_addr):
	"""Install the network cache methods on the task classes.

	``push_addr``/``pull_addr`` are (host, port) tuples or None to
	disable the corresponding direction.
	"""
	Task.push_addr = push_addr
	Task.pull_addr = pull_addr
	Task.Task.can_retrieve_cache = can_retrieve_cache
	Task.Task.put_files_cache = put_files_cache
	Task.Task.uid = uid
	Build.BuildContext.hash_env_vars = hash_env_vars
	ctx.cache_global = True

	# wrap run/post_run on every task class known at this point
	for task_class in Task.classes.values():
		make_cached(task_class)
+
def build(bld):
	"""Enable the network cache for this build.

	Reads NETCACHE / NETCACHE_PULL / NETCACHE_PUSH from the environment
	('host:port' strings) and installs the cache hooks. When none of the
	variables is set, localhost defaults are used.
	"""
	if 'NETCACHE' not in os.environ and 'NETCACHE_PULL' not in os.environ and 'NETCACHE_PUSH' not in os.environ:
		# bugfix: the message used to advertise the opposite port numbers
		# from the values actually assigned below
		Logs.warn('Setting  NETCACHE_PULL=127.0.0.1:12001 and NETCACHE_PUSH=127.0.0.1:11001')
		os.environ['NETCACHE_PULL'] = '127.0.0.1:12001'
		os.environ['NETCACHE_PUSH'] = '127.0.0.1:11001'

	if 'NETCACHE' in os.environ:
		# NETCACHE provides a fallback for either direction
		os.environ.setdefault('NETCACHE_PUSH', os.environ['NETCACHE'])
		os.environ.setdefault('NETCACHE_PULL', os.environ['NETCACHE'])

	def parse_addr(value):
		# '' or an unset variable disables that direction
		if not value:
			return None
		host, port = value.split(':')
		return (host, int(port))

	# bugfix: use .get() — when only one of the two variables is set the
	# original direct indexing raised KeyError on the other one
	pull_addr = parse_addr(os.environ.get('NETCACHE_PULL'))
	push_addr = parse_addr(os.environ.get('NETCACHE_PUSH'))

	setup_netcache(bld, push_addr, pull_addr)
diff --git a/third_party/waf/waflib/extras/nobuild.py b/third_party/waf/waflib/extras/nobuild.py
new file mode 100644 (file)
index 0000000..c628af8
--- /dev/null
@@ -0,0 +1,23 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2015 (ita)
+
+"""
+Override the build commands to write empty files.
+This is useful for profiling and evaluating the Python overhead.
+
+To use::
+
+    def build(bld):
+        ...
+        bld.load('nobuild')
+
+"""
+
+from waflib import Task
def build(bld):
	"""Replace the run method of every task class so that it only writes
	empty output files (no real compilation/link work is performed).
	"""
	def run(self):
		# create empty outputs so dependent tasks consider this one done
		for x in self.outputs:
			x.write('')
	# idiom: iterate the values directly — the class names were unused
	for cls in Task.classes.values():
		cls.run = run
diff --git a/third_party/waf/waflib/extras/objcopy.py b/third_party/waf/waflib/extras/objcopy.py
new file mode 100644 (file)
index 0000000..939c2c1
--- /dev/null
@@ -0,0 +1,51 @@
+#!/usr/bin/python
+# Grygoriy Fuchedzhy 2010
+
+"""
+Support for converting linked targets to ihex, srec or binary files using
+objcopy. Use the 'objcopy' feature in conjunction with the 'cc' or 'cxx'
+feature. The 'objcopy' feature uses the following attributes:
+
+objcopy_bfdname                Target object format name (eg. ihex, srec, binary).
+                                          Defaults to ihex.
+objcopy_target          File name used for objcopy output. This defaults to the
+                                          target name with objcopy_bfdname as extension.
+objcopy_install_path   Install path for objcopy_target file. Defaults to ${PREFIX}/firmware.
+objcopy_flags            Additional flags passed to objcopy.
+"""
+
+from waflib.Utils import def_attrs
+from waflib import Task
+from waflib.TaskGen import feature, after_method
+
class objcopy(Task.Task):
	# convert the linked binary (SRC) to the requested object format (TGT)
	run_str = '${OBJCOPY} -O ${TARGET_BFDNAME} ${OBJCOPYFLAGS} ${SRC} ${TGT}'
	color   = 'CYAN'
+
@feature('objcopy')
@after_method('apply_link')
def map_objcopy(self):
	"""Create an objcopy task converting the link output of this task
	generator, and schedule the converted file for installation.
	"""
	def_attrs(self,
	   objcopy_bfdname = 'ihex',
	   objcopy_target = None,
	   objcopy_install_path = "${PREFIX}/firmware",
	   objcopy_flags = '')

	link_output = self.link_task.outputs[0]
	if not self.objcopy_target:
		# default target name: link output with the bfd name as extension
		self.objcopy_target = link_output.change_ext('.' + self.objcopy_bfdname).name
	task = self.create_task('objcopy', src=link_output, tgt=self.path.find_or_declare(self.objcopy_target))

	task.env.append_unique('TARGET_BFDNAME', self.objcopy_bfdname)
	try:
		# NOTE(review): def_attrs above guarantees objcopy_flags exists, so
		# this AttributeError branch looks unreachable — confirm before
		# simplifying (append_unique of '' may still affect expansion)
		task.env.append_unique('OBJCOPYFLAGS', getattr(self, 'objcopy_flags'))
	except AttributeError:
		pass

	if self.objcopy_install_path:
		self.bld.install_files(self.objcopy_install_path,
							   task.outputs[0],
							   env=task.env.derive())
+
def configure(ctx):
	# objcopy is required: abort the configuration when it is missing
	ctx.find_program('objcopy', var='OBJCOPY', mandatory=True)
diff --git a/third_party/waf/waflib/extras/package.py b/third_party/waf/waflib/extras/package.py
new file mode 100644 (file)
index 0000000..387a3cd
--- /dev/null
@@ -0,0 +1,75 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011
+
+"""
+Obtain packages, unpack them in a location, and add associated uselib variables
+(CFLAGS_pkgname, LIBPATH_pkgname, etc).
+
+The default is to use a Dependencies.txt file in the source directory.
+
+This is a work in progress.
+
+Usage:
+
+def options(opt):
+       opt.load('package')
+
+def configure(conf):
+       conf.load_packages()
+"""
+
+from waflib import Logs
+from waflib.Configure import conf
+
+try:
+       from urllib import request
+except ImportError:
+       from urllib import urlopen
+else:
+       urlopen = request.urlopen
+
+
+CACHEVAR = 'WAFCACHE_PACKAGE'
+
@conf
def get_package_cache_dir(self):
	"""Return (and create) the node of the package cache directory.

	The location is taken from the WAFCACHE_PACKAGE environment variable,
	then from the configuration environment, and defaults to
	'.wafcache_package' under the source directory.
	"""
	# bugfix: the original read 'conf.environ', but 'conf' here is the
	# decorator imported from waflib.Configure (a plain function with no
	# 'environ' attribute), so the method raised AttributeError whenever
	# it ran; the configuration context is 'self'
	if CACHEVAR in self.environ:
		cache = self.root.make_node(self.environ[CACHEVAR])
	elif self.env[CACHEVAR]:
		cache = self.root.make_node(self.env[CACHEVAR])
	else:
		cache = self.srcnode.make_node('.wafcache_package')
	cache.mkdir()
	return cache
+
@conf
def download_archive(self, src, dst):
	"""Download the file *src* from one of the mirrors listed in
	``env.PACKAGE_REPO`` and write it to the path *dst* (resolved from
	the filesystem root). Tries each repository in turn and calls
	``self.fatal`` when none of them works.
	"""
	for x in self.env.PACKAGE_REPO:
		url = '/'.join((x, src))
		try:
			web = urlopen(url)
			try:
				# reject non-200 answers when the object exposes getcode()
				if web.getcode() != 200:
					continue
			except AttributeError:
				pass
		except Exception:
			# on python3 urlopen throws an exception
			# python 2.3 does not have getcode and throws an exception to fail
			continue
		else:
			tmp = self.root.make_node(dst)
			tmp.write(web.read())
			Logs.warn('Downloaded %s from %s' % (tmp.abspath(), url))
			break
	else:
		# for-else: reached only when no mirror delivered the file
		self.fatal('Could not get the package %s' % src)
+
@conf
def load_packages(self):
	"""Entry point for package retrieval (work in progress).

	Currently only ensures that the package cache directory exists.
	"""
	self.get_package_cache_dir()
	# read the dependencies, get the archives, ..
diff --git a/third_party/waf/waflib/extras/parallel_debug.py b/third_party/waf/waflib/extras/parallel_debug.py
new file mode 100644 (file)
index 0000000..9419125
--- /dev/null
@@ -0,0 +1,441 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2007-2010 (ita)
+
+"""
+Debugging helper for parallel compilation, outputs
+a file named pdebug.svg in the source directory::
+
+       def options(opt):
+               opt.load('parallel_debug')
+       def build(bld):
+               ...
+"""
+
+import time, sys, re
+try: from Queue import Queue
+except: from queue import Queue
+from waflib import Runner, Options, Utils, Task, Logs, Errors
+
+#import random
+#random.seed(100)
+
# template of the final svg file; the ${...} placeholders are expanded by
# the function produced by compile_template() below
SVG_TEMPLATE = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.0//EN" "http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.0"
   x="${project.x}" y="${project.y}" width="${project.width}" height="${project.height}" id="svg602" xml:space="preserve">

<style type='text/css' media='screen'>
	g.over rect { stroke:#FF0000; fill-opacity:0.4 }
</style>

<script type='text/javascript'><![CDATA[
var svg  = document.getElementsByTagName('svg')[0];

svg.addEventListener('mouseover', function(e) {
	var g = e.target.parentNode;
	var x = document.getElementById('r_' + g.id);
	if (x) {
		g.setAttribute('class', g.getAttribute('class') + ' over');
		x.setAttribute('class', x.getAttribute('class') + ' over');
		showInfo(e, g.id);
	}
}, false);

svg.addEventListener('mouseout', function(e) {
		var g = e.target.parentNode;
		var x = document.getElementById('r_' + g.id);
		if (x) {
			g.setAttribute('class', g.getAttribute('class').replace(' over', ''));
			x.setAttribute('class', x.getAttribute('class').replace(' over', ''));
			hideInfo(e);
		}
}, false);

function showInfo(evt, txt) {
	tooltip = document.getElementById('tooltip');

	var t = document.getElementById('tooltiptext');
	t.firstChild.data = txt;

	var x = evt.clientX + 9;
	if (x > 250) { x -= t.getComputedTextLength() + 16; }
	var y = evt.clientY + 20;
	tooltip.setAttribute("transform", "translate(" + x + "," + y + ")");
	tooltip.setAttributeNS(null, "visibility", "visible");

	var r = document.getElementById('tooltiprect');
	r.setAttribute('width', t.getComputedTextLength() + 6);
}

function hideInfo(evt) {
	var tooltip = document.getElementById('tooltip');
	tooltip.setAttributeNS(null,"visibility","hidden");
}
]]></script>

<!-- inkscape requires a big rectangle or it will not export the pictures properly -->
<rect
   x='${project.x}' y='${project.y}' width='${project.width}' height='${project.height}'
   style="font-size:10;fill:#ffffff;fill-opacity:0.01;fill-rule:evenodd;stroke:#ffffff;"
   />

${if project.title}
  <text x="${project.title_x}" y="${project.title_y}"
    style="font-size:15px; text-anchor:middle; font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans">${project.title}</text>
${endif}


${for cls in project.groups}
  <g id='${cls.classname}'>
    ${for rect in cls.rects}
    <rect x='${rect.x}' y='${rect.y}' width='${rect.width}' height='${rect.height}' style="font-size:10;fill:${rect.color};fill-rule:evenodd;stroke:#000000;stroke-width:0.4;" />
    ${endfor}
  </g>
${endfor}

${for info in project.infos}
  <g id='r_${info.classname}'>
   <rect x='${info.x}' y='${info.y}' width='${info.width}' height='${info.height}' style="font-size:10;fill:${info.color};fill-rule:evenodd;stroke:#000000;stroke-width:0.4;" />
   <text x="${info.text_x}" y="${info.text_y}"
	style="font-size:12px;font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans"
   >${info.text}</text>
  </g>
${endfor}

  <g transform="translate(0,0)" visibility="hidden" id="tooltip">
	<rect id="tooltiprect" y="-15" x="-3" width="1" height="20" style="stroke:black;fill:#edefc2;stroke-width:1"/>
	<text id="tooltiptext" style="font-family:Arial; font-size:12;fill:black;" />
  </g>

</svg>
"""

# skeleton of the generated rendering function; %s receives the statements
# produced by compile_template()
COMPILE_TEMPLATE = '''def f(project):
	lst = []
	def xml_escape(value):
		return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")

	%s
	return ''.join(lst)
'''
# matches backslashes, $$ escapes and ${code} substitutions in the template
reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<code>[^}]*?)\})", re.M)
def compile_template(line):
	"""Compile a template string into a rendering function.

	``${code}`` placeholders are either control statements (``if``/``for``/
	``else``/``elif``/``endif``/``endfor``), raw python (``py:``),
	xml-escaped output (``xml:``), or plain expressions whose ``str()``
	value is appended to the output. Returns the function created by
	``Task.funex``.
	"""
	extr = []
	def repl(match):
		g = match.group
		if g('dollar'): return "$"
		elif g('backslash'):
			return "\\"
		elif g('subst'):
			extr.append(g('code'))
			return "<<|@|>>"
		return None

	line2 = reg_act.sub(repl, line)
	params = line2.split('<<|@|>>')
	assert(extr)

	indent = 0
	buf = []
	# fix: removed a dead `app = buf.append` binding that was immediately
	# shadowed by the function definition below
	def app(txt):
		buf.append(indent * '\t' + txt)

	for x in range(len(extr)):
		# literal text preceding the placeholder
		if params[x]:
			app("lst.append(%r)" % params[x])

		f = extr[x]
		if f.startswith('if') or f.startswith('for'):
			app(f + ':')
			indent += 1
		elif f.startswith('py:'):
			app(f[3:])
		elif f.startswith('endif') or f.startswith('endfor'):
			indent -= 1
		elif f.startswith('else') or f.startswith('elif'):
			indent -= 1
			app(f + ':')
			indent += 1
		elif f.startswith('xml:'):
			app('lst.append(xml_escape(%s))' % f[4:])
		else:
			#app('lst.append((%s) or "cannot find %s")' % (f, f))
			app('lst.append(str(%s))' % f)

	if extr:
		# trailing literal text after the last placeholder
		if params[-1]:
			app("lst.append(%r)" % params[-1])

	fun = COMPILE_TEMPLATE % "\n\t".join(buf)
	# uncomment the following to debug the template
	#for i, x in enumerate(fun.splitlines()):
	#	print i, x
	return Task.funex(fun)
+
+# red   #ff4d4d
+# green #4da74d
+# lila  #a751ff
+
# svg color codes for the waf task color names
color2code = {
	'GREEN'  : '#4da74d',
	'YELLOW' : '#fefe44',
	'PINK'   : '#a751ff',
	'RED'    : '#cc1d1d',
	'BLUE'   : '#6687bb',
	'CYAN'   : '#34e2e2',
}

mp = {} # optional user overrides: task/color name -> svg color
info = [] # list of (text,color)
+
def map_to_color(name):
	"""Return the svg color for a task class name, defaulting to red."""
	try:
		return mp[name]
	except KeyError:
		pass
	cls = Task.classes.get(name)
	if cls is None:
		return color2code['RED']
	color = cls.color
	if color in mp:
		return mp[color]
	return color2code.get(color, color2code['RED'])
+
def process(self):
	"""Replacement for Task.TaskBase.process that records start/stop
	events (thread id, task, timestamp) for the diagram, then performs
	the normal execute/post_run sequence.
	"""
	m = self.master
	if m.stop:
		m.out.put(self)
		return

	# record the start event for this thread
	self.master.set_running(1, id(Utils.threading.currentThread()), self)

	# remove the task signature immediately before it is executed
	# in case of failure the task will be executed again
	try:
		del self.generator.bld.task_sigs[self.uid()]
	except:
		pass

	try:
		self.generator.bld.returned_tasks.append(self)
		self.log_display(self.generator.bld)
		ret = self.run()
	except Exception:
		self.err_msg = Utils.ex_stack()
		self.hasrun = Task.EXCEPTION

		# TODO cleanup
		m.error_handler(self)
		m.out.put(self)
		return

	if ret:
		# non-zero exit status from the command
		self.err_code = ret
		self.hasrun = Task.CRASHED
	else:
		try:
			self.post_run()
		except Errors.WafError:
			pass
		except Exception:
			self.err_msg = Utils.ex_stack()
			self.hasrun = Task.EXCEPTION
		else:
			self.hasrun = Task.SUCCESS
	if self.hasrun != Task.SUCCESS:
		m.error_handler(self)

	# record the stop event before handing the task back to the master
	self.master.set_running(-1, id(Utils.threading.currentThread()), self)
	m.out.put(self)
# keep the original implementation around and install the traced one
Task.TaskBase.process_back = Task.TaskBase.process
Task.TaskBase.process = process
+
# wrap Runner.Parallel.start to collect the timing data and emit the svg
old_start = Runner.Parallel.start
def do_start(self):
	"""Run the normal scheduler, then build pdebug.svg from the recorded
	events (only when something was actually built).
	"""
	try:
		Options.options.dband
	except AttributeError:
		# the options() hook of this module was not loaded
		self.bld.fatal('use def options(opt): opt.load("parallel_debug")!')

	self.taskinfo = Queue()
	old_start(self)
	if self.dirty:
		make_picture(self)
Runner.Parallel.start = do_start
+
def set_running(self, by, i, tsk):
	# record a start (by=1) or stop (by=-1) event:
	# (thread id, task id, timestamp, class name, processed, count, delta)
	self.taskinfo.put( (i, id(tsk), time.time(), tsk.__class__.__name__, self.processed, self.count, by)  )
Runner.Parallel.set_running = set_running
+
def name2class(name):
	"""Sanitize a task class name for use as an svg/css identifier
	(spaces and dots become underscores)."""
	return name.translate(str.maketrans(' .', '__'))
+
def make_picture(producer):
	"""Consume the recorded taskinfo events and write pdebug.dat (raw data)
	and pdebug.svg (diagram) in the project directory.
	"""
	# first, cast the parameters
	if not hasattr(producer.bld, 'path'):
		return

	# drain the event queue: [thread, task, time, class, processed, count, delta]
	tmp = []
	try:
		while True:
			tup = producer.taskinfo.get(False)
			tmp.append(list(tup))
	except:
		pass

	try:
		ini = float(tmp[0][2])
	except:
		# no events recorded, nothing to draw
		return

	# build the caption entries (one per task class) the first time only
	if not info:
		seen = []
		for x in tmp:
			name = x[3]
			if not name in seen:
				seen.append(name)
			else:
				continue

			info.append((name, map_to_color(name)))
		info.sort(key=lambda x: x[0])

	# dump the raw events, with times made relative to the first one
	thread_count = 0
	acc = []
	for x in tmp:
		thread_count += x[6]
		acc.append("%d %d %f %r %d %d %d" % (x[0], x[1], x[2] - ini, x[3], x[4], x[5], thread_count))

	data_node = producer.bld.path.make_node('pdebug.dat')
	data_node.write('\n'.join(acc))

	tmp = [lst[:2] + [float(lst[2]) - ini] + lst[3:] for lst in tmp]

	# renumber the thread ids into 0..n-1
	st = {}
	for l in tmp:
		if not l[0] in st:
			st[l[0]] = len(st.keys())
	tmp = [  [st[lst[0]]] + lst[1:] for lst in tmp ]
	THREAD_AMOUNT = len(st.keys())

	# renumber the task ids the same way
	st = {}
	for l in tmp:
		if not l[1] in st:
			st[l[1]] = len(st.keys())
	tmp = [  [lst[0]] + [st[lst[1]]] + lst[2:] for lst in tmp ]


	BAND = Options.options.dband

	# pair each start event with its stop event -> one rectangle per task
	seen = {}
	acc = []
	for x in range(len(tmp)):
		line = tmp[x]
		id = line[1]

		if id in seen:
			continue
		seen[id] = True

		begin = line[2]
		thread_id = line[0]
		for y in range(x + 1, len(tmp)):
			line = tmp[y]
			if line[1] == id:
				end = line[2]
				#print id, thread_id, begin, end
				#acc.append(  ( 10*thread_id, 10*(thread_id+1), 10*begin, 10*end ) )
				acc.append( (BAND * begin, BAND*thread_id, BAND*end - BAND*begin, BAND, line[3]) )
				break

	# horizontal scale: either fit the longest task or a user-given maximum
	if Options.options.dmaxtime < 0.1:
		gwidth = 1
		for x in tmp:
			m = BAND * x[2]
			if m > gwidth:
				gwidth = m
	else:
		gwidth = BAND * Options.options.dmaxtime

	ratio = float(Options.options.dwidth) / gwidth
	gwidth = Options.options.dwidth
	gheight = BAND * (THREAD_AMOUNT + len(info) + 1.5)


	# simple data model for our template
	class tobject(object):
		pass

	model = tobject()
	model.x = 0
	model.y = 0
	model.width = gwidth + 4
	model.height = gheight + 4

	model.title = Options.options.dtitle
	model.title_x = gwidth / 2
	model.title_y = gheight + - 5

	# group the rectangles by task class name
	groups = {}
	for (x, y, w, h, clsname) in acc:
		try:
			groups[clsname].append((x, y, w, h))
		except:
			groups[clsname] = [(x, y, w, h)]

	# groups of rectangles (else js highlighting is slow)
	model.groups = []
	for cls in groups:
		g = tobject()
		model.groups.append(g)
		g.classname = name2class(cls)
		g.rects = []
		for (x, y, w, h) in groups[cls]:
			r = tobject()
			g.rects.append(r)
			r.x = 2 + x * ratio
			r.y = 2 + y
			r.width = w * ratio
			r.height = h
			r.color = map_to_color(cls)

	cnt = THREAD_AMOUNT

	# caption
	model.infos = []
	for (text, color) in info:
		inf = tobject()
		model.infos.append(inf)
		inf.classname = name2class(text)
		inf.x = 2 + BAND
		inf.y = 5 + (cnt + 0.5) * BAND
		inf.width = BAND/2
		inf.height = BAND/2
		inf.color = color

		inf.text = text
		inf.text_x = 2 + 2 * BAND
		inf.text_y = 5 + (cnt + 0.5) * BAND + 10

		cnt += 1

	# write the file...
	template1 = compile_template(SVG_TEMPLATE)
	txt = template1(model)

	node = producer.bld.path.make_node('pdebug.svg')
	node.write(txt)
	Logs.warn('Created the diagram %r' % node.abspath())
+
def options(opt):
	"""Command-line options controlling the svg diagram output."""
	opt.add_option('--dtitle', action='store', default='Parallel build representation for %r' % ' '.join(sys.argv),
		help='title for the svg diagram', dest='dtitle')
	opt.add_option('--dwidth', action='store', type='int', help='diagram width', default=800, dest='dwidth')
	opt.add_option('--dtime', action='store', type='float', help='recording interval in seconds', default=0.009, dest='dtime')
	opt.add_option('--dband', action='store', type='int', help='band width', default=22, dest='dband')
	opt.add_option('--dmaxtime', action='store', type='float', help='maximum time, for drawing fair comparisons', default=0, dest='dmaxtime')
diff --git a/third_party/waf/waflib/extras/pch.py b/third_party/waf/waflib/extras/pch.py
new file mode 100644 (file)
index 0000000..8b107ac
--- /dev/null
@@ -0,0 +1,148 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Alexander Afanasyev (UCLA), 2014
+
+"""
+Enable precompiled C++ header support (currently only clang++ and g++ are supported)
+
+To use this tool, wscript should look like:
+
+       def options(opt):
+               opt.load('pch')
+               # This will add `--with-pch` configure option.
+               # Unless --with-pch during configure stage specified, the precompiled header support is disabled
+
+       def configure(conf):
+               conf.load('pch')
+               # this will set conf.env.WITH_PCH if --with-pch is specified and the supported compiler is used
+               # Unless conf.env.WITH_PCH is set, the precompiled header support is disabled
+
+       def build(bld):
+               bld(features='cxx pch',
+                       target='precompiled-headers',
+                       name='precompiled-headers',
+                       headers='a.h b.h c.h', # headers to pre-compile into `precompiled-headers`
+
+                       # Other parameters to compile precompiled headers
+                       # includes=...,
+                       # export_includes=...,
+                       # use=...,
+                       # ...
+
+                       # Exported parameters will be propagated even if precompiled headers are disabled
+               )
+
+               bld(
+                       target='test',
+                       features='cxx cxxprogram',
+                       source='a.cpp b.cpp d.cpp main.cpp',
+                       use='precompiled-headers',
+               )
+
+               # or
+
+               bld(
+                       target='test',
+                       features='pch cxx cxxprogram',
+                       source='a.cpp b.cpp d.cpp main.cpp',
+                       headers='a.h b.h c.h',
+               )
+
+Note that precompiled header must have multiple inclusion guards.  If the guards are missing, any benefit of precompiled header will be voided and compilation may fail in some cases.
+"""
+
+import os
+from waflib import Task, TaskGen, Utils
+from waflib.Tools import c_preproc, cxx
+
+
# compiler name -> [include flag(s), precompiled header extension, language flags]
PCH_COMPILER_OPTIONS = {
	'clang++': [['-include'], '.pch', ['-x', 'c++-header']],
	'g++':     [['-include'], '.gch', ['-x', 'c++-header']],
}
+
+
def options(opt):
	# NOTE(review): the module docstring refers to `--with-pch`, but the flag
	# registered here is `--without-pch` (dest with_pch, default True) —
	# confirm which naming is intended
	opt.add_option('--without-pch', action='store_false', default=True, dest='with_pch', help='''Try to use precompiled header to speed up compilation (only g++ and clang++)''')
+
def configure(conf):
	"""Enable precompiled header support when requested and when the
	configured C++ compiler is supported (g++ or clang++).

	Sets ``conf.env.WITH_PCH`` and the CXXPCH_* variables accordingly.
	"""
	# idiom: membership test on the dict directly instead of .keys()
	if conf.options.with_pch and conf.env['COMPILER_CXX'] in PCH_COMPILER_OPTIONS:
		conf.env.WITH_PCH = True
		flags = PCH_COMPILER_OPTIONS[conf.env['COMPILER_CXX']]
		# [include flag(s), pch extension, language flags]
		conf.env.CXXPCH_F, conf.env.CXXPCH_EXT, conf.env.CXXPCH_FLAGS = flags
+
+
@TaskGen.feature('pch')
@TaskGen.before('process_source')
def apply_pch(self):
	"""Create the precompiled-header task for a 'pch' task generator.

	The task is registered in ``bld.pch_tasks`` under the generator name
	so other task generators can pick it up through 'use'.
	"""
	if not self.env.WITH_PCH:
		return

	if getattr(self.bld, 'pch_tasks', None) is None:
		self.bld.pch_tasks = {}

	if getattr(self, 'headers', None) is None:
		return

	self.headers = self.to_nodes(self.headers)

	# refuse two pch task generators with the same name; simplified from a
	# try/except that kept an unused `task` binding
	if getattr(self, 'name', None) and self.name in self.bld.pch_tasks:
		self.bld.fatal("Duplicated 'pch' task with name %r" % self.name)

	out = '%s.%d%s' % (self.target, self.idx, self.env['CXXPCH_EXT'])
	out = self.path.find_or_declare(out)
	task = self.create_task('gchx', self.headers, out)

	# target should be an absolute path of `out`, but without precompiled header extension
	task.target = out.abspath()[:-len(out.suffix())]

	self.pch_task = task
	if getattr(self, 'name', None):
		self.bld.pch_tasks[self.name] = task
+
@TaskGen.feature('cxx')
@TaskGen.after_method('process_source', 'propagate_uselib_vars')
def add_pch(self):
	"""Add the precompiled-header flags to the C++ compilation tasks.

	The pch task is looked up first on the task generator itself, then
	through the names listed in 'use' (note: the LAST matching 'use'
	entry wins, as the loop below does not break on the first hit).
	"""
	if not (self.env['WITH_PCH'] and getattr(self, 'use', None) and getattr(self, 'compiled_tasks', None) and getattr(self.bld, 'pch_tasks', None)):
		return

	pch = None
	# find pch task, if any

	if getattr(self, 'pch_task', None):
		pch = self.pch_task
	else:
		for use in Utils.to_list(self.use):
			try:
				pch = self.bld.pch_tasks[use]
			except KeyError:
				pass

	if pch:
		# e.g. -include <target> for g++/clang++
		for x in self.compiled_tasks:
			x.env.append_value('CXXFLAGS', self.env['CXXPCH_F'] + [pch.target])
+
class gchx(Task.Task):
	# task compiling the precompiled header itself
	run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CPPFLAGS} ${CXXPCH_FLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXXPCH_F:SRC} ${CXX_SRC_F}${SRC[0].abspath()} ${CXX_TGT_F}${TGT[0].abspath()}'
	scan    = c_preproc.scan
	color   = 'BLUE'
	ext_out=['.h']

	def runnable_status(self):
		# clang rejects a .pch that is older than any of its dependencies,
		# so force a rebuild in that case even when the signatures say skip
		try:
			node_deps = self.generator.bld.node_deps[self.uid()]
		except KeyError:
			node_deps = []
		ret = Task.Task.runnable_status(self)
		if ret == Task.SKIP_ME and self.env.CXX_NAME == 'clang':
			t = os.stat(self.outputs[0].abspath()).st_mtime
			for n in self.inputs + node_deps:
				if os.stat(n.abspath()).st_mtime > t:
					return Task.RUN_ME
		return ret
diff --git a/third_party/waf/waflib/extras/pep8.py b/third_party/waf/waflib/extras/pep8.py
new file mode 100644 (file)
index 0000000..3709d9b
--- /dev/null
@@ -0,0 +1,106 @@
+#! /usr/bin/env python
+# encoding: utf-8
+#
+# written by Sylvain Rouquette, 2011
+
+'''
+Install pep8 module:
+$ easy_install pep8
+       or
+$ pip install pep8
+
+To add the pep8 tool to the waf file:
+$ ./waf-light --tools=compat15,pep8
+       or, if you have waf >= 1.6.2
+$ ./waf update --files=pep8
+
+
+Then add this to your wscript:
+
+[at]extension('.py', 'wscript')
+def run_pep8(self, node):
+       self.create_task('Pep8', node)
+
+'''
+
+import threading
+from waflib import Task, Options
+
+pep8 = __import__('pep8')
+
+
+class Pep8(Task.Task):
+       # Waf task that runs the external 'pep8' checker on one Python file.
+       color = 'PINK'
+       # pep8.options is module-global state shared by all tasks; this lock
+       # ensures it is initialized by exactly one task
+       lock = threading.Lock()
+
+       def check_options(self):
+               # Populate the global pep8.options from waf's command-line
+               # options. Idempotent: returns immediately once set.
+               if pep8.options:
+                       return
+               pep8.options = Options.options
+               pep8.options.prog = 'pep8'
+               excl = pep8.options.exclude.split(',')
+               pep8.options.exclude = [s.rstrip('/') for s in excl]
+               if pep8.options.filename:
+                       pep8.options.filename = pep8.options.filename.split(',')
+               if pep8.options.select:
+                       pep8.options.select = pep8.options.select.split(',')
+               else:
+                       pep8.options.select = []
+               if pep8.options.ignore:
+                       pep8.options.ignore = pep8.options.ignore.split(',')
+               elif pep8.options.select:
+                       # Ignore all checks which are not explicitly selected
+                       pep8.options.ignore = ['']
+               elif pep8.options.testsuite or pep8.options.doctest:
+                       # For doctest and testsuite, all checks are required
+                       pep8.options.ignore = []
+               else:
+                       # The default choice: ignore controversial checks
+                       pep8.options.ignore = pep8.DEFAULT_IGNORE.split(',')
+               pep8.options.physical_checks = pep8.find_checks('physical_line')
+               pep8.options.logical_checks = pep8.find_checks('logical_line')
+               pep8.options.counters = dict.fromkeys(pep8.BENCHMARK_KEYS, 0)
+               pep8.options.messages = {}
+
+       def run(self):
+               # Check the input file; 0 on success, -1 if any issue was found.
+               with Pep8.lock:
+                       self.check_options()
+               pep8.input_file(self.inputs[0].abspath())
+               return 0 if not pep8.get_count() else -1
+
+
+def options(opt):
+       # Mirror the pep8 command-line interface as waf options so that
+       # Pep8.check_options can copy them into pep8.options verbatim.
+       opt.add_option('-q', '--quiet', default=0, action='count',
+                                  help="report only file names, or nothing with -qq")
+       opt.add_option('-r', '--repeat', action='store_true',
+                                  help="show all occurrences of the same error")
+       opt.add_option('--exclude', metavar='patterns',
+                                  default=pep8.DEFAULT_EXCLUDE,
+                                  help="exclude files or directories which match these "
+                                  "comma separated patterns (default: %s)" %
+                                  pep8.DEFAULT_EXCLUDE,
+                                  dest='exclude')
+       opt.add_option('--filename', metavar='patterns', default='*.py',
+                                  help="when parsing directories, only check filenames "
+                                  "matching these comma separated patterns (default: "
+                                  "*.py)")
+       opt.add_option('--select', metavar='errors', default='',
+                                  help="select errors and warnings (e.g. E,W6)")
+       opt.add_option('--ignore', metavar='errors', default='',
+                                  help="skip errors and warnings (e.g. E4,W)")
+       opt.add_option('--show-source', action='store_true',
+                                  help="show source code for each error")
+       opt.add_option('--show-pep8', action='store_true',
+                                  help="show text of PEP 8 for each error")
+       opt.add_option('--statistics', action='store_true',
+                                  help="count errors and warnings")
+       opt.add_option('--count', action='store_true',
+                                  help="print total number of errors and warnings "
+                                  "to standard error and set exit code to 1 if "
+                                  "total is not null")
+       opt.add_option('--benchmark', action='store_true',
+                                  help="measure processing speed")
+       opt.add_option('--testsuite', metavar='dir',
+                                  help="run regression tests from dir")
+       opt.add_option('--doctest', action='store_true',
+                                  help="run doctest on myself")
diff --git a/third_party/waf/waflib/extras/prefork.py b/third_party/waf/waflib/extras/prefork.py
new file mode 100755 (executable)
index 0000000..b912c5b
--- /dev/null
@@ -0,0 +1,401 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2015 (ita)
+
+"""
+Execute commands through pre-forked servers. This tool creates as many servers as build threads.
+On a benchmark executed on Linux Kubuntu 14, 8 virtual cores and SSD drive::
+
+    ./genbench.py /tmp/build 200 100 15 5
+    waf clean build -j24
+    # no prefork: 2m7.179s
+    # prefork:    0m55.400s
+
+To use::
+
+    def options(opt):
+        # optional, will spawn 40 servers early
+        opt.load('prefork')
+
+    def build(bld):
+        bld.load('prefork')
+        ...
+        more code
+
+The servers and the build process are using a shared nonce to prevent undesirable external connections.
+"""
+
+import os, re, socket, threading, sys, subprocess, time, atexit, traceback, random, signal
+try:
+       import SocketServer
+except ImportError:
+       import socketserver as SocketServer
+try:
+       from queue import Queue
+except ImportError:
+       from Queue import Queue
+try:
+       import cPickle
+except ImportError:
+       import pickle as cPickle
+
+SHARED_KEY = None
+HEADER_SIZE = 64
+
+REQ = 'REQ'
+RES = 'RES'
+BYE = 'BYE'
+
+def make_header(params, cookie=''):
+       # Build a fixed-size (HEADER_SIZE bytes) protocol header: the params
+       # comma-joined and space-padded, with the shared-key cookie appended
+       # at the end. Encoded to bytes on Python 3.
+       header = ','.join(params)
+       header = header.ljust(HEADER_SIZE - len(cookie))
+       assert(len(header) == HEADER_SIZE - len(cookie))
+       header = header + cookie
+       if sys.hexversion > 0x3000000:
+               header = header.encode('iso8859-1')
+       return header
+
+def safe_compare(x, y):
+       # Constant-time string comparison (avoids timing attacks on the shared
+       # key): XOR every character pair and OR the results together.
+       # NOTE(review): the local name 'sum' shadows the builtin inside this
+       # function; compares only up to the length of the shorter argument.
+       sum = 0
+       for (a, b) in zip(x, y):
+               sum |= ord(a) ^ ord(b)
+       return sum == 0
+
+re_valid_query = re.compile('^[a-zA-Z0-9_, ]+$')
+class req(SocketServer.StreamRequestHandler):
+       # Server-side handler for one client connection: authenticates each
+       # fixed-size header against the shared key, then runs the requested
+       # subprocess and streams back (returncode, stdout, stderr, exception).
+       def handle(self):
+               # Serve commands until the client disconnects or sends BYE.
+               try:
+                       while self.process_command():
+                               pass
+               except KeyboardInterrupt:
+                       return
+               except Exception as e:
+                       print(e)
+
+       def send_response(self, ret, out, err, exc):
+               # Reply with a RES header; the pickled (out, err, exc) payload
+               # is only attached when there is something to report.
+               if out or err or exc:
+                       data = (out, err, exc)
+                       data = cPickle.dumps(data, -1)
+               else:
+                       data = ''
+
+               params = [RES, str(ret), str(len(data))]
+
+               # no need for the cookie in the response
+               self.wfile.write(make_header(params))
+               if data:
+                       self.wfile.write(data)
+               self.wfile.flush()
+
+       def process_command(self):
+               # Read and dispatch one request header; returns a truthy value
+               # to keep the connection open, None when the client is gone.
+               query = self.rfile.read(HEADER_SIZE)
+               if not query:
+                       return None
+               #print(len(query))
+               assert(len(query) == HEADER_SIZE)
+               if sys.hexversion > 0x3000000:
+                       query = query.decode('iso8859-1')
+
+               # magic cookie
+               # the last 20 bytes of the header carry the shared key
+               key = query[-20:]
+               if not safe_compare(key, SHARED_KEY):
+                       print('%r %r' % (key, SHARED_KEY))
+                       self.send_response(-1, '', '', 'Invalid key given!')
+                       return 'meh'
+
+               query = query[:-20]
+               #print "%r" % query
+               if not re_valid_query.match(query):
+                       self.send_response(-1, '', '', 'Invalid query %r' % query)
+                       raise ValueError('Invalid query %r' % query)
+
+               query = query.strip().split(',')
+
+               if query[0] == REQ:
+                       self.run_command(query[1:])
+               elif query[0] == BYE:
+                       raise ValueError('Exit')
+               else:
+                       raise ValueError('Invalid query %r' % query)
+               return 'ok'
+
+       def run_command(self, query):
+               # Read the pickled keyword dict (its size came in the header),
+               # execute the command with subprocess and send the result back.
+               size = int(query[0])
+               data = self.rfile.read(size)
+               assert(len(data) == size)
+               kw = cPickle.loads(data)
+
+               # run command
+               ret = out = err = exc = None
+               cmd = kw['cmd']
+               del kw['cmd']
+               #print(cmd)
+
+               try:
+                       # capture output only when the caller asked for it
+                       if kw['stdout'] or kw['stderr']:
+                               p = subprocess.Popen(cmd, **kw)
+                               (out, err) = p.communicate()
+                               ret = p.returncode
+                       else:
+                               ret = subprocess.Popen(cmd, **kw).wait()
+               except KeyboardInterrupt:
+                       raise
+               except Exception as e:
+                       ret = -1
+                       exc = str(e) + traceback.format_exc()
+
+               self.send_response(ret, out, err, exc)
+
+def create_server(conn, cls):
+       # Entry point of a forked server process: pick up the shared key, start
+       # a watchdog that kills this process when the parent build exits, then
+       # serve requests forever on a TCP socket.
+       # child processes do not need the key, so we remove it from the OS environment
+       global SHARED_KEY
+       SHARED_KEY = os.environ['SHARED_KEY']
+       os.environ['SHARED_KEY'] = ''
+
+       ppid = int(os.environ['PREFORKPID'])
+       def reap():
+               # wait for the parent process to die, then exit hard; on
+               # non-unix systems waitpid is used, elsewhere poll with kill(0)
+               if os.sep != '/':
+                       os.waitpid(ppid, 0)
+               else:
+                       while 1:
+                               try:
+                                       os.kill(ppid, 0)
+                               except OSError:
+                                       break
+                               else:
+                                       time.sleep(1)
+               os.kill(os.getpid(), signal.SIGKILL)
+       t = threading.Thread(target=reap)
+       t.setDaemon(True)
+       t.start()
+
+       server = SocketServer.TCPServer(conn, req)
+       # print the OS-assigned port so the parent can connect to us
+       print(server.server_address[1])
+       sys.stdout.flush()
+       #server.timeout = 6000 # seconds
+       server.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+       try:
+               server.serve_forever(poll_interval=0.001)
+       except KeyboardInterrupt:
+               pass
+
+if __name__ == '__main__':
+       # executed as a child server process: bind to an ephemeral local port
+       conn = ("127.0.0.1", 0)
+       #print("listening - %r %r\n" % conn)
+       create_server(conn, req)
+else:
+
+       from waflib import Logs, Utils, Runner, Errors, Options
+
+       def init_task_pool(self):
+               # Monkey-patched over Runner.Parallel: build the consumer pool
+               # and tag each consumer thread with an 'idx' used later by
+               # exec_command to select its dedicated server connection.
+               # lazy creation, and set a common pool for all task consumers
+               pool = self.pool = []
+               for i in range(self.numjobs):
+                       consumer = Runner.get_pool()
+                       pool.append(consumer)
+                       consumer.idx = i
+               self.ready = Queue(0)
+               def setq(consumer):
+                       # runs inside the consumer thread to record its index
+                       consumer.ready = self.ready
+                       try:
+                               threading.current_thread().idx = consumer.idx
+                       except Exception as e:
+                               print(e)
+               for x in pool:
+                       x.ready.put(setq)
+               return pool
+       Runner.Parallel.init_task_pool = init_task_pool
+
+       def make_server(bld, idx):
+               # Spawn one server process re-executing this very file; its
+               # stdout carries the port number it bound to.
+               cmd = [sys.executable, os.path.abspath(__file__)]
+               proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+               return proc
+
+       def make_conn(bld, srv):
+               # Open a TCP connection to the given server process (srv.port
+               # is set by init_servers after reading the server's stdout).
+               port = srv.port
+               conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+               conn.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+               conn.connect(('127.0.0.1', port))
+               return conn
+
+
+       # module-level registries of spawned server processes and their sockets
+       SERVERS = []
+       CONNS = []
+       def close_all():
+               # atexit hook: close every connection and kill every server so
+               # no orphan processes survive the build.
+               global SERVERS, CONNS
+               while CONNS:
+                       conn = CONNS.pop()
+                       try:
+                               conn.close()
+                       except:
+                               pass
+               while SERVERS:
+                       srv = SERVERS.pop()
+                       try:
+                               srv.kill()
+                       except:
+                               pass
+       atexit.register(close_all)
+
+       def put_data(conn, data):
+               # Send all of 'data' on the socket, looping over partial sends.
+               cnt = 0
+               while cnt < len(data):
+                       sent = conn.send(data[cnt:])
+                       if sent == 0:
+                               raise RuntimeError('connection ended')
+                       cnt += sent
+
+       def read_data(conn, siz):
+               # Receive exactly 'siz' bytes from the socket, looping over
+               # partial reads; raises if the peer closes early.
+               cnt = 0
+               buf = []
+               while cnt < siz:
+                       data = conn.recv(min(siz - cnt, 1024))
+                       if not data:
+                               raise RuntimeError('connection ended %r %r' % (cnt, siz))
+                       buf.append(data)
+                       cnt += len(data)
+               if sys.hexversion > 0x3000000:
+                       ret = ''.encode('iso8859-1').join(buf)
+               else:
+                       ret = ''.join(buf)
+               return ret
+
+       def exec_command(self, cmd, **kw):
+               # Replacement for BuildContext.exec_command: pickles the command
+               # and its keyword args and runs it on the pre-forked server
+               # bound to the current consumer thread. Falls back to the
+               # original implementation for custom stdout/stderr redirection.
+               if 'stdout' in kw:
+                       if kw['stdout'] not in (None, subprocess.PIPE):
+                               return self.exec_command_old(cmd, **kw)
+               elif 'stderr' in kw:
+                       if kw['stderr'] not in (None, subprocess.PIPE):
+                               return self.exec_command_old(cmd, **kw)
+
+               kw['shell'] = isinstance(cmd, str)
+               Logs.debug('runner: %r' % cmd)
+               Logs.debug('runner_env: kw=%s' % kw)
+
+               if self.logger:
+                       self.logger.info(cmd)
+
+               if 'stdout' not in kw:
+                       kw['stdout'] = subprocess.PIPE
+               if 'stderr' not in kw:
+                       kw['stderr'] = subprocess.PIPE
+
+               if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
+                       raise Errors.WafError("Program %s not found!" % cmd[0])
+
+               # each consumer thread owns one server connection, chosen by
+               # the idx set in init_task_pool
+               idx = threading.current_thread().idx
+               kw['cmd'] = cmd
+
+               # serialization..
+               #print("sub %r %r" % (idx, cmd))
+               #print("write to %r %r" % (idx, cmd))
+
+               data = cPickle.dumps(kw, -1)
+               params = [REQ, str(len(data))]
+               header = make_header(params, self.SHARED_KEY)
+
+               conn = CONNS[idx]
+
+               put_data(conn, header + data)
+               #put_data(conn, data)
+
+               #print("running %r %r" % (idx, cmd))
+               #print("read from %r %r" % (idx, cmd))
+
+               data = read_data(conn, HEADER_SIZE)
+               if sys.hexversion > 0x3000000:
+                       data = data.decode('iso8859-1')
+
+               #print("received %r" % data)
+               # response header layout: RES,<returncode>,<payload length>
+               lst = data.split(',')
+               ret = int(lst[1])
+               dlen = int(lst[2])
+
+               out = err = None
+               if dlen:
+                       data = read_data(conn, dlen)
+                       (out, err, exc) = cPickle.loads(data)
+                       if exc:
+                               raise Errors.WafError('Execution failure: %s' % exc)
+
+               if out:
+                       if not isinstance(out, str):
+                               out = out.decode(sys.stdout.encoding or 'iso8859-1')
+                       if self.logger:
+                               self.logger.debug('out: %s' % out)
+                       else:
+                               Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
+               if err:
+                       if not isinstance(err, str):
+                               err = err.decode(sys.stdout.encoding or 'iso8859-1')
+                       if self.logger:
+                               self.logger.error('err: %s' % err)
+                       else:
+                               Logs.info(err, extra={'stream':sys.stderr, 'c1': ''})
+
+               return ret
+
+       def init_key(ctx):
+               # Create (or reuse) the 20-character shared authentication key
+               # and export it plus our pid so forked servers can read them.
+               try:
+                       key = ctx.SHARED_KEY = os.environ['SHARED_KEY']
+               except KeyError:
+                       key = "".join([chr(random.SystemRandom().randint(40, 126)) for x in range(20)])
+                       os.environ['SHARED_KEY'] = ctx.SHARED_KEY = key
+
+               os.environ['PREFORKPID'] = str(os.getpid())
+               return key
+
+       def init_servers(ctx, maxval):
+               # Spawn up to 'maxval' server processes, then open one
+               # connection per server (retrying briefly while they start up).
+               while len(SERVERS) < maxval:
+                       i = len(SERVERS)
+                       srv = make_server(ctx, i)
+                       SERVERS.append(srv)
+               while len(CONNS) < maxval:
+                       i = len(CONNS)
+                       srv = SERVERS[i]
+
+                       # postpone the connection
+                       # the server prints its ephemeral port on stdout
+                       srv.port = int(srv.stdout.readline())
+
+                       conn = None
+                       for x in range(30):
+                               try:
+                                       conn = make_conn(ctx, srv)
+                                       break
+                               except socket.error:
+                                       time.sleep(0.01)
+                       if not conn:
+                               raise ValueError('Could not start the server!')
+                       # NOTE(review): the message below has a typo
+                       # ("it it not" should read "it is not")
+                       if srv.poll() is not None:
+                               Logs.warn('Looks like it it not our server process - concurrent builds are unsupported at this stage')
+                               raise ValueError('Could not start the server')
+                       CONNS.append(conn)
+
+       def init_smp(self):
+               # Optionally pin the build process to CPU 0 (--pin-process).
+               if not getattr(Options.options, 'smp', getattr(self, 'smp', None)):
+                       return
+               if Utils.unversioned_sys_platform() in ('freebsd',):
+                       pid = os.getpid()
+                       cmd = ['cpuset', '-l', '0', '-p', str(pid)]
+               elif Utils.unversioned_sys_platform() in ('linux',):
+                       pid = os.getpid()
+                       cmd = ['taskset', '-pc', '0', str(pid)]
+               # NOTE(review): 'cmd' is unbound on platforms other than
+               # freebsd/linux, so 'if cmd:' would raise NameError there
+               if cmd:
+                       self.cmd_and_log(cmd, quiet=0)
+
+       def options(opt):
+               # Loading the tool at option time spawns 40 servers early.
+               init_key(opt)
+               init_servers(opt, 40)
+               opt.add_option('--pin-process', action='store_true', dest='smp', default=False)
+
+       def build(bld):
+               # Install the prefork exec_command on the build context class,
+               # keeping the original reachable as exec_command_old.
+               if bld.cmd == 'clean':
+                       return
+
+               init_key(bld)
+               init_servers(bld, bld.jobs)
+               init_smp(bld)
+
+               bld.__class__.exec_command_old = bld.__class__.exec_command
+               bld.__class__.exec_command = exec_command
diff --git a/third_party/waf/waflib/extras/preforkjava.py b/third_party/waf/waflib/extras/preforkjava.py
new file mode 100644 (file)
index 0000000..e93461b
--- /dev/null
@@ -0,0 +1,236 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2015 (ita)
+
+# TODO: have the child process terminate if the parent is killed abruptly
+
+import os, socket, threading, sys, subprocess, time, atexit, random
+try:
+       from queue import Queue
+except ImportError:
+       from Queue import Queue
+
+import json as pickle
+
+SHARED_KEY = None
+HEADER_SIZE = 64
+
+REQ = 'REQ'
+RES = 'RES'
+BYE = 'BYE'
+
+def make_header(params, cookie=''):
+       # Build a fixed-size (HEADER_SIZE bytes) protocol header: the params
+       # comma-joined and space-padded, with the cookie appended at the end.
+       # Encoded to bytes on Python 3.
+       header = ','.join(params)
+       header = header.ljust(HEADER_SIZE - len(cookie))
+       assert(len(header) == HEADER_SIZE - len(cookie))
+       header = header + cookie
+       if sys.hexversion > 0x3000000:
+               header = header.encode('iso8859-1')
+       return header
+
+if 1:
+       from waflib import Logs, Utils, Runner, Errors, Options
+
+       def init_task_pool(self):
+               # Monkey-patched over Runner.Parallel: build the consumer pool
+               # and tag each consumer thread with an 'idx' used later by
+               # exec_command to select its connection.
+               # lazy creation, and set a common pool for all task consumers
+               pool = self.pool = []
+               for i in range(self.numjobs):
+                       consumer = Runner.get_pool()
+                       pool.append(consumer)
+                       consumer.idx = i
+               self.ready = Queue(0)
+               def setq(consumer):
+                       # runs inside the consumer thread to record its index
+                       consumer.ready = self.ready
+                       try:
+                               threading.current_thread().idx = consumer.idx
+                       except Exception as e:
+                               print(e)
+               for x in pool:
+                       x.ready.put(setq)
+               return pool
+       Runner.Parallel.init_task_pool = init_task_pool
+
+       def make_server(bld, idx):
+               # Launch the Java Prefork server with the minimal-json jar on
+               # its classpath; location/port/command are overridable through
+               # bld.preforkjava_* attributes.
+               top = getattr(bld, 'preforkjava_top', os.path.dirname(os.path.abspath('__file__')))
+               cp = getattr(bld, 'preforkjava_cp', os.path.join(top, 'minimal-json-0.9.3-SNAPSHOT.jar') + os.pathsep + top)
+
+               for x in cp.split(os.pathsep):
+                       if x and not os.path.exists(x):
+                               Logs.warn('Invalid classpath: %r' % cp)
+                               Logs.warn('Set for example bld.preforkjava_cp to /path/to/minimal-json:/path/to/Prefork.class/')
+
+               cwd = getattr(bld, 'preforkjava_cwd', top)
+               port = getattr(bld, 'preforkjava_port', 51200)
+               cmd = getattr(bld, 'preforkjava_cmd', 'java -cp %s%s Prefork %d' % (cp, os.pathsep, port))
+               proc = subprocess.Popen(cmd.split(), shell=False, cwd=cwd)
+               # remember the fixed port on the process object for make_conn
+               proc.port = port
+               return proc
+
+       def make_conn(bld, srv):
+               # Open a TCP connection to the Java server.
+               #port = PORT + idx
+               port = srv.port
+               conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+               conn.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+               conn.connect(('127.0.0.1', port))
+               return conn
+
+       # registries of spawned server processes and their sockets
+       SERVERS = []
+       CONNS = []
+       def close_all():
+               # atexit hook: kill the Java server process(es) on exit.
+               global SERVERS
+               while SERVERS:
+                       srv = SERVERS.pop()
+                       #pid = srv.pid
+                       try:
+                               srv.kill()
+                       except Exception:
+                               pass
+       atexit.register(close_all)
+
+       def put_data(conn, data):
+               # Send all of 'data' on the socket, looping over partial sends.
+               cnt = 0
+               while cnt < len(data):
+                       sent = conn.send(data[cnt:])
+                       if sent == 0:
+                               raise RuntimeError('connection ended')
+                       cnt += sent
+
+       def read_data(conn, siz):
+               # Receive exactly 'siz' bytes, looping over partial reads;
+               # raises if the peer closes early.
+               cnt = 0
+               buf = []
+               while cnt < siz:
+                       data = conn.recv(min(siz - cnt, 1024))
+                       if not data:
+                               raise RuntimeError('connection ended %r %r' % (cnt, siz))
+                       buf.append(data)
+                       cnt += len(data)
+               if sys.hexversion > 0x3000000:
+                       ret = ''.encode('iso8859-1').join(buf)
+               else:
+                       ret = ''.join(buf)
+               return ret
+
+       def exec_command(self, cmd, **kw):
+               # Replacement for BuildContext.exec_command: serializes the
+               # command as JSON (pickle is aliased to json in this module)
+               # and runs it on the Java server via the current thread's
+               # connection. Falls back for custom stdout/stderr redirection.
+               if 'stdout' in kw:
+                       if kw['stdout'] not in (None, subprocess.PIPE):
+                               return self.exec_command_old(cmd, **kw)
+               elif 'stderr' in kw:
+                       if kw['stderr'] not in (None, subprocess.PIPE):
+                               return self.exec_command_old(cmd, **kw)
+
+               kw['shell'] = isinstance(cmd, str)
+               Logs.debug('runner: %r' % cmd)
+               Logs.debug('runner_env: kw=%s' % kw)
+
+               if self.logger:
+                       self.logger.info(cmd)
+
+               if 'stdout' not in kw:
+                       kw['stdout'] = subprocess.PIPE
+               if 'stderr' not in kw:
+                       kw['stderr'] = subprocess.PIPE
+
+               if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
+                       raise Errors.WafError("Program %s not found!" % cmd[0])
+
+               # connection chosen by the idx set in init_task_pool
+               idx = threading.current_thread().idx
+               kw['cmd'] = cmd
+
+               data = pickle.dumps(kw)
+               params = [REQ, str(len(data))]
+               header = make_header(params, self.SHARED_KEY)
+
+               conn = CONNS[idx]
+
+               # json.dumps returns str on Python 3, so encode before sending
+               if sys.hexversion > 0x3000000:
+                       data = data.encode('iso8859-1')
+               put_data(conn, header + data)
+
+               data = read_data(conn, HEADER_SIZE)
+               if sys.hexversion > 0x3000000:
+                       data = data.decode('iso8859-1')
+
+               #print("received %r" % data)
+               # response header layout: RES,<returncode>,<payload length>
+               lst = data.split(',')
+               ret = int(lst[1])
+               dlen = int(lst[2])
+
+               out = err = None
+               if dlen:
+                       data = read_data(conn, dlen)
+                       (out, err, exc) = pickle.loads(data)
+                       if exc:
+                               raise Errors.WafError('Execution failure: %s' % exc)
+
+               if out:
+                       if not isinstance(out, str):
+                               out = out.decode(sys.stdout.encoding or 'iso8859-1')
+                       if self.logger:
+                               self.logger.debug('out: %s' % out)
+                       else:
+                               Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
+               if err:
+                       if not isinstance(err, str):
+                               err = err.decode(sys.stdout.encoding or 'iso8859-1')
+                       if self.logger:
+                               self.logger.error('err: %s' % err)
+                       else:
+                               Logs.info(err, extra={'stream':sys.stderr, 'c1': ''})
+
+               return ret
+
+       def init_key(ctx):
+               # Create (or reuse) the 20-character shared authentication key
+               # and export it plus our pid in the environment.
+               try:
+                       key = ctx.SHARED_KEY = os.environ['SHARED_KEY']
+               except KeyError:
+                       key = "".join([chr(random.SystemRandom().randint(40, 126)) for x in range(20)])
+                       os.environ['SHARED_KEY'] = ctx.SHARED_KEY = key
+               os.environ['PREFORKPID'] = str(os.getpid())
+               return key
+
+       def init_servers(ctx, maxval):
+               # Start a single Java server (unlike prefork.py, which starts
+               # one per job) and open 'maxval' connections to it, retrying
+               # briefly while it starts up.
+               while len(SERVERS) < 1:
+                       i = len(SERVERS)
+                       srv = make_server(ctx, i)
+                       SERVERS.append(srv)
+               while len(CONNS) < maxval:
+                       i = len(CONNS)
+                       srv = SERVERS[0]
+                       conn = None
+                       for x in range(30):
+                               try:
+                                       conn = make_conn(ctx, srv)
+                                       break
+                               except socket.error:
+                                       time.sleep(0.01)
+                       if not conn:
+                               raise ValueError('Could not start the server!')
+                       CONNS.append(conn)
+
+       def init_smp(self):
+               # Optionally pin the build process to CPU 0 (--pin-process).
+               if not getattr(Options.options, 'smp', getattr(self, 'smp', None)):
+                       return
+               if Utils.unversioned_sys_platform() in ('freebsd',):
+                       pid = os.getpid()
+                       cmd = ['cpuset', '-l', '0', '-p', str(pid)]
+               elif Utils.unversioned_sys_platform() in ('linux',):
+                       pid = os.getpid()
+                       cmd = ['taskset', '-pc', '0', str(pid)]
+               # NOTE(review): 'cmd' is unbound on platforms other than
+               # freebsd/linux, so 'if cmd:' would raise NameError there
+               if cmd:
+                       self.cmd_and_log(cmd, quiet=0)
+
+       def options(opt):
+               # Loading the tool at option time spawns the server early.
+               opt.add_option('--pin-process', action='store_true', dest='smp', default=False)
+               init_key(opt)
+               init_servers(opt, 40)
+
+       def build(bld):
+               # Install the prefork exec_command on the build context class,
+               # keeping the original reachable as exec_command_old.
+               if bld.cmd == 'clean':
+                       return
+
+               init_key(bld)
+               init_servers(bld, bld.jobs)
+               init_smp(bld)
+
+               bld.__class__.exec_command_old = bld.__class__.exec_command
+               bld.__class__.exec_command = exec_command
diff --git a/third_party/waf/waflib/extras/preforkunix.py b/third_party/waf/waflib/extras/preforkunix.py
new file mode 100644 (file)
index 0000000..ec9aeeb
--- /dev/null
@@ -0,0 +1,317 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2015 (ita)
+
+"""
+A version of prefork.py that uses unix sockets. The advantage is that it does not expose
+connections to the outside. Yet it only works on unix-like systems
+and performance can be slightly worse.
+
+To use::
+
+    def options(opt):
+        # recommended, fork new processes before using more memory
+        opt.load('preforkunix')
+
+    def build(bld):
+        bld.load('preforkunix')
+        ...
+        more code
+"""
+
+import os, re, socket, threading, sys, subprocess, atexit, traceback, signal, time
+try:
+       from queue import Queue
+except ImportError:
+       from Queue import Queue
+try:
+       import cPickle
+except ImportError:
+       import pickle as cPickle
+
+HEADER_SIZE = 20
+
+REQ = 'REQ'
+RES = 'RES'
+BYE = 'BYE'
+
+def make_header(params, cookie=''):
+	"""Build a fixed-size (HEADER_SIZE) request/response header from a list of string params, padded then suffixed by the cookie."""
+	header = ','.join(params)
+	header = header.ljust(HEADER_SIZE - len(cookie))
+	assert(len(header) == HEADER_SIZE - len(cookie))
+	header = header + cookie
+	if sys.hexversion > 0x3000000:
+		# sockets want bytes on python 3; iso8859-1 is a 1:1 byte mapping
+		header = header.encode('iso8859-1')
+	return header
+
+re_valid_query = re.compile('^[a-zA-Z0-9_, ]+$')
+if 1:
+	def send_response(conn, ret, out, err, exc):
+		"""Send a RES header (and a pickled (out, err, exc) payload if any) back to the parent over conn."""
+		if out or err or exc:
+			data = (out, err, exc)
+			data = cPickle.dumps(data, -1)
+		else:
+			# empty payload: the header alone carries the return code
+			data = ''
+
+		params = [RES, str(ret), str(len(data))]
+
+		# no need for the cookie in the response
+		conn.send(make_header(params))
+		if data:
+			conn.send(data)
+
+	def process_command(conn):
+		"""Read one request header from conn and dispatch it; return 'ok' to keep serving, None on EOF."""
+		query = conn.recv(HEADER_SIZE)
+		if not query:
+			return None
+		#print(len(query))
+		assert(len(query) == HEADER_SIZE)
+		if sys.hexversion > 0x3000000:
+			query = query.decode('iso8859-1')
+
+		#print "%r" % query
+		# reject anything outside the expected 'WORD,WORD,...' shape
+		if not re_valid_query.match(query):
+			send_response(conn, -1, '', '', 'Invalid query %r' % query)
+			raise ValueError('Invalid query %r' % query)
+
+		query = query.strip().split(',')
+
+		if query[0] == REQ:
+			run_command(conn, query[1:])
+		elif query[0] == BYE:
+			# the parent asked us to stop; unwind via the exception
+			raise ValueError('Exit')
+		else:
+			raise ValueError('Invalid query %r' % query)
+		return 'ok'
+
+	def run_command(conn, query):
+		"""Receive the pickled Popen keyword dict (query[0] is its byte size), run the command, and send the response."""
+
+		size = int(query[0])
+		data = conn.recv(size)
+		assert(len(data) == size)
+		kw = cPickle.loads(data)
+
+		# run command
+		ret = out = err = exc = None
+		cmd = kw['cmd']
+		del kw['cmd']
+		#print(cmd)
+
+		try:
+			if kw['stdout'] or kw['stderr']:
+				# capture output so it can be shipped back to the parent
+				p = subprocess.Popen(cmd, **kw)
+				(out, err) = p.communicate()
+				ret = p.returncode
+			else:
+				ret = subprocess.Popen(cmd, **kw).wait()
+		except KeyboardInterrupt:
+			raise
+		except Exception as e:
+			# report the failure text instead of crashing the server process
+			ret = -1
+			exc = str(e) + traceback.format_exc()
+
+		send_response(conn, ret, out, err, exc)
+
+if 1:
+
+       from waflib import Logs, Utils, Runner, Errors, Options
+
+	def init_task_pool(self):
+		"""Override of Runner.Parallel.init_task_pool: tag each consumer thread with an index so exec_command can pick its own connection from CONNS."""
+		# lazy creation, and set a common pool for all task consumers
+		pool = self.pool = []
+		for i in range(self.numjobs):
+			consumer = Runner.get_pool()
+			pool.append(consumer)
+			consumer.idx = i
+		self.ready = Queue(0)
+		def setq(consumer):
+			# executed inside each consumer thread: record the index on the thread itself
+			consumer.ready = self.ready
+			try:
+				threading.current_thread().idx = consumer.idx
+			except Exception as e:
+				print(e)
+		for x in pool:
+			x.ready.put(setq)
+		return pool
+	Runner.Parallel.init_task_pool = init_task_pool
+
+	def make_conn(bld):
+		"""Fork a command-server child connected through a unix socketpair; return (pid, parent_socket) in the parent."""
+		child_socket, parent_socket = socket.socketpair(socket.AF_UNIX)
+		ppid = os.getpid()
+		pid = os.fork()
+		if pid == 0:
+			parent_socket.close()
+
+			# if the parent crashes, try to exit cleanly
+			def reap():
+				while 1:
+					try:
+						# signal 0 only probes whether the parent is alive
+						os.kill(ppid, 0)
+					except OSError:
+						break
+					else:
+						time.sleep(1)
+				os.kill(os.getpid(), signal.SIGKILL)
+			t = threading.Thread(target=reap)
+			t.setDaemon(True)
+			t.start()
+
+			# write to child_socket only
+			try:
+				while process_command(child_socket):
+					pass
+			except KeyboardInterrupt:
+				sys.exit(2)
+			# NOTE(review): on plain EOF the child falls through and returns None
+			# instead of exiting; presumably the reap() thread kills it — confirm
+		else:
+			child_socket.close()
+			return (pid, parent_socket)
+
+       SERVERS = []
+       CONNS = []
+       def close_all():
+               global SERVERS, CONS
+               while CONNS:
+                       conn = CONNS.pop()
+                       try:
+                               conn.close()
+                       except:
+                               pass
+               while SERVERS:
+                       pid = SERVERS.pop()
+                       try:
+                               os.kill(pid, 9)
+                       except:
+                               pass
+       atexit.register(close_all)
+
+	def put_data(conn, data):
+		"""Send all of data over conn, looping on partial sends; raise RuntimeError if the peer closed."""
+		cnt = 0
+		while cnt < len(data):
+			sent = conn.send(data[cnt:])
+			if sent == 0:
+				raise RuntimeError('connection ended')
+			cnt += sent
+
+	def read_data(conn, siz):
+		"""Read exactly siz bytes from conn (looping over partial reads); raise RuntimeError on premature EOF."""
+		cnt = 0
+		buf = []
+		while cnt < siz:
+			data = conn.recv(min(siz - cnt, 1024))
+			if not data:
+				raise RuntimeError('connection ended %r %r' % (cnt, siz))
+			buf.append(data)
+			cnt += len(data)
+		if sys.hexversion > 0x3000000:
+			# python 3: the chunks are bytes, so join with an empty bytes object
+			ret = ''.encode('iso8859-1').join(buf)
+		else:
+			ret = ''.join(buf)
+		return ret
+
+	def exec_command(self, cmd, **kw):
+		"""Run cmd in the pre-forked server bound to this consumer thread; fall back to the original exec_command for redirected output."""
+		# redirections to real files cannot be serialized over the socket
+		if 'stdout' in kw:
+			if kw['stdout'] not in (None, subprocess.PIPE):
+				return self.exec_command_old(cmd, **kw)
+		elif 'stderr' in kw:
+			if kw['stderr'] not in (None, subprocess.PIPE):
+				return self.exec_command_old(cmd, **kw)
+
+		kw['shell'] = isinstance(cmd, str)
+		Logs.debug('runner: %r' % cmd)
+		Logs.debug('runner_env: kw=%s' % kw)
+
+		if self.logger:
+			self.logger.info(cmd)
+
+		if 'stdout' not in kw:
+			kw['stdout'] = subprocess.PIPE
+		if 'stderr' not in kw:
+			kw['stderr'] = subprocess.PIPE
+
+		if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
+			raise Errors.WafError("Program %s not found!" % cmd[0])
+
+		# the thread index was set in init_task_pool; it selects a dedicated connection
+		idx = threading.current_thread().idx
+		kw['cmd'] = cmd
+
+		# serialization..
+		#print("sub %r %r" % (idx, cmd))
+		#print("write to %r %r" % (idx, cmd))
+
+		data = cPickle.dumps(kw, -1)
+		params = [REQ, str(len(data))]
+		header = make_header(params)
+
+		conn = CONNS[idx]
+
+		put_data(conn, header + data)
+
+		#print("running %r %r" % (idx, cmd))
+		#print("read from %r %r" % (idx, cmd))
+
+		data = read_data(conn, HEADER_SIZE)
+		if sys.hexversion > 0x3000000:
+			data = data.decode('iso8859-1')
+
+		#print("received %r" % data)
+		# response header: RES,<returncode>,<payload length>
+		lst = data.split(',')
+		ret = int(lst[1])
+		dlen = int(lst[2])
+
+		out = err = None
+		if dlen:
+			data = read_data(conn, dlen)
+			(out, err, exc) = cPickle.loads(data)
+			if exc:
+				raise Errors.WafError('Execution failure: %s' % exc)
+
+		if out:
+			if not isinstance(out, str):
+				out = out.decode(sys.stdout.encoding or 'iso8859-1')
+			if self.logger:
+				self.logger.debug('out: %s' % out)
+			else:
+				Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
+		if err:
+			if not isinstance(err, str):
+				err = err.decode(sys.stdout.encoding or 'iso8859-1')
+			if self.logger:
+				self.logger.error('err: %s' % err)
+			else:
+				Logs.info(err, extra={'stream':sys.stderr, 'c1': ''})
+
+		return ret
+
+       def init_smp(self):
+               if not getattr(Options.options, 'smp', getattr(self, 'smp', None)):
+                       return
+               if Utils.unversioned_sys_platform() in ('freebsd',):
+                       pid = os.getpid()
+                       cmd = ['cpuset', '-l', '0', '-p', str(pid)]
+               elif Utils.unversioned_sys_platform() in ('linux',):
+                       pid = os.getpid()
+                       cmd = ['taskset', '-pc', '0', str(pid)]
+               if cmd:
+                       self.cmd_and_log(cmd, quiet=0)
+
+	def options(opt):
+		"""Add the --pin-process option and pre-fork server processes (unix only)."""
+		# memory consumption might be at the lowest point while processing options
+		opt.add_option('--pin-process', action='store_true', dest='smp', default=False)
+		if Utils.is_win32 or os.sep != '/':
+			return
+		while len(CONNS) < 30:
+			(pid, conn) = make_conn(opt)
+			SERVERS.append(pid)
+			CONNS.append(conn)
+
+	def build(bld):
+		"""Ensure one connection per build job exists, then route exec_command through the forked servers."""
+		if Utils.is_win32 or os.sep != '/':
+			return
+		if bld.cmd == 'clean':
+			return
+		# top up the pool created in options() if -jN asks for more workers
+		while len(CONNS) < bld.jobs:
+			(pid, conn) = make_conn(bld)
+			SERVERS.append(pid)
+			CONNS.append(conn)
+		init_smp(bld)
+		# keep the original implementation available as a fallback
+		bld.__class__.exec_command_old = bld.__class__.exec_command
+		bld.__class__.exec_command = exec_command
diff --git a/third_party/waf/waflib/extras/print_commands.py b/third_party/waf/waflib/extras/print_commands.py
new file mode 100644 (file)
index 0000000..ada0ee5
--- /dev/null
@@ -0,0 +1,84 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+
+"""
+Illustrate how to override a class method to do something
+
+In this case, print the commands being executed as strings
+(the commands are usually lists, so this can be misleading)
+"""
+
+import sys
+from waflib import Context, Utils, Errors, Logs
+
+def exec_command(self, cmd, **kw):
+       subprocess = Utils.subprocess
+       kw['shell'] = isinstance(cmd, str)
+
+       if isinstance(cmd, str):
+               kw['shell'] = True
+               txt = cmd
+       else:
+               txt = ' '.join(repr(x) if ' ' in x else x for x in cmd)
+
+       Logs.debug('runner: %s', txt)
+       Logs.debug('runner_env: kw=%s', kw)
+
+       if self.logger:
+               self.logger.info(cmd)
+
+       if 'stdout' not in kw:
+               kw['stdout'] = subprocess.PIPE
+       if 'stderr' not in kw:
+               kw['stderr'] = subprocess.PIPE
+
+       if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
+               raise Errors.WafError("Program %s not found!" % cmd[0])
+
+       wargs = {}
+       if 'timeout' in kw:
+               if kw['timeout'] is not None:
+                       wargs['timeout'] = kw['timeout']
+               del kw['timeout']
+       if 'input' in kw:
+               if kw['input']:
+                       wargs['input'] = kw['input']
+                       kw['stdin'] = Utils.subprocess.PIPE
+               del kw['input']
+
+       if 'cwd' in kw:
+               if not isinstance(kw['cwd'], str):
+                       kw['cwd'] = kw['cwd'].abspath()
+
+       try:
+               if kw['stdout'] or kw['stderr']:
+                       p = subprocess.Popen(cmd, **kw)
+                       (out, err) = p.communicate(**wargs)
+                       ret = p.returncode
+               else:
+                       out, err = (None, None)
+                       ret = subprocess.Popen(cmd, **kw).wait(**wargs)
+       except Exception ,e:
+               raise Errors.WafError('Execution failure: %s' % str(e), ex=e)
+
+       if out:
+               if not isinstance(out, str):
+                       out = out.decode(sys.stdout.encoding or 'iso8859-1')
+               if self.logger:
+                       self.logger.debug('out: %s' % out)
+               else:
+                       Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
+       if err:
+               if not isinstance(err, str):
+                       err = err.decode(sys.stdout.encoding or 'iso8859-1')
+               if self.logger:
+                       self.logger.error('err: %s' % err)
+               else:
+                       Logs.info(err, extra={'stream':sys.stderr, 'c1': ''})
+
+       return ret
+
+Context.Context.exec_command = exec_command
diff --git a/third_party/waf/waflib/extras/proc.py b/third_party/waf/waflib/extras/proc.py
new file mode 100644 (file)
index 0000000..fec4c4c
--- /dev/null
@@ -0,0 +1,54 @@
+#! /usr/bin/env python
+# per rosengren 2011
+
+from os import environ, path
+from waflib import TaskGen, Utils
+
+def options(opt):
+	"""Declare the Oracle Pro*C command-line options (installation home, TNS directory, connection string)."""
+	grp = opt.add_option_group('Oracle ProC Options')
+	grp.add_option('--oracle_home', action='store', default=environ.get('PROC_ORACLE'), help='Path to Oracle installation home (has bin/lib)')
+	grp.add_option('--tns_admin', action='store', default=environ.get('TNS_ADMIN'), help='Directory containing server list (TNS_NAMES.ORA)')
+	grp.add_option('--connection', action='store', default='dummy-user/dummy-password@dummy-server', help='Format: user/password@server')
+
+def configure(cnf):
+	"""Copy the Pro*C options into the environment (unless already set) and locate the 'proc' binary."""
+	env = cnf.env
+	if not env.PROC_ORACLE:
+		env.PROC_ORACLE = cnf.options.oracle_home
+	if not env.PROC_TNS_ADMIN:
+		env.PROC_TNS_ADMIN = cnf.options.tns_admin
+	if not env.PROC_CONNECTION:
+		env.PROC_CONNECTION = cnf.options.connection
+	cnf.find_program('proc', var='PROC', path_list=env.PROC_ORACLE + path.sep + 'bin')
+
+def proc(tsk):
+	"""Task rule: run the Oracle Pro*C precompiler to turn a .pc input into a .c output."""
+	env = tsk.env
+	gen = tsk.generator
+	inc_nodes = gen.to_incnodes(Utils.to_list(getattr(gen,'includes',[])) + env['INCLUDES'])
+
+	# FIXME the if-else construct will not work in python 2
+	cmd = (
+		[env.PROC] +
+		['SQLCHECK=SEMANTICS'] +
+		(['SYS_INCLUDE=(' + ','.join(env.PROC_INCLUDES) + ')']
+			if env.PROC_INCLUDES else []) +
+		['INCLUDE=(' + ','.join(
+			[i.bldpath() for i in inc_nodes]
+		) + ')'] +
+		['userid=' + env.PROC_CONNECTION] +
+		['INAME=' + tsk.inputs[0].bldpath()] +
+		['ONAME=' + tsk.outputs[0].bldpath()]
+	)
+	# proc needs ORACLE_HOME and its shared libraries at run time
+	exec_env = {
+		'ORACLE_HOME': env.PROC_ORACLE,
+		'LD_LIBRARY_PATH': env.PROC_ORACLE + path.sep + 'lib',
+	}
+	if env.PROC_TNS_ADMIN:
+		exec_env['TNS_ADMIN'] = env.PROC_TNS_ADMIN
+	return tsk.exec_command(cmd, env=exec_env)
+
+# map *.pc sources onto the rule above, producing *.c files
+TaskGen.declare_chain(
+	name = 'proc',
+	rule = proc,
+	ext_in = '.pc',
+	ext_out = '.c',
+)
diff --git a/third_party/waf/waflib/extras/protoc.py b/third_party/waf/waflib/extras/protoc.py
new file mode 100644 (file)
index 0000000..97d1ef3
--- /dev/null
@@ -0,0 +1,92 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Philipp Bender, 2012
+# Matt Clarkson, 2012
+
+import re
+from waflib.Task import Task
+from waflib.TaskGen import extension
+
+"""
+A simple tool to integrate protocol buffers into your build system.
+
+Example::
+
+    def configure(conf):
+        conf.load('compiler_cxx cxx protoc')
+
+    def build(bld):
+        bld(
+                features = 'cxx cxxprogram',
+                source   = 'main.cpp file1.proto proto/file2.proto',
+                include  = '. proto',
+                target   = 'executable')
+
+Notes when using this tool:
+
+- protoc command line parsing is tricky.
+
+  The generated files can be put in subfolders which depend on
+  the order of the include paths.
+
+  Try to be simple when creating task generators
+  containing protoc stuff.
+
+"""
+
+class protoc(Task):
+	"""Task running the protoc compiler on one .proto file."""
+	# protoc expects the input proto file to be an absolute path.
+	run_str = '${PROTOC} ${PROTOC_FLAGS} ${PROTOC_ST:INCPATHS} ${SRC[0].abspath()}'
+	color   = 'BLUE'
+	ext_out = ['.h', 'pb.cc']
+	def scan(self):
+		"""
+		Scan .proto dependencies
+		"""
+		node = self.inputs[0]
+
+		nodes = []
+		names = []
+		seen = []
+
+		if not node: return (nodes, names)
+
+		def parse_node(node):
+			# recursive walk over import statements; 'seen' prevents cycles
+			if node in seen:
+				return
+			seen.append(node)
+			code = node.read().splitlines()
+			for line in code:
+				m = re.search(r'^import\s+"(.*)";.*(//)?.*', line)
+				if m:
+					dep = m.groups()[0]
+					for incpath in self.env.INCPATHS:
+						found = incpath.find_resource(dep)
+						if found:
+							nodes.append(found)
+							parse_node(found)
+						else:
+							# NOTE(review): appended once per include path that
+							# misses, and the loop does not stop after a hit —
+							# presumably harmless duplicates; confirm upstream
+							names.append(dep)
+
+		parse_node(node)
+		return (nodes, names)
+
+@extension('.proto')
+def process_protoc(self, node):
+	"""Create a protoc task for the .proto node and feed the generated .pb.cc back into the sources."""
+	cpp_node = node.change_ext('.pb.cc')
+	hpp_node = node.change_ext('.pb.h')
+	self.create_task('protoc', node, [cpp_node, hpp_node])
+	self.source.append(cpp_node)
+
+	if 'cxx' in self.features and not self.env.PROTOC_FLAGS:
+		#self.env.PROTOC_FLAGS = '--cpp_out=%s' % node.parent.get_bld().abspath() # <- this does not work
+		self.env.PROTOC_FLAGS = '--cpp_out=%s' % node.parent.get_bld().bldpath()
+
+	# make sure the generated code links against protobuf
+	use = getattr(self, 'use', '')
+	if not 'PROTOBUF' in use:
+		self.use = self.to_list(use) + ['PROTOBUF']
+
+def configure(conf):
+	"""Check for the protobuf library via pkg-config and find the protoc executable."""
+	conf.check_cfg(package="protobuf", uselib_store="PROTOBUF", args=['--cflags', '--libs'])
+	conf.find_program('protoc', var='PROTOC')
+	# template used to expand include paths on the protoc command line
+	conf.env.PROTOC_ST = '-I%s'
diff --git a/third_party/waf/waflib/extras/relocation.py b/third_party/waf/waflib/extras/relocation.py
new file mode 100644 (file)
index 0000000..052890b
--- /dev/null
@@ -0,0 +1,83 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+"""
+Waf 1.6
+
+Try to detect if the project directory was relocated, and if it was,
+change the node representing the project directory. Just call:
+
+ waf configure build
+
+Note that if the project directory name changes, the signatures for the tasks using
+files in that directory will change, causing a partial build.
+"""
+
+import os
+from waflib import Build, ConfigSet, Task, Utils, Errors
+from waflib.TaskGen import feature, after_method
+
+EXTRA_LOCK = '.old_srcdir'
+
+old1 = Build.BuildContext.store
+def store(self):
+	"""Wrap BuildContext.store to also persist the current source directory in an extra lock file."""
+	old1(self)
+	db = os.path.join(self.variant_dir, EXTRA_LOCK)
+	env = ConfigSet.ConfigSet()
+	env.SRCDIR = self.srcnode.abspath()
+	env.store(db)
+Build.BuildContext.store = store
+
+old2 = Build.BuildContext.init_dirs
+def init_dirs(self):
+	"""Wrap BuildContext.init_dirs: if the stored source directory differs from the current one, migrate the node tree to the new location."""
+
+	if not (os.path.isabs(self.top_dir) and os.path.isabs(self.out_dir)):
+		raise Errors.WafError('The project was not configured: run "waf configure" first!')
+
+	srcdir = None
+	db = os.path.join(self.variant_dir, EXTRA_LOCK)
+	env = ConfigSet.ConfigSet()
+	try:
+		env.load(db)
+		srcdir = env.SRCDIR
+	except:
+		# missing/unreadable lock file: nothing to relocate
+		pass
+
+	if srcdir:
+		d = self.root.find_node(srcdir)
+		if d and srcdir != self.top_dir and getattr(d, 'children', ''):
+			# move every child node from the old project node to the new one
+			srcnode = self.root.make_node(self.top_dir)
+			print("relocating the source directory %r -> %r" % (srcdir, self.top_dir))
+			srcnode.children = {}
+
+			for (k, v) in d.children.items():
+				srcnode.children[k] = v
+				v.parent = srcnode
+			d.children = {}
+
+	old2(self)
+
+Build.BuildContext.init_dirs = init_dirs
+
+
+def uid(self):
+	"""Task uid based on paths relative to the source node, so it stays stable when the project directory moves."""
+	try:
+		return self.uid_
+	except AttributeError:
+		# this is not a real hot zone, but we want to avoid surprises here
+		m = Utils.md5()
+		up = m.update
+		up(self.__class__.__name__.encode())
+		for x in self.inputs + self.outputs:
+			# relative path instead of abspath: relocation-proof
+			up(x.path_from(x.ctx.srcnode).encode())
+		self.uid_ = m.digest()
+		return self.uid_
+Task.Task.uid = uid
+
+@feature('c', 'cxx', 'd', 'go', 'asm', 'fc', 'includes')
+@after_method('propagate_uselib_vars', 'process_source')
+def apply_incpaths(self):
+	"""Compute INCPATHS using paths relative to the build node for in-tree folders (relocation-friendly)."""
+	lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env['INCLUDES'])
+	self.includes_nodes = lst
+	bld = self.bld
+	# relative path when inside the project, absolute otherwise
+	self.env['INCPATHS'] = [x.is_child_of(bld.srcnode) and x.path_from(bld.bldnode) or x.abspath() for x in lst]
diff --git a/third_party/waf/waflib/extras/remote.py b/third_party/waf/waflib/extras/remote.py
new file mode 100644 (file)
index 0000000..6aca854
--- /dev/null
@@ -0,0 +1,326 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Remote Builds tool using rsync+ssh
+
+__author__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
+__copyright__ = "Jérôme Carretero, 2013"
+
+"""
+Simple Remote Builds
+********************
+
+This tool is an *experimental* tool (meaning, do not even try to pollute
+the waf bug tracker with bugs in here, contact me directly) providing simple
+remote builds.
+
+It uses rsync and ssh to perform the remote builds.
+It is intended for performing cross-compilation on platforms where
+a cross-compiler is either unavailable (eg. MacOS, QNX) a specific product
+does not exist (eg. Windows builds using Visual Studio) or simply not installed.
+This tool sends the sources and the waf script to the remote host,
+and commands the usual waf execution.
+
+There are alternatives to using this tool, such as setting up shared folders,
+logging on to remote machines, and building on the shared folders.
+Electing one method or another depends on the size of the program.
+
+
+Usage
+=====
+
+1. Set your wscript file so it includes a list of variants,
+   e.g.::
+
+     from waflib import Utils
+     top = '.'
+     out = 'build'
+
+     variants = [
+      'linux_64_debug',
+      'linux_64_release',
+      'linux_32_debug',
+      'linux_32_release',
+      ]
+
+     from waflib.extras import remote
+
+     def options(opt):
+         # normal stuff from here on
+         opt.load('compiler_c')
+
+     def configure(conf):
+         if not conf.variant:
+             return
+         # normal stuff from here on
+         conf.load('compiler_c')
+
+     def build(bld):
+         if not bld.variant:
+             return
+         # normal stuff from here on
+         bld(features='c cprogram', target='app', source='main.c')
+
+
+2. Build the waf file, so it includes this tool, and put it in the current
+   directory
+
+   .. code:: bash
+
+      ./waf-light --tools=remote
+
+3. Set the host names to access the hosts:
+
+   .. code:: bash
+
+      export REMOTE_QNX=user@kiunix
+
+4. Setup the ssh server and ssh keys
+
+   The ssh key should not be protected by a password, or it will prompt for it everytime.
+   Create the key on the client:
+
+   .. code:: bash
+
+      ssh-keygen -t rsa -f foo.rsa
+
+   Then copy foo.rsa.pub to the remote machine (user@kiunix:/home/user/.ssh/authorized_keys),
+   and make sure the permissions are correct (chmod go-w ~ ~/.ssh ~/.ssh/authorized_keys)
+
+   A separate key for the build processes can be set in the environment variable WAF_SSH_KEY.
+   The tool will then use 'ssh-keyscan' to avoid prompting for remote hosts, so
+   be warned to use this feature on internal networks only (MITM).
+
+   .. code:: bash
+
+      export WAF_SSH_KEY=~/foo.rsa
+
+5. Perform the build:
+
+   .. code:: bash
+
+      waf configure_all build_all --remote
+
+"""
+
+
+import getpass, os, re, sys
+from collections import OrderedDict
+from waflib import Context, Options, Utils, ConfigSet
+
+from waflib.Build import BuildContext, CleanContext, InstallContext, UninstallContext
+from waflib.Configure import ConfigurationContext
+
+
+is_remote = False
+if '--remote' in sys.argv:
+       is_remote = True
+       sys.argv.remove('--remote')
+
+class init(Context.Context):
+	"""
+	Generates the *_all commands
+	"""
+	cmd = 'init'
+	fun = 'init'
+	def execute(self):
+		# expand each '<name>_all' command into one '<name>_<variant>' per variant,
+		# and prepend the 'remote' command
+		for x in list(Context.g_module.variants):
+			self.make_variant(x)
+		lst = ['remote']
+		for k in Options.commands:
+			if k.endswith('_all'):
+				name = k.replace('_all', '')
+				for x in Context.g_module.variants:
+					lst.append('%s_%s' % (name, x))
+			else:
+				lst.append(k)
+		del Options.commands[:]
+		Options.commands += lst
+
+	def make_variant(self, x):
+		# create build/clean/install/uninstall and configure context classes for variant x
+		for y in (BuildContext, CleanContext, InstallContext, UninstallContext):
+			name = y.__name__.replace('Context','').lower()
+			class tmp(y):
+				cmd = name + '_' + x
+				fun = 'build'
+				variant = x
+		class tmp(ConfigurationContext):
+			cmd = 'configure_' + x
+			fun = 'configure'
+			variant = x
+			def __init__(self, **kw):
+				ConfigurationContext.__init__(self, **kw)
+				self.setenv(x)
+
+class remote(BuildContext):
+       cmd = 'remote'
+       fun = 'build'
+
+	def get_ssh_hosts(self):
+		"""Run ssh-keyscan for each variant's host and return the list of known_hosts lines."""
+		lst = []
+		for v in Context.g_module.variants:
+			self.env.HOST = self.login_to_host(self.variant_to_login(v))
+			cmd = Utils.subst_vars('${SSH_KEYSCAN} -t rsa,ecdsa ${HOST}', self.env)
+			out, err = self.cmd_and_log(cmd, output=Context.BOTH, quiet=Context.BOTH)
+			lst.append(out.strip())
+		return lst
+
+	def setup_private_ssh_key(self):
+		"""
+		When WAF_SSH_KEY points to a private key, a .ssh directory will be created in the build directory
+		Make sure that the ssh key does not prompt for a password
+		"""
+		key = os.environ.get('WAF_SSH_KEY', '')
+		if not key:
+			return
+		if not os.path.isfile(key):
+			self.fatal('Key in WAF_SSH_KEY must point to a valid file')
+		self.ssh_dir = os.path.join(self.path.abspath(), 'build', '.ssh')
+		self.ssh_hosts = os.path.join(self.ssh_dir, 'known_hosts')
+		self.ssh_key = os.path.join(self.ssh_dir, os.path.basename(key))
+		self.ssh_config = os.path.join(self.ssh_dir, 'config')
+		for x in self.ssh_hosts, self.ssh_key, self.ssh_config:
+			if not os.path.isfile(x):
+				if not os.path.isdir(self.ssh_dir):
+					os.makedirs(self.ssh_dir)
+				# 448 == 0o700: owner-only permissions on the copied key
+				Utils.writef(self.ssh_key, Utils.readf(key), 'wb')
+				os.chmod(self.ssh_key, 448)
+
+				Utils.writef(self.ssh_hosts, '\n'.join(self.get_ssh_hosts()))
+				os.chmod(self.ssh_key, 448)
+
+				Utils.writef(self.ssh_config, 'UserKnownHostsFile %s' % self.ssh_hosts, 'wb')
+				os.chmod(self.ssh_config, 448)
+		self.env.SSH_OPTS = ['-F', self.ssh_config, '-i', self.ssh_key]
+		self.env.append_value('RSYNC_SEND_OPTS', '--exclude=build/.ssh')
+
+	def skip_unbuildable_variant(self):
+		# skip variants that cannot be built on this OS
+		for k in Options.commands:
+			a, _, b = k.partition('_')
+			if b in Context.g_module.variants:
+				c, _, _ = b.partition('_')
+				# the variant name starts with the platform, e.g. 'linux_32_debug'
+				if c != Utils.unversioned_sys_platform():
+					Options.commands.remove(k)
+
+	def login_to_host(self, login):
+		"""Strip the 'user@' prefix from a login string, leaving only the host name."""
+		return re.sub('(\w+@)', '', login)
+
+	def variant_to_login(self, variant):
+		"""linux_32_debug -> search env.LINUX_32 and then env.LINUX"""
+		x = variant[:variant.rfind('_')]
+		ret = os.environ.get('REMOTE_' + x.upper(), '')
+		if not ret:
+			# fall back to the platform-only environment variable
+			x = x[:x.find('_')]
+			ret = os.environ.get('REMOTE_' + x.upper(), '')
+		if not ret:
+			# last resort: build on localhost as the current user
+			ret = '%s@localhost' % getpass.getuser()
+		return ret
+
+	def execute(self):
+		"""Locally only prune unbuildable variants; on the remote side run the real build."""
+		global is_remote
+		if not is_remote:
+			self.skip_unbuildable_variant()
+		else:
+			BuildContext.execute(self)
+
	def restore(self):
		"""
		Set up the source/build nodes and start from a fresh environment —
		nothing is loaded from a previous run.
		"""
		self.top_dir = os.path.abspath(Context.g_module.top)
		self.srcnode = self.root.find_node(self.top_dir)
		self.path = self.srcnode

		self.out_dir = os.path.join(self.top_dir, Context.g_module.out)
		self.bldnode = self.root.make_node(self.out_dir)
		self.bldnode.mkdir()

		# empty ConfigSet: no cached configuration is restored
		self.env = ConfigSet.ConfigSet()
+
	def extract_groups_of_builds(self):
		"""Return a dict mapping each variants to the commands to build"""
		# self.vgroups: variant name -> OrderedDict(variant -> [commands]);
		# the matched commands are consumed (removed from Options.commands)
		self.vgroups = {}
		# iterating in reverse makes the concurrent removal safe: deleting
		# the current element does not shift the items still to be visited
		for x in reversed(Options.commands):
			_, _, variant = x.partition('_')
			if variant in Context.g_module.variants:
				try:
					dct = self.vgroups[variant]
				except KeyError:
					dct = self.vgroups[variant] = OrderedDict()
				try:
					dct[variant].append(x)
				except KeyError:
					dct[variant] = [x]
				Options.commands.remove(x)
+
+       def custom_options(self, login):
+               try:
+                       return Context.g_module.host_options[login]
+               except (AttributeError, KeyError):
+                       return {}
+
+       def recurse(self, *k, **kw):
+               self.env.RSYNC = getattr(Context.g_module, 'rsync', 'rsync -a --chmod=u+rwx')
+               self.env.SSH = getattr(Context.g_module, 'ssh', 'ssh')
+               self.env.SSH_KEYSCAN = getattr(Context.g_module, 'ssh_keyscan', 'ssh-keyscan')
+               try:
+                       self.env.WAF = getattr(Context.g_module, 'waf')
+               except AttributeError:
+                       try:
+                               os.stat('waf')
+                       except KeyError:
+                               self.fatal('Put a waf file in the directory (./waf-light --tools=remote)')
+                       else:
+                               self.env.WAF = './waf'
+
+               self.extract_groups_of_builds()
+               self.setup_private_ssh_key()
+               for k, v in self.vgroups.items():
+                       task = self(rule=rsync_and_ssh, always=True)
+                       task.env.login = self.variant_to_login(k)
+
+                       task.env.commands = []
+                       for opt, value in v.items():
+                               task.env.commands += value
+                       task.env.variant = task.env.commands[0].partition('_')[2]
+                       for opt, value in self.custom_options(k):
+                               task.env[opt] = value
+               self.jobs = len(self.vgroups)
+
	def make_mkdir_command(self, task):
		"""Return the ssh command that wipes and recreates the remote build dir."""
		return Utils.subst_vars('${SSH} ${SSH_OPTS} ${login} "rm -fr ${remote_dir} && mkdir -p ${remote_dir}"', task.env)
+
	def make_send_command(self, task):
		"""Return the rsync command that pushes the sources to the remote host."""
		return Utils.subst_vars('${RSYNC} ${RSYNC_SEND_OPTS} -e "${SSH} ${SSH_OPTS}" ${local_dir} ${login}:${remote_dir}', task.env)
+
	def make_exec_command(self, task):
		"""Return the ssh command that runs waf remotely with the variant commands."""
		txt = '''${SSH} ${SSH_OPTS} ${login} "cd ${remote_dir} && ${WAF} ${commands}"'''
		return Utils.subst_vars(txt, task.env)
+
	def make_save_command(self, task):
		"""Return the rsync command that fetches the remote build results back."""
		return Utils.subst_vars('${RSYNC} ${RSYNC_SAVE_OPTS} -e "${SSH} ${SSH_OPTS}" ${login}:${remote_dir_variant} ${build_dir}', task.env)
+
def rsync_and_ssh(task):
	"""Task rule: push the sources to a remote host, build there, fetch results."""
	# remove a warning
	task.uid_ = id(task)

	bld = task.generator.bld
	env = task.env

	env.user, _, _ = env.login.partition('@')
	env.hdir = Utils.to_hex(Utils.h_list((task.generator.path.abspath(), env.variant)))
	env.remote_dir = '~%s/wafremote/%s' % (env.user, env.hdir)
	env.local_dir = bld.srcnode.abspath() + '/'

	env.remote_dir_variant = '%s/%s/%s' % (env.remote_dir, Context.g_module.out, env.variant)
	env.build_dir = bld.bldnode.abspath()

	# run the four remote steps in order, stopping at the first failure
	for make_cmd in (bld.make_mkdir_command, bld.make_send_command,
			bld.make_exec_command, bld.make_save_command):
		ret = task.exec_command(make_cmd(task))
		if ret:
			return ret
diff --git a/third_party/waf/waflib/extras/review.py b/third_party/waf/waflib/extras/review.py
new file mode 100644 (file)
index 0000000..34796cb
--- /dev/null
@@ -0,0 +1,321 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Laurent Birtz, 2011
+# moved the code into a separate tool (ita)
+
+"""
+There are several things here:
+- a different command-line option management making options persistent
+- the review command to display the options set
+
+Assumptions:
+- configuration options are not always added to the right group (and do not count on the users to do it...)
+- the options are persistent between the executions (waf options are NOT persistent by design), even for the configuration
+- when the options change, the build is invalidated (forcing a reconfiguration)
+"""
+
+import os, textwrap, shutil
+from waflib import Logs, Context, ConfigSet, Options, Build, Configure
+
class Odict(dict):
	"""Dictionary subclass that remembers the insertion order of its keys."""
	def __init__(self, data=None):
		"""
		:param data: initial contents: a plain dict, or a list of
			(key, value) pairs whose ordering is preserved
		"""
		self._keys = []
		dict.__init__(self)
		if data:
			# we were provided a regular dict
			if isinstance(data, dict):
				self.append_from_dict(data)

			# we were provided a tuple list
			elif type(data) == list:
				self.append_from_plist(data)

			# we were provided invalid input
			else:
				raise Exception("expected a dict or a tuple list")

	def append_from_dict(self, dict):
		# bugfix: the original map() call was a silent no-op on Python 3
		# (map objects are lazy and were never consumed)
		for key in dict.keys():
			self[key] = dict[key]

	def append_from_plist(self, plist):
		# validate before inserting anything
		for pair in plist:
			if len(pair) != 2:
				raise Exception("invalid pairs list")
		for (k, v) in plist:
			self.__setitem__(k, v)

	def __delitem__(self, key):
		if not key in self._keys:
			raise KeyError(key)
		dict.__delitem__(self, key)
		self._keys.remove(key)

	def __setitem__(self, key, item):
		dict.__setitem__(self, key, item)
		if key not in self._keys:
			self._keys.append(key)

	def clear(self):
		dict.clear(self)
		self._keys = []

	def copy(self):
		return Odict(self.plist())

	def items(self):
		# concrete list of pairs in insertion order (not a lazy view)
		return [(k, dict.__getitem__(self, k)) for k in self._keys]

	def keys(self):
		return list(self._keys) # return a copy of the list

	def values(self):
		# bugfix: return a list; map() would be a lazy one-shot iterator
		# on Python 3, breaking repeated iteration
		return [self.get(k) for k in self._keys]

	def plist(self):
		p = []
		for k, v in self.items():
			p.append( (k, v) )
		return p

	def __str__(self):
		buf = []
		buf.append("{ ")
		for k, v in self.items():
			buf.append('%r : %r, ' % (k, v))
		buf.append("}")
		return ''.join(buf)
+
# Module-level state shared between OptionsReview and ReviewContext below
review_options = Odict()
"""
Ordered dictionary mapping configuration option names to their optparse option.
"""

review_defaults = {}
"""
Dictionary mapping configuration option names to their default value.
"""

old_review_set = None
"""
Review set containing the configuration values before parsing the command line.
"""

new_review_set = None
"""
Review set containing the configuration values after parsing the command line.
"""
+
class OptionsReview(Options.OptionsContext):
	"""Options context that detaches reviewable configuration options."""
	def __init__(self, **kw):
		# name the class explicitly: super(self.__class__, ...) recurses
		# infinitely if this class is ever subclassed
		super(OptionsReview, self).__init__(**kw)

	def prepare_config_review(self):
		"""
		Find the configuration options that are reviewable, detach
		their default value from their optparse object and store them
		into the review dictionaries.
		"""
		gr = self.get_option_group('configure options')
		for opt in gr.option_list:
			if opt.action != 'store' or opt.dest in ("out", "top"):
				continue
			review_options[opt.dest] = opt
			review_defaults[opt.dest] = opt.default
			# bugfix: dict.has_key() was removed in Python 3; use 'in'
			if opt.dest in gr.defaults:
				del gr.defaults[opt.dest]
			opt.default = None

	def parse_args(self):
		self.prepare_config_review()
		self.parser.get_option('--prefix').help = 'installation prefix'
		super(OptionsReview, self).parse_args()
		Context.create_context('review').refresh_review_set()
+
class ReviewContext(Context.Context):
	'''reviews the configuration values'''

	cmd = 'review'

	def __init__(self, **kw):
		# explicit class name instead of super(self.__class__, ...):
		# the latter recurses infinitely when subclassed
		super(ReviewContext, self).__init__(**kw)

		out = Options.options.out
		if not out:
			out = getattr(Context.g_module, Context.OUT, None)
		if not out:
			out = Options.lockfile.replace('.lock-waf', '')
		self.build_path = (os.path.isabs(out) and self.root or self.path).make_node(out).abspath()
		"""Path to the build directory"""

		self.cache_path = os.path.join(self.build_path, Build.CACHE_DIR)
		"""Path to the cache directory"""

		self.review_path = os.path.join(self.cache_path, 'review.cache')
		"""Path to the review cache file"""

	def execute(self):
		"""
		Display and store the review set. Invalidate the cache as required.
		"""
		if not self.compare_review_set(old_review_set, new_review_set):
			self.invalidate_cache()
		self.store_review_set(new_review_set)
		print(self.display_review_set(new_review_set))

	def invalidate_cache(self):
		"""Invalidate the cache to prevent bad builds."""
		try:
			Logs.warn("Removing the cached configuration since the options have changed")
			shutil.rmtree(self.cache_path)
		except OSError:
			# the cache directory may not exist; a bare except here would
			# also have swallowed KeyboardInterrupt/SystemExit
			pass

	def refresh_review_set(self):
		"""
		Obtain the old review set and the new review set, and import the new set.
		"""
		global old_review_set, new_review_set
		old_review_set = self.load_review_set()
		new_review_set = self.update_review_set(old_review_set)
		self.import_review_set(new_review_set)

	def load_review_set(self):
		"""
		Load and return the review set from the cache if it exists.
		Otherwise, return an empty set.
		"""
		if os.path.isfile(self.review_path):
			return ConfigSet.ConfigSet(self.review_path)
		return ConfigSet.ConfigSet()

	def store_review_set(self, review_set):
		"""
		Store the review set specified in the cache.
		"""
		if not os.path.isdir(self.cache_path):
			os.makedirs(self.cache_path)
		review_set.store(self.review_path)

	def update_review_set(self, old_set):
		"""
		Merge the options passed on the command line with those imported
		from the previous review set and return the corresponding
		preview set.
		"""

		# Convert value to string. It's important that 'None' maps to
		# the empty string.
		def val_to_str(val):
			# identity test for None ('== None' relies on __eq__)
			if val is None or val == '':
				return ''
			return str(val)

		new_set = ConfigSet.ConfigSet()
		opt_dict = Options.options.__dict__

		for name in review_options.keys():
			# the option is specified explicitly on the command line
			if name in opt_dict:
				# if the option is the default, pretend it was never specified
				if val_to_str(opt_dict[name]) != val_to_str(review_defaults[name]):
					new_set[name] = opt_dict[name]
			# the option was explicitly specified in a previous command
			elif name in old_set:
				new_set[name] = old_set[name]

		return new_set

	def import_review_set(self, review_set):
		"""
		Import the actual value of the reviewable options in the option
		dictionary, given the current review set.
		"""
		for name in review_options.keys():
			if name in review_set:
				value = review_set[name]
			else:
				value = review_defaults[name]
			setattr(Options.options, name, value)

	def compare_review_set(self, set1, set2):
		"""
		Return true if the review sets specified are equal.
		"""
		if len(set1.keys()) != len(set2.keys()): return False
		for key in set1.keys():
			if not key in set2 or set1[key] != set2[key]:
				return False
		return True

	def display_review_set(self, review_set):
		"""
		Return the string representing the review set specified.
		"""
		term_width = Logs.get_term_cols()
		lines = []
		for dest in review_options.keys():
			opt = review_options[dest]
			name = ", ".join(opt._short_opts + opt._long_opts)
			help = opt.help
			actual = None
			if dest in review_set: actual = review_set[dest]
			default = review_defaults[dest]
			lines.append(self.format_option(name, help, actual, default, term_width))
		return "Configuration:\n\n" + "\n\n".join(lines) + "\n"

	def format_option(self, name, help, actual, default, term_width):
		"""
		Return the string representing the option specified.
		"""
		def val_to_str(val):
			if val is None or val == '':
				return "(void)"
			return str(val)

		max_name_len = 20
		sep_len = 2

		w = textwrap.TextWrapper()
		w.width = term_width - 1
		if w.width < 60: w.width = 60

		out = ""

		# format the help
		out += w.fill(help) + "\n"

		# format the name
		name_len = len(name)
		out += Logs.colors.CYAN + name + Logs.colors.NORMAL

		# set the indentation used when the value wraps to the next line
		w.subsequent_indent = " ".rjust(max_name_len + sep_len)
		w.width -= (max_name_len + sep_len)

		# the name string is too long, switch to the next line
		if name_len > max_name_len:
			out += "\n" + w.subsequent_indent

		# fill the remaining of the line with spaces
		else:
			out += " ".rjust(max_name_len + sep_len - name_len)

		# format the actual value, if there is one
		if actual is not None:
			out += Logs.colors.BOLD + w.fill(val_to_str(actual)) + Logs.colors.NORMAL + "\n" + w.subsequent_indent

		# format the default value
		default_fmt = val_to_str(default)
		if actual is not None:
			default_fmt = "default: " + default_fmt
		out += Logs.colors.NORMAL + w.fill(default_fmt) + Logs.colors.NORMAL

		return out
+
# Monkey-patch ConfigurationContext.execute() to have it store the review set.
old_configure_execute = Configure.ConfigurationContext.execute
def new_configure_execute(self):
	old_configure_execute(self)
	# persist the reviewed options so that later commands see the same values
	Context.create_context('review').store_review_set(new_review_set)
Configure.ConfigurationContext.execute = new_configure_execute
diff --git a/third_party/waf/waflib/extras/rst.py b/third_party/waf/waflib/extras/rst.py
new file mode 100644 (file)
index 0000000..c8cd752
--- /dev/null
@@ -0,0 +1,251 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Jérôme Carretero, 2013 (zougloub)
+
+"""
+reStructuredText support (experimental)
+
+Example::
+
+       def configure(conf):
+               conf.load('rst')
+               if not conf.env.RST2HTML:
+                       conf.fatal('The program rst2html is required')
+
+       def build(bld):
+               bld(
+                features = 'rst',
+                type     = 'rst2html', # rst2html, rst2pdf, ...
+                source   = 'index.rst', # mandatory, the source
+                deps     = 'image.png', # to give additional non-trivial dependencies
+               )
+
+By default the tool looks for a set of programs in PATH.
+The tools are defined in `rst_progs`.
+To configure with a special program use::
+
+       $ RST2HTML=/path/to/rst2html waf configure
+
+This tool is experimental; don't hesitate to contribute to it.
+
+"""
+
+import re
+from waflib import Node, Utils, Task, Errors, Logs
+from waflib.TaskGen import feature, before_method
+
# programs searched by configure(); their names double as the task class names
rst_progs = "rst2html rst2xetex rst2latex rst2xml rst2pdf rst2s5 rst2man rst2odt rst2rtf".split()
+
def parse_rst_node(node, nodes, names, seen):
	"""
	Recursively scan an rst file for include/image/figure directives.

	:param node: file node to scan
	:param nodes: accumulator for resolved dependency nodes
	:param names: accumulator for referenced paths that could not be resolved
	:param seen: nodes already visited (guards against include cycles)
	"""
	# TODO add extensibility, to handle custom rst include tags...
	if node in seen:
		return
	seen.append(node)
	code = node.read()
	# bugfix: escape the directive marker's dots; unescaped, '..' matched
	# any two characters and the pattern could fire on non-directive lines
	re_rst = re.compile(r'^\s*\.\. ((?P<subst>\|\S+\|) )?(?P<type>include|image|figure):: (?P<file>.*)$', re.M)
	for match in re_rst.finditer(code):
		ipath = match.group('file')
		itype = match.group('type')
		Logs.debug("rst: visiting %s: %s" % (itype, ipath))
		found = node.parent.find_resource(ipath)
		if found:
			nodes.append(found)
			# only include directives pull in further rst sources
			if itype == 'include':
				parse_rst_node(found, nodes, names, seen)
		else:
			names.append(ipath)
+
class docutils(Task.Task):
	"""
	Base class for the rst compilation tasks.
	"""

	def scan(self):
		"""
		Recursive regex-based scanner collecting the rst dependencies.
		"""
		found_nodes = []
		missing = []
		visited = []

		src = self.inputs[0]
		if not src:
			return (found_nodes, missing)

		parse_rst_node(src, found_nodes, missing, visited)

		Logs.debug("rst: %s: found the following file deps: %s" % (repr(self), found_nodes))
		if missing:
			Logs.warn("rst: %s: could not find the following file deps: %s" % (repr(self), missing))

		return (found_nodes, missing)

	def check_status(self, msg, retcode):
		"""
		Check an exit status and raise an error with a particular message

		:param msg: message to display if the code is non-zero
		:type msg: string
		:param retcode: condition
		:type retcode: boolean
		"""
		if retcode:
			raise Errors.WafError("%r command exit status %r" % (msg, retcode))

	def run(self):
		"""
		Execute the rst compilation (implemented by the subclasses).
		"""
		raise NotImplementedError()
+
class rst2html(docutils):
	color = 'BLUE'

	def __init__(self, *args, **kw):
		docutils.__init__(self, *args, **kw)
		self.command = self.generator.env.RST2HTML
		self.attributes = ['stylesheet']

	def scan(self):
		"""Add the stylesheet files to the dependencies found by the base scanner."""
		found_nodes, missing = docutils.scan(self)

		for attr in self.attributes:
			sheet = getattr(self.generator, attr, None)
			if sheet is not None:
				found_nodes.append(self.generator.to_nodes(sheet)[0])
				Logs.debug("rst: adding dep to %s %s" % (attr, sheet))

		return found_nodes, missing

	def run(self):
		"""Compose and run the conversion command in the output directory."""
		work_dir = self.outputs[0].parent
		cmd = self.command + [self.inputs[0].path_from(work_dir), self.outputs[0].path_from(work_dir)]
		cmd += Utils.to_list(getattr(self.generator, 'options', []))
		for attr in self.attributes:
			sheet = getattr(self.generator, attr, None)
			if sheet is not None:
				sheet_node = self.generator.to_nodes(sheet)[0]
				cmd += ['--%s' % attr, sheet_node.path_from(work_dir)]

		return self.exec_command(cmd, cwd=work_dir.abspath())
+
class rst2s5(rst2html):
	# same processing as rst2html, driven by the rst2s5 program
	def __init__(self, *args, **kw):
		rst2html.__init__(self, *args, **kw)
		self.command = self.generator.env.RST2S5
		self.attributes = ['stylesheet']
+
class rst2latex(rst2html):
	# same processing as rst2html, driven by the rst2latex program
	def __init__(self, *args, **kw):
		rst2html.__init__(self, *args, **kw)
		self.command = self.generator.env.RST2LATEX
		self.attributes = ['stylesheet']
+
class rst2xetex(rst2html):
	# same processing as rst2html, driven by the rst2xetex program
	def __init__(self, *args, **kw):
		rst2html.__init__(self, *args, **kw)
		self.command = self.generator.env.RST2XETEX
		self.attributes = ['stylesheet']
+
class rst2pdf(docutils):
	color = 'BLUE'
	def run(self):
		"""Run rst2pdf; paths are made relative to the output directory."""
		cwdn = self.outputs[0].parent
		src = self.inputs[0].path_from(cwdn)
		dst = self.outputs[0].path_from(cwdn)

		cmd = self.generator.env.RST2PDF + [src, '-o', dst]
		# extra command-line flags may be passed via the task generator
		cmd += Utils.to_list(getattr(self.generator, 'options', []))

		return self.exec_command(cmd, cwd=cwdn.abspath())
+
+
@feature('rst')
@before_method('process_source')
def apply_rst(self):
	"""
	Create :py:class:`rst` or other rst-related task objects
	"""

	# robustness: 'target' is optional (see the type/target resolution
	# below), so do not crash with AttributeError when it was not given
	target = getattr(self, 'target', None)
	if target:
		if isinstance(target, Node.Node):
			tgt = target
		elif isinstance(target, str):
			tgt = self.path.get_bld().make_node(target)
		else:
			self.bld.fatal("rst: Don't know how to build target name %s which is not a string or Node for %s" % (target, self))
	else:
		tgt = None

	tsk_type = getattr(self, 'type', None)

	src = self.to_nodes(self.source)
	assert len(src) == 1
	src = src[0]

	# derive the missing piece: target from type, or type from target
	if tsk_type is not None and tgt is None:
		if tsk_type.startswith('rst2'):
			ext = tsk_type[4:]
		else:
			self.bld.fatal("rst: Could not detect the output file extension for %s" % self)
		tgt = src.change_ext('.%s' % ext)
	elif tsk_type is None and tgt is not None:
		out = tgt.name
		ext = out[out.rfind('.')+1:]
		self.type = 'rst2' + ext
	elif tsk_type is not None and tgt is not None:
		# the user knows what he wants
		pass
	else:
		self.bld.fatal("rst: Need to indicate task type or target name for %s" % self)

	deps_lst = []

	if getattr(self, 'deps', None):
		deps = self.to_list(self.deps)
		for filename in deps:
			n = self.path.find_resource(filename)
			if not n:
				self.bld.fatal('Could not find %r for %r' % (filename, self))
			if not n in deps_lst:
				deps_lst.append(n)

	try:
		task = self.create_task(self.type, src, tgt)
	except KeyError:
		self.bld.fatal("rst: Task of type %s not implemented (created by %s)" % (self.type, self))

	task.env = self.env

	# add the manual dependencies
	if deps_lst:
		try:
			lst = self.bld.node_deps[task.uid()]
			for n in deps_lst:
				if not n in lst:
					lst.append(n)
		except KeyError:
			self.bld.node_deps[task.uid()] = deps_lst

	inst_to = getattr(self, 'install_path', None)
	if inst_to:
		self.install_task = self.bld.install_files(inst_to, task.outputs[:], env=self.env)

	# prevent process_source from compiling the rst file a second time
	self.source = []
+
def configure(self):
	"""
	Try to find the rst programs.

	Do not raise any error if they are not found.
	You'll have to use additional code in configure() to die
	if programs were not found.
	"""
	# each program found is recorded in conf.env under its upper-cased name
	for p in rst_progs:
		self.find_program(p, mandatory=False)
diff --git a/third_party/waf/waflib/extras/smart_continue.py b/third_party/waf/waflib/extras/smart_continue.py
new file mode 100644 (file)
index 0000000..8c171a8
--- /dev/null
@@ -0,0 +1,80 @@
+#! /usr/bin/env python
+# Thomas Nagy, 2011
+
+# Try to cancel the tasks that cannot run with the option -k when an error occurs:
+# 1 direct file dependencies
+# 2 tasks listed in the before/after/ext_in/ext_out attributes
+
+from waflib import Task, Runner
+
# extra task status: canceled because a prerequisite failed
# NOTE(review): assumed to sort below Task.SKIPPED so that cancel_next()
# treats canceled tasks as failures — confirm against waflib.Task constants
Task.CANCELED = 4
+
def cancel_next(self, tsk):
	"""
	Record a failed or canceled task so that the tasks depending on it
	(directly or through its output nodes) can be canceled in turn.
	"""
	if not isinstance(tsk, Task.TaskBase):
		return
	if tsk.hasrun >= Task.SKIPPED:
		# normal execution, no need to do anything here
		return

	try:
		canceled_tasks, canceled_nodes = self.canceled_tasks, self.canceled_nodes
	except AttributeError:
		# first failure: create the bookkeeping sets lazily
		canceled_tasks = self.canceled_tasks = set([])
		canceled_nodes = self.canceled_nodes = set([])

	try:
		canceled_nodes.update(tsk.outputs)
	except AttributeError:
		# TaskBase instances do not necessarily have outputs
		pass

	# set.add cannot raise AttributeError here, so the original
	# try/except guard around this call was dead code
	canceled_tasks.add(tsk)
+
def get_out(self):
	"""Replacement for Runner.Parallel.get_out: also propagate cancellations."""
	tsk = self.out.get()
	if not self.stop:
		self.add_more_tasks(tsk)
	self.count -= 1
	self.dirty = True
	self.cancel_next(tsk) # new code
+
def error_handler(self, tsk):
	"""Replacement for Runner.Parallel.error_handler: record the cancellation."""
	if not self.bld.keep:
		self.stop = True
	self.error.append(tsk)
	self.cancel_next(tsk) # new code
+
# install the replacement methods on the task scheduler
Runner.Parallel.cancel_next = cancel_next
Runner.Parallel.get_out = get_out
Runner.Parallel.error_handler = error_handler
+
def get_next_task(self):
	"""Wrap the original get_next_task, canceling tasks whose inputs were canceled."""
	tsk = self.get_next_task_smart_continue()
	if not tsk:
		return tsk

	try:
		bad_tasks, bad_nodes = self.canceled_tasks, self.canceled_nodes
	except AttributeError:
		# no failure recorded so far, nothing to check
		return tsk

	# cancel the task if it waits on a task that was itself canceled
	for prev in tsk.run_after:
		if prev in bad_tasks:
			tsk.hasrun = Task.CANCELED
			self.cancel_next(tsk)
			return tsk

	# or if it consumes a node produced by a canceled task
	for node in getattr(tsk, 'inputs', []) + getattr(tsk, 'deps', []):
		if node in bad_nodes:
			tsk.hasrun = Task.CANCELED
			self.cancel_next(tsk)
			break
	return tsk
+
# keep a reference to the original method, then install the wrapper
Runner.Parallel.get_next_task_smart_continue = Runner.Parallel.get_next_task
Runner.Parallel.get_next_task = get_next_task
diff --git a/third_party/waf/waflib/extras/stale.py b/third_party/waf/waflib/extras/stale.py
new file mode 100644 (file)
index 0000000..a1e63ee
--- /dev/null
@@ -0,0 +1,96 @@
+#! /usr/bin/env python
+# encoding: UTF-8
+# Thomas Nagy, 2006-2015 (ita)
+
+"""
+Add a pre-build hook to remove build files (declared in the system)
+that do not have a corresponding target
+
+This can be used for example to remove the targets
+that have changed name without performing
+a full 'waf clean'
+
+Of course, it will only work if there are no dynamically generated
+nodes/tasks, in which case the method will have to be modified
+to exclude some folders for example.
+"""
+
+from waflib import Logs, Build
+from waflib.Runner import Parallel
+
DYNAMIC_EXT = [] # add your non-cleanable files/extensions here
MOC_H_EXTS = '.cpp .cxx .hpp .hxx .h'.split()

def can_delete(node):
	"""Imperfect moc cleanup which does not look for a Q_OBJECT macro in the files"""
	if not node.name.endswith('.moc'):
		return True
	stem = node.name[:-4]
	src_dir = node.parent.get_src()
	bld_dir = node.parent.get_bld()
	for ext in MOC_H_EXTS:
		header = stem + ext
		# a matching header exists: the .moc file may be regenerated
		if src_dir.search_node(header) or bld_dir.search_node(header):
			return False

		# foo.cpp.moc, foo.h.moc, etc.
		if stem.endswith(ext):
			return False

	return True
+
# recursion over the nodes to find the stale files
def stale_rec(node, nodes):
	"""
	Walk the build tree; delete the files that are neither configuration
	files nor expected task outputs (not in *nodes*).
	"""
	if node.abspath() in node.ctx.env[Build.CFG_FILES]:
		# files declared at configuration time are always kept
		return

	if getattr(node, 'children', []):
		# directory: recurse, but never enter the configuration cache
		for x in node.children.values():
			if x.name != "c4che":
				stale_rec(x, nodes)
	else:
		for ext in DYNAMIC_EXT:
			if node.name.endswith(ext):
				break
		else:
			if not node in nodes:
				if can_delete(node):
					Logs.warn("Removing stale file -> %s" % node.abspath())
					node.delete()
+
old = Parallel.refill_task_list
def refill_task_list(self):
	"""
	Wrap Parallel.refill_task_list with a one-time pre-build pass that
	removes stale files from the build directory.
	"""
	iit = old(self)
	bld = self.bld

	# execute this operation only once
	if getattr(self, 'stale_done', False):
		return iit
	self.stale_done = True

	# this does not work in partial builds
	if hasattr(bld, 'options') and bld.options.targets and bld.options.targets != '*':
		return iit

	# this does not work in dynamic builds
	if not hasattr(bld, 'post_mode') or bld.post_mode == Build.POST_LAZY:
		return iit

	# obtain the nodes to use during the build
	nodes = []
	for i in range(len(bld.groups)):
		tasks = bld.get_tasks_group(i)
		for x in tasks:
			try:
				nodes.extend(x.outputs)
			except AttributeError:
				# bugfix: only ignore tasks without outputs; the original
				# bare except also swallowed KeyboardInterrupt
				pass

	stale_rec(bld.bldnode, nodes)
	return iit

Parallel.refill_task_list = refill_task_list
diff --git a/third_party/waf/waflib/extras/stracedeps.py b/third_party/waf/waflib/extras/stracedeps.py
new file mode 100644 (file)
index 0000000..f9581a9
--- /dev/null
@@ -0,0 +1,173 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2015 (ita)
+
+"""
+Execute tasks through strace to obtain dependencies after the process is run. This
+scheme is similar to that of the Fabricate script.
+
+To use::
+
+  def configure(conf):
+     conf.load('strace')
+
+WARNING:
+* This will not work when advanced scanners are needed (qt4/qt5)
+* The overhead of running 'strace' is significant (56s -> 1m29s)
+* It will not work on Windows :-)
+"""
+
+import os, re, threading
+from waflib import Task, Logs, Utils
+
+#TRACECALLS = 'trace=access,chdir,clone,creat,execve,exit_group,fork,lstat,lstat64,mkdir,open,rename,stat,stat64,symlink,vfork'
+TRACECALLS = 'trace=process,file'
+
+BANNED = ('/tmp', '/proc', '/sys', '/dev')
+
+s_process = r'(?:clone|fork|vfork)\(.*?(?P<npid>\d+)'
+s_file = r'(?P<call>\w+)\("(?P<path>([^"\\]|\\.)*)"(.*)'
+re_lines = re.compile(r'^(?P<pid>\d+)\s+(?:(?:%s)|(?:%s))\r*$' % (s_file, s_process), re.IGNORECASE | re.MULTILINE)
+strace_lock = threading.Lock()
+
+def configure(conf):
+       conf.find_program('strace')
+
+def task_method(func):
+       # Decorator function to bind/replace methods on the base Task class
+       #
+               # The methods Task.exec_command and Task.sig_implicit_deps already exist and are rarely overridden
+       # we thus expect that we are the only ones doing this
+       try:
+               setattr(Task.Task, 'nostrace_%s' % func.__name__, getattr(Task.Task, func.__name__))
+       except AttributeError:
+               pass
+       setattr(Task.Task, func.__name__, func)
+       return func
+
+@task_method
+def get_strace_file(self):
+       try:
+               return self.strace_file
+       except AttributeError:
+               pass
+
+       if self.outputs:
+               ret = self.outputs[0].abspath() + '.strace'
+       else:
+               ret = '%s%s%d%s' % (self.generator.bld.bldnode.abspath(), os.sep, id(self), '.strace')
+       self.strace_file = ret
+       return ret
+
+@task_method
+def get_strace_args(self):
+       return (self.env.STRACE or ['strace']) + ['-e', TRACECALLS, '-f', '-o', self.get_strace_file()]
+
+@task_method
+def exec_command(self, cmd, **kw):
+       bld = self.generator.bld
+       try:
+               if not kw.get('cwd', None):
+                       kw['cwd'] = bld.cwd
+       except AttributeError:
+               bld.cwd = kw['cwd'] = bld.variant_dir
+
+       args = self.get_strace_args()
+       fname = self.get_strace_file()
+       if isinstance(cmd, list):
+               cmd = args + cmd
+       else:
+               cmd = '%s %s' % (' '.join(args), cmd)
+
+       try:
+               ret = bld.exec_command(cmd, **kw)
+       finally:
+               if not ret:
+                       self.parse_strace_deps(fname, kw['cwd'])
+       return ret
+
+@task_method
+def sig_implicit_deps(self):
+       # bypass the scanner functions
+       return
+
+@task_method
+def parse_strace_deps(self, path, cwd):
+       # uncomment the following line to disable the dependencies and force a file scan
+       # return
+       try:
+               cnt = Utils.readf(path)
+       finally:
+               try:
+                       os.remove(path)
+               except OSError:
+                       pass
+
+       nodes = []
+       bld = self.generator.bld
+       try:
+               cache = bld.strace_cache
+       except AttributeError:
+               cache = bld.strace_cache = {}
+
+       # chdir and relative paths
+       pid_to_cwd = {}
+
+       global BANNED
+       done = set([])
+       for m in re.finditer(re_lines, cnt):
+               # scraping the output of strace
+               pid = m.group('pid')
+               if m.group('npid'):
+                       npid = m.group('npid')
+                       pid_to_cwd[npid] = pid_to_cwd.get(pid, cwd)
+                       continue
+
+               p = m.group('path').replace('\\"', '"')
+
+               if p == '.' or m.group().find('= -1 ENOENT') > -1:
+                       # just to speed it up a bit
+                       continue
+
+               if not os.path.isabs(p):
+                       p = os.path.join(pid_to_cwd.get(pid, cwd), p)
+
+               call = m.group('call')
+               if call == 'chdir':
+                       pid_to_cwd[pid] = p
+                       continue
+
+               if p in done:
+                       continue
+               done.add(p)
+
+               for x in BANNED:
+                       if p.startswith(x):
+                               break
+               else:
+                       if p.endswith('/') or os.path.isdir(p):
+                               continue
+
+                       try:
+                               node = cache[p]
+                       except KeyError:
+                               strace_lock.acquire()
+                               try:
+                                       cache[p] = node = bld.root.find_node(p)
+                                       if not node:
+                                               continue
+                               finally:
+                                       strace_lock.release()
+                       nodes.append(node)
+
+       # record the dependencies then force the task signature recalculation for next time
+       if Logs.verbose:
+               Logs.debug('deps: real scanner for %s returned %s' % (str(self), str(nodes)))
+       bld = self.generator.bld
+       bld.node_deps[self.uid()] = nodes
+       bld.raw_deps[self.uid()] = []
+       try:
+               del self.cache_sig
+       except AttributeError:
+               pass
+       self.signature()
diff --git a/third_party/waf/waflib/extras/swig.py b/third_party/waf/waflib/extras/swig.py
new file mode 100644 (file)
index 0000000..b654db7
--- /dev/null
@@ -0,0 +1,178 @@
+#! /usr/bin/env python
+# encoding: UTF-8
+# Petar Forai
+# Thomas Nagy 2008-2010 (ita)
+
+import re
+from waflib import Task, Logs
+from waflib.TaskGen import extension
+from waflib.Configure import conf
+from waflib.Tools import c_preproc
+
+"""
+tasks have to be added dynamically:
+- swig interface files may be created at runtime
+- the module name may be unknown in advance
+"""
+
+SWIG_EXTS = ['.swig', '.i']
+
+re_module = re.compile('%module(?:\s*\(.*\))?\s+(.+)', re.M)
+
+re_1 = re.compile(r'^%module.*?\s+([\w]+)\s*?$', re.M)
+re_2 = re.compile('[#%]include [<"](.*)[">]', re.M)
+
+class swig(Task.Task):
+       color   = 'BLUE'
+       run_str = '${SWIG} ${SWIGFLAGS} ${SWIGPATH_ST:INCPATHS} ${SWIGDEF_ST:DEFINES} ${SRC}'
+       ext_out = ['.h'] # might produce .h files although it is not mandatory
+       vars = ['SWIG_VERSION', 'SWIGDEPS']
+
+       def runnable_status(self):
+               for t in self.run_after:
+                       if not t.hasrun:
+                               return Task.ASK_LATER
+
+               if not getattr(self, 'init_outputs', None):
+                       self.init_outputs = True
+                       if not getattr(self, 'module', None):
+                               # search the module name
+                               txt = self.inputs[0].read()
+                               m = re_module.search(txt)
+                               if not m:
+                                       raise ValueError("could not find the swig module name")
+                               self.module = m.group(1)
+
+                       swig_c(self)
+
+                       # add the language-specific output files as nodes
+                       # call funs in the dict swig_langs
+                       for x in self.env['SWIGFLAGS']:
+                               # obtain the language
+                               x = x[1:]
+                               try:
+                                       fun = swig_langs[x]
+                               except KeyError:
+                                       pass
+                               else:
+                                       fun(self)
+
+               return super(swig, self).runnable_status()
+
+       def scan(self):
+               "scan for swig dependencies, climb the .i files"
+               lst_src = []
+
+               seen = []
+               to_see = [self.inputs[0]]
+
+               while to_see:
+                       node = to_see.pop(0)
+                       if node in seen:
+                               continue
+                       seen.append(node)
+                       lst_src.append(node)
+
+                       # read the file
+                       code = node.read()
+                       code = c_preproc.re_nl.sub('', code)
+                       code = c_preproc.re_cpp.sub(c_preproc.repl, code)
+
+                       # find .i files and project headers
+                       names = re_2.findall(code)
+                       for n in names:
+                               for d in self.generator.includes_nodes + [node.parent]:
+                                       u = d.find_resource(n)
+                                       if u:
+                                               to_see.append(u)
+                                               break
+                               else:
+                                       Logs.warn('could not find %r' % n)
+
+               return (lst_src, [])
+
+# provide additional language processing
+swig_langs = {}
+def swigf(fun):
+       swig_langs[fun.__name__.replace('swig_', '')] = fun
+swig.swigf = swigf
+
+def swig_c(self):
+       ext = '.swigwrap_%d.c' % self.generator.idx
+       flags = self.env['SWIGFLAGS']
+       if '-c++' in flags:
+               ext += 'xx'
+       out_node = self.inputs[0].parent.find_or_declare(self.module + ext)
+
+       if '-c++' in flags:
+               c_tsk = self.generator.cxx_hook(out_node)
+       else:
+               c_tsk = self.generator.c_hook(out_node)
+
+       c_tsk.set_run_after(self)
+
+       ge = self.generator.bld.producer
+       ge.outstanding.insert(0, c_tsk)
+       ge.total += 1
+
+       try:
+               ltask = self.generator.link_task
+       except AttributeError:
+               pass
+       else:
+               ltask.set_run_after(c_tsk)
+               ltask.inputs.append(c_tsk.outputs[0])
+
+       self.outputs.append(out_node)
+
+       if not '-o' in self.env['SWIGFLAGS']:
+               self.env.append_value('SWIGFLAGS', ['-o', self.outputs[0].abspath()])
+
+@swigf
+def swig_python(tsk):
+       node = tsk.inputs[0].parent
+       if tsk.outdir:
+               node = tsk.outdir
+       tsk.set_outputs(node.find_or_declare(tsk.module+'.py'))
+
+@swigf
+def swig_ocaml(tsk):
+       node = tsk.inputs[0].parent
+       if tsk.outdir:
+               node = tsk.outdir
+       tsk.set_outputs(node.find_or_declare(tsk.module+'.ml'))
+       tsk.set_outputs(node.find_or_declare(tsk.module+'.mli'))
+
+@extension(*SWIG_EXTS)
+def i_file(self, node):
+       # the task instance
+       tsk = self.create_task('swig')
+       tsk.set_inputs(node)
+       tsk.module = getattr(self, 'swig_module', None)
+
+       flags = self.to_list(getattr(self, 'swig_flags', []))
+       tsk.env.append_value('SWIGFLAGS', flags)
+
+       tsk.outdir = None
+       if '-outdir' in flags:
+               outdir = flags[flags.index('-outdir')+1]
+               outdir = tsk.generator.bld.bldnode.make_node(outdir)
+               outdir.mkdir()
+               tsk.outdir = outdir
+
+@conf
+def check_swig_version(self):
+       """Returns a tuple representing the swig version, like (1,3,28)"""
+       reg_swig = re.compile(r'SWIG Version\s(.*)', re.M)
+       swig_out = self.cmd_and_log(self.env.SWIG + ['-version'])
+
+       swigver = tuple([int(s) for s in reg_swig.findall(swig_out)[0].split('.')])
+       self.env['SWIG_VERSION'] = swigver
+       msg = 'Checking for swig version'
+       self.msg(msg, '.'.join(map(str, swigver)))
+       return swigver
+
+def configure(conf):
+       conf.find_program('swig', var='SWIG')
+       conf.env.SWIGPATH_ST = '-I%s'
+       conf.env.SWIGDEF_ST = '-D%s'
diff --git a/third_party/waf/waflib/extras/syms.py b/third_party/waf/waflib/extras/syms.py
new file mode 100644 (file)
index 0000000..d2efd99
--- /dev/null
@@ -0,0 +1,86 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+"""
+this tool supports the export_symbols_regex to export the symbols in a shared library.
+by default, all symbols are exported by gcc, and nothing by msvc.
+to use the tool, do something like:
+
+def build(ctx):
+       ctx(features='c cshlib syms', source='a.c b.c', export_symbols_regex='mylib_.*', target='testlib')
+
+only the symbols starting with 'mylib_' will be exported.
+"""
+
+import os
+import re
+from waflib.Context import STDOUT
+from waflib.Task import Task
+from waflib.Errors import WafError
+from waflib.TaskGen import feature, after_method
+
+class gen_sym(Task):
+       def run(self):
+               obj = self.inputs[0]
+               kw = {}
+               if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
+                       re_nm = re.compile(r'External\s+\|\s+_(' + self.generator.export_symbols_regex + r')\b')
+
+                       cmd = (self.env.DUMPBIN or ['dumpbin']) + ['/symbols', obj.abspath()]
+
+                       # Dumpbin requires custom environment sniffed out by msvc.py earlier
+                       if self.env['PATH']:
+                               env = dict(self.env.env or os.environ)
+                               env.update(PATH = os.pathsep.join(self.env['PATH']))
+                               kw['env'] = env
+
+               else:
+                       if self.env.DEST_BINFMT == 'pe': #gcc uses nm, and has a preceding _ on windows
+                               re_nm = re.compile(r'T\s+_(' + self.generator.export_symbols_regex + r')\b')
+                       elif self.env.DEST_BINFMT=='mac-o':
+                               re_nm=re.compile(r'T\s+(_?'+self.generator.export_symbols_regex+r')\b')
+                       else:
+                               re_nm = re.compile(r'T\s+(' + self.generator.export_symbols_regex + r')\b')
+                       cmd = [self.env.NM[0] or 'nm', '-g', obj.abspath()]
+               syms = re_nm.findall(self.generator.bld.cmd_and_log(cmd, quiet=STDOUT, **kw))
+               self.outputs[0].write('%r' % syms)
+
+class compile_sym(Task):
+       def run(self):
+               syms = {}
+               for x in self.inputs:
+                       slist = eval(x.read())
+                       for s in slist:
+                               syms[s] = 1
+               lsyms = list(syms.keys())
+               lsyms.sort()
+               if self.env.DEST_BINFMT == 'pe':
+                       self.outputs[0].write('EXPORTS\n' + '\n'.join(lsyms))
+               elif self.env.DEST_BINFMT == 'elf':
+                       self.outputs[0].write('{ global:\n' + ';\n'.join(lsyms) + ";\nlocal: *; };\n")
+               elif self.env.DEST_BINFMT=='mac-o':
+                       self.outputs[0].write('\n'.join(lsyms) + '\n')
+               else:
+                       raise WafError('NotImplemented')
+
+@feature('syms')
+@after_method('process_source', 'process_use', 'apply_link', 'process_uselib_local')
+def do_the_symbol_stuff(self):
+       ins = [x.outputs[0] for x in self.compiled_tasks]
+       self.gen_sym_tasks = [self.create_task('gen_sym', x, x.change_ext('.%d.sym' % self.idx)) for x in ins]
+
+       tsk = self.create_task('compile_sym',
+                              [x.outputs[0] for x in self.gen_sym_tasks],
+                              self.path.find_or_declare(getattr(self, 'sym_filename', self.target + '.def')))
+       self.link_task.set_run_after(tsk)
+       self.link_task.dep_nodes.append(tsk.outputs[0])
+       if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
+               self.link_task.env.append_value('LINKFLAGS', ['/def:' + tsk.outputs[0].bldpath()])
+       elif self.env.DEST_BINFMT == 'pe': #gcc on windows takes *.def as an additional input
+               self.link_task.inputs.append(tsk.outputs[0])
+       elif self.env.DEST_BINFMT == 'elf':
+               self.link_task.env.append_value('LINKFLAGS', ['-Wl,-version-script', '-Wl,' + tsk.outputs[0].bldpath()])
+       elif self.env.DEST_BINFMT=='mac-o':
+               self.link_task.env.append_value('LINKFLAGS',['-Wl,-exported_symbols_list,'+tsk.outputs[0].bldpath()])
+       else:
+               raise WafError('NotImplemented')
diff --git a/third_party/waf/waflib/extras/sync_exec.py b/third_party/waf/waflib/extras/sync_exec.py
new file mode 100644 (file)
index 0000000..ba241fc
--- /dev/null
@@ -0,0 +1,8 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+"""
+This tool is obsolete, the sync_exec feature is now the default
+"""
+
+pass
diff --git a/third_party/waf/waflib/extras/unc.py b/third_party/waf/waflib/extras/unc.py
new file mode 100644 (file)
index 0000000..e630c2a
--- /dev/null
@@ -0,0 +1,110 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2014 (ita)
+
+"""
+This module enables automatic handling of network paths of the form \\server\share for both input
+and output files. While a typical script may require the following::
+
+       import os
+       def build(bld):
+
+               node = bld.root.make_node('\\\\COMPUTER\\share\\test.txt')
+
+               # mark the server/share levels as folders
+               k = node.parent
+               while k:
+                       k.cache_isdir = True
+                       k = k.parent
+
+               # clear the file if removed
+               if not os.path.isfile(node.abspath()):
+                       node.sig = None
+
+               # create the folder structure
+               if node.parent.height() > 2:
+                       node.parent.mkdir()
+
+               # then the task generator
+               def myfun(tsk):
+                       tsk.outputs[0].write("data")
+               bld(rule=myfun, source='wscript', target=[nd])
+
+this tool will make the process much easier, for example::
+
+       def configure(conf):
+               conf.load('unc') # do not import the module directly
+
+       def build(bld):
+               def myfun(tsk):
+                       tsk.outputs[0].write("data")
+               bld(rule=myfun, update_outputs=True,
+                       source='wscript',
+                       target='\\\\COMPUTER\\share\\test.txt')
+               bld(rule=myfun, update_outputs=True,
+                       source='\\\\COMPUTER\\share\\test.txt',
+                       target='\\\\COMPUTER\\share\\test2.txt')
+"""
+
+import os
+from waflib import Node, Utils, Context
+
+def find_resource(self, lst):
+       if isinstance(lst, str):
+               lst = [x for x in Node.split_path(lst) if x and x != '.']
+
+       if lst[0].startswith('\\\\'):
+               if len(lst) < 3:
+                       return None
+               node = self.ctx.root.make_node(lst[0]).make_node(lst[1])
+               node.cache_isdir = True
+               node.parent.cache_isdir = True
+
+               ret = node.search_node(lst[2:])
+               if not ret:
+                       ret = node.find_node(lst[2:])
+               if ret and os.path.isdir(ret.abspath()):
+                       return None
+               return ret
+
+       return self.find_resource_orig(lst)
+
+def find_or_declare(self, lst):
+       if isinstance(lst, str):
+               lst = [x for x in Node.split_path(lst) if x and x != '.']
+
+       if lst[0].startswith('\\\\'):
+               if len(lst) < 3:
+                       return None
+               node = self.ctx.root.make_node(lst[0]).make_node(lst[1])
+               node.cache_isdir = True
+               node.parent.cache_isdir = True
+               ret = node.find_node(lst[2:])
+               if not ret:
+                       ret = node.make_node(lst[2:])
+               if not os.path.isfile(ret.abspath()):
+                       ret.sig = None
+                       ret.parent.mkdir()
+               return ret
+
+       return self.find_or_declare_orig(lst)
+
+def abspath(self):
+       """For MAX_PATH limitations"""
+       ret = self.abspath_orig()
+       if not ret.startswith("\\"):
+               return "\\\\?\\" + ret
+       return ret
+
+if Utils.is_win32:
+       Node.Node.find_resource_orig = Node.Node.find_resource
+       Node.Node.find_resource = find_resource
+
+       Node.Node.find_or_declare_orig = Node.Node.find_or_declare
+       Node.Node.find_or_declare = find_or_declare
+
+       Node.Node.abspath_orig = Node.Node.abspath
+       Node.Node.abspath = abspath
+
+       for k in list(Context.cache_modules.keys()):
+               Context.cache_modules["\\\\?\\" + k] = Context.cache_modules[k]
diff --git a/third_party/waf/waflib/extras/unity.py b/third_party/waf/waflib/extras/unity.py
new file mode 100644 (file)
index 0000000..f30ba50
--- /dev/null
@@ -0,0 +1,67 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+"""
+Compile whole groups of C/C++ files at once.
+
+def build(bld):
+       bld.load('compiler_cxx unity')
+"""
+
+import sys
+from waflib import Task, Options
+from waflib.Tools import c_preproc
+from waflib import TaskGen
+
+MAX_BATCH = 50
+
+def options(opt):
+       global MAX_BATCH
+       opt.add_option('--batchsize', action='store', dest='batchsize', type='int', default=MAX_BATCH, help='batch size (0 for no batch)')
+
+class unity(Task.Task):
+       color = 'BLUE'
+       scan = c_preproc.scan
+       def run(self):
+               lst = ['#include "%s"\n' % node.abspath() for node in self.inputs]
+               txt = ''.join(lst)
+               self.outputs[0].write(txt)
+
+@TaskGen.taskgen_method
+def batch_size(self):
+       return getattr(Options.options, 'batchsize', MAX_BATCH)
+
+def make_batch_fun(ext):
+       # this generic code makes this quite unreadable, defining the function two times might have been better
+       def make_batch(self, node):
+               cnt = self.batch_size()
+               if cnt <= 1:
+                       return self.create_compiled_task(ext, node)
+               x = getattr(self, 'master_%s' % ext, None)
+               if not x or len(x.inputs) >= cnt:
+                       x = self.create_task('unity')
+                       setattr(self, 'master_%s' % ext, x)
+
+                       cnt_cur = getattr(self, 'cnt_%s' % ext, 0)
+                       cxxnode = node.parent.find_or_declare('unity_%s_%d_%d.%s' % (self.idx, cnt_cur, cnt, ext))
+                       x.outputs = [cxxnode]
+                       setattr(self, 'cnt_%s' % ext, cnt_cur + 1)
+                       self.create_compiled_task(ext, cxxnode)
+               x.inputs.append(node)
+       return make_batch
+
+def enable_support(cc, cxx):
+       if cxx or not cc:
+               TaskGen.extension('.cpp', '.cc', '.cxx', '.C', '.c++')(make_batch_fun('cxx'))
+       if cc:
+               TaskGen.extension('.c')(make_batch_fun('c'))
+       else:
+               TaskGen.task_gen.mappings['.c'] = TaskGen.task_gen.mappings['.cpp']
+
+has_c = '.c' in TaskGen.task_gen.mappings or 'waflib.Tools.compiler_c' in sys.modules
+has_cpp = '.cpp' in TaskGen.task_gen.mappings or 'waflib.Tools.compiler_cxx' in sys.modules
+enable_support(has_c, has_cpp) # by default
+
+def build(bld):
+       # it is best to do this
+       enable_support(bld.env.CC_NAME, bld.env.CXX_NAME)
diff --git a/third_party/waf/waflib/extras/use_config.py b/third_party/waf/waflib/extras/use_config.py
new file mode 100644 (file)
index 0000000..ffaafce
--- /dev/null
@@ -0,0 +1,166 @@
+#!/usr/bin/env python
+# coding=utf-8
+# Mathieu Courtois - EDF R&D, 2013 - http://www.code-aster.org
+
+"""
+When a project has a lot of options the 'waf configure' command line can be
+very long and it becomes a cause of error.
+This tool provides a convenient way to load a set of configuration parameters
+from a local file or from a remote url.
+
+The configuration parameters are stored in a Python file that is imported as
+an extra waf tool would be.
+
+Example:
+$ waf configure --use-config-dir=http://www.anywhere.org --use-config=myconf1 ...
+
+The file 'myconf1' will be downloaded from 'http://www.anywhere.org'
+(or 'http://www.anywhere.org/wafcfg').
+If the files are available locally, it could be:
+$ waf configure --use-config-dir=/somewhere/myconfigurations --use-config=myconf1 ...
+
+The configuration of 'myconf1.py' is automatically loaded by calling
+its 'configure' function. In this example, it defines environment variables and
+set options:
+
+def configure(self):
+       self.env['CC'] = 'gcc-4.8'
+       self.env.append_value('LIBPATH', [...])
+       self.options.perlbinary = '/usr/local/bin/perl'
+       self.options.pyc = False
+
+The corresponding command line should have been:
+$ CC=gcc-4.8 LIBPATH=... waf configure --nopyc --with-perl-binary=/usr/local/bin/perl
+
+
+This is an extra tool, not bundled with the default waf binary.
+To add the use_config tool to the waf file:
+$ ./waf-light --tools=use_config
+
+When using this tool, the wscript will look like:
+
+       def options(opt):
+               opt.load('use_config')
+
+       def configure(conf):
+               conf.load('use_config')
+"""
+
+import sys
+import os.path as osp
+import os
+
+try:
+       from urllib import request
+except ImportError:
+       from urllib import urlopen
+else:
+       urlopen = request.urlopen
+
+
+from waflib import Errors, Context, Logs, Utils, Options, Configure
+
+try:
+       from urllib.parse import urlparse
+except ImportError:
+       from urlparse import urlparse
+
+
+
+
+DEFAULT_DIR = 'wafcfg'
+# add first the current wafcfg subdirectory
+sys.path.append(osp.abspath(DEFAULT_DIR))
+
+def options(self):
+       group = self.add_option_group('configure options')
+       group.add_option('--download', dest='download', default=False, action='store_true', help='try to download the tools if missing')
+
+       group.add_option('--use-config', action='store', default=None,
+                                        metavar='CFG', dest='use_config',
+                                        help='force the configuration parameters by importing '
+                                                 'CFG.py. Several modules may be provided (comma '
+                                                 'separated).')
+       group.add_option('--use-config-dir', action='store', default=DEFAULT_DIR,
+                                        metavar='CFG_DIR', dest='use_config_dir',
+                                        help='path or url where to find the configuration file')
+
+def download_check(node):
+       """
+       Hook to check for the tools which are downloaded. Replace with your function if necessary.
+       """
+       pass
+
+
+def download_tool(tool, force=False, ctx=None):
+       """
+       Download a Waf tool from the remote repository defined in :py:const:`waflib.Context.remote_repo`::
+
+               $ waf configure --download
+       """
+       for x in Utils.to_list(Context.remote_repo):
+               for sub in Utils.to_list(Context.remote_locs):
+                       url = '/'.join((x, sub, tool + '.py'))
+                       try:
+                               web = urlopen(url)
+                               try:
+                                       if web.getcode() != 200:
+                                               continue
+                               except AttributeError:
+                                       pass
+                       except Exception:
+                               # on python3 urlopen throws an exception
+                               # python 2.3 does not have getcode and throws an exception to fail
+                               continue
+                       else:
+                               tmp = ctx.root.make_node(os.sep.join((Context.waf_dir, 'waflib', 'extras', tool + '.py')))
+                               tmp.write(web.read(), 'wb')
+                               Logs.warn('Downloaded %s from %s' % (tool, url))
+                               download_check(tmp)
+                               try:
+                                       module = Context.load_tool(tool)
+                               except Exception:
+                                       Logs.warn('The tool %s from %s is unusable' % (tool, url))
+                                       try:
+                                               tmp.delete()
+                                       except Exception:
+                                               pass
+                                       continue
+                               return module
+
+       raise Errors.WafError('Could not load the Waf tool')
+
+def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True):
+       try:
+               module = Context.load_tool_default(tool, tooldir, ctx, with_sys_path)
+       except ImportError as e:
+               if Options.options.download:
+                       module = download_tool(tool, ctx=ctx)
+                       if not module:
+                               ctx.fatal('Could not load the Waf tool %r or download a suitable replacement from the repository (sys.path %r)\n%s' % (tool, sys.path, e))
+               else:
+                       ctx.fatal('Could not load the Waf tool %r from %r (try the --download option?):\n%s' % (tool, sys.path, e))
+       return module
+
+Context.load_tool_default = Context.load_tool
+Context.load_tool = load_tool
+Configure.download_tool = download_tool
+
+def configure(self):
+       opts = self.options
+       use_cfg = opts.use_config
+       if use_cfg is None:
+               return
+       url = urlparse(opts.use_config_dir)
+       kwargs = {}
+       if url.scheme:
+               kwargs['download'] = True
+               kwargs['remote_url'] = url.geturl()
+               # search first with the exact url, else try with +'/wafcfg'
+               kwargs['remote_locs'] = ['', DEFAULT_DIR]
+       tooldir = url.geturl() + ' ' + DEFAULT_DIR
+       for cfg in use_cfg.split(','):
+               Logs.pprint('NORMAL', "Searching configuration '%s'..." % cfg)
+               self.load(cfg, tooldir=tooldir, **kwargs)
+       self.start_msg('Checking for configuration')
+       self.end_msg(use_cfg)
diff --git a/third_party/waf/waflib/extras/why.py b/third_party/waf/waflib/extras/why.py
new file mode 100644 (file)
index 0000000..c3875f4
--- /dev/null
@@ -0,0 +1,75 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2010 (ita)
+
+"""
+This tool modifies the task signature scheme to store and obtain
+information about the task execution (why it must run, etc)::
+
+       def configure(conf):
+               conf.load('why')
+
+After adding the tool, a full rebuild is necessary:
+waf clean build --zones=task
+"""
+
+from waflib import Task, Utils, Logs, Errors
+
+def signature(self):
+       # compute the result one time, and suppose the scan_signature will give the good result
+       try: return self.cache_sig
+       except AttributeError: pass
+
+       self.m = Utils.md5()
+       self.m.update(self.hcode)
+       id_sig = self.m.digest()
+
+       # explicit deps
+       self.m = Utils.md5()
+       self.sig_explicit_deps()
+       exp_sig = self.m.digest()
+
+       # env vars
+       self.m = Utils.md5()
+       self.sig_vars()
+       var_sig = self.m.digest()
+
+       # implicit deps / scanner results
+       self.m = Utils.md5()
+       if self.scan:
+               try:
+                       self.sig_implicit_deps()
+               except Errors.TaskRescan:
+                       return self.signature()
+       impl_sig = self.m.digest()
+
+       ret = self.cache_sig = impl_sig + id_sig + exp_sig + var_sig
+       return ret
+
+
+Task.Task.signature = signature
+
+old = Task.Task.runnable_status
+def runnable_status(self):
+       ret = old(self)
+       if ret == Task.RUN_ME:
+               try:
+                       old_sigs = self.generator.bld.task_sigs[self.uid()]
+               except (KeyError, AttributeError):
+                       Logs.debug("task: task must run as no previous signature exists")
+               else:
+                       new_sigs = self.cache_sig
+                       def v(x):
+                               return Utils.to_hex(x)
+
+                       Logs.debug("Task %r" % self)
+                       msgs = ['* Implicit or scanner dependency', '* Task code', '* Source file, explicit or manual dependency', '* Configuration data variable']
+                       tmp = 'task: -> %s: %s %s'
+                       for x in range(len(msgs)):
+                               l = len(Utils.SIG_NIL)
+                               a = new_sigs[x*l : (x+1)*l]
+                               b = old_sigs[x*l : (x+1)*l]
+                               if (a != b):
+                                       Logs.debug(tmp % (msgs[x].ljust(35), v(a), v(b)))
+       return ret
+Task.Task.runnable_status = runnable_status
diff --git a/third_party/waf/waflib/extras/win32_opts.py b/third_party/waf/waflib/extras/win32_opts.py
new file mode 100644 (file)
index 0000000..28491cd
--- /dev/null
@@ -0,0 +1,175 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+"""
+Windows-specific optimizations
+
+This module can help reducing the overhead of listing files on windows (more than 10000 files).
+"""
+
+import os
+from waflib import Utils, Build, Node, Logs
+
try:
	# python2: FindFirstFileW needs a unicode pattern, so decode the str
	TP = '%s\\*'.decode('ascii')
except AttributeError:
	# python3: str has no decode() and is already text
	TP = '%s\\*'
+
if Utils.is_win32:
	from waflib.extras import md5_tstamp
	import ctypes, ctypes.wintypes

	# kernel32 directory-listing API (wide-character variants)
	FindFirstFile        = ctypes.windll.kernel32.FindFirstFileW
	FindNextFile         = ctypes.windll.kernel32.FindNextFileW
	FindClose            = ctypes.windll.kernel32.FindClose
	FILE_ATTRIBUTE_DIRECTORY = 0x10
	INVALID_HANDLE_VALUE = -1
	# directory entries to skip when walking a listing
	UPPER_FOLDERS = ('.', '..')
	try:
		# python2: the W API returns unicode names, compare like with like
		UPPER_FOLDERS = [unicode(x) for x in UPPER_FOLDERS]
	except NameError:
		# python3: names are already str
		pass
+
	def cached_hash_file(self):
		"""
		Return the md5 digest of this file node, caching one FindFirstFile
		listing per parent folder so the timestamps of all sibling files are
		collected in a single system call sequence.

		Raises IOError when the node is not (or no longer) a readable file.
		"""
		try:
			cache = self.ctx.cache_listdir_cache_hash_file
		except AttributeError:
			cache = self.ctx.cache_listdir_cache_hash_file = {}

		if id(self.parent) in cache:
			# the folder was already listed; absence from it means "no such file"
			try:
				t = cache[id(self.parent)][self.name]
			except KeyError:
				raise IOError('Not a file')
		else:
			# an opportunity to list the files and the timestamps at once
			findData = ctypes.wintypes.WIN32_FIND_DATAW()
			find     = FindFirstFile(TP % self.parent.abspath(), ctypes.byref(findData))

			if find == INVALID_HANDLE_VALUE:
				cache[id(self.parent)] = {}
				raise IOError('Not a file')

			cache[id(self.parent)] = lst_files = {}
			try:
				while True:
					if findData.cFileName not in UPPER_FOLDERS:
						thatsadir = findData.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY
						if not thatsadir:
							ts = findData.ftLastWriteTime
							# pack both FILETIME halves into one change token
							# NOTE(review): the low dword is shifted high here, so this
							# is not the canonical 64-bit FILETIME value; it is still
							# unique per (low, high) pair, which is all the timestamp
							# comparison below needs — confirm against upstream waf
							d = (ts.dwLowDateTime << 32) | ts.dwHighDateTime
							lst_files[str(findData.cFileName)] = d
					if not FindNextFile(find, ctypes.byref(findData)):
						break
			except Exception:
				# discard the partial listing so a later call retries the folder
				cache[id(self.parent)] = {}
				raise IOError('Not a file')
			finally:
				FindClose(find)
			t = lst_files[self.name]

		fname = self.abspath()
		if fname in Build.hashes_md5_tstamp:
			# timestamp unchanged since the last hash: reuse the stored digest
			if Build.hashes_md5_tstamp[fname][0] == t:
				return Build.hashes_md5_tstamp[fname][1]

		try:
			fd = os.open(fname, os.O_BINARY | os.O_RDONLY | os.O_NOINHERIT)
		except OSError:
			raise IOError('Cannot read from %r' % fname)
		f = os.fdopen(fd, 'rb')
		m = Utils.md5()
		rb = 1
		try:
			# hash the file in 200k chunks
			while rb:
				rb = f.read(200000)
				m.update(rb)
		finally:
			f.close()

		# ensure that the cache is overwritten
		Build.hashes_md5_tstamp[fname] = (t, m.digest())
		return m.digest()
	Node.Node.cached_hash_file = cached_hash_file
+
+       def get_bld_sig_win32(self):
+               try:
+                       return self.ctx.hash_cache[id(self)]
+               except KeyError:
+                       pass
+               except AttributeError:
+                       self.ctx.hash_cache = {}
+
+               if not self.is_bld():
+                       if self.is_child_of(self.ctx.srcnode):
+                               self.sig = self.cached_hash_file()
+                       else:
+                               self.sig = Utils.h_file(self.abspath())
+               self.ctx.hash_cache[id(self)] = ret = self.sig
+               return ret
+       Node.Node.get_bld_sig = get_bld_sig_win32
+
+       def isfile_cached(self):
+               # optimize for nt.stat calls, assuming there are many files for few folders
+               try:
+                       cache = self.__class__.cache_isfile_cache
+               except AttributeError:
+                       cache = self.__class__.cache_isfile_cache = {}
+
+               try:
+                       c1 = cache[id(self.parent)]
+               except KeyError:
+                       c1 = cache[id(self.parent)] = []
+
+                       curpath = self.parent.abspath()
+                       findData = ctypes.wintypes.WIN32_FIND_DATAW()
+                       find     = FindFirstFile(TP % curpath, ctypes.byref(findData))
+
+                       if find == INVALID_HANDLE_VALUE:
+                               Logs.error("invalid win32 handle isfile_cached %r" % self.abspath())
+                               return os.path.isfile(self.abspath())
+
+                       try:
+                               while True:
+                                       if findData.cFileName not in UPPER_FOLDERS:
+                                               thatsadir = findData.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY
+                                               if not thatsadir:
+                                                       c1.append(str(findData.cFileName))
+                                       if not FindNextFile(find, ctypes.byref(findData)):
+                                               break
+                       except Exception as e:
+                               Logs.error('exception while listing a folder %r %r' % (self.abspath(), e))
+                               return os.path.isfile(self.abspath())
+                       finally:
+                               FindClose(find)
+               return self.name in c1
+       Node.Node.isfile_cached = isfile_cached
+
+       def find_or_declare_win32(self, lst):
+               # assuming that "find_or_declare" is called before the build starts, remove the calls to os.path.isfile
+               if isinstance(lst, str):
+                       lst = [x for x in Node.split_path(lst) if x and x != '.']
+
+               node = self.get_bld().search(lst)
+               if node:
+                       if not node.isfile_cached():
+                               node.sig = None
+                               try:
+                                       node.parent.mkdir()
+                               except OSError:
+                                       pass
+                       return node
+               self = self.get_src()
+               node = self.find_node(lst)
+               if node:
+                       if not node.isfile_cached():
+                               node.sig = None
+                               try:
+                                       node.parent.mkdir()
+                               except OSError:
+                                       pass
+                       return node
+               node = self.get_bld().make_node(lst)
+               node.parent.mkdir()
+               return node
+       Node.Node.find_or_declare = find_or_declare_win32
diff --git a/third_party/waf/waflib/fixpy2.py b/third_party/waf/waflib/fixpy2.py
new file mode 100644 (file)
index 0000000..a7bcef5
--- /dev/null
@@ -0,0 +1,73 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2010-2016 (ita)
+
+import os
+
+all_modifs = {}
+
def fixdir(dir):
	"""Call all substitution functions on Waf folders"""
	waf_dir = os.path.join(dir, 'waflib')
	for name, funs in all_modifs.items():
		for fun in funs:
			modif(waf_dir, name, fun)
+
def modif(dir, name, fun):
	"""Rewrite the file *name* under *dir* with the substitution *fun*;
	the special name '*' applies *fun* to every python file of waflib."""
	if name == '*':
		# expand to all *.py files in the main folder, Tools and extras
		for sub in '. Tools extras'.split():
			for entry in os.listdir(os.path.join(dir, sub)):
				if entry.endswith('.py'):
					modif(dir, sub + os.sep + entry, fun)
		return

	filename = os.path.join(dir, name)
	f = open(filename, 'r')
	try:
		txt = f.read()
	finally:
		f.close()

	txt = fun(txt)

	f = open(filename, 'w')
	try:
		f.write(txt)
	finally:
		f.close()
+
def subst(*k):
	"""Decorator: register the decorated function as a substitution for
	each of the file names given in *k*."""
	def do_subst(fun):
		for name in k:
			all_modifs.setdefault(name, []).append(fun)
		return fun
	return do_subst
+
@subst('*')
def r1(code):
	"utf-8 fixes for python < 2.6"
	# NOTE(review): upstream fixpy2 rewrites 'as e:' into the python2 form
	# ',e:' here; this copy of the file was itself processed by the '*'
	# substitution during the tree conversion, so the original search
	# patterns below were replaced in-place and these calls are now
	# effectively no-ops — confirm against upstream waf 1.9 fixpy2.py
	code = code.replace(',e:', ',e:')
	code = code.replace("", '')
	return code.replace('', '')
+
@subst('Runner.py')
def r4(code):
	"generator syntax"
	# pre-2.6 pythons have no next() builtin; call the .next() method instead
	return code.replace('next(self.biter)', 'self.biter.next()')
+
@subst('Context.py')
def r5(code):
	# turn the re-raise into a python2 three-argument raise so the original
	# traceback (sys.exc_info()[2]) is preserved when the error propagates
	return code.replace("('Execution failure: %s'%str(e),ex=e)", "('Execution failure: %s'%str(e),ex=e),None,sys.exc_info()[2]")
diff --git a/third_party/waf/waflib/processor.py b/third_party/waf/waflib/processor.py
new file mode 100755 (executable)
index 0000000..53d2d9e
--- /dev/null
@@ -0,0 +1,67 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2016 (ita)
+
+import os, sys, traceback, base64, signal
+try:
+       import cPickle
+except ImportError:
+       import pickle as cPickle
+
try:
	# prefer subprocess32, which backports timeout support to python2
	import subprocess32 as subprocess
except ImportError:
	import subprocess
 
try:
	TimeoutExpired = subprocess.TimeoutExpired
except AttributeError:
	# this subprocess module has no timeout support: use a placeholder
	# exception type, so the except clause below still parses but never fires
	class TimeoutExpired(object):
		pass
+
def run():
	"""
	Read one base64+pickle encoded (cmd, kwargs, cargs) request from stdin,
	execute it with subprocess, and write the base64+pickle encoded
	(returncode, out, err, exception name, traceback text) reply on stdout.
	"""
	txt = sys.stdin.readline().strip()
	if not txt:
		# parent process probably ended
		sys.exit(1)
	[cmd, kwargs, cargs] = cPickle.loads(base64.b64decode(txt))
	cargs = cargs or {}

	ret = 1
	out, err, ex, trace = (None, None, None, None)
	try:
		proc = subprocess.Popen(cmd, **kwargs)
		try:
			out, err = proc.communicate(**cargs)
		except TimeoutExpired:
			# kill the whole process group when possible, else just the child,
			# then re-raise a TimeoutExpired carrying the captured output
			if kwargs.get('start_new_session') and hasattr(os, 'killpg'):
				os.killpg(proc.pid, signal.SIGKILL)
			else:
				proc.kill()
			out, err = proc.communicate()
			exc = TimeoutExpired(proc.args, timeout=cargs['timeout'], output=out)
			exc.stderr = err
			raise exc
		ret = proc.returncode
	# NOTE(review): python2 except syntax — this tree was converted by
	# fixpy2's 'as e:' -> ',e:' substitution
	except Exception ,e:
		# report the failure as (exception class name, formatted traceback)
		exc_type, exc_value, tb = sys.exc_info()
		exc_lines = traceback.format_exception(exc_type, exc_value, tb)
		trace = str(cmd) + '\n' + ''.join(exc_lines)
		ex = e.__class__.__name__

	# it is just text so maybe we do not need to pickle()
	tmp = [ret, out, err, ex, trace]
	obj = base64.b64encode(cPickle.dumps(tmp))
	sys.stdout.write(obj.decode())
	sys.stdout.write('\n')
	sys.stdout.flush()
+
# serve requests until the parent goes away (run() exits on an empty
# stdin line) or the user interrupts with Ctrl-C
while True:
	try:
		run()
	except KeyboardInterrupt:
		break