#!/usr/bin/env python
-# encoding: ISO8859-1
-# Thomas Nagy, 2005-2015
-
+# encoding: latin-1
+# Thomas Nagy, 2005-2018
+#
"""
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
import os, sys, inspect
-VERSION="1.9.10"
+VERSION="2.0.4"
REVISION="x"
GIT="x"
-INSTALL=''
-C1='#>'
-C2='#6'
-C3='#4'
+INSTALL="x"
+C1='x'
+C2='x'
+C3='x'
cwd = os.getcwd()
join = os.path.join
+if sys.hexversion<0x206000f:
+ raise ImportError('Python >= 2.6 is required to create the waf file')
WAF='waf'
def b(x):
pass
def find_lib():
- return os.path.abspath(os.path.join(os.path.dirname(__file__), '../../third_party/waf'))
+ path = '../../third_party/waf'
+ paths = [path, path+'/waflib']
+ return [os.path.abspath(os.path.join(os.path.dirname(__file__), x)) for x in paths]
wafdir = find_lib()
-sys.path.insert(0, wafdir)
+for p in wafdir:
+ sys.path.insert(0, p)
if __name__ == '__main__':
-
- # TODO: remove these when possible
- from waflib.extras import compat15
+ #import extras.compat15#PRELUDE
import sys
from waflib.Tools import ccroot, c, ar, compiler_c, gcc
sys.modules['compiler_cc'] = compiler_c
sys.modules['gcc'] = gcc
- from waflib import Options
+ from waflib import Options
Options.lockfile = os.environ.get('WAFLOCK', '.lock-wscript')
if os.path.isfile(Options.lockfile) and os.stat(Options.lockfile).st_size == 0:
os.environ['NOCLIMB'] = "1"
Task.classes['cc_link'] = o
from waflib import Scripting
- Scripting.waf_entry_point(cwd, VERSION, wafdir)
+ Scripting.waf_entry_point(cwd, VERSION, wafdir[0])
--- /dev/null
+#!/usr/bin/env python
+# encoding: ISO8859-1
+# Thomas Nagy, 2005-2015
+
+"""
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+3. The name of the author may not be used to endorse or promote products
+ derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+"""
+
+import os, sys, inspect
+
+VERSION="1.9.10"
+REVISION="x"
+GIT="x"
+INSTALL=''
+C1='#>'
+C2='#6'
+C3='#4'
+cwd = os.getcwd()
+join = os.path.join
+
+
+WAF='waf'
+def b(x):
+ return x
+if sys.hexversion>0x300000f:
+ WAF='waf3'
+ def b(x):
+ return x.encode()
+
+def err(m):
+ print(('\033[91mError: %s\033[0m' % m))
+ sys.exit(1)
+
+def unpack_wafdir(dir, src):
+ f = open(src,'rb')
+ c = 'corrupt archive (%d)'
+ while 1:
+ line = f.readline()
+ if not line: err('run waf-light from a folder containing waflib')
+ if line == b('#==>\n'):
+ txt = f.readline()
+ if not txt: err(c % 1)
+ if f.readline() != b('#<==\n'): err(c % 2)
+ break
+ if not txt: err(c % 3)
+ txt = txt[1:-1].replace(b(C1), b('\n')).replace(b(C2), b('\r')).replace(b(C3), b('\x00'))
+
+ import shutil, tarfile
+ try: shutil.rmtree(dir)
+ except OSError: pass
+ try:
+ for x in ('Tools', 'extras'):
+ os.makedirs(join(dir, 'waflib', x))
+ except OSError:
+ err("Cannot unpack waf lib into %s\nMove waf in a writable directory" % dir)
+
+ os.chdir(dir)
+ tmp = 't.bz2'
+ t = open(tmp,'wb')
+ try: t.write(txt)
+ finally: t.close()
+
+ try:
+ t = tarfile.open(tmp)
+ except:
+ try:
+ os.system('bunzip2 t.bz2')
+ t = tarfile.open('t')
+ tmp = 't'
+ except:
+ os.chdir(cwd)
+ try: shutil.rmtree(dir)
+ except OSError: pass
+ err("Waf cannot be unpacked, check that bzip2 support is present")
+
+ try:
+ for x in t: t.extract(x)
+ finally:
+ t.close()
+
+ for x in ('Tools', 'extras'):
+ os.chmod(join('waflib',x), 493)
+
+ if sys.hexversion<0x300000f:
+ sys.path = [join(dir, 'waflib')] + sys.path
+ import fixpy2
+ fixpy2.fixdir(dir)
+
+ os.remove(tmp)
+ os.chdir(cwd)
+
+ try: dir = unicode(dir, 'mbcs')
+ except: pass
+ try:
+ from ctypes import windll
+ windll.kernel32.SetFileAttributesW(dir, 2)
+ except:
+ pass
+
+def test(dir):
+ try:
+ os.stat(join(dir, 'waflib'))
+ return os.path.abspath(dir)
+ except OSError:
+ pass
+
+def find_lib():
+ return os.path.abspath(os.path.join(os.path.dirname(__file__), '../../third_party/waf'))
+
+wafdir = find_lib()
+sys.path.insert(0, wafdir)
+
+if __name__ == '__main__':
+
+ # TODO: remove these when possible
+ from waflib.extras import compat15
+ import sys
+
+ from waflib.Tools import ccroot, c, ar, compiler_c, gcc
+ sys.modules['cc'] = c
+ sys.modules['ccroot'] = ccroot
+ sys.modules['ar'] = ar
+ sys.modules['compiler_cc'] = compiler_c
+ sys.modules['gcc'] = gcc
+
+ from waflib import Options
+ Options.lockfile = os.environ.get('WAFLOCK', '.lock-wscript')
+ if os.path.isfile(Options.lockfile) and os.stat(Options.lockfile).st_size == 0:
+ os.environ['NOCLIMB'] = "1"
+ # there is a single top-level, but libraries must build independently
+ os.environ['NO_LOCK_IN_TOP'] = "1"
+
+ from waflib import Task
+ class o(object):
+ display = None
+ Task.classes['cc_link'] = o
+
+ from waflib import Scripting
+ Scripting.waf_entry_point(cwd, VERSION, wafdir)
+
# handle substitution of variables in .in files
-import re, os
-import Build, sys, Logs
+import sys
+import re
+import os
+from waflib import Build, Logs
from samba_utils import SUBST_VARS_RECURSIVE
def subst_at_vars(task):
# based on suncc.py from waf
import os, optparse
-import Utils, Options, Configure
-import ccroot, ar
-from Configure import conftest
+from waflib import Utils, Options, Configure
+from waflib.Tools import ccroot, ar
+from waflib.Configure import conftest
-from compiler_cc import c_compiler
+from waflib.Tools.compiler_c import c_compiler
c_compiler['default'] = ['gcc', 'generic_cc']
c_compiler['hpux'] = ['gcc', 'generic_cc']
# based on suncc.py from waf
import os, optparse, sys
-import Utils, Options, Configure
-import ccroot, ar
-from Configure import conftest
-import gcc
+from waflib import Utils, Options, Configure
+from waflib.Tools import ccroot, ar
+from waflib.Configure import conftest
+from waflib.Tools import gcc
@conftest
gcc.gcc_modifier_hpux = gcc_modifier_hpux
-from TaskGen import feature, after
+from waflib.TaskGen import feature, after
@feature('cprogram', 'cshlib')
@after('apply_link', 'apply_lib_vars', 'apply_obj_vars')
def hpux_addfullpath(self):
# based on suncc.py from waf
import os, optparse
-import Utils, Options, Configure
-import ccroot, ar
-from Configure import conftest
+from waflib import Utils, Options, Configure
+from waflib.Tools import ccroot, ar
+from waflib.Configure import conftest
-from compiler_cc import c_compiler
+from waflib.Tools.compiler_c import c_compiler
c_compiler['irix'] = ['gcc', 'irixcc']
import sys, random, threading
try: from Queue import Queue
except ImportError: from queue import Queue
-import Utils, Options
-from Constants import EXCEPTION, CRASHED, MAXJOBS, ASK_LATER, SKIPPED, SKIP_ME, SUCCESS
+from waflib import Utils, Options, Errors
+from waflib.TaskGen import EXCEPTION, CRASHED, MAXJOBS, ASK_LATER, SKIPPED, SKIP_ME, SUCCESS
GAP = 15
else:
try:
tsk.post_run()
- except Utils.WafError:
+ except Errors.WafError:
pass
except Exception:
tsk.err_msg = Utils.ex_stack()
# handle substitution of variables in pc files
import os, re, sys
-import Build, Logs
+from waflib import Build, Logs
from samba_utils import SUBST_VARS_RECURSIVE, TO_LIST
def subst_at_vars(task):
# a waf tool to add autoconf-like macros to the configure section
# and for SAMBA_ macros for building libraries, binaries etc
-import Options, Build, os
+import os
+from waflib import Options, Build
from samba_utils import os_path_relpath, TO_LIST, samba_add_onoff_option
from samba_autoconf import library_flags
-Options.Handler.SAMBA3_ADD_OPTION = samba_add_onoff_option
+Options.OptionsContext.SAMBA3_ADD_OPTION = samba_add_onoff_option
def SAMBA3_IS_STATIC_MODULE(bld, module):
'''Check whether module is in static list'''
'''fix the build arguments for s3 build rules to include the
necessary includes, subdir and cflags options '''
s3dir = os.path.join(bld.env.srcdir, 'source3')
- s3reldir = os_path_relpath(s3dir, bld.curdir)
+ s3reldir = os_path_relpath(s3dir, bld.path.abspath())
# the extra_includes list is relative to the source3 directory
extra_includes = [ '.', 'include', 'lib' ]
# functions for handling ABI checking of libraries
-import Options, Utils, os, Logs, samba_utils, sys, Task, fnmatch, re, Build
-from TaskGen import feature, before, after
+import os
+import sys
+import re
+import fnmatch
+
+from waflib import Options, Utils, Logs, Task, Build, Errors
+from waflib.TaskGen import feature, before, after
+import samba_utils
# these type maps cope with platform specific names for common types
# please add new type mappings into the list below
old_sigs = samba_utils.load_file(sig_file)
if old_sigs is None or Options.options.ABI_UPDATE:
if not save_sigs(sig_file, parsed_sigs):
- raise Utils.WafError('Failed to save ABI file "%s"' % sig_file)
+ raise Errors.WafError('Failed to save ABI file "%s"' % sig_file)
Logs.warn('Generated ABI signatures %s' % sig_file)
return
got_error = True
if got_error:
- raise Utils.WafError('ABI for %s has changed - please fix library version then build with --abi-update\nSee http://wiki.samba.org/index.php/Waf#ABI_Checking for more information\nIf you have not changed any ABI, and your platform always gives this error, please configure with --abi-check-disable to skip this check' % libname)
+ raise Errors.WafError('ABI for %s has changed - please fix library version then build with --abi-update\nSee http://wiki.samba.org/index.php/Waf#ABI_Checking for more information\nIf you have not changed any ABI, and your platform always gives this error, please configure with --abi-check-disable to skip this check' % libname)
-t = Task.task_type_from_func('abi_check', abi_check_task, color='BLUE', ext_in='.bin')
+t = Task.task_factory('abi_check', abi_check_task, color='BLUE', ext_in='.bin')
t.quiet = True
# allow "waf --abi-check" to force re-checking the ABI
if '--abi-check' in sys.argv:
# a waf tool to add autoconf-like macros to the configure section
import os, sys
-import Build, Options, preproc, Logs
-from Configure import conf
-from TaskGen import feature
+from waflib import Build, Options, Logs, Context
+from waflib.Configure import conf
+from waflib.TaskGen import feature
+from waflib.Tools import c_preproc as preproc
from samba_utils import TO_LIST, GET_TARGET_TYPE, SET_TARGET_TYPE, unique_list, mkdir_p
missing_headers = set()
if v != [] and v != 0:
conf.env.in_compound = v + 1
return
- conf.check_message_1(msg)
- conf.saved_check_message_1 = conf.check_message_1
- conf.check_message_1 = null_check_message_1
- conf.saved_check_message_2 = conf.check_message_2
- conf.check_message_2 = null_check_message_2
+ conf.start_msg(msg)
+ conf.saved_check_message_1 = conf.start_msg
+ conf.start_msg = null_check_message_1
+ conf.saved_check_message_2 = conf.end_msg
+ conf.end_msg = null_check_message_2
conf.env.in_compound = 1
conf.env.in_compound -= 1
if conf.env.in_compound != 0:
return
- conf.check_message_1 = conf.saved_check_message_1
- conf.check_message_2 = conf.saved_check_message_2
- p = conf.check_message_2
+ conf.start_msg = conf.saved_check_message_1
+ conf.end_msg = conf.saved_check_message_2
+ p = conf.end_msg
if result is True:
p('ok')
elif not result:
cflags.append(extra_cflags)
if local_include:
- cflags.append('-I%s' % conf.curdir)
+ cflags.append('-I%s' % conf.path.abspath())
if not link:
type='nolink'
@conf
def IN_LAUNCH_DIR(conf):
'''return True if this rule is being run from the launch directory'''
- return os.path.realpath(conf.curdir) == os.path.realpath(Options.launch_dir)
-Options.Handler.IN_LAUNCH_DIR = IN_LAUNCH_DIR
+ return os.path.realpath(conf.path.abspath()) == os.path.realpath(Context.launch_dir)
+Options.OptionsContext.IN_LAUNCH_DIR = IN_LAUNCH_DIR
@conf
# when -C is chosen, we will use a private cache and will
# not look into system includes. This roughly matches what
# autoconf does with -C
- cache_path = os.path.join(conf.blddir, '.confcache')
+ cache_path = os.path.join(conf.bldnode.abspath(), '.confcache')
mkdir_p(cache_path)
Options.cache_global = os.environ['WAFCACHE'] = cache_path
else:
# waf build tool for building automatic prototypes from C source
import os
-import Build
+from waflib import Build
from samba_utils import SET_TARGET_TYPE, os_path_relpath
def SAMBA_AUTOPROTO(bld, header, source):
'''rule for samba prototype generation'''
bld.SET_BUILD_GROUP('prototypes')
- relpath = os_path_relpath(bld.curdir, bld.srcnode.abspath())
+ relpath = os_path_relpath(bld.path.abspath(), bld.srcnode.abspath())
name = os.path.join(relpath, header)
SET_TARGET_TYPE(bld, name, 'PROTOTYPE')
t = bld(
# functions to support bundled libraries
import sys
-import Build, Options, Logs
-from Configure import conf
+from waflib import Build, Options, Logs
+from waflib.Configure import conf
from samba_utils import TO_LIST
def PRIVATE_NAME(bld, name, private_extension, private_library):
def BUILTIN_DEFAULT(opt, builtins):
'''set a comma separated default list of builtin libraries for this package'''
- if 'BUILTIN_LIBRARIES_DEFAULT' in Options.options:
+ if 'BUILTIN_LIBRARIES_DEFAULT' in Options.options.__dict__:
return
- Options.options['BUILTIN_LIBRARIES_DEFAULT'] = builtins
-Options.Handler.BUILTIN_DEFAULT = BUILTIN_DEFAULT
+ Options.options.__dict__['BUILTIN_LIBRARIES_DEFAULT'] = builtins
+Options.OptionsContext.BUILTIN_DEFAULT = BUILTIN_DEFAULT
def PRIVATE_EXTENSION_DEFAULT(opt, extension, noextension=''):
'''set a default private library extension'''
- if 'PRIVATE_EXTENSION_DEFAULT' in Options.options:
+ if 'PRIVATE_EXTENSION_DEFAULT' in Options.options.__dict__:
return
- Options.options['PRIVATE_EXTENSION_DEFAULT'] = extension
- Options.options['PRIVATE_EXTENSION_EXCEPTION'] = noextension
-Options.Handler.PRIVATE_EXTENSION_DEFAULT = PRIVATE_EXTENSION_DEFAULT
+ Options.options.__dict__['PRIVATE_EXTENSION_DEFAULT'] = extension
+ Options.options.__dict__['PRIVATE_EXTENSION_EXCEPTION'] = noextension
+Options.OptionsContext.PRIVATE_EXTENSION_DEFAULT = PRIVATE_EXTENSION_DEFAULT
def minimum_library_version(conf, libname, default):
# to test for commonly needed configuration options
import os, shutil, re
-import Build, Configure, Utils, Options, Logs
-from Configure import conf
+from waflib import Build, Configure, Utils, Options, Logs, Errors
+from waflib.Configure import conf
from samba_utils import TO_LIST, ADD_LD_LIBRARY_PATH
def add_option(self, *k, **kw):
'''syntax help: provide the "match" attribute to opt.add_option() so that folders can be added to specific config tests'''
- Options.parser = self
+ Options.OptionsContext.parser = self
match = kw.get('match', [])
if match:
del kw['match']
opt = self.parser.add_option(*k, **kw)
opt.match = match
return opt
-Options.Handler.add_option = add_option
+Options.OptionsContext.add_option = add_option
@conf
def check(self, *k, **kw):
'''Override the waf defaults to inject --with-directory options'''
if not 'env' in kw:
- kw['env'] = self.env.copy()
+ kw['env'] = self.env.derive()
# match the configuration test with specific options, for example:
# --with-libiconv -> Options.options.iconv_open -> "Checking for library iconv"
additional_dirs = []
if 'msg' in kw:
msg = kw['msg']
- for x in Options.Handler.parser.parser.option_list:
+ for x in Options.OptionsContext.parser.parser.option_list:
if getattr(x, 'match', None) and msg in x.match:
d = getattr(Options.options, x.dest, '')
if d:
add_options_dir(additional_dirs, kw['env'])
self.validate_c(kw)
- self.check_message_1(kw['msg'])
+ self.start_msg(kw['msg'])
ret = None
try:
ret = self.run_c_code(*k, **kw)
except Configure.ConfigurationError as e:
- self.check_message_2(kw['errmsg'], 'YELLOW')
+ self.end_msg(kw['errmsg'], 'YELLOW')
if 'mandatory' in kw and kw['mandatory']:
if Logs.verbose > 1:
raise
self.fatal('the configuration failed (see %r)' % self.log.name)
else:
kw['success'] = ret
- self.check_message_2(self.ret_msg(kw['okmsg'], kw))
+ self.end_msg(self.ret_msg(kw['okmsg'], kw))
# success! keep the CPPPATH/LIBPATH
add_options_dir(additional_dirs, self.env)
'''find a directory to run tests in'''
k = 0
while k < 10000:
- dir = os.path.join(conf.blddir, '.conf_check_%d' % k)
+ dir = os.path.join(conf.bldnode.abspath(), '.conf_check_%d' % k)
try:
shutil.rmtree(dir)
except OSError:
# we need to run the program, try to get its result
args = conf.SAMBA_CROSS_ARGS(msg=msg)
- proc = Utils.pproc.Popen([lastprog] + args, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE)
+ proc = Utils.subprocess.Popen([lastprog] + args,
+ stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE)
(out, err) = proc.communicate()
w = conf.log.write
w(str(out))
else:
msg = "perl manpage generation"
- conf.check_message_1(msg)
+ conf.start_msg(msg)
dir = find_config_dir(conf)
""")
back = os.path.abspath('.')
os.chdir(bdir)
- proc = Utils.pproc.Popen(['perl', 'Makefile.PL'],
- stdout=Utils.pproc.PIPE,
- stderr=Utils.pproc.PIPE)
+ proc = Utils.subprocess.Popen(['perl', 'Makefile.PL'],
+ stdout=Utils.subprocess.PIPE,
+ stderr=Utils.subprocess.PIPE)
(out, err) = proc.communicate()
os.chdir(back)
ret = (proc.returncode == 0)
if not ret:
- conf.check_message_2('not found', color='YELLOW')
+ conf.end_msg('not found', color='YELLOW')
return
if section:
man = Utils.readf(os.path.join(bdir,'Makefile'))
m = re.search('MAN%sEXT\s+=\s+(\w+)' % section, man)
if not m:
- conf.check_message_2('not found', color='YELLOW')
+ conf.end_msg('not found', color='YELLOW')
return
ext = m.group(1)
- conf.check_message_2(ext)
+ conf.end_msg(ext)
return ext
- conf.check_message_2('ok')
+ conf.end_msg('ok')
return True
# option not supported by compiler - use a standard list of directories
dirlist = [ '/usr/lib', '/usr/lib64' ]
except:
- raise Utils.WafError('Unexpected error running "%s"' % (cmd))
+ raise Errors.WafError('Unexpected error running "%s"' % (cmd))
else:
dirlist = []
for line in out:
# functions for handling cross-compilation
import os, sys, re, shlex
-import Utils, Logs, Options
-from Configure import conf
+from waflib import Utils, Logs, Options, Errors
+from waflib.Configure import conf
real_Popen = None
f.close()
return (int(m.group(1)), m.group(2))
else:
- raise Utils.WafError("Bad answer format '%s' in %s" % (line, ca_file))
+ raise Errors.WafError("Bad answer format '%s' in %s" % (line, ca_file))
f.close()
return ANSWER_UNKNOWN
-class cross_Popen(Utils.pproc.Popen):
+class cross_Popen(Utils.subprocess.Popen):
'''cross-compilation wrapper for Popen'''
def __init__(*k, **kw):
(obj, args) = k
if conf.env.CROSS_ANSWERS:
if msg is None:
- raise Utils.WafError("Cannot have NULL msg in cross-answers")
+ raise Errors.WafError("Cannot have NULL msg in cross-answers")
ret.extend(['--cross-answers', os.path.join(Options.launch_dir, conf.env.CROSS_ANSWERS), msg])
if ret == []:
- raise Utils.WafError("Cannot cross-compile without either --cross-execute or --cross-answers")
+ raise Errors.WafError("Cannot cross-compile without either --cross-execute or --cross-answers")
return ret
'''check if we have some unanswered questions'''
global cross_answers_incomplete
if conf.env.CROSS_COMPILE and cross_answers_incomplete:
- raise Utils.WafError("Cross answers file %s is incomplete" % conf.env.CROSS_ANSWERS)
+ raise Errors.WafError("Cross answers file %s is incomplete" % conf.env.CROSS_ANSWERS)
return True
import os, sys, re, time
-import Build, Environment, Options, Logs, Utils
-from Logs import debug
-from Configure import conf
+from waflib import Build, Options, Logs, Utils, Errors
+from waflib.Logs import debug
+from waflib.Configure import conf
+from waflib import ConfigSet
from samba_bundled import BUILTIN_LIBRARY
from samba_utils import LOCAL_CACHE, TO_LIST, get_tgt_list, unique_list, os_path_relpath
Logs.warn("WARNING: source %s is in more than one target: %s" % (s, subsystems[s].keys()))
for tname in subsystems[s]:
if len(subsystems[s][tname]) > 1:
- raise Utils.WafError("ERROR: source %s is in more than one subsystem of target '%s': %s" % (s, tname, subsystems[s][tname]))
+ raise Errors.WafError("ERROR: source %s is in more than one subsystem of target '%s': %s" % (s, tname, subsystems[s][tname]))
return True
def save_samba_deps(bld, tgt_list):
'''save the dependency calculations between builds, to make
further builds faster'''
- denv = Environment.Environment()
+ denv = ConfigSet.ConfigSet()
denv.version = savedeps_version
denv.savedeps_inputs = savedeps_inputs
def load_samba_deps(bld, tgt_list):
'''load a previous set of build dependencies if possible'''
- depsfile = os.path.join(bld.bdir, "sambadeps")
- denv = Environment.Environment()
+ depsfile = os.path.join(bld.bldnode.abspath(), "sambadeps")
+ denv = ConfigSet.ConfigSet()
try:
debug('deps: checking saved dependencies')
denv.load_fast(depsfile)
# uses git ls-files to get file lists
import os, sys, tarfile
-import Utils, Scripting, Logs, Options
-from Configure import conf
+from waflib import Utils, Scripting, Logs, Options
+from waflib.Configure import conf
from samba_utils import os_path_relpath
from waflib import Context
if not isinstance(appname, str) or not appname:
# this copes with a mismatch in the calling arguments for dist()
- appname = Utils.g_module.APPNAME
- version = Utils.g_module.VERSION
+ appname = Context.g_module.APPNAME
+ version = Context.g_module.VERSION
if not version:
- version = Utils.g_module.VERSION
+ version = Context.g_module.VERSION
- srcdir = os.path.normpath(os.path.join(os.path.dirname(Utils.g_module.root_path), Utils.g_module.srcdir))
+ srcdir = os.path.normpath(os.path.join(os.path.dirname(Context.g_module.root_path), Context.g_module.srcdir))
if not dist_dirs:
Logs.error('You must use samba_dist.DIST_DIRS() to set which directories to package')
# specialist handling of header files for Samba
import os, re, sys, fnmatch
-import Build, Logs, Utils
+from waflib import Build, Logs, Utils, Errors
from samba_utils import TO_LIST, os_path_relpath
os.unlink(tgt)
sys.stderr.write("%s:%u:Error: unable to resolve public header %s (maybe try one of %s)\n" % (
os.path.relpath(src, os.getcwd()), linenumber, hpath, suggested))
- raise Utils.WafError("Unable to resolve header path '%s' in public header '%s' in directory %s" % (
+ raise Errors.WafError("Unable to resolve header path '%s' in public header '%s' in directory %s" % (
hpath, relsrc, task.env.RELPATH))
infile.close()
outfile.close()
else:
h_name = h
inst_name = os.path.basename(h)
- relpath1 = os_path_relpath(bld.srcnode.abspath(), bld.curdir)
- relpath2 = os_path_relpath(bld.curdir, bld.srcnode.abspath())
+ curdir = bld.path.abspath()
+ relpath1 = os_path_relpath(bld.srcnode.abspath(), curdir)
+ relpath2 = os_path_relpath(curdir, bld.srcnode.abspath())
targetdir = os.path.normpath(os.path.join(relpath1, bld.env.build_public_headers, inst_path))
- if not os.path.exists(os.path.join(bld.curdir, targetdir)):
- raise Utils.WafError("missing source directory %s for public header %s" % (targetdir, inst_name))
+ if not os.path.exists(os.path.join(curdir, targetdir)):
+ raise Errors.WafError("missing source directory %s for public header %s" % (targetdir, inst_name))
target = os.path.join(targetdir, inst_name)
# the source path of the header, relative to the top of the source tree
# library use
import os
-import Utils
-from TaskGen import feature, before, after
+from waflib import Utils, Errors
+from waflib.TaskGen import feature, before, after
from samba_utils import LIB_PATH, MODE_755, install_rpath, build_rpath
@feature('install_bin')
return
if not self.link_task.outputs or not self.link_task.outputs[0]:
- raise Utils.WafError('no outputs found for %s in symlink_bin' % self.name)
+ raise Errors.WafError('no outputs found for %s in symlink_bin' % self.name)
binpath = self.link_task.outputs[0].abspath(self.env)
bldpath = os.path.join(self.bld.env.BUILD_DIRECTORY, self.link_task.outputs[0].name)
# a waf tool to add extension based build patterns for Samba
-import Build
+from waflib import Build
from wafsamba import samba_version_file
def write_version_header(task):
-import Utils
-from Configure import conf
+from waflib import Utils
+from waflib.Configure import conf
done = {}
return
done["done"] = True
conf.find_program('perl', var='PERL', mandatory=mandatory)
- conf.check_tool('perl')
+ conf.load('perl')
path_perl = conf.find_program('perl')
conf.env.PERL_SPECIFIED = (conf.env.PERL != path_perl)
conf.check_perl_version(version)
# waf build tool for building IDL files with pidl
import os
-import Build, Utils
-from TaskGen import feature, before
+from waflib import Build, Utils
+from waflib.TaskGen import feature, before
from samba_utils import SET_TARGET_TYPE, TO_LIST, LOCAL_CACHE
def SAMBA_PIDL(bld, pname, source,
pidl_headers = LOCAL_CACHE(bld, 'PIDL_HEADERS')
pidl_headers[name] = [bld.path.find_or_declare(out_files[table_header_idx])]
- t.more_includes = '#' + bld.path.relpath_gen(bld.srcnode)
+ t.more_includes = '#' + bld.path.path_from(bld.srcnode)
Build.BuildContext.SAMBA_PIDL = SAMBA_PIDL
# waf build tool for building IDL files with pidl
import os
-import Build, Logs, Utils, Configure
-from Configure import conf
+from waflib import Build, Logs, Utils, Configure, Errors
+from waflib.Configure import conf
@conf
def SAMBA_CHECK_PYTHON(conf, mandatory=True, version=(2,4,2)):
interpreters = []
if conf.env['EXTRA_PYTHON']:
- conf.all_envs['extrapython'] = conf.env.copy()
+ conf.all_envs['extrapython'] = conf.env.derive()
conf.setenv('extrapython')
conf.env['PYTHON'] = conf.env['EXTRA_PYTHON']
conf.env['IS_EXTRA_PYTHON'] = 'yes'
conf.find_program('python', var='PYTHON', mandatory=True)
- conf.check_tool('python')
+ conf.load('python')
try:
conf.check_python_version((3, 3, 0))
except Exception:
conf.setenv('default')
conf.find_program('python', var='PYTHON', mandatory=mandatory)
- conf.check_tool('python')
+ conf.load('python')
path_python = conf.find_program('python')
conf.env.PYTHON_SPECIFIED = (conf.env.PYTHON != path_python)
conf.check_python_version(version)
def SAMBA_CHECK_PYTHON_HEADERS(conf, mandatory=True):
if conf.env.disable_python:
if mandatory:
- raise Utils.WafError("Cannot check for python headers when "
+ raise Errors.WafError("Cannot check for python headers when "
"--disable-python specified")
conf.msg("python headers", "Check disabled due to --disable-python")
if conf.env['EXTRA_PYTHON']:
extraversion = conf.all_envs['extrapython']['PYTHON_VERSION']
if extraversion == conf.env['PYTHON_VERSION']:
- raise Utils.WafError("extrapython %s is same as main python %s" % (
+ raise Errors.WafError("extrapython %s is same as main python %s" % (
extraversion, conf.env['PYTHON_VERSION']))
else:
conf.msg("python headers", "using cache")
def _check_python_headers(conf, mandatory):
try:
- Configure.ConfigurationError
+ conf.errors.ConfigurationError
conf.check_python_headers()
- except Configure.ConfigurationError:
+ except conf.errors.ConfigurationError:
if mandatory:
raise
# functions to support third party libraries
import os
-import Utils, Build
-from Configure import conf
+from waflib import Utils, Build, Context
+from waflib.Configure import conf
@conf
def CHECK_FOR_THIRD_PARTY(conf):
- return os.path.exists(os.path.join(Utils.g_module.srcdir, 'third_party'))
+ return os.path.exists(os.path.join(Context.g_module.srcdir, 'third_party'))
Build.BuildContext.CHECK_FOR_THIRD_PARTY = CHECK_FOR_THIRD_PARTY
import os, sys, re, fnmatch, shlex, inspect
from optparse import SUPPRESS_HELP
-from waflib import Build, Options, Utils, Task, Logs, Configure, Errors
-from TaskGen import feature, before, after
-from Configure import ConfigurationContext
-from Logs import debug
+from waflib import Build, Options, Utils, Task, Logs, Configure, Errors, Context
+from waflib.TaskGen import feature, before, after
+from waflib.Configure import ConfigurationContext
+from waflib.Logs import debug
+from waflib import ConfigSet
# TODO: make this a --option
LIB_PATH="shared"
'''set the target type of a target'''
cache = LOCAL_CACHE(ctx, 'TARGET_TYPE')
if target in cache and cache[target] != 'EMPTY':
- Logs.error("ERROR: Target '%s' in directory %s re-defined as %s - was %s" % (target, ctx.curdir, value, cache[target]))
+ Logs.error("ERROR: Target '%s' in directory %s re-defined as %s - was %s" % (target, ctx.path.abspath(), value, cache[target]))
sys.exit(1)
LOCAL_CACHE_SET(ctx, 'TARGET_TYPE', target, value)
- debug("task_gen: Target '%s' created of type '%s' in %s" % (target, value, ctx.curdir))
+ debug("task_gen: Target '%s' created of type '%s' in %s" % (target, value, ctx.path.abspath()))
return True
def ASSERT(ctx, expression, msg):
'''a build assert call'''
if not expression:
- raise Utils.WafError("ERROR: %s\n" % msg)
+ raise Errors.WafError("ERROR: %s\n" % msg)
Build.BuildContext.ASSERT = ASSERT
def ADD_COMMAND(opt, name, function):
'''add a new top level command to waf'''
- Utils.g_module.__dict__[name] = function
+ Context.g_module.__dict__[name] = function
opt.name = function
-Options.Handler.ADD_COMMAND = ADD_COMMAND
+Options.OptionsContext.ADD_COMMAND = ADD_COMMAND
@feature('c', 'cc', 'cshlib', 'cprogram')
if re.match('\$\{\w+\}', v):
vname = v[2:-1]
if not vname in env:
- raise KeyError("Failed to find variable %s in %s" % (vname, string))
+ raise KeyError("Failed to find variable %s in %s in env %s <%s>" % (vname, string, env.__class__, str(env)))
v = env[vname]
if isinstance(v, list):
v = ' '.join(v)
if not isinstance(varstr, str):
return varstr
- import Environment
- env = Environment.Environment()
+ env = ConfigSet.ConfigSet()
ret = varstr
    # substitute on user supplied dict if available
if vars is not None:
def RUN_PYTHON_TESTS(testfiles, pythonpath=None, extra_env=None):
env = LOAD_ENVIRONMENT()
if pythonpath is None:
- pythonpath = os.path.join(Utils.g_module.blddir, 'python')
+ pythonpath = os.path.join(Context.g_module.blddir, 'python')
result = 0
for interp in env.python_interpreters:
if not isinstance(interp, str):
# Try to use MD5 function. In FIPS mode this will cause an exception
foo = md5.md5('abcd')
except:
- import Constants
- Constants.SIG_NIL = hash('abcd')
+ Context.SIG_NIL = hash('abcd')
class replace_md5(object):
def __init__(self):
self.val = None
def LOAD_ENVIRONMENT():
'''load the configuration environment, allowing access to env vars
from new commands'''
- import Environment
- env = Environment.Environment()
+ env = ConfigSet.ConfigSet()
try:
- env.load('bin/c4che/default_cache.py')
+ p = os.path.join(Context.g_module.out, 'c4che/default_cache.py')
+ env.load(p)
except (OSError, IOError):
pass
return env
def IS_NEWER(bld, file1, file2):
'''return True if file1 is newer than file2'''
- t1 = os.stat(os.path.join(bld.curdir, file1)).st_mtime
- t2 = os.stat(os.path.join(bld.curdir, file2)).st_mtime
+ curdir = bld.path.abspath()
+ t1 = os.stat(os.path.join(curdir, file1)).st_mtime
+ t2 = os.stat(os.path.join(curdir, file2)).st_mtime
return t1 > t2
Build.BuildContext.IS_NEWER = IS_NEWER
'''recurse into a directory, relative to the curdir or top level'''
try:
visited_dirs = ctx.visited_dirs
- except:
+ except AttributeError:
visited_dirs = ctx.visited_dirs = set()
- d = os.path.join(ctx.curdir, directory)
+ d = os.path.join(ctx.path.abspath(), directory)
if os.path.exists(d):
abspath = os.path.abspath(d)
else:
- abspath = os.path.abspath(os.path.join(Utils.g_module.srcdir, directory))
+ abspath = os.path.abspath(os.path.join(Context.g_module.srcdir, directory))
ctxclass = ctx.__class__.__name__
key = ctxclass + ':' + abspath
if key in visited_dirs:
# already done it
return
visited_dirs.add(key)
- relpath = os_path_relpath(abspath, ctx.curdir)
+ relpath = os_path_relpath(abspath, ctx.path.abspath())
+ if ctxclass in ['tmp', 'OptionsContext', 'ConfigurationContext', 'BuildContext']:
+ return ctx.recurse(relpath)
if 'waflib.extras.compat15' in sys.modules:
return ctx.recurse(relpath)
- if ctxclass == 'Handler':
- return ctx.sub_options(relpath)
- if ctxclass == 'ConfigurationContext':
- return ctx.sub_config(relpath)
- if ctxclass == 'BuildContext':
- return ctx.add_subdirs(relpath)
- Logs.error('Unknown RECURSE context class', ctxclass)
+ Logs.error('Unknown RECURSE context class: {}'.format(ctxclass))
raise
-Options.Handler.RECURSE = RECURSE
+Options.OptionsContext.RECURSE = RECURSE
Build.BuildContext.RECURSE = RECURSE
gr = opt.add_option_group(name)
option_groups[name] = gr
return gr
-Options.Handler.option_group = option_group
+Options.OptionsContext.option_group = option_group
def save_file(filename, contents, create_dir=False):
def reconfigure(ctx):
'''rerun configure if necessary'''
- import Configure, samba_wildcard, Scripting
if not os.path.exists(".lock-wscript"):
- raise Utils.WafError('configure has not been run')
+ raise Errors.WafError('configure has not been run')
+ import samba_wildcard
bld = samba_wildcard.fake_build_environment()
Configure.autoconfig = True
Scripting.check_configured(bld)
tgt_list.append(t)
return tgt_list
-from Constants import WSCRIPT_FILE
+from waflib.Context import WSCRIPT_FILE
def PROCESS_SEPARATE_RULE(self, rule):
''' cause waf to process additional script based on `rule'.
    You should have a file named wscript_<stage>_rule in the current directory
stage = 'configure'
elif isinstance(self, Build.BuildContext):
stage = 'build'
- file_path = os.path.join(self.curdir, WSCRIPT_FILE+'_'+stage+'_'+rule)
- txt = load_file(file_path)
- if txt:
- dc = {'ctx': self}
- if getattr(self.__class__, 'pre_recurse', None):
- dc = self.pre_recurse(txt, file_path, self.curdir)
- exec(compile(txt, file_path, 'exec'), dc)
- if getattr(self.__class__, 'post_recurse', None):
- dc = self.post_recurse(txt, file_path, self.curdir)
+ file_path = os.path.join(self.path.abspath(), WSCRIPT_FILE+'_'+stage+'_'+rule)
+ node = self.root.find_node(file_path)
+ if node:
+ try:
+ cache = self.recurse_cache
+ except AttributeError:
+ cache = self.recurse_cache = {}
+ if node not in cache:
+ cache[node] = True
+ self.pre_recurse(node)
+ try:
+ function_code = node.read('rU', None)
+ exec(compile(function_code, node.abspath(), 'exec'), self.exec_dict)
+ finally:
+ self.post_recurse(node)
Build.BuildContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE
ConfigurationContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE
default=default)
opt.add_option(without_val, help=SUPPRESS_HELP, action="store_false",
dest=dest)
-Options.Handler.samba_add_onoff_option = samba_add_onoff_option
+Options.OptionsContext.samba_add_onoff_option = samba_add_onoff_option
import os
-import Utils
+from waflib import Utils, Context
import samba_utils
from samba_git import find_git
env = samba_utils.LOAD_ENVIRONMENT()
version = samba_version_file("./VERSION", ".", env, is_install=is_install)
- Utils.g_module.VERSION = version.STRING
+ Context.g_module.VERSION = version.STRING
return version
# compatibility layer for building with more recent waf versions
import os, shlex, sys
-import Build, Configure, Node, Utils, Options, Logs
+from waflib import Build, Configure, Node, Utils, Options, Logs
from waflib import ConfigSet
-from TaskGen import feature, after
-from Configure import conf, ConfigurationContext
+from waflib.TaskGen import feature, after
+from waflib.Configure import conf, ConfigurationContext
from waflib.Tools import bison, flex
sys.modules['bison'] = bison
Configure.ConfigurationContext.find_program_old = Configure.ConfigurationContext.find_program
Configure.ConfigurationContext.find_program = find_program_samba
-def PROCESS_SEPARATE_RULE(self, rule):
- ''' cause waf to process additional script based on `rule'.
- You should have file named wscript_<stage>_rule in the current directory
- where stage is either 'configure' or 'build'
- '''
- stage = ''
- if isinstance(self, Configure.ConfigurationContext):
- stage = 'configure'
- elif isinstance(self, Build.BuildContext):
- stage = 'build'
- script = self.path.find_node('wscript_'+stage+'_'+rule)
- if script:
- txt = script.read()
- bld = self
- conf = self
- ctx = self
- dc = {'ctx': self, 'conf': self, 'bld': self}
- if getattr(self.__class__, 'pre_recurse', None):
- dc = self.pre_recurse(script)
- exec(compile(txt, script.abspath(), 'exec'), dc)
- if getattr(self.__class__, 'post_recurse', None):
- dc = self.post_recurse(script)
-
-Build.BuildContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE
-ConfigurationContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE
-
Build.BuildContext.ENFORCE_GROUP_ORDERING = Utils.nada
Build.BuildContext.AUTOCLEANUP_STALE_FILES = Utils.nada
additional_dirs = []
if 'msg' in kw:
msg = kw['msg']
- for x in Options.parser.parser.option_list:
+ for x in Options.OptionsContext.parser.parser.option_list:
if getattr(x, 'match', None) and msg in x.match:
d = getattr(Options.options, x.dest, '')
if d:
kw['mandatory'] = False
kw['global_define'] = True
return self.check_cfg(*k, **kw)
+
+def cmd_output(cmd, **kw):
+
+ silent = False
+ if 'silent' in kw:
+ silent = kw['silent']
+ del(kw['silent'])
+
+ if 'e' in kw:
+ tmp = kw['e']
+ del(kw['e'])
+ kw['env'] = tmp
+
+ kw['shell'] = isinstance(cmd, str)
+ kw['stdout'] = Utils.subprocess.PIPE
+ if silent:
+ kw['stderr'] = Utils.subprocess.PIPE
+
+ try:
+ p = Utils.subprocess.Popen(cmd, **kw)
+ output = p.communicate()[0]
+ except OSError as e:
+ raise ValueError(str(e))
+
+ if p.returncode:
+ if not silent:
+ msg = "command execution failed: %s -> %r" % (cmd, str(output))
+ raise ValueError(msg)
+ output = ''
+ return output
+Utils.cmd_output = cmd_output
# based on playground/evil in the waf svn tree
import os, datetime, fnmatch
-import Scripting, Utils, Options, Logs, Environment
-from Constants import SRCDIR, BLDDIR
+from waflib import Scripting, Utils, Options, Logs, Errors
+from waflib import ConfigSet
from samba_utils import LOCAL_CACHE, os_path_relpath
def run_task(t, k):
'''run a single build task'''
ret = t.run()
if ret:
- raise Utils.WafError("Failed to build %s: %u" % (k, ret))
+ raise Errors.WafError("Failed to build %s: %u" % (k, ret))
def run_named_build_task(cmd):
if not found:
- raise Utils.WafError("Unable to find build target matching %s" % cmd)
+ raise Errors.WafError("Unable to find build target matching %s" % cmd)
def rewrite_compile_targets():
def fake_build_environment(info=True, flush=False):
"""create all the tasks for the project, but do not run the build
return the build context in use"""
- bld = getattr(Utils.g_module, 'build_context', Utils.Context)()
+ bld = getattr(Context.g_module, 'build_context', Utils.Context)()
bld = Scripting.check_configured(bld)
Options.commands['install'] = False
bld.is_install = 0 # False
try:
- proj = Environment.Environment(Options.lockfile)
+ proj = ConfigSet.ConfigSet(Options.lockfile)
except IOError:
- raise Utils.WafError("Project not configured (run 'waf configure' first)")
+ raise Errors.WafError("Project not configured (run 'waf configure' first)")
- bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
bld.load_envs()
if info:
Logs.info("Waf: Entering directory `%s'" % bld.bldnode.abspath())
- bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])
+ bld.add_subdirs([os.path.split(Context.g_module.root_path)[0]])
bld.pre_build()
if flush:
to exclude some folders for example.
"""
-import Logs, Build, os, samba_utils, Options, Utils
+import Logs, Build, os, samba_utils, Options, Utils, Errors
from Runner import Parallel
old_refill_task_list = Parallel.refill_task_list
# paranoia
if bin_base[-4:] != '/bin':
- raise Utils.WafError("Invalid bin base: %s" % bin_base)
+ raise Errors.WafError("Invalid bin base: %s" % bin_base)
# obtain the expected list of files
expected = []
# using nm, producing a set of exposed defined/undefined symbols
import os, re, subprocess
-import Utils, Build, Options, Logs
-from Logs import debug
+from waflib import Utils, Build, Options, Logs, Errors
+from waflib.Logs import debug
from samba_utils import TO_LIST, LOCAL_CACHE, get_tgt_list, os_path_relpath
# these are the data structures used in symbols.py:
if dep2 == name and t.in_library != t2.in_library:
Logs.warn("WARNING: mutual dependency %s <=> %s" % (name, real_name(t2.sname)))
Logs.warn("Libraries should match. %s != %s" % (t.in_library, t2.in_library))
- # raise Utils.WafError("illegal mutual dependency")
+ # raise Errors.WafError("illegal mutual dependency")
def check_syslib_collisions(bld, tgt_list):
Logs.error("ERROR: Target '%s' has symbols '%s' which is also in syslib '%s'" % (t.sname, common, lib))
has_error = True
if has_error:
- raise Utils.WafError("symbols in common with system libraries")
+ raise Errors.WafError("symbols in common with system libraries")
def check_dependencies(bld, t):
why = Options.options.WHYNEEDED.split(":")
if len(why) != 2:
- raise Utils.WafError("usage: WHYNEEDED=TARGET:DEPENDENCY")
+ raise Errors.WafError("usage: WHYNEEDED=TARGET:DEPENDENCY")
target = why[0]
subsystem = why[1]
else:
libnames.append(lib)
if fail_on_error:
- raise Utils.WafError("%s: Symbol %s linked in multiple libraries %s" % (binname, sym, libnames))
+ raise Errors.WafError("%s: Symbol %s linked in multiple libraries %s" % (binname, sym, libnames))
else:
print("%s: Symbol %s linked in multiple libraries %s" % (binname, sym, libnames))
# based on suncc.py from waf
import os, optparse
-import Utils, Options, Configure
-import ccroot, ar
-from Configure import conftest
+from waflib import Utils, Options, Configure
+from waflib.Tools import ccroot, ar
+from waflib.Configure import conftest
-from compiler_cc import c_compiler
+from waflib.Tools.compiler_c import c_compiler
c_compiler['osf1V'] = ['gcc', 'tru64cc']
# a waf tool to add autoconf-like macros to the configure section
# and for SAMBA_ macros for building libraries, binaries etc
-import Build, os, sys, Options, Task, Utils, cc, TaskGen, fnmatch, re, shutil, Logs, Constants
-from Configure import conf
-from Logs import debug
+import os, sys, re, shutil, fnmatch
+from waflib import Build, Options, Task, Utils, TaskGen, Logs, Context, Errors
+from waflib.Configure import conf
+from waflib.Logs import debug
from samba_utils import SUBST_VARS_RECURSIVE
TaskGen.task_gen.apply_verif = Utils.nada
os.environ['PYTHONUNBUFFERED'] = '1'
-if Constants.HEXVERSION not in (0x105019, 0x1090a00):
+if Context.HEXVERSION not in (0x2000400,):
Logs.error('''
Please use the version of waf that comes with Samba, not
a system installed version. See http://wiki.samba.org/index.php/Waf
call the right version of waf.''')
sys.exit(1)
-
@conf
def SAMBA_BUILD_ENV(conf):
'''create the samba build environment'''
- conf.env.BUILD_DIRECTORY = conf.blddir
- mkdir_p(os.path.join(conf.blddir, LIB_PATH))
- mkdir_p(os.path.join(conf.blddir, LIB_PATH, "private"))
- mkdir_p(os.path.join(conf.blddir, "modules"))
- mkdir_p(os.path.join(conf.blddir, 'python/samba/dcerpc'))
+ conf.env.BUILD_DIRECTORY = getattr(Context.g_module, Context.OUT)
+ mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, LIB_PATH))
+ mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, LIB_PATH, "private"))
+ mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, "modules"))
+ mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, 'python/samba/dcerpc'))
# this allows all of the bin/shared and bin/python targets
# to be expressed in terms of build directory paths
- mkdir_p(os.path.join(conf.blddir, 'default'))
+ mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, 'default'))
for (source, target) in [('shared', 'shared'), ('modules', 'modules'), ('python', 'python_modules')]:
- link_target = os.path.join(conf.blddir, 'default/' + target)
+ link_target = os.path.join(conf.env.BUILD_DIRECTORY, 'default/' + target)
if not os.path.lexists(link_target):
os.symlink('../' + source, link_target)
# get perl to put the blib files in the build directory
- blib_bld = os.path.join(conf.blddir, 'default/pidl/blib')
- blib_src = os.path.join(conf.srcdir, 'pidl/blib')
+ blib_bld = os.path.join(conf.env.BUILD_DIRECTORY, 'default/pidl/blib')
+ blib_src = os.path.join(conf.srcnode.abspath(), 'pidl/blib')
mkdir_p(blib_bld + '/man1')
mkdir_p(blib_bld + '/man3')
if os.path.islink(blib_src):
public_headers = None
if private_library and public_headers:
- raise Utils.WafError("private library '%s' must not have public header files" %
+ raise Errors.WafError("private library '%s' must not have public header files" %
libname)
if LIB_MUST_BE_PRIVATE(bld, libname):
# we don't want any public libraries without version numbers
if (not private_library and target_type != 'PYTHON' and not realname):
if vnum is None and soname is None:
- raise Utils.WafError("public library '%s' must have a vnum" %
+ raise Errors.WafError("public library '%s' must have a vnum" %
libname)
if pc_files is None:
- raise Utils.WafError("public library '%s' must have pkg-config file" %
+ raise Errors.WafError("public library '%s' must have pkg-config file" %
libname)
if public_headers is None and not bld.env['IS_EXTRA_PYTHON']:
- raise Utils.WafError("public library '%s' must have header files" %
+ raise Errors.WafError("public library '%s' must have header files" %
libname)
if bundled_name is not None:
vscript = None
if bld.env.HAVE_LD_VERSION_SCRIPT:
if private_library:
- version = "%s_%s" % (Utils.g_module.APPNAME, Utils.g_module.VERSION)
+ version = "%s_%s" % (Context.g_module.APPNAME, Context.g_module.VERSION)
elif vnum:
version = "%s_%s" % (libname, vnum)
else:
fullpath = bld.path.find_or_declare(fullname)
vscriptpath = bld.path.find_or_declare(vscript)
if not fullpath:
- raise Utils.WafError("unable to find fullpath for %s" % fullname)
+ raise Errors.WafError("unable to find fullpath for %s" % fullname)
if not vscriptpath:
- raise Utils.WafError("unable to find vscript path for %s" % vscript)
+ raise Errors.WafError("unable to find vscript path for %s" % vscript)
bld.add_manual_dependency(fullpath, vscriptpath)
if bld.is_install:
# also make the .inst file depend on the vscript
target = os.path.join(installdir, iname)
tgtdir = os.path.dirname(os.path.join(bld.srcnode.abspath(bld.env), '..', target))
mkdir_p(tgtdir)
- link_src = os.path.normpath(os.path.join(bld.curdir, s))
+ link_src = os.path.normpath(os.path.join(bld.path.abspath(), s))
link_dst = os.path.join(tgtdir, os.path.basename(iname))
if os.path.islink(link_dst) and os.readlink(link_dst) == link_src:
continue
if not path:
return []
- destpath = bld.get_install_path(path, env)
+ destpath = bld.EXPAND_VARIABLES(path)
if bld.is_install > 0:
if not os.path.isdir(destpath):
os.chmod(destpath, chmod)
except OSError as e:
if not os.path.isdir(destpath):
- raise Utils.WafError("Cannot create the folder '%s' (error: %s)" % (path, e))
+ raise Errors.WafError("Cannot create the folder '%s' (error: %s)" % (path, e))
Build.BuildContext.INSTALL_DIR = INSTALL_DIR
def INSTALL_DIRS(bld, destdir, dirs, chmod=0o755, env=None):
# this is a base set of waf rules that everything else pulls in first
import os, sys
-import wafsamba, Configure, Logs, Options, Utils
+from waflib import Configure, Logs, Options, Utils, Context, Errors
+import wafsamba
from samba_utils import os_path_relpath
from optparse import SUPPRESS_HELP
if '--enable-auto-reconfigure' in sys.argv:
Configure.autoconfig = 'clobber'
-def set_options(opt):
- opt.tool_options('compiler_cc')
+def default_value(option, default=''):
+ if option in Options.options.__dict__:
+ return Options.options.__dict__[option]
+ return default
- opt.tool_options('gnu_dirs')
+def options(opt):
+ opt.load('compiler_cc')
+
+ opt.load('gnu_dirs')
gr = opt.option_group('library handling options')
help=("comma separated list of normally public libraries to build instead as private libraries. May include !LIBNAME to disable making a library private. Can be 'NONE' or 'ALL' [auto]"),
action="store", dest='PRIVATE_LIBS', default='')
- extension_default = Options.options['PRIVATE_EXTENSION_DEFAULT']
+ extension_default = default_value('PRIVATE_EXTENSION_DEFAULT')
gr.add_option('--private-library-extension',
help=("name extension for private libraries [%s]" % extension_default),
action="store", dest='PRIVATE_EXTENSION', default=extension_default)
- extension_exception = Options.options['PRIVATE_EXTENSION_EXCEPTION']
+ extension_exception = default_value('PRIVATE_EXTENSION_EXCEPTION')
gr.add_option('--private-extension-exception',
help=("comma separated list of libraries to not apply extension to [%s]" % extension_exception),
action="store", dest='PRIVATE_EXTENSION_EXCEPTION', default=extension_exception)
- builtin_default = Options.options['BUILTIN_LIBRARIES_DEFAULT']
+ builtin_default = default_value('BUILTIN_LIBRARIES_DEFAULT')
gr.add_option('--builtin-libraries',
help=("command separated list of libraries to build directly into binaries [%s]" % builtin_default),
action="store", dest='BUILTIN_LIBRARIES', default=builtin_default)
action="store", dest='MODULESDIR', default='${PREFIX}/modules')
opt.add_option('--with-privatelibdir',
- help=("private library directory [PREFIX/lib/%s]" % Utils.g_module.APPNAME),
+ help=("private library directory [PREFIX/lib/%s]" % Context.g_module.APPNAME),
action="store", dest='PRIVATELIBDIR', default=None)
opt.add_option('--with-libiconv',
@Utils.run_once
def configure(conf):
conf.env.hlist = []
- conf.env.srcdir = conf.srcdir
+ conf.env.srcdir = conf.srcnode.abspath()
conf.define('SRCDIR', conf.env['srcdir'])
conf.SETUP_CONFIGURE_CACHE(Options.options.enable_configure_cache)
# load our local waf extensions
- conf.check_tool('gnu_dirs')
- conf.check_tool('wafsamba')
- conf.check_tool('print_commands')
+ conf.load('gnu_dirs')
+ conf.load('wafsamba')
conf.CHECK_CC_ENV()
- conf.check_tool('compiler_cc')
+ conf.load('compiler_cc')
conf.CHECK_STANDARD_LIBPATH()
# older gcc versions (< 4.4) does not work with gccdeps, so we have to see if the .d file is generated
if Options.options.enable_gccdeps:
# stale file removal - the configuration may pick up the old .pyc file
- p = os.path.join(conf.srcdir, 'buildtools/wafsamba/gccdeps.pyc')
+ p = os.path.join(conf.env.srcdir, 'buildtools/wafsamba/gccdeps.pyc')
if os.path.exists(p):
os.remove(p)
conf.load('gccdeps')
# see if we need special largefile flags
if not conf.CHECK_LARGEFILE():
- raise Utils.WafError('Samba requires large file support support, but not available on this platform: sizeof(off_t) < 8')
+ raise Errors.WafError('Samba requires large file support support, but not available on this platform: sizeof(off_t) < 8')
if conf.env.HAVE_STDDEF_H and conf.env.HAVE_STDLIB_H:
conf.DEFINE('STDC_HEADERS', 1)
def build(bld):
# give a more useful message if the source directory has moved
- relpath = os_path_relpath(bld.curdir, bld.srcnode.abspath())
+ curdir = bld.path.abspath()
+ srcdir = bld.srcnode.abspath()
+ relpath = os_path_relpath(curdir, srcdir)
if relpath.find('../') != -1:
- Logs.error('bld.curdir %s is not a child of %s' % (bld.curdir, bld.srcnode.abspath()))
- raise Utils.WafError('''The top source directory has moved. Please run distclean and reconfigure''')
+ Logs.error('bld.path %s is not a child of %s' % (curdir, srcdir))
+ raise Errors.WafError('''The top source directory has moved. Please run distclean and reconfigure''')
bld.CHECK_MAKEFLAGS()
bld.SETUP_BUILD_GROUPS()
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2005-2016 (ita)
+# Thomas Nagy, 2005-2018 (ita)
"""
Classes related to the build phase (build, clean, install, step, etc)
if not hasattr(self, v):
setattr(self, v, {})
- def set_cur(self, cur):
- self.current_group = cur
- def get_cur(self):
- return self.current_group
- cur = property(get_cur, set_cur)
-
def get_variant_dir(self):
"""Getter for the variant_dir attribute"""
if not self.variant:
return self.out_dir
- return os.path.join(self.out_dir, self.variant)
+ return os.path.join(self.out_dir, os.path.normpath(self.variant))
variant_dir = property(get_variant_dir, None)
def __call__(self, *k, **kw):
self.add_to_group(ret, group=kw.get('group'))
return ret
- def rule(self, *k, **kw):
- """
- Wrapper for creating a task generator using the decorator notation. The following code::
-
- @bld.rule(target="foo")
- def _(tsk):
- print("bar")
-
- is equivalent to::
-
- def bar(tsk):
- print("bar")
-
- bld(
- target = "foo",
- rule = bar,
- )
- """
- def f(rule):
- ret = self(*k, **kw)
- ret.rule = rule
- return ret
- return f
-
def __copy__(self):
"""
Build contexts cannot be copied
Node.Nod3 = self.node_class
try:
data = cPickle.loads(data)
- except Exception ,e:
+ except Exception as e:
Logs.debug('build: Could not pickle the build cache %s: %r', dbfn, e)
else:
for x in SAVED_ATTRS:
try:
self.producer.start()
except KeyboardInterrupt:
- self.store()
+ if self.is_dirty():
+ self.store()
raise
else:
- if self.producer.dirty:
+ if self.is_dirty():
self.store()
if self.producer.error:
raise Errors.BuildError(self.producer.error)
+ def is_dirty(self):
+ return self.producer.dirty
+
def setup(self, tool, tooldir=None, funs=None):
"""
Import waf tools defined during the configuration::
:param funs: unused variable
"""
if isinstance(tool, list):
- for i in tool: self.setup(i, tooldir)
+ for i in tool:
+ self.setup(i, tooldir)
return
module = Context.load_tool(tool, tooldir)
- if hasattr(module, "setup"): module.setup(self)
+ if hasattr(module, "setup"):
+ module.setup(self)
def get_env(self):
"""Getter for the env property"""
right = '][%s%s%s]' % (col1, self.timer, col2)
cols = Logs.get_term_cols() - len(left) - len(right) + 2*len(col1) + 2*len(col2)
- if cols < 7: cols = 7
+ if cols < 7:
+ cols = 7
ratio = ((cols * idx)//total) - 1
def add_to_group(self, tgen, group=None):
"""Adds a task or a task generator to the build; there is no attempt to remove it if it was already added."""
- assert(isinstance(tgen, TaskGen.task_gen) or isinstance(tgen, Task.TaskBase))
+ assert(isinstance(tgen, TaskGen.task_gen) or isinstance(tgen, Task.Task))
tgen.bld = self
self.get_group(group).append(tgen)
def get_targets(self):
"""
- Returns the task generator corresponding to the 'targets' list; used internally
- by :py:meth:`waflib.Build.BuildContext.get_build_iterator` to perform partial builds::
+ This method returns a pair containing the index of the last build group to post,
+ and the list of task generator objects corresponding to the target names.
+
+ This is used internally by :py:meth:`waflib.Build.BuildContext.get_build_iterator`
+ to perform partial builds::
$ waf --targets=myprogram,myshlib
+
+ :return: the minimum build group index, and list of task generators
+ :rtype: tuple
"""
to_post = []
min_grp = 0
Post task generators from the group indexed by self.current_group; used internally
by :py:meth:`waflib.Build.BuildContext.get_build_iterator`
"""
+ def tgpost(tg):
+ try:
+ f = tg.post
+ except AttributeError:
+ pass
+ else:
+ f()
+
if self.targets == '*':
for tg in self.groups[self.current_group]:
- try:
- f = tg.post
- except AttributeError:
- pass
- else:
- f()
+ tgpost(tg)
elif self.targets:
if self.current_group < self._min_grp:
for tg in self.groups[self.current_group]:
- try:
- f = tg.post
- except AttributeError:
- pass
- else:
- f()
+ tgpost(tg)
else:
for tg in self._exact_tg:
tg.post()
ln = self.srcnode
for tg in self.groups[self.current_group]:
try:
- f = tg.post
+ p = tg.path
except AttributeError:
pass
else:
- if tg.path.is_child_of(ln):
- f()
+ if p.is_child_of(ln):
+ tgpost(tg)
def get_tasks_group(self, idx):
"""
Returns all task instances for the build group at position idx,
used internally by :py:meth:`waflib.Build.BuildContext.get_build_iterator`
- :rtype: list of :py:class:`waflib.Task.TaskBase`
+ :rtype: list of :py:class:`waflib.Task.Task`
"""
tasks = []
for tg in self.groups[idx]:
Creates a Python generator object that returns lists of tasks that may be processed in parallel.
:return: tasks which can be executed immediately
- :rtype: generator returning lists of :py:class:`waflib.Task.TaskBase`
+ :rtype: generator returning lists of :py:class:`waflib.Task.Task`
"""
- self.current_group = 0
-
if self.targets and self.targets != '*':
(self._min_grp, self._exact_tg) = self.get_targets()
- global lazy_post
if self.post_mode != POST_LAZY:
- while self.current_group < len(self.groups):
+ for self.current_group, _ in enumerate(self.groups):
self.post_group()
- self.current_group += 1
- self.current_group = 0
- while self.current_group < len(self.groups):
+ for self.current_group, _ in enumerate(self.groups):
# first post the task generators for the group
if self.post_mode != POST_AT_ONCE:
self.post_group()
# then extract the tasks
tasks = self.get_tasks_group(self.current_group)
+
# if the constraints are set properly (ext_in/ext_out, before/after)
# the call to set_file_constraints may be removed (can be a 15% penalty on no-op rebuilds)
# (but leave set_file_constraints for the installation step)
Task.set_precedence_constraints(tasks)
self.cur_tasks = tasks
- self.current_group += 1
- if not tasks: # return something else the build will stop
- continue
- yield tasks
+ if tasks:
+ yield tasks
while 1:
+ # the build stops once there are no tasks to process
yield []
def install_files(self, dest, files, **kw):
try:
self.copy_fun(src, tgt)
- except EnvironmentError ,e:
+ except EnvironmentError as e:
if not os.path.exists(src):
Logs.error('File %r does not exist', src)
elif not os.path.isfile(src):
#self.uninstall.append(tgt)
try:
os.remove(tgt)
- except OSError ,e:
+ except OSError as e:
if e.errno != errno.ENOENT:
if not getattr(self, 'uninstall_error', None):
self.uninstall_error = True
super(UninstallContext, self).__init__(**kw)
self.is_install = UNINSTALL
- def execute(self):
- """
- See :py:func:`waflib.Build.BuildContext.execute`.
- """
- # TODO just mark the tasks are already run with hasrun=Task.SKIPPED?
- try:
- # do not execute any tasks
- def runnable_status(self):
- return Task.SKIP_ME
- setattr(Task.Task, 'runnable_status_back', Task.Task.runnable_status)
- setattr(Task.Task, 'runnable_status', runnable_status)
-
- super(UninstallContext, self).execute()
- finally:
- setattr(Task.Task, 'runnable_status', Task.Task.runnable_status_back)
-
class CleanContext(BuildContext):
'''cleans the project'''
cmd = 'clean'
self.store()
def clean(self):
- """Remove files from the build directory if possible, and reset the caches"""
+ """
+ Remove most files from the build directory, and reset all caches.
+
+ Custom lists of files to clean can be declared as `bld.clean_files`.
+ For example, exclude `build/program/myprogram` from getting removed::
+
+ def build(bld):
+ bld.clean_files = bld.bldnode.ant_glob('**',
+ excl='.lock* config.log c4che/* config.h program/myprogram',
+ quiet=True, generator=True)
+ """
Logs.debug('build: clean called')
- if self.bldnode != self.srcnode:
+ if hasattr(self, 'clean_files'):
+ for n in self.clean_files:
+ n.delete()
+ elif self.bldnode != self.srcnode:
# would lead to a disaster if top == out
lst = []
for env in self.all_envs.values():
for pat in self.files.split(','):
matcher = self.get_matcher(pat)
for tg in g:
- if isinstance(tg, Task.TaskBase):
+ if isinstance(tg, Task.Task):
lst = [tg]
else:
lst = tg.tasks
for tsk in lst:
do_exec = False
- for node in getattr(tsk, 'inputs', []):
+ for node in tsk.inputs:
if matcher(node, output=False):
do_exec = True
break
- for node in getattr(tsk, 'outputs', []):
+ for node in tsk.outputs:
if matcher(node, output=True):
do_exec = True
break
pattern = re.compile(pat)
def match(node, output):
- if output == True and not out:
+ if output and not out:
return False
- if output == False and not inn:
+ if not output and not inn:
return False
if anode:
if not self.all_envs:
self.load_envs()
self.recurse([self.run_dir])
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2005-2016 (ita)
+# Thomas Nagy, 2005-2018 (ita)
"""
if 'foo' in env:
print(env['foo'])
"""
- if key in self.table: return True
- try: return self.parent.__contains__(key)
- except AttributeError: return False # parent may not exist
+ if key in self.table:
+ return True
+ try:
+ return self.parent.__contains__(key)
+ except AttributeError:
+ return False # parent may not exist
def keys(self):
"""Dict interface"""
def __setitem__(self, key, value):
"""
- Dictionary interface: set value for key
+ Dictionary interface: set value from key
"""
self.table[key] = value
def __delitem__(self, key):
"""
- Dictionary interface: mark the key as missing
+ Dictionary interface: mark the value as missing
"""
self[key] = []
conf.env['value']
"""
if name in self.__slots__:
- return object.__getattr__(self, name)
+ return object.__getattribute__(self, name)
else:
return self[name]
:type key: string
"""
s = self[key]
- if isinstance(s, str): return s
+ if isinstance(s, str):
+ return s
return ' '.join(s)
def _get_list_value_for_modification(self, key):
env = self
while 1:
table_list.insert(0, env.table)
- try: env = env.parent
- except AttributeError: break
+ try:
+ env = env.parent
+ except AttributeError:
+ break
merged_table = {}
for table in table_list:
merged_table.update(table)
Reverts the object to a previous state. See :py:meth:`ConfigSet.stash`
"""
self.table = self.undo_stack.pop(-1)
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2005-2016 (ita)
+# Thomas Nagy, 2005-2018 (ita)
"""
Configuration system
* hold configuration routines such as ``find_program``, etc
"""
-import os, shlex, sys, time, re, shutil
+import os, re, shlex, shutil, sys, time, traceback
from waflib import ConfigSet, Utils, Options, Logs, Context, Build, Errors
WAF_CONFIG_LOG = 'config.log'
"""
if not env.PREFIX:
if Options.options.prefix or Utils.is_win32:
- env.PREFIX = Utils.sane_path(Options.options.prefix)
+ env.PREFIX = Options.options.prefix
else:
- env.PREFIX = ''
+ env.PREFIX = '/'
if not env.BINDIR:
if Options.options.bindir:
- env.BINDIR = Utils.sane_path(Options.options.bindir)
+ env.BINDIR = Options.options.bindir
else:
env.BINDIR = Utils.subst_vars('${PREFIX}/bin', env)
if not env.LIBDIR:
if Options.options.libdir:
- env.LIBDIR = Utils.sane_path(Options.options.libdir)
+ env.LIBDIR = Options.options.libdir
else:
env.LIBDIR = Utils.subst_vars('${PREFIX}/lib%s' % Utils.lib64(), env)
tmpenv = self.all_envs[key]
tmpenv.store(os.path.join(self.cachedir.abspath(), key + Build.CACHE_SUFFIX))
- def load(self, input, tooldir=None, funs=None, with_sys_path=True, cache=False):
+ def load(self, tool_list, tooldir=None, funs=None, with_sys_path=True, cache=False):
"""
Load Waf tools, which will be imported whenever a build is started.
- :param input: waf tools to import
- :type input: list of string
+ :param tool_list: waf tools to import
+ :type tool_list: list of string
:param tooldir: paths for the imports
:type tooldir: list of string
:param funs: functions to execute from the waf tools
:type cache: bool
"""
- tools = Utils.to_list(input)
- if tooldir: tooldir = Utils.to_list(tooldir)
+ tools = Utils.to_list(tool_list)
+ if tooldir:
+ tooldir = Utils.to_list(tooldir)
for tool in tools:
# avoid loading the same tool more than once with the same functions
# used by composite projects
module = None
try:
module = Context.load_tool(tool, tooldir, ctx=self, with_sys_path=with_sys_path)
- except ImportError ,e:
- self.fatal('Could not load the Waf tool %r from %r\n%s' % (tool, sys.path, e))
- except Exception ,e:
+ except ImportError as e:
+ self.fatal('Could not load the Waf tool %r from %r\n%s' % (tool, getattr(e, 'waf_sys_path', sys.path), e))
+ except Exception as e:
self.to_log('imp %r (%r & %r)' % (tool, tooldir, funs))
- self.to_log(Utils.ex_stack())
+ self.to_log(traceback.format_exc())
raise
if funs is not None:
else:
func = getattr(module, 'configure', None)
if func:
- if type(func) is type(Utils.readf): func(self)
- else: self.eval_rules(func)
+ if type(func) is type(Utils.readf):
+ func(self)
+ else:
+ self.eval_rules(func)
self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs})
return cmd
@conf
-def check_waf_version(self, mini='1.8.99', maxi='2.0.0', **kw):
+def check_waf_version(self, mini='1.9.99', maxi='2.1.0', **kw):
"""
Raise a Configuration error if the Waf version does not strictly match the given bounds::
- conf.check_waf_version(mini='1.8.99', maxi='2.0.0')
+ conf.check_waf_version(mini='1.9.99', maxi='2.1.0')
:type mini: number, tuple or string
:param mini: Minimum required version
:param filename: name of the file to search for
:param path_list: list of directories to search
- :return: the first occurrence filename or '' if filename could not be found
+ :return: the first matching filename; else a configuration exception is raised
"""
for n in Utils.to_list(filename):
for d in Utils.to_list(path_list):
:type msg: string
:param interpreter: interpreter for the program
:type interpreter: ConfigSet variable key
+ :raises: :py:class:`waflib.Errors.ConfigurationError`
"""
exts = kw.get('exts', Utils.is_win32 and '.exe,.com,.bat,.cmd' or ',.sh,.pl,.py')
try:
bld.compile()
except Errors.WafError:
- ret = 'Test does not build: %s' % Utils.ex_stack()
+ ret = 'Test does not build: %s' % traceback.format_exc()
self.fatal(ret)
else:
ret = getattr(bld, 'retval', 0)
else:
self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw)
return ret
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2010-2016 (ita)
+# Thomas Nagy, 2010-2018 (ita)
"""
Classes and functions enabling the command system
import waflib.Node
# the following 3 constants are updated on each new release (do not touch)
-HEXVERSION=0x1090a00
+HEXVERSION=0x2000400
"""Constant updated on new releases"""
-WAFVERSION="1.9.10"
+WAFVERSION="2.0.4"
"""Constant updated on new releases"""
-WAFREVISION="ae3f254315e0dcea4059703987148882ba414894"
+WAFREVISION="5996879673deb7166b61a299be317a738de6891e"
"""Git revision when the waf version is updated"""
-ABI = 99
+ABI = 20
"""Version of the build data cache file format (used in :py:const:`waflib.Context.DBFILE`)"""
DBFILE = '.wafpickle-%s-%d-%d' % (sys.platform, sys.hexversion, ABI)
:return: Context object
:rtype: :py:class:`waflib.Context.Context`
"""
- global classes
for x in classes:
if x.cmd == cmd_name:
return x(*k, **kw)
Context classes must provide an attribute 'cmd' representing the command name, and a function
attribute 'fun' representing the function name that the command uses.
"""
- def __init__(cls, name, bases, dict):
- super(store_context, cls).__init__(name, bases, dict)
+ def __init__(cls, name, bases, dct):
+ super(store_context, cls).__init__(name, bases, dct)
name = cls.__name__
if name in ('ctx', 'Context'):
if not getattr(cls, 'fun', None):
cls.fun = cls.cmd
- global classes
classes.insert(0, cls)
ctx = store_context('ctx', (object,), {})
try:
rd = kw['run_dir']
except KeyError:
- global run_dir
rd = run_dir
# binds the context to the nodes in use to avoid a context singleton
Here, it calls the function name in the top-level wscript file. Most subclasses
redefine this method to provide additional functionality.
"""
- global g_module
self.recurse([os.path.dirname(g_module.root_path)])
def pre_recurse(self, node):
raise Errors.WafError('Cannot read the folder %r' % d)
raise Errors.WafError('No wscript file in directory %s' % d)
def log_command(self, cmd, kw):
	"""
	Logs the command about to be executed, but only in verbose mode.
	When the environment variable ``WAF_CMD_FORMAT`` is set to ``string``,
	list commands are rendered as a shell string first.

	:param cmd: command line (string or list)
	:param kw: keyword arguments that will be passed to the process
	"""
	if not Logs.verbose:
		return
	fmt = os.environ.get('WAF_CMD_FORMAT')
	if fmt == 'string' and not isinstance(cmd, str):
		cmd = Utils.shell_escape(cmd)
	Logs.debug('runner: %r', cmd)
	Logs.debug('runner_env: kw=%s', kw)
+
def exec_command(self, cmd, **kw):
"""
Runs an external process and returns the exit status::
:type kw: dict
:returns: process exit status
:rtype: integer
+ :raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process
+ :raises: :py:class:`waflib.Errors.WafError` in case of execution failure
"""
subprocess = Utils.subprocess
kw['shell'] = isinstance(cmd, str)
- Logs.debug('runner: %r', cmd)
- Logs.debug('runner_env: kw=%s', kw)
+ self.log_command(cmd, kw)
if self.logger:
self.logger.info(cmd)
try:
ret, out, err = Utils.run_process(cmd, kw, cargs)
- except Exception ,e:
+ except Exception as e:
raise Errors.WafError('Execution failure: %s' % str(e), ex=e)
if out:
if not isinstance(out, str):
- out = out.decode(sys.stdout.encoding or 'iso8859-1', errors='replace')
+ out = out.decode(sys.stdout.encoding or 'latin-1', errors='replace')
if self.logger:
self.logger.debug('out: %s', out)
else:
Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
if err:
if not isinstance(err, str):
- err = err.decode(sys.stdout.encoding or 'iso8859-1', errors='replace')
+ err = err.decode(sys.stdout.encoding or 'latin-1', errors='replace')
if self.logger:
self.logger.error('err: %s' % err)
else:
"""
Executes a process and returns stdout/stderr if the execution is successful.
An exception is thrown when the exit status is non-0. In that case, both stderr and stdout
- will be bound to the WafError object::
+ will be bound to the WafError object (configuration tests)::
def configure(conf):
out = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.STDOUT, quiet=waflib.Context.BOTH)
(out, err) = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.BOTH)
- (out, err) = conf.cmd_and_log(cmd, input='\\n', output=waflib.Context.STDOUT)
+ (out, err) = conf.cmd_and_log(cmd, input='\\n'.encode(), output=waflib.Context.STDOUT)
try:
conf.cmd_and_log(['which', 'someapp'], output=waflib.Context.BOTH)
- except Exception ,e:
+ except Errors.WafError as e:
print(e.stdout, e.stderr)
:param cmd: args for subprocess.Popen
:type cmd: list or string
:param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
:type kw: dict
- :returns: process exit status
- :rtype: integer
+ :returns: a tuple containing the contents of stdout and stderr
+ :rtype: string
:raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process
:raises: :py:class:`waflib.Errors.WafError` in case of execution failure; stdout/stderr/returncode are bound to the exception object
"""
subprocess = Utils.subprocess
kw['shell'] = isinstance(cmd, str)
- Logs.debug('runner: %r', cmd)
+ self.log_command(cmd, kw)
if 'quiet' in kw:
quiet = kw['quiet']
try:
ret, out, err = Utils.run_process(cmd, kw, cargs)
- except Exception ,e:
+ except Exception as e:
raise Errors.WafError('Execution failure: %s' % str(e), ex=e)
if not isinstance(out, str):
- out = out.decode(sys.stdout.encoding or 'iso8859-1', errors='replace')
+ out = out.decode(sys.stdout.encoding or 'latin-1', errors='replace')
if not isinstance(err, str):
- err = err.decode(sys.stdout.encoding or 'iso8859-1', errors='replace')
+ err = err.decode(sys.stdout.encoding or 'latin-1', errors='replace')
if out and quiet != STDOUT and quiet != BOTH:
self.to_log('out: %s' % out)
if self.logger:
self.logger.info('from %s: %s' % (self.path.abspath(), msg))
try:
- msg = '%s\n(complete log in %s)' % (msg, self.logger.handlers[0].baseFilename)
+ logfile = self.logger.handlers[0].baseFilename
except AttributeError:
pass
+ else:
+ if os.environ.get('WAF_PRINT_FAILURE_LOG'):
+ # see #1930
+ msg = 'Log from (%s):\n%s\n' % (logfile, Utils.readf(logfile))
+ else:
+ msg = '%s\n(complete log in %s)' % (msg, logfile)
raise self.errors.ConfigurationError(msg, ex=ex)
def to_log(self, msg):
result = kw.get('result') or k[0]
defcolor = 'GREEN'
- if result == True:
+ if result is True:
msg = 'ok'
- elif result == False:
+ elif not result:
msg = 'not found'
defcolor = 'YELLOW'
else:
:param ban: list of exact file names to exclude
:type ban: list of string
"""
- global waf_dir
if os.path.isdir(waf_dir):
lst = self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var)
for x in lst:
sys.path = tooldir + sys.path
try:
__import__(tool)
+ except ImportError as e:
+ e.waf_sys_path = list(sys.path)
+ raise
finally:
for d in tooldir:
sys.path.remove(d)
Context.tools[tool] = ret
return ret
else:
- if not with_sys_path: sys.path.insert(0, waf_dir)
+ if not with_sys_path:
+ sys.path.insert(0, waf_dir)
try:
for x in ('waflib.Tools.%s', 'waflib.extras.%s', 'waflib.%s', '%s'):
try:
x = None
else: # raise an exception
__import__(tool)
+ except ImportError as e:
+ e.waf_sys_path = list(sys.path)
+ raise
finally:
- if not with_sys_path: sys.path.remove(waf_dir)
+ if not with_sys_path:
+ sys.path.remove(waf_dir)
ret = sys.modules[x % tool]
Context.tools[tool] = ret
return ret
finally:
if not with_sys_path:
sys.path += back_path
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2010-2016 (ita)
+# Thomas Nagy, 2010-2018 (ita)
"""
Exceptions used in the Waf code
:param ex: exception causing this error (optional)
:type ex: exception
"""
+ Exception.__init__(self)
self.msg = msg
assert not isinstance(msg, Exception)
lst = ['Build failed']
for tsk in self.tasks:
txt = tsk.format_error()
- if txt: lst.append(txt)
+ if txt:
+ lst.append(txt)
return '\n'.join(lst)
class ConfigurationError(WafError):
class TaskNotReady(WafError):
"""Task-specific exception type signalling that task signatures cannot be computed"""
pass
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2005-2016 (ita)
+# Thomas Nagy, 2005-2018 (ita)
"""
logging, colors, terminal width and pretty-print
:param rec: log entry
"""
- global verbose
rec.zone = rec.module
if rec.levelno >= logging.INFO:
return True
"""
Wraps logging.debug and discards messages if the verbosity level :py:attr:`waflib.Logs.verbose` ≤ 0
"""
- global verbose
if verbose:
k = list(k)
k[0] = k[0].replace('\n', ' ')
- global log
log.debug(*k, **kw)
def error(*k, **kw):
"""
Wrap logging.errors, adds the stack trace when the verbosity level :py:attr:`waflib.Logs.verbose` ≥ 2
"""
- global log, verbose
log.error(*k, **kw)
if verbose > 2:
st = traceback.extract_stack()
buf.append(' File %r, line %d, in %s' % (filename, lineno, name))
if line:
buf.append(' %s' % line.strip())
- if buf: log.error('\n'.join(buf))
+ if buf:
+ log.error('\n'.join(buf))
def warn(*k, **kw):
	"""
	Forwards the call to the module-level logger's ``warn`` method
	"""
	log.warn(*k, **kw)
def info(*k, **kw):
	"""
	Forwards the call to the module-level logger's ``info`` method
	"""
	log.info(*k, **kw)
def init_log():
:type name: string
"""
logger = logging.getLogger(name)
- hdlr = logging.FileHandler(path, 'w')
+ if sys.hexversion > 0x3000000:
+ encoding = sys.stdout.encoding
+ else:
+ encoding = None
+ hdlr = logging.FileHandler(path, 'w', encoding=encoding)
formatter = logging.Formatter('%(message)s')
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
:param sep: a string to append at the end (line separator)
:type sep: string
"""
- global info
info('%s%s%s %s', colors(col), msg, colors.NORMAL, label, extra={'terminator':sep})
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2005-2016 (ita)
+# Thomas Nagy, 2005-2018 (ita)
"""
Node: filesystem structure
**/.#*
**/%*%
**/._*
+**/*.swp
**/CVS
**/CVS/**
**/.cvsignore
recursive traversal in :py:meth:`waflib.Node.Node.ant_glob`
"""
def ant_matcher(s, ignorecase):
	"""
	Compiles ant-style glob expressions into lists of regular expressions,
	one list per pattern, one entry per path component (``**`` is kept as-is).

	:param s: pattern or list of patterns (``/``-separated, ``\\`` tolerated)
	:param ignorecase: compile the regular expressions case-insensitively
	:raises: :py:class:`waflib.Errors.WafError` on an invalid pattern
	"""
	flags = re.I if ignorecase else 0
	result = []
	for pattern in Utils.to_list(s):
		pattern = pattern.replace('\\', '/').replace('//', '/')
		if pattern.endswith('/'):
			# a trailing slash means "everything below this folder"
			pattern += '**'
		chain = []
		for part in pattern.split('/'):
			if part == '**':
				chain.append(part)
				continue
			# translate the glob wildcards into regex syntax
			part = part.replace('.', '[.]').replace('*', '.*').replace('?', '.').replace('+', '\\+')
			part = '^%s$' % part
			try:
				chain.append(re.compile(part, flags=flags))
			except Exception as e:
				raise Errors.WafError('Invalid pattern: %s' % part, e)
		result.append(chain)
	return result
+
def ant_sub_filter(name, nn):
	"""
	Advances a list of component-pattern chains by matching *name* against
	each chain's head, returning the surviving tails.

	:param name: path component to match
	:param nn: list of pattern chains as built by :py:func:`ant_matcher`
	"""
	remainders = []
	for chain in nn:
		if not chain:
			continue
		head = chain[0]
		if head == '**':
			# '**' may absorb this component, so the chain itself survives
			remainders.append(chain)
			if len(chain) > 1:
				if chain[1].match(name):
					remainders.append(chain[2:])
			else:
				# bare '**' matches everything below
				remainders.append([])
		elif head.match(name):
			remainders.append(chain[1:])
	return remainders
+
def ant_sub_matcher(name, pats):
	"""
	Advances the (accepted, rejected) pattern pair over one path component;
	a fully-matched rejection pattern discards all acceptance candidates.

	:param name: path component to match
	:param pats: pair of pattern-chain lists [accepted, rejected]
	"""
	accepted = ant_sub_filter(name, pats[0])
	rejected = ant_sub_filter(name, pats[1])
	if [] in rejected:
		accepted = []
	return [accepted, rejected]
+
class Node(object):
"""
This class is organized in two parts:
"""
raise Errors.WafError('nodes are not supposed to be copied')
- def read(self, flags='r', encoding='ISO8859-1'):
+ def read(self, flags='r', encoding='latin-1'):
"""
Reads and returns the contents of the file represented by this node, see :py:func:`waflib.Utils.readf`::
"""
return Utils.readf(self.abspath(), flags, encoding)
- def write(self, data, flags='w', encoding='ISO8859-1'):
+ def write(self, data, flags='w', encoding='latin-1'):
"""
Writes data to the file represented by this node, see :py:func:`waflib.Utils.writef`::
if isinstance(lst, str):
lst = [x for x in Utils.split_path(lst) if x and x != '.']
+ if lst and lst[0].startswith('\\\\') and not self.parent:
+ node = self.ctx.root.make_node(lst[0])
+ node.cache_isdir = True
+ return node.find_node(lst[1:])
+
cur = self
for x in lst:
if x == '..':
p = p.parent
return p is node
- def ant_iter(self, accept=None, maxdepth=25, pats=[], dir=False, src=True, remove=True):
+ def ant_iter(self, accept=None, maxdepth=25, pats=[], dir=False, src=True, remove=True, quiet=False):
"""
Recursive method used by :py:meth:`waflib.Node.ant_glob`.
:type src: bool
:param remove: remove files/folders that do not exist (True by default)
:type remove: bool
+ :param quiet: disable build directory traversal warnings (verbose mode)
+ :type quiet: bool
:returns: A generator object to iterate from
:rtype: iterator
"""
if isdir:
if dir:
yield node
- else:
- if src:
- yield node
+ elif src:
+ yield node
if isdir:
node.cache_isdir = True
if maxdepth:
- for k in node.ant_iter(accept=accept, maxdepth=maxdepth - 1, pats=npats, dir=dir, src=src, remove=remove):
+ for k in node.ant_iter(accept=accept, maxdepth=maxdepth - 1, pats=npats, dir=dir, src=src, remove=remove, quiet=quiet):
yield k
raise StopIteration
:type dir: bool
:param src: return files (True by default)
:type src: bool
- :param remove: remove files/folders that do not exist (True by default)
- :type remove: bool
:param maxdepth: maximum depth of recursion
:type maxdepth: int
:param ignorecase: ignore case while matching (False by default)
:type ignorecase: bool
:returns: The corresponding Nodes
- :rtype: list of :py:class:`waflib.Node.Node` instances
+ :type generator: bool
+ :param remove: remove files/folders that do not exist (True by default)
+ :type remove: bool
+ :param quiet: disable build directory traversal warnings (verbose mode)
+ :type quiet: bool
+ :returns: Whether to evaluate the Nodes lazily, alters the type of the returned value
+ :rtype: by default, list of :py:class:`waflib.Node.Node` instances
"""
-
src = kw.get('src', True)
- dir = kw.get('dir', False)
-
+ dir = kw.get('dir')
excl = kw.get('excl', exclude_regs)
incl = k and k[0] or kw.get('incl', '**')
- reflags = kw.get('ignorecase', 0) and re.I
-
- def to_pat(s):
- lst = Utils.to_list(s)
- ret = []
- for x in lst:
- x = x.replace('\\', '/').replace('//', '/')
- if x.endswith('/'):
- x += '**'
- lst2 = x.split('/')
- accu = []
- for k in lst2:
- if k == '**':
- accu.append(k)
- else:
- k = k.replace('.', '[.]').replace('*','.*').replace('?', '.').replace('+', '\\+')
- k = '^%s$' % k
- try:
- #print "pattern", k
- accu.append(re.compile(k, flags=reflags))
- except Exception ,e:
- raise Errors.WafError('Invalid pattern: %s' % k, e)
- ret.append(accu)
- return ret
-
- def filtre(name, nn):
- ret = []
- for lst in nn:
- if not lst:
- pass
- elif lst[0] == '**':
- ret.append(lst)
- if len(lst) > 1:
- if lst[1].match(name):
- ret.append(lst[2:])
- else:
- ret.append([])
- elif lst[0].match(name):
- ret.append(lst[1:])
- return ret
-
- def accept(name, pats):
- nacc = filtre(name, pats[0])
- nrej = filtre(name, pats[1])
- if [] in nrej:
- nacc = []
- return [nacc, nrej]
-
- ret = [x for x in self.ant_iter(accept=accept, pats=[to_pat(incl), to_pat(excl)], maxdepth=kw.get('maxdepth', 25), dir=dir, src=src, remove=kw.get('remove', True))]
- if kw.get('flat', False):
- return ' '.join([x.path_from(self) for x in ret])
+ remove = kw.get('remove', True)
+ maxdepth = kw.get('maxdepth', 25)
+ ignorecase = kw.get('ignorecase', False)
+ quiet = kw.get('quiet', False)
+ pats = (ant_matcher(incl, ignorecase), ant_matcher(excl, ignorecase))
- return ret
+ if kw.get('generator'):
+ return Utils.lazy_generator(self.ant_iter, (ant_sub_matcher, maxdepth, pats, dir, src, remove, quiet))
- # --------------------------------------------------------------------------------
- # the following methods require the source/build folders (bld.srcnode/bld.bldnode)
- # using a subclass is a possibility, but is that really necessary?
- # --------------------------------------------------------------------------------
+ it = self.ant_iter(ant_sub_matcher, maxdepth, pats, dir, src, remove, quiet)
+ if kw.get('flat'):
+ # returns relative paths as a space-delimited string
+ # prefer Node objects whenever possible
+ return ' '.join(x.path_from(self) for x in it)
+ return list(it)
+
+ # ----------------------------------------------------------------------------
+ # the methods below require the source/build folders (bld.srcnode/bld.bldnode)
def is_src(self):
"""
def find_or_declare(self, lst):
	"""
	Use this method in the build phase to declare output files which
	are meant to be written in the build directory.

	The Node object and its parent folder are created as needed.
	Absolute string paths escape the build directory and are created
	from the filesystem root instead.

	:param lst: relative path
	:type lst: string or list of string
	"""
	if isinstance(lst, str) and os.path.isabs(lst):
		base = self.ctx.root
	else:
		base = self.get_bld()
	node = base.make_node(lst)
	node.parent.mkdir()
	return node
raise
return ret
- # --------------------------------------------
- # TODO waf 2.0, remove the sig and cache_sig attributes
- def get_sig(self):
- return self.h_file()
- def set_sig(self, val):
- # clear the cache, so that past implementation should still work
- try:
- del self.get_bld_sig.__cache__[(self,)]
- except (AttributeError, KeyError):
- pass
- sig = property(get_sig, set_sig)
- cache_sig = property(get_sig, set_sig)
-
pickle_lock = Utils.threading.Lock()
"""Lock mandatory for thread-safe node serialization"""
class Nod3(Node):
"""Mandatory subclass for thread-safe node serialization"""
pass # do not remove
+
+
#!/usr/bin/env python
# encoding: utf-8
# Scott Newton, 2005 (scottn)
-# Thomas Nagy, 2006-2016 (ita)
+# Thomas Nagy, 2006-2018 (ita)
"""
Support for waf command-line options
import os, tempfile, optparse, sys, re
from waflib import Logs, Utils, Context, Errors
-options = {}
+options = optparse.Values()
"""
A global dictionary representing user-provided command-line options::
"""
Command-line options parser.
"""
def __init__(self, ctx, allow_unknown=False):
	"""
	:param ctx: context owning this parser
	:param allow_unknown: collect unknown options instead of raising a usage error
	"""
	optparse.OptionParser.__init__(self, conflict_handler='resolve', add_help_option=False,
		version='waf %s (%s)' % (Context.WAFVERSION, Context.WAFREVISION))
	# wrap the help output to the actual terminal width
	self.formatter.width = Logs.get_term_cols()
	self.ctx = ctx
	self.allow_unknown = allow_unknown
+
+ def _process_args(self, largs, rargs, values):
+ """
+ Custom _process_args to allow unknown options according to the allow_unknown status
+ """
+ while rargs:
+ try:
+ optparse.OptionParser._process_args(self,largs,rargs,values)
+ except (optparse.BadOptionError, optparse.AmbiguousOptionError) as e:
+ if self.allow_unknown:
+ largs.append(e.opt_str)
+ else:
+ self.error(str(e))
def print_usage(self, file=None):
return self.print_help(file)
p('-k', '--keep', dest='keep', default=0, action='count', help='continue despite errors (-kk to try harder)')
p('-v', '--verbose', dest='verbose', default=0, action='count', help='verbosity level -v -vv or -vvv [default: 0]')
p('--zones', dest='zones', default='', action='store', help='debugging zones (task_gen, deps, tasks, etc)')
- p('--profile', dest='profile', default='', action='store_true', help=optparse.SUPPRESS_HELP)
+ p('--profile', dest='profile', default=0, action='store_true', help=optparse.SUPPRESS_HELP)
+ p('--pdb', dest='pdb', default=0, action='store_true', help=optparse.SUPPRESS_HELP)
+ p('-h', '--help', dest='whelp', default=0, action='store_true', help="show this help message and exit")
gr = self.add_option_group('Configuration options')
self.option_groups['configure options'] = gr
return group
return None
- def parse_args(self, _args=None):
- """
- Parses arguments from a list which is not necessarily the command-line.
def sanitize_path(self, path, cwd=None):
	"""
	Returns an absolute, normalized version of *path*: the user home is
	expanded and relative paths are anchored at *cwd* (the launch
	directory by default).
	"""
	base = cwd or Context.launch_dir
	p = os.path.expanduser(path)
	p = os.path.normpath(os.path.join(base, p))
	return os.path.abspath(p)
- :param _args: arguments
- :type _args: list of strings
def parse_cmd_args(self, _args=None, cwd=None, allow_unknown=False):
	"""
	Parses the arguments without touching the module-level state.

	:returns: a tuple (options, commands, envvars)
	"""
	self.parser.allow_unknown = allow_unknown
	(options, leftover_args) = self.parser.parse_args(args=_args)
	envvars = []
	commands = []
	for token in leftover_args:
		if '=' in token:
			envvars.append(token)
		elif token != 'options':
			commands.append(token)
	for attr in ('top', 'out', 'destdir', 'prefix', 'bindir', 'libdir'):
		# those paths are usually expanded from Context.launch_dir
		raw = getattr(options, attr, None)
		if raw:
			setattr(options, attr, self.sanitize_path(raw, cwd))
	return options, commands, envvars
+
def init_module_vars(self, arg_options, arg_commands, arg_envvars):
	"""
	Replaces the contents of the module-level ``options``, ``commands``
	and ``envvars`` objects in place (so existing references stay valid),
	then exports the NAME=VALUE pairs to ``os.environ``.
	"""
	options.__dict__.clear()
	del commands[:]
	del envvars[:]
	options.__dict__.update(arg_options.__dict__)
	commands.extend(arg_commands)
	envvars.extend(arg_envvars)
	for var in envvars:
		name, value = var.split('=', 1)
		os.environ[name.strip()] = value
+
def init_logs(self, options, commands, envvars):
	"""
	Sets the logging verbosity, terminal colors and debugging zones
	according to the parsed options.
	"""
	Logs.verbose = options.verbose
	if options.verbose >= 1:
		self.load('errcheck')
	colors = {'yes': 2, 'auto': 1, 'no': 0}[options.colors]
	Logs.enable_colors(colors)
	if options.zones:
		Logs.zones = options.zones.split(',')
		if not Logs.verbose:
			# requesting zones implies at least some verbosity
			Logs.verbose = 1
	elif Logs.verbose > 0:
		Logs.zones = ['runner']
	if Logs.verbose > 2:
		Logs.zones = ['*']
+
def parse_args(self, _args=None):
	"""
	Parses arguments from a list which is not necessarily the command-line.
	Initializes the module variables options, commands and envvars.

	:param _args: arguments to parse (defaults to the process command line)
	:type _args: list of strings
	"""
	# bugfix: _args was previously ignored and sys.argv was always parsed,
	# contradicting the docstring; forward it to parse_cmd_args (passing
	# None preserves the old command-line behavior)
	options, commands, envvars = self.parse_cmd_args(_args)
	self.init_logs(options, commands, envvars)
	self.init_module_vars(options, commands, envvars)
+
def execute(self):
"""
See :py:func:`waflib.Context.Context.execute`
super(OptionsContext, self).execute()
self.parse_args()
Utils.alloc_process_pool(options.jobs)
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2005-2016 (ita)
+# Thomas Nagy, 2005-2018 (ita)
"""
Runner.py: Task scheduling and execution
"""
-import random
+import heapq, traceback
try:
from queue import Queue
except ImportError:
from Queue import Queue
from waflib import Utils, Task, Errors, Logs
-GAP = 20
+GAP = 5
"""
Wait for at least ``GAP * njobs`` before trying to enqueue more tasks to run
"""
class PriorityTasks(object):
	"""
	A deque-like task container backed by a binary min-heap, so that
	``pop`` always returns the highest-priority (smallest) task.
	"""
	def __init__(self):
		self.lst = []
	def __len__(self):
		return len(self.lst)
	def __iter__(self):
		# iterates in heap-array order, not in priority order
		return iter(self.lst)
	def clear(self):
		self.lst = []
	def append(self, task):
		heapq.heappush(self.lst, task)
	def appendleft(self, task):
		"""Deque-compatible alias of :py:meth:`append`; the heap decides the order"""
		heapq.heappush(self.lst, task)
	def pop(self):
		return heapq.heappop(self.lst)
	def extend(self, tasks):
		if self.lst:
			for tsk in tasks:
				self.append(tsk)
		elif isinstance(tasks, list):
			# take ownership of the list and enforce the heap invariant
			self.lst = tasks
			heapq.heapify(tasks)
		else:
			# another PriorityTasks instance: share its heap
			self.lst = tasks.lst
+
class Consumer(Utils.threading.Thread):
"""
Daemon thread object that executes a task. It shares a semaphore with
"""
try:
if not self.spawner.master.stop:
- self.task.process()
+ self.spawner.master.process_task(self.task)
finally:
self.spawner.sem.release()
self.spawner.master.out.put(self.task)
"""
Daemon thread that consumes tasks from :py:class:`waflib.Runner.Parallel` producer and
spawns a consuming thread :py:class:`waflib.Runner.Consumer` for each
- :py:class:`waflib.Task.TaskBase` instance.
+ :py:class:`waflib.Task.Task` instance.
"""
def __init__(self, master):
Utils.threading.Thread.__init__(self)
Instance of :py:class:`waflib.Build.BuildContext`
"""
- self.outstanding = Utils.deque()
- """List of :py:class:`waflib.Task.TaskBase` that may be ready to be executed"""
+ self.outstanding = PriorityTasks()
+ """Heap of :py:class:`waflib.Task.Task` that may be ready to be executed"""
- self.frozen = Utils.deque()
- """List of :py:class:`waflib.Task.TaskBase` that are not ready yet"""
+ self.postponed = PriorityTasks()
+ """Heap of :py:class:`waflib.Task.Task` which are not ready to run for non-DAG reasons"""
+
+ self.incomplete = set()
+ """List of :py:class:`waflib.Task.Task` waiting for dependent tasks to complete (DAG)"""
self.ready = Queue(0)
- """List of :py:class:`waflib.Task.TaskBase` ready to be executed by consumers"""
+ """List of :py:class:`waflib.Task.Task` ready to be executed by consumers"""
self.out = Queue(0)
- """List of :py:class:`waflib.Task.TaskBase` returned by the task consumers"""
+ """List of :py:class:`waflib.Task.Task` returned by the task consumers"""
self.count = 0
"""Amount of tasks that may be processed by :py:class:`waflib.Runner.TaskConsumer`"""
- self.processed = 1
+ self.processed = 0
"""Amount of tasks processed"""
self.stop = False
Flag that indicates that the build cache must be saved when a task was executed
(calls :py:meth:`waflib.Build.BuildContext.store`)"""
+ self.revdeps = Utils.defaultdict(set)
+ """
+ The reverse dependency graph of dependencies obtained from Task.run_after
+ """
+
self.spawner = Spawner(self)
"""
Coordinating daemon thread that spawns thread consumers
"""
Obtains the next Task instance to run
- :rtype: :py:class:`waflib.Task.TaskBase`
+ :rtype: :py:class:`waflib.Task.Task`
"""
if not self.outstanding:
return None
- return self.outstanding.popleft()
+ return self.outstanding.pop()
def postpone(self, tsk):
	"""
	Adds the task to :py:attr:`waflib.Runner.Parallel.postponed`.
	Postponed tasks are re-queued in priority order; the docstring used to
	claim the order was scrambled, which described the removed Waf 1.x
	``random.randint`` implementation and no longer applies.

	:param tsk: task instance
	:type tsk: :py:class:`waflib.Task.Task`
	"""
	self.postponed.append(tsk)
def refill_task_list(self):
"""
- Adds the next group of tasks to execute in :py:attr:`waflib.Runner.Parallel.outstanding`.
+ Pulls a next group of tasks to execute in :py:attr:`waflib.Runner.Parallel.outstanding`.
+ Ensures that all tasks in the current build group are complete before processing the next one.
"""
while self.count > self.numjobs * GAP:
self.get_out()
while not self.outstanding:
if self.count:
self.get_out()
- elif self.frozen:
+ if self.outstanding:
+ break
+ elif self.postponed:
try:
cond = self.deadlock == self.processed
except AttributeError:
pass
else:
if cond:
- msg = 'check the build order for the tasks'
- for tsk in self.frozen:
- if not tsk.run_after:
- msg = 'check the methods runnable_status'
- break
lst = []
- for tsk in self.frozen:
- lst.append('%s\t-> %r' % (repr(tsk), [id(x) for x in tsk.run_after]))
- raise Errors.WafError('Deadlock detected: %s%s' % (msg, ''.join(lst)))
+ for tsk in self.postponed:
+ deps = [id(x) for x in tsk.run_after if not x.hasrun]
+ lst.append('%s\t-> %r' % (repr(tsk), deps))
+ if not deps:
+ lst.append('\n task %r dependencies are done, check its *runnable_status*?' % id(tsk))
+ raise Errors.WafError('Deadlock detected: check the task build order%s' % ''.join(lst))
self.deadlock = self.processed
- if self.frozen:
- self.outstanding.extend(self.frozen)
- self.frozen.clear()
+ if self.postponed:
+ self.outstanding.extend(self.postponed)
+ self.postponed.clear()
elif not self.count:
- self.outstanding.extend(self.biter.next())
- self.total = self.bld.total()
- break
+ if self.incomplete:
+ for x in self.incomplete:
+ for k in x.run_after:
+ if not k.hasrun:
+ break
+ else:
+ # dependency added after the build started without updating revdeps
+ self.incomplete.remove(x)
+ self.outstanding.append(x)
+ break
+ else:
+ raise Errors.WafError('Broken revdeps detected on %r' % self.incomplete)
+ else:
+ tasks = next(self.biter)
+ ready, waiting = self.prio_and_split(tasks)
+ self.outstanding.extend(ready)
+ self.incomplete.update(waiting)
+ self.total = self.bld.total()
+ break
def add_more_tasks(self, tsk):
	"""
	If a task provides :py:attr:`waflib.Task.Task.more_tasks`, the tasks in
	that list are added to the current build and processed before the next
	build group. Priorities are computed for the new batch only, not
	re-calculated globally.

	:param tsk: task instance
	:type tsk: :py:attr:`waflib.Task.Task`
	"""
	more = getattr(tsk, 'more_tasks', None)
	if more:
		ready, waiting = self.prio_and_split(more)
		self.outstanding.extend(ready)
		self.incomplete.update(waiting)
		self.total += len(more)
def mark_finished(self, tsk):
	"""
	Updates the reverse dependency graph when *tsk* completes: tasks (or
	task groups) waiting on it may move from the incomplete set back to
	the outstanding queue.

	:param tsk: the task that just finished
	"""
	def _unfreeze(node):
		# promote a waiting task once every one of its predecessors has run
		if node in self.incomplete and all(k.hasrun for k in node.run_after):
			self.incomplete.remove(node)
			self.outstanding.append(node)

	if tsk not in self.revdeps:
		return
	for waiter in self.revdeps[tsk]:
		if isinstance(waiter, Task.TaskGroup):
			waiter.prev.remove(tsk)
			if not waiter.prev:
				# the whole group is satisfied: release its followers
				for follower in waiter.next:
					follower.run_after.remove(waiter)
					_unfreeze(follower)
				waiter.next = []
		else:
			_unfreeze(waiter)
	del self.revdeps[tsk]
+
def get_out(self):
	"""
	Waits for a Task that task consumers add to :py:attr:`waflib.Runner.Parallel.out`
	after execution, adds follow-up tasks if necessary and updates the
	dependency bookkeeping.

	:rtype: :py:attr:`waflib.Task.Task`
	"""
	finished = self.out.get()
	if not self.stop:
		self.add_more_tasks(finished)
		self.mark_finished(finished)
	self.count -= 1
	# a task completed, so the persisted build state is out of date
	self.dirty = True
	return finished
Enqueue a Task to :py:attr:`waflib.Runner.Parallel.ready` so that consumers can run them.
:param tsk: task instance
- :type tsk: :py:attr:`waflib.Task.TaskBase`
+ :type tsk: :py:attr:`waflib.Task.Task`
"""
self.ready.put(tsk)
def process_task(self, tsk):
	"""
	Processes a task and attempts to stop the build in case of errors
	"""
	tsk.process()
	ok = (tsk.hasrun == Task.SUCCESS)
	if not ok:
		self.error_handler(tsk)
+
def skip(self, tsk):
"""
Mark a task as skipped/up-to-date
"""
tsk.hasrun = Task.SKIPPED
+ self.mark_finished(tsk)
+
+	def cancel(self, tsk):
+		"""
+		Mark a task as failed because of unsatisfiable dependencies
+
+		:param tsk: task instance whose dependencies cannot be satisfied
+		:type tsk: :py:attr:`waflib.Task.Task`
+		"""
+		tsk.hasrun = Task.CANCELED
+		self.mark_finished(tsk)
def error_handler(self, tsk):
"""
- Called when a task cannot be executed. The flag :py:attr:`waflib.Runner.Parallel.stop` is set, unless
- the build is executed with::
+ Called when a task cannot be executed. The flag :py:attr:`waflib.Runner.Parallel.stop` is set,
+ unless the build is executed with::
$ waf build -k
:param tsk: task instance
- :type tsk: :py:attr:`waflib.Task.TaskBase`
+ :type tsk: :py:attr:`waflib.Task.Task`
"""
- if hasattr(tsk, 'scan') and hasattr(tsk, 'uid'):
- # TODO waf 2.0 - this breaks encapsulation
- try:
- del self.bld.imp_sigs[tsk.uid()]
- except KeyError:
- pass
if not self.bld.keep:
self.stop = True
self.error.append(tsk)
return tsk.runnable_status()
except Exception:
self.processed += 1
- tsk.err_msg = Utils.ex_stack()
+ tsk.err_msg = traceback.format_exc()
if not self.stop and self.bld.keep:
self.skip(tsk)
if self.bld.keep == 1:
- # if -k stop at the first exception, if -kk try to go as far as possible
+ # if -k stop on the first exception, if -kk try to go as far as possible
if Logs.verbose > 1 or not self.error:
self.error.append(tsk)
self.stop = True
if Logs.verbose > 1:
self.error.append(tsk)
return Task.EXCEPTION
- tsk.hasrun = Task.EXCEPTION
+ tsk.hasrun = Task.EXCEPTION
self.error_handler(tsk)
+
return Task.EXCEPTION
def start(self):
self.processed += 1
continue
- if self.stop: # stop immediately after a failure was detected
+ if self.stop: # stop immediately after a failure is detected
break
-
st = self.task_status(tsk)
if st == Task.RUN_ME:
self.count += 1
if self.numjobs == 1:
tsk.log_display(tsk.generator.bld)
try:
- tsk.process()
+ self.process_task(tsk)
finally:
self.out.put(tsk)
else:
self.add_task(tsk)
- if st == Task.ASK_LATER:
+ elif st == Task.ASK_LATER:
self.postpone(tsk)
elif st == Task.SKIP_ME:
self.processed += 1
self.skip(tsk)
self.add_more_tasks(tsk)
+ elif st == Task.CANCEL_ME:
+ # A dependency problem has occurred, and the
+ # build is most likely run with `waf -k`
+ if Logs.verbose > 1:
+ self.error.append(tsk)
+ self.processed += 1
+ self.cancel(tsk)
# self.count represents the tasks that have been made available to the consumer threads
# collect all the tasks after an error else the message may be incomplete
self.get_out()
self.ready.put(None)
- assert (self.count == 0 or self.stop)
+ if not self.stop:
+ assert not self.count
+ assert not self.postponed
+ assert not self.incomplete
+
+	def prio_and_split(self, tasks):
+		"""
+		Label input tasks with priority values, and return a pair containing
+		the tasks that are ready to run and the tasks that are necessarily
+		waiting for other tasks to complete.
+
+		The priority system is really meant as an optional layer for optimization:
+		dependency cycles are found quickly, and builds should be more efficient.
+		A high priority number means that a task is processed first.
+
+		This method can be overridden to disable the priority system::
+
+			def prio_and_split(self, tasks):
+				return tasks, []
+
+		:param tasks: tasks to label and split
+		:return: A pair of task lists
+		:rtype: tuple
+		"""
+		# to disable:
+		#return tasks, []
+		for x in tasks:
+			x.visited = 0
+
+		# map each task to the tasks (or groups) that depend on it
+		reverse = self.revdeps
+
+		for x in tasks:
+			for k in x.run_after:
+				if isinstance(k, Task.TaskGroup):
+					# expand a group constraint only once (k.done guard)
+					if k.done:
+						pass
+					else:
+						k.done = True
+						for j in k.prev:
+							reverse[j].add(k)
+				else:
+					reverse[k].add(x)
+
+		# the priority number is not the tree depth
+		# depth-first walk over reverse dependencies
+		# visited states: 0 = unseen, 1 = on the current path, 2 = done
+		def visit(n):
+			if isinstance(n, Task.TaskGroup):
+				return sum(visit(k) for k in n.next)
+
+			if n.visited == 0:
+				n.visited = 1
+
+				if n in reverse:
+					rev = reverse[n]
+					n.prio_order = n.tree_weight + len(rev) + sum(visit(k) for k in rev)
+				else:
+					n.prio_order = n.tree_weight
+
+				n.visited = 2
+			elif n.visited == 1:
+				# reaching a node that is still on the current path means a cycle
+				raise Errors.WafError('Dependency cycle found!')
+			return n.prio_order
+
+		for x in tasks:
+			if x.visited != 0:
+				# must visit all to detect cycles
+				continue
+			try:
+				visit(x)
+			except Errors.WafError:
+				# re-walk the graph to produce a detailed cycle description
+				self.debug_cycles(tasks, reverse)
+
+		# split: tasks with an unmet run_after constraint wait, the rest are ready
+		ready = []
+		waiting = []
+		for x in tasks:
+			for k in x.run_after:
+				if not k.hasrun:
+					waiting.append(x)
+					break
+			else:
+				ready.append(x)
+		return (ready, waiting)
+
+	def debug_cycles(self, tasks, reverse):
+		"""
+		Re-walks the reverse dependency graph to collect the tasks forming a
+		dependency cycle, then raises an error listing them.
+
+		:param tasks: tasks to inspect
+		:param reverse: reverse dependency map (task -> tasks depending on it)
+		:raises: :py:class:`waflib.Errors.WafError` describing the cycle found
+		"""
+		# visit states: 0 = unseen, 1 = on the current path, 2 = done
+		tmp = {}
+		for x in tasks:
+			tmp[x] = 0
+
+		def visit(n, acc):
+			if isinstance(n, Task.TaskGroup):
+				for k in n.next:
+					visit(k, acc)
+				return
+			if tmp[n] == 0:
+				tmp[n] = 1
+				for k in reverse.get(n, []):
+					# acc records the path taken so the cycle can be printed
+					visit(k, [n] + acc)
+				tmp[n] = 2
+			elif tmp[n] == 1:
+				lst = []
+				for tsk in acc:
+					lst.append(repr(tsk))
+					if tsk is n:
+						# exclude prior nodes, we want the minimum cycle
+						break
+				raise Errors.WafError('Task dependency cycle in "run_after" constraints: %s' % ''.join(lst))
+		for x in tasks:
+			visit(x, [])
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2005-2016 (ita)
+# Thomas Nagy, 2005-2018 (ita)
"Module called for configuring, compiling and installing targets"
+from __future__ import with_statement
+
import os, shlex, shutil, traceback, errno, sys, stat
from waflib import Utils, Configure, Logs, Options, ConfigSet, Context, Errors, Build, Node
:param wafdir: absolute path representing the directory of the waf library
:type wafdir: string
"""
-
Logs.init_log()
if Context.WAFVERSION != version:
Logs.error('Waf script %r and library %r do not match (directory %r)', version, Context.WAFVERSION, wafdir)
sys.exit(1)
- if '--version' in sys.argv:
- Context.run_dir = current_directory
- ctx = Context.create_context('options')
- ctx.curdir = current_directory
- ctx.parse_args()
- sys.exit(0)
+ # Store current directory before any chdir
+ Context.waf_dir = wafdir
+ Context.run_dir = Context.launch_dir = current_directory
+ start_dir = current_directory
+ no_climb = os.environ.get('NOCLIMB')
if len(sys.argv) > 1:
- # os.path.join handles absolute paths in sys.argv[1] accordingly (it discards the previous ones)
+ # os.path.join handles absolute paths
# if sys.argv[1] is not an absolute path, then it is relative to the current working directory
potential_wscript = os.path.join(current_directory, sys.argv[1])
- # maybe check if the file is executable
- # perhaps extract 'wscript' as a constant
- if os.path.basename(potential_wscript) == 'wscript' and os.path.isfile(potential_wscript):
+ if os.path.basename(potential_wscript) == Context.WSCRIPT_FILE and os.path.isfile(potential_wscript):
# need to explicitly normalize the path, as it may contain extra '/.'
- # TODO abspath?
- current_directory = os.path.normpath(os.path.dirname(potential_wscript))
+ path = os.path.normpath(os.path.dirname(potential_wscript))
+ start_dir = os.path.abspath(path)
+ no_climb = True
sys.argv.pop(1)
- Context.waf_dir = wafdir
- Context.launch_dir = current_directory
+ ctx = Context.create_context('options')
+ (options, commands, env) = ctx.parse_cmd_args(allow_unknown=True)
+ if options.top:
+ start_dir = Context.run_dir = Context.top_dir = options.top
+ no_climb = True
+ if options.out:
+ Context.out_dir = options.out
# if 'configure' is in the commands, do not search any further
- no_climb = os.environ.get('NOCLIMB')
if not no_climb:
for k in no_climb_commands:
- for y in sys.argv:
+ for y in commands:
if y.startswith(k):
no_climb = True
break
- # if --top is provided assume the build started in the top directory
- for i, x in enumerate(sys.argv):
- # WARNING: this modifies sys.argv
- if x.startswith('--top='):
- Context.run_dir = Context.top_dir = Utils.sane_path(x[6:])
- sys.argv[i] = '--top=' + Context.run_dir
- if x.startswith('--out='):
- Context.out_dir = Utils.sane_path(x[6:])
- sys.argv[i] = '--out=' + Context.out_dir
-
# try to find a lock file (if the project was configured)
# at the same time, store the first wscript file seen
- cur = current_directory
- while cur and not Context.top_dir:
+ cur = start_dir
+ while cur:
try:
lst = os.listdir(cur)
except OSError:
break
if not Context.run_dir:
- if '-h' in sys.argv or '--help' in sys.argv:
- Logs.warn('No wscript file found: the help message may be incomplete')
- Context.run_dir = current_directory
- ctx = Context.create_context('options')
- ctx.curdir = current_directory
- ctx.parse_args()
+ if options.whelp:
+ Logs.warn('These are the generic options (no wscript/project found)')
+ ctx.parser.print_help()
sys.exit(0)
- Logs.error('Waf: Run from a directory containing a file named %r', Context.WSCRIPT_FILE)
+ Logs.error('Waf: Run from a folder containing a %r file (or try -h for the generic options)', Context.WSCRIPT_FILE)
sys.exit(1)
try:
try:
set_main_module(os.path.normpath(os.path.join(Context.run_dir, Context.WSCRIPT_FILE)))
- except Errors.WafError ,e:
+ except Errors.WafError as e:
Logs.pprint('RED', e.verbose_msg)
Logs.error(str(e))
sys.exit(1)
- except Exception ,e:
+ except Exception as e:
Logs.error('Waf: The wscript in %r is unreadable', Context.run_dir)
traceback.print_exc(file=sys.stdout)
sys.exit(2)
- if '--profile' in sys.argv:
+ if options.profile:
import cProfile, pstats
cProfile.runctx('from waflib import Scripting; Scripting.run_commands()', {}, {}, 'profi.txt')
p = pstats.Stats('profi.txt')
p.sort_stats('time').print_stats(75) # or 'cumulative'
else:
try:
- run_commands()
- except Errors.WafError ,e:
+ try:
+ run_commands()
+ except:
+ if options.pdb:
+ import pdb
+ type, value, tb = sys.exc_info()
+ traceback.print_exc()
+ pdb.post_mortem(tb)
+ else:
+ raise
+ except Errors.WafError as e:
if Logs.verbose > 1:
Logs.pprint('RED', e.verbose_msg)
Logs.error(e.msg)
sys.exit(1)
except SystemExit:
raise
- except Exception ,e:
+ except Exception as e:
traceback.print_exc(file=sys.stdout)
sys.exit(2)
except KeyboardInterrupt:
Parses the command-line options and initialize the logging system.
Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization.
"""
- Context.create_context('options').execute()
-
- for var in Options.envvars:
- (name, value) = var.split('=', 1)
- os.environ[name.strip()] = value
-
+ ctx = Context.create_context('options')
+ ctx.execute()
if not Options.commands:
- Options.commands = [default_cmd]
- Options.commands = [x for x in Options.commands if x != 'options'] # issue 1076
-
- # process some internal Waf options
- Logs.verbose = Options.options.verbose
- #Logs.init_log()
-
- if Options.options.zones:
- Logs.zones = Options.options.zones.split(',')
- if not Logs.verbose:
- Logs.verbose = 1
- elif Logs.verbose > 0:
- Logs.zones = ['runner']
-
- if Logs.verbose > 2:
- Logs.zones = ['*']
+ Options.commands.append(default_cmd)
+ if Options.options.whelp:
+ ctx.parser.print_help()
+ sys.exit(0)
def run_command(cmd_name):
"""
pass
def distclean(ctx):
- '''removes the build directory'''
- lst = os.listdir('.')
- for f in lst:
- if f == Options.lockfile:
- try:
- proj = ConfigSet.ConfigSet(f)
- except IOError:
- Logs.warn('Could not read %r', f)
- continue
+ '''removes build folders and data'''
- if proj['out_dir'] != proj['top_dir']:
- try:
- shutil.rmtree(proj['out_dir'])
- except EnvironmentError ,e:
- if e.errno != errno.ENOENT:
- Logs.warn('Could not remove %r', proj['out_dir'])
- else:
- distclean_dir(proj['out_dir'])
+ def remove_and_log(k, fun):
+ try:
+ fun(k)
+ except EnvironmentError as e:
+ if e.errno != errno.ENOENT:
+ Logs.warn('Could not remove %r', k)
- for k in (proj['out_dir'], proj['top_dir'], proj['run_dir']):
- p = os.path.join(k, Options.lockfile)
- try:
- os.remove(p)
- except OSError ,e:
- if e.errno != errno.ENOENT:
- Logs.warn('Could not remove %r', p)
+ # remove waf cache folders on the top-level
+ if not Options.commands:
+ for k in os.listdir('.'):
+ for x in '.waf-2 waf-2 .waf3-2 waf3-2'.split():
+ if k.startswith(x):
+ remove_and_log(k, shutil.rmtree)
+
+ # remove a build folder, if any
+ cur = '.'
+ if ctx.options.no_lock_in_top:
+ cur = ctx.options.out
+
+ try:
+ lst = os.listdir(cur)
+ except OSError:
+ Logs.warn('Could not read %r', cur)
+ return
+
+ if Options.lockfile in lst:
+ f = os.path.join(cur, Options.lockfile)
+ try:
+ env = ConfigSet.ConfigSet(f)
+ except EnvironmentError:
+ Logs.warn('Could not read %r', f)
+ return
- # remove local waf cache folders
- if not Options.commands:
- for x in '.waf-1. waf-1. .waf3-1. waf3-1.'.split():
- if f.startswith(x):
- shutil.rmtree(f, ignore_errors=True)
+ if not env.out_dir or not env.top_dir:
+ Logs.warn('Invalid lock file %r', f)
+ return
+
+ if env.out_dir == env.top_dir:
+ distclean_dir(env.out_dir)
+ else:
+ remove_and_log(env.out_dir, shutil.rmtree)
+
+ for k in (env.out_dir, env.top_dir, env.run_dir):
+ p = os.path.join(k, Options.lockfile)
+ remove_and_log(p, os.remove)
class Dist(Context.Context):
'''creates an archive containing the project source code'''
self.fatal('Valid algo types are tar.bz2, tar.gz, tar.xz or zip')
try:
- from hashlib import sha1
+ from hashlib import sha256
except ImportError:
digest = ''
else:
- digest = ' (sha=%r)' % sha1(node.read(flags='rb')).hexdigest()
+ digest = ' (sha256=%r)' % sha256(node.read(flags='rb')).hexdigest()
Logs.info('New archive created: %s%s', self.arch_name, digest)
tinfo.gname = 'root'
if os.path.isfile(p):
- fu = open(p, 'rb')
- try:
- tar.addfile(tinfo, fileobj=fu)
- finally:
- fu.close()
+ with open(p, 'rb') as f:
+ tar.addfile(tinfo, fileobj=f)
else:
tar.addfile(tinfo)
try:
return self.excl
except AttributeError:
- self.excl = Node.exclude_regs + ' **/waf-1.8.* **/.waf-1.8* **/waf3-1.8.* **/.waf3-1.8* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*'
+ self.excl = Node.exclude_regs + ' **/waf-2.* **/.waf-2.* **/waf3-2.* **/.waf3-2.* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*'
if Context.out_dir:
nd = self.root.find_node(Context.out_dir)
if nd:
pass
class DistCheck(Dist):
- """
- Creates an archive of the project, then attempts to build the project in a temporary directory::
-
- $ waf distcheck
- """
+ """creates an archive with dist, then tries to build it"""
fun = 'distcheck'
cmd = 'distcheck'
"""
import tempfile, tarfile
- try:
- t = tarfile.open(self.get_arch_name())
+ with tarfile.open(self.get_arch_name()) as t:
for x in t:
t.extract(x)
- finally:
- t.close()
instdir = tempfile.mkdtemp('.inst', self.get_base_name())
cmd = self.make_distcheck_cmd(instdir)
cmd = env.config_cmd or 'configure'
if Configure.autoconfig == 'clobber':
tmp = Options.options.__dict__
- Options.options.__dict__ = env.options
+ if env.options:
+ Options.options.__dict__ = env.options
try:
run_command(cmd)
finally:
return execute_method(self)
return execute
Build.BuildContext.execute = autoconfigure(Build.BuildContext.execute)
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2005-2016 (ita)
+# Thomas Nagy, 2005-2018 (ita)
"""
Tasks represent atomic operations such as processes.
"""
-import os, re, sys, tempfile
+import os, re, sys, tempfile, traceback
from waflib import Utils, Logs, Errors
# task states
EXCEPTION = 3
"""An exception occurred in the task execution"""
+CANCELED = 4
+"""A dependency for the task is missing so it was canceled"""
+
SKIPPED = 8
"""The task did not have to be executed"""
RUN_ME = -3
"""The task must be executed"""
+CANCEL_ME = -4
+"""The task cannot be executed because of a dependency problem"""
+
COMPILE_TEMPLATE_SHELL = '''
def f(tsk):
env = tsk.env
super(store_task_type, cls).__init__(name, bases, dict)
name = cls.__name__
- if name != 'evil' and name != 'TaskBase':
- global classes
+ if name != 'evil' and name != 'Task':
if getattr(cls, 'run_str', None):
# if a string is provided, convert it to a method
(f, dvars) = compile_fun(cls.run_str, cls.shell)
evil = store_task_type('evil', (object,), {})
"Base class provided to avoid writing a metaclass, so the code can run in python 2.6 and 3.x unmodified"
-class TaskBase(evil):
+class Task(evil):
"""
- Base class for all Waf tasks, which should be seen as an interface.
- For illustration purposes, instances of this class will execute the attribute
- 'fun' in :py:meth:`waflib.Task.TaskBase.run`. When in doubt, create
- subclasses of :py:class:`waflib.Task.Task` instead.
+ This class deals with the filesystem (:py:class:`waflib.Node.Node`). The method :py:class:`waflib.Task.Task.runnable_status`
+ uses a hash value (from :py:class:`waflib.Task.Task.signature`) which is persistent from build to build. When the value changes,
+ the task has to be executed. The method :py:class:`waflib.Task.Task.post_run` will assign the task signature to the output
+ nodes (if present).
+ """
+ vars = []
+ """ConfigSet variables that should trigger a rebuild (class attribute used for :py:meth:`waflib.Task.Task.sig_vars`)"""
- Subclasses must override these methods:
+ always_run = False
+ """Specify whether task instances must always be executed or not (class attribute)"""
- #. __str__: string to display to the user
- #. runnable_status: ask the task if it should be run, skipped, or if we have to ask later
- #. run: what to do to execute the task
- #. post_run: what to do after the task has been executed
- """
+ shell = False
+ """Execute the command with the shell (class attribute)"""
color = 'GREEN'
"""Color for the console display, see :py:const:`waflib.Logs.colors_lst`"""
after = []
"""List of task class names to execute after instances of this class"""
- hcode = ''
+ hcode = Utils.SIG_NIL
"""String representing an additional hash for the class representation"""
keep_last_cmd = False
This may be useful for certain extensions but it can a lot of memory.
"""
- __slots__ = ('hasrun', 'generator')
+ weight = 0
+ """Optional weight to tune the priority for task instances.
+ The higher, the earlier. The weight only applies to single task objects."""
+
+ tree_weight = 0
+ """Optional weight to tune the priority of task instances and whole subtrees.
+ The higher, the earlier."""
+
+ prio_order = 0
+ """Priority order set by the scheduler on instances during the build phase.
+ You most likely do not need to set it.
+ """
+
+ __slots__ = ('hasrun', 'generator', 'env', 'inputs', 'outputs', 'dep_nodes', 'run_after')
def __init__(self, *k, **kw):
- """
- The base task class requires a task generator (set to *self* if missing)
- """
self.hasrun = NOT_RUN
try:
self.generator = kw['generator']
except KeyError:
self.generator = self
- def __repr__(self):
- return '\n\t{task %r: %s %s}' % (self.__class__.__name__, id(self), str(getattr(self, 'fun', '')))
+ self.env = kw['env']
+ """:py:class:`waflib.ConfigSet.ConfigSet` object (make sure to provide one)"""
- def __str__(self):
- "String to display to the user"
- if hasattr(self, 'fun'):
- return self.fun.__name__
- return self.__class__.__name__
+ self.inputs = []
+ """List of input nodes, which represent the files used by the task instance"""
- def keyword(self):
- "Display keyword used to prettify the console outputs"
- if hasattr(self, 'fun'):
- return 'Function'
- return 'Processing'
+ self.outputs = []
+ """List of output nodes, which represent the files created by the task instance"""
+
+ self.dep_nodes = []
+ """List of additional nodes to depend on"""
+
+ self.run_after = set()
+ """Set of tasks that must be executed before this one"""
+
+	def __lt__(self, other):
+		# comparisons are inverted on purpose: a task with a higher
+		# priority() value sorts before one with a lower value
+		return self.priority() > other.priority()
+	def __le__(self, other):
+		return self.priority() >= other.priority()
+	def __gt__(self, other):
+		return self.priority() < other.priority()
+	def __ge__(self, other):
+		return self.priority() <= other.priority()
def get_cwd(self):
"""
x = '"%s"' % x
return x
+	def priority(self):
+		"""
+		Priority of execution; the higher, the earlier
+
+		:return: the priority value
+		:rtype: a tuple of numeric values
+		"""
+		# ties on the weight are broken by the task generator counter:
+		# a smaller tg_idx_count (generator created earlier) yields a
+		# higher priority through the negation
+		return (self.weight + self.prio_order, - getattr(self.generator, 'tg_idx_count', 0))
+
def split_argfile(self, cmd):
"""
Splits a list of process commands into the executable part and its list of arguments
:type cmd: list of string (best) or string (process will use a shell)
:return: the return code
:rtype: int
+
+ Optional parameters:
+
+ #. cwd: current working directory (Node or string)
+ #. stdout: set to None to prevent waf from capturing the process standard output
+ #. stderr: set to None to prevent waf from capturing the process standard error
+ #. timeout: timeout value (Python 3)
"""
if not 'cwd' in kw:
kw['cwd'] = self.get_cwd()
env = kw['env'] = dict(kw.get('env') or self.env.env or os.environ)
env['PATH'] = self.env.PATH if isinstance(self.env.PATH, str) else os.pathsep.join(self.env.PATH)
+ if hasattr(self, 'stdout'):
+ kw['stdout'] = self.stdout
+ if hasattr(self, 'stderr'):
+ kw['stderr'] = self.stderr
+
# workaround for command line length limit:
# http://support.microsoft.com/kb/830473
if not isinstance(cmd, str) and (len(repr(cmd)) >= 8192 if Utils.is_win32 else len(cmd) > 200000):
cmd, args = self.split_argfile(cmd)
try:
(fd, tmp) = tempfile.mkstemp()
- os.write(fd, '\r\n'.join(args))
+ os.write(fd, '\r\n'.join(args).encode())
os.close(fd)
if Logs.verbose:
Logs.debug('argfile: @%r -> %r', tmp, args)
else:
return self.generator.bld.exec_command(cmd, **kw)
- def runnable_status(self):
- """
- Returns the Task status
-
- :return: a task state in :py:const:`waflib.Task.RUN_ME`, :py:const:`waflib.Task.SKIP_ME` or :py:const:`waflib.Task.ASK_LATER`.
- :rtype: int
- """
- return RUN_ME
-
- def uid(self):
- """
- Computes a unique identifier for the task
-
- :rtype: string or bytes
- """
- return Utils.SIG_NIL
-
def process(self):
"""
- Assume that the task has had a ``master`` which is an instance of :py:class:`waflib.Runner.Parallel`.
- Execute the task and then put it back in the queue :py:attr:`waflib.Runner.Parallel.out` (may be replaced by subclassing).
+ Runs the task and handles errors
:return: 0 or None if everything is fine
:rtype: integer
"""
# remove the task signature immediately before it is executed
- # in case of failure the task will be executed again
- m = self.generator.bld.producer
+ # so that the task will be executed again in case of failure
try:
- # TODO another place for this?
del self.generator.bld.task_sigs[self.uid()]
except KeyError:
pass
try:
ret = self.run()
except Exception:
- self.err_msg = Utils.ex_stack()
+ self.err_msg = traceback.format_exc()
self.hasrun = EXCEPTION
-
- # TODO cleanup
- m.error_handler(self)
- return
-
- if ret:
- self.err_code = ret
- self.hasrun = CRASHED
else:
- try:
- self.post_run()
- except Errors.WafError:
- pass
- except Exception:
- self.err_msg = Utils.ex_stack()
- self.hasrun = EXCEPTION
+ if ret:
+ self.err_code = ret
+ self.hasrun = CRASHED
else:
- self.hasrun = SUCCESS
- if self.hasrun != SUCCESS:
- m.error_handler(self)
-
- def run(self):
- """
- Called by threads to execute the tasks. The default is empty and meant to be overridden in subclasses.
-
- .. warning:: It is a bad idea to create nodes in this method, so avoid :py:meth:`waflib.Node.Node.ant_glob`
-
- :rtype: int
- """
- if hasattr(self, 'fun'):
- return self.fun(self)
- return 0
+ try:
+ self.post_run()
+ except Errors.WafError:
+ pass
+ except Exception:
+ self.err_msg = traceback.format_exc()
+ self.hasrun = EXCEPTION
+ else:
+ self.hasrun = SUCCESS
- def post_run(self):
- "Update build data after successful Task execution. Override in subclasses."
- pass
+ if self.hasrun != SUCCESS and self.scan:
+ # rescan dependencies on next run
+ try:
+ del self.generator.bld.imp_sigs[self.uid()]
+ except KeyError:
+ pass
def log_display(self, bld):
"Writes the execution status on the context logger"
def cur():
# the current task position, computed as late as possible
- tmp = -1
- if hasattr(master, 'ready'):
- tmp -= master.ready.qsize()
- return master.processed + tmp
+ return master.processed - master.ready.qsize()
if self.generator.bld.progress_bar == 1:
return self.generator.bld.progress_line(cur(), master.total, col1, col2)
:return: a hash value
:rtype: string
"""
- cls = self.__class__
- tup = (str(cls.before), str(cls.after), str(cls.ext_in), str(cls.ext_out), cls.__name__, cls.hcode)
- return hash(tup)
+ return (tuple(self.before), tuple(self.after), tuple(self.ext_in), tuple(self.ext_out), self.__class__.__name__, self.hcode)
def format_error(self):
"""
return ' -> task in %r failed%s' % (name, msg)
elif self.hasrun == MISSING:
return ' -> missing files in %r%s' % (name, msg)
+ elif self.hasrun == CANCELED:
+ return ' -> %r canceled because of missing dependencies' % name
else:
return 'invalid status for task in %r: %r' % (name, self.hasrun)
The results will be slightly different if FOO_ST is a list, for example::
- env.FOO_ST = ['-a', '-b']
+ env.FOO = ['p1', 'p2']
env.FOO_ST = '-I%s'
# ${FOO_ST:FOO} returns
['-Ip1', '-Ip2']
- env.FOO = ['p1', 'p2']
+ env.FOO_ST = ['-a', '-b']
# ${FOO_ST:FOO} returns
['-a', '-b', 'p1', '-a', '-b', 'p2']
"""
lst.append(y)
return lst
-class Task(TaskBase):
- """
- This class deals with the filesystem (:py:class:`waflib.Node.Node`). The method :py:class:`waflib.Task.Task.runnable_status`
- uses a hash value (from :py:class:`waflib.Task.Task.signature`) which is persistent from build to build. When the value changes,
- the task has to be executed. The method :py:class:`waflib.Task.Task.post_run` will assign the task signature to the output
- nodes (if present).
- """
- vars = []
- """ConfigSet variables that should trigger a rebuild (class attribute used for :py:meth:`waflib.Task.Task.sig_vars`)"""
-
- always_run = False
- """Specify whether task instances must always be executed or not (class attribute)"""
-
- shell = False
- """Execute the command with the shell (class attribute)"""
-
- def __init__(self, *k, **kw):
- TaskBase.__init__(self, *k, **kw)
-
- self.env = kw['env']
- """:py:class:`waflib.ConfigSet.ConfigSet` object (make sure to provide one)"""
-
- self.inputs = []
- """List of input nodes, which represent the files used by the task instance"""
-
- self.outputs = []
- """List of output nodes, which represent the files created by the task instance"""
-
- self.dep_nodes = []
- """List of additional nodes to depend on"""
-
- self.run_after = set()
- """Set of tasks that must be executed before this one"""
-
def __str__(self):
"string to display to the user"
name = self.__class__.__name__
src_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.inputs])
tgt_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.outputs])
- if self.outputs: sep = ' -> '
- else: sep = ''
+ if self.outputs:
+ sep = ' -> '
+ else:
+ sep = ''
return '%s: %s%s%s' % (self.__class__.__name__, src_str, sep, tgt_str)
def keyword(self):
- """
- See :py:meth:`waflib.Task.TaskBase`
- """
+ "Display keyword used to prettify the console outputs"
name = self.__class__.__name__
if name.endswith(('lib', 'program')):
return 'Linking'
:param inp: input nodes
:type inp: node or list of nodes
"""
- if isinstance(inp, list): self.inputs += inp
- else: self.inputs.append(inp)
+ if isinstance(inp, list):
+ self.inputs += inp
+ else:
+ self.inputs.append(inp)
def set_outputs(self, out):
"""
:param out: output nodes
:type out: node or list of nodes
"""
- if isinstance(out, list): self.outputs += out
- else: self.outputs.append(out)
+ if isinstance(out, list):
+ self.outputs += out
+ else:
+ self.outputs.append(out)
def set_run_after(self, task):
"""
:param task: task
:type task: :py:class:`waflib.Task.Task`
"""
- assert isinstance(task, TaskBase)
+ assert isinstance(task, Task)
self.run_after.add(task)
def signature(self):
def runnable_status(self):
"""
- See :py:meth:`waflib.Task.TaskBase.runnable_status`
+ Returns the Task status
+
+ :return: a task state in :py:const:`waflib.Task.RUN_ME`,
+ :py:const:`waflib.Task.SKIP_ME`, :py:const:`waflib.Task.CANCEL_ME` or :py:const:`waflib.Task.ASK_LATER`.
+ :rtype: int
"""
- #return 0 # benchmarking
+ bld = self.generator.bld
+ if bld.is_install < 0:
+ return SKIP_ME
for t in self.run_after:
if not t.hasrun:
return ASK_LATER
+ elif t.hasrun < SKIPPED:
+ # a dependency has an error
+ return CANCEL_ME
# first compute the signature
try:
return ASK_LATER
# compare the signature to a signature computed previously
- bld = self.generator.bld
key = self.uid()
try:
prev_sig = bld.task_sigs[key]
continue
for v in d:
- if isinstance(v, bld.root.__class__):
+ try:
v = v.get_bld_sig()
- elif hasattr(v, '__call__'):
- v = v() # dependency is a function, call it
+ except AttributeError:
+ if hasattr(v, '__call__'):
+ v = v() # dependency is a function, call it
upd(v)
def sig_vars(self):
try:
return self.uid_
except AttributeError:
- m = Utils.md5(self.__class__.__name__.encode('iso8859-1', 'xmlcharrefreplace'))
+ m = Utils.md5(self.__class__.__name__.encode('latin-1', 'xmlcharrefreplace'))
up = m.update
for x in self.inputs + self.outputs:
- up(x.abspath().encode('iso8859-1', 'xmlcharrefreplace'))
+ up(x.abspath().encode('latin-1', 'xmlcharrefreplace'))
self.uid_ = m.digest()
return self.uid_
uid.__doc__ = Task.uid.__doc__
waflib.Task.is_before(t1, t2) # True
:param t1: Task object
- :type t1: :py:class:`waflib.Task.TaskBase`
+ :type t1: :py:class:`waflib.Task.Task`
:param t2: Task object
- :type t2: :py:class:`waflib.Task.TaskBase`
+ :type t2: :py:class:`waflib.Task.Task`
"""
to_list = Utils.to_list
for k in to_list(t2.ext_in):
Updates the ``run_after`` attribute of all tasks based on the task inputs and outputs
:param tasks: tasks
- :type tasks: list of :py:class:`waflib.Task.TaskBase`
+ :type tasks: list of :py:class:`waflib.Task.Task`
"""
ins = Utils.defaultdict(set)
outs = Utils.defaultdict(set)
for x in tasks:
- for a in getattr(x, 'inputs', []) + getattr(x, 'dep_nodes', []):
- ins[id(a)].add(x)
- for a in getattr(x, 'outputs', []):
- outs[id(a)].add(x)
+ for a in x.inputs:
+ ins[a].add(x)
+ for a in x.dep_nodes:
+ ins[a].add(x)
+ for a in x.outputs:
+ outs[a].add(x)
links = set(ins.keys()).intersection(outs.keys())
for k in links:
for a in ins[k]:
a.run_after.update(outs[k])
+
+class TaskGroup(object):
+	"""
+	Wrap nxm task order constraints into a single object
+	to prevent the creation of large list/set objects
+
+	This is an optimization
+	"""
+	def __init__(self, prev, next):
+		# set of tasks that must complete before the tasks in 'next' may run
+		self.prev = prev
+		# list of tasks that wait on this group
+		self.next = next
+		# marker so that the scheduler expands the group only once
+		self.done = False
+
+	def get_hasrun(self):
+		# the group counts as run only once every predecessor task has run
+		for k in self.prev:
+			if not k.hasrun:
+				return NOT_RUN
+		return SUCCESS
+
+	# read-only property mirroring the Task.hasrun attribute
+	hasrun = property(get_hasrun, None)
+
def set_precedence_constraints(tasks):
"""
Updates the ``run_after`` attribute of all tasks based on the after/before/ext_out/ext_in attributes
:param tasks: tasks
- :type tasks: list of :py:class:`waflib.Task.TaskBase`
+ :type tasks: list of :py:class:`waflib.Task.Task`
"""
cstr_groups = Utils.defaultdict(list)
for x in tasks:
else:
continue
- aval = set(cstr_groups[keys[a]])
- for x in cstr_groups[keys[b]]:
- x.run_after.update(aval)
+ a = cstr_groups[keys[a]]
+ b = cstr_groups[keys[b]]
+
+ if len(a) < 2 or len(b) < 2:
+ for x in b:
+ x.run_after.update(a)
+ else:
+ group = TaskGroup(set(a), set(b))
+ for x in b:
+ x.run_after.add(group)
def funex(c):
"""
app = parm.append
for (var, meth) in extr:
if var == 'SRC':
- if meth: app('tsk.inputs%s' % meth)
- else: app('" ".join([a.path_from(cwdx) for a in tsk.inputs])')
+ if meth:
+ app('tsk.inputs%s' % meth)
+ else:
+ app('" ".join([a.path_from(cwdx) for a in tsk.inputs])')
elif var == 'TGT':
- if meth: app('tsk.outputs%s' % meth)
- else: app('" ".join([a.path_from(cwdx) for a in tsk.outputs])')
+ if meth:
+ app('tsk.outputs%s' % meth)
+ else:
+ app('" ".join([a.path_from(cwdx) for a in tsk.outputs])')
elif meth:
if meth.startswith(':'):
if var not in dvars:
if var not in dvars:
dvars.append(var)
app("p('%s')" % var)
- if parm: parm = "%% (%s) " % (',\n\t\t'.join(parm))
- else: parm = ''
+ if parm:
+ parm = "%% (%s) " % (',\n\t\t'.join(parm))
+ else:
+ parm = ''
c = COMPILE_TEMPLATE_SHELL % (line, parm)
Logs.debug('action: %s', c.strip().splitlines())
"""
Parses a string expression such as '${CC} ${SRC} -o ${TGT}' and returns a pair containing:
- * The function created (compiled) for use as :py:meth:`waflib.Task.TaskBase.run`
+ * The function created (compiled) for use as :py:meth:`waflib.Task.Task.run`
* The list of variables that must cause rebuilds when *env* data is modified
for example::
params['run'] = func
cls = type(Task)(name, (Task,), params)
- global classes
classes[name] = cls
if ext_in:
return cls
+TaskBase = Task
+"Provided for compatibility reasons, TaskBase should not be used"
-def always_run(cls):
- """
- Deprecated Task class decorator (to be removed in waf 2.0)
-
- Set all task instances of this class to be executed whenever a build is started
- The task signature is calculated, but the result of the comparison between
- task signatures is bypassed
- """
- Logs.warn('This decorator is deprecated, set always_run on the task class instead!')
- cls.always_run = True
- return cls
-
-def update_outputs(cls):
- """
- Obsolete, to be removed in waf 2.0
- """
- return cls
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2005-2016 (ita)
+# Thomas Nagy, 2005-2018 (ita)
"""
Task generators
class task_gen(object):
"""
- Instances of this class create :py:class:`waflib.Task.TaskBase` when
+ Instances of this class create :py:class:`waflib.Task.Task` when
calling the method :py:meth:`waflib.TaskGen.task_gen.post` from the main thread.
A few notes:
mappings = Utils.ordered_iter_dict()
"""Mappings are global file extension mappings that are retrieved in the order of definition"""
- prec = Utils.defaultdict(list)
+ prec = Utils.defaultdict(set)
"""Dict that holds the precedence execution rules for task generator methods"""
def __init__(self, *k, **kw):
The extra key/value elements passed in ``kw`` are set as attributes
"""
- self.source = ''
+ self.source = []
self.target = ''
self.meths = []
self.env = self.bld.env.derive()
self.path = self.bld.path # emulate chdir when reading scripts
- # provide a unique id
+ # Provide a unique index per folder
+ # This is part of a measure to prevent output file name collisions
+ path = self.path.abspath()
try:
- self.idx = self.bld.idx[self.path] = self.bld.idx.get(self.path, 0) + 1
+ self.idx = self.bld.idx[path] = self.bld.idx.get(path, 0) + 1
except AttributeError:
self.bld.idx = {}
- self.idx = self.bld.idx[self.path] = 1
+ self.idx = self.bld.idx[path] = 1
+
+ # Record the global task generator count
+ try:
+ self.tg_idx_count = self.bld.tg_idx_count = self.bld.tg_idx_count + 1
+ except AttributeError:
+ self.tg_idx_count = self.bld.tg_idx_count = 1
for key, val in kw.items():
setattr(self, key, val)
tmp = []
for a in keys:
for x in prec.values():
- if a in x: break
+ if a in x:
+ break
else:
tmp.append(a)
- tmp.sort()
+ tmp.sort(reverse=True)
# topological sort
out = []
break
else:
tmp.append(x)
+ tmp.sort(reverse=True)
if prec:
buf = ['Cycle detected in the method execution:']
for k, v in prec.items():
buf.append('- %s after %s' % (k, [x for x in v if x in prec]))
raise Errors.WafError('\n'.join(buf))
- out.reverse()
self.meths = out
# then we run the methods in order
:param tgt: output nodes
:type tgt: list of :py:class:`waflib.Tools.Node.Node`
:return: A task object
- :rtype: :py:class:`waflib.Task.TaskBase`
+ :rtype: :py:class:`waflib.Task.Task`
"""
task = Task.classes[name](env=self.env.derive(), generator=self)
if src:
def deco(func):
setattr(task_gen, func.__name__, func)
for fun_name in k:
- if not func.__name__ in task_gen.prec[fun_name]:
- task_gen.prec[fun_name].append(func.__name__)
- #task_gen.prec[fun_name].sort()
+ task_gen.prec[func.__name__].add(fun_name)
return func
return deco
before = before_method
def deco(func):
setattr(task_gen, func.__name__, func)
for fun_name in k:
- if not fun_name in task_gen.prec[func.__name__]:
- task_gen.prec[func.__name__].append(fun_name)
- #task_gen.prec[func.__name__].sort()
+ task_gen.prec[fun_name].add(func.__name__)
return func
return deco
after = after_method
return func
return deco
-# ---------------------------------------------------------------
-# The following methods are task generator methods commonly used
-# they are almost examples, the rest of waf core does not depend on them
-
@taskgen_method
def to_nodes(self, lst, path=None):
"""
- Converts the input list into a list of nodes.
+ Flatten the input list of string/nodes/lists into a list of nodes.
+
It is used by :py:func:`waflib.TaskGen.process_source` and :py:func:`waflib.TaskGen.process_rule`.
It is designed for source files, for folders, see :py:func:`waflib.Tools.ccroot.to_incnodes`:
if isinstance(lst, Node.Node):
lst = [lst]
- # either a list or a string, convert to a list of nodes
for x in Utils.to_list(lst):
if isinstance(x, str):
node = find(x)
- else:
+ elif hasattr(x, 'name'):
node = x
+ else:
+ tmp.extend(self.to_nodes(x))
+ continue
if not node:
- raise Errors.WafError("source not found: %r in %r" % (x, self))
+ raise Errors.WafError('source not found: %r in %r' % (x, self))
tmp.append(node)
return tmp
def build(bld):
bld(rule='cp ${SRC} ${TGT}', source='wscript', target='bar.txt')
+
+ Main attributes processed:
+
+ * rule: command to execute, it can be a tuple of strings for multiple commands
+ * chmod: permissions for the resulting files (integer value such as Utils.O755)
+ * shell: set to False to execute the command directly (default is True to use a shell)
+ * scan: scanner function
+	* vars: list of variables to trigger rebuilds, such as CFLAGS
+ * cls_str: string to display when executing the task
+ * cls_keyword: label to display when executing the task
+ * cache_rule: by default, try to re-use similar classes, set to False to disable
+ * source: list of Node or string objects representing the source files required by this task
+ * target: list of Node or string objects representing the files that this task creates
+ * cwd: current working directory (Node or string)
+ * stdout: standard output, set to None to prevent waf from capturing the text
+ * stderr: standard error, set to None to prevent waf from capturing the text
+ * timeout: timeout for command execution (Python 3)
+ * always: whether to always run the command (False by default)
"""
if not getattr(self, 'rule', None):
return
return [nodes, []]
cls.scan = scan
- # TODO use these values in the cache key if provided
- # (may cause excessive caching)
- for x in ('after', 'before', 'ext_in', 'ext_out'):
- setattr(cls, x, getattr(self, x, []))
-
if use_cache:
cache[key] = cls
# now create one instance
tsk = self.create_task(name)
+ for x in ('after', 'before', 'ext_in', 'ext_out'):
+ setattr(tsk, x, getattr(self, x, []))
+
+ if hasattr(self, 'stdout'):
+ tsk.stdout = self.stdout
+
+ if hasattr(self, 'stderr'):
+ tsk.stderr = self.stderr
+
if getattr(self, 'timeout', None):
tsk.timeout = self.timeout
# methods during instance attribute look-up."
tsk.run = functools.partial(tsk.run, tsk)
-
@feature('seq')
def sequence_order(self):
"""
if getattr(self.generator, 'is_copy', None):
for i, x in enumerate(self.outputs):
x.write(self.inputs[i].read('rb'), 'wb')
+ stat = os.stat(self.inputs[i].abspath()) # Preserve mtime of the copy
+ os.utime(self.outputs[i].abspath(), (stat.st_atime, stat.st_mtime))
self.force_permissions()
return None
self.force_permissions()
return ret
- code = self.inputs[0].read(encoding=getattr(self.generator, 'encoding', 'ISO8859-1'))
+ code = self.inputs[0].read(encoding=getattr(self.generator, 'encoding', 'latin-1'))
if getattr(self.generator, 'subst_fun', None):
code = self.generator.subst_fun(self, code)
if code is not None:
- self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'ISO8859-1'))
+ self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'latin-1'))
self.force_permissions()
return None
lst.append(g(1))
return "%%(%s)s" % g(1)
return ''
- global re_m4
code = getattr(self.generator, 're_m4', re_m4).sub(repl, code)
try:
d[x] = tmp
code = code % d
- self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'ISO8859-1'))
+ self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'latin-1'))
self.generator.bld.raw_deps[self.uid()] = lst
# make sure the signature is updated
- try: delattr(self, 'cache_sig')
- except AttributeError: pass
+ try:
+ delattr(self, 'cache_sig')
+ except AttributeError:
+ pass
self.force_permissions()
upd = self.m.update
if getattr(self.generator, 'fun', None):
- upd(Utils.h_fun(self.generator.fun))
+ upd(Utils.h_fun(self.generator.fun).encode())
if getattr(self.generator, 'subst_fun', None):
- upd(Utils.h_fun(self.generator.subst_fun))
+ upd(Utils.h_fun(self.generator.subst_fun).encode())
# raw_deps: persistent custom values returned by the scanner
vars = self.generator.bld.raw_deps.get(self.uid(), [])
if not a:
raise Errors.WafError('could not find %r for %r' % (x, self))
- has_constraints = False
tsk = self.create_task('subst', a, b)
for k in ('after', 'before', 'ext_in', 'ext_out'):
val = getattr(self, k, None)
if val:
- has_constraints = True
setattr(tsk, k, val)
# paranoid safety measure for the general case foo.in->foo.h with ambiguous dependencies
- if not has_constraints:
- global HEADER_EXTS
- for xt in HEADER_EXTS:
- if b.name.endswith(xt):
- tsk.before = [k for k in ('c', 'cxx') if k in Task.classes]
- break
+ for xt in HEADER_EXTS:
+ if b.name.endswith(xt):
+ tsk.ext_in = tsk.ext_in + ['.h']
+ break
inst_to = getattr(self, 'install_path', None)
if inst_to:
install_from=b, chmod=getattr(self, 'chmod', Utils.O644))
self.source = []
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2005-2016 (ita)
+# Thomas Nagy, 2005-2018 (ita)
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2006-2016 (ita)
+# Thomas Nagy, 2006-2018 (ita)
# Ralf Habacker, 2006 (rh)
"""
conf.add_os_flags('ARFLAGS')
if not conf.env.ARFLAGS:
conf.env.ARFLAGS = ['rcs']
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2008-2016 (ita)
+# Thomas Nagy, 2008-2018 (ita)
"""
Assembly support, used by tools such as gas and nasm
#!/usr/bin/env python
# encoding: utf-8
# John O'Meara, 2006
-# Thomas Nagy 2009-2016 (ita)
+# Thomas Nagy 2009-2018 (ita)
"""
The **bison** program is a code generator which creates C or C++ files.
"""
conf.find_program('bison', var='BISON')
conf.env.BISONFLAGS = ['-d']
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2006-2016 (ita)
+# Thomas Nagy, 2006-2018 (ita)
"Base for c programs/libraries"
class cstlib(stlink_task):
"Links object files into a c static libraries"
pass # do not remove
+
"""
set_features(kw, 'objects')
return bld(*k, **kw)
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2005-2016 (ita)
+# Thomas Nagy, 2005-2018 (ita)
"""
C/C++/D configuration helpers
DEFKEYS = 'define_key'
INCKEYS = 'include_key'
-cfg_ver = {
- 'atleast-version': '>=',
- 'exact-version': '==',
- 'max-version': '<=',
-}
-
-SNIP_FUNCTION = '''
-int main(int argc, char **argv) {
- void (*p)();
- (void)argc; (void)argv;
- p=(void(*)())(%s);
- return !p;
-}
-'''
-"""Code template for checking for functions"""
-
-SNIP_TYPE = '''
-int main(int argc, char **argv) {
- (void)argc; (void)argv;
- if ((%(type_name)s *) 0) return 0;
- if (sizeof (%(type_name)s)) return 0;
- return 1;
-}
-'''
-"""Code template for checking for types"""
-
SNIP_EMPTY_PROGRAM = '''
int main(int argc, char **argv) {
(void)argc; (void)argv;
}
'''
-SNIP_FIELD = '''
-int main(int argc, char **argv) {
- char *off;
- (void)argc; (void)argv;
- off = (char*) &((%(type_name)s*)0)->%(field_name)s;
- return (size_t) off < sizeof(%(type_name)s);
-}
-'''
-
MACRO_TO_DESTOS = {
'__linux__' : 'linux',
'__GNU__' : 'gnu', # hurd
static = False
elif x.startswith('-Wl') or x in ('-rdynamic', '-pie'):
app('LINKFLAGS', x)
- elif x.startswith(('-m', '-f', '-dynamic', '-O')):
+ elif x.startswith(('-m', '-f', '-dynamic', '-O', '-g')):
+		# Adding the -W option breaks python builds on OpenIndiana
app('CFLAGS', x)
app('CXXFLAGS', x)
elif x.startswith('-bundle'):
self.find_program('pkg-config', var='PKGCONFIG')
kw['path'] = self.env.PKGCONFIG
- # pkg-config version
- if 'atleast_pkgconfig_version' in kw:
- if not 'msg' in kw:
+ # verify that exactly one action is requested
+ s = ('atleast_pkgconfig_version' in kw) + ('modversion' in kw) + ('package' in kw)
+ if s != 1:
+ raise ValueError('exactly one of atleast_pkgconfig_version, modversion and package must be set')
+ if not 'msg' in kw:
+ if 'atleast_pkgconfig_version' in kw:
kw['msg'] = 'Checking for pkg-config version >= %r' % kw['atleast_pkgconfig_version']
- return
+ elif 'modversion' in kw:
+ kw['msg'] = 'Checking for %r version' % kw['modversion']
+ else:
+ kw['msg'] = 'Checking for %r' %(kw['package'])
- if not 'okmsg' in kw:
+ # let the modversion check set the okmsg to the detected version
+ if not 'okmsg' in kw and not 'modversion' in kw:
kw['okmsg'] = 'yes'
if not 'errmsg' in kw:
kw['errmsg'] = 'not found'
- if 'modversion' in kw:
- if not 'msg' in kw:
- kw['msg'] = 'Checking for %r version' % kw['modversion']
+ # pkg-config version
+ if 'atleast_pkgconfig_version' in kw:
+ pass
+ elif 'modversion' in kw:
if not 'uselib_store' in kw:
kw['uselib_store'] = kw['modversion']
if not 'define_name' in kw:
kw['define_name'] = '%s_VERSION' % Utils.quote_define_name(kw['uselib_store'])
- return
-
- if not 'package' in kw:
- raise ValueError('a package name is required')
-
- if not 'uselib_store' in kw:
- kw['uselib_store'] = kw['package'].upper()
-
- if not 'define_name' in kw:
- kw['define_name'] = self.have_define(kw['uselib_store'])
-
- if not 'msg' in kw:
- kw['msg'] = 'Checking for %r' % (kw['package'] or kw['path'])
-
- for x in cfg_ver:
- # Gotcha: only one predicate is allowed at a time
- # TODO remove in waf 2.0
- y = x.replace('-', '_')
- if y in kw:
- package = kw['package']
- if Logs.verbose:
- Logs.warn('Passing %r to conf.check_cfg() is obsolete, pass parameters directly, eg:', y)
- Logs.warn(" conf.check_cfg(package='%s', args=['--libs', '--cflags', '%s >= 1.6'])", package, package)
- if not 'msg' in kw:
- kw['msg'] = 'Checking for %r %s %s' % (package, cfg_ver[x], kw[y])
- break
+ else:
+ if not 'uselib_store' in kw:
+ kw['uselib_store'] = Utils.to_list(kw['package'])[0].upper()
+ if not 'define_name' in kw:
+ kw['define_name'] = self.have_define(kw['uselib_store'])
@conf
def exec_cfg(self, kw):
"""
- Executes ``pkg-config`` or other ``-config`` applications to colect configuration flags:
+ Executes ``pkg-config`` or other ``-config`` applications to collect configuration flags:
* if atleast_pkgconfig_version is given, check that pkg-config has the version n and return
* if modversion is given, then return the module version
if 'atleast_pkgconfig_version' in kw:
cmd = path + ['--atleast-pkgconfig-version=%s' % kw['atleast_pkgconfig_version']]
self.cmd_and_log(cmd, env=env)
- if not 'okmsg' in kw:
- kw['okmsg'] = 'yes'
return
- for x in cfg_ver:
- # TODO remove in waf 2.0
- y = x.replace('-', '_')
- if y in kw:
- self.cmd_and_log(path + ['--%s=%s' % (x, kw[y]), kw['package']], env=env)
- if not 'okmsg' in kw:
- kw['okmsg'] = 'yes'
- define_it()
- break
-
# single version for a module
if 'modversion' in kw:
version = self.cmd_and_log(path + ['--modversion', kw['modversion']], env=env).strip()
+ if not 'okmsg' in kw:
+ kw['okmsg'] = version
self.define(kw['define_name'], version)
return version
val = self.cmd_and_log(lst + ['--variable=' + v], env=env).strip()
var = '%s_%s' % (kw['uselib_store'], v)
v_env[var] = val
- if not 'okmsg' in kw:
- kw['okmsg'] = 'yes'
return
# so we assume the command-line will output flags to be parsed afterwards
ret = self.cmd_and_log(lst, env=env)
- if not 'okmsg' in kw:
- kw['okmsg'] = 'yes'
define_it()
self.parse_flags(ret, kw['uselib_store'], kw.get('env', self.env), force_static=static, posix=kw.get('posix'))
def configure(conf):
conf.load('compiler_c')
conf.check_cfg(package='glib-2.0', args='--libs --cflags')
- conf.check_cfg(package='glib-2.0', uselib_store='GLIB', atleast_version='2.10.0',
- args='--cflags --libs')
conf.check_cfg(package='pango')
conf.check_cfg(package='pango', uselib_store='MYPANGO', args=['--cflags', '--libs'])
conf.check_cfg(package='pango',
conf.check_cfg(package='gtk+-2.0', variables=['includedir', 'prefix'], uselib_store='FOO')
print(conf.env.FOO_includedir)
"""
- if k:
- lst = k[0].split()
- kw['package'] = lst[0]
- kw['args'] = ' '.join(lst[1:])
-
self.validate_cfg(kw)
if 'msg' in kw:
self.start_msg(kw['msg'], **kw)
:param auto_add_header_name: if header_name was set, add the headers in env.INCKEYS so the next tests will include these headers
:type auto_add_header_name: bool
"""
+ for x in ('type_name', 'field_name', 'function_name'):
+ if x in kw:
+ Logs.warn('Invalid argument %r in test' % x)
if not 'build_fun' in kw:
kw['build_fun'] = build_fun
if not 'compile_mode' in kw:
kw['compile_mode'] = 'c'
- if 'cxx' in Utils.to_list(kw.get('features',[])) or kw.get('compiler', '') == 'cxx':
+ if 'cxx' in Utils.to_list(kw.get('features', [])) or kw.get('compiler') == 'cxx':
kw['compile_mode'] = 'cxx'
if not 'type' in kw:
return ''.join(['#include <%s>\n' % x for x in dct])
return ''
- #OSX
if 'framework_name' in kw:
+ # OSX, not sure this is used anywhere
fwkname = kw['framework_name']
if not 'uselib_store' in kw:
kw['uselib_store'] = fwkname.upper()
- if not kw.get('no_header', False):
- if not 'header_name' in kw:
- kw['header_name'] = []
+ if not kw.get('no_header'):
fwk = '%s/%s.h' % (fwkname, fwkname)
if kw.get('remove_dot_h'):
fwk = fwk[:-2]
- kw['header_name'] = Utils.to_list(kw['header_name']) + [fwk]
-
+ val = kw.get('header_name', [])
+ kw['header_name'] = Utils.to_list(val) + [fwk]
kw['msg'] = 'Checking for framework %s' % fwkname
kw['framework'] = fwkname
- #kw['frameworkpath'] = set it yourself
-
- if 'function_name' in kw:
- fu = kw['function_name']
- if not 'msg' in kw:
- kw['msg'] = 'Checking for function %s' % fu
- kw['code'] = to_header(kw) + SNIP_FUNCTION % fu
- if not 'uselib_store' in kw:
- kw['uselib_store'] = fu.upper()
- if not 'define_name' in kw:
- kw['define_name'] = self.have_define(fu)
-
- elif 'type_name' in kw:
- tu = kw['type_name']
- if not 'header_name' in kw:
- kw['header_name'] = 'stdint.h'
- if 'field_name' in kw:
- field = kw['field_name']
- kw['code'] = to_header(kw) + SNIP_FIELD % {'type_name' : tu, 'field_name' : field}
- if not 'msg' in kw:
- kw['msg'] = 'Checking for field %s in %s' % (field, tu)
- if not 'define_name' in kw:
- kw['define_name'] = self.have_define((tu + '_' + field).upper())
- else:
- kw['code'] = to_header(kw) + SNIP_TYPE % {'type_name' : tu}
- if not 'msg' in kw:
- kw['msg'] = 'Checking for type %s' % tu
- if not 'define_name' in kw:
- kw['define_name'] = self.have_define(tu.upper())
elif 'header_name' in kw:
if not 'msg' in kw:
kw['code'] = '\n'.join(['#include <%s>' % x for x in self.env[INCKEYS]]) + '\n' + kw['code']
# in case defines lead to very long command-lines
- if kw.get('merge_config_header', False) or env.merge_config_header:
+ if kw.get('merge_config_header') or env.merge_config_header:
kw['code'] = '%s\n\n%s' % (self.get_config_header(), kw['code'])
env.DEFINES = [] # modify the copy
- if not kw.get('success'): kw['success'] = None
+ if not kw.get('success'):
+ kw['success'] = None
if 'define_name' in kw:
self.undefine(kw['define_name'])
is_success = 0
if kw['execute']:
if kw['success'] is not None:
- if kw.get('define_ret', False):
+ if kw.get('define_ret'):
is_success = kw['success']
else:
is_success = (kw['success'] == 0)
is_success = (kw['success'] == 0)
if kw.get('define_name'):
- # TODO this is still way too complicated
comment = kw.get('comment', '')
define_name = kw['define_name']
if kw['execute'] and kw.get('define_ret') and isinstance(is_success, str):
self.env[define_name] = int(is_success)
if 'header_name' in kw:
- if kw.get('auto_add_header_name', False):
+ if kw.get('auto_add_header_name'):
self.env.append_value(INCKEYS, Utils.to_list(kw['header_name']))
if is_success and 'uselib_store' in kw:
:type define_prefix: string
:param define_prefix: prefix all the defines in the file with a particular prefix
"""
- if not configfile: configfile = WAF_CONFIG_H
+ if not configfile:
+ configfile = WAF_CONFIG_H
waf_guard = guard or 'W_%s_WAF' % Utils.quote_define_name(configfile)
node = top and self.bldnode or self.path.get_bld()
cmd = cc + ['-dM', '-E', '-']
env = conf.env.env or None
try:
- out, err = conf.cmd_and_log(cmd, output=0, input='\n', env=env)
- except Exception:
+ out, err = conf.cmd_and_log(cmd, output=0, input='\n'.encode(), env=env)
+ except Errors.WafError:
conf.fatal('Could not determine the compiler version %r' % cmd)
if gcc:
conf.env.DEST_BINFMT = 'elf'
elif isD('__WINNT__') or isD('__CYGWIN__') or isD('_WIN32'):
conf.env.DEST_BINFMT = 'pe'
+ if not conf.env.IMPLIBDIR:
+ conf.env.IMPLIBDIR = conf.env.LIBDIR # for .lib or .dll.a files
conf.env.LIBDIR = conf.env.BINDIR
elif isD('__APPLE__'):
conf.env.DEST_BINFMT = 'mac-o'
cmd = cc + ['-V']
try:
out, err = conf.cmd_and_log(cmd, output=0)
- except Errors.WafError ,e:
+ except Errors.WafError as e:
# Older versions of the compiler exit with non-zero status when reporting their version
if not (hasattr(e, 'returncode') and hasattr(e, 'stdout') and hasattr(e, 'stderr')):
conf.fatal('Could not find suncc %r' % cmd)
# ============ parallel configuration
-class cfgtask(Task.TaskBase):
+class cfgtask(Task.Task):
"""
A task that executes build configuration tests (calls conf.check)
Make sure to use locks if concurrent access to the same conf.env data is necessary.
"""
def __init__(self, *k, **kw):
- Task.TaskBase.__init__(self, *k, **kw)
+ Task.Task.__init__(self, *k, **kw)
self.run_after = set()
def display(self):
def uid(self):
return Utils.SIG_NIL
+ def signature(self):
+ return Utils.SIG_NIL
+
def run(self):
conf = self.conf
bld = Build.BuildContext(top_dir=conf.srcnode.abspath(), out_dir=conf.bldnode.abspath())
return 1
def process(self):
- Task.TaskBase.process(self)
+ Task.Task.process(self)
if 'msg' in self.args:
with self.generator.bld.multicheck_lock:
self.conf.start_msg(self.args['msg'])
bld = par()
bld.keep = kw.get('run_all_tests', True)
+ bld.imp_sigs = {}
tasks = []
id_to_task = {}
for dct in k:
- x = Task.classes['cfgtask'](bld=bld)
+ x = Task.classes['cfgtask'](bld=bld, env=None)
tasks.append(x)
x.args = dct
x.bld = bld
if x.hasrun != Task.SUCCESS:
if x.args.get('mandatory', True):
self.fatal(kw.get('fatalmsg') or 'One of the tests has failed, read config.log for more information')
+
+@conf
+def check_gcc_o_space(self, mode='c'):
+ if int(self.env.CC_VERSION[0]) > 4:
+ # this is for old compilers
+ return
+ self.env.stash()
+ if mode == 'c':
+ self.env.CCLNK_TGT_F = ['-o', '']
+ elif mode == 'cxx':
+ self.env.CXXLNK_TGT_F = ['-o', '']
+ features = '%s %sshlib' % (mode, mode)
+ try:
+ self.check(msg='Checking if the -o link must be split from arguments', fragment=SNIP_EMPTY_PROGRAM, features=features)
+ except self.errors.ConfigurationError:
+ self.env.revert()
+ else:
+ self.env.commit()
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy 2008-2016 (ita)
+# Thomas Nagy 2008-2018 (ita)
"""
MacOSX related tools
context = getattr(self, 'context', {})
txt = txt.format(**context)
self.outputs[0].write(txt)
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2006-2016 (ita)
+# Thomas Nagy, 2006-2018 (ita)
"""
C/C++ preprocessor for finding dependencies
go_absolute = False
"Set to True to track headers on files in /usr/include, else absolute paths are ignored (but it becomes very slow)"
-standard_includes = ['/usr/include']
+standard_includes = ['/usr/local/include', '/usr/include']
if Utils.is_win32:
standard_includes = []
use_trigraphs = 0
"""Apply trigraph rules (False by default)"""
+# obsolete, do not use
strict_quotes = 0
-"""Reserve the "#include <>" quotes for system includes (do not search for those includes). False by default."""
g_optrans = {
'not':'!',
for u in syms.split():
prec[u] = x
-def trimquotes(s):
- """
- Remove the single quotes around an expression::
-
- trimquotes("'test'") == "test"
-
- :param s: expression to transform
- :type s: string
- :rtype: string
- """
- # TODO remove in waf 2.0
- if not s: return ''
- s = s.rstrip()
- if s[0] == "'" and s[-1] == "'": return s[1:-1]
- return s
-
def reduce_nums(val_1, val_2, val_op):
"""
Apply arithmetic rules to compute a result
#print val_1, val_2, val_op
# now perform the operation, make certain a and b are numeric
- try: a = 0 + val_1
- except TypeError: a = int(val_1)
- try: b = 0 + val_2
- except TypeError: b = int(val_2)
+ try:
+ a = 0 + val_1
+ except TypeError:
+ a = int(val_1)
+ try:
+ b = 0 + val_2
+ except TypeError:
+ b = int(val_2)
d = val_op
- if d == '%': c = a%b
- elif d=='+': c = a+b
- elif d=='-': c = a-b
- elif d=='*': c = a*b
- elif d=='/': c = a/b
- elif d=='^': c = a^b
- elif d=='==': c = int(a == b)
- elif d=='|' or d == 'bitor': c = a|b
- elif d=='||' or d == 'or' : c = int(a or b)
- elif d=='&' or d == 'bitand': c = a&b
- elif d=='&&' or d == 'and': c = int(a and b)
- elif d=='!=' or d == 'not_eq': c = int(a != b)
- elif d=='^' or d == 'xor': c = int(a^b)
- elif d=='<=': c = int(a <= b)
- elif d=='<': c = int(a < b)
- elif d=='>': c = int(a > b)
- elif d=='>=': c = int(a >= b)
- elif d=='<<': c = a<<b
- elif d=='>>': c = a>>b
- else: c = 0
+ if d == '%':
+ c = a % b
+ elif d=='+':
+ c = a + b
+ elif d=='-':
+ c = a - b
+ elif d=='*':
+ c = a * b
+ elif d=='/':
+ c = a / b
+ elif d=='^':
+ c = a ^ b
+ elif d=='==':
+ c = int(a == b)
+ elif d=='|' or d == 'bitor':
+ c = a | b
+ elif d=='||' or d == 'or' :
+ c = int(a or b)
+ elif d=='&' or d == 'bitand':
+ c = a & b
+ elif d=='&&' or d == 'and':
+ c = int(a and b)
+ elif d=='!=' or d == 'not_eq':
+ c = int(a != b)
+ elif d=='^' or d == 'xor':
+ c = int(a^b)
+ elif d=='<=':
+ c = int(a <= b)
+ elif d=='<':
+ c = int(a < b)
+ elif d=='>':
+ c = int(a > b)
+ elif d=='>=':
+ c = int(a >= b)
+ elif d=='<<':
+ c = a << b
+ elif d=='>>':
+ c = a >> b
+ else:
+ c = 0
return c
def get_num(lst):
:return: a pair containing the number and the rest of the list
:rtype: tuple(value, list)
"""
- if not lst: raise PreprocError('empty list for get_num')
+ if not lst:
+ raise PreprocError('empty list for get_num')
(p, v) = lst[0]
if p == OP:
if v == '(':
:rtype: value, list
"""
- if not lst: raise PreprocError('empty list for get_term')
+ if not lst:
+ raise PreprocError('empty list for get_term')
num, lst = get_num(lst)
if not lst:
return (num, [])
one_param.append((p2, v2))
count_paren += 1
elif v2 == ')':
- if one_param: args.append(one_param)
+ if one_param:
+ args.append(one_param)
break
elif v2 == ',':
- if not one_param: raise PreprocError('empty param in funcall %r' % v)
+ if not one_param:
+ raise PreprocError('empty param in funcall %r' % v)
args.append(one_param)
one_param = []
else:
one_param.append((p2, v2))
else:
one_param.append((p2, v2))
- if v2 == '(': count_paren += 1
- elif v2 == ')': count_paren -= 1
+ if v2 == '(':
+ count_paren += 1
+ elif v2 == ')':
+ count_paren -= 1
else:
raise PreprocError('malformed macro')
accu.append((p2, v2))
accu.extend(toks)
elif to_add[j+1][0] == IDENT and to_add[j+1][1] == '__VA_ARGS__':
- # TODO not sure
# first collect the tokens
va_toks = []
st = len(macro_def[0])
for x in args[pt-st+1:]:
va_toks.extend(x)
va_toks.append((OP, ','))
- if va_toks: va_toks.pop() # extra comma
+ if va_toks:
+ va_toks.pop() # extra comma
if len(accu)>1:
(p3, v3) = accu[-1]
(p4, v4) = accu[-2]
:rtype: int
"""
reduce_tokens(lst, defs, [])
- if not lst: raise PreprocError('missing tokens to evaluate')
+ if not lst:
+ raise PreprocError('missing tokens to evaluate')
if lst:
p, v = lst[0]
p, name = t[0]
p, v = t[1]
- if p != OP: raise PreprocError('expected (')
+ if p != OP:
+ raise PreprocError('expected (')
i = 1
pindex = 0
return ord(txt)
c = txt[1]
if c == 'x':
- if len(txt) == 4 and txt[3] in string.hexdigits: return int(txt[2:], 16)
+ if len(txt) == 4 and txt[3] in string.hexdigits:
+ return int(txt[2:], 16)
return int(txt[2:], 16)
elif c.isdigit():
- if c == '0' and len(txt)==2: return 0
+ if c == '0' and len(txt)==2:
+ return 0
for i in 3, 2, 1:
if len(txt) > i and txt[1:1+i].isdigit():
return (1+i, int(txt[1:1+i], 8))
else:
- try: return chr_esc[c]
- except KeyError: raise PreprocError('could not parse char literal %r' % txt)
+ try:
+ return chr_esc[c]
+ except KeyError:
+ raise PreprocError('could not parse char literal %r' % txt)
def tokenize(s):
"""
v = m(name)
if v:
if name == IDENT:
- try:
- g_optrans[v]
+ if v in g_optrans:
name = OP
- except KeyError:
- # c++ specific
- if v.lower() == "true":
- v = 1
- name = NUM
- elif v.lower() == "false":
- v = 0
- name = NUM
+ elif v.lower() == "true":
+ v = 1
+ name = NUM
+ elif v.lower() == "false":
+ v = 0
+ name = NUM
elif name == NUM:
- if m('oct'): v = int(v, 8)
- elif m('hex'): v = int(m('hex'), 16)
- elif m('n0'): v = m('n0')
+ if m('oct'):
+ v = int(v, 8)
+ elif m('hex'):
+ v = int(m('hex'), 16)
+ elif m('n0'):
+ v = m('n0')
else:
v = m('char')
- if v: v = parse_char(v)
- else: v = m('n2') or m('n4')
+ if v:
+ v = parse_char(v)
+ else:
+ v = m('n2') or m('n4')
elif name == OP:
- if v == '%:': v = '#'
- elif v == '%:%:': v = '##'
+ if v == '%:':
+ v = '#'
+ elif v == '%:%:':
+ v = '##'
elif name == STR:
# remove the quotes around the string
v = v[1:-1]
self.ban_includes = set()
"""Includes that must not be read (#pragma once)"""
+ self.listed = set()
+ """Include nodes/names already listed to avoid duplicates in self.nodes/self.names"""
+
def cached_find_resource(self, node, filename):
"""
Find a file from the input directory
try:
cache = node.ctx.preproc_cache_node
except AttributeError:
- global FILE_CACHE_SIZE
cache = node.ctx.preproc_cache_node = Utils.lru_cache(FILE_CACHE_SIZE)
key = (node, filename)
cache[key] = ret
return ret
- def tryfind(self, filename):
+ def tryfind(self, filename, kind='"', env=None):
"""
Try to obtain a node from the filename based from the include paths. Will add
the node found to :py:attr:`waflib.Tools.c_preproc.c_parser.nodes` or the file name to
"""
if filename.endswith('.moc'):
# we could let the qt4 module use a subclass, but then the function "scan" below must be duplicated
- # in the qt4 and in the qt5 classes. So we have two lines here and it is sufficient. TODO waf 1.9
+ # in the qt4 and in the qt5 classes. So we have two lines here and it is sufficient.
self.names.append(filename)
return None
self.curfile = filename
- # for msvc it should be a for loop over the whole stack
- found = self.cached_find_resource(self.currentnode_stack[-1], filename)
+ found = None
+ if kind == '"':
+ if env.MSVC_VERSION:
+ for n in reversed(self.currentnode_stack):
+ found = self.cached_find_resource(n, filename)
+ if found:
+ break
+ else:
+ found = self.cached_find_resource(self.currentnode_stack[-1], filename)
- for n in self.nodepaths:
- if found:
- break
- found = self.cached_find_resource(n, filename)
+ if not found:
+ for n in self.nodepaths:
+ found = self.cached_find_resource(n, filename)
+ if found:
+ break
+ listed = self.listed
if found and not found in self.ban_includes:
- # TODO duplicates do not increase the no-op build times too much, but they may be worth removing
- self.nodes.append(found)
+ if found not in listed:
+ listed.add(found)
+ self.nodes.append(found)
self.addlines(found)
else:
- if not filename in self.names:
+ if filename not in listed:
+ listed.add(filename)
self.names.append(filename)
return found
# return a list of tuples : keyword, line
code = node.read()
if use_trigraphs:
- for (a, b) in trig_def: code = code.split(a).join(b)
+ for (a, b) in trig_def:
+ code = code.split(a).join(b)
code = re_nl.sub('', code)
code = re_cpp.sub(repl, code)
return re_lines.findall(code)
try:
cache = node.ctx.preproc_cache_lines
except AttributeError:
- global LINE_CACHE_SIZE
cache = node.ctx.preproc_cache_lines = Utils.lru_cache(LINE_CACHE_SIZE)
try:
return cache[node]
raise PreprocError('could not read the file %r' % node)
except Exception:
if Logs.verbose > 0:
- Logs.error('parsing %r failed', node)
- traceback.print_exc()
+ Logs.error('parsing %r failed %s', node, traceback.format_exc())
else:
self.lines.extend(lines)
continue
try:
- ve = Logs.verbose
- if ve: Logs.debug('preproc: line is %s - %s state is %s', token, line, self.state)
state = self.state
# make certain we define the state if we are about to enter in an if block
if token == 'if':
ret = eval_macro(tokenize(line), self.defs)
- if ret: state[-1] = accepted
- else: state[-1] = ignored
+ if ret:
+ state[-1] = accepted
+ else:
+ state[-1] = ignored
elif token == 'ifdef':
m = re_mac.match(line)
- if m and m.group() in self.defs: state[-1] = accepted
- else: state[-1] = ignored
+ if m and m.group() in self.defs:
+ state[-1] = accepted
+ else:
+ state[-1] = ignored
elif token == 'ifndef':
m = re_mac.match(line)
- if m and m.group() in self.defs: state[-1] = ignored
- else: state[-1] = accepted
+ if m and m.group() in self.defs:
+ state[-1] = ignored
+ else:
+ state[-1] = accepted
elif token == 'include' or token == 'import':
(kind, inc) = extract_include(line, self.defs)
- if ve: Logs.debug('preproc: include found %s (%s) ', inc, kind)
- if kind == '"' or not strict_quotes:
- self.current_file = self.tryfind(inc)
- if token == 'import':
- self.ban_includes.add(self.current_file)
+ self.current_file = self.tryfind(inc, kind, env)
+ if token == 'import':
+ self.ban_includes.add(self.current_file)
elif token == 'elif':
if state[-1] == accepted:
state[-1] = skipped
if eval_macro(tokenize(line), self.defs):
state[-1] = accepted
elif token == 'else':
- if state[-1] == accepted: state[-1] = skipped
- elif state[-1] == ignored: state[-1] = accepted
+ if state[-1] == accepted:
+ state[-1] = skipped
+ elif state[-1] == ignored:
+ state[-1] = accepted
elif token == 'define':
try:
self.defs[self.define_name(line)] = line
elif token == 'pragma':
if re_pragma_once.match(line.lower()):
self.ban_includes.add(self.current_file)
- except Exception ,e:
+ except Exception as e:
if Logs.verbose:
- Logs.debug('preproc: line parsing failed (%s): %s %s', e, line, Utils.ex_stack())
+ Logs.debug('preproc: line parsing failed (%s): %s %s', e, line, traceback.format_exc())
def define_name(self, line):
"""
This function is bound as a task method on :py:class:`waflib.Tools.c.c` and :py:class:`waflib.Tools.cxx.cxx` for example
"""
-
- global go_absolute
-
try:
incn = task.generator.includes_nodes
except AttributeError:
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2016 (ita)
+# Thomas Nagy, 2016-2018 (ita)
"""
Various configuration tests.
"""
color = 'PINK'
def run(self):
- txt = self.inputs[0].read(flags='rb').decode('iso8859-1')
+ txt = self.inputs[0].read(flags='rb').decode('latin-1')
if txt.find('LiTTleEnDian') > -1:
self.generator.tmp.append('little')
elif txt.find('BIGenDianSyS') > -1:
self.check(fragment=ENDIAN_FRAGMENT, features='c grep_for_endianness',
msg='Checking for endianness', define='ENDIANNESS', tmp=tmp, okmsg=check_msg)
return tmp[0]
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2005-2016 (ita)
+# Thomas Nagy, 2005-2018 (ita)
"""
Classes and methods shared by tools providing support for C-like language such
"""
color = 'YELLOW'
+ weight = 3
+ """Try to process link tasks as early as possible"""
+
inst_to = None
"""Default installation path for the link task outputs, or None to disable"""
def rm_tgt(cls):
old = cls.run
def wrap(self):
- try: os.remove(self.outputs[0].abspath())
- except OSError: pass
+ try:
+ os.remove(self.outputs[0].abspath())
+ except OSError:
+ pass
return old(self)
setattr(cls, 'run', wrap)
rm_tgt(stlink_task)
try:
inst_to = self.install_path
except AttributeError:
- inst_to = self.link_task.__class__.inst_to
+ inst_to = self.link_task.inst_to
if inst_to:
# install a copy of the node list we have at this moment (implib not added)
self.install_task = self.add_install_files(
self.add_objects_from_tgen(y)
if getattr(y, 'export_includes', None):
- self.includes.extend(y.to_incnodes(y.export_includes))
+ # self.includes may come from a global variable #2035
+ self.includes = self.includes + y.to_incnodes(y.export_includes)
if getattr(y, 'export_defines', None):
self.env.append_value('DEFINES', self.to_list(y.export_defines))
self.create_task('vnum', node, outs)
if getattr(self, 'install_task', None):
- self.install_task.hasrun = Task.SKIP_ME
+ self.install_task.hasrun = Task.SKIPPED
path = self.install_task.install_to
if self.env.DEST_OS == 'openbsd':
libname = self.link_task.outputs[0].name
try:
inst_to = self.install_path
except AttributeError:
- inst_to = self.link_task.__class__.inst_to
+ inst_to = self.link_task.inst_to
if inst_to:
p = Utils.subst_vars(inst_to, self.env)
path = os.path.join(p, name2)
else:
lst.append(os.path.normpath(os.path.join(base, x)))
self.env[var] = lst
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy 2009-2016 (ita)
+# Thomas Nagy 2009-2018 (ita)
"""
Detect the Clang++ C++ compiler
conf.cxx_load_tools()
conf.cxx_add_flags()
conf.link_add_flags()
+
conf.start_msg('Checking for %r (C compiler)' % compiler)
try:
conf.load(compiler)
- except conf.errors.ConfigurationError ,e:
+ except conf.errors.ConfigurationError as e:
conf.env.revert()
conf.end_msg(False)
debug('compiler_c: %r', e)
for x in test_for_compiler.split():
opt.load('%s' % x)
+
conf.start_msg('Checking for %r (C++ compiler)' % compiler)
try:
conf.load(compiler)
- except conf.errors.ConfigurationError ,e:
+ except conf.errors.ConfigurationError as e:
conf.env.revert()
conf.end_msg(False)
debug('compiler_cxx: %r', e)
for x in test_for_compiler.split():
opt.load('%s' % x)
+
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2007 (dv)
-# Thomas Nagy, 2016 (ita)
+# Thomas Nagy, 2016-2018 (ita)
"""
Try to detect a D compiler from the list of supported compilers::
conf.start_msg('Checking for %r (D compiler)' % compiler)
try:
conf.load(compiler)
- except conf.errors.ConfigurationError ,e:
+ except conf.errors.ConfigurationError as e:
conf.env.revert()
conf.end_msg(False)
Logs.debug('compiler_d: %r', e)
for x in test_for_compiler.split():
opt.load('%s' % x)
+
conf.start_msg('Checking for %r (Fortran compiler)' % compiler)
try:
conf.load(compiler)
- except conf.errors.ConfigurationError ,e:
+ except conf.errors.ConfigurationError as e:
conf.env.revert()
conf.end_msg(False)
Logs.debug('compiler_fortran: %r', e)
for x in test_for_compiler.split():
opt.load('%s' % x)
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2006-2016 (ita)
+# Thomas Nagy, 2006-2018 (ita)
"""
C# support. A simple example::
else:
out = node.change_ext('.pdb')
self.cs_task.outputs.append(out)
- try:
- self.install_task.source.append(out)
- except AttributeError:
- pass
+
+ if getattr(self, 'install_task', None):
+ self.pdb_install_task = self.add_install_files(
+ install_to=self.install_task.install_to, install_from=out)
if csdebug == 'pdbonly':
val = ['/debug+', '/debug:pdbonly']
val = ['/debug-']
self.env.append_value('CSFLAGS', val)
+@feature('cs')
+@after_method('debug_cs')
+def doc_cs(self):
+ """
+ The C# targets may create .xml documentation files::
+
+ def build(bld):
+ bld(features='cs', source='My.cs', bintype='library', gen='my.dll', csdoc=True)
+ # csdoc is a boolean value
+ """
+ csdoc = getattr(self, 'csdoc', self.env.CSDOC)
+ if not csdoc:
+ return
+
+ node = self.cs_task.outputs[0]
+ out = node.change_ext('.xml')
+ self.cs_task.outputs.append(out)
+
+ if getattr(self, 'install_task', None):
+ self.doc_install_task = self.add_install_files(
+ install_to=self.install_task.install_to, install_from=out)
+
+ self.env.append_value('CSFLAGS', '/doc:%s' % out.abspath())
class mcs(Task.Task):
"""
color = 'YELLOW'
run_str = '${MCS} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}'
- def exec_command(self, cmd, **kw):
- if '/noconfig' in cmd:
- raise ValueError('/noconfig is not allowed when using response files, check your flags!')
- return super(self.__class__, self).exec_command(cmd, **kw)
+ def split_argfile(self, cmd):
+ inline = [cmd[0]]
+ infile = []
+ for x in cmd[1:]:
+ # csc doesn't want /noconfig in @file
+ if x.lower() == '/noconfig':
+ inline.append(x)
+ else:
+ infile.append(self.quote_flag(x))
+ return (inline, infile)
def configure(conf):
"""
:rtype: :py:class:`waflib.TaskGen.task_gen`
"""
return self(name=name, features='fake_lib', lib_paths=paths, lib_type='csshlib')
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2005-2016 (ita)
+# Thomas Nagy, 2005-2018 (ita)
"Base for c++ programs and libraries"
class cxxstlib(stlink_task):
"Links object files into c++ static libraries"
pass # do not remove
+
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2007 (dv)
-# Thomas Nagy, 2007-2016 (ita)
+# Thomas Nagy, 2007-2018 (ita)
from waflib import Utils, Task, Errors
from waflib.TaskGen import taskgen_method, feature, extension
if not node:
raise Errors.WafError('file %r not found on d obj' % i[0])
self.create_task('d_header', node, node.change_ext('.di'))
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2016 (ita)
+# Thomas Nagy, 2016-2018 (ita)
from waflib import Utils
from waflib.Configure import conf
ret = self.check_cc(features='d dprogram', fragment=DLIB, compile_filename='test.d', execute=execute, define_ret=True)
if execute:
self.env.DLIBRARY = ret.strip()
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2016 (ita)
+# Thomas Nagy, 2016-2018 (ita)
"""
Provide a scanner for finding dependencies on d files
i += 1
while i < max:
c = txt[i]
- if c == delim: break
+ if c == delim:
+ break
elif c == '\\': # skip the character following backslash
i += 1
i += 1
elif c == '/': # try to replace a comment with whitespace
buf.append(txt[begin:i])
i += 1
- if i == max: break
+ if i == max:
+ break
c = txt[i]
if c == '+': # eat nesting /+ +/ comment
i += 1
c = None
elif prev == '+' and c == '/':
nesting -= 1
- if nesting == 0: break
+ if nesting == 0:
+ break
c = None
i += 1
elif c == '*': # eat /* */ comment
while i < max:
prev = c
c = txt[i]
- if prev == '*' and c == '/': break
+ if prev == '*' and c == '/':
+ break
i += 1
elif c == '/': # eat // comment
i += 1
names = self.get_strings(code) # obtain the import strings
for x in names:
# optimization
- if x in self.allnames: continue
+ if x in self.allnames:
+ continue
self.allnames.append(x)
# for each name, see if it is like a node or not
nodes = gruik.nodes
names = gruik.names
return (nodes, names)
+
Detects the program dbus-binding-tool and sets ``conf.env.DBUS_BINDING_TOOL``
"""
conf.find_program('dbus-binding-tool', var='DBUS_BINDING_TOOL')
+
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2007 (dv)
-# Thomas Nagy, 2008-2016 (ita)
+# Thomas Nagy, 2008-2018 (ita)
import sys
from waflib.Tools import ar, d
if str(conf.env.D).find('ldc') > -1:
conf.common_flags_ldc()
+
'importpath':'includes',
'installpath':'install_path',
'iscopy':'is_copy',
+'uses':'use',
}
meths_typos = ['__call__', 'program', 'shlib', 'stlib', 'objects']
for (k, v) in uids.items():
if len(v) > 1:
Logs.error('* Several tasks use the same identifier. Please check the information on\n https://waf.io/apidocs/Task.html?highlight=uid#waflib.Task.Task.uid')
+ tg_details = tsk.generator.name
+ if Logs.verbose > 2:
+ tg_details = tsk.generator
for tsk in v:
- Logs.error(' - object %r (%r) defined in %r', tsk.__class__.__name__, tsk, tsk.generator)
+ Logs.error(' - object %r (%r) defined in %r', tsk.__class__.__name__, tsk, tg_details)
def check_invalid_constraints(self):
feat = set()
Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'", k[0])
if '.' in sp:
Logs.error("In ant_glob pattern %r: '.' means 'one dot', not 'current directory'", k[0])
- if kw.get('remove', True):
- try:
- if self.is_child_of(self.ctx.bldnode) and not kw.get('quiet', False):
- Logs.error('Using ant_glob on the build folder (%r) is dangerous (quiet=True to disable this warning)', self)
- except AttributeError:
- pass
return self.old_ant_glob(*k, **kw)
Node.Node.old_ant_glob = Node.Node.ant_glob
Node.Node.ant_glob = ant_glob
+ # catch ant_glob on build folders
+ def ant_iter(self, accept=None, maxdepth=25, pats=[], dir=False, src=True, remove=True, quiet=False):
+ if remove:
+ try:
+ if self.is_child_of(self.ctx.bldnode) and not quiet:
+ quiet = True
+ Logs.error('Calling ant_glob on build folders (%r) is dangerous: add quiet=True / remove=False', self)
+ except AttributeError:
+ pass
+ return self.old_ant_iter(accept, maxdepth, pats, dir, src, remove, quiet)
+ Node.Node.old_ant_iter = Node.Node.ant_iter
+ Node.Node.ant_iter = ant_iter
+
# catch conflicting ext_in/ext_out/before/after declarations
old = Task.is_before
def is_before(t1, t2):
else:
for x in ('before', 'after'):
for y in self.to_list(getattr(self, x, [])):
- if not Task.classes.get(y, None):
+ if not Task.classes.get(y):
Logs.error('Erroneous order constraint %s=%r on %r (no such class)', x, y, self)
TaskGen.feature('*')(check_err_order)
elif name == 'prepend':
raise Errors.WafError('env.prepend does not exist: use env.prepend_value')
if name in self.__slots__:
- return object.__getattr__(self, name, default)
+ return super(ConfigSet.ConfigSet, self).__getattr__(name, default)
else:
return self[name]
ConfigSet.ConfigSet.__getattr__ = _getattr
Error verification can be enabled by default (not just on ``waf -v``) by adding to the user script options
"""
enhance_lib()
+
#! /usr/bin/env python
# encoding: utf-8
# DC 2008
-# Thomas Nagy 2016 (ita)
+# Thomas Nagy 2016-2018 (ita)
"""
Fortran support
"""
-from waflib import Utils, Task
+from waflib import Utils, Task, Errors
from waflib.Tools import ccroot, fc_config, fc_scan
from waflib.TaskGen import extension
from waflib.Configure import conf
class fc(Task.Task):
"""
- Fortran tasks can only run when all fortran tasks in the current group are ready to be executed
+ Fortran tasks can only run when all fortran tasks in a current task group are ready to be executed
This may cause a deadlock if some fortran task is waiting for something that cannot happen (circular dependency)
Should this ever happen, set the 'nomod=True' on those tasks instances to break the loop
"""
ret = tsk.runnable_status()
if ret == Task.ASK_LATER:
# we have to wait for one of the other fortran tasks to be ready
- # this may deadlock if there are dependencies between the fortran tasks
+ # this may deadlock if there are dependencies between fortran tasks
# but this should not happen (we are setting them here!)
for x in lst:
x.mod_fortran_done = None
- # TODO sort the list of tasks in bld.producer.outstanding to put all fortran tasks at the end
return Task.ASK_LATER
ins = Utils.defaultdict(set)
name = bld.modfile(x.replace('MOD@', ''))
node = bld.srcnode.find_or_declare(name)
tsk.set_outputs(node)
- outs[id(node)].add(tsk)
+ outs[node].add(tsk)
# the .mod files to use
for tsk in lst:
if node and node not in tsk.outputs:
if not node in bld.node_deps[key]:
bld.node_deps[key].append(node)
- ins[id(node)].add(tsk)
+ ins[node].add(tsk)
# if the intersection matches, set the order
for k in ins.keys():
kw['output'] = 0
try:
(bld.out, bld.err) = bld.cmd_and_log(cmd, **kw)
- except Exception:
+ except Errors.WafError:
return -1
if bld.out:
bld.to_log('out: %s\n' % bld.out)
if bld.err:
bld.to_log('err: %s\n' % bld.err)
+
#! /usr/bin/env python
# encoding: utf-8
# DC 2008
-# Thomas Nagy 2016 (ita)
+# Thomas Nagy 2016-2018 (ita)
"""
Fortran configuration helpers
v.fcprogram_PATTERN = v.fcprogram_test_PATTERN = '%s.exe'
v.fcshlib_PATTERN = '%s.dll'
- v.implib_PATTERN = 'lib%s.dll.a'
+ v.implib_PATTERN = '%s.dll.a'
v.IMPLIB_ST = '-Wl,--out-implib,%s'
v.FCFLAGS_fcshlib = []
else:
env = dict(os.environ)
env['LANG'] = 'C'
- input = stdin and '\n' or None
+ input = stdin and '\n'.encode() or None
try:
out, err = conf.cmd_and_log(cmd, env=env, output=0, input=input)
- except Errors.WafError ,e:
+ except Errors.WafError as e:
# An WafError might indicate an error code during the command
# execution, in this case we still obtain the stderr and stdout,
# which we can use to find the version string.
"""
Detects openmp flags and sets the OPENMP ``FCFLAGS``/``LINKFLAGS``
"""
- for x in ('-qopenmp', '-fopenmp','-openmp','-mp','-xopenmp','-omp','-qsmp=omp'):
+ for x in ('-fopenmp','-openmp','-mp','-xopenmp','-omp','-qsmp=omp'):
try:
self.check_fc(
msg = 'Checking for OpenMP flag %s' % x,
break
else:
self.fatal('Could not find OpenMP')
+
+@conf
+def check_gfortran_o_space(self):
+ if self.env.FC_NAME != 'GFORTRAN' or int(self.env.FC_VERSION[0]) > 4:
+ # This is for old compilers and only for gfortran.
+ # No idea how other implementations handle this. Be safe and bail out.
+ return
+ self.env.stash()
+ self.env.FCLNK_TGT_F = ['-o', '']
+ try:
+ self.check_fc(msg='Checking if the -o link must be split from arguments', fragment=FC_FRAGMENT, features='fc fcshlib')
+ except self.errors.ConfigurationError:
+ self.env.revert()
+ else:
+ self.env.commit()
#! /usr/bin/env python
# encoding: utf-8
# DC 2008
-# Thomas Nagy 2016 (ita)
+# Thomas Nagy 2016-2018 (ita)
import re
if not found:
if not filename in self.names:
self.names.append(filename)
+
#!/usr/bin/env python
# encoding: utf-8
# John O'Meara, 2006
-# Thomas Nagy, 2006-2016 (ita)
+# Thomas Nagy, 2006-2018 (ita)
"""
The **flex** program is a code generator which creates C or C++ files.
bld = tsk.generator.bld
wd = bld.variant_dir
def to_list(xx):
- if isinstance(xx, str): return [xx]
+ if isinstance(xx, str):
+ return [xx]
return xx
tsk.last_cmd = lst = []
lst.extend(to_list(env.FLEX))
if re.search (r"\\msys\\[0-9.]+\\bin\\flex.exe$", conf.env.FLEX[0]):
# this is the flex shipped with MSYS
conf.env.FLEX_MSYS = True
+
#! /usr/bin/env python
# encoding: utf-8
# KWS 2010
-# Thomas Nagy 2016 (ita)
+# Thomas Nagy 2016-2018 (ita)
import re
from waflib import Utils
conf.fc_add_flags()
conf.g95_flags()
conf.g95_modifier_platform()
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2008-2016 (ita)
+# Thomas Nagy, 2008-2018 (ita)
"Detect as/gas/gcc for compiling assembly files"
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2006-2016 (ita)
+# Thomas Nagy, 2006-2018 (ita)
# Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009
v.cprogram_PATTERN = '%s.exe'
v.cshlib_PATTERN = '%s.dll'
- v.implib_PATTERN = 'lib%s.dll.a'
+ v.implib_PATTERN = '%s.dll.a'
v.IMPLIB_ST = '-Wl,--out-implib,%s'
v.CFLAGS_cshlib = []
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()
+ conf.check_gcc_o_space()
+
conf.load('d')
conf.common_flags_gdc()
conf.d_platform_flags()
+
#! /usr/bin/env python
# encoding: utf-8
# DC 2008
-# Thomas Nagy 2016 (ita)
+# Thomas Nagy 2016-2018 (ita)
import re
from waflib import Utils
version_re = re.compile(r"GNU\s*Fortran", re.I).search
cmd = fc + ['--version']
out, err = fc_config.getoutput(conf, cmd, stdin=False)
- if out: match = version_re(out)
- else: match = version_re(err)
+ if out:
+ match = version_re(out)
+ else:
+ match = version_re(err)
if not match:
conf.fatal('Could not determine the compiler type')
conf.fc_add_flags()
conf.gfortran_flags()
conf.gfortran_modifier_platform()
+ conf.check_gfortran_o_space()
#! /usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2006-2016 (ita)
+# Thomas Nagy, 2006-2018 (ita)
"""
Support for GLib2 tools:
)
ret = bld.exec_command(cmd1)
- if ret: return ret
+ if ret:
+ return ret
#print self.outputs[1].abspath()
c = '''#include "%s"\n''' % self.outputs[0].name
raise Errors.WafError("Tried to add gsettings enums to %r more than once" % self.name)
self.settings_enum_namespace = namespace
- if type(filename_list) != 'list':
+ if not isinstance(filename_list, list):
filename_list = [filename_list]
self.settings_enum_files = filename_list
def getstr(varname):
return getattr(Options.options, varname, getattr(conf.env,varname, ''))
- # TODO make this dependent on the gnu_dirs tool?
gsettingsschemadir = getstr('GSETTINGSSCHEMADIR')
if not gsettingsschemadir:
datadir = getstr('DATADIR')
"""
gr = opt.add_option_group('Installation directories')
gr.add_option('--gsettingsschemadir', help='GSettings schema location [DATADIR/glib-2.0/schemas]', default='', dest='GSETTINGSSCHEMADIR')
+
str_default = default
str_help = '%s [%s]' % (help, re.sub(r'\$\{([^}]+)\}', r'\1', str_default))
dirs_options.add_option(option_name, help=str_help, default='', dest=name.upper())
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2006-2016 (ita)
+# Thomas Nagy, 2006-2018 (ita)
# Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009
v.cxxprogram_PATTERN = '%s.exe'
v.cxxshlib_PATTERN = '%s.dll'
- v.implib_PATTERN = 'lib%s.dll.a'
+ v.implib_PATTERN = '%s.dll.a'
v.IMPLIB_ST = '-Wl,--out-implib,%s'
v.CXXFLAGS_cxxshlib = []
conf.cxx_load_tools()
conf.cxx_add_flags()
conf.link_add_flags()
+ conf.check_gcc_o_space('cxx')
+
#!/usr/bin/env python
# encoding: utf-8
# Stian Selnes 2008
-# Thomas Nagy 2009-2016 (ita)
+# Thomas Nagy 2009-2018 (ita)
"""
Detects the Intel C compiler
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy 2009-2016 (ita)
+# Thomas Nagy 2009-2018 (ita)
"""
Detects the Intel C++ compiler
conf.cxx_load_tools()
conf.cxx_add_flags()
conf.link_add_flags()
+
#! /usr/bin/env python
# encoding: utf-8
# DC 2008
-# Thomas Nagy 2016 (ita)
+# Thomas Nagy 2016-2018 (ita)
-import os, re
+import os, re, traceback
from waflib import Utils, Logs, Errors
from waflib.Tools import fc, fc_config, fc_scan, ar, ccroot
from waflib.Configure import conf
Detects the Intel Fortran compilers
"""
if Utils.is_win32:
- compiler, version, path, includes, libdirs, arch = conf.detect_ifort(True)
+ compiler, version, path, includes, libdirs, arch = conf.detect_ifort()
v = conf.env
v.DEST_CPU = arch
v.PATH = path
v.MSVC_COMPILER = compiler
try:
v.MSVC_VERSION = float(version)
- except Exception:
- raise
+ except ValueError:
v.MSVC_VERSION = float(version[:-3])
conf.find_ifort_win32()
version_pattern = re.compile('^...?.?\....?.?')
try:
all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\Fortran')
- except WindowsError:
+ except OSError:
try:
all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\Fortran')
- except WindowsError:
+ except OSError:
return
index = 0
while 1:
try:
version = Utils.winreg.EnumKey(all_versions, index)
- except WindowsError:
+ except OSError:
break
index += 1
if not version_pattern.match(version):
continue
targets = {}
for target,arch in all_ifort_platforms:
- if target=='intel64': targetDir='EM64T_NATIVE'
- else: targetDir=target
+ if target=='intel64':
+ targetDir='EM64T_NATIVE'
+ else:
+ targetDir=target
try:
Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir)
icl_version=Utils.winreg.OpenKey(all_versions,version)
path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
- except WindowsError:
+ except OSError:
pass
else:
- batch_file=os.path.join(path,'bin','iclvars.bat')
+ batch_file=os.path.join(path,'bin','ifortvars.bat')
if os.path.isfile(batch_file):
targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
try:
icl_version = Utils.winreg.OpenKey(all_versions, version+'\\'+target)
path,type = Utils.winreg.QueryValueEx(icl_version,'ProductDir')
- except WindowsError:
+ except OSError:
continue
else:
- batch_file=os.path.join(path,'bin','iclvars.bat')
+ batch_file=os.path.join(path,'bin','ifortvars.bat')
if os.path.isfile(batch_file):
targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
major = version[0:2]
try:
conf.cmd_and_log(fc + ['/help'], env=env)
except UnicodeError:
- st = Utils.ex_stack()
+ st = traceback.format_exc()
if conf.logger:
conf.logger.error(st)
conf.fatal('ifort: Unicode error - check the code page?')
- except Exception ,e:
+ except Exception as e:
Logs.debug('ifort: get_ifort_version: %r %r %r -> failure %s', compiler, version, target, str(e))
conf.fatal('ifort: cannot run the compiler in get_ifort_version (run with -v to display errors)')
else:
return
self.is_done = True
try:
- vs = self.conf.get_msvc_version(self.compiler, self.version, self.bat_target, self.bat)
+ vs = self.conf.get_ifort_version_win32(self.compiler, self.version, self.bat_target, self.bat)
except Errors.ConfigurationError:
self.is_valid = False
return
# before setting anything, check if the compiler is really intel fortran
env = dict(conf.environ)
- if path: env.update(PATH = ';'.join(path))
+ if path:
+ env.update(PATH = ';'.join(path))
if not conf.cmd_and_log(fc + ['/nologo', '/help'], env=env):
conf.fatal('not intel fortran compiler could not be identified')
man_node = out_node.parent.find_or_declare(out_node.name + '.manifest')
self.link_task.outputs.append(man_node)
self.env.DO_MANIFEST = True
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2006-2016 (ita)
+# Thomas Nagy, 2006-2018 (ita)
"""
Support for translation tools such as msgfmt and intltool
Usage of the :py:mod:`waflib.Tools.gnu_dirs` is recommended, but not obligatory.
"""
+from __future__ import with_statement
+
import os, re
from waflib import Context, Task, Utils, Logs
import waflib.Tools.ccroot
:param install_path: installation path
:type install_path: string
"""
- try: self.meths.remove('process_source')
- except ValueError: pass
+ try:
+ self.meths.remove('process_source')
+ except ValueError:
+ pass
self.ensure_localedir()
The file LINGUAS must be present in the directory pointed by *podir* and list the translation files to process.
"""
- try: self.meths.remove('process_source')
- except ValueError: pass
+ try:
+ self.meths.remove('process_source')
+ except ValueError:
+ pass
self.ensure_localedir()
linguas = self.path.find_node(os.path.join(podir, 'LINGUAS'))
if linguas:
# scan LINGUAS file for locales to process
- file = open(linguas.abspath())
- langs = []
- for line in file.readlines():
- # ignore lines containing comments
- if not line.startswith('#'):
- langs += line.split()
- file.close()
+ with open(linguas.abspath()) as f:
+ langs = []
+ for line in f.readlines():
+ # ignore lines containing comments
+ if not line.startswith('#'):
+ langs += line.split()
re_linguas = re.compile('[-a-zA-Z_@.]+')
for lang in langs:
# Make sure that we only process lines which contain locales
conf.find_intltool_merge()
if conf.env.CC or conf.env.CXX:
conf.check(header_name='locale.h')
+
Compiler definition for irix/MIPSpro cc compiler
"""
+from waflib import Errors
from waflib.Tools import ccroot, ar
from waflib.Configure import conf
try:
conf.cmd_and_log(cc + ['-version'])
- except Exception:
+ except Errors.WafError:
conf.fatal('%r -version could not be executed' % cc)
v.CC = cc
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2006-2016 (ita)
+# Thomas Nagy, 2006-2018 (ita)
"""
Java support
if not y:
self.bld.fatal('Could not find the folder %s from %s' % (x, self.path))
tmp.append(y)
+
tsk.srcdir = tmp
if getattr(self, 'compat', None):
tsk.env.append_value('JAVACFLAGS', ['-sourcepath', names])
@feature('javac')
+@before_method('propagate_uselib_vars')
@after_method('apply_java')
def use_javac_files(self):
"""
"""
Sets the CLASSPATH value on the *javac* task previously created.
"""
- self.env.append_value('CLASSPATH', getattr(self, 'classpath', []))
+ if getattr(self, 'classpath', None):
+ self.env.append_unique('CLASSPATH', getattr(self, 'classpath', []))
for x in self.tasks:
x.env.CLASSPATH = os.pathsep.join(self.env.CLASSPATH) + os.pathsep
if manifest:
jarcreate = getattr(self, 'jarcreate', 'cfm')
if not isinstance(manifest,Node.Node):
- node = self.path.find_or_declare(manifest)
+ node = self.path.find_resource(manifest)
else:
node = manifest
+ if not node:
+ self.bld.fatal('invalid manifest file %r for %r' % (manifest, self))
tsk.dep_nodes.append(node)
jaropts.insert(0, node.abspath())
else:
if not t.hasrun:
return Task.ASK_LATER
if not self.inputs:
- global JAR_RE
try:
self.inputs = [x for x in self.basedir.ant_glob(JAR_RE, remove=False) if id(x) != id(self.outputs[0])]
except Exception:
return Task.ASK_LATER
if not self.inputs:
- global SOURCE_RE
self.inputs = []
for x in self.srcdir:
- self.inputs.extend(x.ant_glob(SOURCE_RE, remove=False))
+ if x.exists():
+ self.inputs.extend(x.ant_glob(SOURCE_RE, remove=False))
return super(javac, self).runnable_status()
def post_run(self):
break
else:
conf.fatal('could not find lib jvm in %r (see config.log)' % libDirs)
+
conf.load('d')
conf.common_flags_ldc2()
conf.d_platform_flags()
+
#!/usr/bin/env python
# encoding: utf-8
# Sebastian Schlingmann, 2008
-# Thomas Nagy, 2008-2016 (ita)
+# Thomas Nagy, 2008-2018 (ita)
"""
Lua support.
Detect the luac compiler and set *conf.env.LUAC*
"""
conf.find_program('luac', var='LUAC')
+
if filename in cache and cache[filename][0] == st.st_mtime:
return cache[filename][1]
- global STRONGEST
if STRONGEST:
ret = Utils.h_file(filename)
else:
if stat.S_ISDIR(st[stat.ST_MODE]):
raise IOError('Not a file')
- ret = Utils.md5(str((st.st_mtime, st.st_size))).digest()
+ ret = Utils.md5(str((st.st_mtime, st.st_size)).encode()).digest()
cache[filename] = (st.st_mtime, ret)
return ret
h_file.__doc__ = Node.Node.h_file.__doc__
Node.Node.h_file = h_file
+
Compilers supported:
-* msvc => Visual Studio, versions 6.0 (VC 98, VC .NET 2002) to 12.0 (Visual Studio 2013)
+* msvc => Visual Studio, versions 6.0 (VC 98, VC .NET 2002) to 15 (Visual Studio 2017)
* wsdk => Windows SDK, versions 6.0, 6.1, 7.0, 7.1, 8.0
* icl => Intel compiler, versions 9, 10, 11, 13
* winphone => Visual Studio to target Windows Phone 8 native (version 8.0 for now)
Setting PYTHONUNBUFFERED gives the unbuffered output.
"""
-import os, sys, re
+import os, sys, re, traceback
from waflib import Utils, Logs, Options, Errors
from waflib.TaskGen import after_method, feature
platforms=Utils.to_list(conf.env.MSVC_TARGETS) or [i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms]
desired_versions = getattr(Options.options, 'msvc_version', '').split(',')
if desired_versions == ['']:
- desired_versions = conf.env.MSVC_VERSIONS or list(versiondict.keys())
+ desired_versions = conf.env.MSVC_VERSIONS or list(reversed(sorted(versiondict.keys())))
# Override lazy detection by evaluating after the fact.
lazy_detect = getattr(Options.options, 'msvc_lazy', True)
conf.env.MSVC_INSTALLED_VERSIONS = versiondict
for version in desired_versions:
+ Logs.debug('msvc: detecting %r - %r', version, desired_versions)
try:
targets = versiondict[version]
except KeyError:
continue
+
+ seen = set()
for arch in platforms:
+ if arch in seen:
+ continue
+ else:
+ seen.add(arch)
try:
cfg = targets[arch]
except KeyError:
continue
+
cfg.evaluate()
if cfg.is_valid:
compiler,revision = version.rsplit(' ', 1)
try:
conf.cmd_and_log(cxx + ['/help'], env=env)
except UnicodeError:
- st = Utils.ex_stack()
+ st = traceback.format_exc()
if conf.logger:
conf.logger.error(st)
conf.fatal('msvc: Unicode error - check the code page?')
- except Exception ,e:
+ except Exception as e:
Logs.debug('msvc: get_msvc_version: %r %r %r -> failure %s', compiler, version, target, str(e))
conf.fatal('msvc: cannot run the compiler in get_msvc_version (run with -v to display errors)')
else:
return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
-@conf
-def gather_wsdk_versions(conf, versions):
- """
- Use winreg to add the msvc versions to the input list
-
- :param versions: list to modify
- :type versions: list
- """
- version_pattern = re.compile('^v..?.?\...?.?')
- try:
- all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows')
- except WindowsError:
- try:
- all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows')
- except WindowsError:
- return
- index = 0
- while 1:
- try:
- version = Utils.winreg.EnumKey(all_versions, index)
- except WindowsError:
- break
- index += 1
- if not version_pattern.match(version):
- continue
- try:
- msvc_version = Utils.winreg.OpenKey(all_versions, version)
- path,type = Utils.winreg.QueryValueEx(msvc_version,'InstallationFolder')
- except WindowsError:
- continue
- if path and os.path.isfile(os.path.join(path, 'bin', 'SetEnv.cmd')):
- targets = {}
- for target,arch in all_msvc_platforms:
- targets[target] = target_compiler(conf, 'wsdk', arch, version, '/'+target, os.path.join(path, 'bin', 'SetEnv.cmd'))
- versions['wsdk ' + version[1:]] = targets
-
def gather_wince_supported_platforms():
"""
Checks SmartPhones SDKs
supported_wince_platforms = []
try:
ce_sdk = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs')
- except WindowsError:
+ except OSError:
try:
ce_sdk = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs')
- except WindowsError:
+ except OSError:
ce_sdk = ''
if not ce_sdk:
return supported_wince_platforms
try:
sdk_device = Utils.winreg.EnumKey(ce_sdk, index)
sdk = Utils.winreg.OpenKey(ce_sdk, sdk_device)
- except WindowsError:
+ except OSError:
break
index += 1
try:
path,type = Utils.winreg.QueryValueEx(sdk, 'SDKRootDir')
- except WindowsError:
+ except OSError:
try:
path,type = Utils.winreg.QueryValueEx(sdk,'SDKInformation')
- except WindowsError:
+ except OSError:
continue
path,xml = os.path.split(path)
path = str(path)
prefix = 'SOFTWARE\\Wow6432node\\Microsoft\\' + vcver
try:
all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, prefix)
- except WindowsError:
+ except OSError:
prefix = 'SOFTWARE\\Microsoft\\' + vcver
try:
all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, prefix)
- except WindowsError:
+ except OSError:
continue
index = 0
while 1:
try:
version = Utils.winreg.EnumKey(all_versions, index)
- except WindowsError:
+ except OSError:
break
index += 1
match = version_pattern.match(version)
:param version: compiler version number
:param bat_target: ?
:param bat: path to the batch file to run
- :param callback: optional function to take the realized environment variables tup and map it (e.g. to combine other constant paths)
"""
self.conf = ctx
self.name = None
(self.bindirs, self.incdirs, self.libdirs) = vs
def __str__(self):
- return str((self.bindirs, self.incdirs, self.libdirs))
+ return str((self.compiler, self.cpu, self.version, self.bat_target, self.bat))
def __repr__(self):
- return repr((self.bindirs, self.incdirs, self.libdirs))
+ return repr((self.compiler, self.cpu, self.version, self.bat_target, self.bat))
+
+@conf
+def gather_wsdk_versions(conf, versions):
+	"""
+	Use winreg to add the msvc versions to the input list
+
+	:param versions: list to modify
+	:type versions: list
+	"""
+	# Matches SDK subkey names such as 'v7.1' or 'v10.0A'
+	version_pattern = re.compile('^v..?.?\...?.?')
+	try:
+		# On 64-bit Windows, 32-bit registry data lives under Wow6432node; try that first
+		all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows')
+	except OSError:
+		try:
+			all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows')
+		except OSError:
+			# no Windows SDK registered on this machine
+			return
+	index = 0
+	while 1:
+		try:
+			version = Utils.winreg.EnumKey(all_versions, index)
+		except OSError:
+			# EnumKey raises once the subkey index runs past the end
+			break
+		index += 1
+		if not version_pattern.match(version):
+			continue
+		try:
+			msvc_version = Utils.winreg.OpenKey(all_versions, version)
+			path,type = Utils.winreg.QueryValueEx(msvc_version,'InstallationFolder')
+		except OSError:
+			# key without an InstallationFolder value: not a usable SDK
+			continue
+		if path and os.path.isfile(os.path.join(path, 'bin', 'SetEnv.cmd')):
+			# SetEnv.cmd is the batch file that configures the SDK environment per target
+			targets = {}
+			for target,arch in all_msvc_platforms:
+				targets[target] = target_compiler(conf, 'wsdk', arch, version, '/'+target, os.path.join(path, 'bin', 'SetEnv.cmd'))
+			# strip the leading 'v' from the registry key for the version label
+			versions['wsdk ' + version[1:]] = targets
@conf
def gather_msvc_targets(conf, versions, version, vc_path):
elif os.path.isfile(os.path.join(vc_path, 'Bin', 'vcvars32.bat')):
targets['x86'] = target_compiler(conf, 'msvc', 'x86', version, '', os.path.join(vc_path, 'Bin', 'vcvars32.bat'))
if targets:
- versions['msvc ' + version] = targets
+ versions['msvc %s' % version] = targets
@conf
def gather_wince_targets(conf, versions, version, vc_path, vsvars, supported_platforms):
incdirs = [os.path.join(winCEpath, 'include'), os.path.join(winCEpath, 'atlmfc', 'include'), include]
libdirs = [os.path.join(winCEpath, 'lib', platform), os.path.join(winCEpath, 'atlmfc', 'lib', platform), lib]
def combine_common(obj, compiler_env):
+ # TODO this is likely broken, remove in waf 2.1
(common_bindirs,_1,_2) = compiler_env
return (bindirs + common_bindirs, incdirs, libdirs)
targets[platform] = target_compiler(conf, 'msvc', platform, version, 'x86', vsvars, combine_common)
if targets:
versions['winphone ' + version] = targets
+@conf
+def gather_vswhere_versions(conf, versions):
+	"""
+	Adds Visual Studio 2017+ installations detected through vswhere.exe.
+
+	:param versions: dict to modify, mapping 'msvc <ver>' to target compilers
+	:type versions: dict
+	"""
+	try:
+		import json
+	except ImportError:
+		# json entered the stdlib in Python 2.6, hence the message below
+		Logs.error('Visual Studio 2017 detection requires Python 2.6')
+		return
+
+	prg_path = os.environ.get('ProgramFiles(x86)', os.environ.get('ProgramFiles', 'C:\\Program Files (x86)'))
+
+	# vswhere.exe ships with the VS 2017+ installer at a fixed location
+	vswhere = os.path.join(prg_path, 'Microsoft Visual Studio', 'Installer', 'vswhere.exe')
+	args = [vswhere, '-products', '*', '-legacy', '-format', 'json']
+	try:
+		txt = conf.cmd_and_log(args)
+	except Errors.WafError as e:
+		# absence of vswhere.exe (or a failed run) is not fatal, just means no VS 2017+
+		Logs.debug('msvc: vswhere.exe failed %s', e)
+		return
+
+	if sys.version_info[0] < 3:
+		# cmd_and_log returns bytes on Python 2; decode before json.loads
+		try:
+			txt = txt.decode(sys.stdout.encoding or 'cp1252')
+		except UnicodeError:
+			txt = txt.decode('utf-8', 'replace')
+	arr = json.loads(txt)
+	arr.sort(key=lambda x: x['installationVersion'])
+	for entry in arr:
+		ver = entry['installationVersion']
+		# keep only major.minor, e.g. '15.9.28307.1064' -> '15.9'
+		ver = str('.'.join(ver.split('.')[:2]))
+		path = str(os.path.abspath(entry['installationPath']))
+		# do not overwrite versions already found via the registry
+		if os.path.exists(path) and ('msvc %s' % ver) not in versions:
+			conf.gather_msvc_targets(versions, ver, path)
+
@conf
def gather_msvc_versions(conf, versions):
vc_paths = []
try:
try:
msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, reg + "\\Setup\\VC")
- except WindowsError:
+ except OSError:
msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, reg + "\\Setup\\Microsoft Visual C++")
path,type = Utils.winreg.QueryValueEx(msvc_version, 'ProductDir')
- except WindowsError:
+ except OSError:
try:
msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, "SOFTWARE\\Wow6432node\\Microsoft\\VisualStudio\\SxS\\VS7")
path,type = Utils.winreg.QueryValueEx(msvc_version, version)
- except WindowsError:
+ except OSError:
continue
else:
vc_paths.append((version, os.path.abspath(str(path))))
version_pattern = re.compile('^...?.?\....?.?')
try:
all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++')
- except WindowsError:
+ except OSError:
try:
all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\C++')
- except WindowsError:
+ except OSError:
return
index = 0
while 1:
try:
version = Utils.winreg.EnumKey(all_versions, index)
- except WindowsError:
+ except OSError:
break
index += 1
if not version_pattern.match(version):
continue
targets = {}
for target,arch in all_icl_platforms:
- if target=='intel64': targetDir='EM64T_NATIVE'
- else: targetDir=target
+ if target=='intel64':
+ targetDir='EM64T_NATIVE'
+ else:
+ targetDir=target
try:
Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir)
icl_version=Utils.winreg.OpenKey(all_versions,version)
path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
- except WindowsError:
+ except OSError:
pass
else:
batch_file=os.path.join(path,'bin','iclvars.bat')
try:
icl_version = Utils.winreg.OpenKey(all_versions, version+'\\'+target)
path,type = Utils.winreg.QueryValueEx(icl_version,'ProductDir')
- except WindowsError:
+ except OSError:
continue
else:
batch_file=os.path.join(path,'bin','iclvars.bat')
version_pattern = re.compile('^...?.?\...?.?.?')
try:
all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Suites')
- except WindowsError:
+ except OSError:
try:
all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Suites')
- except WindowsError:
+ except OSError:
return
index = 0
while 1:
try:
version = Utils.winreg.EnumKey(all_versions, index)
- except WindowsError:
+ except OSError:
break
index += 1
if not version_pattern.match(version):
continue
targets = {}
for target,arch in all_icl_platforms:
- if target=='intel64': targetDir='EM64T_NATIVE'
- else: targetDir=target
+ if target=='intel64':
+ targetDir='EM64T_NATIVE'
+ else:
+ targetDir=target
try:
try:
defaults = Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\'+targetDir)
- except WindowsError:
+ except OSError:
if targetDir == 'EM64T_NATIVE':
defaults = Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\EM64T')
else:
Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++\\'+targetDir)
icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++')
path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
- except WindowsError:
+ except OSError:
pass
else:
batch_file=os.path.join(path,'bin','iclvars.bat')
self.gather_intel_composer_versions(dct)
self.gather_wsdk_versions(dct)
self.gather_msvc_versions(dct)
+ self.gather_vswhere_versions(dct)
Logs.debug('msvc: detected versions %r', list(dct.keys()))
return dct
(lt_path, lt_libname, lt_static) = self.find_lt_names_msvc(lib, is_static)
if lt_path != None and lt_libname != None:
- if lt_static == True:
+ if lt_static:
# file existence check has been made by find_lt_names
return os.path.join(lt_path,lt_libname)
v.MSVC_COMPILER = compiler
try:
v.MSVC_VERSION = float(version)
- except TypeError:
+ except ValueError:
v.MSVC_VERSION = float(version[:-3])
def _get_prog_names(conf, compiler):
# before setting anything, check if the compiler is really msvc
env = dict(conf.environ)
- if path: env.update(PATH = ';'.join(path))
+ if path:
+ env.update(PATH = ';'.join(path))
if not conf.cmd_and_log(cxx + ['/nologo', '/help'], env=env):
conf.fatal('the msvc compiler could not be identified')
# linker
if not v.LINK_CXX:
- # TODO: var=LINK_CXX to let so that LINK_CXX can be overridden?
- v.LINK_CXX = conf.find_program(linker_name, path_list=path, errmsg='%s was not found (linker)' % linker_name)
+ conf.find_program(linker_name, path_list=path, errmsg='%s was not found (linker)' % linker_name, var='LINK_CXX')
if not v.LINK_CC:
v.LINK_CC = v.LINK_CXX
v.AR_TGT_F = v.CCLNK_TGT_F = v.CXXLNK_TGT_F = '/OUT:'
- # Subsystem specific flags
- v.CFLAGS_CONSOLE = v.CXXFLAGS_CONSOLE = ['/SUBSYSTEM:CONSOLE']
- v.CFLAGS_NATIVE = v.CXXFLAGS_NATIVE = ['/SUBSYSTEM:NATIVE']
- v.CFLAGS_POSIX = v.CXXFLAGS_POSIX = ['/SUBSYSTEM:POSIX']
- v.CFLAGS_WINDOWS = v.CXXFLAGS_WINDOWS = ['/SUBSYSTEM:WINDOWS']
- v.CFLAGS_WINDOWSCE = v.CXXFLAGS_WINDOWSCE = ['/SUBSYSTEM:WINDOWSCE']
-
# CRT specific flags
v.CFLAGS_CRT_MULTITHREADED = v.CXXFLAGS_CRT_MULTITHREADED = ['/MT']
v.CFLAGS_CRT_MULTITHREADED_DLL = v.CXXFLAGS_CRT_MULTITHREADED_DLL = ['/MD']
Insert configuration flags for windows phone applications (adds /ZW, /TP...)
"""
make_winapp(self, 'WINAPI_FAMILY_PHONE_APP')
- conf.env.append_unique('LINKFLAGS', ['/NODEFAULTLIB:ole32.lib', 'PhoneAppModelHost.lib'])
+ self.env.append_unique('LINKFLAGS', ['/NODEFAULTLIB:ole32.lib', 'PhoneAppModelHost.lib'])
@feature('winapp')
@after_method('process_use')
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2008-2016 (ita)
+# Thomas Nagy, 2008-2018 (ita)
"""
Nasm tool (asm processing)
x.write('')
for (name, cls) in Task.classes.items():
cls.run = run
+
#!/usr/bin/env python
# encoding: utf-8
# andersg at 0x63.nu 2007
-# Thomas Nagy 2016 (ita)
+# Thomas Nagy 2016-2018 (ita)
"""
Support for Perl extensions. A C/C++ compiler is required::
"""
import os
-from waflib import Task, Options, Utils
+from waflib import Task, Options, Utils, Errors
from waflib.Configure import conf
from waflib.TaskGen import extension, feature, before_method
*lib* prefix from library names.
"""
self.uselib = self.to_list(getattr(self, 'uselib', []))
- if not 'PERLEXT' in self.uselib: self.uselib.append('PERLEXT')
+ if not 'PERLEXT' in self.uselib:
+ self.uselib.append('PERLEXT')
self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.perlext_PATTERN
@extension('.xs')
self.start_msg('perl module %s' % module)
try:
r = self.cmd_and_log(cmd)
- except Exception:
+ except Errors.WafError:
self.end_msg(False)
return None
self.end_msg(r or True)
"""
opt.add_option('--with-perl-binary', type='string', dest='perlbinary', help = 'Specify alternate perl binary', default=None)
opt.add_option('--with-perl-archdir', type='string', dest='perlarchdir', help = 'Specify directory where to install arch specific files', default=None)
+
"""
import os, sys
-from waflib import Utils, Options, Errors, Logs, Task, Node
+from waflib import Errors, Logs, Node, Options, Task, Utils
from waflib.TaskGen import extension, before_method, after_method, feature
from waflib.Configure import conf
py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3], True)
'''
"""
-Piece of Python code used in :py:func:`waflib.Tools.python.pytask` for byte-compiling python files
+Piece of Python code used in :py:class:`waflib.Tools.python.pyo` and :py:class:`waflib.Tools.python.pyc` for byte-compiling python files
"""
DISTUTILS_IMP = ['from distutils.sysconfig import get_config_var, get_python_lib']
"""
Add signature of .py file, so it will be byte-compiled when necessary
"""
- assert(getattr(self, 'install_path')), 'add features="py"'
+ assert(hasattr(self, 'install_path')), 'add features="py"'
# where to install the python file
if self.install_path:
conf.start_msg(msg)
try:
ret = conf.cmd_and_log(conf.env.PYTHON + ['-c', PYTHON_MODULE_TEMPLATE % module_name])
- except Exception:
+ except Errors.WafError:
conf.end_msg(False)
conf.fatal('Could not find the python module %r' % module_name)
v.NOPYCACHE=Options.options.nopycache
if not v.PYTHON:
- v.PYTHON = getattr(Options.options, 'python', None) or sys.executable
+ v.PYTHON = [getattr(Options.options, 'python', None) or sys.executable]
v.PYTHON = Utils.to_list(v.PYTHON)
conf.find_program('python', var='PYTHON')
help='Installation path for python modules (py, platform-independent .py and .pyc files)')
pyopt.add_option('--pythonarchdir', dest='pythonarchdir',
help='Installation path for python extension (pyext, platform-dependent .so or .dylib files)')
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2006-2016 (ita)
+# Thomas Nagy, 2006-2018 (ita)
"""
This tool helps with finding Qt5 tools and libraries,
incs = set(self.to_list(getattr(self, 'includes', '')))
for x in self.compiled_tasks:
incs.add(x.inputs[0].parent.path_from(self.path))
- self.includes = list(incs)
+ self.includes = sorted(incs)
Note: another tool provides Qt processing that does not require
.moc includes, see 'playground/slow_qt/'.
QT5_XCOMPILE=1 QT5_FORCE_STATIC=1 waf configure
"""
+from __future__ import with_statement
+
try:
from xml.sax import make_parser
from xml.sax.handler import ContentHandler
# direct injection in the build phase (safe because called from the main thread)
gen = self.generator.bld.producer
- gen.outstanding.appendleft(tsk)
+ gen.outstanding.append(tsk)
gen.total += 1
return tsk
Parses ``.qrc`` files
"""
def __init__(self):
+ ContentHandler.__init__(self)
self.buf = []
self.files = []
def startElement(self, name, attrs):
@extension(*EXT_RCC)
def create_rcc_task(self, node):
"Creates rcc and cxx tasks for ``.qrc`` files"
- rcnode = node.change_ext('_rc.cpp')
+ rcnode = node.change_ext('_rc.%d.cpp' % self.idx)
self.create_task('rcc', node, rcnode)
cpptask = self.create_task('cxx', rcnode, rcnode.change_ext('.o'))
try:
@extension(*EXT_UI)
def create_uic_task(self, node):
"Create uic tasks for user interface ``.ui`` definition files"
- uictask = self.create_task('ui5', node)
- uictask.outputs = [node.parent.find_or_declare(self.env.ui_PATTERN % node.name[:-3])]
+
+ """
+ If UIC file is used in more than one bld, we would have a conflict in parallel execution
+ It is not possible to change the file names (like .self.idx. as for objects) as they have
+ to be referenced by the source file, but we can assume that the transformation will be identical
+ and the tasks can be shared in a global cache.
+ """
+ try:
+ uic_cache = self.bld.uic_cache
+ except AttributeError:
+ uic_cache = self.bld.uic_cache = {}
+
+ if node not in uic_cache:
+ uictask = uic_cache[node] = self.create_task('ui5', node)
+ uictask.outputs = [node.parent.find_or_declare(self.env.ui_PATTERN % node.name[:-3])]
@extension('.ts')
def add_lang(self, node):
for x in self.to_list(self.lang):
if isinstance(x, str):
x = self.path.find_resource(x + '.ts')
- qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.qm')))
+ qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.%d.qm' % self.idx)))
if getattr(self, 'update', None) and Options.options.trans_qt5:
cxxnodes = [a.inputs[0] for a in self.compiled_tasks] + [
- a.inputs[0] for a in self.tasks if getattr(a, 'inputs', None) and a.inputs[0].name.endswith('.ui')]
+ a.inputs[0] for a in self.tasks if a.inputs and a.inputs[0].name.endswith('.ui')]
for x in qmtasks:
self.create_task('trans_update', cxxnodes, x.inputs)
qmnodes = [x.outputs[0] for x in qmtasks]
rcnode = self.langname
if isinstance(rcnode, str):
- rcnode = self.path.find_or_declare(rcnode + '.qrc')
+ rcnode = self.path.find_or_declare(rcnode + ('.%d.qrc' % self.idx))
t = self.create_task('qm2rcc', qmnodes, rcnode)
k = create_rcc_task(self, t.outputs[0])
self.link_task.inputs.append(k.outputs[0])
lst = []
for flag in self.to_list(self.env.CXXFLAGS):
- if len(flag) < 2: continue
+ if len(flag) < 2:
+ continue
f = flag[0:2]
if f in ('-D', '-I', '/D', '/I'):
if (f[0] == '/'):
parser = make_parser()
curHandler = XMLHandler()
parser.setContentHandler(curHandler)
- fi = open(self.inputs[0].abspath(), 'r')
- try:
- parser.parse(fi)
- finally:
- fi.close()
+ with open(self.inputs[0].abspath(), 'r') as f:
+ parser.parse(f)
nodes = []
names = []
root = self.inputs[0].parent
for x in curHandler.files:
nd = root.find_resource(x)
- if nd: nodes.append(nd)
- else: names.append(x)
+ if nd:
+ nodes.append(nd)
+ else:
+ names.append(x)
return (nodes, names)
class moc(Task.Task):
self.fatal('Could not build a simple Qt application')
# FreeBSD does not add /usr/local/lib and the pkg-config files do not provide it either :-/
- from waflib import Utils
if Utils.unversioned_sys_platform() == 'freebsd':
frag = '#include <QApplication>\nint main(int argc, char **argv) { QApplication app(argc, argv); return NULL != (void*) (&app);}\n'
try:
uicver = uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt', '')
self.end_msg(uicver)
if uicver.find(' 3.') != -1 or uicver.find(' 4.') != -1:
- self.fatal('this uic compiler is for qt3 or qt5, add uic for qt5 to your path')
+ self.fatal('this uic compiler is for qt3 or qt4, add uic for qt5 to your path')
find_bin(['moc-qt5', 'moc'], 'QT_MOC')
find_bin(['rcc-qt5', 'rcc'], 'QT_RCC')
self.msg('Checking for %s' % i, False, 'YELLOW')
env.append_unique('INCLUDES_' + uselib, os.path.join(env.QTLIBS, frameworkName, 'Headers'))
else:
- for j in ('', 'd'):
- k = '_DEBUG' if j == 'd' else ''
- ret = self.find_single_qt5_lib(i + j, uselib + k, env.QTLIBS, qtincludes, force_static)
- if not force_static and not ret:
- ret = self.find_single_qt5_lib(i + j, uselib + k, env.QTLIBS, qtincludes, True)
- self.msg('Checking for %s' % (i + j), ret, 'GREEN' if ret else 'YELLOW')
+ ret = self.find_single_qt5_lib(i, uselib, env.QTLIBS, qtincludes, force_static)
+ if not force_static and not ret:
+ ret = self.find_single_qt5_lib(i, uselib, env.QTLIBS, qtincludes, True)
+ self.msg('Checking for %s' % i, ret, 'GREEN' if ret else 'YELLOW')
else:
path = '%s:%s:%s/pkgconfig:/usr/lib/qt5/lib/pkgconfig:/opt/qt5/lib/pkgconfig:/usr/lib/qt5/lib:/opt/qt5/lib' % (
self.environ.get('PKG_CONFIG_PATH', ''), env.QTLIBS, env.QTLIBS)
- for i in self.qt5_vars_debug + self.qt5_vars:
+ for i in self.qt5_vars:
self.check_cfg(package=i, args='--cflags --libs', mandatory=False, force_static=force_static, pkg_config_path=path)
@conf
accu.append(lib)
env['LIBPATH_'+var] = accu
process_lib(self.qt5_vars, 'LIBPATH_QTCORE')
- process_lib(self.qt5_vars_debug, 'LIBPATH_QTCORE_DEBUG')
@conf
def add_qt5_rpath(self):
accu.append('-Wl,--rpath='+lib)
env['RPATH_' + var] = accu
process_rpath(self.qt5_vars, 'LIBPATH_QTCORE')
- process_rpath(self.qt5_vars_debug, 'LIBPATH_QTCORE_DEBUG')
@conf
def set_qt5_libs_to_check(self):
if qtextralibs:
self.qt5_vars.extend(qtextralibs.split(','))
- if not hasattr(self, 'qt5_vars_debug'):
- self.qt5_vars_debug = [a + '_DEBUG' for a in self.qt5_vars]
- self.qt5_vars_debug = Utils.to_list(self.qt5_vars_debug)
-
@conf
def set_qt5_defines(self):
if sys.platform != 'win32':
for x in self.qt5_vars:
y=x.replace('Qt5', 'Qt')[2:].upper()
self.env.append_unique('DEFINES_%s' % x.upper(), 'QT_%s_LIB' % y)
- self.env.append_unique('DEFINES_%s_DEBUG' % x.upper(), 'QT_%s_LIB' % y)
def options(opt):
"""
opt.add_option('--translate', action='store_true', help='collect translation strings', dest='trans_qt5', default=False)
opt.add_option('--qtextralibs', type='string', default='', dest='qtextralibs', help='additional qt libraries on the system to add to default ones, comma separated')
+
#!/usr/bin/env python
# encoding: utf-8
# daniel.svensson at purplescout.se 2008
-# Thomas Nagy 2016 (ita)
+# Thomas Nagy 2016-2018 (ita)
"""
Support for Ruby extensions. A C/C++ compiler is required::
"""
import os
-from waflib import Options, Utils, Task
+from waflib import Errors, Options, Task, Utils
from waflib.TaskGen import before_method, feature, extension
from waflib.Configure import conf
try:
version = self.cmd_and_log(ruby + ['-e', 'puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
- except Exception:
+ except Errors.WafError:
self.fatal('could not determine ruby version')
self.env.RUBY_VERSION = version
try:
- ver = tuple(map(int, version.split(".")))
- except Exception:
+ ver = tuple(map(int, version.split('.')))
+ except Errors.WafError:
self.fatal('unsupported ruby version %r' % version)
cver = ''
self.start_msg('Ruby module %s' % module_name)
try:
self.cmd_and_log(self.env.RUBY + ['-e', 'require \'%s\';puts 1' % module_name])
- except Exception:
+ except Errors.WafError:
self.end_msg(False)
self.fatal('Could not find the ruby module %r' % module_name)
self.end_msg(True)
opt.add_option('--with-ruby-archdir', type='string', dest='rubyarchdir', help='Specify directory where to install arch specific files')
opt.add_option('--with-ruby-libdir', type='string', dest='rubylibdir', help='Specify alternate ruby library path')
opt.add_option('--with-ruby-binary', type='string', dest='rubybinary', help='Specify alternate ruby binary')
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2006-2016 (ita)
+# Thomas Nagy, 2006-2018 (ita)
# Ralf Habacker, 2006 (rh)
+from waflib import Errors
from waflib.Tools import ccroot, ar
from waflib.Configure import conf
cc = conf.find_program('cc', var='CC')
try:
conf.cmd_and_log(cc + ['-flags'])
- except Exception:
+ except Errors.WafError:
conf.fatal('%r is not a Sun compiler' % cc)
v.CC_NAME = 'sun'
conf.get_suncc_version(cc)
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2006-2016 (ita)
+# Thomas Nagy, 2006-2018 (ita)
# Ralf Habacker, 2006 (rh)
+from waflib import Errors
from waflib.Tools import ccroot, ar
from waflib.Configure import conf
cc = conf.find_program(['CC', 'c++'], var='CXX')
try:
conf.cmd_and_log(cc + ['-flags'])
- except Exception:
+ except Errors.WafError:
conf.fatal('%r is not a Sun compiler' % cc)
v.CXX_NAME = 'sun'
conf.get_suncc_version(cc)
conf.cxx_load_tools()
conf.cxx_add_flags()
conf.link_add_flags()
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2006-2016 (ita)
+# Thomas Nagy, 2006-2018 (ita)
"""
TeX/LaTeX/PDFLaTeX/XeLaTeX support
node = self.inputs[0]
nodes = []
- if not node: return nodes
+ if not node:
+ return nodes
code = node.read()
for match in re_bibunit.finditer(code):
path = match.group('file')
if path:
+ found = None
for k in ('', '.bib'):
# add another loop for the tex include paths?
Logs.debug('tex: trying %s%s', path, k)
fi = node.parent.find_resource(path + k)
if fi:
+ found = True
nodes.append(fi)
- # no break, people are crazy
- else:
+ # no break
+ if not found:
Logs.debug('tex: could not find %s', path)
Logs.debug('tex: found the following bibunit files: %s', nodes)
nodes = []
names = []
seen = []
- if not node: return (nodes, names)
+ if not node:
+ return (nodes, names)
def parse_node(node):
if node in seen:
return
seen.append(node)
code = node.read()
- global re_tex
for match in re_tex.finditer(code):
multibib = match.group('type')
except self.errors.ConfigurationError:
pass
v.DVIPSFLAGS = '-Ppdf'
+
"""
import re
-from waflib import Context, Task, Utils, Logs, Options, Errors, Node
+from waflib import Build, Context, Errors, Logs, Node, Options, Task, Utils
from waflib.TaskGen import extension, taskgen_method
from waflib.Configure import conf
package_obj = self.bld.get_tgen_by_name(package)
except Errors.WafError:
continue
+
+ # in practice the other task is already processed
+ # but this makes it explicit
+ package_obj.post()
package_name = package_obj.target
- for task in package_obj.tasks:
+ task = getattr(package_obj, 'valatask', None)
+ if task:
for output in task.outputs:
if output.name == package_name + ".vapi":
valatask.set_run_after(task)
if self.is_lib and valatask.install_binding:
headers_list = [o for o in valatask.outputs if o.suffix() == ".h"]
- try:
- self.install_vheader.source = headers_list
- except AttributeError:
+ if headers_list:
self.install_vheader = self.add_install_files(install_to=valatask.header_path, install_from=headers_list)
vapi_list = [o for o in valatask.outputs if (o.suffix() in (".vapi", ".deps"))]
- try:
- self.install_vapi.source = vapi_list
- except AttributeError:
+ if vapi_list:
self.install_vapi = self.add_install_files(install_to=valatask.vapi_path, install_from=vapi_list)
gir_list = [o for o in valatask.outputs if o.suffix() == '.gir']
- try:
- self.install_gir.source = gir_list
- except AttributeError:
+ if gir_list:
self.install_gir = self.add_install_files(
install_to=getattr(self, 'gir_path', '${DATAROOTDIR}/gir-1.0'), install_from=gir_list)
valatask.outputs.append(c_node)
self.source.append(c_node)
+@extension('.vapi')
+def vapi_file(self, node):
+	"""
+	Registers a ``.vapi`` binding file as an input of the task generator's valac task,
+	creating and initializing that task on first use.
+	"""
+	try:
+		valatask = self.valatask
+	except AttributeError:
+		# first vala-related source seen by this task generator: create the shared valac task
+		valatask = self.valatask = self.create_task('valac')
+		self.init_vala_task()
+	valatask.inputs.append(node)
+
@conf
def find_valac(self, valac_name, min_version):
"""
valac = self.find_program(valac_name, var='VALAC')
try:
output = self.cmd_and_log(valac + ['--version'])
- except Exception:
+ except Errors.WafError:
valac_version = None
else:
ver = re.search(r'\d+.\d+.\d+', output).group().split('.')
valaopts.add_option('--vala-target-glib', default=None,
dest='vala_target_glib', metavar='MAJOR.MINOR',
help='Target version of glib for Vala GObject code generation')
+
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2006
-# Thomas Nagy, 2010-2016 (ita)
+# Thomas Nagy, 2010-2018 (ita)
"""
-Unit testing system for C/C++/D providing test execution:
+Unit testing system for C/C++/D and interpreted languages providing test execution:
* in parallel, by using ``waf -j``
* partial (only the tests that have changed) or full (by using ``waf --alltests``)
(with extension _run.py) that are useful for debugging purposes.
"""
-import os, sys
+import os, shlex, sys
from waflib.TaskGen import feature, after_method, taskgen_method
from waflib import Utils, Task, Logs, Options
from waflib.Tools import ccroot
sys.exit(status)
"""
+@taskgen_method
+def handle_ut_cwd(self, key):
+	"""
+	Task generator method, used internally to limit code duplication.
+	This method may disappear anytime.
+
+	Reads the attribute named *key* and, when it is a string, converts it
+	into a Node stored on ``self.ut_cwd`` (absolute paths are anchored at
+	the filesystem root, relative ones at the task generator's path).
+	"""
+	cwd = getattr(self, key, None)
+	if cwd:
+		if isinstance(cwd, str):
+			# we want a Node instance
+			if os.path.isabs(cwd):
+				self.ut_cwd = self.bld.root.make_node(cwd)
+			else:
+				self.ut_cwd = self.path.make_node(cwd)
+
+@feature('test_scripts')
+def make_interpreted_test(self):
+	"""Create interpreted unit tests."""
+	# both attributes are mandatory for this feature; warn and bail out if either is absent
+	for x in ['test_scripts_source', 'test_scripts_template']:
+		if not hasattr(self, x):
+			# NOTE(review): "i missing" is an upstream waf 2.0.4 typo ("is missing"); kept to match upstream
+			Logs.warn('a test_scripts taskgen i missing %s' % x)
+			return
+
+	# compile the command template once; lst holds the env variables it references
+	self.ut_run, lst = Task.compile_fun(self.test_scripts_template, shell=getattr(self, 'test_scripts_shell', False))
+
+	script_nodes = self.to_nodes(self.test_scripts_source)
+	for script_node in script_nodes:
+		# one utest task per script; SCRIPT is expanded by the compiled template
+		tsk = self.create_task('utest', [script_node])
+		tsk.vars = lst + tsk.vars
+		tsk.env['SCRIPT'] = script_node.path_from(tsk.get_cwd())
+
+	self.handle_ut_cwd('test_scripts_cwd')
+
+	env = getattr(self, 'test_scripts_env', None)
+	if env:
+		self.ut_env = env
+	else:
+		self.ut_env = dict(os.environ)
+
+	# prepend/append extra search-path entries (e.g. PATH, PYTHONPATH) to the test environment
+	paths = getattr(self, 'test_scripts_paths', {})
+	for (k,v) in paths.items():
+		p = self.ut_env.get(k, '').split(os.pathsep)
+		if isinstance(v, str):
+			v = v.split(os.pathsep)
+		self.ut_env[k] = os.pathsep.join(p + v)
+
@feature('test')
@after_method('apply_link', 'process_use')
def make_test(self):
self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False))
tsk.vars = lst + tsk.vars
- if getattr(self, 'ut_cwd', None):
- if isinstance(self.ut_cwd, str):
- # we want a Node instance
- if os.path.isabs(self.ut_cwd):
- self.ut_cwd = self.bld.root.make_node(self.ut_cwd)
- else:
- self.ut_cwd = self.path.make_node(self.ut_cwd)
- else:
- self.ut_cwd = tsk.inputs[0].parent
+ self.handle_ut_cwd('ut_cwd')
if not hasattr(self, 'ut_paths'):
paths = []
else:
add_path('LD_LIBRARY_PATH')
+ if not hasattr(self, 'ut_cmd'):
+ self.ut_cmd = getattr(Options.options, 'testcmd', False)
+
@taskgen_method
def add_test_results(self, tup):
"""Override and return tup[1] to interrupt the build immediately if a test does not run"""
Logs.debug("ut: %r", tup)
- self.utest_result = tup
+ try:
+ self.utest_results.append(tup)
+ except AttributeError:
+ self.utest_results = [tup]
try:
self.bld.utest_results.append(tup)
except AttributeError:
if hasattr(self.generator, 'ut_run'):
return self.generator.ut_run(self)
- # TODO ut_exec, ut_fun, ut_cmd should be considered obsolete
self.ut_exec = getattr(self.generator, 'ut_exec', [self.inputs[0].abspath()])
- if getattr(self.generator, 'ut_fun', None):
- self.generator.ut_fun(self)
- testcmd = getattr(self.generator, 'ut_cmd', False) or getattr(Options.options, 'testcmd', False)
- if testcmd:
- self.ut_exec = (testcmd % ' '.join(self.ut_exec)).split(' ')
+ ut_cmd = getattr(self.generator, 'ut_cmd', False)
+ if ut_cmd:
+ self.ut_exec = shlex.split(ut_cmd % ' '.join(self.ut_exec))
return self.exec_command(self.ut_exec)
def exec_command(self, cmd, **kw):
Logs.debug('runner: %r', cmd)
if getattr(Options.options, 'dump_test_scripts', False):
- global SCRIPT_TEMPLATE
script_code = SCRIPT_TEMPLATE % {
'python': sys.executable,
'env': self.get_test_env(),
- 'cwd': self.get_cwd().abspath(), 'cmd': cmd
+ 'cwd': self.get_cwd().abspath(),
+ 'cmd': cmd
}
script_file = self.inputs[0].abspath() + '_run.py'
Utils.writef(script_file, script_code)
Logs.info('Test debug file written as %r' % script_file)
proc = Utils.subprocess.Popen(cmd, cwd=self.get_cwd().abspath(), env=self.get_test_env(),
- stderr=Utils.subprocess.PIPE, stdout=Utils.subprocess.PIPE)
+ stderr=Utils.subprocess.PIPE, stdout=Utils.subprocess.PIPE, shell=isinstance(cmd,str))
(stdout, stderr) = proc.communicate()
self.waf_unit_test_results = tup = (self.inputs[0].abspath(), proc.returncode, stdout, stderr)
testlock.acquire()
testlock.release()
def get_cwd(self):
- return self.generator.ut_cwd
+ return getattr(self.generator, 'ut_cwd', self.inputs[0].parent)
+
+ def sig_explicit_deps(self):
+ lst = [os.stat(node.abspath()).st_mtime for node in self.inputs]
+ self.m.update(str(lst))
+ return super(utest, self).sig_explicit_deps()
def summary(bld):
"""
total = len(lst)
tfail = len([x for x in lst if x[1]])
- Logs.pprint('CYAN', ' tests that pass %d/%d' % (total-tfail, total))
+ Logs.pprint('GREEN', ' tests that pass %d/%d' % (total-tfail, total))
for (f, code, out, err) in lst:
if not code:
- Logs.pprint('CYAN', ' %s' % f)
+ Logs.pprint('GREEN', ' %s' % f)
- Logs.pprint('CYAN', ' tests that fail %d/%d' % (tfail, total))
+ Logs.pprint('GREEN' if tfail == 0 else 'RED', ' tests that fail %d/%d' % (tfail, total))
for (f, code, out, err) in lst:
if code:
- Logs.pprint('CYAN', ' %s' % f)
+ Logs.pprint('RED', ' %s' % f)
def set_exit_code(bld):
"""
"""
opt.add_option('--notests', action='store_true', default=False, help='Exec no unit tests', dest='no_tests')
opt.add_option('--alltests', action='store_true', default=False, help='Exec all unit tests', dest='all_tests')
- opt.add_option('--clear-failed', action='store_true', default=False, help='Force failed unit tests to run again next time', dest='clear_failed_tests')
- opt.add_option('--testcmd', action='store', default=False,
- help = 'Run the unit tests using the test-cmd string'
- ' example "--test-cmd="valgrind --error-exitcode=1'
- ' %s" to run under valgrind', dest='testcmd')
+ opt.add_option('--clear-failed', action='store_true', default=False,
+ help='Force failed unit tests to run again next time', dest='clear_failed_tests')
+ opt.add_option('--testcmd', action='store', default=False, dest='testcmd',
+ help='Run the unit tests using the test-cmd string example "--testcmd="valgrind --error-exitcode=1 %s" to run under valgrind')
opt.add_option('--dump-test-scripts', action='store_true', default=False,
- help='Create python scripts to help debug tests', dest='dump_test_scripts')
+ help='Create python scripts to help debug tests', dest='dump_test_scripts')
+
"""
code = node.read()
if c_preproc.use_trigraphs:
- for (a, b) in c_preproc.trig_def: code = code.split(a).join(b)
+ for (a, b) in c_preproc.trig_def:
+ code = code.split(a).join(b)
code = c_preproc.re_nl.sub('', code)
code = c_preproc.re_cpp.sub(c_preproc.repl, code)
ret = []
conf.find_program('windres', var='WINRC', path_list=v.PATH)
v.WINRC_TGT_F = '-o'
v.WINRC_SRC_F = '-i'
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2006-2016 (ita)
+# Thomas Nagy, 2006-2018 (ita)
# Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009
# Michael Kuhn, 2009
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2006-2016 (ita)
+# Thomas Nagy, 2006-2018 (ita)
# Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009
# Michael Kuhn, 2009
conf.cxx_load_tools()
conf.cxx_add_flags()
conf.link_add_flags()
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2005-2016 (ita)
+# Thomas Nagy, 2005-2018 (ita)
"""
Utilities and platform-specific fixes
through Python versions 2.5 to 3.X and across different platforms (win32, linux, etc)
"""
-import atexit, os, sys, errno, traceback, inspect, re, datetime, platform, base64, signal, functools
+from __future__ import with_statement
+
+import atexit, os, sys, errno, inspect, re, datetime, platform, base64, signal, functools, time
try:
import cPickle
try:
TimeoutExpired = subprocess.TimeoutExpired
except AttributeError:
- class TimeoutExpired(object):
+ class TimeoutExpired(Exception):
pass
from collections import deque, defaultdict
pass
threading.Lock = threading.Thread = Lock
-SIG_NIL = 'SIG_NIL_SIG_NIL_'
+SIG_NIL = 'SIG_NIL_SIG_NIL_'.encode()
"""Arbitrary null value for hashes. Modify this value according to the hash function in use"""
O644 = 420
node.val = val
self.table[key] = node
+class lazy_generator(object):
+ def __init__(self, fun, params):
+ self.fun = fun
+ self.params = params
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ try:
+ it = self.it
+ except AttributeError:
+ it = self.it = self.fun(*self.params)
+ return next(it)
+
is_win32 = os.sep == '\\' or sys.platform == 'win32' # msys2
"""
Whether this system is a Windows series
"""
-def readf(fname, m='r', encoding='ISO8859-1'):
+def readf(fname, m='r', encoding='latin-1'):
"""
Reads an entire file into a string. See also :py:meth:`waflib.Node.Node.readf`::
if sys.hexversion > 0x3000000 and not 'b' in m:
m += 'b'
- f = open(fname, m)
- try:
+ with open(fname, m) as f:
txt = f.read()
- finally:
- f.close()
if encoding:
txt = txt.decode(encoding)
else:
txt = txt.decode()
else:
- f = open(fname, m)
- try:
+ with open(fname, m) as f:
txt = f.read()
- finally:
- f.close()
return txt
-def writef(fname, data, m='w', encoding='ISO8859-1'):
+def writef(fname, data, m='w', encoding='latin-1'):
"""
Writes an entire file from a string.
See also :py:meth:`waflib.Node.Node.writef`::
if sys.hexversion > 0x3000000 and not 'b' in m:
data = data.encode(encoding)
m += 'b'
- f = open(fname, m)
- try:
+ with open(fname, m) as f:
f.write(data)
- finally:
- f.close()
def h_file(fname):
"""
:return: hash of the file contents
:rtype: string or bytes
"""
- f = open(fname, 'rb')
m = md5()
- try:
+ with open(fname, 'rb') as f:
while fname:
fname = f.read(200000)
m.update(fname)
- finally:
- f.close()
return m.digest()
-def readf_win32(f, m='r', encoding='ISO8859-1'):
+def readf_win32(f, m='r', encoding='latin-1'):
flags = os.O_NOINHERIT | os.O_RDONLY
if 'b' in m:
flags |= os.O_BINARY
if sys.hexversion > 0x3000000 and not 'b' in m:
m += 'b'
- f = os.fdopen(fd, m)
- try:
+ with os.fdopen(fd, m) as f:
txt = f.read()
- finally:
- f.close()
if encoding:
txt = txt.decode(encoding)
else:
txt = txt.decode()
else:
- f = os.fdopen(fd, m)
- try:
+ with os.fdopen(fd, m) as f:
txt = f.read()
- finally:
- f.close()
return txt
-def writef_win32(f, data, m='w', encoding='ISO8859-1'):
+def writef_win32(f, data, m='w', encoding='latin-1'):
if sys.hexversion > 0x3000000 and not 'b' in m:
data = data.encode(encoding)
m += 'b'
fd = os.open(f, flags)
except OSError:
raise OSError('Cannot write to %r' % f)
- f = os.fdopen(fd, m)
- try:
+ with os.fdopen(fd, m) as f:
f.write(data)
- finally:
- f.close()
def h_file_win32(fname):
try:
fd = os.open(fname, os.O_BINARY | os.O_RDONLY | os.O_NOINHERIT)
except OSError:
raise OSError('Cannot read from %r' % fname)
- f = os.fdopen(fd, 'rb')
m = md5()
- try:
+ with os.fdopen(fd, 'rb') as f:
while fname:
fname = f.read(200000)
m.update(fname)
- finally:
- f.close()
return m.digest()
# always save these
return ret
return ver
-def ex_stack():
- """
- Extracts the stack to display exceptions. Deprecated: use traceback.format_exc()
-
- :return: a string represening the last exception
- """
- # TODO remove in waf 2.0
- return traceback.format_exc()
-
def to_list(val):
"""
Converts a string argument to a list by splitting it by spaces.
re_sp = re.compile('[/\\\\]+')
def split_path_win32(path):
if path.startswith('\\\\'):
- ret = re_sp.split(path)[2:]
- ret[0] = '\\' + ret[0]
+ ret = re_sp.split(path)[1:]
+ ret[0] = '\\\\' + ret[0]
+ if ret[0] == '\\\\?':
+ return ret[1:]
return ret
return re_sp.split(path)
msysroot = None
def split_path_msys(path):
- if path.startswith(('/', '\\')) and not path.startswith(('\\', '\\\\')):
+ if path.startswith(('/', '\\')) and not path.startswith(('//', '\\\\')):
# msys paths can be in the form /usr/bin
global msysroot
if not msysroot:
# msys has python 2.7 or 3, so we can use this
- msysroot = subprocess.check_output(['cygpath', '-w', '/']).decode(sys.stdout.encoding or 'iso8859-1')
+ msysroot = subprocess.check_output(['cygpath', '-w', '/']).decode(sys.stdout.encoding or 'latin-1')
msysroot = msysroot.strip()
path = os.path.normpath(msysroot + os.sep + path)
return split_path_win32(path)
if not os.path.isdir(path):
try:
os.makedirs(path)
- except OSError ,e:
+ except OSError as e:
if not os.path.isdir(path):
raise Errors.WafError('Cannot create the folder %r' % path, ex=e)
fu = fu.upper()
return fu
+re_sh = re.compile('\\s|\'|"')
+"""
+Regexp used for shell_escape below
+"""
+
+def shell_escape(cmd):
+ """
+ Escapes a command:
+ ['ls', '-l', 'arg space'] -> ls -l 'arg space'
+ """
+ if isinstance(cmd, str):
+ return cmd
+ return ' '.join(repr(x) if re_sh.search(x) else x for x in cmd)
+
def h_list(lst):
"""
- Hash lists. We would prefer to use hash(tup) for tuples because it is much more efficient,
- but Python now enforces hash randomization by assuming everybody is running a web application.
+ Hashes lists of ordered data.
+
+ Using hash(tup) for tuples would be much more efficient,
+ but Python now enforces hash randomization
:param lst: list to hash
:type lst: list of strings
:return: hash of the list
"""
- return md5(repr(lst)).digest()
+ return md5(repr(lst).encode()).digest()
def h_fun(fun):
"""
# or just a python function
ret = str(h_fun(ins))
if sys.hexversion > 0x3000000:
- ret = ret.encode('iso8859-1', 'xmlcharrefreplace')
+ ret = ret.encode('latin-1', 'xmlcharrefreplace')
return ret
reg_subst = re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}")
class Timer(object):
"""
Simple object for timing the execution of commands.
- Its string representation is the current time::
+ Its string representation is the duration::
from waflib.Utils import Timer
timer = Timer()
s = str(timer)
"""
def __init__(self):
- self.start_time = datetime.datetime.utcnow()
+ self.start_time = self.now()
def __str__(self):
- delta = datetime.datetime.utcnow() - self.start_time
+ delta = self.now() - self.start_time
+ if not isinstance(delta, datetime.timedelta):
+ delta = datetime.timedelta(seconds=delta)
days = delta.days
hours, rem = divmod(delta.seconds, 3600)
minutes, seconds = divmod(rem, 60)
result += '%dm' % minutes
return '%s%.3fs' % (result, seconds)
+ def now(self):
+ return datetime.datetime.utcnow()
+
+ if hasattr(time, 'perf_counter'):
+ def now(self):
+ return time.perf_counter()
+
def read_la_file(path):
"""
Reads property files, used by msvc.py
return None
try:
result = winreg.QueryValue(key, "Software\\Microsoft\\Windows\\CurrentVersion\\App Paths\\%s.exe" % filename[0])
- except WindowsError:
+ except OSError:
pass
else:
if os.path.isfile(result):
kwargs['env'] = dict(os.environ)
try:
obj = base64.b64encode(cPickle.dumps([cmd, kwargs, cargs]))
- except TypeError:
+ except (TypeError, AttributeError):
return run_regular_process(cmd, kwargs, cargs)
proc = get_process()
return run_regular_process(cmd, kwargs, cargs)
proc.stdin.write(obj)
- proc.stdin.write('\n')
+ proc.stdin.write('\n'.encode())
proc.stdin.flush()
obj = proc.stdout.readline()
if not obj:
raise OSError('Preforked sub-process %r died' % proc.pid)
process_pool.append(proc)
- ret, out, err, ex, trace = cPickle.loads(base64.b64decode(obj))
+ lst = cPickle.loads(base64.b64decode(obj))
+ # Jython wrapper failures (bash/execvp)
+ assert len(lst) == 5
+ ret, out, err, ex, trace = lst
if ex:
if ex == 'OSError':
raise OSError(trace)
out, err = (None, None)
try:
status = proc.wait(**cargs)
- except TimeoutExpired ,e:
+ except TimeoutExpired as e:
if kwargs.get('start_new_session') and hasattr(os, 'killpg'):
os.killpg(proc.pid, signal.SIGKILL)
else:
if (sys.hexversion<0x207000f and not is_win32) or sys.hexversion>=0x306000f:
atexit.register(atexit_pool)
-if sys.platform == 'cli' or not sys.executable:
+if os.environ.get('WAF_NO_PREFORK') or sys.platform == 'cli' or not sys.executable:
run_process = run_regular_process
get_process = alloc_process_pool = nada
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2005-2016 (ita)
+# Thomas Nagy, 2005-2018 (ita)
return struct.unpack("HHHH", fcntl.ioctl(FD, termios.TIOCGWINSZ, struct.pack("HHHH", 0, 0, 0, 0)))[1]
try:
fun()
- except Exception ,e:
+ except Exception as e:
pass
else:
get_term_cols = fun
+
+++ /dev/null
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2011 (ita)
-
-from waflib import Logs
-Logs.warn('This tool has been merged to the main library, remove the references to "add_objects"')
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2015 (ita)
"""
-Build as batches.
-
Instead of compiling object files one by one, c/c++ compilers are often able to compile at once:
cc -c ../file1.c ../file2.c ../file3.c
Files are output on the directory where the compiler is called, and dependencies are more difficult
to track (do not run the command on all source files if only one file changes)
-
As such, we do as if the files were compiled one by one, but no command is actually run:
replace each cc/cpp Task by a TaskSlave. A new task called TaskMaster collects the
signatures from each slave and finds out the command-line to run.
-Just import this module in the configuration (no other change required).
-This is provided as an example, for performance unity builds are recommended (fewer tasks and
-fewer jobs to execute). See waflib/extras/unity.py.
+Just import this module to start using it:
+def build(bld):
+ bld.load('batched_cc')
+
+Note that this is provided as an example, unity builds are recommended
+for best performance results (fewer tasks and fewer jobs to execute).
+See waflib/extras/unity.py.
"""
from waflib import Task, Utils
MAX_BATCH = 50
-c_str = '${CC} ${CFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED}'
+c_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${tsk.batch_incpaths()} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED} ${CPPFLAGS}'
c_fun, _ = Task.compile_fun_noshell(c_str)
-cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED}'
+cxx_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${tsk.batch_incpaths()} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED} ${CPPFLAGS}'
cxx_fun, _ = Task.compile_fun_noshell(cxx_str)
count = 70000
-class batch_task(Task.Task):
+class batch(Task.Task):
color = 'PINK'
after = ['c', 'cxx']
before = ['cprogram', 'cshlib', 'cstlib', 'cxxprogram', 'cxxshlib', 'cxxstlib']
def uid(self):
- m = Utils.md5()
- m.update(Task.Task.uid(self))
- m.update(str(self.generator.idx).encode())
- return m.digest()
+ return Utils.h_list([Task.Task.uid(self), self.generator.idx, self.generator.path.abspath(), self.generator.target])
def __str__(self):
return 'Batch compilation for %d slaves' % len(self.slaves)
return Task.SKIP_ME
+ def get_cwd(self):
+ return self.slaves[0].outputs[0].parent
+
+ def batch_incpaths(self):
+ st = self.env.CPPPATH_ST
+ return [st % node.abspath() for node in self.generator.includes_nodes]
+
def run(self):
self.outputs = []
srclst.append(t.inputs[0].abspath())
self.env.SRCLST = srclst
- self.cwd = slaves[0].outputs[0].parent.abspath()
if self.slaves[0].__class__.__name__ == 'c':
ret = c_fun(self)
setattr(t, 'run', run)
setattr(t, 'old_post_run', t.post_run)
setattr(t, 'post_run', post_run)
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011 (ita)
+
+"""
+Latex processing using "biber"
+"""
+
+import os
+from waflib import Task, Logs
+
+from waflib.Tools import tex as texmodule
+
+class tex(texmodule.tex):
+ biber_fun, _ = Task.compile_fun('${BIBER} ${BIBERFLAGS} ${SRCFILE}',shell=False)
+ biber_fun.__doc__ = """
+ Execute the program **biber**
+ """
+
+ def bibfile(self):
+ return None
+
+ def bibunits(self):
+ self.env.env = {}
+ self.env.env.update(os.environ)
+ self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
+ self.env.SRCFILE = self.aux_nodes[0].name[:-4]
+
+ if not self.env['PROMPT_LATEX']:
+ self.env.append_unique('BIBERFLAGS', '--quiet')
+
+ path = self.aux_nodes[0].abspath()[:-4] + '.bcf'
+ if os.path.isfile(path):
+ Logs.warn('calling biber')
+ self.check_status('error when calling biber, check %s.blg for errors' % (self.env.SRCFILE), self.biber_fun())
+ else:
+ super(tex, self).bibfile()
+ super(tex, self).bibunits()
+
+class latex(tex):
+ texfun, vars = Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False)
+class pdflatex(tex):
+ texfun, vars = Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False)
+class xelatex(tex):
+ texfun, vars = Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}', shell=False)
+
+def configure(self):
+ """
+ Almost the same as in tex.py, but try to detect 'biber'
+ """
+ v = self.env
+ for p in ' biber tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'.split():
+ try:
+ self.find_program(p, var=p.upper())
+ except self.errors.ConfigurationError:
+ pass
+ v['DVIPSFLAGS'] = '-Ppdf'
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# per rosengren 2011
+
+from os import sep, readlink
+from waflib import Logs
+from waflib.TaskGen import feature, after_method
+from waflib.Task import Task, always_run
+
+def options(opt):
+ grp = opt.add_option_group('Bjam Options')
+ grp.add_option('--bjam_src', default=None, help='You can find it in <boost root>/tools/jam/src')
+ grp.add_option('--bjam_uname', default='linuxx86_64', help='bjam is built in <src>/bin.<uname>/bjam')
+ grp.add_option('--bjam_config', default=None)
+ grp.add_option('--bjam_toolset', default=None)
+
+def configure(cnf):
+ if not cnf.env.BJAM_SRC:
+ cnf.env.BJAM_SRC = cnf.options.bjam_src
+ if not cnf.env.BJAM_UNAME:
+ cnf.env.BJAM_UNAME = cnf.options.bjam_uname
+ try:
+ cnf.find_program('bjam', path_list=[
+ cnf.env.BJAM_SRC + sep + 'bin.' + cnf.env.BJAM_UNAME
+ ])
+ except Exception:
+ cnf.env.BJAM = None
+ if not cnf.env.BJAM_CONFIG:
+ cnf.env.BJAM_CONFIG = cnf.options.bjam_config
+ if not cnf.env.BJAM_TOOLSET:
+ cnf.env.BJAM_TOOLSET = cnf.options.bjam_toolset
+
+@feature('bjam')
+@after_method('process_rule')
+def process_bjam(self):
+ if not self.bld.env.BJAM:
+ self.create_task('bjam_creator')
+ self.create_task('bjam_build')
+ self.create_task('bjam_installer')
+ if getattr(self, 'always', False):
+ always_run(bjam_creator)
+ always_run(bjam_build)
+ always_run(bjam_installer)
+
+class bjam_creator(Task):
+ ext_out = 'bjam_exe'
+ vars=['BJAM_SRC', 'BJAM_UNAME']
+ def run(self):
+ env = self.env
+ gen = self.generator
+ bjam = gen.bld.root.find_dir(env.BJAM_SRC)
+ if not bjam:
+ Logs.error('Can not find bjam source')
+ return -1
+ bjam_exe_relpath = 'bin.' + env.BJAM_UNAME + '/bjam'
+ bjam_exe = bjam.find_resource(bjam_exe_relpath)
+ if bjam_exe:
+ env.BJAM = bjam_exe.srcpath()
+ return 0
+ bjam_cmd = ['./build.sh']
+ Logs.debug('runner: ' + bjam.srcpath() + '> ' + str(bjam_cmd))
+ result = self.exec_command(bjam_cmd, cwd=bjam.srcpath())
+ if not result == 0:
+ Logs.error('bjam failed')
+ return -1
+ bjam_exe = bjam.find_resource(bjam_exe_relpath)
+ if bjam_exe:
+ env.BJAM = bjam_exe.srcpath()
+ return 0
+ Logs.error('bjam failed')
+ return -1
+
+class bjam_build(Task):
+ ext_in = 'bjam_exe'
+ ext_out = 'install'
+ vars = ['BJAM_TOOLSET']
+ def run(self):
+ env = self.env
+ gen = self.generator
+ path = gen.path
+ bld = gen.bld
+ if hasattr(gen, 'root'):
+ build_root = path.find_node(gen.root)
+ else:
+ build_root = path
+ jam = bld.srcnode.find_resource(env.BJAM_CONFIG)
+ if jam:
+ Logs.debug('bjam: Using jam configuration from ' + jam.srcpath())
+ jam_rel = jam.relpath_gen(build_root)
+ else:
+ Logs.warn('No build configuration in build_config/user-config.jam. Using default')
+ jam_rel = None
+ bjam_exe = bld.srcnode.find_node(env.BJAM)
+ if not bjam_exe:
+ Logs.error('env.BJAM is not set')
+ return -1
+ bjam_exe_rel = bjam_exe.relpath_gen(build_root)
+ cmd = ([bjam_exe_rel] +
+ (['--user-config=' + jam_rel] if jam_rel else []) +
+ ['--stagedir=' + path.get_bld().path_from(build_root)] +
+ ['--debug-configuration'] +
+ ['--with-' + lib for lib in self.generator.target] +
+ (['toolset=' + env.BJAM_TOOLSET] if env.BJAM_TOOLSET else []) +
+ ['link=' + 'shared'] +
+ ['variant=' + 'release']
+ )
+ Logs.debug('runner: ' + build_root.srcpath() + '> ' + str(cmd))
+ ret = self.exec_command(cmd, cwd=build_root.srcpath())
+ if ret != 0:
+ return ret
+ self.set_outputs(path.get_bld().ant_glob('lib/*') + path.get_bld().ant_glob('bin/*'))
+ return 0
+
+class bjam_installer(Task):
+ ext_in = 'install'
+ def run(self):
+ gen = self.generator
+ path = gen.path
+ for idir, pat in (('${LIBDIR}', 'lib/*'), ('${BINDIR}', 'bin/*')):
+ files = []
+ for n in path.get_bld().ant_glob(pat):
+ try:
+ t = readlink(n.srcpath())
+ gen.bld.symlink_as(sep.join([idir, n.name]), t, postpone=False)
+ except OSError:
+ files.append(n)
+ gen.bld.install_files(idir, files, postpone=False)
+ return 0
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Michal Proszek, 2014 (poxip)
+
+"""
+Detect the version of Blender, path
+and install the extension:
+
+ def options(opt):
+ opt.load('blender')
+ def configure(cnf):
+ cnf.load('blender')
+ def build(bld):
+ bld(name='io_mesh_raw',
+ feature='blender',
+ files=['file1.py', 'file2.py']
+ )
+If the name variable is empty, files are installed in scripts/addons, otherwise in scripts/addons/name
+Use ./waf configure --system to set the installation directory to system path
+"""
+import os
+import re
+from getpass import getuser
+
+from waflib import Utils
+from waflib.TaskGen import feature
+from waflib.Configure import conf
+
+def options(opt):
+ opt.add_option(
+ '-s', '--system',
+ dest='directory_system',
+ default=False,
+ action='store_true',
+ help='determines installation directory (default: user)'
+ )
+
+@conf
+def find_blender(ctx):
+	'''Return the version number of blender; if it cannot be found, return None'''
+ blender = ctx.find_program('blender')
+ output = ctx.cmd_and_log(blender + ['--version'])
+ m = re.search(r'Blender\s*((\d+(\.|))*)', output)
+ if not m:
+ ctx.fatal('Could not retrieve blender version')
+
+ try:
+ blender_version = m.group(1)
+ except IndexError:
+ ctx.fatal('Could not retrieve blender version')
+
+ ctx.env['BLENDER_VERSION'] = blender_version
+ return blender
+
+@conf
+def configure_paths(ctx):
+ """Setup blender paths"""
+ # Get the username
+ user = getuser()
+ _platform = Utils.unversioned_sys_platform()
+ config_path = {'user': '', 'system': ''}
+ if _platform.startswith('linux'):
+ config_path['user'] = '/home/%s/.config/blender/' % user
+ config_path['system'] = '/usr/share/blender/'
+ elif _platform == 'darwin':
+ # MAC OS X
+ config_path['user'] = \
+ '/Users/%s/Library/Application Support/Blender/' % user
+ config_path['system'] = '/Library/Application Support/Blender/'
+ elif Utils.is_win32:
+ # Windows
+ appdata_path = ctx.getenv('APPDATA').replace('\\', '/')
+ homedrive = ctx.getenv('HOMEDRIVE').replace('\\', '/')
+
+ config_path['user'] = '%s/Blender Foundation/Blender/' % appdata_path
+ config_path['system'] = \
+ '%sAll Users/AppData/Roaming/Blender Foundation/Blender/' % homedrive
+ else:
+ ctx.fatal(
+ 'Unsupported platform. '
+ 'Available platforms: Linux, OSX, MS-Windows.'
+ )
+
+ blender_version = ctx.env['BLENDER_VERSION']
+
+ config_path['user'] += blender_version + '/'
+ config_path['system'] += blender_version + '/'
+
+ ctx.env['BLENDER_CONFIG_DIR'] = os.path.abspath(config_path['user'])
+ if ctx.options.directory_system:
+ ctx.env['BLENDER_CONFIG_DIR'] = config_path['system']
+
+ ctx.env['BLENDER_ADDONS_DIR'] = os.path.join(
+ ctx.env['BLENDER_CONFIG_DIR'], 'scripts/addons'
+ )
+ Utils.check_dir(ctx.env['BLENDER_ADDONS_DIR'])
+
+def configure(ctx):
+ ctx.find_blender()
+ ctx.configure_paths()
+
+@feature('blender_list')
+def blender(self):
+ # Two ways to install a blender extension: as a module or just .py files
+ dest_dir = os.path.join(self.env.BLENDER_ADDONS_DIR, self.get_name())
+ Utils.check_dir(dest_dir)
+ self.add_install_files(install_to=dest_dir, install_from=getattr(self, 'files', '.'))
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# Yannick LM 2011
+
+"""
+Support for the boo programming language, for example::
+
+ bld(features = "boo", # necessary feature
+ source = "src.boo", # list of boo files
+ gen = "world.dll", # target
+ type = "library", # library/exe ("-target:xyz" flag)
+ name = "world" # necessary if the target is referenced by 'use'
+ )
+"""
+
+from waflib import Task
+from waflib.Configure import conf
+from waflib.TaskGen import feature, after_method, before_method, extension
+
+@extension('.boo')
+def boo_hook(self, node):
+ # Nothing here yet ...
+ # TODO filter the non-boo source files in 'apply_booc' and remove this method
+ pass
+
+@feature('boo')
+@before_method('process_source')
+def apply_booc(self):
+ """Create a booc task """
+ src_nodes = self.to_nodes(self.source)
+ out_node = self.path.find_or_declare(self.gen)
+
+ self.boo_task = self.create_task('booc', src_nodes, [out_node])
+
+ # Set variables used by the 'booc' task
+ self.boo_task.env.OUT = '-o:%s' % out_node.abspath()
+
+ # type is "exe" by default
+ type = getattr(self, "type", "exe")
+ self.boo_task.env.BOO_TARGET_TYPE = "-target:%s" % type
+
+@feature('boo')
+@after_method('apply_boo')
+def use_boo(self):
+ """"
+ boo applications honor the **use** keyword::
+ """
+ dep_names = self.to_list(getattr(self, 'use', []))
+ for dep_name in dep_names:
+ dep_task_gen = self.bld.get_tgen_by_name(dep_name)
+ if not dep_task_gen:
+ continue
+ dep_task_gen.post()
+ dep_task = getattr(dep_task_gen, 'boo_task', None)
+ if not dep_task:
+ # Try a cs task:
+ dep_task = getattr(dep_task_gen, 'cs_task', None)
+ if not dep_task:
+ # Try a link task:
+ dep_task = getattr(dep_task, 'link_task', None)
+ if not dep_task:
+ # Abort ...
+ continue
+ self.boo_task.set_run_after(dep_task) # order
+ self.boo_task.dep_nodes.extend(dep_task.outputs) # dependency
+ self.boo_task.env.append_value('BOO_FLAGS', '-reference:%s' % dep_task.outputs[0].abspath())
+
+class booc(Task.Task):
+ """Compiles .boo files """
+ color = 'YELLOW'
+ run_str = '${BOOC} ${BOO_FLAGS} ${BOO_TARGET_TYPE} ${OUT} ${SRC}'
+
+@conf
+def check_booc(self):
+ self.find_program('booc', 'BOOC')
+ self.env.BOO_FLAGS = ['-nologo']
+
+def configure(self):
+ """Check that booc is available """
+ self.check_booc()
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+#
+# partially based on boost.py written by Gernot Vormayr
+# written by Ruediger Sonderfeld <ruediger@c-plusplus.de>, 2008
+# modified by Bjoern Michaelsen, 2008
+# modified by Luca Fossati, 2008
+# rewritten for waf 1.5.1, Thomas Nagy, 2008
+# rewritten for waf 1.6.2, Sylvain Rouquette, 2011
+
+'''
+
+This is an extra tool, not bundled with the default waf binary.
+To add the boost tool to the waf file:
+$ ./waf-light --tools=compat15,boost
+ or, if you have waf >= 1.6.2
+$ ./waf update --files=boost
+
+When using this tool, the wscript will look like:
+
+ def options(opt):
+ opt.load('compiler_cxx boost')
+
+ def configure(conf):
+ conf.load('compiler_cxx boost')
+ conf.check_boost(lib='system filesystem')
+
+ def build(bld):
+ bld(source='main.cpp', target='app', use='BOOST')
+
+Options are generated, in order to specify the location of boost includes/libraries.
+The `check_boost` configuration function allows to specify the used boost libraries.
+It can also provide default arguments to the --boost-mt command-line arguments.
+Everything will be packaged together in a BOOST component that you can use.
+
+When using MSVC, a lot of compilation flags need to match your BOOST build configuration:
+ - you may have to add /EHsc to your CXXFLAGS or define boost::throw_exception if BOOST_NO_EXCEPTIONS is defined.
+ Errors: C4530
+ - boost libraries will try to be smart and use the (pretty but often not useful) auto-linking feature of MSVC
+   So before calling `conf.check_boost` you might want to disable it by adding
+ conf.env.DEFINES_BOOST += ['BOOST_ALL_NO_LIB']
+ Errors:
+ - boost might also be compiled with /MT, which links the runtime statically.
+ If you have problems with redefined symbols,
+ self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']
+ self.env['CXXFLAGS_%s' % var] += ['/MD', '/EHsc']
+Passing `--boost-linkage_autodetect` might help ensuring having a correct linkage in some basic cases.
+
+'''
+
+import sys
+import re
+from waflib import Utils, Logs, Errors
+from waflib.Configure import conf
+from waflib.TaskGen import feature, after_method
+
+BOOST_LIBS = ['/usr/lib/x86_64-linux-gnu', '/usr/lib/i386-linux-gnu',
+ '/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib']
+BOOST_INCLUDES = ['/usr/include', '/usr/local/include', '/opt/local/include', '/sw/include']
+BOOST_VERSION_FILE = 'boost/version.hpp'
+BOOST_VERSION_CODE = '''
+#include <iostream>
+#include <boost/version.hpp>
+int main() { std::cout << BOOST_LIB_VERSION << ":" << BOOST_VERSION << std::endl; }
+'''
+
+BOOST_ERROR_CODE = '''
+#include <boost/system/error_code.hpp>
+int main() { boost::system::error_code c; }
+'''
+
+BOOST_THREAD_CODE = '''
+#include <boost/thread.hpp>
+int main() { boost::thread t; }
+'''
+
+BOOST_LOG_CODE = '''
+#include <boost/log/trivial.hpp>
+#include <boost/log/utility/setup/console.hpp>
+#include <boost/log/utility/setup/common_attributes.hpp>
+int main() {
+ using namespace boost::log;
+ add_common_attributes();
+ add_console_log(std::clog, keywords::format = "%Message%");
+ BOOST_LOG_TRIVIAL(debug) << "log is working" << std::endl;
+}
+'''
+
+# toolsets from {boost_dir}/tools/build/v2/tools/common.jam
+PLATFORM = Utils.unversioned_sys_platform()
+detect_intel = lambda env: (PLATFORM == 'win32') and 'iw' or 'il'
+detect_clang = lambda env: (PLATFORM == 'darwin') and 'clang-darwin' or 'clang'
+detect_mingw = lambda env: (re.search('MinGW', env.CXX[0])) and 'mgw' or 'gcc'
+BOOST_TOOLSETS = {
+ 'borland': 'bcb',
+ 'clang': detect_clang,
+ 'como': 'como',
+ 'cw': 'cw',
+ 'darwin': 'xgcc',
+ 'edg': 'edg',
+ 'g++': detect_mingw,
+ 'gcc': detect_mingw,
+ 'icpc': detect_intel,
+ 'intel': detect_intel,
+ 'kcc': 'kcc',
+ 'kylix': 'bck',
+ 'mipspro': 'mp',
+ 'mingw': 'mgw',
+ 'msvc': 'vc',
+ 'qcc': 'qcc',
+ 'sun': 'sw',
+ 'sunc++': 'sw',
+ 'tru64cxx': 'tru',
+ 'vacpp': 'xlc'
+}
+
+
+def options(opt):
+ opt = opt.add_option_group('Boost Options')
+ opt.add_option('--boost-includes', type='string',
+ default='', dest='boost_includes',
+ help='''path to the directory where the boost includes are,
+ e.g., /path/to/boost_1_55_0/stage/include''')
+ opt.add_option('--boost-libs', type='string',
+ default='', dest='boost_libs',
+ help='''path to the directory where the boost libs are,
+ e.g., path/to/boost_1_55_0/stage/lib''')
+ opt.add_option('--boost-mt', action='store_true',
+ default=False, dest='boost_mt',
+ help='select multi-threaded libraries')
+ opt.add_option('--boost-abi', type='string', default='', dest='boost_abi',
+ help='''select libraries with tags (gd for debug, static is automatically added),
+ see doc Boost, Getting Started, chapter 6.1''')
+ opt.add_option('--boost-linkage_autodetect', action="store_true", dest='boost_linkage_autodetect',
+ help="auto-detect boost linkage options (don't get used to it / might break other stuff)")
+ opt.add_option('--boost-toolset', type='string',
+ default='', dest='boost_toolset',
+ help='force a toolset e.g. msvc, vc90, \
+ gcc, mingw, mgw45 (default: auto)')
+ py_version = '%d%d' % (sys.version_info[0], sys.version_info[1])
+ opt.add_option('--boost-python', type='string',
+ default=py_version, dest='boost_python',
+ help='select the lib python with this version \
+ (default: %s)' % py_version)
+
+
+@conf
+def __boost_get_version_file(self, d):
+ if not d:
+ return None
+ dnode = self.root.find_dir(d)
+ if dnode:
+ return dnode.find_node(BOOST_VERSION_FILE)
+ return None
+
+@conf
+def boost_get_version(self, d):
+ """silently retrieve the boost version number"""
+ node = self.__boost_get_version_file(d)
+ if node:
+ try:
+ txt = node.read()
+ except EnvironmentError:
+ Logs.error("Could not read the file %r", node.abspath())
+ else:
+ re_but1 = re.compile('^#define\\s+BOOST_LIB_VERSION\\s+"(.+)"', re.M)
+ m1 = re_but1.search(txt)
+ re_but2 = re.compile('^#define\\s+BOOST_VERSION\\s+(\\d+)', re.M)
+ m2 = re_but2.search(txt)
+ if m1 and m2:
+ return (m1.group(1), m2.group(1))
+ return self.check_cxx(fragment=BOOST_VERSION_CODE, includes=[d], execute=True, define_ret=True).split(":")
+
+@conf
+def boost_get_includes(self, *k, **kw):
+ includes = k and k[0] or kw.get('includes')
+ if includes and self.__boost_get_version_file(includes):
+ return includes
+ for d in self.environ.get('INCLUDE', '').split(';') + BOOST_INCLUDES:
+ if self.__boost_get_version_file(d):
+ return d
+ if includes:
+ self.end_msg('headers not found in %s' % includes)
+ self.fatal('The configuration failed')
+ else:
+ self.end_msg('headers not found, please provide a --boost-includes argument (see help)')
+ self.fatal('The configuration failed')
+
+
+@conf
+def boost_get_toolset(self, cc):
+ toolset = cc
+ if not cc:
+ build_platform = Utils.unversioned_sys_platform()
+ if build_platform in BOOST_TOOLSETS:
+ cc = build_platform
+ else:
+ cc = self.env.CXX_NAME
+ if cc in BOOST_TOOLSETS:
+ toolset = BOOST_TOOLSETS[cc]
+ return isinstance(toolset, str) and toolset or toolset(self.env)
+
+
+@conf
+def __boost_get_libs_path(self, *k, **kw):
+ ''' return the lib path and all the files in it '''
+ if 'files' in kw:
+ return self.root.find_dir('.'), Utils.to_list(kw['files'])
+ libs = k and k[0] or kw.get('libs')
+ if libs:
+ path = self.root.find_dir(libs)
+ files = path.ant_glob('*boost_*')
+ if not libs or not files:
+ for d in self.environ.get('LIB', '').split(';') + BOOST_LIBS:
+ if not d:
+ continue
+ path = self.root.find_dir(d)
+ if path:
+ files = path.ant_glob('*boost_*')
+ if files:
+ break
+ path = self.root.find_dir(d + '64')
+ if path:
+ files = path.ant_glob('*boost_*')
+ if files:
+ break
+ if not path:
+ if libs:
+ self.end_msg('libs not found in %s' % libs)
+ self.fatal('The configuration failed')
+ else:
+ self.end_msg('libs not found, please provide a --boost-libs argument (see help)')
+ self.fatal('The configuration failed')
+
+ self.to_log('Found the boost path in %r with the libraries:' % path)
+ for x in files:
+ self.to_log(' %r' % x)
+ return path, files
+
+@conf
+def boost_get_libs(self, *k, **kw):
+ '''
+ return the lib path and the required libs
+ according to the parameters
+ '''
+ path, files = self.__boost_get_libs_path(**kw)
+ files = sorted(files, key=lambda f: (len(f.name), f.name), reverse=True)
+ toolset = self.boost_get_toolset(kw.get('toolset', ''))
+ toolset_pat = '(-%s[0-9]{0,3})' % toolset
+ version = '-%s' % self.env.BOOST_VERSION
+
+ def find_lib(re_lib, files):
+ for file in files:
+ if re_lib.search(file.name):
+ self.to_log('Found boost lib %s' % file)
+ return file
+ return None
+
+ def format_lib_name(name):
+ if name.startswith('lib') and self.env.CC_NAME != 'msvc':
+ name = name[3:]
+ return name[:name.rfind('.')]
+
+ def match_libs(lib_names, is_static):
+ libs = []
+ lib_names = Utils.to_list(lib_names)
+ if not lib_names:
+ return libs
+ t = []
+ if kw.get('mt', False):
+ t.append('-mt')
+ if kw.get('abi'):
+ t.append('%s%s' % (is_static and '-s' or '-', kw['abi']))
+ elif is_static:
+ t.append('-s')
+ tags_pat = t and ''.join(t) or ''
+ ext = is_static and self.env.cxxstlib_PATTERN or self.env.cxxshlib_PATTERN
+ ext = ext.partition('%s')[2] # remove '%s' or 'lib%s' from PATTERN
+
+ for lib in lib_names:
+ if lib == 'python':
+ # for instance, with python='27',
+ # accepts '-py27', '-py2', '27', '-2.7' and '2'
+ # but will reject '-py3', '-py26', '26' and '3'
+ tags = '({0})?((-py{2})|(-py{1}(?=[^0-9]))|({2})|(-{1}.{3})|({1}(?=[^0-9]))|(?=[^0-9])(?!-py))'.format(tags_pat, kw['python'][0], kw['python'], kw['python'][1])
+ else:
+ tags = tags_pat
+ # Trying libraries, from most strict match to least one
+ for pattern in ['boost_%s%s%s%s%s$' % (lib, toolset_pat, tags, version, ext),
+ 'boost_%s%s%s%s$' % (lib, tags, version, ext),
+ # Give up trying to find the right version
+ 'boost_%s%s%s%s$' % (lib, toolset_pat, tags, ext),
+ 'boost_%s%s%s$' % (lib, tags, ext),
+ 'boost_%s%s$' % (lib, ext),
+ 'boost_%s' % lib]:
+ self.to_log('Trying pattern %s' % pattern)
+ file = find_lib(re.compile(pattern), files)
+ if file:
+ libs.append(format_lib_name(file.name))
+ break
+ else:
+ self.end_msg('lib %s not found in %s' % (lib, path.abspath()))
+ self.fatal('The configuration failed')
+ return libs
+
+ return path.abspath(), match_libs(kw.get('lib'), False), match_libs(kw.get('stlib'), True)
+
+
+@conf
+def check_boost(self, *k, **kw):
+ """
+ Initialize boost libraries to be used.
+
+ Keywords: you can pass the same parameters as with the command line (without "--boost-").
+ Note that the command line has the priority, and should preferably be used.
+ """
+ if not self.env['CXX']:
+ self.fatal('load a c++ compiler first, conf.load("compiler_cxx")')
+
+ params = {
+ 'lib': k and k[0] or kw.get('lib'),
+ 'stlib': kw.get('stlib')
+ }
+ for key, value in self.options.__dict__.items():
+ if not key.startswith('boost_'):
+ continue
+ key = key[len('boost_'):]
+ params[key] = value and value or kw.get(key, '')
+
+ var = kw.get('uselib_store', 'BOOST')
+
+ self.start_msg('Checking boost includes')
+ self.env['INCLUDES_%s' % var] = inc = self.boost_get_includes(**params)
+ versions = self.boost_get_version(inc)
+ self.env.BOOST_VERSION = versions[0]
+ self.env.BOOST_VERSION_NUMBER = int(versions[1])
+ self.end_msg("%d.%d.%d" % (int(versions[1]) / 100000,
+ int(versions[1]) / 100 % 1000,
+ int(versions[1]) % 100))
+ if Logs.verbose:
+ Logs.pprint('CYAN', ' path : %s' % self.env['INCLUDES_%s' % var])
+
+ if not params['lib'] and not params['stlib']:
+ return
+ if 'static' in kw or 'static' in params:
+ Logs.warn('boost: static parameter is deprecated, use stlib instead.')
+ self.start_msg('Checking boost libs')
+ path, libs, stlibs = self.boost_get_libs(**params)
+ self.env['LIBPATH_%s' % var] = [path]
+ self.env['STLIBPATH_%s' % var] = [path]
+ self.env['LIB_%s' % var] = libs
+ self.env['STLIB_%s' % var] = stlibs
+ self.end_msg('ok')
+ if Logs.verbose:
+ Logs.pprint('CYAN', ' path : %s' % path)
+ Logs.pprint('CYAN', ' shared libs : %s' % libs)
+ Logs.pprint('CYAN', ' static libs : %s' % stlibs)
+
+
+ def try_link():
+ if (params['lib'] and 'system' in params['lib']) or \
+ params['stlib'] and 'system' in params['stlib']:
+ self.check_cxx(fragment=BOOST_ERROR_CODE, use=var, execute=False)
+ if (params['lib'] and 'thread' in params['lib']) or \
+ params['stlib'] and 'thread' in params['stlib']:
+ self.check_cxx(fragment=BOOST_THREAD_CODE, use=var, execute=False)
+
+ def is_log_mt():
+ '''Check if found boost_log library is multithread-safe'''
+ for lib in libs:
+ if lib.startswith('boost_log'):
+ lib_log = lib
+ break
+ return '-mt' in lib_log
+
+ if params['lib'] and 'log' in params['lib']:
+ self.env['DEFINES_%s' % var] += ['BOOST_LOG_DYN_LINK']
+ if not is_log_mt():
+ self.env['DEFINES_%s' % var] += ['BOOST_LOG_NO_THREADS']
+ self.check_cxx(fragment=BOOST_LOG_CODE, use=var, execute=False)
+ if params['stlib'] and 'log' in params['stlib']:
+ # Static linking is assumed by default
+ if not is_log_mt():
+ self.env['DEFINES_%s' % var] += ['BOOST_LOG_NO_THREADS']
+ self.check_cxx(fragment=BOOST_LOG_CODE, use=var, execute=False)
+
+ if params.get('linkage_autodetect', False):
+ self.start_msg("Attempting to detect boost linkage flags")
+ toolset = self.boost_get_toolset(kw.get('toolset', ''))
+ if toolset in ('vc',):
+ # disable auto-linking feature, causing error LNK1181
+ # because the code wants to be linked against
+ self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']
+
+ # if no dlls are present, we guess the .lib files are not stubs
+ has_dlls = False
+ for x in Utils.listdir(path):
+ if x.endswith(self.env.cxxshlib_PATTERN % ''):
+ has_dlls = True
+ break
+ if not has_dlls:
+ self.env['STLIBPATH_%s' % var] = [path]
+ self.env['STLIB_%s' % var] = libs
+ del self.env['LIB_%s' % var]
+ del self.env['LIBPATH_%s' % var]
+
+ # we attempt to play with some known-to-work CXXFLAGS combinations
+ for cxxflags in (['/MD', '/EHsc'], []):
+ self.env.stash()
+ self.env["CXXFLAGS_%s" % var] += cxxflags
+ try:
+ try_link()
+ except Errors.ConfigurationError as e:
+ self.env.revert()
+ exc = e
+ else:
+ self.end_msg("ok: winning cxxflags combination: %s" % (self.env["CXXFLAGS_%s" % var]))
+ exc = None
+ self.env.commit()
+ break
+
+ if exc is not None:
+ self.end_msg("Could not auto-detect boost linking flags combination, you may report it to boost.py author", ex=exc)
+ self.fatal('The configuration failed')
+ else:
+ self.end_msg("Boost linkage flags auto-detection not implemented (needed ?) for this toolchain")
+ self.fatal('The configuration failed')
+ else:
+ self.start_msg('Checking for boost linkage')
+ try:
+ try_link()
+ except Errors.ConfigurationError as e:
+ self.end_msg("Could not link against boost libraries using supplied options")
+ self.fatal('The configuration failed')
+ self.end_msg('ok')
+
+
+@feature('cxx')
+@after_method('apply_link')
+def install_boost(self):
+ if install_boost.done or not Utils.is_win32 or not self.bld.cmd.startswith('install'):
+ return
+ install_boost.done = True
+ inst_to = getattr(self, 'install_path', '${BINDIR}')
+ for lib in self.env.LIB_BOOST:
+ try:
+ file = self.bld.find_file(self.env.cxxshlib_PATTERN % lib, self.env.LIBPATH_BOOST)
+ self.add_install_files(install_to=inst_to, install_from=self.bld.root.find_node(file))
+ except:
+ continue
+install_boost.done = False
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2015
Note that there is a variety of ways to implement this, one may want use timestamps on source files too for example,
or one may want to hash the files in the source directory only under certain conditions (md5_tstamp tool)
-or to hash the file in the build directory with its timestamp (similar to 'update_outputs')
+or to hash the file in the build directory with its timestamp
"""
import os
from waflib import Node, Utils
def get_bld_sig(self):
+ if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode:
+ return Utils.h_file(self.abspath())
+
try:
- return self.cache_sig
+ # add the creation time to the signature
+ return self.sig + str(os.stat(self.abspath()).st_mtime)
except AttributeError:
- pass
-
- if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode:
- self.sig = Utils.h_file(self.abspath())
- self.cache_sig = ret = self.sig
- else:
- # add the
- self.cache_sig = ret = self.sig + str(os.stat(self.abspath()).st_mtime)
- return ret
+ return None
Node.Node.get_bld_sig = get_bld_sig
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2013 (ita)
up = os.path.dirname(Context.g_module.__file__)
except AttributeError:
up = '.'
-LOGFILE = os.path.join(up, 'logs', '%s.log' % time.strftime('%Y_%m_%d_%H_%M'))
+LOGFILE = os.path.join(up, 'logs', time.strftime('%Y_%m_%d_%H_%M.log'))
wlock = threading.Lock()
class log_to_file(object):
fileobj.close()
filename = sys.stdout.filename
- Logs.info('Output logged to %r' % filename)
+ Logs.info('Output logged to %r', filename)
# then copy the log file to "latest.log" if possible
up = os.path.dirname(os.path.abspath(filename))
shutil.copy(filename, os.path.join(up, 'latest.log'))
except OSError:
# this may fail on windows due to processes spawned
- #
pass
atexit.register(exit_cleanup)
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# Calle Rosenquist, 2017 (xbreak)
+"""
+Create task that copies source files to the associated build node.
+This is useful to e.g. construct a complete Python package so it can be unit tested
+without installation.
+
+Source files to be copied can be specified either in `buildcopy_source` attribute, or
+`source` attribute. If both are specified `buildcopy_source` has priority.
+
+Examples::
+
+ def build(bld):
+ bld(name = 'bar',
+ features = 'py buildcopy',
+ source = bld.path.ant_glob('src/bar/*.py'))
+
+ bld(name = 'py baz',
+ features = 'buildcopy',
+ buildcopy_source = bld.path.ant_glob('src/bar/*.py') + ['src/bar/resource.txt'])
+
+"""
+import os, shutil
+from waflib import Errors, Task, TaskGen, Utils, Node
+
+@TaskGen.before_method('process_source')
+@TaskGen.feature('buildcopy')
+def make_buildcopy(self):
+ """
+ Creates the buildcopy task.
+ """
+ def to_src_nodes(lst):
+ """Find file nodes only in src, TaskGen.to_nodes will not work for this since it gives
+ preference to nodes in build.
+ """
+ if isinstance(lst, Node.Node):
+ if not lst.is_src():
+ raise Errors.WafError('buildcopy: node %s is not in src'%lst)
+ if not os.path.isfile(lst.abspath()):
+ raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%lst)
+ return lst
+
+ if isinstance(lst, str):
+ lst = [x for x in Utils.split_path(lst) if x and x != '.']
+
+ node = self.bld.path.get_src().search_node(lst)
+ if node:
+ if not os.path.isfile(node.abspath()):
+ raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%node)
+ return node
+
+ node = self.bld.path.get_src().find_node(lst)
+ if node:
+ if not os.path.isfile(node.abspath()):
+ raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%node)
+ return node
+ raise Errors.WafError('buildcopy: File not found in src: %s'%os.path.join(*lst))
+
+ nodes = [ to_src_nodes(n) for n in getattr(self, 'buildcopy_source', getattr(self, 'source', [])) ]
+ node_pairs = [(n, n.get_bld()) for n in nodes]
+ self.create_task('buildcopy', [n[0] for n in node_pairs], [n[1] for n in node_pairs], node_pairs=node_pairs)
+
+
+class buildcopy(Task.Task):
+ """
+ Copy for each pair `n` in `node_pairs`: n[0] -> n[1].
+
+ Attribute `node_pairs` should contain a list of tuples describing source and target:
+
+ node_pairs = [(in, out), ...]
+
+ """
+ color = 'PINK'
+
+ def keyword(self):
+ return 'Copying'
+
+ def run(self):
+ for f,t in self.node_pairs:
+ t.parent.mkdir()
+ shutil.copy2(f.abspath(), t.abspath())
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#! /usr/bin/env python
# encoding: utf-8
# harald at klimachs.de
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
def lines_includes(node):
code = node.read()
if c_preproc.use_trigraphs:
- for (a, b) in c_preproc.trig_def: code = code.split(a).join(b)
+ for (a, b) in c_preproc.trig_def:
+ code = code.split(a).join(b)
code = c_preproc.re_nl.sub('', code)
code = c_preproc.re_cpp.sub(c_preproc.repl, code)
return [(m.group(2), m.group(3)) for m in re.finditer(re_inc, code)]
self.tryfind(y)
c_preproc.c_parser = dumb_parser
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# -*- coding: utf-8 vi:ts=4:noexpandtab
conf.fatal('Could not determine emscripten version %r: %s' % (cmd, e))
if not isinstance(out, str):
- out = out.decode(sys.stdout.encoding or 'iso8859-1')
+ out = out.decode(sys.stdout.encoding or 'latin-1')
k = {}
out = out.splitlines()
conf.env.ARFLAGS = ['rcs']
conf.env.cshlib_PATTERN = '%s.js'
conf.env.cxxshlib_PATTERN = '%s.js'
- conf.env.cstlib_PATTERN = '%s.bc'
- conf.env.cxxstlib_PATTERN = '%s.bc'
+ conf.env.cstlib_PATTERN = '%s.a'
+ conf.env.cxxstlib_PATTERN = '%s.a'
conf.env.cprogram_PATTERN = '%s.html'
conf.env.cxxprogram_PATTERN = '%s.html'
+ conf.env.CXX_TGT_F = ['-c', '-o', '']
+ conf.env.CC_TGT_F = ['-c', '-o', '']
+ conf.env.CXXLNK_TGT_F = ['-o', '']
+ conf.env.CCLNK_TGT_F = ['-o', '']
conf.env.append_value('LINKFLAGS',['-Wl,--enable-auto-import'])
-
-@feature('c', 'cxx', 'acm', 'includes')
-@after_method('propagate_uselib_vars', 'process_source', 'apply_incpaths')
-def apply_incpaths_emscripten(self):
- """
- Emscripten doesn't like absolute include paths
- """
- # TODO: in waf 1.9 we can switch back to bldnode as the default since path_from handles cross-drive paths
- if self.env.CC_NAME != 'emscripten' or self.env.CC_NAME != 'emscripten':
- return
- lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env['INCLUDES'])
- self.includes_nodes = lst
- self.env['INCPATHS'] = [x.path_from(self.bld.bldnode) for x in lst]
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#! /usr/bin/env python
# encoding: utf-8
# harald at klimachs.de
@conf
def get_sxc_version(conf, fc):
- version_re = re.compile(r"C\+\+/SX\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
- cmd = fc + ['-V']
- p = Utils.subprocess.Popen(cmd, stdin=False, stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE, env=None)
- out, err = p.communicate()
+ version_re = re.compile(r"C\+\+/SX\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
+ cmd = fc + ['-V']
+ p = Utils.subprocess.Popen(cmd, stdin=False, stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE, env=None)
+ out, err = p.communicate()
- if out: match = version_re(out)
- else: match = version_re(err)
- if not match:
- conf.fatal('Could not determine the NEC C compiler version.')
- k = match.groupdict()
- conf.env['C_VERSION'] = (k['major'], k['minor'])
+ if out:
+ match = version_re(out)
+ else:
+ match = version_re(err)
+ if not match:
+ conf.fatal('Could not determine the NEC C compiler version.')
+ k = match.groupdict()
+ conf.env['C_VERSION'] = (k['major'], k['minor'])
@conf
def sxc_common_flags(conf):
- v=conf.env
- v['CC_SRC_F']=[]
- v['CC_TGT_F']=['-c','-o']
- if not v['LINK_CC']:v['LINK_CC']=v['CC']
- v['CCLNK_SRC_F']=[]
- v['CCLNK_TGT_F']=['-o']
- v['CPPPATH_ST']='-I%s'
- v['DEFINES_ST']='-D%s'
- v['LIB_ST']='-l%s'
- v['LIBPATH_ST']='-L%s'
- v['STLIB_ST']='-l%s'
- v['STLIBPATH_ST']='-L%s'
- v['RPATH_ST']=''
- v['SONAME_ST']=[]
- v['SHLIB_MARKER']=[]
- v['STLIB_MARKER']=[]
- v['LINKFLAGS_cprogram']=['']
- v['cprogram_PATTERN']='%s'
- v['CFLAGS_cshlib']=['-fPIC']
- v['LINKFLAGS_cshlib']=['']
- v['cshlib_PATTERN']='lib%s.so'
- v['LINKFLAGS_cstlib']=[]
- v['cstlib_PATTERN']='lib%s.a'
+ v=conf.env
+ v['CC_SRC_F']=[]
+ v['CC_TGT_F']=['-c','-o']
+ if not v['LINK_CC']:
+ v['LINK_CC']=v['CC']
+ v['CCLNK_SRC_F']=[]
+ v['CCLNK_TGT_F']=['-o']
+ v['CPPPATH_ST']='-I%s'
+ v['DEFINES_ST']='-D%s'
+ v['LIB_ST']='-l%s'
+ v['LIBPATH_ST']='-L%s'
+ v['STLIB_ST']='-l%s'
+ v['STLIBPATH_ST']='-L%s'
+ v['RPATH_ST']=''
+ v['SONAME_ST']=[]
+ v['SHLIB_MARKER']=[]
+ v['STLIB_MARKER']=[]
+ v['LINKFLAGS_cprogram']=['']
+ v['cprogram_PATTERN']='%s'
+ v['CFLAGS_cshlib']=['-fPIC']
+ v['LINKFLAGS_cshlib']=['']
+ v['cshlib_PATTERN']='lib%s.so'
+ v['LINKFLAGS_cstlib']=[]
+ v['cstlib_PATTERN']='lib%s.a'
def configure(conf):
conf.find_sxc()
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Anton Feldmann, 2012
+# "Base for cabal"
+
+from waflib import Task, Utils
+from waflib.TaskGen import extension
+from waflib.Utils import threading
+from shutil import rmtree
+
+lock = threading.Lock()
+registering = False
+
+def configure(self):
+ self.find_program('cabal', var='CABAL')
+ self.find_program('ghc-pkg', var='GHCPKG')
+ pkgconfd = self.bldnode.abspath() + '/package.conf.d'
+ self.env.PREFIX = self.bldnode.abspath() + '/dist'
+ self.env.PKGCONFD = pkgconfd
+ if self.root.find_node(pkgconfd + '/package.cache'):
+ self.msg('Using existing package database', pkgconfd, color='CYAN')
+ else:
+ pkgdir = self.root.find_dir(pkgconfd)
+ if pkgdir:
+ self.msg('Deleting corrupt package database', pkgdir.abspath(), color ='RED')
+ rmtree(pkgdir.abspath())
+ pkgdir = None
+
+ self.cmd_and_log(self.env.GHCPKG + ['init', pkgconfd])
+ self.msg('Created package database', pkgconfd, color = 'YELLOW' if pkgdir else 'GREEN')
+
+@extension('.cabal')
+def process_cabal(self, node):
+ out_dir_node = self.bld.root.find_dir(self.bld.out_dir)
+ package_node = node.change_ext('.package')
+ package_node = out_dir_node.find_or_declare(package_node.name)
+ build_node = node.parent.get_bld()
+ build_path = build_node.abspath()
+ config_node = build_node.find_or_declare('setup-config')
+ inplace_node = build_node.find_or_declare('package.conf.inplace')
+
+ config_task = self.create_task('cabal_configure', node)
+ config_task.cwd = node.parent.abspath()
+ config_task.depends_on = getattr(self, 'depends_on', '')
+ config_task.build_path = build_path
+ config_task.set_outputs(config_node)
+
+ build_task = self.create_task('cabal_build', config_node)
+ build_task.cwd = node.parent.abspath()
+ build_task.build_path = build_path
+ build_task.set_outputs(inplace_node)
+
+ copy_task = self.create_task('cabal_copy', inplace_node)
+ copy_task.cwd = node.parent.abspath()
+ copy_task.depends_on = getattr(self, 'depends_on', '')
+ copy_task.build_path = build_path
+
+ last_task = copy_task
+ task_list = [config_task, build_task, copy_task]
+
+ if (getattr(self, 'register', False)):
+ register_task = self.create_task('cabal_register', inplace_node)
+ register_task.cwd = node.parent.abspath()
+ register_task.set_run_after(copy_task)
+ register_task.build_path = build_path
+
+ pkgreg_task = self.create_task('ghcpkg_register', inplace_node)
+ pkgreg_task.cwd = node.parent.abspath()
+ pkgreg_task.set_run_after(register_task)
+ pkgreg_task.build_path = build_path
+
+ last_task = pkgreg_task
+ task_list += [register_task, pkgreg_task]
+
+ touch_task = self.create_task('cabal_touch', inplace_node)
+ touch_task.set_run_after(last_task)
+ touch_task.set_outputs(package_node)
+ touch_task.build_path = build_path
+
+ task_list += [touch_task]
+
+ return task_list
+
+def get_all_src_deps(node):
+ hs_deps = node.ant_glob('**/*.hs')
+ hsc_deps = node.ant_glob('**/*.hsc')
+ lhs_deps = node.ant_glob('**/*.lhs')
+ c_deps = node.ant_glob('**/*.c')
+ cpp_deps = node.ant_glob('**/*.cpp')
+ proto_deps = node.ant_glob('**/*.proto')
+ return sum([hs_deps, hsc_deps, lhs_deps, c_deps, cpp_deps, proto_deps], [])
+
+class Cabal(Task.Task):
+ def scan(self):
+ return (get_all_src_deps(self.generator.path), ())
+
+class cabal_configure(Cabal):
+ run_str = '${CABAL} configure -v0 --prefix=${PREFIX} --global --user --package-db=${PKGCONFD} --builddir=${tsk.build_path}'
+ shell = True
+
+ def scan(self):
+ out_node = self.generator.bld.root.find_dir(self.generator.bld.out_dir)
+ deps = [out_node.find_or_declare(dep).change_ext('.package') for dep in Utils.to_list(self.depends_on)]
+ return (deps, ())
+
+class cabal_build(Cabal):
+ run_str = '${CABAL} build -v1 --builddir=${tsk.build_path}/'
+ shell = True
+
+class cabal_copy(Cabal):
+ run_str = '${CABAL} copy -v0 --builddir=${tsk.build_path}'
+ shell = True
+
+class cabal_register(Cabal):
+ run_str = '${CABAL} register -v0 --gen-pkg-config=${tsk.build_path}/pkg.config --builddir=${tsk.build_path}'
+ shell = True
+
+class ghcpkg_register(Cabal):
+ run_str = '${GHCPKG} update -v0 --global --user --package-conf=${PKGCONFD} ${tsk.build_path}/pkg.config'
+ shell = True
+
+ def runnable_status(self):
+ global lock, registering
+
+ val = False
+ lock.acquire()
+ val = registering
+ lock.release()
+
+ if val:
+ return Task.ASK_LATER
+
+ ret = Task.Task.runnable_status(self)
+ if ret == Task.RUN_ME:
+ lock.acquire()
+ registering = True
+ lock.release()
+
+ return ret
+
+ def post_run(self):
+ global lock, registering
+
+ lock.acquire()
+ registering = False
+ lock.release()
+
+ return Task.Task.post_run(self)
+
+class cabal_touch(Cabal):
+ run_str = 'touch ${TGT}'
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Tool to extend c_config.check_cfg()
conf.end_msg('%s += %s' % (k, v))
return True
+
+++ /dev/null
-#!/usr/bin/python
-# -*- coding: utf-8 vi:ts=4:noexpandtab
-# Tool to provide dedicated variables for cross-compilation
-
-__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
-__copyright__ = "Jérôme Carretero, 2014"
-
-"""
-
-This tool allows to use environment variables to define cross-compilation things,
-mostly used when you use build variants.
-
-The variables are obtained from the environment in 3 ways:
-
-1. By defining CHOST, they can be derived as ${CHOST}-${TOOL}
-2. By defining HOST_x
-3. By defining ${CHOST//-/_}_x
-
-Usage:
-
-- In your build script::
-
- def configure(cfg):
- ...
- conf.load('c_cross_gnu')
- for variant in x_variants:
- conf.xcheck_host()
- conf.xcheck_host_var('POUET')
- ...
-
- ...
-
-- Then::
-
- CHOST=arm-hardfloat-linux-gnueabi waf configure
-
- env arm-hardfloat-linux-gnueabi-CC="clang -..." waf configure
-
- CFLAGS=... CHOST=arm-hardfloat-linux-gnueabi HOST_CFLAGS=-g waf configure
-
- HOST_CC="clang -..." waf configure
-
-"""
-
-import os
-from waflib import Utils, Configure
-
-try:
- from shlex import quote
-except ImportError:
- from pipes import quote
-
-def get_chost_stuff(conf):
- """
- Get the CHOST environment variable contents
- """
- chost = None
- chost_envar = None
- if conf.env.CHOST:
- chost = conf.env.CHOST[0]
- chost_envar = chost.replace('-', '_')
- return chost, chost_envar
-
-
-@Configure.conf
-def xcheck_envar(conf, name, wafname=None, cross=False):
- wafname = wafname or name
- envar = os.environ.get(name, None)
-
- if envar is None:
- return
-
- value = Utils.to_list(envar) if envar != '' else [envar]
-
- conf.env[wafname] = value
- if cross:
- pretty = 'cross-compilation %s' % wafname
- else:
- pretty = wafname
- conf.msg('Will use %s' % pretty,
- " ".join(quote(x) for x in value))
-
-@Configure.conf
-def xcheck_host_prog(conf, name, tool, wafname=None):
- wafname = wafname or name
-
- chost, chost_envar = get_chost_stuff(conf)
-
- specific = None
- if chost:
- specific = os.environ.get('%s_%s' % (chost_envar, name), None)
-
- if specific:
- value = Utils.to_list(specific)
- conf.env[wafname] += value
- conf.msg('Will use cross-compilation %s from %s_%s' \
- % (name, chost_envar, name),
- " ".join(quote(x) for x in value))
- return
- else:
- envar = os.environ.get('HOST_%s' % name, None)
- if envar is not None:
- value = Utils.to_list(envar)
- conf.env[wafname] = value
- conf.msg('Will use cross-compilation %s from HOST_%s' \
- % (name, name),
- " ".join(quote(x) for x in value))
- return
-
- if conf.env[wafname]:
- return
-
- value = None
- if chost:
- value = '%s-%s' % (chost, tool)
-
- if value:
- conf.env[wafname] = value
- conf.msg('Will use cross-compilation %s from CHOST' \
- % wafname, value)
-
-@Configure.conf
-def xcheck_host_envar(conf, name, wafname=None):
- wafname = wafname or name
-
- chost, chost_envar = get_chost_stuff(conf)
-
- specific = None
- if chost:
- specific = os.environ.get('%s_%s' % (chost_envar, name), None)
-
- if specific:
- value = Utils.to_list(specific)
- conf.env[wafname] += value
- conf.msg('Will use cross-compilation %s from %s_%s' \
- % (name, chost_envar, name),
- " ".join(quote(x) for x in value))
- return
-
-
- envar = os.environ.get('HOST_%s' % name, None)
- if envar is None:
- return
-
- value = Utils.to_list(envar) if envar != '' else [envar]
-
- conf.env[wafname] = value
- conf.msg('Will use cross-compilation %s from HOST_%s' \
- % (name, name),
- " ".join(quote(x) for x in value))
-
-
-@Configure.conf
-def xcheck_host(conf):
- conf.xcheck_envar('CHOST', cross=True)
- conf.xcheck_host_prog('CC', 'gcc')
- conf.xcheck_host_prog('CXX', 'g++')
- conf.xcheck_host_prog('LINK_CC', 'gcc')
- conf.xcheck_host_prog('LINK_CXX', 'g++')
- conf.xcheck_host_prog('AR', 'ar')
- conf.xcheck_host_prog('AS', 'as')
- conf.xcheck_host_prog('LD', 'ld')
- conf.xcheck_host_envar('CFLAGS')
- conf.xcheck_host_envar('CXXFLAGS')
- conf.xcheck_host_envar('LDFLAGS', 'LINKFLAGS')
- conf.xcheck_host_envar('LIB')
- conf.xcheck_host_envar('PKG_CONFIG_LIBDIR')
- conf.xcheck_host_envar('PKG_CONFIG_PATH')
-
- if not conf.env.env:
- conf.env.env = {}
- conf.env.env.update(os.environ)
- if conf.env.PKG_CONFIG_LIBDIR:
- conf.env.env['PKG_CONFIG_LIBDIR'] = conf.env.PKG_CONFIG_LIBDIR[0]
- if conf.env.PKG_CONFIG_PATH:
- conf.env.env['PKG_CONFIG_PATH'] = conf.env.PKG_CONFIG_PATH[0]
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
# Christoph Koke, 2013
"""
import sys, os, json, shlex, pipes
-from waflib import Logs, TaskGen
-from waflib.Tools import c, cxx
+from waflib import Logs, TaskGen, Task
+
+Task.Task.keep_last_cmd = True
if sys.hexversion >= 0x3030000:
quote = shlex.quote
else:
quote = pipes.quote
-@TaskGen.feature('*')
+@TaskGen.feature('c', 'cxx')
@TaskGen.after_method('process_use')
def collect_compilation_db_tasks(self):
"Add a compilation database entry for compiled tasks"
clang_db = self.bld.clang_compilation_database_tasks = []
self.bld.add_post_fun(write_compilation_database)
+ tup = tuple(y for y in [Task.classes.get(x) for x in ('c', 'cxx')] if y)
for task in getattr(self, 'compiled_tasks', []):
- if isinstance(task, (c.c, cxx.cxx)):
+ if isinstance(task, tup):
clang_db.append(task)
def write_compilation_database(ctx):
"Write the clang compilation database as JSON"
database_file = ctx.bldnode.make_node('compile_commands.json')
- Logs.info("Build commands will be stored in %s" % database_file.path_from(ctx.path))
+ Logs.info('Build commands will be stored in %s', database_file.path_from(ctx.path))
try:
root = json.load(database_file)
except IOError:
root = []
- clang_db = dict((x["file"], x) for x in root)
+ clang_db = dict((x['file'], x) for x in root)
for task in getattr(ctx, 'clang_compilation_database_tasks', []):
try:
cmd = task.last_cmd
clang_db[filename] = entry
root = list(clang_db.values())
database_file.write(json.dumps(root, indent=2))
+
+# Override the runnable_status function to do a dummy/dry run when the file doesn't need to be compiled.
+# This will make sure compile_commands.json is always fully up to date.
+# Previously you could end up with a partial compile_commands.json if the build failed.
+for x in ('c', 'cxx'):
+ if x not in Task.classes:
+ continue
+
+ t = Task.classes[x]
+
+ def runnable_status(self):
+ def exec_command(cmd, **kw):
+ pass
+
+ run_status = self.old_runnable_status()
+ if run_status == Task.SKIP_ME:
+ setattr(self, 'old_exec_command', getattr(self, 'exec_command', None))
+ setattr(self, 'exec_command', exec_command)
+ self.run()
+ setattr(self, 'exec_command', getattr(self, 'old_exec_command', None))
+ return run_status
+
+ setattr(t, 'old_runnable_status', getattr(t, 'runnable_status', None))
+ setattr(t, 'runnable_status', runnable_status)
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#! /usr/bin/env python
# encoding: utf-8
# CodeLite Project
${if (project.get_key(x)=="sourcefile")}
<File Name="${x.abspath()}"/>
${endif}
- ${endfor}
+ ${endfor}
</VirtualDirectory>
<VirtualDirectory Name="include">
${for x in project.source}
$b = project.build_properties[0]}
<RebuildCommand>${xml:project.get_rebuild_command(project.build_properties[0])}</RebuildCommand>
<CleanCommand>${xml:project.get_clean_command(project.build_properties[0])}</CleanCommand>
- <BuildCommand>${xml:project.get_build_command(project.build_properties[0])}</BuildCommand>
+ <BuildCommand>${xml:project.get_build_command(project.build_properties[0])}</BuildCommand>
<Target Name="Install">${xml:project.get_install_command(project.build_properties[0])}</Target>
<Target Name="Build and Install">${xml:project.get_build_and_install_command(project.build_properties[0])}</Target>
<Target Name="Build All">${xml:project.get_build_all_command(project.build_properties[0])}</Target>
extr = []
def repl(match):
g = match.group
- if g('dollar'): return "$"
+ if g('dollar'):
+ return "$"
elif g('backslash'):
return "\\"
elif g('subst'):
app("lst.append(%r)" % params[x])
f = extr[x]
- if f.startswith('if') or f.startswith('for'):
+ if f.startswith(('if', 'for')):
app(f + ':')
indent += 1
elif f.startswith('py:'):
app(f[3:])
- elif f.startswith('endif') or f.startswith('endfor'):
+ elif f.startswith(('endif', 'endfor')):
indent -= 1
- elif f.startswith('else') or f.startswith('elif'):
+ elif f.startswith(('else', 'elif')):
indent -= 1
app(f + ':')
indent += 1
BOM = '\xef\xbb\xbf'
try:
- BOM = bytes(BOM, 'iso8859-1') # python 3
-except NameError:
+ BOM = bytes(BOM, 'latin-1') # python 3
+except (TypeError, NameError):
pass
def stealth_write(self, data, flags='wb'):
data = data.decode(sys.getfilesystemencoding(), 'replace')
data = data.encode('utf-8')
- if self.name.endswith('.project') or self.name.endswith('.project'):
+ if self.name.endswith('.project'):
data = BOM + data
try:
except (IOError, ValueError):
self.write(data, flags=flags)
else:
- Logs.debug('codelite: skipping %s' % self.abspath())
+ Logs.debug('codelite: skipping %r', self)
Node.Node.stealth_write = stealth_write
re_quote = re.compile("[^a-zA-Z0-9-]")
return lst
def write(self):
- Logs.debug('codelite: creating %r' % self.path)
+ Logs.debug('codelite: creating %r', self.path)
#print "self.name:",self.name
# first write the project file
required for writing the source files
"""
name = node.name
- if name.endswith('.cpp') or name.endswith('.c'):
+ if name.endswith(('.cpp', '.c')):
return 'sourcefile'
return 'headerfile'
def get_rebuild_command(self, props):
return "%s clean build %s" % self.get_build_params(props)
-
+
def get_install_command(self, props):
return "%s install %s" % self.get_build_params(props)
def get_build_and_install_command(self, props):
return "%s build install %s" % self.get_build_params(props)
-
+
def get_build_and_install_all_command(self, props):
return "%s build install" % self.get_build_params(props)[0]
-
+
def get_clean_all_command(self, props):
return "%s clean" % self.get_build_params(props)[0]
-
+
def get_build_all_command(self, props):
return "%s build" % self.get_build_params(props)[0]
-
+
def get_rebuild_all_command(self, props):
return "%s clean build" % self.get_build_params(props)[0]
vsnode_alias.__init__(self, ctx, node, name)
self.tg = self.ctx() # fake one, cannot remove
self.exclude_files = Node.exclude_regs + '''
-waf-1.8.*
-waf3-1.8.*/**
-.waf-1.8.*
-.waf3-1.8.*/**
+waf-2*
+waf3-2*/**
+.waf-2*
+.waf3-2*/**
**/*.sdf
**/*.suo
**/*.ncb
tsk = self.tg.link_task
except AttributeError:
pass
- else:
+ else:
x.output_file = tsk.outputs[0].abspath()
x.preprocessor_definitions = ';'.join(tsk.env.DEFINES)
x.includes_search_path = ';'.join(self.tg.env.INCPATHS)
return getattr(x, 'path', None) and x.path.abspath() or x.name
self.all_projects.sort(key=sortfun)
-
def write_files(self):
-
"""
Write the project and solution files from the data collected
so far. It is unlikely that you will want to change this
# and finally write the solution file
node = self.get_solution_node()
node.parent.mkdir()
- Logs.warn('Creating %r' % node)
+ Logs.warn('Creating %r', node)
#a = dir(self.root)
#for b in a:
# print b
p = self.vsnode_target(self, tg)
p.collect_source() # delegate this processing
- p.collect_properties()
+ p.collect_properties()
self.all_projects.append(p)
def add_aliases(self):
p.iter_path = p.tg.path
make_parents(p)
-
-
-def options(ctx):
- pass
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
while frame:
func = frame.f_code.co_name
if func == 'exec_command':
- cmd = frame.f_locals['cmd']
+ cmd = frame.f_locals.get('cmd')
if isinstance(cmd, list) and ('gcc' in cmd[0] or 'g++' in cmd[0]):
lines = []
for line in rec.msg.splitlines():
def options(opt):
Logs.log.handlers[0].setFormatter(ColorGCCFormatter(Logs.colors))
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
def options(opt):
Logs.log.handlers[0].setFormatter(RcvtFormatter(Logs.colors))
+
try:
p = Utils.subprocess.Popen(cmd, **kw)
output = p.communicate()[0]
- except OSError ,e:
+ except OSError as e:
raise ValueError(str(e))
if p.returncode:
Options.Handler = Options.OptionsContext
Task.simple_task_type = Task.task_type_from_func = Task.task_factory
-Task.TaskBase.classes = Task.classes
+Task.Task.classes = Task.classes
def setitem(self, key, value):
if key.startswith('CCFLAGS'):
lst = y.to_list(y.add_objects)
lst.reverse()
for u in lst:
- if u in seen: continue
+ if u in seen:
+ continue
added = 1
names = [u]+names
- if added: continue # list of names modified, loop
+ if added:
+ continue # list of names modified, loop
# safe to process the current object
y.post()
"""Small example on how to link object files as if they were source
obj = bld.create_obj('cc')
obj.add_obj_file('foo.o')"""
- if not hasattr(self, 'obj_files'): self.obj_files = []
- if not 'process_obj_files' in self.meths: self.meths.append('process_obj_files')
+ if not hasattr(self, 'obj_files'):
+ self.obj_files = []
+ if not 'process_obj_files' in self.meths:
+ self.meths.append('process_obj_files')
self.obj_files.append(file)
k = [repl.get(key, key) for key in k]
return TaskGen.before_method(*k)
TaskGen.before = before
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#! /usr/bin/env python
# -*- encoding: utf-8 -*-
# Michel Mooij, michel.mooij7@gmail.com
The result of the source code analysis will be stored both as xml and html
files in the build location for the task. Should any error be detected by
cppcheck the build will be aborted and a link to the html report will be shown.
+By default, one index.html file is created for each task generator. A global
+index.html file can be obtained by setting the following variable
+in the configuration section:
+
+    conf.env.CPPCHECK_SINGLE_HTML = True
When needed source code checking by cppcheck can be disabled per task, per
detected error or warning for a particular task. It can be also be disabled for
import sys
import xml.etree.ElementTree as ElementTree
-from waflib import Task, TaskGen, Logs, Context
+from waflib import Task, TaskGen, Logs, Context, Options
PYGMENTS_EXC_MSG= '''
The required module 'pygments' could not be found. Please install it using your
default='20', action='store',
help='maximum preprocessor (--max-configs) define iterations (default=20)')
+ opt.add_option('--cppcheck-jobs', dest='cppcheck_jobs',
+ default='1', action='store',
+ help='number of jobs (-j) to do the checking work (default=1)')
def configure(conf):
if conf.options.cppcheck_skip:
conf.env.CPPCHECK_MAX_CONFIGS = conf.options.cppcheck_max_configs
conf.env.CPPCHECK_BIN_ENABLE = conf.options.cppcheck_bin_enable
conf.env.CPPCHECK_LIB_ENABLE = conf.options.cppcheck_lib_enable
+ conf.env.CPPCHECK_JOBS = conf.options.cppcheck_jobs
+ if conf.options.cppcheck_jobs != '1' and ('unusedFunction' in conf.options.cppcheck_bin_enable or 'unusedFunction' in conf.options.cppcheck_lib_enable or 'all' in conf.options.cppcheck_bin_enable or 'all' in conf.options.cppcheck_lib_enable):
+ Logs.warn('cppcheck: unusedFunction cannot be used with multiple threads, cppcheck will disable it automatically')
conf.find_program('cppcheck', var='CPPCHECK')
+ # set to True to get a single index.html file
+ conf.env.CPPCHECK_SINGLE_HTML = False
@TaskGen.feature('c')
@TaskGen.feature('cxx')
def cppcheck_execute(self):
- if len(self.env.CPPCHECK_SKIP) or self.bld.options.cppcheck_skip:
+ if hasattr(self.bld, 'conf'):
+ return
+ if len(self.env.CPPCHECK_SKIP) or Options.options.cppcheck_skip:
return
if getattr(self, 'cppcheck_skip', False):
return
task = self.create_task('cppcheck')
task.cmd = _tgen_create_cmd(self)
task.fatal = []
- if not self.bld.options.cppcheck_err_resume:
+ if not Options.options.cppcheck_err_resume:
task.fatal.append('error')
max_configs = self.env.CPPCHECK_MAX_CONFIGS
bin_enable = self.env.CPPCHECK_BIN_ENABLE
lib_enable = self.env.CPPCHECK_LIB_ENABLE
+ jobs = self.env.CPPCHECK_JOBS
cmd = self.env.CPPCHECK
args = ['--inconclusive','--report-progress','--verbose','--xml','--xml-version=2']
args.append('--max-configs=%s' % max_configs)
+ args.append('-j %s' % jobs)
if 'cxx' in features:
args.append('--language=c++')
args.append('--language=c')
args.append('--std=%s' % std_c)
- if self.bld.options.cppcheck_check_config:
+ if Options.options.cppcheck_check_config:
args.append('--check-config')
if set(['cprogram','cxxprogram']) & set(features):
root = ElementTree.fromstring(s)
cmd = ElementTree.SubElement(root.find('cppcheck'), 'cmd')
cmd.text = str(self.cmd)
- body = ElementTree.tostring(root)
- node = self.generator.path.get_bld().find_or_declare('cppcheck.xml')
+ body = ElementTree.tostring(root).decode('us-ascii')
+ body_html_name = 'cppcheck-%s.xml' % self.generator.get_name()
+ if self.env.CPPCHECK_SINGLE_HTML:
+ body_html_name = 'cppcheck.xml'
+ node = self.generator.path.get_bld().find_or_declare(body_html_name)
node.write(header + body)
def _get_defects(self, xml_string):
def _create_html_files(self, defects):
sources = {}
- defects = [defect for defect in defects if defect.has_key('file')]
+ defects = [defect for defect in defects if 'file' in defect]
for defect in defects:
name = defect['file']
- if not sources.has_key(name):
+ if not name in sources:
sources[name] = [defect]
else:
sources[name].append(defect)
files = {}
css_style_defs = None
bpath = self.generator.path.get_bld().abspath()
- names = sources.keys()
+ names = list(sources.keys())
for i in range(0,len(names)):
name = names[i]
- htmlfile = 'cppcheck/%i.html' % (i)
+ if self.env.CPPCHECK_SINGLE_HTML:
+ htmlfile = 'cppcheck/%i.html' % (i)
+ else:
+ htmlfile = 'cppcheck/%s%i.html' % (self.generator.get_name(),i)
errors = sources[name]
files[name] = { 'htmlfile': '%s/%s' % (bpath, htmlfile), 'errors': errors }
css_style_defs = self._create_html_file(name, htmlfile, errors)
if div.get('id') == 'header':
h1 = div.find('h1')
h1.text = 'cppcheck report - %s' % name
+ if div.get('id') == 'menu':
+ indexlink = div.find('a')
+ if self.env.CPPCHECK_SINGLE_HTML:
+ indexlink.attrib['href'] = 'index.html'
+ else:
+ indexlink.attrib['href'] = 'index-%s.html' % name
if div.get('id') == 'content':
content = div
srcnode = self.generator.bld.root.find_node(sourcefile)
- hl_lines = [e['line'] for e in errors if e.has_key('line')]
+ hl_lines = [e['line'] for e in errors if 'line' in e]
formatter = CppcheckHtmlFormatter(linenos=True, style='colorful', hl_lines=hl_lines, lineanchors='line')
- formatter.errors = [e for e in errors if e.has_key('line')]
+ formatter.errors = [e for e in errors if 'line' in e]
css_style_defs = formatter.get_style_defs('.highlight')
lexer = pygments.lexers.guess_lexer_for_filename(sourcefile, "")
s = pygments.highlight(srcnode.read(), lexer, formatter)
table = ElementTree.fromstring(s)
content.append(table)
- s = ElementTree.tostring(root, method='html')
+ s = ElementTree.tostring(root, method='html').decode('us-ascii')
s = CCPCHECK_HTML_TYPE + s
node = self.generator.path.get_bld().find_or_declare(htmlfile)
node.write(s)
if div.get('id') == 'content':
content = div
self._create_html_table(content, files)
+ if div.get('id') == 'menu':
+ indexlink = div.find('a')
+ if self.env.CPPCHECK_SINGLE_HTML:
+ indexlink.attrib['href'] = 'index.html'
+ else:
+ indexlink.attrib['href'] = 'index-%s.html' % name
- s = ElementTree.tostring(root, method='html')
+ s = ElementTree.tostring(root, method='html').decode('us-ascii')
s = CCPCHECK_HTML_TYPE + s
- node = self.generator.path.get_bld().find_or_declare('cppcheck/index.html')
+ index_html_name = 'cppcheck/index-%s.html' % name
+ if self.env.CPPCHECK_SINGLE_HTML:
+ index_html_name = 'cppcheck/index.html'
+ node = self.generator.path.get_bld().find_or_declare(index_html_name)
node.write(s)
return node
row = ElementTree.fromstring(s)
table.append(row)
- errors = sorted(val['errors'], key=lambda e: int(e['line']) if e.has_key('line') else sys.maxint)
+ errors = sorted(val['errors'], key=lambda e: int(e['line']) if 'line' in e else sys.maxint)
for e in errors:
- if not e.has_key('line'):
+ if not 'line' in e:
s = '<tr><td></td><td>%s</td><td>%s</td><td>%s</td></tr>\n' % (e['id'], e['severity'], e['msg'])
else:
attr = ''
for error in self.errors:
if int(error['line']) == line_no:
t = t.replace('\n', CPPCHECK_HTML_ERROR % error['msg'])
- line_no = line_no + 1
+ line_no += 1
yield i, t
<div>cppcheck - a tool for static C/C++ code analysis</div>
<div>
Internet: <a href="http://cppcheck.sourceforge.net">http://cppcheck.sourceforge.net</a><br/>
- Forum: <a href="http://apps.sourceforge.net/phpbb/cppcheck/">http://apps.sourceforge.net/phpbb/cppcheck/</a><br/>
+ Forum: <a href="http://apps.sourceforge.net/phpbb/cppcheck/">http://apps.sourceforge.net/phpbb/cppcheck/</a><br/>
IRC: #cppcheck at irc.freenode.net
</div>
}
"""
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#! /usr/bin/env python
# encoding: utf-8
#
bld(features='cpplint', source=bld.path.ant_glob('**/*.hpp'))
'''
+from __future__ import absolute_import
import sys, re
import logging
import threading
help='specify the log level (default: 1)')
opt.add_option('--cpplint-break', default=5, type='int', dest='CPPLINT_BREAK',
help='break the build if error >= level (default: 5)')
+ opt.add_option('--cpplint-root', type='string',
+ default=None, dest='CPPLINT_ROOT',
+ help='root directory used to derive header guard')
opt.add_option('--cpplint-skip', action='store_true',
default=False, dest='CPPLINT_SKIP',
help='skip cpplint during build')
conf.end_msg('not found, skipping it.')
-class cpplint_formatter(Logs.formatter):
+class cpplint_formatter(Logs.formatter, object):
def __init__(self, fmt):
logging.Formatter.__init__(self, CPPLINT_FORMAT)
self.fmt = fmt
return super(cpplint_formatter, self).format(rec)
-class cpplint_handler(Logs.log_handler):
+class cpplint_handler(Logs.log_handler, object):
def __init__(self, stream=sys.stderr, **kw):
super(cpplint_handler, self).__init__(stream, **kw)
self.stream = stream
global critical_errors
with cpplint_wrapper(get_cpplint_logger(self.env.CPPLINT_OUTPUT), self.env.CPPLINT_BREAK, self.env.CPPLINT_OUTPUT):
if self.env.CPPLINT_OUTPUT != 'waf':
- cpplint_tool._cpplint_state.output_format = self.env.CPPLINT_OUTPUT
- cpplint_tool._cpplint_state.SetFilters(self.env.CPPLINT_FILTERS)
+ cpplint_tool._SetOutputFormat(self.env.CPPLINT_OUTPUT)
+ cpplint_tool._SetFilters(self.env.CPPLINT_FILTERS)
cpplint_tool._line_length = self.env.CPPLINT_LINE_LENGTH
+ cpplint_tool._root = self.env.CPPLINT_ROOT
cpplint_tool.ProcessFile(self.inputs[0].abspath(), self.env.CPPLINT_LEVEL)
return critical_errors
@TaskGen.feature('cpplint')
@TaskGen.before_method('process_source')
def post_cpplint(self):
- if self.env.CPPLINT_SKIP:
- return
-
if not self.env.CPPLINT_INITIALIZED:
for key, value in Options.options.__dict__.items():
if not key.startswith('CPPLINT_') or self.env[key]:
- continue
+ continue
self.env[key] = value
self.env.CPPLINT_INITIALIZED = True
+ if self.env.CPPLINT_SKIP:
+ return
+
if not self.env.CPPLINT_OUTPUT in CPPLINT_RE:
return
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/python
+# -*- coding: utf-8 vi:ts=4:noexpandtab
+# Tool to provide dedicated variables for cross-compilation
+
+__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
+__copyright__ = "Jérôme Carretero, 2014"
+
+"""
+This tool allows using environment variables to define cross-compilation
+variables intended for build variants.
+
+The variables are obtained from the environment in 3 ways:
+
+1. By defining CHOST, they can be derived as ${CHOST}-${TOOL}
+2. By defining HOST_x
+3. By defining ${CHOST//-/_}_x
+
+Otherwise, one can set ``cfg.env.CHOST`` in ``wscript`` before loading ``cross_gnu``.
+
+Usage:
+
+- In your build script::
+
+ def configure(cfg):
+ ...
+ for variant in x_variants:
+ setenv(variant)
+ conf.load('cross_gnu')
+ conf.xcheck_host_var('POUET')
+ ...
+
+
+- Then::
+
+ CHOST=arm-hardfloat-linux-gnueabi waf configure
+ env arm-hardfloat-linux-gnueabi-CC="clang -..." waf configure
+ CFLAGS=... CHOST=arm-hardfloat-linux-gnueabi HOST_CFLAGS=-g waf configure
+ HOST_CC="clang -..." waf configure
+
+This example ``wscript`` compiles to Microchip PIC (xc16-gcc-xyz must be in PATH):
+
+.. code:: python
+
+ from waflib import Configure
+
+ #from https://gist.github.com/rpuntaie/2bddfb5d7b77db26415ee14371289971
+ import waf_variants
+
+ variants='pc fw/variant1 fw/variant2'.split()
+
+ top = "."
+ out = "../build"
+
+ PIC = '33FJ128GP804' #dsPICxxx
+
+ @Configure.conf
+ def gcc_modifier_xc16(cfg):
+ v = cfg.env
+ v.cprogram_PATTERN = '%s.elf'
+ v.LINKFLAGS_cprogram = ','.join(['-Wl','','','--defsym=__MPLAB_BUILD=0','','--script=p'+PIC+'.gld',
+ '--stack=16','--check-sections','--data-init','--pack-data','--handles','--isr','--no-gc-sections',
+ '--fill-upper=0','--stackguard=16','--no-force-link','--smart-io']) #,'--report-mem'])
+ v.CFLAGS_cprogram=['-mcpu='+PIC,'-omf=elf','-mlarge-code','-msmart-io=1',
+ '-msfr-warn=off','-mno-override-inline','-finline','-Winline']
+
+ def configure(cfg):
+ if 'fw' in cfg.variant: #firmware
+ cfg.env.DEST_OS = 'xc16' #cfg.env.CHOST = 'xc16' #works too
+ cfg.load('c cross_gnu') #cfg.env.CHOST becomes ['xc16']
+ ...
+ else: #configure for pc SW
+ ...
+
+ def build(bld):
+ if 'fw' in bld.variant: #firmware
+ bld.program(source='maintst.c', target='maintst');
+ bld(source='maintst.elf', target='maintst.hex', rule="xc16-bin2hex ${SRC} -a -omf=elf")
+ else: #build for pc SW
+ ...
+
+"""
+
+import os
+from waflib import Utils, Configure
+from waflib.Tools import ccroot, gcc
+
+try:
+ from shlex import quote
+except ImportError:
+ from pipes import quote
+
+def get_chost_stuff(conf):
+ """
+ Get the CHOST environment variable contents
+ """
+ chost = None
+ chost_envar = None
+ if conf.env.CHOST:
+ chost = conf.env.CHOST[0]
+ chost_envar = chost.replace('-', '_')
+ return chost, chost_envar
+
+
+@Configure.conf
+def xcheck_var(conf, name, wafname=None, cross=False):
+ wafname = wafname or name
+
+ if wafname in conf.env:
+ value = conf.env[wafname]
+ if isinstance(value, str):
+ value = [value]
+ else:
+ envar = os.environ.get(name)
+ if not envar:
+ return
+ value = Utils.to_list(envar) if envar != '' else [envar]
+
+ conf.env[wafname] = value
+ if cross:
+ pretty = 'cross-compilation %s' % wafname
+ else:
+ pretty = wafname
+ conf.msg('Will use %s' % pretty, " ".join(quote(x) for x in value))
+
+@Configure.conf
+def xcheck_host_prog(conf, name, tool, wafname=None):
+ wafname = wafname or name
+
+ chost, chost_envar = get_chost_stuff(conf)
+
+ specific = None
+ if chost:
+ specific = os.environ.get('%s_%s' % (chost_envar, name))
+
+ if specific:
+ value = Utils.to_list(specific)
+ conf.env[wafname] += value
+ conf.msg('Will use cross-compilation %s from %s_%s' % (name, chost_envar, name),
+ " ".join(quote(x) for x in value))
+ return
+ else:
+ envar = os.environ.get('HOST_%s' % name)
+ if envar is not None:
+ value = Utils.to_list(envar)
+ conf.env[wafname] = value
+ conf.msg('Will use cross-compilation %s from HOST_%s' % (name, name),
+ " ".join(quote(x) for x in value))
+ return
+
+ if conf.env[wafname]:
+ return
+
+ value = None
+ if chost:
+ value = '%s-%s' % (chost, tool)
+
+ if value:
+ conf.env[wafname] = value
+ conf.msg('Will use cross-compilation %s from CHOST' % wafname, value)
+
+@Configure.conf
+def xcheck_host_envar(conf, name, wafname=None):
+ wafname = wafname or name
+
+ chost, chost_envar = get_chost_stuff(conf)
+
+ specific = None
+ if chost:
+ specific = os.environ.get('%s_%s' % (chost_envar, name))
+
+ if specific:
+ value = Utils.to_list(specific)
+ conf.env[wafname] += value
+ conf.msg('Will use cross-compilation %s from %s_%s' \
+ % (name, chost_envar, name),
+ " ".join(quote(x) for x in value))
+ return
+
+
+ envar = os.environ.get('HOST_%s' % name)
+ if envar is None:
+ return
+
+ value = Utils.to_list(envar) if envar != '' else [envar]
+
+ conf.env[wafname] = value
+ conf.msg('Will use cross-compilation %s from HOST_%s' % (name, name),
+ " ".join(quote(x) for x in value))
+
+
+@Configure.conf
+def xcheck_host(conf):
+ conf.xcheck_var('CHOST', cross=True)
+ conf.env.CHOST = conf.env.CHOST or [conf.env.DEST_OS]
+ conf.env.DEST_OS = conf.env.CHOST[0].replace('-','_')
+ conf.xcheck_host_prog('CC', 'gcc')
+ conf.xcheck_host_prog('CXX', 'g++')
+ conf.xcheck_host_prog('LINK_CC', 'gcc')
+ conf.xcheck_host_prog('LINK_CXX', 'g++')
+ conf.xcheck_host_prog('AR', 'ar')
+ conf.xcheck_host_prog('AS', 'as')
+ conf.xcheck_host_prog('LD', 'ld')
+ conf.xcheck_host_envar('CFLAGS')
+ conf.xcheck_host_envar('CXXFLAGS')
+ conf.xcheck_host_envar('LDFLAGS', 'LINKFLAGS')
+ conf.xcheck_host_envar('LIB')
+ conf.xcheck_host_envar('PKG_CONFIG_LIBDIR')
+ conf.xcheck_host_envar('PKG_CONFIG_PATH')
+
+ if not conf.env.env:
+ conf.env.env = {}
+ conf.env.env.update(os.environ)
+ if conf.env.PKG_CONFIG_LIBDIR:
+ conf.env.env['PKG_CONFIG_LIBDIR'] = conf.env.PKG_CONFIG_LIBDIR[0]
+ if conf.env.PKG_CONFIG_PATH:
+ conf.env.env['PKG_CONFIG_PATH'] = conf.env.PKG_CONFIG_PATH[0]
+
+def configure(conf):
+ """
+ Configuration example for gcc, it will not work for g++/clang/clang++
+ """
+ conf.xcheck_host()
+ conf.gcc_common_flags()
+ conf.gcc_modifier_platform()
+ conf.cc_load_tools()
+ conf.cc_add_flags()
+ conf.link_add_flags()
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2010-2015
-import os, re
+import re
from waflib import Task, Logs
from waflib.TaskGen import extension
def post_run(self):
for x in self.outputs:
if x.name.endswith('.h'):
- if not os.path.exists(x.abspath()):
+ if not x.exists():
if Logs.verbose:
- Logs.warn('Expected %r' % x.abspath())
+ Logs.warn('Expected %r', x.abspath())
x.write('')
return Task.Task.post_run(self)
else:
mods.append(m.group(2))
- Logs.debug("cython: mods %r" % mods)
+ Logs.debug('cython: mods %r', mods)
incs = getattr(self.generator, 'cython_includes', [])
incs = [self.generator.path.find_dir(x) for x in incs]
incs.append(node.parent)
if implicit:
found.append(implicit)
- Logs.debug("cython: found %r" % found)
+ Logs.debug('cython: found %r', found)
# Now the .h created - store them in bld.raw_deps for later use
has_api = False
ctx.find_program('cython', var='CYTHON')
if ctx.options.cython_flags:
ctx.env.CYTHONFLAGS = ctx.options.cython_flags
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
# Jérôme Carretero, 2011 (zougloub)
v['CC_TGT_F'] = ['-c', '-o']
# linker
- if not v['LINK_CC']: v['LINK_CC'] = v['CC']
+ if not v['LINK_CC']:
+ v['LINK_CC'] = v['CC']
v['CCLNK_SRC_F'] = []
v['CCLNK_TGT_F'] = ['-o']
v['CPPPATH_ST'] = '-I%s'
Add the ``--with-diab-bindir`` command-line options.
"""
opt.add_option('--with-diab-bindir', type='string', dest='diabbindir', help = 'Specify alternate diab bin folder', default="")
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#! /usr/bin/env python
# encoding: utf-8
tarinfo.name = os.path.split(x)[1]
else:
tarinfo.name = endname + x # todo, if tuple, then..
- Logs.debug("adding %r to %s" % (tarinfo.name, filename))
+ Logs.debug('distnet: adding %r to %s', tarinfo.name, filename)
with open(x, 'rb') as f:
tar.addfile(tarinfo, f)
- Logs.info('Created %s' % filename)
+ Logs.info('Created %s', filename)
class publish(Context.Context):
fun = 'publish'
try:
response = urlopen(req, timeout=TIMEOUT)
except URLError as e:
- Logs.warn('The package server is down! %r' % e)
+ Logs.warn('The package server is down! %r', e)
self.constraints = self.local_resolve(text)
else:
ret = response.read()
reasons = c.why()
if len(reasons) == 1:
- Logs.error('%s but no matching package could be found in this repository' % reasons[0])
+ Logs.error('%s but no matching package could be found in this repository', reasons[0])
else:
- Logs.error('Conflicts on package %r:' % c.pkgname)
+ Logs.error('Conflicts on package %r:', c.pkgname)
for r in reasons:
- Logs.error(' %s' % r)
+ Logs.error(' %s', r)
if errors:
self.fatal('The package requirements cannot be satisfied!')
try:
return self.cache_constraints[(pkgname, pkgver)]
except KeyError:
- #Logs.error("no key %r" % (pkgname, pkgver))
text = Utils.readf(os.path.join(get_distnet_cache(), pkgname, pkgver, requires))
ret = parse_constraints(text)
self.cache_constraints[(pkgname, pkgver)] = ret
def solution_to_constraints(self, versions, constraints):
solution = []
- for p in versions.keys():
+ for p in versions:
c = constraint()
solution.append(c)
def build(bld):
load_tools(bld, bld.variant)
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#! /usr/bin/env python
# encoding: UTF-8
# Thomas Nagy 2008-2010 (ita)
def scan(self):
exclude_patterns = self.pars.get('EXCLUDE_PATTERNS','').split()
+ exclude_patterns = [pattern.replace('*/', '**/') for pattern in exclude_patterns]
file_patterns = self.pars.get('FILE_PATTERNS','').split()
if not file_patterns:
- file_patterns = DOXY_FILE_PATTERNS
+ file_patterns = DOXY_FILE_PATTERNS.split()
if self.pars.get('RECURSIVE') == 'YES':
file_patterns = ["**/%s" % pattern for pattern in file_patterns]
nodes = []
def post_run(self):
nodes = self.output_dir.ant_glob('**/*', quiet=True)
for x in nodes:
- x.sig = Utils.h_file(x.abspath())
+ self.generator.bld.node_sigs[x] = self.uid()
self.add_install()
return Task.Task.post_run(self)
self.outputs += nodes
if getattr(self.generator, 'install_path', None):
if not getattr(self.generator, 'doxy_tar', None):
- self.generator.bld.install_files(self.generator.install_path,
- self.outputs,
+ self.generator.add_install_files(install_to=self.generator.install_path,
+ install_from=self.outputs,
postpone=False,
cwd=self.output_dir,
relative_trick=True)
else:
tsk.env['TAROPTS'] = ['cf']
if getattr(self, 'install_path', None):
- self.bld.install_files(self.install_path, tsk.outputs)
+ self.add_install_files(install_to=self.install_path, install_from=tsk.outputs)
def configure(conf):
'''
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#! /usr/bin/env python
# encoding: utf-8
# Matt Clarkson, 2012
pbData = blob_out.pbData
buffer = c_buffer(cbData)
memcpy(buffer, pbData, cbData)
- LocalFree(pbData);
+ LocalFree(pbData)
return buffer.raw
@conf
blob_entropy = DATA_BLOB(len(entropy), buffer_entropy)
blob_out = DATA_BLOB()
- if CryptProtectData(byref(blob_in), 'python_data', byref(blob_entropy),
+ if CryptProtectData(byref(blob_in), 'python_data', byref(blob_entropy),
None, None, CRYPTPROTECT_UI_FORBIDDEN, byref(blob_out)):
return get_data(blob_out)
else:
return get_data(blob_out)
else:
self.fatal('Failed to decrypt data')
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# Eclipse CDT 5.0 generator for Waf
+# Richard Quirk 2009-2011 (New BSD License)
+# Thomas Nagy 2011 (ported to Waf 1.6)
+
+"""
+Usage:
+
+def options(opt):
+ opt.load('eclipse')
+
+$ waf configure eclipse
+"""
+
+import sys, os
+from waflib import Utils, Logs, Context, Build, TaskGen, Scripting, Errors, Node
+from xml.dom.minidom import Document
+
+STANDARD_INCLUDES = [ '/usr/local/include', '/usr/include' ]
+
+oe_cdt = 'org.eclipse.cdt'
+cdt_mk = oe_cdt + '.make.core'
+cdt_core = oe_cdt + '.core'
+cdt_bld = oe_cdt + '.build.core'
+
+class eclipse(Build.BuildContext):
+ cmd = 'eclipse'
+ fun = Scripting.default_cmd
+
+ def execute(self):
+ """
+ Entry point
+ """
+ self.restore()
+ if not self.all_envs:
+ self.load_envs()
+ self.recurse([self.run_dir])
+
+ appname = getattr(Context.g_module, Context.APPNAME, os.path.basename(self.srcnode.abspath()))
+ self.create_cproject(appname, pythonpath=self.env['ECLIPSE_PYTHON_PATH'])
+
+ def create_cproject(self, appname, workspace_includes=[], pythonpath=[]):
+ """
+ Create the Eclipse CDT .project and .cproject files
+ @param appname The name that will appear in the Project Explorer
+		Includes are extracted from this BuildContext object itself
+ @param workspace_includes Optional project includes to prevent
+ "Unresolved Inclusion" errors in the Eclipse editor
+ @param pythonpath Optional project specific python paths
+ """
+ hasc = hasjava = haspython = False
+ source_dirs = []
+ cpppath = self.env['CPPPATH']
+ javasrcpath = []
+ includes = STANDARD_INCLUDES
+ if sys.platform != 'win32':
+ cc = self.env.CC or self.env.CXX
+ if cc:
+ cmd = cc + ['-xc++', '-E', '-Wp,-v', '-']
+ try:
+ gccout = self.cmd_and_log(cmd, output=Context.STDERR, quiet=Context.BOTH, input='\n'.encode()).splitlines()
+ except Errors.WafError:
+ pass
+ else:
+ includes = []
+ for ipath in gccout:
+ if ipath.startswith(' /'):
+ includes.append(ipath[1:])
+ cpppath += includes
+ Logs.warn('Generating Eclipse CDT project files')
+
+ for g in self.groups:
+ for tg in g:
+ if not isinstance(tg, TaskGen.task_gen):
+ continue
+
+ # Add local Python modules paths to configuration so object resolving will work in IDE
+ if 'py' in tg.features:
+ pypath = tg.path.relpath()
+ py_installfrom = getattr(tg, 'install_from', None)
+ if py_installfrom:
+ pypath += os.sep + py_installfrom
+ pythonpath.append(pypath)
+ haspython = True
+
+
+ # Add Java source directories so object resolving works in IDE
+ if 'java' in tg.features:
+ java_src = tg.path.relpath()
+ java_srcdir = getattr(tg, 'srcdir', None)
+ if java_srcdir:
+ if isinstance(java_srcdir, Node.Node):
+ java_srcdir = [java_srcdir]
+ for x in Utils.to_list(java_srcdir):
+ if isinstance(x, Node.Node):
+ x = x.name
+ if java_src == '.':
+ this_src = x
+ else:
+ this_src = java_src + os.sep + x
+ javasrcpath.append(this_src)
+ else:
+ javasrcpath.append(java_src)
+ hasjava = True
+
+ tg.post()
+ if not getattr(tg, 'link_task', None):
+ continue
+
+ features = Utils.to_list(getattr(tg, 'features', ''))
+
+ is_cc = 'c' in features or 'cxx' in features
+
+ incnodes = tg.to_incnodes(tg.to_list(getattr(tg, 'includes', [])) + tg.env['INCLUDES'])
+ for p in incnodes:
+ path = p.path_from(self.srcnode)
+
+ if (path.startswith("/")):
+ cpppath.append(path)
+ else:
+ workspace_includes.append(path)
+
+ if is_cc and path not in source_dirs:
+ source_dirs.append(path)
+
+ hasc = True
+
+ project = self.impl_create_project(sys.executable, appname, hasc, hasjava, haspython)
+ self.srcnode.make_node('.project').write(project.toprettyxml())
+
+ if hasc:
+ waf = os.path.abspath(sys.argv[0])
+ project = self.impl_create_cproject(sys.executable, waf, appname, workspace_includes, cpppath, source_dirs)
+ self.srcnode.make_node('.cproject').write(project.toprettyxml())
+
+ if haspython:
+ project = self.impl_create_pydevproject(sys.path, pythonpath)
+ self.srcnode.make_node('.pydevproject').write(project.toprettyxml())
+
+ if hasjava:
+ project = self.impl_create_javaproject(javasrcpath)
+ self.srcnode.make_node('.classpath').write(project.toprettyxml())
+
+ def impl_create_project(self, executable, appname, hasc, hasjava, haspython):
+ doc = Document()
+ projectDescription = doc.createElement('projectDescription')
+ self.add(doc, projectDescription, 'name', appname)
+ self.add(doc, projectDescription, 'comment')
+ self.add(doc, projectDescription, 'projects')
+ buildSpec = self.add(doc, projectDescription, 'buildSpec')
+ buildCommand = self.add(doc, buildSpec, 'buildCommand')
+ self.add(doc, buildCommand, 'name', oe_cdt + '.managedbuilder.core.genmakebuilder')
+ self.add(doc, buildCommand, 'triggers', 'clean,full,incremental,')
+ arguments = self.add(doc, buildCommand, 'arguments')
+ # the default make-style targets are overwritten by the .cproject values
+ dictionaries = {
+ cdt_mk + '.contents': cdt_mk + '.activeConfigSettings',
+ cdt_mk + '.enableAutoBuild': 'false',
+ cdt_mk + '.enableCleanBuild': 'true',
+ cdt_mk + '.enableFullBuild': 'true',
+ }
+ for k, v in dictionaries.items():
+ self.addDictionary(doc, arguments, k, v)
+
+ natures = self.add(doc, projectDescription, 'natures')
+
+ if hasc:
+ nature_list = """
+ core.ccnature
+ managedbuilder.core.ScannerConfigNature
+ managedbuilder.core.managedBuildNature
+ core.cnature
+ """.split()
+ for n in nature_list:
+ self.add(doc, natures, 'nature', oe_cdt + '.' + n)
+
+ if haspython:
+ self.add(doc, natures, 'nature', 'org.python.pydev.pythonNature')
+ if hasjava:
+ self.add(doc, natures, 'nature', 'org.eclipse.jdt.core.javanature')
+
+ doc.appendChild(projectDescription)
+ return doc
+
+	def impl_create_cproject(self, executable, waf, appname, workspace_includes, cpppath, source_dirs=[]):
+		# NOTE(review): mutable default argument source_dirs=[] — shared across calls; safe only if never mutated
+		"""Build and return the xml.dom Document for an Eclipse CDT ``.cproject`` file.
+
+		:param executable: command used to run waf (becomes the builder/target command)
+		:param waf: path to the waf script, embedded into build/clean target strings
+		:param appname: project/artifact name
+		:param workspace_includes: include paths expressed relative to the workspace
+		:param cpppath: absolute include paths
+		:param source_dirs: source folders to emit as sourceEntries (others are excluded)
+		"""
+		doc = Document()
+		doc.appendChild(doc.createProcessingInstruction('fileVersion', '4.0.0'))
+		cconf_id = cdt_core + '.default.config.1'
+		cproject = doc.createElement('cproject')
+		storageModule = self.add(doc, cproject, 'storageModule',
+			{'moduleId': cdt_core + '.settings'})
+		cconf = self.add(doc, storageModule, 'cconfiguration', {'id':cconf_id})
+
+		storageModule = self.add(doc, cconf, 'storageModule',
+			{'buildSystemId': oe_cdt + '.managedbuilder.core.configurationDataProvider',
+			 'id': cconf_id,
+			 'moduleId': cdt_core + '.settings',
+			 'name': 'Default'})
+
+		self.add(doc, storageModule, 'externalSettings')
+
+		# register the ELF binary parser and the usual error parsers
+		extensions = self.add(doc, storageModule, 'extensions')
+		extension_list = """
+			VCErrorParser
+			MakeErrorParser
+			GCCErrorParser
+			GASErrorParser
+			GLDErrorParser
+		""".split()
+		self.add(doc, extensions, 'extension', {'id': cdt_core + '.ELF', 'point':cdt_core + '.BinaryParser'})
+		for e in extension_list:
+			self.add(doc, extensions, 'extension', {'id': cdt_core + '.' + e, 'point':cdt_core + '.ErrorParser'})
+
+		storageModule = self.add(doc, cconf, 'storageModule',
+			{'moduleId': 'cdtBuildSystem', 'version': '4.0.0'})
+		config = self.add(doc, storageModule, 'configuration',
+					{'artifactName': appname,
+					 'id': cconf_id,
+					 'name': 'Default',
+					 'parent': cdt_bld + '.prefbase.cfg'})
+		folderInfo = self.add(doc, config, 'folderInfo',
+							{'id': cconf_id+'.', 'name': '/', 'resourcePath': ''})
+
+		toolChain = self.add(doc, folderInfo, 'toolChain',
+				{'id': cdt_bld + '.prefbase.toolchain.1',
+				 'name': 'No ToolChain',
+				 'resourceTypeBasedDiscovery': 'false',
+				 'superClass': cdt_bld + '.prefbase.toolchain'})
+
+		self.add(doc, toolChain, 'targetPlatform', {'binaryParser': 'org.eclipse.cdt.core.ELF', 'id': cdt_bld + '.prefbase.toolchain.1', 'name': ''})
+
+		# map Eclipse build/clean actions to waf invocations (managed build stays off)
+		waf_build = '"%s" %s'%(waf, eclipse.fun)
+		waf_clean = '"%s" clean'%(waf)
+		self.add(doc, toolChain, 'builder',
+					{'autoBuildTarget': waf_build,
+					 'command': executable,
+					 'enableAutoBuild': 'false',
+					 'cleanBuildTarget': waf_clean,
+					 'enableIncrementalBuild': 'true',
+					 'id': cdt_bld + '.settings.default.builder.1',
+					 'incrementalBuildTarget': waf_build,
+					 'managedBuildOn': 'false',
+					 'name': 'Gnu Make Builder',
+					 'superClass': cdt_bld + '.settings.default.builder'})
+
+		# one 'tool' holder per language; each carries the include paths so the indexer works
+		tool_index = 1;
+		for tool_name in ("Assembly", "GNU C++", "GNU C"):
+			tool = self.add(doc, toolChain, 'tool',
+					{'id': cdt_bld + '.settings.holder.' + str(tool_index),
+					 'name': tool_name,
+					 'superClass': cdt_bld + '.settings.holder'})
+			if cpppath or workspace_includes:
+				incpaths = cdt_bld + '.settings.holder.incpaths'
+				option = self.add(doc, tool, 'option',
+						{'id': incpaths + '.' + str(tool_index),
+						 'name': 'Include Paths',
+						 'superClass': incpaths,
+						 'valueType': 'includePath'})
+				for i in workspace_includes:
+					self.add(doc, option, 'listOptionValue',
+								{'builtIn': 'false',
+								 'value': '"${workspace_loc:/%s/%s}"'%(appname, i)})
+				for i in cpppath:
+					self.add(doc, option, 'listOptionValue',
+								{'builtIn': 'false',
+								 'value': '"%s"'%(i)})
+			if tool_name == "GNU C++" or tool_name == "GNU C":
+				self.add(doc,tool,'inputType',{ 'id':'org.eclipse.cdt.build.core.settings.holder.inType.' + str(tool_index), \
+					'languageId':'org.eclipse.cdt.core.gcc' if tool_name == "GNU C" else 'org.eclipse.cdt.core.g++','languageName':tool_name, \
+					'sourceContentType':'org.eclipse.cdt.core.cSource,org.eclipse.cdt.core.cHeader', \
+					'superClass':'org.eclipse.cdt.build.core.settings.holder.inType' })
+			tool_index += 1
+
+		if source_dirs:
+			sourceEntries = self.add(doc, config, 'sourceEntries')
+			for i in source_dirs:
+				# first entry excludes the folder globally, second re-adds it as a source path
+				self.add(doc, sourceEntries, 'entry',
+					{'excluding': i,
+					 'flags': 'VALUE_WORKSPACE_PATH|RESOLVED',
+					 'kind': 'sourcePath',
+					 'name': ''})
+				self.add(doc, sourceEntries, 'entry',
+					{
+					 'flags': 'VALUE_WORKSPACE_PATH|RESOLVED',
+					 'kind': 'sourcePath',
+					 'name': i})
+
+		# expose common waf commands as Eclipse make targets
+		storageModule = self.add(doc, cconf, 'storageModule',
+					{'moduleId': cdt_mk + '.buildtargets'})
+		buildTargets = self.add(doc, storageModule, 'buildTargets')
+		def addTargetWrap(name, runAll):
+			return self.addTarget(doc, buildTargets, executable, name,
+						'"%s" %s'%(waf, name), runAll)
+		addTargetWrap('configure', True)
+		addTargetWrap('dist', False)
+		addTargetWrap('install', False)
+		addTargetWrap('check', False)
+
+		storageModule = self.add(doc, cproject, 'storageModule',
+					{'moduleId': 'cdtBuildSystem',
+					 'version': '4.0.0'})
+
+		self.add(doc, storageModule, 'project', {'id': '%s.null.1'%appname, 'name': appname})
+
+		doc.appendChild(cproject)
+		return doc
+
+	def impl_create_pydevproject(self, system_path, user_path):
+		# create a pydevproject file
+		"""Build and return the xml.dom Document for a PyDev ``.pydevproject`` file.
+
+		:param system_path: sys.path-like entries; any containing 'wafadmin' become external source paths
+		:param user_path: project-relative source folders
+		"""
+		doc = Document()
+		doc.appendChild(doc.createProcessingInstruction('eclipse-pydev', 'version="1.0"'))
+		pydevproject = doc.createElement('pydev_project')
+		# pin the interpreter version to the Python currently running waf
+		prop = self.add(doc, pydevproject,
+					   'pydev_property',
+					   'python %d.%d'%(sys.version_info[0], sys.version_info[1]))
+		prop.setAttribute('name', 'org.python.pydev.PYTHON_PROJECT_VERSION')
+		prop = self.add(doc, pydevproject, 'pydev_property', 'Default')
+		prop.setAttribute('name', 'org.python.pydev.PYTHON_PROJECT_INTERPRETER')
+		# add waf's paths
+		wafadmin = [p for p in system_path if p.find('wafadmin') != -1]
+		if wafadmin:
+			prop = self.add(doc, pydevproject, 'pydev_pathproperty',
+					{'name':'org.python.pydev.PROJECT_EXTERNAL_SOURCE_PATH'})
+			for i in wafadmin:
+				self.add(doc, prop, 'path', i)
+		if user_path:
+			prop = self.add(doc, pydevproject, 'pydev_pathproperty',
+					{'name':'org.python.pydev.PROJECT_SOURCE_PATH'})
+			for i in user_path:
+				self.add(doc, prop, 'path', '/${PROJECT_DIR_NAME}/'+i)
+
+		doc.appendChild(pydevproject)
+		return doc
+
<<<<<<< SEARCH
+	def impl_create_javaproject(self, javasrcpath):
+		# create a .classpath file for java usage
+		"""Build and return the xml.dom Document for an Eclipse JDT ``.classpath`` file.
+
+		:param javasrcpath: list of java source folders (each becomes a 'src' entry)
+		"""
+		doc = Document()
+		javaproject = doc.createElement('classpath')
+		if javasrcpath:
+			for i in javasrcpath:
+				self.add(doc, javaproject, 'classpathentry',
+					{'kind': 'src', 'path': i})
+
+		# default JRE container plus the waf build directory as the output folder
+		self.add(doc, javaproject, 'classpathentry', {'kind': 'con', 'path': 'org.eclipse.jdt.launching.JRE_CONTAINER'})
+		self.add(doc, javaproject, 'classpathentry', {'kind': 'output', 'path': self.bldnode.name })
+		doc.appendChild(javaproject)
+		return doc
+
+	def addDictionary(self, doc, parent, k, v):
+		"""Append a ``<dictionary><key>k</key><value>v</value></dictionary>`` element under *parent*."""
+		dictionary = self.add(doc, parent, 'dictionary')
+		self.add(doc, dictionary, 'key', k)
+		self.add(doc, dictionary, 'value', v)
+		return dictionary
+
+	def addTarget(self, doc, buildTargets, executable, name, buildTarget, runAllBuilders=True):
+		"""Append one CDT make target named *name* that runs *executable* with *buildTarget* arguments."""
+		target = self.add(doc, buildTargets, 'target',
+						{'name': name,
+						 'path': '',
+						 'targetID': oe_cdt + '.build.MakeTargetBuilder'})
+		self.add(doc, target, 'buildCommand', executable)
+		self.add(doc, target, 'buildArguments', None)
+		self.add(doc, target, 'buildTarget', buildTarget)
+		self.add(doc, target, 'stopOnError', 'true')
+		self.add(doc, target, 'useDefaultCommand', 'false')
+		self.add(doc, target, 'runAllBuilders', str(runAllBuilders).lower())
+
+	def add(self, doc, parent, tag, value = None):
+		"""Create element *tag* under *parent* and return it.
+
+		*value* may be a str (becomes a text child) or a dict (becomes attributes).
+		NOTE(review): the truthiness test below skips empty strings/dicts, not only None.
+		"""
+		el = doc.createElement(tag)
+		if (value):
+			if type(value) == type(str()):
+				el.appendChild(doc.createTextNode(value))
+			elif type(value) == type(dict()):
+				self.setAttributes(el, value)
+		parent.appendChild(el)
+		return el
+
+	def setAttributes(self, node, attrs):
+		"""Set every key/value pair of *attrs* as an XML attribute on *node*."""
+		for k, v in attrs.items():
+			node.setAttribute(k, v)
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2010 (ita)
+# Przemyslaw Rzepecki, 2016
+
+"""
+Erlang support
+"""
+
+import re
+from waflib import Task, TaskGen
+from waflib.TaskGen import feature, after_method, before_method
+# to load the method "to_incnodes" below
+from waflib.Tools import ccroot
+
+# Those flags are required by the Erlang VM to execute/evaluate code in
+# non-interactive mode. It is used in this tool to create Erlang modules
+# documentation and run unit tests. The user can pass additional arguments to the
+# 'erl' command with ERL_FLAGS environment variable.
+# The '-eval' flag is always followed by an expression string appended later.
+EXEC_NON_INTERACTIVE = ['-noshell', '-noinput', '-eval']
+
+def configure(conf):
+	"""Detect the Erlang compiler/interpreter and set the command-line patterns used by the 'erl' task."""
+	conf.find_program('erlc', var='ERLC')
+	conf.find_program('erl', var='ERL')
+	conf.add_os_flags('ERLC_FLAGS')
+	conf.add_os_flags('ERL_FLAGS')
+	conf.env.ERLC_DEF_PATTERN = '-D%s'
+	conf.env.ERLC_INC_PATTERN = '-I%s'
+
+@TaskGen.extension('.erl')
+def process_erl_node(self, node):
+	"""Create one 'erl' compile task per .erl source, producing the .beam next to the target dir."""
+	tsk = self.create_task('erl', node, node.change_ext('.beam'))
+	# the output directory itself is an include dir, plus any 'includes' attribute
+	tsk.erlc_incnodes = [tsk.outputs[0].parent] + self.to_incnodes(self.includes)
+	tsk.env.append_value('ERLC_INCPATHS', [x.abspath() for x in tsk.erlc_incnodes])
+	tsk.env.append_value('ERLC_DEFINES', self.to_list(getattr(self, 'defines', [])))
+	tsk.env.append_value('ERLC_FLAGS', self.to_list(getattr(self, 'flags', [])))
+	# erlc writes its output to the current directory, so run from the output folder
+	tsk.cwd = tsk.outputs[0].parent
+
+class erl(Task.Task):
+	"""Compile a .erl file into a .beam with erlc."""
+	color = 'GREEN'
+	run_str = '${ERLC} ${ERL_FLAGS} ${ERLC_INC_PATTERN:ERLC_INCPATHS} ${ERLC_DEF_PATTERN:ERLC_DEFINES} ${SRC}'
+
+	def scan(task):
+		"""Recursively scan -include("...") directives to compute header dependencies."""
+		node = task.inputs[0]
+
+		deps = []
+		scanned = set([])
+		nodes_to_scan = [node]
+
+		# breadth-first walk; 'scanned' guards against include cycles
+		for n in nodes_to_scan:
+			if n.abspath() in scanned:
+				continue
+
+			# NOTE(review): non-raw regex string; \( \) \. are invalid escape
+			# sequences on modern Python — should be a raw string r'...'
+			for i in re.findall('-include\("(.*)"\)\.', n.read()):
+				for d in task.erlc_incnodes:
+					r = d.find_node(i)
+					if r:
+						deps.append(r)
+						nodes_to_scan.append(r)
+						break
+			scanned.add(n.abspath())
+
+		return (deps, [])
+
+@TaskGen.extension('.beam')
+def process(self, node):
+	# .beam files are outputs/inputs handled elsewhere; ignore them as sources
+	pass
+
+
+class erl_test(Task.Task):
+	"""Run eunit tests through 'erl'; the test expression is assembled in add_erl_test_run."""
+	color = 'BLUE'
+	run_str = '${ERL} ${ERL_FLAGS} ${ERL_TEST_FLAGS}'
+
+@feature('eunit')
+@after_method('process_source')
+def add_erl_test_run(self):
+	"""Create an erl_test task calling module:test() for every compiled .beam of this task generator."""
+	test_modules = [t.outputs[0] for t in self.tasks]
+	test_task = self.create_task('erl_test')
+	test_task.set_inputs(self.source + test_modules)
+	# run from the beam directory so module names resolve without load paths
+	test_task.cwd = test_modules[0].parent
+
+	test_task.env.append_value('ERL_FLAGS', self.to_list(getattr(self, 'flags', [])))
+
+	# exit code 0 only if every module's test() returns ok
+	test_list = ", ".join([m.change_ext("").path_from(test_task.cwd)+":test()" for m in test_modules])
+	test_flag = 'halt(case lists:all(fun(Elem) -> Elem == ok end, [%s]) of true -> 0; false -> 1 end).' % test_list
+	test_task.env.append_value('ERL_TEST_FLAGS', EXEC_NON_INTERACTIVE)
+	test_task.env.append_value('ERL_TEST_FLAGS', test_flag)
+
+
+class edoc(Task.Task):
+	"""Generate HTML documentation for an Erlang module via edoc."""
+	color = 'BLUE'
+	run_str = "${ERL} ${ERL_FLAGS} ${ERL_DOC_FLAGS}"
+	def keyword(self):
+		# progress-line label shown while the task runs
+		return 'Generating edoc'
+
+@feature('edoc')
+@before_method('process_source')
+def add_edoc_task(self):
+	# do not process source, it would create double erl->beam task
+	self.meths.remove('process_source')
+	# edoc emits the html page plus fixed asset files in the same directory
+	e = self.path.find_resource(self.source)
+	t = e.change_ext('.html')
+	png = t.parent.make_node('erlang.png')
+	css = t.parent.make_node('stylesheet.css')
+	tsk = self.create_task('edoc', e, [t, png, css])
+	tsk.cwd = tsk.outputs[0].parent
+	tsk.env.append_value('ERL_DOC_FLAGS', EXEC_NON_INTERACTIVE)
+	tsk.env.append_value('ERL_DOC_FLAGS', 'edoc:files(["%s"]), halt(0).' % tsk.inputs[0].abspath())
+	# TODO the above can break if a file path contains '"'
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2017-2018 (ita)
+
+"""
+A system for fast partial rebuilds
+
+Creating a large amount of task objects up front can take some time.
+By making a few assumptions, it is possible to avoid creating
+task objects for targets that are already up-to-date.
+
+On a silly benchmark the gain observed for 1M tasks can be 5m->10s
+for a single file change.
+
+Usage::
+
+ def options(opt):
+ opt.load('fast_partial')
+
+Assumptions:
+* Mostly for C/C++/Fortran targets with link tasks (object-only targets are not handled)
+* For full project builds: no --targets and no pruning from subfolders
+* The installation phase is ignored
+* `use=` dependencies are specified up front even across build groups
+* Task generator source files are not obtained from globs
+
+Implementation details:
+* The first layer obtains file timestamps to recalculate file hashes only
+ when necessary (similar to md5_tstamp); the timestamps are then stored
+ in a dedicated pickle file
+* A second layer associates each task generator to a file set to help
+ detecting changes. Task generators are to create their tasks only when
+ the related files have been modified. A specific db file is created
+ to store such data (5m -> 1m10)
+* A third layer binds build context proxies onto task generators, replacing
+ the default context. While loading data for the full build uses more memory
+ (4GB -> 9GB), partial builds are then much faster (1m10 -> 13s)
+* A fourth layer enables a 2-level cache on file signatures to
+ reduce the size of the main pickle file (13s -> 10s)
+"""
+
+import os
+from waflib import Build, Context, Errors, Logs, Task, TaskGen, Utils
+from waflib.TaskGen import feature, after_method, taskgen_method
+import waflib.Node
+
+# task-generator staleness states
+DONE = 0	# nothing changed, tasks can be skipped entirely
+DIRTY = 1	# inputs changed, tasks must be created and run
+NEEDED = 2	# pulled in via 'use' only; object tasks may be skipped
+
+# features whose object files can be skipped when only a relink is needed
+SKIPPABLE = ['cshlib', 'cxxshlib', 'cstlib', 'cxxstlib', 'cprogram', 'cxxprogram']
+
+# pickle file holding {path: mtime} data between builds
+TSTAMP_DB = '.wafpickle_tstamp_db_file'
+
+# build-context attributes persisted per task generator by bld_proxy
+SAVED_ATTRS = 'root node_sigs task_sigs imp_sigs raw_deps node_deps'.split()
+
+class bld_proxy(object):
+	"""Per-task-generator proxy over the real build context.
+
+	Each proxy keeps its own Node class and its own copies of SAVED_ATTRS so that
+	build data can be pickled/unpickled per task generator (one db file each,
+	suffixed by store_key). Attribute reads fall back to the wrapped context;
+	writes and deletes are forwarded to it.
+	"""
+	def __init__(self, bld):
+		# use object.__setattr__ to bypass the forwarding __setattr__ below
+		object.__setattr__(self, 'bld', bld)
+
+		object.__setattr__(self, 'node_class', type('Nod3', (waflib.Node.Node,), {}))
+		self.node_class.__module__ = 'waflib.Node'
+		self.node_class.ctx = self
+
+		object.__setattr__(self, 'root', self.node_class('', None))
+		for x in SAVED_ATTRS:
+			if x != 'root':
+				object.__setattr__(self, x, {})
+
+		self.fix_nodes()
+
+	def __setattr__(self, name, value):
+		# attribute writes go to the wrapped context, not the proxy
+		bld = object.__getattribute__(self, 'bld')
+		setattr(bld, name, value)
+
+	def __delattr__(self, name):
+		bld = object.__getattribute__(self, 'bld')
+		delattr(bld, name)
+
+	def __getattribute__(self, name):
+		# proxy-local attributes win; everything else comes from the wrapped context
+		try:
+			return object.__getattribute__(self, name)
+		except AttributeError:
+			bld = object.__getattribute__(self, 'bld')
+			return getattr(bld, name)
+
+	def __call__(self, *k, **kw):
+		return self.bld(*k, **kw)
+
+	def fix_nodes(self):
+		# rebuild srcnode/path/bldnode in this proxy's own node tree
+		for x in ('srcnode', 'path', 'bldnode'):
+			node = self.root.find_dir(getattr(self.bld, x).abspath())
+			object.__setattr__(self, x, node)
+
+	def set_key(self, store_key):
+		# suffix identifying this task generator's private db file
+		object.__setattr__(self, 'store_key', store_key)
+
+	def fix_tg_path(self, *tgs):
+		# changing Node objects on task generators is possible
+		# yet, all Node objects must belong to the same parent
+		for tg in tgs:
+			tg.path = self.root.make_node(tg.path.abspath())
+
+	def restore(self):
+		"""Load this proxy's pickled SAVED_ATTRS from its private db file (missing/corrupt files are tolerated)."""
+		dbfn = os.path.join(self.variant_dir, Context.DBFILE + self.store_key)
+		Logs.debug('rev_use: reading %s', dbfn)
+		try:
+			data = Utils.readf(dbfn, 'rb')
+		except (EnvironmentError, EOFError):
+			# handle missing file/empty file
+			Logs.debug('rev_use: Could not load the build cache %s (missing)', dbfn)
+		else:
+			try:
+				# pickle_lock: Nod3 is a process-wide alias used during (un)pickling
+				waflib.Node.pickle_lock.acquire()
+				waflib.Node.Nod3 = self.node_class
+				try:
+					data = Build.cPickle.loads(data)
+				except Exception as e:
+					Logs.debug('rev_use: Could not pickle the build cache %s: %r', dbfn, e)
+				else:
+					for x in SAVED_ATTRS:
+						object.__setattr__(self, x, data.get(x, {}))
+			finally:
+				waflib.Node.pickle_lock.release()
+		self.fix_nodes()
+
+	def store(self):
+		"""Pickle this proxy's SAVED_ATTRS to its private db file (atomic write via .tmp + rename)."""
+		data = {}
+		for x in Build.SAVED_ATTRS:
+			data[x] = getattr(self, x)
+		db = os.path.join(self.variant_dir, Context.DBFILE + self.store_key)
+
+		try:
+			waflib.Node.pickle_lock.acquire()
+			waflib.Node.Nod3 = self.node_class
+			x = Build.cPickle.dumps(data, Build.PROTOCOL)
+		finally:
+			waflib.Node.pickle_lock.release()
+
+		Logs.debug('rev_use: storing %s', db)
+		Utils.writef(db + '.tmp', x, m='wb')
+		try:
+			# preserve ownership of the previous db file when possible
+			st = os.stat(db)
+			os.remove(db)
+			if not Utils.is_win32:
+				os.chown(db + '.tmp', st.st_uid, st.st_gid)
+		except (AttributeError, OSError):
+			pass
+		os.rename(db + '.tmp', db)
+
+class bld(Build.BuildContext):
+	"""Build context for fast partial rebuilds.
+
+	Wraps every task generator in a bld_proxy, records per-generator file/tstamp
+	data after each build (store_tstamps), and before building prunes the set of
+	task generators to those that are stale or needed via 'use' propagation
+	(compute_needed_tgs / post_group).
+	"""
+	def __init__(self, **kw):
+		super(bld, self).__init__(**kw)
+		# filename -> (mtime, hash); global (non-persistent) layer of the 2-level cache
+		self.hashes_md5_tstamp = {}
+
+	def __call__(self, *k, **kw):
+		# this is one way of doing it, one could use a task generator method too
+		bld = kw['bld'] = bld_proxy(self)
+		ret = TaskGen.task_gen(*k, **kw)
+		self.task_gen_cache_names = {}
+		self.add_to_group(ret, group=kw.get('group'))
+		ret.bld = bld
+		# unique, path-based key for the proxy's private db file
+		bld.set_key(ret.path.abspath().replace(os.sep, '') + str(ret.idx))
+		return ret
+
+	def is_dirty(self):
+		return True
+
+	def store_tstamps(self):
+		# Called after a build is finished
+		# For each task generator, record all files involved in task objects
+		# optimization: done only if there was something built
+		do_store = False
+		try:
+			f_deps = self.f_deps
+		except AttributeError:
+			f_deps = self.f_deps = {}
+			self.f_tstamps = {}
+
+		allfiles = set()
+		for g in self.groups:
+			for tg in g:
+				try:
+					staleness = tg.staleness
+				except AttributeError:
+					staleness = DIRTY
+
+				if staleness != DIRTY:
+					# DONE case: there was nothing built
+					# NEEDED case: the tg was brought in because of 'use' propagation
+					# but nothing really changed for them, there may be incomplete
+					# tasks (object files) and in this case it is best to let the next build
+					# figure out if an input/output file changed
+					continue
+
+				do_cache = False
+				for tsk in tg.tasks:
+					if tsk.hasrun == Task.SUCCESS:
+						do_cache = True
+						pass
+					elif tsk.hasrun == Task.SKIPPED:
+						pass
+					else:
+						# one failed task, clear the cache for this tg
+						try:
+							del f_deps[(tg.path.abspath(), tg.idx)]
+						except KeyError:
+							pass
+						else:
+							# just store the new state because there is a change
+							do_store = True
+
+						# skip the rest because there is no valid cache possible
+						break
+				else:
+					if not do_cache:
+						# all skipped, but is there anything in cache?
+						try:
+							f_deps[(tg.path.abspath(), tg.idx)]
+						except KeyError:
+							# probably cleared because a wscript file changed
+							# store it
+							do_cache = True
+
+					if do_cache:
+
+						# there was a rebuild, store the data structure too
+						tg.bld.store()
+
+						# all tasks skipped but no cache
+						# or a successful task build
+						do_store = True
+						st = set()
+						for tsk in tg.tasks:
+							st.update(tsk.inputs)
+							st.update(self.node_deps.get(tsk.uid(), []))
+
+						# TODO do last/when loading the tgs?
+						lst = []
+						for k in ('wscript', 'wscript_build'):
+							n = tg.path.find_node(k)
+							if n:
+								n.get_bld_sig()
+								lst.append(n.abspath())
+
+						lst.extend(sorted(x.abspath() for x in st))
+						allfiles.update(lst)
+						f_deps[(tg.path.abspath(), tg.idx)] = lst
+
+		for x in allfiles:
+			# f_tstamps has everything, while md5_tstamp can be relatively empty on partial builds
+			self.f_tstamps[x] = self.hashes_md5_tstamp[x][0]
+
+		if do_store:
+			# atomic write of the global timestamp db
+			dbfn = os.path.join(self.variant_dir, TSTAMP_DB)
+			Logs.debug('rev_use: storing %s', dbfn)
+			dbfn_tmp = dbfn + '.tmp'
+			x = Build.cPickle.dumps([self.f_tstamps, f_deps], Build.PROTOCOL)
+			Utils.writef(dbfn_tmp, x, m='wb')
+			os.rename(dbfn_tmp, dbfn)
+			Logs.debug('rev_use: stored %s', dbfn)
+
+	def store(self):
+		# persist tstamp data first, then the regular build db if anything ran
+		self.store_tstamps()
+		if self.producer.dirty:
+			Build.BuildContext.store(self)
+
+	def compute_needed_tgs(self):
+		# assume the 'use' keys are not modified during the build phase
+
+		dbfn = os.path.join(self.variant_dir, TSTAMP_DB)
+		Logs.debug('rev_use: Loading %s', dbfn)
+		try:
+			data = Utils.readf(dbfn, 'rb')
+		except (EnvironmentError, EOFError):
+			Logs.debug('rev_use: Could not load the build cache %s (missing)', dbfn)
+			self.f_deps = {}
+			self.f_tstamps = {}
+		else:
+			try:
+				self.f_tstamps, self.f_deps = Build.cPickle.loads(data)
+			except Exception as e:
+				Logs.debug('rev_use: Could not pickle the build cache %s: %r', dbfn, e)
+				self.f_deps = {}
+				self.f_tstamps = {}
+			else:
+				Logs.debug('rev_use: Loaded %s', dbfn)
+
+
+		# 1. obtain task generators that contain rebuilds
+		# 2. obtain the 'use' graph and its dual
+		stales = set()
+		reverse_use_map = Utils.defaultdict(list)
+		use_map = Utils.defaultdict(list)
+
+		for g in self.groups:
+			for tg in g:
+				if tg.is_stale():
+					stales.add(tg)
+
+				try:
+					lst = tg.use = Utils.to_list(tg.use)
+				except AttributeError:
+					pass
+				else:
+					for x in lst:
+						try:
+							xtg = self.get_tgen_by_name(x)
+						except Errors.WafError:
+							pass
+						else:
+							use_map[tg].append(xtg)
+							reverse_use_map[xtg].append(tg)
+
+		Logs.debug('rev_use: found %r stale tgs', len(stales))
+
+		# 3. dfs to post downstream tg as stale
+		visited = set()
+		def mark_down(tg):
+			if tg in visited:
+				return
+			visited.add(tg)
+			Logs.debug('rev_use: marking down %r as stale', tg.name)
+			tg.staleness = DIRTY
+			for x in reverse_use_map[tg]:
+				mark_down(x)
+		for tg in stales:
+			mark_down(tg)
+
+		# 4. dfs to find ancestors tg to mark as needed
+		self.needed_tgs = needed_tgs = set()
+		def mark_needed(tg):
+			if tg in needed_tgs:
+				return
+			needed_tgs.add(tg)
+			if tg.staleness == DONE:
+				Logs.debug('rev_use: marking up %r as needed', tg.name)
+				tg.staleness = NEEDED
+			for x in use_map[tg]:
+				mark_needed(x)
+		for xx in visited:
+			mark_needed(xx)
+
+		# so we have the whole tg trees to post in the set "needed"
+		# load their build trees
+		for tg in needed_tgs:
+			tg.bld.restore()
+			tg.bld.fix_tg_path(tg)
+
+		# the stale ones should be fully build, while the needed ones
+		# may skip a few tasks, see create_compiled_task and apply_link_after below
+		Logs.debug('rev_use: amount of needed task gens: %r', len(needed_tgs))
+
+	def post_group(self):
+		# assumption: we can ignore the folder/subfolders cuts
+		def tgpost(tg):
+			try:
+				f = tg.post
+			except AttributeError:
+				pass
+			else:
+				f()
+
+		if not self.targets or self.targets == '*':
+			for tg in self.groups[self.current_group]:
+				# this can cut quite a lot of tg objects
+				if tg in self.needed_tgs:
+					tgpost(tg)
+		else:
+			# default implementation
+			# NOTE(review): missing 'self' — should be Build.BuildContext.post_group(self),
+			# the unbound call below would raise TypeError; TODO confirm against upstream
+			return Build.BuildContext.post_group()
+
+	def get_build_iterator(self):
+		# prune to needed task generators only for full builds
+		if not self.targets or self.targets == '*':
+			self.compute_needed_tgs()
+		return Build.BuildContext.get_build_iterator(self)
+
+@taskgen_method
+def is_stale(self):
+	"""Return True if this task generator must be posted (inputs/config/cache changed).
+
+	Side effect: sets self.staleness to DIRTY (default) or DONE when all
+	cached file timestamps are unchanged.
+	"""
+	# assume no globs
+	self.staleness = DIRTY
+
+	# 1. the case of always stale targets
+	if getattr(self, 'always_stale', False):
+		return True
+
+	# 2. check if the db file exists
+	db = os.path.join(self.bld.variant_dir, Context.DBFILE)
+	try:
+		dbstat = os.stat(db).st_mtime
+	except OSError:
+		# NOTE(review): '%r' has no matching argument (should pass self.name);
+		# the message would fail to format when debug logging is active — TODO confirm
+		Logs.debug('rev_use: must post %r because this is a clean build')
+		return True
+
+	# 3. check if the configuration changed
+	if os.stat(self.bld.bldnode.find_node('c4che/build.config.py').abspath()).st_mtime > dbstat:
+		Logs.debug('rev_use: must post %r because the configuration has changed', self.name)
+		return True
+
+	# 3.a any tstamp data?
+	try:
+		f_deps = self.bld.f_deps
+	except AttributeError:
+		Logs.debug('rev_use: must post %r because there is no f_deps', self.name)
+		return True
+
+	# 4. check if this is the first build (no cache)
+	try:
+		lst = f_deps[(self.path.abspath(), self.idx)]
+	except KeyError:
+		Logs.debug('rev_use: must post %r because there it has no cached data', self.name)
+		return True
+
+	try:
+		cache = self.bld.cache_tstamp_rev_use
+	except AttributeError:
+		cache = self.bld.cache_tstamp_rev_use = {}
+
+	# 5. check the timestamp of each dependency files listed is unchanged
+	f_tstamps = self.bld.f_tstamps
+	for x in lst:
+		try:
+			old_ts = f_tstamps[x]
+		except KeyError:
+			Logs.debug('rev_use: must post %r because %r is not in cache', self.name, x)
+			return True
+
+		try:
+			try:
+				ts = cache[x]
+			except KeyError:
+				ts = cache[x] = os.stat(x).st_mtime
+		except OSError:
+			# a dependency disappeared: drop the cache entry and rebuild
+			del f_deps[(self.path.abspath(), self.idx)]
+			Logs.debug('rev_use: must post %r because %r does not exist anymore', self.name, x)
+			return True
+		else:
+			if ts != old_ts:
+				Logs.debug('rev_use: must post %r because the timestamp on %r changed %r %r', self.name, x, old_ts, ts)
+				return True
+
+	self.staleness = DONE
+	return False
+
+@taskgen_method
+def create_compiled_task(self, name, node):
+	"""Override: create the object-file task, or return None to skip it for NEEDED link-only targets."""
+	# skip the creation of object files
+	# assumption: object-only targets are not skippable
+	if self.staleness == NEEDED:
+		# only libraries/programs can skip object files
+		for x in SKIPPABLE:
+			if x in self.features:
+				return None
+
+	out = '%s.%d.o' % (node.name, self.idx)
+	task = self.create_task(name, node, node.parent.find_or_declare(out))
+	try:
+		self.compiled_tasks.append(task)
+	except AttributeError:
+		self.compiled_tasks = [task]
+	return task
+
+@feature(*SKIPPABLE)
+@after_method('apply_link')
+def apply_link_after(self):
+	"""Mark all tasks of a NEEDED (use-only) target as SKIPPED so nothing actually runs."""
+	# cprogram/cxxprogram might be unnecessary
+	if self.staleness != NEEDED:
+		return
+	for tsk in self.tasks:
+		tsk.hasrun = Task.SKIPPED
+
+def path_from(self, node):
+	"""Monkey-patched Node.path_from that tolerates nodes from a different context (bld vs bld_proxy trees)."""
+	# handle nodes of distinct types
+	if node.ctx is not self.ctx:
+		node = self.ctx.root.make_node(node.abspath())
+	return self.default_path_from(node)
+# keep the original implementation and install the wrapper
+waflib.Node.Node.default_path_from = waflib.Node.Node.path_from
+waflib.Node.Node.path_from = path_from
+
+def h_file(self):
+	"""Monkey-patched Node.h_file returning the file hash, backed by a 2-level (mtime, hash) cache."""
+	# similar to md5_tstamp.py, but with 2-layer cache
+	# global_cache for the build context common for all task generators
+	# local_cache for the build context proxy (one by task generator)
+	#
+	# the global cache is not persistent
+	# the local cache is persistent and meant for partial builds
+	#
+	# assume all calls are made from a single thread
+	#
+	filename = self.abspath()
+	st = os.stat(filename)
+
+	global_cache = self.ctx.bld.hashes_md5_tstamp
+	local_cache = self.ctx.hashes_md5_tstamp
+
+	if filename in global_cache:
+		# value already calculated in this build
+		cval = global_cache[filename]
+
+		# the value in global cache is assumed to be calculated once
+		# reverifying it could cause task generators
+		# to get distinct tstamp values, thus missing rebuilds
+		local_cache[filename] = cval
+		return cval[1]
+
+	if filename in local_cache:
+		cval = local_cache[filename]
+		if cval[0] == st.st_mtime:
+			# correct value from a previous build
+			# put it in the global cache
+			global_cache[filename] = cval
+			return cval[1]
+
+	# cache miss or stale mtime: hash the file and refresh both layers
+	ret = Utils.h_file(filename)
+	local_cache[filename] = global_cache[filename] = (st.st_mtime, ret)
+	return ret
+waflib.Node.Node.h_file = h_file
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+from waflib.Tools import fc, fc_config, fc_scan
+from waflib.Configure import conf
+
+from waflib.Tools.compiler_fc import fc_compiler
+fc_compiler['linux'].insert(0, 'fc_bgxlf')
+
+@conf
+def find_bgxlf(conf):
+	"""Find the IBM Blue Gene XL Fortran compiler and record it as FC."""
+	fc = conf.find_program(['bgxlf2003_r','bgxlf2003'], var='FC')
+	conf.get_xlf_version(fc)
+	conf.env.FC_NAME = 'BGXLF'
+
+@conf
+def bg_flags(self):
+	"""Set Blue Gene specific link/shared-library flags (no soname support)."""
+	self.env.SONAME_ST = ''
+	self.env.FCSHLIB_MARKER = ''
+	self.env.FCSTLIB_MARKER = ''
+	self.env.FCFLAGS_fcshlib = ['-fPIC']
+	self.env.LINKFLAGS_fcshlib = ['-G', '-Wl,-bexpfull']
+
+def configure(conf):
+	"""Full detection sequence for the bgxlf compiler (xlf flags plus Blue Gene overrides)."""
+	conf.find_bgxlf()
+	conf.find_ar()
+	conf.fc_flags()
+	conf.fc_add_flags()
+	conf.xlf_flags()
+	conf.bg_flags()
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+import re
+from waflib.Tools import fc, fc_config, fc_scan
+from waflib.Configure import conf
+
+from waflib.Tools.compiler_fc import fc_compiler
+fc_compiler['linux'].append('fc_cray')
+
+@conf
+def find_crayftn(conf):
+	"""Find the Cray fortran compiler (will look in the environment variable 'FC')"""
+	fc = conf.find_program(['crayftn'], var='FC')
+	conf.get_crayftn_version(fc)
+	conf.env.FC_NAME = 'CRAY'
+	# crayftn produces upper-case module file names with a .mod suffix
+	conf.env.FC_MOD_CAPITALIZATION = 'UPPER.mod'
+
+@conf
+def crayftn_flags(conf):
+	"""Set Cray-specific compile/link flags."""
+	v = conf.env
+	v['_FCMODOUTFLAGS']  = ['-em', '-J.'] # enable module files and put them in the current directory
+	v['FCFLAGS_DEBUG'] = ['-m1'] # more verbose compiler warnings
+	v['FCFLAGS_fcshlib']   = ['-h pic']
+	v['LINKFLAGS_fcshlib'] = ['-h shared']
+
+	v['FCSTLIB_MARKER'] = '-h static'
+	v['FCSHLIB_MARKER'] = '-h dynamic'
+
+@conf
+def get_crayftn_version(conf, fc):
+	"""Run 'fc -V' and store (major, minor) in conf.env.FC_VERSION; fatal if unparseable."""
+	version_re = re.compile(r"Cray Fortran\s*:\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
+	cmd = fc + ['-V']
+	out,err = fc_config.getoutput(conf, cmd, stdin=False)
+	# the banner may be printed on stdout or stderr depending on the release
+	if out:
+		match = version_re(out)
+	else:
+		match = version_re(err)
+	if not match:
+		conf.fatal('Could not determine the Cray Fortran compiler version.')
+	k = match.groupdict()
+	conf.env['FC_VERSION'] = (k['major'], k['minor'])
+
+def configure(conf):
+	"""Full detection sequence for the Cray fortran compiler."""
+	conf.find_crayftn()
+	conf.find_ar()
+	conf.fc_flags()
+	conf.fc_add_flags()
+	conf.crayftn_flags()
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+import re
+from waflib import Utils
+from waflib.Tools import fc,fc_config,fc_scan
+from waflib.Configure import conf
+
+from waflib.Tools.compiler_fc import fc_compiler
+fc_compiler['linux'].insert(0, 'fc_nag')
+
+@conf
+def find_nag(conf):
+	"""Find the NAG Fortran Compiler (will look in the environment variable 'FC')"""
+
+	fc = conf.find_program(['nagfor'], var='FC')
+	conf.get_nag_version(fc)
+	conf.env.FC_NAME = 'NAG'
+	# nagfor writes lower-case module file names
+	conf.env.FC_MOD_CAPITALIZATION = 'lower'
+
+@conf
+def nag_flags(conf):
+	"""Set NAG-specific compile/link flags (-C=all enables all runtime checks in debug)."""
+	v = conf.env
+	v.FCFLAGS_DEBUG = ['-C=all']
+	v.FCLNK_TGT_F = ['-o', '']
+	v.FC_TGT_F = ['-c', '-o', '']
+
+@conf
+def nag_modifier_platform(conf):
+	"""Call conf.nag_modifier_<dest_os>() if such a platform hook exists."""
+	dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
+	nag_modifier_func = getattr(conf, 'nag_modifier_' + dest_os, None)
+	if nag_modifier_func:
+		nag_modifier_func()
+
+@conf
+def get_nag_version(conf, fc):
+	"""Get the NAG compiler version"""
+
+	version_re = re.compile(r"^NAG Fortran Compiler *Release *(?P<major>\d*)\.(?P<minor>\d*)", re.M).search
+	cmd = fc + ['-V']
+
+	out, err = fc_config.getoutput(conf,cmd,stdin=False)
+	# the banner may land on stdout or stderr; try stdout first then fall back
+	if out:
+		match = version_re(out)
+		if not match:
+			match = version_re(err)
+	else: match = version_re(err)
+	if not match:
+		conf.fatal('Could not determine the NAG version.')
+	k = match.groupdict()
+	conf.env['FC_VERSION'] = (k['major'], k['minor'])
+
+def configure(conf):
+	"""Full detection sequence for the NAG fortran compiler."""
+	conf.find_nag()
+	conf.find_ar()
+	conf.fc_flags()
+	conf.fc_add_flags()
+	conf.nag_flags()
+	conf.nag_modifier_platform()
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+import re
+from waflib.Tools import fc, fc_config, fc_scan
+from waflib.Configure import conf
+
+from waflib.Tools.compiler_fc import fc_compiler
+fc_compiler['linux'].append('fc_nec')
+
+@conf
+def find_sxfc(conf):
+	"""Find the NEC fortran compiler (will look in the environment variable 'FC')"""
+	fc = conf.find_program(['sxf90','sxf03'], var='FC')
+	conf.get_sxfc_version(fc)
+	conf.env.FC_NAME = 'NEC'
+	conf.env.FC_MOD_CAPITALIZATION = 'lower'
+
+@conf
+def sxfc_flags(conf):
+	"""Reset the generic fortran flag variables: the NEC SX compiler needs none of them."""
+	v = conf.env
+	v['_FCMODOUTFLAGS']  = [] # enable module files and put them in the current directory
+	v['FCFLAGS_DEBUG'] = [] # more verbose compiler warnings
+	v['FCFLAGS_fcshlib']   = []
+	v['LINKFLAGS_fcshlib'] = []
+
+	v['FCSTLIB_MARKER'] = ''
+	v['FCSHLIB_MARKER'] = ''
+
+@conf
+def get_sxfc_version(conf, fc):
+	"""Parse 'fc -V' output for the FORTRAN90/SX or NEC Fortran 2003 version banner."""
+	version_re = re.compile(r"FORTRAN90/SX\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
+	cmd = fc + ['-V']
+	out,err = fc_config.getoutput(conf, cmd, stdin=False)
+	if out:
+		match = version_re(out)
+	else:
+		match = version_re(err)
+	if not match:
+		# fall back to the Fortran 2003 compiler banner format
+		version_re=re.compile(r"NEC Fortran 2003 Compiler for\s*(?P<major>\S*)\s*\(c\)\s*(?P<minor>\d*)",re.I).search
+		if out:
+			match = version_re(out)
+		else:
+			match = version_re(err)
+		if not match:
+			conf.fatal('Could not determine the NEC Fortran compiler version.')
+	k = match.groupdict()
+	conf.env['FC_VERSION'] = (k['major'], k['minor'])
+
+def configure(conf):
+	"""Full detection sequence for the NEC SX fortran compiler (uses 'sxar' instead of find_ar)."""
+	conf.find_sxfc()
+	conf.find_program('sxar',var='AR')
+	conf.add_os_flags('ARFLAGS')
+	if not conf.env.ARFLAGS:
+		conf.env.ARFLAGS=['rcs']
+
+	conf.fc_flags()
+	conf.fc_add_flags()
+	conf.sxfc_flags()
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+import re
+from waflib import Utils
+from waflib.Tools import fc,fc_config,fc_scan
+from waflib.Configure import conf
+
+from waflib.Tools.compiler_fc import fc_compiler
+fc_compiler['linux'].insert(0, 'fc_open64')
+
+@conf
+def find_openf95(conf):
+	"""Find the Open64 Fortran Compiler (will look in the environment variable 'FC')"""
+
+	fc = conf.find_program(['openf95', 'openf90'], var='FC')
+	conf.get_open64_version(fc)
+	conf.env.FC_NAME = 'OPEN64'
+	# open64 produces upper-case module file names with a .mod suffix
+	conf.env.FC_MOD_CAPITALIZATION = 'UPPER.mod'
+
+@conf
+def openf95_flags(conf):
+	"""Set Open64-specific compile flags (-fullwarn for debug builds)."""
+	v = conf.env
+	v['FCFLAGS_DEBUG'] = ['-fullwarn']
+
+@conf
+def openf95_modifier_platform(conf):
+	"""Call conf.openf95_modifier_<dest_os>() if such a platform hook exists."""
+	dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
+	openf95_modifier_func = getattr(conf, 'openf95_modifier_' + dest_os, None)
+	if openf95_modifier_func:
+		openf95_modifier_func()
+
+@conf
+def get_open64_version(conf, fc):
+	"""Get the Open64 compiler version"""
+
+	version_re = re.compile(r"Open64 Compiler Suite: *Version *(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
+	cmd = fc + ['-version']
+
+	out, err = fc_config.getoutput(conf,cmd,stdin=False)
+	# the banner may be printed on stdout or stderr depending on the release
+	if out:
+		match = version_re(out)
+	else:
+		match = version_re(err)
+	if not match:
+		conf.fatal('Could not determine the Open64 version.')
+	k = match.groupdict()
+	conf.env['FC_VERSION'] = (k['major'], k['minor'])
+
+def configure(conf):
+	"""Full detection sequence for the Open64 fortran compiler."""
+	conf.find_openf95()
+	conf.find_ar()
+	conf.fc_flags()
+	conf.fc_add_flags()
+	conf.openf95_flags()
+	conf.openf95_modifier_platform()
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+import re
+from waflib.Tools import fc, fc_config, fc_scan
+from waflib.Configure import conf
+
+from waflib.Tools.compiler_fc import fc_compiler
+fc_compiler['linux'].append('fc_pgfortran')
+
+@conf
+def find_pgfortran(conf):
+ """Find the PGI fortran compiler (will look in the environment variable 'FC')"""
+ fc = conf.find_program(['pgfortran', 'pgf95', 'pgf90'], var='FC')
+ conf.get_pgfortran_version(fc)
+ conf.env.FC_NAME = 'PGFC'
+
+@conf
+def pgfortran_flags(conf):
+ v = conf.env
+ v['FCFLAGS_fcshlib'] = ['-shared']
+ v['FCFLAGS_DEBUG'] = ['-Minform=inform', '-Mstandard'] # why not
+ v['FCSTLIB_MARKER'] = '-Bstatic'
+ v['FCSHLIB_MARKER'] = '-Bdynamic'
+ v['SONAME_ST'] = '-soname %s'
+
+@conf
+def get_pgfortran_version(conf,fc):
+ version_re = re.compile(r"The Portland Group", re.I).search
+ cmd = fc + ['-V']
+ out,err = fc_config.getoutput(conf, cmd, stdin=False)
+ if out:
+ match = version_re(out)
+ else:
+ match = version_re(err)
+ if not match:
+ conf.fatal('Could not verify PGI signature')
+ cmd = fc + ['-help=variable']
+ out,err = fc_config.getoutput(conf, cmd, stdin=False)
+ if out.find('COMPVER')<0:
+ conf.fatal('Could not determine the compiler type')
+ k = {}
+ prevk = ''
+ out = out.splitlines()
+ for line in out:
+ lst = line.partition('=')
+ if lst[1] == '=':
+ key = lst[0].rstrip()
+ if key == '':
+ key = prevk
+ val = lst[2].rstrip()
+ k[key] = val
+ else:
+ prevk = line.partition(' ')[0]
+ def isD(var):
+ return var in k
+ def isT(var):
+ return var in k and k[var]!='0'
+ conf.env['FC_VERSION'] = (k['COMPVER'].split('.'))
+
+def configure(conf):
+ conf.find_pgfortran()
+ conf.find_ar()
+ conf.fc_flags()
+ conf.fc_add_flags()
+ conf.pgfortran_flags()
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+import re
+from waflib import Utils
+from waflib.Tools import fc,fc_config,fc_scan
+from waflib.Configure import conf
+
+from waflib.Tools.compiler_fc import fc_compiler
+fc_compiler['linux'].append('fc_solstudio')
+
+@conf
+def find_solstudio(conf):
+ """Find the Solaris Studio compiler (will look in the environment variable 'FC')"""
+
+ fc = conf.find_program(['sunf95', 'f95', 'sunf90', 'f90'], var='FC')
+ conf.get_solstudio_version(fc)
+ conf.env.FC_NAME = 'SOL'
+
+@conf
+def solstudio_flags(conf):
+ v = conf.env
+ v['FCFLAGS_fcshlib'] = ['-Kpic']
+ v['FCFLAGS_DEBUG'] = ['-w3']
+ v['LINKFLAGS_fcshlib'] = ['-G']
+ v['FCSTLIB_MARKER'] = '-Bstatic'
+ v['FCSHLIB_MARKER'] = '-Bdynamic'
+ v['SONAME_ST'] = '-h %s'
+
+@conf
+def solstudio_modifier_platform(conf):
+ dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
+ solstudio_modifier_func = getattr(conf, 'solstudio_modifier_' + dest_os, None)
+ if solstudio_modifier_func:
+ solstudio_modifier_func()
+
+@conf
+def get_solstudio_version(conf, fc):
+ """Get the compiler version"""
+
+ version_re = re.compile(r"Sun Fortran 95 *(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
+ cmd = fc + ['-V']
+
+ out, err = fc_config.getoutput(conf,cmd,stdin=False)
+ if out:
+ match = version_re(out)
+ else:
+ match = version_re(err)
+ if not match:
+ conf.fatal('Could not determine the Sun Studio Fortran version.')
+ k = match.groupdict()
+ conf.env['FC_VERSION'] = (k['major'], k['minor'])
+
+def configure(conf):
+ conf.find_solstudio()
+ conf.find_ar()
+ conf.fc_flags()
+ conf.fc_add_flags()
+ conf.solstudio_flags()
+ conf.solstudio_modifier_platform()
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+import re
+from waflib import Utils,Errors
+from waflib.Tools import fc,fc_config,fc_scan
+from waflib.Configure import conf
+
+from waflib.Tools.compiler_fc import fc_compiler
+fc_compiler['aix'].insert(0, 'fc_xlf')
+
+@conf
+def find_xlf(conf):
+ """Find the xlf program (will look in the environment variable 'FC')"""
+
+ fc = conf.find_program(['xlf2003_r', 'xlf2003', 'xlf95_r', 'xlf95', 'xlf90_r', 'xlf90', 'xlf_r', 'xlf'], var='FC')
+ conf.get_xlf_version(fc)
+ conf.env.FC_NAME='XLF'
+
+@conf
+def xlf_flags(conf):
+ v = conf.env
+ v['FCDEFINES_ST'] = '-WF,-D%s'
+ v['FCFLAGS_fcshlib'] = ['-qpic=small']
+ v['FCFLAGS_DEBUG'] = ['-qhalt=w']
+ v['LINKFLAGS_fcshlib'] = ['-Wl,-shared']
+
+@conf
+def xlf_modifier_platform(conf):
+ dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
+ xlf_modifier_func = getattr(conf, 'xlf_modifier_' + dest_os, None)
+ if xlf_modifier_func:
+ xlf_modifier_func()
+
+@conf
+def get_xlf_version(conf, fc):
+ """Get the compiler version"""
+
+ cmd = fc + ['-qversion']
+ try:
+ out, err = conf.cmd_and_log(cmd, output=0)
+ except Errors.WafError:
+ conf.fatal('Could not find xlf %r' % cmd)
+
+ for v in (r"IBM XL Fortran.* V(?P<major>\d*)\.(?P<minor>\d*)",):
+ version_re = re.compile(v, re.I).search
+ match = version_re(out or err)
+ if match:
+ k = match.groupdict()
+ conf.env['FC_VERSION'] = (k['major'], k['minor'])
+ break
+ else:
+ conf.fatal('Could not determine the XLF version.')
+
+def configure(conf):
+ conf.find_xlf()
+ conf.find_ar()
+ conf.fc_flags()
+ conf.fc_add_flags()
+ conf.xlf_flags()
+ conf.xlf_modifier_platform()
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Tool to embed file into objects
class file_to_object_s(Task.Task):
color = 'CYAN'
- dep_vars = ('DEST_CPU', 'DEST_BINFMT')
+ vars = ['DEST_CPU', 'DEST_BINFMT']
def run(self):
name = []
def configure(conf):
conf.load('gas')
conf.env.F2O_METHOD = ["c"]
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/python
+# encoding: utf-8
+# Grygoriy Fuchedzhy 2009
+
+"""
+Compile fluid files (FLTK graphics library). Use the 'fluid' feature in conjunction with the 'cxx' feature.
+"""
+
+from waflib import Task
+from waflib.TaskGen import extension
+
+class fluid(Task.Task):
+ color = 'BLUE'
+ ext_out = ['.h']
+ run_str = '${FLUID} -c -o ${TGT[0].abspath()} -h ${TGT[1].abspath()} ${SRC}'
+
+@extension('.fl')
+def process_fluid(self, node):
+ """add the .fl to the source list; the cxx file generated will be compiled when possible"""
+ cpp = node.change_ext('.cpp')
+ hpp = node.change_ext('.hpp')
+ self.create_task('fluid', node, [cpp, hpp])
+
+ if 'cxx' in self.features:
+ self.source.append(cpp)
+
+def configure(conf):
+ conf.find_program('fluid', var='FLUID')
+ conf.check_cfg(path='fltk-config', package='', args='--cxxflags --ldflags', uselib_store='FLTK', mandatory=True)
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
#
if platform == 'win32' and not conf.options.fi_path:
return
conf.check_freeimage(conf.options.fi_path, conf.options.fip)
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2011 (ita)
def compile(self):
pass
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011 (ita)
+
+"""
+Experimental F# stuff
+
+FSC="mono /path/to/fsc.exe" waf configure build
+"""
+
+from waflib import Utils, Task
+from waflib.TaskGen import before_method, after_method, feature
+from waflib.Tools import ccroot, cs
+
+ccroot.USELIB_VARS['fsc'] = set(['CSFLAGS', 'ASSEMBLIES', 'RESOURCES'])
+
+@feature('fs')
+@before_method('process_source')
+def apply_fsc(self):
+ cs_nodes = []
+ no_nodes = []
+ for x in self.to_nodes(self.source):
+ if x.name.endswith('.fs'):
+ cs_nodes.append(x)
+ else:
+ no_nodes.append(x)
+ self.source = no_nodes
+
+ bintype = getattr(self, 'type', self.gen.endswith('.dll') and 'library' or 'exe')
+ self.cs_task = tsk = self.create_task('fsc', cs_nodes, self.path.find_or_declare(self.gen))
+ tsk.env.CSTYPE = '/target:%s' % bintype
+ tsk.env.OUT = '/out:%s' % tsk.outputs[0].abspath()
+
+ inst_to = getattr(self, 'install_path', bintype=='exe' and '${BINDIR}' or '${LIBDIR}')
+ if inst_to:
+ # note: we are making a copy, so the files added to cs_task.outputs won't be installed automatically
+ mod = getattr(self, 'chmod', bintype=='exe' and Utils.O755 or Utils.O644)
+ self.install_task = self.add_install_files(install_to=inst_to, install_from=self.cs_task.outputs[:], chmod=mod)
+
+feature('fs')(cs.use_cs)
+after_method('apply_fsc')(cs.use_cs)
+
+feature('fs')(cs.debug_cs)
+after_method('apply_fsc', 'use_cs')(cs.debug_cs)
+
+class fsc(Task.Task):
+ """
+ Compile F# files
+ """
+ color = 'YELLOW'
+ run_str = '${FSC} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}'
+
+def configure(conf):
+ """
+	Find an F# compiler, set the variable FSC for the compiler and FS_NAME (mono or fsc)
+ """
+ conf.find_program(['fsc.exe', 'fsharpc'], var='FSC')
+ conf.env.ASS_ST = '/r:%s'
+ conf.env.RES_ST = '/resource:%s'
+
+ conf.env.FS_NAME = 'fsc'
+ if str(conf.env.FSC).lower().find('fsharpc') > -1:
+ conf.env.FS_NAME = 'mono'
+
and prepare the dependency calculation for the next run.
This affects the cxx class, so make sure to load Qt5 after this tool.
-Usage:
+Usage::
+
+ def options(opt):
+ opt.load('compiler_cxx')
def configure(conf):
- conf.load('gccdeps')
+ conf.load('compiler_cxx gccdeps')
"""
import os, re, threading
else:
conf.env.append_value('CXXFLAGS', gccdeps_flags)
conf.env.append_unique('ENABLE_GCCDEPS', 'cxx')
+
+def options(opt):
+ raise ValueError('Do not load gccdeps options')
+
+++ /dev/null
-#!/usr/bin/env python
-# encoding: utf-8
-# Tom Wambold tom5760 gmail.com 2009
-# Thomas Nagy 2010
-
-"""
-Go as a language may look nice, but its toolchain is one of the worse a developer
-has ever seen. It keeps changing though, and I would like to believe that it will get
-better eventually, but the crude reality is that this tool and the examples are
-getting broken every few months.
-
-If you have been lured into trying to use Go, you should stick to their Makefiles.
-"""
-
-import os, platform
-
-from waflib import Utils, Task, TaskGen
-from waflib.TaskGen import feature, extension, after_method, before_method
-from waflib.Tools.ccroot import link_task, stlink_task, propagate_uselib_vars, process_use
-
-class go(Task.Task):
- run_str = '${GOC} ${GOCFLAGS} ${CPPPATH_ST:INCPATHS} -o ${TGT} ${SRC}'
-
-class gopackage(stlink_task):
- run_str = '${GOP} grc ${TGT} ${SRC}'
-
-class goprogram(link_task):
- run_str = '${GOL} ${GOLFLAGS} -o ${TGT} ${SRC}'
- inst_to = '${BINDIR}'
- chmod = Utils.O755
-
-class cgopackage(stlink_task):
- color = 'YELLOW'
- inst_to = '${LIBDIR}'
- ext_in = ['.go']
- ext_out = ['.a']
-
- def run(self):
- src_dir = self.generator.bld.path
- source = self.inputs
- target = self.outputs[0].change_ext('')
-
- #print ("--> %s" % self.outputs)
- #print ('++> %s' % self.outputs[1])
- bld_dir = self.outputs[1]
- bld_dir.mkdir()
- obj_dir = bld_dir.make_node('_obj')
- obj_dir.mkdir()
-
- bld_srcs = []
- for s in source:
- # FIXME: it seems gomake/cgo stumbles on filenames like a/b/c.go
- # -> for the time being replace '/' with '_'...
- #b = bld_dir.make_node(s.path_from(src_dir))
- b = bld_dir.make_node(s.path_from(src_dir).replace(os.sep,'_'))
- b.parent.mkdir()
- #print ('++> %s' % (s.path_from(src_dir),))
- try:
- try:os.remove(b.abspath())
- except Exception:pass
- os.symlink(s.abspath(), b.abspath())
- except Exception:
- # if no support for symlinks, copy the file from src
- b.write(s.read())
- bld_srcs.append(b)
- #print("--|> [%s]" % b.abspath())
- b.sig = Utils.h_file(b.abspath())
- pass
- #self.set_inputs(bld_srcs)
- #self.generator.bld.raw_deps[self.uid()] = [self.signature()] + bld_srcs
- makefile_node = bld_dir.make_node("Makefile")
- makefile_tmpl = '''\
-# Copyright 2009 The Go Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file. ---
-
-include $(GOROOT)/src/Make.inc
-
-TARG=%(target)s
-
-GCIMPORTS= %(gcimports)s
-
-CGOFILES=\\
-\t%(source)s
-
-CGO_CFLAGS= %(cgo_cflags)s
-
-CGO_LDFLAGS= %(cgo_ldflags)s
-
-include $(GOROOT)/src/Make.pkg
-
-%%: install %%.go
- $(GC) $*.go
- $(LD) -o $@ $*.$O
-
-''' % {
-'gcimports': ' '.join(l for l in self.env['GOCFLAGS']),
-'cgo_cflags' : ' '.join(l for l in self.env['GOCFLAGS']),
-'cgo_ldflags': ' '.join(l for l in self.env['GOLFLAGS']),
-'target': target.path_from(obj_dir),
-'source': ' '.join([b.path_from(bld_dir) for b in bld_srcs])
-}
- makefile_node.write(makefile_tmpl)
- #print ("::makefile: %s"%makefile_node.abspath())
- cmd = Utils.subst_vars('gomake ${GOMAKE_FLAGS}', self.env).strip()
- o = self.outputs[0].change_ext('.gomake.log')
- fout_node = bld_dir.find_or_declare(o.name)
- fout = open(fout_node.abspath(), 'w')
- rc = self.generator.bld.exec_command(
- cmd,
- stdout=fout,
- stderr=fout,
- cwd=bld_dir.abspath(),
- )
- if rc != 0:
- import waflib.Logs as msg
- msg.error('** error running [%s] (cgo-%s)' % (cmd, target))
- msg.error(fout_node.read())
- return rc
- self.generator.bld.read_stlib(
- target,
- paths=[obj_dir.abspath(),],
- )
- tgt = self.outputs[0]
- if tgt.parent != obj_dir:
- install_dir = os.path.join('${LIBDIR}',
- tgt.parent.path_from(obj_dir))
- else:
- install_dir = '${LIBDIR}'
- #print('===> %s (%s)' % (tgt.abspath(), install_dir))
- self.generator.bld.install_files(
- install_dir,
- tgt.abspath(),
- relative_trick=False,
- postpone=False,
- )
- return rc
-
-@extension('.go')
-def compile_go(self, node):
- #print('*'*80, self.name)
- if not ('cgopackage' in self.features):
- return self.create_compiled_task('go', node)
- #print ('compile_go-cgo...')
- #bld_dir = node.parent.get_bld()
- #obj_dir = bld_dir.make_node('_obj')
- return self.create_task('cgopackage', node, node.change_ext('.a'))
-
-@feature('gopackage', 'goprogram', 'cgopackage')
-@before_method('process_source')
-def go_compiler_is_foobar(self):
- if self.env.GONAME == 'gcc':
- return
- self.source = self.to_nodes(self.source)
- src = []
- go = []
- for node in self.source:
- if node.name.endswith('.go'):
- go.append(node)
- else:
- src.append(node)
- self.source = src
- if not ('cgopackage' in self.features):
- #print('--> [%s]... (%s)' % (go[0], getattr(self, 'target', 'N/A')))
- tsk = self.create_compiled_task('go', go[0])
- tsk.inputs.extend(go[1:])
- else:
- #print ('+++ [%s] +++' % self.target)
- bld_dir = self.path.get_bld().make_node('cgopackage--%s' % self.target.replace(os.sep,'_'))
- obj_dir = bld_dir.make_node('_obj')
- target = obj_dir.make_node(self.target+'.a')
- tsk = self.create_task('cgopackage', go, [target, bld_dir])
- self.link_task = tsk
-
-@feature('gopackage', 'goprogram', 'cgopackage')
-@after_method('process_source', 'apply_incpaths',)
-def go_local_libs(self):
- names = self.to_list(getattr(self, 'use', []))
- #print ('== go-local-libs == [%s] == use: %s' % (self.name, names))
- for name in names:
- tg = self.bld.get_tgen_by_name(name)
- if not tg:
- raise Utils.WafError('no target of name %r necessary for %r in go uselib local' % (name, self))
- tg.post()
- #print ("-- tg[%s]: %s" % (self.name,name))
- lnk_task = getattr(tg, 'link_task', None)
- if lnk_task:
- for tsk in self.tasks:
- if isinstance(tsk, (go, gopackage, cgopackage)):
- tsk.set_run_after(lnk_task)
- tsk.dep_nodes.extend(lnk_task.outputs)
- path = lnk_task.outputs[0].parent.abspath()
- if isinstance(lnk_task, (go, gopackage)):
- # handle hierarchical packages
- path = lnk_task.generator.path.get_bld().abspath()
- elif isinstance(lnk_task, (cgopackage,)):
- # handle hierarchical cgopackages
- cgo_obj_dir = lnk_task.outputs[1].find_or_declare('_obj')
- path = cgo_obj_dir.abspath()
- # recursively add parent GOCFLAGS...
- self.env.append_unique('GOCFLAGS',
- getattr(lnk_task.env, 'GOCFLAGS',[]))
- # ditto for GOLFLAGS...
- self.env.append_unique('GOLFLAGS',
- getattr(lnk_task.env, 'GOLFLAGS',[]))
- self.env.append_unique('GOCFLAGS', ['-I%s' % path])
- self.env.append_unique('GOLFLAGS', ['-L%s' % path])
- for n in getattr(tg, 'includes_nodes', []):
- self.env.append_unique('GOCFLAGS', ['-I%s' % n.abspath()])
- pass
- pass
-
-def configure(conf):
-
- def set_def(var, val):
- if not conf.env[var]:
- conf.env[var] = val
-
- goarch = os.getenv('GOARCH')
- if goarch == '386':
- set_def('GO_PLATFORM', 'i386')
- elif goarch == 'amd64':
- set_def('GO_PLATFORM', 'x86_64')
- elif goarch == 'arm':
- set_def('GO_PLATFORM', 'arm')
- else:
- set_def('GO_PLATFORM', platform.machine())
-
- if conf.env.GO_PLATFORM == 'x86_64':
- set_def('GO_COMPILER', '6g')
- set_def('GO_LINKER', '6l')
- elif conf.env.GO_PLATFORM in ('i386', 'i486', 'i586', 'i686'):
- set_def('GO_COMPILER', '8g')
- set_def('GO_LINKER', '8l')
- elif conf.env.GO_PLATFORM == 'arm':
- set_def('GO_COMPILER', '5g')
- set_def('GO_LINKER', '5l')
- set_def('GO_EXTENSION', '.5')
-
- if not (conf.env.GO_COMPILER or conf.env.GO_LINKER):
- raise conf.fatal('Unsupported platform ' + platform.machine())
-
- set_def('GO_PACK', 'gopack')
- set_def('gopackage_PATTERN', '%s.a')
- set_def('CPPPATH_ST', '-I%s')
-
- set_def('GOMAKE_FLAGS', ['--quiet'])
- conf.find_program(conf.env.GO_COMPILER, var='GOC')
- conf.find_program(conf.env.GO_LINKER, var='GOL')
- conf.find_program(conf.env.GO_PACK, var='GOP')
-
- conf.find_program('cgo', var='CGO')
-
-TaskGen.feature('go')(process_use)
-TaskGen.feature('go')(propagate_uselib_vars)
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
# Ali Sabil, 2007
def configure(conf):
conf.find_program('gob2', var='GOB2')
conf.env['GOB2FLAGS'] = ''
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Halide code generation tool
# Return a node with a new extension, in an appropriate folder
name = src.name
xpos = src.name.rfind('.')
- if xpos == -1: xpos = len(src.name)
+ if xpos == -1:
+ xpos = len(src.name)
newname = name[:xpos] + ext
if src.is_child_of(bld.bldnode):
node = src.get_src().parent.find_or_declare(newname)
opt.add_option('--halide-root',
help="path to Halide include and lib files",
)
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# Federico Pellegrin, 2017 (fedepell)
+
+"""
+Provides Java Unit test support using :py:class:`waflib.Tools.waf_unit_test.utest`
+task via the **javatest** feature.
+
+This gives the possibility to run unit tests and have them integrated into the
+standard waf unit test environment. It has been tested with TestNG and JUnit
+but should be easily expandable to other frameworks given the flexibility of
+ut_str provided by the standard waf unit test environment.
+
+Example usage:
+
+def options(opt):
+ opt.load('java waf_unit_test javatest')
+
+def configure(conf):
+ conf.load('java javatest')
+
+def build(bld):
+
+ [ ... mainprog is built here ... ]
+
+ bld(features = 'javac javatest',
+ srcdir = 'test/',
+ outdir = 'test',
+ sourcepath = ['test'],
+ classpath = [ 'src' ],
+ basedir = 'test',
+ use = ['JAVATEST', 'mainprog'], # mainprog is the program being tested in src/
+ ut_str = 'java -cp ${CLASSPATH} ${JTRUNNER} ${SRC}',
+ jtest_source = bld.path.ant_glob('test/*.xml'),
+ )
+
+
+At command line the CLASSPATH where to find the testing environment and the
+test runner (default TestNG) that will then be seen in the environment as
+CLASSPATH_JAVATEST (then used for use) and JTRUNNER and can be used for
+dependencies and ut_str generation.
+
+Example configure for TestNG:
+ waf configure --jtpath=/tmp/testng-6.12.jar:/tmp/jcommander-1.71.jar --jtrunner=org.testng.TestNG
+ or as default runner is TestNG:
+ waf configure --jtpath=/tmp/testng-6.12.jar:/tmp/jcommander-1.71.jar
+
+Example configure for JUnit:
+ waf configure --jtpath=/tmp/junit.jar --jtrunner=org.junit.runner.JUnitCore
+
+The runner class presence on the system is checked for at configuration stage.
+
+"""
+
+import os
+from waflib import Task, TaskGen, Options
+
+@TaskGen.feature('javatest')
+@TaskGen.after_method('apply_java', 'use_javac_files', 'set_classpath')
+def make_javatest(self):
+ """
+ Creates a ``utest`` task with a populated environment for Java Unit test execution
+
+ """
+ tsk = self.create_task('utest')
+ tsk.set_run_after(self.javac_task)
+
+	# Use test input files as the task inputs, since waf_unit_test relies on them for some prints and log generation
+ # If jtest_source is there, this is specially useful for passing XML for TestNG
+ # that contain test specification, use that as inputs, otherwise test sources
+ if getattr(self, 'jtest_source', None):
+ tsk.inputs = self.to_nodes(self.jtest_source)
+ else:
+ if self.javac_task.srcdir[0].exists():
+ tsk.inputs = self.javac_task.srcdir[0].ant_glob('**/*.java', remove=False)
+
+ if getattr(self, 'ut_str', None):
+ self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False))
+ tsk.vars = lst + tsk.vars
+
+ if getattr(self, 'ut_cwd', None):
+ if isinstance(self.ut_cwd, str):
+ # we want a Node instance
+ if os.path.isabs(self.ut_cwd):
+ self.ut_cwd = self.bld.root.make_node(self.ut_cwd)
+ else:
+ self.ut_cwd = self.path.make_node(self.ut_cwd)
+ else:
+ self.ut_cwd = self.bld.bldnode
+
+ # Get parent CLASSPATH and add output dir of test, we run from wscript dir
+ # We have to change it from list to the standard java -cp format (: separated)
+ tsk.env.CLASSPATH = ':'.join(self.env.CLASSPATH) + ':' + self.outdir.abspath()
+
+ if not self.ut_cwd.exists():
+ self.ut_cwd.mkdir()
+
+ if not hasattr(self, 'ut_env'):
+ self.ut_env = dict(os.environ)
+
+def configure(ctx):
+ cp = ctx.env.CLASSPATH or '.'
+ if getattr(Options.options, 'jtpath', None):
+ ctx.env.CLASSPATH_JAVATEST = getattr(Options.options, 'jtpath').split(':')
+ cp += ':' + getattr(Options.options, 'jtpath')
+
+ if getattr(Options.options, 'jtrunner', None):
+ ctx.env.JTRUNNER = getattr(Options.options, 'jtrunner')
+
+ if ctx.check_java_class(ctx.env.JTRUNNER, with_classpath=cp):
+ ctx.fatal('Could not run test class %r' % ctx.env.JTRUNNER)
+
+def options(opt):
+ opt.add_option('--jtpath', action='store', default='', dest='jtpath',
+ help='Path to jar(s) needed for javatest execution, colon separated, if not in the system CLASSPATH')
+ opt.add_option('--jtrunner', action='store', default='org.testng.TestNG', dest='jtrunner',
+ help='Class to run javatest test [default: org.testng.TestNG]')
+
inst = getattr(self, 'install_path', '${KDE4_LOCALE_INSTALL_DIR}')
- self.bld.install_as(
- inst + os.sep + langname + os.sep + 'LC_MESSAGES' + os.sep + getattr(self, 'appname', 'set_your_appname') + '.mo',
- task.outputs[0],
+ self.add_install_as(
+ inst_to = inst + os.sep + langname + os.sep + 'LC_MESSAGES' + os.sep + getattr(self, 'appname', 'set_your_appname') + '.mo',
+ inst_from = task.outputs[0],
chmod = getattr(self, 'chmod', Utils.O644))
class msgfmt(Task.Task):
kdeconfig = self.find_program('kde4-config')
prefix = self.cmd_and_log(kdeconfig + ['--prefix']).strip()
fname = '%s/share/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
- try: os.stat(fname)
+ try:
+ os.stat(fname)
except OSError:
fname = '%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
- try: os.stat(fname)
- except OSError: self.fatal('could not open %s' % fname)
+ try:
+ os.stat(fname)
+ except OSError:
+ self.fatal('could not open %s' % fname)
try:
txt = Utils.readf(fname)
self.env.append_value('INCLUDES_KDECORE', [self.env['KDE4_INCLUDE_INSTALL_DIR']+ os.sep + 'KDE'])
self.find_program('msgfmt', var='MSGFMT')
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2011 (ita)
continue
self.env.append_value('RPATH', tg.link_task.outputs[0].parent.abspath())
all.extend(self.to_list(getattr(tg, 'use', [])))
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2011 (ita)
for pat in self.files.split(','):
matcher = self.get_matcher(pat)
for tg in g:
- if isinstance(tg, Task.TaskBase):
+ if isinstance(tg, Task.Task):
lst = [tg]
else:
lst = tg.tasks
all_tasks.append(tsk)
do_exec = False
- for node in getattr(tsk, 'inputs', []):
+ for node in tsk.inputs:
try:
uses[node].append(tsk)
except:
do_exec = True
break
- for node in getattr(tsk, 'outputs', []):
+ for node in tsk.outputs:
try:
provides[node].append(tsk)
except:
result = all_tasks
else:
# this is like a big filter...
- result = set([])
- seen = set([])
+ result = set()
+ seen = set()
cur = set(tasks)
while cur:
result |= cur
- tosee = set([])
+ tosee = set()
for tsk in cur:
- for node in getattr(tsk, 'inputs', []):
+ for node in tsk.inputs:
if node in seen:
continue
seen.add(node)
pattern = re.compile(pat)
def match(node, output):
- if output == True and not out:
+ if output and not out:
return False
- if output == False and not inn:
+ if not output and not inn:
return False
if anode:
else:
return pattern.match(node.abspath())
return match
+
+++ /dev/null
-#! /usr/bin/env python
-# encoding: utf-8
-
-"""
-This module assumes that only one build context is running at a given time, which
-is not the case if you want to execute configuration tests in parallel.
-
-Store some values on the buildcontext mapping file paths to
-stat values and md5 values (timestamp + md5)
-this way the md5 hashes are computed only when timestamp change (can be faster)
-There is usually little or no gain from enabling this, but it can be used to enable
-the second level cache with timestamps (WAFCACHE)
-
-You may have to run distclean or to remove the build directory before enabling/disabling
-this hashing scheme
-"""
-
-import os, stat
-from waflib import Utils, Build, Context
-
-STRONGEST = True
-
-try:
- Build.BuildContext.store_real
-except AttributeError:
-
- Context.DBFILE += '_md5tstamp'
-
- Build.hashes_md5_tstamp = {}
- Build.SAVED_ATTRS.append('hashes_md5_tstamp')
- def store(self):
- # save the hash cache as part of the default pickle file
- self.hashes_md5_tstamp = Build.hashes_md5_tstamp
- self.store_real()
- Build.BuildContext.store_real = Build.BuildContext.store
- Build.BuildContext.store = store
-
- def restore(self):
- # we need a module variable for h_file below
- self.restore_real()
- try:
- Build.hashes_md5_tstamp = self.hashes_md5_tstamp or {}
- except AttributeError:
- Build.hashes_md5_tstamp = {}
- Build.BuildContext.restore_real = Build.BuildContext.restore
- Build.BuildContext.restore = restore
-
- def h_file(filename):
- st = os.stat(filename)
- if stat.S_ISDIR(st[stat.ST_MODE]): raise IOError('not a file')
-
- if filename in Build.hashes_md5_tstamp:
- if Build.hashes_md5_tstamp[filename][0] == str(st.st_mtime):
- return Build.hashes_md5_tstamp[filename][1]
- if STRONGEST:
- ret = Utils.h_file_no_md5(filename)
- Build.hashes_md5_tstamp[filename] = (str(st.st_mtime), ret)
- return ret
- else:
- m = Utils.md5()
- m.update(str(st.st_mtime))
- m.update(str(st.st_size))
- m.update(filename)
- Build.hashes_md5_tstamp[filename] = (str(st.st_mtime), m.digest())
- return m.digest()
- Utils.h_file_no_md5 = Utils.h_file
- Utils.h_file = h_file
+++ /dev/null
-#! /usr/bin/env python
-# encoding: UTF-8
-
-"""
-This tool can help to reduce the memory usage in very large builds featuring many tasks with after/before attributes.
-It may also improve the overall build time by decreasing the amount of iterations over tasks.
-
-Usage:
-def options(opt):
- opt.load('mem_reducer')
-"""
-
-import itertools
-from waflib import Utils, Task, Runner
-
-class SetOfTasks(object):
- """Wraps a set and a task which has a list of other sets.
- The interface is meant to mimic the interface of set. Add missing functions as needed.
- """
- def __init__(self, owner):
- self._set = owner.run_after
- self._owner = owner
-
- def __iter__(self):
- for g in self._owner.run_after_groups:
- #print len(g)
- for task in g:
- yield task
- for task in self._set:
- yield task
-
- def add(self, obj):
- self._set.add(obj)
-
- def update(self, obj):
- self._set.update(obj)
-
-def set_precedence_constraints(tasks):
- cstr_groups = Utils.defaultdict(list)
- for x in tasks:
- x.run_after = SetOfTasks(x)
- x.run_after_groups = []
- x.waiting_sets = []
-
- h = x.hash_constraints()
- cstr_groups[h].append(x)
-
- # create sets which can be reused for all tasks
- for k in cstr_groups.keys():
- cstr_groups[k] = set(cstr_groups[k])
-
- # this list should be short
- for key1, key2 in itertools.combinations(cstr_groups.keys(), 2):
- group1 = cstr_groups[key1]
- group2 = cstr_groups[key2]
- # get the first entry of the set
- t1 = next(iter(group1))
- t2 = next(iter(group2))
-
- # add the constraints based on the comparisons
- if Task.is_before(t1, t2):
- for x in group2:
- x.run_after_groups.append(group1)
- for k in group1:
- k.waiting_sets.append(group1)
- elif Task.is_before(t2, t1):
- for x in group1:
- x.run_after_groups.append(group2)
- for k in group2:
- k.waiting_sets.append(group2)
-
-Task.set_precedence_constraints = set_precedence_constraints
-
-def get_out(self):
- tsk = self.out.get()
- if not self.stop:
- self.add_more_tasks(tsk)
- self.count -= 1
- self.dirty = True
-
- # shrinking sets
- try:
- ws = tsk.waiting_sets
- except AttributeError:
- pass
- else:
- for k in ws:
- try:
- k.remove(tsk)
- except KeyError:
- pass
-
- return tsk
-Runner.Parallel.get_out = get_out
-
-def skip(self, tsk):
- tsk.hasrun = Task.SKIPPED
-
- # shrinking sets
- try:
- ws = tsk.waiting_sets
- except AttributeError:
- pass
- else:
- for k in ws:
- try:
- k.remove(tsk)
- except KeyError:
- pass
-Runner.Parallel.skip = skip
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# Issue 1185 ultrix gmail com
+
+"""
+Microsoft Interface Definition Language support. Given ComObject.idl, this tool
+will generate ComObject.tlb ComObject_i.h ComObject_i.c ComObject_p.c and dlldata.c
+
+To declare targets using midl::
+
+ def configure(conf):
+ conf.load('msvc')
+ conf.load('midl')
+
+ def build(bld):
+ bld(
+ features='c cshlib',
+ # Note: ComObject_i.c is generated from ComObject.idl
+ source = 'main.c ComObject.idl ComObject_i.c',
+ target = 'ComObject.dll')
+"""
+
+from waflib import Task, Utils
+from waflib.TaskGen import feature, before_method
+import os
+
+def configure(conf):
+ conf.find_program(['midl'], var='MIDL')
+
+ conf.env.MIDLFLAGS = [
+ '/nologo',
+ '/D',
+ '_DEBUG',
+ '/W1',
+ '/char',
+ 'signed',
+ '/Oicf',
+ ]
+
+@feature('c', 'cxx')
+@before_method('process_source')
+def idl_file(self):
+ # Do this before process_source so that the generated header can be resolved
+ # when scanning source dependencies.
+ idl_nodes = []
+ src_nodes = []
+ for node in Utils.to_list(self.source):
+ if str(node).endswith('.idl'):
+ idl_nodes.append(node)
+ else:
+ src_nodes.append(node)
+
+ for node in self.to_nodes(idl_nodes):
+ t = node.change_ext('.tlb')
+ h = node.change_ext('_i.h')
+ c = node.change_ext('_i.c')
+ p = node.change_ext('_p.c')
+ d = node.parent.find_or_declare('dlldata.c')
+ self.create_task('midl', node, [t, h, c, p, d])
+
+ self.source = src_nodes
+
+class midl(Task.Task):
+ """
+ Compile idl files
+ """
+ color = 'YELLOW'
+ run_str = '${MIDL} ${MIDLFLAGS} ${CPPPATH_ST:INCLUDES} /tlb ${TGT[0].bldpath()} /header ${TGT[1].bldpath()} /iid ${TGT[2].bldpath()} /proxy ${TGT[3].bldpath()} /dlldata ${TGT[4].bldpath()} ${SRC}'
+ before = ['winrc']
+
+++ /dev/null
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2010 (ita)
-
-"""
-This tool is totally deprecated
-
-Try using:
- .pc.in files for .pc files
- the feature intltool_in - see demos/intltool
- make-like rules
-"""
-
-import shutil, re, os
-from waflib import Node, Task, Utils, Errors
-from waflib.TaskGen import feature, after_method, before_method
-from waflib.Logs import debug
-
-def copy_attrs(orig, dest, names, only_if_set=False):
- """
- copy class attributes from an object to another
- """
- for a in Utils.to_list(names):
- u = getattr(orig, a, ())
- if u or not only_if_set:
- setattr(dest, a, u)
-
-def copy_func(tsk):
- "Make a file copy. This might be used to make other kinds of file processing (even calling a compiler is possible)"
- infile = tsk.inputs[0].abspath()
- outfile = tsk.outputs[0].abspath()
- try:
- shutil.copy2(infile, outfile)
- except EnvironmentError:
- return 1
- else:
- if tsk.chmod: os.chmod(outfile, tsk.chmod)
- return 0
-
-def action_process_file_func(tsk):
- "Ask the function attached to the task to process it"
- if not tsk.fun: raise Errors.WafError('task must have a function attached to it for copy_func to work!')
- return tsk.fun(tsk)
-
-@feature('cmd')
-def apply_cmd(self):
- "call a command everytime"
- if not self.fun: raise Errors.WafError('cmdobj needs a function!')
- tsk = Task.TaskBase()
- tsk.fun = self.fun
- tsk.env = self.env
- self.tasks.append(tsk)
- tsk.install_path = self.install_path
-
-@feature('copy')
-@before_method('process_source')
-def apply_copy(self):
- Utils.def_attrs(self, fun=copy_func)
- self.default_install_path = 0
-
- lst = self.to_list(self.source)
- self.meths.remove('process_source')
-
- for filename in lst:
- node = self.path.find_resource(filename)
- if not node: raise Errors.WafError('cannot find input file %s for processing' % filename)
-
- target = self.target
- if not target or len(lst)>1: target = node.name
-
- # TODO the file path may be incorrect
- newnode = self.path.find_or_declare(target)
-
- tsk = self.create_task('copy', node, newnode)
- tsk.fun = self.fun
- tsk.chmod = getattr(self, 'chmod', Utils.O644)
-
- if not tsk.env:
- tsk.debug()
- raise Errors.WafError('task without an environment')
-
-def subst_func(tsk):
- "Substitutes variables in a .in file"
-
- m4_re = re.compile('@(\w+)@', re.M)
-
- code = tsk.inputs[0].read() #Utils.readf(infile)
-
- # replace all % by %% to prevent errors by % signs in the input file while string formatting
- code = code.replace('%', '%%')
-
- s = m4_re.sub(r'%(\1)s', code)
-
- env = tsk.env
- di = getattr(tsk, 'dict', {}) or getattr(tsk.generator, 'dict', {})
- if not di:
- names = m4_re.findall(code)
- for i in names:
- di[i] = env.get_flat(i) or env.get_flat(i.upper())
-
- tsk.outputs[0].write(s % di)
-
-@feature('subst')
-@before_method('process_source')
-def apply_subst(self):
- Utils.def_attrs(self, fun=subst_func)
- lst = self.to_list(self.source)
- self.meths.remove('process_source')
-
- self.dict = getattr(self, 'dict', {})
-
- for filename in lst:
- node = self.path.find_resource(filename)
- if not node: raise Errors.WafError('cannot find input file %s for processing' % filename)
-
- if self.target:
- newnode = self.path.find_or_declare(self.target)
- else:
- newnode = node.change_ext('')
-
- try:
- self.dict = self.dict.get_merged_dict()
- except AttributeError:
- pass
-
- if self.dict and not self.env['DICT_HASH']:
- self.env = self.env.derive()
- keys = list(self.dict.keys())
- keys.sort()
- lst = [self.dict[x] for x in keys]
- self.env['DICT_HASH'] = str(Utils.h_list(lst))
-
- tsk = self.create_task('copy', node, newnode)
- tsk.fun = self.fun
- tsk.dict = self.dict
- tsk.dep_vars = ['DICT_HASH']
- tsk.chmod = getattr(self, 'chmod', Utils.O644)
-
- if not tsk.env:
- tsk.debug()
- raise Errors.WafError('task without an environment')
-
-####################
-## command-output ####
-####################
-
-class cmd_arg(object):
- """command-output arguments for representing files or folders"""
- def __init__(self, name, template='%s'):
- self.name = name
- self.template = template
- self.node = None
-
-class input_file(cmd_arg):
- def find_node(self, base_path):
- assert isinstance(base_path, Node.Node)
- self.node = base_path.find_resource(self.name)
- if self.node is None:
- raise Errors.WafError("Input file %s not found in " % (self.name, base_path))
-
- def get_path(self, env, absolute):
- if absolute:
- return self.template % self.node.abspath()
- else:
- return self.template % self.node.srcpath()
-
-class output_file(cmd_arg):
- def find_node(self, base_path):
- assert isinstance(base_path, Node.Node)
- self.node = base_path.find_or_declare(self.name)
- if self.node is None:
- raise Errors.WafError("Output file %s not found in " % (self.name, base_path))
-
- def get_path(self, env, absolute):
- if absolute:
- return self.template % self.node.abspath()
- else:
- return self.template % self.node.bldpath()
-
-class cmd_dir_arg(cmd_arg):
- def find_node(self, base_path):
- assert isinstance(base_path, Node.Node)
- self.node = base_path.find_dir(self.name)
- if self.node is None:
- raise Errors.WafError("Directory %s not found in " % (self.name, base_path))
-
-class input_dir(cmd_dir_arg):
- def get_path(self, dummy_env, dummy_absolute):
- return self.template % self.node.abspath()
-
-class output_dir(cmd_dir_arg):
- def get_path(self, env, dummy_absolute):
- return self.template % self.node.abspath()
-
-
-class command_output(Task.Task):
- color = "BLUE"
- def __init__(self, env, command, command_node, command_args, stdin, stdout, cwd, os_env, stderr):
- Task.Task.__init__(self, env=env)
- assert isinstance(command, (str, Node.Node))
- self.command = command
- self.command_args = command_args
- self.stdin = stdin
- self.stdout = stdout
- self.cwd = cwd
- self.os_env = os_env
- self.stderr = stderr
-
- if command_node is not None: self.dep_nodes = [command_node]
- self.dep_vars = [] # additional environment variables to look
-
- def run(self):
- task = self
- #assert len(task.inputs) > 0
-
- def input_path(node, template):
- if task.cwd is None:
- return template % node.bldpath()
- else:
- return template % node.abspath()
- def output_path(node, template):
- fun = node.abspath
- if task.cwd is None: fun = node.bldpath
- return template % fun()
-
- if isinstance(task.command, Node.Node):
- argv = [input_path(task.command, '%s')]
- else:
- argv = [task.command]
-
- for arg in task.command_args:
- if isinstance(arg, str):
- argv.append(arg)
- else:
- assert isinstance(arg, cmd_arg)
- argv.append(arg.get_path(task.env, (task.cwd is not None)))
-
- if task.stdin:
- stdin = open(input_path(task.stdin, '%s'))
- else:
- stdin = None
-
- if task.stdout:
- stdout = open(output_path(task.stdout, '%s'), "w")
- else:
- stdout = None
-
- if task.stderr:
- stderr = open(output_path(task.stderr, '%s'), "w")
- else:
- stderr = None
-
- if task.cwd is None:
- cwd = ('None (actually %r)' % os.getcwd())
- else:
- cwd = repr(task.cwd)
- debug("command-output: cwd=%s, stdin=%r, stdout=%r, argv=%r" %
- (cwd, stdin, stdout, argv))
-
- if task.os_env is None:
- os_env = os.environ
- else:
- os_env = task.os_env
- command = Utils.subprocess.Popen(argv, stdin=stdin, stdout=stdout, stderr=stderr, cwd=task.cwd, env=os_env)
- return command.wait()
-
-@feature('command-output')
-def init_cmd_output(self):
- Utils.def_attrs(self,
- stdin = None,
- stdout = None,
- stderr = None,
- # the command to execute
- command = None,
-
- # whether it is an external command; otherwise it is assumed
- # to be an executable binary or script that lives in the
- # source or build tree.
- command_is_external = False,
-
- # extra parameters (argv) to pass to the command (excluding
- # the command itself)
- argv = [],
-
- # dependencies to other objects -> this is probably not what you want (ita)
- # values must be 'task_gen' instances (not names!)
- dependencies = [],
-
- # dependencies on env variable contents
- dep_vars = [],
-
- # input files that are implicit, i.e. they are not
- # stdin, nor are they mentioned explicitly in argv
- hidden_inputs = [],
-
- # output files that are implicit, i.e. they are not
- # stdout, nor are they mentioned explicitly in argv
- hidden_outputs = [],
-
- # change the subprocess to this cwd (must use obj.input_dir() or output_dir() here)
- cwd = None,
-
- # OS environment variables to pass to the subprocess
- # if None, use the default environment variables unchanged
- os_env = None)
-
-@feature('command-output')
-@after_method('init_cmd_output')
-def apply_cmd_output(self):
- if self.command is None:
- raise Errors.WafError("command-output missing command")
- if self.command_is_external:
- cmd = self.command
- cmd_node = None
- else:
- cmd_node = self.path.find_resource(self.command)
- assert cmd_node is not None, ('''Could not find command '%s' in source tree.
-Hint: if this is an external command,
-use command_is_external=True''') % (self.command,)
- cmd = cmd_node
-
- if self.cwd is None:
- cwd = None
-
- inputs = []
- outputs = []
-
- for arg in self.argv:
- if isinstance(arg, cmd_arg):
- arg.find_node(self.path)
- if isinstance(arg, input_file):
- inputs.append(arg.node)
- if isinstance(arg, output_file):
- outputs.append(arg.node)
-
- if self.stdout is None:
- stdout = None
- else:
- assert isinstance(self.stdout, str)
- stdout = self.path.find_or_declare(self.stdout)
- if stdout is None:
- raise Errors.WafError("File %s not found" % (self.stdout,))
- outputs.append(stdout)
-
- if self.stderr is None:
- stderr = None
- else:
- assert isinstance(self.stderr, str)
- stderr = self.path.find_or_declare(self.stderr)
- if stderr is None:
- raise Errors.WafError("File %s not found" % (self.stderr,))
- outputs.append(stderr)
-
- if self.stdin is None:
- stdin = None
- else:
- assert isinstance(self.stdin, str)
- stdin = self.path.find_resource(self.stdin)
- if stdin is None:
- raise Errors.WafError("File %s not found" % (self.stdin,))
- inputs.append(stdin)
-
- for hidden_input in self.to_list(self.hidden_inputs):
- node = self.path.find_resource(hidden_input)
- if node is None:
- raise Errors.WafError("File %s not found in dir %s" % (hidden_input, self.path))
- inputs.append(node)
-
- for hidden_output in self.to_list(self.hidden_outputs):
- node = self.path.find_or_declare(hidden_output)
- if node is None:
- raise Errors.WafError("File %s not found in dir %s" % (hidden_output, self.path))
- outputs.append(node)
-
- if not (inputs or getattr(self, 'no_inputs', None)):
- raise Errors.WafError('command-output objects must have at least one input file or give self.no_inputs')
- if not (outputs or getattr(self, 'no_outputs', None)):
- raise Errors.WafError('command-output objects must have at least one output file or give self.no_outputs')
-
- cwd = self.bld.variant_dir
- task = command_output(self.env, cmd, cmd_node, self.argv, stdin, stdout, cwd, self.os_env, stderr)
- task.generator = self
- copy_attrs(self, task, 'before after ext_in ext_out', only_if_set=True)
- self.tasks.append(task)
-
- task.inputs = inputs
- task.outputs = outputs
- task.dep_vars = self.to_list(self.dep_vars)
-
- for dep in self.dependencies:
- assert dep is not self
- dep.post()
- for dep_task in dep.tasks:
- task.set_run_after(dep_task)
-
- if not task.inputs:
- # the case for svnversion, always run, and update the output nodes
- task.runnable_status = type(Task.TaskBase.run)(runnable_status, task, task.__class__) # always run
- task.post_run = type(Task.TaskBase.run)(post_run, task, task.__class__)
-
- # TODO the case with no outputs?
-
-def post_run(self):
- for x in self.outputs:
- x.sig = Utils.h_file(x.abspath())
-
-def runnable_status(self):
- return self.RUN_ME
-
-Task.task_factory('copy', vars=[], func=action_process_file_func)
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
# Copyright Garmin International or its subsidiaries, 2012-2013
The technique of gutting scan() and pushing the dependency calculation
down to post_run() is cribbed from gccdeps.py.
+
+This affects the cxx class, so make sure to load Qt5 after this tool.
+
+Usage::
+
+ def options(opt):
+ opt.load('compiler_cxx')
+ def configure(conf):
+ conf.load('compiler_cxx msvcdeps')
'''
-import os
-import sys
-import tempfile
-import threading
+import os, sys, tempfile, threading
from waflib import Context, Errors, Logs, Task, Utils
from waflib.Tools import c_preproc, c, cxx, msvc
@feature('c', 'cxx')
@before_method('process_source')
def apply_msvcdeps_flags(taskgen):
- if taskgen.env.CC_NAME not in supported_compilers:
- return
+ if taskgen.env.CC_NAME not in supported_compilers:
+ return
- for flag in ('CFLAGS', 'CXXFLAGS'):
- if taskgen.env.get_flat(flag).find(PREPROCESSOR_FLAG) < 0:
- taskgen.env.append_value(flag, PREPROCESSOR_FLAG)
+ for flag in ('CFLAGS', 'CXXFLAGS'):
+ if taskgen.env.get_flat(flag).find(PREPROCESSOR_FLAG) < 0:
+ taskgen.env.append_value(flag, PREPROCESSOR_FLAG)
- # Figure out what casing conventions the user's shell used when
- # launching Waf
- (drive, _) = os.path.splitdrive(taskgen.bld.srcnode.abspath())
- taskgen.msvcdeps_drive_lowercase = drive == drive.lower()
+ # Figure out what casing conventions the user's shell used when
+ # launching Waf
+ (drive, _) = os.path.splitdrive(taskgen.bld.srcnode.abspath())
+ taskgen.msvcdeps_drive_lowercase = drive == drive.lower()
def path_to_node(base_node, path, cached_nodes):
- # Take the base node and the path and return a node
- # Results are cached because searching the node tree is expensive
- # The following code is executed by threads, it is not safe, so a lock is needed...
- if getattr(path, '__hash__'):
- node_lookup_key = (base_node, path)
- else:
- # Not hashable, assume it is a list and join into a string
- node_lookup_key = (base_node, os.path.sep.join(path))
- try:
- lock.acquire()
- node = cached_nodes[node_lookup_key]
- except KeyError:
- node = base_node.find_resource(path)
- cached_nodes[node_lookup_key] = node
- finally:
- lock.release()
- return node
+ # Take the base node and the path and return a node
+ # Results are cached because searching the node tree is expensive
+ # The following code is executed by threads, it is not safe, so a lock is needed...
+ if getattr(path, '__hash__'):
+ node_lookup_key = (base_node, path)
+ else:
+ # Not hashable, assume it is a list and join into a string
+ node_lookup_key = (base_node, os.path.sep.join(path))
+ try:
+ lock.acquire()
+ node = cached_nodes[node_lookup_key]
+ except KeyError:
+ node = base_node.find_resource(path)
+ cached_nodes[node_lookup_key] = node
+ finally:
+ lock.release()
+ return node
+
+def post_run(self):
+ if self.env.CC_NAME not in supported_compilers:
+ return super(self.derived_msvcdeps, self).post_run()
+
+ # TODO this is unlikely to work with netcache
+ if getattr(self, 'cached', None):
+ return Task.Task.post_run(self)
+
+ bld = self.generator.bld
+ unresolved_names = []
+ resolved_nodes = []
+
+ lowercase = self.generator.msvcdeps_drive_lowercase
+ correct_case_path = bld.path.abspath()
+ correct_case_path_len = len(correct_case_path)
+ correct_case_path_norm = os.path.normcase(correct_case_path)
+
+ # Dynamically bind to the cache
+ try:
+ cached_nodes = bld.cached_nodes
+ except AttributeError:
+ cached_nodes = bld.cached_nodes = {}
+
+ for path in self.msvcdeps_paths:
+ node = None
+ if os.path.isabs(path):
+ # Force drive letter to match conventions of main source tree
+ drive, tail = os.path.splitdrive(path)
+
+ if os.path.normcase(path[:correct_case_path_len]) == correct_case_path_norm:
+ # Path is in the sandbox, force it to be correct. MSVC sometimes returns a lowercase path.
+ path = correct_case_path + path[correct_case_path_len:]
+ else:
+ # Check the drive letter
+ if lowercase and (drive != drive.lower()):
+ path = drive.lower() + tail
+ elif (not lowercase) and (drive != drive.upper()):
+ path = drive.upper() + tail
+ node = path_to_node(bld.root, path, cached_nodes)
+ else:
+ base_node = bld.bldnode
+ # when calling find_resource, make sure the path does not begin by '..'
+ path = [k for k in Utils.split_path(path) if k and k != '.']
+ while path[0] == '..':
+ path = path[1:]
+ base_node = base_node.parent
+
+ node = path_to_node(base_node, path, cached_nodes)
+
+ if not node:
+ raise ValueError('could not find %r for %r' % (path, self))
+ else:
+ if not c_preproc.go_absolute:
+ if not (node.is_child_of(bld.srcnode) or node.is_child_of(bld.bldnode)):
+ # System library
+ Logs.debug('msvcdeps: Ignoring system include %r', node)
+ continue
+
+ if id(node) == id(self.inputs[0]):
+ # Self-dependency
+ continue
+
+ resolved_nodes.append(node)
+
+ bld.node_deps[self.uid()] = resolved_nodes
+ bld.raw_deps[self.uid()] = unresolved_names
+
+ try:
+ del self.cache_sig
+ except AttributeError:
+ pass
+
+ Task.Task.post_run(self)
+
+def scan(self):
+ if self.env.CC_NAME not in supported_compilers:
+ return super(self.derived_msvcdeps, self).scan()
+
+ resolved_nodes = self.generator.bld.node_deps.get(self.uid(), [])
+ unresolved_names = []
+ return (resolved_nodes, unresolved_names)
+
+def sig_implicit_deps(self):
+ if self.env.CC_NAME not in supported_compilers:
+ return super(self.derived_msvcdeps, self).sig_implicit_deps()
+
+ try:
+ return Task.Task.sig_implicit_deps(self)
+ except Errors.WafError:
+ return Utils.SIG_NIL
+
+def exec_command(self, cmd, **kw):
+ if self.env.CC_NAME not in supported_compilers:
+ return super(self.derived_msvcdeps, self).exec_command(cmd, **kw)
+
+ if not 'cwd' in kw:
+ kw['cwd'] = self.get_cwd()
+
+ if self.env.PATH:
+ env = kw['env'] = dict(kw.get('env') or self.env.env or os.environ)
+ env['PATH'] = self.env.PATH if isinstance(self.env.PATH, str) else os.pathsep.join(self.env.PATH)
+
+ # The Visual Studio IDE adds an environment variable that causes
+ # the MS compiler to send its textual output directly to the
+ # debugging window rather than normal stdout/stderr.
+ #
+ # This is unrecoverably bad for this tool because it will cause
+ # all the dependency scanning to see an empty stdout stream and
+ # assume that the file being compiled uses no headers.
+ #
+ # See http://blogs.msdn.com/b/freik/archive/2006/04/05/569025.aspx
+ #
+ # Attempting to repair the situation by deleting the offending
+ # envvar at this point in tool execution will not be good enough--
+ # its presence poisons the 'waf configure' step earlier. We just
+ # want to put a sanity check here in order to help developers
+ # quickly diagnose the issue if an otherwise-good Waf tree
+ # is then executed inside the MSVS IDE.
+ assert 'VS_UNICODE_OUTPUT' not in kw['env']
+
+ cmd, args = self.split_argfile(cmd)
+ try:
+ (fd, tmp) = tempfile.mkstemp()
+ os.write(fd, '\r\n'.join(args).encode())
+ os.close(fd)
+
+ self.msvcdeps_paths = []
+ kw['env'] = kw.get('env', os.environ.copy())
+ kw['cwd'] = kw.get('cwd', os.getcwd())
+ kw['quiet'] = Context.STDOUT
+ kw['output'] = Context.STDOUT
+
+ out = []
+ if Logs.verbose:
+ Logs.debug('argfile: @%r -> %r', tmp, args)
+ try:
+ raw_out = self.generator.bld.cmd_and_log(cmd + ['@' + tmp], **kw)
+ ret = 0
+ except Errors.WafError as e:
+ raw_out = e.stdout
+ ret = e.returncode
+
+ for line in raw_out.splitlines():
+ if line.startswith(INCLUDE_PATTERN):
+ inc_path = line[len(INCLUDE_PATTERN):].strip()
+ Logs.debug('msvcdeps: Regex matched %s', inc_path)
+ self.msvcdeps_paths.append(inc_path)
+ else:
+ out.append(line)
+
+ # Pipe through the remaining stdout content (not related to /showIncludes)
+ if self.generator.bld.logger:
+ self.generator.bld.logger.debug('out: %s' % os.linesep.join(out))
+ else:
+ sys.stdout.write(os.linesep.join(out) + os.linesep)
+
+ return ret
+ finally:
+ try:
+ os.remove(tmp)
+ except OSError:
+ # anti-virus and indexers can keep files open -_-
+ pass
+
-'''
-Register a task subclass that has hooks for running our custom
-dependency calculations rather than the C/C++ stock c_preproc
-method.
-'''
def wrap_compiled_task(classname):
- derived_class = type(classname, (Task.classes[classname],), {})
-
- def post_run(self):
- if self.env.CC_NAME not in supported_compilers:
- return super(derived_class, self).post_run()
-
- if getattr(self, 'cached', None):
- return Task.Task.post_run(self)
-
- bld = self.generator.bld
- unresolved_names = []
- resolved_nodes = []
-
- lowercase = self.generator.msvcdeps_drive_lowercase
- correct_case_path = bld.path.abspath()
- correct_case_path_len = len(correct_case_path)
- correct_case_path_norm = os.path.normcase(correct_case_path)
-
- # Dynamically bind to the cache
- try:
- cached_nodes = bld.cached_nodes
- except AttributeError:
- cached_nodes = bld.cached_nodes = {}
-
- for path in self.msvcdeps_paths:
- node = None
- if os.path.isabs(path):
- # Force drive letter to match conventions of main source tree
- drive, tail = os.path.splitdrive(path)
-
- if os.path.normcase(path[:correct_case_path_len]) == correct_case_path_norm:
- # Path is in the sandbox, force it to be correct. MSVC sometimes returns a lowercase path.
- path = correct_case_path + path[correct_case_path_len:]
- else:
- # Check the drive letter
- if lowercase and (drive != drive.lower()):
- path = drive.lower() + tail
- elif (not lowercase) and (drive != drive.upper()):
- path = drive.upper() + tail
- node = path_to_node(bld.root, path, cached_nodes)
- else:
- base_node = bld.bldnode
- # when calling find_resource, make sure the path does not begin by '..'
- path = [k for k in Utils.split_path(path) if k and k != '.']
- while path[0] == '..':
- path = path[1:]
- base_node = base_node.parent
-
- node = path_to_node(base_node, path, cached_nodes)
-
- if not node:
- raise ValueError('could not find %r for %r' % (path, self))
- else:
- if not c_preproc.go_absolute:
- if not (node.is_child_of(bld.srcnode) or node.is_child_of(bld.bldnode)):
- # System library
- Logs.debug('msvcdeps: Ignoring system include %r' % node)
- continue
-
- if id(node) == id(self.inputs[0]):
- # Self-dependency
- continue
-
- resolved_nodes.append(node)
-
- bld.node_deps[self.uid()] = resolved_nodes
- bld.raw_deps[self.uid()] = unresolved_names
-
- try:
- del self.cache_sig
- except:
- pass
-
- Task.Task.post_run(self)
-
- def scan(self):
- if self.env.CC_NAME not in supported_compilers:
- return super(derived_class, self).scan()
-
- resolved_nodes = self.generator.bld.node_deps.get(self.uid(), [])
- unresolved_names = []
- return (resolved_nodes, unresolved_names)
-
- def sig_implicit_deps(self):
- if self.env.CC_NAME not in supported_compilers:
- return super(derived_class, self).sig_implicit_deps()
-
- try:
- return Task.Task.sig_implicit_deps(self)
- except Errors.WafError:
- return Utils.SIG_NIL
-
- def exec_response_command(self, cmd, **kw):
- # exec_response_command() is only called from inside msvc.py anyway
- assert self.env.CC_NAME in supported_compilers
-
- # Only bother adding '/showIncludes' to compile tasks
- if isinstance(self, (c.c, cxx.cxx)):
- try:
- # The Visual Studio IDE adds an environment variable that causes
- # the MS compiler to send its textual output directly to the
- # debugging window rather than normal stdout/stderr.
- #
- # This is unrecoverably bad for this tool because it will cause
- # all the dependency scanning to see an empty stdout stream and
- # assume that the file being compiled uses no headers.
- #
- # See http://blogs.msdn.com/b/freik/archive/2006/04/05/569025.aspx
- #
- # Attempting to repair the situation by deleting the offending
- # envvar at this point in tool execution will not be good enough--
- # its presence poisons the 'waf configure' step earlier. We just
- # want to put a sanity check here in order to help developers
- # quickly diagnose the issue if an otherwise-good Waf tree
- # is then executed inside the MSVS IDE.
- assert 'VS_UNICODE_OUTPUT' not in kw['env']
-
- tmp = None
-
- # This block duplicated from Waflib's msvc.py
- if sys.platform.startswith('win') and isinstance(cmd, list) and len(' '.join(cmd)) >= 8192:
- program = cmd[0]
- cmd = [self.quote_response_command(x) for x in cmd]
- (fd, tmp) = tempfile.mkstemp()
- os.write(fd, '\r\n'.join(i.replace('\\', '\\\\') for i in cmd[1:]).encode())
- os.close(fd)
- cmd = [program, '@' + tmp]
- # ... end duplication
-
- self.msvcdeps_paths = []
-
- kw['env'] = kw.get('env', os.environ.copy())
- kw['cwd'] = kw.get('cwd', os.getcwd())
- kw['quiet'] = Context.STDOUT
- kw['output'] = Context.STDOUT
-
- out = []
-
- try:
- raw_out = self.generator.bld.cmd_and_log(cmd, **kw)
- ret = 0
- except Errors.WafError as e:
- raw_out = e.stdout
- ret = e.returncode
-
- for line in raw_out.splitlines():
- if line.startswith(INCLUDE_PATTERN):
- inc_path = line[len(INCLUDE_PATTERN):].strip()
- Logs.debug('msvcdeps: Regex matched %s' % inc_path)
- self.msvcdeps_paths.append(inc_path)
- else:
- out.append(line)
-
- # Pipe through the remaining stdout content (not related to /showIncludes)
- if self.generator.bld.logger:
- self.generator.bld.logger.debug('out: %s' % os.linesep.join(out))
- else:
- sys.stdout.write(os.linesep.join(out) + os.linesep)
-
- finally:
- if tmp:
- try:
- os.remove(tmp)
- except OSError:
- pass
-
- return ret
- else:
- # Use base class's version of this method for linker tasks
- return super(derived_class, self).exec_response_command(cmd, **kw)
-
- def can_retrieve_cache(self):
- # msvcdeps and netcaching are incompatible, so disable the cache
- if self.env.CC_NAME not in supported_compilers:
- return super(derived_class, self).can_retrieve_cache()
- self.nocache = True # Disable sending the file to the cache
- return False
-
- derived_class.post_run = post_run
- derived_class.scan = scan
- derived_class.sig_implicit_deps = sig_implicit_deps
- derived_class.exec_response_command = exec_response_command
- derived_class.can_retrieve_cache = can_retrieve_cache
+ derived_class = type(classname, (Task.classes[classname],), {})
+ derived_class.derived_msvcdeps = derived_class
+ derived_class.post_run = post_run
+ derived_class.scan = scan
+ derived_class.sig_implicit_deps = sig_implicit_deps
+ derived_class.exec_command = exec_command
for k in ('c', 'cxx'):
- wrap_compiled_task(k)
+ if k in Task.classes:
+ wrap_compiled_task(k)
+
+def options(opt):
+ raise ValueError('Do not load msvcdeps options')
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#! /usr/bin/env python
# encoding: utf-8
# Avalanche Studios 2009-2011
To generate solution files:
$ waf configure msvs
-To customize the outputs, provide subclasses in your wscript files:
-
-from waflib.extras import msvs
-class vsnode_target(msvs.vsnode_target):
- def get_build_command(self, props):
- # likely to be required
- return "waf.bat build"
- def collect_source(self):
- # likely to be required
- ...
-class msvs_bar(msvs.msvs_generator):
- def init(self):
- msvs.msvs_generator.init(self)
- self.vsnode_target = vsnode_target
+To customize the outputs, provide subclasses in your wscript files::
+
+ from waflib.extras import msvs
+ class vsnode_target(msvs.vsnode_target):
+ def get_build_command(self, props):
+ # likely to be required
+ return "waf.bat build"
+ def collect_source(self):
+ # likely to be required
+ ...
+ class msvs_bar(msvs.msvs_generator):
+ def init(self):
+ msvs.msvs_generator.init(self)
+ self.vsnode_target = vsnode_target
The msvs class re-uses the same build() function for reading the targets (task generators),
-you may therefore specify msvs settings on the context object:
+you may therefore specify msvs settings on the context object::
-def build(bld):
- bld.solution_name = 'foo.sln'
- bld.waf_command = 'waf.bat'
- bld.projects_dir = bld.srcnode.make_node('.depproj')
- bld.projects_dir.mkdir()
+ def build(bld):
+ bld.solution_name = 'foo.sln'
+ bld.waf_command = 'waf.bat'
+ bld.projects_dir = bld.srcnode.make_node('.depproj')
+ bld.projects_dir.mkdir()
For visual studio 2008, the command is called 'msvs2008', and the classes
such as vsnode_target are wrapped by a decorator class 'wrap_2008' to
provide special functionality.
+To customize platform toolsets, pass additional parameters, for example::
+
+ class msvs_2013(msvs.msvs_generator):
+ cmd = 'msvs2013'
+ numver = '13.00'
+ vsver = '2013'
+ platform_toolset_ver = 'v120'
+
ASSUMPTIONS:
* a project can be either a directory or a target, vcxproj files are written only for targets that have source files
* each project is a vcxproj file, therefore the project uuid needs only to be a hash of the absolute path
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='${b.configuration}|${b.platform}'" Label="Configuration">
<ConfigurationType>Makefile</ConfigurationType>
<OutDir>${b.outdir}</OutDir>
- <PlatformToolset>v110</PlatformToolset>
+ <PlatformToolset>${project.platform_toolset_ver}</PlatformToolset>
</PropertyGroup>
${endfor}
extr = []
def repl(match):
g = match.group
- if g('dollar'): return "$"
+ if g('dollar'):
+ return "$"
elif g('backslash'):
return "\\"
elif g('subst'):
app("lst.append(%r)" % params[x])
f = extr[x]
- if f.startswith('if') or f.startswith('for'):
+ if f.startswith(('if', 'for')):
app(f + ':')
indent += 1
elif f.startswith('py:'):
app(f[3:])
- elif f.startswith('endif') or f.startswith('endfor'):
+ elif f.startswith(('endif', 'endfor')):
indent -= 1
- elif f.startswith('else') or f.startswith('elif'):
+ elif f.startswith(('else', 'elif')):
indent -= 1
app(f + ':')
indent += 1
BOM = '\xef\xbb\xbf'
try:
- BOM = bytes(BOM, 'iso8859-1') # python 3
+ BOM = bytes(BOM, 'latin-1') # python 3
except TypeError:
pass
data = data.decode(sys.getfilesystemencoding(), 'replace')
data = data.encode('utf-8')
- if self.name.endswith('.vcproj') or self.name.endswith('.vcxproj'):
+ if self.name.endswith(('.vcproj', '.vcxproj')):
data = BOM + data
try:
except (IOError, ValueError):
self.write(data, flags=flags)
else:
- Logs.debug('msvs: skipping %s' % self.win32path())
+ Logs.debug('msvs: skipping %s', self.win32path())
Node.Node.stealth_write = stealth_write
re_win32 = re.compile(r'^([/\\]cygdrive)?[/\\]([a-z])([^a-z0-9_-].*)', re.I)
self.path = node
self.uuid = make_uuid(node.win32path())
self.name = node.name
+ self.platform_toolset_ver = getattr(ctx, 'platform_toolset_ver', None)
self.title = self.path.win32path()
self.source = [] # list of node objects
self.build_properties = [] # list of properties (nmake commands, output dir, etc)
return lst
def write(self):
- Logs.debug('msvs: creating %r' % self.path)
+ Logs.debug('msvs: creating %r', self.path)
# first write the project file
template1 = compile_template(PROJECT_TEMPLATE)
required for writing the source files
"""
name = node.name
- if name.endswith('.cpp') or name.endswith('.c'):
+ if name.endswith(('.cpp', '.c')):
return 'ClCompile'
return 'ClInclude'
vsnode_alias.__init__(self, ctx, node, name)
self.tg = self.ctx() # fake one, cannot remove
self.exclude_files = Node.exclude_regs + '''
-waf-1.8.*
-waf3-1.8.*/**
-.waf-1.8.*
-.waf3-1.8.*/**
+waf-2*
+waf3-2*/**
+.waf-2*
+.waf3-2*/**
**/*.sdf
**/*.suo
**/*.ncb
'''generates a visual studio 2010 solution'''
cmd = 'msvs'
fun = 'build'
+ numver = '11.00' # Visual Studio Version Number
+ vsver = '2010' # Visual Studio Version Year
+ platform_toolset_ver = 'v110' # Platform Toolset Version Number
def init(self):
"""
if not getattr(self, 'vsnode_project_view', None):
self.vsnode_project_view = vsnode_project_view
- self.numver = '11.00'
- self.vsver = '2010'
+ self.numver = self.__class__.numver
+ self.vsver = self.__class__.vsver
+ self.platform_toolset_ver = self.__class__.platform_toolset_ver
def execute(self):
"""
# and finally write the solution file
node = self.get_solution_node()
node.parent.mkdir()
- Logs.warn('Creating %r' % node)
+ Logs.warn('Creating %r', node)
template1 = compile_template(SOLUTION_TEMPLATE)
sln_str = template1(self)
sln_str = rm_blank_lines(sln_str)
return ''
def write(self):
- Logs.debug('msvs: creating %r' % self.path)
+ Logs.debug('msvs: creating %r', self.path)
template1 = compile_template(self.project_template)
proj_str = template1(self)
proj_str = rm_blank_lines(proj_str)
'''generates a visual studio 2008 solution'''
cmd = 'msvs2008'
fun = msvs_generator.fun
+ numver = '10.00'
+ vsver = '2008'
def init(self):
if not getattr(self, 'project_extension', None):
self.vsnode_project_view = wrap_2008(vsnode_project_view)
msvs_generator.init(self)
- self.numver = '10.00'
- self.vsver = '2008'
def options(ctx):
"""
else:
old(ctx)
BuildContext.execute = override_build_state
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2011-2015 (ita)
def put_data(conn, data):
if sys.hexversion > 0x3000000:
- data = data.encode('iso8859-1')
+ data = data.encode('latin-1')
cnt = 0
while cnt < len(data):
sent = conn.send(data[cnt:])
buf.append(data)
cnt += len(data)
if sys.hexversion > 0x3000000:
- ret = ''.encode('iso8859-1').join(buf)
- ret = ret.decode('iso8859-1')
+ ret = ''.encode('latin-1').join(buf)
+ ret = ret.decode('latin-1')
else:
ret = ''.join(buf)
return ret
cnt += len(data)
if sys.hexversion > 0x3000000:
- ret = ''.encode('iso8859-1').join(buf)
- ret = ret.decode('iso8859-1')
+ ret = ''.encode('latin-1').join(buf)
+ ret = ret.decode('latin-1')
else:
ret = ''.join(buf)
all_sigs_in_cache = (time.time(), ret.splitlines())
- Logs.debug('netcache: server cache has %r entries' % len(all_sigs_in_cache[1]))
+ Logs.debug('netcache: server cache has %r entries', len(all_sigs_in_cache[1]))
if not ssig in all_sigs_in_cache[1]:
raise ValueError('no file %s in cache' % ssig)
recv_file(conn, ssig, cnt, p)
cnt += 1
except MissingFile as e:
- Logs.debug('netcache: file is not in the cache %r' % e)
+ Logs.debug('netcache: file is not in the cache %r', e)
err = True
-
except Exception as e:
- Logs.debug('netcache: could not get the files %r' % e)
+ Logs.debug('netcache: could not get the files %r', self.outputs)
+ if Logs.verbose > 1:
+ Logs.debug('netcache: exception %r', e)
err = True
# broken connection? remove this one
close_connection(conn)
conn = None
+ else:
+ Logs.debug('netcache: obtained %r from cache', self.outputs)
+
finally:
release_connection(conn)
if err:
return False
- for node in self.outputs:
- node.sig = sig
- #if self.generator.bld.progress_bar < 1:
- # self.generator.bld.to_log('restoring from cache %r\n' % node.abspath())
-
self.cached = True
return True
if not conn:
conn = get_connection(push=True)
sock_send(conn, ssig, cnt, node.abspath())
+ Logs.debug('netcache: sent %r', node)
except Exception as e:
- Logs.debug("netcache: could not push the files %r" % e)
+ Logs.debug('netcache: could not push the files %r', e)
# broken connection? remove this one
close_connection(conn)
push_addr = None
setup_netcache(bld, push_addr, pull_addr)
+
+++ /dev/null
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2015 (ita)
-
-"""
-Override the build commands to write empty files.
-This is useful for profiling and evaluating the Python overhead.
-
-To use::
-
- def build(bld):
- ...
- bld.load('nobuild')
-
-"""
-
-from waflib import Task
-def build(bld):
- def run(self):
- for x in self.outputs:
- x.write('')
- for (name, cls) in Task.classes.items():
- cls.run = run
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/python
# Grygoriy Fuchedzhy 2010
"""
Support for converting linked targets to ihex, srec or binary files using
-objcopy. Use the 'objcopy' feature in conjuction with the 'cc' or 'cxx'
+objcopy. Use the 'objcopy' feature in conjunction with the 'cc' or 'cxx'
feature. The 'objcopy' feature uses the following attributes:
objcopy_bfdname Target object format name (eg. ihex, srec, binary).
pass
if self.objcopy_install_path:
- self.bld.install_files(self.objcopy_install_path,
- task.outputs[0],
- env=task.env.derive())
+ self.add_install_files(install_to=self.objcopy_install_path, install_from=task.outputs[0])
def configure(ctx):
ctx.find_program('objcopy', var='OBJCOPY', mandatory=True)
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2010 (ita)
+
+"ocaml support"
+
+import os, re
+from waflib import Utils, Task
+from waflib.Logs import error
+from waflib.TaskGen import feature, before_method, after_method, extension
+
+EXT_MLL = ['.mll']
+EXT_MLY = ['.mly']
+EXT_MLI = ['.mli']
+EXT_MLC = ['.c']
+EXT_ML = ['.ml']
+
+open_re = re.compile(r'^\s*open\s+([a-zA-Z]+)(;;){0,1}$', re.M)
+foo = re.compile(r"""(\(\*)|(\*\))|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^()*"'\\]*)""", re.M)
+def filter_comments(txt):
+ meh = [0]
+ def repl(m):
+ if m.group(1):
+ meh[0] += 1
+ elif m.group(2):
+ meh[0] -= 1
+ elif not meh[0]:
+ return m.group()
+ return ''
+ return foo.sub(repl, txt)
+
+def scan(self):
+ node = self.inputs[0]
+ code = filter_comments(node.read())
+
+ global open_re
+ names = []
+ import_iterator = open_re.finditer(code)
+ if import_iterator:
+ for import_match in import_iterator:
+ names.append(import_match.group(1))
+ found_lst = []
+ raw_lst = []
+ for name in names:
+ nd = None
+ for x in self.incpaths:
+ nd = x.find_resource(name.lower()+'.ml')
+ if not nd:
+ nd = x.find_resource(name+'.ml')
+ if nd:
+ found_lst.append(nd)
+ break
+ else:
+ raw_lst.append(name)
+
+ return (found_lst, raw_lst)
+
+native_lst=['native', 'all', 'c_object']
+bytecode_lst=['bytecode', 'all']
+
+@feature('ocaml')
+def init_ml(self):
+ Utils.def_attrs(self,
+ type = 'all',
+ incpaths_lst = [],
+ bld_incpaths_lst = [],
+ mlltasks = [],
+ mlytasks = [],
+ mlitasks = [],
+ native_tasks = [],
+ bytecode_tasks = [],
+ linktasks = [],
+ bytecode_env = None,
+ native_env = None,
+ compiled_tasks = [],
+ includes = '',
+ uselib = '',
+ are_deps_set = 0)
+
+@feature('ocaml')
+@after_method('init_ml')
+def init_envs_ml(self):
+
+ self.islibrary = getattr(self, 'islibrary', False)
+
+ global native_lst, bytecode_lst
+ self.native_env = None
+ if self.type in native_lst:
+ self.native_env = self.env.derive()
+ if self.islibrary:
+ self.native_env['OCALINKFLAGS'] = '-a'
+
+ self.bytecode_env = None
+ if self.type in bytecode_lst:
+ self.bytecode_env = self.env.derive()
+ if self.islibrary:
+ self.bytecode_env['OCALINKFLAGS'] = '-a'
+
+ if self.type == 'c_object':
+ self.native_env.append_unique('OCALINKFLAGS_OPT', '-output-obj')
+
+@feature('ocaml')
+@before_method('apply_vars_ml')
+@after_method('init_envs_ml')
+def apply_incpaths_ml(self):
+ inc_lst = self.includes.split()
+ lst = self.incpaths_lst
+ for dir in inc_lst:
+ node = self.path.find_dir(dir)
+ if not node:
+ error("node not found: " + str(dir))
+ continue
+ if not node in lst:
+ lst.append(node)
+ self.bld_incpaths_lst.append(node)
+ # now the nodes are added to self.incpaths_lst
+
+@feature('ocaml')
+@before_method('process_source')
+def apply_vars_ml(self):
+ for i in self.incpaths_lst:
+ if self.bytecode_env:
+ app = self.bytecode_env.append_value
+ app('OCAMLPATH', ['-I', i.bldpath(), '-I', i.srcpath()])
+
+ if self.native_env:
+ app = self.native_env.append_value
+ app('OCAMLPATH', ['-I', i.bldpath(), '-I', i.srcpath()])
+
+ varnames = ['INCLUDES', 'OCAMLFLAGS', 'OCALINKFLAGS', 'OCALINKFLAGS_OPT']
+ for name in self.uselib.split():
+ for vname in varnames:
+ cnt = self.env[vname+'_'+name]
+ if cnt:
+ if self.bytecode_env:
+ self.bytecode_env.append_value(vname, cnt)
+ if self.native_env:
+ self.native_env.append_value(vname, cnt)
+
+@feature('ocaml')
+@after_method('process_source')
+def apply_link_ml(self):
+
+ if self.bytecode_env:
+ ext = self.islibrary and '.cma' or '.run'
+
+ linktask = self.create_task('ocalink')
+ linktask.bytecode = 1
+ linktask.set_outputs(self.path.find_or_declare(self.target + ext))
+ linktask.env = self.bytecode_env
+ self.linktasks.append(linktask)
+
+ if self.native_env:
+ if self.type == 'c_object':
+ ext = '.o'
+ elif self.islibrary:
+ ext = '.cmxa'
+ else:
+ ext = ''
+
+ linktask = self.create_task('ocalinkx')
+ linktask.set_outputs(self.path.find_or_declare(self.target + ext))
+ linktask.env = self.native_env
+ self.linktasks.append(linktask)
+
+ # we produce a .o file to be used by gcc
+ self.compiled_tasks.append(linktask)
+
+@extension(*EXT_MLL)
+def mll_hook(self, node):
+ mll_task = self.create_task('ocamllex', node, node.change_ext('.ml'))
+ mll_task.env = self.native_env.derive()
+ self.mlltasks.append(mll_task)
+
+ self.source.append(mll_task.outputs[0])
+
+@extension(*EXT_MLY)
+def mly_hook(self, node):
+ mly_task = self.create_task('ocamlyacc', node, [node.change_ext('.ml'), node.change_ext('.mli')])
+ mly_task.env = self.native_env.derive()
+ self.mlytasks.append(mly_task)
+ self.source.append(mly_task.outputs[0])
+
+ task = self.create_task('ocamlcmi', mly_task.outputs[1], mly_task.outputs[1].change_ext('.cmi'))
+ task.env = self.native_env.derive()
+
+@extension(*EXT_MLI)
+def mli_hook(self, node):
+ task = self.create_task('ocamlcmi', node, node.change_ext('.cmi'))
+ task.env = self.native_env.derive()
+ self.mlitasks.append(task)
+
+@extension(*EXT_MLC)
+def mlc_hook(self, node):
+ task = self.create_task('ocamlcc', node, node.change_ext('.o'))
+ task.env = self.native_env.derive()
+ self.compiled_tasks.append(task)
+
+@extension(*EXT_ML)
+def ml_hook(self, node):
+ if self.native_env:
+ task = self.create_task('ocamlx', node, node.change_ext('.cmx'))
+ task.env = self.native_env.derive()
+ task.incpaths = self.bld_incpaths_lst
+ self.native_tasks.append(task)
+
+ if self.bytecode_env:
+ task = self.create_task('ocaml', node, node.change_ext('.cmo'))
+ task.env = self.bytecode_env.derive()
+ task.bytecode = 1
+ task.incpaths = self.bld_incpaths_lst
+ self.bytecode_tasks.append(task)
+
+def compile_may_start(self):
+
+ if not getattr(self, 'flag_deps', ''):
+ self.flag_deps = 1
+
+ # the evil part is that we can only compute the dependencies after the
+ # source files can be read (this means actually producing the source files)
+ if getattr(self, 'bytecode', ''):
+ alltasks = self.generator.bytecode_tasks
+ else:
+ alltasks = self.generator.native_tasks
+
+ self.signature() # ensure that files are scanned - unfortunately
+ tree = self.generator.bld
+ for node in self.inputs:
+ lst = tree.node_deps[self.uid()]
+ for depnode in lst:
+ for t in alltasks:
+ if t == self:
+ continue
+ if depnode in t.inputs:
+ self.set_run_after(t)
+
+ # TODO necessary to get the signature right - for now
+ delattr(self, 'cache_sig')
+ self.signature()
+
+ return Task.Task.runnable_status(self)
+
+class ocamlx(Task.Task):
+ """native caml compilation"""
+ color = 'GREEN'
+ run_str = '${OCAMLOPT} ${OCAMLPATH} ${OCAMLFLAGS} ${OCAMLINCLUDES} -c -o ${TGT} ${SRC}'
+ scan = scan
+ runnable_status = compile_may_start
+
+class ocaml(Task.Task):
+ """bytecode caml compilation"""
+ color = 'GREEN'
+ run_str = '${OCAMLC} ${OCAMLPATH} ${OCAMLFLAGS} ${OCAMLINCLUDES} -c -o ${TGT} ${SRC}'
+ scan = scan
+ runnable_status = compile_may_start
+
+class ocamlcmi(Task.Task):
+ """interface generator (the .i files?)"""
+ color = 'BLUE'
+ run_str = '${OCAMLC} ${OCAMLPATH} ${OCAMLINCLUDES} -o ${TGT} -c ${SRC}'
+	before = ['ocamlcc', 'ocaml']
+
+class ocamlcc(Task.Task):
+ """ocaml to c interfaces"""
+ color = 'GREEN'
+ run_str = 'cd ${TGT[0].bld_dir()} && ${OCAMLOPT} ${OCAMLFLAGS} ${OCAMLPATH} ${OCAMLINCLUDES} -c ${SRC[0].abspath()}'
+
+class ocamllex(Task.Task):
+ """lexical generator"""
+ color = 'BLUE'
+ run_str = '${OCAMLLEX} ${SRC} -o ${TGT}'
+ before = ['ocamlcmi', 'ocaml', 'ocamlcc']
+
+class ocamlyacc(Task.Task):
+ """parser generator"""
+ color = 'BLUE'
+ run_str = '${OCAMLYACC} -b ${tsk.base()} ${SRC}'
+ before = ['ocamlcmi', 'ocaml', 'ocamlcc']
+
+ def base(self):
+ node = self.outputs[0]
+ s = os.path.splitext(node.name)[0]
+ return node.bld_dir() + os.sep + s
+
+def link_may_start(self):
+
+ if getattr(self, 'bytecode', 0):
+ alltasks = self.generator.bytecode_tasks
+ else:
+ alltasks = self.generator.native_tasks
+
+ for x in alltasks:
+ if not x.hasrun:
+ return Task.ASK_LATER
+
+ if not getattr(self, 'order', ''):
+
+ # now reorder the inputs given the task dependencies
+ # this part is difficult, we do not have a total order on the tasks
+ # if the dependencies are wrong, this may not stop
+ seen = []
+ pendant = []+alltasks
+ while pendant:
+ task = pendant.pop(0)
+ if task in seen:
+ continue
+ for x in task.run_after:
+ if not x in seen:
+ pendant.append(task)
+ break
+ else:
+ seen.append(task)
+ self.inputs = [x.outputs[0] for x in seen]
+ self.order = 1
+ return Task.Task.runnable_status(self)
+
+class ocalink(Task.Task):
+ """bytecode caml link"""
+ color = 'YELLOW'
+ run_str = '${OCAMLC} -o ${TGT} ${OCAMLINCLUDES} ${OCALINKFLAGS} ${SRC}'
+ runnable_status = link_may_start
+ after = ['ocaml', 'ocamlcc']
+
+class ocalinkx(Task.Task):
+ """native caml link"""
+ color = 'YELLOW'
+ run_str = '${OCAMLOPT} -o ${TGT} ${OCAMLINCLUDES} ${OCALINKFLAGS_OPT} ${SRC}'
+ runnable_status = link_may_start
+ after = ['ocamlx', 'ocamlcc']
+
+def configure(conf):
+ opt = conf.find_program('ocamlopt', var='OCAMLOPT', mandatory=False)
+ occ = conf.find_program('ocamlc', var='OCAMLC', mandatory=False)
+ if (not opt) or (not occ):
+ conf.fatal('The objective caml compiler was not found:\ninstall it or make it available in your PATH')
+
+ v = conf.env
+ v['OCAMLC'] = occ
+ v['OCAMLOPT'] = opt
+ v['OCAMLLEX'] = conf.find_program('ocamllex', var='OCAMLLEX', mandatory=False)
+ v['OCAMLYACC'] = conf.find_program('ocamlyacc', var='OCAMLYACC', mandatory=False)
+ v['OCAMLFLAGS'] = ''
+ where = conf.cmd_and_log(conf.env.OCAMLC + ['-where']).strip()+os.sep
+ v['OCAMLLIB'] = where
+ v['LIBPATH_OCAML'] = where
+ v['INCLUDES_OCAML'] = where
+ v['LIB_OCAML'] = 'camlrun'
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2011
else:
tmp = self.root.make_node(dst)
tmp.write(web.read())
- Logs.warn('Downloaded %s from %s' % (tmp.abspath(), url))
+ Logs.warn('Downloaded %s from %s', tmp.abspath(), url)
break
else:
self.fatal('Could not get the package %s' % src)
def load_packages(self):
self.get_package_cache_dir()
# read the dependencies, get the archives, ..
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2007-2010 (ita)
...
"""
-import time, sys, re
-try: from Queue import Queue
-except: from queue import Queue
-from waflib import Runner, Options, Utils, Task, Logs, Errors
-
-#import random
-#random.seed(100)
+import re, sys, threading, time, traceback
+try:
+ from Queue import Queue
+except:
+ from queue import Queue
+from waflib import Runner, Options, Task, Logs, Errors
SVG_TEMPLATE = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.0//EN" "http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
if (x) {
g.setAttribute('class', g.getAttribute('class') + ' over');
x.setAttribute('class', x.getAttribute('class') + ' over');
- showInfo(e, g.id);
+ showInfo(e, g.id, e.target.attributes.tooltip.value);
}
}, false);
}
}, false);
-function showInfo(evt, txt) {
+function showInfo(evt, txt, details) {
+${if project.tooltip}
tooltip = document.getElementById('tooltip');
var t = document.getElementById('tooltiptext');
- t.firstChild.data = txt;
+ t.firstChild.data = txt + " " + details;
var x = evt.clientX + 9;
if (x > 250) { x -= t.getComputedTextLength() + 16; }
var r = document.getElementById('tooltiprect');
r.setAttribute('width', t.getComputedTextLength() + 6);
+${endif}
}
function hideInfo(evt) {
<!-- inkscape requires a big rectangle or it will not export the pictures properly -->
<rect
x='${project.x}' y='${project.y}' width='${project.width}' height='${project.height}'
- style="font-size:10;fill:#ffffff;fill-opacity:0.01;fill-rule:evenodd;stroke:#ffffff;"
- />
+ style="font-size:10;fill:#ffffff;fill-opacity:0.01;fill-rule:evenodd;stroke:#ffffff;"></rect>
${if project.title}
<text x="${project.title_x}" y="${project.title_y}"
${for cls in project.groups}
<g id='${cls.classname}'>
${for rect in cls.rects}
- <rect x='${rect.x}' y='${rect.y}' width='${rect.width}' height='${rect.height}' style="font-size:10;fill:${rect.color};fill-rule:evenodd;stroke:#000000;stroke-width:0.4;" />
+ <rect x='${rect.x}' y='${rect.y}' width='${rect.width}' height='${rect.height}' tooltip='${rect.name}' style="font-size:10;fill:${rect.color};fill-rule:evenodd;stroke:#000000;stroke-width:0.4;" />
${endfor}
</g>
${endfor}
</g>
${endfor}
+${if project.tooltip}
<g transform="translate(0,0)" visibility="hidden" id="tooltip">
<rect id="tooltiprect" y="-15" x="-3" width="1" height="20" style="stroke:black;fill:#edefc2;stroke-width:1"/>
- <text id="tooltiptext" style="font-family:Arial; font-size:12;fill:black;" />
+ <text id="tooltiptext" style="font-family:Arial; font-size:12;fill:black;"> </text>
</g>
+${endif}
</svg>
"""
extr = []
def repl(match):
g = match.group
- if g('dollar'): return "$"
+ if g('dollar'):
+ return "$"
elif g('backslash'):
return "\\"
elif g('subst'):
app("lst.append(%r)" % params[x])
f = extr[x]
- if f.startswith('if') or f.startswith('for'):
+ if f.startswith(('if', 'for')):
app(f + ':')
indent += 1
elif f.startswith('py:'):
app(f[3:])
- elif f.startswith('endif') or f.startswith('endfor'):
+ elif f.startswith(('endif', 'endfor')):
indent -= 1
- elif f.startswith('else') or f.startswith('elif'):
+ elif f.startswith(('else', 'elif')):
indent -= 1
app(f + ':')
indent += 1
return color2code['RED']
def process(self):
- m = self.master
- if m.stop:
- m.out.put(self)
- return
-
- self.master.set_running(1, id(Utils.threading.currentThread()), self)
-
- # remove the task signature immediately before it is executed
- # in case of failure the task will be executed again
+ m = self.generator.bld.producer
try:
+ # TODO another place for this?
del self.generator.bld.task_sigs[self.uid()]
- except:
+ except KeyError:
pass
+ self.generator.bld.producer.set_running(1, self)
+
try:
- self.generator.bld.returned_tasks.append(self)
- self.log_display(self.generator.bld)
ret = self.run()
except Exception:
- self.err_msg = Utils.ex_stack()
+ self.err_msg = traceback.format_exc()
self.hasrun = Task.EXCEPTION
# TODO cleanup
m.error_handler(self)
- m.out.put(self)
return
if ret:
except Errors.WafError:
pass
except Exception:
- self.err_msg = Utils.ex_stack()
+ self.err_msg = traceback.format_exc()
self.hasrun = Task.EXCEPTION
else:
self.hasrun = Task.SUCCESS
if self.hasrun != Task.SUCCESS:
m.error_handler(self)
- self.master.set_running(-1, id(Utils.threading.currentThread()), self)
- m.out.put(self)
-Task.TaskBase.process_back = Task.TaskBase.process
-Task.TaskBase.process = process
+ self.generator.bld.producer.set_running(-1, self)
+
+Task.Task.process_back = Task.Task.process
+Task.Task.process = process
old_start = Runner.Parallel.start
def do_start(self):
make_picture(self)
Runner.Parallel.start = do_start
-def set_running(self, by, i, tsk):
- self.taskinfo.put( (i, id(tsk), time.time(), tsk.__class__.__name__, self.processed, self.count, by) )
+lock_running = threading.Lock()
+def set_running(self, by, tsk):
+ with lock_running:
+ try:
+ cache = self.lock_cache
+ except AttributeError:
+ cache = self.lock_cache = {}
+
+ i = 0
+ if by > 0:
+ vals = cache.values()
+ for i in range(self.numjobs):
+ if i not in vals:
+ cache[tsk] = i
+ break
+ else:
+ i = cache[tsk]
+ del cache[tsk]
+
+ self.taskinfo.put( (i, id(tsk), time.time(), tsk.__class__.__name__, self.processed, self.count, by, ",".join(map(str, tsk.outputs))) )
Runner.Parallel.set_running = set_running
def name2class(name):
acc = []
for x in tmp:
thread_count += x[6]
- acc.append("%d %d %f %r %d %d %d" % (x[0], x[1], x[2] - ini, x[3], x[4], x[5], thread_count))
+ acc.append("%d %d %f %r %d %d %d %s" % (x[0], x[1], x[2] - ini, x[3], x[4], x[5], thread_count, x[7]))
data_node = producer.bld.path.make_node('pdebug.dat')
data_node.write('\n'.join(acc))
end = line[2]
#print id, thread_id, begin, end
#acc.append( ( 10*thread_id, 10*(thread_id+1), 10*begin, 10*end ) )
- acc.append( (BAND * begin, BAND*thread_id, BAND*end - BAND*begin, BAND, line[3]) )
+ acc.append( (BAND * begin, BAND*thread_id, BAND*end - BAND*begin, BAND, line[3], line[7]) )
break
if Options.options.dmaxtime < 0.1:
model.width = gwidth + 4
model.height = gheight + 4
+ model.tooltip = not Options.options.dnotooltip
+
model.title = Options.options.dtitle
model.title_x = gwidth / 2
model.title_y = gheight + - 5
groups = {}
- for (x, y, w, h, clsname) in acc:
+ for (x, y, w, h, clsname, name) in acc:
try:
- groups[clsname].append((x, y, w, h))
+ groups[clsname].append((x, y, w, h, name))
except:
- groups[clsname] = [(x, y, w, h)]
+ groups[clsname] = [(x, y, w, h, name)]
# groups of rectangles (else js highlighting is slow)
model.groups = []
model.groups.append(g)
g.classname = name2class(cls)
g.rects = []
- for (x, y, w, h) in groups[cls]:
+ for (x, y, w, h, name) in groups[cls]:
r = tobject()
g.rects.append(r)
r.x = 2 + x * ratio
r.y = 2 + y
r.width = w * ratio
r.height = h
+ r.name = name
r.color = map_to_color(cls)
cnt = THREAD_AMOUNT
node = producer.bld.path.make_node('pdebug.svg')
node.write(txt)
- Logs.warn('Created the diagram %r' % node.abspath())
+ Logs.warn('Created the diagram %r', node)
def options(opt):
opt.add_option('--dtitle', action='store', default='Parallel build representation for %r' % ' '.join(sys.argv),
opt.add_option('--dtime', action='store', type='float', help='recording interval in seconds', default=0.009, dest='dtime')
opt.add_option('--dband', action='store', type='int', help='band width', default=22, dest='dband')
opt.add_option('--dmaxtime', action='store', type='float', help='maximum time, for drawing fair comparisons', default=0, dest='dmaxtime')
+ opt.add_option('--dnotooltip', action='store_true', help='disable tooltips', default=False, dest='dnotooltip')
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#! /usr/bin/env python
# encoding: utf-8
# Alexander Afanasyev (UCLA), 2014
x.env.append_value('CXXFLAGS', self.env['CXXPCH_F'] + [pch.target])
class gchx(Task.Task):
- run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CPPFLAGS} ${CXXPCH_FLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXXPCH_F:SRC} ${CXX_SRC_F}${SRC[0].abspath()} ${CXX_TGT_F}${TGT[0].abspath()}'
+ run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CXXPCH_FLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXXPCH_F:SRC} ${CXX_SRC_F}${SRC[0].abspath()} ${CXX_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}'
scan = c_preproc.scan
color = 'BLUE'
ext_out=['.h']
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#! /usr/bin/env python
# encoding: utf-8
#
or
$ pip install pep8
-To add the boost tool to the waf file:
+To add the pep8 tool to the waf file:
$ ./waf-light --tools=compat15,pep8
or, if you have waf >= 1.6.2
$ ./waf update --files=pep8
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Antoine Dechaume 2011
+
+"""
+Detect the PGI C compiler
+"""
+
+import sys, re
+from waflib import Errors
+from waflib.Configure import conf
+from waflib.Tools.compiler_c import c_compiler
+c_compiler['linux'].append('pgicc')
+
+@conf
+def find_pgi_compiler(conf, var, name):
+ """
+ Find the program name, and execute it to ensure it really is itself.
+ """
+ if sys.platform == 'cygwin':
+ conf.fatal('The PGI compiler does not work on Cygwin')
+
+ v = conf.env
+ cc = None
+ if v[var]:
+ cc = v[var]
+ elif var in conf.environ:
+ cc = conf.environ[var]
+ if not cc:
+ cc = conf.find_program(name, var=var)
+ if not cc:
+ conf.fatal('PGI Compiler (%s) was not found' % name)
+
+ v[var + '_VERSION'] = conf.get_pgi_version(cc)
+ v[var] = cc
+ v[var + '_NAME'] = 'pgi'
+
+@conf
+def get_pgi_version(conf, cc):
+ """Find the version of a pgi compiler."""
+ version_re = re.compile(r"The Portland Group", re.I).search
+ cmd = cc + ['-V', '-E'] # Issue 1078, prevent wrappers from linking
+
+ try:
+ out, err = conf.cmd_and_log(cmd, output=0)
+ except Errors.WafError:
+ conf.fatal('Could not find pgi compiler %r' % cmd)
+
+ if out:
+ match = version_re(out)
+ else:
+ match = version_re(err)
+
+ if not match:
+ conf.fatal('Could not verify PGI signature')
+
+ cmd = cc + ['-help=variable']
+ try:
+ out, err = conf.cmd_and_log(cmd, output=0)
+ except Errors.WafError:
+ conf.fatal('Could not find pgi compiler %r' % cmd)
+
+	version = re.findall(r'^COMPVER\s*=(.*)', out, re.M)
+ if len(version) != 1:
+ conf.fatal('Could not determine the compiler version')
+ return version[0]
+
+def configure(conf):
+ conf.find_pgi_compiler('CC', 'pgcc')
+ conf.find_ar()
+ conf.gcc_common_flags()
+ conf.cc_load_tools()
+ conf.cc_add_flags()
+ conf.link_add_flags()
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Antoine Dechaume 2011
+
+"""
+Detect the PGI C++ compiler
+"""
+
+from waflib.Tools.compiler_cxx import cxx_compiler
+cxx_compiler['linux'].append('pgicxx')
+
+from waflib.extras import pgicc
+
+def configure(conf):
+ conf.find_pgi_compiler('CXX', 'pgCC')
+ conf.find_ar()
+ conf.gxx_common_flags()
+ conf.cxx_load_tools()
+ conf.cxx_add_flags()
+ conf.link_add_flags()
+++ /dev/null
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2015 (ita)
-
-"""
-Execute commands through pre-forked servers. This tool creates as many servers as build threads.
-On a benchmark executed on Linux Kubuntu 14, 8 virtual cores and SSD drive::
-
- ./genbench.py /tmp/build 200 100 15 5
- waf clean build -j24
- # no prefork: 2m7.179s
- # prefork: 0m55.400s
-
-To use::
-
- def options(opt):
- # optional, will spawn 40 servers early
- opt.load('prefork')
-
- def build(bld):
- bld.load('prefork')
- ...
- more code
-
-The servers and the build process are using a shared nonce to prevent undesirable external connections.
-"""
-
-import os, re, socket, threading, sys, subprocess, time, atexit, traceback, random, signal
-try:
- import SocketServer
-except ImportError:
- import socketserver as SocketServer
-try:
- from queue import Queue
-except ImportError:
- from Queue import Queue
-try:
- import cPickle
-except ImportError:
- import pickle as cPickle
-
-SHARED_KEY = None
-HEADER_SIZE = 64
-
-REQ = 'REQ'
-RES = 'RES'
-BYE = 'BYE'
-
-def make_header(params, cookie=''):
- header = ','.join(params)
- header = header.ljust(HEADER_SIZE - len(cookie))
- assert(len(header) == HEADER_SIZE - len(cookie))
- header = header + cookie
- if sys.hexversion > 0x3000000:
- header = header.encode('iso8859-1')
- return header
-
-def safe_compare(x, y):
- sum = 0
- for (a, b) in zip(x, y):
- sum |= ord(a) ^ ord(b)
- return sum == 0
-
-re_valid_query = re.compile('^[a-zA-Z0-9_, ]+$')
-class req(SocketServer.StreamRequestHandler):
- def handle(self):
- try:
- while self.process_command():
- pass
- except KeyboardInterrupt:
- return
- except Exception as e:
- print(e)
-
- def send_response(self, ret, out, err, exc):
- if out or err or exc:
- data = (out, err, exc)
- data = cPickle.dumps(data, -1)
- else:
- data = ''
-
- params = [RES, str(ret), str(len(data))]
-
- # no need for the cookie in the response
- self.wfile.write(make_header(params))
- if data:
- self.wfile.write(data)
- self.wfile.flush()
-
- def process_command(self):
- query = self.rfile.read(HEADER_SIZE)
- if not query:
- return None
- #print(len(query))
- assert(len(query) == HEADER_SIZE)
- if sys.hexversion > 0x3000000:
- query = query.decode('iso8859-1')
-
- # magic cookie
- key = query[-20:]
- if not safe_compare(key, SHARED_KEY):
- print('%r %r' % (key, SHARED_KEY))
- self.send_response(-1, '', '', 'Invalid key given!')
- return 'meh'
-
- query = query[:-20]
- #print "%r" % query
- if not re_valid_query.match(query):
- self.send_response(-1, '', '', 'Invalid query %r' % query)
- raise ValueError('Invalid query %r' % query)
-
- query = query.strip().split(',')
-
- if query[0] == REQ:
- self.run_command(query[1:])
- elif query[0] == BYE:
- raise ValueError('Exit')
- else:
- raise ValueError('Invalid query %r' % query)
- return 'ok'
-
- def run_command(self, query):
-
- size = int(query[0])
- data = self.rfile.read(size)
- assert(len(data) == size)
- kw = cPickle.loads(data)
-
- # run command
- ret = out = err = exc = None
- cmd = kw['cmd']
- del kw['cmd']
- #print(cmd)
-
- try:
- if kw['stdout'] or kw['stderr']:
- p = subprocess.Popen(cmd, **kw)
- (out, err) = p.communicate()
- ret = p.returncode
- else:
- ret = subprocess.Popen(cmd, **kw).wait()
- except KeyboardInterrupt:
- raise
- except Exception as e:
- ret = -1
- exc = str(e) + traceback.format_exc()
-
- self.send_response(ret, out, err, exc)
-
-def create_server(conn, cls):
- # child processes do not need the key, so we remove it from the OS environment
- global SHARED_KEY
- SHARED_KEY = os.environ['SHARED_KEY']
- os.environ['SHARED_KEY'] = ''
-
- ppid = int(os.environ['PREFORKPID'])
- def reap():
- if os.sep != '/':
- os.waitpid(ppid, 0)
- else:
- while 1:
- try:
- os.kill(ppid, 0)
- except OSError:
- break
- else:
- time.sleep(1)
- os.kill(os.getpid(), signal.SIGKILL)
- t = threading.Thread(target=reap)
- t.setDaemon(True)
- t.start()
-
- server = SocketServer.TCPServer(conn, req)
- print(server.server_address[1])
- sys.stdout.flush()
- #server.timeout = 6000 # seconds
- server.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
- try:
- server.serve_forever(poll_interval=0.001)
- except KeyboardInterrupt:
- pass
-
-if __name__ == '__main__':
- conn = ("127.0.0.1", 0)
- #print("listening - %r %r\n" % conn)
- create_server(conn, req)
-else:
-
- from waflib import Logs, Utils, Runner, Errors, Options
-
- def init_task_pool(self):
- # lazy creation, and set a common pool for all task consumers
- pool = self.pool = []
- for i in range(self.numjobs):
- consumer = Runner.get_pool()
- pool.append(consumer)
- consumer.idx = i
- self.ready = Queue(0)
- def setq(consumer):
- consumer.ready = self.ready
- try:
- threading.current_thread().idx = consumer.idx
- except Exception as e:
- print(e)
- for x in pool:
- x.ready.put(setq)
- return pool
- Runner.Parallel.init_task_pool = init_task_pool
-
- def make_server(bld, idx):
- cmd = [sys.executable, os.path.abspath(__file__)]
- proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
- return proc
-
- def make_conn(bld, srv):
- port = srv.port
- conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- conn.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
- conn.connect(('127.0.0.1', port))
- return conn
-
-
- SERVERS = []
- CONNS = []
- def close_all():
- global SERVERS, CONNS
- while CONNS:
- conn = CONNS.pop()
- try:
- conn.close()
- except:
- pass
- while SERVERS:
- srv = SERVERS.pop()
- try:
- srv.kill()
- except:
- pass
- atexit.register(close_all)
-
- def put_data(conn, data):
- cnt = 0
- while cnt < len(data):
- sent = conn.send(data[cnt:])
- if sent == 0:
- raise RuntimeError('connection ended')
- cnt += sent
-
- def read_data(conn, siz):
- cnt = 0
- buf = []
- while cnt < siz:
- data = conn.recv(min(siz - cnt, 1024))
- if not data:
- raise RuntimeError('connection ended %r %r' % (cnt, siz))
- buf.append(data)
- cnt += len(data)
- if sys.hexversion > 0x3000000:
- ret = ''.encode('iso8859-1').join(buf)
- else:
- ret = ''.join(buf)
- return ret
-
- def exec_command(self, cmd, **kw):
- if 'stdout' in kw:
- if kw['stdout'] not in (None, subprocess.PIPE):
- return self.exec_command_old(cmd, **kw)
- elif 'stderr' in kw:
- if kw['stderr'] not in (None, subprocess.PIPE):
- return self.exec_command_old(cmd, **kw)
-
- kw['shell'] = isinstance(cmd, str)
- Logs.debug('runner: %r' % cmd)
- Logs.debug('runner_env: kw=%s' % kw)
-
- if self.logger:
- self.logger.info(cmd)
-
- if 'stdout' not in kw:
- kw['stdout'] = subprocess.PIPE
- if 'stderr' not in kw:
- kw['stderr'] = subprocess.PIPE
-
- if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
- raise Errors.WafError("Program %s not found!" % cmd[0])
-
- idx = threading.current_thread().idx
- kw['cmd'] = cmd
-
- # serialization..
- #print("sub %r %r" % (idx, cmd))
- #print("write to %r %r" % (idx, cmd))
-
- data = cPickle.dumps(kw, -1)
- params = [REQ, str(len(data))]
- header = make_header(params, self.SHARED_KEY)
-
- conn = CONNS[idx]
-
- put_data(conn, header + data)
- #put_data(conn, data)
-
- #print("running %r %r" % (idx, cmd))
- #print("read from %r %r" % (idx, cmd))
-
- data = read_data(conn, HEADER_SIZE)
- if sys.hexversion > 0x3000000:
- data = data.decode('iso8859-1')
-
- #print("received %r" % data)
- lst = data.split(',')
- ret = int(lst[1])
- dlen = int(lst[2])
-
- out = err = None
- if dlen:
- data = read_data(conn, dlen)
- (out, err, exc) = cPickle.loads(data)
- if exc:
- raise Errors.WafError('Execution failure: %s' % exc)
-
- if out:
- if not isinstance(out, str):
- out = out.decode(sys.stdout.encoding or 'iso8859-1')
- if self.logger:
- self.logger.debug('out: %s' % out)
- else:
- Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
- if err:
- if not isinstance(err, str):
- err = err.decode(sys.stdout.encoding or 'iso8859-1')
- if self.logger:
- self.logger.error('err: %s' % err)
- else:
- Logs.info(err, extra={'stream':sys.stderr, 'c1': ''})
-
- return ret
-
- def init_key(ctx):
- try:
- key = ctx.SHARED_KEY = os.environ['SHARED_KEY']
- except KeyError:
- key = "".join([chr(random.SystemRandom().randint(40, 126)) for x in range(20)])
- os.environ['SHARED_KEY'] = ctx.SHARED_KEY = key
-
- os.environ['PREFORKPID'] = str(os.getpid())
- return key
-
- def init_servers(ctx, maxval):
- while len(SERVERS) < maxval:
- i = len(SERVERS)
- srv = make_server(ctx, i)
- SERVERS.append(srv)
- while len(CONNS) < maxval:
- i = len(CONNS)
- srv = SERVERS[i]
-
- # postpone the connection
- srv.port = int(srv.stdout.readline())
-
- conn = None
- for x in range(30):
- try:
- conn = make_conn(ctx, srv)
- break
- except socket.error:
- time.sleep(0.01)
- if not conn:
- raise ValueError('Could not start the server!')
- if srv.poll() is not None:
- Logs.warn('Looks like it it not our server process - concurrent builds are unsupported at this stage')
- raise ValueError('Could not start the server')
- CONNS.append(conn)
-
- def init_smp(self):
- if not getattr(Options.options, 'smp', getattr(self, 'smp', None)):
- return
- if Utils.unversioned_sys_platform() in ('freebsd',):
- pid = os.getpid()
- cmd = ['cpuset', '-l', '0', '-p', str(pid)]
- elif Utils.unversioned_sys_platform() in ('linux',):
- pid = os.getpid()
- cmd = ['taskset', '-pc', '0', str(pid)]
- if cmd:
- self.cmd_and_log(cmd, quiet=0)
-
- def options(opt):
- init_key(opt)
- init_servers(opt, 40)
- opt.add_option('--pin-process', action='store_true', dest='smp', default=False)
-
- def build(bld):
- if bld.cmd == 'clean':
- return
-
- init_key(bld)
- init_servers(bld, bld.jobs)
- init_smp(bld)
-
- bld.__class__.exec_command_old = bld.__class__.exec_command
- bld.__class__.exec_command = exec_command
+++ /dev/null
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2015 (ita)
-
-# TODO: have the child process terminate if the parent is killed abruptly
-
-import os, socket, threading, sys, subprocess, time, atexit, random
-try:
- from queue import Queue
-except ImportError:
- from Queue import Queue
-
-import json as pickle
-
-SHARED_KEY = None
-HEADER_SIZE = 64
-
-REQ = 'REQ'
-RES = 'RES'
-BYE = 'BYE'
-
-def make_header(params, cookie=''):
- header = ','.join(params)
- header = header.ljust(HEADER_SIZE - len(cookie))
- assert(len(header) == HEADER_SIZE - len(cookie))
- header = header + cookie
- if sys.hexversion > 0x3000000:
- header = header.encode('iso8859-1')
- return header
-
-if 1:
- from waflib import Logs, Utils, Runner, Errors, Options
-
- def init_task_pool(self):
- # lazy creation, and set a common pool for all task consumers
- pool = self.pool = []
- for i in range(self.numjobs):
- consumer = Runner.get_pool()
- pool.append(consumer)
- consumer.idx = i
- self.ready = Queue(0)
- def setq(consumer):
- consumer.ready = self.ready
- try:
- threading.current_thread().idx = consumer.idx
- except Exception as e:
- print(e)
- for x in pool:
- x.ready.put(setq)
- return pool
- Runner.Parallel.init_task_pool = init_task_pool
-
- def make_server(bld, idx):
- top = getattr(bld, 'preforkjava_top', os.path.dirname(os.path.abspath('__file__')))
- cp = getattr(bld, 'preforkjava_cp', os.path.join(top, 'minimal-json-0.9.3-SNAPSHOT.jar') + os.pathsep + top)
-
- for x in cp.split(os.pathsep):
- if x and not os.path.exists(x):
- Logs.warn('Invalid classpath: %r' % cp)
- Logs.warn('Set for example bld.preforkjava_cp to /path/to/minimal-json:/path/to/Prefork.class/')
-
- cwd = getattr(bld, 'preforkjava_cwd', top)
- port = getattr(bld, 'preforkjava_port', 51200)
- cmd = getattr(bld, 'preforkjava_cmd', 'java -cp %s%s Prefork %d' % (cp, os.pathsep, port))
- proc = subprocess.Popen(cmd.split(), shell=False, cwd=cwd)
- proc.port = port
- return proc
-
- def make_conn(bld, srv):
- #port = PORT + idx
- port = srv.port
- conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- conn.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
- conn.connect(('127.0.0.1', port))
- return conn
-
- SERVERS = []
- CONNS = []
- def close_all():
- global SERVERS
- while SERVERS:
- srv = SERVERS.pop()
- #pid = srv.pid
- try:
- srv.kill()
- except Exception:
- pass
- atexit.register(close_all)
-
- def put_data(conn, data):
- cnt = 0
- while cnt < len(data):
- sent = conn.send(data[cnt:])
- if sent == 0:
- raise RuntimeError('connection ended')
- cnt += sent
-
- def read_data(conn, siz):
- cnt = 0
- buf = []
- while cnt < siz:
- data = conn.recv(min(siz - cnt, 1024))
- if not data:
- raise RuntimeError('connection ended %r %r' % (cnt, siz))
- buf.append(data)
- cnt += len(data)
- if sys.hexversion > 0x3000000:
- ret = ''.encode('iso8859-1').join(buf)
- else:
- ret = ''.join(buf)
- return ret
-
- def exec_command(self, cmd, **kw):
- if 'stdout' in kw:
- if kw['stdout'] not in (None, subprocess.PIPE):
- return self.exec_command_old(cmd, **kw)
- elif 'stderr' in kw:
- if kw['stderr'] not in (None, subprocess.PIPE):
- return self.exec_command_old(cmd, **kw)
-
- kw['shell'] = isinstance(cmd, str)
- Logs.debug('runner: %r' % cmd)
- Logs.debug('runner_env: kw=%s' % kw)
-
- if self.logger:
- self.logger.info(cmd)
-
- if 'stdout' not in kw:
- kw['stdout'] = subprocess.PIPE
- if 'stderr' not in kw:
- kw['stderr'] = subprocess.PIPE
-
- if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
- raise Errors.WafError("Program %s not found!" % cmd[0])
-
- idx = threading.current_thread().idx
- kw['cmd'] = cmd
-
- data = pickle.dumps(kw)
- params = [REQ, str(len(data))]
- header = make_header(params, self.SHARED_KEY)
-
- conn = CONNS[idx]
-
- if sys.hexversion > 0x3000000:
- data = data.encode('iso8859-1')
- put_data(conn, header + data)
-
- data = read_data(conn, HEADER_SIZE)
- if sys.hexversion > 0x3000000:
- data = data.decode('iso8859-1')
-
- #print("received %r" % data)
- lst = data.split(',')
- ret = int(lst[1])
- dlen = int(lst[2])
-
- out = err = None
- if dlen:
- data = read_data(conn, dlen)
- (out, err, exc) = pickle.loads(data)
- if exc:
- raise Errors.WafError('Execution failure: %s' % exc)
-
- if out:
- if not isinstance(out, str):
- out = out.decode(sys.stdout.encoding or 'iso8859-1')
- if self.logger:
- self.logger.debug('out: %s' % out)
- else:
- Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
- if err:
- if not isinstance(err, str):
- err = err.decode(sys.stdout.encoding or 'iso8859-1')
- if self.logger:
- self.logger.error('err: %s' % err)
- else:
- Logs.info(err, extra={'stream':sys.stderr, 'c1': ''})
-
- return ret
-
- def init_key(ctx):
- try:
- key = ctx.SHARED_KEY = os.environ['SHARED_KEY']
- except KeyError:
- key = "".join([chr(random.SystemRandom().randint(40, 126)) for x in range(20)])
- os.environ['SHARED_KEY'] = ctx.SHARED_KEY = key
- os.environ['PREFORKPID'] = str(os.getpid())
- return key
-
- def init_servers(ctx, maxval):
- while len(SERVERS) < 1:
- i = len(SERVERS)
- srv = make_server(ctx, i)
- SERVERS.append(srv)
- while len(CONNS) < maxval:
- i = len(CONNS)
- srv = SERVERS[0]
- conn = None
- for x in range(30):
- try:
- conn = make_conn(ctx, srv)
- break
- except socket.error:
- time.sleep(0.01)
- if not conn:
- raise ValueError('Could not start the server!')
- CONNS.append(conn)
-
- def init_smp(self):
- if not getattr(Options.options, 'smp', getattr(self, 'smp', None)):
- return
- if Utils.unversioned_sys_platform() in ('freebsd',):
- pid = os.getpid()
- cmd = ['cpuset', '-l', '0', '-p', str(pid)]
- elif Utils.unversioned_sys_platform() in ('linux',):
- pid = os.getpid()
- cmd = ['taskset', '-pc', '0', str(pid)]
- if cmd:
- self.cmd_and_log(cmd, quiet=0)
-
- def options(opt):
- opt.add_option('--pin-process', action='store_true', dest='smp', default=False)
- init_key(opt)
- init_servers(opt, 40)
-
- def build(bld):
- if bld.cmd == 'clean':
- return
-
- init_key(bld)
- init_servers(bld, bld.jobs)
- init_smp(bld)
-
- bld.__class__.exec_command_old = bld.__class__.exec_command
- bld.__class__.exec_command = exec_command
+++ /dev/null
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2015 (ita)
-
-"""
-A version of prefork.py that uses unix sockets. The advantage is that it does not expose
-connections to the outside. Yet performance it only works on unix-like systems
-and performance can be slightly worse.
-
-To use::
-
- def options(opt):
- # recommended, fork new processes before using more memory
- opt.load('preforkunix')
-
- def build(bld):
- bld.load('preforkunix')
- ...
- more code
-"""
-
-import os, re, socket, threading, sys, subprocess, atexit, traceback, signal, time
-try:
- from queue import Queue
-except ImportError:
- from Queue import Queue
-try:
- import cPickle
-except ImportError:
- import pickle as cPickle
-
-HEADER_SIZE = 20
-
-REQ = 'REQ'
-RES = 'RES'
-BYE = 'BYE'
-
-def make_header(params, cookie=''):
- header = ','.join(params)
- header = header.ljust(HEADER_SIZE - len(cookie))
- assert(len(header) == HEADER_SIZE - len(cookie))
- header = header + cookie
- if sys.hexversion > 0x3000000:
- header = header.encode('iso8859-1')
- return header
-
-re_valid_query = re.compile('^[a-zA-Z0-9_, ]+$')
-if 1:
- def send_response(conn, ret, out, err, exc):
- if out or err or exc:
- data = (out, err, exc)
- data = cPickle.dumps(data, -1)
- else:
- data = ''
-
- params = [RES, str(ret), str(len(data))]
-
- # no need for the cookie in the response
- conn.send(make_header(params))
- if data:
- conn.send(data)
-
- def process_command(conn):
- query = conn.recv(HEADER_SIZE)
- if not query:
- return None
- #print(len(query))
- assert(len(query) == HEADER_SIZE)
- if sys.hexversion > 0x3000000:
- query = query.decode('iso8859-1')
-
- #print "%r" % query
- if not re_valid_query.match(query):
- send_response(conn, -1, '', '', 'Invalid query %r' % query)
- raise ValueError('Invalid query %r' % query)
-
- query = query.strip().split(',')
-
- if query[0] == REQ:
- run_command(conn, query[1:])
- elif query[0] == BYE:
- raise ValueError('Exit')
- else:
- raise ValueError('Invalid query %r' % query)
- return 'ok'
-
- def run_command(conn, query):
-
- size = int(query[0])
- data = conn.recv(size)
- assert(len(data) == size)
- kw = cPickle.loads(data)
-
- # run command
- ret = out = err = exc = None
- cmd = kw['cmd']
- del kw['cmd']
- #print(cmd)
-
- try:
- if kw['stdout'] or kw['stderr']:
- p = subprocess.Popen(cmd, **kw)
- (out, err) = p.communicate()
- ret = p.returncode
- else:
- ret = subprocess.Popen(cmd, **kw).wait()
- except KeyboardInterrupt:
- raise
- except Exception as e:
- ret = -1
- exc = str(e) + traceback.format_exc()
-
- send_response(conn, ret, out, err, exc)
-
-if 1:
-
- from waflib import Logs, Utils, Runner, Errors, Options
-
- def init_task_pool(self):
- # lazy creation, and set a common pool for all task consumers
- pool = self.pool = []
- for i in range(self.numjobs):
- consumer = Runner.get_pool()
- pool.append(consumer)
- consumer.idx = i
- self.ready = Queue(0)
- def setq(consumer):
- consumer.ready = self.ready
- try:
- threading.current_thread().idx = consumer.idx
- except Exception as e:
- print(e)
- for x in pool:
- x.ready.put(setq)
- return pool
- Runner.Parallel.init_task_pool = init_task_pool
-
- def make_conn(bld):
- child_socket, parent_socket = socket.socketpair(socket.AF_UNIX)
- ppid = os.getpid()
- pid = os.fork()
- if pid == 0:
- parent_socket.close()
-
- # if the parent crashes, try to exit cleanly
- def reap():
- while 1:
- try:
- os.kill(ppid, 0)
- except OSError:
- break
- else:
- time.sleep(1)
- os.kill(os.getpid(), signal.SIGKILL)
- t = threading.Thread(target=reap)
- t.setDaemon(True)
- t.start()
-
- # write to child_socket only
- try:
- while process_command(child_socket):
- pass
- except KeyboardInterrupt:
- sys.exit(2)
- else:
- child_socket.close()
- return (pid, parent_socket)
-
- SERVERS = []
- CONNS = []
- def close_all():
- global SERVERS, CONS
- while CONNS:
- conn = CONNS.pop()
- try:
- conn.close()
- except:
- pass
- while SERVERS:
- pid = SERVERS.pop()
- try:
- os.kill(pid, 9)
- except:
- pass
- atexit.register(close_all)
-
- def put_data(conn, data):
- cnt = 0
- while cnt < len(data):
- sent = conn.send(data[cnt:])
- if sent == 0:
- raise RuntimeError('connection ended')
- cnt += sent
-
- def read_data(conn, siz):
- cnt = 0
- buf = []
- while cnt < siz:
- data = conn.recv(min(siz - cnt, 1024))
- if not data:
- raise RuntimeError('connection ended %r %r' % (cnt, siz))
- buf.append(data)
- cnt += len(data)
- if sys.hexversion > 0x3000000:
- ret = ''.encode('iso8859-1').join(buf)
- else:
- ret = ''.join(buf)
- return ret
-
- def exec_command(self, cmd, **kw):
- if 'stdout' in kw:
- if kw['stdout'] not in (None, subprocess.PIPE):
- return self.exec_command_old(cmd, **kw)
- elif 'stderr' in kw:
- if kw['stderr'] not in (None, subprocess.PIPE):
- return self.exec_command_old(cmd, **kw)
-
- kw['shell'] = isinstance(cmd, str)
- Logs.debug('runner: %r' % cmd)
- Logs.debug('runner_env: kw=%s' % kw)
-
- if self.logger:
- self.logger.info(cmd)
-
- if 'stdout' not in kw:
- kw['stdout'] = subprocess.PIPE
- if 'stderr' not in kw:
- kw['stderr'] = subprocess.PIPE
-
- if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
- raise Errors.WafError("Program %s not found!" % cmd[0])
-
- idx = threading.current_thread().idx
- kw['cmd'] = cmd
-
- # serialization..
- #print("sub %r %r" % (idx, cmd))
- #print("write to %r %r" % (idx, cmd))
-
- data = cPickle.dumps(kw, -1)
- params = [REQ, str(len(data))]
- header = make_header(params)
-
- conn = CONNS[idx]
-
- put_data(conn, header + data)
-
- #print("running %r %r" % (idx, cmd))
- #print("read from %r %r" % (idx, cmd))
-
- data = read_data(conn, HEADER_SIZE)
- if sys.hexversion > 0x3000000:
- data = data.decode('iso8859-1')
-
- #print("received %r" % data)
- lst = data.split(',')
- ret = int(lst[1])
- dlen = int(lst[2])
-
- out = err = None
- if dlen:
- data = read_data(conn, dlen)
- (out, err, exc) = cPickle.loads(data)
- if exc:
- raise Errors.WafError('Execution failure: %s' % exc)
-
- if out:
- if not isinstance(out, str):
- out = out.decode(sys.stdout.encoding or 'iso8859-1')
- if self.logger:
- self.logger.debug('out: %s' % out)
- else:
- Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
- if err:
- if not isinstance(err, str):
- err = err.decode(sys.stdout.encoding or 'iso8859-1')
- if self.logger:
- self.logger.error('err: %s' % err)
- else:
- Logs.info(err, extra={'stream':sys.stderr, 'c1': ''})
-
- return ret
-
- def init_smp(self):
- if not getattr(Options.options, 'smp', getattr(self, 'smp', None)):
- return
- if Utils.unversioned_sys_platform() in ('freebsd',):
- pid = os.getpid()
- cmd = ['cpuset', '-l', '0', '-p', str(pid)]
- elif Utils.unversioned_sys_platform() in ('linux',):
- pid = os.getpid()
- cmd = ['taskset', '-pc', '0', str(pid)]
- if cmd:
- self.cmd_and_log(cmd, quiet=0)
-
- def options(opt):
- # memory consumption might be at the lowest point while processing options
- opt.add_option('--pin-process', action='store_true', dest='smp', default=False)
- if Utils.is_win32 or os.sep != '/':
- return
- while len(CONNS) < 30:
- (pid, conn) = make_conn(opt)
- SERVERS.append(pid)
- CONNS.append(conn)
-
- def build(bld):
- if Utils.is_win32 or os.sep != '/':
- return
- if bld.cmd == 'clean':
- return
- while len(CONNS) < bld.jobs:
- (pid, conn) = make_conn(bld)
- SERVERS.append(pid)
- CONNS.append(conn)
- init_smp(bld)
- bld.__class__.exec_command_old = bld.__class__.exec_command
- bld.__class__.exec_command = exec_command
+++ /dev/null
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
-
-#! /usr/bin/env python
-
-"""
-Illustrate how to override a class method to do something
-
-In this case, print the commands being executed as strings
-(the commands are usually lists, so this can be misleading)
-"""
-
-import sys
-from waflib import Context, Utils, Errors, Logs
-
-def exec_command(self, cmd, **kw):
- subprocess = Utils.subprocess
- kw['shell'] = isinstance(cmd, str)
-
- if isinstance(cmd, str):
- kw['shell'] = True
- txt = cmd
- else:
- txt = ' '.join(repr(x) if ' ' in x else x for x in cmd)
-
- Logs.debug('runner: %s', txt)
- Logs.debug('runner_env: kw=%s', kw)
-
- if self.logger:
- self.logger.info(cmd)
-
- if 'stdout' not in kw:
- kw['stdout'] = subprocess.PIPE
- if 'stderr' not in kw:
- kw['stderr'] = subprocess.PIPE
-
- if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
- raise Errors.WafError("Program %s not found!" % cmd[0])
-
- wargs = {}
- if 'timeout' in kw:
- if kw['timeout'] is not None:
- wargs['timeout'] = kw['timeout']
- del kw['timeout']
- if 'input' in kw:
- if kw['input']:
- wargs['input'] = kw['input']
- kw['stdin'] = Utils.subprocess.PIPE
- del kw['input']
-
- if 'cwd' in kw:
- if not isinstance(kw['cwd'], str):
- kw['cwd'] = kw['cwd'].abspath()
-
- try:
- if kw['stdout'] or kw['stderr']:
- p = subprocess.Popen(cmd, **kw)
- (out, err) = p.communicate(**wargs)
- ret = p.returncode
- else:
- out, err = (None, None)
- ret = subprocess.Popen(cmd, **kw).wait(**wargs)
- except Exception ,e:
- raise Errors.WafError('Execution failure: %s' % str(e), ex=e)
-
- if out:
- if not isinstance(out, str):
- out = out.decode(sys.stdout.encoding or 'iso8859-1')
- if self.logger:
- self.logger.debug('out: %s' % out)
- else:
- Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
- if err:
- if not isinstance(err, str):
- err = err.decode(sys.stdout.encoding or 'iso8859-1')
- if self.logger:
- self.logger.error('err: %s' % err)
- else:
- Logs.info(err, extra={'stream':sys.stderr, 'c1': ''})
-
- return ret
-
-Context.Context.exec_command = exec_command
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#! /usr/bin/env python
# per rosengren 2011
gen = tsk.generator
inc_nodes = gen.to_incnodes(Utils.to_list(getattr(gen,'includes',[])) + env['INCLUDES'])
- # FIXME the if-else construct will not work in python 2
cmd = (
[env.PROC] +
['SQLCHECK=SEMANTICS'] +
ext_in = '.pc',
ext_out = '.c',
)
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
# Philipp Bender, 2012
# Matt Clarkson, 2012
-import re
+import re, os
from waflib.Task import Task
from waflib.TaskGen import extension
+from waflib import Errors, Context
"""
A simple tool to integrate protocol buffers into your build system.
-Example::
+Example for C++:
def configure(conf):
conf.load('compiler_cxx cxx protoc')
bld(
features = 'cxx cxxprogram'
source = 'main.cpp file1.proto proto/file2.proto',
- include = '. proto',
+ includes = '. proto',
target = 'executable')
+Example for Python:
+
+ def configure(conf):
+ conf.load('python protoc')
+
+ def build(bld):
+ bld(
+ features = 'py'
+ source = 'main.py file1.proto proto/file2.proto',
+ protoc_includes = 'proto')
+
+Example for both Python and C++ at same time:
+
+ def configure(conf):
+ conf.load('cxx python protoc')
+
+ def build(bld):
+ bld(
+ features = 'cxx py'
+ source = 'file1.proto proto/file2.proto',
+ protoc_includes = 'proto') # or includes
+
+
+Example for Java:
+
+ def options(opt):
+ opt.load('java')
+
+ def configure(conf):
+ conf.load('python java protoc')
+ # Here you have to point to your protobuf-java JAR and have it in classpath
+ conf.env.CLASSPATH_PROTOBUF = ['protobuf-java-2.5.0.jar']
+
+ def build(bld):
+ bld(
+ features = 'javac protoc',
+ name = 'pbjava',
+ srcdir = 'inc/ src', # directories used by javac
+ source = ['inc/message_inc.proto', 'inc/message.proto'],
+ # source is used by protoc for .proto files
+ use = 'PROTOBUF',
+ protoc_includes = ['inc']) # for protoc to search dependencies
+
+
+
+
Notes when using this tool:
- protoc command line parsing is tricky.
"""
class protoc(Task):
- # protoc expects the input proto file to be an absolute path.
- run_str = '${PROTOC} ${PROTOC_FLAGS} ${PROTOC_ST:INCPATHS} ${SRC[0].abspath()}'
+ run_str = '${PROTOC} ${PROTOC_FL:PROTOC_FLAGS} ${PROTOC_ST:INCPATHS} ${PROTOC_ST:PROTOC_INCPATHS} ${SRC[0].bldpath()}'
color = 'BLUE'
- ext_out = ['.h', 'pb.cc']
+ ext_out = ['.h', 'pb.cc', '.py', '.java']
def scan(self):
"""
Scan .proto dependencies
nodes = []
names = []
seen = []
+ search_nodes = []
+
+ if not node:
+ return (nodes, names)
- if not node: return (nodes, names)
+ if 'cxx' in self.generator.features:
+ search_nodes = self.generator.includes_nodes
+
+ if 'py' in self.generator.features or 'javac' in self.generator.features:
+ for incpath in getattr(self.generator, 'protoc_includes', []):
+ search_nodes.append(self.generator.bld.path.find_node(incpath))
def parse_node(node):
if node in seen:
m = re.search(r'^import\s+"(.*)";.*(//)?.*', line)
if m:
dep = m.groups()[0]
- for incpath in self.env.INCPATHS:
- found = incpath.find_resource(dep)
+ for incnode in search_nodes:
+ found = incnode.find_resource(dep)
if found:
nodes.append(found)
parse_node(found)
@extension('.proto')
def process_protoc(self, node):
- cpp_node = node.change_ext('.pb.cc')
- hpp_node = node.change_ext('.pb.h')
- self.create_task('protoc', node, [cpp_node, hpp_node])
- self.source.append(cpp_node)
-
- if 'cxx' in self.features and not self.env.PROTOC_FLAGS:
- #self.env.PROTOC_FLAGS = '--cpp_out=%s' % node.parent.get_bld().abspath() # <- this does not work
- self.env.PROTOC_FLAGS = '--cpp_out=%s' % node.parent.get_bld().bldpath()
+ incdirs = []
+ out_nodes = []
+ protoc_flags = []
+
+ # ensure PROTOC_FLAGS is a list; a copy is used below anyway
+ self.env.PROTOC_FLAGS = self.to_list(self.env.PROTOC_FLAGS)
+
+ if 'cxx' in self.features:
+ cpp_node = node.change_ext('.pb.cc')
+ hpp_node = node.change_ext('.pb.h')
+ self.source.append(cpp_node)
+ out_nodes.append(cpp_node)
+ out_nodes.append(hpp_node)
+ protoc_flags.append('--cpp_out=%s' % node.parent.get_bld().bldpath())
+
+ if 'py' in self.features:
+ py_node = node.change_ext('_pb2.py')
+ self.source.append(py_node)
+ out_nodes.append(py_node)
+ protoc_flags.append('--python_out=%s' % node.parent.get_bld().bldpath())
+
+ if 'javac' in self.features:
+ pkgname, javapkg, javacn, nodename = None, None, None, None
+ messages = []
+
+ # .java file name is done with some rules depending on .proto file content:
+ # -) package is either derived from option java_package if present
+ # or from package directive
+ # -) file name is either derived from option java_outer_classname if present
+ # or the .proto file is converted to camelcase. If a message
+ # is named the same then the behaviour depends on protoc version
+ #
+ # See also: https://developers.google.com/protocol-buffers/docs/reference/java-generated#invocation
+
+ code = node.read().splitlines()
+ for line in code:
+ m = re.search(r'^package\s+(.*);', line)
+ if m:
+ pkgname = m.groups()[0]
+ m = re.search(r'^option\s+(\S*)\s*=\s*"(\S*)";', line)
+ if m:
+ optname = m.groups()[0]
+ if optname == 'java_package':
+ javapkg = m.groups()[1]
+ elif optname == 'java_outer_classname':
+ javacn = m.groups()[1]
+ if self.env.PROTOC_MAJOR > '2':
+ m = re.search(r'^message\s+(\w*)\s*{*', line)
+ if m:
+ messages.append(m.groups()[0])
+
+ if javapkg:
+ nodename = javapkg
+ elif pkgname:
+ nodename = pkgname
+ else:
+ raise Errors.WafError('Cannot derive java name from protoc file')
+
+ nodename = nodename.replace('.',os.sep) + os.sep
+ if javacn:
+ nodename += javacn + '.java'
+ else:
+ if self.env.PROTOC_MAJOR > '2' and node.abspath()[node.abspath().rfind(os.sep)+1:node.abspath().rfind('.')].title() in messages:
+ nodename += node.abspath()[node.abspath().rfind(os.sep)+1:node.abspath().rfind('.')].title() + 'OuterClass.java'
+ else:
+ nodename += node.abspath()[node.abspath().rfind(os.sep)+1:node.abspath().rfind('.')].title() + '.java'
+
+ java_node = node.parent.find_or_declare(nodename)
+ out_nodes.append(java_node)
+ protoc_flags.append('--java_out=%s' % node.parent.get_bld().bldpath())
+
+		# Make javac also pick up java code generated in build
+ if not node.parent.get_bld() in self.javac_task.srcdir:
+ self.javac_task.srcdir.append(node.parent.get_bld())
+
+ if not out_nodes:
+ raise Errors.WafError('Feature %r not supported by protoc extra' % self.features)
+
+ tsk = self.create_task('protoc', node, out_nodes)
+ tsk.env.append_value('PROTOC_FLAGS', protoc_flags)
+
+ if 'javac' in self.features:
+ self.javac_task.set_run_after(tsk)
+
+ # Instruct protoc where to search for .proto included files.
+ # For C++ standard include files dirs are used,
+ # but this doesn't apply to Python for example
+ for incpath in getattr(self, 'protoc_includes', []):
+ incdirs.append(self.bld.path.find_node(incpath).bldpath())
+ tsk.env.PROTOC_INCPATHS = incdirs
use = getattr(self, 'use', '')
if not 'PROTOBUF' in use:
self.use = self.to_list(use) + ['PROTOBUF']
def configure(conf):
- conf.check_cfg(package="protobuf", uselib_store="PROTOBUF", args=['--cflags', '--libs'])
+ conf.check_cfg(package='protobuf', uselib_store='PROTOBUF', args=['--cflags', '--libs'])
conf.find_program('protoc', var='PROTOC')
+ conf.start_msg('Checking for protoc version')
+ protocver = conf.cmd_and_log(conf.env.PROTOC + ['--version'], output=Context.BOTH)
+ protocver = ''.join(protocver).strip()[protocver[0].rfind(' ')+1:]
+ conf.end_msg(protocver)
+ conf.env.PROTOC_MAJOR = protocver[:protocver.find('.')]
conf.env.PROTOC_ST = '-I%s'
+ conf.env.PROTOC_FL = '%s'
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Federico Pellegrin, 2016-2018 (fedepell) adapted for Python
+
+"""
+This tool helps with finding Python Qt5 tools and libraries,
+and provides translation from QT5 files to Python code.
+
+The following snippet illustrates the tool usage::
+
+ def options(opt):
+ opt.load('py pyqt5')
+
+ def configure(conf):
+ conf.load('py pyqt5')
+
+ def build(bld):
+ bld(
+ features = 'py pyqt5',
+ source = 'main.py textures.qrc aboutDialog.ui',
+ )
+
+Here, the UI description and resource files will be processed
+to generate code.
+
+Usage
+=====
+
+Load the "pyqt5" tool.
+
+Add into the sources list also the qrc resources files or ui5
+definition files and they will be translated into python code
+with the system tools (PyQt5, pyside2, PyQt4 are searched in this
+order) and then compiled
+"""
+
+try:
+ from xml.sax import make_parser
+ from xml.sax.handler import ContentHandler
+except ImportError:
+ has_xml = False
+ ContentHandler = object
+else:
+ has_xml = True
+
+import os
+from waflib.Tools import python
+from waflib import Task, Options
+from waflib.TaskGen import feature, extension
+from waflib.Configure import conf
+from waflib import Logs
+
+EXT_RCC = ['.qrc']
+"""
+File extension for the resource (.qrc) files
+"""
+
+EXT_UI = ['.ui']
+"""
+File extension for the user interface (.ui) files
+"""
+
+
+class XMLHandler(ContentHandler):
+ """
+ Parses ``.qrc`` files
+ """
+ def __init__(self):
+ self.buf = []
+ self.files = []
+ def startElement(self, name, attrs):
+ if name == 'file':
+ self.buf = []
+ def endElement(self, name):
+ if name == 'file':
+ self.files.append(str(''.join(self.buf)))
+ def characters(self, cars):
+ self.buf.append(cars)
+
+@extension(*EXT_RCC)
+def create_pyrcc_task(self, node):
+ "Creates rcc and py task for ``.qrc`` files"
+ rcnode = node.change_ext('.py')
+ self.create_task('pyrcc', node, rcnode)
+ if getattr(self, 'install_from', None):
+ self.install_from = self.install_from.get_bld()
+ else:
+ self.install_from = self.path.get_bld()
+ self.install_path = getattr(self, 'install_path', '${PYTHONDIR}')
+ self.process_py(rcnode)
+
+@extension(*EXT_UI)
+def create_pyuic_task(self, node):
+ "Create uic tasks and py for user interface ``.ui`` definition files"
+ uinode = node.change_ext('.py')
+ self.create_task('ui5py', node, uinode)
+ if getattr(self, 'install_from', None):
+ self.install_from = self.install_from.get_bld()
+ else:
+ self.install_from = self.path.get_bld()
+ self.install_path = getattr(self, 'install_path', '${PYTHONDIR}')
+ self.process_py(uinode)
+
+@extension('.ts')
+def add_pylang(self, node):
+ """Adds all the .ts file into ``self.lang``"""
+ self.lang = self.to_list(getattr(self, 'lang', [])) + [node]
+
+@feature('pyqt5')
+def apply_pyqt5(self):
+ """
+ The additional parameters are:
+
+ :param lang: list of translation files (\*.ts) to process
+ :type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension
+	:param langname: if given, transform the \*.ts files into a .qrc file to include in the binary file
+ :type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension
+ """
+ if getattr(self, 'lang', None):
+ qmtasks = []
+ for x in self.to_list(self.lang):
+ if isinstance(x, str):
+ x = self.path.find_resource(x + '.ts')
+ qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.qm')))
+
+
+ if getattr(self, 'langname', None):
+ qmnodes = [k.outputs[0] for k in qmtasks]
+ rcnode = self.langname
+ if isinstance(rcnode, str):
+ rcnode = self.path.find_or_declare(rcnode + '.qrc')
+ t = self.create_task('qm2rcc', qmnodes, rcnode)
+ create_pyrcc_task(self, t.outputs[0])
+
+class pyrcc(Task.Task):
+ """
+ Processes ``.qrc`` files
+ """
+ color = 'BLUE'
+ run_str = '${QT_PYRCC} ${SRC} -o ${TGT}'
+ ext_out = ['.py']
+
+ def rcname(self):
+ return os.path.splitext(self.inputs[0].name)[0]
+
+ def scan(self):
+ """Parse the *.qrc* files"""
+ if not has_xml:
+ Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
+ return ([], [])
+
+ parser = make_parser()
+ curHandler = XMLHandler()
+ parser.setContentHandler(curHandler)
+ fi = open(self.inputs[0].abspath(), 'r')
+ try:
+ parser.parse(fi)
+ finally:
+ fi.close()
+
+ nodes = []
+ names = []
+ root = self.inputs[0].parent
+ for x in curHandler.files:
+ nd = root.find_resource(x)
+ if nd:
+ nodes.append(nd)
+ else:
+ names.append(x)
+ return (nodes, names)
+
+
+class ui5py(Task.Task):
+ """
+ Processes ``.ui`` files for python
+ """
+ color = 'BLUE'
+ run_str = '${QT_PYUIC} ${SRC} -o ${TGT}'
+ ext_out = ['.py']
+
+class ts2qm(Task.Task):
+ """
+ Generates ``.qm`` files from ``.ts`` files
+ """
+ color = 'BLUE'
+ run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
+
+class qm2rcc(Task.Task):
+	"""
+	Generates a ``.qrc`` resource file listing the input ``.qm`` files.
+	"""
+	color = 'BLUE'
+	# the .qm files must exist before they can be listed in the .qrc
+	after = 'ts2qm'
+	def run(self):
+		"""Create a qrc file including the inputs"""
+		# file paths are written relative to the output .qrc location
+		txt = '\n'.join(['<file>%s</file>' % k.path_from(self.outputs[0].parent) for k in self.inputs])
+		code = '<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>' % txt
+		self.outputs[0].write(code)
+
+def configure(self):
+	"""
+	Detects the PyQt5/PySide2/PyQt4 helper programs and warns when XML
+	parsing support is unavailable (incomplete rcc dependencies).
+	"""
+	self.find_pyqt5_binaries()
+
+	# warn about this during the configuration too
+	if not has_xml:
+		Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
+
+@conf
+def find_pyqt5_binaries(self):
+ """
+ Detects PyQt5 or pyside2 programs such as pyuic5/pyside2-uic, pyrcc5/pyside2-rcc
+ """
+ env = self.env
+
+ if getattr(Options.options, 'want_pyside2', True):
+ self.find_program(['pyside2-uic'], var='QT_PYUIC')
+ self.find_program(['pyside2-rcc'], var='QT_PYRCC')
+ self.find_program(['pyside2-lupdate'], var='QT_PYLUPDATE')
+ elif getattr(Options.options, 'want_pyqt4', True):
+ self.find_program(['pyuic4'], var='QT_PYUIC')
+ self.find_program(['pyrcc4'], var='QT_PYRCC')
+ self.find_program(['pylupdate4'], var='QT_PYLUPDATE')
+ else:
+ self.find_program(['pyuic5','pyside2-uic','pyuic4'], var='QT_PYUIC')
+ self.find_program(['pyrcc5','pyside2-rcc','pyrcc4'], var='QT_PYRCC')
+ self.find_program(['pylupdate5', 'pyside2-lupdate','pylupdate4'], var='QT_PYLUPDATE')
+
+ if not env.QT_PYUIC:
+ self.fatal('cannot find the uic compiler for python for qt5')
+
+ if not env.QT_PYUIC:
+ self.fatal('cannot find the rcc compiler for python for qt5')
+
+ self.find_program(['lrelease-qt5', 'lrelease'], var='QT_LRELEASE')
+
+def options(opt):
+	"""
+	Command-line options
+	"""
+	# both flags default to False; when neither is given, PyQt5 is searched first
+	pyqt5opt=opt.add_option_group("Python QT5 Options")
+	pyqt5opt.add_option('--pyqt5-pyside2', action='store_true', default=False, dest='want_pyside2', help='use pyside2 bindings as python QT5 bindings (default PyQt5 is searched first, PySide2 after)')
+	pyqt5opt.add_option('--pyqt5-pyqt4', action='store_true', default=False, dest='want_pyqt4', help='use PyQt4 bindings as python QT5 bindings (default PyQt5 is searched first, PySide2 after, PyQt4 last)')
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# Calle Rosenquist, 2016-2018 (xbreak)
+
+"""
+Provides Python unit test support using :py:class:`waflib.Tools.waf_unit_test.utest`
+task via the **pytest** feature.
+
+To use pytest the following is needed:
+
+1. Load `pytest` and the dependency `waf_unit_test` tools.
+2. Create a task generator with feature `pytest` (not `test`) and customize behaviour with
+ the following attributes:
+
+ - `pytest_source`: Test input files.
+ - `ut_str`: Test runner command, e.g. ``${PYTHON} -B -m unittest discover`` or
+ if nose is used: ``${NOSETESTS} --no-byte-compile ${SRC}``.
+ - `ut_shell`: Determines if ``ut_str`` is executed in a shell. Default: False.
+ - `ut_cwd`: Working directory for test runner. Defaults to directory of
+ first ``pytest_source`` file.
+
+ Additionally the following `pytest` specific attributes are used in dependent taskgens:
+
+ - `pytest_path`: Node or string list of additional Python paths.
+ - `pytest_libpath`: Node or string list of additional library paths.
+
+The `use` dependencies are used for both update calculation and to populate
+the following environment variables for the `pytest` test runner:
+
+1. `PYTHONPATH` (`sys.path`) of any dependent taskgen that has the feature `py`:
+
+ - `install_from` attribute is used to determine where the root of the Python sources
+ are located. If `install_from` is not specified the default is to use the taskgen path
+ as the root.
+
+ - `pytest_path` attribute is used to manually specify additional Python paths.
+
+2. Dynamic linker search path variable (e.g. `LD_LIBRARY_PATH`) of any dependent taskgen with
+ non-static link_task.
+
+ - `pytest_libpath` attribute is used to manually specify additional linker paths.
+
+Note: `pytest` cannot automatically determine the correct `PYTHONPATH` for `pyext` taskgens
+ because the extension might be part of a Python package or used standalone:
+
+ - When used as part of another `py` package, the `PYTHONPATH` is provided by
+ that taskgen so no additional action is required.
+
+ - When used as a standalone module, the user needs to specify the `PYTHONPATH` explicitly
+ via the `pytest_path` attribute on the `pyext` taskgen.
+
+ For details c.f. the pytest playground examples.
+
+
+For example::
+
+ # A standalone Python C extension that demonstrates unit test environment population
+ # of PYTHONPATH and LD_LIBRARY_PATH/PATH/DYLD_LIBRARY_PATH.
+ #
+ # Note: `pytest_path` is provided here because pytest cannot automatically determine
+ # if the extension is part of another Python package or is used standalone.
+ bld(name = 'foo_ext',
+ features = 'c cshlib pyext',
+ source = 'src/foo_ext.c',
+ target = 'foo_ext',
+ pytest_path = [ bld.path.get_bld() ])
+
+	# Python package under test that also depends on the Python module `foo_ext`
+ #
+ # Note: `install_from` is added automatically to `PYTHONPATH`.
+ bld(name = 'foo',
+ features = 'py',
+ use = 'foo_ext',
+ source = bld.path.ant_glob('src/foo/*.py'),
+ install_from = 'src')
+
+ # Unit test example using the built in module unittest and let that discover
+ # any test cases.
+ bld(name = 'foo_test',
+ features = 'pytest',
+ use = 'foo',
+ pytest_source = bld.path.ant_glob('test/*.py'),
+ ut_str = '${PYTHON} -B -m unittest discover')
+
+"""
+
+import os
+from waflib import Task, TaskGen, Errors, Utils, Logs
+from waflib.Tools import ccroot
+
+def _process_use_rec(self, name):
+	"""
+	Recursively process ``use`` for the task generator with name ``name``.
+	Used by pytest_process_use.
+
+	:param name: name of a task generator listed in a ``use`` attribute
+	"""
+	if name in self.pytest_use_not or name in self.pytest_use_seen:
+		return
+	try:
+		tg = self.bld.get_tgen_by_name(name)
+	except Errors.WafError:
+		# unknown names are cached so they are only looked up once
+		self.pytest_use_not.add(name)
+		return
+
+	self.pytest_use_seen.append(name)
+	# make sure the dependency's tasks exist before its paths are inspected
+	tg.post()
+
+	for n in self.to_list(getattr(tg, 'use', [])):
+		_process_use_rec(self, n)
+
+
+@TaskGen.feature('pytest')
+@TaskGen.after_method('process_source', 'apply_link')
+def pytest_process_use(self):
+	"""
+	Process the ``use`` attribute which contains a list of task generator names and store
+	paths that are later used to populate the unit test runtime environment.
+	"""
+	self.pytest_use_not = set()
+	self.pytest_use_seen = []
+	self.pytest_paths = [] # strings or Nodes
+	self.pytest_libpaths = [] # strings or Nodes
+	self.pytest_dep_nodes = []
+
+	names = self.to_list(getattr(self, 'use', []))
+	for name in names:
+		_process_use_rec(self, name)
+
+	def extend_unique(lst, varlst):
+		# append only the items of varlst not already in lst, preserving order
+		ext = []
+		for x in varlst:
+			if x not in lst:
+				ext.append(x)
+		lst.extend(ext)
+
+	# Collect type specific info needed to construct a valid runtime environment
+	# for the test.
+	for name in self.pytest_use_seen:
+		tg = self.bld.get_tgen_by_name(name)
+
+		extend_unique(self.pytest_paths, Utils.to_list(getattr(tg, 'pytest_path', [])))
+		extend_unique(self.pytest_libpaths, Utils.to_list(getattr(tg, 'pytest_libpath', [])))
+
+		if 'py' in tg.features:
+			# Python dependencies are added to PYTHONPATH
+			pypath = getattr(tg, 'install_from', tg.path)
+
+			if 'buildcopy' in tg.features:
+				# Since buildcopy is used we assume that PYTHONPATH in build should be used,
+				# not source
+				extend_unique(self.pytest_paths, [pypath.get_bld().abspath()])
+
+				# Add buildcopy output nodes to dependencies
+				extend_unique(self.pytest_dep_nodes, [o for task in getattr(tg, 'tasks', []) \
+								for o in getattr(task, 'outputs', [])])
+			else:
+				# If buildcopy is not used, depend on sources instead
+				extend_unique(self.pytest_dep_nodes, tg.source)
+				extend_unique(self.pytest_paths, [pypath.abspath()])
+
+		if getattr(tg, 'link_task', None):
+			# For tasks with a link_task (C, C++, D etc.) include their library paths:
+			if not isinstance(tg.link_task, ccroot.stlink_task):
+				extend_unique(self.pytest_dep_nodes, tg.link_task.outputs)
+				extend_unique(self.pytest_libpaths, tg.link_task.env.LIBPATH)
+
+				if 'pyext' in tg.features:
+					# If the taskgen is extending Python we also want to add the interpreter libpath.
+					extend_unique(self.pytest_libpaths, tg.link_task.env.LIBPATH_PYEXT)
+				else:
+					# Only add to libpath if the link task is not a Python extension
+					extend_unique(self.pytest_libpaths, [tg.link_task.outputs[0].parent.abspath()])
+
+
+@TaskGen.feature('pytest')
+@TaskGen.after_method('pytest_process_use')
+def make_pytest(self):
+	"""
+	Creates a ``utest`` task with a populated environment for Python if not specified in ``ut_env``:
+
+	- Paths in `pytest_paths` attribute are used to populate PYTHONPATH
+	- Paths in `pytest_libpaths` attribute are used to populate the system library path (e.g. LD_LIBRARY_PATH)
+	"""
+	nodes = self.to_nodes(self.pytest_source)
+	tsk = self.create_task('utest', nodes)
+
+	tsk.dep_nodes.extend(self.pytest_dep_nodes)
+	if getattr(self, 'ut_str', None):
+		self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False))
+		tsk.vars = lst + tsk.vars
+
+	if getattr(self, 'ut_cwd', None):
+		if isinstance(self.ut_cwd, str):
+			# we want a Node instance
+			if os.path.isabs(self.ut_cwd):
+				self.ut_cwd = self.bld.root.make_node(self.ut_cwd)
+			else:
+				self.ut_cwd = self.path.make_node(self.ut_cwd)
+	else:
+		# default to the directory of the first test source file
+		if tsk.inputs:
+			self.ut_cwd = tsk.inputs[0].parent
+		else:
+			raise Errors.WafError("no valid input files for pytest task, check pytest_source value")
+
+	if not self.ut_cwd.exists():
+		self.ut_cwd.mkdir()
+
+	if not hasattr(self, 'ut_env'):
+		self.ut_env = dict(os.environ)
+		def add_paths(var, lst):
+			# Add list of paths to a variable, lst can contain strings or nodes
+			lst = [ str(n) for n in lst ]
+			Logs.debug("ut: %s: Adding paths %s=%s", self, var, lst)
+			self.ut_env[var] = os.pathsep.join(lst) + os.pathsep + self.ut_env.get(var, '')
+
+		# Prepend dependency paths to PYTHONPATH and LD_LIBRARY_PATH
+		add_paths('PYTHONPATH', self.pytest_paths)
+
+		if Utils.is_win32:
+			add_paths('PATH', self.pytest_libpaths)
+		elif Utils.unversioned_sys_platform() == 'darwin':
+			# macOS consults both variables depending on how the library is loaded
+			add_paths('DYLD_LIBRARY_PATH', self.pytest_libpaths)
+			add_paths('LD_LIBRARY_PATH', self.pytest_libpaths)
+		else:
+			add_paths('LD_LIBRARY_PATH', self.pytest_libpaths)
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Jérôme Carretero 2011 (zougloub)
+# QNX neutrino compatibility functions
+
+import sys, os
+from waflib import Utils
+
+class Popen(object):
+ """
+ Popen cannot work on QNX from a threaded program:
+ Forking in threads is not implemented in neutrino.
+
+ Python's os.popen / spawn / fork won't work when running in threads (they will if in the main program thread)
+
+ In waf, this happens mostly in build.
+ And the use cases can be replaced by os.system() calls.
+ """
+ __slots__ = ["prog", "kw", "popen", "verbose"]
+ verbose = 0
+ def __init__(self, prog, **kw):
+ try:
+ self.prog = prog
+ self.kw = kw
+ self.popen = None
+ if Popen.verbose:
+ sys.stdout.write("Popen created: %r, kw=%r..." % (prog, kw))
+
+ do_delegate = kw.get('stdout') == -1 and kw.get('stderr') == -1
+ if do_delegate:
+ if Popen.verbose:
+ print("Delegating to real Popen")
+ self.popen = self.real_Popen(prog, **kw)
+ else:
+ if Popen.verbose:
+ print("Emulating")
+ except Exception as e:
+ if Popen.verbose:
+ print("Exception: %s" % e)
+ raise
+
+ def __getattr__(self, name):
+ if Popen.verbose:
+ sys.stdout.write("Getattr: %s..." % name)
+ if name in Popen.__slots__:
+ return object.__getattribute__(self, name)
+ else:
+ if self.popen is not None:
+ if Popen.verbose:
+ print("from Popen")
+ return getattr(self.popen, name)
+ else:
+ if name == "wait":
+ return self.emu_wait
+ else:
+ raise Exception("subprocess emulation: not implemented: %s" % name)
+
+ def emu_wait(self):
+ if Popen.verbose:
+ print("emulated wait (%r kw=%r)" % (self.prog, self.kw))
+ if isinstance(self.prog, str):
+ cmd = self.prog
+ else:
+ cmd = " ".join(self.prog)
+ if 'cwd' in self.kw:
+ cmd = 'cd "%s" && %s' % (self.kw['cwd'], cmd)
+ return os.system(cmd)
+
+# On QNX Neutrino only, replace subprocess.Popen with the emulation above;
+# the real class is kept as real_Popen so delegation still works when possible
+if sys.platform == "qnx6":
+	Popen.real_Popen = Utils.subprocess.Popen
+	Utils.subprocess.Popen = Popen
+
incs = set(self.to_list(getattr(self, 'includes', '')))
for x in self.compiled_tasks:
incs.add(x.inputs[0].parent.path_from(self.path))
- self.includes = list(incs)
+ self.includes = sorted(incs)
Note: another tool provides Qt processing that does not require
.moc includes, see 'playground/slow_qt/'.
# direct injection in the build phase (safe because called from the main thread)
gen = self.generator.bld.producer
- gen.outstanding.insert(0, tsk)
+ gen.outstanding.append(tsk)
gen.total += 1
return tsk
include_nodes = [node.parent] + self.generator.includes_nodes
moctasks = []
- mocfiles = set([])
+ mocfiles = set()
for d in bld.raw_deps.get(self.uid(), []):
if not d.endswith('.moc'):
continue
"""Update a .ts files from a list of C++ files"""
run_str = '${QT_LUPDATE} ${SRC} -ts ${TGT}'
color = 'BLUE'
-Task.update_outputs(trans_update)
class XMLHandler(ContentHandler):
"""
lst = []
for flag in self.to_list(self.env['CXXFLAGS']):
- if len(flag) < 2: continue
+ if len(flag) < 2:
+ continue
f = flag[0:2]
if f in ('-D', '-I', '/D', '/I'):
if (f[0] == '/'):
root = self.inputs[0].parent
for x in curHandler.files:
nd = root.find_resource(x)
- if nd: nodes.append(nd)
- else: names.append(x)
+ if nd:
+ nodes.append(nd)
+ else:
+ names.append(x)
return (nodes, names)
class moc(Task.Task):
# the qt directory has been given from QT4_ROOT - deduce the qt binary path
if not qtdir:
qtdir = os.environ.get('QT4_ROOT', '')
- qtbin = os.environ.get('QT4_BIN', None) or os.path.join(qtdir, 'bin')
+ qtbin = os.environ.get('QT4_BIN') or os.path.join(qtdir, 'bin')
if qtbin:
paths = [qtbin]
@conf
def find_qt4_libraries(self):
- qtlibs = getattr(Options.options, 'qtlibs', None) or os.environ.get("QT4_LIBDIR", None)
+ qtlibs = getattr(Options.options, 'qtlibs', None) or os.environ.get("QT4_LIBDIR")
if not qtlibs:
try:
qtlibs = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_LIBS']).strip()
qtlibs = os.path.join(qtdir, 'lib')
self.msg('Found the Qt4 libraries in', qtlibs)
- qtincludes = os.environ.get("QT4_INCLUDES", None) or self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_HEADERS']).strip()
+ qtincludes = os.environ.get("QT4_INCLUDES") or self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_HEADERS']).strip()
env = self.env
if not 'PKG_CONFIG_PATH' in os.environ:
os.environ['PKG_CONFIG_PATH'] = '%s:%s/pkgconfig:/usr/lib/qt4/lib/pkgconfig:/opt/qt4/lib/pkgconfig:/usr/lib/qt4/lib:/opt/qt4/lib' % (qtlibs, qtlibs)
try:
- if os.environ.get("QT4_XCOMPILE", None):
+ if os.environ.get("QT4_XCOMPILE"):
raise self.errors.ConfigurationError()
self.check_cfg(atleast_pkgconfig_version='0.1')
except self.errors.ConfigurationError:
opt.add_option('--'+i, type='string', default='', dest=i)
opt.add_option('--translate', action="store_true", help="collect translation strings", dest="trans_qt4", default=False)
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#! /usr/bin/env python
# encoding: utf-8
self.includes_nodes = lst
bld = self.bld
self.env['INCPATHS'] = [x.is_child_of(bld.srcnode) and x.path_from(bld.bldnode) or x.abspath() for x in lst]
+
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
# Remote Builds tool using rsync+ssh
4. Setup the ssh server and ssh keys
- The ssh key should not be protected by a password, or it will prompt for it everytime.
+ The ssh key should not be protected by a password, or it will prompt for it every time.
Create the key on the client:
.. code:: bash
ret = task.exec_command(bld.make_save_command(task))
if ret:
return ret
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+
+import os
+from waflib import Task
+from waflib.TaskGen import extension
+
+def configure(conf):
+	"""Find the .NET resgen tool and set its default flags."""
+	conf.find_program(['resgen'], var='RESGEN')
+	conf.env.RESGENFLAGS = '/useSourcePath'
+
+@extension('.resx')
+def resx_file(self, node):
+	"""
+	Bind the .resx extension to a resgen task
+
+	:param node: the .resx input file
+	"""
+	if not getattr(self, 'cs_task', None):
+		self.bld.fatal('resx_file has no link task for use %r' % self)
+
+	# Given assembly 'Foo' and file 'Sub/Dir/File.resx', create 'Foo.Sub.Dir.File.resources'
+	assembly = getattr(self, 'namespace', os.path.splitext(self.gen)[0])
+	res = os.path.splitext(node.path_from(self.path))[0].replace('/', '.').replace('\\', '.')
+	out = self.path.find_or_declare(assembly + '.' + res + '.resources')
+
+	tsk = self.create_task('resgen', node, out)
+
+	# the C# link must wait for (and rebuild on changes to) the resource output
+	self.cs_task.dep_nodes.extend(tsk.outputs) # dependency
+	self.env.append_value('RESOURCES', tsk.outputs[0].bldpath())
+
+class resgen(Task.Task):
+	"""
+	Compile C# resource files
+	"""
+	color = 'YELLOW'
+	run_str = '${RESGEN} ${RESGENFLAGS} ${SRC} ${TGT}'
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
# Laurent Birtz, 2011
"""
Return true if the review sets specified are equal.
"""
- if len(set1.keys()) != len(set2.keys()): return False
+ if len(set1.keys()) != len(set2.keys()):
+ return False
for key in set1.keys():
if not key in set2 or set1[key] != set2[key]:
return False
name = ", ".join(opt._short_opts + opt._long_opts)
help = opt.help
actual = None
- if dest in review_set: actual = review_set[dest]
+ if dest in review_set:
+ actual = review_set[dest]
default = review_defaults[dest]
lines.append(self.format_option(name, help, actual, default, term_width))
return "Configuration:\n\n" + "\n\n".join(lines) + "\n"
w = textwrap.TextWrapper()
w.width = term_width - 1
- if w.width < 60: w.width = 60
+ if w.width < 60:
+ w.width = 60
out = ""
old_configure_execute(self)
Context.create_context('review').store_review_set(new_review_set)
Configure.ConfigurationContext.execute = new_configure_execute
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
# Jérôme Carretero, 2013 (zougloub)
rst_progs = "rst2html rst2xetex rst2latex rst2xml rst2pdf rst2s5 rst2man rst2odt rst2rtf".split()
-def parse_rst_node(node, nodes, names, seen):
+def parse_rst_node(task, node, nodes, names, seen, dirs=None):
# TODO add extensibility, to handle custom rst include tags...
+ if dirs is None:
+ dirs = (node.parent,node.get_bld().parent)
+
if node in seen:
return
seen.append(node)
for match in re_rst.finditer(code):
ipath = match.group('file')
itype = match.group('type')
- Logs.debug("rst: visiting %s: %s" % (itype, ipath))
- found = node.parent.find_resource(ipath)
- if found:
- nodes.append(found)
- if itype == 'include':
- parse_rst_node(found, nodes, names, seen)
- else:
- names.append(ipath)
+ Logs.debug('rst: visiting %s: %s', itype, ipath)
+ found = False
+ for d in dirs:
+ Logs.debug('rst: looking for %s in %s', ipath, d.abspath())
+ found = d.find_node(ipath)
+ if found:
+ Logs.debug('rst: found %s as %s', ipath, found.abspath())
+ nodes.append((itype, found))
+ if itype == 'include':
+ parse_rst_node(task, found, nodes, names, seen)
+ break
+ if not found:
+ names.append((itype, ipath))
class docutils(Task.Task):
"""
if not node:
return (nodes, names)
- parse_rst_node(node, nodes, names, seen)
+ parse_rst_node(self, node, nodes, names, seen)
- Logs.debug("rst: %s: found the following file deps: %s" % (repr(self), nodes))
+ Logs.debug('rst: %r: found the following file deps: %r', self, nodes)
if names:
- Logs.warn("rst: %s: could not find the following file deps: %s" % (repr(self), names))
+ Logs.warn('rst: %r: could not find the following file deps: %r', self, names)
- return (nodes, names)
+ return ([v for (t,v) in nodes], [v for (t,v) in names])
def check_status(self, msg, retcode):
"""
:type retcode: boolean
"""
if retcode != 0:
- raise Errors.WafError("%r command exit status %r" % (msg, retcode))
+ raise Errors.WafError('%r command exit status %r' % (msg, retcode))
def run(self):
"""
if stylesheet is not None:
ssnode = self.generator.to_nodes(stylesheet)[0]
nodes.append(ssnode)
- Logs.debug("rst: adding dep to %s %s" % (attribute, stylesheet))
+ Logs.debug('rst: adding dep to %s %s', attribute, stylesheet)
return nodes, names
inst_to = getattr(self, 'install_path', None)
if inst_to:
- self.install_task = self.bld.install_files(inst_to, task.outputs[:], env=self.env)
+ self.install_task = self.add_install_files(install_to=inst_to, install_from=task.outputs[:])
self.source = []
"""
for p in rst_progs:
self.find_program(p, mandatory=False)
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Hans-Martin von Gaudecker, 2012
+
+"""
+Run a Stata do-script in the directory specified by **ctx.bldnode**. The
+first and only argument will be the name of the do-script (no extension),
+which can be accessed inside the do-script by the local macro `1'. Useful
+for keeping a log file.
+
+The tool uses the log file that is automatically kept by Stata only
+for error-catching purposes, it will be destroyed if the task finished
+without error. In case of an error in **some_script.do**, you can inspect
+it as **some_script.log** in the **ctx.bldnode** directory.
+
+Note that Stata will not return an error code if it exits abnormally --
+catching errors relies on parsing the log file mentioned before. Should
+the parser behave incorrectly please send an email to hmgaudecker [at] gmail.
+
+**WARNING**
+
+ The tool will not work if multiple do-scripts of the same name---but in
+ different directories---are run at the same time! Avoid this situation.
+
+Usage::
+
+ ctx(features='run_do_script',
+ source='some_script.do',
+ target=['some_table.tex', 'some_figure.eps'],
+ deps='some_data.csv')
+"""
+
+
+import os, re, sys
+from waflib import Task, TaskGen, Logs
+
+# Platform-specific Stata executable names, batch-mode flags and log-file
+# encoding; configure() below searches the candidates in the listed order
+if sys.platform == 'darwin':
+	STATA_COMMANDS = ['Stata64MP', 'StataMP',
+					  'Stata64SE', 'StataSE',
+					  'Stata64', 'Stata']
+	STATAFLAGS = '-e -q do'
+	STATAENCODING = 'MacRoman'
+elif sys.platform.startswith('linux'):
+	STATA_COMMANDS = ['stata-mp', 'stata-se', 'stata']
+	STATAFLAGS = '-b -q do'
+	# Not sure whether this is correct...
+	STATAENCODING = 'Latin-1'
+elif sys.platform.lower().startswith('win'):
+	STATA_COMMANDS = ['StataMP-64', 'StataMP-ia',
+					  'StataMP', 'StataSE-64',
+					  'StataSE-ia', 'StataSE',
+					  'Stata-64', 'Stata-ia',
+					  'Stata.e', 'WMPSTATA',
+					  'WSESTATA', 'WSTATA']
+	STATAFLAGS = '/e do'
+	STATAENCODING = 'Latin-1'
+else:
+	raise Exception("Unknown sys.platform: %s " % sys.platform)
+
+def configure(ctx):
+	"""Detect the Stata executable and store the batch flags/encoding in the environment."""
+	ctx.find_program(STATA_COMMANDS, var='STATACMD', errmsg="""\n
+No Stata executable found!\n\n
+If Stata is needed:\n
+    1) Check the settings of your system path.
+    2) Note we are looking for Stata executables called: %s
+       If yours has a different name, please report to hmgaudecker [at] gmail\n
+Else:\n
+    Do not load the 'run_do_script' tool in the main wscript.\n\n""" % STATA_COMMANDS)
+	ctx.env.STATAFLAGS = STATAFLAGS
+	ctx.env.STATAENCODING = STATAENCODING
+
+class run_do_script_base(Task.Task):
+	"""Run a Stata do-script from the bldnode directory."""
+	# ${DOFILETRUNK} is passed to the do-script as its first argument (local macro `1')
+	run_str = '"${STATACMD}" ${STATAFLAGS} "${SRC[0].abspath()}" "${DOFILETRUNK}"'
+	shell = True
+
+class run_do_script(run_do_script_base):
+	"""Use the log file automatically kept by Stata for error-catching.
+	Erase it if the task finished without error. If not, it will show
+	up as do_script.log in the bldnode directory.
+	"""
+	def run(self):
+		"""Run the do-script, then inspect the Stata log for errors.
+
+		:return: the error code parsed from the log, or None on success
+		"""
+		run_do_script_base.run(self)
+		ret, log_tail = self.check_erase_log_file()
+		if ret:
+			Logs.error("""Running Stata on %r failed with code %r.\n\nCheck the log file %s, last 10 lines\n\n%s\n\n\n""",
+				self.inputs[0], ret, self.env.LOGFILEPATH, log_tail)
+		return ret
+
+	def check_erase_log_file(self):
+		"""Parse Stata's default log file and erase it if everything okay.
+
+		Parser is based on Brendan Halpin's shell script found here:
+		http://teaching.sociology.ul.ie/bhalpin/wordpress/?p=122
+		"""
+
+		# Python 3 needs the log decoded with Stata's platform encoding;
+		# Python 2 reads raw bytes (the builtin open() takes 'name' there)
+		if sys.version_info.major >= 3:
+			kwargs = {'file': self.env.LOGFILEPATH, 'mode': 'r', 'encoding': self.env.STATAENCODING}
+		else:
+			kwargs = {'name': self.env.LOGFILEPATH, 'mode': 'r'}
+		with open(**kwargs) as log:
+			log_tail = log.readlines()[-10:]
+		for line in log_tail:
+			# Stata reports failures as r(<code>) at the start of a line
+			error_found = re.match("r\(([0-9]+)\)", line)
+			if error_found:
+				return error_found.group(1), ''.join(log_tail)
+			else:
+				pass
+		# Only end up here if the parser did not identify an error.
+		os.remove(self.env.LOGFILEPATH)
+		return None, None
+
+
+@TaskGen.feature('run_do_script')
+@TaskGen.before_method('process_source')
+def apply_run_do_script(tg):
+	"""Task generator customising the options etc. to call Stata in batch
+	mode for running a do-script.
+	"""
+
+	# Convert sources and targets to nodes
+	src_node = tg.path.find_resource(tg.source)
+	tgt_nodes = [tg.path.find_or_declare(t) for t in tg.to_list(tg.target)]
+
+	tsk = tg.create_task('run_do_script', src=src_node, tgt=tgt_nodes)
+	# Stata is given the do-file name without extension; the log file carries
+	# the same trunk name, hence the warning about same-named scripts clashing
+	tsk.env.DOFILETRUNK = os.path.splitext(src_node.name)[0]
+	tsk.env.LOGFILEPATH = os.path.join(tg.bld.bldnode.abspath(), '%s.log' % (tsk.env.DOFILETRUNK))
+
+	# dependencies (if the attribute 'deps' changes, trigger a recompilation)
+	for x in tg.to_list(getattr(tg, 'deps', [])):
+		node = tg.path.find_resource(x)
+		if not node:
+			tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath()))
+		tsk.dep_nodes.append(node)
+	Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath())
+
+	# Bypass the execution of process_source by setting the source to an empty list
+	tg.source = []
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Hans-Martin von Gaudecker, 2012
+
+"""
+Run a Matlab script.
+
+Note that the script is run in the directory where it lives -- Matlab won't
+allow it any other way.
+
+For error-catching purposes, keep an own log-file that is destroyed if the
+task finished without error. If not, it will show up as mscript_[index].log
+in the bldnode directory.
+
+Usage::
+
+ ctx(features='run_m_script',
+ source='some_script.m',
+ target=['some_table.tex', 'some_figure.eps'],
+ deps='some_data.mat')
+"""
+
+import os, sys
+from waflib import Task, TaskGen, Logs
+
+MATLAB_COMMANDS = ['matlab']
+
+def configure(ctx):
+	"""Detect the Matlab executable and set the default batch-mode flags."""
+	ctx.find_program(MATLAB_COMMANDS, var='MATLABCMD', errmsg = """\n
+No Matlab executable found!\n\n
+If Matlab is needed:\n
+    1) Check the settings of your system path.
+    2) Note we are looking for Matlab executables called: %s
+       If yours has a different name, please report to hmgaudecker [at] gmail\n
+Else:\n
+    Do not load the 'run_m_script' tool in the main wscript.\n\n""" % MATLAB_COMMANDS)
+	# run headless and wait for completion so the exit status is meaningful
+	ctx.env.MATLABFLAGS = '-wait -nojvm -nosplash -minimize'
+
+class run_m_script_base(Task.Task):
+	"""Run a Matlab script."""
+	# the try/catch wrapper makes Matlab exit non-zero when the script errors
+	run_str = '"${MATLABCMD}" ${MATLABFLAGS} -logfile "${LOGFILEPATH}" -r "try, ${MSCRIPTTRUNK}, exit(0), catch err, disp(err.getReport()), exit(1), end"'
+	shell = True
+
+class run_m_script(run_m_script_base):
+ """Erase the Matlab overall log file if everything went okay, else raise an
+ error and print its 10 last lines.
+ """
+ def run(self):
+ ret = run_m_script_base.run(self)
+ logfile = self.env.LOGFILEPATH
+ if ret:
+ mode = 'r'
+ if sys.version_info.major >= 3:
+ mode = 'rb'
+ with open(logfile, mode=mode) as f:
+ tail = f.readlines()[-10:]
+ Logs.error("""Running Matlab on %r returned the error %r\n\nCheck the log file %s, last 10 lines\n\n%s\n\n\n""",
+ self.inputs[0], ret, logfile, '\n'.join(tail))
+ else:
+ os.remove(logfile)
+ return ret
+
+@TaskGen.feature('run_m_script')
+@TaskGen.before_method('process_source')
+def apply_run_m_script(tg):
+	"""Task generator customising the options etc. to call Matlab in batch
+	mode for running a m-script.
+	"""
+
+	# Convert sources and targets to nodes
+	src_node = tg.path.find_resource(tg.source)
+	tgt_nodes = [tg.path.find_or_declare(t) for t in tg.to_list(tg.target)]
+
+	tsk = tg.create_task('run_m_script', src=src_node, tgt=tgt_nodes)
+	# Matlab insists on running in the script's own directory
+	tsk.cwd = src_node.parent.abspath()
+	tsk.env.MSCRIPTTRUNK = os.path.splitext(src_node.name)[0]
+	# the taskgen index makes the log name unique per task generator
+	tsk.env.LOGFILEPATH = os.path.join(tg.bld.bldnode.abspath(), '%s_%d.log' % (tsk.env.MSCRIPTTRUNK, tg.idx))
+
+	# dependencies (if the attribute 'deps' changes, trigger a recompilation)
+	for x in tg.to_list(getattr(tg, 'deps', [])):
+		node = tg.path.find_resource(x)
+		if not node:
+			tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath()))
+		tsk.dep_nodes.append(node)
+	Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath())
+
+	# Bypass the execution of process_source by setting the source to an empty list
+	tg.source = []
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Hans-Martin von Gaudecker, 2012
+
+"""
+Run a Python script in the directory specified by **ctx.bldnode**.
+
+Select a Python version by specifying the **version** keyword for
+the task generator instance as integer 2 or 3. Default is 3.
+
+If the build environment has an attribute "PROJECT_PATHS" with
+a key "PROJECT_ROOT", its value will be appended to the PYTHONPATH.
+Same for a string passed to the optional **add_to_pythonpath**
+keyword (appended after the PROJECT_ROOT).
+
+Usage::
+
+ ctx(features='run_py_script', version=3,
+ source='some_script.py',
+ target=['some_table.tex', 'some_figure.eps'],
+ deps='some_data.csv',
+ add_to_pythonpath='src/some/library')
+"""
+
+import os, re
+from waflib import Task, TaskGen, Logs
+
+
+def configure(conf):
+	"""TODO: Might need to be updated for Windows once
+	"PEP 397":http://www.python.org/dev/peps/pep-0397/ is settled.
+	"""
+	# each interpreter is optional individually, but at least one must exist
+	conf.find_program('python', var='PY2CMD', mandatory=False)
+	conf.find_program('python3', var='PY3CMD', mandatory=False)
+	if not conf.env.PY2CMD and not conf.env.PY3CMD:
+		conf.fatal("No Python interpreter found!")
+
+class run_py_2_script(Task.Task):
+	"""Run a Python 2 script."""
+	run_str = '${PY2CMD} ${SRC[0].abspath()}'
+	# the command line is handed to the shell as-is
+	shell=True
+
+class run_py_3_script(Task.Task):
+	"""Run a Python 3 script."""
+	run_str = '${PY3CMD} ${SRC[0].abspath()}'
+	# the command line is handed to the shell as-is
+	shell=True
+
+@TaskGen.feature('run_py_script')
+@TaskGen.before_method('process_source')
+def apply_run_py_script(tg):
+	"""Task generator for running either Python 2 or Python 3 on a single
+	script.
+
+	Attributes:
+
+		* source -- A **single** source node or string. (required)
+		* target -- A single target or list of targets (nodes or strings)
+		* deps -- A single dependency or list of dependencies (nodes or strings)
+		* add_to_pythonpath -- A string that will be appended to the PYTHONPATH environment variable
+
+	If the build environment has an attribute "PROJECT_PATHS" with
+	a key "PROJECT_ROOT", its value will be appended to the PYTHONPATH.
+	"""
+
+	# Set the Python version to use, default to 3.
+	v = getattr(tg, 'version', 3)
+	if v not in (2, 3):
+		raise ValueError("Specify the 'version' attribute for run_py_script task generator as integer 2 or 3.\n Got: %s" %v)
+
+	# Convert sources and targets to nodes
+	src_node = tg.path.find_resource(tg.source)
+	tgt_nodes = [tg.path.find_or_declare(t) for t in tg.to_list(tg.target)]
+
+	# Create the task.
+	tsk = tg.create_task('run_py_%d_script' %v, src=src_node, tgt=tgt_nodes)
+
+	# custom execution environment
+	# TODO use a list and os.sep.join(lst) at the end instead of concatenating strings
+	tsk.env.env = dict(os.environ)
+	tsk.env.env['PYTHONPATH'] = tsk.env.env.get('PYTHONPATH', '')
+	project_paths = getattr(tsk.env, 'PROJECT_PATHS', None)
+	if project_paths and 'PROJECT_ROOT' in project_paths:
+		tsk.env.env['PYTHONPATH'] += os.pathsep + project_paths['PROJECT_ROOT'].abspath()
+	if getattr(tg, 'add_to_pythonpath', None):
+		tsk.env.env['PYTHONPATH'] += os.pathsep + tg.add_to_pythonpath
+
+	# Clean up the PYTHONPATH -- replace double occurrences of path separator
+	tsk.env.env['PYTHONPATH'] = re.sub(os.pathsep + '+', os.pathsep, tsk.env.env['PYTHONPATH'])
+
+	# Clean up the PYTHONPATH -- doesn't like starting with path separator
+	# (os.pathsep is a single character, so [1:] strips exactly the leading one)
+	if tsk.env.env['PYTHONPATH'].startswith(os.pathsep):
+		tsk.env.env['PYTHONPATH'] = tsk.env.env['PYTHONPATH'][1:]
+
+	# dependencies (if the attribute 'deps' changes, trigger a recompilation)
+	for x in tg.to_list(getattr(tg, 'deps', [])):
+		node = tg.path.find_resource(x)
+		if not node:
+			tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath()))
+		tsk.dep_nodes.append(node)
+	Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath())
+
+	# Bypass the execution of process_source by setting the source to an empty list
+	tg.source = []
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Hans-Martin von Gaudecker, 2012
+
+"""
+Run a R script in the directory specified by **ctx.bldnode**.
+
+For error-catching purposes, keep an own log-file that is destroyed if the
+task finished without error. If not, it will show up as rscript_[index].log
+in the bldnode directory.
+
+Usage::
+
+ ctx(features='run_r_script',
+ source='some_script.r',
+ target=['some_table.tex', 'some_figure.eps'],
+ deps='some_data.csv')
+"""
+
+
+import os, sys
+from waflib import Task, TaskGen, Logs
+
+R_COMMANDS = ['RTerm', 'R', 'r']
+
+def configure(ctx):
+	"""Locate an R interpreter (stored as ``RCMD``) and set batch-mode flags."""
+	ctx.find_program(R_COMMANDS, var='RCMD', errmsg = """\n
+No R executable found!\n\n
+If R is needed:\n
+    1) Check the settings of your system path.
+    2) Note we are looking for R executables called: %s
+       If yours has a different name, please report to hmgaudecker [at] gmail\n
+Else:\n
+    Do not load the 'run_r_script' tool in the main wscript.\n\n""" % R_COMMANDS)
+	ctx.env.RFLAGS = 'CMD BATCH --slave'
+
+class run_r_script_base(Task.Task):
+	"""Run a R script; ``LOGFILEPATH`` is filled in by the task generator."""
+	run_str = '"${RCMD}" ${RFLAGS} "${SRC[0].abspath()}" "${LOGFILEPATH}"'
+	shell = True
+
+class run_r_script(run_r_script_base):
+ """Erase the R overall log file if everything went okay, else raise an
+ error and print its 10 last lines.
+ """
+ def run(self):
+ ret = run_r_script_base.run(self)
+ logfile = self.env.LOGFILEPATH
+ if ret:
+ mode = 'r'
+ if sys.version_info.major >= 3:
+ mode = 'rb'
+ with open(logfile, mode=mode) as f:
+ tail = f.readlines()[-10:]
+ Logs.error("""Running R on %r returned the error %r\n\nCheck the log file %s, last 10 lines\n\n%s\n\n\n""",
+ self.inputs[0], ret, logfile, '\n'.join(tail))
+ else:
+ os.remove(logfile)
+ return ret
+
+
+@TaskGen.feature('run_r_script')
+@TaskGen.before_method('process_source')
+def apply_run_r_script(tg):
+	"""Task generator customising the options etc. to call R in batch
+	mode for running a R script.
+
+	Attributes: source (single script, required), target (one or many),
+	deps (extra file dependencies).
+	"""
+
+	# Convert sources and targets to nodes
+	src_node = tg.path.find_resource(tg.source)
+	tgt_nodes = [tg.path.find_or_declare(t) for t in tg.to_list(tg.target)]
+
+	tsk = tg.create_task('run_r_script', src=src_node, tgt=tgt_nodes)
+	# per-task log file in the build dir: <script-name>_<generator-index>.log
+	tsk.env.LOGFILEPATH = os.path.join(tg.bld.bldnode.abspath(), '%s_%d.log' % (os.path.splitext(src_node.name)[0], tg.idx))
+
+	# dependencies (if the attribute 'deps' changes, trigger a recompilation)
+	for x in tg.to_list(getattr(tg, 'deps', [])):
+		node = tg.path.find_resource(x)
+		if not node:
+			tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath()))
+		tsk.dep_nodes.append(node)
+	Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath())
+
+	# Bypass the execution of process_source by setting the source to an empty list
+	tg.source = []
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Mark Coggeshall, 2010
+
+"SAS support"
+
+import os
+from waflib import Task, Errors, Logs
+from waflib.TaskGen import feature, before_method
+
+sas_fun, _ = Task.compile_fun('sas -sysin ${SRCFILE} -log ${LOGFILE} -print ${LSTFILE}', shell=False)
+
+class sas(Task.Task):
+	"""Run the SAS interpreter on one .sas input, writing .log/.lst siblings.
+
+	NOTE: the first parameter of run() is the task instance (conventionally
+	'self'); the original author named it 'task' and that is kept here.
+	"""
+	vars = ['SAS', 'SASFLAGS']
+	def run(task):
+		command = 'SAS'
+		fun = sas_fun
+
+		node = task.inputs[0]
+		logfilenode = node.change_ext('.log')
+		lstfilenode = node.change_ext('.lst')
+
+		# set the cwd
+		task.cwd = task.inputs[0].parent.get_src().abspath()
+		Logs.debug('runner: %r on %r', command, node)
+
+		# presumably the SAS include search path: build dir first, then source dir -- TODO confirm
+		SASINPUTS = node.parent.get_bld().abspath() + os.pathsep + node.parent.get_src().abspath() + os.pathsep
+		task.env.env = {'SASINPUTS': SASINPUTS}
+
+		task.env.SRCFILE = node.abspath()
+		task.env.LOGFILE = logfilenode.abspath()
+		task.env.LSTFILE = lstfilenode.abspath()
+		ret = fun(task)
+		if ret:
+			Logs.error('Running %s on %r returned a non-zero exit', command, node)
+			Logs.error('SRCFILE = %r', node)
+			Logs.error('LOGFILE = %r', logfilenode)
+			Logs.error('LSTFILE = %r', lstfilenode)
+		return ret
+
+@feature('sas')
+@before_method('process_source')
+def apply_sas(self):
+	"""Create one 'sas' task per source file; resolve the 'deps' attribute
+	into nodes and attach them as extra file dependencies."""
+	if not getattr(self, 'type', None) in ('sas',):
+		self.type = 'sas'
+
+	self.env['logdir'] = getattr(self, 'logdir', 'log')
+	self.env['lstdir'] = getattr(self, 'lstdir', 'lst')
+
+	deps_lst = []
+
+	if getattr(self, 'deps', None):
+		deps = self.to_list(self.deps)
+		for filename in deps:
+			# look relative to the task generator first, then from the fs root
+			n = self.path.find_resource(filename)
+			if not n:
+				n = self.bld.root.find_resource(filename)
+			if not n:
+				raise Errors.WafError('cannot find input file %s for processing' % filename)
+			if not n in deps_lst:
+				deps_lst.append(n)
+
+	for node in self.to_nodes(self.source):
+		if self.type == 'sas':
+			task = self.create_task('sas', src=node)
+			task.dep_nodes = deps_lst
+	self.source = []
+
+def configure(self):
+	"""Detect the SAS executable (optional; tasks will fail later if absent)."""
+	self.find_program('sas', var='SAS', mandatory=False)
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/python
+# encoding: utf-8
+# vim: tabstop=4 noexpandtab
+
+"""
+Create a satellite assembly from "*.??.txt" files. ?? stands for a language code.
+
+The projects Resources subfolder contains resources.??.txt string files for several languages.
+The build folder will hold the satellite assemblies as ./??/ExeName.resources.dll
+
+#gen becomes template (It is called gen because it also uses resx.py).
+bld(source='Resources/resources.de.txt',gen=ExeName)
+"""
+
+import os, re
+from waflib import Task
+from waflib.TaskGen import feature,before_method
+
+class al(Task.Task):
+	"""Invoke the .NET assembly linker 'al' with flags taken from ALFLAGS."""
+	run_str = '${AL} ${ALFLAGS}'
+
+@feature('satellite_assembly')
+@before_method('process_source')
+def satellite_assembly(self):
+	"""For every 'name.<lang>.resx|txt' source, create a resgen task for the
+	.resources file and an 'al' task producing the satellite assembly
+	./<lang>/<template>.resources.dll, using the 'gen' template assembly."""
+	if not getattr(self, 'gen', None):
+		self.bld.fatal('satellite_assembly needs a template assembly provided with the "gen" parameter')
+	res_lang = re.compile(r'(.*)\.(\w\w)\.(?:resx|txt)',flags=re.I)
+
+	# self.source can contain node objects, so this will break in one way or another
+	self.source = self.to_list(self.source)
+	for i, x in enumerate(self.source):
+		#x = 'resources/resources.de.resx'
+		#x = 'resources/resources.de.txt'
+		mo = res_lang.match(x)
+		if mo:
+			template = os.path.splitext(self.gen)[0]
+			templatedir, templatename = os.path.split(template)
+			res = mo.group(1)
+			lang = mo.group(2)
+			#./Resources/resources.de.resources
+			resources = self.path.find_or_declare(res+ '.' + lang + '.resources')
+			self.create_task('resgen', self.to_nodes(x), [resources])
+			#./de/Exename.resources.dll
+			satellite = self.path.find_or_declare(os.path.join(templatedir,lang,templatename) + '.resources.dll')
+			tsk = self.create_task('al',[resources],[satellite])
+			tsk.env.append_value('ALFLAGS','/template:'+os.path.join(self.path.relpath(),self.gen))
+			tsk.env.append_value('ALFLAGS','/embed:'+resources.relpath())
+			tsk.env.append_value('ALFLAGS','/culture:'+lang)
+			tsk.env.append_value('ALFLAGS','/out:'+satellite.relpath())
+			self.source[i] = None
+	# remove the None elements that we just substituted
+	self.source = list(filter(lambda x:x, self.source))
+
+def configure(ctx):
+	"""Find the assembly linker and load the resx tool (provides 'resgen')."""
+	ctx.find_program('al', var='AL', mandatory=True)
+	ctx.load('resx')
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2010 (ita)
+
+"""
+Scala support
+
+scalac outputs files a bit where it wants to
+"""
+
+import os
+from waflib import Task, Utils, Node
+from waflib.TaskGen import feature, before_method, after_method
+
+from waflib.Tools import ccroot
+ccroot.USELIB_VARS['scalac'] = set(['CLASSPATH', 'SCALACFLAGS'])
+
+from waflib.Tools import javaw
+
+@feature('scalac')
+@before_method('process_source')
+def apply_scalac(self):
+	"""Create the scalac task and resolve the 'srcdir' attribute into a
+	list of folder nodes that will later be scanned for .scala files."""
+
+	Utils.def_attrs(self, jarname='', classpath='',
+		sourcepath='.', srcdir='.',
+		jar_mf_attributes={}, jar_mf_classpath=[])
+
+	# 'outdir' may be a node or a path relative to the build directory
+	outdir = getattr(self, 'outdir', None)
+	if outdir:
+		if not isinstance(outdir, Node.Node):
+			outdir = self.path.get_bld().make_node(self.outdir)
+	else:
+		outdir = self.path.get_bld()
+	outdir.mkdir()
+	self.env['OUTDIR'] = outdir.abspath()
+
+	self.scalac_task = tsk = self.create_task('scalac')
+	tmp = []
+
+	srcdir = getattr(self, 'srcdir', '')
+	if isinstance(srcdir, Node.Node):
+		srcdir = [srcdir]
+	for x in Utils.to_list(srcdir):
+		if isinstance(x, Node.Node):
+			y = x
+		else:
+			y = self.path.find_dir(x)
+			if not y:
+				self.bld.fatal('Could not find the folder %s from %s' % (x, self.path))
+		tmp.append(y)
+	tsk.srcdir = tmp
+
+# reuse some code
+feature('scalac')(javaw.use_javac_files)
+after_method('apply_scalac')(javaw.use_javac_files)
+
+feature('scalac')(javaw.set_classpath)
+after_method('apply_scalac', 'use_scalac_files')(javaw.set_classpath)
+
+
+SOURCE_RE = '**/*.scala'
+class scalac(javaw.javac):
+ color = 'GREEN'
+ vars = ['CLASSPATH', 'SCALACFLAGS', 'SCALAC', 'OUTDIR']
+
+ def runnable_status(self):
+ """
+ Wait for dependent tasks to be complete, then read the file system to find the input nodes.
+ """
+ for t in self.run_after:
+ if not t.hasrun:
+ return Task.ASK_LATER
+
+ if not self.inputs:
+ global SOURCE_RE
+ self.inputs = []
+ for x in self.srcdir:
+ self.inputs.extend(x.ant_glob(SOURCE_RE, remove=False))
+ return super(javaw.javac, self).runnable_status()
+
+ def run(self):
+ """
+ Execute the scalac compiler
+ """
+ env = self.env
+ gen = self.generator
+ bld = gen.bld
+ wd = bld.bldnode.abspath()
+ def to_list(xx):
+ if isinstance(xx, str):
+ return [xx]
+ return xx
+ self.last_cmd = lst = []
+ lst.extend(to_list(env['SCALAC']))
+ lst.extend(['-classpath'])
+ lst.extend(to_list(env['CLASSPATH']))
+ lst.extend(['-d'])
+ lst.extend(to_list(env['OUTDIR']))
+ lst.extend(to_list(env['SCALACFLAGS']))
+ lst.extend([a.abspath() for a in self.inputs])
+ lst = [x for x in lst if x]
+ try:
+ self.out = self.generator.bld.cmd_and_log(lst, cwd=wd, env=env.env or None, output=0, quiet=0)[1]
+ except:
+ self.generator.bld.cmd_and_log(lst, cwd=wd, env=env.env or None)
+
+def configure(self):
+	"""
+	Detect the scalac program
+	"""
+	# If SCALA_HOME is set, we prepend it to the path list
+	java_path = self.environ['PATH'].split(os.pathsep)
+	v = self.env
+
+	if 'SCALA_HOME' in self.environ:
+		java_path = [os.path.join(self.environ['SCALA_HOME'], 'bin')] + java_path
+		self.env['SCALA_HOME'] = [self.environ['SCALA_HOME']]
+
+	for x in 'scalac scala'.split():
+		self.find_program(x, var=x.upper(), path_list=java_path)
+
+	if 'CLASSPATH' in self.environ:
+		v['CLASSPATH'] = self.environ['CLASSPATH']
+
+	v.SCALACFLAGS = ['-verbose']
+	# NOTE(review): find_program above appears to abort already when scalac
+	# is missing, which would make this check redundant -- confirm
+	if not v['SCALAC']:
+		self.fatal('scalac is required for compiling scala classes')
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# Thomas Nagy, 2011 (ita)
+
+"""
+Create _moc.cpp files
+
+The builds are 30-40% faster when .moc files are included,
+you should NOT use this tool. If you really
+really want it:
+
+def configure(conf):
+ conf.load('compiler_cxx qt4')
+ conf.load('slow_qt4')
+
+See playground/slow_qt/wscript for a complete example.
+"""
+
+from waflib.TaskGen import extension
+from waflib import Task
+import waflib.Tools.qt4
+import waflib.Tools.cxx
+
+@extension(*waflib.Tools.qt4.EXT_QT4)
+def cxx_hook(self, node):
+	# re-map the default qt4 C++ extensions onto the custom cxx_qt task class below
+	self.create_compiled_task('cxx_qt', node)
+
+class cxx_qt(Task.classes['cxx']):
+	"""C++ compilation task that schedules moc on demand.
+
+	When about to run, scan the input file and its discovered header
+	dependencies for Q_OBJECT; for each match create a moc task (and, for
+	headers, a compilation task for the generated _moc.cpp), then wait on
+	those tasks before compiling.
+	"""
+	def runnable_status(self):
+		ret = Task.classes['cxx'].runnable_status(self)
+		if ret != Task.ASK_LATER and not getattr(self, 'moc_done', None):
+
+			# moc_cache maps a generated _moc.cpp node to the task that claimed
+			# it, so each header is moc'ed only once per task generator
+			try:
+				cache = self.generator.moc_cache
+			except AttributeError:
+				cache = self.generator.moc_cache = {}
+
+			deps = self.generator.bld.node_deps[self.uid()]
+			for x in [self.inputs[0]] + deps:
+				if x.read().find('Q_OBJECT') > 0:
+
+					# process "foo.h -> foo.moc" only if "foo.cpp" is in the sources for the current task generator
+					# this code will work because it is in the main thread (runnable_status)
+					if x.name.rfind('.') > -1: # a .h file...
+						name = x.name[:x.name.rfind('.')]
+						for tsk in self.generator.compiled_tasks:
+							if tsk.inputs and tsk.inputs[0].name.startswith(name):
+								break
+						else:
+							# no corresponding file, continue
+							continue
+
+					# the file foo.cpp could be compiled for a static and a shared library - hence the %number in the name
+					cxx_node = x.parent.get_bld().make_node(x.name.replace('.', '_') + '_%d_moc.cpp' % self.generator.idx)
+					if cxx_node in cache:
+						continue
+					cache[cxx_node] = self
+
+					tsk = Task.classes['moc'](env=self.env, generator=self.generator)
+					tsk.set_inputs(x)
+					tsk.set_outputs(cxx_node)
+
+					if x.name.endswith('.cpp'):
+						# moc is trying to be too smart but it is too dumb:
+						# why forcing the #include when Q_OBJECT is in the cpp file?
+						gen = self.generator.bld.producer
+						gen.outstanding.append(tsk)
+						gen.total += 1
+						self.set_run_after(tsk)
+					else:
+						cxxtsk = Task.classes['cxx'](env=self.env, generator=self.generator)
+						cxxtsk.set_inputs(tsk.outputs)
+						cxxtsk.set_outputs(cxx_node.change_ext('.o'))
+						cxxtsk.set_run_after(tsk)
+
+						try:
+							self.more_tasks.extend([tsk, cxxtsk])
+						except AttributeError:
+							self.more_tasks = [tsk, cxxtsk]
+
+						try:
+							link = self.generator.link_task
+						except AttributeError:
+							pass
+						else:
+							link.set_run_after(cxxtsk)
+							link.inputs.extend(cxxtsk.outputs)
+							# keep the link inputs in a deterministic order
+							link.inputs.sort(key=lambda x: x.abspath())
+
+			self.moc_done = True
+
+		# wait for the tasks created above before compiling
+		for t in self.run_after:
+			if not t.hasrun:
+				return Task.ASK_LATER
+
+		return ret
+
+++ /dev/null
-#! /usr/bin/env python
-# Thomas Nagy, 2011
-
-# Try to cancel the tasks that cannot run with the option -k when an error occurs:
-# 1 direct file dependencies
-# 2 tasks listed in the before/after/ext_in/ext_out attributes
-
-from waflib import Task, Runner
-
-Task.CANCELED = 4
-
-def cancel_next(self, tsk):
- if not isinstance(tsk, Task.TaskBase):
- return
- if tsk.hasrun >= Task.SKIPPED:
- # normal execution, no need to do anything here
- return
-
- try:
- canceled_tasks, canceled_nodes = self.canceled_tasks, self.canceled_nodes
- except AttributeError:
- canceled_tasks = self.canceled_tasks = set([])
- canceled_nodes = self.canceled_nodes = set([])
-
- try:
- canceled_nodes.update(tsk.outputs)
- except AttributeError:
- pass
-
- try:
- canceled_tasks.add(tsk)
- except AttributeError:
- pass
-
-def get_out(self):
- tsk = self.out.get()
- if not self.stop:
- self.add_more_tasks(tsk)
- self.count -= 1
- self.dirty = True
- self.cancel_next(tsk) # new code
-
-def error_handler(self, tsk):
- if not self.bld.keep:
- self.stop = True
- self.error.append(tsk)
- self.cancel_next(tsk) # new code
-
-Runner.Parallel.cancel_next = cancel_next
-Runner.Parallel.get_out = get_out
-Runner.Parallel.error_handler = error_handler
-
-def get_next_task(self):
- tsk = self.get_next_task_smart_continue()
- if not tsk:
- return tsk
-
- try:
- canceled_tasks, canceled_nodes = self.canceled_tasks, self.canceled_nodes
- except AttributeError:
- pass
- else:
- # look in the tasks that this one is waiting on
- # if one of them was canceled, cancel this one too
- for x in tsk.run_after:
- if x in canceled_tasks:
- tsk.hasrun = Task.CANCELED
- self.cancel_next(tsk)
- break
- else:
- # so far so good, now consider the nodes
- for x in getattr(tsk, 'inputs', []) + getattr(tsk, 'deps', []):
- if x in canceled_nodes:
- tsk.hasrun = Task.CANCELED
- self.cancel_next(tsk)
- break
- return tsk
-
-Runner.Parallel.get_next_task_smart_continue = Runner.Parallel.get_next_task
-Runner.Parallel.get_next_task = get_next_task
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# per rosengren 2011
+
+from waflib.TaskGen import feature, after_method
+from waflib.Task import Task, always_run
+from os.path import basename, isabs
+from os import tmpfile, linesep
+
+def options(opt):
+ grp = opt.add_option_group('Softlink Libraries Options')
+ grp.add_option('--exclude', default='/usr/lib,/lib', help='No symbolic links are created for libs within [%default]')
+
+def configure(cnf):
+ cnf.find_program('ldd')
+ if not cnf.env.SOFTLINK_EXCLUDE:
+ cnf.env.SOFTLINK_EXCLUDE = cnf.options.exclude.split(',')
+
+@feature('softlink_libs')
+@after_method('process_rule')
+def add_finder(self):
+ tgt = self.path.find_or_declare(self.target)
+ self.create_task('sll_finder', tgt=tgt)
+ self.create_task('sll_installer', tgt=tgt)
+ always_run(sll_installer)
+
+class sll_finder(Task):
+ ext_out = 'softlink_libs'
+ def run(self):
+ bld = self.generator.bld
+ linked=[]
+ target_paths = []
+ for g in bld.groups:
+ for tgen in g:
+ # FIXME it might be better to check if there is a link_task (getattr?)
+ target_paths += [tgen.path.get_bld().bldpath()]
+ linked += [t.outputs[0].bldpath()
+ for t in getattr(tgen, 'tasks', [])
+ if t.__class__.__name__ in
+ ['cprogram', 'cshlib', 'cxxprogram', 'cxxshlib']]
+ lib_list = []
+ if len(linked):
+ cmd = [self.env.LDD] + linked
+ # FIXME add DYLD_LIBRARY_PATH+PATH for osx+win32
+ ldd_env = {'LD_LIBRARY_PATH': ':'.join(target_paths + self.env.LIBPATH)}
+ # FIXME the with syntax will not work in python 2
+ with tmpfile() as result:
+ self.exec_command(cmd, env=ldd_env, stdout=result)
+ result.seek(0)
+ for line in result.readlines():
+ words = line.split()
+ if len(words) < 3 or words[1] != '=>':
+ continue
+ lib = words[2]
+ if lib == 'not':
+ continue
+ if any([lib.startswith(p) for p in
+ [bld.bldnode.abspath(), '('] +
+ self.env.SOFTLINK_EXCLUDE]):
+ continue
+ if not isabs(lib):
+ continue
+ lib_list.append(lib)
+ lib_list = sorted(set(lib_list))
+ self.outputs[0].write(linesep.join(lib_list + self.env.DYNAMIC_LIBS))
+ return 0
+
+class sll_installer(Task):
+	"""Install the recorded library list and create a ${LIBDIR} symlink
+	for every library path listed in the input file."""
+	ext_in = 'softlink_libs'
+	def run(self):
+		tgt = self.outputs[0]
+		self.generator.bld.install_files('${LIBDIR}', tgt, postpone=False)
+		lib_list=tgt.read().split()
+		for lib in lib_list:
+			self.generator.bld.symlink_as('${LIBDIR}/'+basename(lib), lib, postpone=False)
+		return 0
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#! /usr/bin/env python
# encoding: UTF-8
# Thomas Nagy, 2006-2015 (ita)
Of course, it will only work if there are no dynamically generated
nodes/tasks, in which case the method will have to be modified
to exclude some folders for example.
+
+Make sure to set bld.post_mode = waflib.Build.POST_AT_ONCE
"""
from waflib import Logs, Build
else:
if not node in nodes:
if can_delete(node):
- Logs.warn("Removing stale file -> %s" % node.abspath())
+ Logs.warn('Removing stale file -> %r', node)
node.delete()
old = Parallel.refill_task_list
self.stale_done = True
# this does not work in partial builds
- if hasattr(bld, 'options') and bld.options.targets and bld.options.targets != '*':
+ if bld.targets != '*':
return iit
# this does not work in dynamic builds
- if not hasattr(bld, 'post_mode') or bld.post_mode == Build.POST_LAZY:
+ if getattr(bld, 'post_mode') == Build.POST_AT_ONCE:
return iit
# obtain the nodes to use during the build
nodes = []
- for i in range(len(bld.groups)):
- tasks = bld.get_tasks_group(i)
+ for tasks in bld.groups:
for x in tasks:
try:
nodes.extend(x.outputs)
- except:
+ except AttributeError:
pass
stale_rec(bld.bldnode, nodes)
return iit
Parallel.refill_task_list = refill_task_list
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2015 (ita)
@task_method
def exec_command(self, cmd, **kw):
bld = self.generator.bld
- try:
- if not kw.get('cwd', None):
- kw['cwd'] = bld.cwd
- except AttributeError:
- bld.cwd = kw['cwd'] = bld.variant_dir
+ if not 'cwd' in kw:
+ kw['cwd'] = self.get_cwd()
args = self.get_strace_args()
fname = self.get_strace_file()
except OSError:
pass
+ if not isinstance(cwd, str):
+ cwd = cwd.abspath()
+
nodes = []
bld = self.generator.bld
try:
pid_to_cwd = {}
global BANNED
- done = set([])
+ done = set()
for m in re.finditer(re_lines, cnt):
# scraping the output of strace
pid = m.group('pid')
# record the dependencies then force the task signature recalculation for next time
if Logs.verbose:
- Logs.debug('deps: real scanner for %s returned %s' % (str(self), str(nodes)))
+ Logs.debug('deps: real scanner for %r returned %r', self, nodes)
bld = self.generator.bld
bld.node_deps[self.uid()] = nodes
bld.raw_deps[self.uid()] = []
except AttributeError:
pass
self.signature()
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#! /usr/bin/env python
# encoding: UTF-8
# Petar Forai
import re
from waflib import Task, Logs
-from waflib.TaskGen import extension
+from waflib.TaskGen import extension, feature, after_method
from waflib.Configure import conf
from waflib.Tools import c_preproc
lst_src = []
seen = []
+ missing = []
to_see = [self.inputs[0]]
while to_see:
to_see.append(u)
break
else:
- Logs.warn('could not find %r' % n)
-
- return (lst_src, [])
+ missing.append(n)
+ return (lst_src, missing)
# provide additional language processing
swig_langs = {}
def swigf(fun):
swig_langs[fun.__name__.replace('swig_', '')] = fun
+ return fun
swig.swigf = swigf
def swig_c(self):
c_tsk.set_run_after(self)
ge = self.generator.bld.producer
- ge.outstanding.insert(0, c_tsk)
+ ge.outstanding.append(c_tsk)
ge.total += 1
try:
pass
else:
ltask.set_run_after(c_tsk)
+ # setting input nodes does not declare the build order
+ # because the build already started
ltask.inputs.append(c_tsk.outputs[0])
+ # set the build order after the build started:
+ ge.revdeps[c_tsk].add(ltask)
self.outputs.append(out_node)
outdir.mkdir()
tsk.outdir = outdir
+@feature('c', 'cxx', 'd', 'fc', 'asm')
+@after_method('apply_link', 'process_source')
+def enforce_swig_before_link(self):
+ try:
+ link_task = self.link_task
+ except AttributeError:
+ pass
+ else:
+ for x in self.tasks:
+ if x.__class__.__name__ == 'swig':
+ link_task.run_after.add(x)
+
@conf
-def check_swig_version(self):
- """Returns a tuple representing the swig version, like (1,3,28)"""
+def check_swig_version(conf, minver=None):
+ """
+ Check if the swig tool is found matching a given minimum version.
+ minver should be a tuple, eg. to check for swig >= 1.3.28 pass (1,3,28) as minver.
+
+ If successful, SWIG_VERSION is defined as 'MAJOR.MINOR'
+ (eg. '1.3') of the actual swig version found.
+
+ :param minver: minimum version
+ :type minver: tuple of int
+ :return: swig version
+ :rtype: tuple of int
+ """
+ assert minver is None or isinstance(minver, tuple)
+ swigbin = conf.env['SWIG']
+ if not swigbin:
+ conf.fatal('could not find the swig executable')
+
+ # Get swig version string
+ cmd = swigbin + ['-version']
+ Logs.debug('swig: Running swig command %r', cmd)
reg_swig = re.compile(r'SWIG Version\s(.*)', re.M)
- swig_out = self.cmd_and_log(self.env.SWIG + ['-version'])
+ swig_out = conf.cmd_and_log(cmd)
+ swigver_tuple = tuple([int(s) for s in reg_swig.findall(swig_out)[0].split('.')])
+
+ # Compare swig version with the minimum required
+ result = (minver is None) or (swigver_tuple >= minver)
- swigver = tuple([int(s) for s in reg_swig.findall(swig_out)[0].split('.')])
- self.env['SWIG_VERSION'] = swigver
- msg = 'Checking for swig version'
- self.msg(msg, '.'.join(map(str, swigver)))
- return swigver
+ if result:
+ # Define useful environment variables
+ swigver = '.'.join([str(x) for x in swigver_tuple[:2]])
+ conf.env['SWIG_VERSION'] = swigver
+
+ # Feedback
+ swigver_full = '.'.join(map(str, swigver_tuple[:3]))
+ if minver is None:
+ conf.msg('Checking for swig version', swigver_full)
+ else:
+ minver_str = '.'.join(map(str, minver))
+ conf.msg('Checking for swig version >= %s' % (minver_str,), swigver_full, color=result and 'GREEN' or 'YELLOW')
+
+ if not result:
+ conf.fatal('The swig version is too old, expecting %r' % (minver,))
+
+ return swigver_tuple
def configure(conf):
conf.find_program('swig', var='SWIG')
conf.env.SWIGPATH_ST = '-I%s'
conf.env.SWIGDEF_ST = '-D%s'
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#! /usr/bin/env python
# encoding: utf-8
"""
-this tool supports the export_symbols_regex to export the symbols in a shared library.
+This tool supports the export_symbols_regex to export the symbols in a shared library.
by default, all symbols are exported by gcc, and nothing by msvc.
to use the tool, do something like:
only the symbols starting with 'mylib_' will be exported.
"""
-import os
import re
from waflib.Context import STDOUT
from waflib.Task import Task
def run(self):
obj = self.inputs[0]
kw = {}
- if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
- re_nm = re.compile(r'External\s+\|\s+_(' + self.generator.export_symbols_regex + r')\b')
+ reg = getattr(self.generator, 'export_symbols_regex', '.+?')
+ if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
+ re_nm = re.compile(r'External\s+\|\s+_(?P<symbol>%s)\b' % reg)
cmd = (self.env.DUMPBIN or ['dumpbin']) + ['/symbols', obj.abspath()]
-
- # Dumpbin requires custom environment sniffed out by msvc.py earlier
- if self.env['PATH']:
- env = dict(self.env.env or os.environ)
- env.update(PATH = os.pathsep.join(self.env['PATH']))
- kw['env'] = env
-
else:
if self.env.DEST_BINFMT == 'pe': #gcc uses nm, and has a preceding _ on windows
- re_nm = re.compile(r'T\s+_(' + self.generator.export_symbols_regex + r')\b')
+ re_nm = re.compile(r'(T|D)\s+_(?P<symbol>%s)\b' % reg)
elif self.env.DEST_BINFMT=='mac-o':
- re_nm=re.compile(r'T\s+(_?'+self.generator.export_symbols_regex+r')\b')
+ re_nm=re.compile(r'(T|D)\s+(?P<symbol>_?%s)\b' % reg)
else:
- re_nm = re.compile(r'T\s+(' + self.generator.export_symbols_regex + r')\b')
- cmd = [self.env.NM[0] or 'nm', '-g', obj.abspath()]
- syms = re_nm.findall(self.generator.bld.cmd_and_log(cmd, quiet=STDOUT, **kw))
+ re_nm = re.compile(r'(T|D)\s+(?P<symbol>%s)\b' % reg)
+ cmd = (self.env.NM or ['nm']) + ['-g', obj.abspath()]
+ syms = [m.group('symbol') for m in re_nm.finditer(self.generator.bld.cmd_and_log(cmd, quiet=STDOUT, **kw))]
self.outputs[0].write('%r' % syms)
class compile_sym(Task):
raise WafError('NotImplemented')
@feature('syms')
-@after_method('process_source', 'process_use', 'apply_link', 'process_uselib_local')
+@after_method('process_source', 'process_use', 'apply_link', 'process_uselib_local', 'propagate_uselib_vars')
def do_the_symbol_stuff(self):
- ins = [x.outputs[0] for x in self.compiled_tasks]
- self.gen_sym_tasks = [self.create_task('gen_sym', x, x.change_ext('.%d.sym' % self.idx)) for x in ins]
+ def_node = self.path.find_or_declare(getattr(self, 'sym_file', self.target + '.def'))
+ compiled_tasks = getattr(self, 'compiled_tasks', None)
+ if compiled_tasks:
+ ins = [x.outputs[0] for x in compiled_tasks]
+ self.gen_sym_tasks = [self.create_task('gen_sym', x, x.change_ext('.%d.sym' % self.idx)) for x in ins]
+ self.create_task('compile_sym', [x.outputs[0] for x in self.gen_sym_tasks], def_node)
+
+ link_task = getattr(self, 'link_task', None)
+ if link_task:
+ self.link_task.dep_nodes.append(def_node)
+
+ if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
+ self.link_task.env.append_value('LINKFLAGS', ['/def:' + def_node.bldpath()])
+ elif self.env.DEST_BINFMT == 'pe':
+ # gcc on windows takes *.def as an additional input
+ self.link_task.inputs.append(def_node)
+ elif self.env.DEST_BINFMT == 'elf':
+ self.link_task.env.append_value('LINKFLAGS', ['-Wl,-version-script', '-Wl,' + def_node.bldpath()])
+ elif self.env.DEST_BINFMT=='mac-o':
+ self.link_task.env.append_value('LINKFLAGS',['-Wl,-exported_symbols_list,' + def_node.bldpath()])
+ else:
+ raise WafError('NotImplemented')
- tsk = self.create_task('compile_sym',
- [x.outputs[0] for x in self.gen_sym_tasks],
- self.path.find_or_declare(getattr(self, 'sym_filename', self.target + '.def')))
- self.link_task.set_run_after(tsk)
- self.link_task.dep_nodes.append(tsk.outputs[0])
- if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
- self.link_task.env.append_value('LINKFLAGS', ['/def:' + tsk.outputs[0].bldpath()])
- elif self.env.DEST_BINFMT == 'pe': #gcc on windows takes *.def as an additional input
- self.link_task.inputs.append(tsk.outputs[0])
- elif self.env.DEST_BINFMT == 'elf':
- self.link_task.env.append_value('LINKFLAGS', ['-Wl,-version-script', '-Wl,' + tsk.outputs[0].bldpath()])
- elif self.env.DEST_BINFMT=='mac-o':
- self.link_task.env.append_value('LINKFLAGS',['-Wl,-exported_symbols_list,'+tsk.outputs[0].bldpath()])
- else:
- raise WafError('NotImplemented')
+++ /dev/null
-#! /usr/bin/env python
-# encoding: utf-8
-
-"""
-This tool is obsolete, the sync_exec feature is now the default
-"""
-
-pass
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+
+# Texas Instruments code generator support (experimental)
+# When reporting issues, please directly assign the bug to the maintainer.
+
+__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
+__copyright__ = "Jérôme Carretero, 2012"
+
+"""
+TI cgt6x is a compiler suite for TI DSPs.
+
+The toolchain does pretty weird things, and I'm sure I'm missing some of them.
+But still, the tool saves time.
+
+What this tool does is:
+
+- create a TI compiler environment
+- create TI compiler features, to handle some specifics about this compiler
+ It has a few idiosyncrasies, such as not giving the liberty of the .o file names
+- automatically activate them when using the TI compiler
+- handle the tconf tool
+ The tool
+
+TODO:
+
+- the set_platform_flags() function is not nice
+- more tests
+- broaden tool scope, if needed
+
+"""
+
+import os, re
+
+from waflib import Options, Utils, Task, TaskGen
+from waflib.Tools import c, ccroot, c_preproc
+from waflib.Configure import conf
+from waflib.TaskGen import feature, before_method
+from waflib.Tools.c import cprogram
+
+opj = os.path.join
+
+@conf
+def find_ticc(conf):
+ conf.find_program(['cl6x'], var='CC', path_list=opj(getattr(Options.options, 'ti-cgt-dir', ""), 'bin'))
+ conf.env.CC_NAME = 'ticc'
+
+@conf
+def find_tild(conf):
+ conf.find_program(['lnk6x'], var='LINK_CC', path_list=opj(getattr(Options.options, 'ti-cgt-dir', ""), 'bin'))
+ conf.env.LINK_CC_NAME = 'tild'
+
+@conf
+def find_tiar(conf):
+ conf.find_program(['ar6x'], var='AR', path_list=opj(getattr(Options.options, 'ti-cgt-dir', ""), 'bin'))
+ conf.env.AR_NAME = 'tiar'
+ conf.env.ARFLAGS = 'qru'
+
+@conf
+def ticc_common_flags(conf):
+ v = conf.env
+
+ if not v['LINK_CC']:
+ v['LINK_CC'] = v['CC']
+ v['CCLNK_SRC_F'] = []
+ v['CCLNK_TGT_F'] = ['-o']
+ v['CPPPATH_ST'] = '-I%s'
+ v['DEFINES_ST'] = '-d%s'
+
+ v['LIB_ST'] = '-l%s' # template for adding libs
+ v['LIBPATH_ST'] = '-i%s' # template for adding libpaths
+ v['STLIB_ST'] = '-l=%s.lib'
+ v['STLIBPATH_ST'] = '-i%s'
+
+ # program
+ v['cprogram_PATTERN'] = '%s.out'
+
+ # static lib
+ #v['LINKFLAGS_cstlib'] = ['-Wl,-Bstatic']
+ v['cstlib_PATTERN'] = '%s.lib'
+
+def configure(conf):
+ v = conf.env
+ v.TI_CGT_DIR = getattr(Options.options, 'ti-cgt-dir', "")
+ v.TI_DSPLINK_DIR = getattr(Options.options, 'ti-dsplink-dir', "")
+ v.TI_BIOSUTILS_DIR = getattr(Options.options, 'ti-biosutils-dir', "")
+ v.TI_DSPBIOS_DIR = getattr(Options.options, 'ti-dspbios-dir', "")
+ v.TI_XDCTOOLS_DIR = getattr(Options.options, 'ti-xdctools-dir', "")
+ conf.find_ticc()
+ conf.find_tiar()
+ conf.find_tild()
+ conf.ticc_common_flags()
+ conf.cc_load_tools()
+ conf.cc_add_flags()
+ conf.link_add_flags()
+ conf.find_program(['tconf'], var='TCONF', path_list=v.TI_XDCTOOLS_DIR)
+
+ conf.env.TCONF_INCLUDES += [
+ opj(conf.env.TI_DSPBIOS_DIR, 'packages'),
+ ]
+
+ conf.env.INCLUDES += [
+ opj(conf.env.TI_CGT_DIR, 'include'),
+ ]
+
+ conf.env.LIBPATH += [
+ opj(conf.env.TI_CGT_DIR, "lib"),
+ ]
+
+ conf.env.INCLUDES_DSPBIOS += [
+ opj(conf.env.TI_DSPBIOS_DIR, 'packages', 'ti', 'bios', 'include'),
+ ]
+
+ conf.env.LIBPATH_DSPBIOS += [
+ opj(conf.env.TI_DSPBIOS_DIR, 'packages', 'ti', 'bios', 'lib'),
+ ]
+
+ conf.env.INCLUDES_DSPLINK += [
+ opj(conf.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc'),
+ ]
+
+@conf
+def ti_set_debug(cfg, debug=1):
+ """
+ Sets debug flags for the compiler.
+
+ TODO:
+ - for each TI CFLAG/INCLUDES/LINKFLAGS/LIBPATH replace RELEASE by DEBUG
+ - -g --no_compress
+ """
+ if debug:
+ cfg.env.CFLAGS += "-d_DEBUG -dDEBUG -dDDSP_DEBUG".split()
+
+@conf
+def ti_dsplink_set_platform_flags(cfg, splat, dsp, dspbios_ver, board):
+ """
+ Sets the INCLUDES, LINKFLAGS for DSPLINK and TCONF_INCLUDES
+ for the specific hardware.
+
+ Assumes that DSPLINK was built in its own folder.
+
+ :param splat: short platform name (eg. OMAPL138)
+ :param dsp: DSP name (eg. 674X)
+ :param dspbios_ver: string identifying DspBios version (eg. 5.XX)
+ :param board: board name (eg. OMAPL138GEM)
+
+ """
+ d1 = opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc', 'DspBios', dspbios_ver)
+ d = opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc', 'DspBios', dspbios_ver, board)
+ cfg.env.TCONF_INCLUDES += [d1, d]
+ cfg.env.INCLUDES_DSPLINK += [
+ opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc', dsp),
+ d,
+ ]
+
+ cfg.env.LINKFLAGS_DSPLINK += [
+ opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'export', 'BIN', 'DspBios', splat, board+'_0', 'RELEASE', 'dsplink%s.lib' % x)
+ for x in ('', 'pool', 'mpcs', 'mplist', 'msg', 'data', 'notify', 'ringio')
+ ]
+
+
+def options(opt):
+ opt.add_option('--with-ti-cgt', type='string', dest='ti-cgt-dir', help = 'Specify alternate cgt root folder', default="")
+ opt.add_option('--with-ti-biosutils', type='string', dest='ti-biosutils-dir', help = 'Specify alternate biosutils folder', default="")
+ opt.add_option('--with-ti-dspbios', type='string', dest='ti-dspbios-dir', help = 'Specify alternate dspbios folder', default="")
+ opt.add_option('--with-ti-dsplink', type='string', dest='ti-dsplink-dir', help = 'Specify alternate dsplink folder', default="")
+ opt.add_option('--with-ti-xdctools', type='string', dest='ti-xdctools-dir', help = 'Specify alternate xdctools folder', default="")
+
+class ti_cprogram(cprogram):
+ """
+ Link object files into a c program
+
+ Changes:
+
+ - the linked executable to have a relative path (because we can)
+ - put the LIBPATH first
+ """
+ run_str = '${LINK_CC} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].bldpath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} '
+
+@feature("c")
+@before_method('apply_link')
+def use_ti_cprogram(self):
+ """
+ Automatically uses ti_cprogram link process
+ """
+ if 'cprogram' in self.features and self.env.CC_NAME == 'ticc':
+ self.features.insert(0, "ti_cprogram")
+
+class ti_c(Task.Task):
+ """
+ Compile task for the TI codegen compiler
+
+ This compiler does not allow specifying the output file name, only the output path.
+
+ """
+ "Compile C files into object files"
+ run_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${SRC} -c ${OUT} ${CPPFLAGS}'
+ vars = ['CCDEPS'] # unused variable to depend on, just in case
+ ext_in = ['.h'] # set the build order easily by using ext_out=['.h']
+ scan = c_preproc.scan
+
+def create_compiled_task(self, name, node):
+ """
+ Overrides ccroot.create_compiled_task to support ti_c
+ """
+ out = '%s' % (node.change_ext('.obj').name)
+ if self.env.CC_NAME == 'ticc':
+ name = 'ti_c'
+ task = self.create_task(name, node, node.parent.find_or_declare(out))
+ self.env.OUT = '-fr%s' % (node.parent.get_bld().abspath())
+ try:
+ self.compiled_tasks.append(task)
+ except AttributeError:
+ self.compiled_tasks = [task]
+ return task
+
+@TaskGen.extension('.c')
+def c_hook(self, node):
+ "Bind the c file extension to the creation of a :py:class:`waflib.Tools.c.c` instance"
+ if self.env.CC_NAME == 'ticc':
+ return create_compiled_task(self, 'ti_c', node)
+ else:
+ return self.create_compiled_task('c', node)
+
+
+@feature("ti-tconf")
+@before_method('process_source')
+def apply_tconf(self):
+ sources = [x.get_src() for x in self.to_nodes(self.source, path=self.path.get_src())]
+ node = sources[0]
+ assert(sources[0].name.endswith(".tcf"))
+ if len(sources) > 1:
+ assert(sources[1].name.endswith(".cmd"))
+
+ target = getattr(self, 'target', self.source)
+ target_node = node.get_bld().parent.find_or_declare(node.name)
+
+ procid = "%d" % int(getattr(self, 'procid', 0))
+
+ importpaths = []
+ includes = Utils.to_list(getattr(self, 'includes', []))
+ for x in includes + self.env.TCONF_INCLUDES:
+ if x == os.path.abspath(x):
+ importpaths.append(x)
+ else:
+ relpath = self.path.find_node(x).path_from(target_node.parent)
+ importpaths.append(relpath)
+
+ task = self.create_task('ti_tconf', sources, target_node.change_ext('.cdb'))
+ task.path = self.path
+ task.includes = includes
+ task.cwd = target_node.parent.abspath()
+ task.env = self.env.derive()
+ task.env["TCONFSRC"] = node.path_from(target_node.parent)
+ task.env["TCONFINC"] = '-Dconfig.importPath=%s' % ";".join(importpaths)
+ task.env['TCONFPROGNAME'] = '-Dconfig.programName=%s' % target
+ task.env['PROCID'] = procid
+ task.outputs = [
+ target_node.change_ext("cfg_c.c"),
+ target_node.change_ext("cfg.s62"),
+ target_node.change_ext("cfg.cmd"),
+ ]
+
+ create_compiled_task(self, 'ti_c', task.outputs[1])
+ ctask = create_compiled_task(self, 'ti_c', task.outputs[0])
+ ctask.env = self.env.derive()
+
+ self.add_those_o_files(target_node.change_ext("cfg.cmd"))
+ if len(sources) > 1:
+ self.add_those_o_files(sources[1])
+ self.source = []
+
+re_tconf_include = re.compile(r'(?P<type>utils\.importFile)\("(?P<file>.*)"\)',re.M)
+class ti_tconf(Task.Task):
+ run_str = '${TCONF} ${TCONFINC} ${TCONFPROGNAME} ${TCONFSRC} ${PROCID}'
+ color = 'PINK'
+
+ def scan(self):
+ includes = Utils.to_list(getattr(self, 'includes', []))
+
+ def deps(node):
+ nodes, names = [], []
+ if node:
+ code = Utils.readf(node.abspath())
+ for match in re_tconf_include.finditer(code):
+ path = match.group('file')
+ if path:
+ for x in includes:
+ filename = opj(x, path)
+ fi = self.path.find_resource(filename)
+ if fi:
+ subnodes, subnames = deps(fi)
+ nodes += subnodes
+ names += subnames
+ nodes.append(fi)
+ names.append(path)
+ break
+ return nodes, names
+ return deps(self.inputs[0])
+
+++ /dev/null
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2014 (ita)
-
-"""
-This module enables automatic handling of network paths of the form \\server\share for both input
-and output files. While a typical script may require the following::
-
- import os
- def build(bld):
-
- node = bld.root.make_node('\\\\COMPUTER\\share\\test.txt')
-
- # mark the server/share levels as folders
- k = node.parent
- while k:
- k.cache_isdir = True
- k = k.parent
-
- # clear the file if removed
- if not os.path.isfile(node.abspath()):
- node.sig = None
-
- # create the folder structure
- if node.parent.height() > 2:
- node.parent.mkdir()
-
- # then the task generator
- def myfun(tsk):
- tsk.outputs[0].write("data")
- bld(rule=myfun, source='wscript', target=[nd])
-
-this tool will make the process much easier, for example::
-
- def configure(conf):
- conf.load('unc') # do not import the module directly
-
- def build(bld):
- def myfun(tsk):
- tsk.outputs[0].write("data")
- bld(rule=myfun, update_outputs=True,
- source='wscript',
- target='\\\\COMPUTER\\share\\test.txt')
- bld(rule=myfun, update_outputs=True,
- source='\\\\COMPUTER\\share\\test.txt',
- target='\\\\COMPUTER\\share\\test2.txt')
-"""
-
-import os
-from waflib import Node, Utils, Context
-
-def find_resource(self, lst):
- if isinstance(lst, str):
- lst = [x for x in Node.split_path(lst) if x and x != '.']
-
- if lst[0].startswith('\\\\'):
- if len(lst) < 3:
- return None
- node = self.ctx.root.make_node(lst[0]).make_node(lst[1])
- node.cache_isdir = True
- node.parent.cache_isdir = True
-
- ret = node.search_node(lst[2:])
- if not ret:
- ret = node.find_node(lst[2:])
- if ret and os.path.isdir(ret.abspath()):
- return None
- return ret
-
- return self.find_resource_orig(lst)
-
-def find_or_declare(self, lst):
- if isinstance(lst, str):
- lst = [x for x in Node.split_path(lst) if x and x != '.']
-
- if lst[0].startswith('\\\\'):
- if len(lst) < 3:
- return None
- node = self.ctx.root.make_node(lst[0]).make_node(lst[1])
- node.cache_isdir = True
- node.parent.cache_isdir = True
- ret = node.find_node(lst[2:])
- if not ret:
- ret = node.make_node(lst[2:])
- if not os.path.isfile(ret.abspath()):
- ret.sig = None
- ret.parent.mkdir()
- return ret
-
- return self.find_or_declare_orig(lst)
-
-def abspath(self):
- """For MAX_PATH limitations"""
- ret = self.abspath_orig()
- if not ret.startswith("\\"):
- return "\\\\?\\" + ret
- return ret
-
-if Utils.is_win32:
- Node.Node.find_resource_orig = Node.Node.find_resource
- Node.Node.find_resource = find_resource
-
- Node.Node.find_or_declare_orig = Node.Node.find_or_declare
- Node.Node.find_or_declare = find_or_declare
-
- Node.Node.abspath_orig = Node.Node.abspath
- Node.Node.abspath = abspath
-
- for k in list(Context.cache_modules.keys()):
- Context.cache_modules["\\\\?\\" + k] = Context.cache_modules[k]
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#! /usr/bin/env python
# encoding: utf-8
"""
-Compile whole groups of C/C++ files at once.
+Compile whole groups of C/C++ files at once
+(C and C++ files are processed independently though).
+
+To enable globally::
+
+ def options(opt):
+ opt.load('compiler_cxx')
+ def build(bld):
+ bld.load('compiler_cxx unity')
+
+To enable for specific task generators only::
+
+ def build(bld):
+ bld(features='c cprogram unity', source='main.c', ...)
+
+The file order is often significant in such builds, so it can be
+necessary to adjust the order of source files and the batch sizes.
+To control the amount of files processed in a batch per target
+(the default is 50)::
+
+ def build(bld):
+ bld(features='c cprogram', unity_size=20)
-def build(bld):
- bld.load('compiler_cxx unity')
"""
-import sys
from waflib import Task, Options
from waflib.Tools import c_preproc
from waflib import TaskGen
MAX_BATCH = 50
+EXTS_C = ('.c',)
+EXTS_CXX = ('.cpp','.cc','.cxx','.C','.c++')
+
def options(opt):
global MAX_BATCH
- opt.add_option('--batchsize', action='store', dest='batchsize', type='int', default=MAX_BATCH, help='batch size (0 for no batch)')
+ opt.add_option('--batchsize', action='store', dest='batchsize', type='int', default=MAX_BATCH,
+ help='default unity batch size (0 disables unity builds)')
+
+@TaskGen.taskgen_method
+def batch_size(self):
+ default = getattr(Options.options, 'batchsize', MAX_BATCH)
+ if default < 1:
+ return 0
+ return getattr(self, 'unity_size', default)
+
class unity(Task.Task):
color = 'BLUE'
scan = c_preproc.scan
+ def to_include(self, node):
+ ret = node.path_from(self.outputs[0].parent)
+ ret = ret.replace('\\', '\\\\').replace('"', '\\"')
+ return ret
def run(self):
- lst = ['#include "%s"\n' % node.abspath() for node in self.inputs]
+ lst = ['#include "%s"\n' % self.to_include(node) for node in self.inputs]
txt = ''.join(lst)
self.outputs[0].write(txt)
+ def __str__(self):
+ node = self.outputs[0]
+ return node.path_from(node.ctx.launch_node())
-@TaskGen.taskgen_method
-def batch_size(self):
- return getattr(Options.options, 'batchsize', MAX_BATCH)
-
-def make_batch_fun(ext):
- # this generic code makes this quite unreadable, defining the function two times might have been better
- def make_batch(self, node):
- cnt = self.batch_size()
- if cnt <= 1:
- return self.create_compiled_task(ext, node)
- x = getattr(self, 'master_%s' % ext, None)
- if not x or len(x.inputs) >= cnt:
- x = self.create_task('unity')
- setattr(self, 'master_%s' % ext, x)
-
- cnt_cur = getattr(self, 'cnt_%s' % ext, 0)
- cxxnode = node.parent.find_or_declare('unity_%s_%d_%d.%s' % (self.idx, cnt_cur, cnt, ext))
- x.outputs = [cxxnode]
- setattr(self, 'cnt_%s' % ext, cnt_cur + 1)
- self.create_compiled_task(ext, cxxnode)
- x.inputs.append(node)
- return make_batch
-
-def enable_support(cc, cxx):
- if cxx or not cc:
- TaskGen.extension('.cpp', '.cc', '.cxx', '.C', '.c++')(make_batch_fun('cxx'))
- if cc:
- TaskGen.extension('.c')(make_batch_fun('c'))
- else:
- TaskGen.task_gen.mappings['.c'] = TaskGen.task_gen.mappings['.cpp']
-
-has_c = '.c' in TaskGen.task_gen.mappings or 'waflib.Tools.compiler_c' in sys.modules
-has_cpp = '.cpp' in TaskGen.task_gen.mappings or 'waflib.Tools.compiler_cxx' in sys.modules
-enable_support(has_c, has_cpp) # by default
+def bind_unity(obj, cls_name, exts):
+ if not 'mappings' in obj.__dict__:
+ obj.mappings = dict(obj.mappings)
+
+ for j in exts:
+ fun = obj.mappings[j]
+ if fun.__name__ == 'unity_fun':
+ raise ValueError('Attempt to bind unity mappings multiple times %r' % j)
+
+ def unity_fun(self, node):
+ cnt = self.batch_size()
+ if cnt <= 1:
+ return fun(self, node)
+ x = getattr(self, 'master_%s' % cls_name, None)
+ if not x or len(x.inputs) >= cnt:
+ x = self.create_task('unity')
+ setattr(self, 'master_%s' % cls_name, x)
+
+ cnt_cur = getattr(self, 'cnt_%s' % cls_name, 0)
+ c_node = node.parent.find_or_declare('unity_%s_%d_%d.%s' % (self.idx, cnt_cur, cnt, cls_name))
+ x.outputs = [c_node]
+ setattr(self, 'cnt_%s' % cls_name, cnt_cur + 1)
+ fun(self, c_node)
+ x.inputs.append(node)
+
+ obj.mappings[j] = unity_fun
+
+@TaskGen.feature('unity')
+@TaskGen.before('process_source')
+def single_unity(self):
+ lst = self.to_list(self.features)
+ if 'c' in lst:
+ bind_unity(self, 'c', EXTS_C)
+ if 'cxx' in lst:
+ bind_unity(self, 'cxx', EXTS_CXX)
def build(bld):
- # it is best to do this
- enable_support(bld.env.CC_NAME, bld.env.CXX_NAME)
+ if bld.env.CC_NAME:
+ bind_unity(TaskGen.task_gen, 'c', EXTS_C)
+ if bld.env.CXX_NAME:
+ bind_unity(TaskGen.task_gen, 'cxx', EXTS_CXX)
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# coding=utf-8
# Mathieu Courtois - EDF R&D, 2013 - http://www.code-aster.org
import os.path as osp
import os
+local_repo = ''
+"""Local repository containing additional Waf tools (plugins)"""
+remote_repo = 'https://raw.githubusercontent.com/waf-project/waf/master/'
+"""
+Remote directory containing downloadable waf tools. The missing tools can be downloaded by using::
+
+ $ waf configure --download
+"""
+
+remote_locs = ['waflib/extras', 'waflib/Tools']
+"""
+Remote directories for use with :py:const:`waflib.extras.use_config.remote_repo`
+"""
+
+
try:
from urllib import request
except ImportError:
def download_tool(tool, force=False, ctx=None):
"""
- Download a Waf tool from the remote repository defined in :py:const:`waflib.Context.remote_repo`::
+ Download a Waf tool from the remote repository defined in :py:const:`waflib.extras.use_config.remote_repo`::
$ waf configure --download
"""
- for x in Utils.to_list(Context.remote_repo):
- for sub in Utils.to_list(Context.remote_locs):
+ for x in Utils.to_list(remote_repo):
+ for sub in Utils.to_list(remote_locs):
url = '/'.join((x, sub, tool + '.py'))
try:
web = urlopen(url)
else:
tmp = ctx.root.make_node(os.sep.join((Context.waf_dir, 'waflib', 'extras', tool + '.py')))
tmp.write(web.read(), 'wb')
- Logs.warn('Downloaded %s from %s' % (tool, url))
+ Logs.warn('Downloaded %s from %s', tool, url)
download_check(tmp)
try:
module = Context.load_tool(tool)
except Exception:
- Logs.warn('The tool %s from %s is unusable' % (tool, url))
+ Logs.warn('The tool %s from %s is unusable', tool, url)
try:
tmp.delete()
except Exception:
try:
module = Context.load_tool_default(tool, tooldir, ctx, with_sys_path)
except ImportError as e:
+ if not ctx or not hasattr(Options.options, 'download'):
+ Logs.error('Could not load %r during options phase (download unavailable at this point)' % tool)
+ raise
if Options.options.download:
module = download_tool(tool, ctx=ctx)
if not module:
self.load(cfg, tooldir=tooldir, **kwargs)
self.start_msg('Checking for configuration')
self.end_msg(use_cfg)
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: UTF-8
+# Nicolas Joseph 2009
+
+"""
+ported from waf 1.5:
+TODO: tabs vs spaces
+"""
+
+from waflib import Task, Utils, Errors, Logs
+from waflib.TaskGen import feature
+
+VALADOC_STR = '${VALADOC}'
+
+class valadoc(Task.Task):
+ vars = ['VALADOC', 'VALADOCFLAGS']
+ color = 'BLUE'
+ after = ['cprogram', 'cstlib', 'cshlib', 'cxxprogram', 'cxxstlib', 'cxxshlib']
+ quiet = True # no outputs .. this is weird
+
+ def __init__(self, *k, **kw):
+ Task.Task.__init__(self, *k, **kw)
+ self.output_dir = ''
+ self.doclet = ''
+ self.package_name = ''
+ self.package_version = ''
+ self.files = []
+ self.vapi_dirs = []
+ self.protected = True
+ self.private = False
+ self.inherit = False
+ self.deps = False
+ self.vala_defines = []
+ self.vala_target_glib = None
+ self.enable_non_null_experimental = False
+ self.force = False
+
+ def run(self):
+ if not self.env['VALADOCFLAGS']:
+ self.env['VALADOCFLAGS'] = ''
+ cmd = [Utils.subst_vars(VALADOC_STR, self.env)]
+ cmd.append ('-o %s' % self.output_dir)
+ if getattr(self, 'doclet', None):
+ cmd.append ('--doclet %s' % self.doclet)
+ cmd.append ('--package-name %s' % self.package_name)
+ if getattr(self, 'package_version', None):
+ cmd.append ('--package-version %s' % self.package_version)
+ if getattr(self, 'packages', None):
+ for package in self.packages:
+ cmd.append ('--pkg %s' % package)
+ if getattr(self, 'vapi_dirs', None):
+ for vapi_dir in self.vapi_dirs:
+ cmd.append ('--vapidir %s' % vapi_dir)
+ if not getattr(self, 'protected', None):
+ cmd.append ('--no-protected')
+ if getattr(self, 'private', None):
+ cmd.append ('--private')
+ if getattr(self, 'inherit', None):
+ cmd.append ('--inherit')
+ if getattr(self, 'deps', None):
+ cmd.append ('--deps')
+ if getattr(self, 'vala_defines', None):
+ for define in self.vala_defines:
+ cmd.append ('--define %s' % define)
+ if getattr(self, 'vala_target_glib', None):
+ cmd.append ('--target-glib=%s' % self.vala_target_glib)
+ if getattr(self, 'enable_non_null_experimental', None):
+ cmd.append ('--enable-non-null-experimental')
+ if getattr(self, 'force', None):
+ cmd.append ('--force')
+ cmd.append (' '.join ([x.abspath() for x in self.files]))
+ return self.generator.bld.exec_command(' '.join(cmd))
+
+@feature('valadoc')
+def process_valadoc(self):
+ """
+ Generate API documentation from Vala source code with valadoc
+
+ doc = bld(
+ features = 'valadoc',
+ output_dir = '../doc/html',
+ package_name = 'vala-gtk-example',
+ package_version = '1.0.0',
+ packages = 'gtk+-2.0',
+ vapi_dirs = '../vapi',
+ force = True
+ )
+
+ path = bld.path.find_dir ('../src')
+ doc.files = path.ant_glob (incl='**/*.vala')
+ """
+
+ task = self.create_task('valadoc')
+ if getattr(self, 'output_dir', None):
+ task.output_dir = self.path.find_or_declare(self.output_dir).abspath()
+ else:
+ Errors.WafError('no output directory')
+ if getattr(self, 'doclet', None):
+ task.doclet = self.doclet
+ else:
+ Errors.WafError('no doclet directory')
+ if getattr(self, 'package_name', None):
+ task.package_name = self.package_name
+ else:
+ Errors.WafError('no package name')
+ if getattr(self, 'package_version', None):
+ task.package_version = self.package_version
+ if getattr(self, 'packages', None):
+ task.packages = Utils.to_list(self.packages)
+ if getattr(self, 'vapi_dirs', None):
+ vapi_dirs = Utils.to_list(self.vapi_dirs)
+ for vapi_dir in vapi_dirs:
+ try:
+ task.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath())
+ except AttributeError:
+ Logs.warn('Unable to locate Vala API directory: %r', vapi_dir)
+ if getattr(self, 'files', None):
+ task.files = self.files
+ else:
+ Errors.WafError('no input file')
+ if getattr(self, 'protected', None):
+ task.protected = self.protected
+ if getattr(self, 'private', None):
+ task.private = self.private
+ if getattr(self, 'inherit', None):
+ task.inherit = self.inherit
+ if getattr(self, 'deps', None):
+ task.deps = self.deps
+ if getattr(self, 'vala_defines', None):
+ task.vala_defines = Utils.to_list(self.vala_defines)
+ if getattr(self, 'vala_target_glib', None):
+ task.vala_target_glib = self.vala_target_glib
+ if getattr(self, 'enable_non_null_experimental', None):
+ task.enable_non_null_experimental = self.enable_non_null_experimental
+ if getattr(self, 'force', None):
+ task.force = self.force
+
+def configure(conf):
+ conf.find_program('valadoc', errmsg='You must install valadoc <http://live.gnome.org/Valadoc> for generate the API documentation')
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2010 (ita)
def signature(self):
# compute the result one time, and suppose the scan_signature will give the good result
- try: return self.cache_sig
- except AttributeError: pass
+ try:
+ return self.cache_sig
+ except AttributeError:
+ pass
self.m = Utils.md5()
self.m.update(self.hcode)
def v(x):
return Utils.to_hex(x)
- Logs.debug("Task %r" % self)
+ Logs.debug('Task %r', self)
msgs = ['* Implicit or scanner dependency', '* Task code', '* Source file, explicit or manual dependency', '* Configuration data variable']
tmp = 'task: -> %s: %s %s'
for x in range(len(msgs)):
a = new_sigs[x*l : (x+1)*l]
b = old_sigs[x*l : (x+1)*l]
if (a != b):
- Logs.debug(tmp % (msgs[x].ljust(35), v(a), v(b)))
+ Logs.debug(tmp, msgs[x].ljust(35), v(a), v(b))
return ret
Task.Task.runnable_status = runnable_status
+
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#! /usr/bin/env python
# encoding: utf-8
"""
Windows-specific optimizations
-This module can help reducing the overhead of listing files on windows (more than 10000 files).
+This module can help reducing the overhead of listing files on windows
+(more than 10000 files). Python 3.5 already provides the listdir
+optimization though.
"""
import os
TP = '%s\\*'
if Utils.is_win32:
- from waflib.extras import md5_tstamp
+ from waflib.Tools import md5_tstamp
import ctypes, ctypes.wintypes
FindFirstFile = ctypes.windll.kernel32.FindFirstFileW
pass
except AttributeError:
self.ctx.hash_cache = {}
-
- if not self.is_bld():
- if self.is_child_of(self.ctx.srcnode):
- self.sig = self.cached_hash_file()
- else:
- self.sig = Utils.h_file(self.abspath())
- self.ctx.hash_cache[id(self)] = ret = self.sig
+ self.ctx.hash_cache[id(self)] = ret = Utils.h_file(self.abspath())
return ret
Node.Node.get_bld_sig = get_bld_sig_win32
find = FindFirstFile(TP % curpath, ctypes.byref(findData))
if find == INVALID_HANDLE_VALUE:
- Logs.error("invalid win32 handle isfile_cached %r" % self.abspath())
+ Logs.error("invalid win32 handle isfile_cached %r", self.abspath())
return os.path.isfile(self.abspath())
try:
if not FindNextFile(find, ctypes.byref(findData)):
break
except Exception as e:
- Logs.error('exception while listing a folder %r %r' % (self.abspath(), e))
+ Logs.error('exception while listing a folder %r %r', self.abspath(), e)
return os.path.isfile(self.abspath())
finally:
FindClose(find)
def find_or_declare_win32(self, lst):
# assuming that "find_or_declare" is called before the build starts, remove the calls to os.path.isfile
if isinstance(lst, str):
- lst = [x for x in Node.split_path(lst) if x and x != '.']
+ lst = [x for x in Utils.split_path(lst) if x and x != '.']
- node = self.get_bld().search(lst)
+ node = self.get_bld().search_node(lst)
if node:
if not node.isfile_cached():
- node.sig = None
try:
node.parent.mkdir()
except OSError:
node = self.find_node(lst)
if node:
if not node.isfile_cached():
- node.sig = None
try:
node.parent.mkdir()
except OSError:
node.parent.mkdir()
return node
Node.Node.find_or_declare = find_or_declare_win32
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/python
+# encoding: utf-8
+# vim: tabstop=4 noexpandtab
+
+"""
+Windows Installer XML Tool (WiX)
+
+.wxs --- candle ---> .wxobj --- light ---> .msi
+
+bld(features='wix', some.wxs, gen='some.msi', candleflags=[..], lightflags=[..])
+
+bld(features='wix', source=['bundle.wxs','WixBalExtension'], gen='setup.exe', candleflags=[..])
+"""
+
+import os, copy
+from waflib import TaskGen
+from waflib import Task
+from waflib.Utils import winreg
+
+class candle(Task.Task):
+	# Compiles a .wxs source into a .wxobj object file.
+	# NOTE(review): the trailing comma makes run_str a 1-tuple; waf's
+	# compile_fun accepts sequences of command strings, so this behaves
+	# the same as the bare string -- confirm against waflib.Task.
+	run_str = '${CANDLE} -nologo ${CANDLEFLAGS} -out ${TGT} ${SRC[0].abspath()}',
+
+class light(Task.Task):
+	# Links .wxobj files into the final installer (.msi / setup .exe).
+	run_str = "${LIGHT} -nologo -b ${SRC[0].parent.abspath()} ${LIGHTFLAGS} -out ${TGT} ${SRC[0].abspath()}"
+
+@TaskGen.feature('wix')
+@TaskGen.before_method('process_source')
+def wix(self):
+	"""
+	Sort the task generator's source strings by extension and create the
+	candle (compile) and light (link) tasks. Extra flags may be passed
+	through the ``candleflags`` and ``lightflags`` attributes.
+	"""
+	#X.wxs -> ${SRC} for CANDLE
+	#X.wxobj -> ${SRC} for LIGHT
+	#X.dll -> -ext X in ${LIGHTFLAGS}
+	#X.wxl -> wixui.wixlib -loc X.wxl in ${LIGHTFLAGS}
+	wxobj = []
+	wxs = []
+	exts = []
+	wxl = []
+	rest = []
+	# Partition source names according to the table above.
+	for x in self.source:
+		if x.endswith('.wxobj'):
+			wxobj.append(x)
+		elif x.endswith('.wxs'):
+			# each .wxs compiles to a sibling .wxobj in the build tree
+			wxobj.append(self.path.find_or_declare(x[:-4]+'.wxobj'))
+			wxs.append(x)
+		elif x.endswith('.dll'):
+			exts.append(x[:-4])
+		elif '.' not in x:
+			# a bare name (e.g. 'WixBalExtension') is an extension name
+			exts.append(x)
+		elif x.endswith('.wxl'):
+			wxl.append(x)
+		else:
+			rest.append(x)
+	self.source = self.to_nodes(rest) #.wxs
+
+	cndl = self.create_task('candle', self.to_nodes(wxs), self.to_nodes(wxobj))
+	lght = self.create_task('light', self.to_nodes(wxobj), self.path.find_or_declare(self.gen))
+
+	# copy, so that the append_value calls below cannot modify lists
+	# owned by the task generator
+	cndl.env.CANDLEFLAGS = copy.copy(getattr(self,'candleflags',[]))
+	lght.env.LIGHTFLAGS = copy.copy(getattr(self,'lightflags',[]))
+
+	for x in wxl:
+		lght.env.append_value('LIGHTFLAGS','wixui.wixlib')
+		lght.env.append_value('LIGHTFLAGS','-loc')
+		lght.env.append_value('LIGHTFLAGS',x)
+	for x in exts:
+		cndl.env.append_value('CANDLEFLAGS','-ext')
+		cndl.env.append_value('CANDLEFLAGS',x)
+		lght.env.append_value('LIGHTFLAGS','-ext')
+		lght.env.append_value('LIGHTFLAGS',x)
+
+#wix_bin_path()
+def wix_bin_path():
+	"""
+	Locate the WiX toolset 'bin' directory through the registry: the last
+	subkey of AssemblyFolders whose name contains 'WiX' wins.
+	"""
+	basekey = r"SOFTWARE\Microsoft\.NETFramework\AssemblyFolders"
+	query = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, basekey)
+	cnt=winreg.QueryInfoKey(query)[0]
+	# hard-coded fallback used only when no 'WiX' subkey exists
+	thiskey = r'C:\Program Files (x86)\WiX Toolset v3.10\SDK'
+	for i in range(cnt-1,-1,-1):
+		thiskey = winreg.EnumKey(query,i)
+		if 'WiX' in thiskey:
+			break
+	winreg.CloseKey(query)
+	# NOTE(review): '..\\bin' is concatenated with no separator -- this
+	# assumes the registry value ends with a backslash; TODO confirm
+	return os.path.normpath(winreg.QueryValue(winreg.HKEY_LOCAL_MACHINE, basekey+r'\\'+thiskey)+'..\\bin')
+
+def configure(ctx):
+	"""Find the 'candle' and 'light' programs inside the WiX install directory."""
+	path_list=[wix_bin_path()]
+	ctx.find_program('candle', var='CANDLE', mandatory=True, path_list = path_list)
+	ctx.find_program('light', var='LIGHT', mandatory=True, path_list = path_list)
+
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# XCode 3/XCode 4/XCode 6/Xcode 7 generator for Waf
+# Based on work by Nicolas Mercier 2011
+# Extended by Simon Warg 2015, https://github.com/mimon
+# XCode project file format based on http://www.monobjc.net/xcode-project-file-format.html
+
+"""
+See playground/xcode6/ for usage examples.
+
+"""
+
+from waflib import Context, TaskGen, Build, Utils, Errors, Logs
+import os, sys
+
+# FIXME too few extensions
+XCODE_EXTS = ['.c', '.cpp', '.m', '.mm']
+
+HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)'
+
+MAP_EXT = {
+ '': "folder",
+ '.h' : "sourcecode.c.h",
+
+ '.hh': "sourcecode.cpp.h",
+ '.inl': "sourcecode.cpp.h",
+ '.hpp': "sourcecode.cpp.h",
+
+ '.c': "sourcecode.c.c",
+
+ '.m': "sourcecode.c.objc",
+
+ '.mm': "sourcecode.cpp.objcpp",
+
+ '.cc': "sourcecode.cpp.cpp",
+
+ '.cpp': "sourcecode.cpp.cpp",
+ '.C': "sourcecode.cpp.cpp",
+ '.cxx': "sourcecode.cpp.cpp",
+ '.c++': "sourcecode.cpp.cpp",
+
+ '.l': "sourcecode.lex", # luthor
+ '.ll': "sourcecode.lex",
+
+ '.y': "sourcecode.yacc",
+ '.yy': "sourcecode.yacc",
+
+ '.plist': "text.plist.xml",
+ ".nib": "wrapper.nib",
+ ".xib": "text.xib",
+}
+
+# Used in PBXNativeTarget elements
+PRODUCT_TYPE_APPLICATION = 'com.apple.product-type.application'
+PRODUCT_TYPE_FRAMEWORK = 'com.apple.product-type.framework'
+PRODUCT_TYPE_EXECUTABLE = 'com.apple.product-type.tool'
+PRODUCT_TYPE_LIB_STATIC = 'com.apple.product-type.library.static'
+PRODUCT_TYPE_LIB_DYNAMIC = 'com.apple.product-type.library.dynamic'
+PRODUCT_TYPE_EXTENSION = 'com.apple.product-type.kernel-extension'
+PRODUCT_TYPE_IOKIT = 'com.apple.product-type.kernel-extension.iokit'
+
+# Used in PBXFileReference elements
+FILE_TYPE_APPLICATION = 'wrapper.cfbundle'
+FILE_TYPE_FRAMEWORK = 'wrapper.framework'
+FILE_TYPE_LIB_DYNAMIC = 'compiled.mach-o.dylib'
+FILE_TYPE_LIB_STATIC = 'archive.ar'
+FILE_TYPE_EXECUTABLE = 'compiled.mach-o.executable'
+
+# Tuple packs of the above
+TARGET_TYPE_FRAMEWORK = (PRODUCT_TYPE_FRAMEWORK, FILE_TYPE_FRAMEWORK, '.framework')
+TARGET_TYPE_APPLICATION = (PRODUCT_TYPE_APPLICATION, FILE_TYPE_APPLICATION, '.app')
+TARGET_TYPE_DYNAMIC_LIB = (PRODUCT_TYPE_LIB_DYNAMIC, FILE_TYPE_LIB_DYNAMIC, '.dylib')
+TARGET_TYPE_STATIC_LIB = (PRODUCT_TYPE_LIB_STATIC, FILE_TYPE_LIB_STATIC, '.a')
+TARGET_TYPE_EXECUTABLE = (PRODUCT_TYPE_EXECUTABLE, FILE_TYPE_EXECUTABLE, '')
+
+# Maps target type string to its data
+TARGET_TYPES = {
+ 'framework': TARGET_TYPE_FRAMEWORK,
+ 'app': TARGET_TYPE_APPLICATION,
+ 'dylib': TARGET_TYPE_DYNAMIC_LIB,
+ 'stlib': TARGET_TYPE_STATIC_LIB,
+ 'exe' :TARGET_TYPE_EXECUTABLE,
+}
+
+def delete_invalid_values(dct):
+	"""Remove, in place, every entry whose value is a dict or a set; return dct."""
+	bad_keys = [key for key, val in dct.items() if isinstance(val, (dict, set))]
+	for key in bad_keys:
+		del dct[key]
+	return dct
+
+"""
+Configuration of the global project settings. Sets an environment variable 'PROJ_CONFIGURATION'
+which is a dictionary of configuration name and buildsettings pair.
+E.g.:
+env.PROJ_CONFIGURATION = {
+ 'Debug': {
+ 'ARCHS': 'x86',
+ ...
+ }
+ 'Release': {
+ 'ARCHS': 'x86_64'
+ ...
+ }
+}
+The user can define a completely customized dictionary in configure() stage. Otherwise a default Debug/Release will be created
+based on env variable
+"""
+def configure(self):
+	"""
+	Ensure env.PROJ_CONFIGURATION exists: warn and build a default
+	Debug/Release pair from the merged environment when the user did not
+	define one, then fill in build settings required by Xcode.
+	"""
+	if not self.env.PROJ_CONFIGURATION:
+		self.to_log("A default project configuration was created since no custom one was given in the configure(conf) stage. Define your custom project settings by adding PROJ_CONFIGURATION to env. The env.PROJ_CONFIGURATION must be a dictionary with at least one key, where each key is the configuration name, and the value is a dictionary of key/value settings.\n")
+
+	# Check for any added config files added by the tool 'c_config'.
+	if 'cfg_files' in self.env:
+		self.env.INCLUDES = Utils.to_list(self.env.INCLUDES) + [os.path.abspath(os.path.dirname(f)) for f in self.env.cfg_files]
+
+	# Create default project configuration?
+	if 'PROJ_CONFIGURATION' not in self.env:
+		defaults = delete_invalid_values(self.env.get_merged_dict())
+		self.env.PROJ_CONFIGURATION = {
+			"Debug": defaults,
+			"Release": defaults,
+		}
+
+	# Some build settings are required to be present by XCode. We will supply default values
+	# if user hasn't defined any.
+	defaults_required = [('PRODUCT_NAME', '$(TARGET_NAME)')]
+	# BUG FIX: dict.iteritems() was removed in Python 3; items() works on both.
+	for cfgname, settings in self.env.PROJ_CONFIGURATION.items():
+		for default_var, default_val in defaults_required:
+			if default_var not in settings:
+				settings[default_var] = default_val
+
+	# Error check customization
+	if not isinstance(self.env.PROJ_CONFIGURATION, dict):
+		raise Errors.ConfigurationError("The env.PROJ_CONFIGURATION must be a dictionary with at least one key, where each key is the configuration name, and the value is a dictionary of key/value settings.")
+
+# NOTE(review): part1/part2/part3 appear unused (the format string below
+# hard-codes 0, 10000, 0); kept for compatibility with external readers.
+part1 = 0
+part2 = 10000
+part3 = 0
+# BUG FIX: renamed the counter from `id` so the builtin id() is not shadowed
+# at module level.
+_last_id = 562000999
+def newid():
+	"""Return a new 24-character identifier, unique within this process."""
+	global _last_id
+	_last_id += 1
+	return "%04X%04X%04X%012d" % (0, 10000, 0, _last_id)
+
+"""
+Represents a tree node in the XCode project plist file format.
+When written to a file, all attributes of XCodeNode are stringified together with
+its value. However, attributes starting with an underscore _ are ignored
+during that process and allows you to store arbitrary values that are not supposed
+to be written out.
+"""
+class XCodeNode(object):
+	"""
+	Base class of every pbxproj element: owns a unique _id and the
+	serialization logic. Attributes starting with '_' are never written.
+	"""
+	def __init__(self):
+		self._id = newid()
+		self._been_written = False
+
+	def tostring(self, value):
+		# Recursively render a value in pbxproj syntax; XCodeNode values
+		# are emitted by reference (their _id), not inline.
+		if isinstance(value, dict):
+			result = "{\n"
+			for k,v in value.items():
+				result = result + "\t\t\t%s = %s;\n" % (k, self.tostring(v))
+			result = result + "\t\t}"
+			return result
+		elif isinstance(value, str):
+			return "\"%s\"" % value
+		elif isinstance(value, list):
+			result = "(\n"
+			for i in value:
+				result = result + "\t\t\t%s,\n" % self.tostring(i)
+			result = result + "\t\t)"
+			return result
+		elif isinstance(value, XCodeNode):
+			return value._id
+		else:
+			return str(value)
+
+	def write_recursive(self, value, file):
+		# Depth-first write of every XCodeNode reachable through dicts/lists.
+		if isinstance(value, dict):
+			for k,v in value.items():
+				self.write_recursive(v, file)
+		elif isinstance(value, list):
+			for i in value:
+				self.write_recursive(i, file)
+		elif isinstance(value, XCodeNode):
+			value.write(file)
+
+	def write(self, file):
+		# Emit referenced nodes first, then this node's own section.
+		# _been_written guards against duplicates in the object graph.
+		if not self._been_written:
+			self._been_written = True
+			for attribute,value in self.__dict__.items():
+				if attribute[0] != '_':
+					self.write_recursive(value, file)
+			w = file.write
+			w("\t%s = {\n" % self._id)
+			w("\t\tisa = %s;\n" % self.__class__.__name__)
+			for attribute,value in self.__dict__.items():
+				if attribute[0] != '_':
+					w("\t\t%s = %s;\n" % (attribute, self.tostring(value)))
+			w("\t};\n\n")
+
+# Configurations
+# Configurations
+class XCBuildConfiguration(XCodeNode):
+	"""A named build configuration (e.g. Debug) and its build settings."""
+	def __init__(self, name, settings=None, env=None):
+		# BUG FIX: the default was a mutable dict ({}) shared by every
+		# instance constructed without explicit settings; the ARCHS write
+		# below then leaked into unrelated configurations.
+		XCodeNode.__init__(self)
+		if settings is None:
+			settings = {}
+		self.baseConfigurationReference = ""
+		self.buildSettings = settings
+		self.name = name
+		if env and env.ARCH:
+			# note: intentionally mutates the caller-supplied dict, as before
+			settings['ARCHS'] = " ".join(env.ARCH)
+
+
+class XCConfigurationList(XCodeNode):
+	def __init__(self, configlst):
+		""" :param configlst: list of XCBuildConfiguration """
+		XCodeNode.__init__(self)
+		self.buildConfigurations = configlst
+		self.defaultConfigurationIsVisible = 0
+		# default to the first configuration's name, or "" for an empty list
+		self.defaultConfigurationName = configlst and configlst[0].name or ""
+
+# Group/Files
+# Group/Files
+class PBXFileReference(XCodeNode):
+	"""
+	A reference to a file on disk. Equality and hashing use (path, name),
+	so references to the same file compare equal across instances.
+	"""
+	def __init__(self, name, path, filetype = '', sourcetree = "SOURCE_ROOT"):
+
+		XCodeNode.__init__(self)
+		# 4 is the pbxproj encoding code -- presumably UTF-8; TODO confirm
+		self.fileEncoding = 4
+		if not filetype:
+			# guess the Xcode file type from the extension; 'text' fallback
+			_, ext = os.path.splitext(name)
+			filetype = MAP_EXT.get(ext, 'text')
+		self.lastKnownFileType = filetype
+		self.explicitFileType = filetype
+		self.name = name
+		self.path = path
+		self.sourceTree = sourcetree
+
+	def __hash__(self):
+		return (self.path+self.name).__hash__()
+
+	def __eq__(self, other):
+		return (self.path, self.name) == (other.path, other.name)
+
+class PBXBuildFile(XCodeNode):
+	""" This element indicate a file reference that is used in a PBXBuildPhase (either as an include or resource). """
+	def __init__(self, fileRef, settings=None):
+		XCodeNode.__init__(self)
+
+		# fileRef is a reference to a PBXFileReference object
+		self.fileRef = fileRef
+
+		# A map of key/value pairs for additional settings.
+		# BUG FIX: the default was a mutable dict ({}) stored directly on
+		# the instance, so all default-constructed build files shared it.
+		self.settings = {} if settings is None else settings
+
+	def __hash__(self):
+		# identity is delegated to the wrapped file reference
+		return (self.fileRef).__hash__()
+
+	def __eq__(self, other):
+		return self.fileRef == other.fileRef
+
+class PBXGroup(XCodeNode):
+	"""A UI folder in the Xcode navigator, holding file references and sub-groups."""
+	def __init__(self, name, sourcetree = 'SOURCE_TREE'):
+		XCodeNode.__init__(self)
+		self.children = []
+		self.name = name
+		self.sourceTree = sourcetree
+
+		# Maintain a lookup table for all PBXFileReferences
+		# that are contained in this group.
+		self._filerefs = {}
+
+	def add(self, sources):
+		"""
+		Add a list of PBXFileReferences to this group
+
+		:param sources: list of PBXFileReferences objects
+		"""
+		# the dict maps each fileref to itself for O(1) reuse lookups
+		self._filerefs.update(dict(zip(sources, sources)))
+		self.children.extend(sources)
+
+	def get_sub_groups(self):
+		"""
+		Returns all child PBXGroup objects contained in this group
+		"""
+		return list(filter(lambda x: isinstance(x, PBXGroup), self.children))
+
+	def find_fileref(self, fileref):
+		"""
+		Recursively search this group for an existing PBXFileReference. Returns None
+		if none were found.
+
+		The reason you'd want to reuse existing PBXFileReferences from a PBXGroup is that XCode doesn't like PBXFileReferences that aren't part of a PBXGroup hierarchy.
+		If it isn't, the consequence is that certain UI features like 'Reveal in Finder'
+		stops working.
+		"""
+		if fileref in self._filerefs:
+			return self._filerefs[fileref]
+		elif self.children:
+			# not here: recurse into sub-groups only
+			for childgroup in self.get_sub_groups():
+				f = childgroup.find_fileref(fileref)
+				if f:
+					return f
+		return None
+
+class PBXContainerItemProxy(XCodeNode):
+	""" The element used to decorate a target item. """
+	def __init__(self, containerPortal, remoteGlobalIDString, remoteInfo='', proxyType=1):
+		XCodeNode.__init__(self)
+		self.containerPortal = containerPortal # PBXProject
+		self.remoteGlobalIDString = remoteGlobalIDString # PBXNativeTarget
+		self.remoteInfo = remoteInfo # Target name
+		self.proxyType = proxyType
+
+class PBXTargetDependency(XCodeNode):
+	""" This is the element for referencing other target through content proxies. """
+	def __init__(self, native_target, proxy):
+		XCodeNode.__init__(self)
+		self.target = native_target   # the PBXNativeTarget depended upon
+		self.targetProxy = proxy      # its PBXContainerItemProxy wrapper
+
+class PBXFrameworksBuildPhase(XCodeNode):
+	""" This is the element for the framework link build phase, i.e. linking to frameworks """
+	def __init__(self, pbxbuildfiles):
+		XCodeNode.__init__(self)
+		# 2147483647 (2**31-1) is the conventional pbxproj "all actions" mask
+		self.buildActionMask = 2147483647
+		self.runOnlyForDeploymentPostprocessing = 0
+		self.files = pbxbuildfiles #List of PBXBuildFile (.o, .framework, .dylib)
+
+class PBXHeadersBuildPhase(XCodeNode):
+	""" This is the element for adding header files to be packaged into the .framework """
+	def __init__(self, pbxbuildfiles):
+		XCodeNode.__init__(self)
+		self.buildActionMask = 2147483647
+		self.runOnlyForDeploymentPostprocessing = 0
+		self.files = pbxbuildfiles #List of PBXBuildFile (.o, .framework, .dylib)
+
+class PBXCopyFilesBuildPhase(XCodeNode):
+	"""
+	Represents the PBXCopyFilesBuildPhase section. PBXBuildFile
+	can be added to this node to copy files after build is done.
+	"""
+	# NOTE(review): *args/**kwargs are accepted but silently discarded
+	def __init__(self, pbxbuildfiles, dstpath, dstSubpathSpec=0, *args, **kwargs):
+		XCodeNode.__init__(self)
+		self.files = pbxbuildfiles
+		self.dstPath = dstpath
+		self.dstSubfolderSpec = dstSubpathSpec
+
+class PBXSourcesBuildPhase(XCodeNode):
+	""" Represents the 'Compile Sources' build phase in a Xcode target """
+	def __init__(self, buildfiles):
+		XCodeNode.__init__(self)
+		self.files = buildfiles # List of PBXBuildFile objects
+
+class PBXLegacyTarget(XCodeNode):
+	"""
+	An 'external build tool' target that shells out to the waf script
+	(sys.argv[0]) run by the current Python interpreter.
+	"""
+	def __init__(self, action, target=''):
+		XCodeNode.__init__(self)
+		self.buildConfigurationList = XCConfigurationList([XCBuildConfiguration('waf', {})])
+		if not target:
+			# no target: run the waf command (e.g. 'build') for everything
+			self.buildArgumentsString = "%s %s" % (sys.argv[0], action)
+		else:
+			self.buildArgumentsString = "%s %s --targets=%s" % (sys.argv[0], action, target)
+		self.buildPhases = []
+		self.buildToolPath = sys.executable
+		self.buildWorkingDirectory = ""
+		self.dependencies = []
+		self.name = target or action
+		self.productName = target or action
+		self.passBuildSettingsInEnvironment = 0
+
+class PBXShellScriptBuildPhase(XCodeNode):
+	"""A 'Run Script' phase that re-invokes waf for a single target."""
+	def __init__(self, action, target):
+		XCodeNode.__init__(self)
+		self.buildActionMask = 2147483647
+		self.files = []
+		self.inputPaths = []
+		self.outputPaths = []
+		# NOTE(review): capital 'P' in 'PostProcessing' differs from the
+		# 'Postprocessing' spelling used in the other phases -- verify
+		# which key Xcode actually reads
+		self.runOnlyForDeploymentPostProcessing = 0
+		self.shellPath = "/bin/sh"
+		self.shellScript = "%s %s %s --targets=%s" % (sys.executable, sys.argv[0], action, target)
+
+class PBXNativeTarget(XCodeNode):
+	""" Represents a target in XCode, e.g. App, DyLib, Framework etc. """
+	def __init__(self, target, node, target_type=TARGET_TYPE_APPLICATION, configlist=None, buildphases=None):
+		# BUG FIX: configlist/buildphases defaulted to mutable lists ([]),
+		# which would be shared by every target constructed without
+		# explicit arguments; add_configuration/add_build_phase would then
+		# mutate all such targets at once.
+		XCodeNode.__init__(self)
+		if configlist is None:
+			configlist = []
+		if buildphases is None:
+			buildphases = []
+		product_type = target_type[0]
+		file_type = target_type[1]
+
+		self.buildConfigurationList = XCConfigurationList(configlist)
+		self.buildPhases = buildphases
+		self.buildRules = []
+		self.dependencies = []
+		self.name = target
+		self.productName = target
+		self.productType = product_type # See TARGET_TYPE_ tuples constants
+		self.productReference = PBXFileReference(node.name, node.abspath(), file_type, '')
+
+	def add_configuration(self, cf):
+		""" :type cf: XCBuildConfiguration """
+		self.buildConfigurationList.buildConfigurations.append(cf)
+
+	def add_build_phase(self, phase):
+		# Some build phase types may appear only once. If a phase type already exists, then merge them.
+		if ( (phase.__class__ == PBXFrameworksBuildPhase)
+			or (phase.__class__ == PBXSourcesBuildPhase) ):
+			for b in self.buildPhases:
+				if b.__class__ == phase.__class__:
+					b.files.extend(phase.files)
+					return
+		self.buildPhases.append(phase)
+
+	def add_dependency(self, depnd):
+		self.dependencies.append(depnd)
+
+# Root project object
+# Root project object
+class PBXProject(XCodeNode):
+	"""The root pbxproj object: owns the main group, targets and configurations."""
+	def __init__(self, name, version, env):
+		XCodeNode.__init__(self)
+
+		if not isinstance(env.PROJ_CONFIGURATION, dict):
+			raise Errors.WafError("Error: env.PROJ_CONFIGURATION must be a dictionary. This is done for you if you do not define one yourself. However, did you load the xcode module at the end of your wscript configure() ?")
+
+		# Retrieve project configuration
+		configurations = []
+		for config_name, settings in env.PROJ_CONFIGURATION.items():
+			cf = XCBuildConfiguration(config_name, settings)
+			configurations.append(cf)
+
+		self.buildConfigurationList = XCConfigurationList(configurations)
+		# version is a ('Xcode x.y', objectVersion) pair
+		self.compatibilityVersion = version[0]
+		self.hasScannedForEncodings = 1
+		self.mainGroup = PBXGroup(name)
+		self.projectRoot = ""
+		self.projectDirPath = ""
+		self.targets = []
+		self._objectVersion = version[1]
+
+	def create_target_dependency(self, target, name):
+		""" :param target: PBXNativeTarget """
+		proxy = PBXContainerItemProxy(self, target, name)
+		dependecy = PBXTargetDependency(target, proxy)
+		return dependecy
+
+	def write(self, file):
+		# Emits the whole object graph in pbxproj format, wrapped in the
+		# file-level header/footer that XCodeNode.write does not produce.
+
+		# Make sure this is written only once
+		if self._been_written:
+			return
+
+		w = file.write
+		w("// !$*UTF8*$!\n")
+		w("{\n")
+		w("\tarchiveVersion = 1;\n")
+		w("\tclasses = {\n")
+		w("\t};\n")
+		w("\tobjectVersion = %d;\n" % self._objectVersion)
+		w("\tobjects = {\n\n")
+
+		XCodeNode.write(self, file)
+
+		w("\t};\n")
+		w("\trootObject = %s;\n" % self._id)
+		w("}\n")
+
+	def add_target(self, target):
+		self.targets.append(target)
+
+	def get_target(self, name):
+		""" Get a reference to PBXNativeTarget if it exists """
+		for t in self.targets:
+			if t.name == name:
+				return t
+		return None
+
+@TaskGen.feature('c', 'cxx')
+@TaskGen.after('propagate_uselib_vars', 'apply_incpaths')
+def process_xcode(self):
+	"""
+	Turn one c/cxx task generator into a PBXNativeTarget on the current
+	xcode project (bld.project). No-op unless run under the xcode context
+	and the generator declares a 'target_type'.
+	"""
+	bld = self.bld
+	try:
+		p = bld.project
+	except AttributeError:
+		return
+
+	if not hasattr(self, 'target_type'):
+		return
+
+	products_group = bld.products_group
+
+	target_group = PBXGroup(self.name)
+	p.mainGroup.children.append(target_group)
+
+	# Determine what type to build - framework, app bundle etc.
+	target_type = getattr(self, 'target_type', 'app')
+	if target_type not in TARGET_TYPES:
+		raise Errors.WafError("Target type '%s' does not exists. Available options are '%s'. In target '%s'" % (target_type, "', '".join(TARGET_TYPES.keys()), self.name))
+	else:
+		target_type = TARGET_TYPES[target_type]
+	file_ext = target_type[2]
+
+	# Create the output node
+	target_node = self.path.find_or_declare(self.name+file_ext)
+	target = PBXNativeTarget(self.name, target_node, target_type, [], [])
+
+	products_group.children.append(target.productReference)
+
+	# Pull source files from the 'source' attribute and assign them to a UI group.
+	# Use a default UI group named 'Source' unless the user
+	# provides a 'group_files' dictionary to customize the UI grouping.
+	sources = getattr(self, 'source', [])
+	if hasattr(self, 'group_files'):
+		group_files = getattr(self, 'group_files', [])
+		for grpname,files in group_files.items():
+			group = bld.create_group(grpname, files)
+			target_group.children.append(group)
+	else:
+		group = bld.create_group('Source', sources)
+		target_group.children.append(group)
+
+	# Create a PBXFileReference for each source file.
+	# If the source file already exists as a PBXFileReference in any of the UI groups, then
+	# reuse that PBXFileReference object (XCode does not like it if we don't reuse)
+	for idx, path in enumerate(sources):
+		fileref = PBXFileReference(path.name, path.abspath())
+		existing_fileref = target_group.find_fileref(fileref)
+		if existing_fileref:
+			sources[idx] = existing_fileref
+		else:
+			sources[idx] = fileref
+
+	# If the 'source' attribute contains any file extension that XCode can't work with,
+	# then remove it. The allowed file extensions are defined in XCODE_EXTS.
+	is_valid_file_extension = lambda file: os.path.splitext(file.path)[1] in XCODE_EXTS
+	sources = list(filter(is_valid_file_extension, sources))
+
+	buildfiles = [bld.unique_buildfile(PBXBuildFile(x)) for x in sources]
+	target.add_build_phase(PBXSourcesBuildPhase(buildfiles))
+
+	# Check if any framework to link against is some other target we've made
+	libs = getattr(self, 'tmp_use_seen', [])
+	for lib in libs:
+		use_target = p.get_target(lib)
+		if use_target:
+			# Create an XCode dependency so that XCode knows to build the other target before this target
+			dependency = p.create_target_dependency(use_target, use_target.name)
+			target.add_dependency(dependency)
+
+			buildphase = PBXFrameworksBuildPhase([PBXBuildFile(use_target.productReference)])
+			target.add_build_phase(buildphase)
+			if lib in self.env.LIB:
+				self.env.LIB = list(filter(lambda x: x != lib, self.env.LIB))
+
+	# If 'export_headers' is present, add files to the Headers build phase in xcode.
+	# These are files that'll get packed into the Framework for instance.
+	exp_hdrs = getattr(self, 'export_headers', [])
+	hdrs = bld.as_nodes(Utils.to_list(exp_hdrs))
+	files = [p.mainGroup.find_fileref(PBXFileReference(n.name, n.abspath())) for n in hdrs]
+	files = [PBXBuildFile(f, {'ATTRIBUTES': ('Public',)}) for f in files]
+	buildphase = PBXHeadersBuildPhase(files)
+	target.add_build_phase(buildphase)
+
+	# Merge frameworks and libs into one list, and prefix the frameworks
+	frameworks = Utils.to_list(self.env.FRAMEWORK)
+	frameworks = ' '.join(['-framework %s' % (f.split('.framework')[0]) for f in frameworks])
+
+	libs = Utils.to_list(self.env.STLIB) + Utils.to_list(self.env.LIB)
+	libs = ' '.join(bld.env['STLIB_ST'] % t for t in libs)
+
+	# Override target specific build settings
+	bldsettings = {
+		'HEADER_SEARCH_PATHS': ['$(inherited)'] + self.env['INCPATHS'],
+		'LIBRARY_SEARCH_PATHS': ['$(inherited)'] + Utils.to_list(self.env.LIBPATH) + Utils.to_list(self.env.STLIBPATH) + Utils.to_list(self.env.LIBDIR) ,
+		'FRAMEWORK_SEARCH_PATHS': ['$(inherited)'] + Utils.to_list(self.env.FRAMEWORKPATH),
+		'OTHER_LDFLAGS': libs + ' ' + frameworks,
+		'OTHER_LIBTOOLFLAGS': bld.env['LINKFLAGS'],
+		'OTHER_CPLUSPLUSFLAGS': Utils.to_list(self.env['CXXFLAGS']),
+		'OTHER_CFLAGS': Utils.to_list(self.env['CFLAGS']),
+		'INSTALL_PATH': []
+	}
+
+	# Install path
+	installpaths = Utils.to_list(getattr(self, 'install', []))
+	prodbuildfile = PBXBuildFile(target.productReference)
+	for instpath in installpaths:
+		bldsettings['INSTALL_PATH'].append(instpath)
+		target.add_build_phase(PBXCopyFilesBuildPhase([prodbuildfile], instpath))
+
+	if not bldsettings['INSTALL_PATH']:
+		del bldsettings['INSTALL_PATH']
+
+	# Create build settings which can override the project settings. Defaults to none if user
+	# did not pass argument. This will be filled up with target specific
+	# search paths, libs to link etc.
+	settings = getattr(self, 'settings', {})
+
+	# The keys represents different build configuration, e.g. Debug, Release and so on..
+	# Insert our generated build settings to all configuration names
+	# BUG FIX: on Python 3 dict views cannot be concatenated with '+';
+	# a set union produces the same result on both Python 2 and 3.
+	keys = set(settings.keys()) | set(bld.env.PROJ_CONFIGURATION.keys())
+	for k in keys:
+		if k in settings:
+			settings[k].update(bldsettings)
+		else:
+			# NOTE(review): every missing configuration name shares the
+			# same bldsettings dict object (historical behavior, kept)
+			settings[k] = bldsettings
+
+	for k,v in settings.items():
+		target.add_configuration(XCBuildConfiguration(k, v))
+
+	p.add_target(target)
+
+
+class xcode(Build.BuildContext):
+	"""Build context invoked as 'waf xcode6'; generates a .xcodeproj bundle."""
+	cmd = 'xcode6'
+	fun = 'build'
+
+	def as_nodes(self, files):
+		""" Returns a list of waflib.Nodes from a list of string of file paths """
+		nodes = []
+		for x in files:
+			if not isinstance(x, str):
+				# already a Node: pass through unchanged
+				d = x
+			else:
+				d = self.srcnode.find_node(x)
+			if not d:
+				raise Errors.WafError('File \'%s\' was not found' % x)
+			nodes.append(d)
+		return nodes
+
+	def create_group(self, name, files):
+		"""
+		Returns a new PBXGroup containing the files (paths) passed in the files arg
+		:type files: string
+		"""
+		group = PBXGroup(name)
+		"""
+		Do not use unique file reference here, since XCode seem to allow only one file reference
+		to be referenced by a group.
+		"""
+		files_ = []
+		for d in self.as_nodes(Utils.to_list(files)):
+			fileref = PBXFileReference(d.name, d.abspath())
+			files_.append(fileref)
+		group.add(files_)
+		return group
+
+	def unique_buildfile(self, buildfile):
+		"""
+		Returns a unique buildfile, possibly an existing one.
+		Use this after you've constructed a PBXBuildFile to make sure there is
+		only one PBXBuildFile for the same file in the same project.
+		"""
+		try:
+			build_files = self.build_files
+		except AttributeError:
+			# lazily create the cache on first use
+			build_files = self.build_files = {}
+
+		if buildfile not in build_files:
+			build_files[buildfile] = buildfile
+		return build_files[buildfile]
+
+	def execute(self):
+		"""
+		Entry point
+		"""
+		self.restore()
+		if not self.all_envs:
+			self.load_envs()
+		self.recurse([self.run_dir])
+
+		appname = getattr(Context.g_module, Context.APPNAME, os.path.basename(self.srcnode.abspath()))
+
+		# ('Xcode 3.2', 46) = (compatibilityVersion, objectVersion)
+		p = PBXProject(appname, ('Xcode 3.2', 46), self.env)
+
+		# If we don't create a Products group, then
+		# XCode will create one, which entails that
+		# we'll start to see duplicate files in the UI
+		# for some reason.
+		products_group = PBXGroup('Products')
+		p.mainGroup.children.append(products_group)
+
+		self.project = p
+		self.products_group = products_group
+
+		# post all task generators
+		# the process_xcode method above will be called for each target
+		if self.targets and self.targets != '*':
+			(self._min_grp, self._exact_tg) = self.get_targets()
+
+		self.current_group = 0
+		while self.current_group < len(self.groups):
+			self.post_group()
+			self.current_group += 1
+
+		node = self.bldnode.make_node('%s.xcodeproj' % appname)
+		node.mkdir()
+		node = node.make_node('project.pbxproj')
+		with open(node.abspath(), 'w') as f:
+			p.write(f)
+		Logs.pprint('GREEN', 'Wrote %r' % node.abspath())
+
+def bind_fun(tgtype):
+	"""
+	Create and register a BuildContext convenience method (bld.app,
+	bld.framework, bld.dylib, bld.shlib, bld.stlib, bld.program) that
+	injects the matching 'features' and 'target_type' keywords before
+	delegating to bld(...).
+	"""
+	def fun(self, *k, **kw):
+		# NOTE(review): this rebinding shadows the enclosing tgtype
+		# parameter; an unregistered name would leave 'features' unbound
+		# (NameError), but every name registered below is covered.
+		tgtype = fun.__name__
+		if tgtype == 'shlib' or tgtype == 'dylib':
+			features = 'cxx cxxshlib'
+			tgtype = 'dylib'
+		elif tgtype == 'framework':
+			features = 'cxx cxxshlib'
+			tgtype = 'framework'
+		elif tgtype == 'program':
+			features = 'cxx cxxprogram'
+			tgtype = 'exe'
+		elif tgtype == 'app':
+			features = 'cxx cxxprogram'
+			tgtype = 'app'
+		elif tgtype == 'stlib':
+			features = 'cxx cxxstlib'
+			tgtype = 'stlib'
+		# merge the computed features into any caller-supplied ones
+		lst = kw['features'] = Utils.to_list(kw.get('features', []))
+		for x in features.split():
+			if not x in kw['features']:
+				lst.append(x)
+
+		kw['target_type'] = tgtype
+		return self(*k, **kw)
+	fun.__name__ = tgtype
+	setattr(Build.BuildContext, tgtype, fun)
+	return fun
+
+# register bld.app, bld.framework, ... on every BuildContext
+for xx in 'app framework dylib shlib stlib program'.split():
+	bind_fun(xx)
+
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2010-2016 (ita)
+# Thomas Nagy, 2010-2018 (ita)
+
+from __future__ import with_statement
import os
def fixdir(dir):
"""Call all substitution functions on Waf folders"""
- global all_modifs
for k in all_modifs:
for v in all_modifs[k]:
modif(os.path.join(dir, 'waflib'), k, v)
return
filename = os.path.join(dir, name)
- f = open(filename, 'r')
- try:
+ with open(filename, 'r') as f:
txt = f.read()
- finally:
- f.close()
txt = fun(txt)
- f = open(filename, 'w')
- try:
+ with open(filename, 'w') as f:
f.write(txt)
- finally:
- f.close()
def subst(*k):
"""register a substitution function"""
def do_subst(fun):
- global all_modifs
for x in k:
try:
all_modifs[x].append(fun)
@subst('*')
def r1(code):
"utf-8 fixes for python < 2.6"
- code = code.replace(',e:', ',e:')
- code = code.replace("", '')
- return code.replace('', '')
+ code = code.replace('as e:', ',e:')
+ code = code.replace(".decode(sys.stdout.encoding or'latin-1',errors='replace')", '')
+ return code.replace('.encode()', '')
@subst('Runner.py')
def r4(code):
@subst('Context.py')
def r5(code):
return code.replace("('Execution failure: %s'%str(e),ex=e)", "('Execution failure: %s'%str(e),ex=e),None,sys.exc_info()[2]")
+
#! /usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2016 (ita)
+# Thomas Nagy, 2016-2018 (ita)
import os, sys, traceback, base64, signal
try:
try:
TimeoutExpired = subprocess.TimeoutExpired
except AttributeError:
- class TimeoutExpired(object):
+ class TimeoutExpired(Exception):
pass
def run():
exc.stderr = err
raise exc
ret = proc.returncode
- except Exception ,e:
+ except Exception as e:
exc_type, exc_value, tb = sys.exc_info()
exc_lines = traceback.format_exception(exc_type, exc_value, tb)
trace = str(cmd) + '\n' + ''.join(exc_lines)
run()
except KeyboardInterrupt:
break
+