third_party: Update waf to version 2.0.23
author	Andreas Schneider <asn@samba.org>
Thu, 17 Feb 2022 14:40:20 +0000 (15:40 +0100)
committer	Andreas Schneider <asn@cryptomilk.org>
Mon, 21 Feb 2022 10:06:27 +0000 (10:06 +0000)
Signed-off-by: Andreas Schneider <asn@samba.org>
Reviewed-by: Alexander Bokovoy <ab@samba.org>
Autobuild-User(master): Andreas Schneider <asn@cryptomilk.org>
Autobuild-Date(master): Mon Feb 21 10:06:27 UTC 2022 on sn-devel-184

21 files changed:
buildtools/bin/waf
buildtools/wafsamba/wafsamba.py
third_party/waf/waflib/Context.py
third_party/waf/waflib/Runner.py
third_party/waf/waflib/TaskGen.py
third_party/waf/waflib/Tools/c_config.py
third_party/waf/waflib/Tools/compiler_c.py
third_party/waf/waflib/Tools/compiler_cxx.py
third_party/waf/waflib/Tools/python.py
third_party/waf/waflib/Tools/qt5.py
third_party/waf/waflib/Tools/winres.py
third_party/waf/waflib/extras/clang_compilation_database.py
third_party/waf/waflib/extras/classic_runner.py [new file with mode: 0644]
third_party/waf/waflib/extras/color_gcc.py
third_party/waf/waflib/extras/eclipse.py
third_party/waf/waflib/extras/gccdeps.py
third_party/waf/waflib/extras/msvcdeps.py
third_party/waf/waflib/extras/msvs.py
third_party/waf/waflib/extras/swig.py
third_party/waf/waflib/extras/wafcache.py
third_party/waf/waflib/fixpy2.py

index b0ccb09a8772766560d89b68420f8f7a8c5daa97..2001ccdbd8ae1c9ab60b62201edc7576464a1ad0 100755 (executable)
@@ -32,7 +32,7 @@ POSSIBILITY OF SUCH DAMAGE.
 
 import os, sys, inspect
 
-VERSION="2.0.22"
+VERSION="2.0.23"
 REVISION="x"
 GIT="x"
 INSTALL="x"
@@ -164,4 +164,3 @@ if __name__ == '__main__':
 
        from waflib import Scripting
        Scripting.waf_entry_point(cwd, VERSION, wafdir[0])
-
index 185ef3b73a22705fe8963a54bc084a840df91108..710b82af663a590408b8b4b98e6b501a30605d12 100644 (file)
@@ -38,7 +38,7 @@ LIB_PATH="shared"
 
 os.environ['PYTHONUNBUFFERED'] = '1'
 
-if Context.HEXVERSION not in (0x2001600,):
+if Context.HEXVERSION not in (0x2001700,):
     Logs.error('''
 Please use the version of waf that comes with Samba, not
 a system installed version. See http://wiki.samba.org/index.php/Waf
index 07ee1201f0366ceec7bdc68695221d4a5ee9361e..36d1ca74fef93c763864bc6d1660f90212828b19 100644 (file)
@@ -18,13 +18,13 @@ else:
        import imp
 
 # the following 3 constants are updated on each new release (do not touch)
-HEXVERSION=0x2001600
+HEXVERSION=0x2001700
 """Constant updated on new releases"""
 
-WAFVERSION="2.0.22"
+WAFVERSION="2.0.23"
 """Constant updated on new releases"""
 
-WAFREVISION="816d5bc48ba2abc4ac22f2b44d94d322bf992b9c"
+WAFREVISION="cc6b34cf555d354c34f554c41206134072588de7"
 """Git revision when the waf version is updated"""
 
 WAFNAME="waf"
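
As an aside, the two constants suggest HEXVERSION packs major, minor and patch into successive bytes, so 2.0.23 becomes 0x2001700. A minimal sketch of that assumed encoding:

major, minor, patch = 2, 0, 23
hexversion = (major << 24) | (minor << 16) | (patch << 8)
assert hexversion == 0x2001700   # the new value above; 2.0.22 gives 0x2001600 the same way
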
index 91d55479e20aed7f4e3efbf4f02f404bf1a3e715..350c86a22c0736a317c7cae8a05f84ab2b13fa87 100644 (file)
@@ -71,7 +71,7 @@ class Consumer(Utils.threading.Thread):
                """Task to execute"""
                self.spawner = spawner
                """Coordinator object"""
-               self.setDaemon(1)
+               self.daemon = True
                self.start()
        def run(self):
                """
@@ -98,7 +98,7 @@ class Spawner(Utils.threading.Thread):
                """:py:class:`waflib.Runner.Parallel` producer instance"""
                self.sem = Utils.threading.Semaphore(master.numjobs)
                """Bounded semaphore that prevents spawning more than *n* concurrent consumers"""
-               self.setDaemon(1)
+               self.daemon = True
                self.start()
        def run(self):
                """
index f8f92bd57c16c78e4691e333c6784a3760af204a..89f631699108a351e5514419409d79eb8a54913e 100644 (file)
@@ -631,12 +631,8 @@ def process_rule(self):
                        cls.scan = self.scan
                elif has_deps:
                        def scan(self):
-                               nodes = []
-                               for x in self.generator.to_list(getattr(self.generator, 'deps', None)):
-                                       node = self.generator.path.find_resource(x)
-                                       if not node:
-                                               self.generator.bld.fatal('Could not find %r (was it declared?)' % x)
-                                       nodes.append(node)
+                               deps = getattr(self.generator, 'deps', None)
+                               nodes = self.generator.to_nodes(deps)
                                return [nodes, []]
                        cls.scan = scan
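
The new scan() resolves the generator's 'deps' attribute through to_nodes() instead of open-coding the lookup, while still failing loudly if a dependency cannot be found. A hedged wscript sketch of the feature this serves (file names are illustrative only):

def build(bld):
	bld(rule='python ${SRC[0].abspath()} > ${TGT}',
		source='gen.py',
		target='version.h',
		deps=['data/version.tpl'])   # extra inputs tracked through the scan() above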
 
index 03b6bf61bc0002a5041dc69784f8b28a7b6f5f8c..f5ab19bf6ce6be7bfb8d7e58dbf3ac71c37c5d78 100644 (file)
@@ -69,6 +69,7 @@ MACRO_TO_DEST_CPU = {
 '__sh__'      : 'sh',
 '__xtensa__'  : 'xtensa',
 '__e2k__'     : 'e2k',
+'__riscv'     : 'riscv',
 }
 
 @conf
index 931dc57efec014b5bf4d3481c301dc28886f5e09..e033ce6c5c34a3dfbc76d8419879d95d73321713 100644 (file)
@@ -36,18 +36,19 @@ from waflib import Utils
 from waflib.Logs import debug
 
 c_compiler = {
-'win32':  ['msvc', 'gcc', 'clang'],
-'cygwin': ['gcc', 'clang'],
-'darwin': ['clang', 'gcc'],
-'aix':    ['xlc', 'gcc', 'clang'],
-'linux':  ['gcc', 'clang', 'icc'],
-'sunos':  ['suncc', 'gcc'],
-'irix':   ['gcc', 'irixcc'],
-'hpux':   ['gcc'],
-'osf1V':  ['gcc'],
-'gnu':    ['gcc', 'clang'],
-'java':   ['gcc', 'msvc', 'clang', 'icc'],
-'default':['clang', 'gcc'],
+'win32':       ['msvc', 'gcc', 'clang'],
+'cygwin':      ['gcc', 'clang'],
+'darwin':      ['clang', 'gcc'],
+'aix':         ['xlc', 'gcc', 'clang'],
+'linux':       ['gcc', 'clang', 'icc'],
+'sunos':       ['suncc', 'gcc'],
+'irix':        ['gcc', 'irixcc'],
+'hpux':        ['gcc'],
+'osf1V':       ['gcc'],
+'gnu':         ['gcc', 'clang'],
+'java':        ['gcc', 'msvc', 'clang', 'icc'],
+'gnukfreebsd': ['gcc', 'clang'],
+'default':     ['clang', 'gcc'],
 }
 """
 Dict mapping platform names to Waf tools finding specific C compilers::
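
Besides the aligned formatting, the table gains a 'gnukfreebsd' entry for Debian GNU/kFreeBSD. For reference, a project wscript may still override the probe order before loading the tool; a small sketch of that assumed usage:

def options(opt):
	from waflib.Tools.compiler_c import c_compiler
	c_compiler['linux'] = ['clang', 'gcc']   # e.g. prefer clang when configuring on linux
	opt.load('compiler_c')
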
index 09fca7e4dc63cfea3a19d1acce4bd4356e41f943..42658c5847eee3304d28f6df8e2eb7cd4a15e651 100644 (file)
@@ -37,18 +37,19 @@ from waflib import Utils
 from waflib.Logs import debug
 
 cxx_compiler = {
-'win32':  ['msvc', 'g++', 'clang++'],
-'cygwin': ['g++', 'clang++'],
-'darwin': ['clang++', 'g++'],
-'aix':    ['xlc++', 'g++', 'clang++'],
-'linux':  ['g++', 'clang++', 'icpc'],
-'sunos':  ['sunc++', 'g++'],
-'irix':   ['g++'],
-'hpux':   ['g++'],
-'osf1V':  ['g++'],
-'gnu':    ['g++', 'clang++'],
-'java':   ['g++', 'msvc', 'clang++', 'icpc'],
-'default': ['clang++', 'g++']
+'win32':       ['msvc', 'g++', 'clang++'],
+'cygwin':      ['g++', 'clang++'],
+'darwin':      ['clang++', 'g++'],
+'aix':         ['xlc++', 'g++', 'clang++'],
+'linux':       ['g++', 'clang++', 'icpc'],
+'sunos':       ['sunc++', 'g++'],
+'irix':        ['g++'],
+'hpux':        ['g++'],
+'osf1V':       ['g++'],
+'gnu':         ['g++', 'clang++'],
+'java':        ['g++', 'msvc', 'clang++', 'icpc'],
+'gnukfreebsd': ['g++', 'clang++'],
+'default':     ['clang++', 'g++']
 }
 """
 Dict mapping the platform names to Waf tools finding specific C++ compilers::
index 07442561dff93088ac893ea1841c1066f55ac105..fb641e5e20d955b18288611135d07e0b543d81e8 100644 (file)
@@ -416,9 +416,14 @@ def check_python_headers(conf, features='pyembed pyext'):
 
                if not result:
                        path = [os.path.join(dct['prefix'], "libs")]
-                       conf.to_log("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n")
+                       conf.to_log("\n\n# try again with -L$prefix/libs, and pythonXY rather than pythonX.Y (win32)\n")
                        result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in $prefix/libs' % name)
 
+               if not result:
+                       path = [os.path.normpath(os.path.join(dct['INCLUDEPY'], '..', 'libs'))]
+                       conf.to_log("\n\n# try again with -L$INCLUDEPY/../libs, and pythonXY rather than pythonX.Y (win32)\n")
+                       result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in $INCLUDEPY/../libs' % name)
+
                if result:
                        break # do not forget to set LIBPATH_PYEMBED
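
The new fallback also probes $INCLUDEPY/../libs when $prefix/libs does not yield the import library, which can help Windows setups where the two differ (for instance a virtual environment whose prefix has no libs directory). A small illustration of the two candidate paths, built from the interpreter's own configuration (assuming the same variables waf reads):

import os, sysconfig
prefix = sysconfig.get_config_var('prefix')
includepy = sysconfig.get_config_var('INCLUDEPY')
candidates = [
	os.path.join(prefix, 'libs'),                             # first attempt
	os.path.normpath(os.path.join(includepy, '..', 'libs')),  # new fallback
]
print(candidates)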
 
index 82c83e18c8ae718a7315d4687175aee3e7ce795c..b3e61325e508e29cff48aa4ca0decab6385ddd4a 100644 (file)
@@ -783,8 +783,8 @@ def set_qt5_libs_to_check(self):
                        pat = self.env.cxxstlib_PATTERN
                if Utils.unversioned_sys_platform() == 'darwin':
                        pat = r"%s\.framework"
-               re_qt = re.compile(pat % 'Qt5?(?P<name>\\D+)' + '$')
-               for x in dirlst:
+               re_qt = re.compile(pat % 'Qt5?(?P<name>\\w+)' + '$')
+               for x in sorted(dirlst):
                        m = re_qt.match(x)
                        if m:
                                self.qt5_vars.append("Qt5%s" % m.group('name'))
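
Two small robustness fixes: \w+ lets module names containing digits match (\D+ stopped at the first digit), and sorted(dirlst) makes the scan order deterministic. A quick regex illustration, assuming the usual lib%s.so shared-library pattern:

import re
old = re.compile(r'libQt5?(?P<name>\D+)\.so$')
new = re.compile(r'libQt5?(?P<name>\w+)\.so$')
print(old.match('libQt5X11Extras.so'))                 # None: the digits break the match
print(new.match('libQt5X11Extras.so').group('name'))   # 'X11Extras'
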
index 9be1ed660099406a4c871aef936104df47684d81..73c0e95315b4983d060ace69d75a382d41a93cbe 100644 (file)
@@ -4,10 +4,12 @@
 
 "Process *.rc* files for C/C++: X{.rc -> [.res|.rc.o]}"
 
+import os
 import re
 from waflib import Task
 from waflib.TaskGen import extension
 from waflib.Tools import c_preproc
+from waflib import Utils
 
 @extension('.rc')
 def rc_file(self, node):
@@ -61,6 +63,39 @@ class winrc(Task.Task):
                tmp.start(self.inputs[0], self.env)
                return (tmp.nodes, tmp.names)
 
+       def exec_command(self, cmd, **kw):
+               if self.env.WINRC_TGT_F == '/fo':
+                       # Since winres include paths may contain spaces, they do not fit in
+                       # response files and are best passed as environment variables
+                       replace_cmd = []
+                       incpaths = []
+                       while cmd:
+                               # filter include path flags
+                               flag = cmd.pop(0)
+                               if flag.upper().startswith('/I'):
+                                       if len(flag) == 2:
+                                               incpaths.append(cmd.pop(0))
+                                       else:
+                                               incpaths.append(flag[2:])
+                               else:
+                                       replace_cmd.append(flag)
+                       cmd = replace_cmd
+                       if incpaths:
+                               # append to existing environment variables in INCLUDE
+                               env = kw['env'] = dict(kw.get('env') or self.env.env or os.environ)
+                               pre_includes = env.get('INCLUDE', '')
+                               env['INCLUDE'] = pre_includes + os.pathsep + os.pathsep.join(incpaths)
+
+               return super(winrc, self).exec_command(cmd, **kw)
+
+       def quote_flag(self, flag):
+               if self.env.WINRC_TGT_F == '/fo':
+                       # winres does not support quotes around flags in response files
+                       return flag
+
+               return super(winrc, self).quote_flag(flag)
+
+
 def configure(conf):
        """
        Detects the programs RC or windres, depending on the C/C++ compiler in use
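
The new exec_command() pulls include directories (both '/I path' and '/Ipath' forms) out of the rc command line and passes them through the INCLUDE environment variable instead, because paths containing spaces do not survive in response files. The filtering step in isolation, with made-up example paths:

import os

cmd = ['rc', '/fo', 'app.res', '/I', r'C:\Program Files\SDK\include', r'/IC:\dev\inc', 'app.rc']
kept, incpaths = [], []
while cmd:
	flag = cmd.pop(0)
	if flag.upper().startswith('/I'):
		incpaths.append(cmd.pop(0) if len(flag) == 2 else flag[2:])
	else:
		kept.append(flag)
print(kept)                       # ['rc', '/fo', 'app.res', 'app.rc']
print(os.pathsep.join(incpaths))  # appended to the existing INCLUDE value
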
index 17f66949376243601357078bf5e876e8bddd20e6..bd29db93fd502e31b805ea635c461711219f403d 100644 (file)
@@ -126,7 +126,7 @@ def patch_execute():
                Invoke clangdb command before build
                """
                if self.cmd.startswith('build'):
-                       Scripting.run_command('clangdb')
+                       Scripting.run_command(self.cmd.replace('build','clangdb'))
 
                old_execute_build(self)
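
Using self.cmd.replace('build', 'clangdb') instead of the literal 'clangdb' means variant build commands invoke the matching clangdb variant, e.g.:

for cmd in ('build', 'build_debug', 'build_release'):
	print(cmd, '->', cmd.replace('build', 'clangdb'))
# build -> clangdb, build_debug -> clangdb_debug, build_release -> clangdb_release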
 
diff --git a/third_party/waf/waflib/extras/classic_runner.py b/third_party/waf/waflib/extras/classic_runner.py
new file mode 100644 (file)
index 0000000..b08c794
--- /dev/null
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2021 (ita)
+
+from waflib import Utils, Runner
+
+"""
+Re-enable the classic threading system from waf 1.x
+
+def configure(conf):
+       conf.load('classic_runner')
+"""
+
+class TaskConsumer(Utils.threading.Thread):
+       """
+       Task consumers belong to a pool of workers
+
+       They wait for tasks in the queue and then use ``task.process(...)``
+       """
+       def __init__(self, spawner):
+               Utils.threading.Thread.__init__(self)
+               """
+               Obtain :py:class:`waflib.Task.TaskBase` instances from this queue.
+               """
+               self.spawner = spawner
+               self.daemon = True
+               self.start()
+
+       def run(self):
+               """
+               Loop over the tasks to execute
+               """
+               try:
+                       self.loop()
+               except Exception:
+                       pass
+
+       def loop(self):
+               """
+               Obtain tasks from :py:attr:`waflib.Runner.TaskConsumer.ready` and call
+               :py:meth:`waflib.Task.TaskBase.process`. If the object is a function, execute it.
+               """
+               master = self.spawner.master
+               while 1:
+                       if not master.stop:
+                               try:
+                                       tsk = master.ready.get()
+                                       if tsk:
+                                               tsk.log_display(tsk.generator.bld)
+                                               master.process_task(tsk)
+                                       else:
+                                               break
+                               finally:
+                                       master.out.put(tsk)
+
+class Spawner(object):
+       """
+       Daemon thread that consumes tasks from :py:class:`waflib.Runner.Parallel` producer and
+       spawns a consuming thread :py:class:`waflib.Runner.Consumer` for each
+       :py:class:`waflib.Task.Task` instance.
+       """
+       def __init__(self, master):
+               self.master = master
+               """:py:class:`waflib.Runner.Parallel` producer instance"""
+
+               self.pool = [TaskConsumer(self) for i in range(master.numjobs)]
+
+Runner.Spawner = Spawner
index b68c5ebf2df1cc46daf2ca9d6ee0d44c8ad3d45d..09729035fecb44bf5e39969f49129561fddc2204 100644 (file)
@@ -19,7 +19,7 @@ class ColorGCCFormatter(Logs.formatter):
                        func = frame.f_code.co_name
                        if func == 'exec_command':
                                cmd = frame.f_locals.get('cmd')
-                               if isinstance(cmd, list) and ('gcc' in cmd[0] or 'g++' in cmd[0]):
+                               if isinstance(cmd, list) and (len(cmd) > 0) and ('gcc' in cmd[0] or 'g++' in cmd[0]):
                                        lines = []
                                        for line in rec.msg.splitlines():
                                                if 'warning: ' in line:
index bb787416e9fbe55feeedcff511a697023b775033..49ca9686b7b6a5db671db6f5c676d347f69e3ffe 100644 (file)
@@ -10,6 +10,9 @@ Usage:
 def options(opt):
        opt.load('eclipse')
 
+To add additional targets beside standard ones (configure, dist, install, check)
+the environment ECLIPSE_EXTRA_TARGETS can be set (ie. to ['test', 'lint', 'docs'])
+
 $ waf configure eclipse
 """
 
@@ -25,6 +28,8 @@ cdt_core = oe_cdt + '.core'
 cdt_bld = oe_cdt + '.build.core'
 extbuilder_dir = '.externalToolBuilders'
 extbuilder_name = 'Waf_Builder.launch'
+settings_dir = '.settings'
+settings_name = 'language.settings.xml'
 
 class eclipse(Build.BuildContext):
        cmd = 'eclipse'
@@ -131,9 +136,11 @@ class eclipse(Build.BuildContext):
                                        path = p.path_from(self.srcnode)
 
                                        if (path.startswith("/")):
-                                               cpppath.append(path)
+                                               if path not in cpppath:
+                                                       cpppath.append(path)
                                        else:
-                                               workspace_includes.append(path)
+                                               if path not in workspace_includes:
+                                                       workspace_includes.append(path)
 
                                        if is_cc and path not in source_dirs:
                                                source_dirs.append(path)
@@ -156,6 +163,61 @@ class eclipse(Build.BuildContext):
                        project = self.impl_create_javaproject(javasrcpath, javalibpath)
                        self.write_conf_to_xml('.classpath', project)
 
+               # Create editor language settings to have correct standards applied in IDE, as per project configuration
+               try:
+                       os.mkdir(settings_dir)
+               except OSError:
+                       pass    # Ignore if dir already exists
+
+               lang_settings = Document()
+               project = lang_settings.createElement('project')
+
+               # Language configurations for C and C++ via cdt
+               if hasc:
+                       configuration = self.add(lang_settings, project, 'configuration',
+                                                       {'id' : 'org.eclipse.cdt.core.default.config.1', 'name': 'Default'})
+
+                       extension = self.add(lang_settings, configuration, 'extension', {'point': 'org.eclipse.cdt.core.LanguageSettingsProvider'})
+
+                       provider = self.add(lang_settings, extension, 'provider',
+                                                       { 'copy-of': 'extension',
+                                                         'id': 'org.eclipse.cdt.ui.UserLanguageSettingsProvider'})
+
+                       provider = self.add(lang_settings, extension, 'provider-reference',
+                                                       { 'id': 'org.eclipse.cdt.core.ReferencedProjectsLanguageSettingsProvider',
+                                                         'ref': 'shared-provider'})
+
+                       provider = self.add(lang_settings, extension, 'provider-reference',
+                                                       { 'id': 'org.eclipse.cdt.managedbuilder.core.MBSLanguageSettingsProvider',
+                                                         'ref': 'shared-provider'})
+
+                       # C and C++ are kept as separated providers so appropriate flags are used also in mixed projects
+                       if self.env.CC:
+                               provider = self.add(lang_settings, extension, 'provider',
+                                                       { 'class': 'org.eclipse.cdt.managedbuilder.language.settings.providers.GCCBuiltinSpecsDetector',
+                                                         'console': 'false',
+                                                         'id': 'org.eclipse.cdt.managedbuilder.language.settings.providers.GCCBuiltinSpecsDetector.1',
+                                                         'keep-relative-paths' : 'false',
+                                                         'name': 'CDT GCC Built-in Compiler Settings',
+                                                         'parameter': '%s %s ${FLAGS} -E -P -v -dD "${INPUTS}"'%(self.env.CC[0],' '.join(self.env['CFLAGS'])),
+                                                         'prefer-non-shared': 'true' })
+
+                               self.add(lang_settings, provider, 'language-scope', { 'id': 'org.eclipse.cdt.core.gcc'})
+
+                       if self.env.CXX:
+                               provider = self.add(lang_settings, extension, 'provider',
+                                                       { 'class': 'org.eclipse.cdt.managedbuilder.language.settings.providers.GCCBuiltinSpecsDetector',
+                                                         'console': 'false',
+                                                         'id': 'org.eclipse.cdt.managedbuilder.language.settings.providers.GCCBuiltinSpecsDetector.2',
+                                                         'keep-relative-paths' : 'false',
+                                                         'name': 'CDT GCC Built-in Compiler Settings',
+                                                         'parameter': '%s %s ${FLAGS} -E -P -v -dD "${INPUTS}"'%(self.env.CXX[0],' '.join(self.env['CXXFLAGS'])),
+                                                         'prefer-non-shared': 'true' })
+                               self.add(lang_settings, provider, 'language-scope', { 'id': 'org.eclipse.cdt.core.g++'})
+
+               lang_settings.appendChild(project)
+               self.write_conf_to_xml('%s%s%s'%(settings_dir, os.path.sep, settings_name), lang_settings)
+
        def impl_create_project(self, executable, appname, hasc, hasjava, haspython, waf_executable):
                doc = Document()
                projectDescription = doc.createElement('projectDescription')
@@ -341,6 +403,8 @@ class eclipse(Build.BuildContext):
                addTargetWrap('dist', False)
                addTargetWrap('install', False)
                addTargetWrap('check', False)
+               for addTgt in self.env.ECLIPSE_EXTRA_TARGETS or []:
+                       addTargetWrap(addTgt, False)
 
                storageModule = self.add(doc, cproject, 'storageModule',
                                                        {'moduleId': 'cdtBuildSystem',
@@ -348,6 +412,12 @@ class eclipse(Build.BuildContext):
 
                self.add(doc, storageModule, 'project', {'id': '%s.null.1'%appname, 'name': appname})
 
+               storageModule = self.add(doc, cproject, 'storageModule',
+                                                       {'moduleId': 'org.eclipse.cdt.core.LanguageSettingsProviders'})
+
+               storageModule = self.add(doc, cproject, 'storageModule',
+                                                       {'moduleId': 'scannerConfiguration'})
+
                doc.appendChild(cproject)
                return doc
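
A minimal wscript sketch of the new ECLIPSE_EXTRA_TARGETS hook; the target names are the docstring's own examples, and setting the value at configure time is an assumption (any point that persists it into the build environment should do):

def options(opt):
	opt.load('eclipse')

def configure(conf):
	conf.env.ECLIPSE_EXTRA_TARGETS = ['test', 'lint', 'docs']   # extra targets in the generated builder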
 
index 1fc9373489a214e22baf14c108e99ba45beb7ff3..9e9952f2f7d21b7e0a74f0b9e4dd3acd28506bd8 100644 (file)
@@ -29,13 +29,6 @@ if not c_preproc.go_absolute:
 # Third-party tools are allowed to add extra names in here with append()
 supported_compilers = ['gas', 'gcc', 'icc', 'clang']
 
-def scan(self):
-       if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
-               return super(self.derived_gccdeps, self).scan()
-       nodes = self.generator.bld.node_deps.get(self.uid(), [])
-       names = []
-       return (nodes, names)
-
 re_o = re.compile(r"\.o$")
 re_splitter = re.compile(r'(?<!\\)\s+') # split by space, except when spaces are escaped
 
@@ -61,28 +54,30 @@ def path_to_node(base_node, path, cached_nodes):
        else:
                # Not hashable, assume it is a list and join into a string
                node_lookup_key = (base_node, os.path.sep.join(path))
+
        try:
-               lock.acquire()
                node = cached_nodes[node_lookup_key]
        except KeyError:
-               node = base_node.find_resource(path)
-               cached_nodes[node_lookup_key] = node
-       finally:
-               lock.release()
+               # retry with lock on cache miss
+               with lock:
+                       try:
+                               node = cached_nodes[node_lookup_key]
+                       except KeyError:
+                               node = cached_nodes[node_lookup_key] = base_node.find_resource(path)
+
        return node
 
 def post_run(self):
        if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
                return super(self.derived_gccdeps, self).post_run()
 
-       name = self.outputs[0].abspath()
-       name = re_o.sub('.d', name)
+       deps_filename = self.outputs[0].abspath()
+       deps_filename = re_o.sub('.d', deps_filename)
        try:
-               txt = Utils.readf(name)
+               deps_txt = Utils.readf(deps_filename)
        except EnvironmentError:
                Logs.error('Could not find a .d dependency file, are cflags/cxxflags overwritten?')
                raise
-       #os.remove(name)
 
        # Compilers have the choice to either output the file's dependencies
        # as one large Makefile rule:
@@ -102,15 +97,16 @@ def post_run(self):
        # So the first step is to sanitize the input by stripping out the left-
        # hand side of all these lines. After that, whatever remains are the
        # implicit dependencies of task.outputs[0]
-       txt = '\n'.join([remove_makefile_rule_lhs(line) for line in txt.splitlines()])
+       deps_txt = '\n'.join([remove_makefile_rule_lhs(line) for line in deps_txt.splitlines()])
 
        # Now join all the lines together
-       txt = txt.replace('\\\n', '')
+       deps_txt = deps_txt.replace('\\\n', '')
 
-       val = txt.strip()
-       val = [x.replace('\\ ', ' ') for x in re_splitter.split(val) if x]
+       dep_paths = deps_txt.strip()
+       dep_paths = [x.replace('\\ ', ' ') for x in re_splitter.split(dep_paths) if x]
 
-       nodes = []
+       resolved_nodes = []
+       unresolved_names = []
        bld = self.generator.bld
 
        # Dynamically bind to the cache
@@ -119,39 +115,41 @@ def post_run(self):
        except AttributeError:
                cached_nodes = bld.cached_nodes = {}
 
-       for x in val:
+       for path in dep_paths:
 
                node = None
-               if os.path.isabs(x):
-                       node = path_to_node(bld.root, x, cached_nodes)
+               if os.path.isabs(path):
+                       node = path_to_node(bld.root, path, cached_nodes)
                else:
                        # TODO waf 1.9 - single cwd value
-                       path = getattr(bld, 'cwdx', bld.bldnode)
+                       base_node = getattr(bld, 'cwdx', bld.bldnode)
                        # when calling find_resource, make sure the path does not contain '..'
-                       x = [k for k in Utils.split_path(x) if k and k != '.']
-                       while '..' in x:
-                               idx = x.index('..')
+                       path = [k for k in Utils.split_path(path) if k and k != '.']
+                       while '..' in path:
+                               idx = path.index('..')
                                if idx == 0:
-                                       x = x[1:]
-                                       path = path.parent
+                                       path = path[1:]
+                                       base_node = base_node.parent
                                else:
-                                       del x[idx]
-                                       del x[idx-1]
+                                       del path[idx]
+                                       del path[idx-1]
 
-                       node = path_to_node(path, x, cached_nodes)
+                       node = path_to_node(base_node, path, cached_nodes)
 
                if not node:
-                       raise ValueError('could not find %r for %r' % (x, self))
+                       raise ValueError('could not find %r for %r' % (path, self))
+
                if id(node) == id(self.inputs[0]):
                        # ignore the source file, it is already in the dependencies
                        # this way, successful config tests may be retrieved from the cache
                        continue
-               nodes.append(node)
 
-       Logs.debug('deps: gccdeps for %s returned %s', self, nodes)
+               resolved_nodes.append(node)
 
-       bld.node_deps[self.uid()] = nodes
-       bld.raw_deps[self.uid()] = []
+       Logs.debug('deps: gccdeps for %s returned %s', self, resolved_nodes)
+
+       bld.node_deps[self.uid()] = resolved_nodes
+       bld.raw_deps[self.uid()] = unresolved_names
 
        try:
                del self.cache_sig
@@ -160,6 +158,14 @@ def post_run(self):
 
        Task.Task.post_run(self)
 
+def scan(self):
+       if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
+               return super(self.derived_gccdeps, self).scan()
+
+       resolved_nodes = self.generator.bld.node_deps.get(self.uid(), [])
+       unresolved_names = []
+       return (resolved_nodes, unresolved_names)
+
 def sig_implicit_deps(self):
        if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
                return super(self.derived_gccdeps, self).sig_implicit_deps()
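
The path_to_node() rework replaces take-the-lock-on-every-call with a double-checked lookup: the common cache hit runs without the lock, and only a miss re-checks and fills the cache while holding it. The pattern in isolation (a generic sketch, not waf API):

import threading

lock = threading.Lock()
cache = {}

def cached_lookup(key, compute):
	try:
		return cache[key]              # fast path: plain dict read, safe under the GIL
	except KeyError:
		with lock:
			try:
				return cache[key]      # another thread may have filled it meanwhile
			except KeyError:
				cache[key] = value = compute(key)
				return value
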
index 52985dce058677741591913b59a3b4260cf9ee31..e8985bde7c7bafcb4d91c4c202c138bc1fe69a12 100644 (file)
@@ -32,7 +32,6 @@ from waflib.Tools import c_preproc, c, cxx, msvc
 from waflib.TaskGen import feature, before_method
 
 lock = threading.Lock()
-nodes = {} # Cache the path -> Node lookup
 
 PREPROCESSOR_FLAG = '/showIncludes'
 INCLUDE_PATTERN = 'Note: including file:'
@@ -50,23 +49,47 @@ def apply_msvcdeps_flags(taskgen):
                if taskgen.env.get_flat(flag).find(PREPROCESSOR_FLAG) < 0:
                        taskgen.env.append_value(flag, PREPROCESSOR_FLAG)
 
+
+def get_correct_path_case(base_path, path):
+       '''
+       Return a case-corrected version of ``path`` by searching the filesystem for
+       ``path``, relative to ``base_path``, using the case returned by the filesystem.
+       '''
+       components = Utils.split_path(path)
+
+       corrected_path = ''
+       if os.path.isabs(path):
+               corrected_path = components.pop(0).upper() + os.sep
+
+       for part in components:
+               part = part.lower()
+               search_path = os.path.join(base_path, corrected_path)
+               if part == '..':
+                       corrected_path = os.path.join(corrected_path, part)
+                       search_path = os.path.normpath(search_path)
+                       continue
+
+               for item in sorted(os.listdir(search_path)):
+                       if item.lower() == part:
+                               corrected_path = os.path.join(corrected_path, item)
+                               break
+               else:
+                       raise ValueError("Can't find %r in %r" % (part, search_path))
+
+       return corrected_path
+
+
 def path_to_node(base_node, path, cached_nodes):
        '''
        Take the base node and the path and return a node
        Results are cached because searching the node tree is expensive
        The following code is executed by threads, it is not safe, so a lock is needed...
        '''
-       # normalize the path because ant_glob() does not understand
-       # parent path components (..)
+       # normalize the path to remove parent path components (..)
        path = os.path.normpath(path)
 
        # normalize the path case to increase likelihood of a cache hit
-       path = os.path.normcase(path)
-
-       # ant_glob interprets [] and () characters, so those must be replaced
-       path = path.replace('[', '?').replace(']', '?').replace('(', '[(]').replace(')', '[)]')
-
-       node_lookup_key = (base_node, path)
+       node_lookup_key = (base_node, os.path.normcase(path))
 
        try:
                node = cached_nodes[node_lookup_key]
@@ -76,8 +99,8 @@ def path_to_node(base_node, path, cached_nodes):
                        try:
                                node = cached_nodes[node_lookup_key]
                        except KeyError:
-                               node_list = base_node.ant_glob([path], ignorecase=True, remove=False, quiet=True, regex=False)
-                               node = cached_nodes[node_lookup_key] = node_list[0] if node_list else None
+                               path = get_correct_path_case(base_node.abspath(), path)
+                               node = cached_nodes[node_lookup_key] = base_node.find_node(path)
 
        return node
 
@@ -89,9 +112,9 @@ def post_run(self):
        if getattr(self, 'cached', None):
                return Task.Task.post_run(self)
 
-       bld = self.generator.bld
-       unresolved_names = []
        resolved_nodes = []
+       unresolved_names = []
+       bld = self.generator.bld
 
        # Dynamically bind to the cache
        try:
@@ -124,11 +147,14 @@ def post_run(self):
                                        continue
 
                        if id(node) == id(self.inputs[0]):
-                               # Self-dependency
+                               # ignore the source file, it is already in the dependencies
+                               # this way, successful config tests may be retrieved from the cache
                                continue
 
                        resolved_nodes.append(node)
 
+       Logs.debug('deps: msvcdeps for %s returned %s', self, resolved_nodes)
+
        bld.node_deps[self.uid()] = resolved_nodes
        bld.raw_deps[self.uid()] = unresolved_names
 
index 8aa2db0b751ccb9ea957a1a1011714bd82a477e8..03b739f849c034ac52ae5a2405d4ace6e42ec702 100644 (file)
@@ -787,8 +787,12 @@ class msvs_generator(BuildContext):
                self.collect_dirs()
                default_project = getattr(self, 'default_project', None)
                def sortfun(x):
-                       if x.name == default_project:
+                       # folders should sort to the top
+                       if getattr(x, 'VS_GUID_SOLUTIONFOLDER', None):
                                return ''
+                       # followed by the default project
+                       elif x.name == default_project:
+                               return ' '
                        return getattr(x, 'path', None) and x.path.win32path() or x.name
                self.all_projects.sort(key=sortfun)
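
The new key works because '' sorts before ' ', which sorts before any project path, so solution folders come first, then the default project, then the rest:

print(sorted(['libs\\bar', '', ' ', 'apps\\foo']))   # ['', ' ', 'apps\\foo', 'libs\\bar']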
 
index 740ab46d963a090c14fea596f004cd9c6fb3e9ac..967caeb5a82945ba8f337604dd62129f7688bc53 100644 (file)
@@ -17,7 +17,7 @@ tasks have to be added dynamically:
 
 SWIG_EXTS = ['.swig', '.i']
 
-re_module = re.compile(r'%module(?:\s*\(.*\))?\s+(.+)', re.M)
+re_module = re.compile(r'%module(?:\s*\(.*\))?\s+([^\r\n]+)', re.M)
 
 re_1 = re.compile(r'^%module.*?\s+([\w]+)\s*?$', re.M)
 re_2 = re.compile(r'[#%](?:include|import(?:\(module=".*"\))+|python(?:begin|code)) [<"](.*)[">]', re.M)
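
Without re.DOTALL, '.' matches '\r' but not '\n', so the old (.+) capture keeps a stray carriage return on CRLF interface files; [^\r\n]+ does not. A quick demonstration:

import re
old = re.compile(r'%module(?:\s*\(.*\))?\s+(.+)', re.M)
new = re.compile(r'%module(?:\s*\(.*\))?\s+([^\r\n]+)', re.M)
code = '%module example\r\n'
print(repr(old.search(code).group(1)))   # 'example\r'
print(repr(new.search(code).group(1)))   # 'example'
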
index cc23fcd6673f9690a0fd366c2ff32ca9c2d4fcf2..2cef46c0e1c4a573c965bd41c79c3429fbfc17ba 100644 (file)
@@ -258,6 +258,19 @@ def build(bld):
        """
        Called during the build process to enable file caching
        """
+       if WAFCACHE_STATS:
+               # Init counter for statistics and hook to print results at the end
+               bld.cache_reqs = bld.cache_hits = bld.cache_puts = 0
+
+               def printstats(bld):
+                       hit_ratio = 0
+                       if bld.cache_reqs > 0:
+                               hit_ratio = (bld.cache_hits / bld.cache_reqs) * 100
+                       Logs.pprint('CYAN', '  wafcache stats: requests: %s, hits, %s, ratio: %.2f%%, writes %s' %
+                                        (bld.cache_reqs, bld.cache_hits, hit_ratio, bld.cache_puts) )
+
+               bld.add_post_fun(printstats)
+
        if process_pool:
                # already called once
                return
@@ -273,19 +286,6 @@ def build(bld):
        for x in reversed(list(Task.classes.values())):
                make_cached(x)
 
-       if WAFCACHE_STATS:
-               # Init counter for statistics and hook to print results at the end
-               bld.cache_reqs = bld.cache_hits = bld.cache_puts = 0
-
-               def printstats(bld):
-                       hit_ratio = 0
-                       if bld.cache_reqs > 0:
-                               hit_ratio = (bld.cache_hits / bld.cache_reqs) * 100
-                       Logs.pprint('CYAN', '  wafcache stats: requests: %s, hits, %s, ratio: %.2f%%, writes %s' %
-                                        (bld.cache_reqs, bld.cache_hits, hit_ratio, bld.cache_puts) )
-
-               bld.add_post_fun(printstats)
-
 def cache_command(sig, files_from, files_to):
        """
        Create a command for cache worker processes, returns a pickled
index 24176e066455dda44ce77cac802182bdca4aa8b7..c99bff4b9aeea0831e71a0ebf465dd68577d43ca 100644 (file)
@@ -56,7 +56,7 @@ def r1(code):
 @subst('Runner.py')
 def r4(code):
        "generator syntax"
-       return code.replace('next(self.biter)', 'self.biter.next()')
+       return code.replace('next(self.biter)', 'self.biter.next()').replace('self.daemon = True', 'self.setDaemon(1)')
 
 @subst('Context.py')
 def r5(code):