Update the copy of waf to current 1.5
authorJelmer Vernooij <jelmer@samba.org>
Sat, 7 Mar 2015 14:31:17 +0000 (15:31 +0100)
committerAndrew Bartlett <abartlet@samba.org>
Mon, 16 Mar 2015 02:00:06 +0000 (03:00 +0100)
After making the update, this commit reverts the upstream addition of
LDVERSION support in Python.py, which is necessary for Python 3
support in waf. This change conflicts with the last remaining
Samba-specific change in waf to help with cross-compilation.

Change-Id: Iedfdc0199e9d10dfbd910c4eab50c23f984b6e2d
Signed-off-by: Jelmer Vernooij <jelmer@samba.org>
Reviewed-by: Andrew Bartlett <abartlet@samba.org>
buildtools/wafadmin/3rdparty/build_file_tracker.py [new file with mode: 0644]
buildtools/wafadmin/3rdparty/prefork.py [new file with mode: 0755]
buildtools/wafadmin/Node.py
buildtools/wafadmin/TaskGen.py
buildtools/wafadmin/Tools/tex.py
buildtools/wafadmin/Utils.py

diff --git a/buildtools/wafadmin/3rdparty/build_file_tracker.py b/buildtools/wafadmin/3rdparty/build_file_tracker.py
new file mode 100644 (file)
index 0000000..9c48928
--- /dev/null
@@ -0,0 +1,54 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2015
+
+"""
+Force tasks to use file timestamps so that touching build files triggers partial rebuilds
+
+touch out/libfoo.a
+... rebuild what depends on libfoo.a
+
+to use::
+    def options(opt):
+        opt.tool_options('build_file_tracker')
+"""
+
+import os
+import Task, Utils
+
+def signature(self):
+       try: return self.cache_sig[0]
+       except AttributeError: pass
+
+       self.m = Utils.md5()
+
+       # explicit deps
+       exp_sig = self.sig_explicit_deps()
+
+       # env vars
+       var_sig = self.sig_vars()
+
+       # implicit deps
+       imp_sig = Task.SIG_NIL
+       if self.scan:
+               try:
+                       imp_sig = self.sig_implicit_deps()
+               except ValueError:
+                       return self.signature()
+
+       # timestamp dependency on build files only (source files are hashed)
+       buf = []
+       for k in self.inputs + getattr(self, 'dep_nodes', []) + self.generator.bld.node_deps.get(self.unique_id(), []):
+               if k.id & 3 == 3:
+                       t = os.stat(k.abspath(self.env)).st_mtime
+                       buf.append(t)
+       self.m.update(str(buf))
+
+       # we now have the signature (first element) and the details (for debugging)
+       ret = self.m.digest()
+       self.cache_sig = (ret, exp_sig, imp_sig, var_sig)
+       return ret
+
+Task.Task.signature_bak = Task.Task.signature # unused, kept just in case
+Task.Task.signature = signature # overridden
+
diff --git a/buildtools/wafadmin/3rdparty/prefork.py b/buildtools/wafadmin/3rdparty/prefork.py
new file mode 100755 (executable)
index 0000000..1c760c2
--- /dev/null
@@ -0,0 +1,276 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2015 (ita)
+#
+# prefer the waf 1.8 version
+
+"""
+The full samba build can be faster by ~10%, but there are a few limitations:
+* only one build process should be run at a time as the servers would use the same ports
+* only one build command is going to be called ("waf build configure build" would not work)
+
+def build(bld):
+
+    mod = Utils.load_tool('prefork')
+    mod.build(bld)
+    ...
+    (build declarations after)
+"""
+
+import os, re, socket, threading, sys, subprocess, time, atexit, traceback
+try:
+       import SocketServer
+except ImportError:
+       import socketserver as SocketServer
+try:
+       from queue import Queue
+except ImportError:
+       from Queue import Queue
+try:
+       import cPickle
+except ImportError:
+       import pickle as cPickle
+
+DEFAULT_PORT = 51200
+
+HEADER_SIZE = 128
+
+REQ = 'REQ'
+RES = 'RES'
+BYE = 'BYE'
+
+def make_header(params):
+       header = ','.join(params)
+       if sys.hexversion > 0x3000000:
+               header = header.encode('iso8859-1')
+       header = header.ljust(HEADER_SIZE)
+       assert(len(header) == HEADER_SIZE)
+       return header
+
+
+re_valid_query = re.compile('^[a-zA-Z0-9_, ]+$')
+class req(SocketServer.StreamRequestHandler):
+       def handle(self):
+               while 1:
+                       try:
+                               self.process_command()
+                       except Exception as e:
+                               print(e)
+                               break
+
+       def process_command(self):
+               query = self.rfile.read(HEADER_SIZE)
+               if not query:
+                       return
+               #print(len(query))
+               assert(len(query) == HEADER_SIZE)
+               if sys.hexversion > 0x3000000:
+                       query = query.decode('iso8859-1')
+               #print "%r" % query
+               if not re_valid_query.match(query):
+                       raise ValueError('Invalid query %r' % query)
+
+               query = query.strip().split(',')
+
+               if query[0] == REQ:
+                       self.run_command(query[1:])
+               elif query[0] == BYE:
+                       raise ValueError('Exit')
+               else:
+                       raise ValueError('Invalid query %r' % query)
+
+       def run_command(self, query):
+
+               size = int(query[0])
+               data = self.rfile.read(size)
+               assert(len(data) == size)
+               kw = cPickle.loads(data)
+
+               # run command
+               ret = out = err = exc = None
+               cmd = kw['cmd']
+               del kw['cmd']
+               #print(cmd)
+
+               try:
+                       if kw['stdout'] or kw['stderr']:
+                               p = subprocess.Popen(cmd, **kw)
+                               (out, err) = p.communicate()
+                               ret = p.returncode
+                       else:
+                               ret = subprocess.Popen(cmd, **kw).wait()
+               except Exception as e:
+                       ret = -1
+                       exc = str(e) + traceback.format_exc()
+
+               # write the results
+               if out or err or exc:
+                       data = (out, err, exc)
+                       data = cPickle.dumps(data, -1)
+               else:
+                       data = ''
+
+               params = [RES, str(ret), str(len(data))]
+
+               self.wfile.write(make_header(params))
+
+               if data:
+                       self.wfile.write(data)
+
+def create_server(conn, cls):
+       #SocketServer.ThreadingTCPServer.allow_reuse_address = True
+       #server = SocketServer.ThreadingTCPServer(conn, req)
+
+       SocketServer.TCPServer.allow_reuse_address = True
+       server = SocketServer.TCPServer(conn, req)
+       #server.timeout = 6000 # seconds
+       server.serve_forever(poll_interval=0.001)
+
+if __name__ == '__main__':
+       if len(sys.argv) > 1:
+               port = int(sys.argv[1])
+       else:
+               port = DEFAULT_PORT
+       #conn = (socket.gethostname(), port)
+       conn = ("127.0.0.1", port)
+       #print("listening - %r %r\n" % conn)
+       create_server(conn, req)
+else:
+
+       import Runner, Utils
+
+       def init_task_pool(self):
+               # lazy creation, and set a common pool for all task consumers
+               pool = self.pool = []
+               for i in range(self.numjobs):
+                       consumer = Runner.get_pool()
+                       pool.append(consumer)
+                       consumer.idx = i
+               self.ready = Queue(0)
+               def setq(consumer):
+                       consumer.ready = self.ready
+                       try:
+                               threading.current_thread().idx = consumer.idx
+                       except Exception as e:
+                               print(e)
+               for x in pool:
+                       x.ready.put(setq)
+               return pool
+       Runner.Parallel.init_task_pool = init_task_pool
+
+       PORT = 51200
+
+       def make_server(idx):
+               port = PORT + idx
+               cmd = [sys.executable, os.path.abspath(__file__), str(port)]
+               proc = subprocess.Popen(cmd)
+               proc.port = port
+               return proc
+
+       def make_conn(srv):
+               #port = PORT + idx
+               port = srv.port
+               conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+               conn.connect(('127.0.0.1', port))
+               return conn
+
+       SERVERS = []
+       CONNS = []
+       def close_all():
+               while CONNS:
+                       conn = CONNS.pop()
+                       try:
+                               conn.close()
+                       except:
+                               pass
+               while SERVERS:
+                       srv = SERVERS.pop()
+                       try:
+                               srv.kill()
+                       except:
+                               pass
+       atexit.register(close_all)
+
+       def put_data(conn, data):
+               conn.send(data)
+
+       def read_data(conn, siz):
+               ret = conn.recv(siz)
+               if not ret:
+                       print("closed connection?")
+
+               assert(len(ret) == siz)
+               return ret
+
+       def exec_command(cmd, **kw):
+               if 'log' in kw:
+                       log = kw['log']
+                       kw['stdout'] = kw['stderr'] = subprocess.PIPE
+                       del(kw['log'])
+               else:
+                       kw['stdout'] = kw['stderr'] = None
+               kw['shell'] = isinstance(cmd, str)
+
+               idx = threading.current_thread().idx
+               kw['cmd'] = cmd
+
+               data = cPickle.dumps(kw, -1)
+               params = [REQ, str(len(data))]
+               header = make_header(params)
+
+               conn = CONNS[idx]
+
+               put_data(conn, header)
+               put_data(conn, data)
+
+               data = read_data(conn, HEADER_SIZE)
+               if sys.hexversion > 0x3000000:
+                       data = data.decode('iso8859-1')
+
+               lst = data.split(',')
+               ret = int(lst[1])
+               dlen = int(lst[2])
+
+               out = err = None
+               if dlen:
+                       data = read_data(conn, dlen)
+                       (out, err, exc) = cPickle.loads(data)
+                       if exc:
+                               raise Utils.WafError('Execution failure: %s' % exc)
+
+               if out:
+                       log.write(out)
+               if err:
+                       log.write(err)
+
+               return ret
+
+       def __init__(self):
+               threading.Thread.__init__(self)
+
+               # identifier of the current thread
+               self.idx = len(SERVERS)
+
+               # create a server and wait for the connection
+               srv = make_server(self.idx)
+               SERVERS.append(srv)
+
+               conn = None
+               for x in range(30):
+                       try:
+                               conn = make_conn(srv)
+                               break
+                       except socket.error:
+                               time.sleep(0.01)
+               if not conn:
+                       raise ValueError('Could not start the server!')
+               CONNS.append(conn)
+
+               self.setDaemon(1)
+               self.start()
+       Runner.TaskConsumer.__init__ = __init__
+
+       def build(bld):
+               # dangerous, there is no other command hopefully
+               Utils.exec_command = exec_command
+
index 236dd0d2b3f5ee044bb4a31eda439dd8840a4846..4fa205c96becd472709380a802937d9e7c929d51 100644 (file)
@@ -663,7 +663,7 @@ class Node(object):
        def update_build_dir(self, env=None):
 
                if not env:
-                       for env in bld.all_envs:
+                       for env in self.bld.all_envs:
                                self.update_build_dir(env)
                        return
 
index ae1834a10a4212c4e533ab5e32045c1519e82734..52d0236c850966c96fbeff89870ad022746c1a5a 100644 (file)
@@ -567,7 +567,7 @@ def exec_rule(self):
        if getattr(self, 'cwd', None):
                tsk.cwd = self.cwd
 
-       if getattr(self, 'on_results', None):
+       if getattr(self, 'on_results', None) or getattr(self, 'update_outputs', None):
                Task.update_outputs(cls)
 
        if getattr(self, 'always', None):
index 2dd748b23205d4d8a482091d2c4cada7f9a6bccc..700c54fc6e4ffd50dec58f3e95824e9383e72450 100644 (file)
@@ -154,7 +154,7 @@ def tex_build(task, command='LATEX'):
                task.env.SRCFILE = srcfile
                ret = fun(task)
                if ret:
-                       error('error when calling %s %s' % (command, latex_compile_cmd))
+                       error('error when calling %s %s' % (command, latex_fun))
                        return ret
 
        return None # ok
index 91ded93b6e7d0ef194b2166e3d773b3f9fe7e8e5..cff6719261d37bce390a57bf71cd0b37c0db944d 100644 (file)
@@ -416,15 +416,15 @@ def pprint(col, str, label='', sep='\n'):
        "print messages in color"
        sys.stderr.write("%s%s%s %s%s" % (Logs.colors(col), str, Logs.colors.NORMAL, label, sep))
 
-def check_dir(dir):
+def check_dir(path):
        """If a folder doesn't exists, create it."""
-       try:
-               os.lstat(dir)
-       except OSError:
+       if not os.path.isdir(path):
                try:
-                       os.makedirs(dir)
+                       os.makedirs(path)
                except OSError, e:
-                       raise WafError("Cannot create folder '%s' (original error: %s)" % (dir, e))
+                       if not os.path.isdir(path):
+                               raise Errors.WafError('Cannot create the folder %r' % path, ex=e)
+
 
 def cmd_output(cmd, **kw):