3 # WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
"""
waf-powered distributed network builds, with a network cache.

Caching files from a server has advantages over an NFS/Samba shared folder:

- builds are much faster because they use local files
- builds just continue to work in case of a network glitch
- permissions are much simpler to manage
"""
18 import os, urllib, tarfile, re, shutil, tempfile, sys
19 from collections import OrderedDict
20 from waflib import Context, Utils, Logs
# Python 3 moved urlencode to urllib.parse; fall back to the Python 2 location
try:
	from urllib.parse import urlencode
except ImportError:
	urlencode = urllib.urlencode
def safe_urlencode(data):
	"""Encode *data* for a POST request, returning bytes on Python 3.

	urlencode() returns a str; the HTTP layer wants bytes, so encode to
	utf-8 when the object supports it (Python 2 byte strings do not need it).
	"""
	x = urlencode(data)
	try:
		x = x.encode('utf-8')
	except Exception:
		pass
	return x
# URLError lives in urllib.error on Python 3, urllib2 on Python 2
try:
	from urllib.error import URLError
except ImportError:
	from urllib2 import URLError
# Request/urlopen live in urllib.request on Python 3, urllib2 on Python 2
try:
	from urllib.request import Request, urlopen
except ImportError:
	from urllib2 import Request, urlopen
# Defaults, overridable from the environment (and from the project wscript,
# see get_distnet_cache/get_server_url below)
DISTNETCACHE = os.environ.get('DISTNETCACHE', '/tmp/distnetcache')
DISTNETSERVER = os.environ.get('DISTNETSERVER', 'http://localhost:8000/cgi-bin/')
TARFORMAT = 'w:bz2' # archive mode used by make_tarfile
TIMEOUT = 60 # network timeout for server requests, in seconds
REQUIRES = 'requires.txt'

# strips '#' comments from requires files; raw string, since '\s' in a
# plain literal is an invalid escape sequence on modern Python
re_com = re.compile(r'\s*#.*', re.M)
def total_version_order(num):
	"""Map a dotted version string to a fixed-width string that sorts numerically.

	Each dot-separated component is right-aligned in a 10-character field,
	so that '0.9' sorts before '0.10' under plain string comparison.
	"""
	lst = num.split('.')
	template = '%10s' * len(lst)
	ret = template % tuple(lst)
	return ret
def get_distnet_cache():
	"""Return the local package cache directory (wscript override or env/default)."""
	module = Context.g_module
	return getattr(module, 'DISTNETCACHE', DISTNETCACHE)
def get_server_url():
	"""Return the base URL of the package server (wscript override or env/default)."""
	return getattr(Context.g_module, 'DISTNETSERVER', DISTNETSERVER)
def get_download_url():
	"""URL of the server-side download script."""
	base = get_server_url()
	return '%s/download.py' % base
def get_upload_url():
	"""URL of the server-side upload script."""
	return '%s/upload.py' % get_server_url()
def get_resolve_url():
	"""URL of the server-side constraint resolution script."""
	base = get_server_url()
	return '%s/resolve.py' % base
def send_package_name():
	"""Return the path of the tarball to create and upload (under the build dir)."""
	out = getattr(Context.g_module, 'out', 'build')
	pkgfile = '%s/package_to_upload.tarfile' % out
	return pkgfile
class package(Context.Context):
	"""Create a tarball of the files to distribute (``waf package``)."""
	fun = 'package'
	cmd = 'package'

	def execute(self):
		# self.files may be filled by the user command before execute() runs
		try:
			files = self.files
		except AttributeError:
			files = self.files = []

		Context.Context.execute(self)
		pkgfile = send_package_name()
		if pkgfile not in files:
			# the requirements file always ships with the package
			if REQUIRES not in files:
				files.append(REQUIRES)
			self.make_tarfile(pkgfile, files, add_to_package=False)

	def make_tarfile(self, filename, files, **kw):
		"""Create the archive *filename* containing *files*.

		Keyword args: add_to_package (default True) also records the archive in
		self.files; bare (default True) stores entries without a directory prefix.
		"""
		if kw.get('add_to_package', True):
			self.files.append(filename)

		with tarfile.open(filename, TARFORMAT) as tar:
			endname = os.path.split(filename)[-1]
			endname = endname.split('.')[0] + '/'
			for x in files:
				tarinfo = tar.gettarinfo(x, x)
				# normalize ownership so the archive is reproducible
				tarinfo.uid = tarinfo.gid = 0
				tarinfo.uname = tarinfo.gname = 'root'
				tarinfo.size = os.stat(x).st_size

				# TODO - more archive creation options?
				if kw.get('bare', True):
					tarinfo.name = os.path.split(x)[1]
				else:
					tarinfo.name = endname + x # todo, if tuple, then..
				Logs.debug('distnet: adding %r to %s', tarinfo.name, filename)
				with open(x, 'rb') as f:
					tar.addfile(tarinfo, f)
		Logs.info('Created %s', filename)
class publish(Context.Context):
	"""Upload the release tarball to the network server (``waf publish``)."""
	fun = 'publish'
	cmd = 'publish'

	def execute(self):
		# let the project hook run first, if any
		if hasattr(Context.g_module, 'publish'):
			Context.Context.execute(self)
		mod = Context.g_module

		rfile = getattr(self, 'rfile', send_package_name())
		if not os.path.isfile(rfile):
			self.fatal('Create the release file with "waf release" first! %r' % rfile)

		fdata = Utils.readf(rfile, m='rb')
		data = safe_urlencode([('pkgdata', fdata), ('pkgname', mod.APPNAME), ('pkgver', mod.VERSION)])

		req = Request(get_upload_url(), data)
		response = urlopen(req, timeout=TIMEOUT)
		data = response.read().strip()

		if sys.hexversion>0x300000f:
			data = data.decode('utf-8')

		# the server replies 'ok' on success; anything else is an error message
		if data != 'ok':
			self.fatal('Could not publish the package %r' % data)
class constraint(object):
	"""One dependency line from a requires file: ``pkgname,version[,key=value...]``."""
	def __init__(self, line=''):
		self.required_line = line
		# list of (key, value) tuples parsed from the line, plus 'reason' entries
		self.info = []

		line = line.strip()
		if not line:
			return

		lst = line.split(',')
		if lst:
			self.pkgname = lst[0]
			self.required_version = lst[1]
			for k in lst:
				a, b, c = k.partition('=')
				if a and c:
					self.info.append((a, c))

	def __str__(self):
		# serialize back to the requires-file format
		buf = []
		buf.append(self.pkgname)
		buf.append(self.required_version)
		for k in self.info:
			buf.append('%s=%s' % k)
		return ','.join(buf)

	def __repr__(self):
		return "requires %s-%s" % (self.pkgname, self.required_version)

	def human_display(self, pkgname, pkgver):
		"""One-line explanation of why *pkgname*-*pkgver* needs this package."""
		return '%s-%s requires %s-%s' % (pkgname, pkgver, self.pkgname, self.required_version)

	def why(self):
		"""Return the list of reasons recorded by add_reason()."""
		ret = []
		for x in self.info:
			if x[0] == 'reason':
				ret.append(x[1])
		return ret

	def add_reason(self, reason):
		"""Record a human-readable reason for this constraint."""
		self.info.append(('reason', reason))
def parse_constraints(text):
	"""Parse a requires-file *text* into a list of constraint objects.

	Comments ('#' to end of line) and blank lines are ignored.
	"""
	assert(text is not None)
	constraints = []
	text = re.sub(re_com, '', text)
	lines = text.splitlines()
	for line in lines:
		line = line.strip()
		if not line:
			continue
		constraints.append(constraint(line))
	return constraints
def list_package_versions(cachedir, pkgname):
	"""Return the cached versions of *pkgname*, preferred (highest) first.

	Returns an empty list when the package is not present in the cache.
	"""
	pkgdir = os.path.join(cachedir, pkgname)
	try:
		versions = os.listdir(pkgdir)
	except OSError:
		return []
	versions.sort(key=total_version_order)
	versions.reverse()
	return versions
class package_reader(Context.Context):
	"""Reads the project requirements and resolves the package versions to use."""

	def __init__(self, **kw):
		Context.Context.__init__(self, **kw)

		# identity of the current project, used as the root of the dependency solve
		self.myproject = getattr(Context.g_module, 'APPNAME', 'project')
		self.myversion = getattr(Context.g_module, 'VERSION', '1.0')
		# (pkgname, pkgver) -> parsed constraint list, filled by load_constraints()
		self.cache_constraints = {}
		# resolved constraints, filled by compute_dependencies()
		self.constraints = []
219 def compute_dependencies(self, filename=REQUIRES):
220 text = Utils.readf(filename)
221 data = safe_urlencode([('text', text)])
223 if '--offline' in sys.argv:
224 self.constraints = self.local_resolve(text)
226 req = Request(get_resolve_url(), data)
228 response = urlopen(req, timeout=TIMEOUT)
229 except URLError as e:
230 Logs.warn('The package server is down! %r', e)
231 self.constraints = self.local_resolve(text)
233 ret = response.read()
235 ret = ret.decode('utf-8')
239 self.constraints = parse_constraints(ret)
242 def check_errors(self):
244 for c in self.constraints:
245 if not c.required_version:
249 if len(reasons) == 1:
250 Logs.error('%s but no matching package could be found in this repository', reasons[0])
252 Logs.error('Conflicts on package %r:', c.pkgname)
256 self.fatal('The package requirements cannot be satisfied!')
258 def load_constraints(self, pkgname, pkgver, requires=REQUIRES):
260 return self.cache_constraints[(pkgname, pkgver)]
262 text = Utils.readf(os.path.join(get_distnet_cache(), pkgname, pkgver, requires))
263 ret = parse_constraints(text)
264 self.cache_constraints[(pkgname, pkgver)] = ret
267 def apply_constraint(self, domain, constraint):
268 vname = constraint.required_version.replace('*', '.*')
269 rev = re.compile(vname, re.M)
270 ret = [x for x in domain if rev.match(x)]
274 if getattr(self, 'debug', None):
277 def solve(self, packages_to_versions={}, packages_to_constraints={}, pkgname='', pkgver='', todo=[], done=[]):
278 # breadth first search
279 n_packages_to_versions = dict(packages_to_versions)
280 n_packages_to_constraints = dict(packages_to_constraints)
282 self.trace("calling solve with %r %r %r" % (packages_to_versions, todo, done))
283 done = done + [pkgname]
285 constraints = self.load_constraints(pkgname, pkgver)
286 self.trace("constraints %r" % constraints)
288 for k in constraints:
290 domain = n_packages_to_versions[k.pkgname]
292 domain = list_package_versions(get_distnet_cache(), k.pkgname)
295 self.trace("constraints?")
296 if not k.pkgname in done:
297 todo = todo + [k.pkgname]
299 self.trace("domain before %s -> %s, %r" % (pkgname, k.pkgname, domain))
301 # apply the constraint
302 domain = self.apply_constraint(domain, k)
304 self.trace("domain after %s -> %s, %r" % (pkgname, k.pkgname, domain))
306 n_packages_to_versions[k.pkgname] = domain
308 # then store the constraint applied
309 constraints = list(packages_to_constraints.get(k.pkgname, []))
310 constraints.append((pkgname, pkgver, k))
311 n_packages_to_constraints[k.pkgname] = constraints
314 self.trace("no domain while processing constraint %r from %r %r" % (domain, pkgname, pkgver))
315 return (n_packages_to_versions, n_packages_to_constraints)
317 # next package on the todo list
319 return (n_packages_to_versions, n_packages_to_constraints)
322 n_pkgver = n_packages_to_versions[n_pkgname][0]
323 tmp = dict(n_packages_to_versions)
324 tmp[n_pkgname] = [n_pkgver]
326 self.trace("fixed point %s" % n_pkgname)
328 return self.solve(tmp, n_packages_to_constraints, n_pkgname, n_pkgver, todo[1:], done)
330 def get_results(self):
331 return '\n'.join([str(c) for c in self.constraints])
333 def solution_to_constraints(self, versions, constraints):
341 c.required_version = versions[p][0]
343 c.required_version = ''
344 for (from_pkgname, from_pkgver, c2) in constraints.get(p, ''):
345 c.add_reason(c2.human_display(from_pkgname, from_pkgver))
348 def local_resolve(self, text):
349 self.cache_constraints[(self.myproject, self.myversion)] = parse_constraints(text)
350 p2v = OrderedDict({self.myproject: [self.myversion]})
351 (versions, constraints) = self.solve(p2v, {}, self.myproject, self.myversion, [])
352 return self.solution_to_constraints(versions, constraints)
354 def download_to_file(self, pkgname, pkgver, subdir, tmp):
355 data = safe_urlencode([('pkgname', pkgname), ('pkgver', pkgver), ('pkgfile', subdir)])
356 req = urlopen(get_download_url(), data, timeout=TIMEOUT)
357 with open(tmp, 'wb') as f:
364 def extract_tar(self, subdir, pkgdir, tmpfile):
365 with tarfile.open(tmpfile) as f:
366 temp = tempfile.mkdtemp(dir=pkgdir)
369 os.rename(temp, os.path.join(pkgdir, subdir))
376 def get_pkg_dir(self, pkgname, pkgver, subdir):
377 pkgdir = os.path.join(get_distnet_cache(), pkgname, pkgver)
378 if not os.path.isdir(pkgdir):
381 target = os.path.join(pkgdir, subdir)
383 if os.path.exists(target):
386 (fd, tmp) = tempfile.mkstemp(dir=pkgdir)
389 self.download_to_file(pkgname, pkgver, subdir, tmp)
390 if subdir == REQUIRES:
391 os.rename(tmp, target)
393 self.extract_tar(subdir, pkgdir, tmp)
403 if not self.constraints:
404 self.compute_dependencies()
405 for x in self.constraints:
406 if x.pkgname == self.myproject:
412 self.compute_dependencies()
# module-level reader shared by the option/configure/build hooks below
packages = package_reader()
def load_tools(ctx, extra):
	"""Download the dependency packages and load the waf tools they provide."""
	global packages
	for c in packages.constraints:
		packages.get_pkg_dir(c.pkgname, c.required_version, extra)
		noarchdir = packages.get_pkg_dir(c.pkgname, c.required_version, 'noarch')
		for x in os.listdir(noarchdir):
			if x.startswith('waf_') and x.endswith('.py'):
				# x[:-3] removes the '.py' suffix; rstrip('.py') would strip any
				# trailing 'p'/'y'/'.' characters (e.g. 'waf_copy.py' -> 'waf_co')
				ctx.load([x[:-3]], tooldir=[noarchdir])
def options(opt):
	"""Add --offline, resolve the dependencies and load their waf tools."""
	opt.add_option('--offline', action='store_true')
	packages.execute()
	load_tools(opt, REQUIRES)
def configure(conf):
	"""Load the dependency tools for the configuration variant."""
	load_tools(conf, conf.variant)
def build(bld):
	"""Load the dependency tools for the build variant."""
	load_tools(bld, bld.variant)