5 waf-powered distributed network builds, with a network cache.
Caching files from a server has advantages over an NFS/Samba shared folder:
9 - builds are much faster because they use local files
10 - builds just continue to work in case of a network glitch
11 - permissions are much simpler to manage
14 import os, urllib, tarfile, re, shutil, tempfile, sys
15 from collections import OrderedDict
16 from waflib import Context, Utils, Logs
19 from urllib.parse import urlencode
21 urlencode = urllib.urlencode
23 def safe_urlencode(data):
32 from urllib.error import URLError
34 from urllib2 import URLError
37 from urllib.request import Request, urlopen
39 from urllib2 import Request, urlopen
# Local package cache directory; the DISTNETCACHE environment variable overrides the default
DISTNETCACHE = os.environ.get('DISTNETCACHE', '/tmp/distnetcache')
# Base URL of the distnet CGI server; the DISTNETSERVER environment variable overrides it
DISTNETSERVER = os.environ.get('DISTNETSERVER', 'http://localhost:8000/cgi-bin/')
# Name of the file listing a project's package requirements
REQUIRES = 'requires.txt'
# Matches '#' comments (and any preceding whitespace) so parse_constraints can
# strip them from requires.txt content.
# Raw string: '\s' in a plain literal is an invalid escape sequence
# (DeprecationWarning; SyntaxWarning on Python 3.12+).
re_com = re.compile(r'\s*#.*', re.M)
def total_version_order(num):
	"""Return a fixed-width sort key for a dotted version string (used by list_package_versions)."""
	# NOTE(review): 'lst' is not bound in this excerpt — presumably
	# 'lst = num.split('.')' on an elided line; confirm against the full file.
	template = '%10s' * len(lst)
	ret = template % tuple(lst)
	# NOTE(review): the 'return ret' line appears to be elided in this excerpt.
def get_distnet_cache():
	"""Return the local package cache directory.

	A DISTNETCACHE variable in the project wscript takes precedence over
	the module-level default.
	"""
	module = Context.g_module
	return getattr(module, 'DISTNETCACHE', DISTNETCACHE)
59 return getattr(Context.g_module, 'DISTNETSERVER', DISTNETSERVER)
def get_download_url():
	"""Return the full URL of the server-side download script."""
	return get_server_url() + '/download.py'
65 return '%s/upload.py' % get_server_url()
def get_resolve_url():
	"""Return the full URL of the server-side resolve script."""
	return get_server_url() + '/resolve.py'
def send_package_name():
	"""Compute the path of the tarball to upload: <out>/package_to_upload.tarfile."""
	out = getattr(Context.g_module, 'out', 'build')
	pkgfile = '%s/package_to_upload.tarfile' % out
	# NOTE(review): the 'return pkgfile' line appears to be elided in this
	# excerpt; callers (package.execute, publish.execute) use the returned path.
class package(Context.Context):
	"""Context for the 'package' command: build the distribution tarball.

	NOTE(review): the 'cmd'/'fun' class attributes and the start of
	execute() (its 'def' and 'try:' lines) are elided in this excerpt.
	"""
		except AttributeError:
			# First use: initialize the list of files to ship
			files = self.files = []

		Context.Context.execute(self)
		pkgfile = send_package_name()
		if not pkgfile in files:
			# Always ship the requirements file inside the archive
			if not REQUIRES in files:
				files.append(REQUIRES)
			self.make_tarfile(pkgfile, files, add_to_package=False)

	def make_tarfile(self, filename, files, **kw):
		"""Create the tar archive *filename* containing *files*.

		Keyword args: add_to_package (default True) records the archive in
		self.files; bare (default True) stores entries without a directory
		prefix.
		"""
		if kw.get('add_to_package', True):
			self.files.append(filename)

		# TARFORMAT is defined near the top of the file (elided in this excerpt)
		with tarfile.open(filename, TARFORMAT) as tar:
			endname = os.path.split(filename)[-1]
			endname = endname.split('.')[0] + '/'
			# NOTE(review): the 'for x in files:' loop header is elided here
			tarinfo = tar.gettarinfo(x, x)
			# Normalize ownership so the archive content is reproducible
			tarinfo.uid = tarinfo.gid = 0
			tarinfo.uname = tarinfo.gname = 'root'
			tarinfo.size = os.stat(x).st_size

			# TODO - more archive creation options?
			if kw.get('bare', True):
				tarinfo.name = os.path.split(x)[1]
			# NOTE(review): the 'else:' line is elided in this excerpt
				tarinfo.name = endname + x # todo, if tuple, then..
			Logs.debug('distnet: adding %r to %s', tarinfo.name, filename)
			with open(x, 'rb') as f:
				tar.addfile(tarinfo, f)
		Logs.info('Created %s', filename)
class publish(Context.Context):
	"""Context for the 'publish' command: upload the release tarball to the server.

	NOTE(review): the 'cmd'/'fun' class attributes and the
	'def execute(self):' line are elided in this excerpt.
	"""
		if hasattr(Context.g_module, 'publish'):
			# Run the user-provided publish() function first, if any
			Context.Context.execute(self)
		mod = Context.g_module

		rfile = getattr(self, 'rfile', send_package_name())
		if not os.path.isfile(rfile):
			self.fatal('Create the release file with "waf release" first! %r' % rfile)

		# Read the tarball bytes and POST them with the package name/version
		fdata = Utils.readf(rfile, m='rb')
		data = safe_urlencode([('pkgdata', fdata), ('pkgname', mod.APPNAME), ('pkgver', mod.VERSION)])

		req = Request(get_upload_url(), data)
		response = urlopen(req, timeout=TIMEOUT)
		data = response.read().strip()

		# Python 3 returns bytes from read(); decode before comparing
		if sys.hexversion>0x300000f:
			data = data.decode('utf-8')

		# NOTE(review): the success-check line (presumably comparing the server
		# reply against an 'ok' marker) is elided in this excerpt.
			self.fatal('Could not publish the package %r' % data)
class constraint(object):
	"""A single dependency requirement parsed from one line of requires.txt."""
	def __init__(self, line=''):
		self.required_line = line
		# NOTE(review): several lines are elided in this excerpt — presumably
		# 'self.info = []' and an 'if line:' guard; confirm in the full file.

		# Line format: pkgname,version[,key=value...]
		lst = line.split(',')
		self.pkgname = lst[0]
		self.required_version = lst[1]
		# NOTE(review): the 'for k in lst[2:]:' loop header is elided here
		a, b, c = k.partition('=')
		# Extra metadata is stored as (key, value) pairs
		self.info.append((a, c))
		# Serialize the constraint back to its requires.txt form: name,version[,k=v...]
		# NOTE(review): the enclosing method header (likely __str__) and its
		# 'buf = []' initialization are elided in this excerpt.
		buf.append(self.pkgname)
		buf.append(self.required_version)
		# NOTE(review): the 'for k in self.info:' loop header is elided here;
		# each info pair is rendered as 'key=value'
		buf.append('%s=%s' % k)
		# NOTE(review): this return belongs to a separate, elided method
		# (likely __repr__).
		return "requires %s-%s" % (self.pkgname, self.required_version)
168 def human_display(self, pkgname, pkgver):
169 return '%s-%s requires %s-%s' % (pkgname, pkgver, self.pkgname, self.required_version)
178 def add_reason(self, reason):
179 self.info.append(('reason', reason))
def parse_constraints(text):
	"""Parse requires.txt content into a list of constraint objects."""
	assert(text is not None)
	# Strip '#' comments, then process the remaining lines
	text = re.sub(re_com, '', text)
	lines = text.splitlines()
	# NOTE(review): the 'constraints = []' initialization, the per-line loop
	# header, the empty-line guard and the final 'return constraints' are all
	# elided in this excerpt.
		constraints.append(constraint(line))
def list_package_versions(cachedir, pkgname):
	"""Return the locally cached versions of *pkgname*, sorted by version key."""
	pkgdir = os.path.join(cachedir, pkgname)
	# NOTE(review): a missing-directory guard appears to be elided here
	versions = os.listdir(pkgdir)
	# Sort with the fixed-width key so version components compare numerically
	versions.sort(key=total_version_order)
	# NOTE(review): the 'return versions' line is elided in this excerpt
class package_reader(Context.Context):
	"""Context that resolves package constraints, remotely or from the local cache."""
	# NOTE(review): the 'cmd'/'fun' class attributes are elided in this excerpt.

	def __init__(self, **kw):
		Context.Context.__init__(self, **kw)

		# Name/version of the current project, taken from the wscript module
		self.myproject = getattr(Context.g_module, 'APPNAME', 'project')
		self.myversion = getattr(Context.g_module, 'VERSION', '1.0')
		# (pkgname, pkgver) -> parsed constraints, filled by load_constraints
		self.cache_constraints = {}
		# Result of the most recent dependency resolution
		self.constraints = []
	def compute_dependencies(self, filename=REQUIRES):
		"""Resolve the constraints listed in *filename*.

		Uses the resolve server, falling back to a purely local resolution
		when --offline is given or the server is unreachable.
		NOTE(review): several framing lines ('else:', 'try:') are elided in
		this excerpt — the dangling 'except' below belongs to one of them.
		"""
		text = Utils.readf(filename)
		data = safe_urlencode([('text', text)])

		if '--offline' in sys.argv:
			# Explicit offline mode: resolve against the local cache only
			self.constraints = self.local_resolve(text)
		# Online path: POST the requirements text to the resolve server
		req = Request(get_resolve_url(), data)
		response = urlopen(req, timeout=TIMEOUT)
		except URLError as e:
			# Server unreachable: fall back to the local cache
			Logs.warn('The package server is down! %r', e)
			self.constraints = self.local_resolve(text)
		ret = response.read()
		# Python 3: the HTTP payload is bytes; decode before parsing
		ret = ret.decode('utf-8')
		self.constraints = parse_constraints(ret)
	def check_errors(self):
		"""Fail the build if the solver left any constraint unsatisfied.

		NOTE(review): the error flag, the 'reasons' list construction and
		several 'else:' lines are elided in this excerpt.
		"""
		for c in self.constraints:
			# An empty required_version means no version satisfied everything
			if not c.required_version:
				if len(reasons) == 1:
					Logs.error('%s but no matching package could be found in this repository', reasons[0])
				Logs.error('Conflicts on package %r:', c.pkgname)
		self.fatal('The package requirements cannot be satisfied!')
	def load_constraints(self, pkgname, pkgver, requires=REQUIRES):
		"""Return the parsed constraints of pkgname/pkgver, memoized in cache_constraints.

		NOTE(review): the 'try:'/'except KeyError:' lines framing the cache
		lookup and the final 'return ret' are elided in this excerpt.
		"""
		return self.cache_constraints[(pkgname, pkgver)]
		# Cache miss: read requires.txt from the local package cache
		text = Utils.readf(os.path.join(get_distnet_cache(), pkgname, pkgver, requires))
		ret = parse_constraints(text)
		self.cache_constraints[(pkgname, pkgver)] = ret
	def apply_constraint(self, domain, constraint):
		"""Filter *domain* down to the versions matching *constraint* ('*' acts as a wildcard)."""
		# Translate the shell-style wildcard into a regex
		vname = constraint.required_version.replace('*', '.*')
		rev = re.compile(vname, re.M)
		ret = [x for x in domain if rev.match(x)]
		# NOTE(review): the 'return ret' line and the 'def trace(self, msg):'
		# header are elided in this excerpt; the line below belongs to trace(),
		# which only logs when self.debug is set.
		if getattr(self, 'debug', None):
	def solve(self, packages_to_versions={}, packages_to_constraints={}, pkgname='', pkgver='', todo=[], done=[]):
		"""Recursively narrow per-package version domains (constraint solver).

		Returns an (updated versions map, updated constraints map) pair.
		NOTE(review): the mutable default arguments ({} / []) are shared
		between calls; the visible code only rebinds them (new dicts/lists
		below), but this is fragile — confirm before changing. Several
		framing lines ('try/except', 'else:', the todo-empty check and the
		binding of 'n_pkgname') are elided in this excerpt.
		"""
		# breadth first search
		n_packages_to_versions = dict(packages_to_versions)
		n_packages_to_constraints = dict(packages_to_constraints)

		self.trace("calling solve with %r %r %r" % (packages_to_versions, todo, done))
		done = done + [pkgname]

		constraints = self.load_constraints(pkgname, pkgver)
		self.trace("constraints %r" % constraints)

		for k in constraints:
			# NOTE(review): the two domain assignments below are likely the
			# 'try:' and 'except KeyError:' branches of an elided frame
			domain = n_packages_to_versions[k.pkgname]
			domain = list_package_versions(get_distnet_cache(), k.pkgname)
			self.trace("constraints?")
			if not k.pkgname in done:
				todo = todo + [k.pkgname]

			self.trace("domain before %s -> %s, %r" % (pkgname, k.pkgname, domain))

			# apply the constraint
			domain = self.apply_constraint(domain, k)

			self.trace("domain after %s -> %s, %r" % (pkgname, k.pkgname, domain))
			n_packages_to_versions[k.pkgname] = domain

			# then store the constraint applied
			constraints = list(packages_to_constraints.get(k.pkgname, []))
			constraints.append((pkgname, pkgver, k))
			n_packages_to_constraints[k.pkgname] = constraints

			# Dead end: some package has no version satisfying all constraints
			self.trace("no domain while processing constraint %r from %r %r" % (domain, pkgname, pkgver))
			return (n_packages_to_versions, n_packages_to_constraints)

		# next package on the todo list
		return (n_packages_to_versions, n_packages_to_constraints)
		# Tentatively pin the next package to its first (best) version and recurse
		n_pkgver = n_packages_to_versions[n_pkgname][0]
		tmp = dict(n_packages_to_versions)
		tmp[n_pkgname] = [n_pkgver]

		self.trace("fixed point %s" % n_pkgname)

		return self.solve(tmp, n_packages_to_constraints, n_pkgname, n_pkgver, todo[1:], done)
326 def get_results(self):
327 return '\n'.join([str(c) for c in self.constraints])
	def solution_to_constraints(self, versions, constraints):
		"""Convert solver output into constraint objects, recording reasons for failures.

		NOTE(review): the result-list construction, the per-package loop
		header and the branch framing ('if'/'else') are elided in this
		excerpt, as is the final return.
		"""
			c.required_version = versions[p][0]
			# No valid version left: record why each requirement rejected it
			c.required_version = ''
			for (from_pkgname, from_pkgver, c2) in constraints.get(p, ''):
				c.add_reason(c2.human_display(from_pkgname, from_pkgver))
344 def local_resolve(self, text):
345 self.cache_constraints[(self.myproject, self.myversion)] = parse_constraints(text)
346 p2v = OrderedDict({self.myproject: [self.myversion]})
347 (versions, constraints) = self.solve(p2v, {}, self.myproject, self.myversion, [])
348 return self.solution_to_constraints(versions, constraints)
	def download_to_file(self, pkgname, pkgver, subdir, tmp):
		"""Download one package file (*subdir*) from the server into temp file *tmp*."""
		data = safe_urlencode([('pkgname', pkgname), ('pkgver', pkgver), ('pkgfile', subdir)])
		req = urlopen(get_download_url(), data, timeout=TIMEOUT)
		with open(tmp, 'wb') as f:
			# NOTE(review): the body that copies the response into f is elided
			# in this excerpt.
	def extract_tar(self, subdir, pkgdir, tmpfile):
		"""Unpack *tmpfile* into a fresh temp dir, then rename it to *subdir*.

		Extracting into a sibling temp dir first means a failed extraction
		never leaves a half-populated subdir behind.
		"""
		with tarfile.open(tmpfile) as f:
			temp = tempfile.mkdtemp(dir=pkgdir)
			# NOTE(review): the extraction call itself (and any try/finally
			# framing) is elided in this excerpt.
			os.rename(temp, os.path.join(pkgdir, subdir))
	def get_pkg_dir(self, pkgname, pkgver, subdir):
		"""Ensure pkgname/pkgver/*subdir* exists in the local cache, downloading if needed.

		NOTE(review): several lines are elided in this excerpt — the cache
		directory creation, the early return for existing targets, and the
		try/finally framing around the download.
		"""
		pkgdir = os.path.join(get_distnet_cache(), pkgname, pkgver)
		if not os.path.isdir(pkgdir):

		target = os.path.join(pkgdir, subdir)

		if os.path.exists(target):

		# Download into a temp file beside the target, then move it in place
		(fd, tmp) = tempfile.mkstemp(dir=pkgdir)
		self.download_to_file(pkgname, pkgver, subdir, tmp)
		if subdir == REQUIRES:
			# requires.txt is stored as-is; anything else is a tarball
			os.rename(tmp, target)
			self.extract_tar(subdir, pkgdir, tmp)
		# NOTE(review): the enclosing method header is elided in this excerpt.
		# This code lazily resolves dependencies, then iterates over them.
		if not self.constraints:
			self.compute_dependencies()
		for x in self.constraints:
			# NOTE(review): the body of this branch is elided — presumably it
			# skips the project's own entry (e.g. 'continue')
			if x.pkgname == self.myproject:
		# NOTE(review): the call below appears to belong to another (elided)
		# method, likely this context's execute()
		self.compute_dependencies()

# Shared reader instance used by the option/configure/build hooks below
packages = package_reader()
def load_tools(ctx, extra):
	"""Load the waf tools shipped by the resolved dependency packages.

	For each resolved constraint, make sure the requested subfolder
	(*extra*) and the 'noarch' subfolder are present in the local cache,
	then load every waf_*.py tool found in the noarch folder into *ctx*.

	NOTE(review): two interior lines were elided in the reviewed excerpt;
	the loop header over packages.constraints is reconstructed from the
	visible uses of 'c'.
	"""
	for c in packages.constraints:
		packages.get_pkg_dir(c.pkgname, c.required_version, extra)
		noarchdir = packages.get_pkg_dir(c.pkgname, c.required_version, 'noarch')
		for x in os.listdir(noarchdir):
			if x.startswith('waf_') and x.endswith('.py'):
				# Bug fix: x.rstrip('.py') strips any trailing '.', 'p' or 'y'
				# characters, not the '.py' suffix (e.g. 'waf_spy.py' -> 'waf_s').
				# x[:-3] removes exactly the '.py' suffix checked above.
				ctx.load([x[:-3]], tooldir=[noarchdir])
# NOTE(review): the surrounding 'def options(opt):', 'def configure(conf):'
# and 'def build(bld):' hook headers are elided in this excerpt.
	opt.add_option('--offline', action='store_true')
	# Fetch dependency-provided tools as soon as options are parsed
	load_tools(opt, REQUIRES)
	load_tools(conf, conf.variant)
	load_tools(bld, bld.variant)