# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
-from buildfarm import setup_db
from cStringIO import StringIO
+import collections
import hashlib
import os
import re
-import sqlite3
import time
-import util
class BuildSummary(object):
self.status = status
# Result of a single build stage: the stage name (e.g. "CONFIGURE", "TEST")
# and its integer result code (0 == success).
BuildStageResult = collections.namedtuple("BuildStageResult", ["name", "result"])
+
+
class MissingRevisionInfo(Exception):
    """Revision info could not be found in the build log."""

    def __init__(self, build):
        # Pass a message to Exception so str(exc) and log output are
        # informative instead of empty; keep the build on the instance so
        # callers can inspect it.
        Exception.__init__(
            self, "no revision info found in build log for %r" % (build,))
        self.build = build
+
+
class BuildStatus(object):
def __init__(self, stages=None, other_failures=None):
else:
self.other_failures = set()
+ @property
+ def failed(self):
+ if self.other_failures:
+ return True
+ return not all([x == 0 for x in self._status_tuple()])
+
    def __serialize__(self):
        # Serialized form is simply repr(), e.g.
        # "BuildStatus([...], set([...]))"; __deserialize__ evaluates it back.
        return repr(self)

    @classmethod
    def __deserialize__(cls, text):
        # NOTE(review): eval() of the stored text is only safe if the
        # serialized data comes from a trusted store — never feed
        # untrusted input here.
        return eval(text)
+
def __str__(self):
if self.other_failures:
return ",".join(self.other_failures)
return False
def _status_tuple(self):
- return [v for (k, v) in self.stages]
+ return [sr.result for sr in self.stages]
    def regressed_since(self, other):
        """Check if this build has regressed since another build."""
        # Running out of disk is an environmental failure, never counted as
        # a regression.
        if "disk full" in self.other_failures:
            return False
        if "timeout" in self.other_failures and "timeout" in other.other_failures:
            # When the timeout happens exactly can differ slightly, so it's okay
            # if the numbers are a bit different..
            return False
        if "panic" in self.other_failures and not "panic" in other.other_failures:
            # A panic that the other build did not have is always a regression.
            return True
        # Fall back to comparing stage result codes; cmp() is nonzero when
        # they differ. NOTE(review): this is truthy for improvements as well
        # as regressions — confirm callers only use it as a "changed" flag.
        return cmp(self._status_tuple(), other._status_tuple())
    def __cmp__(self, other):
        # Operands deliberately reversed: compares other's stages against
        # ours, inverting the natural sort order.
        # NOTE(review): confirm the reversal is intentional and not a typo.
        return cmp(other.stages, self.stages)
def __repr__(self):
- return "%s(%r)" % (self.__class__.__name__, (self.stages, self.other_failures))
+ return "%s(%r, %r)" % (self.__class__.__name__, self.stages, self.other_failures)
def check_dir_exists(kind, path):
ret = BuildStatus()
stages = []
+ re_status = re.compile("^([A-Z_]+) STATUS:(\s*\d+)$")
+ re_action = re.compile("^ACTION (PASSED|FAILED):\s+test$")
for l in log:
- m = re.match("^([A-Z_]+) STATUS:(\s*\d+)$", l)
- if m:
- stages.append((m.group(1), int(m.group(2).strip())))
- if m.group(1) == "TEST":
- test_seen = 1
- continue
- m = re.match("^ACTION (PASSED|FAILED):\s+test$", l)
- if m and not test_seen:
- if m.group(1) == "PASSED":
- stages.append(("TEST", 0))
- else:
- stages.append(("TEST", 1))
- continue
-
if l.startswith("No space left on device"):
ret.other_failures.add("disk full")
continue
if l.startswith("maximum runtime exceeded"):
ret.other_failures.add("timeout")
continue
- m = re.match("^(PANIC|INTERNAL ERROR):.*$", l)
- if m:
+ if l.startswith("PANIC:") or l.startswith("INTERNAL ERROR:"):
ret.other_failures.add("panic")
continue
if l.startswith("testsuite-failure: ") or l.startswith("testsuite-error: "):
if l.startswith("testsuite-success: "):
test_successes += 1
continue
+ m = re_status.match(l)
+ if m:
+ stages.append(BuildStageResult(m.group(1), int(m.group(2).strip())))
+ if m.group(1) == "TEST":
+ test_seen = 1
+ continue
+ m = re_action.match(l)
+ if m and not test_seen:
+ if m.group(1) == "PASSED":
+ stages.append(BuildStageResult("TEST", 0))
+ else:
+ stages.append(BuildStageResult("TEST", 1))
+ continue
# Scan err file for specific errors
for l in err:
if "No space left on device" in l:
ret.other_failures.add("disk full")
- def map_stage(name, result):
- if name != "TEST":
- return (name, result)
    def map_stage(sr):
        # Non-TEST stages pass through unchanged.
        if sr.name != "TEST":
            return sr
        # TEST is special
        if test_successes + test_failures == 0:
            # No granular test output
            return BuildStageResult("TEST", sr.result)
        if sr.result == 1 and test_failures == 0:
            # Stage claims failure but no individual test failed — record
            # the contradiction and return a sentinel result.
            ret.other_failures.add("inconsistent test result")
            return BuildStageResult("TEST", -1)
        # Use the number of failed tests as the TEST result code.
        return BuildStageResult("TEST", test_failures)

    ret.stages = map(map_stage, stages)
    return ret
"""A single build of a tree on a particular host using a particular compiler.
"""
    def __init__(self, basename, tree, host, compiler, rev=None):
        # basename: path prefix of this build's files (<basename>.log/.err).
        self.basename = basename
        self.tree = tree
        self.host = host
        self.compiler = compiler
        # Both attributes hold the same revision id (or None when unknown);
        # two names are kept so existing callers of either keep working.
        self.commit_revision = self.revision = rev
def __repr__(self):
- if self.revision:
+ if self.revision is not None:
return "<%s: revision %s of %s on %s using %s>" % (self.__class__.__name__, self.revision, self.tree, self.host, self.compiler)
else:
return "<%s: %s on %s using %s>" % (self.__class__.__name__, self.tree, self.host, self.compiler)
    def remove_logs(self):
        # Delete this build's log files; the .err file may not exist.
        os.unlink(self.basename + ".log")
        if os.path.exists(self.basename+".err"):
            os.unlink(self.basename+".err")

    def remove(self):
        # Remove all on-disk traces of this build.
        self.remove_logs()
###################
# the mtime age is used to determine if builds are still happening
# on a host.
finally:
f.close()
+ if revid is None:
+ raise MissingRevisionInfo(self)
+
return (revid, timestamp)
def status(self):
return len(file.readlines())
-class CachingBuild(Build):
- """Build subclass that caches some of the results that are expensive
- to calculate."""
-
- def revision_details(self):
- if self.revision:
- cachef = self._store.cache_fname(self.tree, self.host, self.compiler, self.revision)
- else:
- cachef = self._store.cache_fname(self.tree, self.host, self.compiler)
- st1 = os.stat("%s.log" % self.basename)
-
- try:
- st2 = os.stat("%s.revision" % cachef)
- except OSError:
- # File does not exist
- st2 = None
-
- # the ctime/mtime asymmetry is needed so we don't get fooled by
- # the mtime update from rsync
- if st2 and st1.st_ctime <= st2.st_mtime:
- (revid, timestamp) = util.FileLoad("%s.revision" % cachef).split(":", 2)
- if timestamp == "":
- timestamp = None
- if revid == "":
- revid = None
- return (revid, timestamp)
- (revid, timestamp) = super(CachingBuild, self).revision_details()
- if not self._store.readonly:
- util.FileSave("%s.revision" % cachef, "%s:%s" % (revid, timestamp or ""))
- return (revid, timestamp)
-
- def err_count(self):
- cachef = self._store.cache_fname(self.tree, self.host, self.compiler, self.revision)
- st1 = os.stat("%s.err" % self.basename)
-
- try:
- st2 = os.stat("%s.errcount" % cachef)
- except OSError:
- # File does not exist
- st2 = None
-
- if st2 and st1.st_ctime <= st2.st_mtime:
- return util.FileLoad("%s.errcount" % cachef)
-
- ret = super(CachingBuild, self).err_count()
-
- if not self._store.readonly:
- util.FileSave("%s.errcount" % cachef, str(ret))
-
- return ret
-
- def status(self):
- if self.revsion:
- cachefile = self._store.cache_fname(self.tree, self.host, self.compiler, self.revision)+".status"
- else:
- cachefile = self._store.cache_fname(self.tree, self.host, self.compiler)+".status"
-
- st1 = os.stat("%s.log" % self.basename)
-
- try:
- st2 = os.stat(cachefile)
- except OSError:
- # No such file
- st2 = None
-
- if st2 and st1.st_ctime <= st2.st_mtime:
- return eval(util.FileLoad(cachefile))
-
- ret = super(CachingBuild, self).status()
-
- if not self._store.readonly:
- util.FileSave(cachefile, repr(ret))
-
- return ret
-
-
class UploadBuildResultStore(object):
def __init__(self, path):
logf = "%s.log" % basename
if not os.path.exists(logf):
raise NoSuchBuildError(tree, host, compiler)
- return Build(self, basename, tree, host, compiler)
-
-
-class CachingUploadBuildResultStore(UploadBuildResultStore):
-
- def __init__(self, basedir, cachedir, readonly=False):
- """Open the database.
-
- :param readonly: Whether to avoid saving cache files
- """
- super(CachingUploadBuildResultStore, self).__init__(basedir)
- self.cachedir = cachedir
- self.readonly = readonly
-
- def cache_fname(self, tree, host, compiler):
- return os.path.join(self.cachedir, "build.%s.%s.%s" % (tree, host, compiler))
-
- def get_build(self, tree, host, compiler):
- basename = self.build_fname(tree, host, compiler)
- logf = "%s.log" % basename
- if not os.path.exists(logf):
- raise NoSuchBuildError(tree, host, compiler)
- return CachingBuild(self, basename, tree, host, compiler)
+ return Build(basename, tree, host, compiler)
class BuildResultStore(object):
"""
self.path = path
+ def __contains__(self, build):
+ try:
+ if build.revision:
+ rev = build.revision
+ else:
+ rev, timestamp = build.revision_details()
+ self.get_build(build.tree, build.host, build.compiler, rev)
+ except NoSuchBuildError:
+ return False
+ else:
+ return True
+
def get_build(self, tree, host, compiler, rev):
basename = self.build_fname(tree, host, compiler, rev)
logf = "%s.log" % basename
if not os.path.exists(logf):
raise NoSuchBuildError(tree, host, compiler, rev)
- return Build(self, basename, tree, host, compiler, rev)
+ return Build(basename, tree, host, compiler, rev)
def build_fname(self, tree, host, compiler, rev):
"""get the name of the build file"""
# skip the current build
if stat.st_nlink == 2:
continue
- build = self.get_build(tree, host, compiler, rev)
- r = {
- "STATUS": build.status(),
- "REVISION": rev,
- "TIMESTAMP": build.age_ctime(),
- }
- ret.append(r)
+ ret.append(self.get_build(tree, host, compiler, rev))
- ret.sort(lambda a, b: cmp(a["TIMESTAMP"], b["TIMESTAMP"]))
+ ret.sort(lambda a, b: cmp(a.age_mtime(), b.age_mtime()))
return ret
    def upload_build(self, build):
        # presumably raises MissingRevisionInfo when the log lacks revision
        # info (see revision_details) — TODO confirm; rev_timestamp is unused.
        (rev, rev_timestamp) = build.revision_details()
        new_basename = self.build_fname(build.tree, build.host, build.compiler, rev)
        try:
            existing_build = self.get_build(build.tree, build.host, build.compiler, rev)
        except NoSuchBuildError:
            pass
        else:
            # Replace any previously uploaded logs for this revision, so the
            # os.link() calls below don't fail on existing targets.
            existing_build.remove_logs()
        os.link(build.basename+".log", new_basename+".log")
        if os.path.exists(build.basename+".err"):
            os.link(build.basename+".err", new_basename+".err")
        return Build(new_basename, build.tree, build.host, build.compiler, rev)
    def get_previous_revision(self, tree, host, compiler, revision):
        # Base implementation knows of no earlier builds; subclasses with a
        # revision index override this.
        raise NoSuchBuildError(tree, host, compiler, revision)
-
-class CachingBuildResultStore(BuildResultStore):
-
- def __init__(self, basedir, cachedir, readonly=False):
- super(CachingBuildResultStore, self).__init__(basedir)
-
- self.cachedir = cachedir
- check_dir_exists("cache", self.cachedir)
-
- self.readonly = readonly
-
- def get_build(self, tree, host, compiler, rev):
- basename = self.build_fname(tree, host, compiler, rev)
- logf = "%s.log" % basename
- if not os.path.exists(logf):
- raise NoSuchBuildError(tree, host, compiler, rev)
- return CachingBuild(self, basename, tree, host, compiler, rev)
-
- def cache_fname(self, tree, host, compiler, rev):
- return os.path.join(self.cachedir, "build.%s.%s.%s-%s" % (tree, host, compiler, rev))
-
-
-class SQLCachingBuildResultStore(BuildResultStore):
-
- def __init__(self, basedir, db=None):
- super(SQLCachingBuildResultStore, self).__init__(basedir)
-
- if db is None:
- db = sqlite3.connect(":memory:")
- setup_db(db)
-
- self.db = db
-
- def get_previous_revision(self, tree, host, compiler, revision):
- cursor = self.db.execute("SELECT revision FROM build WHERE tree = ? AND host = ? AND compiler = ? AND revision < ? ORDER BY id DESC LIMIT 1", (tree, host, compiler, revision))
- row = cursor.fetchone()
- if row is None:
- raise NoSuchBuildError(tree, host, compiler, revision)
- return row[0]
-
- def upload_build(self, build):
- super(SQLCachingBuildResultStore, self).upload_build(build)
- self.db.execute("INSERT INTO build (tree, revision, commit_revision, host, compiler, checksum, age, status) VALUES (?, ?, ?, ?, ?, ?, ?, ?)", (build.tree, build.revision, build.revision, build.host, build.compiler, build.log_checksum(), build.age_mtime(), repr(build.status())))
    def get_latest_revision(self, tree, host, compiler):
        # Base implementation knows of no builds; subclasses with a
        # revision index override this.
        raise NoSuchBuildError(tree, host, compiler)