diff --git a/buildfarm/sqldb.py b/buildfarm/sqldb.py
index 9434f0f0556fa344ab6f94ad50a9404bd6cf6281..f679fb40159151951460787561355882f5e29bcc 100644
--- a/buildfarm/sqldb.py
+++ b/buildfarm/sqldb.py
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 #
 
-from buildfarm import (
-    BuildFarm,
+from buildfarm.tree import (
+    Tree,
     )
-from buildfarm.data import (
-    Build,
-    BuildResultStore,
-    BuildStatus,
-    NoSuchBuildError,
+from buildfarm.build import (
+    StormBuild,
+    Test,
+    TestResult,
     )
 from buildfarm.hostdb import (
     Host,
@@ -33,41 +32,31 @@ from buildfarm.hostdb import (
     NoSuchHost,
     )
 
-import os
+
 try:
     from pysqlite2 import dbapi2 as sqlite3
 except ImportError:
     import sqlite3
 from storm.database import create_database
-from storm.locals import Bool, Desc, Int, Unicode, RawStr
+from storm.expr import EXPR, FuncExpr, compile
+from storm.locals import Bool, Desc, Int, RawStr, Reference, Unicode
 from storm.store import Store
 
 
-class StormBuild(Build):
-    __storm_table__ = "build"
-
-    id = Int(primary=True)
-    tree = RawStr()
-    revision = RawStr()
-    host = RawStr()
-    compiler = RawStr()
-    checksum = RawStr()
-    upload_time = Int(name="age")
-    status_str = RawStr(name="status")
-    basename = RawStr()
-
-    def status(self):
-        return BuildStatus.__deserialize__(self.status_str)
-
-    def revision_details(self):
-        return (self.revision, None)
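+# Minimal custom Storm expression emitting SQL "CAST(x AS type)"; the
+# @compile.when handler below tells Storm's compiler how to render it.
+# SQLite never treats a BLOB value as equal to a TEXT value, so (presumably)
+# this is needed to compare blob-typed name columns against text parameters.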
+class Cast(FuncExpr):
+    __slots__ = ("column", "type")
+    name = "CAST"
 
-    def log_checksum(self):
-        return self.checksum
+    def __init__(self, column, type):
+        self.column = column
+        self.type = type
 
-    def remove(self):
-        super(StormBuild, self).remove()
-        Store.of(self).remove(self)
+@compile.when(Cast)
+def compile_cast(compile, cast, state):
+    state.push("context", EXPR)
+    column = compile(cast.column, state)
+    state.pop()
+    return "CAST(%s AS %s)" % (column, cast.type)
 
 
 class StormHost(Host):
@@ -130,7 +119,9 @@ class StormHostDatabase(HostDatabase):
         return self.store.find(StormHost).order_by(StormHost.name)
 
     def __getitem__(self, name):
-        ret = self.store.find(StormHost, StormHost.name==name).one()
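+        # Cast both the stored name and the lookup key to TEXT so blob-typed
+        # host names still match plain text parameters.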
+        result = self.store.find(StormHost,
+            Cast(StormHost.name, "TEXT") == Cast(name, "TEXT"))
+        ret = result.one()
         if ret is None:
             raise NoSuchHost(name)
         return ret
@@ -139,133 +130,117 @@ class StormHostDatabase(HostDatabase):
         self.store.commit()
 
 
-class StormCachingBuildResultStore(BuildResultStore):
+def distinct_builds(builds):
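+    """Yield only the first build seen per (tree, compiler, host) combination.
+
+    Input order is preserved; if the newest build per combination should win,
+    pass the builds sorted newest-first.
+    """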
+    done = set()
+    for build in builds:
+        key = (build.tree, build.compiler, build.host)
+        if key in done:
+            continue
+        done.add(key)
+        yield build
 
-    def __init__(self, basedir, store=None):
-        super(StormCachingBuildResultStore, self).__init__(basedir)
 
-        if store is None:
-            store = memory_store()
-
-        self.store = store
-
-    def __contains__(self, build):
-        return (self._get_by_checksum(build) is not None)
-
-    def get_previous_revision(self, tree, host, compiler, revision):
-        result = self.store.find(StormBuild,
-            StormBuild.tree == tree,
-            StormBuild.host == host,
-            StormBuild.compiler == compiler,
-            StormBuild.revision == revision)
-        cur_build = result.any()
-        if cur_build is None:
-            raise NoSuchBuildError(tree, host, compiler, revision)
-
-        result = self.store.find(StormBuild,
-            StormBuild.tree == tree,
-            StormBuild.host == host,
-            StormBuild.compiler == compiler,
-            StormBuild.revision != revision,
-            StormBuild.id < cur_build.id)
-        result = result.order_by(Desc(StormBuild.id))
-        prev_build = result.first()
-        if prev_build is None:
-            raise NoSuchBuildError(tree, host, compiler, revision)
-        return prev_build.revision
-
-    def get_latest_revision(self, tree, host, compiler):
-        result = self.store.find(StormBuild,
-            StormBuild.tree == tree,
-            StormBuild.host == host,
-            StormBuild.compiler == compiler)
-        result = result.order_by(Desc(StormBuild.id))
-        build = result.first()
-        if build is None:
-            raise NoSuchBuildError(tree, host, compiler)
-        return build.revision
-
-    def _get_by_checksum(self, build):
-        result = self.store.find(StormBuild,
-            StormBuild.checksum == build.log_checksum())
-        return result.one()
-
-    def upload_build(self, build):
-        existing_build = self._get_by_checksum(build)
-        if existing_build is not None:
-            # Already present
-            assert build.tree == existing_build.tree
-            assert build.host == existing_build.host
-            assert build.compiler == existing_build.compiler
-            return existing_build
-        rev, timestamp = build.revision_details()
-        super(StormCachingBuildResultStore, self).upload_build(build)
-        new_basename = self.build_fname(build.tree, build.host, build.compiler, rev)
-        new_build = StormBuild(new_basename, build.tree, build.host,
-            build.compiler, rev)
-        new_build.checksum = build.log_checksum()
-        new_build.upload_time = build.upload_time
-        new_build.status_str = build.status().__serialize__()
-        new_build.basename = new_basename
-        self.store.add(new_build)
-        return new_build
-
-    def get_old_revs(self, tree, host, compiler):
-        return self.store.find(StormBuild,
-            StormBuild.tree == tree,
-            StormBuild.host == host,
-            StormBuild.compiler == compiler).order_by(Desc(StormBuild.upload_time))
-
-
-class StormCachingBuildFarm(BuildFarm):
-
-    def __init__(self, path=None, store=None, timeout=0.5):
-        self.timeout = timeout
-        self.store = store
-        super(StormCachingBuildFarm, self).__init__(path)
-
-    def _get_store(self):
-        if self.store is not None:
-            return self.store
-        db_path = os.path.join(self.path, "db", "hostdb.sqlite")
-        umask = os.umask(0664)
-        try:
-            db = create_database("sqlite:%s?timeout=%f" % (db_path, self.timeout))
-            self.store = Store(db)
-            setup_schema(self.store)
-        finally:
-            os.umask(umask)
-        return self.store
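+# Storm table mappings for the normalised tree/test/test_result tables that
+# setup_schema() below creates.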
+class StormTree(Tree):
+    __storm_table__ = "tree"
+
+    id = Int(primary=True)
+    name = RawStr()
+    scm = Int()
+    branch = RawStr()
+    subdir = RawStr()
+    repo = RawStr()
 
-    def _open_hostdb(self):
-        return StormHostDatabase(self._get_store())
 
-    def _open_build_results(self):
-        return StormCachingBuildResultStore(os.path.join(self.path, "data", "oldrevs"),
-            self._get_store())
+class StormTest(Test):
+    __storm_table__ = "test"
+
+    id = Int(primary=True)
+    name = RawStr()
 
-    def get_host_builds(self, host):
-        return self._get_store().find(StormBuild,
-            StormBuild.host == host).group_by(StormBuild.compiler, StormBuild.tree)
 
-    def get_tree_builds(self, tree):
-        return self._get_store().find(StormBuild,
-            StormBuild.tree == tree).order_by(Desc(StormBuild.upload_time))
+class StormTestResult(TestResult):
+    __storm_table__ = "test_result"
 
-    def get_last_builds(self):
-        return self._get_store().find(StormBuild).group_by(
-            StormBuild.tree, StormBuild.compiler, StormBuild.host).order_by(
-                Desc(StormBuild.upload_time))
+    id = Int(primary=True)
+    build_id = Int(name="build")
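+    # Reference turns the integer foreign key into a navigable link to the
+    # related Storm object.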
+    build = Reference(build_id, StormBuild.id)
 
-    def commit(self):
-        self.store.commit()
+    test_id = Int(name="test")
+    test = Reference(test_id, StormTest.id)
 
 
 def setup_schema(db):
-    db.execute("CREATE TABLE IF NOT EXISTS host (id integer primary key autoincrement, name blob not null, owner text, owner_email text, password text, ssh_access int, fqdn text, platform text, permission text, last_dead_mail int, join_time int);", noresult=True)
+    db.execute("PRAGMA foreign_keys = 1;", noresult=True)
+    db.execute("""
+CREATE TABLE IF NOT EXISTS host (
+    id integer primary key autoincrement,
+    name blob not null,
+    owner text,
+    owner_email text,
+    password text,
+    ssh_access int,
+    fqdn text,
+    platform text,
+    permission text,
+    last_dead_mail int,
+    join_time int
+);""", noresult=True)
     db.execute("CREATE UNIQUE INDEX IF NOT EXISTS unique_hostname ON host (name);", noresult=True)
-    db.execute("CREATE TABLE IF NOT EXISTS build (id integer primary key autoincrement, tree blob not null, revision blob, host blob not null, compiler blob not null, checksum blob, age int, status blob, basename blob);", noresult=True)
+    db.execute("""
+CREATE TABLE IF NOT EXISTS build (
+    id integer primary key autoincrement,
+    tree blob not null,
+    tree_id int,
+    revision blob,
+    host blob not null,
+    host_id integer,
+    compiler blob not null,
+    compiler_id int,
+    checksum blob,
+    age int,
+    status blob,
+    basename blob,
+    FOREIGN KEY (host_id) REFERENCES host (id),
+    FOREIGN KEY (tree_id) REFERENCES tree (id),
+    FOREIGN KEY (compiler_id) REFERENCES compiler (id)
+);""", noresult=True)
     db.execute("CREATE UNIQUE INDEX IF NOT EXISTS unique_checksum ON build (checksum);", noresult=True)
+    db.execute("""
+CREATE TABLE IF NOT EXISTS tree (
+    id integer primary key autoincrement,
+    name blob not null,
+    scm int,
+    branch blob,
+    subdir blob,
+    repo blob
+    );
+    """, noresult=True)
+    db.execute("""
+CREATE UNIQUE INDEX IF NOT EXISTS unique_tree_name ON tree(name);
+""", noresult=True)
+    db.execute("""
+CREATE TABLE IF NOT EXISTS compiler (
+    id integer primary key autoincrement,
+    name blob not null
+    );
+    """, noresult=True)
+    db.execute("""
+CREATE UNIQUE INDEX IF NOT EXISTS unique_compiler_name ON compiler(name);
+""", noresult=True)
+    db.execute("""
+CREATE TABLE IF NOT EXISTS test (
+    id integer primary key autoincrement,
+    name text not null);
+    """, noresult=True)
+    db.execute("CREATE UNIQUE INDEX IF NOT EXISTS test_name ON test(name);",
+        noresult=True)
+    db.execute("""CREATE TABLE IF NOT EXISTS test_result (
+        build int,
+        test int,
+        result int
+        );""", noresult=True)
+    db.execute("""CREATE UNIQUE INDEX IF NOT EXISTS build_test_result ON test_result(build, test);""", noresult=True)
 
 
 def memory_store():