diff --git a/buildfarm/sqldb.py b/buildfarm/sqldb.py
index 0e561379007d19ef5e70fe18c16b95a7f9513314..f679fb40159151951460787561355882f5e29bcc 100644
--- a/buildfarm/sqldb.py
+++ b/buildfarm/sqldb.py
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 #
 
-from buildfarm import (
-    BuildFarm,
+from buildfarm.tree import (
     Tree,
     )
-from buildfarm.data import (
-    Build,
-    BuildResultStore,
-    BuildStatus,
-    NoSuchBuildError,
+from buildfarm.build import (
+    StormBuild,
+    Test,
+    TestResult,
     )
 from buildfarm.hostdb import (
     Host,
@@ -34,44 +32,31 @@ from buildfarm.hostdb import (
     NoSuchHost,
     )
 
-import os
+
 try:
     from pysqlite2 import dbapi2 as sqlite3
 except ImportError:
     import sqlite3
 from storm.database import create_database
-from storm.locals import Bool, Desc, Int, Unicode, RawStr
+from storm.expr import EXPR, FuncExpr, compile
+from storm.locals import Bool, Desc, Int, RawStr, Reference, Unicode
 from storm.store import Store
 
 
-class StormBuild(Build):
-    __storm_table__ = "build"
-
-    id = Int(primary=True)
-    tree = RawStr()
-    revision = RawStr()
-    host = RawStr()
-    compiler = RawStr()
-    checksum = RawStr()
-    upload_time = Int(name="age")
-    commit = Unicode()
-    status_str = RawStr(name="status")
-    commit_revision = RawStr()
-    basename = RawStr()
-    host_id = Int()
-
-    def status(self):
-        return BuildStatus.__deserialize__(self.status_str)
-
-    def revision_details(self):
-        return (self.revision, None)
+class Cast(FuncExpr):
+    __slots__ = ("column", "type")
+    name = "CAST"
 
-    def log_checksum(self):
-        return self.checksum
+    def __init__(self, column, type):
+        self.column = column
+        self.type = type
 
-    def remove(self):
-        super(StormBuild, self).remove()
-        Store.of(self).remove(self)
+@compile.when(Cast)
+def compile_cast(compile, cast, state):
+    state.push("context", EXPR)
+    column = compile(cast.column, state)
+    state.pop()
+    return "CAST(%s AS %s)" % (column, cast.type)
 
 
 class StormHost(Host):
@@ -134,7 +119,9 @@ class StormHostDatabase(HostDatabase):
         return self.store.find(StormHost).order_by(StormHost.name)
 
     def __getitem__(self, name):
-        ret = self.store.find(StormHost, StormHost.name==name).one()
+        result = self.store.find(StormHost,
+            Cast(StormHost.name, "TEXT") == Cast(name, "TEXT"))
+        ret = result.one()
         if ret is None:
             raise NoSuchHost(name)
         return ret
@@ -143,133 +130,14 @@ class StormHostDatabase(HostDatabase):
         self.store.commit()
 
 
-class StormCachingBuildResultStore(BuildResultStore):
-
-    def __init__(self, basedir, store=None):
-        super(StormCachingBuildResultStore, self).__init__(basedir)
-
-        if store is None:
-            store = memory_store()
-
-        self.store = store
-
-    def __contains__(self, build):
-        return (self._get_by_checksum(build) is not None)
-
-    def get_previous_revision(self, tree, host, compiler, revision):
-        result = self.store.find(StormBuild,
-            StormBuild.tree == tree,
-            StormBuild.host == host,
-            StormBuild.compiler == compiler,
-            StormBuild.revision == revision)
-        cur_build = result.any()
-        if cur_build is None:
-            raise NoSuchBuildError(tree, host, compiler, revision)
-
-        result = self.store.find(StormBuild,
-            StormBuild.tree == tree,
-            StormBuild.host == host,
-            StormBuild.compiler == compiler,
-            StormBuild.revision != revision,
-            StormBuild.id < cur_build.id)
-        result = result.order_by(Desc(StormBuild.id))
-        prev_build = result.first()
-        if prev_build is None:
-            raise NoSuchBuildError(tree, host, compiler, revision)
-        return prev_build.revision
-
-    def get_latest_revision(self, tree, host, compiler):
-        result = self.store.find(StormBuild,
-            StormBuild.tree == tree,
-            StormBuild.host == host,
-            StormBuild.compiler == compiler)
-        result = result.order_by(Desc(StormBuild.id))
-        build = result.first()
-        if build is None:
-            raise NoSuchBuildError(tree, host, compiler)
-        return build.revision
-
-    def _get_by_checksum(self, build):
-        result = self.store.find(StormBuild,
-            StormBuild.checksum == build.log_checksum())
-        return result.one()
-
-    def upload_build(self, build):
-        existing_build = self._get_by_checksum(build)
-        if existing_build is not None:
-            # Already present
-            assert build.tree == existing_build.tree
-            assert build.host == existing_build.host
-            assert build.compiler == existing_build.compiler
-            return existing_build
-        rev, timestamp = build.revision_details()
-        super(StormCachingBuildResultStore, self).upload_build(build)
-        new_basename = self.build_fname(build.tree, build.host, build.compiler, rev)
-        new_build = StormBuild(new_basename, build.tree, build.host,
-            build.compiler, rev)
-        new_build.checksum = build.log_checksum()
-        new_build.upload_time = build.upload_time
-        new_build.status_str = build.status().__serialize__()
-        new_build.basename = new_basename
-        self.store.add(new_build)
-        return new_build
-
-    def get_old_revs(self, tree, host, compiler):
-        return self.store.find(StormBuild,
-            StormBuild.tree == tree,
-            StormBuild.host == host,
-            StormBuild.compiler == compiler).order_by(Desc(StormBuild.upload_time))
-
-    def get_build(self, tree, host, compiler, revision):
-        result = self.store.find(StormBuild,
-            StormBuild.tree == tree,
-            StormBuild.host == host,
-            StormBuild.compiler == compiler,
-            StormBuild.revision == revision)
-        ret = result.one()
-        if ret is None:
-            raise NoSuchBuildError(tree, host, compiler, revision)
-        return ret
-
-
-class StormCachingBuildFarm(BuildFarm):
-
-    def __init__(self, path=None, store=None, timeout=0.5):
-        self.timeout = timeout
-        self.store = store
-        super(StormCachingBuildFarm, self).__init__(path)
-
-    def _get_store(self):
-        if self.store is not None:
-            return self.store
-        db_path = os.path.join(self.path, "db", "hostdb.sqlite")
-        db = create_database("sqlite:%s?timeout=%f" % (db_path, self.timeout))
-        self.store = Store(db)
-        setup_schema(self.store)
-        return self.store
-
-    def _open_hostdb(self):
-        return StormHostDatabase(self._get_store())
-
-    def _open_build_results(self):
-        return StormCachingBuildResultStore(os.path.join(self.path, "data", "oldrevs"),
-            self._get_store())
-
-    def get_host_builds(self, host):
-        return self._get_store().find(StormBuild,
-            StormBuild.host==host).group_by(StormBuild.compiler, StormBuild.tree)
-
-    def get_tree_builds(self, tree):
-        result = self._get_store().find(StormBuild, StormBuild.tree==tree)
-        return result.order_by(Desc(StormBuild.upload_time))
-
-    def get_last_builds(self):
-        return self._get_store().find(StormBuild).group_by(
-            StormBuild.tree, StormBuild.compiler, StormBuild.host).order_by(
-                Desc(StormBuild.upload_time))
-
-    def commit(self):
-        self.store.commit()
+def distinct_builds(builds):
+    done = set()
+    for build in builds:
+        key = (build.tree, build.compiler, build.host)
+        if key in done:
+            continue
+        done.add(key)
+        yield build
 
 
 class StormTree(Tree):
@@ -284,6 +152,24 @@ class StormTree(Tree):
     scm = RawStr()
 
 
+class StormTest(Test):
+    __storm_table__ = "test"
+
+    id = Int(primary=True)
+    name = RawStr()
+
+
+class StormTestResult(TestResult):
+    __storm_table__ = "test_result"
+
+    id = Int(primary=True)
+    build_id = Int(name="build")
+    build = Reference(build_id, StormBuild.id)
+
+    test_id = Int(name="test")
+    test = Reference(test_id, StormTest.id)
+
+
 def setup_schema(db):
     db.execute("PRAGMA foreign_keys = 1;", noresult=True)
     db.execute("""
@@ -305,15 +191,19 @@ CREATE TABLE IF NOT EXISTS host (
 CREATE TABLE IF NOT EXISTS build (
     id integer primary key autoincrement,
     tree blob not null,
+    tree_id int,
     revision blob,
     host blob not null,
     host_id integer,
     compiler blob not null,
+    compiler_id int,
     checksum blob,
     age int,
     status blob,
     basename blob,
-    FOREIGN KEY (host_id) REFERENCES host (id)
+    FOREIGN KEY (host_id) REFERENCES host (id),
+    FOREIGN KEY (tree_id) REFERENCES tree (id),
+    FOREIGN KEY (compiler_id) REFERENCES compiler (id)
 );""", noresult=True)
     db.execute("CREATE UNIQUE INDEX IF NOT EXISTS unique_checksum ON build (checksum);", noresult=True)
     db.execute("""
@@ -324,7 +214,33 @@ CREATE TABLE IF NOT EXISTS tree (
     branch blob,
     subdir blob,
     repo blob
-    );""", noresult=True)
+    );
+    """, noresult=True)
+    db.execute("""
+CREATE UNIQUE INDEX IF NOT EXISTS unique_tree_name ON tree(name);
+""", noresult=True)
+    db.execute("""
+CREATE TABLE IF NOT EXISTS compiler (
+    id integer primary key autoincrement,
+    name blob not null
+    );
+    """, noresult=True)
+    db.execute("""
+CREATE UNIQUE INDEX IF NOT EXISTS unique_compiler_name ON compiler(name);
+""", noresult=True)
+    db.execute("""
+CREATE TABLE IF NOT EXISTS test (
+    id integer primary key autoincrement,
+    name text not null);
+    """, noresult=True)
+    db.execute("CREATE UNIQUE INDEX IF NOT EXISTS test_name ON test(name);",
+        noresult=True)
+    db.execute("""CREATE TABLE IF NOT EXISTS test_result (
+        build int,
+        test int,
+        result int
+        );""", noresult=True)
+    db.execute("""CREATE UNIQUE INDEX IF NOT EXISTS build_test_result ON test_result(build, test);""", noresult=True)
 
 
 def memory_store():
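The sketch below is not part of the commit; it is a minimal example of how the pieces
introduced in this diff might be exercised, assuming that memory_store() (only its def
line is visible at the end of the diff) returns a Storm Store backed by an in-memory
SQLite database that has been initialised with setup_schema() above. The host, tree and
compiler names are illustrative values only.

    from collections import namedtuple

    from buildfarm.sqldb import (
        Cast,
        StormHost,
        distinct_builds,
        memory_store,
        )

    store = memory_store()

    # Host names live in a blob column; casting both sides to TEXT, as
    # StormHostDatabase.__getitem__ does above, keeps the comparison
    # consistent for str and buffer values.
    result = store.find(StormHost,
        Cast(StormHost.name, "TEXT") == Cast("charis", "TEXT"))
    host = result.one()  # None when no such host exists

    # distinct_builds() yields only the first build seen for each
    # (tree, compiler, host) combination, so pass it builds that are
    # already ordered newest-first to get the latest build per combination.
    FakeBuild = namedtuple("FakeBuild", ["tree", "compiler", "host"])
    builds = [FakeBuild("samba_master", "gcc", "charis"),
              FakeBuild("samba_master", "gcc", "charis"),
              FakeBuild("samba_master", "cc", "hape")]
    print(list(distinct_builds(builds)))  # two entries remain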