Use a tuple for sqlite query arguments - a list breaks on older versions of storm
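
Older versions of storm reject a list of bind parameters passed to
Store.execute() against the sqlite backend, so the arguments in
get_summary_builds() are passed as a one-element tuple. A minimal
sketch of the difference (illustrative only; the previous list form is
not shown in this diff):

    # tuple of bind parameters: accepted by old and new storm releases
    store.execute("SELECT ... FROM build WHERE age > ?", (min_age, ))
    # list of bind parameters: breaks on older storm releases
    store.execute("SELECT ... FROM build WHERE age > ?", [min_age])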
diff --git a/buildfarm/__init__.py b/buildfarm/__init__.py
index 5bfaf35436e1f72727e38ebd344b9df849b909c7..0e9eb3922b3d0aba0411b665009d4860476639db 100644
--- a/buildfarm/__init__.py
+++ b/buildfarm/__init__.py
 #   along with this program; if not, write to the Free Software
 #   Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 
-import ConfigParser
-import os
-import re
+from buildfarm.build import BuildStatus
+from buildfarm.sqldb import distinct_builds, Cast, StormBuild, setup_schema, StormHostDatabase
+from buildfarm.tree import Tree
 from storm.database import create_database
+from storm.expr import Desc
 from storm.store import Store
 
-
-class Tree(object):
-    """A tree to build."""
-
-    def __init__(self, name, scm, repo, branch, subdir="", srcdir=""):
-        self.name = name
-        self.repo = repo
-        self.scm = scm
-        self.branch = branch
-        self.subdir = subdir
-        self.srcdir = srcdir
-        self.scm = scm
-
-    def get_branch(self):
-        from buildfarm.history import GitBranch
-        return GitBranch(self.repo, self.branch)
-
-    def __repr__(self):
-        return "<%s %r>" % (self.__class__.__name__, self.name)
-
+import ConfigParser
+import os
+import re
 
 def read_trees_from_conf(path):
     """Read trees from a configuration file.
@@ -58,9 +42,9 @@ def read_trees_from_conf(path):
     return ret
 
 
-def lcov_extract_percentage(text):
+def lcov_extract_percentage(f):
     """Extract the coverage percentage from the lcov file."""
-    m = re.search('\<td class="headerItem".*?\>Code\&nbsp\;covered\:\<\/td\>.*?\n.*?\<td class="headerValue".*?\>([0-9.]+) \%', text)
+    m = re.search('\<td class="headerCovTableEntryLo".*?\>([0-9.]+) \%', f.read())
     if m:
         return m.group(1)
     else:
@@ -69,11 +53,13 @@ def lcov_extract_percentage(text):
 
 class BuildFarm(object):
 
-    LCOVHOST = "magni"
+    LCOVHOST = "coverage"
     OLDAGE = 60*60*4
     DEADAGE = 60*60*24*4
 
-    def __init__(self, path=None):
+    def __init__(self, path=None, store=None, timeout=0.5):
+        self.timeout = timeout
+        self.store = store
         if path is None:
             path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
         self.path = path
@@ -91,131 +77,120 @@ class BuildFarm(object):
         return "%s(%r)" % (self.__class__.__name__, self.path)
 
     def _open_build_results(self):
-        from buildfarm import data
-        return data.BuildResultStore(os.path.join(self.path, "data", "oldrevs"))
+        path = os.path.join(self.path, "data", "oldrevs")
+        from buildfarm.build import BuildResultStore
+        return BuildResultStore(path, self._get_store())
 
     def _open_upload_build_results(self):
-        from buildfarm import data
-        return data.UploadBuildResultStore(os.path.join(self.path, "data", "upload"))
+        from buildfarm.build import UploadBuildResultStore
+        path = os.path.join(self.path, "data", "upload")
+        return UploadBuildResultStore(path)
 
     def _open_hostdb(self):
-        from buildfarm import hostdb
-        return hostdb.HostDatabase(
-            os.path.join(self.path, "hostdb.sqlite"))
+        return StormHostDatabase(self._get_store())
 
     def _load_compilers(self):
         from buildfarm import util
         return set(util.load_list(os.path.join(self.webdir, "compilers.list")))
 
+    def commit(self):
+        if self.store is not None:
+            self.store.commit()
+
     def lcov_status(self, tree):
         """get status of build"""
-        from buildfarm import data, util
+        from buildfarm.build import NoSuchBuildError
         file = os.path.join(self.lcovdir, self.LCOVHOST, tree, "index.html")
         try:
-            lcov_html = util.FileLoad(file)
+            lcov_html = open(file, 'r')
         except (OSError, IOError):
             # File does not exist
-            raise data.NoSuchBuildError(tree, self.LCOVHOST, "lcov")
-
-        perc = lcov_extract_percentage(lcov_html)
-        if perc is None:
-            ret = ""
-        else:
-            ret = perc
-        return perc
-
-    def get_build(self, tree, host, compiler, rev=None):
-        if rev:
-            return self.builds.get_build(tree, host, compiler, rev)
-        else:
-            return self.upload_builds.get_build(tree, host, compiler)
-
-    def get_new_builds(self):
-        hosts = set([host.name for host in self.hostdb.hosts()])
-        for build in self.upload_builds.get_new_builds():
-            if build.tree in self.trees and build.compiler in self.compilers and build.host in hosts:
-                yield build
-
-
-class CachingBuildFarm(BuildFarm):
-
-    def __init__(self, path=None, readonly=False, cachedirname=None):
-        self._cachedirname = cachedirname
-        self.readonly = readonly
-        super(CachingBuildFarm, self).__init__(path)
-
-    def _get_cachedir(self):
-        if self._cachedirname is not None:
-            return os.path.join(self.path, self._cachedirname)
-        else:
-            return os.path.join(self.path, "cache")
-
-    def _open_build_results(self):
-        from buildfarm import data
-        return data.CachingBuildResultStore(os.path.join(self.path, "data", "oldrevs"),
-                self._get_cachedir(), readonly=self.readonly)
-
-    def _open_upload_build_results(self):
-        from buildfarm import data
-        return data.CachingUploadBuildResultStore(os.path.join(self.path, "data", "upload"),
-                self._get_cachedir(), readonly=self.readonly)
+            raise NoSuchBuildError(tree, self.LCOVHOST, "lcov")
+        try:
+            return lcov_extract_percentage(lcov_html)
+        finally:
+            lcov_html.close()
 
-    def lcov_status(self, tree):
+    def unused_fns(self, tree):
         """get status of build"""
-        from buildfarm import data, util
-        cachefile = os.path.join(self._get_cachedir(),
-                                    "lcov.%s.%s.status" % (self.LCOVHOST, tree))
-        file = os.path.join(self.lcovdir, self.LCOVHOST, tree, "index.html")
+        from buildfarm.build import NoSuchBuildError
+        file = os.path.join(self.lcovdir, self.LCOVHOST, tree, "unused-fns.txt")
         try:
-            st1 = os.stat(file)
-        except OSError:
+            unused_fns_file = open(file, 'r')
+        except (OSError, IOError):
             # File does not exist
-            raise data.NoSuchBuildError(tree, self.LCOVHOST, "lcov")
+            raise NoSuchBuildError(tree, self.LCOVHOST, "unused_fns")
         try:
-            st2 = os.stat(cachefile)
-        except OSError:
-            # file does not exist
-            st2 = None
-
-        if st2 and st1.st_ctime <= st2.st_mtime:
-            ret = util.FileLoad(cachefile)
-            if ret == "":
-                return None
-            return ret
-
-        perc = super(CachingBuildFarm, self).lcov_status(tree)
-        if not self.readonly:
-            util.FileSave(cachefile, perc)
-        return perc
-
-
-class SQLCachingBuildFarm(BuildFarm):
-
-    def __init__(self, path=None, db=None):
-        self.db = db
-        super(SQLCachingBuildFarm, self).__init__(path)
-
-    def _get_db(self):
-        if self.db is not None:
-            return self.db
+            return "unused-fns.txt"
+        finally:
+            unused_fns_file.close()
+
+    def get_build(self, tree, host, compiler, rev=None, checksum=None):
+        if rev is not None:
+            return self.builds.get_build(tree, host, compiler, rev,
+                checksum=checksum)
         else:
-            return create_database("sqlite:" + os.path.join(self.path, "hostdb.sqlite"))
-
-    def _open_build_results(self):
-        from buildfarm import data
-        return data.SQLCachingBuildResultStore(os.path.join(self.path, "data", "oldrevs"),
-            self.db)
-
-
-def setup_db(db):
-    db.execute("CREATE TABLE IF NOT EXISTS host (name text, owner text, owner_email text, password text, ssh_access int, fqdn text, platform text, permission text, last_dead_mail int, join_time int);", noresult=True)
-    db.execute("CREATE UNIQUE INDEX IF NOT EXISTS unique_hostname ON host (name);", noresult=True)
-    db.execute("CREATE TABLE IF NOT EXISTS build (id integer primary key autoincrement, tree text, revision text, host text, compiler text, checksum text, age int, status text, commit_revision text);", noresult=True)
-    db.execute("CREATE UNIQUE INDEX IF NOT EXISTS unique_checksum ON build (checksum);", noresult=True)
+            return self.upload_builds.get_build(tree, host, compiler)
 
+    def get_new_builds(self):
+        hostnames = set([host.name for host in self.hostdb.hosts()])
+        for build in self.upload_builds.get_all_builds():
+            if (build.tree in self.trees and
+                build.compiler in self.compilers and
+                build.host in hostnames):
+                yield build
 
-def memory_store():
-    db = create_database("sqlite:")
-    store = Store(db)
-    setup_db(store)
-    return store
+    def get_last_builds(self):
+        result = self._get_store().find(StormBuild)
+        return distinct_builds(result.order_by(Desc(StormBuild.upload_time)))
+
+    def get_summary_builds(self, min_age=0):
+        """Return last build age, status for each tree/host/compiler.
+
+        :param min_age: Minimum timestamp of builds to report
+        :return: iterator over tree, status
+        """
+        store = self._get_store()
+        return ((tree, BuildStatus.__deserialize__(status_str))
+                for (tree, status_str) in store.execute("""
+SELECT obd.tree, obd.status AS status_str
+FROM build obd
+INNER JOIN(
+    SELECT MAX(age) age, tree, host, compiler
+    FROM build
+    WHERE age > ?
+    GROUP BY tree, host, compiler
+) ibd ON obd.age = ibd.age AND
+         obd.tree = ibd.tree AND
+         obd.host = ibd.host AND
+         obd.compiler = ibd.compiler;
+""", (min_age, )))
+
+    def get_tree_builds(self, tree):
+        result = self._get_store().find(StormBuild,
+            Cast(StormBuild.tree, "TEXT") == Cast(tree, "TEXT"))
+        return distinct_builds(result.order_by(Desc(StormBuild.upload_time)))
+
+    def host_last_build(self, host):
+        return max([build.upload_time for build in self.get_host_builds(host)])
+
+    def get_host_builds(self, host):
+        result = self._get_store().find(StormBuild, StormBuild.host == host)
+        return distinct_builds(result.order_by(Desc(StormBuild.upload_time)))
+
+    def _get_store(self):
+        if self.store is not None:
+            return self.store
+        db_dir_path = os.path.join(self.path, "db")
+        if not os.path.isdir(db_dir_path):
+            os.mkdir(db_dir_path)
+        db_path = os.path.join(db_dir_path, "hostdb.sqlite")
+        db = create_database("sqlite:%s?timeout=%f" % (db_path, self.timeout))
+        self.store = Store(db)
+        setup_schema(self.store)
+        return self.store
+
+    def get_revision_builds(self, tree, revision=None):
+        return self._get_store().find(StormBuild,
+            Cast(StormBuild.tree, "TEXT") == Cast(tree, "TEXT"),
+            Cast(StormBuild.revision, "TEXT") == Cast(revision, "TEXT"))