Merge pull request 'Build a Tree of commits (at least prep it)' (#4) from tree_builder into main

Reviewed-on: https://gitea.opensuse.org/importers/git-importer/pulls/4
commit 22b70d8bb7 by coolo, 2022-10-26 19:33:13 +02:00
12 changed files with 32393 additions and 33 deletions


@@ -1,3 +1,5 @@
sudo zypper in python3-psycopg2
sudo su - postgres
# `createdb -O <LOCAL_USER> imported_git`
To reset the database, drop the `scheme` table.
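A minimal sketch of that reset using the project's own DB wrapper (connection settings come from lib/config as elsewhere in the code); once the scheme table is gone, the numbered migrations in lib/db.py can run again from scratch:

# A sketch only: drop the schema-tracking table so the numbered migrations
# in lib/db.py run again from scratch on the next import.
from lib.db import DB

db = DB()
with db.cursor() as cur:
    cur.execute("DROP TABLE IF EXISTS scheme")
db.conn.commit()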


@@ -8,6 +8,7 @@ import sys
import osc.core
from lib.exporter import Exporter
from lib.importer import Importer
URL_OBS = "https://api.opensuse.org"
@@ -88,6 +89,11 @@ def main():
action="store_true",
help="Import revisions into database only",
)
parser.add_argument(
"--export",
action="store_true",
help="Export database fields for the given package as YAML",
)
args = parser.parse_args()
@@ -103,6 +109,10 @@ def main():
requests_log.setLevel(logging.DEBUG)
requests_log.propagate = True
if args.export:
Exporter(args.package).run()
return
if not args.repodir:
args.repodir = pathlib.Path(args.package)
@@ -117,6 +127,7 @@ def main():
importer = Importer(
PROJECTS, args.package, args.repodir, args.search_ancestor, args.rebase_devel
)
if args.db:
importer.import_into_db()
return
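The new flag is a thin wrapper around the Exporter added in lib/exporter.py below; calling it from Python is equivalent (a sketch, the package name is illustrative):

# Roughly what --export wires up; the Exporter writes one YAML document with
# all stored revisions of the package to stdout.
from lib.exporter import Exporter

Exporter("zsh").run()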


@@ -1,4 +1,7 @@
import logging
import psycopg2
from psycopg2.extras import LoggingConnection
from lib.config import config
@@ -14,7 +17,9 @@ class DB:
# read the connection parameters
params = config(section=self.config_section)
# connect to the PostgreSQL server
-self.conn = psycopg2.connect(**params)
+self.conn = psycopg2.connect(connection_factory=LoggingConnection, **params)
logger = logging.getLogger(__name__)
self.conn.initialize(logger)
except (Exception, psycopg2.DatabaseError) as error:
print(error)
@@ -96,6 +101,7 @@ class DB:
"UPDATE scheme SET version=4",
)
schemes[5] = (
"""DROP TABLE IF EXISTS files""",
"""
CREATE TABLE files (
id SERIAL PRIMARY KEY,
@@ -109,6 +115,7 @@ class DB:
"UPDATE scheme SET version=5",
)
schemes[6] = (
"""DROP TABLE IF EXISTS requests""",
"""
CREATE TABLE requests (
id SERIAL PRIMARY KEY,
@@ -154,17 +161,51 @@ class DB:
"UPDATE scheme SET version=12",
)
schemes[13] = (
-"""
-CREATE TABLE users (
+"DROP TABLE IF EXISTS users",
+"""CREATE TABLE users (
id SERIAL PRIMARY KEY,
userid VARCHAR(255) NOT NULL,
email VARCHAR(255) NOT NULL,
realname VARCHAR(255) NOT NULL
)
""",
"UPDATE scheme SET version=13",
)
schemes[14] = (
"DROP TABLE IF EXISTS linked_revs",
"""
CREATE TABLE linked_revs (
id SERIAL PRIMARY KEY,
revision_id INTEGER NOT NULL,
linked_id INTEGER NOT NULL
)
""",
"UPDATE scheme SET version=14",
)
schemes[15] = (
"ALTER TABLE revisions ALTER COLUMN rev TYPE real USING rev::real",
"UPDATE scheme SET version=15",
)
schemes[16] = (
"""DROP TABLE IF EXISTS fake_revs""",
"""
CREATE TABLE fake_revs (
id SERIAL PRIMARY KEY,
revision_id INTEGER NOT NULL,
linked_id INTEGER NOT NULL
)
""",
"create index revs_linked on fake_revs (revision_id,linked_id)",
"UPDATE scheme SET version=16",
)
schemes[17] = (
"ALTER TABLE revisions ADD COLUMN files_hash VARCHAR(40)",
"UPDATE scheme SET version=17",
)
schemes[18] = (
"ALTER TABLE linked_revs ADD COLUMN considered BOOLEAN DEFAULT FALSE",
"UPDATE scheme SET version=18",
)
schema_version = self.schema_version()
if (schema_version + 1) not in schemes:
return
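The hunk ends where the migration loop begins; a minimal sketch of the driver it implies (function and argument names are assumptions, the real logic lives on the DB class) shows that each schemes[n] tuple moves the database from version n-1 to n:

# Sketch of the incremental migration driver implied above (assumed names).
def apply_schemes(conn, schemes, schema_version):
    # Apply schemes[schema_version + 1], schemes[schema_version + 2], ...
    # until no higher version is defined; the last statement of each tuple
    # bumps the version stored in the scheme table.
    while (schema_version + 1) in schemes:
        with conn.cursor() as cur:
            for statement in schemes[schema_version + 1]:
                cur.execute(statement)
        conn.commit()
        schema_version += 1
    return schema_version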


@@ -1,3 +1,8 @@
from hashlib import md5
from lib.request import Request
class DBRevision:
def __init__(self, row):
# need to stay in sync with the schema creation in db.py
@@ -14,14 +19,46 @@ class DBRevision:
self.expanded_srcmd5,
self.request_number,
self.request_id,
self.files_hash,
) = row
self.rev = float(self.rev)
self._files = None
def __str__(self):
-return f"Rev {self.project}/{self.rev} Md5 {self.unexpanded_srcmd5} {self.commit_time} {self.userid} {self.request_number}"
+return f"Rev {self.project}/{self.package}/{self.rev} Md5 {self.unexpanded_srcmd5} {self.commit_time} {self.userid} {self.request_number}"
def __repr__(self):
return f"[{self.__str__()}]"
def __eq__(self, other):
return self.dbid == other.dbid
def __lt__(self, other):
if self.project != other.project:
return self.project < other.project
if self.package != other.package:
return self.package < other.package
return self.rev < other.rev
def as_dict(self, db):
"""Return a dict we can put into YAML for test cases"""
ret = {
"project": self.project,
"package": self.package,
"rev": self.rev,
"unexpanded_srcmd5": self.unexpanded_srcmd5,
"commit_time": self.commit_time,
"userid": self.userid,
"comment": self.comment,
"broken": self.broken,
"expanded_srcmd5": self.expanded_srcmd5,
"files_hash": self.files_hash,
"files": self.files_list(db),
}
if self.request_id:
ret["request"] = Request.find(db, self.request_id).as_dict()
return ret
def links_to(self, db, project, package):
with db.cursor() as cur:
cur.execute(
@@ -56,7 +93,8 @@
(project, package, str(rev)),
)
row = cur.fetchone()
-return DBRevision(row)
+if row:
+return DBRevision(row)
@staticmethod
def latest_revision(db, project, package):
@@ -67,7 +105,7 @@
)
max = cur.fetchone()[0]
if max:
-return DBRevision.fetch_revision(db, project, package, int(max))
+return DBRevision.fetch_revision(db, project, package, max)
return None
@staticmethod
@@ -83,6 +121,8 @@
return ret
def linked_rev(self, db):
if self.broken:
return None
with db.cursor() as cur:
cur.execute(
"SELECT project,package FROM links where revision_id=%s", (self.dbid,)
@@ -98,13 +138,14 @@
revisions = [DBRevision(row) for row in cur.fetchall()]
if revisions:
return revisions[0]
else:
self.set_broken(db)
return None
def set_broken(self, db):
with db.cursor() as cur:
cur.execute("UPDATE revisions SET broken=TRUE where id=%s", (self.dbid,))
def import_dir_list(self, db, xml):
with db.cursor() as cur:
cur.execute(
@@ -124,14 +165,101 @@
),
)
def previous_commit(self, db):
return self.fetch_revision(db, self.project, self.package, int(self.rev) - 1)
def next_commit(self, db):
return self.fetch_revision(db, self.project, self.package, int(self.rev) + 1)
def calculate_files_hash(self, db):
m = md5()
for file_dict in self.files_list(db):
m.update(
(
file_dict["name"]
+ "/"
+ file_dict["md5"]
+ "/"
+ str(file_dict["size"])
).encode("utf-8")
)
return m.hexdigest()
def files_list(self, db):
if self._files:
return self._files
with db.cursor() as cur:
cur.execute("SELECT * from files where revision_id=%s", (self.dbid,))
self._files = []
for row in cur.fetchall():
(_, _, name, md5, size, mtime) = row
self._files.append(
{"md5": md5, "size": size, "mtime": mtime, "name": name}
)
self._files.sort(key=lambda x: x["name"])
return self._files
@staticmethod
def requests_to_fetch(db, project, package):
with db.cursor() as cur:
cur.execute(
-"""SELECT request_number FROM revisions revs left join requests
-reqs on reqs.number=revs.request_number where reqs.id is null and
-revs.request_number is not null and project=%s and package=%s;""",
+"""SELECT request_number FROM revisions revs LEFT JOIN requests
+reqs ON reqs.number=revs.request_number WHERE reqs.id is null AND
+revs.request_number IS NOT NULL and project=%s AND package=%s;""",
(project, package),
)
-ret = [row[0] for row in cur.fetchall()]
-return ret
+return [row[0] for row in cur.fetchall()]
@staticmethod
def import_fixture_dict(db, rev_dict):
"""Used in test cases to read a revision from fixtures into the test database"""
with db.cursor() as cur:
cur.execute(
"""INSERT INTO revisions (project, package, rev, unexpanded_srcmd5, expanded_srcmd5,
commit_time, userid, comment, broken, files_hash)
VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s) RETURNING id""",
(
rev_dict["project"],
rev_dict["package"],
rev_dict["rev"],
rev_dict["unexpanded_srcmd5"],
rev_dict["expanded_srcmd5"],
rev_dict["commit_time"],
rev_dict["userid"],
rev_dict["comment"],
rev_dict["broken"],
rev_dict["files_hash"],
),
)
rev_id = cur.fetchone()[0]
for file_dict in rev_dict["files"]:
cur.execute(
"INSERT INTO files (md5, mtime, name, size, revision_id) VALUES(%s, %s, %s, %s, %s)",
(
file_dict["md5"],
file_dict["mtime"],
file_dict["name"],
file_dict["size"],
rev_id,
),
)
request = rev_dict.get("request")
if request:
cur.execute(
"""INSERT INTO requests (creator, number, source_project, source_package,
source_rev, state, type) VALUES (%s, %s, %s, %s, %s, %s, %s) RETURNING id""",
(
request["creator"],
request["number"],
request.get("source_project"),
request.get("source_package"),
request.get("source_rev"),
request["state"],
request["type"],
),
)
request_id = cur.fetchone()[0]
cur.execute(
"UPDATE revisions SET request_id=%s, request_number=%s WHERE id=%s",
(request_id, request["number"], rev_id),
)
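The new files_hash is what the tree builder compares across projects; a standalone restatement of calculate_files_hash over plain dicts shows that two revisions hash equal exactly when their sorted (name, md5, size) triples match, with mtime ignored:

# Standalone restatement of DBRevision.calculate_files_hash for plain dicts.
from hashlib import md5


def files_hash(files):
    m = md5()
    for f in sorted(files, key=lambda x: x["name"]):
        m.update(f"{f['name']}/{f['md5']}/{f['size']}".encode("utf-8"))
    return m.hexdigest()


# Identical (name, md5, size) lists give identical hashes; mtime is ignored.
a = [{"name": "zsh.spec", "md5": "d41d8cd9", "size": 1234, "mtime": 1}]
b = [{"name": "zsh.spec", "md5": "d41d8cd9", "size": 1234, "mtime": 99}]
assert files_hash(a) == files_hash(b)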

lib/exporter.py (new file, 24 lines)

@@ -0,0 +1,24 @@
import sys
import yaml
from lib.db import DB
from lib.db_revision import DBRevision
class Exporter:
def __init__(self, package):
self.package = package
def run(self):
db = DB()
with db.cursor() as cur:
cur.execute(
"SELECT * from revisions where package=%s ORDER BY project,rev",
(self.package,),
)
data = {"revisions": []}
for row in cur.fetchall():
data["revisions"].append(DBRevision(row).as_dict(db))
yaml.dump(data, sys.stdout, default_flow_style=False)


@@ -1,5 +1,8 @@
import functools
import logging
import xml.etree.ElementTree as ET
import psycopg2
from lib.binary import is_binary_or_large
from lib.db import DB
@@ -9,8 +12,7 @@ from lib.history import History
from lib.obs import OBS
from lib.obs_revision import OBSRevision
from lib.proxy_sha256 import ProxySHA256, md5, sha256
-from lib.request import Request
-import xml.etree.ElementTree as ET
+from lib.tree_builder import TreeBuilder
from lib.user import User
@@ -146,19 +148,134 @@ class Importer:
dbrev.links_to(db, tprj, tpkg)
db.conn.commit()
def find_linked_revs(self, db):
with db.cursor() as cur:
cur.execute(
"""SELECT * from revisions WHERE id in (SELECT l.revision_id FROM links l
LEFT JOIN linked_revs lrevs ON lrevs.revision_id=l.revision_id
WHERE lrevs.id IS NULL) and broken is FALSE;"""
)
for row in cur.fetchall():
rev = DBRevision(row)
linked_rev = rev.linked_rev(db)
if not linked_rev:
logging.debug(f"No link {rev}")
continue
cur.execute(
"""INSERT INTO linked_revs (revision_id, linked_id)
VALUES (%s,%s)""",
(rev.dbid, linked_rev.dbid),
)
def calculate_file_hashes(self, db):
with db.cursor() as cur:
cur.execute(
"SELECT * from revisions where files_hash IS NULL AND broken is FALSE"
)
for row in cur.fetchall():
rev = DBRevision(row)
md5 = rev.calculate_files_hash(db)
cur.execute(
"UPDATE revisions SET files_hash=%s WHERE id=%s", (md5, rev.dbid)
)
def fetch_all_linked_packages(self, db, project, package):
with db.cursor() as cur:
cur.execute(
"""SELECT DISTINCT l.project, l.package from links l JOIN revisions r
on r.id=l.revision_id WHERE r.project=%s AND r.package=%s""",
(project, package),
)
for row in cur.fetchall():
(lproject, lpackage) = row
self.update_db_package(db, lproject, lpackage)
def find_fake_revisions(self, db):
with db.cursor() as cur:
cur.execute(
"""SELECT * from revisions WHERE
id in (SELECT revision_id from linked_revs WHERE considered=FALSE) AND
id not in (SELECT revision_id FROM fake_revs) ORDER by project,package,rev"""
)
for row in cur.fetchall():
self._find_fake_revision(db, DBRevision(row))
def _find_fake_revision(self, db, rev):
prev = rev.previous_commit(db)
if not prev:
with db.cursor() as cur:
cur.execute(
"UPDATE linked_revs SET considered=TRUE where revision_id=%s",
(rev.dbid,),
)
return
with db.cursor() as cur:
cur.execute(
"""SELECT * from revisions where id in
(SELECT revision_id from linked_revs WHERE linked_id=%s)
AND commit_time <= %s ORDER BY commit_time""",
(prev.dbid, rev.commit_time),
)
last_linked = None
for linked in cur.fetchall():
linked = DBRevision(linked)
nextrev = linked.next_commit(db)
if nextrev and nextrev.commit_time < rev.commit_time:
continue
last_linked = linked
cur.execute(
"UPDATE linked_revs SET considered=TRUE where revision_id=%s",
(rev.dbid,),
)
if not last_linked:
return
with db.cursor() as cur:
linked = last_linked
cur.execute(
"SELECT 1 FROM fake_revs where revision_id=%s AND linked_id=%s",
(rev.dbid, linked.dbid),
)
if cur.fetchone():
cur.execute(
"UPDATE linked_revs SET considered=TRUE where revision_id=%s",
(rev.dbid,),
)
return
fake_rev = linked.rev + rev.rev / 1000.0
comment = f"Updating link to change in {rev.project}/{rev.package} revision {rev.rev}"
cur.execute(
"""INSERT INTO revisions (project,package,rev,unexpanded_srcmd5,
commit_time, userid, comment) VALUES(%s,%s,%s,%s,%s,%s,%s) RETURNING id""",
(
linked.project,
linked.package,
fake_rev,
linked.unexpanded_srcmd5,
rev.commit_time,
"buildservice-autocommit",
comment,
),
)
new_id = cur.fetchone()[0]
cur.execute(
"""INSERT INTO linked_revs (revision_id, linked_id) VALUES (%s,%s)""",
(new_id, rev.dbid),
)
cur.execute(
"""INSERT INTO fake_revs (revision_id, linked_id) VALUES (%s,%s)""",
(rev.dbid, linked.dbid),
)
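A worked example of the interpolated numbering above (fake_rev = linked.rev + rev.rev / 1000.0), which is also why schema version 15 converts revisions.rev to a real column:

# Worked example of the fake-revision numbering (illustrative values):
linked_rev = 7.0   # linked.rev
change_rev = 42.0  # rev.rev, the revision that triggered the update
fake_rev = linked_rev + change_rev / 1000.0
print(fake_rev)    # 7.042 -- sorts between revisions 7 and 8 of the linked package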
def import_into_db(self):
db = DB()
for project, _, api_url in self.projects:
self.obs.change_url(api_url)
self.update_db_package(db, project, self.package)
-with db.cursor() as cur:
-cur.execute(
-"SELECT DISTINCT l.project, l.package from links l join revisions r on r.id=l.revision_id WHERE r.project=%s AND r.package=%s",
-(project, self.package),
-)
-for row in cur.fetchall():
-(lproject, lpackage) = row
-self.update_db_package(db, lproject, lpackage)
+self.fetch_all_linked_packages(db, project, self.package)
+# all remaining, no filtering here
+self.find_linked_revs(db)
+self.find_fake_revisions(db)
missing_users = User.missing_users(db)
for userid in missing_users:
@@ -170,9 +287,15 @@ class Importer:
# TODO move into SELECT
if rev.broken or rev.expanded_srcmd5:
continue
-linked_rev = rev.linked_rev(db)
+with db.cursor() as cur:
+cur.execute(
+"""SELECT unexpanded_srcmd5 from revisions WHERE
+id=(SELECT linked_id FROM linked_revs WHERE revision_id=%s)""",
+(rev.dbid,),
+)
+linked_rev = cur.fetchone()
if linked_rev:
-linked_rev = linked_rev.unexpanded_srcmd5
+linked_rev = linked_rev[0]
list = self.obs.list(
project, self.package, rev.unexpanded_srcmd5, linked_rev
)
@@ -181,10 +304,12 @@
else:
rev.set_broken(db)
for number in DBRevision.requests_to_fetch(db, project, self.package):
self.obs.request(number).import_into_db(db)
self.calculate_file_hashes(db)
db.conn.commit()
TreeBuilder(db).build(self.package)
def import_all_revisions(self, gc):
# Fetch all the requests and sort them. Ideally we should


@@ -56,7 +56,7 @@ class OBSRevision:
return self
def __str__(self):
-return f"Rev {self.project}/{self.rev} Md5 {self.srcmd5} {self.time} {self.userid} {self.request_number}"
+return f"Rev {self.project}/{self.package}/{self.rev}.0 Md5 {self.srcmd5} {self.time} {self.userid} {self.request_number}"
def __repr__(self):
return f"[{self.__str__()}]"


@@ -3,6 +3,7 @@ class Request:
self.number = int(xml.get("id"))
self.creator = xml.get("creator")
self.state = xml.find("state").get("name")
# not used in the database
self.target = xml.find("action/target").get("project")
self.type_ = xml.find("action").get("type")
@@ -28,7 +29,7 @@
return self.type_
def __str__(self):
-return f"Req {self.number} {self.creator} {self.type_} {self.source_project}->{self.target} {self.state}"
+return f"Req {self.number} {self.creator} {self.type_} {self.source_project} {self.state}"
def __repr__(self):
return f"[{self.__str__()}]"
@@ -54,3 +55,35 @@
"UPDATE revisions SET request_id=%s WHERE request_number=%s",
(rowid, self.number),
)
@staticmethod
def find(db, request_id):
with db.cursor() as cur:
cur.execute("""SELECT * from requests WHERE id=%s""", (request_id,))
row = cur.fetchone()
ret = Request()
ret._from_db(row)
return ret
def _from_db(self, row):
(
self.dbid,
self.number,
self.creator,
self.type_,
self.state,
self.source_package,
self.source_project,
self.source_rev,
) = row
def as_dict(self):
return {
"number": self.number,
"creator": self.creator,
"type": self.type_,
"state": self.state,
"source_project": self.source_project,
"source_package": self.source_package,
"source_rev": self.source_rev,
}

lib/tree_builder.py (new file, 41 lines)

@@ -0,0 +1,41 @@
from lib.db_revision import DBRevision
from lib.request import Request
class TreeBuilder:
def __init__(self, db):
self.db = db
def filtered_revisions(self, project, package):
revisions = DBRevision.all_revisions(self.db, project, package)
revisions.sort()
ret = []
prev = None
for rev in revisions:
if rev.broken:
continue
if prev and prev.files_hash == rev.files_hash:
continue
ret.append(rev)
prev = rev
return ret
def build(self, package):
factory_revisions = self.filtered_revisions("openSUSE:Factory", package)
source_revisions = dict()
for rev in factory_revisions:
print(rev, rev.files_hash)
if rev.request_id:
req = Request.find(self.db, rev.request_id)
print(" ", req)
key = f"{req.source_project}/{req.source_package}"
if key not in source_revisions:
source_revisions[key] = self.filtered_revisions(
req.source_project, req.source_package
)
for rev2 in source_revisions.get(key):
# this happened after the fact - possibly a revert
if rev2.commit_time > rev.commit_time:
continue
if rev2.files_hash == rev.files_hash:
print(" ", rev2)
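Standalone usage mirroring the call added at the end of Importer.import_into_db and the new test below; for now build() only prints Factory revisions next to devel revisions sharing the same files_hash (the package name is illustrative):

# Build (currently: print) the tree for an already-imported package.
from lib.db import DB
from lib.tree_builder import TreeBuilder

TreeBuilder(DB()).build("zsh")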


@@ -1,4 +1,4 @@
-FAKE_ACCOUNTS = ('unknown', 'buildservice-autocommit', 'autobuild', '_service')
+FAKE_ACCOUNTS = ("unknown", "buildservice-autocommit", "autobuild", "_service")
class User:
@@ -44,7 +44,9 @@ class User:
cur.execute(
"""SELECT DISTINCT revisions.userid
FROM revisions LEFT JOIN users ON revisions.userid = users.userid
-WHERE users.userid IS NULL AND revisions.userid NOT IN {}""".format(FAKE_ACCOUNTS)
+WHERE users.userid IS NULL AND revisions.userid NOT IN {}""".format(
+FAKE_ACCOUNTS
+)
)
missing_users = [row[0] for row in cur.fetchall()]
return missing_users

tests/fixtures/zsh-data.yaml (new vendored file, 31928 lines; diff suppressed because it is too large)

tests/tree_test.py (new file, 25 lines)

@@ -0,0 +1,25 @@
import os
import unittest
import yaml
from lib.db import DB
from lib.db_revision import DBRevision
from lib.tree_builder import TreeBuilder
class TestTreeMethods(unittest.TestCase):
def setUp(self):
self.db = DB(section="test")
path = os.path.join(os.path.dirname(__file__), "fixtures/zsh-data.yaml")
with open(path, "r") as f:
zsh_data = yaml.safe_load(f)
for rev in zsh_data["revisions"]:
DBRevision.import_fixture_dict(self.db, rev)
def test_create_tree(self):
TreeBuilder(self.db).build("zsh")
if __name__ == "__main__":
unittest.main()
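The zsh fixture consumed here is the YAML emitted by the new Exporter; a sketch for regenerating it from a populated database:

# Regenerate tests/fixtures/zsh-data.yaml from a populated database (sketch).
import contextlib

from lib.exporter import Exporter

with open("tests/fixtures/zsh-data.yaml", "w") as f, contextlib.redirect_stdout(f):
    Exporter("zsh").run()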