forked from importers/git-importer
Allow importing multiple packages in one go
This way we avoid duplicating all startup and SQL queries.
This commit is contained in:
parent
651bd94771
commit
ab38332642
@@ -44,11 +44,12 @@ PROJECTS = [

 def main():
     parser = argparse.ArgumentParser(description="OBS history importer into git")
-    parser.add_argument("package", help="OBS package name")
+    parser.add_argument("packages", help="OBS package names", nargs="*")
     parser.add_argument(
         "-r",
         "--repodir",
         required=False,
+        default=pathlib.Path("repos"),
         type=pathlib.Path,
         help="Local git repository directory",
     )
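For context, a minimal sketch of what the new argument definition yields when several packages are passed in one invocation (the package names below are purely illustrative):

    import argparse
    import pathlib

    # Rough sketch: "packages" is now a list of zero or more names instead of a
    # single required positional argument, and --repodir gains a default.
    parser = argparse.ArgumentParser(description="OBS history importer into git")
    parser.add_argument("packages", help="OBS package names", nargs="*")
    parser.add_argument(
        "-r",
        "--repodir",
        required=False,
        default=pathlib.Path("repos"),
        type=pathlib.Path,
        help="Local git repository directory",
    )

    args = parser.parse_args(["aaa_base", "bash"])  # hypothetical invocation
    print(args.packages)  # ['aaa_base', 'bash']
    print(args.repodir)   # repos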
@@ -94,22 +95,23 @@ def main():
         requests_log.propagate = True

     if args.export:
-        TestExporter(args.package).run()
+        if len(args.packages) != 0:
+            print("Can only export one package")
+            sys.exit(1)
+        TestExporter(args.packages[0]).run()
         return

-    if not args.repodir:
-        args.repodir = pathlib.Path("repos") / args.package
-
     if not args.cachedir:
         args.cachedir = pathlib.Path("~/.cache/git-import/").expanduser()

-    importer = Importer(URL_OBS, "openSUSE:Factory", args.package)
+    importer = Importer(URL_OBS, "openSUSE:Factory", args.packages)
     importer.import_into_db()
-    exporter = GitExporter(
-        URL_OBS, "openSUSE:Factory", args.package, args.repodir, args.cachedir
-    )
-    exporter.set_gc_interval(args.gc)
-    exporter.export_as_git()
+    for package in args.packages:
+        exporter = GitExporter(
+            URL_OBS, "openSUSE:Factory", package, args.repodir, args.cachedir
+        )
+        exporter.set_gc_interval(args.gc)
+        exporter.export_as_git()


 if __name__ == "__main__":
lib/git.py (65 changed lines)
@@ -109,69 +109,6 @@ class Git:
             "HEAD", author, committer, message, tree, parents
         )

-    def merge(
-        self,
-        user,
-        user_email,
-        user_time,
-        message,
-        commit,
-        committer=None,
-        committer_email=None,
-        committer_time=None,
-        clean_on_conflict=True,
-        merged=False,
-        allow_empty=False,
-    ):
-        new_branch = False
-
-        if not merged:
-            try:
-                self.repo.merge(commit)
-            except KeyError:
-                # If it is the first commit, we will have a missing
-                # "HEAD", but the files will be there. We can proceed
-                # to the commit directly.
-                new_branch = True
-
-        if not merged and self.repo.index.conflicts:
-            for conflict in self.repo.index.conflicts:
-                conflict = [c for c in conflict if c]
-                if conflict:
-                    logging.info(f"CONFLICT {conflict[0].path}")
-
-            if clean_on_conflict:
-                self.clean()
-            # Now I miss Rust enums
-            return "CONFLICT"
-
-        # Some merges are empty in OBS (no changes, not sure
-        # why), for now we signal them
-        if not allow_empty and not self.is_dirty():
-            # I really really do miss Rust enums
-            return "EMPTY"
-
-        if new_branch:
-            parents = [commit]
-        else:
-            parents = [
-                self.repo.head.target,
-                commit,
-            ]
-        commit = self.commit(
-            user,
-            user_email,
-            user_time,
-            message,
-            parents,
-            committer,
-            committer_email,
-            committer_time,
-            allow_empty=allow_empty,
-        )
-
-        return commit
-
     def merge_abort(self):
         self.repo.state_cleanup()

@@ -188,7 +125,7 @@ class Git:
         self.repo.references["refs/heads/" + branch].set_target(commit)

     def gc(self):
-        logging.info(f"Garbage recollect and repackage {self.path}")
+        logging.debug(f"Garbage recollect and repackage {self.path}")
         subprocess.run(
             ["git", "gc", "--auto"],
             cwd=self.path,
@@ -20,7 +20,7 @@ class GitExporter:
         self.obs.change_url(api_url)
         self.proxy_sha256 = ProxySHA256(self.obs, enabled=True)
         self.git = Git(
-            repodir,
+            repodir / package,
            committer="Git OBS Bridge",
            committer_email="obsbridge@suse.de",
        ).create()
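With --repodir now pointing at a parent directory, each GitExporter derives its own repository path via pathlib's / operator. A small sketch of that behaviour, using an illustrative package name:

    import pathlib

    repodir = pathlib.Path("repos")  # the new default for --repodir
    package = "aaa_base"             # illustrative package name

    # pathlib.Path overloads "/" to join path components, so every package
    # gets its own subdirectory under the shared repos directory.
    print(repodir / package)  # repos/aaa_base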
@@ -9,9 +9,9 @@ from lib.user import User


 class Importer:
-    def __init__(self, api_url, project, package):
-        # Import a Factory package into the database
-        self.package = package
+    def __init__(self, api_url, project, packages):
+        # Import multiple Factory packages into the database
+        self.packages = packages
         self.project = project

         self.obs = OBS()
@@ -191,23 +191,30 @@ class Importer:
     def import_into_db(self):
         db = DB()

-        self.refresh_package(db, self.project, self.package)
+        for package in self.packages:
+            self.refresh_package(db, self.project, package)
+        db.conn.commit()

         for number in DBRevision.requests_to_fetch(db):
             self.obs.request(number).import_into_db(db)
+        db.conn.commit()

         with db.cursor() as cur:
             cur.execute(
                 """SELECT DISTINCT source_project,source_package FROM requests
-                WHERE id IN (SELECT request_id FROM revisions WHERE project=%s and package=%s);""",
-                (self.project, self.package),
+                WHERE id IN (SELECT request_id FROM revisions WHERE project=%s and package = ANY(%s));""",
+                (self.project, self.packages),
             )
             for project, package in cur.fetchall():
                 self.refresh_package(db, project, package)
+        db.conn.commit()

         missing_users = User.missing_users(db)
         for userid in missing_users:
             missing_user = self.obs.user(userid)
             if missing_user:
                 missing_user.import_into_db(db)
+        db.conn.commit()

         self.fill_file_lists(db)
         db.conn.commit()
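The query now uses package = ANY(%s) so a single statement covers every imported package. Assuming the DB wrapper sits on top of a psycopg2-style PostgreSQL cursor (which adapts Python lists to SQL arrays), the pattern looks roughly like this; connection details and package names are illustrative:

    import psycopg2

    conn = psycopg2.connect("dbname=git_importer")  # illustrative DSN
    packages = ["aaa_base", "bash"]                 # illustrative package names

    with conn.cursor() as cur:
        # psycopg2 adapts a Python list to a PostgreSQL array, so ANY(%s)
        # matches any of the given package names in a single query.
        cur.execute(
            "SELECT DISTINCT source_project, source_package FROM requests"
            " WHERE id IN (SELECT request_id FROM revisions"
            "              WHERE project = %s AND package = ANY(%s));",
            ("openSUSE:Factory", packages),
        )
        rows = cur.fetchall()
    conn.close()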